diff --git a/.gitignore b/.gitignore index a373ee5..a3a0323 100644 --- a/.gitignore +++ b/.gitignore @@ -19,6 +19,15 @@ dist/ .DS_Store Thumbs.db +# Local data +.cw/ + +# Test workspaces +workdir/ + +# Agent working directories +agent-workdirs/ + # Logs *.log npm-debug.log* diff --git a/.planning/phases/15-frontend-wireframes/15-01-SUMMARY.md b/.planning/phases/15-frontend-wireframes/15-01-SUMMARY.md new file mode 100644 index 0000000..1be8f3e --- /dev/null +++ b/.planning/phases/15-frontend-wireframes/15-01-SUMMARY.md @@ -0,0 +1,101 @@ +--- +phase: 15-frontend-wireframes +plan: 01 +subsystem: ui +tags: [wireframes, documentation, ascii, dashboard, initiative] + +# Dependency graph +requires: + - phase: 11-architect-modes + provides: Architect discuss/breakdown modes for spawn actions + - phase: 12-decompose + provides: Initiative/phase data model +provides: + - Initiative dashboard UI specification + - Component contracts (InitiativeCard, ProgressBar, StatusBadge, ActionMenu) + - Interaction patterns for initiative management +affects: [16-frontend-implementation, ui-components, initiative-views] + +# Tech tracking +tech-stack: + added: [] + patterns: + - ASCII wireframe documentation for UI specification + - Component props contracts before implementation + +key-files: + created: + - docs/wireframes/initiative-dashboard.md + modified: [] + +key-decisions: + - "12-character progress bar width for monospace rendering" + - "Spawn Architect dropdown with discuss/breakdown modes matching CLI" + - "Status badge color mapping for all 6 initiative statuses" + +patterns-established: + - "Wireframe format: ASCII art + component specs + interaction notes" + - "Action menus use [...] 
trigger pattern" + +# Metrics +duration: 1min +completed: 2026-02-02 +--- + +# Phase 15 Plan 01: Initiative Dashboard Wireframe Summary + +**ASCII wireframe documenting the primary entry point UI with initiative list, status badges, progress bars, and quick actions including Spawn Architect dropdown.** + +## Performance + +- **Duration:** 1 min +- **Started:** 2026-02-02T13:56:43Z +- **Completed:** 2026-02-02T13:57:46Z +- **Tasks:** 1 +- **Files modified:** 1 + +## Accomplishments + +- Created comprehensive Initiative Dashboard wireframe with multiple states (populated, empty) +- Defined 5 component specifications with props and behavior contracts +- Documented all interaction flows including navigation, architect spawning, and filtering +- Established wireframe format pattern for subsequent UI documentation + +## Task Commits + +Each task was committed atomically: + +1. **Task 1: Create wireframes directory and initiative dashboard wireframe** - `31bb8c7` (docs) + +## Files Created/Modified + +- `docs/wireframes/initiative-dashboard.md` - Initiative Dashboard wireframe with ASCII art, component specs, interactions + +## Decisions Made + +- 12-character progress bar width chosen for consistent monospace rendering across terminals +- Spawn Architect dropdown mirrors CLI modes (discuss, breakdown) for consistency +- Status badge colors follow common UI patterns (gray=draft, blue=active, green=complete, red=rejected) +- Filter dropdown provides all 6 status options plus "All" default + +## Deviations from Plan + +None - plan executed exactly as written. + +## Issues Encountered + +None. + +## User Setup Required + +None - no external service configuration required. 
+ +## Next Phase Readiness + +- Wireframe format established for remaining screens +- Component contracts ready for implementation reference +- Ready for 15-02 (Initiative Detail wireframe) + +--- +*Phase: 15-frontend-wireframes* +*Completed: 2026-02-02* diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 0000000..f104b0b --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,54 @@ +# Codewalk District + +Multi-agent workspace for orchestrating multiple Claude Code agents. + +## Database + +Schema is defined in `src/db/schema.ts` using drizzle-orm. Migrations are managed by drizzle-kit. + +See [docs/database-migrations.md](docs/database-migrations.md) for the full migration workflow, rules, and commands. + +Key rule: **never use raw SQL for schema initialization.** Always use `drizzle-kit generate` and the migration system. + +## Logging + +Structured logging via pino. See [docs/logging.md](docs/logging.md) for full details. + +Key rule: use `createModuleLogger()` from `src/logger/index.ts` for backend logging. Keep `console.log` for CLI user-facing output only. + +## Build + +After completing any change to server-side code (`src/**`), rebuild and re-link the `cw` binary: + +```sh +npm run build && npm link +``` + +## Testing + +### Unit Tests + +```sh +npm test +``` + +### E2E Tests (Real CLI) + +Real provider integration tests call actual CLI tools and incur API costs. They are **skipped by default**. 
+ +```sh +# Claude tests (~$0.50, ~3 min) +REAL_CLAUDE_TESTS=1 npm test -- src/test/integration/real-providers/ --test-timeout=300000 + +# Codex tests only +REAL_CODEX_TESTS=1 npm test -- src/test/integration/real-providers/codex-manager.test.ts --test-timeout=300000 + +# Both providers +REAL_CLAUDE_TESTS=1 REAL_CODEX_TESTS=1 npm test -- src/test/integration/real-providers/ --test-timeout=300000 +``` + +Test files in `src/test/integration/real-providers/`: +- `claude-manager.test.ts` - Spawn, output parsing, session resume +- `schema-retry.test.ts` - Schema validation, JSON extraction, retry logic +- `crash-recovery.test.ts` - Server restart simulation +- `codex-manager.test.ts` - Codex provider tests diff --git a/README.md b/README.md new file mode 100644 index 0000000..68935f5 --- /dev/null +++ b/README.md @@ -0,0 +1,311 @@ +# Codewalk District + +# Project concept + +Codewalk district is a multi-agent workspace inspired by gastown. It works differently in the following ways: +* Subagents (e.g. Workers) that handle tasks run in -p mode and respond with a clear json schema +* One cw (codewalk) web server is running that is also managing the agents +* There shall be a clear post worktree setup hook that by default copies files (e.g. .env files) prepared inside a dedicated folder in the Project +* It shall support multiple claude code accounts (see ccswitch) and switch them as they run into usage limits +* It shall have a web dashboard at some point in the project +* The project shall start with a file based UI. That is a folder structure representing the data of the project refreshed when saving (fs events) and updated when db data changes (use events as trigger). The fsui shall be started with `cw fsui` which instantiates a bidirectional watcher that subscribes to the events via a websocket +* It shall support integration branches that Workers clone their work from and integrate branches into +* It shall base all its larger development work on initiatives.
Initiatives describe a larger amount of work. The user's concept must follow a formal planning process in which the work is verified for integration into the existing codebase and a sophisticated technical concept is created. An initiative is only started once approved by a developer. Analysis work is performed by Architects. +* The project shall use a global SQLite DB which also manages tasks +* It shall have a cli (the cli shall also be the server application; it works as a plain cli when not run with --server). The cli shall be called "cw" +* The communication from and between agents shall happen using an STDIO based mcp that is also implemented in the main binary. e.g. cw mcp + + +--- + +# Implementation considerations + +* Typescript as a programming language +* tRPC as an API layer +* React with shadcn & tanstack router for the frontend running with vite. Tiptap for markdown editor UIs +* Simple deployment (one web server serving front and backend in deployed mode - in dev the frontend may use a dev server for hot reloads). The app shall just be startable by installing the cli and then running it with --server. No more setup needed. The local frontend dev server shall be proxied through the backend in the same path as the compiled frontend would be served in production mode +* SQLite as a database +* Future support for multi user management (for now only one user; implement a stub) +* Hexagonal architecture +* Built as a modular monolith with clear separation between modules incl. event bus (can be process internal with swappable adapter for the future) + +--- + +# Modules + +## Tasks + +Beads-inspired task management for agent coordination. Centralized SQLite storage (not Git-distributed like beads).
+ +Key features: +* **Status workflow**: `open` → `in_progress` → `blocked` | `closed` +* **Priority system**: P0 (critical) through P3 (low) +* **Dependency graph**: Tasks block other tasks; `ready` query finds actionable work +* **Assignment tracking**: Prevents multiple agents claiming same task +* **Audit history**: All state changes logged for debugging + +CLI mirrors beads: `cw task ready`, `cw task create`, `cw task close`, etc. + +See [docs/tasks.md](docs/tasks.md) for schema and CLI reference. + +## Initiatives + +Notion-like document hierarchy for planning larger features. SQLite-backed with parent-child relationships for structured queries (e.g., "all subpages of initiative X", "inventory of all documents"). + +Key features: +* **Lifecycle**: `draft` → `review` → `approved` → `in_progress` → `completed` +* **Nested pages**: User journeys, business rules, technical concepts, architectural changes +* **Phased work plans**: Approved initiatives generate tasks grouped into phases +* **Rolling approval**: User approves phase plans one-by-one; agents execute approved phases while subsequent phases are reviewed + +Workflow: User drafts → Architect iterates (GSD-style questioning) → Approval or draft extension and further iterations with the Architect → Tasks created with `initiative_id` + `phase` → Execute + +See [docs/initiatives.md](docs/initiatives.md) for schema and workflow details. + +## Domain Layer + +DDD-based documentation of the **as-is state** for agent and human consumption. Initiatives reference and modify domain concepts; completed initiatives update the domain layer to reflect the new state. + +**Scope**: Per-project domains or cross-project domains (features spanning multiple projects). 
+ +**Core concepts tracked:** +* **Bounded Contexts** — scope boundaries defining where a domain model applies +* **Aggregates** — consistency boundaries, what changes together +* **Domain Events** — events exposed by the project that trigger workflows or side effects +* **Business Rules & Invariants** — constraints that must always hold; agents must preserve these +* **Ubiquitous Language** — glossary of domain terms to prevent agent misinterpretation +* **Context Maps** — relationships between bounded contexts (especially for cross-project domains) +* **External Integrations** — systems the domain interacts with but doesn't own + +**Codebase mapping**: Each concept links to folder/module paths. Auto-maintained by agents after implementation work. + +**Storage**: Dual adapter support — SQLite tables (structured queries) or Markdown with YAML frontmatter (human-readable, version-controllable). + +## Orchestrator + +Main orchestrator loop handling coordination across agents. Can be split per project or initiative for load balancing in the future. + +## Session State + +Tracks execution state across agent restarts. Unlike Domain Layer (codebase state), session state tracks position, decisions, and blockers. + +**STATE.md** maintains: +* Current position (phase, plan, task, wave) +* Decisions made (locked choices with reasoning) +* Active blockers (what's waiting, workarounds) +* Session history (who worked on what, when) + +See [docs/session-state.md](docs/session-state.md) for session state management. + +--- + +# Model Profiles + +Different agent roles have different needs. Model selection balances quality, cost, and latency. 
+ +| Profile | Use Case | Cost | Quality | +|---------|----------|------|---------| +| **quality** | Critical decisions, architecture | Highest | Best | +| **balanced** | Default for most work | Medium | Good | +| **budget** | High-volume, low-risk tasks | Lowest | Acceptable | + +| Agent | Quality | Balanced (Default) | Budget | +|-------|---------|-------------------|--------| +| Architect | Opus | Opus | Sonnet | +| Worker | Opus | Sonnet | Sonnet | +| Verifier | Sonnet | Sonnet | Haiku | +| Orchestrator | Sonnet | Sonnet | Haiku | + +See [docs/model-profiles.md](docs/model-profiles.md) for model selection strategy. + +--- + +# Notes + +The "reference" folder contains the implementation of Gastown, get-shit-done and ccswitch (a cli tool to use multiple claude code accounts). + +--- + +# Core Principles + +## Task Decomposition +Breaking large goals into detailed instructions for agents. Supported by Tasks, Jobs, Workflows, and Pipelines. Ensures work is decomposed into trackable, atomic units that agents can execute autonomously. + +See [docs/task-granularity.md](docs/task-granularity.md) for task specification standards. + +## Pull Model +"If there is work in your Queue, YOU MUST RUN IT." This principle ensures agents autonomously proceed with available work without waiting for external input. The heartbeat of autonomous operation. + +## Eventual Completion +The overarching goal ensuring useful outcomes through orchestration of potentially unreliable processes. Persistent Tasks and oversight agents (Monitor, Supervisor) guarantee eventual workflow completion even when individual operations may fail or produce varying results. + +## Context Engineering +Agent output quality degrades predictably as context fills. 
This is a first-class concern: +* **0-30% context**: Peak quality (thorough, comprehensive) +* **30-50% context**: Good quality (solid work) +* **50-70% context**: Degrading (shortcuts appear) +* **70%+ context**: Poor quality (rushed, minimal) + +**Rule: Stay UNDER 50% context.** Plans sized to fit ~50%. Workers get fresh context per task. Orchestrator stays at 30-40% with heavy work in subagent contexts. + +See [docs/context-engineering.md](docs/context-engineering.md) for context management rules. + +## Goal-Backward Verification +Task completion ≠ Goal achievement. Verification confirms observable outcomes, not checkbox completion. Each phase ends with goal-backward verification checking observable truths, required artifacts, and required wiring. + +See [docs/verification.md](docs/verification.md) for verification patterns. + +## Deviation Rules +Workers encounter unexpected issues during execution. Four rules govern autonomous action: +* **Rule 1**: Auto-fix bugs (no permission needed) +* **Rule 2**: Auto-add missing critical functionality (no permission needed) +* **Rule 3**: Auto-fix blocking issues (no permission needed) +* **Rule 4**: ASK about architectural changes (permission required) + +See [docs/deviation-rules.md](docs/deviation-rules.md) for detailed guidance. + +--- + +# Environments + +## Workspace +The shared environment where all users operate. The Workspace coordinates all agents across multiple Projects and houses workspace-level agents like Orchestrator and Supervisor. It defines the boundaries, infrastructure, and rules of interaction between agents, projects, and resources. + +## Project +A self-contained repository under Workspace management. Each Project has its own Workers, Integrator, Monitor, and Team members. Projects define goals, constraints, and context for users working on a specific problem or domain. This is where actual development work happens. + +--- + +# Workspace-Level Roles + +## Codewalker +A human operator. 
Users are the primary inhabitants of the Workspace. They control the system and make final decisions. + +## Orchestrator +The coordinating authority of the Workspace. Responsible for initiating Jobs, coordinating work distribution, and notifying users of important events. The Orchestrator operates from the workspace level and has visibility across all Projects. + +## Supervisor +Daemon process running continuous health check cycles. The Supervisor ensures agent activity, monitors system health, and triggers recovery when agents become unresponsive. + +## Helpers +The Supervisor's pool of maintenance agents handling background tasks like cleanup, health checks, and system maintenance. + +## Watchdog +A special Helper that checks the Supervisor periodically, ensuring the monitor itself is still running. Creates a chain of accountability. + +--- + +# Project-Level Roles + +## Worker +An ephemeral agent optimized for execution. Workers are spawned for specific tasks, perform focused work such as coding, analysis, or integration. They work in isolated git worktrees to avoid conflicts, produce Merge Requests, and are cleaned up after completion. + +Workers follow deviation rules and create atomic commits per task. See [docs/agents/worker.md](docs/agents/worker.md) for the full agent prompt. + +## Integrator +Manages the Merge Queue for a Project. The Integrator handles merging changes from Workers, resolving conflicts, and ensuring code quality before changes reach the main branch. + +## Monitor +Observes execution and lifecycle events within a Project. Monitors detect failures, enforce limits, oversee Workers and the Integrator, and ensure system health. Can trigger recovery actions when needed. + +## Team +Long-lived, named agents for persistent collaboration. Unlike ephemeral Workers, Team members maintain context across sessions and are ideal for ongoing work relationships and complex multi-session tasks. + +## Architect +Analysis agent for initiative planning. 
Architects iterate on initiative drafts with the user through structured questioning. They validate integration with existing codebase, refine technical concepts, and produce work plans broken into phases. Architects don't execute—they plan. + +See [docs/agents/architect.md](docs/agents/architect.md) for the full agent prompt and workflow. + +## Verifier +Validation agent that confirms goals are achieved, not just tasks completed. Verifiers run goal-backward verification after phase execution, checking observable truths, required artifacts, and required wiring. They identify gaps and create remediation tasks when needed. + +Key responsibilities: +* **Goal-backward verification** — Check outcomes, not activities +* **Three-level checks** — Existence, substance, wiring +* **Anti-pattern scanning** — TODOs, stubs, empty returns +* **User acceptance testing** — Walk users through deliverables +* **Remediation** — Create targeted fix tasks when gaps found + +See [docs/agents/verifier.md](docs/agents/verifier.md) for the full agent prompt and verification patterns. + +--- + +# Work Units + +## Task +The atomic unit of work. SQLite-backed work item with dependency tracking. Tasks link actions, state changes, and artifacts across the Workspace with precision and traceability. They can represent issues, tickets, jobs, or any trackable work item. + +## Template +A reusable workflow definition. TOML-based source file describing how tasks are structured, sequenced, and executed across agents. Templates define patterns for common operations like health checks, code review, or deployment. + +## Schema +A template class for instantiating Pipelines. Schemas define the structure and steps of a workflow without being tied to specific work items. + +## Pipeline +Durable chained Task workflows. Pipelines represent multi-step processes where each step is tracked as a Task. They survive agent restarts and ensure complex workflows complete. 
+ +## Ephemeral +Temporary Tasks destroyed after runs. Ephemerals are lightweight work items used for transient operations that don't need permanent tracking. + +## Queue +A pinned Task list for each agent. The Queue is an agent's primary work source - when work appears in your Queue, the Pull Model dictates you must run it. + +--- + +# Workflow Commands + +## Job +A coordinated group of tasks executed together. The primary work-order wrapping related Tasks. Jobs allow related work to be dispatched, tracked, and completed as a single operational unit. + +## Assign +The act of putting work on an agent's Queue. Assign translates intent into action, sending Workers or Team members into motion. + +## Notify +Real-time messaging between agents. Allows immediate communication without going through formal channels. Quick pings and status updates. + +## Handoff +Agent session refresh. When context gets full or an agent needs a fresh start, Handoff transfers work state to a new session while preserving critical context. + +## Replay +Querying previous sessions for context. Replay allows agents to access their predecessors' decisions and context from earlier work. + +## Poll +Ephemeral loop maintaining system heartbeat. Poll cycles (Supervisor, Monitor) continuously run health checks and trigger actions as needed. + +--- + +# Storage & Memory + +## Context Store +A persistent store of memory, context, and knowledge. Preserves state across executions, enabling agents to remember decisions, history, and learned insights. + +## Audit Log +The authoritative record of system state and history. Ensures reproducibility, auditing, and continuity across operations. + +## Sandbox +A personal workspace for an agent. Contains tools, local context, and temporary state used during active reasoning and execution. + +## Config +The configuration and rule set governing a Project or the Workspace. Defines behavior, permissions, and operational constraints. 
+ +--- + +# Documentation Index + +## Modules +* [docs/tasks.md](docs/tasks.md) — Task schema, CLI, and workflows +* [docs/initiatives.md](docs/initiatives.md) — Initiative lifecycle and phase management + +## Operational Concepts +* [docs/context-engineering.md](docs/context-engineering.md) — Context budget rules and quality curve +* [docs/verification.md](docs/verification.md) — Goal-backward verification patterns +* [docs/deviation-rules.md](docs/deviation-rules.md) — How agents handle unexpected work +* [docs/task-granularity.md](docs/task-granularity.md) — Task specification standards +* [docs/session-state.md](docs/session-state.md) — Session continuity and handoffs +* [docs/execution-artifacts.md](docs/execution-artifacts.md) — PLAN, SUMMARY, VERIFICATION files +* [docs/model-profiles.md](docs/model-profiles.md) — Model selection by role + +## Agent Prompts +* [docs/agents/architect.md](docs/agents/architect.md) — Planning and decomposition +* [docs/agents/worker.md](docs/agents/worker.md) — Task execution +* [docs/agents/verifier.md](docs/agents/verifier.md) — Goal-backward verification diff --git a/docs/agents/architect.md b/docs/agents/architect.md new file mode 100644 index 0000000..8d83e6f --- /dev/null +++ b/docs/agents/architect.md @@ -0,0 +1,333 @@ +# Architect Agent + +The Architect transforms user intent into executable work plans. Architects don't execute—they plan. + +## Role Summary + +| Aspect | Value | +|--------|-------| +| **Purpose** | Transform initiatives into phased, executable work plans | +| **Model** | Opus (quality/balanced), Sonnet (budget) | +| **Context Budget** | 60% per initiative | +| **Output** | CONTEXT.md, PLAN.md files, phase structure | +| **Does NOT** | Write production code, execute tasks | + +--- + +## Agent Prompt + +``` +You are an Architect agent in the Codewalk multi-agent system. + +Your role is to analyze initiatives and create detailed, executable work plans. You do NOT execute code—you plan it. 
+ +## Your Responsibilities + +1. DISCUSS: Capture implementation decisions before planning +2. RESEARCH: Investigate unknowns in the domain or codebase +3. PLAN: Decompose phases into atomic, executable tasks +4. VALIDATE: Ensure plans achieve phase goals + +## Context Loading + +Always load these files at session start: +- PROJECT.md (if exists): Project overview and constraints +- REQUIREMENTS.md (if exists): Scoped requirements +- ROADMAP.md (if exists): Phase structure +- Domain layer documents: Current architecture + +## Discussion Phase + +Before planning, capture implementation decisions through structured questioning. + +### Question Categories + +**Visual Features:** +- What layout approach? (grid, flex, custom) +- What density? (compact, comfortable, spacious) +- What interactions? (hover, click, drag) +- What empty states? + +**APIs/CLIs:** +- What response format? +- What flags/options? +- What error handling? +- What verbosity levels? + +**Data/Content:** +- What structure? +- What validation rules? +- What edge cases? + +**Architecture:** +- What patterns to follow? +- What to avoid? +- What existing code to reference? 
+ +### Discussion Output + +Create {phase}-CONTEXT.md with locked decisions: + +```yaml +--- +phase: 1 +discussed_at: 2024-01-15 +--- + +# Phase 1 Context: User Authentication + +## Decisions + +### Authentication Method +**Decision:** Email/password with optional OAuth +**Reason:** MVP needs simple auth, OAuth for convenience +**Locked:** true + +### Token Storage +**Decision:** httpOnly cookies +**Reason:** XSS protection +**Alternatives Rejected:** +- localStorage: XSS vulnerable +- sessionStorage: Doesn't persist + +### Session Duration +**Decision:** 15min access, 7day refresh +**Reason:** Balance security and UX +``` + +## Research Phase + +Investigate before planning when needed: + +### Discovery Levels + +| Level | When | Time | Scope | +|-------|------|------|-------| +| L0 | Pure internal work | Skip | None | +| L1 | Quick verification | 2-5 min | Confirm assumptions | +| L2 | Standard research | 15-30 min | Explore patterns | +| L3 | Deep dive | 1+ hour | Novel domain | + +### Research Output + +Create {phase}-RESEARCH.md if research conducted. + +## Planning Phase + +### Dependency-First Decomposition + +Think dependencies before sequence: +1. What must exist before this can work? +2. What does this create that others need? +3. What can run in parallel? + +### Wave Assignment + +Compute waves mathematically: +- Wave 0: No dependencies +- Wave 1: Depends only on Wave 0 +- Wave N: All dependencies in prior waves + +### Plan Sizing Rules + +| Metric | Target | +|--------|--------| +| Tasks per plan | 2-3 maximum | +| Context per plan | ~50% | +| Time per task | 15-60 minutes execution | + +### Must-Have Derivation + +For each phase goal, derive: +1. **Observable truths** (3-7): What can users observe? +2. **Required artifacts**: What files must exist? +3. **Required wiring**: What connections must work? +4. **Key links**: Where do stubs hide? 
+ +### Task Specification + +Each task MUST include: +- **files:** Exact paths modified/created +- **action:** What to do, what to avoid, WHY +- **verify:** Command or check to prove completion +- **done:** Measurable acceptance criteria + +See docs/task-granularity.md for examples. + +### TDD Detection + +Ask: Can you write `expect(fn(input)).toBe(output)` BEFORE implementation? +- Yes → Create TDD plan (type: tdd) +- No → Standard plan (type: execute) + +## Plan Output + +Create {phase}-{N}-PLAN.md: + +```yaml +--- +phase: 1 +plan: 1 +type: execute +wave: 0 +depends_on: [] +files_modified: + - db/migrations/001_users.sql + - src/db/schema/users.ts +autonomous: true +must_haves: + observable_truths: + - "User record exists after signup" + required_artifacts: + - db/migrations/001_users.sql + required_wiring: + - "Drizzle schema matches SQL" +user_setup: [] +--- + +# Phase 1, Plan 1: User Database Schema + +## Objective +Create the users table and ORM schema. + +## Context +@file: PROJECT.md +@file: 1-CONTEXT.md + +## Tasks + +### Task 1: Create users migration +- **type:** auto +- **files:** db/migrations/001_users.sql +- **action:** | + Create table: + - id TEXT PRIMARY KEY (uuid) + - email TEXT UNIQUE NOT NULL + - password_hash TEXT NOT NULL + - created_at INTEGER DEFAULT unixepoch() + - updated_at INTEGER DEFAULT unixepoch() + + Index on email. +- **verify:** `cw db migrate` succeeds +- **done:** Migration applies without error + +### Task 2: Create Drizzle schema +- **type:** auto +- **files:** src/db/schema/users.ts +- **action:** Create Drizzle schema matching SQL. Export users table. +- **verify:** TypeScript compiles +- **done:** Schema exports users table + +## Verification Criteria +- [ ] Migration creates users table +- [ ] Drizzle schema matches SQL structure +- [ ] TypeScript compiles without errors + +## Success Criteria +Users table ready for auth implementation. +``` + +## Validation + +Before finalizing plans: +1. 
Check all files_modified are realistic +2. Check dependencies form valid DAG +3. Check tasks meet granularity standards +4. Check must_haves are verifiable +5. Check context budget (~50% per plan) + +## What You Do NOT Do + +- Write production code +- Execute tasks +- Make decisions without user input on Rule 4 items +- Create plans that exceed context budget +- Skip discussion phase for complex work + +## Error Handling + +If blocked: +1. Document blocker in STATE.md +2. Create plan for unblocked work +3. Mark blocked tasks as pending blocker resolution +4. Notify orchestrator of blocker + +If unsure: +1. Ask user via checkpoint:decision +2. Document decision in CONTEXT.md +3. Continue planning + +## Session End + +Before ending session: +1. Update STATE.md with position +2. Commit all artifacts +3. Document any open questions +4. Set next_action for resume +``` + +--- + +## Integration Points + +### With Initiatives Module +- Receives initiatives in `review` status +- Creates pages for discussion outcomes +- Generates phases from work plans + +### With Orchestrator +- Receives planning requests +- Returns completed plans +- Escalates blockers + +### With Workers +- Workers consume PLAN.md files +- Architect receives SUMMARY.md feedback for learning + +### With Domain Layer +- Reads current architecture +- Plans respect existing patterns +- Flags architectural changes (Rule 4) + +--- + +## Spawning + +Orchestrator spawns Architect: + +```typescript +const architectResult = await spawnAgent({ + type: 'architect', + task: 'plan-phase', + context: { + initiative_id: 'init-abc123', + phase: 1, + files: ['PROJECT.md', 'REQUIREMENTS.md', 'ROADMAP.md'] + }, + model: getModelForProfile('architect', config.modelProfile) +}); +``` + +--- + +## Example Session + +``` +1. Load initiative context +2. Read existing domain documents +3. If no CONTEXT.md for phase: + - Run discussion phase + - Ask questions, capture decisions + - Create CONTEXT.md +4. 
If research needed (L1-L3): + - Investigate unknowns + - Create RESEARCH.md +5. Decompose phase into plans: + - Build dependency graph + - Assign waves + - Size plans to 50% context + - Specify tasks with full detail +6. Create PLAN.md files +7. Update STATE.md +8. Return to orchestrator +``` diff --git a/docs/agents/verifier.md b/docs/agents/verifier.md new file mode 100644 index 0000000..2d00345 --- /dev/null +++ b/docs/agents/verifier.md @@ -0,0 +1,377 @@ +# Verifier Agent + +The Verifier confirms that goals are achieved, not merely that tasks were completed. It bridges the gap between execution and outcomes. + +## Role Summary + +| Aspect | Value | +|--------|-------| +| **Purpose** | Goal-backward verification of phase outcomes | +| **Model** | Sonnet (quality/balanced), Haiku (budget) | +| **Context Budget** | 40% per phase verification | +| **Output** | VERIFICATION.md, UAT.md, remediation tasks | +| **Does NOT** | Execute code, make implementation decisions | + +--- + +## Agent Prompt + +``` +You are a Verifier agent in the Codewalk multi-agent system. + +Your role is to verify that phase goals are achieved, not just that tasks were completed. You check outcomes, not activities. + +## Core Principle + +**Task completion ≠ Goal achievement** + +A completed task "create chat component" does not guarantee the goal "working chat interface" is met. + +## Context Loading + +At verification start, load: +1. Phase goal from ROADMAP.md +2. PLAN.md files for the phase (must_haves from frontmatter) +3. All SUMMARY.md files for the phase +4. Relevant source files + +## Verification Process + +### Step 1: Derive Must-Haves + +If not in PLAN frontmatter, derive from phase goal: + +1. **Observable Truths** (3-7) + What can a user observe when goal is achieved? + ```yaml + observable_truths: + - "User can send message and see it appear" + - "Messages persist after page refresh" + - "New messages appear without reload" + ``` + +2. 
**Required Artifacts** + What files MUST exist? + ```yaml + required_artifacts: + - path: src/components/Chat.tsx + check: "Exports Chat component" + - path: src/api/messages.ts + check: "Exports sendMessage function" + ``` + +3. **Required Wiring** + What connections MUST work? + ```yaml + required_wiring: + - from: Chat.tsx + to: useChat.ts + check: "Component uses hook" + - from: useChat.ts + to: messages.ts + check: "Hook calls API" + ``` + +4. **Key Links** + Where do stubs commonly hide? + ```yaml + key_links: + - "Form onSubmit → API call (not console.log)" + - "API response → state update → render" + ``` + +### Step 2: Three-Level Verification + +For each must-have, check three levels: + +**Level 1: Existence** +Does the artifact exist? +- File exists at path +- Function/component exported +- Route registered + +**Level 2: Substance** +Is it real (not a stub)? +- Function has implementation +- Component renders content +- API returns meaningful data + +**Level 3: Wiring** +Is it connected to the system? 
+
+- Component rendered somewhere
+- API called by client
+- Database query executed
+
+### Step 3: Anti-Pattern Scan
+
+Check for incomplete work:
+
+| Pattern | How to Detect |
+|---------|---------------|
+| TODO comments | Grep for TODO/FIXME |
+| Stub errors | Grep for "not implemented" |
+| Empty returns | AST analysis for return null/undefined |
+| Console.log | Grep in handlers |
+| Empty catch | AST analysis |
+| Hardcoded values | Manual review |
+
+### Step 4: Structure Gaps
+
+If gaps found, structure them for planner:
+
+```yaml
+gaps:
+  - type: STUB
+    location: src/hooks/useChat.ts:34
+    description: "sendMessage returns immediately without API call"
+    severity: BLOCKING
+
+  - type: MISSING_WIRING
+    location: src/components/Chat.tsx
+    description: "WebSocket not connected"
+    severity: BLOCKING
+```
+
+### Step 5: Identify Human Verification Needs
+
+Some things require human eyes:
+
+| Category | Examples |
+|----------|----------|
+| Visual | Layout, spacing, colors |
+| Real-time | WebSocket, live updates |
+| External | OAuth, payment flows |
+| Accessibility | Screen reader, keyboard nav |
+
+Mark these explicitly—don't claim PASS when human verification is pending.
+ +## Output: VERIFICATION.md + +```yaml +--- +phase: 2 +status: PASS | GAPS_FOUND +verified_at: 2024-01-15T10:30:00Z +verified_by: verifier-agent +--- + +# Phase 2 Verification + +## Observable Truths + +| Truth | Status | Evidence | +|-------|--------|----------| +| User can log in | VERIFIED | Login returns tokens | +| Session persists | VERIFIED | Cookie survives refresh | + +## Required Artifacts + +| Artifact | Status | Check | +|----------|--------|-------| +| src/api/auth/login.ts | EXISTS | Exports handler | +| src/middleware/auth.ts | EXISTS | Exports middleware | + +## Required Wiring + +| From | To | Status | Evidence | +|------|-----|--------|----------| +| Login → Token | WIRED | login.ts:45 calls createToken | +| Middleware → Validate | WIRED | auth.ts:23 validates | + +## Anti-Patterns + +| Pattern | Found | Location | +|---------|-------|----------| +| TODO comments | NO | - | +| Stub implementations | NO | - | +| Console.log | YES | login.ts:34 | + +## Human Verification Needed + +| Check | Reason | +|-------|--------| +| Cookie flags | Requires production env | + +## Gaps Found + +[If any, structured for planner] + +## Remediation + +[If gaps, create fix tasks] +``` + +## User Acceptance Testing (UAT) + +After technical verification, run UAT: + +### UAT Process + +1. Extract testable deliverables from phase goal +2. Walk user through each: + ``` + "Can you log in with email and password?" + "Does the dashboard show your projects?" + "Can you create a new project?" + ``` +3. Record: PASS, FAIL, or describe issue +4. If issues: + - Diagnose root cause + - Create targeted fix plan +5. If all pass: Phase complete + +### UAT Output + +```yaml +--- +phase: 2 +tested_by: user +tested_at: 2024-01-15T14:00:00Z +status: PASS | ISSUES_FOUND +--- + +# Phase 2 UAT + +## Test Cases + +### 1. Login with email +**Prompt:** "Can you log in with email and password?" +**Result:** PASS + +### 2. Dashboard loads +**Prompt:** "Does the dashboard show your projects?" 
+**Result:** FAIL +**Issue:** "Shows loading spinner forever" +**Diagnosis:** "API returns 500, missing auth header" + +## Issues Found + +[If any] + +## Fix Required + +[If issues, structured fix plan] +``` + +## Remediation Task Creation + +When gaps or issues found: + +```typescript +// Create remediation task +await task.create({ + title: "Fix: Dashboard API missing auth header", + initiative_id: initiative.id, + phase_id: phase.id, + priority: 0, // P0 for verification failures + description: ` + Issue: Dashboard API returns 500 + Diagnosis: Missing auth header in fetch call + Fix: Add Authorization header to dashboard API calls + Files: src/api/dashboard.ts + `, + metadata: { + source: 'verification', + gap_type: 'MISSING_WIRING' + } +}); +``` + +## Decision Tree + +``` +Phase tasks all complete? + │ + YES ─┴─ NO → Wait + │ + ▼ +Run 3-level verification + │ + ┌───┴───┐ + ▼ ▼ + PASS GAPS_FOUND + │ │ + ▼ ▼ + Run Create remediation + UAT Return GAPS_FOUND + │ + ┌───┴───┐ + ▼ ▼ + PASS ISSUES + │ │ + ▼ ▼ + Phase Create fixes + Complete Re-verify +``` + +## What You Do NOT Do + +- Execute code (you verify, not fix) +- Make implementation decisions +- Skip human verification for visual/external items +- Claim PASS with known gaps +- Create vague remediation tasks +``` + +--- + +## Integration Points + +### With Orchestrator +- Triggered when all phase tasks complete +- Returns verification status +- Creates remediation tasks if needed + +### With Workers +- Reads SUMMARY.md files +- Remediation tasks assigned to Workers + +### With Architect +- VERIFICATION.md gaps feed into re-planning +- May trigger architectural review + +--- + +## Spawning + +Orchestrator spawns Verifier: + +```typescript +const verifierResult = await spawnAgent({ + type: 'verifier', + task: 'verify-phase', + context: { + phase: 2, + initiative_id: 'init-abc123', + plan_files: ['2-1-PLAN.md', '2-2-PLAN.md', '2-3-PLAN.md'], + summary_files: ['2-1-SUMMARY.md', '2-2-SUMMARY.md', '2-3-SUMMARY.md'] + 
}, + model: getModelForProfile('verifier', config.modelProfile) +}); +``` + +--- + +## Example Session + +``` +1. Load phase context +2. Derive must-haves from phase goal +3. For each observable truth: + a. Level 1: Check existence + b. Level 2: Check substance + c. Level 3: Check wiring +4. Scan for anti-patterns +5. Identify human verification needs +6. If gaps found: + - Structure for planner + - Create remediation tasks + - Return GAPS_FOUND +7. If no gaps: + - Run UAT with user + - Record results + - If issues, create fix tasks + - If pass, mark phase complete +8. Create VERIFICATION.md and UAT.md +9. Return to orchestrator +``` diff --git a/docs/agents/worker.md b/docs/agents/worker.md new file mode 100644 index 0000000..d7a17a3 --- /dev/null +++ b/docs/agents/worker.md @@ -0,0 +1,348 @@ +# Worker Agent + +Workers execute tasks. They follow plans precisely while handling deviations according to defined rules. + +## Role Summary + +| Aspect | Value | +|--------|-------| +| **Purpose** | Execute tasks from PLAN.md files | +| **Model** | Opus (quality), Sonnet (balanced/budget) | +| **Context Budget** | 50% per task, fresh context per task | +| **Output** | Code changes, commits, SUMMARY.md | +| **Does NOT** | Plan work, make architectural decisions | + +--- + +## Agent Prompt + +``` +You are a Worker agent in the Codewalk multi-agent system. + +Your role is to execute tasks from PLAN.md files. Follow the plan precisely, handle deviations according to the rules, and document what you do. + +## Core Principle + +**Execute the plan, don't replan.** + +The plan contains the reasoning. Your job is implementation, not decision-making. + +## Context Loading + +At task start, load: +1. Current PLAN.md file +2. Files referenced in plan's @file directives +3. Prior SUMMARY.md files for this phase +4. STATE.md for current position + +## Execution Loop + +For each task in the plan: + +``` +1. Mark task in_progress (cw task update --status in_progress) +2. 
Read task specification: + - files: What to modify/create + - action: What to do + - verify: How to confirm + - done: Acceptance criteria +3. Execute the action +4. Handle deviations (see Deviation Rules) +5. Run verify step +6. Confirm done criteria met +7. Commit changes atomically +8. Mark task closed (cw task close --reason "...") +9. Move to next task +``` + +## Deviation Rules + +When you encounter work not in the plan, apply these rules: + +### Rule 1: Auto-Fix Bugs (No Permission) +- Broken code, syntax errors, runtime errors +- Logic errors, off-by-one, wrong conditions +- Security issues, injection vulnerabilities +- Type errors + +**Action:** Fix immediately, document in SUMMARY.md + +### Rule 2: Auto-Add Missing Critical (No Permission) +- Error handling (try/catch for external calls) +- Input validation (at API boundaries) +- Auth checks (protected routes) +- CSRF protection + +**Action:** Add immediately, document in SUMMARY.md + +### Rule 3: Auto-Fix Blocking (No Permission) +- Missing dependencies (npm install) +- Broken imports (wrong paths) +- Config errors (env vars, tsconfig) +- Build failures + +**Action:** Fix immediately, document in SUMMARY.md + +### Rule 4: ASK About Architectural (Permission Required) +- New database tables +- New services +- API contract changes +- New external dependencies + +**Action:** STOP. Ask user. Document decision. + +## Checkpoint Handling + +### checkpoint:human-verify +You completed work, user confirms it works. +``` +Execute task → Run verify → Ask user: "Can you confirm X?" +``` + +### checkpoint:decision +User must choose implementation direction. +``` +Present options → Wait for response → Continue with choice +``` + +### checkpoint:human-action +Truly unavoidable manual step. 
+``` +Explain what user needs to do → Wait for confirmation → Continue +``` + +## Commit Strategy + +Each task gets an atomic commit: + +``` +{type}({phase}-{plan}): {description} + +- Change detail 1 +- Change detail 2 +``` + +Types: feat, fix, test, refactor, perf, docs, style, chore + +Example: +``` +feat(2-3): implement refresh token rotation + +- Add refresh_tokens table with family tracking +- Create POST /api/auth/refresh endpoint +- Add reuse detection with family revocation +``` + +### Deviation Commits + +Tag deviation commits clearly: +``` +fix(2-3): [Rule 1] add null check to user lookup + +- User lookup could crash when user not found +- Added optional chaining +``` + +## Task Type Handling + +### type: auto +Execute autonomously without checkpoints. + +### type: tdd +Follow TDD cycle: +1. RED: Write failing test +2. GREEN: Implement to pass +3. REFACTOR: Clean up (if needed) +4. Commit test and implementation together + +### type: checkpoint:* +Execute, then trigger checkpoint as specified. 
+ +## Quality Standards + +### Code Quality +- Follow existing patterns in codebase +- TypeScript strict mode +- No any types unless absolutely necessary +- Meaningful variable names +- Error handling at boundaries + +### What NOT to Do +- Add features beyond the task +- Refactor surrounding code +- Add comments to unchanged code +- Create abstractions for one-time operations +- Design for hypothetical futures + +### Anti-Patterns to Avoid +- `// TODO` comments +- `throw new Error('Not implemented')` +- `return null` placeholders +- `console.log` in production code +- Empty catch blocks +- Hardcoded values that should be config + +## SUMMARY.md Creation + +After plan completion, create SUMMARY.md: + +```yaml +--- +phase: 2 +plan: 3 +subsystem: auth +tags: [jwt, security] +requires: [users_table, jose] +provides: [refresh_tokens, token_rotation] +affects: [auth_flow, sessions] +tech_stack: [jose, drizzle, sqlite] +key_files: + - src/api/auth/refresh.ts: "Rotation endpoint" +decisions: + - "Token family for reuse detection" +metrics: + tasks_completed: 3 + deviations: 2 + context_usage: "38%" +--- + +# Summary + +## What Was Built +[Description of what was implemented] + +## Implementation Notes +[Technical details worth preserving] + +## Deviations +[List all Rule 1-4 deviations with details] + +## Commits +[List of commits created] + +## Verification Status +[Checklist from plan with status] + +## Notes for Next Plan +[Context for future work] +``` + +## State Updates + +### On Task Start +``` +position: + task: "current task name" + status: in_progress +``` + +### On Task Complete +``` +progress: + current_phase_completed: N+1 +``` + +### On Plan Complete +``` +sessions: + - completed: ["Phase X, Plan Y"] +``` + +## Error Recovery + +### Task Fails Verification +1. Analyze failure +2. If fixable → fix and re-verify +3. If not fixable → mark blocked, document issue +4. Continue to next task if independent + +### Context Limit Approaching +1. 
Complete current task +2. Update STATE.md with position +3. Create handoff with resume context +4. Exit cleanly for fresh session + +### Unexpected Blocker +1. Document blocker in STATE.md +2. Check if other tasks can proceed +3. If all blocked → escalate to orchestrator +4. If some unblocked → continue with those + +## Session End + +Before ending session: +1. Commit any uncommitted work +2. Create SUMMARY.md if plan complete +3. Update STATE.md with position +4. Set next_action for resume + +## What You Do NOT Do + +- Make architectural decisions (Rule 4 → ask) +- Replan work (follow the plan) +- Add unrequested features +- Skip verify steps +- Leave uncommitted changes +``` + +--- + +## Integration Points + +### With Tasks Module +- Claims tasks via `cw task update --status in_progress` +- Closes tasks via `cw task close --reason "..."` +- Respects dependencies (only works on ready tasks) + +### With Orchestrator +- Receives task assignments +- Reports completion/blockers +- Triggers handoff when context full + +### With Architect +- Consumes PLAN.md files +- Produces SUMMARY.md feedback + +### With Verifier +- SUMMARY.md feeds verification +- Verification results may spawn fix tasks + +--- + +## Spawning + +Orchestrator spawns Worker: + +```typescript +const workerResult = await spawnAgent({ + type: 'worker', + task: 'execute-plan', + context: { + plan_file: '2-3-PLAN.md', + state_file: 'STATE.md', + prior_summaries: ['2-1-SUMMARY.md', '2-2-SUMMARY.md'] + }, + model: getModelForProfile('worker', config.modelProfile), + worktree: 'worker-abc-123' // Isolated git worktree +}); +``` + +--- + +## Example Session + +``` +1. Load PLAN.md +2. Load prior context (STATE.md, SUMMARY files) +3. For each task: + a. Mark in_progress + b. Read files + c. Execute action + d. Handle deviations (Rules 1-4) + e. Run verify + f. Commit atomically + g. Mark closed +4. Create SUMMARY.md +5. Update STATE.md +6. 
Return to orchestrator +``` diff --git a/docs/context-engineering.md b/docs/context-engineering.md new file mode 100644 index 0000000..8ed272d --- /dev/null +++ b/docs/context-engineering.md @@ -0,0 +1,218 @@ +# Context Engineering + +Context engineering is a first-class concern in Codewalk. Agent output quality degrades predictably as context fills. This document defines the rules that all agents must follow. + +## Quality Degradation Curve + +Claude's output quality follows a predictable curve based on context utilization: + +| Context Usage | Quality Level | Behavior | +|---------------|---------------|----------| +| 0-30% | **PEAK** | Thorough, comprehensive, considers edge cases | +| 30-50% | **GOOD** | Confident, solid work, reliable output | +| 50-70% | **DEGRADING** | Efficiency mode begins, shortcuts appear | +| 70%+ | **POOR** | Rushed, minimal, misses requirements | + +**Rule: Stay UNDER 50% context for quality work.** + +--- + +## Orchestrator Pattern + +Codewalk uses thin orchestration with heavy subagent work: + +``` +┌─────────────────────────────────────────────────────────────┐ +│ Orchestrator (30-40%) │ +│ - Routes work to specialized agents │ +│ - Collects results │ +│ - Maintains state │ +│ - Coordinates across phases │ +└─────────────────────────────────────────────────────────────┘ + │ + ┌──────────────────┼──────────────────┐ + ▼ ▼ ▼ + ┌─────────────┐ ┌─────────────┐ ┌─────────────┐ + │ Worker │ │ Architect │ │ Verifier │ + │ (200k ctx) │ │ (200k ctx) │ │ (200k ctx) │ + │ Fresh per │ │ Fresh per │ │ Fresh per │ + │ task │ │ initiative │ │ phase │ + └─────────────┘ └─────────────┘ └─────────────┘ +``` + +**Key insight:** Each subagent gets a fresh 200k context window. Heavy work happens there, not in the orchestrator. + +--- + +## Context Budgets by Role + +### Orchestrator +- **Target:** 30-40% max +- **Strategy:** Route, don't process. Collect results, don't analyze. 
+- **Reset trigger:** Context exceeds 50% + +### Worker +- **Target:** 50% per task +- **Strategy:** Single task per context. Fresh context for each task. +- **Reset trigger:** Task completion (always) + +### Architect +- **Target:** 60% per initiative analysis +- **Strategy:** Initiative discussion + planning in single context +- **Reset trigger:** Work plan generated or context exceeds 70% + +### Verifier +- **Target:** 40% per phase verification +- **Strategy:** Goal-backward verification, gap identification +- **Reset trigger:** Verification complete + +--- + +## Task Sizing Rules + +Tasks are sized to fit context budgets: + +| Task Complexity | Context Estimate | Example | +|-----------------|------------------|---------| +| Simple | 10-20% | Add a field to an existing form | +| Medium | 20-35% | Create new API endpoint with validation | +| Complex | 35-50% | Implement auth flow with refresh tokens | +| Too Large | >50% | **SPLIT INTO SUBTASKS** | + +**Planning rule:** No single task should require >50% context. If estimation suggests otherwise, decompose before execution. + +--- + +## Plan Sizing + +Plans group 2-3 related tasks for sequential execution: + +| Plan Size | Target Context | Notes | +|-----------|----------------|-------| +| Minimal (1 task) | 20-30% | Simple independent work | +| Standard (2-3 tasks) | 40-50% | Related work, shared context | +| Maximum | 50% | Never exceed—quality degrades | + +**Why 2-3 tasks?** Shared context reduces overhead (file reads, understanding). More than 3 loses quality benefits. 
+ +--- + +## Wave-Based Parallelization + +Compute dependency graph and assign tasks to waves: + +``` +Wave 0: Tasks with no dependencies (run in parallel) + ↓ +Wave 1: Tasks depending only on Wave 0 (run in parallel) + ↓ +Wave 2: Tasks depending only on Wave 0-1 (run in parallel) + ↓ +...continue until all tasks assigned +``` + +**Benefits:** +- Maximum parallelization +- Clear progress tracking +- Natural checkpoints between waves + +### Computation Algorithm + +``` +1. Build dependency graph from task dependencies +2. Find all tasks with no unresolved dependencies → Wave 0 +3. Mark Wave 0 as "resolved" +4. Find all tasks whose dependencies are all resolved → Wave 1 +5. Repeat until all tasks assigned +``` + +--- + +## Context Handoff + +When context fills, perform controlled handoff: + +### STATE.md Update +Before handoff, update session state: + +```yaml +position: + phase: 2 + plan: 3 + task: "Implement refresh token rotation" + wave: 1 + +decisions: + - "Using jose library for JWT (not jsonwebtoken)" + - "Refresh tokens stored in httpOnly cookie, not localStorage" + - "15min access token, 7day refresh token" + +blockers: + - "Waiting for user to configure OAuth credentials" + +next_action: "Continue with task after blocker resolved" +``` + +### Handoff Content +New session receives: +- STATE.md (current position) +- Relevant SUMMARY.md files (prior work in this phase) +- Current PLAN.md (if executing) +- Task context from initiative + +--- + +## Anti-Patterns + +### Context Stuffing +**Wrong:** Loading entire codebase at session start +**Right:** Load files on-demand as tasks require them + +### Orchestrator Processing +**Wrong:** Orchestrator reads all code and makes decisions +**Right:** Orchestrator routes to specialized agents who do the work + +### Plan Bloat +**Wrong:** 10-task plans to "reduce coordination overhead" +**Right:** 2-3 task plans that fit in 50% context + +### No Handoff State +**Wrong:** Agent restarts with no memory of prior work 
+**Right:** STATE.md preserves position, decisions, blockers + +--- + +## Monitoring + +Track context utilization across the system: + +| Metric | Threshold | Action | +|--------|-----------|--------| +| Orchestrator context | >50% | Trigger handoff | +| Worker task context | >60% | Flag task as oversized | +| Plan total estimate | >50% | Split plan before execution | +| Average task context | >40% | Review decomposition strategy | + +--- + +## Implementation Notes + +### Context Estimation +Estimate context usage before execution: +- File reads: ~1-2% per file (varies by size) +- Code changes: ~0.5% per change +- Tool outputs: ~1% per tool call +- Discussion: ~2-5% per exchange + +### Fresh Context Triggers +- Worker: Always fresh per task +- Architect: Fresh per initiative +- Verifier: Fresh per phase +- Orchestrator: Handoff at 50% + +### Subagent Spawning +When spawning subagents: +1. Provide focused context (only what's needed) +2. Clear instructions (specific task, expected output) +3. Collect structured results +4. Update state with outcomes diff --git a/docs/database-migrations.md b/docs/database-migrations.md new file mode 100644 index 0000000..30d00a3 --- /dev/null +++ b/docs/database-migrations.md @@ -0,0 +1,50 @@ +# Database Migrations + +This project uses [drizzle-kit](https://orm.drizzle.team/kit-docs/overview) for database schema management and migrations. + +## Overview + +- **Schema definition:** `src/db/schema.ts` (drizzle-orm table definitions) +- **Migration output:** `drizzle/` directory (SQL files + meta journal) +- **Config:** `drizzle.config.ts` +- **Runtime migrator:** `src/db/ensure-schema.ts` (calls `drizzle-orm/better-sqlite3/migrator`) + +## How It Works + +On every server startup, `ensureSchema(db)` runs all pending migrations from the `drizzle/` folder. Drizzle tracks applied migrations in a `__drizzle_migrations` table so only new migrations are applied. This is safe to call repeatedly. 
+ +## Workflow + +### Making schema changes + +1. Edit `src/db/schema.ts` with your table/column changes +2. Generate a migration: + ```bash + npx drizzle-kit generate + ``` +3. Review the generated SQL in `drizzle/NNNN_*.sql` +4. Commit the migration file along with your schema change + +### Applying migrations + +Migrations are applied automatically on server startup. No manual step needed. + +For tests, the same `ensureSchema()` function is called on in-memory SQLite databases in `src/db/repositories/drizzle/test-helpers.ts`. + +### Checking migration status + +```bash +# See what drizzle-kit would generate (dry run) +npx drizzle-kit generate --dry-run + +# Open drizzle studio to inspect the database +npx drizzle-kit studio +``` + +## Rules + +- **Never hand-write migration SQL.** Always use `drizzle-kit generate` from the schema. +- **Never use raw CREATE TABLE statements** for schema initialization. The migration system handles this. +- **Always commit migration files.** They are the source of truth for database evolution. +- **Migration files are immutable.** Once committed, never edit them. Make a new migration instead. +- **Test with `npx vitest run`** after generating migrations to verify they work with in-memory databases. diff --git a/docs/deviation-rules.md b/docs/deviation-rules.md new file mode 100644 index 0000000..c62c323 --- /dev/null +++ b/docs/deviation-rules.md @@ -0,0 +1,263 @@ +# Deviation Rules + +During execution, agents discover work not in the original plan. These rules define how to handle deviations **automatically, without asking for permission** (except Rule 4). 
+ +## The Four Rules + +### Rule 1: Auto-Fix Bugs +**No permission needed.** + +Fix immediately when encountering: +- Broken code (syntax errors, runtime errors) +- Logic errors (wrong conditions, off-by-one) +- Security issues (injection vulnerabilities, exposed secrets) +- Type errors (TypeScript violations) + +```yaml +deviation: + rule: 1 + type: bug_fix + description: "Fixed null reference in user lookup" + location: src/services/auth.ts:45 + original_code: "user.email.toLowerCase()" + fixed_code: "user?.email?.toLowerCase() ?? ''" + reason: "Crashes when user not found" +``` + +### Rule 2: Auto-Add Missing Critical Functionality +**No permission needed.** + +Add immediately when clearly required: +- Error handling (try/catch for external calls) +- Input validation (user input, API boundaries) +- Authentication checks (protected routes) +- CSRF protection +- Rate limiting (if pattern exists in codebase) + +```yaml +deviation: + rule: 2 + type: missing_critical + description: "Added input validation to createUser" + location: src/api/users.ts:23 + added: "Zod schema validation for email, password length" + reason: "API accepts any input without validation" +``` + +### Rule 3: Auto-Fix Blocking Issues +**No permission needed.** + +Fix immediately when blocking task completion: +- Missing dependencies (npm install) +- Broken imports (wrong paths, missing exports) +- Configuration errors (env vars, tsconfig) +- Build failures (compilation errors) + +```yaml +deviation: + rule: 3 + type: blocking_issue + description: "Added missing zod dependency" + command: "npm install zod" + reason: "Import fails without package" +``` + +### Rule 4: ASK About Architectural Changes +**Permission required.** + +Stop and ask user before: +- New database tables or major schema changes +- New services or major component additions +- Changes to API contracts +- New external dependencies (beyond obvious needs) +- Authentication/authorization model changes + +```yaml +deviation: + rule: 
4 + type: architectural_change + status: PENDING_APPROVAL + description: "Considering adding Redis for session storage" + current: "Sessions stored in SQLite" + proposed: "Redis for distributed session storage" + reason: "Multiple server instances need shared sessions" + question: "Should we add Redis, or use sticky sessions instead?" +``` + +--- + +## Decision Tree + +``` +Encountered unexpected issue + │ + ▼ + Is it broken code? + (errors, bugs, security) + │ + YES ─┴─ NO + │ │ + ▼ ▼ + Rule 1 Is critical functionality missing? + Auto-fix (validation, auth, error handling) + │ + YES ─┴─ NO + │ │ + ▼ ▼ + Rule 2 Is it blocking task completion? + Auto-add (deps, imports, config) + │ + YES ─┴─ NO + │ │ + ▼ ▼ + Rule 3 Is it architectural? + Auto-fix (tables, services, contracts) + │ + YES ─┴─ NO + │ │ + ▼ ▼ + Rule 4 Ignore or note + ASK for future +``` + +--- + +## Documentation Requirements + +All deviations MUST be documented in SUMMARY.md: + +```yaml +# 2-3-SUMMARY.md +phase: 2 +plan: 3 + +deviations: + - rule: 1 + type: bug_fix + description: "Fixed null reference in auth service" + location: src/services/auth.ts:45 + + - rule: 2 + type: missing_critical + description: "Added Zod validation to user API" + location: src/api/users.ts:23-45 + + - rule: 3 + type: blocking_issue + description: "Installed missing jose dependency" + command: "npm install jose" + + - rule: 4 + type: architectural_change + status: APPROVED + description: "Added refresh_tokens table" + approved_by: user + approved_at: 2024-01-15T10:30:00Z +``` + +--- + +## Deviation Tracking in Tasks + +When a deviation is significant, create tracking: + +### Minor Deviations +Log in SUMMARY.md, no separate task. 
+ +### Major Deviations (Rule 4) +Create a decision record: + +```sql +INSERT INTO task_history ( + task_id, + field, + old_value, + new_value, + changed_by +) VALUES ( + 'current-task-id', + 'deviation', + NULL, + '{"rule": 4, "description": "Added Redis", "approved": true}', + 'worker-123' +); +``` + +### Deviations That Spawn Work +If fixing a deviation requires substantial work: + +1. Complete current task +2. Create new task for deviation work +3. Link new task as dependency if blocking +4. Continue with original plan + +--- + +## Examples by Category + +### Rule 1: Bug Fixes + +| Issue | Fix | Documentation | +|-------|-----|---------------| +| Undefined property access | Add optional chaining | Note in summary | +| SQL injection vulnerability | Use parameterized query | Note + security flag | +| Race condition in async code | Add proper await | Note in summary | +| Incorrect error message | Fix message text | Note in summary | + +### Rule 2: Missing Critical + +| Gap | Addition | Documentation | +|-----|----------|---------------| +| No input validation | Add Zod/Yup schema | Note in summary | +| No error handling | Add try/catch + logging | Note in summary | +| No auth check | Add middleware | Note in summary | +| No CSRF token | Add csrf protection | Note + security flag | + +### Rule 3: Blocking Issues + +| Blocker | Resolution | Documentation | +|---------|------------|---------------| +| Missing npm package | npm install | Note in summary | +| Wrong import path | Fix path | Note in summary | +| Missing env var | Add to .env.example | Note in summary | +| TypeScript config issue | Fix tsconfig | Note in summary | + +### Rule 4: Architectural (ASK FIRST) + +| Change | Why Ask | Question Format | +|--------|---------|-----------------| +| New DB table | Schema is contract | "Need users_sessions table. Create it?" | +| New service | Architectural decision | "Extract auth to separate service?" 
| +| API contract change | Breaking change | "Change POST /users response format?" | +| New external dep | Maintenance burden | "Add Redis for caching?" | + +--- + +## Integration with Verification + +Deviations are inputs to verification: + +1. **Verifier loads SUMMARY.md** with deviation list +2. **Bug fixes (Rule 1)** verify the fix doesn't break tests +3. **Critical additions (Rule 2)** verify they're properly integrated +4. **Blocking fixes (Rule 3)** verify build/tests pass +5. **Architectural changes (Rule 4)** verify they match approved design + +--- + +## Escalation Path + +If unsure which rule applies: + +1. **Default to Rule 4** (ask) rather than making wrong assumption +2. Document uncertainty in deviation notes +3. Include reasoning for why you're asking + +```yaml +deviation: + rule: 4 + type: uncertain + description: "Adding caching layer to API responses" + reason: "Could be Rule 2 (performance is critical) or Rule 4 (new infrastructure)" + question: "Is Redis caching appropriate here, or should we use in-memory?" +``` diff --git a/docs/execution-artifacts.md b/docs/execution-artifacts.md new file mode 100644 index 0000000..63da127 --- /dev/null +++ b/docs/execution-artifacts.md @@ -0,0 +1,434 @@ +# Execution Artifacts + +Execution produces artifacts that document what happened, enable debugging, and provide context for future work. + +## Artifact Types + +| Artifact | Created By | Purpose | +|----------|------------|---------| +| PLAN.md | Architect | Executable instructions for a plan | +| SUMMARY.md | Worker | Record of what actually happened | +| VERIFICATION.md | Verifier | Goal-backward verification results | +| UAT.md | Verifier + User | User acceptance testing results | +| STATE.md | All agents | Session state (see [session-state.md](session-state.md)) | + +--- + +## PLAN.md + +Plans are **executable prompts**, not documents that transform into prompts. 
+ +### Structure + +```yaml +--- +# Frontmatter +phase: 2 +plan: 3 +type: execute # execute | tdd +wave: 1 +depends_on: [2-2-PLAN] +files_modified: + - src/api/auth/refresh.ts + - src/middleware/auth.ts + - db/migrations/002_refresh_tokens.sql +autonomous: true # false if checkpoints required +must_haves: + observable_truths: + - "Refresh token extends session" + - "Old token invalidated after rotation" + required_artifacts: + - src/api/auth/refresh.ts + required_wiring: + - "refresh endpoint -> token storage" +user_setup: [] # Human prereqs if any +--- + +# Phase 2, Plan 3: Refresh Token Rotation + +## Objective +Implement refresh token rotation to extend user sessions securely while preventing token reuse attacks. + +## Context +@file: PROJECT.md (project overview) +@file: 2-CONTEXT.md (phase decisions) +@file: 2-1-SUMMARY.md (prior work) +@file: 2-2-SUMMARY.md (prior work) + +## Tasks + +### Task 1: Create refresh_tokens table +- **type:** auto +- **files:** db/migrations/002_refresh_tokens.sql, src/db/schema/refreshTokens.ts +- **action:** Create table with: id (uuid), user_id (fk), token_hash (sha256), family (uuid for rotation tracking), expires_at, created_at, revoked_at. Index on token_hash and user_id. +- **verify:** `cw db migrate` succeeds, schema matches +- **done:** Migration applies, drizzle schema matches SQL + +### Task 2: Implement rotation endpoint +- **type:** auto +- **files:** src/api/auth/refresh.ts +- **action:** POST /api/auth/refresh accepts refresh token in httpOnly cookie. Validate token exists and not expired. Generate new access + refresh tokens. Store new refresh, revoke old. Set cookies. Return 200 with new access token. +- **verify:** curl with valid refresh cookie returns new tokens +- **done:** Rotation works, old token invalidated + +### Task 3: Add token family validation +- **type:** auto +- **files:** src/api/auth/refresh.ts +- **action:** If revoked token reused, revoke entire family (reuse detection). Log security event. 
+- **verify:** Reusing old token revokes all tokens in family +- **done:** Reuse detection active + +## Verification Criteria +- [ ] New refresh token issued on rotation +- [ ] Old refresh token no longer valid +- [ ] Reused token triggers family revocation +- [ ] Access token returned in response +- [ ] Cookies set with correct flags (httpOnly, secure, sameSite) + +## Success Criteria +- All tasks complete with passing verify steps +- No TypeScript errors +- Tests cover happy path and reuse detection +``` + +### Key Elements + +| Element | Purpose | +|---------|---------| +| `type: execute\|tdd` | Execution strategy | +| `wave` | Parallelization grouping | +| `depends_on` | Must complete first | +| `files_modified` | Git tracking, conflict detection | +| `autonomous` | Can run without checkpoints | +| `must_haves` | Verification criteria | +| `@file` references | Context to load | + +--- + +## SUMMARY.md + +Created after plan execution. Documents what **actually happened**. + +### Structure + +```yaml +--- +phase: 2 +plan: 3 +subsystem: auth +tags: [jwt, security, tokens] +requires: + - users table + - jose library +provides: + - refresh token rotation + - reuse detection +affects: + - auth flow + - session management +tech_stack: + - jose (JWT) + - drizzle (ORM) + - sqlite +key_files: + - src/api/auth/refresh.ts: "Rotation endpoint" + - src/db/schema/refreshTokens.ts: "Token storage" +decisions: + - "Token family for reuse detection" + - "SHA256 hash for token storage" +metrics: + tasks_completed: 3 + tasks_total: 3 + deviations: 2 + execution_time: "45 minutes" + context_usage: "38%" +--- + +# Phase 2, Plan 3 Summary: Refresh Token Rotation + +## What Was Built +Implemented refresh token rotation with security features: +- Rotation endpoint at POST /api/auth/refresh +- Token storage with family tracking +- Reuse detection that revokes entire token family + +## Implementation Notes + +### Token Storage +Tokens stored as SHA256 hashes (never plaintext). 
Family UUID links related tokens for rotation tracking. + +### Rotation Flow +1. Receive refresh token in cookie +2. Hash and lookup in database +3. Verify not expired, not revoked +4. Generate new access + refresh tokens +5. Store new refresh with same family +6. Revoke old refresh token +7. Set new cookies, return access token + +### Reuse Detection +If a revoked token is presented, the entire family is revoked. This catches scenarios where an attacker captured an old token. + +## Deviations + +### Rule 2: Added rate limiting +```yaml +deviation: + rule: 2 + type: missing_critical + description: "Added rate limiting to refresh endpoint" + location: src/api/auth/refresh.ts:12 + reason: "Prevent brute force token guessing" +``` + +### Rule 1: Fixed async handler +```yaml +deviation: + rule: 1 + type: bug_fix + description: "Added await to database query" + location: src/api/auth/refresh.ts:34 + reason: "Query returned promise, not result" +``` + +## Commits +- `feat(2-3): create refresh_tokens table and schema` +- `feat(2-3): implement token rotation endpoint` +- `feat(2-3): add token family reuse detection` +- `fix(2-3): add await to token lookup query` +- `feat(2-3): add rate limiting to refresh endpoint` + +## Verification Status +- [x] New refresh token issued on rotation +- [x] Old refresh token invalidated +- [x] Reuse detection works +- [x] Cookies set correctly +- [ ] **Pending human verification:** Cookie flags in production + +## Notes for Next Plan +- Rate limiting added; may need tuning based on load +- Token family approach may need cleanup job for old families +``` + +### What to Include + +| Section | Content | +|---------|---------| +| Frontmatter | Metadata for future queries | +| What Was Built | High-level summary | +| Implementation Notes | Technical details worth preserving | +| Deviations | All Rules 1-4 deviations with details | +| Commits | Git commit messages created | +| Verification Status | What passed, what's pending | +| Notes for Next 
Plan | Context for future work | + +--- + +## VERIFICATION.md + +Created by Verifier after phase completion. + +### Structure + +```yaml +--- +phase: 2 +status: PASS # PASS | GAPS_FOUND +verified_at: 2024-01-15T10:30:00Z +verified_by: verifier-agent +--- + +# Phase 2 Verification: JWT Implementation + +## Observable Truths + +| Truth | Status | Evidence | +|-------|--------|----------| +| User can log in with email/password | VERIFIED | Login endpoint returns tokens, sets cookies | +| Sessions persist across page refresh | VERIFIED | Cookie-based token survives reload | +| Token refresh extends session | VERIFIED | Refresh endpoint issues new tokens | +| Expired tokens rejected | VERIFIED | 401 returned for expired access token | + +## Required Artifacts + +| Artifact | Status | Check | +|----------|--------|-------| +| src/api/auth/login.ts | EXISTS | Exports login handler | +| src/api/auth/refresh.ts | EXISTS | Exports refresh handler | +| src/middleware/auth.ts | EXISTS | Exports auth middleware | +| db/migrations/002_refresh_tokens.sql | EXISTS | Creates table | + +## Required Wiring + +| From | To | Status | Evidence | +|------|-----|--------|----------| +| Login handler | Token generation | WIRED | login.ts:45 calls createTokens | +| Auth middleware | Token validation | WIRED | auth.ts:23 calls verifyToken | +| Refresh handler | Token rotation | WIRED | refresh.ts:67 calls rotateToken | +| Protected routes | Auth middleware | WIRED | routes.ts uses auth middleware | + +## Anti-Patterns + +| Pattern | Found | Location | +|---------|-------|----------| +| TODO comments | NO | - | +| Stub implementations | NO | - | +| Console.log in handlers | YES | src/api/auth/login.ts:34 (debug log) | +| Empty catch blocks | NO | - | + +## Human Verification Needed + +| Check | Reason | +|-------|--------| +| Cookie flags in production | Requires deployed environment | +| Token timing accuracy | Requires wall-clock testing | + +## Gaps Found +None blocking. 
One console.log should be removed before production. + +## Remediation +- Task created: "Remove debug console.log from login handler" +``` + +--- + +## UAT.md + +User Acceptance Testing results. + +### Structure + +```yaml +--- +phase: 2 +tested_by: user +tested_at: 2024-01-15T14:00:00Z +status: PASS # PASS | ISSUES_FOUND +--- + +# Phase 2 UAT: JWT Implementation + +## Test Cases + +### 1. Login with email and password +**Prompt:** "Can you log in with your email and password?" +**Result:** PASS +**Notes:** Login successful, redirected to dashboard + +### 2. Session persists on refresh +**Prompt:** "Refresh the page. Are you still logged in?" +**Result:** PASS +**Notes:** Still authenticated after refresh + +### 3. Logout clears session +**Prompt:** "Click logout. Can you access the dashboard?" +**Result:** PASS +**Notes:** Redirected to login page + +### 4. Expired session prompts re-login +**Prompt:** "Wait 15 minutes (or we can simulate). Does the session refresh?" +**Result:** SKIPPED +**Reason:** "User chose to trust token rotation implementation" + +## Issues Found +None. + +## Sign-Off +User confirms Phase 2 JWT Implementation meets requirements. 
+Next: Proceed to Phase 3 (OAuth Integration) +``` + +--- + +## Artifact Storage + +### File Structure + +``` +.planning/ +├── phases/ +│ ├── 1/ +│ │ ├── 1-CONTEXT.md +│ │ ├── 1-1-PLAN.md +│ │ ├── 1-1-SUMMARY.md +│ │ ├── 1-2-PLAN.md +│ │ ├── 1-2-SUMMARY.md +│ │ └── 1-VERIFICATION.md +│ └── 2/ +│ ├── 2-CONTEXT.md +│ ├── 2-1-PLAN.md +│ ├── 2-1-SUMMARY.md +│ ├── 2-2-PLAN.md +│ ├── 2-2-SUMMARY.md +│ ├── 2-3-PLAN.md +│ ├── 2-3-SUMMARY.md +│ ├── 2-VERIFICATION.md +│ └── 2-UAT.md +├── STATE.md +└── config.json +``` + +### Naming Convention + +| Pattern | Meaning | +|---------|---------| +| `{phase}-CONTEXT.md` | Discussion decisions for phase | +| `{phase}-{plan}-PLAN.md` | Executable plan | +| `{phase}-{plan}-SUMMARY.md` | Execution record | +| `{phase}-VERIFICATION.md` | Phase verification | +| `{phase}-UAT.md` | User acceptance testing | + +--- + +## Commit Strategy + +Each task produces an atomic commit: + +``` +{type}({phase}-{plan}): {description} + +- Detail 1 +- Detail 2 +``` + +### Types +- `feat`: New functionality +- `fix`: Bug fix +- `test`: Test additions +- `refactor`: Code restructuring +- `perf`: Performance improvement +- `docs`: Documentation +- `style`: Formatting only +- `chore`: Maintenance + +### Examples +``` +feat(2-3): implement refresh token rotation + +- Add refresh_tokens table with family tracking +- Implement rotation endpoint at POST /api/auth/refresh +- Add reuse detection with family revocation + +fix(2-3): add await to token lookup query + +- Token lookup was returning promise instead of result +- Added proper await in refresh handler + +feat(2-3): add rate limiting to refresh endpoint + +- [Deviation Rule 2] Added express-rate-limit +- 10 requests per minute per IP +- Prevents brute force token guessing +``` + +### Metadata Commit + +After plan completion: +``` +chore(2-3): complete plan execution + +Artifacts: +- 2-3-SUMMARY.md created +- STATE.md updated +- 3 tasks completed, 2 deviations handled +``` diff --git a/docs/initiatives.md 
b/docs/initiatives.md new file mode 100644 index 0000000..0706f67 --- /dev/null +++ b/docs/initiatives.md @@ -0,0 +1,520 @@ +# Initiatives Module + +Initiatives are the planning layer for larger features. They provide a Notion-like document hierarchy for capturing context, decisions, and requirements before work begins. Once approved, initiatives generate phased task plans that agents execute. + +## Design Philosophy + +### Why Initiatives? + +Tasks are atomic work units—great for execution but too granular for planning. Initiatives bridge the gap: + +- **Before approval**: A living document where user and Architect refine the vision +- **After approval**: A persistent knowledge base that tasks link back to +- **Forever**: Context for future work ("why did we build it this way?") + +### Notion-Like Structure + +Initiatives aren't flat documents. They're hierarchical pages: + +``` +Initiative: User Authentication +├── User Journeys +│ ├── Sign Up Flow +│ └── Password Reset Flow +├── Business Rules +│ └── Password Requirements +├── Technical Concept +│ ├── JWT Strategy +│ └── Session Management +└── Architectural Changes + └── Auth Middleware +``` + +Each "page" is a record in SQLite with parent-child relationships. 
This enables: +- Structured queries: "Give me all subpages of initiative X" +- Inventory views: "List all technical concepts across initiatives" +- Cross-references: Link between pages + +--- + +## Data Model + +### Initiative Entity + +| Field | Type | Description | +|-------|------|-------------| +| `id` | TEXT | Primary key (e.g., `init-a1b2c3`) | +| `project_id` | TEXT | Scopes to a project (most initiatives are single-project) | +| `title` | TEXT | Initiative name | +| `status` | TEXT | `draft`, `review`, `approved`, `in_progress`, `completed`, `rejected` | +| `created_by` | TEXT | User who created it | +| `created_at` | INTEGER | Unix timestamp | +| `updated_at` | INTEGER | Unix timestamp | +| `approved_at` | INTEGER | When approved (null if not approved) | +| `approved_by` | TEXT | Who approved it | + +### Initiative Page Entity + +| Field | Type | Description | +|-------|------|-------------| +| `id` | TEXT | Primary key (e.g., `page-x1y2z3`) | +| `initiative_id` | TEXT | Parent initiative | +| `parent_page_id` | TEXT | Parent page (null for root-level pages) | +| `type` | TEXT | `user_journey`, `business_rule`, `technical_concept`, `architectural_change`, `note`, `custom` | +| `title` | TEXT | Page title | +| `content` | TEXT | Markdown content | +| `sort_order` | INTEGER | Display order among siblings | +| `created_at` | INTEGER | Unix timestamp | +| `updated_at` | INTEGER | Unix timestamp | + +### Initiative Phase Entity + +Phases group tasks for staged execution and rolling approval. + +| Field | Type | Description | +|-------|------|-------------| +| `id` | TEXT | Primary key (e.g., `phase-p1q2r3`) | +| `initiative_id` | TEXT | Parent initiative | +| `number` | INTEGER | Phase number (1, 2, 3...) 
| +| `name` | TEXT | Phase name | +| `description` | TEXT | What this phase delivers | +| `status` | TEXT | `draft`, `pending_approval`, `approved`, `in_progress`, `completed` | +| `approved_at` | INTEGER | When approved | +| `approved_by` | TEXT | Who approved | +| `created_at` | INTEGER | Unix timestamp | + +### Task Link + +Tasks reference their initiative and phase: + +```sql +-- In tasks table (see docs/tasks.md) +initiative_id TEXT REFERENCES initiatives(id), +phase_id TEXT REFERENCES initiative_phases(id), +``` + +--- + +## SQLite Schema + +```sql +CREATE TABLE initiatives ( + id TEXT PRIMARY KEY, + project_id TEXT, + title TEXT NOT NULL, + status TEXT NOT NULL DEFAULT 'draft' + CHECK (status IN ('draft', 'review', 'approved', 'in_progress', 'completed', 'rejected')), + created_by TEXT, + created_at INTEGER NOT NULL DEFAULT (unixepoch()), + updated_at INTEGER NOT NULL DEFAULT (unixepoch()), + approved_at INTEGER, + approved_by TEXT +); + +CREATE TABLE initiative_pages ( + id TEXT PRIMARY KEY, + initiative_id TEXT NOT NULL REFERENCES initiatives(id) ON DELETE CASCADE, + parent_page_id TEXT REFERENCES initiative_pages(id) ON DELETE CASCADE, + type TEXT NOT NULL DEFAULT 'note' + CHECK (type IN ('user_journey', 'business_rule', 'technical_concept', 'architectural_change', 'note', 'custom')), + title TEXT NOT NULL, + content TEXT, + sort_order INTEGER NOT NULL DEFAULT 0, + created_at INTEGER NOT NULL DEFAULT (unixepoch()), + updated_at INTEGER NOT NULL DEFAULT (unixepoch()) +); + +CREATE TABLE initiative_phases ( + id TEXT PRIMARY KEY, + initiative_id TEXT NOT NULL REFERENCES initiatives(id) ON DELETE CASCADE, + number INTEGER NOT NULL, + name TEXT NOT NULL, + description TEXT, + status TEXT NOT NULL DEFAULT 'draft' + CHECK (status IN ('draft', 'pending_approval', 'approved', 'in_progress', 'completed')), + approved_at INTEGER, + approved_by TEXT, + created_at INTEGER NOT NULL DEFAULT (unixepoch()), + UNIQUE(initiative_id, number) +); + +CREATE INDEX 
idx_initiatives_project ON initiatives(project_id); +CREATE INDEX idx_initiatives_status ON initiatives(status); +CREATE INDEX idx_pages_initiative ON initiative_pages(initiative_id); +CREATE INDEX idx_pages_parent ON initiative_pages(parent_page_id); +CREATE INDEX idx_pages_type ON initiative_pages(type); +CREATE INDEX idx_phases_initiative ON initiative_phases(initiative_id); +CREATE INDEX idx_phases_status ON initiative_phases(status); + +-- Useful views +CREATE VIEW initiative_page_tree AS +WITH RECURSIVE tree AS ( + SELECT id, initiative_id, parent_page_id, title, type, 0 as depth, + title as path + FROM initiative_pages WHERE parent_page_id IS NULL + UNION ALL + SELECT p.id, p.initiative_id, p.parent_page_id, p.title, p.type, t.depth + 1, + t.path || ' > ' || p.title + FROM initiative_pages p + JOIN tree t ON p.parent_page_id = t.id +) +SELECT * FROM tree ORDER BY path; +``` + +--- + +## Status Workflow + +### Initiative Status + +``` + [draft] ──submit──▶ [review] ──approve──▶ [approved] + │ │ │ + │ │ reject │ start work + │ ▼ ▼ + │ [rejected] [in_progress] + │ │ + │ │ all phases done + └──────────────────────────────────────────▶ [completed] +``` + +| Status | Meaning | +|--------|---------| +| `draft` | User/Architect still refining | +| `review` | Ready for approval decision | +| `approved` | Work plan created, awaiting execution | +| `in_progress` | At least one phase executing | +| `completed` | All phases completed | +| `rejected` | Won't implement | + +### Phase Status + +``` + [draft] ──finalize──▶ [pending_approval] ──approve──▶ [approved] + │ + │ claim tasks + ▼ + [in_progress] + │ + │ all tasks closed + ▼ + [completed] +``` + +**Rolling approval pattern:** +1. Architect creates work plan with multiple phases +2. User approves Phase 1 → agents start executing +3. While Phase 1 executes, user reviews Phase 2 +4. Phase 2 approved → agents can start when ready +5. 
Continue until all phases approved/completed + +This prevents blocking: agents don't wait for all phases to be approved upfront. + +--- + +## Workflow + +### 1. Draft Initiative + +User creates initiative with basic vision: + +``` +cw initiative create "User Authentication" +``` + +System creates initiative in `draft` status with empty page structure. + +### 2. Architect Iteration (Questioning) + +Architect agent engages in structured questioning to capture requirements: + +**Question Categories:** + +| Category | Example Questions | +|----------|-------------------| +| **Visual Features** | Layout approach? Density? Interactions? Empty states? | +| **APIs/CLIs** | Response format? Flags? Error handling? Verbosity? | +| **Data/Content** | Structure? Validation rules? Edge cases? | +| **Architecture** | Patterns to follow? What to avoid? Reference code? | + +Each answer populates initiative pages. Architect may: +- Create user journey pages +- Document business rules +- Draft technical concepts +- Flag architectural impacts + +See [agents/architect.md](agents/architect.md) for the full Architect agent prompt. + +### 3. Discussion Phase (Per Phase) + +Before planning each phase, the Architect captures implementation decisions through focused discussion. This happens BEFORE any planning work. + +``` +cw phase discuss +``` + +Creates `{phase}-CONTEXT.md` with locked decisions: + +```yaml +--- +phase: 1 +discussed_at: 2024-01-15 +--- + +# Phase 1 Context: User Authentication + +## Decisions + +### Authentication Method +**Decision:** Email/password with optional OAuth +**Reason:** MVP needs simple auth, OAuth for convenience +**Locked:** true + +### Token Storage +**Decision:** httpOnly cookies +**Reason:** XSS protection +**Alternatives Rejected:** +- localStorage: XSS vulnerable +``` + +These decisions guide all subsequent planning and execution. Workers reference CONTEXT.md for implementation direction. + +### 4. 
Research Phase (Optional) + +For phases with unknowns, run discovery before planning: + +| Level | When | Time | Scope | +|-------|------|------|-------| +| L0 | Pure internal work | Skip | None | +| L1 | Quick verification | 2-5 min | Confirm assumptions | +| L2 | Standard research | 15-30 min | Explore patterns | +| L3 | Deep dive | 1+ hour | Novel domain | + +``` +cw phase research --level 2 +``` + +Creates `{phase}-RESEARCH.md` with findings that inform planning. + +### 5. Submit for Review + +When Architect and user are satisfied: + +``` +cw initiative submit +``` + +Status changes to `review`. Triggers notification for approval. + +### 6. Approve Initiative + +Human reviews the complete initiative: + +``` +cw initiative approve +``` + +Status changes to `approved`. Now work plan can be created. + +### 7. Create Work Plan + +Architect (or user) breaks initiative into phases: + +``` +cw initiative plan +``` + +This creates: +- `initiative_phases` records +- Tasks linked to each phase via `initiative_id` + `phase_id` + +Tasks are created in `open` status but won't be "ready" until their phase is approved. + +### 8. Approve Phases (Rolling) + +User reviews and approves phases one at a time: + +``` +cw phase approve +``` + +Approved phases make their tasks "ready" for agents. User can approve Phase 1, let agents work, then approve Phase 2 later. + +### 9. Execute + +Workers pull tasks via `cw task ready`. Tasks include: +- Link to initiative for context +- Link to phase for grouping +- All normal task fields (dependencies, priority, etc.) + +### 10. Verify Phase + +After all tasks in a phase complete, the Verifier agent runs goal-backward verification: + +``` +cw phase verify +``` + +Verification checks: +1. **Observable truths** — What users can observe when goal is achieved +2. **Required artifacts** — Files that must exist (not stubs) +3. **Required wiring** — Connections that must work +4. 
**Anti-patterns** — TODOs, placeholders, empty returns + +Creates `{phase}-VERIFICATION.md` with results. If gaps found, creates remediation tasks. + +See [verification.md](verification.md) for detailed verification patterns. + +### 11. User Acceptance Testing + +After technical verification passes, run UAT: + +``` +cw phase uat +``` + +Walks user through testable deliverables: +- "Can you log in with email and password?" +- "Does the dashboard show your projects?" + +Creates `{phase}-UAT.md` with results. If issues found, creates targeted fix plans. + +### 12. Complete + +When all tasks in all phases are closed AND verification passes: +- Each phase auto-transitions to `completed` +- Initiative auto-transitions to `completed` +- Domain layer updated to reflect new state + +--- + +## Phase Artifacts + +Each phase produces artifacts during execution: + +| Artifact | Created By | Purpose | +|----------|------------|---------| +| `{phase}-CONTEXT.md` | Architect (Discussion) | Locked implementation decisions | +| `{phase}-RESEARCH.md` | Architect (Research) | Domain knowledge findings | +| `{phase}-{N}-PLAN.md` | Architect (Planning) | Executable task plans | +| `{phase}-{N}-SUMMARY.md` | Worker (Execution) | What actually happened | +| `{phase}-VERIFICATION.md` | Verifier | Goal-backward verification | +| `{phase}-UAT.md` | Verifier + User | User acceptance testing | + +See [execution-artifacts.md](execution-artifacts.md) for artifact specifications. 
 + +--- + +## CLI Reference + +### Initiative Commands + +| Command | Description | +|---------|-------------| +| `cw initiative create <title>` | Create draft initiative | +| `cw initiative list [--status STATUS]` | List initiatives | +| `cw initiative show <id>` | Show initiative with page tree | +| `cw initiative submit <id>` | Submit for review | +| `cw initiative approve <id>` | Approve initiative | +| `cw initiative reject <id> --reason "..."` | Reject initiative | +| `cw initiative plan <id>` | Generate phased work plan | + +### Page Commands + +| Command | Description | +|---------|-------------| +| `cw page create <initiative-id> <title> --type TYPE` | Create page | +| `cw page create <initiative-id> <title> --parent <page-id>` | Create subpage | +| `cw page show <id>` | Show page content | +| `cw page edit <id>` | Edit page (opens editor) | +| `cw page list <initiative-id> [--type TYPE]` | List pages | +| `cw page tree <initiative-id>` | Show page hierarchy | + +### Phase Commands + +| Command | Description | +|---------|-------------| +| `cw phase list <initiative-id>` | List phases | +| `cw phase show <id>` | Show phase with tasks | +| `cw phase discuss <id>` | Capture implementation decisions (creates CONTEXT.md) | +| `cw phase research <id> [--level N]` | Run discovery (L0-L3, creates RESEARCH.md) | +| `cw phase approve <id>` | Approve phase for execution | +| `cw phase verify <id>` | Run goal-backward verification | +| `cw phase uat <id>` | Run user acceptance testing | +| `cw phase status <id>` | Check phase progress | + +--- + +## Integration Points + +### With Tasks Module + +Tasks gain two new fields: +- `initiative_id`: Links task to initiative (for context) +- `phase_id`: Links task to phase (for grouping/approval) + +The `ready_tasks` view should consider phase approval: + +```sql +CREATE VIEW ready_tasks AS +SELECT t.* FROM tasks t +LEFT JOIN initiative_phases p ON t.phase_id = p.id +WHERE t.status = 'open' + AND (t.phase_id IS NULL OR p.status IN 
('approved', 'in_progress')) + AND NOT EXISTS ( + SELECT 1 FROM task_dependencies d + JOIN tasks dep ON d.depends_on = dep.id + WHERE d.task_id = t.id + AND d.type = 'blocks' + AND dep.status != 'closed' + ) +ORDER BY t.priority ASC, t.created_at ASC; +``` + +### With Domain Layer + +When initiative completes, its pages can feed into domain documentation: +- Business rules → Domain business rules +- Technical concepts → Architecture docs +- New aggregates → Domain model updates + +### With Orchestrator + +Orchestrator can: +- Trigger Architect agents for initiative iteration +- Monitor phase completion and auto-advance initiative status +- Coordinate approval notifications + +### tRPC Procedures + +```typescript +// Suggested tRPC router shape +initiative.create(input) // → Initiative +initiative.list(filters) // → Initiative[] +initiative.get(id) // → Initiative with pages +initiative.submit(id) // → Initiative +initiative.approve(id) // → Initiative +initiative.reject(id, reason) // → Initiative +initiative.plan(id) // → Phase[] + +page.create(input) // → Page +page.get(id) // → Page +page.update(id, content) // → Page +page.list(initiativeId, filters) // → Page[] +page.tree(initiativeId) // → PageTree + +phase.list(initiativeId) // → Phase[] +phase.get(id) // → Phase with tasks +phase.approve(id) // → Phase +phase.status(id) // → PhaseStatus +``` + +--- + +## Future Considerations + +- **Templates**: Pre-built page structures for common initiative types +- **Cross-project initiatives**: Single initiative spanning multiple projects +- **Versioning**: Track changes to initiative pages over time +- **Approval workflows**: Multi-step approval with different approvers +- **Auto-planning**: LLM generates work plan from initiative content diff --git a/docs/logging.md b/docs/logging.md new file mode 100644 index 0000000..2a536ff --- /dev/null +++ b/docs/logging.md @@ -0,0 +1,64 @@ +# Structured Logging + +Codewalk District uses [pino](https://getpino.io/) for structured 
JSON logging on the backend. + +## Architecture + +- **pino** writes structured JSON to **stderr** so CLI user output on stdout stays clean +- **console.log** remains for CLI command handlers (user-facing output on stdout) +- The `src/logging/` module (ProcessLogWriter/LogManager) is a separate concern — it captures per-agent process stdout/stderr to files + +## Environment Variables + +| Variable | Description | Default | +|----------|-------------|---------| +| `CW_LOG_LEVEL` | Log level override (`fatal`, `error`, `warn`, `info`, `debug`, `trace`, `silent`) | `info` (production), `debug` (development) | +| `CW_LOG_PRETTY` | Set to `1` for human-readable colorized output via pino-pretty | unset (JSON output) | + +## Log Levels + +| Level | Usage | +|-------|-------| +| `fatal` | Process will exit (uncaught exceptions, DB migration failure) | +| `error` | Operation failed (agent crash, parse failure, clone failure) | +| `warn` | Degraded (account exhausted, no accounts available, stale PID, reconcile marking crashed) | +| `info` | State transitions (agent spawned/stopped/resumed, dispatch decision, server started, account selected/switched) | +| `debug` | Implementation details (command being built, session ID extraction, worktree paths, schema selection) | + +## Adding Logging to a New Module + +```typescript +import { createModuleLogger } from '../logger/index.js'; + +const log = createModuleLogger('my-module'); + +// Use structured data as first arg, message as second +log.info({ taskId, agentId }, 'task dispatched'); +log.error({ err: error }, 'operation failed'); +log.debug({ path, count }, 'processing items'); +``` + +## Module Names + +| Module | Used in | +|--------|---------| +| `agent-manager` | `src/agent/manager.ts` | +| `dispatch` | `src/dispatch/manager.ts` | +| `http` | `src/server/index.ts` | +| `server` | `src/cli/index.ts` (startup) | +| `git` | `src/git/manager.ts`, `src/git/clone.ts`, `src/git/project-clones.ts` | +| `db` | 
`src/db/ensure-schema.ts` | + +## Testing + +Logs are silenced in tests via `CW_LOG_LEVEL=silent` in `vitest.config.ts`. + +## Quick Start + +```sh +# Pretty logs during development +CW_LOG_LEVEL=debug CW_LOG_PRETTY=1 cw --server + +# JSON logs for production/piping +cw --server 2>server.log +``` diff --git a/docs/model-profiles.md b/docs/model-profiles.md new file mode 100644 index 0000000..bc58404 --- /dev/null +++ b/docs/model-profiles.md @@ -0,0 +1,267 @@ +# Model Profiles + +Different agent roles have different needs. Model selection balances quality, cost, and latency. + +## Profile Definitions + +| Profile | Use Case | Cost | Quality | +|---------|----------|------|---------| +| **quality** | Critical decisions, architecture | Highest | Best | +| **balanced** | Default for most work | Medium | Good | +| **budget** | High-volume, low-risk tasks | Lowest | Acceptable | + +--- + +## Agent Model Assignments + +| Agent | Quality | Balanced (Default) | Budget | +|-------|---------|-------------------|--------| +| **Architect** | Opus | Opus | Sonnet | +| **Worker** | Opus | Sonnet | Sonnet | +| **Verifier** | Sonnet | Sonnet | Haiku | +| **Orchestrator** | Sonnet | Sonnet | Haiku | +| **Monitor** | Sonnet | Haiku | Haiku | +| **Researcher** | Opus | Sonnet | Haiku | + +--- + +## Rationale + +### Architect (Planning) - Opus/Opus/Sonnet +Planning has the highest impact on outcomes. A bad plan wastes all downstream execution. Invest in quality here. + +**Quality profile:** Complex systems, novel domains, critical decisions +**Balanced profile:** Standard feature work, established patterns +**Budget profile:** Simple initiatives, well-documented domains + +### Worker (Execution) - Opus/Sonnet/Sonnet +The plan already contains reasoning. Execution is implementation, not decision-making. 
+ +**Quality profile:** Complex algorithms, security-critical code +**Balanced profile:** Standard implementation work +**Budget profile:** Simple tasks, boilerplate code + +### Verifier (Validation) - Sonnet/Sonnet/Haiku +Verification is structured checking against defined criteria. Less reasoning needed than planning. + +**Quality profile:** Complex verification, subtle integration issues +**Balanced profile:** Standard goal-backward verification +**Budget profile:** Simple pass/fail checks + +### Orchestrator (Coordination) - Sonnet/Sonnet/Haiku +Orchestrator routes work, doesn't do heavy lifting. Needs reliability, not creativity. + +**Quality profile:** Complex multi-agent coordination +**Balanced profile:** Standard workflow management +**Budget profile:** Simple task routing + +### Monitor (Observation) - Sonnet/Haiku/Haiku +Monitoring is pattern matching and threshold checking. Minimal reasoning required. + +**Quality profile:** Complex health analysis +**Balanced profile:** Standard monitoring +**Budget profile:** Simple heartbeat checks + +### Researcher (Discovery) - Opus/Sonnet/Haiku +Research is read-only exploration. High volume, low modification risk. 
+ +**Quality profile:** Deep domain analysis +**Balanced profile:** Standard codebase exploration +**Budget profile:** Simple file lookups + +--- + +## Profile Selection + +### Per-Initiative Override + +```yaml +# In initiative config +model_profile: quality # Override default balanced +``` + +### Per-Agent Override + +```yaml +# In task assignment +assigned_to: worker-123 +model_override: opus # This task needs Opus +``` + +### Automatic Escalation + +```yaml +# When to auto-escalate +escalation_triggers: + - condition: "task.retry_count > 2" + action: "escalate_model" + - condition: "task.complexity == 'high'" + action: "use_quality_profile" + - condition: "deviation.rule == 4" + action: "escalate_model" +``` + +--- + +## Cost Management + +### Estimated Token Usage + +| Agent | Avg Tokens/Task | Profile Impact | +|-------|-----------------|----------------| +| Architect | 50k-100k | 3x between budget/quality | +| Worker | 20k-50k | 2x between budget/quality | +| Verifier | 10k-30k | 1.5x between budget/quality | +| Orchestrator | 5k-15k | 1.5x between budget/quality | + +### Cost Optimization Strategies + +1. **Right-size tasks:** Smaller tasks = less token usage +2. **Use budget for volume:** Monitoring, simple checks +3. **Reserve quality for impact:** Architecture, security +4. 
**Profile per initiative:** Simple features use budget, complex use quality + +--- + +## Configuration + +### Default Profile + +```json +// .planning/config.json +{ + "model_profile": "balanced", + "model_overrides": { + "architect": null, + "worker": null, + "verifier": null + } +} +``` + +### Quality Profile + +```json +{ + "model_profile": "quality", + "model_overrides": {} +} +``` + +### Budget Profile + +```json +{ + "model_profile": "budget", + "model_overrides": { + "architect": "sonnet" // Keep architect at sonnet minimum + } +} +``` + +### Mixed Profile + +```json +{ + "model_profile": "balanced", + "model_overrides": { + "architect": "opus", // Invest in planning + "worker": "sonnet", // Standard execution + "verifier": "haiku" // Budget verification + } +} +``` + +--- + +## Model Capabilities Reference + +### Opus +- **Strengths:** Complex reasoning, nuanced decisions, novel problems +- **Best for:** Architecture, complex algorithms, security analysis +- **Cost:** Highest + +### Sonnet +- **Strengths:** Good balance of reasoning and speed, reliable +- **Best for:** Standard development, code generation, debugging +- **Cost:** Medium + +### Haiku +- **Strengths:** Fast, cheap, good for structured tasks +- **Best for:** Monitoring, simple checks, high-volume operations +- **Cost:** Lowest + +--- + +## Profile Switching + +### CLI Command + +```bash +# Set profile for all future work +cw config set model_profile quality + +# Set profile for specific initiative +cw initiative config <id> --model-profile budget + +# Override for single task +cw task update <id> --model-override opus +``` + +### API + +```typescript +// Set initiative profile +await initiative.setConfig(id, { modelProfile: 'quality' }); + +// Override task model +await task.update(id, { modelOverride: 'opus' }); +``` + +--- + +## Monitoring Model Usage + +Track model usage for cost analysis: + +```sql +CREATE TABLE model_usage ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + agent_type TEXT NOT 
NULL, + model TEXT NOT NULL, + tokens_input INTEGER, + tokens_output INTEGER, + task_id TEXT, + initiative_id TEXT, + created_at INTEGER DEFAULT (unixepoch()) +); + +-- Usage by agent type +SELECT agent_type, model, SUM(tokens_input + tokens_output) as total_tokens +FROM model_usage +GROUP BY agent_type, model; + +-- Cost by initiative +SELECT initiative_id, + SUM(CASE WHEN model = 'opus' THEN (tokens_input + tokens_output) * 0.015 + WHEN model = 'sonnet' THEN (tokens_input + tokens_output) * 0.003 + WHEN model = 'haiku' THEN (tokens_input + tokens_output) * 0.0003 END) as estimated_cost +FROM model_usage +GROUP BY initiative_id; +``` + +--- + +## Recommendations + +### Starting Out +Use **balanced** profile. It provides good quality at reasonable cost. + +### High-Stakes Projects +Use **quality** profile. The cost difference is negligible compared to getting it right. + +### High-Volume Work +Use **budget** profile with architect override to sonnet. Don't skimp on planning. + +### Learning the System +Use **quality** profile initially. See what good output looks like before optimizing for cost. diff --git a/docs/session-state.md b/docs/session-state.md new file mode 100644 index 0000000..19121bc --- /dev/null +++ b/docs/session-state.md @@ -0,0 +1,402 @@ +# Session State + +Session state tracks position, decisions, and blockers across agent restarts. Unlike the Domain Layer (which tracks codebase state), session state tracks **execution state**. 
+ +## STATE.md + +Every active initiative maintains a STATE.md file tracking execution progress: + +```yaml +# STATE.md +initiative: init-abc123 +title: User Authentication + +# Current Position +position: + phase: 2 + phase_name: "JWT Implementation" + plan: 3 + plan_name: "Refresh Token Rotation" + task: "Implement token rotation endpoint" + wave: 1 + status: in_progress + +# Progress Tracking +progress: + phases_total: 4 + phases_completed: 1 + current_phase_tasks: 8 + current_phase_completed: 5 + bar: "████████░░░░░░░░ 50%" + +# Decisions Made +decisions: + - date: 2024-01-14 + context: "Token storage strategy" + decision: "httpOnly cookie, not localStorage" + reason: "XSS protection, automatic inclusion in requests" + + - date: 2024-01-14 + context: "JWT library" + decision: "jose over jsonwebtoken" + reason: "Better TypeScript support, Web Crypto API" + + - date: 2024-01-15 + context: "Refresh token lifetime" + decision: "7 days" + reason: "Balance between security and UX" + +# Active Blockers +blockers: + - id: block-001 + description: "Waiting for OAuth credentials from client" + blocked_since: 2024-01-15 + affects: ["Phase 3: OAuth Integration"] + workaround: "Proceeding with email/password auth first" + +# Session History +sessions: + - id: session-001 + started: 2024-01-14T09:00:00Z + ended: 2024-01-14T17:00:00Z + completed: ["Phase 1: Database Schema", "Phase 2 Tasks 1-3"] + + - id: session-002 + started: 2024-01-15T09:00:00Z + status: active + working_on: "Phase 2, Task 4: Refresh token rotation" + +# Next Action +next_action: | + Continue implementing refresh token rotation endpoint. + After completion, run verification for Phase 2. + If Phase 2 passes, move to Phase 3 (blocked pending OAuth creds). 
+ +# Context for Resume +resume_context: + files_modified_this_session: + - src/api/auth/refresh.ts + - src/middleware/auth.ts + - db/migrations/002_refresh_tokens.sql + + key_implementations: + - "Refresh tokens stored in SQLite with expiry" + - "Rotation creates new token, invalidates old" + - "Token family tracking for reuse detection" + + open_questions: [] +``` + +--- + +## State Updates + +### When to Update STATE.md + +| Event | Update | +|-------|--------| +| Task started | `position.task`, `position.status` | +| Task completed | `progress.*`, `position` to next task | +| Decision made | Add to `decisions` | +| Blocker encountered | Add to `blockers` | +| Blocker resolved | Remove from `blockers` | +| Session start | Add to `sessions` | +| Session end | Update session `ended`, `completed` | +| Phase completed | `progress.phases_completed`, reset task counters | + +### Atomic Updates + +```typescript +// Update position atomically +await updateState({ + position: { + phase: 2, + plan: 3, + task: "Implement token rotation", + wave: 1, + status: "in_progress" + } +}); + +// Add decision +await addDecision({ + context: "Token storage", + decision: "httpOnly cookie", + reason: "XSS protection" +}); + +// Record blocker +await addBlocker({ + description: "Waiting for OAuth creds", + affects: ["Phase 3"] +}); +``` + +--- + +## Resume Protocol + +When resuming work: + +### 1. Load STATE.md +``` +Read STATE.md for initiative +Extract: position, decisions, blockers, resume_context +``` + +### 2. Load Relevant Context +``` +If position.plan exists: + Load {phase}-{plan}-PLAN.md + Load prior SUMMARY.md files for this phase + +If position.task exists: + Find task in current plan + Resume from that task +``` + +### 3. Verify State +``` +Check files_modified_this_session still exist +Check implementations match key_implementations +If mismatch: flag for review before proceeding +``` + +### 4. 
Continue Execution +``` +Display: "Resuming from Phase {N}, Plan {M}, Task: {name}" +Display: decisions made (for context) +Display: active blockers (for awareness) +Continue with task execution +``` + +--- + +## Decision Tracking + +Decisions are first-class citizens, not comments. + +### What to Track + +| Type | Example | Why Track | +|------|---------|-----------| +| Technology choice | "Using jose for JWT" | Prevents second-guessing | +| Architecture decision | "Separate auth service" | Documents reasoning | +| Trade-off resolution | "Speed over features" | Explains constraints | +| User preference | "Dark mode default" | Preserves intent | +| Constraint discovered | "API rate limited to 100/min" | Prevents repeated discovery | + +### Decision Format + +```yaml +decisions: + - date: 2024-01-15 + context: "Where the decision was needed" + decision: "What was decided" + reason: "Why this choice" + alternatives_considered: + - "Alternative A: rejected because..." + - "Alternative B: rejected because..." + reversible: true|false +``` + +--- + +## Blocker Management + +### Blocker States + +``` +[new] ──identify──▶ [active] ──resolve──▶ [resolved] + │ + │ workaround + ▼ + [bypassed] +``` + +### Blocker Format + +```yaml +blockers: + - id: block-001 + status: active + description: "Need production API keys" + identified_at: 2024-01-15T10:00:00Z + affects: + - "Phase 4: Production deployment" + - "Phase 5: Monitoring setup" + blocked_tasks: + - task-xyz: "Configure production environment" + workaround: null + resolution: null + + - id: block-002 + status: bypassed + description: "Design mockups not ready" + identified_at: 2024-01-14T09:00:00Z + affects: ["UI implementation"] + workaround: "Using placeholder styles, will refine later" + workaround_tasks: + - task-abc: "Apply final styles when mockups ready" +``` + +### Blocker Impact on Execution + +1. **Task Blocking:** Task marked `blocked` in tasks table +2. 
**Phase Blocking:** If all remaining tasks blocked, phase paused +3. **Initiative Blocking:** If all phases blocked, escalate to user + +--- + +## Session History + +Track work sessions for debugging and handoffs: + +```yaml +sessions: + - id: session-001 + agent: worker-abc + started: 2024-01-14T09:00:00Z + ended: 2024-01-14T12:30:00Z + context_usage: "45%" + completed: + - "Phase 1, Plan 1: Database setup" + - "Phase 1, Plan 2: User model" + notes: "Clean execution, no issues" + + - id: session-002 + agent: worker-def + started: 2024-01-14T13:00:00Z + ended: 2024-01-14T17:00:00Z + context_usage: "62%" + completed: + - "Phase 1, Plan 3: Auth endpoints" + issues: + - "Context exceeded 50%, quality may have degraded" + - "Encountered blocker: missing env vars" + handoff_reason: "Context limit reached" +``` + +--- + +## Storage Options + +### SQLite (Recommended for Codewalk) + +```sql +CREATE TABLE initiative_state ( + initiative_id TEXT PRIMARY KEY REFERENCES initiatives(id), + current_phase INTEGER, + current_plan INTEGER, + current_task TEXT, + current_wave INTEGER, + status TEXT, + progress_json TEXT, + updated_at INTEGER +); + +CREATE TABLE initiative_decisions ( + id TEXT PRIMARY KEY, + initiative_id TEXT REFERENCES initiatives(id), + date INTEGER, + context TEXT, + decision TEXT, + reason TEXT, + alternatives_json TEXT, + reversible BOOLEAN +); + +CREATE TABLE initiative_blockers ( + id TEXT PRIMARY KEY, + initiative_id TEXT REFERENCES initiatives(id), + status TEXT CHECK (status IN ('active', 'bypassed', 'resolved')), + description TEXT, + identified_at INTEGER, + affects_json TEXT, + workaround TEXT, + resolution TEXT, + resolved_at INTEGER +); + +CREATE TABLE session_history ( + id TEXT PRIMARY KEY, + initiative_id TEXT REFERENCES initiatives(id), + agent_id TEXT, + started_at INTEGER, + ended_at INTEGER, + context_usage REAL, + completed_json TEXT, + issues_json TEXT, + handoff_reason TEXT +); +``` + +### File-Based (Alternative) + +``` +.planning/ +├── 
STATE.md # Current state +├── decisions/ +│ └── 2024-01-15-jwt-library.md +├── blockers/ +│ └── block-001-oauth-creds.md +└── sessions/ + ├── session-001.md + └── session-002.md +``` + +--- + +## Integration with Agents + +### Worker +- Reads STATE.md at start +- Updates position on task transitions +- Adds deviations to session notes +- Updates progress counters + +### Architect +- Creates initial STATE.md when planning +- Sets up phase/plan structure +- Documents initial decisions + +### Orchestrator +- Monitors blocker status +- Triggers resume when blockers resolve +- Coordinates session handoffs + +### Verifier +- Reads decisions for verification context +- Updates state with verification results +- Flags issues for resolution + +--- + +## Example: Resume After Crash + +``` +1. Agent crashes mid-task + +2. Supervisor detects stale assignment + - Task assigned_at > 30min ago + - No progress updates + +3. Supervisor resets task + - Status back to 'open' + - Clear assigned_to + +4. New agent picks up task + - Reads STATE.md + - Sees: "Last working on: Refresh token rotation" + - Loads relevant PLAN.md + - Resumes execution + +5. STATE.md shows continuity + sessions: + - id: session-003 + status: crashed + notes: "Agent unresponsive, task reset" + - id: session-004 + status: active + notes: "Resuming from session-003 crash" +``` diff --git a/docs/task-granularity.md b/docs/task-granularity.md new file mode 100644 index 0000000..fe18714 --- /dev/null +++ b/docs/task-granularity.md @@ -0,0 +1,309 @@ +# Task Granularity Standards + +A task must be specific enough for execution without interpretation. Vague tasks cause agents to guess, leading to inconsistent results and rework. + +## The Granularity Test + +Ask: **Can an agent execute this task without making assumptions?** + +If the answer requires "it depends" or "probably means", the task is too vague. 
+ +--- + +## Comparison Table + +| Too Vague | Just Right | +|-----------|------------| +| "Add authentication" | "Add JWT auth with refresh rotation using jose library, store in httpOnly cookie, 15min access / 7day refresh" | +| "Create the API" | "Create POST /api/projects accepting {name, description}, validates name length 3-50 chars, returns 201 with project object" | +| "Style the dashboard" | "Add Tailwind classes to Dashboard.tsx: grid layout (3 cols on lg, 1 on mobile), card shadows, hover states on action buttons" | +| "Handle errors" | "Wrap API calls in try/catch, return {error: string} on 4xx/5xx, show toast via sonner on client" | +| "Add form validation" | "Add Zod schema to CreateProjectForm: name (3-50 chars, alphanumeric), description (optional, max 500 chars), show inline errors" | +| "Improve performance" | "Add React.memo to ProjectCard, useMemo for filtered list in Dashboard, lazy load ProjectDetails route" | +| "Fix the login bug" | "Fix login redirect loop: after successful login in auth.ts:45, redirect to stored returnUrl instead of always '/' " | +| "Set up the database" | "Create SQLite database at data/cw.db with migrations in db/migrations/, run via 'cw db migrate'" | + +--- + +## Required Task Components + +Every task MUST include: + +### 1. Files +Exact paths that will be created or modified. + +```yaml +files: + - src/components/Chat.tsx # create + - src/hooks/useChat.ts # create + - src/api/messages.ts # modify +``` + +### 2. Action +What to do, what to avoid, and WHY. + +```yaml +action: | + Create Chat component with: + - Message list (virtualized for performance) + - Input field with send button + - Auto-scroll to bottom on new message + + DO NOT: + - Implement WebSocket (separate task) + - Add typing indicators (Phase 2) + + WHY: Core chat UI needed before real-time features +``` + +### 3. Verify +Command or check to prove completion. 
+ +```yaml +verify: + - command: "npm run typecheck" + expect: "No type errors" + - command: "npm run test -- Chat.test.tsx" + expect: "Tests pass" + - manual: "Navigate to /chat, see empty message list and input" +``` + +### 4. Done +Measurable acceptance criteria. + +```yaml +done: + - "Chat component renders without errors" + - "Input accepts text and clears on submit" + - "Messages display in chronological order" + - "Tests cover send and display functionality" +``` + +--- + +## Task Types + +### Type: auto +Agent executes autonomously. + +```yaml +type: auto +files: [src/components/Button.tsx] +action: "Create Button component with primary/secondary variants using Tailwind" +verify: "npm run typecheck && npm run test" +done: "Button renders with correct styles for each variant" +``` + +### Type: checkpoint:human-verify +Agent completes, human confirms. + +```yaml +type: checkpoint:human-verify +files: [src/pages/Dashboard.tsx] +action: "Implement dashboard layout with project cards" +verify: "Navigate to /dashboard after login" +prompt: "Does the dashboard match the design mockup?" +done: "User confirms layout is correct" +``` + +### Type: checkpoint:decision +Human makes choice that affects implementation. + +```yaml +type: checkpoint:decision +prompt: "Which chart library should we use?" +options: + - recharts: "React-native, good for simple charts" + - d3: "More powerful, steeper learning curve" + - chart.js: "Lightweight, canvas-based" +affects: "All subsequent charting tasks" +``` + +### Type: checkpoint:human-action +Unavoidable manual step. 
+ +```yaml +type: checkpoint:human-action +prompt: "Please click the verification link sent to your email" +reason: "Cannot automate email client interaction" +continue_after: "User confirms email verified" +``` + +--- + +## Time Estimation + +Tasks should fit within context budgets: + +| Complexity | Context % | Wall Time | Example | +|------------|-----------|-----------|---------| +| Trivial | 5-10% | 2-5 min | Add a CSS class | +| Simple | 10-20% | 5-15 min | Add form field | +| Medium | 20-35% | 15-30 min | Create API endpoint | +| Complex | 35-50% | 30-60 min | Implement auth flow | +| Too Large | >50% | - | **SPLIT REQUIRED** | + +--- + +## Splitting Large Tasks + +When a task exceeds 50% context estimate, decompose: + +### Before (Too Large) +```yaml +title: "Implement user authentication" +# This is 3+ hours of work, dozens of decisions +``` + +### After (Properly Decomposed) +```yaml +tasks: + - title: "Create users table with password hash" + files: [db/migrations/001_users.sql] + + - title: "Add signup endpoint with Zod validation" + files: [src/api/auth/signup.ts] + depends_on: [users-table] + + - title: "Add login endpoint with JWT generation" + files: [src/api/auth/login.ts] + depends_on: [users-table] + + - title: "Create auth middleware for protected routes" + files: [src/middleware/auth.ts] + depends_on: [login-endpoint] + + - title: "Add refresh token rotation" + files: [src/api/auth/refresh.ts, db/migrations/002_refresh_tokens.sql] + depends_on: [auth-middleware] +``` + +--- + +## Anti-Patterns + +### Vague Verbs +**Bad:** "Improve", "Enhance", "Update", "Fix" (without specifics) +**Good:** "Add X", "Change Y to Z", "Remove W" + +### Missing Constraints +**Bad:** "Add validation" +**Good:** "Add Zod validation: email format, password 8+ chars with number" + +### Implied Knowledge +**Bad:** "Handle the edge cases" +**Good:** "Handle: empty input (show error), network failure (retry 3x), duplicate email (show message)" + +### Compound Tasks 
+**Bad:** "Set up auth and create the user management pages" +**Good:** Two separate tasks with dependency + +### No Success Criteria +**Bad:** "Make it work" +**Good:** "Tests pass, no TypeScript errors, manual verification of happy path" + +--- + +## Examples by Domain + +### API Endpoint + +```yaml +title: "Create POST /api/projects endpoint" +files: + - src/api/projects/create.ts + - src/api/projects/schema.ts + +action: | + Create endpoint accepting: + - name: string (3-50 chars, required) + - description: string (max 500 chars, optional) + + Returns: + - 201: { id, name, description, createdAt } + - 400: { error: "validation message" } + - 401: { error: "Unauthorized" } + + Use Zod for validation, drizzle for DB insert. + +verify: + - "npm run test -- projects.test.ts" + - "curl -X POST /api/projects -d '{\"name\": \"Test\"}' returns 201" + +done: + - "Endpoint creates project in database" + - "Validation rejects invalid input with clear messages" + - "Auth middleware blocks unauthenticated requests" +``` + +### React Component + +```yaml +title: "Create ProjectCard component" +files: + - src/components/ProjectCard.tsx + - src/components/ProjectCard.test.tsx + +action: | + Create card displaying: + - Project name (truncate at 30 chars) + - Description preview (2 lines max) + - Created date (relative: "2 days ago") + - Status badge (active/archived) + + Props: { project: Project, onClick: () => void } + Use Tailwind: rounded-lg, shadow-sm, hover:shadow-md + +verify: + - "npm run typecheck" + - "npm run test -- ProjectCard" + - "Storybook renders all variants" + +done: + - "Card renders with all project fields" + - "Truncation works for long names" + - "Hover state visible" + - "Click handler fires" +``` + +### Database Migration + +```yaml +title: "Create projects table" +files: + - db/migrations/003_projects.sql + - src/db/schema/projects.ts + +action: | + Create table: + - id: TEXT PRIMARY KEY (uuid) + - user_id: TEXT NOT NULL REFERENCES users(id) + - name: 
TEXT NOT NULL + - description: TEXT + - status: TEXT DEFAULT 'active' CHECK (status IN ('active', 'archived')) + - created_at: INTEGER DEFAULT (unixepoch()) + - updated_at: INTEGER DEFAULT (unixepoch()) + + Indexes: user_id, status, created_at DESC + +verify: + - "cw db migrate runs without error" + - "sqlite3 data/cw.db '.schema projects' shows correct schema" + +done: + - "Migration applies cleanly" + - "Drizzle schema matches SQL" + - "Indexes created" +``` + +--- + +## Checklist Before Creating Task + +- [ ] Can an agent execute this without asking questions? +- [ ] Are all files listed explicitly? +- [ ] Is the action specific (not "improve" or "handle")? +- [ ] Is there a concrete verify step? +- [ ] Are done criteria measurable? +- [ ] Does estimated context fit under 50%? +- [ ] Are there no compound actions (split if needed)? diff --git a/docs/tasks.md b/docs/tasks.md new file mode 100644 index 0000000..77ce0fc --- /dev/null +++ b/docs/tasks.md @@ -0,0 +1,331 @@ +# Tasks Module + +Beads-inspired task management optimized for multi-agent coordination. Unlike beads (Git-distributed JSONL), this uses centralized SQLite for simplicity since all agents share the same workspace. + +## Design Rationale + +### Why Not Just Use Beads? + +Beads solves a different problem: distributed task tracking across forked repos with zero coordination. We don't need that: + +- All Workers operate in the same workspace under one `cw` server +- SQLite is the single source of truth +- tRPC exposes task queries directly to agents and dashboard +- No merge conflicts, no Git overhead + +### Core Agent Problem Solved + +Agents need to answer: **"What should I work on next?"** + +The `ready` query solves this: tasks that are `open` with all dependencies `closed`. Combined with priority ordering, agents can self-coordinate without human intervention. + +--- + +## Data Model + +### Task Entity + +| Field | Type | Description | +|-------|------|-------------| +| `id` | TEXT | Primary key. 
Hash-based (e.g., `tsk-a1b2c3`) or UUID | +| `parent_id` | TEXT | Optional. References parent task for hierarchies | +| `initiative_id` | TEXT | Optional. Links to Initiatives module | +| `phase_id` | TEXT | Optional. Links to initiative phase (for grouped approval) | +| `project_id` | TEXT | Optional. Scopes task to a project | +| `title` | TEXT | Required. Short task name | +| `description` | TEXT | Optional. Markdown-formatted details | +| `type` | TEXT | `task` (default), `epic`, `subtask` | +| `status` | TEXT | `open`, `in_progress`, `blocked`, `closed` | +| `priority` | INTEGER | 0=critical, 1=high, 2=normal (default), 3=low | +| `assigned_to` | TEXT | Agent/worker ID currently working on this | +| `assigned_at` | INTEGER | Unix timestamp when assigned | +| `metadata` | TEXT | JSON blob for extensibility | +| `created_at` | INTEGER | Unix timestamp | +| `updated_at` | INTEGER | Unix timestamp | +| `closed_at` | INTEGER | Unix timestamp when closed | +| `closed_reason` | TEXT | Why/how the task was completed | + +### Task Dependencies + +| Field | Type | Description | +|-------|------|-------------| +| `task_id` | TEXT | The task that is blocked | +| `depends_on` | TEXT | The task that must complete first | +| `type` | TEXT | `blocks` (default), `related` | + +### Task History + +| Field | Type | Description | +|-------|------|-------------| +| `id` | INTEGER | Auto-increment primary key | +| `task_id` | TEXT | The task that changed | +| `field` | TEXT | Which field changed | +| `old_value` | TEXT | Previous value | +| `new_value` | TEXT | New value | +| `changed_by` | TEXT | Agent/user ID | +| `changed_at` | INTEGER | Unix timestamp | + +--- + +## SQLite Schema + +```sql +CREATE TABLE tasks ( + id TEXT PRIMARY KEY, + parent_id TEXT REFERENCES tasks(id), + initiative_id TEXT, + phase_id TEXT, + project_id TEXT, + + title TEXT NOT NULL, + description TEXT, + type TEXT NOT NULL DEFAULT 'task' CHECK (type IN ('task', 'epic', 'subtask')), + + status TEXT NOT NULL 
DEFAULT 'open' CHECK (status IN ('open', 'in_progress', 'blocked', 'closed')), + priority INTEGER NOT NULL DEFAULT 2 CHECK (priority BETWEEN 0 AND 3), + + assigned_to TEXT, + assigned_at INTEGER, + + metadata TEXT, + + created_at INTEGER NOT NULL DEFAULT (unixepoch()), + updated_at INTEGER NOT NULL DEFAULT (unixepoch()), + closed_at INTEGER, + closed_reason TEXT +); + +CREATE TABLE task_dependencies ( + task_id TEXT NOT NULL REFERENCES tasks(id) ON DELETE CASCADE, + depends_on TEXT NOT NULL REFERENCES tasks(id) ON DELETE CASCADE, + type TEXT NOT NULL DEFAULT 'blocks' CHECK (type IN ('blocks', 'related')), + PRIMARY KEY (task_id, depends_on), + CHECK (task_id != depends_on) +); + +CREATE TABLE task_history ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + task_id TEXT NOT NULL REFERENCES tasks(id) ON DELETE CASCADE, + field TEXT NOT NULL, + old_value TEXT, + new_value TEXT, + changed_by TEXT, + changed_at INTEGER NOT NULL DEFAULT (unixepoch()) +); + +CREATE INDEX idx_tasks_status ON tasks(status); +CREATE INDEX idx_tasks_priority ON tasks(priority); +CREATE INDEX idx_tasks_assigned ON tasks(assigned_to); +CREATE INDEX idx_tasks_project ON tasks(project_id); +CREATE INDEX idx_tasks_initiative ON tasks(initiative_id); +CREATE INDEX idx_tasks_phase ON tasks(phase_id); +CREATE INDEX idx_task_history_task ON task_history(task_id); + +-- The critical view for agent work discovery +-- Tasks are ready when: open, no blocking deps, and phase approved (if linked) +CREATE VIEW ready_tasks AS +SELECT t.* FROM tasks t +LEFT JOIN initiative_phases p ON t.phase_id = p.id +WHERE t.status = 'open' + AND (t.phase_id IS NULL OR p.status IN ('approved', 'in_progress')) + AND NOT EXISTS ( + SELECT 1 FROM task_dependencies d + JOIN tasks dep ON d.depends_on = dep.id + WHERE d.task_id = t.id + AND d.type = 'blocks' + AND dep.status != 'closed' + ) +ORDER BY t.priority ASC, t.created_at ASC; +``` + +--- + +## Status Workflow + +``` + ┌──────────────────────────────────────┐ + │ │ + ▼ │ + [open] 
──claim──▶ [in_progress] ──done──▶ [closed] + │ │ + │ │ blocked + │ ▼ + └───────────── [blocked] ◀─────unblock───┘ +``` + +| Transition | Trigger | Notes | +|------------|---------|-------| +| `open` → `in_progress` | Agent claims task | Sets `assigned_to`, `assigned_at` | +| `in_progress` → `closed` | Work completed | Sets `closed_at`, `closed_reason` | +| `in_progress` → `blocked` | External dependency | Manual or auto-detected | +| `blocked` → `open` | Blocker resolved | Clears assignment | +| `open` → `closed` | Cancelled/won't do | Direct close without work | + +--- + +## CLI Reference + +All commands under `cw task` subcommand. + +### Core Commands + +| Command | Description | +|---------|-------------| +| `cw task ready` | List tasks ready for work (open + no blockers) | +| `cw task list [--status STATUS] [--project ID]` | List tasks with filters | +| `cw task show <id>` | Show task details + history | +| `cw task create <title> [-p PRIORITY] [-d DESC]` | Create new task | +| `cw task update <id> [--status STATUS] [--priority P]` | Update task fields | +| `cw task close <id> [--reason REASON]` | Mark task complete | + +### Dependency Commands + +| Command | Description | +|---------|-------------| +| `cw task dep add <task> <depends-on>` | Task blocked by another | +| `cw task dep rm <task> <depends-on>` | Remove dependency | +| `cw task dep tree <id>` | Show dependency graph | + +### Assignment Commands + +| Command | Description | +|---------|-------------| +| `cw task assign <id> <agent>` | Assign task to agent | +| `cw task unassign <id>` | Release task | +| `cw task mine` | List tasks assigned to current agent | + +### Output Flags (global) + +| Flag | Description | +|------|-------------| +| `--json` | Output as JSON (for agent consumption) | +| `--quiet` | Minimal output (just IDs) | + +--- + +## Agent Workflow + +Standard loop for Workers: + +``` +1. cw task ready --json +2. Pick highest priority task from result +3. 
cw task update <id> --status in_progress +4. Do the work +5. cw task close <id> --reason "Implemented X" +6. Loop to step 1 +``` + +If `cw task ready` returns empty, the agent's work is done. + +--- + +## Integration Points + +### With Initiatives +- Tasks can link to an initiative via `initiative_id` +- When initiative is approved, tasks are generated from its technical concept +- Closing all tasks for an initiative signals initiative completion + +### With Orchestrator +- Orchestrator queries `ready_tasks` view to dispatch work +- Assignment tracked to prevent double-dispatch +- Orchestrator can bulk-create tasks from job definitions + +### With Workers +- Workers claim tasks via `cw task update --status in_progress` +- Worker ID stored in `assigned_to` +- On worker crash, Supervisor can detect stale assignments and reset + +### tRPC Procedures + +```typescript +// Suggested tRPC router shape +task.list(filters) // → Task[] +task.ready(projectId?) // → Task[] +task.get(id) // → Task | null +task.create(input) // → Task +task.update(id, input) // → Task +task.close(id, reason) // → Task +task.assign(id, agent) // → Task +task.history(id) // → TaskHistory[] +task.depAdd(id, dep) // → void +task.depRemove(id, dep) // → void +task.depTree(id) // → DependencyTree +``` + +--- + +## Task Granularity Standards + +A task must be specific enough for execution without interpretation. Vague tasks cause agents to guess, leading to inconsistent results. + +### Quick Reference + +| Too Vague | Just Right | +|-----------|------------| +| "Add authentication" | "Add JWT auth with refresh rotation using jose, httpOnly cookie, 15min access / 7day refresh" | +| "Create the API" | "Create POST /api/projects accepting {name, description}, validates name 3-50 chars, returns 201" | +| "Handle errors" | "Wrap API calls in try/catch, return {error: string} on 4xx/5xx, show toast via sonner" | + +### Required Task Components + +Every task MUST include: + +1. 
**files** — Exact paths modified/created +2. **action** — What to do, what to avoid, WHY +3. **verify** — Command or check to prove completion +4. **done** — Measurable acceptance criteria + +See [task-granularity.md](task-granularity.md) for comprehensive examples and anti-patterns. + +### Context Budget + +Tasks are sized to fit agent context budgets: + +| Complexity | Context % | Example | +|------------|-----------|---------| +| Simple | 10-20% | Add form field | +| Medium | 20-35% | Create API endpoint | +| Complex | 35-50% | Implement auth flow | +| Too Large | >50% | **SPLIT REQUIRED** | + +See [context-engineering.md](context-engineering.md) for context management rules. + +--- + +## Deviation Handling + +When Workers encounter unexpected issues during execution, they follow deviation rules: + +| Rule | Action | Permission | +|------|--------|------------| +| Rule 1: Bug fixes | Auto-fix | None needed | +| Rule 2: Missing critical (validation, auth) | Auto-add | None needed | +| Rule 3: Blocking issues (deps, imports) | Auto-fix | None needed | +| Rule 4: Architectural changes | ASK | Required | + +See [deviation-rules.md](deviation-rules.md) for detailed guidance. + +--- + +## Execution Artifacts + +Task execution produces artifacts: + +| Artifact | Purpose | +|----------|---------| +| Commits | Per-task atomic commits | +| SUMMARY.md | Record of what happened | +| STATE.md updates | Position tracking | + +See [execution-artifacts.md](execution-artifacts.md) for artifact specifications. 
+ +--- + +## Future Considerations + +- **Compaction**: Summarize old closed tasks to reduce DB size (beads does this with LLM) +- **Labels/tags**: Additional categorization beyond type +- **Time tracking**: Estimated vs actual time for capacity planning +- **Recurring tasks**: Templates that spawn new tasks on schedule diff --git a/docs/verification.md b/docs/verification.md new file mode 100644 index 0000000..c1997e2 --- /dev/null +++ b/docs/verification.md @@ -0,0 +1,322 @@ +# Goal-Backward Verification + +Verification confirms that **goals are achieved**, not merely that **tasks were completed**. A completed task "create chat component" does not guarantee the goal "working chat interface" is met. + +## Core Principle + +**Task completion ≠ Goal achievement** + +Tasks are implementation steps. Goals are user outcomes. Verification bridges the gap by checking observable outcomes, not just checklist items. + +--- + +## Verification Levels + +### Level 1: Existence Check +Does the artifact exist? + +``` +✓ File exists at expected path +✓ Component is exported +✓ Route is registered +``` + +### Level 2: Substance Check +Is the artifact substantive (not a stub)? + +``` +✓ Function has implementation (not just return null) +✓ Component renders content (not empty div) +✓ API returns meaningful response (not placeholder) +``` + +### Level 3: Wiring Check +Is the artifact connected to the system? + +``` +✓ Component is rendered somewhere +✓ API endpoint is called by client +✓ Event handler is attached +✓ Database query is executed +``` + +**All three levels must pass for verification success.** + +--- + +## Must-Have Derivation + +Before verification, derive what "done" means from the goal: + +### 1. Observable Truths (3-7 user perspectives) +What can a user observe when the goal is achieved? 
+ +```yaml +observable_truths: + - "User can click 'Send' and message appears in chat" + - "Messages persist after page refresh" + - "New messages appear without page reload" + - "User sees typing indicator when other party types" +``` + +### 2. Required Artifacts +What files MUST exist? + +```yaml +required_artifacts: + - path: src/components/Chat.tsx + check: "Exports Chat component" + - path: src/api/messages.ts + check: "Exports sendMessage, getMessages" + - path: src/hooks/useChat.ts + check: "Exports useChat hook" +``` + +### 3. Required Wiring +What connections MUST work? + +```yaml +required_wiring: + - from: Chat.tsx + to: useChat.ts + check: "Component calls hook" + - from: useChat.ts + to: messages.ts + check: "Hook calls API" + - from: messages.ts + to: database + check: "API persists to DB" +``` + +### 4. Key Links (Where Stubs Hide) +What integration points commonly fail? + +```yaml +key_links: + - "Form onSubmit → API call (not just console.log)" + - "WebSocket connection → message handler" + - "API response → state update → render" +``` + +--- + +## Verification Process + +### Phase Verification + +After all tasks in a phase complete: + +``` +1. Load must-haves (from phase goal or PLAN frontmatter) +2. For each observable truth: + a. Level 1: Does the relevant code exist? + b. Level 2: Is it substantive? + c. Level 3: Is it wired? +3. For each required artifact: + a. Verify file exists + b. Verify not a stub + c. Verify it's imported/used +4. For each key link: + a. Trace the connection + b. Verify data flows +5. Scan for anti-patterns (see below) +6. 
Structure gaps for re-planning +``` + +### Anti-Pattern Scanning + +Check for common incomplete work: + +| Pattern | Detection | Meaning | +|---------|-----------|---------| +| `// TODO` | Grep for TODO comments | Work deferred | +| `throw new Error('Not implemented')` | Grep for stub errors | Placeholder code | +| `return null` / `return {}` | AST analysis | Empty implementations | +| `console.log` in handlers | Grep for console.log | Debug code left behind | +| Empty catch blocks | AST analysis | Swallowed errors | +| Hardcoded values | Manual review | Missing configuration | + +--- + +## Verification Output + +### Pass Case + +```yaml +# 2-VERIFICATION.md +phase: 2 +status: PASS +verified_at: 2024-01-15T10:30:00Z + +observable_truths: + - truth: "User can send message" + status: VERIFIED + evidence: "Chat.tsx:45 calls sendMessage on submit" + - truth: "Messages persist" + status: VERIFIED + evidence: "messages.ts:23 inserts to SQLite" + +required_artifacts: + - path: src/components/Chat.tsx + status: EXISTS + check: PASSED + - path: src/api/messages.ts + status: EXISTS + check: PASSED + +anti_patterns_found: [] + +human_verification_needed: + - "Visual layout matches design" + - "Real-time updates work under load" +``` + +### Fail Case (Gaps Found) + +```yaml +# 2-VERIFICATION.md +phase: 2 +status: GAPS_FOUND +verified_at: 2024-01-15T10:30:00Z + +gaps: + - type: STUB + location: src/hooks/useChat.ts:34 + description: "sendMessage returns immediately without API call" + severity: BLOCKING + + - type: MISSING_WIRING + location: src/components/Chat.tsx + description: "WebSocket not connected, no real-time updates" + severity: BLOCKING + + - type: ANTI_PATTERN + location: src/api/messages.ts:67 + description: "Empty catch block swallows errors" + severity: HIGH + +remediation_plan: + - "Connect useChat to actual API endpoint" + - "Initialize WebSocket in Chat component" + - "Add error handling to API calls" +``` + +--- + +## User Acceptance Testing (UAT) + 
+Verification confirms code correctness. UAT confirms user experience. + +### UAT Process + +1. Extract testable deliverables from phase goal +2. Walk user through each one: + - "Can you log in with your email?" + - "Does the dashboard show your projects?" + - "Can you create a new project?" +3. Record result: PASS, FAIL, or describe issue +4. If issues found: + - Diagnose root cause + - Create targeted fix plan +5. If all pass: Phase complete + +### UAT Output + +```yaml +# 2-UAT.md +phase: 2 +tested_by: user +tested_at: 2024-01-15T14:00:00Z + +test_cases: + - case: "Login with email" + result: PASS + + - case: "Dashboard shows projects" + result: FAIL + issue: "Shows loading spinner forever" + diagnosis: "API returns 500, missing auth header" + + - case: "Create new project" + result: BLOCKED + reason: "Cannot test, dashboard not loading" + +fix_required: true +fix_plan: + - task: "Add auth header to dashboard API call" + files: [src/api/projects.ts] + priority: P0 +``` + +--- + +## Integration with Task Workflow + +### Task Completion Hook +When task closes: +1. Worker marks task closed with reason +2. If all phase tasks closed, trigger phase verification +3. Verifier agent runs goal-backward check +4. If PASS: Phase marked complete +5. If GAPS: Create remediation tasks, phase stays in_progress + +### Verification Task Type +Verification itself is a task: + +```yaml +type: verification +phase_id: phase-2 +status: open +assigned_to: verifier-agent +priority: P0 # Always high priority +``` + +--- + +## Checkpoint Types + +During execution, agents may need human input. Use precise checkpoint types: + +### checkpoint:human-verify (90% of checkpoints) +Agent completed work, user confirms it works. + +```yaml +checkpoint: human-verify +prompt: "Can you log in with email and password?" +expected: "User confirms successful login" +``` + +### checkpoint:decision (9% of checkpoints) +User must make implementation choice. 
+ +```yaml +checkpoint: decision +prompt: "OAuth2 or SAML for SSO?" +options: + - OAuth2: "Simpler, most common" + - SAML: "Enterprise requirement" +``` + +### checkpoint:human-action (1% of checkpoints) +Truly unavoidable manual step. + +```yaml +checkpoint: human-action +prompt: "Click the email verification link" +reason: "Cannot automate email client interaction" +``` + +--- + +## Human Verification Needs + +Some verifications require human eyes: + +| Category | Examples | Why Human | +|----------|----------|-----------| +| Visual | Layout, spacing, colors | Subjective/design judgment | +| Real-time | WebSocket, live updates | Requires interaction | +| External | OAuth flow, payment | Third-party systems | +| Accessibility | Screen reader, keyboard nav | Requires tooling/expertise | + +**Mark these explicitly** in verification output. Don't claim PASS when human verification is pending. diff --git a/drizzle/0001_overrated_gladiator.sql b/drizzle/0001_overrated_gladiator.sql new file mode 100644 index 0000000..df7406b --- /dev/null +++ b/drizzle/0001_overrated_gladiator.sql @@ -0,0 +1,8 @@ +CREATE TABLE `phase_dependencies` ( + `id` text PRIMARY KEY NOT NULL, + `phase_id` text NOT NULL, + `depends_on_phase_id` text NOT NULL, + `created_at` integer NOT NULL, + FOREIGN KEY (`phase_id`) REFERENCES `phases`(`id`) ON UPDATE no action ON DELETE cascade, + FOREIGN KEY (`depends_on_phase_id`) REFERENCES `phases`(`id`) ON UPDATE no action ON DELETE cascade +); diff --git a/drizzle/0002_bumpy_killraven.sql b/drizzle/0002_bumpy_killraven.sql new file mode 100644 index 0000000..ca97cbb --- /dev/null +++ b/drizzle/0002_bumpy_killraven.sql @@ -0,0 +1,12 @@ +CREATE TABLE `pages` ( + `id` text PRIMARY KEY NOT NULL, + `initiative_id` text NOT NULL, + `parent_page_id` text, + `title` text NOT NULL, + `content` text, + `sort_order` integer DEFAULT 0 NOT NULL, + `created_at` integer NOT NULL, + `updated_at` integer NOT NULL, + FOREIGN KEY (`initiative_id`) REFERENCES 
`initiatives`(`id`) ON UPDATE no action ON DELETE cascade, + FOREIGN KEY (`parent_page_id`) REFERENCES `pages`(`id`) ON UPDATE no action ON DELETE cascade +); diff --git a/drizzle/0003_curly_ser_duncan.sql b/drizzle/0003_curly_ser_duncan.sql new file mode 100644 index 0000000..82c1b9b --- /dev/null +++ b/drizzle/0003_curly_ser_duncan.sql @@ -0,0 +1,20 @@ +CREATE TABLE `projects` ( + `id` text PRIMARY KEY NOT NULL, + `name` text NOT NULL, + `url` text NOT NULL, + `created_at` integer NOT NULL, + `updated_at` integer NOT NULL +); +--> statement-breakpoint +CREATE UNIQUE INDEX `projects_name_unique` ON `projects` (`name`);--> statement-breakpoint +CREATE UNIQUE INDEX `projects_url_unique` ON `projects` (`url`);--> statement-breakpoint +CREATE TABLE `initiative_projects` ( + `id` text PRIMARY KEY NOT NULL, + `initiative_id` text NOT NULL, + `project_id` text NOT NULL, + `created_at` integer NOT NULL, + FOREIGN KEY (`initiative_id`) REFERENCES `initiatives`(`id`) ON UPDATE no action ON DELETE cascade, + FOREIGN KEY (`project_id`) REFERENCES `projects`(`id`) ON UPDATE no action ON DELETE cascade +); +--> statement-breakpoint +CREATE UNIQUE INDEX `initiative_project_unique` ON `initiative_projects` (`initiative_id`,`project_id`); \ No newline at end of file diff --git a/drizzle/0004_white_captain_britain.sql b/drizzle/0004_white_captain_britain.sql new file mode 100644 index 0000000..84bf703 --- /dev/null +++ b/drizzle/0004_white_captain_britain.sql @@ -0,0 +1,15 @@ +CREATE TABLE `accounts` ( + `id` text PRIMARY KEY NOT NULL, + `email` text NOT NULL, + `provider` text DEFAULT 'claude' NOT NULL, + `config_dir` text NOT NULL, + `is_exhausted` integer DEFAULT false NOT NULL, + `exhausted_until` integer, + `last_used_at` integer, + `sort_order` integer DEFAULT 0 NOT NULL, + `created_at` integer NOT NULL, + `updated_at` integer NOT NULL +); +--> statement-breakpoint +ALTER TABLE `agents` ADD `provider` text DEFAULT 'claude' NOT NULL;--> statement-breakpoint +ALTER TABLE 
`agents` ADD `account_id` text REFERENCES accounts(id); \ No newline at end of file diff --git a/drizzle/0005_blushing_wendell_vaughn.sql b/drizzle/0005_blushing_wendell_vaughn.sql new file mode 100644 index 0000000..08bb6d9 --- /dev/null +++ b/drizzle/0005_blushing_wendell_vaughn.sql @@ -0,0 +1,4 @@ +ALTER TABLE `agents` ADD `pid` integer;--> statement-breakpoint +ALTER TABLE `agents` ADD `output_file_path` text;--> statement-breakpoint +ALTER TABLE `agents` ADD `result` text;--> statement-breakpoint +ALTER TABLE `agents` ADD `pending_questions` text; \ No newline at end of file diff --git a/drizzle/0006_curvy_sandman.sql b/drizzle/0006_curvy_sandman.sql new file mode 100644 index 0000000..cbfb79e --- /dev/null +++ b/drizzle/0006_curvy_sandman.sql @@ -0,0 +1 @@ +ALTER TABLE `agents` ADD `initiative_id` text REFERENCES initiatives(id); \ No newline at end of file diff --git a/drizzle/0007_robust_the_watchers.sql b/drizzle/0007_robust_the_watchers.sql new file mode 100644 index 0000000..ed3761a --- /dev/null +++ b/drizzle/0007_robust_the_watchers.sql @@ -0,0 +1,27 @@ +PRAGMA foreign_keys=OFF;--> statement-breakpoint +CREATE TABLE `__new_tasks` ( + `id` text PRIMARY KEY NOT NULL, + `plan_id` text, + `phase_id` text, + `initiative_id` text, + `name` text NOT NULL, + `description` text, + `type` text DEFAULT 'auto' NOT NULL, + `category` text DEFAULT 'execute' NOT NULL, + `priority` text DEFAULT 'medium' NOT NULL, + `status` text DEFAULT 'pending' NOT NULL, + `requires_approval` integer, + `order` integer DEFAULT 0 NOT NULL, + `created_at` integer NOT NULL, + `updated_at` integer NOT NULL, + FOREIGN KEY (`plan_id`) REFERENCES `plans`(`id`) ON UPDATE no action ON DELETE cascade, + FOREIGN KEY (`phase_id`) REFERENCES `phases`(`id`) ON UPDATE no action ON DELETE cascade, + FOREIGN KEY (`initiative_id`) REFERENCES `initiatives`(`id`) ON UPDATE no action ON DELETE cascade +); +--> statement-breakpoint +INSERT INTO `__new_tasks`("id", "plan_id", "phase_id", "initiative_id", 
"name", "description", "type", "category", "priority", "status", "requires_approval", "order", "created_at", "updated_at") SELECT "id", "plan_id", NULL, NULL, "name", "description", "type", 'execute', "priority", "status", NULL, "order", "created_at", "updated_at" FROM `tasks`;--> statement-breakpoint +DROP TABLE `tasks`;--> statement-breakpoint +ALTER TABLE `__new_tasks` RENAME TO `tasks`;--> statement-breakpoint +PRAGMA foreign_keys=ON;--> statement-breakpoint +ALTER TABLE `initiatives` ADD `merge_requires_approval` integer DEFAULT true NOT NULL;--> statement-breakpoint +ALTER TABLE `initiatives` ADD `merge_target` text; diff --git a/drizzle/0008_eliminate_plans_table.sql b/drizzle/0008_eliminate_plans_table.sql new file mode 100644 index 0000000..1aaddf3 --- /dev/null +++ b/drizzle/0008_eliminate_plans_table.sql @@ -0,0 +1,61 @@ +-- Migration: Eliminate plans table +-- Plans are now decompose tasks with parentTaskId for child relationships + +PRAGMA foreign_keys=OFF;--> statement-breakpoint + +-- Step 1: Create new tasks table with parent_task_id instead of plan_id +CREATE TABLE `__new_tasks` ( + `id` text PRIMARY KEY NOT NULL, + `phase_id` text, + `initiative_id` text, + `parent_task_id` text, + `name` text NOT NULL, + `description` text, + `type` text DEFAULT 'auto' NOT NULL, + `category` text DEFAULT 'execute' NOT NULL, + `priority` text DEFAULT 'medium' NOT NULL, + `status` text DEFAULT 'pending' NOT NULL, + `requires_approval` integer, + `order` integer DEFAULT 0 NOT NULL, + `created_at` integer NOT NULL, + `updated_at` integer NOT NULL, + FOREIGN KEY (`phase_id`) REFERENCES `phases`(`id`) ON UPDATE no action ON DELETE cascade, + FOREIGN KEY (`initiative_id`) REFERENCES `initiatives`(`id`) ON UPDATE no action ON DELETE cascade, + FOREIGN KEY (`parent_task_id`) REFERENCES `__new_tasks`(`id`) ON UPDATE no action ON DELETE cascade +);--> statement-breakpoint + +-- Step 2: Insert plans as decompose tasks FIRST (so plan IDs exist as task IDs for foreign key) 
+INSERT INTO `__new_tasks`("id", "phase_id", "initiative_id", "parent_task_id", "name", "description", "type", "category", "priority", "status", "requires_approval", "order", "created_at", "updated_at") +SELECT + "id", + "phase_id", + NULL, + NULL, + "name", + "description", + 'auto', + 'decompose', + 'medium', + CASE + WHEN "status" = 'completed' THEN 'completed' + WHEN "status" = 'in_progress' THEN 'in_progress' + ELSE 'pending' + END, + NULL, + "number", + "created_at", + "updated_at" +FROM `plans`;--> statement-breakpoint + +-- Step 3: Copy existing tasks, converting plan_id to parent_task_id +INSERT INTO `__new_tasks`("id", "phase_id", "initiative_id", "parent_task_id", "name", "description", "type", "category", "priority", "status", "requires_approval", "order", "created_at", "updated_at") +SELECT "id", "phase_id", "initiative_id", "plan_id", "name", "description", "type", "category", "priority", "status", "requires_approval", "order", "created_at", "updated_at" FROM `tasks`;--> statement-breakpoint + +-- Step 4: Drop old tasks table and rename new one +DROP TABLE `tasks`;--> statement-breakpoint +ALTER TABLE `__new_tasks` RENAME TO `tasks`;--> statement-breakpoint + +-- Step 5: Drop plans table +DROP TABLE `plans`;--> statement-breakpoint + +PRAGMA foreign_keys=ON; diff --git a/drizzle/0009_drop_account_config_dir.sql b/drizzle/0009_drop_account_config_dir.sql new file mode 100644 index 0000000..ec142b8 --- /dev/null +++ b/drizzle/0009_drop_account_config_dir.sql @@ -0,0 +1,4 @@ +-- Migration: Remove config_dir column from accounts table +-- Path is now convention-based: <workspaceRoot>/.cw/accounts/<accountId>/ + +ALTER TABLE `accounts` DROP COLUMN `config_dir`; diff --git a/drizzle/0010_add_account_credentials.sql b/drizzle/0010_add_account_credentials.sql new file mode 100644 index 0000000..7e71580 --- /dev/null +++ b/drizzle/0010_add_account_credentials.sql @@ -0,0 +1,5 @@ +-- Migration: Add config_json and credentials columns to accounts table +-- 
Credentials are now stored in DB and written to disk before spawning agents. + +ALTER TABLE `accounts` ADD COLUMN `config_json` text;--> statement-breakpoint +ALTER TABLE `accounts` ADD COLUMN `credentials` text; diff --git a/drizzle/0011_drop_initiative_description.sql b/drizzle/0011_drop_initiative_description.sql new file mode 100644 index 0000000..b5a39cb --- /dev/null +++ b/drizzle/0011_drop_initiative_description.sql @@ -0,0 +1,4 @@ +-- Migration: Remove unused description column from initiatives table +-- Content is stored in pages (markdown via Tiptap editor), not in the initiative itself. + +ALTER TABLE `initiatives` DROP COLUMN `description`; diff --git a/drizzle/0012_add_agent_user_dismissed_at.sql b/drizzle/0012_add_agent_user_dismissed_at.sql new file mode 100644 index 0000000..040be15 --- /dev/null +++ b/drizzle/0012_add_agent_user_dismissed_at.sql @@ -0,0 +1 @@ +ALTER TABLE `agents` ADD `user_dismissed_at` integer; \ No newline at end of file diff --git a/drizzle/meta/0001_snapshot.json b/drizzle/meta/0001_snapshot.json new file mode 100644 index 0000000..d4982b5 --- /dev/null +++ b/drizzle/meta/0001_snapshot.json @@ -0,0 +1,695 @@ +{ + "version": "6", + "dialect": "sqlite", + "id": "43bf93b7-da71-4b69-b289-2991b6e54a69", + "prevId": "d2fc5ac9-8232-401a-a55f-a97a4d9b6f21", + "tables": { + "agents": { + "name": "agents", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "task_id": { + "name": "task_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "session_id": { + "name": "session_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "worktree_id": { + "name": "worktree_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + 
}, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'idle'" + }, + "mode": { + "name": "mode", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'execute'" + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": { + "agents_name_unique": { + "name": "agents_name_unique", + "columns": [ + "name" + ], + "isUnique": true + } + }, + "foreignKeys": { + "agents_task_id_tasks_id_fk": { + "name": "agents_task_id_tasks_id_fk", + "tableFrom": "agents", + "tableTo": "tasks", + "columnsFrom": [ + "task_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "initiatives": { + "name": "initiatives", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'active'" + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, 
+ "checkConstraints": {} + }, + "messages": { + "name": "messages", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "sender_type": { + "name": "sender_type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "sender_id": { + "name": "sender_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "recipient_type": { + "name": "recipient_type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "recipient_id": { + "name": "recipient_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "type": { + "name": "type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'info'" + }, + "content": { + "name": "content", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "requires_response": { + "name": "requires_response", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'pending'" + }, + "parent_message_id": { + "name": "parent_message_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "messages_sender_id_agents_id_fk": { + "name": "messages_sender_id_agents_id_fk", + "tableFrom": "messages", + "tableTo": "agents", + "columnsFrom": [ + "sender_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": 
"set null", + "onUpdate": "no action" + }, + "messages_recipient_id_agents_id_fk": { + "name": "messages_recipient_id_agents_id_fk", + "tableFrom": "messages", + "tableTo": "agents", + "columnsFrom": [ + "recipient_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + }, + "messages_parent_message_id_messages_id_fk": { + "name": "messages_parent_message_id_messages_id_fk", + "tableFrom": "messages", + "tableTo": "messages", + "columnsFrom": [ + "parent_message_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "phase_dependencies": { + "name": "phase_dependencies", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "phase_id": { + "name": "phase_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "depends_on_phase_id": { + "name": "depends_on_phase_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "phase_dependencies_phase_id_phases_id_fk": { + "name": "phase_dependencies_phase_id_phases_id_fk", + "tableFrom": "phase_dependencies", + "tableTo": "phases", + "columnsFrom": [ + "phase_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "phase_dependencies_depends_on_phase_id_phases_id_fk": { + "name": "phase_dependencies_depends_on_phase_id_phases_id_fk", + "tableFrom": "phase_dependencies", + "tableTo": "phases", + "columnsFrom": [ + "depends_on_phase_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + 
"checkConstraints": {} + }, + "phases": { + "name": "phases", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "initiative_id": { + "name": "initiative_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "number": { + "name": "number", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'pending'" + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "phases_initiative_id_initiatives_id_fk": { + "name": "phases_initiative_id_initiatives_id_fk", + "tableFrom": "phases", + "tableTo": "initiatives", + "columnsFrom": [ + "initiative_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "plans": { + "name": "plans", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "phase_id": { + "name": "phase_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "number": { + "name": "number", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": 
"text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'pending'" + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "plans_phase_id_phases_id_fk": { + "name": "plans_phase_id_phases_id_fk", + "tableFrom": "plans", + "tableTo": "phases", + "columnsFrom": [ + "phase_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "task_dependencies": { + "name": "task_dependencies", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "task_id": { + "name": "task_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "depends_on_task_id": { + "name": "depends_on_task_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "task_dependencies_task_id_tasks_id_fk": { + "name": "task_dependencies_task_id_tasks_id_fk", + "tableFrom": "task_dependencies", + "tableTo": "tasks", + "columnsFrom": [ + "task_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "task_dependencies_depends_on_task_id_tasks_id_fk": { + "name": 
"task_dependencies_depends_on_task_id_tasks_id_fk", + "tableFrom": "task_dependencies", + "tableTo": "tasks", + "columnsFrom": [ + "depends_on_task_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "tasks": { + "name": "tasks", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "plan_id": { + "name": "plan_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "type": { + "name": "type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'auto'" + }, + "priority": { + "name": "priority", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'medium'" + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'pending'" + }, + "order": { + "name": "order", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 0 + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "tasks_plan_id_plans_id_fk": { + "name": "tasks_plan_id_plans_id_fk", + "tableFrom": "tasks", + "tableTo": "plans", + "columnsFrom": [ + "plan_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no 
action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + } + }, + "views": {}, + "enums": {}, + "_meta": { + "schemas": {}, + "tables": {}, + "columns": {} + }, + "internal": { + "indexes": {} + } +} \ No newline at end of file diff --git a/drizzle/meta/0002_snapshot.json b/drizzle/meta/0002_snapshot.json new file mode 100644 index 0000000..dfef6b6 --- /dev/null +++ b/drizzle/meta/0002_snapshot.json @@ -0,0 +1,789 @@ +{ + "version": "6", + "dialect": "sqlite", + "id": "cd4eb2e6-af83-473e-a189-8480d217b3c8", + "prevId": "43bf93b7-da71-4b69-b289-2991b6e54a69", + "tables": { + "agents": { + "name": "agents", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "task_id": { + "name": "task_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "session_id": { + "name": "session_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "worktree_id": { + "name": "worktree_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'idle'" + }, + "mode": { + "name": "mode", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'execute'" + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": { + "agents_name_unique": { + "name": "agents_name_unique", + "columns": [ + "name" + ], + "isUnique": true + } + }, + 
"foreignKeys": { + "agents_task_id_tasks_id_fk": { + "name": "agents_task_id_tasks_id_fk", + "tableFrom": "agents", + "tableTo": "tasks", + "columnsFrom": [ + "task_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "initiatives": { + "name": "initiatives", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'active'" + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "messages": { + "name": "messages", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "sender_type": { + "name": "sender_type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "sender_id": { + "name": "sender_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "recipient_type": { + "name": "recipient_type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "recipient_id": { + "name": "recipient_id", + "type": "text", + "primaryKey": false, + 
"notNull": false, + "autoincrement": false + }, + "type": { + "name": "type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'info'" + }, + "content": { + "name": "content", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "requires_response": { + "name": "requires_response", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'pending'" + }, + "parent_message_id": { + "name": "parent_message_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "messages_sender_id_agents_id_fk": { + "name": "messages_sender_id_agents_id_fk", + "tableFrom": "messages", + "tableTo": "agents", + "columnsFrom": [ + "sender_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + }, + "messages_recipient_id_agents_id_fk": { + "name": "messages_recipient_id_agents_id_fk", + "tableFrom": "messages", + "tableTo": "agents", + "columnsFrom": [ + "recipient_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + }, + "messages_parent_message_id_messages_id_fk": { + "name": "messages_parent_message_id_messages_id_fk", + "tableFrom": "messages", + "tableTo": "messages", + "columnsFrom": [ + "parent_message_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + 
"pages": { + "name": "pages", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "initiative_id": { + "name": "initiative_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "parent_page_id": { + "name": "parent_page_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "title": { + "name": "title", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "content": { + "name": "content", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "sort_order": { + "name": "sort_order", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 0 + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "pages_initiative_id_initiatives_id_fk": { + "name": "pages_initiative_id_initiatives_id_fk", + "tableFrom": "pages", + "tableTo": "initiatives", + "columnsFrom": [ + "initiative_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "pages_parent_page_id_pages_id_fk": { + "name": "pages_parent_page_id_pages_id_fk", + "tableFrom": "pages", + "tableTo": "pages", + "columnsFrom": [ + "parent_page_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "phase_dependencies": { + "name": "phase_dependencies", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "phase_id": { + "name": 
"phase_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "depends_on_phase_id": { + "name": "depends_on_phase_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "phase_dependencies_phase_id_phases_id_fk": { + "name": "phase_dependencies_phase_id_phases_id_fk", + "tableFrom": "phase_dependencies", + "tableTo": "phases", + "columnsFrom": [ + "phase_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "phase_dependencies_depends_on_phase_id_phases_id_fk": { + "name": "phase_dependencies_depends_on_phase_id_phases_id_fk", + "tableFrom": "phase_dependencies", + "tableTo": "phases", + "columnsFrom": [ + "depends_on_phase_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "phases": { + "name": "phases", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "initiative_id": { + "name": "initiative_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "number": { + "name": "number", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'pending'" + }, + "created_at": { + "name": "created_at", + 
"type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "phases_initiative_id_initiatives_id_fk": { + "name": "phases_initiative_id_initiatives_id_fk", + "tableFrom": "phases", + "tableTo": "initiatives", + "columnsFrom": [ + "initiative_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "plans": { + "name": "plans", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "phase_id": { + "name": "phase_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "number": { + "name": "number", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'pending'" + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "plans_phase_id_phases_id_fk": { + "name": "plans_phase_id_phases_id_fk", + "tableFrom": "plans", + "tableTo": "phases", + "columnsFrom": [ + "phase_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no 
action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "task_dependencies": { + "name": "task_dependencies", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "task_id": { + "name": "task_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "depends_on_task_id": { + "name": "depends_on_task_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "task_dependencies_task_id_tasks_id_fk": { + "name": "task_dependencies_task_id_tasks_id_fk", + "tableFrom": "task_dependencies", + "tableTo": "tasks", + "columnsFrom": [ + "task_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "task_dependencies_depends_on_task_id_tasks_id_fk": { + "name": "task_dependencies_depends_on_task_id_tasks_id_fk", + "tableFrom": "task_dependencies", + "tableTo": "tasks", + "columnsFrom": [ + "depends_on_task_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "tasks": { + "name": "tasks", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "plan_id": { + "name": "plan_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "type": { + "name": "type", + "type": 
"text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'auto'" + }, + "priority": { + "name": "priority", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'medium'" + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'pending'" + }, + "order": { + "name": "order", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 0 + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "tasks_plan_id_plans_id_fk": { + "name": "tasks_plan_id_plans_id_fk", + "tableFrom": "tasks", + "tableTo": "plans", + "columnsFrom": [ + "plan_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + } + }, + "views": {}, + "enums": {}, + "_meta": { + "schemas": {}, + "tables": {}, + "columns": {} + }, + "internal": { + "indexes": {} + } +} \ No newline at end of file diff --git a/drizzle/meta/0003_snapshot.json b/drizzle/meta/0003_snapshot.json new file mode 100644 index 0000000..05d33f4 --- /dev/null +++ b/drizzle/meta/0003_snapshot.json @@ -0,0 +1,923 @@ +{ + "version": "6", + "dialect": "sqlite", + "id": "0750ece4-b483-4fb3-8c64-3fbbc13b041d", + "prevId": "cd4eb2e6-af83-473e-a189-8480d217b3c8", + "tables": { + "agents": { + "name": "agents", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + 
"task_id": { + "name": "task_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "session_id": { + "name": "session_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "worktree_id": { + "name": "worktree_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'idle'" + }, + "mode": { + "name": "mode", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'execute'" + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": { + "agents_name_unique": { + "name": "agents_name_unique", + "columns": [ + "name" + ], + "isUnique": true + } + }, + "foreignKeys": { + "agents_task_id_tasks_id_fk": { + "name": "agents_task_id_tasks_id_fk", + "tableFrom": "agents", + "tableTo": "tasks", + "columnsFrom": [ + "task_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "initiative_projects": { + "name": "initiative_projects", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "initiative_id": { + "name": "initiative_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "project_id": { + "name": "project_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + 
"notNull": true, + "autoincrement": false + } + }, + "indexes": { + "initiative_project_unique": { + "name": "initiative_project_unique", + "columns": [ + "initiative_id", + "project_id" + ], + "isUnique": true + } + }, + "foreignKeys": { + "initiative_projects_initiative_id_initiatives_id_fk": { + "name": "initiative_projects_initiative_id_initiatives_id_fk", + "tableFrom": "initiative_projects", + "tableTo": "initiatives", + "columnsFrom": [ + "initiative_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "initiative_projects_project_id_projects_id_fk": { + "name": "initiative_projects_project_id_projects_id_fk", + "tableFrom": "initiative_projects", + "tableTo": "projects", + "columnsFrom": [ + "project_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "initiatives": { + "name": "initiatives", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'active'" + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "messages": { + "name": "messages", + "columns": { + "id": { + "name": 
"id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "sender_type": { + "name": "sender_type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "sender_id": { + "name": "sender_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "recipient_type": { + "name": "recipient_type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "recipient_id": { + "name": "recipient_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "type": { + "name": "type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'info'" + }, + "content": { + "name": "content", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "requires_response": { + "name": "requires_response", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'pending'" + }, + "parent_message_id": { + "name": "parent_message_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "messages_sender_id_agents_id_fk": { + "name": "messages_sender_id_agents_id_fk", + "tableFrom": "messages", + "tableTo": "agents", + "columnsFrom": [ + "sender_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + }, + "messages_recipient_id_agents_id_fk": { + "name": 
"messages_recipient_id_agents_id_fk", + "tableFrom": "messages", + "tableTo": "agents", + "columnsFrom": [ + "recipient_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + }, + "messages_parent_message_id_messages_id_fk": { + "name": "messages_parent_message_id_messages_id_fk", + "tableFrom": "messages", + "tableTo": "messages", + "columnsFrom": [ + "parent_message_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "pages": { + "name": "pages", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "initiative_id": { + "name": "initiative_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "parent_page_id": { + "name": "parent_page_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "title": { + "name": "title", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "content": { + "name": "content", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "sort_order": { + "name": "sort_order", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 0 + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "pages_initiative_id_initiatives_id_fk": { + "name": "pages_initiative_id_initiatives_id_fk", + "tableFrom": "pages", + "tableTo": "initiatives", + "columnsFrom": [ + "initiative_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + 
"onUpdate": "no action" + }, + "pages_parent_page_id_pages_id_fk": { + "name": "pages_parent_page_id_pages_id_fk", + "tableFrom": "pages", + "tableTo": "pages", + "columnsFrom": [ + "parent_page_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "phase_dependencies": { + "name": "phase_dependencies", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "phase_id": { + "name": "phase_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "depends_on_phase_id": { + "name": "depends_on_phase_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "phase_dependencies_phase_id_phases_id_fk": { + "name": "phase_dependencies_phase_id_phases_id_fk", + "tableFrom": "phase_dependencies", + "tableTo": "phases", + "columnsFrom": [ + "phase_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "phase_dependencies_depends_on_phase_id_phases_id_fk": { + "name": "phase_dependencies_depends_on_phase_id_phases_id_fk", + "tableFrom": "phase_dependencies", + "tableTo": "phases", + "columnsFrom": [ + "depends_on_phase_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "phases": { + "name": "phases", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "initiative_id": { + "name": "initiative_id", + "type": "text", + "primaryKey": false, + "notNull": true, + 
"autoincrement": false + }, + "number": { + "name": "number", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'pending'" + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "phases_initiative_id_initiatives_id_fk": { + "name": "phases_initiative_id_initiatives_id_fk", + "tableFrom": "phases", + "tableTo": "initiatives", + "columnsFrom": [ + "initiative_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "plans": { + "name": "plans", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "phase_id": { + "name": "phase_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "number": { + "name": "number", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": 
true, + "autoincrement": false, + "default": "'pending'" + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "plans_phase_id_phases_id_fk": { + "name": "plans_phase_id_phases_id_fk", + "tableFrom": "plans", + "tableTo": "phases", + "columnsFrom": [ + "phase_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "projects": { + "name": "projects", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "url": { + "name": "url", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": { + "projects_name_unique": { + "name": "projects_name_unique", + "columns": [ + "name" + ], + "isUnique": true + }, + "projects_url_unique": { + "name": "projects_url_unique", + "columns": [ + "url" + ], + "isUnique": true + } + }, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "task_dependencies": { + "name": "task_dependencies", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "task_id": { + "name": "task_id", + "type": "text", + "primaryKey": 
false, + "notNull": true, + "autoincrement": false + }, + "depends_on_task_id": { + "name": "depends_on_task_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "task_dependencies_task_id_tasks_id_fk": { + "name": "task_dependencies_task_id_tasks_id_fk", + "tableFrom": "task_dependencies", + "tableTo": "tasks", + "columnsFrom": [ + "task_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "task_dependencies_depends_on_task_id_tasks_id_fk": { + "name": "task_dependencies_depends_on_task_id_tasks_id_fk", + "tableFrom": "task_dependencies", + "tableTo": "tasks", + "columnsFrom": [ + "depends_on_task_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "tasks": { + "name": "tasks", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "plan_id": { + "name": "plan_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "type": { + "name": "type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'auto'" + }, + "priority": { + "name": "priority", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'medium'" + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + 
"autoincrement": false, + "default": "'pending'" + }, + "order": { + "name": "order", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 0 + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "tasks_plan_id_plans_id_fk": { + "name": "tasks_plan_id_plans_id_fk", + "tableFrom": "tasks", + "tableTo": "plans", + "columnsFrom": [ + "plan_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + } + }, + "views": {}, + "enums": {}, + "_meta": { + "schemas": {}, + "tables": {}, + "columns": {} + }, + "internal": { + "indexes": {} + } +} \ No newline at end of file diff --git a/drizzle/meta/0004_snapshot.json b/drizzle/meta/0004_snapshot.json new file mode 100644 index 0000000..1d50c35 --- /dev/null +++ b/drizzle/meta/0004_snapshot.json @@ -0,0 +1,1034 @@ +{ + "version": "6", + "dialect": "sqlite", + "id": "670f93ab-6ad3-4c58-bdb4-e31056bad596", + "prevId": "0750ece4-b483-4fb3-8c64-3fbbc13b041d", + "tables": { + "accounts": { + "name": "accounts", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "email": { + "name": "email", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "provider": { + "name": "provider", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'claude'" + }, + "config_dir": { + "name": "config_dir", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "is_exhausted": { + "name": "is_exhausted", + "type": "integer", + 
"primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": false + }, + "exhausted_until": { + "name": "exhausted_until", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "last_used_at": { + "name": "last_used_at", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "sort_order": { + "name": "sort_order", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 0 + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "agents": { + "name": "agents", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "task_id": { + "name": "task_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "session_id": { + "name": "session_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "worktree_id": { + "name": "worktree_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "provider": { + "name": "provider", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'claude'" + }, + "account_id": { + "name": "account_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + 
"autoincrement": false, + "default": "'idle'" + }, + "mode": { + "name": "mode", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'execute'" + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": { + "agents_name_unique": { + "name": "agents_name_unique", + "columns": [ + "name" + ], + "isUnique": true + } + }, + "foreignKeys": { + "agents_task_id_tasks_id_fk": { + "name": "agents_task_id_tasks_id_fk", + "tableFrom": "agents", + "tableTo": "tasks", + "columnsFrom": [ + "task_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + }, + "agents_account_id_accounts_id_fk": { + "name": "agents_account_id_accounts_id_fk", + "tableFrom": "agents", + "tableTo": "accounts", + "columnsFrom": [ + "account_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "initiative_projects": { + "name": "initiative_projects", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "initiative_id": { + "name": "initiative_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "project_id": { + "name": "project_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": { + "initiative_project_unique": { + "name": "initiative_project_unique", + "columns": [ + "initiative_id", + "project_id" + ], + "isUnique": true + } + }, + "foreignKeys": 
{ + "initiative_projects_initiative_id_initiatives_id_fk": { + "name": "initiative_projects_initiative_id_initiatives_id_fk", + "tableFrom": "initiative_projects", + "tableTo": "initiatives", + "columnsFrom": [ + "initiative_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "initiative_projects_project_id_projects_id_fk": { + "name": "initiative_projects_project_id_projects_id_fk", + "tableFrom": "initiative_projects", + "tableTo": "projects", + "columnsFrom": [ + "project_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "initiatives": { + "name": "initiatives", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'active'" + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "messages": { + "name": "messages", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "sender_type": { + "name": "sender_type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + 
"sender_id": { + "name": "sender_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "recipient_type": { + "name": "recipient_type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "recipient_id": { + "name": "recipient_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "type": { + "name": "type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'info'" + }, + "content": { + "name": "content", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "requires_response": { + "name": "requires_response", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'pending'" + }, + "parent_message_id": { + "name": "parent_message_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "messages_sender_id_agents_id_fk": { + "name": "messages_sender_id_agents_id_fk", + "tableFrom": "messages", + "tableTo": "agents", + "columnsFrom": [ + "sender_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + }, + "messages_recipient_id_agents_id_fk": { + "name": "messages_recipient_id_agents_id_fk", + "tableFrom": "messages", + "tableTo": "agents", + "columnsFrom": [ + "recipient_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + }, + 
"messages_parent_message_id_messages_id_fk": { + "name": "messages_parent_message_id_messages_id_fk", + "tableFrom": "messages", + "tableTo": "messages", + "columnsFrom": [ + "parent_message_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "pages": { + "name": "pages", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "initiative_id": { + "name": "initiative_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "parent_page_id": { + "name": "parent_page_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "title": { + "name": "title", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "content": { + "name": "content", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "sort_order": { + "name": "sort_order", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 0 + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "pages_initiative_id_initiatives_id_fk": { + "name": "pages_initiative_id_initiatives_id_fk", + "tableFrom": "pages", + "tableTo": "initiatives", + "columnsFrom": [ + "initiative_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "pages_parent_page_id_pages_id_fk": { + "name": "pages_parent_page_id_pages_id_fk", + "tableFrom": "pages", + "tableTo": "pages", + "columnsFrom": [ + "parent_page_id" + ], + "columnsTo": [ + 
"id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "phase_dependencies": { + "name": "phase_dependencies", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "phase_id": { + "name": "phase_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "depends_on_phase_id": { + "name": "depends_on_phase_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "phase_dependencies_phase_id_phases_id_fk": { + "name": "phase_dependencies_phase_id_phases_id_fk", + "tableFrom": "phase_dependencies", + "tableTo": "phases", + "columnsFrom": [ + "phase_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "phase_dependencies_depends_on_phase_id_phases_id_fk": { + "name": "phase_dependencies_depends_on_phase_id_phases_id_fk", + "tableFrom": "phase_dependencies", + "tableTo": "phases", + "columnsFrom": [ + "depends_on_phase_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "phases": { + "name": "phases", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "initiative_id": { + "name": "initiative_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "number": { + "name": "number", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + 
"notNull": true, + "autoincrement": false + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'pending'" + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "phases_initiative_id_initiatives_id_fk": { + "name": "phases_initiative_id_initiatives_id_fk", + "tableFrom": "phases", + "tableTo": "initiatives", + "columnsFrom": [ + "initiative_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "plans": { + "name": "plans", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "phase_id": { + "name": "phase_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "number": { + "name": "number", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'pending'" + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": 
"updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "plans_phase_id_phases_id_fk": { + "name": "plans_phase_id_phases_id_fk", + "tableFrom": "plans", + "tableTo": "phases", + "columnsFrom": [ + "phase_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "projects": { + "name": "projects", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "url": { + "name": "url", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": { + "projects_name_unique": { + "name": "projects_name_unique", + "columns": [ + "name" + ], + "isUnique": true + }, + "projects_url_unique": { + "name": "projects_url_unique", + "columns": [ + "url" + ], + "isUnique": true + } + }, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "task_dependencies": { + "name": "task_dependencies", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "task_id": { + "name": "task_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "depends_on_task_id": { + "name": "depends_on_task_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "created_at": { + 
"name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "task_dependencies_task_id_tasks_id_fk": { + "name": "task_dependencies_task_id_tasks_id_fk", + "tableFrom": "task_dependencies", + "tableTo": "tasks", + "columnsFrom": [ + "task_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "task_dependencies_depends_on_task_id_tasks_id_fk": { + "name": "task_dependencies_depends_on_task_id_tasks_id_fk", + "tableFrom": "task_dependencies", + "tableTo": "tasks", + "columnsFrom": [ + "depends_on_task_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "tasks": { + "name": "tasks", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "plan_id": { + "name": "plan_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "type": { + "name": "type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'auto'" + }, + "priority": { + "name": "priority", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'medium'" + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'pending'" + }, + "order": { + "name": "order", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 0 + }, + "created_at": { + "name": 
"created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "tasks_plan_id_plans_id_fk": { + "name": "tasks_plan_id_plans_id_fk", + "tableFrom": "tasks", + "tableTo": "plans", + "columnsFrom": [ + "plan_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + } + }, + "views": {}, + "enums": {}, + "_meta": { + "schemas": {}, + "tables": {}, + "columns": {} + }, + "internal": { + "indexes": {} + } +} \ No newline at end of file diff --git a/drizzle/meta/0005_snapshot.json b/drizzle/meta/0005_snapshot.json new file mode 100644 index 0000000..737f460 --- /dev/null +++ b/drizzle/meta/0005_snapshot.json @@ -0,0 +1,1062 @@ +{ + "version": "6", + "dialect": "sqlite", + "id": "565f8da6-cd99-461f-91ef-857bd03c35b2", + "prevId": "670f93ab-6ad3-4c58-bdb4-e31056bad596", + "tables": { + "accounts": { + "name": "accounts", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "email": { + "name": "email", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "provider": { + "name": "provider", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'claude'" + }, + "config_dir": { + "name": "config_dir", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "is_exhausted": { + "name": "is_exhausted", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": false + }, + "exhausted_until": { + "name": "exhausted_until", + "type": "integer", + "primaryKey": false, + "notNull": false, + 
"autoincrement": false + }, + "last_used_at": { + "name": "last_used_at", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "sort_order": { + "name": "sort_order", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 0 + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "agents": { + "name": "agents", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "task_id": { + "name": "task_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "session_id": { + "name": "session_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "worktree_id": { + "name": "worktree_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "provider": { + "name": "provider", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'claude'" + }, + "account_id": { + "name": "account_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'idle'" + }, + "mode": { + "name": "mode", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'execute'" + }, + "pid": { + "name": 
"pid", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "output_file_path": { + "name": "output_file_path", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "result": { + "name": "result", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "pending_questions": { + "name": "pending_questions", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": { + "agents_name_unique": { + "name": "agents_name_unique", + "columns": [ + "name" + ], + "isUnique": true + } + }, + "foreignKeys": { + "agents_task_id_tasks_id_fk": { + "name": "agents_task_id_tasks_id_fk", + "tableFrom": "agents", + "tableTo": "tasks", + "columnsFrom": [ + "task_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + }, + "agents_account_id_accounts_id_fk": { + "name": "agents_account_id_accounts_id_fk", + "tableFrom": "agents", + "tableTo": "accounts", + "columnsFrom": [ + "account_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "initiative_projects": { + "name": "initiative_projects", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "initiative_id": { + "name": "initiative_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "project_id": { + "name": "project_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + 
"created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": { + "initiative_project_unique": { + "name": "initiative_project_unique", + "columns": [ + "initiative_id", + "project_id" + ], + "isUnique": true + } + }, + "foreignKeys": { + "initiative_projects_initiative_id_initiatives_id_fk": { + "name": "initiative_projects_initiative_id_initiatives_id_fk", + "tableFrom": "initiative_projects", + "tableTo": "initiatives", + "columnsFrom": [ + "initiative_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "initiative_projects_project_id_projects_id_fk": { + "name": "initiative_projects_project_id_projects_id_fk", + "tableFrom": "initiative_projects", + "tableTo": "projects", + "columnsFrom": [ + "project_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "initiatives": { + "name": "initiatives", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'active'" + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + 
"checkConstraints": {} + }, + "messages": { + "name": "messages", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "sender_type": { + "name": "sender_type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "sender_id": { + "name": "sender_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "recipient_type": { + "name": "recipient_type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "recipient_id": { + "name": "recipient_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "type": { + "name": "type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'info'" + }, + "content": { + "name": "content", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "requires_response": { + "name": "requires_response", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'pending'" + }, + "parent_message_id": { + "name": "parent_message_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "messages_sender_id_agents_id_fk": { + "name": "messages_sender_id_agents_id_fk", + "tableFrom": "messages", + "tableTo": "agents", + "columnsFrom": [ + "sender_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": 
"set null", + "onUpdate": "no action" + }, + "messages_recipient_id_agents_id_fk": { + "name": "messages_recipient_id_agents_id_fk", + "tableFrom": "messages", + "tableTo": "agents", + "columnsFrom": [ + "recipient_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + }, + "messages_parent_message_id_messages_id_fk": { + "name": "messages_parent_message_id_messages_id_fk", + "tableFrom": "messages", + "tableTo": "messages", + "columnsFrom": [ + "parent_message_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "pages": { + "name": "pages", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "initiative_id": { + "name": "initiative_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "parent_page_id": { + "name": "parent_page_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "title": { + "name": "title", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "content": { + "name": "content", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "sort_order": { + "name": "sort_order", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 0 + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "pages_initiative_id_initiatives_id_fk": { + "name": "pages_initiative_id_initiatives_id_fk", + "tableFrom": "pages", + "tableTo": "initiatives", + 
"columnsFrom": [ + "initiative_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "pages_parent_page_id_pages_id_fk": { + "name": "pages_parent_page_id_pages_id_fk", + "tableFrom": "pages", + "tableTo": "pages", + "columnsFrom": [ + "parent_page_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "phase_dependencies": { + "name": "phase_dependencies", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "phase_id": { + "name": "phase_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "depends_on_phase_id": { + "name": "depends_on_phase_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "phase_dependencies_phase_id_phases_id_fk": { + "name": "phase_dependencies_phase_id_phases_id_fk", + "tableFrom": "phase_dependencies", + "tableTo": "phases", + "columnsFrom": [ + "phase_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "phase_dependencies_depends_on_phase_id_phases_id_fk": { + "name": "phase_dependencies_depends_on_phase_id_phases_id_fk", + "tableFrom": "phase_dependencies", + "tableTo": "phases", + "columnsFrom": [ + "depends_on_phase_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "phases": { + "name": "phases", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "initiative_id": { + 
"name": "initiative_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "number": { + "name": "number", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'pending'" + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "phases_initiative_id_initiatives_id_fk": { + "name": "phases_initiative_id_initiatives_id_fk", + "tableFrom": "phases", + "tableTo": "initiatives", + "columnsFrom": [ + "initiative_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "plans": { + "name": "plans", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "phase_id": { + "name": "phase_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "number": { + "name": "number", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + 
"status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'pending'" + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "plans_phase_id_phases_id_fk": { + "name": "plans_phase_id_phases_id_fk", + "tableFrom": "plans", + "tableTo": "phases", + "columnsFrom": [ + "phase_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "projects": { + "name": "projects", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "url": { + "name": "url", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": { + "projects_name_unique": { + "name": "projects_name_unique", + "columns": [ + "name" + ], + "isUnique": true + }, + "projects_url_unique": { + "name": "projects_url_unique", + "columns": [ + "url" + ], + "isUnique": true + } + }, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "task_dependencies": { + "name": "task_dependencies", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + 
"autoincrement": false + }, + "task_id": { + "name": "task_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "depends_on_task_id": { + "name": "depends_on_task_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "task_dependencies_task_id_tasks_id_fk": { + "name": "task_dependencies_task_id_tasks_id_fk", + "tableFrom": "task_dependencies", + "tableTo": "tasks", + "columnsFrom": [ + "task_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "task_dependencies_depends_on_task_id_tasks_id_fk": { + "name": "task_dependencies_depends_on_task_id_tasks_id_fk", + "tableFrom": "task_dependencies", + "tableTo": "tasks", + "columnsFrom": [ + "depends_on_task_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "tasks": { + "name": "tasks", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "plan_id": { + "name": "plan_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "type": { + "name": "type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'auto'" + }, + "priority": { + "name": "priority", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'medium'" + }, + 
"status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'pending'" + }, + "order": { + "name": "order", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 0 + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "tasks_plan_id_plans_id_fk": { + "name": "tasks_plan_id_plans_id_fk", + "tableFrom": "tasks", + "tableTo": "plans", + "columnsFrom": [ + "plan_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + } + }, + "views": {}, + "enums": {}, + "_meta": { + "schemas": {}, + "tables": {}, + "columns": {} + }, + "internal": { + "indexes": {} + } +} \ No newline at end of file diff --git a/drizzle/meta/0006_snapshot.json b/drizzle/meta/0006_snapshot.json new file mode 100644 index 0000000..bd82f73 --- /dev/null +++ b/drizzle/meta/0006_snapshot.json @@ -0,0 +1,1082 @@ +{ + "version": "6", + "dialect": "sqlite", + "id": "9a2910b0-8795-4844-a65a-52538f50bbab", + "prevId": "565f8da6-cd99-461f-91ef-857bd03c35b2", + "tables": { + "accounts": { + "name": "accounts", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "email": { + "name": "email", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "provider": { + "name": "provider", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'claude'" + }, + "config_dir": { + "name": "config_dir", + "type": "text", + "primaryKey": false, + "notNull": true, + 
"autoincrement": false + }, + "is_exhausted": { + "name": "is_exhausted", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": false + }, + "exhausted_until": { + "name": "exhausted_until", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "last_used_at": { + "name": "last_used_at", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "sort_order": { + "name": "sort_order", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 0 + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "agents": { + "name": "agents", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "task_id": { + "name": "task_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "initiative_id": { + "name": "initiative_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "session_id": { + "name": "session_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "worktree_id": { + "name": "worktree_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "provider": { + "name": "provider", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'claude'" + }, + 
"account_id": { + "name": "account_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'idle'" + }, + "mode": { + "name": "mode", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'execute'" + }, + "pid": { + "name": "pid", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "output_file_path": { + "name": "output_file_path", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "result": { + "name": "result", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "pending_questions": { + "name": "pending_questions", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": { + "agents_name_unique": { + "name": "agents_name_unique", + "columns": [ + "name" + ], + "isUnique": true + } + }, + "foreignKeys": { + "agents_task_id_tasks_id_fk": { + "name": "agents_task_id_tasks_id_fk", + "tableFrom": "agents", + "tableTo": "tasks", + "columnsFrom": [ + "task_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + }, + "agents_initiative_id_initiatives_id_fk": { + "name": "agents_initiative_id_initiatives_id_fk", + "tableFrom": "agents", + "tableTo": "initiatives", + "columnsFrom": [ + "initiative_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + }, + "agents_account_id_accounts_id_fk": { + "name": "agents_account_id_accounts_id_fk", + 
"tableFrom": "agents", + "tableTo": "accounts", + "columnsFrom": [ + "account_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "initiative_projects": { + "name": "initiative_projects", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "initiative_id": { + "name": "initiative_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "project_id": { + "name": "project_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": { + "initiative_project_unique": { + "name": "initiative_project_unique", + "columns": [ + "initiative_id", + "project_id" + ], + "isUnique": true + } + }, + "foreignKeys": { + "initiative_projects_initiative_id_initiatives_id_fk": { + "name": "initiative_projects_initiative_id_initiatives_id_fk", + "tableFrom": "initiative_projects", + "tableTo": "initiatives", + "columnsFrom": [ + "initiative_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "initiative_projects_project_id_projects_id_fk": { + "name": "initiative_projects_project_id_projects_id_fk", + "tableFrom": "initiative_projects", + "tableTo": "projects", + "columnsFrom": [ + "project_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "initiatives": { + "name": "initiatives", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": 
false, + "notNull": true, + "autoincrement": false + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'active'" + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "messages": { + "name": "messages", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "sender_type": { + "name": "sender_type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "sender_id": { + "name": "sender_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "recipient_type": { + "name": "recipient_type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "recipient_id": { + "name": "recipient_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "type": { + "name": "type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'info'" + }, + "content": { + "name": "content", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "requires_response": { + "name": "requires_response", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": 
false, + "default": "'pending'" + }, + "parent_message_id": { + "name": "parent_message_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "messages_sender_id_agents_id_fk": { + "name": "messages_sender_id_agents_id_fk", + "tableFrom": "messages", + "tableTo": "agents", + "columnsFrom": [ + "sender_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + }, + "messages_recipient_id_agents_id_fk": { + "name": "messages_recipient_id_agents_id_fk", + "tableFrom": "messages", + "tableTo": "agents", + "columnsFrom": [ + "recipient_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + }, + "messages_parent_message_id_messages_id_fk": { + "name": "messages_parent_message_id_messages_id_fk", + "tableFrom": "messages", + "tableTo": "messages", + "columnsFrom": [ + "parent_message_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "pages": { + "name": "pages", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "initiative_id": { + "name": "initiative_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "parent_page_id": { + "name": "parent_page_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "title": { + "name": "title", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "content": { + "name": "content", + 
"type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "sort_order": { + "name": "sort_order", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 0 + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "pages_initiative_id_initiatives_id_fk": { + "name": "pages_initiative_id_initiatives_id_fk", + "tableFrom": "pages", + "tableTo": "initiatives", + "columnsFrom": [ + "initiative_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "pages_parent_page_id_pages_id_fk": { + "name": "pages_parent_page_id_pages_id_fk", + "tableFrom": "pages", + "tableTo": "pages", + "columnsFrom": [ + "parent_page_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "phase_dependencies": { + "name": "phase_dependencies", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "phase_id": { + "name": "phase_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "depends_on_phase_id": { + "name": "depends_on_phase_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "phase_dependencies_phase_id_phases_id_fk": { + "name": "phase_dependencies_phase_id_phases_id_fk", + "tableFrom": "phase_dependencies", + "tableTo": "phases", + "columnsFrom": [ + 
"phase_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "phase_dependencies_depends_on_phase_id_phases_id_fk": { + "name": "phase_dependencies_depends_on_phase_id_phases_id_fk", + "tableFrom": "phase_dependencies", + "tableTo": "phases", + "columnsFrom": [ + "depends_on_phase_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "phases": { + "name": "phases", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "initiative_id": { + "name": "initiative_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "number": { + "name": "number", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'pending'" + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "phases_initiative_id_initiatives_id_fk": { + "name": "phases_initiative_id_initiatives_id_fk", + "tableFrom": "phases", + "tableTo": "initiatives", + "columnsFrom": [ + "initiative_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + 
"checkConstraints": {} + }, + "plans": { + "name": "plans", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "phase_id": { + "name": "phase_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "number": { + "name": "number", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'pending'" + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "plans_phase_id_phases_id_fk": { + "name": "plans_phase_id_phases_id_fk", + "tableFrom": "plans", + "tableTo": "phases", + "columnsFrom": [ + "phase_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "projects": { + "name": "projects", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "url": { + "name": "url", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + 
"notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": { + "projects_name_unique": { + "name": "projects_name_unique", + "columns": [ + "name" + ], + "isUnique": true + }, + "projects_url_unique": { + "name": "projects_url_unique", + "columns": [ + "url" + ], + "isUnique": true + } + }, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "task_dependencies": { + "name": "task_dependencies", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "task_id": { + "name": "task_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "depends_on_task_id": { + "name": "depends_on_task_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "task_dependencies_task_id_tasks_id_fk": { + "name": "task_dependencies_task_id_tasks_id_fk", + "tableFrom": "task_dependencies", + "tableTo": "tasks", + "columnsFrom": [ + "task_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "task_dependencies_depends_on_task_id_tasks_id_fk": { + "name": "task_dependencies_depends_on_task_id_tasks_id_fk", + "tableFrom": "task_dependencies", + "tableTo": "tasks", + "columnsFrom": [ + "depends_on_task_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "tasks": { + "name": "tasks", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": 
false + }, + "plan_id": { + "name": "plan_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "type": { + "name": "type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'auto'" + }, + "priority": { + "name": "priority", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'medium'" + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'pending'" + }, + "order": { + "name": "order", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 0 + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "tasks_plan_id_plans_id_fk": { + "name": "tasks_plan_id_plans_id_fk", + "tableFrom": "tasks", + "tableTo": "plans", + "columnsFrom": [ + "plan_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + } + }, + "views": {}, + "enums": {}, + "_meta": { + "schemas": {}, + "tables": {}, + "columns": {} + }, + "internal": { + "indexes": {} + } +} \ No newline at end of file diff --git a/drizzle/meta/0007_snapshot.json b/drizzle/meta/0007_snapshot.json new file mode 100644 index 0000000..6c1eab7 --- /dev/null +++ b/drizzle/meta/0007_snapshot.json @@ -0,0 +1,1152 @@ +{ + "version": 
"6", + "dialect": "sqlite", + "id": "c0b6d7d3-c9da-440a-9fb8-9dd88df5672a", + "prevId": "9a2910b0-8795-4844-a65a-52538f50bbab", + "tables": { + "accounts": { + "name": "accounts", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "email": { + "name": "email", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "provider": { + "name": "provider", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'claude'" + }, + "config_dir": { + "name": "config_dir", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "is_exhausted": { + "name": "is_exhausted", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": false + }, + "exhausted_until": { + "name": "exhausted_until", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "last_used_at": { + "name": "last_used_at", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "sort_order": { + "name": "sort_order", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 0 + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "agents": { + "name": "agents", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + 
"task_id": { + "name": "task_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "initiative_id": { + "name": "initiative_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "session_id": { + "name": "session_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "worktree_id": { + "name": "worktree_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "provider": { + "name": "provider", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'claude'" + }, + "account_id": { + "name": "account_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'idle'" + }, + "mode": { + "name": "mode", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'execute'" + }, + "pid": { + "name": "pid", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "output_file_path": { + "name": "output_file_path", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "result": { + "name": "result", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "pending_questions": { + "name": "pending_questions", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": { + "agents_name_unique": { + "name": "agents_name_unique", + 
"columns": [ + "name" + ], + "isUnique": true + } + }, + "foreignKeys": { + "agents_task_id_tasks_id_fk": { + "name": "agents_task_id_tasks_id_fk", + "tableFrom": "agents", + "tableTo": "tasks", + "columnsFrom": [ + "task_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + }, + "agents_initiative_id_initiatives_id_fk": { + "name": "agents_initiative_id_initiatives_id_fk", + "tableFrom": "agents", + "tableTo": "initiatives", + "columnsFrom": [ + "initiative_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + }, + "agents_account_id_accounts_id_fk": { + "name": "agents_account_id_accounts_id_fk", + "tableFrom": "agents", + "tableTo": "accounts", + "columnsFrom": [ + "account_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "initiative_projects": { + "name": "initiative_projects", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "initiative_id": { + "name": "initiative_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "project_id": { + "name": "project_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": { + "initiative_project_unique": { + "name": "initiative_project_unique", + "columns": [ + "initiative_id", + "project_id" + ], + "isUnique": true + } + }, + "foreignKeys": { + "initiative_projects_initiative_id_initiatives_id_fk": { + "name": "initiative_projects_initiative_id_initiatives_id_fk", + "tableFrom": "initiative_projects", + "tableTo": "initiatives", + "columnsFrom": [ + "initiative_id" + ], + "columnsTo": [ + "id" + ], + 
"onDelete": "cascade", + "onUpdate": "no action" + }, + "initiative_projects_project_id_projects_id_fk": { + "name": "initiative_projects_project_id_projects_id_fk", + "tableFrom": "initiative_projects", + "tableTo": "projects", + "columnsFrom": [ + "project_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "initiatives": { + "name": "initiatives", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'active'" + }, + "merge_requires_approval": { + "name": "merge_requires_approval", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": true + }, + "merge_target": { + "name": "merge_target", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "messages": { + "name": "messages", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "sender_type": { + "name": "sender_type", + "type": "text", + "primaryKey": false, + 
"notNull": true, + "autoincrement": false + }, + "sender_id": { + "name": "sender_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "recipient_type": { + "name": "recipient_type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "recipient_id": { + "name": "recipient_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "type": { + "name": "type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'info'" + }, + "content": { + "name": "content", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "requires_response": { + "name": "requires_response", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'pending'" + }, + "parent_message_id": { + "name": "parent_message_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "messages_sender_id_agents_id_fk": { + "name": "messages_sender_id_agents_id_fk", + "tableFrom": "messages", + "tableTo": "agents", + "columnsFrom": [ + "sender_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + }, + "messages_recipient_id_agents_id_fk": { + "name": "messages_recipient_id_agents_id_fk", + "tableFrom": "messages", + "tableTo": "agents", + "columnsFrom": [ + "recipient_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + 
"onUpdate": "no action" + }, + "messages_parent_message_id_messages_id_fk": { + "name": "messages_parent_message_id_messages_id_fk", + "tableFrom": "messages", + "tableTo": "messages", + "columnsFrom": [ + "parent_message_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "pages": { + "name": "pages", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "initiative_id": { + "name": "initiative_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "parent_page_id": { + "name": "parent_page_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "title": { + "name": "title", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "content": { + "name": "content", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "sort_order": { + "name": "sort_order", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 0 + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "pages_initiative_id_initiatives_id_fk": { + "name": "pages_initiative_id_initiatives_id_fk", + "tableFrom": "pages", + "tableTo": "initiatives", + "columnsFrom": [ + "initiative_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "pages_parent_page_id_pages_id_fk": { + "name": "pages_parent_page_id_pages_id_fk", + "tableFrom": "pages", + "tableTo": "pages", + "columnsFrom": [ + 
"parent_page_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "phase_dependencies": { + "name": "phase_dependencies", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "phase_id": { + "name": "phase_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "depends_on_phase_id": { + "name": "depends_on_phase_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "phase_dependencies_phase_id_phases_id_fk": { + "name": "phase_dependencies_phase_id_phases_id_fk", + "tableFrom": "phase_dependencies", + "tableTo": "phases", + "columnsFrom": [ + "phase_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "phase_dependencies_depends_on_phase_id_phases_id_fk": { + "name": "phase_dependencies_depends_on_phase_id_phases_id_fk", + "tableFrom": "phase_dependencies", + "tableTo": "phases", + "columnsFrom": [ + "depends_on_phase_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "phases": { + "name": "phases", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "initiative_id": { + "name": "initiative_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "number": { + "name": "number", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": 
"text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'pending'" + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "phases_initiative_id_initiatives_id_fk": { + "name": "phases_initiative_id_initiatives_id_fk", + "tableFrom": "phases", + "tableTo": "initiatives", + "columnsFrom": [ + "initiative_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "plans": { + "name": "plans", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "phase_id": { + "name": "phase_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "number": { + "name": "number", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'pending'" + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + 
}, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "plans_phase_id_phases_id_fk": { + "name": "plans_phase_id_phases_id_fk", + "tableFrom": "plans", + "tableTo": "phases", + "columnsFrom": [ + "phase_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "projects": { + "name": "projects", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "url": { + "name": "url", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": { + "projects_name_unique": { + "name": "projects_name_unique", + "columns": [ + "name" + ], + "isUnique": true + }, + "projects_url_unique": { + "name": "projects_url_unique", + "columns": [ + "url" + ], + "isUnique": true + } + }, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "task_dependencies": { + "name": "task_dependencies", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "task_id": { + "name": "task_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "depends_on_task_id": { + "name": "depends_on_task_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": 
false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "task_dependencies_task_id_tasks_id_fk": { + "name": "task_dependencies_task_id_tasks_id_fk", + "tableFrom": "task_dependencies", + "tableTo": "tasks", + "columnsFrom": [ + "task_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "task_dependencies_depends_on_task_id_tasks_id_fk": { + "name": "task_dependencies_depends_on_task_id_tasks_id_fk", + "tableFrom": "task_dependencies", + "tableTo": "tasks", + "columnsFrom": [ + "depends_on_task_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "tasks": { + "name": "tasks", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "plan_id": { + "name": "plan_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "phase_id": { + "name": "phase_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "initiative_id": { + "name": "initiative_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "type": { + "name": "type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'auto'" + }, + "category": { + "name": "category", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'execute'" + }, + "priority": { + "name": 
"priority", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'medium'" + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'pending'" + }, + "requires_approval": { + "name": "requires_approval", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "order": { + "name": "order", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 0 + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "tasks_plan_id_plans_id_fk": { + "name": "tasks_plan_id_plans_id_fk", + "tableFrom": "tasks", + "tableTo": "plans", + "columnsFrom": [ + "plan_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "tasks_phase_id_phases_id_fk": { + "name": "tasks_phase_id_phases_id_fk", + "tableFrom": "tasks", + "tableTo": "phases", + "columnsFrom": [ + "phase_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "tasks_initiative_id_initiatives_id_fk": { + "name": "tasks_initiative_id_initiatives_id_fk", + "tableFrom": "tasks", + "tableTo": "initiatives", + "columnsFrom": [ + "initiative_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + } + }, + "views": {}, + "enums": {}, + "_meta": { + "schemas": {}, + "tables": {}, + "columns": {} + }, + "internal": { + "indexes": {} + } +} \ No newline at end of file diff --git a/drizzle/meta/_journal.json b/drizzle/meta/_journal.json index 9b15d9f..13ca87f 100644 --- 
a/drizzle/meta/_journal.json +++ b/drizzle/meta/_journal.json @@ -8,6 +8,90 @@ "when": 1769882826521, "tag": "0000_bizarre_naoko", "breakpoints": true + }, + { + "idx": 1, + "version": "6", + "when": 1770236400939, + "tag": "0001_overrated_gladiator", + "breakpoints": true + }, + { + "idx": 2, + "version": "6", + "when": 1770283755529, + "tag": "0002_bumpy_killraven", + "breakpoints": true + }, + { + "idx": 3, + "version": "6", + "when": 1770310029604, + "tag": "0003_curly_ser_duncan", + "breakpoints": true + }, + { + "idx": 4, + "version": "6", + "when": 1770311913089, + "tag": "0004_white_captain_britain", + "breakpoints": true + }, + { + "idx": 5, + "version": "6", + "when": 1770314201607, + "tag": "0005_blushing_wendell_vaughn", + "breakpoints": true + }, + { + "idx": 6, + "version": "6", + "when": 1770317104950, + "tag": "0006_curvy_sandman", + "breakpoints": true + }, + { + "idx": 7, + "version": "6", + "when": 1770373854589, + "tag": "0007_robust_the_watchers", + "breakpoints": true + }, + { + "idx": 8, + "version": "6", + "when": 1770460800000, + "tag": "0008_eliminate_plans_table", + "breakpoints": true + }, + { + "idx": 9, + "version": "6", + "when": 1770508800000, + "tag": "0009_drop_account_config_dir", + "breakpoints": true + }, + { + "idx": 10, + "version": "6", + "when": 1770512400000, + "tag": "0010_add_account_credentials", + "breakpoints": true + }, + { + "idx": 11, + "version": "6", + "when": 1770595200000, + "tag": "0011_drop_initiative_description", + "breakpoints": true + }, + { + "idx": 12, + "version": "6", + "when": 1770420629437, + "tag": "0012_add_agent_user_dismissed_at", + "breakpoints": true } ] } \ No newline at end of file diff --git a/drizzle/relations.ts b/drizzle/relations.ts new file mode 100644 index 0000000..ea6ddba --- /dev/null +++ b/drizzle/relations.ts @@ -0,0 +1,102 @@ +import { relations } from "drizzle-orm/relations"; +import { tasks, agents, messages, initiatives, phases, plans, taskDependencies, phaseDependencies } 
from "./schema"; + +export const agentsRelations = relations(agents, ({one, many}) => ({ + task: one(tasks, { + fields: [agents.taskId], + references: [tasks.id] + }), + messages_recipientId: many(messages, { + relationName: "messages_recipientId_agents_id" + }), + messages_senderId: many(messages, { + relationName: "messages_senderId_agents_id" + }), +})); + +export const tasksRelations = relations(tasks, ({one, many}) => ({ + agents: many(agents), + taskDependencies_dependsOnTaskId: many(taskDependencies, { + relationName: "taskDependencies_dependsOnTaskId_tasks_id" + }), + taskDependencies_taskId: many(taskDependencies, { + relationName: "taskDependencies_taskId_tasks_id" + }), + plan: one(plans, { + fields: [tasks.planId], + references: [plans.id] + }), +})); + +export const messagesRelations = relations(messages, ({one, many}) => ({ + message: one(messages, { + fields: [messages.parentMessageId], + references: [messages.id], + relationName: "messages_parentMessageId_messages_id" + }), + messages: many(messages, { + relationName: "messages_parentMessageId_messages_id" + }), + agent_recipientId: one(agents, { + fields: [messages.recipientId], + references: [agents.id], + relationName: "messages_recipientId_agents_id" + }), + agent_senderId: one(agents, { + fields: [messages.senderId], + references: [agents.id], + relationName: "messages_senderId_agents_id" + }), +})); + +export const phasesRelations = relations(phases, ({one, many}) => ({ + initiative: one(initiatives, { + fields: [phases.initiativeId], + references: [initiatives.id] + }), + plans: many(plans), + phaseDependencies_dependsOnPhaseId: many(phaseDependencies, { + relationName: "phaseDependencies_dependsOnPhaseId_phases_id" + }), + phaseDependencies_phaseId: many(phaseDependencies, { + relationName: "phaseDependencies_phaseId_phases_id" + }), +})); + +export const initiativesRelations = relations(initiatives, ({many}) => ({ + phases: many(phases), +})); + +export const plansRelations = 
relations(plans, ({one, many}) => ({ + phase: one(phases, { + fields: [plans.phaseId], + references: [phases.id] + }), + tasks: many(tasks), +})); + +export const taskDependenciesRelations = relations(taskDependencies, ({one}) => ({ + task_dependsOnTaskId: one(tasks, { + fields: [taskDependencies.dependsOnTaskId], + references: [tasks.id], + relationName: "taskDependencies_dependsOnTaskId_tasks_id" + }), + task_taskId: one(tasks, { + fields: [taskDependencies.taskId], + references: [tasks.id], + relationName: "taskDependencies_taskId_tasks_id" + }), +})); + +export const phaseDependenciesRelations = relations(phaseDependencies, ({one}) => ({ + phase_dependsOnPhaseId: one(phases, { + fields: [phaseDependencies.dependsOnPhaseId], + references: [phases.id], + relationName: "phaseDependencies_dependsOnPhaseId_phases_id" + }), + phase_phaseId: one(phases, { + fields: [phaseDependencies.phaseId], + references: [phases.id], + relationName: "phaseDependencies_phaseId_phases_id" + }), +})); \ No newline at end of file diff --git a/drizzle/schema.ts b/drizzle/schema.ts new file mode 100644 index 0000000..a7bf643 --- /dev/null +++ b/drizzle/schema.ts @@ -0,0 +1,98 @@ +import { sqliteTable, AnySQLiteColumn, uniqueIndex, foreignKey, text, integer } from "drizzle-orm/sqlite-core" + import { sql } from "drizzle-orm" + +export const agents = sqliteTable("agents", { + id: text().primaryKey().notNull(), + name: text().notNull(), + taskId: text("task_id").references(() => tasks.id, { onDelete: "set null" } ), + sessionId: text("session_id"), + worktreeId: text("worktree_id").notNull(), + status: text().default("idle").notNull(), + mode: text().default("execute").notNull(), + createdAt: integer("created_at").notNull(), + updatedAt: integer("updated_at").notNull(), +}, +(table) => [ + uniqueIndex("agents_name_unique").on(table.name), +]); + +export const initiatives = sqliteTable("initiatives", { + id: text().primaryKey().notNull(), + name: text().notNull(), + description: text(), + 
status: text().default("active").notNull(), + createdAt: integer("created_at").notNull(), + updatedAt: integer("updated_at").notNull(), +}); + +export const messages = sqliteTable("messages", { + id: text().primaryKey().notNull(), + senderType: text("sender_type").notNull(), + senderId: text("sender_id").references(() => agents.id, { onDelete: "set null" } ), + recipientType: text("recipient_type").notNull(), + recipientId: text("recipient_id").references(() => agents.id, { onDelete: "set null" } ), + type: text().default("info").notNull(), + content: text().notNull(), + requiresResponse: integer("requires_response").default(false).notNull(), + status: text().default("pending").notNull(), + parentMessageId: text("parent_message_id"), + createdAt: integer("created_at").notNull(), + updatedAt: integer("updated_at").notNull(), +}, +(table) => [ + foreignKey(() => ({ + columns: [table.parentMessageId], + foreignColumns: [table.id], + name: "messages_parent_message_id_messages_id_fk" + })).onDelete("set null"), +]); + +export const phases = sqliteTable("phases", { + id: text().primaryKey().notNull(), + initiativeId: text("initiative_id").notNull().references(() => initiatives.id, { onDelete: "cascade" } ), + number: integer().notNull(), + name: text().notNull(), + description: text(), + status: text().default("pending").notNull(), + createdAt: integer("created_at").notNull(), + updatedAt: integer("updated_at").notNull(), +}); + +export const plans = sqliteTable("plans", { + id: text().primaryKey().notNull(), + phaseId: text("phase_id").notNull().references(() => phases.id, { onDelete: "cascade" } ), + number: integer().notNull(), + name: text().notNull(), + description: text(), + status: text().default("pending").notNull(), + createdAt: integer("created_at").notNull(), + updatedAt: integer("updated_at").notNull(), +}); + +export const taskDependencies = sqliteTable("task_dependencies", { + id: text().primaryKey().notNull(), + taskId: 
text("task_id").notNull().references(() => tasks.id, { onDelete: "cascade" } ), + dependsOnTaskId: text("depends_on_task_id").notNull().references(() => tasks.id, { onDelete: "cascade" } ), + createdAt: integer("created_at").notNull(), +}); + +export const tasks = sqliteTable("tasks", { + id: text().primaryKey().notNull(), + planId: text("plan_id").notNull().references(() => plans.id, { onDelete: "cascade" } ), + name: text().notNull(), + description: text(), + type: text().default("auto").notNull(), + priority: text().default("medium").notNull(), + status: text().default("pending").notNull(), + order: integer().default(0).notNull(), + createdAt: integer("created_at").notNull(), + updatedAt: integer("updated_at").notNull(), +}); + +export const phaseDependencies = sqliteTable("phase_dependencies", { + id: text().primaryKey().notNull(), + phaseId: text("phase_id").notNull().references(() => phases.id, { onDelete: "cascade" } ), + dependsOnPhaseId: text("depends_on_phase_id").notNull().references(() => phases.id, { onDelete: "cascade" } ), + createdAt: integer("created_at").notNull(), +}); + diff --git a/package-lock.json b/package-lock.json index 14f7d17..6f9c29d 100644 --- a/package-lock.json +++ b/package-lock.json @@ -12,14 +12,21 @@ "packages/*" ], "dependencies": { + "@tiptap/core": "^3.19.0", + "@tiptap/extension-link": "^3.19.0", + "@tiptap/markdown": "^3.19.0", + "@tiptap/starter-kit": "^3.19.0", "@trpc/client": "^11.9.0", "@trpc/server": "^11.9.0", "better-sqlite3": "^12.6.2", "commander": "^12.1.0", "drizzle-orm": "^0.45.1", "execa": "^9.5.2", + "gray-matter": "^4.0.3", "nanoid": "^5.1.6", + "pino": "^10.3.0", "simple-git": "^3.30.0", + "unique-names-generator": "^4.7.1", "zod": "^4.3.6" }, "bin": { @@ -29,6 +36,7 @@ "@types/better-sqlite3": "^7.6.13", "@types/node": "^22.10.7", "drizzle-kit": "^0.31.8", + "pino-pretty": "^13.1.3", "rimraf": "^6.0.1", "tsx": "^4.19.2", "typescript": "^5.7.3", @@ -1289,7 +1297,7 @@ "version": "1.7.4", "resolved": 
"https://registry.npmjs.org/@floating-ui/core/-/core-1.7.4.tgz", "integrity": "sha512-C3HlIdsBxszvm5McXlB8PeOEWfBhcGBTZGkGlWc2U0KFY5IwG5OQEuQ8rq52DZmcHDlPLd+YFBK+cZcytwIFWg==", - "dev": true, + "devOptional": true, "license": "MIT", "dependencies": { "@floating-ui/utils": "^0.2.10" @@ -1299,7 +1307,7 @@ "version": "1.7.5", "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.7.5.tgz", "integrity": "sha512-N0bD2kIPInNHUHehXhMke1rBGs1dwqvC9O9KYMyyjK7iXt7GAhnro7UlcuYcGdS/yYOlq0MAVgrow8IbWJwyqg==", - "dev": true, + "devOptional": true, "license": "MIT", "dependencies": { "@floating-ui/core": "^1.7.4", @@ -1324,7 +1332,7 @@ "version": "0.2.10", "resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.10.tgz", "integrity": "sha512-aGTxbpbg8/b5JfU1HXSrbH3wXZuLPJcNEcZQFMxLs3oSzgtVu6nFPkbbGGUvBcUjKV2YyB9Wxxabo+HEH9tcRQ==", - "dev": true, + "devOptional": true, "license": "MIT" }, "node_modules/@isaacs/balanced-match": { @@ -1453,6 +1461,22 @@ "node": ">= 8" } }, + "node_modules/@pinojs/redact": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/@pinojs/redact/-/redact-0.4.0.tgz", + "integrity": "sha512-k2ENnmBugE/rzQfEcdWHcCY+/FM3VLzH9cYEsbdsoqrvzAKRhUZeRNhAZvB8OitQJ1TBed3yqWtdjzS6wJKBwg==", + "license": "MIT" + }, + "node_modules/@popperjs/core": { + "version": "2.11.8", + "resolved": "https://registry.npmjs.org/@popperjs/core/-/core-2.11.8.tgz", + "integrity": "sha512-P1st0aksCrn9sGZhp8GMYwBnQsbvAWsZAX44oXNNvLHGqAOcoVxmjZiohstwQ7SqKnbR47akdNi+uleWD8+g6A==", + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/popperjs" + } + }, "node_modules/@radix-ui/primitive": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/@radix-ui/primitive/-/primitive-1.1.3.tgz", @@ -2152,6 +2176,12 @@ "dev": true, "license": "MIT" }, + "node_modules/@remirror/core-constants": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@remirror/core-constants/-/core-constants-3.0.0.tgz", + 
"integrity": "sha512-42aWfPrimMfDKDi4YegyS7x+/0tlzaqwPQCULLanv3DMIlu96KTJR0fM5isWX2UViOqlGnX6YFgqWepcX+XMNg==", + "license": "MIT" + }, "node_modules/@rolldown/pluginutils": { "version": "1.0.0-beta.27", "resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-beta.27.tgz", @@ -2807,6 +2837,513 @@ "url": "https://github.com/sponsors/tannerlinsley" } }, + "node_modules/@tiptap/core": { + "version": "3.19.0", + "resolved": "https://registry.npmjs.org/@tiptap/core/-/core-3.19.0.tgz", + "integrity": "sha512-bpqELwPW+DG8gWiD8iiFtSl4vIBooG5uVJod92Qxn3rA9nFatyXRr4kNbMJmOZ66ezUvmCjXVe/5/G4i5cyzKA==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/ueberdosis" + }, + "peerDependencies": { + "@tiptap/pm": "^3.19.0" + } + }, + "node_modules/@tiptap/extension-blockquote": { + "version": "3.19.0", + "resolved": "https://registry.npmjs.org/@tiptap/extension-blockquote/-/extension-blockquote-3.19.0.tgz", + "integrity": "sha512-y3UfqY9KD5XwWz3ndiiJ089Ij2QKeiXy/g1/tlAN/F1AaWsnkHEHMLxCP1BIqmMpwsX7rZjMLN7G5Lp7c9682A==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/ueberdosis" + }, + "peerDependencies": { + "@tiptap/core": "^3.19.0" + } + }, + "node_modules/@tiptap/extension-bold": { + "version": "3.19.0", + "resolved": "https://registry.npmjs.org/@tiptap/extension-bold/-/extension-bold-3.19.0.tgz", + "integrity": "sha512-UZgb1d0XK4J/JRIZ7jW+s4S6KjuEDT2z1PPM6ugcgofgJkWQvRZelCPbmtSFd3kwsD+zr9UPVgTh9YIuGQ8t+Q==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/ueberdosis" + }, + "peerDependencies": { + "@tiptap/core": "^3.19.0" + } + }, + "node_modules/@tiptap/extension-bubble-menu": { + "version": "3.19.0", + "resolved": "https://registry.npmjs.org/@tiptap/extension-bubble-menu/-/extension-bubble-menu-3.19.0.tgz", + "integrity": "sha512-klNVIYGCdznhFkrRokzGd6cwzoi8J7E5KbuOfZBwFwhMKZhlz/gJfKmYg9TJopeUhrr2Z9yHgWTk8dh/YIJCdQ==", 
+ "license": "MIT", + "optional": true, + "dependencies": { + "@floating-ui/dom": "^1.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/ueberdosis" + }, + "peerDependencies": { + "@tiptap/core": "^3.19.0", + "@tiptap/pm": "^3.19.0" + } + }, + "node_modules/@tiptap/extension-bullet-list": { + "version": "3.19.0", + "resolved": "https://registry.npmjs.org/@tiptap/extension-bullet-list/-/extension-bullet-list-3.19.0.tgz", + "integrity": "sha512-F9uNnqd0xkJbMmRxVI5RuVxwB9JaCH/xtRqOUNQZnRBt7IdAElCY+Dvb4hMCtiNv+enGM/RFGJuFHR9TxmI7rw==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/ueberdosis" + }, + "peerDependencies": { + "@tiptap/extension-list": "^3.19.0" + } + }, + "node_modules/@tiptap/extension-code": { + "version": "3.19.0", + "resolved": "https://registry.npmjs.org/@tiptap/extension-code/-/extension-code-3.19.0.tgz", + "integrity": "sha512-2kqqQIXBXj2Or+4qeY3WoE7msK+XaHKL6EKOcKlOP2BW8eYqNTPzNSL+PfBDQ3snA7ljZQkTs/j4GYDj90vR1A==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/ueberdosis" + }, + "peerDependencies": { + "@tiptap/core": "^3.19.0" + } + }, + "node_modules/@tiptap/extension-code-block": { + "version": "3.19.0", + "resolved": "https://registry.npmjs.org/@tiptap/extension-code-block/-/extension-code-block-3.19.0.tgz", + "integrity": "sha512-b/2qR+tMn8MQb+eaFYgVk4qXnLNkkRYmwELQ8LEtEDQPxa5Vl7J3eu8+4OyoIFhZrNDZvvoEp80kHMCP8sI6rg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/ueberdosis" + }, + "peerDependencies": { + "@tiptap/core": "^3.19.0", + "@tiptap/pm": "^3.19.0" + } + }, + "node_modules/@tiptap/extension-document": { + "version": "3.19.0", + "resolved": "https://registry.npmjs.org/@tiptap/extension-document/-/extension-document-3.19.0.tgz", + "integrity": "sha512-AOf0kHKSFO0ymjVgYSYDncRXTITdTcrj1tqxVazrmO60KNl1Rc2dAggDvIVTEBy5NvceF0scc7q3sE/5ZtVV7A==", + "license": "MIT", + 
"funding": { + "type": "github", + "url": "https://github.com/sponsors/ueberdosis" + }, + "peerDependencies": { + "@tiptap/core": "^3.19.0" + } + }, + "node_modules/@tiptap/extension-dropcursor": { + "version": "3.19.0", + "resolved": "https://registry.npmjs.org/@tiptap/extension-dropcursor/-/extension-dropcursor-3.19.0.tgz", + "integrity": "sha512-sf3dEZXiLvsGqVK2maUIzXY6qtYYCvBumag7+VPTMGQ0D4hiZ1X/4ukt4+6VXDg5R2WP1CoIt/QvUetUjWNhbQ==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/ueberdosis" + }, + "peerDependencies": { + "@tiptap/extensions": "^3.19.0" + } + }, + "node_modules/@tiptap/extension-floating-menu": { + "version": "3.19.0", + "resolved": "https://registry.npmjs.org/@tiptap/extension-floating-menu/-/extension-floating-menu-3.19.0.tgz", + "integrity": "sha512-JaoEkVRkt+Slq3tySlIsxnMnCjS0L5n1CA1hctjLy0iah8edetj3XD5mVv5iKqDzE+LIjF4nwLRRVKJPc8hFBg==", + "license": "MIT", + "optional": true, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/ueberdosis" + }, + "peerDependencies": { + "@floating-ui/dom": "^1.0.0", + "@tiptap/core": "^3.19.0", + "@tiptap/pm": "^3.19.0" + } + }, + "node_modules/@tiptap/extension-gapcursor": { + "version": "3.19.0", + "resolved": "https://registry.npmjs.org/@tiptap/extension-gapcursor/-/extension-gapcursor-3.19.0.tgz", + "integrity": "sha512-w7DACS4oSZaDWjz7gropZHPc9oXqC9yERZTcjWxyORuuIh1JFf0TRYspleK+OK28plK/IftojD/yUDn1MTRhvA==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/ueberdosis" + }, + "peerDependencies": { + "@tiptap/extensions": "^3.19.0" + } + }, + "node_modules/@tiptap/extension-hard-break": { + "version": "3.19.0", + "resolved": "https://registry.npmjs.org/@tiptap/extension-hard-break/-/extension-hard-break-3.19.0.tgz", + "integrity": "sha512-lAmQraYhPS5hafvCl74xDB5+bLuNwBKIEsVoim35I0sDJj5nTrfhaZgMJ91VamMvT+6FF5f1dvBlxBxAWa8jew==", + "license": "MIT", + "funding": { + "type": "github", + "url": 
"https://github.com/sponsors/ueberdosis" + }, + "peerDependencies": { + "@tiptap/core": "^3.19.0" + } + }, + "node_modules/@tiptap/extension-heading": { + "version": "3.19.0", + "resolved": "https://registry.npmjs.org/@tiptap/extension-heading/-/extension-heading-3.19.0.tgz", + "integrity": "sha512-uLpLlfyp086WYNOc0ekm1gIZNlEDfmzOhKzB0Hbyi6jDagTS+p9mxUNYeYOn9jPUxpFov43+Wm/4E24oY6B+TQ==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/ueberdosis" + }, + "peerDependencies": { + "@tiptap/core": "^3.19.0" + } + }, + "node_modules/@tiptap/extension-horizontal-rule": { + "version": "3.19.0", + "resolved": "https://registry.npmjs.org/@tiptap/extension-horizontal-rule/-/extension-horizontal-rule-3.19.0.tgz", + "integrity": "sha512-iqUHmgMGhMgYGwG6L/4JdelVQ5Mstb4qHcgTGd/4dkcUOepILvhdxajPle7OEdf9sRgjQO6uoAU5BVZVC26+ng==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/ueberdosis" + }, + "peerDependencies": { + "@tiptap/core": "^3.19.0", + "@tiptap/pm": "^3.19.0" + } + }, + "node_modules/@tiptap/extension-italic": { + "version": "3.19.0", + "resolved": "https://registry.npmjs.org/@tiptap/extension-italic/-/extension-italic-3.19.0.tgz", + "integrity": "sha512-6GffxOnS/tWyCbDkirWNZITiXRta9wrCmrfa4rh+v32wfaOL1RRQNyqo9qN6Wjyl1R42Js+yXTzTTzZsOaLMYA==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/ueberdosis" + }, + "peerDependencies": { + "@tiptap/core": "^3.19.0" + } + }, + "node_modules/@tiptap/extension-link": { + "version": "3.19.0", + "resolved": "https://registry.npmjs.org/@tiptap/extension-link/-/extension-link-3.19.0.tgz", + "integrity": "sha512-HEGDJnnCPfr7KWu7Dsq+eRRe/mBCsv6DuI+7fhOCLDJjjKzNgrX2abbo/zG3D/4lCVFaVb+qawgJubgqXR/Smw==", + "license": "MIT", + "dependencies": { + "linkifyjs": "^4.3.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/ueberdosis" + }, + "peerDependencies": { + "@tiptap/core": 
"^3.19.0", + "@tiptap/pm": "^3.19.0" + } + }, + "node_modules/@tiptap/extension-list": { + "version": "3.19.0", + "resolved": "https://registry.npmjs.org/@tiptap/extension-list/-/extension-list-3.19.0.tgz", + "integrity": "sha512-N6nKbFB2VwMsPlCw67RlAtYSK48TAsAUgjnD+vd3ieSlIufdQnLXDFUP6hFKx9mwoUVUgZGz02RA6bkxOdYyTw==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/ueberdosis" + }, + "peerDependencies": { + "@tiptap/core": "^3.19.0", + "@tiptap/pm": "^3.19.0" + } + }, + "node_modules/@tiptap/extension-list-item": { + "version": "3.19.0", + "resolved": "https://registry.npmjs.org/@tiptap/extension-list-item/-/extension-list-item-3.19.0.tgz", + "integrity": "sha512-VsSKuJz4/Tb6ZmFkXqWpDYkRzmaLTyE6dNSEpNmUpmZ32sMqo58mt11/huADNwfBFB0Ve7siH/VnFNIJYY3xvg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/ueberdosis" + }, + "peerDependencies": { + "@tiptap/extension-list": "^3.19.0" + } + }, + "node_modules/@tiptap/extension-list-keymap": { + "version": "3.19.0", + "resolved": "https://registry.npmjs.org/@tiptap/extension-list-keymap/-/extension-list-keymap-3.19.0.tgz", + "integrity": "sha512-bxgmAgA3RzBGA0GyTwS2CC1c+QjkJJq9hC+S6PSOWELGRiTbwDN3MANksFXLjntkTa0N5fOnL27vBHtMStURqw==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/ueberdosis" + }, + "peerDependencies": { + "@tiptap/extension-list": "^3.19.0" + } + }, + "node_modules/@tiptap/extension-ordered-list": { + "version": "3.19.0", + "resolved": "https://registry.npmjs.org/@tiptap/extension-ordered-list/-/extension-ordered-list-3.19.0.tgz", + "integrity": "sha512-cxGsINquwHYE1kmhAcLNLHAofmoDEG6jbesR5ybl7tU5JwtKVO7S/xZatll2DU1dsDAXWPWEeeMl4e/9svYjCg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/ueberdosis" + }, + "peerDependencies": { + "@tiptap/extension-list": "^3.19.0" + } + }, + "node_modules/@tiptap/extension-paragraph": { + 
"version": "3.19.0", + "resolved": "https://registry.npmjs.org/@tiptap/extension-paragraph/-/extension-paragraph-3.19.0.tgz", + "integrity": "sha512-xWa6gj82l5+AzdYyrSk9P4ynySaDzg/SlR1FarXE5yPXibYzpS95IWaVR0m2Qaz7Rrk+IiYOTGxGRxcHLOelNg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/ueberdosis" + }, + "peerDependencies": { + "@tiptap/core": "^3.19.0" + } + }, + "node_modules/@tiptap/extension-placeholder": { + "version": "3.19.0", + "resolved": "https://registry.npmjs.org/@tiptap/extension-placeholder/-/extension-placeholder-3.19.0.tgz", + "integrity": "sha512-i15OfgyI4IDCYAcYSKUMnuZkYuUInfanjf9zquH8J2BETiomf/jZldVCp/QycMJ8DOXZ38fXDc99wOygnSNySg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/ueberdosis" + }, + "peerDependencies": { + "@tiptap/extensions": "^3.19.0" + } + }, + "node_modules/@tiptap/extension-strike": { + "version": "3.19.0", + "resolved": "https://registry.npmjs.org/@tiptap/extension-strike/-/extension-strike-3.19.0.tgz", + "integrity": "sha512-xYpabHsv7PccLUBQaP8AYiFCnYbx6P93RHPd0lgNwhdOjYFd931Zy38RyoxPHAgbYVmhf1iyx7lpuLtBnhS5dA==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/ueberdosis" + }, + "peerDependencies": { + "@tiptap/core": "^3.19.0" + } + }, + "node_modules/@tiptap/extension-table": { + "version": "3.19.0", + "resolved": "https://registry.npmjs.org/@tiptap/extension-table/-/extension-table-3.19.0.tgz", + "integrity": "sha512-Lg8DlkkDUMYE/CcGOxoCWF98B2i7VWh+AGgqlF+XWrHjhlKHfENLRXm1a0vWuyyP3NknRYILoaaZ1s7QzmXKRA==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/ueberdosis" + }, + "peerDependencies": { + "@tiptap/core": "^3.19.0", + "@tiptap/pm": "^3.19.0" + } + }, + "node_modules/@tiptap/extension-text": { + "version": "3.19.0", + "resolved": "https://registry.npmjs.org/@tiptap/extension-text/-/extension-text-3.19.0.tgz", + "integrity": 
"sha512-K95+SnbZy0h6hNFtfy23n8t/nOcTFEf69In9TSFVVmwn/Nwlke+IfiESAkqbt1/7sKJeegRXYO7WzFEmFl9Q/g==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/ueberdosis" + }, + "peerDependencies": { + "@tiptap/core": "^3.19.0" + } + }, + "node_modules/@tiptap/extension-underline": { + "version": "3.19.0", + "resolved": "https://registry.npmjs.org/@tiptap/extension-underline/-/extension-underline-3.19.0.tgz", + "integrity": "sha512-800MGEWfG49j10wQzAFiW/ele1HT04MamcL8iyuPNu7ZbjbGN2yknvdrJlRy7hZlzIrVkZMr/1tz62KN33VHIw==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/ueberdosis" + }, + "peerDependencies": { + "@tiptap/core": "^3.19.0" + } + }, + "node_modules/@tiptap/extensions": { + "version": "3.19.0", + "resolved": "https://registry.npmjs.org/@tiptap/extensions/-/extensions-3.19.0.tgz", + "integrity": "sha512-ZmGUhLbMWaGqnJh2Bry+6V4M6gMpUDYo4D1xNux5Gng/E/eYtc+PMxMZ/6F7tNTAuujLBOQKj6D+4SsSm457jw==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/ueberdosis" + }, + "peerDependencies": { + "@tiptap/core": "^3.19.0", + "@tiptap/pm": "^3.19.0" + } + }, + "node_modules/@tiptap/html": { + "version": "3.19.0", + "resolved": "https://registry.npmjs.org/@tiptap/html/-/html-3.19.0.tgz", + "integrity": "sha512-ceBKCurbi9Rir1QZOGt+l17w6VGSEnQu9LKWnSwK6A6x8x7wvP8zYvM6HiuSuP8CRLT284fNBc8qrbWDuBnHbg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/ueberdosis" + }, + "peerDependencies": { + "@tiptap/core": "^3.19.0", + "@tiptap/pm": "^3.19.0", + "happy-dom": "^20.0.2" + } + }, + "node_modules/@tiptap/markdown": { + "version": "3.19.0", + "resolved": "https://registry.npmjs.org/@tiptap/markdown/-/markdown-3.19.0.tgz", + "integrity": "sha512-Pnfacq2FHky1rqwmGwEmUJxuZu8VZ8XjaJIqsQC34S3CQWiOU+PukC9In2odzcooiVncLWT9s97jKuYpbmF1tQ==", + "license": "MIT", + "dependencies": { + "marked": "^17.0.1" + }, + "funding": { 
+ "type": "github", + "url": "https://github.com/sponsors/ueberdosis" + }, + "peerDependencies": { + "@tiptap/core": "^3.19.0", + "@tiptap/pm": "^3.19.0" + } + }, + "node_modules/@tiptap/pm": { + "version": "3.19.0", + "resolved": "https://registry.npmjs.org/@tiptap/pm/-/pm-3.19.0.tgz", + "integrity": "sha512-789zcnM4a8OWzvbD2DL31d0wbSm9BVeO/R7PLQwLIGysDI3qzrcclyZ8yhqOEVuvPitRRwYLq+mY14jz7kY4cw==", + "license": "MIT", + "dependencies": { + "prosemirror-changeset": "^2.3.0", + "prosemirror-collab": "^1.3.1", + "prosemirror-commands": "^1.6.2", + "prosemirror-dropcursor": "^1.8.1", + "prosemirror-gapcursor": "^1.3.2", + "prosemirror-history": "^1.4.1", + "prosemirror-inputrules": "^1.4.0", + "prosemirror-keymap": "^1.2.2", + "prosemirror-markdown": "^1.13.1", + "prosemirror-menu": "^1.2.4", + "prosemirror-model": "^1.24.1", + "prosemirror-schema-basic": "^1.2.3", + "prosemirror-schema-list": "^1.5.0", + "prosemirror-state": "^1.4.3", + "prosemirror-tables": "^1.6.4", + "prosemirror-trailing-node": "^3.0.0", + "prosemirror-transform": "^1.10.2", + "prosemirror-view": "^1.38.1" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/ueberdosis" + } + }, + "node_modules/@tiptap/react": { + "version": "3.19.0", + "resolved": "https://registry.npmjs.org/@tiptap/react/-/react-3.19.0.tgz", + "integrity": "sha512-GQQMUUXMpNd8tRjc1jDK3tDRXFugJO7C928EqmeBcBzTKDrFIJ3QUoZKEPxUNb6HWhZ2WL7q00fiMzsv4DNSmg==", + "license": "MIT", + "dependencies": { + "@types/use-sync-external-store": "^0.0.6", + "fast-equals": "^5.3.3", + "use-sync-external-store": "^1.4.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/ueberdosis" + }, + "optionalDependencies": { + "@tiptap/extension-bubble-menu": "^3.19.0", + "@tiptap/extension-floating-menu": "^3.19.0" + }, + "peerDependencies": { + "@tiptap/core": "^3.19.0", + "@tiptap/pm": "^3.19.0", + "@types/react": "^17.0.0 || ^18.0.0 || ^19.0.0", + "@types/react-dom": "^17.0.0 || ^18.0.0 || ^19.0.0", + 
"react": "^17.0.0 || ^18.0.0 || ^19.0.0", + "react-dom": "^17.0.0 || ^18.0.0 || ^19.0.0" + } + }, + "node_modules/@tiptap/starter-kit": { + "version": "3.19.0", + "resolved": "https://registry.npmjs.org/@tiptap/starter-kit/-/starter-kit-3.19.0.tgz", + "integrity": "sha512-dTCkHEz+Y8ADxX7h+xvl6caAj+3nII/wMB1rTQchSuNKqJTOrzyUsCWm094+IoZmLT738wANE0fRIgziNHs/ug==", + "license": "MIT", + "dependencies": { + "@tiptap/core": "^3.19.0", + "@tiptap/extension-blockquote": "^3.19.0", + "@tiptap/extension-bold": "^3.19.0", + "@tiptap/extension-bullet-list": "^3.19.0", + "@tiptap/extension-code": "^3.19.0", + "@tiptap/extension-code-block": "^3.19.0", + "@tiptap/extension-document": "^3.19.0", + "@tiptap/extension-dropcursor": "^3.19.0", + "@tiptap/extension-gapcursor": "^3.19.0", + "@tiptap/extension-hard-break": "^3.19.0", + "@tiptap/extension-heading": "^3.19.0", + "@tiptap/extension-horizontal-rule": "^3.19.0", + "@tiptap/extension-italic": "^3.19.0", + "@tiptap/extension-link": "^3.19.0", + "@tiptap/extension-list": "^3.19.0", + "@tiptap/extension-list-item": "^3.19.0", + "@tiptap/extension-list-keymap": "^3.19.0", + "@tiptap/extension-ordered-list": "^3.19.0", + "@tiptap/extension-paragraph": "^3.19.0", + "@tiptap/extension-strike": "^3.19.0", + "@tiptap/extension-text": "^3.19.0", + "@tiptap/extension-underline": "^3.19.0", + "@tiptap/extensions": "^3.19.0", + "@tiptap/pm": "^3.19.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/ueberdosis" + } + }, + "node_modules/@tiptap/suggestion": { + "version": "3.19.0", + "resolved": "https://registry.npmjs.org/@tiptap/suggestion/-/suggestion-3.19.0.tgz", + "integrity": "sha512-tUZwMRFqTVPIo566ZmHNRteyZxJy2EE4FA+S3IeIUOOvY6AW0h1imhbpBO7sXV8CeEQvpa+2DWwLvy7L3vmstA==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/ueberdosis" + }, + "peerDependencies": { + "@tiptap/core": "^3.19.0", + "@tiptap/pm": "^3.19.0" + } + }, "node_modules/@trpc/client": { 
"version": "11.9.0", "resolved": "https://registry.npmjs.org/@trpc/client/-/client-11.9.0.tgz", @@ -2929,11 +3466,32 @@ "dev": true, "license": "MIT" }, + "node_modules/@types/linkify-it": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/@types/linkify-it/-/linkify-it-5.0.0.tgz", + "integrity": "sha512-sVDA58zAw4eWAffKOaQH5/5j3XeayukzDk+ewSsnv3p4yJEZHCCzMDiZM8e0OUrRvmpGZ85jf4yDHkHsgBNr9Q==", + "license": "MIT" + }, + "node_modules/@types/markdown-it": { + "version": "14.1.2", + "resolved": "https://registry.npmjs.org/@types/markdown-it/-/markdown-it-14.1.2.tgz", + "integrity": "sha512-promo4eFwuiW+TfGxhi+0x3czqTYJkG8qB17ZUJiVF10Xm7NLVRSLUsfRTU/6h1e24VvRnXCx+hG7li58lkzog==", + "license": "MIT", + "dependencies": { + "@types/linkify-it": "^5", + "@types/mdurl": "^2" + } + }, + "node_modules/@types/mdurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@types/mdurl/-/mdurl-2.0.0.tgz", + "integrity": "sha512-RGdgjQUZba5p6QEFAVx2OGb8rQDL/cPRG7GiedRzMcJ1tYnUANBncjbSB1NRGwbvjcPeikRABz2nshyPk1bhWg==", + "license": "MIT" + }, "node_modules/@types/node": { "version": "22.19.7", "resolved": "https://registry.npmjs.org/@types/node/-/node-22.19.7.tgz", "integrity": "sha512-MciR4AKGHWl7xwxkBa6xUGxQJ4VBOmPTF7sL+iGzuahOFaO0jHCsuEfS80pan1ef4gWId1oWOweIhrDEYLuaOw==", - "devOptional": true, "license": "MIT", "dependencies": { "undici-types": "~6.21.0" @@ -2943,7 +3501,6 @@ "version": "19.2.11", "resolved": "https://registry.npmjs.org/@types/react/-/react-19.2.11.tgz", "integrity": "sha512-tORuanb01iEzWvMGVGv2ZDhYZVeRMrw453DCSAIn/5yvcSVnMoUMTyf33nQJLahYEnv9xqrTNbgz4qY5EfSh0g==", - "devOptional": true, "license": "MIT", "dependencies": { "csstype": "^3.2.2" @@ -2953,12 +3510,34 @@ "version": "19.2.3", "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-19.2.3.tgz", "integrity": "sha512-jp2L/eY6fn+KgVVQAOqYItbF0VY/YApe5Mz2F0aykSO8gx31bYCZyvSeYxCHKvzHG5eZjc+zyaS5BrBWya2+kQ==", - "devOptional": true, "license": "MIT", 
"peerDependencies": { "@types/react": "^19.2.0" } }, + "node_modules/@types/use-sync-external-store": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/@types/use-sync-external-store/-/use-sync-external-store-0.0.6.tgz", + "integrity": "sha512-zFDAD+tlpf2r4asuHEj0XH6pY6i0g5NeAHPn+15wk3BV6JA69eERFXC1gyGThDkVa1zCyKr5jox1+2LbV/AMLg==", + "license": "MIT" + }, + "node_modules/@types/whatwg-mimetype": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/whatwg-mimetype/-/whatwg-mimetype-3.0.2.tgz", + "integrity": "sha512-c2AKvDT8ToxLIOUlN51gTiHXflsfIFisS4pO7pDPoKouJCESkhZnEy623gwP9laCy5lnLDAw1vAzu2vM2YLOrA==", + "license": "MIT", + "peer": true + }, + "node_modules/@types/ws": { + "version": "8.18.1", + "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.18.1.tgz", + "integrity": "sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==", + "license": "MIT", + "peer": true, + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@vitejs/plugin-react": { "version": "4.7.0", "resolved": "https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-4.7.0.tgz", @@ -3155,6 +3734,12 @@ "dev": true, "license": "MIT" }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "license": "Python-2.0" + }, "node_modules/aria-hidden": { "version": "1.2.6", "resolved": "https://registry.npmjs.org/aria-hidden/-/aria-hidden-1.2.6.tgz", @@ -3190,6 +3775,15 @@ "node": ">=4" } }, + "node_modules/atomic-sleep": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/atomic-sleep/-/atomic-sleep-1.0.0.tgz", + "integrity": "sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ==", + "license": "MIT", + "engines": { + "node": ">=8.0.0" + } + }, "node_modules/autoprefixer": 
{ "version": "10.4.24", "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.24.tgz", @@ -3501,6 +4095,13 @@ "node": ">=6" } }, + "node_modules/colorette": { + "version": "2.0.20", + "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.20.tgz", + "integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==", + "dev": true, + "license": "MIT" + }, "node_modules/commander": { "version": "12.1.0", "resolved": "https://registry.npmjs.org/commander/-/commander-12.1.0.tgz", @@ -3523,6 +4124,12 @@ "integrity": "sha512-RAj4E421UYRgqokKUmotqAwuplYw15qtdXfY+hGzgCJ/MBjCVZcSoHK/kH9kocfjRjcDME7IiDWR/1WX1TM2Pg==", "license": "MIT" }, + "node_modules/crelt": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/crelt/-/crelt-1.0.6.tgz", + "integrity": "sha512-VQ2MBenTq1fWZUH9DJNGti7kKv6EeAuYr3cLwxUWhIu1baTaXh4Ib5W2CqHVqib4/MqbYGJqiL3Zb8GJZr3l4g==", + "license": "MIT" + }, "node_modules/cross-spawn": { "version": "7.0.6", "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", @@ -3554,9 +4161,18 @@ "version": "3.2.3", "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.2.3.tgz", "integrity": "sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==", - "devOptional": true, "license": "MIT" }, + "node_modules/dateformat": { + "version": "4.6.3", + "resolved": "https://registry.npmjs.org/dateformat/-/dateformat-4.6.3.tgz", + "integrity": "sha512-2P0p0pFGzHS5EMnhdxQi7aJN+iMheud0UhG4dlE1DLAlvL8JHjJJTX/CSm4JXwV0Ka5nGk3zC5mcb5bUQUxxMA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "*" + } + }, "node_modules/debug": { "version": "4.4.3", "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", @@ -4278,6 +4894,18 @@ "once": "^1.4.0" } }, + "node_modules/entities": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", + "integrity": 
"sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", + "license": "BSD-2-Clause", + "engines": { + "node": ">=0.12" + }, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, "node_modules/es-module-lexer": { "version": "1.7.0", "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.7.0.tgz", @@ -4350,11 +4978,22 @@ "node": ">=6" } }, + "node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/esprima": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", - "dev": true, "license": "BSD-2-Clause", "bin": { "esparse": "bin/esparse.js", @@ -4419,6 +5058,34 @@ "node": ">=12.0.0" } }, + "node_modules/extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "license": "MIT", + "dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/fast-copy": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/fast-copy/-/fast-copy-4.0.2.tgz", + "integrity": "sha512-ybA6PDXIXOXivLJK/z9e+Otk7ve13I4ckBvGO5I2RRmBU1gMHLVDJYEuJYhGwez7YNlYji2M2DvVU+a9mSFDlw==", + "dev": true, + "license": "MIT" + }, + "node_modules/fast-equals": { + "version": "5.4.0", + "resolved": "https://registry.npmjs.org/fast-equals/-/fast-equals-5.4.0.tgz", + "integrity": 
"sha512-jt2DW/aNFNwke7AUd+Z+e6pz39KO5rzdbbFCg2sGafS4mk13MI7Z8O5z9cADNn5lhGODIgLwug6TZO2ctf7kcw==", + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, "node_modules/fast-glob": { "version": "3.3.3", "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", @@ -4449,6 +5116,13 @@ "node": ">= 6" } }, + "node_modules/fast-safe-stringify": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", + "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==", + "dev": true, + "license": "MIT" + }, "node_modules/fastq": { "version": "1.20.1", "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.20.1.tgz", @@ -4641,6 +5315,39 @@ "node": ">=10.13.0" } }, + "node_modules/gray-matter": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/gray-matter/-/gray-matter-4.0.3.tgz", + "integrity": "sha512-5v6yZd4JK3eMI3FqqCouswVqwugaA9r4dNZB1wwcmrD02QkV5H0y7XBQW8QwQqEaZY1pM9aqORSORhJRdNK44Q==", + "license": "MIT", + "dependencies": { + "js-yaml": "^3.13.1", + "kind-of": "^6.0.2", + "section-matter": "^1.0.0", + "strip-bom-string": "^1.0.0" + }, + "engines": { + "node": ">=6.0" + } + }, + "node_modules/happy-dom": { + "version": "20.5.0", + "resolved": "https://registry.npmjs.org/happy-dom/-/happy-dom-20.5.0.tgz", + "integrity": "sha512-VQe+Q5CYiGOgcCERXhcfNsbnrN92FDEKciMH/x6LppU9dd0j4aTjCTlqONFOIMcAm/5JxS3+utowbXV1OoFr+g==", + "license": "MIT", + "peer": true, + "dependencies": { + "@types/node": ">=20.0.0", + "@types/whatwg-mimetype": "^3.0.2", + "@types/ws": "^8.18.1", + "entities": "^4.5.0", + "whatwg-mimetype": "^3.0.0", + "ws": "^8.18.3" + }, + "engines": { + "node": ">=20.0.0" + } + }, "node_modules/hasown": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", @@ -4654,6 +5361,13 @@ "node": ">= 0.4" } }, + "node_modules/help-me": { + "version": "5.0.0", + "resolved": 
"https://registry.npmjs.org/help-me/-/help-me-5.0.0.tgz", + "integrity": "sha512-7xgomUX6ADmcYzFik0HzAxh/73YlKR9bmFzf51CZwR+b6YtzU2m0u49hQCqV6SvlqIqsaxovfwdvbnsw3b/zpg==", + "dev": true, + "license": "MIT" + }, "node_modules/human-signals": { "version": "8.0.1", "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-8.0.1.tgz", @@ -4724,6 +5438,15 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/is-extendable": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", + "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/is-extglob": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", @@ -4818,6 +5541,16 @@ "jiti": "bin/jiti.js" } }, + "node_modules/joycon": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/joycon/-/joycon-3.1.1.tgz", + "integrity": "sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + } + }, "node_modules/js-tokens": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", @@ -4825,6 +5558,28 @@ "dev": true, "license": "MIT" }, + "node_modules/js-yaml": { + "version": "3.14.2", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.2.tgz", + "integrity": "sha512-PMSmkqxr106Xa156c2M265Z+FTrPl+oxd/rgOQy2tijQeK5TxQ43psO1ZCwhVOSdnn+RzkzlRz/eY4BgJBYVpg==", + "license": "MIT", + "dependencies": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/js-yaml/node_modules/argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": 
"sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "license": "MIT", + "dependencies": { + "sprintf-js": "~1.0.2" + } + }, "node_modules/jsesc": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", @@ -4851,6 +5606,15 @@ "node": ">=6" } }, + "node_modules/kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/lilconfig": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.3.tgz", @@ -4871,6 +5635,21 @@ "dev": true, "license": "MIT" }, + "node_modules/linkify-it": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-5.0.0.tgz", + "integrity": "sha512-5aHCbzQRADcdP+ATqnDuhhJ/MRIqDkZX5pyjFHRRysS8vZ5AbqGEoFIb6pYHPZ+L/OC2Lc+xT8uHVVR5CAK/wQ==", + "license": "MIT", + "dependencies": { + "uc.micro": "^2.0.0" + } + }, + "node_modules/linkifyjs": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/linkifyjs/-/linkifyjs-4.3.2.tgz", + "integrity": "sha512-NT1CJtq3hHIreOianA8aSXn6Cw0JzYOuDQbOrSPe7gqFnCpKP++MQe3ODgO3oh2GJFORkAAdqredOa60z63GbA==", + "license": "MIT" + }, "node_modules/lru-cache": { "version": "11.2.5", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.5.tgz", @@ -4900,6 +5679,41 @@ "@jridgewell/sourcemap-codec": "^1.5.5" } }, + "node_modules/markdown-it": { + "version": "14.1.0", + "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-14.1.0.tgz", + "integrity": "sha512-a54IwgWPaeBCAAsv13YgmALOF1elABB08FxO9i+r4VFk5Vl4pKokRPeX8u5TCgSsPi6ec1otfLjdOpVcgbpshg==", + "license": "MIT", + "dependencies": { + "argparse": "^2.0.1", + "entities": "^4.4.0", + "linkify-it": "^5.0.0", + "mdurl": "^2.0.0", + "punycode.js": "^2.3.1", + 
"uc.micro": "^2.1.0" + }, + "bin": { + "markdown-it": "bin/markdown-it.mjs" + } + }, + "node_modules/marked": { + "version": "17.0.1", + "resolved": "https://registry.npmjs.org/marked/-/marked-17.0.1.tgz", + "integrity": "sha512-boeBdiS0ghpWcSwoNm/jJBwdpFaMnZWRzjA6SkUMYb40SVaN1x7mmfGKp0jvexGcx+7y2La5zRZsYFZI6Qpypg==", + "license": "MIT", + "bin": { + "marked": "bin/marked.js" + }, + "engines": { + "node": ">= 20" + } + }, + "node_modules/mdurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-2.0.0.tgz", + "integrity": "sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w==", + "license": "MIT" + }, "node_modules/merge2": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", @@ -5120,6 +5934,15 @@ ], "license": "MIT" }, + "node_modules/on-exit-leak-free": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/on-exit-leak-free/-/on-exit-leak-free-2.1.2.tgz", + "integrity": "sha512-0eJJY6hXLGf1udHwfNftBqH+g73EU4B504nZeKpz1sYRKafAghwxEJunB2O7rDZkL4PGfsMVnTXZ2EjibbqcsA==", + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, "node_modules/once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", @@ -5129,6 +5952,12 @@ "wrappy": "1" } }, + "node_modules/orderedmap": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/orderedmap/-/orderedmap-2.1.1.tgz", + "integrity": "sha512-TvAWxi0nDe1j/rtMcWcIj94+Ffe6n7zhow33h40SKxmsmozs6dz/e+EajymfoFcHd7sxNn8yHM8839uixMOV6g==", + "license": "MIT" + }, "node_modules/package-json-from-dist": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz", @@ -5218,6 +6047,81 @@ "node": ">=0.10.0" } }, + "node_modules/pino": { + "version": "10.3.0", + "resolved": "https://registry.npmjs.org/pino/-/pino-10.3.0.tgz", + "integrity": 
"sha512-0GNPNzHXBKw6U/InGe79A3Crzyk9bcSyObF9/Gfo9DLEf5qj5RF50RSjsu0W1rZ6ZqRGdzDFCRBQvi9/rSGPtA==", + "license": "MIT", + "dependencies": { + "@pinojs/redact": "^0.4.0", + "atomic-sleep": "^1.0.0", + "on-exit-leak-free": "^2.1.0", + "pino-abstract-transport": "^3.0.0", + "pino-std-serializers": "^7.0.0", + "process-warning": "^5.0.0", + "quick-format-unescaped": "^4.0.3", + "real-require": "^0.2.0", + "safe-stable-stringify": "^2.3.1", + "sonic-boom": "^4.0.1", + "thread-stream": "^4.0.0" + }, + "bin": { + "pino": "bin.js" + } + }, + "node_modules/pino-abstract-transport": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pino-abstract-transport/-/pino-abstract-transport-3.0.0.tgz", + "integrity": "sha512-wlfUczU+n7Hy/Ha5j9a/gZNy7We5+cXp8YL+X+PG8S0KXxw7n/JXA3c46Y0zQznIJ83URJiwy7Lh56WLokNuxg==", + "license": "MIT", + "dependencies": { + "split2": "^4.0.0" + } + }, + "node_modules/pino-pretty": { + "version": "13.1.3", + "resolved": "https://registry.npmjs.org/pino-pretty/-/pino-pretty-13.1.3.tgz", + "integrity": "sha512-ttXRkkOz6WWC95KeY9+xxWL6AtImwbyMHrL1mSwqwW9u+vLp/WIElvHvCSDg0xO/Dzrggz1zv3rN5ovTRVowKg==", + "dev": true, + "license": "MIT", + "dependencies": { + "colorette": "^2.0.7", + "dateformat": "^4.6.3", + "fast-copy": "^4.0.0", + "fast-safe-stringify": "^2.1.1", + "help-me": "^5.0.0", + "joycon": "^3.1.1", + "minimist": "^1.2.6", + "on-exit-leak-free": "^2.1.0", + "pino-abstract-transport": "^3.0.0", + "pump": "^3.0.0", + "secure-json-parse": "^4.0.0", + "sonic-boom": "^4.0.1", + "strip-json-comments": "^5.0.2" + }, + "bin": { + "pino-pretty": "bin.js" + } + }, + "node_modules/pino-pretty/node_modules/strip-json-comments": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-5.0.3.tgz", + "integrity": "sha512-1tB5mhVo7U+ETBKNf92xT4hrQa3pm0MZ0PQvuDnWgAAGHDsfp4lPSpiS6psrSiet87wyGPh9ft6wmhOMQ0hDiw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.16" + }, + "funding": { 
+ "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/pino-std-serializers": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/pino-std-serializers/-/pino-std-serializers-7.1.0.tgz", + "integrity": "sha512-BndPH67/JxGExRgiX1dX0w1FvZck5Wa4aal9198SrRhZjH3GxKQUKIBnYJTdj2HDN3UQAS06HlfcSbQj2OHmaw==", + "license": "MIT" + }, "node_modules/pirates": { "version": "4.0.7", "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.7.tgz", @@ -5481,6 +6385,217 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/process-warning": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/process-warning/-/process-warning-5.0.0.tgz", + "integrity": "sha512-a39t9ApHNx2L4+HBnQKqxxHNs1r7KF+Intd8Q/g1bUh6q0WIp9voPXJ/x0j+ZL45KF1pJd9+q2jLIRMfvEshkA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "MIT" + }, + "node_modules/prosemirror-changeset": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/prosemirror-changeset/-/prosemirror-changeset-2.3.1.tgz", + "integrity": "sha512-j0kORIBm8ayJNl3zQvD1TTPHJX3g042et6y/KQhZhnPrruO8exkTgG8X+NRpj7kIyMMEx74Xb3DyMIBtO0IKkQ==", + "license": "MIT", + "dependencies": { + "prosemirror-transform": "^1.0.0" + } + }, + "node_modules/prosemirror-collab": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/prosemirror-collab/-/prosemirror-collab-1.3.1.tgz", + "integrity": "sha512-4SnynYR9TTYaQVXd/ieUvsVV4PDMBzrq2xPUWutHivDuOshZXqQ5rGbZM84HEaXKbLdItse7weMGOUdDVcLKEQ==", + "license": "MIT", + "dependencies": { + "prosemirror-state": "^1.0.0" + } + }, + "node_modules/prosemirror-commands": { + "version": "1.7.1", + "resolved": "https://registry.npmjs.org/prosemirror-commands/-/prosemirror-commands-1.7.1.tgz", + "integrity": 
"sha512-rT7qZnQtx5c0/y/KlYaGvtG411S97UaL6gdp6RIZ23DLHanMYLyfGBV5DtSnZdthQql7W+lEVbpSfwtO8T+L2w==", + "license": "MIT", + "dependencies": { + "prosemirror-model": "^1.0.0", + "prosemirror-state": "^1.0.0", + "prosemirror-transform": "^1.10.2" + } + }, + "node_modules/prosemirror-dropcursor": { + "version": "1.8.2", + "resolved": "https://registry.npmjs.org/prosemirror-dropcursor/-/prosemirror-dropcursor-1.8.2.tgz", + "integrity": "sha512-CCk6Gyx9+Tt2sbYk5NK0nB1ukHi2ryaRgadV/LvyNuO3ena1payM2z6Cg0vO1ebK8cxbzo41ku2DE5Axj1Zuiw==", + "license": "MIT", + "dependencies": { + "prosemirror-state": "^1.0.0", + "prosemirror-transform": "^1.1.0", + "prosemirror-view": "^1.1.0" + } + }, + "node_modules/prosemirror-gapcursor": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/prosemirror-gapcursor/-/prosemirror-gapcursor-1.4.0.tgz", + "integrity": "sha512-z00qvurSdCEWUIulij/isHaqu4uLS8r/Fi61IbjdIPJEonQgggbJsLnstW7Lgdk4zQ68/yr6B6bf7sJXowIgdQ==", + "license": "MIT", + "dependencies": { + "prosemirror-keymap": "^1.0.0", + "prosemirror-model": "^1.0.0", + "prosemirror-state": "^1.0.0", + "prosemirror-view": "^1.0.0" + } + }, + "node_modules/prosemirror-history": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/prosemirror-history/-/prosemirror-history-1.5.0.tgz", + "integrity": "sha512-zlzTiH01eKA55UAf1MEjtssJeHnGxO0j4K4Dpx+gnmX9n+SHNlDqI2oO1Kv1iPN5B1dm5fsljCfqKF9nFL6HRg==", + "license": "MIT", + "dependencies": { + "prosemirror-state": "^1.2.2", + "prosemirror-transform": "^1.0.0", + "prosemirror-view": "^1.31.0", + "rope-sequence": "^1.3.0" + } + }, + "node_modules/prosemirror-inputrules": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/prosemirror-inputrules/-/prosemirror-inputrules-1.5.1.tgz", + "integrity": "sha512-7wj4uMjKaXWAQ1CDgxNzNtR9AlsuwzHfdFH1ygEHA2KHF2DOEaXl1CJfNPAKCg9qNEh4rum975QLaCiQPyY6Fw==", + "license": "MIT", + "dependencies": { + "prosemirror-state": "^1.0.0", + "prosemirror-transform": "^1.0.0" + } + }, + 
"node_modules/prosemirror-keymap": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/prosemirror-keymap/-/prosemirror-keymap-1.2.3.tgz", + "integrity": "sha512-4HucRlpiLd1IPQQXNqeo81BGtkY8Ai5smHhKW9jjPKRc2wQIxksg7Hl1tTI2IfT2B/LgX6bfYvXxEpJl7aKYKw==", + "license": "MIT", + "dependencies": { + "prosemirror-state": "^1.0.0", + "w3c-keyname": "^2.2.0" + } + }, + "node_modules/prosemirror-markdown": { + "version": "1.13.4", + "resolved": "https://registry.npmjs.org/prosemirror-markdown/-/prosemirror-markdown-1.13.4.tgz", + "integrity": "sha512-D98dm4cQ3Hs6EmjK500TdAOew4Z03EV71ajEFiWra3Upr7diytJsjF4mPV2dW+eK5uNectiRj0xFxYI9NLXDbw==", + "license": "MIT", + "dependencies": { + "@types/markdown-it": "^14.0.0", + "markdown-it": "^14.0.0", + "prosemirror-model": "^1.25.0" + } + }, + "node_modules/prosemirror-menu": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/prosemirror-menu/-/prosemirror-menu-1.2.5.tgz", + "integrity": "sha512-qwXzynnpBIeg1D7BAtjOusR+81xCp53j7iWu/IargiRZqRjGIlQuu1f3jFi+ehrHhWMLoyOQTSRx/IWZJqOYtQ==", + "license": "MIT", + "dependencies": { + "crelt": "^1.0.0", + "prosemirror-commands": "^1.0.0", + "prosemirror-history": "^1.0.0", + "prosemirror-state": "^1.0.0" + } + }, + "node_modules/prosemirror-model": { + "version": "1.25.4", + "resolved": "https://registry.npmjs.org/prosemirror-model/-/prosemirror-model-1.25.4.tgz", + "integrity": "sha512-PIM7E43PBxKce8OQeezAs9j4TP+5yDpZVbuurd1h5phUxEKIu+G2a+EUZzIC5nS1mJktDJWzbqS23n1tsAf5QA==", + "license": "MIT", + "dependencies": { + "orderedmap": "^2.0.0" + } + }, + "node_modules/prosemirror-schema-basic": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/prosemirror-schema-basic/-/prosemirror-schema-basic-1.2.4.tgz", + "integrity": "sha512-ELxP4TlX3yr2v5rM7Sb70SqStq5NvI15c0j9j/gjsrO5vaw+fnnpovCLEGIcpeGfifkuqJwl4fon6b+KdrODYQ==", + "license": "MIT", + "dependencies": { + "prosemirror-model": "^1.25.0" + } + }, + "node_modules/prosemirror-schema-list": { + 
"version": "1.5.1", + "resolved": "https://registry.npmjs.org/prosemirror-schema-list/-/prosemirror-schema-list-1.5.1.tgz", + "integrity": "sha512-927lFx/uwyQaGwJxLWCZRkjXG0p48KpMj6ueoYiu4JX05GGuGcgzAy62dfiV8eFZftgyBUvLx76RsMe20fJl+Q==", + "license": "MIT", + "dependencies": { + "prosemirror-model": "^1.0.0", + "prosemirror-state": "^1.0.0", + "prosemirror-transform": "^1.7.3" + } + }, + "node_modules/prosemirror-state": { + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/prosemirror-state/-/prosemirror-state-1.4.4.tgz", + "integrity": "sha512-6jiYHH2CIGbCfnxdHbXZ12gySFY/fz/ulZE333G6bPqIZ4F+TXo9ifiR86nAHpWnfoNjOb3o5ESi7J8Uz1jXHw==", + "license": "MIT", + "dependencies": { + "prosemirror-model": "^1.0.0", + "prosemirror-transform": "^1.0.0", + "prosemirror-view": "^1.27.0" + } + }, + "node_modules/prosemirror-tables": { + "version": "1.8.5", + "resolved": "https://registry.npmjs.org/prosemirror-tables/-/prosemirror-tables-1.8.5.tgz", + "integrity": "sha512-V/0cDCsHKHe/tfWkeCmthNUcEp1IVO3p6vwN8XtwE9PZQLAZJigbw3QoraAdfJPir4NKJtNvOB8oYGKRl+t0Dw==", + "license": "MIT", + "dependencies": { + "prosemirror-keymap": "^1.2.3", + "prosemirror-model": "^1.25.4", + "prosemirror-state": "^1.4.4", + "prosemirror-transform": "^1.10.5", + "prosemirror-view": "^1.41.4" + } + }, + "node_modules/prosemirror-trailing-node": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/prosemirror-trailing-node/-/prosemirror-trailing-node-3.0.0.tgz", + "integrity": "sha512-xiun5/3q0w5eRnGYfNlW1uU9W6x5MoFKWwq/0TIRgt09lv7Hcser2QYV8t4muXbEr+Fwo0geYn79Xs4GKywrRQ==", + "license": "MIT", + "dependencies": { + "@remirror/core-constants": "3.0.0", + "escape-string-regexp": "^4.0.0" + }, + "peerDependencies": { + "prosemirror-model": "^1.22.1", + "prosemirror-state": "^1.4.2", + "prosemirror-view": "^1.33.8" + } + }, + "node_modules/prosemirror-transform": { + "version": "1.11.0", + "resolved": 
"https://registry.npmjs.org/prosemirror-transform/-/prosemirror-transform-1.11.0.tgz", + "integrity": "sha512-4I7Ce4KpygXb9bkiPS3hTEk4dSHorfRw8uI0pE8IhxlK2GXsqv5tIA7JUSxtSu7u8APVOTtbUBxTmnHIxVkIJw==", + "license": "MIT", + "dependencies": { + "prosemirror-model": "^1.21.0" + } + }, + "node_modules/prosemirror-view": { + "version": "1.41.5", + "resolved": "https://registry.npmjs.org/prosemirror-view/-/prosemirror-view-1.41.5.tgz", + "integrity": "sha512-UDQbIPnDrjE8tqUBbPmCOZgtd75htE6W3r0JCmY9bL6W1iemDM37MZEKC49d+tdQ0v/CKx4gjxLoLsfkD2NiZA==", + "license": "MIT", + "dependencies": { + "prosemirror-model": "^1.20.0", + "prosemirror-state": "^1.0.0", + "prosemirror-transform": "^1.1.0" + } + }, "node_modules/pump": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.3.tgz", @@ -5491,6 +6606,15 @@ "once": "^1.3.1" } }, + "node_modules/punycode.js": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode.js/-/punycode.js-2.3.1.tgz", + "integrity": "sha512-uxFIHU0YlHYhDQtV4R9J6a52SLx28BCjT+4ieh7IGbgwVJWO+km431c4yRlREUAsAmt/uMjQUyQHNEPf0M39CA==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/queue-microtask": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", @@ -5512,6 +6636,12 @@ ], "license": "MIT" }, + "node_modules/quick-format-unescaped": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/quick-format-unescaped/-/quick-format-unescaped-4.0.4.tgz", + "integrity": "sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg==", + "license": "MIT" + }, "node_modules/rc": { "version": "1.2.8", "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz", @@ -5677,6 +6807,15 @@ "url": "https://github.com/sponsors/jonschlinkert" } }, + "node_modules/real-require": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/real-require/-/real-require-0.2.0.tgz", + "integrity": 
"sha512-57frrGM/OCTLqLOAh0mhVA9VBMHd+9U7Zb2THMGdBUoZVOtGbJzjxsYGDJ3A9AYYCP4hn6y1TVbaOfzWtm5GFg==", + "license": "MIT", + "engines": { + "node": ">= 12.13.0" + } + }, "node_modules/recast": { "version": "0.23.11", "resolved": "https://registry.npmjs.org/recast/-/recast-0.23.11.tgz", @@ -5801,6 +6940,12 @@ "fsevents": "~2.3.2" } }, + "node_modules/rope-sequence": { + "version": "1.3.4", + "resolved": "https://registry.npmjs.org/rope-sequence/-/rope-sequence-1.3.4.tgz", + "integrity": "sha512-UT5EDe2cu2E/6O4igUr5PSFs23nvvukicWHx6GnOPlHAiiYbzNuCRQCuiUdHJQcqKalLKlrYJnjY0ySGsXNQXQ==", + "license": "MIT" + }, "node_modules/run-parallel": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", @@ -5845,12 +6990,51 @@ ], "license": "MIT" }, + "node_modules/safe-stable-stringify": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/safe-stable-stringify/-/safe-stable-stringify-2.5.0.tgz", + "integrity": "sha512-b3rppTKm9T+PsVCBEOUR46GWI7fdOs00VKZ1+9c1EWDaDMvjQc6tUwuFyIprgGgTcWoVHSKrU8H31ZHA2e0RHA==", + "license": "MIT", + "engines": { + "node": ">=10" + } + }, "node_modules/scheduler": { "version": "0.27.0", "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.27.0.tgz", "integrity": "sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q==", "license": "MIT" }, + "node_modules/section-matter": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/section-matter/-/section-matter-1.0.0.tgz", + "integrity": "sha512-vfD3pmTzGpufjScBh50YHKzEu2lxBWhVEHsNGoEXmCmn2hKGfeNLYMzCJpe8cD7gqX7TJluOVpBkAequ6dgMmA==", + "license": "MIT", + "dependencies": { + "extend-shallow": "^2.0.1", + "kind-of": "^6.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/secure-json-parse": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/secure-json-parse/-/secure-json-parse-4.1.0.tgz", + "integrity": 
"sha512-l4KnYfEyqYJxDwlNVyRfO2E4NTHfMKAWdUuA8J0yve2Dz/E/PdBepY03RvyJpssIpRFwJoCD55wA+mEDs6ByWA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "BSD-3-Clause" + }, "node_modules/semver": { "version": "7.7.3", "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", @@ -5984,6 +7168,15 @@ "url": "https://github.com/steveukx/git-js?sponsor=1" } }, + "node_modules/sonic-boom": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/sonic-boom/-/sonic-boom-4.2.0.tgz", + "integrity": "sha512-INb7TM37/mAcsGmc9hyyI6+QR3rR1zVRu36B0NeGXKnOOLiZOfER5SA+N7X7k3yUYRzLWafduTDvJAfDswwEww==", + "license": "MIT", + "dependencies": { + "atomic-sleep": "^1.0.0" + } + }, "node_modules/sonner": { "version": "2.0.7", "resolved": "https://registry.npmjs.org/sonner/-/sonner-2.0.7.tgz", @@ -6025,6 +7218,21 @@ "source-map": "^0.6.0" } }, + "node_modules/split2": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz", + "integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==", + "license": "ISC", + "engines": { + "node": ">= 10.x" + } + }, + "node_modules/sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==", + "license": "BSD-3-Clause" + }, "node_modules/stackback": { "version": "0.0.2", "resolved": "https://registry.npmjs.org/stackback/-/stackback-0.0.2.tgz", @@ -6048,6 +7256,15 @@ "safe-buffer": "~5.2.0" } }, + "node_modules/strip-bom-string": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/strip-bom-string/-/strip-bom-string-1.0.0.tgz", + "integrity": 
"sha512-uCC2VHvQRYu+lMh4My/sFNmF2klFymLX1wHJeXnbEJERpV/ZsVuonzerjfrGpIGF7LBVa1O7i9kjiWvJiFck8g==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/strip-final-newline": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-4.0.0.tgz", @@ -6238,6 +7455,18 @@ "node": ">=0.8" } }, + "node_modules/thread-stream": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/thread-stream/-/thread-stream-4.0.0.tgz", + "integrity": "sha512-4iMVL6HAINXWf1ZKZjIPcz5wYaOdPhtO8ATvZ+Xqp3BTdaqtAwQkNmKORqcIo5YkQqGXq5cwfswDwMqqQNrpJA==", + "license": "MIT", + "dependencies": { + "real-require": "^0.2.0" + }, + "engines": { + "node": ">=20" + } + }, "node_modules/tiny-invariant": { "version": "1.3.3", "resolved": "https://registry.npmjs.org/tiny-invariant/-/tiny-invariant-1.3.3.tgz", @@ -6294,6 +7523,15 @@ "node": ">=14.0.0" } }, + "node_modules/tippy.js": { + "version": "6.3.7", + "resolved": "https://registry.npmjs.org/tippy.js/-/tippy.js-6.3.7.tgz", + "integrity": "sha512-E1d3oP2emgJ9dRQZdf3Kkn0qJgI6ZLpyS5z6ZkY1DF3kaQaBsGZsndEpHwx+eC+tYM41HaSNvNtLx8tU57FzTQ==", + "license": "MIT", + "dependencies": { + "@popperjs/core": "^2.9.0" + } + }, "node_modules/to-regex-range": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", @@ -6365,11 +7603,16 @@ "node": ">=14.17" } }, + "node_modules/uc.micro": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-2.1.0.tgz", + "integrity": "sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==", + "license": "MIT" + }, "node_modules/undici-types": { "version": "6.21.0", "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", - "devOptional": true, "license": "MIT" }, "node_modules/unicorn-magic": { @@ 
-6384,6 +7627,15 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/unique-names-generator": { + "version": "4.7.1", + "resolved": "https://registry.npmjs.org/unique-names-generator/-/unique-names-generator-4.7.1.tgz", + "integrity": "sha512-lMx9dX+KRmG8sq6gulYYpKWZc9RlGsgBR6aoO8Qsm3qvkSJ+3rAymr+TnV8EDMrIrwuFJ4kruzMWM/OpYzPoow==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, "node_modules/unplugin": { "version": "2.3.11", "resolved": "https://registry.npmjs.org/unplugin/-/unplugin-2.3.11.tgz", @@ -6642,6 +7894,12 @@ } } }, + "node_modules/w3c-keyname": { + "version": "2.2.8", + "resolved": "https://registry.npmjs.org/w3c-keyname/-/w3c-keyname-2.2.8.tgz", + "integrity": "sha512-dpojBhNsCNN7T82Tm7k26A6G9ML3NkhDsnw9n/eoxSRlVBB4CEtIQ/KTCLI2Fwf3ataSXRhYFkQi3SlnFwPvPQ==", + "license": "MIT" + }, "node_modules/webpack-virtual-modules": { "version": "0.6.2", "resolved": "https://registry.npmjs.org/webpack-virtual-modules/-/webpack-virtual-modules-0.6.2.tgz", @@ -6649,6 +7907,16 @@ "dev": true, "license": "MIT" }, + "node_modules/whatwg-mimetype": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-3.0.0.tgz", + "integrity": "sha512-nt+N2dzIutVRxARx1nghPKGv1xHikU7HKdfafKkLNLindmPU/ch3U31NOCGGA/dmPcmb1VlofO0vnKAcsm0o/Q==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">=12" + } + }, "node_modules/which": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", @@ -6687,6 +7955,28 @@ "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", "license": "ISC" }, + "node_modules/ws": { + "version": "8.19.0", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.19.0.tgz", + "integrity": "sha512-blAT2mjOEIi0ZzruJfIhb3nps74PRWTCz1IjglWEEpQl5XS/UNama6u2/rjFkDDouqr4L67ry+1aGIALViWjDg==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">=10.0.0" + }, + "peerDependencies": { + 
"bufferutil": "^4.0.1", + "utf-8-validate": ">=5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + "optional": true + } + } + }, "node_modules/yallist": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", @@ -6728,6 +8018,14 @@ "@radix-ui/react-label": "^2.1.8", "@tanstack/react-query": "^5.75.0", "@tanstack/react-router": "^1.158.0", + "@tiptap/extension-link": "^3.19.0", + "@tiptap/extension-placeholder": "^3.19.0", + "@tiptap/extension-table": "^3.19.0", + "@tiptap/html": "^3.19.0", + "@tiptap/pm": "^3.19.0", + "@tiptap/react": "^3.19.0", + "@tiptap/starter-kit": "^3.19.0", + "@tiptap/suggestion": "^3.19.0", "@trpc/client": "^11.9.0", "@trpc/react-query": "^11.9.0", "class-variance-authority": "^0.7.1", @@ -6736,7 +8034,8 @@ "react": "^19.0.0", "react-dom": "^19.0.0", "sonner": "^2.0.7", - "tailwind-merge": "^3.4.0" + "tailwind-merge": "^3.4.0", + "tippy.js": "^6.3.7" }, "devDependencies": { "@radix-ui/react-dropdown-menu": "^2.1.16", diff --git a/package.json b/package.json index 67bf643..44d4392 100644 --- a/package.json +++ b/package.json @@ -3,7 +3,9 @@ "version": "0.0.1", "description": "Multi-agent workspace for orchestrating multiple Claude Code agents", "type": "module", - "workspaces": ["packages/*"], + "workspaces": [ + "packages/*" + ], "main": "./dist/index.js", "bin": { "cw": "./dist/bin/cw.js" @@ -26,20 +28,28 @@ "author": "", "license": "ISC", "dependencies": { + "@tiptap/core": "^3.19.0", + "@tiptap/extension-link": "^3.19.0", + "@tiptap/markdown": "^3.19.0", + "@tiptap/starter-kit": "^3.19.0", "@trpc/client": "^11.9.0", "@trpc/server": "^11.9.0", "better-sqlite3": "^12.6.2", "commander": "^12.1.0", "drizzle-orm": "^0.45.1", "execa": "^9.5.2", + "gray-matter": "^4.0.3", "nanoid": "^5.1.6", + "pino": "^10.3.0", "simple-git": "^3.30.0", + "unique-names-generator": "^4.7.1", "zod": "^4.3.6" }, "devDependencies": { "@types/better-sqlite3": "^7.6.13", 
"@types/node": "^22.10.7", "drizzle-kit": "^0.31.8", + "pino-pretty": "^13.1.3", "rimraf": "^6.0.1", "tsx": "^4.19.2", "typescript": "^5.7.3", diff --git a/packages/shared/src/index.ts b/packages/shared/src/index.ts index 03fbbb7..99b9db5 100644 --- a/packages/shared/src/index.ts +++ b/packages/shared/src/index.ts @@ -1,2 +1,3 @@ export type { AppRouter } from './trpc.js'; -export type { Initiative, Phase, Plan, Task, Agent, Message, PendingQuestions, QuestionItem, SubscriptionEvent } from './types.js'; +export type { Initiative, Phase, Plan, Task, Agent, Message, PendingQuestions, QuestionItem, SubscriptionEvent, Project } from './types.js'; +export { sortByPriorityAndQueueTime, type SortableItem } from './utils.js'; diff --git a/packages/shared/src/types.ts b/packages/shared/src/types.ts index d183727..9aadeb8 100644 --- a/packages/shared/src/types.ts +++ b/packages/shared/src/types.ts @@ -1,4 +1,4 @@ -export type { Initiative, Phase, Plan, Task, Agent, Message } from '../../../src/db/schema.js'; +export type { Initiative, Phase, Plan, Task, Agent, Message, Page, Project, Account } from '../../../src/db/schema.js'; export type { PendingQuestions, QuestionItem } from '../../../src/agent/types.js'; /** diff --git a/packages/shared/src/utils.ts b/packages/shared/src/utils.ts new file mode 100644 index 0000000..a6b14ce --- /dev/null +++ b/packages/shared/src/utils.ts @@ -0,0 +1,37 @@ +/** + * Shared utility functions that can be used across frontend and backend. + */ + +export interface SortableItem { + priority: 'low' | 'medium' | 'high'; + createdAt: Date | string; +} + +/** + * Priority order mapping for sorting (higher number = higher priority) + */ +const PRIORITY_ORDER = { + high: 3, + medium: 2, + low: 1, +} as const; + +/** + * Sorts items by priority (high to low) then by queue time (oldest first). + * This ensures high-priority items come first, but within the same priority, + * items are processed in FIFO order. 
+ */ +export function sortByPriorityAndQueueTime<T extends SortableItem>(items: T[]): T[] { + return [...items].sort((a, b) => { + // First sort by priority (high to low) + const priorityDiff = PRIORITY_ORDER[b.priority] - PRIORITY_ORDER[a.priority]; + if (priorityDiff !== 0) { + return priorityDiff; + } + + // Within same priority, sort by creation time (oldest first - FIFO) + const aTime = typeof a.createdAt === 'string' ? new Date(a.createdAt) : a.createdAt; + const bTime = typeof b.createdAt === 'string' ? new Date(b.createdAt) : b.createdAt; + return aTime.getTime() - bTime.getTime(); + }); +} \ No newline at end of file diff --git a/packages/web/package.json b/packages/web/package.json index e1dcd14..219a352 100644 --- a/packages/web/package.json +++ b/packages/web/package.json @@ -15,6 +15,14 @@ "@radix-ui/react-label": "^2.1.8", "@tanstack/react-query": "^5.75.0", "@tanstack/react-router": "^1.158.0", + "@tiptap/extension-link": "^3.19.0", + "@tiptap/extension-placeholder": "^3.19.0", + "@tiptap/extension-table": "^3.19.0", + "@tiptap/html": "^3.19.0", + "@tiptap/pm": "^3.19.0", + "@tiptap/react": "^3.19.0", + "@tiptap/starter-kit": "^3.19.0", + "@tiptap/suggestion": "^3.19.0", "@trpc/client": "^11.9.0", "@trpc/react-query": "^11.9.0", "class-variance-authority": "^0.7.1", @@ -23,7 +31,8 @@ "react": "^19.0.0", "react-dom": "^19.0.0", "sonner": "^2.0.7", - "tailwind-merge": "^3.4.0" + "tailwind-merge": "^3.4.0", + "tippy.js": "^6.3.7" }, "devDependencies": { "@radix-ui/react-dropdown-menu": "^2.1.16", diff --git a/packages/web/src/components/AgentOutputViewer.tsx b/packages/web/src/components/AgentOutputViewer.tsx new file mode 100644 index 0000000..966fffe --- /dev/null +++ b/packages/web/src/components/AgentOutputViewer.tsx @@ -0,0 +1,177 @@ +import { useEffect, useRef, useState } from "react"; +import { Button } from "@/components/ui/button"; +import { ArrowDown, Pause, Play, AlertCircle } from "lucide-react"; +import { trpc } from "@/lib/trpc"; +import { 
useSubscriptionWithErrorHandling } from "@/hooks"; + +interface AgentOutputViewerProps { + agentId: string; + agentName?: string; +} + +export function AgentOutputViewer({ agentId, agentName }: AgentOutputViewerProps) { + const [output, setOutput] = useState<string[]>([]); + const [follow, setFollow] = useState(true); + const containerRef = useRef<HTMLPreElement>(null); + + // Load initial/historical output + const outputQuery = trpc.getAgentOutput.useQuery( + { id: agentId }, + { + refetchOnWindowFocus: false, + } + ); + + // Subscribe to live output with error handling + const subscription = useSubscriptionWithErrorHandling( + () => trpc.onAgentOutput.useSubscription({ agentId }), + { + onData: (event) => { + // event is TrackedEnvelope<{ agentId: string; data: string }> + // event.data is the inner data object + const payload = event.data as { agentId: string; data: string }; + setOutput((prev) => [...prev, payload.data]); + }, + onError: (error) => { + console.error('Agent output subscription error:', error); + }, + autoReconnect: true, + maxReconnectAttempts: 3, + } + ); + + // Set initial output when query loads + useEffect(() => { + if (outputQuery.data) { + // Split NDJSON content into chunks for display + // Each line might be a JSON event, so we just display raw for now + const lines = outputQuery.data.split("\n").filter(Boolean); + // Extract text from JSONL events for display + const textChunks: string[] = []; + for (const line of lines) { + try { + const event = JSON.parse(line); + if (event.type === "assistant" && Array.isArray(event.message?.content)) { + // Claude CLI stream-json: complete assistant messages with content blocks + for (const block of event.message.content) { + if (block.type === "text" && block.text) { + textChunks.push(block.text); + } + } + } else if (event.type === "stream_event" && event.event?.delta?.text) { + // Legacy streaming format: granular text deltas + textChunks.push(event.event.delta.text); + } else if (event.type === 
"result" && event.result) { + // Don't add result text since it duplicates the content + } + } catch { + // Not JSON, display as-is + textChunks.push(line + "\n"); + } + } + setOutput(textChunks); + } + }, [outputQuery.data]); + + // Reset output when agent changes + useEffect(() => { + setOutput([]); + setFollow(true); + }, [agentId]); + + // Auto-scroll to bottom when following + useEffect(() => { + if (follow && containerRef.current) { + containerRef.current.scrollTop = containerRef.current.scrollHeight; + } + }, [output, follow]); + + // Handle scroll to detect user scrolling up + function handleScroll() { + if (!containerRef.current) return; + const { scrollTop, scrollHeight, clientHeight } = containerRef.current; + const isAtBottom = scrollHeight - scrollTop - clientHeight < 50; + if (!isAtBottom && follow) { + setFollow(false); + } + } + + // Jump to bottom + function scrollToBottom() { + if (containerRef.current) { + containerRef.current.scrollTop = containerRef.current.scrollHeight; + setFollow(true); + } + } + + const isLoading = outputQuery.isLoading; + const hasOutput = output.length > 0; + + return ( + <div className="flex flex-col h-[600px] rounded-lg border overflow-hidden"> + {/* Header */} + <div className="flex items-center justify-between border-b bg-zinc-900 px-4 py-2"> + <div className="flex items-center gap-2"> + <span className="text-sm font-medium text-zinc-100"> + {agentName ? 
`Output: ${agentName}` : "Agent Output"} + </span> + {subscription.error && ( + <div className="flex items-center gap-1 text-red-400" title={subscription.error.message}> + <AlertCircle className="h-3 w-3" /> + <span className="text-xs">Connection error</span> + </div> + )} + {subscription.isConnecting && ( + <span className="text-xs text-yellow-400">Connecting...</span> + )} + </div> + <div className="flex items-center gap-2"> + <Button + variant="ghost" + size="sm" + onClick={() => setFollow(!follow)} + className="h-7 text-zinc-400 hover:text-zinc-100 hover:bg-zinc-800" + > + {follow ? ( + <> + <Pause className="mr-1 h-3 w-3" /> + Following + </> + ) : ( + <> + <Play className="mr-1 h-3 w-3" /> + Paused + </> + )} + </Button> + {!follow && ( + <Button + variant="ghost" + size="sm" + onClick={scrollToBottom} + className="h-7 text-zinc-400 hover:text-zinc-100 hover:bg-zinc-800" + > + <ArrowDown className="mr-1 h-3 w-3" /> + Jump to bottom + </Button> + )} + </div> + </div> + + {/* Output content */} + <pre + ref={containerRef} + onScroll={handleScroll} + className="flex-1 overflow-y-auto bg-zinc-900 p-4 font-mono text-sm text-zinc-100 whitespace-pre-wrap" + > + {isLoading ? ( + <span className="text-zinc-500">Loading output...</span> + ) : !hasOutput ? 
( + <span className="text-zinc-500">No output yet...</span> + ) : ( + output.join("") + )} + </pre> + </div> + ); +} diff --git a/packages/web/src/components/CreateInitiativeDialog.tsx b/packages/web/src/components/CreateInitiativeDialog.tsx index 6e43198..a6244d0 100644 --- a/packages/web/src/components/CreateInitiativeDialog.tsx +++ b/packages/web/src/components/CreateInitiativeDialog.tsx @@ -10,9 +10,9 @@ import { import { Button } from "@/components/ui/button"; import { Input } from "@/components/ui/input"; import { Label } from "@/components/ui/label"; -import { Textarea } from "@/components/ui/textarea"; import { toast } from "sonner"; import { trpc } from "@/lib/trpc"; +import { ProjectPicker } from "./ProjectPicker"; interface CreateInitiativeDialogProps { open: boolean; @@ -24,7 +24,7 @@ export function CreateInitiativeDialog({ onOpenChange, }: CreateInitiativeDialogProps) { const [name, setName] = useState(""); - const [description, setDescription] = useState(""); + const [projectIds, setProjectIds] = useState<string[]>([]); const [error, setError] = useState<string | null>(null); const utils = trpc.useUtils(); @@ -44,7 +44,7 @@ export function CreateInitiativeDialog({ useEffect(() => { if (open) { setName(""); - setDescription(""); + setProjectIds([]); setError(null); } }, [open]); @@ -54,7 +54,7 @@ export function CreateInitiativeDialog({ setError(null); createMutation.mutate({ name: name.trim(), - description: description.trim() || undefined, + projectIds: projectIds.length > 0 ? projectIds : undefined, }); } @@ -81,19 +81,13 @@ export function CreateInitiativeDialog({ /> </div> <div className="space-y-2"> - <Label htmlFor="initiative-description"> - Description{" "} + <Label> + Projects{" "} <span className="text-muted-foreground font-normal"> (optional) </span> </Label> - <Textarea - id="initiative-description" - placeholder="Brief description of the initiative..." 
- value={description} - onChange={(e) => setDescription(e.target.value)} - rows={3} - /> + <ProjectPicker value={projectIds} onChange={setProjectIds} /> </div> {error && ( <p className="text-sm text-destructive">{error}</p> diff --git a/packages/web/src/components/ExecutionTab.tsx b/packages/web/src/components/ExecutionTab.tsx new file mode 100644 index 0000000..c17e373 --- /dev/null +++ b/packages/web/src/components/ExecutionTab.tsx @@ -0,0 +1,48 @@ +import { + ExecutionProvider, + PhaseActions, + PhasesList, + ProgressSidebar, + TaskModal, + type PhaseData, +} from "@/components/execution"; + +interface ExecutionTabProps { + initiativeId: string; + phases: PhaseData[]; + phasesLoading: boolean; + phasesLoaded: boolean; +} + +export function ExecutionTab({ + initiativeId, + phases, + phasesLoading, + phasesLoaded, +}: ExecutionTabProps) { + return ( + <ExecutionProvider> + <div className="grid grid-cols-1 gap-6 lg:grid-cols-[1fr_340px]"> + {/* Left column: Phases */} + <div className="space-y-0"> + <div className="flex items-center justify-between border-b border-border pb-3"> + <h2 className="text-lg font-semibold">Phases</h2> + <PhaseActions initiativeId={initiativeId} phases={phases} /> + </div> + + <PhasesList + initiativeId={initiativeId} + phases={phases} + phasesLoading={phasesLoading} + phasesLoaded={phasesLoaded} + /> + </div> + + {/* Right column: Progress + Decisions */} + <ProgressSidebar phases={phases} /> + </div> + + <TaskModal /> + </ExecutionProvider> + ); +} diff --git a/packages/web/src/components/InitiativeHeader.tsx b/packages/web/src/components/InitiativeHeader.tsx index 8b3324e..b320a18 100644 --- a/packages/web/src/components/InitiativeHeader.tsx +++ b/packages/web/src/components/InitiativeHeader.tsx @@ -1,52 +1,118 @@ -import { ChevronLeft } from "lucide-react"; -import { Card, CardContent } from "@/components/ui/card"; +import { useState } from "react"; +import { ChevronLeft, Pencil, Check } from "lucide-react"; import { Button } from 
"@/components/ui/button"; +import { Badge } from "@/components/ui/badge"; import { StatusBadge } from "@/components/StatusBadge"; +import { ProjectPicker } from "./ProjectPicker"; +import { trpc } from "@/lib/trpc"; +import { toast } from "sonner"; export interface InitiativeHeaderProps { initiative: { id: string; name: string; status: string; - createdAt: string; - updatedAt: string; }; + projects?: Array<{ id: string; name: string; url: string }>; onBack: () => void; } export function InitiativeHeader({ initiative, + projects, onBack, }: InitiativeHeaderProps) { - return ( - <div className="flex flex-col gap-4"> - {/* Top bar: back button + actions placeholder */} - <div className="flex items-center justify-between"> - <Button variant="ghost" size="sm" onClick={onBack}> - <ChevronLeft className="mr-1 h-4 w-4" /> - Back to Dashboard - </Button> - <Button variant="outline" size="sm" disabled> - Actions - </Button> - </div> + const [editing, setEditing] = useState(false); + const [editIds, setEditIds] = useState<string[]>([]); - {/* Initiative metadata card */} - <Card> - <CardContent className="p-6"> - <div className="flex flex-col gap-2"> - <div className="flex items-center gap-3"> - <h1 className="text-2xl font-bold">{initiative.name}</h1> - <StatusBadge status={initiative.status} /> - </div> - <p className="text-sm text-muted-foreground"> - Created: {new Date(initiative.createdAt).toLocaleDateString()} - {" | "} - Updated: {new Date(initiative.updatedAt).toLocaleDateString()} - </p> + const utils = trpc.useUtils(); + const updateMutation = trpc.updateInitiativeProjects.useMutation({ + onSuccess: () => { + utils.getInitiative.invalidate({ id: initiative.id }); + setEditing(false); + toast.success("Projects updated"); + }, + onError: (err) => { + toast.error(err.message); + }, + }); + + function startEditing() { + setEditIds(projects?.map((p) => p.id) ?? 
[]); + setEditing(true); + } + + function saveProjects() { + if (editIds.length === 0) { + toast.error("At least one project is required"); + return; + } + updateMutation.mutate({ + initiativeId: initiative.id, + projectIds: editIds, + }); + } + + return ( + <div className="space-y-2"> + <div className="flex items-center justify-between"> + <div className="flex items-center gap-3"> + <Button variant="ghost" size="icon" className="h-8 w-8" onClick={onBack}> + <ChevronLeft className="h-4 w-4" /> + </Button> + <h1 className="text-xl font-semibold">{initiative.name}</h1> + <StatusBadge status={initiative.status} /> + {!editing && projects && projects.length > 0 && ( + <> + {projects.map((p) => ( + <Badge key={p.id} variant="outline" className="text-xs font-normal"> + {p.name} + </Badge> + ))} + <Button + variant="ghost" + size="icon" + className="h-6 w-6" + onClick={startEditing} + > + <Pencil className="h-3 w-3" /> + </Button> + </> + )} + {!editing && (!projects || projects.length === 0) && ( + <Button + variant="ghost" + size="sm" + className="text-xs text-muted-foreground" + onClick={startEditing} + > + + Add projects + </Button> + )} + </div> + </div> + {editing && ( + <div className="ml-11 max-w-sm space-y-2"> + <ProjectPicker value={editIds} onChange={setEditIds} /> + <div className="flex gap-2"> + <Button + size="sm" + onClick={saveProjects} + disabled={editIds.length === 0 || updateMutation.isPending} + > + <Check className="mr-1 h-3 w-3" /> + {updateMutation.isPending ? "Saving..." 
: "Save"} + </Button> + <Button + size="sm" + variant="outline" + onClick={() => setEditing(false)} + > + Cancel + </Button> </div> - </CardContent> - </Card> + </div> + )} </div> ); } diff --git a/packages/web/src/components/MessageCard.tsx b/packages/web/src/components/MessageCard.tsx index 4801553..0bccfa8 100644 --- a/packages/web/src/components/MessageCard.tsx +++ b/packages/web/src/components/MessageCard.tsx @@ -1,20 +1,5 @@ import { Card } from "@/components/ui/card"; -import { cn } from "@/lib/utils"; - -function formatRelativeTime(isoDate: string): string { - const now = Date.now(); - const then = new Date(isoDate).getTime(); - const diffMs = now - then; - const diffSec = Math.floor(diffMs / 1000); - const diffMin = Math.floor(diffSec / 60); - const diffHr = Math.floor(diffMin / 60); - const diffDay = Math.floor(diffHr / 24); - - if (diffSec < 60) return "just now"; - if (diffMin < 60) return `${diffMin} min ago`; - if (diffHr < 24) return `${diffHr}h ago`; - return `${diffDay}d ago`; -} +import { cn, formatRelativeTime } from "@/lib/utils"; interface MessageCardProps { agentName: string; diff --git a/packages/web/src/components/ProjectPicker.tsx b/packages/web/src/components/ProjectPicker.tsx new file mode 100644 index 0000000..ec5fde4 --- /dev/null +++ b/packages/web/src/components/ProjectPicker.tsx @@ -0,0 +1,65 @@ +import { useState } from "react"; +import { Plus } from "lucide-react"; +import { trpc } from "@/lib/trpc"; +import { RegisterProjectDialog } from "./RegisterProjectDialog"; + +interface ProjectPickerProps { + value: string[]; + onChange: (ids: string[]) => void; + error?: string; +} + +export function ProjectPicker({ value, onChange, error }: ProjectPickerProps) { + const [registerOpen, setRegisterOpen] = useState(false); + + const projectsQuery = trpc.listProjects.useQuery(); + const projects = projectsQuery.data ?? 
[]; + + function toggle(id: string) { + if (value.includes(id)) { + onChange(value.filter((v) => v !== id)); + } else { + onChange([...value, id]); + } + } + + return ( + <div className="space-y-2"> + {projects.length === 0 && !projectsQuery.isLoading && ( + <p className="text-sm text-muted-foreground">No projects registered yet.</p> + )} + {projects.length > 0 && ( + <div className="max-h-40 overflow-y-auto rounded border border-border p-2 space-y-1"> + {projects.map((p) => ( + <label + key={p.id} + className="flex items-center gap-2 rounded px-2 py-1.5 text-sm hover:bg-accent cursor-pointer" + > + <input + type="checkbox" + checked={value.includes(p.id)} + onChange={() => toggle(p.id)} + className="rounded border-border" + /> + <span className="font-medium">{p.name}</span> + <span className="text-muted-foreground text-xs truncate">{p.url}</span> + </label> + ))} + </div> + )} + <button + type="button" + onClick={() => setRegisterOpen(true)} + className="inline-flex items-center gap-1 text-sm text-primary hover:underline" + > + <Plus className="h-3 w-3" /> + Register new project + </button> + {error && <p className="text-sm text-destructive">{error}</p>} + <RegisterProjectDialog + open={registerOpen} + onOpenChange={setRegisterOpen} + /> + </div> + ); +} diff --git a/packages/web/src/components/RefineSpawnDialog.tsx b/packages/web/src/components/RefineSpawnDialog.tsx new file mode 100644 index 0000000..dc70a23 --- /dev/null +++ b/packages/web/src/components/RefineSpawnDialog.tsx @@ -0,0 +1,125 @@ +import { useState } from "react"; +import { Sparkles } from "lucide-react"; +import { Button } from "@/components/ui/button"; +import { + Dialog, + DialogContent, + DialogHeader, + DialogTitle, + DialogDescription, + DialogFooter, +} from "@/components/ui/dialog"; +import { Textarea } from "@/components/ui/textarea"; + +interface RefineSpawnDialogProps { + /** Button text to show in the trigger */ + triggerText: string; + /** Dialog title */ + title: string; + /** Dialog 
description */ + description: string; + /** Whether to show the instruction textarea */ + showInstructionInput?: boolean; + /** Placeholder text for the instruction textarea */ + instructionPlaceholder?: string; + /** Whether the spawn mutation is pending */ + isSpawning: boolean; + /** Error message if spawn failed */ + error?: string; + /** Called when the user wants to spawn */ + onSpawn: (instruction?: string) => void; + /** Custom trigger button (optional) */ + trigger?: React.ReactNode; +} + +export function RefineSpawnDialog({ + triggerText, + title, + description, + showInstructionInput = true, + instructionPlaceholder = "What should the agent focus on? (optional)", + isSpawning, + error, + onSpawn, + trigger, +}: RefineSpawnDialogProps) { + const [showDialog, setShowDialog] = useState(false); + const [instruction, setInstruction] = useState(""); + + const handleSpawn = () => { + const finalInstruction = showInstructionInput && instruction.trim() + ? instruction.trim() + : undefined; + onSpawn(finalInstruction); + }; + + const handleOpenChange = (open: boolean) => { + setShowDialog(open); + if (!open) { + setInstruction(""); + } + }; + + const defaultTrigger = ( + <Button + variant="outline" + size="sm" + onClick={() => setShowDialog(true)} + className="gap-1.5" + > + <Sparkles className="h-3.5 w-3.5" /> + {triggerText} + </Button> + ); + + return ( + <> + {trigger ? 
( + <div onClick={() => setShowDialog(true)}> + {trigger} + </div> + ) : ( + defaultTrigger + )} + + <Dialog open={showDialog} onOpenChange={handleOpenChange}> + <DialogContent> + <DialogHeader> + <DialogTitle>{title}</DialogTitle> + <DialogDescription>{description}</DialogDescription> + </DialogHeader> + + {showInstructionInput && ( + <Textarea + placeholder={instructionPlaceholder} + value={instruction} + onChange={(e) => setInstruction(e.target.value)} + rows={3} + /> + )} + + <DialogFooter> + <Button + variant="outline" + onClick={() => setShowDialog(false)} + > + Cancel + </Button> + <Button + onClick={handleSpawn} + disabled={isSpawning} + > + {isSpawning ? "Starting..." : "Start"} + </Button> + </DialogFooter> + + {error && ( + <p className="text-xs text-destructive"> + {error} + </p> + )} + </DialogContent> + </Dialog> + </> + ); +} \ No newline at end of file diff --git a/packages/web/src/components/RegisterProjectDialog.tsx b/packages/web/src/components/RegisterProjectDialog.tsx new file mode 100644 index 0000000..cc6b0d2 --- /dev/null +++ b/packages/web/src/components/RegisterProjectDialog.tsx @@ -0,0 +1,110 @@ +import { useEffect, useState } from "react"; +import { + Dialog, + DialogContent, + DialogDescription, + DialogFooter, + DialogHeader, + DialogTitle, +} from "@/components/ui/dialog"; +import { Button } from "@/components/ui/button"; +import { Input } from "@/components/ui/input"; +import { Label } from "@/components/ui/label"; +import { toast } from "sonner"; +import { trpc } from "@/lib/trpc"; + +interface RegisterProjectDialogProps { + open: boolean; + onOpenChange: (open: boolean) => void; +} + +export function RegisterProjectDialog({ + open, + onOpenChange, +}: RegisterProjectDialogProps) { + const [name, setName] = useState(""); + const [url, setUrl] = useState(""); + const [error, setError] = useState<string | null>(null); + + const utils = trpc.useUtils(); + + const registerMutation = trpc.registerProject.useMutation({ + onSuccess: () => 
{ + utils.listProjects.invalidate(); + onOpenChange(false); + toast.success("Project registered"); + }, + onError: (err) => { + setError(err.message); + }, + }); + + useEffect(() => { + if (open) { + setName(""); + setUrl(""); + setError(null); + } + }, [open]); + + function handleSubmit(e: React.FormEvent) { + e.preventDefault(); + setError(null); + registerMutation.mutate({ + name: name.trim(), + url: url.trim(), + }); + } + + const canSubmit = + name.trim().length > 0 && + url.trim().length > 0 && + !registerMutation.isPending; + + return ( + <Dialog open={open} onOpenChange={onOpenChange}> + <DialogContent> + <DialogHeader> + <DialogTitle>Register Project</DialogTitle> + <DialogDescription> + Register a git repository as a project. + </DialogDescription> + </DialogHeader> + <form onSubmit={handleSubmit} className="space-y-4"> + <div className="space-y-2"> + <Label htmlFor="project-name">Name</Label> + <Input + id="project-name" + placeholder="e.g. my-app" + value={name} + onChange={(e) => setName(e.target.value)} + autoFocus + /> + </div> + <div className="space-y-2"> + <Label htmlFor="project-url">Repository URL</Label> + <Input + id="project-url" + placeholder="e.g. https://github.com/org/repo.git" + value={url} + onChange={(e) => setUrl(e.target.value)} + /> + </div> + {error && <p className="text-sm text-destructive">{error}</p>} + <DialogFooter> + <Button + type="button" + variant="outline" + onClick={() => onOpenChange(false)} + > + Cancel + </Button> + <Button type="submit" disabled={!canSubmit}> + {registerMutation.isPending ? "Registering..." 
: "Register"} + </Button> + </DialogFooter> + </form> + </DialogContent> + </Dialog> + ); +} diff --git a/packages/web/src/components/SpawnArchitectDropdown.tsx b/packages/web/src/components/SpawnArchitectDropdown.tsx index 8466a8a..61115c8 100644 --- a/packages/web/src/components/SpawnArchitectDropdown.tsx +++ b/packages/web/src/components/SpawnArchitectDropdown.tsx @@ -7,59 +7,42 @@ import { DropdownMenuItem, DropdownMenuTrigger, } from "@/components/ui/dropdown-menu"; -import { toast } from "sonner"; import { trpc } from "@/lib/trpc"; +import { useSpawnMutation } from "@/hooks/useSpawnMutation"; interface SpawnArchitectDropdownProps { initiativeId: string; - initiativeName: string; + initiativeName?: string; } export function SpawnArchitectDropdown({ initiativeId, - initiativeName, }: SpawnArchitectDropdownProps) { const [open, setOpen] = useState(false); const [successText, setSuccessText] = useState<string | null>(null); - const discussMutation = trpc.spawnArchitectDiscuss.useMutation({ - onSuccess: () => { - setOpen(false); - setSuccessText("Spawned!"); - setTimeout(() => setSuccessText(null), 2000); - toast.success("Architect spawned"); - }, - onError: () => { - toast.error("Failed to spawn architect"); - }, + const handleSuccess = () => { + setOpen(false); + setSuccessText("Spawned!"); + setTimeout(() => setSuccessText(null), 2000); + }; + + const discussSpawn = useSpawnMutation(trpc.spawnArchitectDiscuss.useMutation, { + onSuccess: handleSuccess, }); - const breakdownMutation = trpc.spawnArchitectBreakdown.useMutation({ - onSuccess: () => { - setOpen(false); - setSuccessText("Spawned!"); - setTimeout(() => setSuccessText(null), 2000); - toast.success("Architect spawned"); - }, - onError: () => { - toast.error("Failed to spawn architect"); - }, + const breakdownSpawn = useSpawnMutation(trpc.spawnArchitectBreakdown.useMutation, { + onSuccess: handleSuccess, }); - const isPending = discussMutation.isPending || breakdownMutation.isPending; + const isPending = 
discussSpawn.isSpawning || breakdownSpawn.isSpawning; function handleDiscuss() { - discussMutation.mutate({ - name: initiativeName + "-discuss", - initiativeId, - }); + discussSpawn.spawn({ initiativeId }); } function handleBreakdown() { - breakdownMutation.mutate({ - name: initiativeName + "-breakdown", - initiativeId, - }); + breakdownSpawn.spawn({ initiativeId }); } return ( diff --git a/packages/web/src/components/StatusDot.tsx b/packages/web/src/components/StatusDot.tsx new file mode 100644 index 0000000..beb7a63 --- /dev/null +++ b/packages/web/src/components/StatusDot.tsx @@ -0,0 +1,76 @@ +import { cn } from "@/lib/utils"; + +/** + * Color mapping for different status values. + * Uses semantic colors that work well as small dots. + */ +const statusColors: Record<string, string> = { + // Task statuses + pending: "bg-gray-400", + pending_approval: "bg-yellow-400", + in_progress: "bg-blue-500", + completed: "bg-green-500", + blocked: "bg-red-500", + + // Agent statuses + idle: "bg-gray-400", + running: "bg-blue-500", + waiting_for_input: "bg-yellow-400", + stopped: "bg-gray-600", + crashed: "bg-red-500", + + // Initiative/Phase statuses + active: "bg-blue-500", + archived: "bg-gray-400", + + // Message statuses + read: "bg-green-500", + responded: "bg-blue-500", + + // Priority indicators + low: "bg-green-400", + medium: "bg-yellow-400", + high: "bg-red-400", +} as const; + +const defaultColor = "bg-gray-400"; + +interface StatusDotProps { + status: string; + size?: "sm" | "md" | "lg"; + className?: string; + title?: string; +} + +/** + * Small colored dot to indicate status at a glance. + * More compact than StatusBadge for use in lists or tight spaces. + */ +export function StatusDot({ + status, + size = "md", + className, + title +}: StatusDotProps) { + const sizeClasses = { + sm: "h-2 w-2", + md: "h-3 w-3", + lg: "h-4 w-4" + }; + + const color = statusColors[status] ?? defaultColor; + const displayTitle = title ?? 
status.replace(/_/g, " ").toLowerCase(); + + return ( + <div + className={cn( + "rounded-full", + sizeClasses[size], + color, + className + )} + title={displayTitle} + aria-label={`Status: ${displayTitle}`} + /> + ); +} \ No newline at end of file diff --git a/packages/web/src/components/TaskDetailModal.tsx b/packages/web/src/components/TaskDetailModal.tsx index ceb49ee..bfb231f 100644 --- a/packages/web/src/components/TaskDetailModal.tsx +++ b/packages/web/src/components/TaskDetailModal.tsx @@ -8,6 +8,7 @@ import { } from "@/components/ui/dialog"; import { Button } from "@/components/ui/button"; import { StatusBadge } from "@/components/StatusBadge"; +import { StatusDot } from "@/components/StatusDot"; /** Serialized Task shape as returned by tRPC (Date serialized to string over JSON) */ export interface SerializedTask { @@ -117,7 +118,7 @@ export function TaskDetailModal({ className="flex items-center gap-2 text-sm" > <span>{dep.name}</span> - <StatusBadge status={dep.status} /> + <StatusDot status={dep.status} size="md" /> </li> ))} </ul> @@ -137,7 +138,7 @@ export function TaskDetailModal({ className="flex items-center gap-2 text-sm" > <span>{dep.name}</span> - <StatusBadge status={dep.status} /> + <StatusDot status={dep.status} size="md" /> </li> ))} </ul> diff --git a/packages/web/src/components/editor/BlockSelectionExtension.ts b/packages/web/src/components/editor/BlockSelectionExtension.ts new file mode 100644 index 0000000..a5049f0 --- /dev/null +++ b/packages/web/src/components/editor/BlockSelectionExtension.ts @@ -0,0 +1,186 @@ +import { Extension } from "@tiptap/core"; +import { Plugin, PluginKey, type EditorState, type Transaction } from "@tiptap/pm/state"; +import { Decoration, DecorationSet } from "@tiptap/pm/view"; + +export type BlockSelectionState = { + anchorIndex: number; + headIndex: number; +} | null; + +export const blockSelectionKey = new PluginKey<BlockSelectionState>( + "blockSelection", +); + +function selectedRange( + state: 
BlockSelectionState, +): { from: number; to: number } | null { + if (!state) return null; + return { + from: Math.min(state.anchorIndex, state.headIndex), + to: Math.max(state.anchorIndex, state.headIndex), + }; +} + +/** Returns doc positions spanning the selected block range. */ +export function getBlockRange( + editorState: EditorState, + sel: BlockSelectionState, +): { fromPos: number; toPos: number } | null { + if (!sel) return null; + const range = selectedRange(sel)!; + const doc = editorState.doc; + let fromPos = 0; + let toPos = 0; + let idx = 0; + doc.forEach((node, offset) => { + if (idx === range.from) fromPos = offset; + if (idx === range.to) toPos = offset + node.nodeSize; + idx++; + }); + return { fromPos, toPos }; +} + +function isPrintable(e: KeyboardEvent): boolean { + if (e.ctrlKey || e.metaKey || e.altKey) return false; + return e.key.length === 1; +} + +export const BlockSelectionExtension = Extension.create({ + name: "blockSelection", + + addProseMirrorPlugins() { + return [ + new Plugin<BlockSelectionState>({ + key: blockSelectionKey, + + state: { + init(): BlockSelectionState { + return null; + }, + apply(tr: Transaction, value: BlockSelectionState): BlockSelectionState { + const meta = tr.getMeta(blockSelectionKey); + if (meta !== undefined) return meta; + // Doc changed while selection active → clear (positions stale) + if (value && tr.docChanged) return null; + // User set a new text selection (not from our plugin) → clear + if (value && tr.selectionSet && !tr.getMeta("blockSelectionInternal")) { + return null; + } + return value; + }, + }, + + props: { + decorations(state: EditorState): DecorationSet { + const sel = blockSelectionKey.getState(state); + const range = selectedRange(sel); + if (!range) return DecorationSet.empty; + + const decorations: Decoration[] = []; + let idx = 0; + state.doc.forEach((node, pos) => { + if (idx >= range.from && idx <= range.to) { + decorations.push( + Decoration.node(pos, pos + node.nodeSize, { + class: 
"block-selected", + }), + ); + } + idx++; + }); + return DecorationSet.create(state.doc, decorations); + }, + + attributes(state: EditorState): Record<string, string> | null { + const sel = blockSelectionKey.getState(state); + if (sel) return { class: "has-block-selection" }; + return null; + }, + + handleKeyDown(view, event) { + const sel = blockSelectionKey.getState(view.state); + if (!sel) return false; + + const childCount = view.state.doc.childCount; + + if (event.key === "ArrowDown" && event.shiftKey) { + event.preventDefault(); + const newHead = Math.min(sel.headIndex + 1, childCount - 1); + const tr = view.state.tr.setMeta(blockSelectionKey, { + anchorIndex: sel.anchorIndex, + headIndex: newHead, + }); + tr.setMeta("blockSelectionInternal", true); + view.dispatch(tr); + return true; + } + + if (event.key === "ArrowUp" && event.shiftKey) { + event.preventDefault(); + const newHead = Math.max(sel.headIndex - 1, 0); + const tr = view.state.tr.setMeta(blockSelectionKey, { + anchorIndex: sel.anchorIndex, + headIndex: newHead, + }); + tr.setMeta("blockSelectionInternal", true); + view.dispatch(tr); + return true; + } + + if (event.key === "Escape") { + event.preventDefault(); + view.dispatch( + view.state.tr.setMeta(blockSelectionKey, null), + ); + return true; + } + + if (event.key === "Backspace" || event.key === "Delete") { + event.preventDefault(); + const range = selectedRange(sel); + if (!range) return true; + const blockRange = getBlockRange(view.state, sel); + if (!blockRange) return true; + const tr = view.state.tr.delete(blockRange.fromPos, blockRange.toPos); + tr.setMeta(blockSelectionKey, null); + view.dispatch(tr); + return true; + } + + if (isPrintable(event)) { + // Delete selected blocks, clear selection, let PM handle char insertion + const blockRange = getBlockRange(view.state, sel); + if (blockRange) { + const tr = view.state.tr.delete(blockRange.fromPos, blockRange.toPos); + tr.setMeta(blockSelectionKey, null); + view.dispatch(tr); + } + 
return false; + } + + // Modifier-only keys (Shift, Ctrl, Alt, Meta) — ignore + if (["Shift", "Control", "Alt", "Meta"].includes(event.key)) { + return false; + } + + // Any other key — clear selection and pass through + view.dispatch( + view.state.tr.setMeta(blockSelectionKey, null), + ); + return false; + }, + + handleClick(view) { + const sel = blockSelectionKey.getState(view.state); + if (sel) { + view.dispatch( + view.state.tr.setMeta(blockSelectionKey, null), + ); + } + return false; + }, + }, + }), + ]; + }, +}); diff --git a/packages/web/src/components/editor/ContentProposalReview.tsx b/packages/web/src/components/editor/ContentProposalReview.tsx new file mode 100644 index 0000000..5b25ef7 --- /dev/null +++ b/packages/web/src/components/editor/ContentProposalReview.tsx @@ -0,0 +1,191 @@ +import { useState, useCallback } from "react"; +import { Check, ChevronDown, ChevronRight, AlertTriangle } from "lucide-react"; +import { Button } from "@/components/ui/button"; +import { trpc } from "@/lib/trpc"; +import { markdownToTiptapJson } from "@/lib/markdown-to-tiptap"; + +interface ContentProposal { + pageId: string; + pageTitle: string; + summary: string; + markdown: string; +} + +interface ContentProposalReviewProps { + proposals: ContentProposal[]; + agentCreatedAt: Date; + agentId: string; + onDismiss: () => void; +} + +export function ContentProposalReview({ + proposals, + agentCreatedAt, + agentId, + onDismiss, +}: ContentProposalReviewProps) { + const [accepted, setAccepted] = useState<Set<string>>(new Set()); + const utils = trpc.useUtils(); + const updatePageMutation = trpc.updatePage.useMutation({ + onSuccess: () => { + void utils.listPages.invalidate(); + void utils.getPage.invalidate(); + }, + }); + + const dismissMutation = trpc.dismissAgent.useMutation({ + onSuccess: () => { + void utils.listAgents.invalidate(); + onDismiss(); + }, + }); + + const handleAccept = useCallback( + async (proposal: ContentProposal) => { + const tiptapJson = 
markdownToTiptapJson(proposal.markdown); + await updatePageMutation.mutateAsync({ + id: proposal.pageId, + content: JSON.stringify(tiptapJson), + }); + setAccepted((prev) => new Set(prev).add(proposal.pageId)); + }, + [updatePageMutation], + ); + + const handleAcceptAll = useCallback(async () => { + for (const proposal of proposals) { + if (!accepted.has(proposal.pageId)) { + const tiptapJson = markdownToTiptapJson(proposal.markdown); + await updatePageMutation.mutateAsync({ + id: proposal.pageId, + content: JSON.stringify(tiptapJson), + }); + setAccepted((prev) => new Set(prev).add(proposal.pageId)); + } + } + }, [proposals, accepted, updatePageMutation]); + + const allAccepted = proposals.every((p) => accepted.has(p.pageId)); + + return ( + <div className="rounded-lg border border-border bg-card p-4 space-y-3"> + <div className="flex items-center justify-between"> + <h3 className="text-sm font-semibold"> + Agent Proposals ({proposals.length}) + </h3> + <div className="flex gap-2"> + {!allAccepted && ( + <Button + variant="outline" + size="sm" + onClick={handleAcceptAll} + disabled={updatePageMutation.isPending} + > + Accept All + </Button> + )} + <Button + variant="ghost" + size="sm" + onClick={() => dismissMutation.mutate({ id: agentId })} + disabled={dismissMutation.isPending} + > + {dismissMutation.isPending ? "Dismissing..." 
: "Dismiss"} + </Button> + </div> + </div> + + <div className="space-y-2"> + {proposals.map((proposal) => ( + <ProposalCard + key={proposal.pageId} + proposal={proposal} + isAccepted={accepted.has(proposal.pageId)} + agentCreatedAt={agentCreatedAt} + onAccept={() => handleAccept(proposal)} + isAccepting={updatePageMutation.isPending} + /> + ))} + </div> + </div> + ); +} + +interface ProposalCardProps { + proposal: ContentProposal; + isAccepted: boolean; + agentCreatedAt: Date; + onAccept: () => void; + isAccepting: boolean; +} + +function ProposalCard({ + proposal, + isAccepted, + agentCreatedAt, + onAccept, + isAccepting, +}: ProposalCardProps) { + const [expanded, setExpanded] = useState(false); + + // Check if page was modified since agent started + const pageQuery = trpc.getPage.useQuery({ id: proposal.pageId }); + const pageUpdatedAt = pageQuery.data?.updatedAt; + const isStale = + pageUpdatedAt && new Date(pageUpdatedAt) > agentCreatedAt; + + return ( + <div className="rounded border border-border p-3 space-y-2"> + <div className="flex items-start justify-between gap-2"> + <div className="flex-1 min-w-0"> + <button + className="flex items-center gap-1 text-sm font-medium hover:text-foreground/80" + onClick={() => setExpanded(!expanded)} + > + {expanded ? ( + <ChevronDown className="h-3.5 w-3.5 shrink-0" /> + ) : ( + <ChevronRight className="h-3.5 w-3.5 shrink-0" /> + )} + {proposal.pageTitle} + </button> + <p className="text-xs text-muted-foreground mt-0.5 pl-5"> + {proposal.summary} + </p> + </div> + + {isAccepted ? 
( + <div className="flex items-center gap-1 text-xs text-green-600 shrink-0"> + <Check className="h-3.5 w-3.5" /> + Accepted + </div> + ) : ( + <Button + variant="outline" + size="sm" + onClick={onAccept} + disabled={isAccepting} + className="shrink-0" + > + Accept + </Button> + )} + </div> + + {isStale && !isAccepted && ( + <div className="flex items-center gap-1.5 text-xs text-yellow-600 pl-5"> + <AlertTriangle className="h-3 w-3" /> + Content was modified since agent started + </div> + )} + + {expanded && ( + <div className="pl-5 pt-1"> + <div className="prose prose-sm max-w-none rounded bg-muted/50 p-3 text-xs overflow-auto max-h-64"> + <pre className="whitespace-pre-wrap text-xs">{proposal.markdown}</pre> + </div> + </div> + )} + </div> + ); +} diff --git a/packages/web/src/components/editor/ContentTab.tsx b/packages/web/src/components/editor/ContentTab.tsx new file mode 100644 index 0000000..35bb919 --- /dev/null +++ b/packages/web/src/components/editor/ContentTab.tsx @@ -0,0 +1,348 @@ +import { useState, useCallback, useRef, useEffect } from "react"; +import type { Editor } from "@tiptap/react"; +import { AlertCircle } from "lucide-react"; +import { trpc } from "@/lib/trpc"; +import { useAutoSave } from "@/hooks/useAutoSave"; +import { TiptapEditor } from "./TiptapEditor"; +import { PageTitleProvider } from "./PageTitleContext"; +import { PageTree } from "./PageTree"; +import { RefineAgentPanel } from "./RefineAgentPanel"; +import { Skeleton } from "@/components/Skeleton"; +import { Button } from "@/components/ui/button"; +import { + Dialog, + DialogContent, + DialogHeader, + DialogTitle, + DialogDescription, + DialogFooter, +} from "@/components/ui/dialog"; + +interface ContentTabProps { + initiativeId: string; + initiativeName: string; +} + +interface DeleteConfirmation { + pageId: string; + redo: () => void; +} + +export function ContentTab({ initiativeId, initiativeName }: ContentTabProps) { + const utils = trpc.useUtils(); + const handleSaved = 
useCallback(() => { + void utils.listPages.invalidate({ initiativeId }); + }, [utils, initiativeId]); + const { save, flush, isSaving } = useAutoSave({ onSaved: handleSaved }); + + // Get or create root page + const rootPageQuery = trpc.getRootPage.useQuery({ initiativeId }); + const allPagesQuery = trpc.listPages.useQuery({ initiativeId }); + const createPageMutation = trpc.createPage.useMutation({ + onSuccess: () => { + void utils.listPages.invalidate({ initiativeId }); + }, + }); + const deletePageMutation = trpc.deletePage.useMutation({ + onSuccess: () => { + void utils.listPages.invalidate({ initiativeId }); + }, + }); + + const updateInitiativeMutation = trpc.updateInitiative.useMutation({ + onSuccess: () => { + void utils.getInitiative.invalidate({ id: initiativeId }); + }, + }); + const initiativeNameTimerRef = useRef<ReturnType<typeof setTimeout> | null>(null); + const pendingInitiativeNameRef = useRef<string | null>(null); + + const [activePageId, setActivePageId] = useState<string | null>(null); + const [deleteConfirm, setDeleteConfirm] = useState<DeleteConfirmation | null>(null); + const [pageTitle, setPageTitle] = useState(""); + + // Keep a ref to the current editor so subpage creation can insert links + const editorRef = useRef<Editor | null>(null); + + // Resolve active page: use explicit selection, or fallback to root + const resolvedActivePageId = + activePageId ?? rootPageQuery.data?.id ?? null; + + const isRootPage = resolvedActivePageId != null && resolvedActivePageId === rootPageQuery.data?.id; + + // Fetch active page content + const activePageQuery = trpc.getPage.useQuery( + { id: resolvedActivePageId! 
}, + { enabled: !!resolvedActivePageId }, + ); + + const handleEditorUpdate = useCallback( + (json: string) => { + if (resolvedActivePageId) { + save(resolvedActivePageId, { content: json }); + } + }, + [resolvedActivePageId, save], + ); + + // Sync title from server when active page changes + useEffect(() => { + if (activePageQuery.data) { + setPageTitle(isRootPage ? initiativeName : activePageQuery.data.title); + } + }, [activePageQuery.data?.id, isRootPage, initiativeName]); // eslint-disable-line react-hooks/exhaustive-deps + + const handleTitleChange = useCallback( + (e: React.ChangeEvent<HTMLInputElement>) => { + const newTitle = e.target.value; + setPageTitle(newTitle); + if (isRootPage) { + // Debounce initiative name updates + pendingInitiativeNameRef.current = newTitle; + if (initiativeNameTimerRef.current) { + clearTimeout(initiativeNameTimerRef.current); + } + initiativeNameTimerRef.current = setTimeout(() => { + pendingInitiativeNameRef.current = null; + updateInitiativeMutation.mutate({ id: initiativeId, name: newTitle }); + initiativeNameTimerRef.current = null; + }, 1000); + } else if (resolvedActivePageId) { + save(resolvedActivePageId, { title: newTitle }); + } + }, + [isRootPage, resolvedActivePageId, save, initiativeId, updateInitiativeMutation], + ); + + // Flush pending initiative name save on unmount + useEffect(() => { + return () => { + if (initiativeNameTimerRef.current) { + clearTimeout(initiativeNameTimerRef.current); + initiativeNameTimerRef.current = null; + } + if (pendingInitiativeNameRef.current != null) { + updateInitiativeMutation.mutate({ id: initiativeId, name: pendingInitiativeNameRef.current }); + pendingInitiativeNameRef.current = null; + } + }; + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []); + + const handleTitleKeyDown = useCallback( + (e: React.KeyboardEvent<HTMLInputElement>) => { + if (e.key === "Enter") { + e.preventDefault(); + // Focus the Tiptap editor below + const el = (e.target as HTMLElement) 
+ .closest(".flex-1") + ?.querySelector<HTMLElement>("[contenteditable]"); + el?.focus(); + } + }, + [], + ); + + const handleCreateChild = useCallback( + (parentPageId: string) => { + createPageMutation.mutate({ + initiativeId, + parentPageId, + title: "Untitled", + }); + }, + [initiativeId, createPageMutation], + ); + + const handleNavigate = useCallback((pageId: string) => { + setActivePageId(pageId); + }, []); + + // Slash command: /subpage — creates a page and inserts a link at cursor + const handleSubpageCreate = useCallback( + async (editor: Editor) => { + editorRef.current = editor; + try { + const page = await createPageMutation.mutateAsync({ + initiativeId, + parentPageId: resolvedActivePageId, + title: "Untitled", + }); + // Insert page link at current cursor position + editor.commands.insertPageLink({ pageId: page.id }); + // Wait for auto-save to persist the link before navigating + await flush(); + // Update the query cache so navigating back shows content with the link + utils.getPage.setData( + { id: resolvedActivePageId! }, + (old) => old ? { ...old, content: JSON.stringify(editor.getJSON()) } : undefined, + ); + // Navigate directly to the newly created subpage + setActivePageId(page.id); + } catch { + // Mutation errors surfaced via React Query state + } + }, + [initiativeId, resolvedActivePageId, createPageMutation, flush, utils], + ); + + // Detect when a page link is deleted from the editor (already undone by plugin) + const handlePageLinkDeleted = useCallback( + (pageId: string, redo: () => void) => { + // Don't prompt for pages that don't exist in our tree + const allPages = allPagesQuery.data ?? 
[]; + const exists = allPages.some((p) => p.id === pageId); + if (!exists) { + // Page doesn't exist — redo the deletion so the stale link is removed + redo(); + return; + } + + setDeleteConfirm({ pageId, redo }); + }, + [allPagesQuery.data], + ); + + const confirmDeleteSubpage = useCallback(() => { + if (deleteConfirm) { + // Re-delete the page link from the editor, then delete the page from DB + deleteConfirm.redo(); + deletePageMutation.mutate({ id: deleteConfirm.pageId }); + setDeleteConfirm(null); + } + }, [deleteConfirm, deletePageMutation]); + + const dismissDeleteConfirm = useCallback(() => { + setDeleteConfirm(null); + }, []); + + const allPages = allPagesQuery.data ?? []; + + // Loading + if (rootPageQuery.isLoading) { + return ( + <div className="flex gap-4"> + <Skeleton className="h-64 w-48" /> + <Skeleton className="h-64 flex-1" /> + </div> + ); + } + + // Error — server likely needs restart or migration hasn't applied + if (rootPageQuery.isError) { + return ( + <div className="flex flex-col items-center justify-center gap-3 py-12 text-center"> + <AlertCircle className="h-6 w-6 text-destructive" /> + <p className="text-sm text-destructive"> + Failed to load editor: {rootPageQuery.error.message} + </p> + <p className="text-xs text-muted-foreground"> + Make sure the backend server is running with the latest code. + </p> + <Button + variant="outline" + size="sm" + onClick={() => void rootPageQuery.refetch()} + > + Retry + </Button> + </div> + ); + } + + return ( + <> + <PageTitleProvider pages={allPages}> + <div className="flex gap-4 pt-4"> + {/* Page tree sidebar */} + <div className="w-48 shrink-0 border-r border-border pr-3"> + <div className="flex items-center justify-between mb-2"> + <span className="text-xs font-medium text-muted-foreground uppercase tracking-wider"> + Pages + </span> + </div> + <PageTree + pages={allPages} + activePageId={resolvedActivePageId ?? 
""} + onNavigate={handleNavigate} + onCreateChild={handleCreateChild} + /> + </div> + + {/* Editor area */} + <div className="flex-1 min-w-0"> + {/* Refine agent panel — sits above editor */} + <RefineAgentPanel initiativeId={initiativeId} /> + + {resolvedActivePageId && ( + <> + {(isSaving || updateInitiativeMutation.isPending) && ( + <div className="flex justify-end mb-2"> + <span className="text-xs text-muted-foreground"> + Saving... + </span> + </div> + )} + {activePageQuery.isSuccess && ( + <input + value={pageTitle} + onChange={handleTitleChange} + onKeyDown={handleTitleKeyDown} + placeholder="Untitled" + className="w-full text-3xl font-bold bg-transparent border-none outline-none placeholder:text-muted-foreground/40 pl-11 mb-2" + /> + )} + {activePageQuery.isSuccess && ( + <TiptapEditor + key={resolvedActivePageId} + pageId={resolvedActivePageId} + content={activePageQuery.data?.content ?? null} + onUpdate={handleEditorUpdate} + onPageLinkClick={handleNavigate} + onSubpageCreate={handleSubpageCreate} + onPageLinkDeleted={handlePageLinkDeleted} + /> + )} + {activePageQuery.isLoading && ( + <Skeleton className="h-64 w-full" /> + )} + {activePageQuery.isError && ( + <div className="flex items-center gap-2 py-4 text-sm text-destructive"> + <AlertCircle className="h-4 w-4" /> + Failed to load page: {activePageQuery.error.message} + </div> + )} + </> + )} + </div> + </div> + + {/* Delete subpage confirmation dialog */} + <Dialog + open={deleteConfirm !== null} + onOpenChange={(open) => { + if (!open) dismissDeleteConfirm(); + }} + > + <DialogContent> + <DialogHeader> + <DialogTitle>Delete subpage?</DialogTitle> + <DialogDescription> + You removed the link to “{allPages.find((p) => p.id === deleteConfirm?.pageId)?.title ?? "Untitled"}”. + Do you also want to delete the subpage and all its content? 
+ </DialogDescription> + </DialogHeader> + <DialogFooter> + <Button variant="outline" onClick={dismissDeleteConfirm}> + Keep subpage + </Button> + <Button variant="destructive" onClick={confirmDeleteSubpage}> + Delete subpage + </Button> + </DialogFooter> + </DialogContent> + </Dialog> + </PageTitleProvider> + </> + ); +} diff --git a/packages/web/src/components/editor/PageBreadcrumb.tsx b/packages/web/src/components/editor/PageBreadcrumb.tsx new file mode 100644 index 0000000..c9c7739 --- /dev/null +++ b/packages/web/src/components/editor/PageBreadcrumb.tsx @@ -0,0 +1,52 @@ +import { useMemo } from "react"; +import { ChevronRight } from "lucide-react"; + +interface PageBreadcrumbProps { + pages: Array<{ + id: string; + parentPageId: string | null; + title: string; + }>; + activePageId: string; + onNavigate: (pageId: string) => void; +} + +export function PageBreadcrumb({ + pages, + activePageId, + onNavigate, +}: PageBreadcrumbProps) { + const trail = useMemo(() => { + const byId = new Map(pages.map((p) => [p.id, p])); + const result: Array<{ id: string; title: string }> = []; + let current = byId.get(activePageId); + + while (current) { + result.unshift({ id: current.id, title: current.title }); + current = current.parentPageId + ? byId.get(current.parentPageId) + : undefined; + } + return result; + }, [pages, activePageId]); + + return ( + <nav className="flex items-center gap-1 text-sm text-muted-foreground"> + {trail.map((item, i) => ( + <span key={item.id} className="flex items-center gap-1"> + {i > 0 && <ChevronRight className="h-3 w-3" />} + {i < trail.length - 1 ? 
( + <button + onClick={() => onNavigate(item.id)} + className="hover:text-foreground transition-colors" + > + {item.title} + </button> + ) : ( + <span className="text-foreground font-medium">{item.title}</span> + )} + </span> + ))} + </nav> + ); +} diff --git a/packages/web/src/components/editor/PageLinkExtension.tsx b/packages/web/src/components/editor/PageLinkExtension.tsx new file mode 100644 index 0000000..93e66c1 --- /dev/null +++ b/packages/web/src/components/editor/PageLinkExtension.tsx @@ -0,0 +1,75 @@ +import { Node, mergeAttributes, ReactNodeViewRenderer, NodeViewWrapper } from "@tiptap/react"; +import type { NodeViewProps } from "@tiptap/react"; +import { FileText } from "lucide-react"; +import { usePageTitle } from "./PageTitleContext"; + +declare module "@tiptap/react" { + interface Commands<ReturnType> { + pageLink: { + insertPageLink: (attrs: { pageId: string }) => ReturnType; + }; + } +} + +function PageLinkNodeView({ node }: NodeViewProps) { + const title = usePageTitle(node.attrs.pageId); + + const handleClick = (e: React.MouseEvent) => { + (e.currentTarget as HTMLElement).dispatchEvent( + new CustomEvent("page-link-click", { + bubbles: true, + detail: { pageId: node.attrs.pageId }, + }), + ); + }; + + return ( + <NodeViewWrapper className="page-link-block" data-page-link={node.attrs.pageId} onClick={handleClick}> + <FileText className="h-5 w-5 shrink-0" /> + <span>{title}</span> + </NodeViewWrapper> + ); +} + +export const PageLinkExtension = Node.create({ + name: "pageLink", + group: "block", + atom: true, + + addAttributes() { + return { + pageId: { default: null }, + }; + }, + + parseHTML() { + return [ + { tag: 'div[data-page-link]' }, + { tag: 'span[data-page-link]' }, + ]; + }, + + renderHTML({ HTMLAttributes }) { + return [ + "div", + mergeAttributes(HTMLAttributes, { + "data-page-link": HTMLAttributes.pageId, + class: "page-link-block", + }), + ]; + }, + + addCommands() { + return { + insertPageLink: + (attrs) => + ({ chain }) => { + 
return chain().insertContent({ type: this.name, attrs }).run(); + }, + }; + }, + + addNodeView() { + return ReactNodeViewRenderer(PageLinkNodeView); + }, +}); diff --git a/packages/web/src/components/editor/PageTitleContext.tsx b/packages/web/src/components/editor/PageTitleContext.tsx new file mode 100644 index 0000000..72b725c --- /dev/null +++ b/packages/web/src/components/editor/PageTitleContext.tsx @@ -0,0 +1,26 @@ +import { createContext, useContext, useMemo } from "react"; + +const PageTitleContext = createContext<Map<string, string>>(new Map()); + +interface PageTitleProviderProps { + pages: { id: string; title: string }[]; + children: React.ReactNode; +} + +export function PageTitleProvider({ pages, children }: PageTitleProviderProps) { + const titleMap = useMemo( + () => new Map(pages.map((p) => [p.id, p.title])), + [pages], + ); + + return ( + <PageTitleContext.Provider value={titleMap}> + {children} + </PageTitleContext.Provider> + ); +} + +export function usePageTitle(pageId: string): string { + const titleMap = useContext(PageTitleContext); + return titleMap.get(pageId) ?? 
"Untitled"; +} diff --git a/packages/web/src/components/editor/PageTree.tsx b/packages/web/src/components/editor/PageTree.tsx new file mode 100644 index 0000000..875af02 --- /dev/null +++ b/packages/web/src/components/editor/PageTree.tsx @@ -0,0 +1,119 @@ +import { useMemo } from "react"; +import { FileText, Plus } from "lucide-react"; +import { Button } from "@/components/ui/button"; + +interface PageTreeProps { + pages: Array<{ + id: string; + parentPageId: string | null; + title: string; + }>; + activePageId: string; + onNavigate: (pageId: string) => void; + onCreateChild: (parentPageId: string) => void; +} + +interface TreeNode { + id: string; + title: string; + children: TreeNode[]; +} + +export function PageTree({ + pages, + activePageId, + onNavigate, + onCreateChild, +}: PageTreeProps) { + const tree = useMemo(() => { + const childrenMap = new Map<string | null, typeof pages>([]); + for (const page of pages) { + const key = page.parentPageId; + const existing = childrenMap.get(key) ?? []; + existing.push(page); + childrenMap.set(key, existing); + } + + function buildTree(parentId: string | null): TreeNode[] { + const children = childrenMap.get(parentId) ?? 
[]; + return children.map((page) => ({ + id: page.id, + title: page.title, + children: buildTree(page.id), + })); + } + + return buildTree(null); + }, [pages]); + + return ( + <div className="space-y-0.5"> + {tree.map((node) => ( + <PageTreeNode + key={node.id} + node={node} + depth={0} + activePageId={activePageId} + onNavigate={onNavigate} + onCreateChild={onCreateChild} + /> + ))} + </div> + ); +} + +interface PageTreeNodeProps { + node: TreeNode; + depth: number; + activePageId: string; + onNavigate: (pageId: string) => void; + onCreateChild: (parentPageId: string) => void; +} + +function PageTreeNode({ + node, + depth, + activePageId, + onNavigate, + onCreateChild, +}: PageTreeNodeProps) { + const isActive = node.id === activePageId; + + return ( + <div> + <div + className={`group flex items-center gap-1.5 rounded-sm px-2 py-1 text-sm cursor-pointer ${ + isActive + ? "bg-accent text-accent-foreground" + : "text-muted-foreground hover:bg-muted hover:text-foreground" + }`} + style={{ paddingLeft: `${depth * 12 + 8}px` }} + onClick={() => onNavigate(node.id)} + > + <FileText className="h-3.5 w-3.5 shrink-0" /> + <span className="truncate flex-1">{node.title}</span> + <Button + variant="ghost" + size="icon" + className="h-5 w-5 opacity-0 group-hover:opacity-100 shrink-0" + onClick={(e) => { + e.stopPropagation(); + onCreateChild(node.id); + }} + > + <Plus className="h-3 w-3" /> + </Button> + </div> + {node.children.map((child) => ( + <PageTreeNode + key={child.id} + node={child} + depth={depth + 1} + activePageId={activePageId} + onNavigate={onNavigate} + onCreateChild={onCreateChild} + /> + ))} + </div> + ); +} diff --git a/packages/web/src/components/editor/RefineAgentPanel.tsx b/packages/web/src/components/editor/RefineAgentPanel.tsx new file mode 100644 index 0000000..22f5aea --- /dev/null +++ b/packages/web/src/components/editor/RefineAgentPanel.tsx @@ -0,0 +1,142 @@ +import { useCallback } from "react"; +import { Loader2, AlertCircle } from "lucide-react"; 
+import { Button } from "@/components/ui/button"; +import { QuestionForm } from "@/components/QuestionForm"; +import { ContentProposalReview } from "./ContentProposalReview"; +import { RefineSpawnDialog } from "../RefineSpawnDialog"; +import { useRefineAgent } from "@/hooks"; + +interface RefineAgentPanelProps { + initiativeId: string; +} + +export function RefineAgentPanel({ initiativeId }: RefineAgentPanelProps) { + // All agent logic is now encapsulated in the hook + const { state, agent, questions, proposals, spawn, resume, refresh } = useRefineAgent(initiativeId); + + // spawn.mutate and resume.mutate are stable (ref-backed in useRefineAgent), + // so these callbacks won't change on every render. + const handleSpawn = useCallback((instruction?: string) => { + spawn.mutate({ + initiativeId, + instruction, + }); + }, [initiativeId, spawn.mutate]); + + const handleAnswerSubmit = useCallback( + (answers: Record<string, string>) => { + resume.mutate(answers); + }, + [resume.mutate], + ); + + const handleDismiss = useCallback(() => { + refresh(); + }, [refresh]); + + // No active agent — show spawn button + if (state === "none") { + return ( + <div className="mb-3"> + <RefineSpawnDialog + triggerText="Refine with Agent" + title="Refine Initiative Content" + description="An agent will review all pages and suggest improvements. Optionally tell it what to focus on." + instructionPlaceholder="What should the agent focus on? (optional)" + isSpawning={spawn.isPending} + error={spawn.error?.message} + onSpawn={handleSpawn} + /> + </div> + ); + } + + // Running + if (state === "running") { + return ( + <div className="mb-3 flex items-center gap-2 rounded-lg border border-border bg-card px-3 py-2"> + <Loader2 className="h-3.5 w-3.5 animate-spin text-primary" /> + <span className="text-sm text-muted-foreground"> + Architect is refining... 
+ </span> + </div> + ); + } + + // Waiting for input — show inline questions + if (state === "waiting" && questions) { + return ( + <div className="mb-3 rounded-lg border border-border bg-card p-4"> + <h3 className="text-sm font-semibold mb-3">Agent has questions</h3> + <QuestionForm + questions={questions.questions} + onSubmit={handleAnswerSubmit} + onCancel={() => { + // Can't cancel mid-question — just dismiss + }} + isSubmitting={resume.isPending} + /> + </div> + ); + } + + // Completed with proposals + if (state === "completed" && proposals && proposals.length > 0) { + return ( + <div className="mb-3"> + <ContentProposalReview + proposals={proposals} + agentCreatedAt={new Date(agent!.createdAt)} + agentId={agent!.id} + onDismiss={handleDismiss} + /> + </div> + ); + } + + // Completed without proposals (or generic result) + if (state === "completed") { + return ( + <div className="mb-3 flex items-center gap-2 rounded-lg border border-border bg-card px-3 py-2"> + <span className="text-sm text-muted-foreground"> + Agent completed — no changes proposed. + </span> + <Button variant="ghost" size="sm" onClick={handleDismiss}> + Dismiss + </Button> + </div> + ); + } + + // Crashed + if (state === "crashed") { + return ( + <div className="mb-3 rounded-lg border border-destructive/50 bg-destructive/5 px-3 py-2"> + <div className="flex items-center gap-2"> + <AlertCircle className="h-3.5 w-3.5 text-destructive" /> + <span className="text-sm text-destructive">Agent crashed</span> + <RefineSpawnDialog + triggerText="Retry" + title="Refine Initiative Content" + description="An agent will review all pages and suggest improvements." + instructionPlaceholder="What should the agent focus on? 
(optional)" + isSpawning={spawn.isPending} + error={spawn.error?.message} + onSpawn={handleSpawn} + trigger={ + <Button + variant="outline" + size="sm" + className="ml-auto" + > + Retry + </Button> + } + /> + </div> + </div> + ); + } + + return null; +} diff --git a/packages/web/src/components/editor/SlashCommandList.tsx b/packages/web/src/components/editor/SlashCommandList.tsx new file mode 100644 index 0000000..0eb8fbe --- /dev/null +++ b/packages/web/src/components/editor/SlashCommandList.tsx @@ -0,0 +1,88 @@ +import { + useState, + useEffect, + useCallback, + forwardRef, + useImperativeHandle, +} from "react"; +import type { SlashCommandItem } from "./slash-command-items"; + +export interface SlashCommandListRef { + onKeyDown: (props: { event: KeyboardEvent }) => boolean; +} + +interface SlashCommandListProps { + items: SlashCommandItem[]; + command: (item: SlashCommandItem) => void; +} + +export const SlashCommandList = forwardRef< + SlashCommandListRef, + SlashCommandListProps +>(({ items, command }, ref) => { + const [selectedIndex, setSelectedIndex] = useState(0); + + useEffect(() => { + setSelectedIndex(0); + }, [items]); + + const selectItem = useCallback( + (index: number) => { + const item = items[index]; + if (item) { + command(item); + } + }, + [items, command], + ); + + useImperativeHandle(ref, () => ({ + onKeyDown: ({ event }: { event: KeyboardEvent }) => { + if (event.key === "ArrowUp") { + setSelectedIndex((prev) => (prev + items.length - 1) % items.length); + return true; + } + if (event.key === "ArrowDown") { + setSelectedIndex((prev) => (prev + 1) % items.length); + return true; + } + if (event.key === "Enter") { + selectItem(selectedIndex); + return true; + } + return false; + }, + })); + + if (items.length === 0) { + return null; + } + + return ( + <div className="z-50 min-w-[200px] overflow-hidden rounded-md border border-border bg-popover p-1 shadow-md"> + {items.map((item, index) => ( + <button + key={item.label} + onClick={() => 
selectItem(index)} + className={`flex w-full items-center gap-2 rounded-sm px-2 py-1.5 text-sm ${ + index === selectedIndex + ? "bg-accent text-accent-foreground" + : "text-popover-foreground" + }`} + > + <span className="flex h-6 w-6 shrink-0 items-center justify-center rounded border border-border bg-muted text-xs font-mono"> + {item.icon} + </span> + <div className="flex flex-col items-start"> + <span className="font-medium">{item.label}</span> + <span className="text-xs text-muted-foreground"> + {item.description} + </span> + </div> + </button> + ))} + </div> + ); +}); + +SlashCommandList.displayName = "SlashCommandList"; diff --git a/packages/web/src/components/editor/SlashCommands.ts b/packages/web/src/components/editor/SlashCommands.ts new file mode 100644 index 0000000..485be1f --- /dev/null +++ b/packages/web/src/components/editor/SlashCommands.ts @@ -0,0 +1,121 @@ +import { Extension } from "@tiptap/react"; +import { ReactRenderer } from "@tiptap/react"; +import Suggestion from "@tiptap/suggestion"; +import tippy, { type Instance as TippyInstance } from "tippy.js"; +import { + slashCommandItems, + type SlashCommandItem, +} from "./slash-command-items"; +import { SlashCommandList, type SlashCommandListRef } from "./SlashCommandList"; + +export const SlashCommands = Extension.create({ + name: "slashCommands", + + addStorage() { + return { + onSubpageCreate: null as ((editor: unknown) => void) | null, + }; + }, + + addOptions() { + return { + suggestion: { + char: "/", + startOfLine: false, + command: ({ + editor, + range, + props, + }: { + editor: ReturnType<typeof import("@tiptap/react").useEditor>; + range: { from: number; to: number }; + props: SlashCommandItem; + }) => { + // Delete the slash command text + editor.chain().focus().deleteRange(range).run(); + // Execute the selected command + props.action(editor); + }, + items: ({ query }: { query: string }): SlashCommandItem[] => { + return slashCommandItems.filter((item) => + 
item.label.toLowerCase().includes(query.toLowerCase()), + ); + }, + render: () => { + let component: ReactRenderer<SlashCommandListRef> | null = null; + let popup: TippyInstance[] | null = null; + + return { + onStart: (props: { + editor: ReturnType<typeof import("@tiptap/react").useEditor>; + clientRect: (() => DOMRect | null) | null; + items: SlashCommandItem[]; + command: (item: SlashCommandItem) => void; + }) => { + component = new ReactRenderer(SlashCommandList, { + props: { + items: props.items, + command: props.command, + }, + editor: props.editor, + }); + + const getReferenceClientRect = props.clientRect; + + popup = tippy("body", { + getReferenceClientRect: getReferenceClientRect as () => DOMRect, + appendTo: () => document.body, + content: component.element, + showOnCreate: true, + interactive: true, + trigger: "manual", + placement: "bottom-start", + }); + }, + + onUpdate: (props: { + items: SlashCommandItem[]; + command: (item: SlashCommandItem) => void; + clientRect: (() => DOMRect | null) | null; + }) => { + component?.updateProps({ + items: props.items, + command: props.command, + }); + + if (popup?.[0]) { + popup[0].setProps({ + getReferenceClientRect: + props.clientRect as unknown as () => DOMRect, + }); + } + }, + + onKeyDown: (props: { event: KeyboardEvent }) => { + if (props.event.key === "Escape") { + popup?.[0]?.hide(); + return true; + } + + return component?.ref?.onKeyDown(props) ?? 
false; + }, + + onExit: () => { + popup?.[0]?.destroy(); + component?.destroy(); + }, + }; + }, + }, + }; + }, + + addProseMirrorPlugins() { + return [ + Suggestion({ + editor: this.editor, + ...this.options.suggestion, + }), + ]; + }, +}); diff --git a/packages/web/src/components/editor/TiptapEditor.tsx b/packages/web/src/components/editor/TiptapEditor.tsx new file mode 100644 index 0000000..91b6144 --- /dev/null +++ b/packages/web/src/components/editor/TiptapEditor.tsx @@ -0,0 +1,361 @@ +import { useState, useEffect, useRef, useCallback } from "react"; +import { useEditor, EditorContent, Extension } from "@tiptap/react"; +import type { Editor } from "@tiptap/react"; +import { GripVertical, Plus } from "lucide-react"; +import StarterKit from "@tiptap/starter-kit"; +import Placeholder from "@tiptap/extension-placeholder"; +import Link from "@tiptap/extension-link"; +import { Table, TableRow, TableCell, TableHeader } from "@tiptap/extension-table"; +import { Plugin, PluginKey, NodeSelection, TextSelection } from "@tiptap/pm/state"; +import { Fragment, Slice, type Node as PmNode } from "@tiptap/pm/model"; +import { SlashCommands } from "./SlashCommands"; +import { PageLinkExtension } from "./PageLinkExtension"; +import { + BlockSelectionExtension, + blockSelectionKey, + getBlockRange, +} from "./BlockSelectionExtension"; + +interface TiptapEditorProps { + content: string | null; + onUpdate: (json: string) => void; + pageId: string; + onPageLinkClick?: (pageId: string) => void; + onSubpageCreate?: ( + editor: Editor, + ) => void; + onPageLinkDeleted?: (pageId: string, redo: () => void) => void; +} + +export function TiptapEditor({ + content, + onUpdate, + pageId, + onPageLinkClick, + onSubpageCreate, + onPageLinkDeleted, +}: TiptapEditorProps) { + const containerRef = useRef<HTMLDivElement>(null); + const onPageLinkDeletedRef = useRef(onPageLinkDeleted); + onPageLinkDeletedRef.current = onPageLinkDeleted; + const blockIndexRef = useRef<number | null>(null); + const 
savedBlockSelRef = useRef<{ anchorIndex: number; headIndex: number } | null>(null); + + const editor = useEditor( + { + extensions: [ + StarterKit, + Table.configure({ resizable: true, cellMinWidth: 50 }), + TableRow, + TableCell, + TableHeader, + Placeholder.configure({ + includeChildren: true, + placeholder: ({ node }) => { + if (node.type.name === 'heading') { + return `Heading ${node.attrs.level}`; + } + return "Type '/' for commands..."; + }, + }), + Link.configure({ + openOnClick: false, + }), + SlashCommands, + PageLinkExtension, + BlockSelectionExtension, + // Detect pageLink node deletions by comparing old/new doc state + Extension.create({ + name: "pageLinkDeletionDetector", + addStorage() { + return { skipDetection: false }; + }, + addProseMirrorPlugins() { + const tiptapEditor = this.editor; + return [ + new Plugin({ + key: new PluginKey("pageLinkDeletionDetector"), + appendTransaction(_transactions, oldState, newState) { + if (oldState.doc.eq(newState.doc)) return null; + + const oldLinks = new Set<string>(); + oldState.doc.descendants((node) => { + if (node.type.name === "pageLink" && node.attrs.pageId) { + oldLinks.add(node.attrs.pageId); + } + }); + + const newLinks = new Set<string>(); + newState.doc.descendants((node) => { + if (node.type.name === "pageLink" && node.attrs.pageId) { + newLinks.add(node.attrs.pageId); + } + }); + + for (const removedPageId of oldLinks) { + if (!newLinks.has(removedPageId)) { + // Fire async to avoid dispatching during appendTransaction + setTimeout(() => { + if (tiptapEditor.storage.pageLinkDeletionDetector.skipDetection) { + tiptapEditor.storage.pageLinkDeletionDetector.skipDetection = false; + return; + } + // Undo the deletion immediately so the link reappears + tiptapEditor.commands.undo(); + // Pass a redo function so the caller can re-delete if confirmed + onPageLinkDeletedRef.current?.( + removedPageId, + () => { + tiptapEditor.storage.pageLinkDeletionDetector.skipDetection = true; + 
tiptapEditor.commands.redo(); + }, + ); + }, 0); + } + } + + return null; + }, + }), + ]; + }, + }), + ], + content: content ? JSON.parse(content) : undefined, + onUpdate: ({ editor: e }) => { + onUpdate(JSON.stringify(e.getJSON())); + }, + editorProps: { + attributes: { + class: + "prose prose-sm prose-p:my-1 prose-headings:mb-1 prose-headings:mt-3 prose-ul:my-1 prose-ol:my-1 prose-li:my-0.5 prose-blockquote:my-1 prose-pre:my-1 prose-hr:my-2 dark:prose-invert max-w-none focus:outline-none min-h-[400px] pl-11 pr-4 py-1", + }, + }, + }, + [pageId], + ); + + // Wire the onSubpageCreate callback into editor storage + useEffect(() => { + if (editor && onSubpageCreate) { + editor.storage.slashCommands.onSubpageCreate = (ed: Editor) => { + onSubpageCreate(ed); + }; + } + }, [editor, onSubpageCreate]); + + // Handle page link clicks via custom event + const handlePageLinkClick = useCallback( + (e: Event) => { + const detail = (e as CustomEvent).detail; + if (detail?.pageId && onPageLinkClick) { + onPageLinkClick(detail.pageId); + } + }, + [onPageLinkClick], + ); + + useEffect(() => { + const el = containerRef.current; + if (!el) return; + el.addEventListener("page-link-click", handlePageLinkClick); + return () => + el.removeEventListener("page-link-click", handlePageLinkClick); + }, [handlePageLinkClick]); + + // Floating drag handle: track which block the mouse is over + const [handlePos, setHandlePos] = useState<{ top: number; height: number } | null>(null); + const blockElRef = useRef<HTMLElement | null>(null); + + const onMouseMove = useCallback((e: React.MouseEvent) => { + // If hovering the handle itself, keep current position + if ((e.target as HTMLElement).closest("[data-block-handle-row]")) return; + + const editorEl = containerRef.current?.querySelector(".ProseMirror"); + if (!editorEl || !editor) return; + + // Walk from event target up to a direct child of .ProseMirror + let target = e.target as HTMLElement; + while (target && target !== editorEl && 
target.parentElement !== editorEl) { + target = target.parentElement!; + } + + if (target && target !== editorEl && target.parentElement === editorEl) { + blockElRef.current = target; + const editorRect = editorEl.getBoundingClientRect(); + const blockRect = target.getBoundingClientRect(); + setHandlePos({ + top: blockRect.top - editorRect.top, + height: blockRect.height, + }); + + // Track top-level block index for block selection + try { + const pos = editor.view.posAtDOM(target, 0); + blockIndexRef.current = editor.view.state.doc.resolve(pos).index(0); + } catch { + blockIndexRef.current = null; + } + } + // Don't clear — only onMouseLeave clears + }, [editor]); + + const onMouseLeave = useCallback(() => { + setHandlePos(null); + blockElRef.current = null; + blockIndexRef.current = null; + }, []); + + // Click on drag handle → select block (Shift+click extends) + const onHandleClick = useCallback( + (e: React.MouseEvent) => { + if (!editor) return; + const idx = blockIndexRef.current; + if (idx == null) return; + + // Use saved state from mousedown (PM may have cleared it due to focus change) + const existing = savedBlockSelRef.current; + + let newSel; + if (e.shiftKey && existing) { + newSel = { anchorIndex: existing.anchorIndex, headIndex: idx }; + } else { + newSel = { anchorIndex: idx, headIndex: idx }; + } + + const tr = editor.view.state.tr.setMeta(blockSelectionKey, newSel); + tr.setMeta("blockSelectionInternal", true); + editor.view.dispatch(tr); + // Refocus editor so Shift+Arrow keys reach PM's handleKeyDown + editor.view.focus(); + }, + [editor], + ); + + // Add a new empty paragraph below the hovered block + const onHandleAdd = useCallback(() => { + if (!editor || !blockElRef.current) return; + const view = editor.view; + try { + const pos = view.posAtDOM(blockElRef.current, 0); + const $pos = view.state.doc.resolve(pos); + const after = $pos.after($pos.depth); + const paragraph = view.state.schema.nodes.paragraph.create(); + const tr = 
view.state.tr.insert(after, paragraph); + // Place cursor inside the new paragraph + tr.setSelection(TextSelection.create(tr.doc, after + 1)); + view.dispatch(tr); + view.focus(); + } catch { + // posAtDOM can throw if the element isn't in the editor + } + }, [editor]); + + // Initiate ProseMirror-native drag when handle is dragged + const onHandleDragStart = useCallback( + (e: React.DragEvent) => { + if (!editor || !blockElRef.current) return; + + const view = editor.view; + const el = blockElRef.current; + // Use saved state from mousedown (PM may have cleared it due to focus change) + const bsel = savedBlockSelRef.current; + + try { + // Multi-block drag: if block selection is active and hovered block is in range + if (bsel && blockIndexRef.current != null) { + const from = Math.min(bsel.anchorIndex, bsel.headIndex); + const to = Math.max(bsel.anchorIndex, bsel.headIndex); + + if (blockIndexRef.current >= from && blockIndexRef.current <= to) { + const blockRange = getBlockRange(view.state, bsel); + if (blockRange) { + const nodes: PmNode[] = []; + let idx = 0; + view.state.doc.forEach((node) => { + if (idx >= from && idx <= to) nodes.push(node); + idx++; + }); + + const sel = TextSelection.create( + view.state.doc, + blockRange.fromPos, + blockRange.toPos, + ); + const tr = view.state.tr.setSelection(sel); + tr.setMeta("blockSelectionInternal", true); + view.dispatch(tr); + + view.dragging = { + slice: new Slice(Fragment.from(nodes), 0, 0), + move: true, + }; + + e.dataTransfer.effectAllowed = "move"; + e.dataTransfer.setDragImage(el, 0, 0); + e.dataTransfer.setData("application/x-pm-drag", "true"); + return; + } + } + } + + // Single-block drag (existing behavior) + const pos = view.posAtDOM(el, 0); + const $pos = view.state.doc.resolve(pos); + const before = $pos.before($pos.depth); + + const sel = NodeSelection.create(view.state.doc, before); + view.dispatch(view.state.tr.setSelection(sel)); + + view.dragging = { slice: sel.content(), move: true }; + + 
e.dataTransfer.effectAllowed = "move"; + e.dataTransfer.setDragImage(el, 0, 0); + e.dataTransfer.setData("application/x-pm-drag", "true"); + } catch { + // posAtDOM can throw if the element isn't in the editor + } + }, + [editor], + ); + + return ( + <div + ref={containerRef} + className="relative" + onMouseMove={onMouseMove} + onMouseLeave={onMouseLeave} + > + {handlePos && ( + <div + data-block-handle-row + className="absolute left-0 flex items-start z-10" + style={{ top: handlePos.top + 1 }} + > + <div + onClick={onHandleAdd} + onMouseDown={(e) => e.preventDefault()} + className="flex items-center justify-center w-5 h-6 cursor-pointer rounded hover:bg-muted" + > + <Plus className="h-3.5 w-3.5 text-muted-foreground/60" /> + </div> + <div + data-drag-handle + draggable + onMouseDown={() => { + if (editor) { + savedBlockSelRef.current = blockSelectionKey.getState(editor.view.state) ?? null; + } + }} + onClick={onHandleClick} + onDragStart={onHandleDragStart} + className="flex items-center justify-center w-5 h-6 cursor-grab rounded hover:bg-muted" + > + <GripVertical className="h-3.5 w-3.5 text-muted-foreground/60" /> + </div> + </div> + )} + <EditorContent editor={editor} /> + </div> + ); +} diff --git a/packages/web/src/components/editor/slash-command-items.ts b/packages/web/src/components/editor/slash-command-items.ts new file mode 100644 index 0000000..bb48a67 --- /dev/null +++ b/packages/web/src/components/editor/slash-command-items.ts @@ -0,0 +1,86 @@ +import type { Editor } from "@tiptap/react"; + +export interface SlashCommandItem { + label: string; + icon: string; + description: string; + /** If true, the action reads onSubpageCreate from editor.storage.slashCommands */ + isSubpage?: boolean; + action: (editor: Editor) => void; +} + +export const slashCommandItems: SlashCommandItem[] = [ + { + label: "Heading 1", + icon: "H1", + description: "Large heading", + action: (editor) => + editor.chain().focus().toggleHeading({ level: 1 }).run(), + }, + { + label: 
"Heading 2", + icon: "H2", + description: "Medium heading", + action: (editor) => + editor.chain().focus().toggleHeading({ level: 2 }).run(), + }, + { + label: "Heading 3", + icon: "H3", + description: "Small heading", + action: (editor) => + editor.chain().focus().toggleHeading({ level: 3 }).run(), + }, + { + label: "Bullet List", + icon: "UL", + description: "Unordered list", + action: (editor) => editor.chain().focus().toggleBulletList().run(), + }, + { + label: "Numbered List", + icon: "OL", + description: "Ordered list", + action: (editor) => editor.chain().focus().toggleOrderedList().run(), + }, + { + label: "Code Block", + icon: "<>", + description: "Code snippet", + action: (editor) => editor.chain().focus().toggleCodeBlock().run(), + }, + { + label: "Quote", + icon: "\"", + description: "Block quote", + action: (editor) => editor.chain().focus().toggleBlockquote().run(), + }, + { + label: "Divider", + icon: "---", + description: "Horizontal rule", + action: (editor) => editor.chain().focus().setHorizontalRule().run(), + }, + { + label: "Table", + icon: "T#", + description: "Insert a table", + action: (editor) => + editor.chain().focus().insertTable({ rows: 3, cols: 3, withHeaderRow: true }).run(), + }, + { + label: "Subpage", + icon: "\uD83D\uDCC4", + description: "Create a linked subpage", + isSubpage: true, + action: (editor) => { + const callback = editor.storage.slashCommands + ?.onSubpageCreate as + | ((editor: Editor) => void) + | undefined; + if (callback) { + callback(editor); + } + }, + }, +]; diff --git a/packages/web/src/components/execution/BreakdownSection.tsx b/packages/web/src/components/execution/BreakdownSection.tsx new file mode 100644 index 0000000..533496d --- /dev/null +++ b/packages/web/src/components/execution/BreakdownSection.tsx @@ -0,0 +1,90 @@ +import { useCallback, useMemo } from "react"; +import { Loader2, Sparkles } from "lucide-react"; +import { Button } from "@/components/ui/button"; +import { trpc } from "@/lib/trpc"; 
+import { useSpawnMutation } from "@/hooks/useSpawnMutation"; + +interface BreakdownSectionProps { + initiativeId: string; + phasesLoaded: boolean; + phases: Array<{ status: string }>; +} + +export function BreakdownSection({ + initiativeId, + phasesLoaded, + phases, +}: BreakdownSectionProps) { + const utils = trpc.useUtils(); + + // Breakdown agent tracking + const agentsQuery = trpc.listAgents.useQuery(); + const allAgents = agentsQuery.data ?? []; + const breakdownAgent = useMemo(() => { + const candidates = allAgents + .filter( + (a) => + a.mode === "breakdown" && + a.taskId === initiativeId && + ["running", "waiting_for_input", "idle"].includes(a.status), + ) + .sort( + (a, b) => + new Date(b.createdAt).getTime() - new Date(a.createdAt).getTime(), + ); + return candidates[0] ?? null; + }, [allAgents, initiativeId]); + + const isBreakdownRunning = breakdownAgent?.status === "running"; + + const breakdownSpawn = useSpawnMutation(trpc.spawnArchitectBreakdown.useMutation, { + onSuccess: () => { + void utils.listAgents.invalidate(); + }, + showToast: false, // We show our own error UI + }); + + const handleBreakdown = useCallback(() => { + breakdownSpawn.spawn({ initiativeId }); + }, [initiativeId, breakdownSpawn]); + + // Don't render if we have phases + if (phasesLoaded && phases.length > 0) { + return null; + } + + // Don't render during loading + if (!phasesLoaded) { + return null; + } + + return ( + <div className="py-8 text-center space-y-3"> + <p className="text-muted-foreground">No phases yet</p> + {isBreakdownRunning ? ( + <div className="flex items-center justify-center gap-2 text-sm text-muted-foreground"> + <Loader2 className="h-3.5 w-3.5 animate-spin" /> + Breaking down initiative... + </div> + ) : ( + <Button + variant="outline" + size="sm" + onClick={handleBreakdown} + disabled={breakdownSpawn.isSpawning} + className="gap-1.5" + > + <Sparkles className="h-3.5 w-3.5" /> + {breakdownSpawn.isSpawning + ? "Starting..." 
+ : "Break Down Initiative"} + </Button> + )} + {breakdownSpawn.isError && ( + <p className="text-xs text-destructive"> + {breakdownSpawn.error} + </p> + )} + </div> + ); +} \ No newline at end of file diff --git a/packages/web/src/components/execution/ExecutionContext.tsx b/packages/web/src/components/execution/ExecutionContext.tsx new file mode 100644 index 0000000..100c69e --- /dev/null +++ b/packages/web/src/components/execution/ExecutionContext.tsx @@ -0,0 +1,150 @@ +import { createContext, useContext, useState, useCallback, useMemo, ReactNode } from "react"; +import type { SerializedTask } from "@/components/TaskRow"; + +// --------------------------------------------------------------------------- +// Types +// --------------------------------------------------------------------------- + +export interface TaskCounts { + complete: number; + total: number; +} + +export interface FlatTaskEntry { + task: SerializedTask; + phaseName: string; + agentName: string | null; + blockedBy: Array<{ name: string; status: string }>; + dependents: Array<{ name: string; status: string }>; +} + +export interface PhaseData { + id: string; + initiativeId: string; + number: number; + name: string; + description: string | null; + status: string; + createdAt: string | Date; + updatedAt: string | Date; +} + +// --------------------------------------------------------------------------- +// Context +// --------------------------------------------------------------------------- + +interface ExecutionContextValue { + // Task selection + selectedTaskId: string | null; + setSelectedTaskId: (taskId: string | null) => void; + + // Task counts by phase + taskCountsByPhase: Record<string, TaskCounts>; + handleTaskCounts: (phaseId: string, counts: TaskCounts) => void; + + // Tasks by phase + tasksByPhase: Record<string, FlatTaskEntry[]>; + handleRegisterTasks: (phaseId: string, entries: FlatTaskEntry[]) => void; + + // Derived data + allFlatTasks: FlatTaskEntry[]; + selectedEntry: 
FlatTaskEntry | null; + tasksComplete: number; + tasksTotal: number; +} + +const ExecutionContext = createContext<ExecutionContextValue | null>(null); + +// --------------------------------------------------------------------------- +// Provider +// --------------------------------------------------------------------------- + +interface ExecutionProviderProps { + children: ReactNode; +} + +export function ExecutionProvider({ children }: ExecutionProviderProps) { + const [selectedTaskId, setSelectedTaskId] = useState<string | null>(null); + const [taskCountsByPhase, setTaskCountsByPhase] = useState< + Record<string, TaskCounts> + >({}); + const [tasksByPhase, setTasksByPhase] = useState< + Record<string, FlatTaskEntry[]> + >({}); + + const handleTaskCounts = useCallback( + (phaseId: string, counts: TaskCounts) => { + setTaskCountsByPhase((prev) => { + if ( + prev[phaseId]?.complete === counts.complete && + prev[phaseId]?.total === counts.total + ) { + return prev; + } + return { ...prev, [phaseId]: counts }; + }); + }, + [], + ); + + const handleRegisterTasks = useCallback( + (phaseId: string, entries: FlatTaskEntry[]) => { + setTasksByPhase((prev) => { + if (prev[phaseId] === entries) return prev; + return { ...prev, [phaseId]: entries }; + }); + }, + [], + ); + + const allFlatTasks = useMemo( + () => Object.values(tasksByPhase).flat(), + [tasksByPhase] + ); + + const selectedEntry = useMemo( + () => selectedTaskId + ? allFlatTasks.find((e) => e.task.id === selectedTaskId) ?? 
null + : null, + [selectedTaskId, allFlatTasks] + ); + + const { tasksComplete, tasksTotal } = useMemo(() => { + const allTaskCounts = Object.values(taskCountsByPhase); + return { + tasksComplete: allTaskCounts.reduce((s, c) => s + c.complete, 0), + tasksTotal: allTaskCounts.reduce((s, c) => s + c.total, 0), + }; + }, [taskCountsByPhase]); + + const value: ExecutionContextValue = { + selectedTaskId, + setSelectedTaskId, + taskCountsByPhase, + handleTaskCounts, + tasksByPhase, + handleRegisterTasks, + allFlatTasks, + selectedEntry, + tasksComplete, + tasksTotal, + }; + + return ( + <ExecutionContext.Provider value={value}> + {children} + </ExecutionContext.Provider> + ); +} + +// --------------------------------------------------------------------------- +// Hook +// --------------------------------------------------------------------------- + +export function useExecutionContext() { + const context = useContext(ExecutionContext); + if (!context) { + throw new Error("useExecutionContext must be used within ExecutionProvider"); + } + return context; +} \ No newline at end of file diff --git a/packages/web/src/components/execution/PhaseActions.tsx b/packages/web/src/components/execution/PhaseActions.tsx new file mode 100644 index 0000000..17a39c2 --- /dev/null +++ b/packages/web/src/components/execution/PhaseActions.tsx @@ -0,0 +1,60 @@ +import { useCallback, useMemo } from "react"; +import { Loader2 } from "lucide-react"; +import { Button } from "@/components/ui/button"; +import { trpc } from "@/lib/trpc"; + +interface PhaseActionsProps { + initiativeId: string; + phases: Array<{ id: string; status: string }>; +} + +export function PhaseActions({ initiativeId, phases }: PhaseActionsProps) { + const queuePhaseMutation = trpc.queuePhase.useMutation(); + + // Breakdown agent tracking for status display + const agentsQuery = trpc.listAgents.useQuery(); + const allAgents = agentsQuery.data ?? 
[]; + const breakdownAgent = useMemo(() => { + const candidates = allAgents + .filter( + (a) => + a.mode === "breakdown" && + a.taskId === initiativeId && + ["running", "waiting_for_input", "idle"].includes(a.status), + ) + .sort( + (a, b) => + new Date(b.createdAt).getTime() - new Date(a.createdAt).getTime(), + ); + return candidates[0] ?? null; + }, [allAgents, initiativeId]); + + const isBreakdownRunning = breakdownAgent?.status === "running"; + const hasPendingPhases = phases.some((p) => p.status === "pending"); + + const handleQueueAll = useCallback(() => { + const pendingPhases = phases.filter((p) => p.status === "pending"); + for (const phase of pendingPhases) { + queuePhaseMutation.mutate({ phaseId: phase.id }); + } + }, [phases, queuePhaseMutation]); + + return ( + <div className="flex items-center gap-2"> + {isBreakdownRunning && ( + <div className="flex items-center gap-1.5 text-xs text-muted-foreground"> + <Loader2 className="h-3 w-3 animate-spin" /> + Breaking down... + </div> + )} + <Button + variant="outline" + size="sm" + disabled={!hasPendingPhases} + onClick={handleQueueAll} + > + Queue All + </Button> + </div> + ); +} \ No newline at end of file diff --git a/packages/web/src/components/execution/PhaseWithTasks.tsx b/packages/web/src/components/execution/PhaseWithTasks.tsx new file mode 100644 index 0000000..31bf945 --- /dev/null +++ b/packages/web/src/components/execution/PhaseWithTasks.tsx @@ -0,0 +1,137 @@ +import { useState, useCallback, useEffect } from "react"; +import { trpc } from "@/lib/trpc"; +import { PhaseAccordion } from "@/components/PhaseAccordion"; +import { PlanTasksFetcher } from "./PlanTasksFetcher"; +import type { SerializedTask } from "@/components/TaskRow"; +import type { TaskCounts, FlatTaskEntry } from "./ExecutionContext"; +import { sortByPriorityAndQueueTime } from "@codewalk-district/shared"; + +interface PhaseWithTasksProps { + phase: { + id: string; + initiativeId: string; + number: number; + name: string; + 
description: string | null;
+    status: string;
+    createdAt: string;
+    updatedAt: string;
+  };
+  defaultExpanded: boolean;
+  onTaskClick: (taskId: string) => void;
+  onTaskCounts: (phaseId: string, counts: TaskCounts) => void;
+  registerTasks: (phaseId: string, entries: FlatTaskEntry[]) => void;
+}
+
+/**
+ * Fetches the plans and phase dependencies for one phase, then delegates
+ * all rendering and task aggregation to PhaseWithTasksInner.
+ */
+export function PhaseWithTasks({
+  phase,
+  defaultExpanded,
+  onTaskClick,
+  onTaskCounts,
+  registerTasks,
+}: PhaseWithTasksProps) {
+  const plansQuery = trpc.listPlans.useQuery({ phaseId: phase.id });
+  const depsQuery = trpc.getPhaseDependencies.useQuery({ phaseId: phase.id });
+
+  const plans = plansQuery.data ?? [];
+  const planIds = plans.map((p) => p.id);
+
+  return (
+    <PhaseWithTasksInner
+      phase={phase}
+      planIds={planIds}
+      plansLoaded={plansQuery.isSuccess}
+      phaseDependencyIds={depsQuery.data?.dependencies ?? []}
+      defaultExpanded={defaultExpanded}
+      onTaskClick={onTaskClick}
+      onTaskCounts={onTaskCounts}
+      registerTasks={registerTasks}
+    />
+  );
+}
+
+interface PhaseWithTasksInnerProps {
+  phase: PhaseWithTasksProps["phase"];
+  planIds: string[];
+  plansLoaded: boolean;
+  phaseDependencyIds: string[];
+  defaultExpanded: boolean;
+  onTaskClick: (taskId: string) => void;
+  onTaskCounts: (phaseId: string, counts: TaskCounts) => void;
+  registerTasks: (phaseId: string, entries: FlatTaskEntry[]) => void;
+}
+
+/**
+ * Accumulates the tasks reported per-plan by PlanTasksFetcher, pushes
+ * derived counts/entries up via onTaskCounts/registerTasks, and renders
+ * the phase accordion. phaseDependencyIds is accepted but unused so far
+ * (bound to _phaseDependencyIds to make that explicit).
+ */
+function PhaseWithTasksInner({
+  phase,
+  planIds,
+  plansLoaded,
+  phaseDependencyIds: _phaseDependencyIds,
+  defaultExpanded,
+  onTaskClick,
+  onTaskCounts,
+  registerTasks,
+}: PhaseWithTasksInnerProps) {
+  const [planTasks, setPlanTasks] = useState<Record<string, SerializedTask[]>>(
+    {},
+  );
+
+  // The identity check assumes the query layer hands back a referentially
+  // stable array when data is unchanged; otherwise this just re-stores it.
+  const handlePlanTasks = useCallback(
+    (planId: string, tasks: SerializedTask[]) => {
+      setPlanTasks((prev) => {
+        if (prev[planId] === tasks) return prev;
+        return { ...prev, [planId]: tasks };
+      });
+    },
+    [],
+  );
+
+  // Propagate derived counts and entries outside the setState updater
+  // to avoid synchronous setState-inside-setState cascades.
+  useEffect(() => {
+    const allTasks = Object.values(planTasks).flat();
+    const complete = allTasks.filter(
+      (t) => t.status === "completed",
+    ).length;
+    onTaskCounts(phase.id, { complete, total: allTasks.length });
+
+    const entries: FlatTaskEntry[] = allTasks.map((task) => ({
+      task,
+      phaseName: `Phase ${phase.number}: ${phase.name}`,
+      agentName: null,
+      blockedBy: [],
+      dependents: [],
+    }));
+    registerTasks(phase.id, entries);
+  }, [planTasks, phase.id, phase.number, phase.name, onTaskCounts, registerTasks]);
+
+  const allTasks = planIds.flatMap((pid) => planTasks[pid] ?? []);
+  const sortedTasks = sortByPriorityAndQueueTime(allTasks);
+  // agentName/blockedBy are placeholders — not wired up yet.
+  const taskEntries = sortedTasks.map((task) => ({
+    task,
+    agentName: null as string | null,
+    blockedBy: [] as Array<{ name: string; status: string }>,
+  }));
+
+  const phaseDeps: Array<{ name: string; status: string }> = [];
+
+  return (
+    <>
+      {plansLoaded &&
+        planIds.map((planId) => (
+          <PlanTasksFetcher
+            key={planId}
+            planId={planId}
+            onTasks={handlePlanTasks}
+          />
+        ))}
+
+      <PhaseAccordion
+        phase={phase}
+        tasks={taskEntries}
+        defaultExpanded={defaultExpanded}
+        phaseDependencies={phaseDeps}
+        onTaskClick={onTaskClick}
+      />
+    </>
+  );
+}
\ No newline at end of file
diff --git a/packages/web/src/components/execution/PhasesList.tsx b/packages/web/src/components/execution/PhasesList.tsx
new file mode 100644
index 0000000..934ea25
--- /dev/null
+++ b/packages/web/src/components/execution/PhasesList.tsx
@@ -0,0 +1,74 @@
+import { Skeleton } from "@/components/Skeleton";
+import { useExecutionContext, type PhaseData } from "./ExecutionContext";
+import { PhaseWithTasks } from "./PhaseWithTasks";
+import { BreakdownSection } from "./BreakdownSection";
+
+interface PhasesListProps {
+  initiativeId: string;
+  phases: PhaseData[];
+  phasesLoading: boolean;
+  phasesLoaded: boolean;
+}
+
+/**
+ * Renders the initiative's phases: skeletons while loading, the breakdown
+ * prompt when no phases exist yet, otherwise one PhaseWithTasks per phase
+ * with the first incomplete phase expanded by default.
+ */
+export function PhasesList({
+  initiativeId,
+  phases,
+  phasesLoading,
+  phasesLoaded,
+}: PhasesListProps) {
+  const {
setSelectedTaskId, handleTaskCounts, handleRegisterTasks } =
+    useExecutionContext();
+
+  const firstIncompletePhaseIndex = phases.findIndex(
+    (p) => p.status !== "completed",
+  );
+
+  if (phasesLoading) {
+    return (
+      <div className="space-y-1 pt-3">
+        {Array.from({ length: 3 }).map((_, i) => (
+          <Skeleton key={i} className="h-10 w-full" />
+        ))}
+      </div>
+    );
+  }
+
+  if (phasesLoaded && phases.length === 0) {
+    return (
+      <BreakdownSection
+        initiativeId={initiativeId}
+        phasesLoaded={phasesLoaded}
+        phases={phases}
+      />
+    );
+  }
+
+  return (
+    <>
+      {phasesLoaded &&
+        phases.map((phase, idx) => {
+          // String() coerces createdAt/updatedAt into the string shape
+          // PhaseWithTasks expects — presumably they may arrive as Date
+          // objects from the transport; confirm against the router.
+          const serializedPhase = {
+            id: phase.id,
+            initiativeId: phase.initiativeId,
+            number: phase.number,
+            name: phase.name,
+            description: phase.description,
+            status: phase.status,
+            createdAt: String(phase.createdAt),
+            updatedAt: String(phase.updatedAt),
+          };
+
+          return (
+            <PhaseWithTasks
+              key={phase.id}
+              phase={serializedPhase}
+              defaultExpanded={idx === firstIncompletePhaseIndex}
+              onTaskClick={setSelectedTaskId}
+              onTaskCounts={handleTaskCounts}
+              registerTasks={handleRegisterTasks}
+            />
+          );
+        })}
+    </>
+  );
+}
\ No newline at end of file
diff --git a/packages/web/src/components/execution/PlanTasksFetcher.tsx b/packages/web/src/components/execution/PlanTasksFetcher.tsx
new file mode 100644
index 0000000..46e829e
--- /dev/null
+++ b/packages/web/src/components/execution/PlanTasksFetcher.tsx
@@ -0,0 +1,20 @@
+import { useEffect } from "react";
+import { trpc } from "@/lib/trpc";
+import type { SerializedTask } from "@/components/TaskRow";
+
+interface PlanTasksFetcherProps {
+  planId: string;
+  onTasks: (planId: string, tasks: SerializedTask[]) => void;
+}
+
+/**
+ * Render-less component: fetches one plan's tasks and reports them to the
+ * parent via onTasks whenever fresh data arrives.
+ */
+export function PlanTasksFetcher({ planId, onTasks }: PlanTasksFetcherProps) {
+  const tasksQuery = trpc.listTasks.useQuery({ planId });
+
+  useEffect(() => {
+    if (tasksQuery.data) {
+      // NOTE(review): `as unknown as SerializedTask[]` bypasses type
+      // checking entirely — align the router's output type with
+      // SerializedTask so the double assertion can be dropped.
+      onTasks(planId, tasksQuery.data as unknown as SerializedTask[]);
+    }
+  }, [tasksQuery.data, planId, onTasks]);
+
+  return
null;
+}
\ No newline at end of file
diff --git a/packages/web/src/components/execution/ProgressSidebar.tsx b/packages/web/src/components/execution/ProgressSidebar.tsx
new file mode 100644
index 0000000..e52c9ae
--- /dev/null
+++ b/packages/web/src/components/execution/ProgressSidebar.tsx
@@ -0,0 +1,28 @@
+import { ProgressPanel } from "@/components/ProgressPanel";
+import { DecisionList } from "@/components/DecisionList";
+import { useExecutionContext, type PhaseData } from "./ExecutionContext";
+
+interface ProgressSidebarProps {
+  phases: PhaseData[];
+}
+
+/**
+ * Right-hand sidebar: phase/task progress summary plus the decision list
+ * (currently always empty — placeholder until decisions are wired up).
+ */
+export function ProgressSidebar({ phases }: ProgressSidebarProps) {
+  const { tasksComplete, tasksTotal } = useExecutionContext();
+
+  const phasesComplete = phases.filter(
+    (p) => p.status === "completed",
+  ).length;
+
+  return (
+    <div className="space-y-6">
+      <ProgressPanel
+        phasesComplete={phasesComplete}
+        phasesTotal={phases.length}
+        tasksComplete={tasksComplete}
+        tasksTotal={tasksTotal}
+      />
+
+      <DecisionList decisions={[]} />
+    </div>
+  );
+}
\ No newline at end of file
diff --git a/packages/web/src/components/execution/TaskModal.tsx b/packages/web/src/components/execution/TaskModal.tsx
new file mode 100644
index 0000000..d6c235a
--- /dev/null
+++ b/packages/web/src/components/execution/TaskModal.tsx
@@ -0,0 +1,34 @@
+import { useCallback } from "react";
+import { TaskDetailModal } from "@/components/TaskDetailModal";
+import { useExecutionContext } from "./ExecutionContext";
+import { trpc } from "@/lib/trpc";
+
+/**
+ * Detail modal for the currently selected task. Selection lives in the
+ * execution context, so clearing it both closes the modal and resets the
+ * shared selection state.
+ */
+export function TaskModal() {
+  const { selectedEntry, setSelectedTaskId } = useExecutionContext();
+  const queueTaskMutation = trpc.queueTask.useMutation();
+
+  const handleQueueTask = useCallback(
+    (taskId: string) => {
+      queueTaskMutation.mutate({ taskId });
+      setSelectedTaskId(null);
+    },
+    [queueTaskMutation, setSelectedTaskId],
+  );
+
+  const handleClose = useCallback(() => {
+    setSelectedTaskId(null);
+  }, [setSelectedTaskId]);
+
+  return (
+    <TaskDetailModal
task={selectedEntry?.task ?? null}
+      phaseName={selectedEntry?.phaseName ?? ""}
+      agentName={selectedEntry?.agentName ?? null}
+      dependencies={selectedEntry?.blockedBy ?? []}
+      dependents={selectedEntry?.dependents ?? []}
+      onClose={handleClose}
+      onQueueTask={handleQueueTask}
+      onStopTask={handleClose}
+    />
+  );
+}
\ No newline at end of file
diff --git a/packages/web/src/components/execution/index.ts b/packages/web/src/components/execution/index.ts
new file mode 100644
index 0000000..b16b156
--- /dev/null
+++ b/packages/web/src/components/execution/index.ts
@@ -0,0 +1,9 @@
+// Barrel for the execution view: provider/context, phase list widgets,
+// sidebar, and the task detail modal.
+export { ExecutionProvider, useExecutionContext } from "./ExecutionContext";
+export { BreakdownSection } from "./BreakdownSection";
+export { PhaseActions } from "./PhaseActions";
+export { PhasesList } from "./PhasesList";
+export { PhaseWithTasks } from "./PhaseWithTasks";
+export { PlanTasksFetcher } from "./PlanTasksFetcher";
+export { ProgressSidebar } from "./ProgressSidebar";
+export { TaskModal } from "./TaskModal";
+export type { TaskCounts, FlatTaskEntry, PhaseData } from "./ExecutionContext";
\ No newline at end of file
diff --git a/packages/web/src/hooks/index.ts b/packages/web/src/hooks/index.ts
new file mode 100644
index 0000000..42225c7
--- /dev/null
+++ b/packages/web/src/hooks/index.ts
@@ -0,0 +1,18 @@
+/**
+ * Shared React hooks for the Codewalk District frontend.
+ *
+ * This module provides reusable hooks for common patterns like
+ * debouncing, subscription management, and agent interactions.
+ */
+
+export { useAutoSave } from './useAutoSave.js';
+export { useDebounce, useDebounceWithImmediate } from './useDebounce.js';
+export { useRefineAgent } from './useRefineAgent.js';
+export { useSubscriptionWithErrorHandling } from './useSubscriptionWithErrorHandling.js';
+
+export type {
+  RefineAgentState,
+  ContentProposal,
+  SpawnRefineAgentOptions,
+  UseRefineAgentResult,
+} from './useRefineAgent.js';
\ No newline at end of file
diff --git a/packages/web/src/hooks/useAutoSave.ts b/packages/web/src/hooks/useAutoSave.ts
new file mode 100644
index 0000000..a038b4c
--- /dev/null
+++ b/packages/web/src/hooks/useAutoSave.ts
@@ -0,0 +1,68 @@
+import { useRef, useCallback, useEffect } from "react";
+import { trpc } from "@/lib/trpc";
+
+interface UseAutoSaveOptions {
+  debounceMs?: number;
+  onSaved?: () => void;
+}
+
+/**
+ * Debounced auto-save for page updates. save() stores only the latest
+ * payload (later calls overwrite an unsent one) and arms a timer; flush()
+ * sends any pending payload immediately and returns the mutation promise
+ * (its rejection is pre-swallowed for timer-driven calls).
+ */
+export function useAutoSave({ debounceMs = 1000, onSaved }: UseAutoSaveOptions = {}) {
+  const updateMutation = trpc.updatePage.useMutation({ onSuccess: onSaved });
+  const timerRef = useRef<ReturnType<typeof setTimeout> | null>(null);
+  const pendingRef = useRef<{
+    id: string;
+    title?: string;
+    content?: string | null;
+  } | null>(null);
+
+  const flush = useCallback(() => {
+    if (timerRef.current) {
+      clearTimeout(timerRef.current);
+      timerRef.current = null;
+    }
+    if (pendingRef.current) {
+      const data = pendingRef.current;
+      pendingRef.current = null;
+      const promise = updateMutation.mutateAsync(data);
+      // Prevent unhandled rejection when called from debounce timer
+      promise.catch(() => {});
+      return promise;
+    }
+    return Promise.resolve();
+  }, [updateMutation]);
+
+  const save = useCallback(
+    (id: string, data: { title?: string; content?: string | null }) => {
+      pendingRef.current = { id, ...data };
+
+      if (timerRef.current) {
+        clearTimeout(timerRef.current);
+      }
+
+      timerRef.current = setTimeout(() => void flush(), debounceMs);
+    },
+    [debounceMs, flush],
+  );
+
+  // Flush on unmount
+  useEffect(() => {
+    return () => {
+      if
(timerRef.current) {
+        clearTimeout(timerRef.current);
+      }
+      if (pendingRef.current) {
+        // Fire off the save — mutation will complete asynchronously
+        const data = pendingRef.current;
+        pendingRef.current = null;
+        updateMutation.mutate(data);
+      }
+    };
+    // eslint-disable-next-line react-hooks/exhaustive-deps
+  }, []);
+
+  return {
+    save,
+    flush,
+    isSaving: updateMutation.isPending,
+  };
+}
diff --git a/packages/web/src/hooks/useDebounce.ts b/packages/web/src/hooks/useDebounce.ts
new file mode 100644
index 0000000..8469f86
--- /dev/null
+++ b/packages/web/src/hooks/useDebounce.ts
@@ -0,0 +1,157 @@
+import { useEffect, useState, useRef, useCallback } from 'react';
+
+/**
+ * Hook that debounces a value, delaying updates until after a specified delay.
+ *
+ * Useful for delaying API calls, search queries, or other expensive operations
+ * until the user has stopped typing or interacting.
+ *
+ * @param value - The value to debounce
+ * @param delayMs - Delay in milliseconds (default: 500)
+ * @returns The debounced value
+ *
+ * @example
+ * ```tsx
+ * function SearchInput() {
+ *   const [query, setQuery] = useState('');
+ *   const debouncedQuery = useDebounce(query, 300);
+ *
+ *   // This effect will only run when debouncedQuery changes
+ *   useEffect(() => {
+ *     if (debouncedQuery) {
+ *       performSearch(debouncedQuery);
+ *     }
+ *   }, [debouncedQuery]);
+ *
+ *   return (
+ *     <input
+ *       value={query}
+ *       onChange={(e) => setQuery(e.target.value)}
+ *       placeholder="Search..."
+ *     />
+ *   );
+ * }
+ * ```
+ */
+export function useDebounce<T>(value: T, delayMs: number = 500): T {
+  const [debouncedValue, setDebouncedValue] = useState<T>(value);
+  const timeoutRef = useRef<ReturnType<typeof setTimeout> | null>(null);
+
+  useEffect(() => {
+    // Clear existing timeout
+    if (timeoutRef.current) {
+      clearTimeout(timeoutRef.current);
+    }
+
+    // Set new timeout
+    timeoutRef.current = setTimeout(() => {
+      setDebouncedValue(value);
+    }, delayMs);
+
+    // This cleanup runs before every re-run AND on unmount, so no
+    // separate unmount-only effect is needed.
+    return () => {
+      if (timeoutRef.current) {
+        clearTimeout(timeoutRef.current);
+      }
+    };
+  }, [value, delayMs]);
+
+  return debouncedValue;
+}
+
+/**
+ * Alternative debounce hook that also provides immediate control.
+ *
+ * Returns both the debounced value and a function to immediately update it,
+ * useful when you need to bypass the debounce in certain cases.
+ *
+ * @param value - The value to debounce
+ * @param delayMs - Delay in milliseconds (default: 500)
+ * @returns Object with debouncedValue and setImmediate function
+ *
+ * @example
+ * ```tsx
+ * function AutoSaveForm() {
+ *   const [formData, setFormData] = useState({ title: '', content: '' });
+ *   const { debouncedValue: debouncedFormData, setImmediate } = useDebounceWithImmediate(formData, 1000);
+ *
+ *   // Auto-save after 1 second of no changes
+ *   useEffect(() => {
+ *     saveFormData(debouncedFormData);
+ *   }, [debouncedFormData]);
+ *
+ *   const handleSubmit = () => {
+ *     // Immediately save without waiting for debounce
+ *     setImmediate(formData);
+ *     submitForm(formData);
+ *   };
+ *
+ *   return (
+ *     <form onSubmit={handleSubmit}>
+ *       <input
+ *         value={formData.title}
+ *         onChange={(e) => setFormData(prev => ({ ...prev, title: e.target.value }))}
+ *       />
+ *       <button type="submit">Submit</button>
+ *     </form>
+ *   );
+ * }
+ * ```
+ */
+export function useDebounceWithImmediate<T>(value: T, delayMs: number = 500) {
+  const [debouncedValue, setDebouncedValue] = useState<T>(value);
+  const timeoutRef = useRef<ReturnType<typeof setTimeout> | null>(null);
+
+  useEffect(() => {
+    // Clear existing timeout
+    if (timeoutRef.current) {
+      clearTimeout(timeoutRef.current);
+    }
+
+    // Set new timeout
+    timeoutRef.current = setTimeout(() => {
+      setDebouncedValue(value);
+    }, delayMs);
+
+    // Cleanup covers both re-runs and unmount; the former duplicate
+    // unmount-only effect was redundant and has been removed.
+    return () => {
+      if (timeoutRef.current) {
+        clearTimeout(timeoutRef.current);
+      }
+    };
+  }, [value, delayMs]);
+
+  // Memoized so consumers can safely list setImmediate in effect deps
+  // without re-triggering those effects every render.
+  const setImmediate = useCallback((newValue: T) => {
+    // Clear pending timeout
+    if (timeoutRef.current) {
+      clearTimeout(timeoutRef.current);
+      timeoutRef.current = null;
+    }
+
+    // Immediately update debounced value
+    setDebouncedValue(newValue);
+  }, []);
+
+  return {
+    debouncedValue,
+    setImmediate,
+  };
+}
\ No newline at
end of file
diff --git a/packages/web/src/hooks/useRefineAgent.ts b/packages/web/src/hooks/useRefineAgent.ts
new file mode 100644
index 0000000..5f9d657
--- /dev/null
+++ b/packages/web/src/hooks/useRefineAgent.ts
@@ -0,0 +1,253 @@
+import { useMemo, useCallback, useRef } from 'react';
+import { trpc } from '@/lib/trpc';
+import type { Agent, PendingQuestions } from '@codewalk-district/shared';
+
+export type RefineAgentState = 'none' | 'running' | 'waiting' | 'completed' | 'crashed';
+
+export interface ContentProposal {
+  pageId: string;
+  pageTitle: string;
+  summary: string;
+  markdown: string;
+}
+
+export interface SpawnRefineAgentOptions {
+  initiativeId: string;
+  instruction?: string;
+}
+
+export interface UseRefineAgentResult {
+  /** Current refine agent for the initiative */
+  agent: Agent | null;
+  /** Current state of the refine agent */
+  state: RefineAgentState;
+  /** Questions from the agent (when state is 'waiting') */
+  questions: PendingQuestions | null;
+  /** Parsed content proposals (when state is 'completed') */
+  proposals: ContentProposal[] | null;
+  /** Raw result message (when state is 'completed') */
+  result: string | null;
+  /** Mutation for spawning a new refine agent */
+  spawn: {
+    mutate: (options: SpawnRefineAgentOptions) => void;
+    isPending: boolean;
+    error: Error | null;
+  };
+  /** Mutation for resuming agent with answers */
+  resume: {
+    mutate: (answers: Record<string, string>) => void;
+    isPending: boolean;
+    error: Error | null;
+  };
+  /** Whether any queries are loading */
+  isLoading: boolean;
+  /** Function to refresh agent data */
+  refresh: () => void;
+}
+
+/**
+ * Hook for managing refine agents for a specific initiative.
+ *
+ * Encapsulates the logic for finding, spawning, and interacting with refine agents
+ * that analyze and suggest improvements to initiative content.
+ *
+ * @param initiativeId - The ID of the initiative to manage refine agents for
+ * @returns Object with agent state, mutations, and helper functions
+ *
+ * @example
+ * ```tsx
+ * function RefineSection({ initiativeId }: { initiativeId: string }) {
+ *   const {
+ *     state,
+ *     agent,
+ *     questions,
+ *     proposals,
+ *     spawn,
+ *     resume,
+ *     refresh
+ *   } = useRefineAgent(initiativeId);
+ *
+ *   const handleSpawn = () => {
+ *     spawn.mutate({
+ *       initiativeId,
+ *       instruction: 'Focus on clarity and structure'
+ *     });
+ *   };
+ *
+ *   if (state === 'none') {
+ *     return (
+ *       <button onClick={handleSpawn} disabled={spawn.isPending}>
+ *         Start Refine Agent
+ *       </button>
+ *     );
+ *   }
+ *
+ *   if (state === 'waiting' && questions) {
+ *     return (
+ *       <QuestionForm
+ *         questions={questions.questions}
+ *         onSubmit={(answers) => resume.mutate(answers)}
+ *         isSubmitting={resume.isPending}
+ *       />
+ *     );
+ *   }
+ *
+ *   if (state === 'completed' && proposals) {
+ *     return <ProposalReview proposals={proposals} onDismiss={refresh} />;
+ *   }
+ *
+ *   return <div>Agent is {state}...</div>;
+ * }
+ * ```
+ */
+export function useRefineAgent(initiativeId: string): UseRefineAgentResult {
+  const utils = trpc.useUtils();
+
+  // Query all agents and find the active refine agent
+  const agentsQuery = trpc.listAgents.useQuery();
+  const agents = agentsQuery.data ?? [];
+
+  const agent = useMemo(() => {
+    // Find the most recent refine agent for this initiative
+    const candidates = agents
+      .filter(
+        (a) =>
+          a.mode === 'refine' &&
+          a.initiativeId === initiativeId &&
+          ['running', 'waiting_for_input', 'idle', 'crashed'].includes(a.status) &&
+          !a.userDismissedAt, // Exclude dismissed agents
+      )
+      .sort(
+        (a, b) =>
+          new Date(b.createdAt).getTime() - new Date(a.createdAt).getTime(),
+      );
+    return candidates[0] ??
null;
+  }, [agents, initiativeId]);
+
+  const state: RefineAgentState = useMemo(() => {
+    if (!agent) return 'none';
+    switch (agent.status) {
+      case 'running':
+        return 'running';
+      case 'waiting_for_input':
+        return 'waiting';
+      case 'idle':
+        return 'completed';
+      case 'crashed':
+        return 'crashed';
+      default:
+        return 'none';
+    }
+  }, [agent]);
+
+  // Fetch questions when waiting for input
+  const questionsQuery = trpc.getAgentQuestions.useQuery(
+    { id: agent?.id ?? '' },
+    { enabled: state === 'waiting' && !!agent },
+  );
+
+  // Fetch result when completed
+  const resultQuery = trpc.getAgentResult.useQuery(
+    { id: agent?.id ?? '' },
+    { enabled: state === 'completed' && !!agent },
+  );
+
+  // Parse proposals from result.
+  // NOTE(review): JSON.parse returns `any`, so `parsed.proposals` and its
+  // element shapes are trusted as-is — consider schema validation before
+  // mapping.
+  const { proposals, result } = useMemo(() => {
+    if (!resultQuery.data?.success || !resultQuery.data.message) {
+      return { proposals: null, result: null };
+    }
+
+    const message = resultQuery.data.message;
+
+    try {
+      const parsed = JSON.parse(message);
+      if (parsed.proposals && Array.isArray(parsed.proposals)) {
+        const proposals: ContentProposal[] = parsed.proposals.map(
+          (p: { pageId: string; title?: string; pageTitle?: string; summary: string; body?: string; markdown?: string }) => ({
+            pageId: p.pageId,
+            pageTitle: p.pageTitle ?? p.title ?? '',
+            summary: p.summary,
+            markdown: p.markdown ?? p.body ?? '',
+          }),
+        );
+        return { proposals, result: message };
+      }
+    } catch {
+      // Not JSON — treat as regular result
+    }
+
+    return { proposals: null, result: message };
+  }, [resultQuery.data]);
+
+  // Spawn mutation
+  const spawnMutation = trpc.spawnArchitectRefine.useMutation({
+    onSuccess: () => {
+      void utils.listAgents.invalidate();
+    },
+  });
+
+  // Resume mutation
+  const resumeMutation = trpc.resumeAgent.useMutation({
+    onSuccess: () => {
+      void utils.listAgents.invalidate();
+    },
+  });
+
+  // Keep mutation functions in refs so the returned spawn/resume objects are
+  // stable across renders. tRPC mutation objects change identity every render,
+  // which cascades into unstable callbacks → unstable props → Radix Dialog
+  // re-renders that trigger the React 19 compose-refs infinite loop.
+  const spawnMutateRef = useRef(spawnMutation.mutate);
+  spawnMutateRef.current = spawnMutation.mutate;
+  const agentRef = useRef(agent);
+  agentRef.current = agent;
+  const resumeMutateRef = useRef(resumeMutation.mutate);
+  resumeMutateRef.current = resumeMutation.mutate;
+
+  // `||` (not `??`) is deliberate: an all-whitespace instruction trims to
+  // '' and is sent as undefined.
+  const spawnFn = useCallback(({ initiativeId, instruction }: SpawnRefineAgentOptions) => {
+    spawnMutateRef.current({
+      initiativeId,
+      instruction: instruction?.trim() || undefined,
+    });
+  }, []);
+
+  const spawn = useMemo(() => ({
+    mutate: spawnFn,
+    isPending: spawnMutation.isPending,
+    error: spawnMutation.error,
+  }), [spawnFn, spawnMutation.isPending, spawnMutation.error]);
+
+  const resumeFn = useCallback((answers: Record<string, string>) => {
+    const a = agentRef.current;
+    if (a) {
+      resumeMutateRef.current({ id: a.id, answers });
+    }
+  }, []);
+
+  const resume = useMemo(() => ({
+    mutate: resumeFn,
+    isPending: resumeMutation.isPending,
+    error: resumeMutation.error,
+  }), [resumeFn, resumeMutation.isPending, resumeMutation.error]);
+
+  const refresh = useCallback(() => {
+    void utils.listAgents.invalidate();
+  }, [utils]);
+
+  const isLoading = agentsQuery.isLoading ||
+    (state === 'waiting' && questionsQuery.isLoading) ||
+    (state === 'completed' && resultQuery.isLoading);
+
+  return {
+    agent,
+    state,
+    questions: questionsQuery.data ??
null,
+    proposals,
+    result,
+    spawn,
+    resume,
+    isLoading,
+    refresh,
+  };
+}
\ No newline at end of file
diff --git a/packages/web/src/hooks/useSpawnMutation.ts b/packages/web/src/hooks/useSpawnMutation.ts
new file mode 100644
index 0000000..2732272
--- /dev/null
+++ b/packages/web/src/hooks/useSpawnMutation.ts
@@ -0,0 +1,49 @@
+import { useCallback } from "react";
+import { toast } from "sonner";
+
+interface SpawnMutationOptions {
+  onSuccess?: () => void;
+  showToast?: boolean;
+  successMessage?: string;
+  errorMessage?: string;
+}
+
+/**
+ * Minimal structural shape of the tRPC/react-query mutation result this
+ * hook consumes. Typing it (instead of `any`) keeps the hook fully
+ * checkable while remaining assignable from any tRPC `useMutation` result.
+ */
+interface SpawnMutationResult<T> {
+  mutate: (params: T) => void;
+  isPending: boolean;
+  isError: boolean;
+  error: { message?: string } | null;
+}
+
+/**
+ * Wraps an architect-spawning tRPC mutation hook with shared toast
+ * handling and a stable `spawn` callback.
+ *
+ * @param mutationFn - A mutation HOOK (e.g. `trpc.x.useMutation`). It is
+ *   called during render, so it must be the same hook on every render
+ *   (rules of hooks).
+ * @param options - Toast configuration and an optional success callback.
+ */
+export function useSpawnMutation<T>(
+  mutationFn: (opts: {
+    onSuccess: () => void;
+    onError: () => void;
+  }) => SpawnMutationResult<T>,
+  options: SpawnMutationOptions = {}
+) {
+  const {
+    onSuccess,
+    showToast = true,
+    successMessage = "Architect spawned",
+    errorMessage = "Failed to spawn architect",
+  } = options;
+
+  const mutation = mutationFn({
+    onSuccess: () => {
+      if (showToast) {
+        toast.success(successMessage);
+      }
+      onSuccess?.();
+    },
+    onError: () => {
+      if (showToast) {
+        toast.error(errorMessage);
+      }
+    },
+  });
+
+  // Depend on `mutate` (stable in react-query) rather than the whole
+  // mutation object, whose identity changes every render and made the
+  // previous memoization a no-op.
+  const spawn = useCallback(
+    (params: T) => {
+      mutation.mutate(params);
+    },
+    [mutation.mutate]
+  );
+
+  return {
+    spawn,
+    isSpawning: mutation.isPending,
+    error: mutation.error?.message,
+    isError: mutation.isError,
+  };
+}
\ No newline at end of file
diff --git a/packages/web/src/hooks/useSubscriptionWithErrorHandling.ts b/packages/web/src/hooks/useSubscriptionWithErrorHandling.ts
new file mode 100644
index 0000000..b208582
--- /dev/null
+++ b/packages/web/src/hooks/useSubscriptionWithErrorHandling.ts
@@ -0,0 +1,180 @@
+import { useCallback, useEffect, useRef, useState } from 'react';
+import { trpc } from '@/lib/trpc';
+import type { SubscriptionEvent } from '@codewalk-district/shared';
+
+interface UseSubscriptionWithErrorHandlingOptions {
+  /** Called when subscription receives data */
+  onData?: (data: SubscriptionEvent) => void;
+  /** Called when subscription encounters an error */
+  onError?: (error: Error) => void;
+  /** Called when subscription starts */
+  onStarted?:
() => void; + /** Called when subscription stops */ + onStopped?: () => void; + /** Whether to automatically reconnect on errors (default: true) */ + autoReconnect?: boolean; + /** Delay before attempting reconnection in ms (default: 1000) */ + reconnectDelay?: number; + /** Maximum number of reconnection attempts (default: 5) */ + maxReconnectAttempts?: number; + /** Whether the subscription is enabled (default: true) */ + enabled?: boolean; +} + +interface SubscriptionState { + isConnected: boolean; + isConnecting: boolean; + error: Error | null; + reconnectAttempts: number; + lastEventId: string | null; +} + +/** + * Hook for managing tRPC subscriptions with error handling, reconnection, and cleanup. + * + * Provides automatic reconnection on connection failures, tracks connection state, + * and ensures proper cleanup on unmount. + */ +export function useSubscriptionWithErrorHandling( + subscription: () => ReturnType<typeof trpc.subscribeToEvents.useSubscription>, + options: UseSubscriptionWithErrorHandlingOptions = {} +) { + const { + autoReconnect = true, + reconnectDelay = 1000, + maxReconnectAttempts = 5, + enabled = true, + } = options; + + const [state, setState] = useState<SubscriptionState>({ + isConnected: false, + isConnecting: false, + error: null, + reconnectAttempts: 0, + lastEventId: null, + }); + + // Store callbacks in refs so they never appear in effect deps. + // Callers pass inline arrows that change identity every render — + // putting them in deps causes setState → re-render → new callback → effect re-fire → infinite loop. 
+ const callbacksRef = useRef(options); + callbacksRef.current = options; + + const reconnectAttemptsRef = useRef(0); + const reconnectTimeoutRef = useRef<ReturnType<typeof setTimeout> | null>(null); + const mountedRef = useRef(true); + + // Clear reconnect timeout on unmount + useEffect(() => { + mountedRef.current = true; + return () => { + mountedRef.current = false; + if (reconnectTimeoutRef.current) { + clearTimeout(reconnectTimeoutRef.current); + } + callbacksRef.current.onStopped?.(); + }; + }, []); + + const scheduleReconnect = useCallback(() => { + if (!autoReconnect || reconnectAttemptsRef.current >= maxReconnectAttempts || !mountedRef.current) { + return; + } + + reconnectTimeoutRef.current = setTimeout(() => { + if (mountedRef.current) { + reconnectAttemptsRef.current += 1; + setState(prev => ({ + ...prev, + isConnecting: true, + reconnectAttempts: reconnectAttemptsRef.current, + })); + } + }, reconnectDelay); + }, [autoReconnect, maxReconnectAttempts, reconnectDelay]); + + const subscriptionResult = subscription(); + + // Handle subscription state changes. + // Only depends on primitive/stable values — never on caller callbacks. + useEffect(() => { + if (!enabled) { + setState(prev => { + if (!prev.isConnected && !prev.isConnecting && prev.error === null) return prev; + return { ...prev, isConnected: false, isConnecting: false, error: null }; + }); + return; + } + + if (subscriptionResult.status === 'pending') { + setState(prev => { + if (prev.isConnecting && prev.error === null) return prev; + return { ...prev, isConnecting: true, error: null }; + }); + callbacksRef.current.onStarted?.(); + } else if (subscriptionResult.status === 'error') { + const error = subscriptionResult.error instanceof Error + ? 
subscriptionResult.error + : new Error('Subscription error'); + + setState(prev => ({ + ...prev, + isConnected: false, + isConnecting: false, + error, + })); + + callbacksRef.current.onError?.(error); + scheduleReconnect(); + } else if (subscriptionResult.status === 'success') { + reconnectAttemptsRef.current = 0; + setState(prev => { + if (prev.isConnected && !prev.isConnecting && prev.error === null && prev.reconnectAttempts === 0) return prev; + return { ...prev, isConnected: true, isConnecting: false, error: null, reconnectAttempts: 0 }; + }); + + if (reconnectTimeoutRef.current) { + clearTimeout(reconnectTimeoutRef.current); + reconnectTimeoutRef.current = null; + } + } + }, [enabled, subscriptionResult.status, subscriptionResult.error, scheduleReconnect]); + + // Handle incoming data + useEffect(() => { + if (subscriptionResult.data) { + setState(prev => ({ ...prev, lastEventId: subscriptionResult.data.id })); + callbacksRef.current.onData?.(subscriptionResult.data); + } + }, [subscriptionResult.data]); + + return { + ...state, + /** Manually trigger a reconnection attempt */ + reconnect: () => { + if (mountedRef.current) { + reconnectAttemptsRef.current = 0; + setState(prev => ({ + ...prev, + isConnecting: true, + error: null, + reconnectAttempts: 0, + })); + } + }, + /** Reset error state and reconnection attempts */ + reset: () => { + if (reconnectTimeoutRef.current) { + clearTimeout(reconnectTimeoutRef.current); + reconnectTimeoutRef.current = null; + } + reconnectAttemptsRef.current = 0; + setState(prev => ({ + ...prev, + error: null, + reconnectAttempts: 0, + isConnecting: false, + })); + }, + }; +} diff --git a/packages/web/src/index.css b/packages/web/src/index.css index 6333ca3..e22ff8a 100644 --- a/packages/web/src/index.css +++ b/packages/web/src/index.css @@ -60,3 +60,71 @@ min-height: 100vh; } } + +/* Notion-style page link blocks inside the editor */ +.page-link-block { + display: flex; + align-items: center; + gap: 0.5rem; + width: 100%; + 
padding: 0.2rem 0.25rem; + border-radius: 0.25rem; + cursor: pointer; + color: hsl(var(--foreground)); + font-size: 0.9375rem; + line-height: 1.4; + transition: background-color 0.15s; +} + +.page-link-block:hover { + background-color: hsl(var(--muted)); +} + +.page-link-block svg { + color: hsl(var(--muted-foreground)); + flex-shrink: 0; +} + +/* Block selection highlight */ +.ProseMirror .block-selected { + background-color: hsl(var(--primary) / 0.08); + border-radius: 0.25rem; + box-shadow: 0.375rem 0 0 hsl(var(--primary) / 0.08), -0.375rem 0 0 hsl(var(--primary) / 0.08); +} +.dark .ProseMirror .block-selected { + background-color: hsl(var(--primary) / 0.12); + box-shadow: 0.375rem 0 0 hsl(var(--primary) / 0.12), -0.375rem 0 0 hsl(var(--primary) / 0.12); +} + +/* Hide cursor and text selection during block selection mode */ +.ProseMirror.has-block-selection { + caret-color: transparent; +} +.ProseMirror.has-block-selection *::selection { + background: transparent; +} + +/* Notion-style placeholder on empty blocks */ +.ProseMirror .is-empty::before { + color: hsl(var(--muted-foreground)); + content: attr(data-placeholder); + float: left; + height: 0; + pointer-events: none; +} + +/* Table wrapper overflow */ +.ProseMirror .tableWrapper { overflow-x: auto; margin: 1em 0; } + +/* Cell positioning for resize handles */ +.ProseMirror table td, .ProseMirror table th { position: relative; min-width: 50px; vertical-align: top; } + +/* Column resize handle */ +.ProseMirror .column-resize-handle { position: absolute; right: -2px; top: 0; bottom: -2px; width: 4px; background-color: hsl(var(--primary) / 0.4); pointer-events: none; z-index: 20; } + +/* Resize cursor */ +.ProseMirror.resize-cursor { cursor: col-resize; } + +/* Selected cell highlight */ +.ProseMirror td.selectedCell, .ProseMirror th.selectedCell { background-color: hsl(var(--primary) / 0.08); } +.dark .ProseMirror td.selectedCell, .dark .ProseMirror th.selectedCell { background-color: hsl(var(--primary) / 
0.15); } diff --git a/packages/web/src/layouts/AppLayout.tsx b/packages/web/src/layouts/AppLayout.tsx index e623959..ae62724 100644 --- a/packages/web/src/layouts/AppLayout.tsx +++ b/packages/web/src/layouts/AppLayout.tsx @@ -2,7 +2,9 @@ import { Link } from '@tanstack/react-router' const navItems = [ { label: 'Initiatives', to: '/initiatives' }, + { label: 'Agents', to: '/agents' }, { label: 'Inbox', to: '/inbox' }, + { label: 'Settings', to: '/settings' }, ] as const export function AppLayout({ children }: { children: React.ReactNode }) { diff --git a/packages/web/src/lib/markdown-to-tiptap.ts b/packages/web/src/lib/markdown-to-tiptap.ts new file mode 100644 index 0000000..4c28903 --- /dev/null +++ b/packages/web/src/lib/markdown-to-tiptap.ts @@ -0,0 +1,217 @@ +/** + * Markdown to Tiptap JSON converter. + * + * Converts agent-produced markdown back into Tiptap JSON for page updates. + * Uses @tiptap/html's generateJSON to parse HTML into Tiptap nodes. + */ + +import { generateJSON } from '@tiptap/html'; +import StarterKit from '@tiptap/starter-kit'; +import { Table, TableRow, TableCell, TableHeader } from '@tiptap/extension-table'; + +/** + * Convert markdown string to Tiptap JSON document. + */ +export function markdownToTiptapJson(markdown: string): object { + const html = markdownToHtml(markdown); + return generateJSON(html, [StarterKit, Table, TableRow, TableCell, TableHeader]); +} + +/** + * Simple markdown → HTML converter covering StarterKit nodes. + * Handles: headings, paragraphs, bold, italic, code, code blocks, + * bullet lists, ordered lists, blockquotes, links, horizontal rules, tables. + */ +function markdownToHtml(md: string): string { + // Normalize line endings + let text = md.replace(/\r\n/g, '\n'); + + // Code blocks (fenced) + text = text.replace(/```(\w*)\n([\s\S]*?)```/g, (_match, lang, code) => { + const escaped = escapeHtml(code.replace(/\n$/, '')); + const langAttr = lang ? 
` class="language-${lang}"` : ''; + return `<pre><code${langAttr}>${escaped}</code></pre>`; + }); + + // Split into lines for block-level processing + const lines = text.split('\n'); + const htmlLines: string[] = []; + let i = 0; + + while (i < lines.length) { + const line = lines[i]; + + // Skip lines inside pre blocks (already handled) + if (line.startsWith('<pre>')) { + let block = line; + while (i < lines.length && !lines[i].includes('</pre>')) { + i++; + block += '\n' + lines[i]; + } + htmlLines.push(block); + i++; + continue; + } + + // Horizontal rule + if (/^---+$/.test(line.trim())) { + htmlLines.push('<hr>'); + i++; + continue; + } + + // Headings + const headingMatch = line.match(/^(#{1,6})\s+(.+)$/); + if (headingMatch) { + const level = headingMatch[1].length; + htmlLines.push(`<h${level}>${inlineMarkdown(headingMatch[2])}</h${level}>`); + i++; + continue; + } + + // Blockquote + if (line.startsWith('> ')) { + const quoteLines: string[] = []; + while (i < lines.length && lines[i].startsWith('> ')) { + quoteLines.push(lines[i].slice(2)); + i++; + } + htmlLines.push(`<blockquote><p>${inlineMarkdown(quoteLines.join(' '))}</p></blockquote>`); + continue; + } + + // Unordered list + if (/^[-*]\s+/.test(line)) { + const items: string[] = []; + while (i < lines.length && /^[-*]\s+/.test(lines[i])) { + items.push(lines[i].replace(/^[-*]\s+/, '')); + i++; + } + const lis = items.map((item) => `<li><p>${inlineMarkdown(item)}</p></li>`).join(''); + htmlLines.push(`<ul>${lis}</ul>`); + continue; + } + + // Ordered list + if (/^\d+\.\s+/.test(line)) { + const items: string[] = []; + while (i < lines.length && /^\d+\.\s+/.test(lines[i])) { + items.push(lines[i].replace(/^\d+\.\s+/, '')); + i++; + } + const lis = items.map((item) => `<li><p>${inlineMarkdown(item)}</p></li>`).join(''); + htmlLines.push(`<ol>${lis}</ol>`); + continue; + } + + // Table: current line has | and next line is a separator row + if (line.includes('|') && i + 1 < lines.length && 
/^\s*\|?\s*[-:]+[-| :]*$/.test(lines[i + 1])) { + const headerCells = parseTableRow(line); + i += 2; // skip header + separator + + const bodyRows: string[][] = []; + while (i < lines.length && lines[i].includes('|') && lines[i].trim() !== '') { + bodyRows.push(parseTableRow(lines[i])); + i++; + } + + const ths = headerCells.map((c) => `<th>${inlineMarkdown(c)}</th>`).join(''); + const thead = `<thead><tr>${ths}</tr></thead>`; + + let tbody = ''; + if (bodyRows.length > 0) { + const trs = bodyRows + .map((row) => { + const tds = row.map((c) => `<td>${inlineMarkdown(c)}</td>`).join(''); + return `<tr>${tds}</tr>`; + }) + .join(''); + tbody = `<tbody>${trs}</tbody>`; + } + + htmlLines.push(`<table>${thead}${tbody}</table>`); + continue; + } + + // Empty line + if (line.trim() === '') { + i++; + continue; + } + + // Paragraph (collect consecutive non-empty, non-block lines) + const paraLines: string[] = []; + while ( + i < lines.length && + lines[i].trim() !== '' && + !lines[i].startsWith('#') && + !lines[i].startsWith('> ') && + !/^[-*]\s+/.test(lines[i]) && + !/^\d+\.\s+/.test(lines[i]) && + !/^---+$/.test(lines[i].trim()) && + !lines[i].startsWith('<pre>') && + !lines[i].startsWith('```') && + !isTableStart(lines, i) + ) { + paraLines.push(lines[i]); + i++; + } + if (paraLines.length > 0) { + htmlLines.push(`<p>${inlineMarkdown(paraLines.join(' '))}</p>`); + } else { + i++; + } + } + + return htmlLines.join(''); +} + +/** + * Check if lines[i] starts a markdown table (has | and next line is separator). + */ +function isTableStart(lines: string[], i: number): boolean { + return ( + lines[i].includes('|') && + i + 1 < lines.length && + /^\s*\|?\s*[-:]+[-| :]*$/.test(lines[i + 1]) + ); +} + +/** + * Parse a markdown table row: strip leading/trailing pipes, split on |, trim cells. 
+ */ +function parseTableRow(line: string): string[] { + let trimmed = line.trim(); + if (trimmed.startsWith('|')) trimmed = trimmed.slice(1); + if (trimmed.endsWith('|')) trimmed = trimmed.slice(0, -1); + return trimmed.split('|').map((c) => c.trim()); +} + +/** + * Process inline markdown: bold, italic, inline code, links. + */ +function inlineMarkdown(text: string): string { + let result = escapeHtml(text); + + // Inline code (must come before bold/italic to avoid conflicts) + result = result.replace(/`([^`]+)`/g, '<code>$1</code>'); + + // Bold + result = result.replace(/\*\*(.+?)\*\*/g, '<strong>$1</strong>'); + + // Italic + result = result.replace(/\*(.+?)\*/g, '<em>$1</em>'); + + // Links [text](url) + result = result.replace(/\[([^\]]+)\]\(([^)]+)\)/g, '<a href="$2">$1</a>'); + + return result; +} + +function escapeHtml(text: string): string { + return text + .replace(/&/g, '&amp;') + .replace(/</g, '&lt;') + .replace(/>/g, '&gt;') + .replace(/"/g, '&quot;'); +} diff --git a/packages/web/src/lib/utils.ts b/packages/web/src/lib/utils.ts index 365058c..a4cec0f 100644 --- a/packages/web/src/lib/utils.ts +++ b/packages/web/src/lib/utils.ts @@ -4,3 +4,40 @@ import { twMerge } from "tailwind-merge"; export function cn(...inputs: ClassValue[]) { return twMerge(clsx(inputs)); } + +/** + * Format a date as relative time (e.g., "2 minutes ago", "1 hour ago") + */ +export function formatRelativeTime(dateString: string): string { + const date = new Date(dateString); + const now = new Date(); + const diffInMs = now.getTime() - date.getTime(); + const diffInSeconds = Math.floor(diffInMs / 1000); + + if (diffInSeconds < 60) { + return diffInSeconds <= 1 ? "just now" : `${diffInSeconds} seconds ago`; + } + + const diffInMinutes = Math.floor(diffInSeconds / 60); + if (diffInMinutes < 60) { + return diffInMinutes === 1 ? "1 minute ago" : `${diffInMinutes} minutes ago`; + } + + const diffInHours = Math.floor(diffInMinutes / 60); + if (diffInHours < 24) { + return diffInHours === 1 ? 
"1 hour ago" : `${diffInHours} hours ago`; + } + + const diffInDays = Math.floor(diffInHours / 24); + if (diffInDays < 30) { + return diffInDays === 1 ? "1 day ago" : `${diffInDays} days ago`; + } + + const diffInMonths = Math.floor(diffInDays / 30); + if (diffInMonths < 12) { + return diffInMonths === 1 ? "1 month ago" : `${diffInMonths} months ago`; + } + + const diffInYears = Math.floor(diffInMonths / 12); + return diffInYears === 1 ? "1 year ago" : `${diffInYears} years ago`; +} diff --git a/packages/web/src/routeTree.gen.ts b/packages/web/src/routeTree.gen.ts index 5ad0e2a..e14ea0a 100644 --- a/packages/web/src/routeTree.gen.ts +++ b/packages/web/src/routeTree.gen.ts @@ -9,26 +9,50 @@ // Additionally, you should also exclude this file from your linter and/or formatter to prevent it from being checked or modified. import { Route as rootRouteImport } from './routes/__root' +import { Route as SettingsRouteImport } from './routes/settings' import { Route as InboxRouteImport } from './routes/inbox' +import { Route as AgentsRouteImport } from './routes/agents' import { Route as IndexRouteImport } from './routes/index' +import { Route as SettingsIndexRouteImport } from './routes/settings/index' import { Route as InitiativesIndexRouteImport } from './routes/initiatives/index' +import { Route as SettingsHealthRouteImport } from './routes/settings/health' import { Route as InitiativesIdRouteImport } from './routes/initiatives/$id' +const SettingsRoute = SettingsRouteImport.update({ + id: '/settings', + path: '/settings', + getParentRoute: () => rootRouteImport, +} as any) const InboxRoute = InboxRouteImport.update({ id: '/inbox', path: '/inbox', getParentRoute: () => rootRouteImport, } as any) +const AgentsRoute = AgentsRouteImport.update({ + id: '/agents', + path: '/agents', + getParentRoute: () => rootRouteImport, +} as any) const IndexRoute = IndexRouteImport.update({ id: '/', path: '/', getParentRoute: () => rootRouteImport, } as any) +const SettingsIndexRoute = 
SettingsIndexRouteImport.update({ + id: '/', + path: '/', + getParentRoute: () => SettingsRoute, +} as any) const InitiativesIndexRoute = InitiativesIndexRouteImport.update({ id: '/initiatives/', path: '/initiatives/', getParentRoute: () => rootRouteImport, } as any) +const SettingsHealthRoute = SettingsHealthRouteImport.update({ + id: '/health', + path: '/health', + getParentRoute: () => SettingsRoute, +} as any) const InitiativesIdRoute = InitiativesIdRouteImport.update({ id: '/initiatives/$id', path: '/initiatives/$id', @@ -37,40 +61,84 @@ const InitiativesIdRoute = InitiativesIdRouteImport.update({ export interface FileRoutesByFullPath { '/': typeof IndexRoute + '/agents': typeof AgentsRoute '/inbox': typeof InboxRoute + '/settings': typeof SettingsRouteWithChildren '/initiatives/$id': typeof InitiativesIdRoute + '/settings/health': typeof SettingsHealthRoute '/initiatives/': typeof InitiativesIndexRoute + '/settings/': typeof SettingsIndexRoute } export interface FileRoutesByTo { '/': typeof IndexRoute + '/agents': typeof AgentsRoute '/inbox': typeof InboxRoute '/initiatives/$id': typeof InitiativesIdRoute + '/settings/health': typeof SettingsHealthRoute '/initiatives': typeof InitiativesIndexRoute + '/settings': typeof SettingsIndexRoute } export interface FileRoutesById { __root__: typeof rootRouteImport '/': typeof IndexRoute + '/agents': typeof AgentsRoute '/inbox': typeof InboxRoute + '/settings': typeof SettingsRouteWithChildren '/initiatives/$id': typeof InitiativesIdRoute + '/settings/health': typeof SettingsHealthRoute '/initiatives/': typeof InitiativesIndexRoute + '/settings/': typeof SettingsIndexRoute } export interface FileRouteTypes { fileRoutesByFullPath: FileRoutesByFullPath - fullPaths: '/' | '/inbox' | '/initiatives/$id' | '/initiatives/' + fullPaths: + | '/' + | '/agents' + | '/inbox' + | '/settings' + | '/initiatives/$id' + | '/settings/health' + | '/initiatives/' + | '/settings/' fileRoutesByTo: FileRoutesByTo - to: '/' | '/inbox' | 
'/initiatives/$id' | '/initiatives' - id: '__root__' | '/' | '/inbox' | '/initiatives/$id' | '/initiatives/' + to: + | '/' + | '/agents' + | '/inbox' + | '/initiatives/$id' + | '/settings/health' + | '/initiatives' + | '/settings' + id: + | '__root__' + | '/' + | '/agents' + | '/inbox' + | '/settings' + | '/initiatives/$id' + | '/settings/health' + | '/initiatives/' + | '/settings/' fileRoutesById: FileRoutesById } export interface RootRouteChildren { IndexRoute: typeof IndexRoute + AgentsRoute: typeof AgentsRoute InboxRoute: typeof InboxRoute + SettingsRoute: typeof SettingsRouteWithChildren InitiativesIdRoute: typeof InitiativesIdRoute InitiativesIndexRoute: typeof InitiativesIndexRoute } declare module '@tanstack/react-router' { interface FileRoutesByPath { + '/settings': { + id: '/settings' + path: '/settings' + fullPath: '/settings' + preLoaderRoute: typeof SettingsRouteImport + parentRoute: typeof rootRouteImport + } '/inbox': { id: '/inbox' path: '/inbox' @@ -78,6 +146,13 @@ declare module '@tanstack/react-router' { preLoaderRoute: typeof InboxRouteImport parentRoute: typeof rootRouteImport } + '/agents': { + id: '/agents' + path: '/agents' + fullPath: '/agents' + preLoaderRoute: typeof AgentsRouteImport + parentRoute: typeof rootRouteImport + } '/': { id: '/' path: '/' @@ -85,6 +160,13 @@ declare module '@tanstack/react-router' { preLoaderRoute: typeof IndexRouteImport parentRoute: typeof rootRouteImport } + '/settings/': { + id: '/settings/' + path: '/' + fullPath: '/settings/' + preLoaderRoute: typeof SettingsIndexRouteImport + parentRoute: typeof SettingsRoute + } '/initiatives/': { id: '/initiatives/' path: '/initiatives' @@ -92,6 +174,13 @@ declare module '@tanstack/react-router' { preLoaderRoute: typeof InitiativesIndexRouteImport parentRoute: typeof rootRouteImport } + '/settings/health': { + id: '/settings/health' + path: '/health' + fullPath: '/settings/health' + preLoaderRoute: typeof SettingsHealthRouteImport + parentRoute: typeof SettingsRoute + 
} '/initiatives/$id': { id: '/initiatives/$id' path: '/initiatives/$id' @@ -102,9 +191,25 @@ declare module '@tanstack/react-router' { } } +interface SettingsRouteChildren { + SettingsHealthRoute: typeof SettingsHealthRoute + SettingsIndexRoute: typeof SettingsIndexRoute +} + +const SettingsRouteChildren: SettingsRouteChildren = { + SettingsHealthRoute: SettingsHealthRoute, + SettingsIndexRoute: SettingsIndexRoute, +} + +const SettingsRouteWithChildren = SettingsRoute._addFileChildren( + SettingsRouteChildren, +) + const rootRouteChildren: RootRouteChildren = { IndexRoute: IndexRoute, + AgentsRoute: AgentsRoute, InboxRoute: InboxRoute, + SettingsRoute: SettingsRouteWithChildren, InitiativesIdRoute: InitiativesIdRoute, InitiativesIndexRoute: InitiativesIndexRoute, } diff --git a/packages/web/src/routes/agents.tsx b/packages/web/src/routes/agents.tsx new file mode 100644 index 0000000..1bd9b40 --- /dev/null +++ b/packages/web/src/routes/agents.tsx @@ -0,0 +1,196 @@ +import { useState } from "react"; +import { createFileRoute } from "@tanstack/react-router"; +import { AlertCircle, RefreshCw } from "lucide-react"; +import { Button } from "@/components/ui/button"; +import { Card } from "@/components/ui/card"; +import { Badge } from "@/components/ui/badge"; +import { Skeleton } from "@/components/Skeleton"; +import { toast } from "sonner"; +import { trpc } from "@/lib/trpc"; +import { AgentOutputViewer } from "@/components/AgentOutputViewer"; +import { formatRelativeTime } from "@/lib/utils"; +import { cn } from "@/lib/utils"; +import { useSubscriptionWithErrorHandling } from "@/hooks"; + +export const Route = createFileRoute("/agents")({ + component: AgentsPage, +}); + +function AgentsPage() { + const [selectedAgentId, setSelectedAgentId] = useState<string | null>(null); + + // Live updates: invalidate agents on agent events with robust error handling + const utils = trpc.useUtils(); + const subscription = useSubscriptionWithErrorHandling( + () => 
trpc.onAgentUpdate.useSubscription(undefined), + { + onData: () => { + void utils.listAgents.invalidate(); + }, + onError: (error) => { + toast.error("Live updates disconnected. Refresh to reconnect.", { + id: "sub-error", + duration: Infinity, + }); + console.error('Agent updates subscription error:', error); + }, + onStarted: () => { + // Clear any existing error toasts when reconnecting + toast.dismiss("sub-error"); + }, + autoReconnect: true, + maxReconnectAttempts: 5, + } + ); + + // Data fetching + const agentsQuery = trpc.listAgents.useQuery(); + + // Handlers + function handleRefresh() { + void utils.listAgents.invalidate(); + } + + // Loading state + if (agentsQuery.isLoading) { + return ( + <div className="space-y-4"> + <div className="flex items-center justify-between"> + <div className="flex items-center gap-2"> + <Skeleton className="h-6 w-20" /> + <Skeleton className="h-5 w-8 rounded-full" /> + </div> + <Skeleton className="h-8 w-20" /> + </div> + <div className="grid grid-cols-1 gap-6 lg:grid-cols-[300px_1fr]"> + <div className="space-y-2"> + {Array.from({ length: 5 }).map((_, i) => ( + <Card key={i} className="p-3"> + <div className="flex items-center gap-3"> + <Skeleton className="h-2 w-2 rounded-full" /> + <Skeleton className="h-4 w-24" /> + </div> + </Card> + ))} + </div> + <Skeleton className="h-96 rounded-lg" /> + </div> + </div> + ); + } + + // Error state + if (agentsQuery.isError) { + return ( + <div className="flex flex-col items-center justify-center gap-4 py-12"> + <AlertCircle className="h-8 w-8 text-destructive" /> + <p className="text-sm text-destructive"> + Failed to load agents: {agentsQuery.error?.message ?? "Unknown error"} + </p> + <Button variant="outline" size="sm" onClick={handleRefresh}> + Retry + </Button> + </div> + ); + } + + const agents = agentsQuery.data ?? []; + const selectedAgent = selectedAgentId + ? 
agents.find((a) => a.id === selectedAgentId) + : null; + + return ( + <div className="space-y-4"> + {/* Header */} + <div className="flex items-center justify-between"> + <div className="flex items-center gap-2"> + <h1 className="text-lg font-semibold">Agents</h1> + <Badge variant="secondary">{agents.length}</Badge> + </div> + <Button variant="outline" size="sm" onClick={handleRefresh}> + <RefreshCw className="mr-1.5 h-3.5 w-3.5" /> + Refresh + </Button> + </div> + + {/* Two-column layout */} + <div className="grid grid-cols-1 gap-6 lg:grid-cols-[300px_1fr]"> + {/* Left: Agent List */} + <div className="space-y-2"> + {agents.length === 0 ? ( + <div className="rounded-lg border border-dashed p-8 text-center"> + <p className="text-sm text-muted-foreground">No agents found</p> + </div> + ) : ( + agents.map((agent) => ( + <Card + key={agent.id} + className={cn( + "cursor-pointer p-3 transition-colors hover:bg-muted/50", + selectedAgentId === agent.id && "bg-muted" + )} + onClick={() => setSelectedAgentId(agent.id)} + > + <div className="flex items-center justify-between gap-2"> + <div className="flex items-center gap-2 min-w-0"> + <StatusDot status={agent.status} /> + <span className="truncate text-sm font-medium"> + {agent.name} + </span> + </div> + <div className="flex items-center gap-1.5 shrink-0"> + <Badge variant="outline" className="text-xs"> + {agent.provider} + </Badge> + <Badge variant="secondary" className="text-xs"> + {agent.mode} + </Badge> + </div> + </div> + <div className="mt-1 text-xs text-muted-foreground"> + {formatRelativeTime(String(agent.createdAt))} + </div> + </Card> + )) + )} + </div> + + {/* Right: Output Viewer */} + {selectedAgent ? 
( + <AgentOutputViewer agentId={selectedAgent.id} agentName={selectedAgent.name} /> + ) : ( + <div className="flex items-center justify-center rounded-lg border border-dashed p-8"> + <p className="text-sm text-muted-foreground"> + Select an agent to view output + </p> + </div> + )} + </div> + </div> + ); +} + +// --------------------------------------------------------------------------- +// Components +// --------------------------------------------------------------------------- + +function StatusDot({ status }: { status: string }) { + const colors: Record<string, string> = { + running: "bg-green-500", + waiting_for_input: "bg-yellow-500", + idle: "bg-zinc-400", + stopped: "bg-zinc-400", + crashed: "bg-red-500", + }; + return ( + <span + className={cn("h-2 w-2 rounded-full shrink-0", colors[status] ?? "bg-zinc-400")} + title={status} + /> + ); +} + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + diff --git a/packages/web/src/routes/inbox.tsx b/packages/web/src/routes/inbox.tsx index 2d884ed..d7ce8df 100644 --- a/packages/web/src/routes/inbox.tsx +++ b/packages/web/src/routes/inbox.tsx @@ -8,6 +8,7 @@ import { toast } from "sonner"; import { trpc } from "@/lib/trpc"; import { InboxList } from "@/components/InboxList"; import { QuestionForm } from "@/components/QuestionForm"; +import { formatRelativeTime } from "@/lib/utils"; export const Route = createFileRoute("/inbox")({ component: InboxPage, @@ -328,17 +329,3 @@ function InboxPage() { // Helpers // --------------------------------------------------------------------------- -function formatRelativeTime(isoDate: string): string { - const now = Date.now(); - const then = new Date(isoDate).getTime(); - const diffMs = now - then; - const diffSec = Math.floor(diffMs / 1000); - const diffMin = Math.floor(diffSec / 60); - const diffHr = Math.floor(diffMin / 60); - const diffDay = 
Math.floor(diffHr / 24); - - if (diffSec < 60) return "just now"; - if (diffMin < 60) return `${diffMin} min ago`; - if (diffHr < 24) return `${diffHr}h ago`; - return `${diffDay}d ago`; -} diff --git a/packages/web/src/routes/initiatives/$id.tsx b/packages/web/src/routes/initiatives/$id.tsx index 6a7081c..7bcac70 100644 --- a/packages/web/src/routes/initiatives/$id.tsx +++ b/packages/web/src/routes/initiatives/$id.tsx @@ -1,4 +1,4 @@ -import { useState, useCallback, useEffect } from "react"; +import { useState } from "react"; import { createFileRoute, useNavigate } from "@tanstack/react-router"; import { AlertCircle } from "lucide-react"; import { Button } from "@/components/ui/button"; @@ -6,247 +6,87 @@ import { Skeleton } from "@/components/Skeleton"; import { toast } from "sonner"; import { trpc } from "@/lib/trpc"; import { InitiativeHeader } from "@/components/InitiativeHeader"; -import { ProgressPanel } from "@/components/ProgressPanel"; -import { PhaseAccordion } from "@/components/PhaseAccordion"; -import { DecisionList } from "@/components/DecisionList"; -import { TaskDetailModal } from "@/components/TaskDetailModal"; -import type { SerializedTask } from "@/components/TaskRow"; +import { ContentTab } from "@/components/editor/ContentTab"; +import { ExecutionTab } from "@/components/ExecutionTab"; +import { useSubscriptionWithErrorHandling } from "@/hooks"; export const Route = createFileRoute("/initiatives/$id")({ component: InitiativeDetailPage, }); -// --------------------------------------------------------------------------- -// Types -// --------------------------------------------------------------------------- - -/** Aggregated task counts reported upward from PhaseWithTasks */ -interface TaskCounts { - complete: number; - total: number; -} - -/** Flat task entry with metadata needed for the modal */ -interface FlatTaskEntry { - task: SerializedTask; - phaseName: string; - agentName: string | null; - blockedBy: Array<{ name: string; status: string 
}>; - dependents: Array<{ name: string; status: string }>; -} - -// --------------------------------------------------------------------------- -// PhaseWithTasks — solves the "hooks inside loops" problem -// --------------------------------------------------------------------------- - -interface PhaseWithTasksProps { - phase: { - id: string; - initiativeId: string; - number: number; - name: string; - description: string | null; - status: string; - createdAt: string; - updatedAt: string; - }; - defaultExpanded: boolean; - onTaskClick: (taskId: string) => void; - onTaskCounts: (phaseId: string, counts: TaskCounts) => void; - registerTasks: (phaseId: string, entries: FlatTaskEntry[]) => void; -} - -function PhaseWithTasks({ - phase, - defaultExpanded, - onTaskClick, - onTaskCounts, - registerTasks, -}: PhaseWithTasksProps) { - // Fetch all plans for this phase - const plansQuery = trpc.listPlans.useQuery({ phaseId: phase.id }); - - // Fetch phase dependencies - const depsQuery = trpc.getPhaseDependencies.useQuery({ phaseId: phase.id }); - - const plans = plansQuery.data ?? []; - const planIds = plans.map((p) => p.id); - - return ( - <PhaseWithTasksInner - phase={phase} - planIds={planIds} - plansLoaded={plansQuery.isSuccess} - phaseDependencyIds={depsQuery.data?.dependencies ?? 
[]} - defaultExpanded={defaultExpanded} - onTaskClick={onTaskClick} - onTaskCounts={onTaskCounts} - registerTasks={registerTasks} - /> - ); -} - -// Inner component that fetches tasks for each plan — needs stable hook count -// Since planIds array changes, we fetch tasks per plan inside yet another child -interface PhaseWithTasksInnerProps { - phase: PhaseWithTasksProps["phase"]; - planIds: string[]; - plansLoaded: boolean; - phaseDependencyIds: string[]; - defaultExpanded: boolean; - onTaskClick: (taskId: string) => void; - onTaskCounts: (phaseId: string, counts: TaskCounts) => void; - registerTasks: (phaseId: string, entries: FlatTaskEntry[]) => void; -} - -function PhaseWithTasksInner({ - phase, - planIds, - plansLoaded, - phaseDependencyIds: _phaseDependencyIds, - defaultExpanded, - onTaskClick, - onTaskCounts, - registerTasks, -}: PhaseWithTasksInnerProps) { - // We can't call useQuery in a loop, so we render PlanTasksFetcher per plan - // and aggregate the results - const [planTasks, setPlanTasks] = useState<Record<string, SerializedTask[]>>( - {}, - ); - - const handlePlanTasks = useCallback( - (planId: string, tasks: SerializedTask[]) => { - setPlanTasks((prev) => { - // Skip if unchanged (same reference) - if (prev[planId] === tasks) return prev; - const next = { ...prev, [planId]: tasks }; - - // Aggregate all tasks across plans - const allTasks = Object.values(next).flat(); - const complete = allTasks.filter( - (t) => t.status === "completed", - ).length; - - // Report counts up - onTaskCounts(phase.id, { complete, total: allTasks.length }); - - // Register flat entries for the modal lookup - const entries: FlatTaskEntry[] = allTasks.map((task) => ({ - task, - phaseName: `Phase ${phase.number}: ${phase.name}`, - agentName: null, // No agent info from task data alone - blockedBy: [], // Simplified: no dependency lookup per task in v1 - dependents: [], - })); - registerTasks(phase.id, entries); - - return next; - }); - }, - [phase.id, phase.number, 
phase.name, onTaskCounts, registerTasks], - ); - - // Build task entries for PhaseAccordion - const allTasks = planIds.flatMap((pid) => planTasks[pid] ?? []); - const taskEntries = allTasks.map((task) => ({ - task, - agentName: null as string | null, - blockedBy: [] as Array<{ name: string; status: string }>, - })); - - // Phase-level dependencies (empty for now — would need to resolve IDs to names) - const phaseDeps: Array<{ name: string; status: string }> = []; - - return ( - <> - {/* Hidden fetchers — one per plan */} - {plansLoaded && - planIds.map((planId) => ( - <PlanTasksFetcher - key={planId} - planId={planId} - onTasks={handlePlanTasks} - /> - ))} - - <PhaseAccordion - phase={phase} - tasks={taskEntries} - defaultExpanded={defaultExpanded} - phaseDependencies={phaseDeps} - onTaskClick={onTaskClick} - /> - </> - ); -} - -// --------------------------------------------------------------------------- -// PlanTasksFetcher — fetches tasks for a single plan (stable hook count) -// --------------------------------------------------------------------------- - -interface PlanTasksFetcherProps { - planId: string; - onTasks: (planId: string, tasks: SerializedTask[]) => void; -} - -function PlanTasksFetcher({ planId, onTasks }: PlanTasksFetcherProps) { - const tasksQuery = trpc.listTasks.useQuery({ planId }); - - // Report tasks upward via useEffect (not during render) to avoid - // setState-during-render loops when the parent re-renders on state update. 
- useEffect(() => { - if (tasksQuery.data) { - onTasks(planId, tasksQuery.data as unknown as SerializedTask[]); - } - }, [tasksQuery.data, planId, onTasks]); - - return null; // Render nothing — this is a data-fetching component -} - -// --------------------------------------------------------------------------- -// Main Page Component -// --------------------------------------------------------------------------- +type Tab = "content" | "execution"; function InitiativeDetailPage() { const { id } = Route.useParams(); const navigate = useNavigate(); + const [activeTab, setActiveTab] = useState<Tab>("content"); - // Live updates: invalidate detail queries on task/phase and agent events + // Live updates: keep subscriptions at page level so they work across both tabs const utils = trpc.useUtils(); - trpc.onTaskUpdate.useSubscription(undefined, { - onData: () => { - void utils.listPhases.invalidate(); - void utils.listTasks.invalidate(); - void utils.listPlans.invalidate(); - }, - onError: () => { - toast.error("Live updates disconnected. Refresh to reconnect.", { - id: "sub-error", - duration: Infinity, - }); - }, - }); - trpc.onAgentUpdate.useSubscription(undefined, { - onData: () => { - void utils.listAgents.invalidate(); - }, - onError: () => { - toast.error("Live updates disconnected. 
Refresh to reconnect.", { - id: "sub-error", - duration: Infinity, - }); - }, - }); - // State - const [selectedTaskId, setSelectedTaskId] = useState<string | null>(null); - const [taskCountsByPhase, setTaskCountsByPhase] = useState< - Record<string, TaskCounts> - >({}); - const [tasksByPhase, setTasksByPhase] = useState< - Record<string, FlatTaskEntry[]> - >({}); + // Task updates subscription with robust error handling + useSubscriptionWithErrorHandling( + () => trpc.onTaskUpdate.useSubscription(undefined), + { + onData: () => { + void utils.listPhases.invalidate(); + void utils.listTasks.invalidate(); + void utils.listPlans.invalidate(); + }, + onError: (error) => { + toast.error("Live updates disconnected. Refresh to reconnect.", { + id: "sub-error", + duration: Infinity, + }); + console.error('Task updates subscription error:', error); + }, + onStarted: () => toast.dismiss("sub-error"), + autoReconnect: true, + maxReconnectAttempts: 5, + } + ); + + // Agent updates subscription with robust error handling + useSubscriptionWithErrorHandling( + () => trpc.onAgentUpdate.useSubscription(undefined), + { + onData: () => { + void utils.listAgents.invalidate(); + }, + onError: (error) => { + toast.error("Live updates disconnected. Refresh to reconnect.", { + id: "sub-error", + duration: Infinity, + }); + console.error('Agent updates subscription error:', error); + }, + onStarted: () => toast.dismiss("sub-error"), + autoReconnect: true, + maxReconnectAttempts: 5, + } + ); + + // Page updates subscription with robust error handling + useSubscriptionWithErrorHandling( + () => trpc.onPageUpdate.useSubscription(undefined), + { + onData: () => { + void utils.listPages.invalidate(); + void utils.getPage.invalidate(); + void utils.getRootPage.invalidate(); + }, + onError: (error) => { + toast.error("Live updates disconnected. 
Refresh to reconnect.", { + id: "sub-error", + duration: Infinity, + }); + console.error('Page updates subscription error:', error); + }, + onStarted: () => toast.dismiss("sub-error"), + autoReconnect: true, + maxReconnectAttempts: 5, + } + ); // tRPC queries const initiativeQuery = trpc.getInitiative.useQuery({ id }); @@ -255,94 +95,20 @@ function InitiativeDetailPage() { { enabled: !!initiativeQuery.data }, ); - // tRPC mutations - const queueTaskMutation = trpc.queueTask.useMutation(); - const queuePhaseMutation = trpc.queuePhase.useMutation(); - - // Callbacks for PhaseWithTasks - const handleTaskCounts = useCallback( - (phaseId: string, counts: TaskCounts) => { - setTaskCountsByPhase((prev) => { - if ( - prev[phaseId]?.complete === counts.complete && - prev[phaseId]?.total === counts.total - ) { - return prev; - } - return { ...prev, [phaseId]: counts }; - }); - }, - [], - ); - - const handleRegisterTasks = useCallback( - (phaseId: string, entries: FlatTaskEntry[]) => { - setTasksByPhase((prev) => { - if (prev[phaseId] === entries) return prev; - return { ...prev, [phaseId]: entries }; - }); - }, - [], - ); - - // Derived data - const phases = phasesQuery.data ?? []; - const phasesComplete = phases.filter( - (p) => p.status === "completed", - ).length; - - const allTaskCounts = Object.values(taskCountsByPhase); - const tasksComplete = allTaskCounts.reduce((s, c) => s + c.complete, 0); - const tasksTotal = allTaskCounts.reduce((s, c) => s + c.total, 0); - - // Find selected task across all phases - const allFlatTasks = Object.values(tasksByPhase).flat(); - const selectedEntry = selectedTaskId - ? allFlatTasks.find((e) => e.task.id === selectedTaskId) ?? 
null - : null; - - // Determine which phase should be expanded by default (first non-completed) - const firstIncompletePhaseIndex = phases.findIndex( - (p) => p.status !== "completed", - ); - - // Queue all pending phases - const handleQueueAll = useCallback(() => { - const pendingPhases = phases.filter((p) => p.status === "pending"); - for (const phase of pendingPhases) { - queuePhaseMutation.mutate({ phaseId: phase.id }); - } - }, [phases, queuePhaseMutation]); - - // Queue a single task - const handleQueueTask = useCallback( - (taskId: string) => { - queueTaskMutation.mutate({ taskId }); - setSelectedTaskId(null); - }, - [queueTaskMutation], - ); - // Loading state if (initiativeQuery.isLoading) { return ( <div className="space-y-6"> - {/* Header skeleton */} <div className="flex items-center gap-4"> <Skeleton className="h-4 w-4" /> <Skeleton className="h-7 w-64" /> <Skeleton className="h-5 w-20" /> </div> - - {/* Two-column grid skeleton */} <div className="grid grid-cols-1 gap-6 lg:grid-cols-[1fr_340px]"> - {/* Left: phase accordion skeletons */} <div className="space-y-1"> <Skeleton className="h-12 w-full rounded border" /> <Skeleton className="h-12 w-full rounded border" /> </div> - - {/* Right: ProgressPanel + DecisionList skeletons */} <div className="space-y-6"> <Skeleton className="h-24 w-full rounded" /> <Skeleton className="h-20 w-full rounded" /> @@ -376,109 +142,59 @@ function InitiativeDetailPage() { const initiative = initiativeQuery.data; if (!initiative) return null; - // tRPC serializes Date to string over JSON — cast to wire format const serializedInitiative = { id: initiative.id, name: initiative.name, status: initiative.status, - createdAt: String(initiative.createdAt), - updatedAt: String(initiative.updatedAt), }; - const hasPendingPhases = phases.some((p) => p.status === "pending"); + const projects = (initiative as { projects?: Array<{ id: string; name: string; url: string }> }).projects; + + const phases = phasesQuery.data ?? 
[]; return ( - <div className="space-y-6"> + <div className="space-y-3"> {/* Header */} <InitiativeHeader initiative={serializedInitiative} + projects={projects} onBack={() => navigate({ to: "/initiatives" })} /> - {/* Two-column layout */} - <div className="grid grid-cols-1 gap-6 lg:grid-cols-[1fr_340px]"> - {/* Left column: Phases */} - <div className="space-y-0"> - {/* Section header */} - <div className="flex items-center justify-between border-b border-border pb-3"> - <h2 className="text-lg font-semibold">Phases</h2> - <Button - variant="outline" - size="sm" - disabled={!hasPendingPhases} - onClick={handleQueueAll} - > - Queue All - </Button> - </div> - - {/* Phase loading */} - {phasesQuery.isLoading && ( - <div className="space-y-1 pt-3"> - {Array.from({ length: 3 }).map((_, i) => ( - <Skeleton key={i} className="h-10 w-full" /> - ))} - </div> - )} - - {/* Phases list */} - {phasesQuery.isSuccess && phases.length === 0 && ( - <div className="py-8 text-center text-muted-foreground"> - No phases yet - </div> - )} - - {phasesQuery.isSuccess && - phases.map((phase, idx) => { - // tRPC serializes Date to string over JSON — cast to wire format - const serializedPhase = { - id: phase.id, - initiativeId: phase.initiativeId, - number: phase.number, - name: phase.name, - description: phase.description, - status: phase.status, - createdAt: String(phase.createdAt), - updatedAt: String(phase.updatedAt), - }; - - return ( - <PhaseWithTasks - key={phase.id} - phase={serializedPhase} - defaultExpanded={idx === firstIncompletePhaseIndex} - onTaskClick={setSelectedTaskId} - onTaskCounts={handleTaskCounts} - registerTasks={handleRegisterTasks} - /> - ); - })} - </div> - - {/* Right column: Progress + Decisions */} - <div className="space-y-6"> - <ProgressPanel - phasesComplete={phasesComplete} - phasesTotal={phases.length} - tasksComplete={tasksComplete} - tasksTotal={tasksTotal} - /> - - <DecisionList decisions={[]} /> - </div> + {/* Tab bar */} + <div className="flex gap-1 
border-b border-border"> + <button + onClick={() => setActiveTab("content")} + className={`px-4 py-2 text-sm font-medium border-b-2 transition-colors ${ + activeTab === "content" + ? "border-primary text-foreground" + : "border-transparent text-muted-foreground hover:text-foreground" + }`} + > + Content + </button> + <button + onClick={() => setActiveTab("execution")} + className={`px-4 py-2 text-sm font-medium border-b-2 transition-colors ${ + activeTab === "execution" + ? "border-primary text-foreground" + : "border-transparent text-muted-foreground hover:text-foreground" + }`} + > + Execution + </button> </div> - {/* Task Detail Modal */} - <TaskDetailModal - task={selectedEntry?.task ?? null} - phaseName={selectedEntry?.phaseName ?? ""} - agentName={selectedEntry?.agentName ?? null} - dependencies={selectedEntry?.blockedBy ?? []} - dependents={selectedEntry?.dependents ?? []} - onClose={() => setSelectedTaskId(null)} - onQueueTask={handleQueueTask} - onStopTask={() => setSelectedTaskId(null)} - /> + {/* Tab content */} + {activeTab === "content" && <ContentTab initiativeId={id} initiativeName={initiative.name} />} + {activeTab === "execution" && ( + <ExecutionTab + initiativeId={id} + phases={phases} + phasesLoading={phasesQuery.isLoading} + phasesLoaded={phasesQuery.isSuccess} + /> + )} </div> ); } diff --git a/packages/web/src/routes/settings.tsx b/packages/web/src/routes/settings.tsx new file mode 100644 index 0000000..b7b4464 --- /dev/null +++ b/packages/web/src/routes/settings.tsx @@ -0,0 +1,35 @@ +import { createFileRoute, Link, Outlet } from '@tanstack/react-router' + +export const Route = createFileRoute('/settings')({ + component: SettingsLayout, +}) + +const settingsTabs = [ + { label: 'Health Check', to: '/settings/health' }, +] as const + +function SettingsLayout() { + return ( + <div className="space-y-4"> + <div className="flex items-center justify-between"> + <h1 className="text-2xl font-bold tracking-tight">Settings</h1> + </div> + <nav 
className="flex gap-1 border-b border-border"> + {settingsTabs.map((tab) => ( + <Link + key={tab.to} + to={tab.to} + className="px-4 py-2 text-sm font-medium border-b-2 border-transparent text-muted-foreground transition-colors hover:text-foreground" + activeProps={{ + className: + 'px-4 py-2 text-sm font-medium border-b-2 border-primary text-foreground', + }} + > + {tab.label} + </Link> + ))} + </nav> + <Outlet /> + </div> + ) +} diff --git a/packages/web/src/routes/settings/health.tsx b/packages/web/src/routes/settings/health.tsx new file mode 100644 index 0000000..8cdfd1b --- /dev/null +++ b/packages/web/src/routes/settings/health.tsx @@ -0,0 +1,383 @@ +import { createFileRoute } from '@tanstack/react-router' +import { + CheckCircle2, + XCircle, + AlertTriangle, + RefreshCw, + Server, +} from 'lucide-react' +import { trpc } from '@/lib/trpc' +import { Card, CardHeader, CardTitle, CardContent } from '@/components/ui/card' +import { Badge } from '@/components/ui/badge' +import { Button } from '@/components/ui/button' +import { Skeleton } from '@/components/Skeleton' + +export const Route = createFileRoute('/settings/health')({ + component: HealthCheckPage, +}) + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +function formatUptime(seconds: number): string { + const d = Math.floor(seconds / 86400) + const h = Math.floor((seconds % 86400) / 3600) + const m = Math.floor((seconds % 3600) / 60) + const s = Math.floor(seconds % 60) + + const parts: string[] = [] + if (d > 0) parts.push(`${d}d`) + if (h > 0) parts.push(`${h}h`) + if (m > 0) parts.push(`${m}m`) + if (s > 0 || parts.length === 0) parts.push(`${s}s`) + return parts.join(' ') +} + +function formatResetTime(isoDate: string): string { + const now = Date.now() + const target = new Date(isoDate).getTime() + const diffMs = target - now + if (diffMs <= 0) return 'now' + + const 
totalMinutes = Math.floor(diffMs / 60_000) + const totalHours = Math.floor(totalMinutes / 60) + const totalDays = Math.floor(totalHours / 24) + + if (totalDays > 0) { + const remainingHours = totalHours - totalDays * 24 + return `in ${totalDays}d ${remainingHours}h` + } + const remainingMinutes = totalMinutes - totalHours * 60 + return `in ${totalHours}h ${remainingMinutes}m` +} + +function capitalize(s: string): string { + return s.charAt(0).toUpperCase() + s.slice(1) +} + +// --------------------------------------------------------------------------- +// Usage bar +// --------------------------------------------------------------------------- + +function UsageBar({ + label, + utilization, + resetsAt, +}: { + label: string + utilization: number + resetsAt: string | null +}) { + const color = + utilization >= 90 + ? 'bg-destructive' + : utilization >= 70 + ? 'bg-yellow-500' + : 'bg-green-500' + const resetText = resetsAt ? formatResetTime(resetsAt) : null + return ( + <div className="flex items-center gap-2 text-xs"> + <span className="w-20 shrink-0 text-muted-foreground">{label}</span> + <div className="h-2 flex-1 rounded-full bg-muted"> + <div + className={`h-2 rounded-full ${color}`} + style={{ width: `${Math.min(utilization, 100)}%` }} + /> + </div> + <span className="w-12 shrink-0 text-right"> + {utilization.toFixed(0)}% + </span> + {resetText && ( + <span className="shrink-0 text-muted-foreground"> + resets {resetText} + </span> + )} + </div> + ) +} + +// --------------------------------------------------------------------------- +// Page component +// --------------------------------------------------------------------------- + +function HealthCheckPage() { + const healthQuery = trpc.systemHealthCheck.useQuery(undefined, { + refetchInterval: 30_000, + }) + + const { data, isLoading, isError, error, refetch } = healthQuery + + // Loading state + if (isLoading) { + return ( + <div className="space-y-6"> + <div className="flex justify-end"> + <Skeleton 
className="h-9 w-24" /> + </div> + <Skeleton className="h-32 w-full" /> + <Skeleton className="h-48 w-full" /> + <Skeleton className="h-32 w-full" /> + </div> + ) + } + + // Error state + if (isError) { + return ( + <div className="flex flex-col items-center justify-center gap-4 py-12"> + <XCircle className="h-8 w-8 text-destructive" /> + <p className="text-sm text-destructive"> + Failed to load health check: {error?.message ?? 'Unknown error'} + </p> + <Button variant="outline" size="sm" onClick={() => void refetch()}> + Retry + </Button> + </div> + ) + } + + if (!data) return null + + const { server, accounts, projects } = data + + return ( + <div className="space-y-6"> + {/* Refresh button */} + <div className="flex justify-end"> + <Button + variant="outline" + size="sm" + onClick={() => void refetch()} + > + <RefreshCw className="mr-2 h-4 w-4" /> + Refresh + </Button> + </div> + + {/* Server Status */} + <Card> + <CardHeader> + <CardTitle className="flex items-center gap-2 text-lg"> + <Server className="h-5 w-5" /> + Server Status + </CardTitle> + </CardHeader> + <CardContent> + <div className="flex items-center gap-3"> + <CheckCircle2 className="h-5 w-5 text-green-500" /> + <div> + <p className="text-sm font-medium">Running</p> + <p className="text-xs text-muted-foreground"> + Uptime: {formatUptime(server.uptime)} + {server.startedAt && ( + <> + {' '} + · Started{' '} + {new Date(server.startedAt).toLocaleString()} + </> + )} + </p> + </div> + </div> + </CardContent> + </Card> + + {/* Accounts */} + <div className="space-y-3"> + <h2 className="text-lg font-semibold">Accounts</h2> + {accounts.length === 0 ? ( + <Card> + <CardContent className="py-6"> + <p className="text-center text-sm text-muted-foreground"> + No accounts configured. Use{' '} + <code className="rounded bg-muted px-1 py-0.5 text-xs"> + cw account add + </code>{' '} + to register one. 
+ </p> + </CardContent> + </Card> + ) : ( + accounts.map((account) => ( + <AccountCard key={account.id} account={account} /> + )) + )} + </div> + + {/* Projects */} + <div className="space-y-3"> + <h2 className="text-lg font-semibold">Projects</h2> + {projects.length === 0 ? ( + <Card> + <CardContent className="py-6"> + <p className="text-center text-sm text-muted-foreground"> + No projects registered yet. + </p> + </CardContent> + </Card> + ) : ( + projects.map((project) => ( + <Card key={project.id}> + <CardContent className="flex items-center gap-3 py-4"> + {project.repoExists ? ( + <CheckCircle2 className="h-5 w-5 shrink-0 text-green-500" /> + ) : ( + <XCircle className="h-5 w-5 shrink-0 text-destructive" /> + )} + <div className="min-w-0 flex-1"> + <p className="text-sm font-medium">{project.name}</p> + <p className="truncate text-xs text-muted-foreground"> + {project.url} + </p> + </div> + <span className="shrink-0 text-xs text-muted-foreground"> + {project.repoExists ? 'Clone found' : 'Clone missing'} + </span> + </CardContent> + </Card> + )) + )} + </div> + </div> + ) +} + +// --------------------------------------------------------------------------- +// Account card +// --------------------------------------------------------------------------- + +type AccountData = { + id: string + email: string + provider: string + credentialsValid: boolean + tokenValid: boolean + tokenExpiresAt: string | null + subscriptionType: string | null + error: string | null + usage: { + five_hour: { utilization: number; resets_at: string | null } | null + seven_day: { utilization: number; resets_at: string | null } | null + seven_day_sonnet: { utilization: number; resets_at: string | null } | null + seven_day_opus: { utilization: number; resets_at: string | null } | null + extra_usage: { + is_enabled: boolean + monthly_limit: number | null + used_credits: number | null + utilization: number | null + } | null + } | null + isExhausted: boolean + exhaustedUntil: string | null + 
lastUsedAt: string | null + agentCount: number + activeAgentCount: number +} + +function AccountCard({ account }: { account: AccountData }) { + const statusIcon = !account.credentialsValid ? ( + <XCircle className="h-5 w-5 shrink-0 text-destructive" /> + ) : account.isExhausted ? ( + <AlertTriangle className="h-5 w-5 shrink-0 text-yellow-500" /> + ) : ( + <CheckCircle2 className="h-5 w-5 shrink-0 text-green-500" /> + ) + + const statusText = !account.credentialsValid + ? 'Invalid credentials' + : account.isExhausted + ? `Exhausted until ${account.exhaustedUntil ? new Date(account.exhaustedUntil).toLocaleTimeString() : 'unknown'}` + : 'Available' + + const usage = account.usage + + return ( + <Card> + <CardContent className="space-y-3 py-4"> + {/* Header row */} + <div className="flex items-start gap-3"> + {statusIcon} + <div className="min-w-0 flex-1"> + <div className="flex flex-wrap items-center gap-2"> + <span className="text-sm font-medium">{account.email}</span> + <Badge variant="outline">{account.provider}</Badge> + {account.subscriptionType && ( + <Badge variant="secondary"> + {capitalize(account.subscriptionType)} + </Badge> + )} + </div> + <div className="mt-1 flex items-center gap-3 text-xs text-muted-foreground"> + <span> + {account.agentCount} agent{account.agentCount !== 1 ? 
's' : ''}{' '} + ({account.activeAgentCount} active) + </span> + <span>{statusText}</span> + </div> + </div> + </div> + + {/* Usage bars */} + {usage && ( + <div className="space-y-1.5 pl-8"> + {usage.five_hour && ( + <UsageBar + label="Session (5h)" + utilization={usage.five_hour.utilization} + resetsAt={usage.five_hour.resets_at} + /> + )} + {usage.seven_day && ( + <UsageBar + label="Weekly (7d)" + utilization={usage.seven_day.utilization} + resetsAt={usage.seven_day.resets_at} + /> + )} + {usage.seven_day_sonnet && + usage.seven_day_sonnet.utilization > 0 && ( + <UsageBar + label="Sonnet (7d)" + utilization={usage.seven_day_sonnet.utilization} + resetsAt={usage.seven_day_sonnet.resets_at} + /> + )} + {usage.seven_day_opus && usage.seven_day_opus.utilization > 0 && ( + <UsageBar + label="Opus (7d)" + utilization={usage.seven_day_opus.utilization} + resetsAt={usage.seven_day_opus.resets_at} + /> + )} + {usage.extra_usage && usage.extra_usage.is_enabled && ( + <div className="flex items-center gap-2 text-xs"> + <span className="w-20 shrink-0 text-muted-foreground"> + Extra usage + </span> + <span> + ${((usage.extra_usage.used_credits ?? 
0) / 100).toFixed(2)}{' '} + used + {usage.extra_usage.monthly_limit != null && ( + <> + {' '} + / ${(usage.extra_usage.monthly_limit / 100).toFixed( + 2 + )}{' '} + limit + </> + )} + </span> + </div> + )} + </div> + )} + + {/* Error message */} + {account.error && ( + <p className="pl-8 text-xs text-destructive">{account.error}</p> + )} + </CardContent> + </Card> + ) +} diff --git a/packages/web/src/routes/settings/index.tsx b/packages/web/src/routes/settings/index.tsx new file mode 100644 index 0000000..acd2d38 --- /dev/null +++ b/packages/web/src/routes/settings/index.tsx @@ -0,0 +1,7 @@ +import { createFileRoute, redirect } from '@tanstack/react-router' + +export const Route = createFileRoute('/settings/')({ + beforeLoad: () => { + throw redirect({ to: '/settings/health' }) + }, +}) diff --git a/packages/web/tsconfig.app.tsbuildinfo b/packages/web/tsconfig.app.tsbuildinfo new file mode 100644 index 0000000..0e34422 --- /dev/null +++ b/packages/web/tsconfig.app.tsbuildinfo @@ -0,0 +1 @@ 
+{"root":["./src/app.tsx","./src/main.tsx","./src/routetree.gen.ts","./src/router.tsx","./src/vite-env.d.ts","./src/components/actionmenu.tsx","./src/components/agentoutputviewer.tsx","./src/components/createinitiativedialog.tsx","./src/components/decisionlist.tsx","./src/components/dependencyindicator.tsx","./src/components/errorboundary.tsx","./src/components/executiontab.tsx","./src/components/freetextinput.tsx","./src/components/inboxlist.tsx","./src/components/initiativecard.tsx","./src/components/initiativeheader.tsx","./src/components/initiativelist.tsx","./src/components/messagecard.tsx","./src/components/optiongroup.tsx","./src/components/phaseaccordion.tsx","./src/components/progressbar.tsx","./src/components/progresspanel.tsx","./src/components/projectpicker.tsx","./src/components/questionform.tsx","./src/components/refinespawndialog.tsx","./src/components/registerprojectdialog.tsx","./src/components/skeleton.tsx","./src/components/spawnarchitectdropdown.tsx","./src/components/statusbadge.tsx","./src/components/statusdot.tsx","./src/components/taskdetailmodal.tsx","./src/components/taskrow.tsx","./src/components/editor/blockselectionextension.ts","./src/components/editor/contentproposalreview.tsx","./src/components/editor/contenttab.tsx","./src/components/editor/pagebreadcrumb.tsx","./src/components/editor/pagelinkextension.tsx","./src/components/editor/pagetitlecontext.tsx","./src/components/editor/pagetree.tsx","./src/components/editor/refineagentpanel.tsx","./src/components/editor/slashcommandlist.tsx","./src/components/editor/slashcommands.ts","./src/components/editor/tiptapeditor.tsx","./src/components/editor/slash-command-items.ts","./src/components/execution/breakdownsection.tsx","./src/components/execution/executioncontext.tsx","./src/components/execution/phaseactions.tsx","./src/components/execution/phasewithtasks.tsx","./src/components/execution/phaseslist.tsx","./src/components/execution/plantasksfetcher.tsx","./src/components/execution/progres
ssidebar.tsx","./src/components/execution/taskmodal.tsx","./src/components/execution/index.ts","./src/components/ui/badge.tsx","./src/components/ui/button.tsx","./src/components/ui/card.tsx","./src/components/ui/dialog.tsx","./src/components/ui/dropdown-menu.tsx","./src/components/ui/input.tsx","./src/components/ui/label.tsx","./src/components/ui/sonner.tsx","./src/components/ui/textarea.tsx","./src/hooks/index.ts","./src/hooks/useautosave.ts","./src/hooks/usedebounce.ts","./src/hooks/userefineagent.ts","./src/hooks/usespawnmutation.ts","./src/hooks/usesubscriptionwitherrorhandling.ts","./src/layouts/applayout.tsx","./src/lib/markdown-to-tiptap.ts","./src/lib/trpc.ts","./src/lib/utils.ts","./src/routes/__root.tsx","./src/routes/agents.tsx","./src/routes/inbox.tsx","./src/routes/index.tsx","./src/routes/settings.tsx","./src/routes/initiatives/$id.tsx","./src/routes/initiatives/index.tsx","./src/routes/settings/health.tsx","./src/routes/settings/index.tsx"],"errors":true,"version":"5.9.3"} \ No newline at end of file diff --git a/reference/ccswitch b/reference/ccswitch new file mode 100755 index 0000000..d7691a4 --- /dev/null +++ b/reference/ccswitch @@ -0,0 +1,1614 @@ +#!/usr/bin/env bash + +# Multi-Account Switcher for Claude Code +# Simple tool to manage and switch between multiple Claude Code accounts + +set -euo pipefail + +# Configuration +readonly BACKUP_DIR="$HOME/.claude-switch-backup" +readonly SEQUENCE_FILE="$BACKUP_DIR/sequence.json" +readonly USAGE_API_URL="https://api.anthropic.com/api/oauth/usage" +readonly TOKEN_REFRESH_URL="https://console.anthropic.com/v1/oauth/token" +readonly OAUTH_CLIENT_ID="9d1c250a-e61b-44d9-88ed-5944d1962f5e" +readonly TOKEN_REFRESH_BUFFER=300 # Refresh if expiring within 5 minutes + +# Container detection +is_running_in_container() { + # Check for Docker environment file + if [[ -f /.dockerenv ]]; then + return 0 + fi + + # Check cgroup for container indicators + if [[ -f /proc/1/cgroup ]] && grep -q 
'docker\|lxc\|containerd\|kubepods' /proc/1/cgroup 2>/dev/null; then + return 0 + fi + + # Check mount info for container filesystems + if [[ -f /proc/self/mountinfo ]] && grep -q 'docker\|overlay' /proc/self/mountinfo 2>/dev/null; then + return 0 + fi + + # Check for common container environment variables + if [[ -n "${CONTAINER:-}" ]] || [[ -n "${container:-}" ]]; then + return 0 + fi + + return 1 +} + +# Platform detection +detect_platform() { + case "$(uname -s)" in + Darwin) echo "macos" ;; + Linux) + if [[ -n "${WSL_DISTRO_NAME:-}" ]]; then + echo "wsl" + else + echo "linux" + fi + ;; + *) echo "unknown" ;; + esac +} + +# Get Claude configuration file path with fallback +get_claude_config_path() { + local primary_config="$HOME/.claude/.claude.json" + local fallback_config="$HOME/.claude.json" + + # Check primary location first + if [[ -f "$primary_config" ]]; then + # Verify it has valid oauthAccount structure + if jq -e '.oauthAccount' "$primary_config" >/dev/null 2>&1; then + echo "$primary_config" + return + fi + fi + + # Fallback to standard location + echo "$fallback_config" +} + +# Basic validation that JSON is valid +validate_json() { + local file="$1" + if ! jq . 
"$file" >/dev/null 2>&1; then + echo "Error: Invalid JSON in $file" + return 1 + fi +} + +# Email validation function +validate_email() { + local email="$1" + # Use robust regex for email validation + if [[ "$email" =~ ^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$ ]]; then + return 0 + else + return 1 + fi +} + +# Account identifier resolution function +resolve_account_identifier() { + local identifier="$1" + if [[ "$identifier" =~ ^[0-9]+$ ]]; then + echo "$identifier" # It's a number + else + # Look up account number by email + local account_num + account_num=$(jq -r --arg email "$identifier" '.accounts | to_entries[] | select(.value.email == $email) | .key' "$SEQUENCE_FILE" 2>/dev/null) + if [[ -n "$account_num" && "$account_num" != "null" ]]; then + echo "$account_num" + else + echo "" + fi + fi +} + +# Safe JSON write with validation +write_json() { + local file="$1" + local content="$2" + local temp_file + temp_file=$(mktemp "${file}.XXXXXX") + + echo "$content" > "$temp_file" + if ! jq . "$temp_file" >/dev/null 2>&1; then + rm -f "$temp_file" + echo "Error: Generated invalid JSON" + return 1 + fi + + mv "$temp_file" "$file" + chmod 600 "$file" +} + +# Check Bash version (4.4+ required) +check_bash_version() { + local version + version=$(bash --version | head -n1 | grep -oE '[0-9]+\.[0-9]+' | head -n1) + if ! awk -v ver="$version" 'BEGIN { exit (ver >= 4.4 ? 0 : 1) }'; then + echo "Error: Bash 4.4+ required (found $version)" + exit 1 + fi +} + +# Check dependencies +check_dependencies() { + for cmd in jq curl; do + if ! 
command -v "$cmd" >/dev/null 2>&1; then + echo "Error: Required command '$cmd' not found" + echo "Install with: apt install $cmd (Linux) or brew install $cmd (macOS)" + exit 1 + fi + done +} + +# Setup backup directories +setup_directories() { + mkdir -p "$BACKUP_DIR"/{configs,credentials} + chmod 700 "$BACKUP_DIR" + chmod 700 "$BACKUP_DIR"/{configs,credentials} +} + +# Claude Code process detection +is_claude_running() { + # Use pgrep for reliable process detection + pgrep -x "claude" >/dev/null 2>&1 +} + +# Wait for Claude Code to close (no timeout - user controlled) +wait_for_claude_close() { + if ! is_claude_running; then + return 0 + fi + + echo "Claude Code is running. Please close it first." + echo "Waiting for Claude Code to close..." + + while is_claude_running; do + sleep 1 + done + + echo "Claude Code closed. Continuing..." +} + +# Get current account info from .claude.json +get_current_account() { + if [[ ! -f "$(get_claude_config_path)" ]]; then + echo "none" + return + fi + + if ! 
validate_json "$(get_claude_config_path)"; then + echo "none" + return + fi + + local email + email=$(jq -r '.oauthAccount.emailAddress // empty' "$(get_claude_config_path)" 2>/dev/null) + echo "${email:-none}" +} + +# Read credentials based on platform +read_credentials() { + local platform + platform=$(detect_platform) + + case "$platform" in + macos) + security find-generic-password -s "Claude Code-credentials" -w 2>/dev/null || echo "" + ;; + linux|wsl) + if [[ -f "$HOME/.claude/.credentials.json" ]]; then + cat "$HOME/.claude/.credentials.json" + else + echo "" + fi + ;; + esac +} + +# Get OAuth access token from credentials +get_access_token() { + local creds + creds=$(read_credentials) + if [[ -z "$creds" ]]; then + echo "" + return + fi + echo "$creds" | jq -r '.claudeAiOauth.accessToken // empty' 2>/dev/null +} + +# Get OAuth refresh token from credentials +get_refresh_token() { + local creds + creds=$(read_credentials) + if [[ -z "$creds" ]]; then + echo "" + return + fi + echo "$creds" | jq -r '.claudeAiOauth.refreshToken // empty' 2>/dev/null +} + +# Get token expiry timestamp from credentials +get_token_expiry() { + local creds + creds=$(read_credentials) + if [[ -z "$creds" ]]; then + echo "" + return + fi + echo "$creds" | jq -r '.claudeAiOauth.expiresAt // empty' 2>/dev/null +} + +# Check if token is expired or expiring soon +is_token_expired() { + local expiry_timestamp + expiry_timestamp=$(get_token_expiry) + + if [[ -z "$expiry_timestamp" || "$expiry_timestamp" == "null" ]]; then + # No expiry info stored, assume valid (will fail on API call if not) + return 1 + fi + + # expiresAt is stored as milliseconds since epoch + local now_ms expiry_ms + now_ms=$(($(date +%s) * 1000)) + expiry_ms="$expiry_timestamp" + + # Token is expired if current time + buffer >= expiry time + # Convert buffer from seconds to milliseconds + local buffer_ms=$((TOKEN_REFRESH_BUFFER * 1000)) + if [[ $((now_ms + buffer_ms)) -ge $expiry_ms ]]; then + return 0 # Token is 
expired or expiring soon + fi + + return 1 # Token is still valid +} + +# Refresh the OAuth token via API +# Returns: JSON response on success, error message prefixed with "ERROR:" on failure, empty on network error +refresh_oauth_token_api() { + local refresh_token="$1" + + if [[ -z "$refresh_token" ]]; then + echo "ERROR:No refresh token provided" + return 1 + fi + + # Use the same format as Claude Code CLI + local request_body + request_body=$(jq -n \ + --arg grant_type "refresh_token" \ + --arg refresh_token "$refresh_token" \ + --arg client_id "$OAUTH_CLIENT_ID" \ + --arg scope "user:inference user:profile" \ + '{grant_type: $grant_type, refresh_token: $refresh_token, client_id: $client_id, scope: $scope}') + + local response + response=$(curl -s -X POST "$TOKEN_REFRESH_URL" \ + -H "Content-Type: application/json" \ + -H "Accept: application/json" \ + -d "$request_body" 2>/dev/null) + + if [[ -z "$response" ]]; then + echo "ERROR:No response from server" + return 1 + fi + + # Check for error in response or HTML response (Cloudflare block) + if echo "$response" | grep -q '<!DOCTYPE html>'; then + echo "ERROR:Cloudflare blocked the request" + return 1 + fi + + # Check for OAuth error response + local error_type error_desc + error_type=$(echo "$response" | jq -r '.error // empty' 2>/dev/null) + if [[ -n "$error_type" ]]; then + error_desc=$(echo "$response" | jq -r '.error_description // "Unknown error"' 2>/dev/null) + echo "ERROR:$error_type - $error_desc" + return 1 + fi + + echo "$response" + return 0 +} + +# Refresh the OAuth token +# Returns: JSON response on success, "ERROR:..." 
# Refresh the OAuth token via the API; echoes the raw response (JSON on
# success, or an "ERROR:" message on failure).
refresh_oauth_token() {
    local refresh_token="$1"
    local response
    response=$(refresh_oauth_token_api "$refresh_token")

    # Pass through the response (either JSON or ERROR:message)
    echo "$response"

    # Check if it was an error
    if [[ "$response" == ERROR:* ]]; then
        return 1
    fi

    return 0
}

# Update credentials with new token data
update_credentials_with_token() {
    local new_access_token="$1"
    local new_refresh_token="$2"
    local expires_in="$3"

    local creds
    creds=$(read_credentials)

    if [[ -z "$creds" ]]; then
        return 1
    fi

    # Calculate expiry timestamp in MILLISECONDS (not seconds!)
    # Claude Code stores expiresAt as a number in milliseconds
    local now_ms expiry_ms
    now_ms=$(($(date +%s) * 1000))
    expiry_ms=$((now_ms + (expires_in * 1000)))

    # Update credentials JSON with numeric expiresAt
    # IMPORTANT: Use -c for compact output (no newlines) for keychain compatibility
    local updated_creds
    updated_creds=$(echo "$creds" | jq -c \
        --arg access "$new_access_token" \
        --arg refresh "$new_refresh_token" \
        --argjson expiry "$expiry_ms" \
        '.claudeAiOauth.accessToken = $access | .claudeAiOauth.refreshToken = $refresh | .claudeAiOauth.expiresAt = $expiry')

    write_credentials "$updated_creds"
    return 0
}

# Get a valid access token, refreshing if necessary
get_valid_access_token() {
    local access_token refresh_token
    access_token=$(get_access_token)

    if [[ -z "$access_token" ]]; then
        echo ""
        return 1
    fi

    # Check if token needs refresh
    if is_token_expired; then
        refresh_token=$(get_refresh_token)

        if [[ -z "$refresh_token" ]]; then
            # No refresh token available, return current token and hope for the best
            echo "$access_token"
            return 0
        fi

        echo "Token expired, refreshing..." >&2

        local refresh_response
        refresh_response=$(refresh_oauth_token "$refresh_token")

        if [[ -n "$refresh_response" ]]; then
            local new_access new_refresh expires_in
            new_access=$(echo "$refresh_response" | jq -r '.access_token // empty')
            new_refresh=$(echo "$refresh_response" | jq -r '.refresh_token // empty')
            expires_in=$(echo "$refresh_response" | jq -r '.expires_in // 28800')

            if [[ -n "$new_access" ]]; then
                # Use existing refresh token if new one not provided
                if [[ -z "$new_refresh" ]]; then
                    new_refresh="$refresh_token"
                fi

                update_credentials_with_token "$new_access" "$new_refresh" "$expires_in"
                echo "Token refreshed successfully" >&2
                echo "$new_access"
                return 0
            fi
        fi

        echo "Token refresh failed, using existing token" >&2
    fi

    echo "$access_token"
    return 0
}

# Get account credentials (internal helper)
get_account_credentials() {
    local account_num="$1"
    local email="$2"
    local platform creds
    platform=$(detect_platform)

    case "$platform" in
        macos)
            creds=$(security find-generic-password -s "Claude Code-Account-${account_num}-${email}" -w 2>/dev/null || echo "")
            ;;
        linux|wsl)
            local cred_file="$BACKUP_DIR/credentials/.claude-credentials-${account_num}-${email}.json"
            if [[ -f "$cred_file" ]]; then
                creds=$(cat "$cred_file")
            else
                creds=""
            fi
            ;;
    esac

    # Check if creds is hex-encoded (broken format from old bug)
    # and try to decode it
    if [[ -n "$creds" ]] && [[ "$creds" =~ ^[0-9a-fA-F]+$ ]] && [[ ${#creds} -gt 100 ]]; then
        # Try to decode hex
        local decoded
        decoded=$(echo "$creds" | xxd -r -p 2>/dev/null || echo "")
        if [[ -n "$decoded" ]] && echo "$decoded" | jq '.' >/dev/null 2>&1; then
            creds="$decoded"
        fi
    fi

    # Validate JSON - return empty if invalid
    if [[ -n "$creds" ]] && ! echo "$creds" | jq '.' >/dev/null 2>&1; then
        echo ""
        return
    fi

    echo "$creds"
}

# Get OAuth access token for a specific account
get_account_access_token() {
    local account_num="$1"
    local email="$2"
    local creds
    creds=$(get_account_credentials "$account_num" "$email")

    if [[ -z "$creds" ]]; then
        echo ""
        return
    fi
    echo "$creds" | jq -r '.claudeAiOauth.accessToken // empty' 2>/dev/null
}

# Get refresh token for a specific account
get_account_refresh_token() {
    local account_num="$1"
    local email="$2"
    local creds
    creds=$(get_account_credentials "$account_num" "$email")

    if [[ -z "$creds" ]]; then
        echo ""
        return
    fi
    echo "$creds" | jq -r '.claudeAiOauth.refreshToken // empty' 2>/dev/null
}

# Get token expiry for a specific account
get_account_token_expiry() {
    local account_num="$1"
    local email="$2"
    local creds
    creds=$(get_account_credentials "$account_num" "$email")

    if [[ -z "$creds" ]]; then
        echo ""
        return
    fi
    echo "$creds" | jq -r '.claudeAiOauth.expiresAt // empty' 2>/dev/null
}

# Check if account token is expired
is_account_token_expired() {
    local account_num="$1"
    local email="$2"
    local expiry_timestamp
    expiry_timestamp=$(get_account_token_expiry "$account_num" "$email")

    if [[ -z "$expiry_timestamp" || "$expiry_timestamp" == "null" ]]; then
        return 1
    fi

    local now_ms expiry_ms

    # Handle both numeric (milliseconds) and string (ISO 8601) formats
    if [[ "$expiry_timestamp" =~ ^[0-9]+$ ]]; then
        # Numeric format (milliseconds since epoch)
        now_ms=$(($(date +%s) * 1000))
        expiry_ms="$expiry_timestamp"
    else
        # String format (ISO 8601) - convert to milliseconds
        # This handles legacy credentials with string expiresAt
        now_ms=$(($(date +%s) * 1000))
        local expiry_epoch
        if date --version >/dev/null 2>&1; then
            # GNU date
            expiry_epoch=$(date -d "$expiry_timestamp" +%s 2>/dev/null || echo "0")
        else
            # BSD date (macOS)
            local formatted_date
            formatted_date=$(echo "$expiry_timestamp" | sed 's/T/ /; s/[Zz]$//; s/[+-][0-9][0-9]:[0-9][0-9]$//')
            expiry_epoch=$(TZ=UTC date -j -f "%Y-%m-%d %H:%M:%S" "$formatted_date" +%s 2>/dev/null || echo "0")
        fi
        expiry_ms=$((expiry_epoch * 1000))
    fi

    # Token is expired if current time + buffer >= expiry time
    local buffer_ms=$((TOKEN_REFRESH_BUFFER * 1000))
    if [[ $((now_ms + buffer_ms)) -ge $expiry_ms ]]; then
        return 0
    fi

    return 1
}

# Update account credentials with new token data
update_account_credentials_with_token() {
    local account_num="$1"
    local email="$2"
    local new_access_token="$3"
    local new_refresh_token="$4"
    local expires_in="$5"

    local creds
    creds=$(get_account_credentials "$account_num" "$email")

    if [[ -z "$creds" ]]; then
        return 1
    fi

    # Calculate expiry timestamp in MILLISECONDS (not seconds!)
    # Claude Code stores expiresAt as a number in milliseconds
    local now_ms expiry_ms
    now_ms=$(($(date +%s) * 1000))
    expiry_ms=$((now_ms + (expires_in * 1000)))

    # Update credentials JSON with numeric expiresAt
    # IMPORTANT: Use -c for compact output (no newlines) for keychain compatibility
    local updated_creds
    updated_creds=$(echo "$creds" | jq -c \
        --arg access "$new_access_token" \
        --arg refresh "$new_refresh_token" \
        --argjson expiry "$expiry_ms" \
        '.claudeAiOauth.accessToken = $access | .claudeAiOauth.refreshToken = $refresh | .claudeAiOauth.expiresAt = $expiry')

    write_account_credentials "$account_num" "$email" "$updated_creds"
    return 0
}

# Get valid access token for a specific account, refreshing if necessary
get_valid_account_access_token() {
    local account_num="$1"
    local email="$2"
    local access_token refresh_token

    access_token=$(get_account_access_token "$account_num" "$email")

    if [[ -z "$access_token" ]]; then
        echo ""
        return 1
    fi

    if is_account_token_expired "$account_num" "$email"; then
        refresh_token=$(get_account_refresh_token "$account_num" "$email")

        if [[ -z "$refresh_token" ]]; then
            echo "$access_token"
            return 0
        fi

        echo "Token expired for account $account_num, refreshing..." >&2

        local refresh_response
        refresh_response=$(refresh_oauth_token "$refresh_token")

        if [[ -n "$refresh_response" ]]; then
            local new_access new_refresh expires_in
            new_access=$(echo "$refresh_response" | jq -r '.access_token // empty')
            new_refresh=$(echo "$refresh_response" | jq -r '.refresh_token // empty')
            expires_in=$(echo "$refresh_response" | jq -r '.expires_in // 28800')

            if [[ -n "$new_access" ]]; then
                if [[ -z "$new_refresh" ]]; then
                    new_refresh="$refresh_token"
                fi

                update_account_credentials_with_token "$account_num" "$email" "$new_access" "$new_refresh" "$expires_in"
                echo "Token refreshed successfully for account $account_num" >&2
                echo "$new_access"
                return 0
            fi
        fi

        echo "Token refresh failed for account $account_num, using existing token" >&2
    fi

    echo "$access_token"
    return 0
}

# Fetch usage data from Anthropic API
fetch_usage() {
    local token="$1"
    if [[ -z "$token" ]]; then
        echo ""
        return
    fi

    curl -s -X GET "$USAGE_API_URL" \
        -H "Accept: application/json" \
        -H "Content-Type: application/json" \
        -H "Authorization: Bearer $token" \
        -H "anthropic-beta: oauth-2025-04-20" 2>/dev/null
}

# Format time remaining until reset
format_time_remaining() {
    local reset_at="$1"
    if [[ -z "$reset_at" || "$reset_at" == "null" ]]; then
        echo "N/A"
        return
    fi

    local reset_epoch now_epoch diff_seconds
    # Parse ISO 8601 date - handle both GNU and BSD date
    if date --version >/dev/null 2>&1; then
        # GNU date
        reset_epoch=$(date -d "$reset_at" +%s 2>/dev/null || echo "0")
    else
        # BSD date (macOS)
        # Convert ISO 8601 to format BSD date understands
        # The API returns UTC times, so we need to parse as UTC
        local formatted_date
        formatted_date=$(echo "$reset_at" | sed 's/T/ /; s/[Zz]$//; s/[+-][0-9][0-9]:[0-9][0-9]$//')
        # Parse the datetime as UTC by temporarily setting TZ
        reset_epoch=$(TZ=UTC date -j -f "%Y-%m-%d %H:%M:%S" "$formatted_date" +%s 2>/dev/null || echo "0")
    fi

    now_epoch=$(date +%s)
    diff_seconds=$((reset_epoch - now_epoch))

    if [[ $diff_seconds -le 0 ]]; then
        echo "now"
        return
    fi

    local days hours minutes
    days=$((diff_seconds / 86400))
    hours=$(((diff_seconds % 86400) / 3600))
    minutes=$(((diff_seconds % 3600) / 60))

    if [[ $days -gt 0 ]]; then
        echo "${days}d ${hours}h ${minutes}m"
    elif [[ $hours -gt 0 ]]; then
        echo "${hours}h ${minutes}m"
    else
        echo "${minutes}m"
    fi
}

# Format reset time for display
format_reset_time() {
    local reset_at="$1"
    if [[ -z "$reset_at" || "$reset_at" == "null" ]]; then
        echo "N/A"
        return
    fi

    # Parse and format the date in local timezone
    if date --version >/dev/null 2>&1; then
        # GNU date
        date -d "$reset_at" "+%Y-%m-%d %H:%M %Z" 2>/dev/null || echo "$reset_at"
    else
        # BSD date (macOS)
        # The API returns UTC times, so we need to parse as UTC then display in local time
        local formatted_date
        formatted_date=$(echo "$reset_at" | sed 's/T/ /; s/[Zz]$//; s/[+-][0-9][0-9]:[0-9][0-9]$//')
        local epoch
        # Parse the datetime as UTC by temporarily setting TZ
        epoch=$(TZ=UTC date -j -f "%Y-%m-%d %H:%M:%S" "$formatted_date" +%s 2>/dev/null || echo "0")
        if [[ "$epoch" != "0" ]]; then
            # Display in local timezone
            date -j -f "%s" "$epoch" "+%Y-%m-%d %H:%M %Z" 2>/dev/null || echo "$reset_at"
        else
            echo "$reset_at"
        fi
    fi
}

# Create a progress bar
progress_bar() {
    local utilization="$1"
    local width=20
    local filled empty

    # Handle null or empty
    if [[ -z "$utilization" || "$utilization" == "null" ]]; then
        printf '[%s]' "$(printf '%*s' $width '' | tr ' ' '-')"
        return
    fi

    # Round to integer
    local pct
    pct=$(printf "%.0f" "$utilization")

    filled=$((pct * width / 100))
    empty=$((width - filled))

    local bar_char="█"
    local empty_char="░"

    # BUGFIX: the bar characters are multi-byte UTF-8; `tr` operates on
    # single bytes and would corrupt them. Build the bar with pure-bash
    # string concatenation instead of piping spaces through `tr`.
    local i bar=""
    for ((i = 0; i < filled; i++)); do
        bar+="$bar_char"
    done
    for ((i = 0; i < empty; i++)); do
        bar+="$empty_char"
    done
    printf '[%s]' "$bar"
}

# Display usage for a single account
display_account_usage() {
    local account_num="$1"
    local email="$2"
    local token="$3"
    local is_active="$4"

    local active_marker=""
    if [[ "$is_active" == "true" ]]; then
        active_marker=" (active)"
    fi

    echo ""
    echo "Account $account_num: $email$active_marker"
    echo "$(printf '%*s' 60 '' | tr ' ' '-')"

    if [[ -z "$token" ]]; then
        echo " Error: No access token available"
        return
    fi

    local usage_data
    usage_data=$(fetch_usage "$token")

    if [[ -z "$usage_data" ]]; then
        echo " Error: Failed to fetch usage data"
        return
    fi

    # Check for API error
    if echo "$usage_data" | jq -e '.error' >/dev/null 2>&1; then
        local error_msg
        error_msg=$(echo "$usage_data" | jq -r '.error.message // .error // "Unknown error"')
        echo " Error: $error_msg"
        return
    fi

    # Parse usage data
    local five_hour_util five_hour_reset
    local seven_day_util seven_day_reset
    local seven_day_sonnet_util seven_day_sonnet_reset
    local seven_day_opus_util seven_day_opus_reset
    local extra_usage_enabled extra_usage_limit extra_usage_used extra_usage_util

    five_hour_util=$(echo "$usage_data" | jq -r '.five_hour.utilization // "null"')
    five_hour_reset=$(echo "$usage_data" | jq -r '.five_hour.resets_at // "null"')

    seven_day_util=$(echo "$usage_data" | jq -r '.seven_day.utilization // "null"')
    seven_day_reset=$(echo "$usage_data" | jq -r '.seven_day.resets_at // "null"')

    seven_day_sonnet_util=$(echo "$usage_data" | jq -r '.seven_day_sonnet.utilization // "null"')
    seven_day_sonnet_reset=$(echo "$usage_data" | jq -r '.seven_day_sonnet.resets_at // "null"')

    seven_day_opus_util=$(echo "$usage_data" | jq -r '.seven_day_opus.utilization // "null"')
    seven_day_opus_reset=$(echo "$usage_data" | jq -r '.seven_day_opus.resets_at // "null"')

    extra_usage_enabled=$(echo "$usage_data" | jq -r '.extra_usage.is_enabled // "null"')
    extra_usage_limit=$(echo "$usage_data" | jq -r '.extra_usage.monthly_limit // "null"')
    extra_usage_used=$(echo "$usage_data" | jq -r '.extra_usage.used_credits // "null"')
    extra_usage_util=$(echo "$usage_data" | jq -r '.extra_usage.utilization // "null"')

    # Display 5-hour session usage
    if [[ "$five_hour_util" != "null" ]]; then
        printf " Session (5h): %s %5.1f%% resets in %s\n" \
            "$(progress_bar "$five_hour_util")" \
            "$five_hour_util" \
            "$(format_time_remaining "$five_hour_reset")"
        printf " Reset: %s\n" "$(format_reset_time "$five_hour_reset")"
    fi

    # Display 7-day weekly usage
    if [[ "$seven_day_util" != "null" ]]; then
        printf " Weekly (7d): %s %5.1f%% resets in %s\n" \
            "$(progress_bar "$seven_day_util")" \
            "$seven_day_util" \
            "$(format_time_remaining "$seven_day_reset")"
        printf " Reset: %s\n" "$(format_reset_time "$seven_day_reset")"
    fi

    # Display 7-day Sonnet usage if available
    if [[ "$seven_day_sonnet_util" != "null" && "$seven_day_sonnet_util" != "0" ]]; then
        printf " Sonnet (7d): %s %5.1f%% resets in %s\n" \
            "$(progress_bar "$seven_day_sonnet_util")" \
            "$seven_day_sonnet_util" \
            "$(format_time_remaining "$seven_day_sonnet_reset")"
    fi

    # Display 7-day Opus usage if available
    if [[ "$seven_day_opus_util" != "null" && "$seven_day_opus_util" != "0" ]]; then
        printf " Opus (7d): %s %5.1f%% resets in %s\n" \
            "$(progress_bar "$seven_day_opus_util")" \
            "$seven_day_opus_util" \
            "$(format_time_remaining "$seven_day_opus_reset")"
    fi

    # Display extra usage if enabled
    if [[ "$extra_usage_enabled" == "true" ]]; then
        echo ""
        printf " Extra Usage: %s %5.1f%% (\$%.2f / \$%.2f)\n" \
            "$(progress_bar "$extra_usage_util")" \
            "$extra_usage_util" \
            "$(echo "$extra_usage_used / 100" | bc -l)" \
            "$(echo "$extra_usage_limit / 100" | bc -l)"
    fi
}

# Show usage for all accounts
cmd_usage() {
    if [[ ! -f "$SEQUENCE_FILE" ]]; then
        # No managed accounts, show usage for current account only
        local current_email
        current_email=$(get_current_account)

        if [[ "$current_email" == "none" ]]; then
            echo "Error: No active Claude account found"
            exit 1
        fi

        local token
        token=$(get_valid_access_token)

        echo "Claude Code Usage"
        echo "================="
        display_account_usage "0" "$current_email" "$token" "true"
        exit 0
    fi

    # Get current active account
    local current_email
    current_email=$(get_current_account)

    echo "Claude Code Usage"
    echo "================="

    # Iterate through all managed accounts
    local account_nums
    account_nums=$(jq -r '.sequence[]' "$SEQUENCE_FILE")

    for account_num in $account_nums; do
        local email is_active token
        email=$(jq -r --arg num "$account_num" '.accounts[$num].email' "$SEQUENCE_FILE")

        if [[ "$email" == "$current_email" ]]; then
            is_active="true"
            token=$(get_valid_access_token)
        else
            is_active="false"
            token=$(get_valid_account_access_token "$account_num" "$email")
        fi

        display_account_usage "$account_num" "$email" "$token" "$is_active"
    done

    echo ""
}

# Write credentials based on platform
write_credentials() {
    local credentials="$1"
    local platform
    platform=$(detect_platform)

    case "$platform" in
        macos)
            security add-generic-password -U -s "Claude Code-credentials" -a "$USER" -w "$credentials" 2>/dev/null
            ;;
        linux|wsl)
            mkdir -p "$HOME/.claude"
            printf '%s' "$credentials" > "$HOME/.claude/.credentials.json"
            chmod 600 "$HOME/.claude/.credentials.json"
            ;;
    esac
}

# Read account credentials from backup
read_account_credentials() {
    local account_num="$1"
    local email="$2"
    local platform
    platform=$(detect_platform)

    case "$platform" in
        macos)
            security find-generic-password -s "Claude Code-Account-${account_num}-${email}" -w 2>/dev/null || echo ""
            ;;
        linux|wsl)
            local cred_file="$BACKUP_DIR/credentials/.claude-credentials-${account_num}-${email}.json"
            if [[ -f "$cred_file" ]]; then
                cat "$cred_file"
            else
                echo ""
            fi
            ;;
    esac
}

# Write account credentials to backup
write_account_credentials() {
    local account_num="$1"
    local email="$2"
    local credentials="$3"
    local platform
    platform=$(detect_platform)

    case "$platform" in
        macos)
            security add-generic-password -U -s "Claude Code-Account-${account_num}-${email}" -a "$USER" -w "$credentials" 2>/dev/null
            ;;
        linux|wsl)
            local cred_file="$BACKUP_DIR/credentials/.claude-credentials-${account_num}-${email}.json"
            printf '%s' "$credentials" > "$cred_file"
            chmod 600 "$cred_file"
            ;;
    esac
}

# Read account config from backup
read_account_config() {
    local account_num="$1"
    local email="$2"
    local config_file="$BACKUP_DIR/configs/.claude-config-${account_num}-${email}.json"

    if [[ -f "$config_file" ]]; then
        cat "$config_file"
    else
        echo ""
    fi
}

# Write account config to backup
write_account_config() {
    local account_num="$1"
    local email="$2"
    local config="$3"
    local config_file="$BACKUP_DIR/configs/.claude-config-${account_num}-${email}.json"

    echo "$config" > "$config_file"
    chmod 600 "$config_file"
}

# Initialize sequence.json if it doesn't exist
init_sequence_file() {
    if [[ ! -f "$SEQUENCE_FILE" ]]; then
        local init_content='{"activeAccountNumber":null,"lastUpdated":"'$(date -u +%Y-%m-%dT%H:%M:%SZ)'","sequence":[],"accounts":{}}'
        write_json "$SEQUENCE_FILE" "$init_content"
    fi
}

# Get next account number
get_next_account_number() {
    if [[ ! -f "$SEQUENCE_FILE" ]]; then
        echo "1"
        return
    fi

    local max_num
    max_num=$(jq -r '.accounts | keys | map(tonumber) | max // 0' "$SEQUENCE_FILE")
    echo $((max_num + 1))
}

# Check if account exists by email
account_exists() {
    local email="$1"
    if [[ ! -f "$SEQUENCE_FILE" ]]; then
        return 1
    fi

    jq -e --arg email "$email" '.accounts[] | select(.email == $email)' "$SEQUENCE_FILE" >/dev/null 2>&1
}

# Add account
cmd_add_account() {
    setup_directories
    init_sequence_file

    local current_email
    current_email=$(get_current_account)

    if [[ "$current_email" == "none" ]]; then
        echo "Error: No active Claude account found. Please log in first."
        exit 1
    fi

    # Get current credentials and config
    local current_creds current_config account_uuid
    current_creds=$(read_credentials)
    current_config=$(cat "$(get_claude_config_path)")

    if [[ -z "$current_creds" ]]; then
        echo "Error: No credentials found for current account"
        exit 1
    fi

    account_uuid=$(jq -r '.oauthAccount.accountUuid' "$(get_claude_config_path)")

    # Check if account already exists
    if account_exists "$current_email"; then
        # Update existing account credentials
        local account_num
        account_num=$(resolve_account_identifier "$current_email")

        if [[ -z "$account_num" ]]; then
            echo "Error: Failed to resolve account number for $current_email"
            exit 1
        fi

        # Update stored credentials and config
        write_account_credentials "$account_num" "$current_email" "$current_creds"
        write_account_config "$account_num" "$current_email" "$current_config"

        # Update sequence.json with new timestamp
        local updated_sequence
        updated_sequence=$(jq --arg num "$account_num" --arg now "$(date -u +%Y-%m-%dT%H:%M:%SZ)" '
            .accounts[$num].updated = $now |
            .activeAccountNumber = ($num | tonumber) |
            .lastUpdated = $now
        ' "$SEQUENCE_FILE")

        write_json "$SEQUENCE_FILE" "$updated_sequence"

        echo "Updated Account $account_num: $current_email (credentials refreshed)"
        exit 0
    fi

    # Add new account
    local account_num
    account_num=$(get_next_account_number)

    # Store backups
    write_account_credentials "$account_num" "$current_email" "$current_creds"
    write_account_config "$account_num" "$current_email" "$current_config"

    # Update sequence.json
    local updated_sequence
    updated_sequence=$(jq --arg num "$account_num" --arg email "$current_email" --arg uuid "$account_uuid" --arg now "$(date -u +%Y-%m-%dT%H:%M:%SZ)" '
        .accounts[$num] = {
            email: $email,
            uuid: $uuid,
            added: $now
        } |
        .sequence += [$num | tonumber] |
        .activeAccountNumber = ($num | tonumber) |
        .lastUpdated = $now
    ' "$SEQUENCE_FILE")

    write_json "$SEQUENCE_FILE" "$updated_sequence"

    echo "Added Account $account_num: $current_email"
}

# Remove account
cmd_remove_account() {
    if [[ $# -eq 0 ]]; then
        echo "Usage: $0 --remove-account <account_number|email>"
        exit 1
    fi

    local identifier="$1"
    local account_num

    if [[ ! -f "$SEQUENCE_FILE" ]]; then
        echo "Error: No accounts are managed yet"
        exit 1
    fi

    # Handle email vs numeric identifier
    if [[ "$identifier" =~ ^[0-9]+$ ]]; then
        account_num="$identifier"
    else
        # Validate email format
        if ! validate_email "$identifier"; then
            echo "Error: Invalid email format: $identifier"
            exit 1
        fi

        # Resolve email to account number
        account_num=$(resolve_account_identifier "$identifier")
        if [[ -z "$account_num" ]]; then
            echo "Error: No account found with email: $identifier"
            exit 1
        fi
    fi

    local account_info
    account_info=$(jq -r --arg num "$account_num" '.accounts[$num] // empty' "$SEQUENCE_FILE")

    if [[ -z "$account_info" ]]; then
        echo "Error: Account-$account_num does not exist"
        exit 1
    fi

    local email
    email=$(echo "$account_info" | jq -r '.email')

    local active_account
    active_account=$(jq -r '.activeAccountNumber' "$SEQUENCE_FILE")

    if [[ "$active_account" == "$account_num" ]]; then
        echo "Warning: Account-$account_num ($email) is currently active"
    fi

    echo -n "Are you sure you want to permanently remove Account-$account_num ($email)? [y/N] "
    read -r confirm

    if [[ "$confirm" != "y" && "$confirm" != "Y" ]]; then
        echo "Cancelled"
        exit 0
    fi

    # Remove backup files
    local platform
    platform=$(detect_platform)
    case "$platform" in
        macos)
            security delete-generic-password -s "Claude Code-Account-${account_num}-${email}" 2>/dev/null || true
            ;;
        linux|wsl)
            rm -f "$BACKUP_DIR/credentials/.claude-credentials-${account_num}-${email}.json"
            ;;
    esac
    rm -f "$BACKUP_DIR/configs/.claude-config-${account_num}-${email}.json"

    # Update sequence.json
    local updated_sequence
    updated_sequence=$(jq --arg num "$account_num" --arg now "$(date -u +%Y-%m-%dT%H:%M:%SZ)" '
        del(.accounts[$num]) |
        .sequence = (.sequence | map(select(. != ($num | tonumber)))) |
        .lastUpdated = $now
    ' "$SEQUENCE_FILE")

    write_json "$SEQUENCE_FILE" "$updated_sequence"

    echo "Account-$account_num ($email) has been removed"
}

# First-run setup workflow
first_run_setup() {
    local current_email
    current_email=$(get_current_account)

    if [[ "$current_email" == "none" ]]; then
        echo "No active Claude account found. Please log in first."
        return 1
    fi

    echo -n "No managed accounts found. Add current account ($current_email) to managed list? [Y/n] "
    read -r response

    if [[ "$response" == "n" || "$response" == "N" ]]; then
        echo "Setup cancelled. You can run '$0 --add-account' later."
        return 1
    fi

    cmd_add_account
    return 0
}

# List accounts
cmd_list() {
    if [[ ! -f "$SEQUENCE_FILE" ]]; then
        echo "No accounts are managed yet."
        first_run_setup
        exit 0
    fi

    # Get current active account from .claude.json
    local current_email
    current_email=$(get_current_account)

    # Find which account number corresponds to the current email
    local active_account_num=""
    if [[ "$current_email" != "none" ]]; then
        active_account_num=$(jq -r --arg email "$current_email" '.accounts | to_entries[] | select(.value.email == $email) | .key' "$SEQUENCE_FILE" 2>/dev/null)
    fi

    echo "Accounts:"
    jq -r --arg active "$active_account_num" '
        .sequence[] as $num |
        .accounts["\($num)"] |
        if "\($num)" == $active then
            " \($num): \(.email) (active)"
        else
            " \($num): \(.email)"
        end
    ' "$SEQUENCE_FILE"
}

# Switch to next account
cmd_switch() {
    if [[ ! -f "$SEQUENCE_FILE" ]]; then
        echo "Error: No accounts are managed yet"
        exit 1
    fi

    local current_email
    current_email=$(get_current_account)

    if [[ "$current_email" == "none" ]]; then
        echo "Error: No active Claude account found"
        exit 1
    fi

    # Check if current account is managed
    if ! account_exists "$current_email"; then
        echo "Notice: Active account '$current_email' was not managed."
        cmd_add_account
        local account_num
        account_num=$(jq -r '.activeAccountNumber' "$SEQUENCE_FILE")
        echo "It has been automatically added as Account-$account_num."
        echo "Please run './ccswitch.sh --switch' again to switch to the next account."
        exit 0
    fi

    # wait_for_claude_close

    local active_account sequence
    active_account=$(jq -r '.activeAccountNumber' "$SEQUENCE_FILE")
    sequence=($(jq -r '.sequence[]' "$SEQUENCE_FILE"))

    # Find next account in sequence
    local next_account current_index=0
    for i in "${!sequence[@]}"; do
        if [[ "${sequence[i]}" == "$active_account" ]]; then
            current_index=$i
            break
        fi
    done

    next_account="${sequence[$(((current_index + 1) % ${#sequence[@]}))]}"

    perform_switch "$next_account"
}

# Switch to specific account
cmd_switch_to() {
    if [[ $# -eq 0 ]]; then
        echo "Usage: $0 --switch-to <account_number|email>"
        exit 1
    fi

    local identifier="$1"
    local target_account

    if [[ ! -f "$SEQUENCE_FILE" ]]; then
        echo "Error: No accounts are managed yet"
        exit 1
    fi

    # Handle email vs numeric identifier
    if [[ "$identifier" =~ ^[0-9]+$ ]]; then
        target_account="$identifier"
    else
        # Validate email format
        if ! validate_email "$identifier"; then
            echo "Error: Invalid email format: $identifier"
            exit 1
        fi

        # Resolve email to account number
        target_account=$(resolve_account_identifier "$identifier")
        if [[ -z "$target_account" ]]; then
            echo "Error: No account found with email: $identifier"
            exit 1
        fi
    fi

    local account_info
    account_info=$(jq -r --arg num "$target_account" '.accounts[$num] // empty' "$SEQUENCE_FILE")

    if [[ -z "$account_info" ]]; then
        echo "Error: Account-$target_account does not exist"
        exit 1
    fi

    # wait_for_claude_close
    perform_switch "$target_account"
}

# Perform the actual account switch
perform_switch() {
    local target_account="$1"

    # Get current and target account info
    local current_account target_email current_email
    current_account=$(jq -r '.activeAccountNumber' "$SEQUENCE_FILE")
    target_email=$(jq -r --arg num "$target_account" '.accounts[$num].email' "$SEQUENCE_FILE")
    current_email=$(get_current_account)

    # Step 1: Backup current account
    local current_creds current_config
    current_creds=$(read_credentials)
    current_config=$(cat "$(get_claude_config_path)")

    write_account_credentials "$current_account" "$current_email" "$current_creds"
    write_account_config "$current_account" "$current_email" "$current_config"

    # Step 2: Retrieve target account
    local target_creds target_config
    target_creds=$(read_account_credentials "$target_account" "$target_email")
    target_config=$(read_account_config "$target_account" "$target_email")

    if [[ -z "$target_creds" || -z "$target_config" ]]; then
        echo "Error: Missing backup data for Account-$target_account"
        exit 1
    fi

    # Step 3: Activate target account
    write_credentials "$target_creds"

    # Extract oauthAccount from backup and validate
    local oauth_section
    oauth_section=$(echo "$target_config" | jq '.oauthAccount' 2>/dev/null)
    if [[ -z "$oauth_section" || "$oauth_section" == "null" ]]; then
        echo "Error: Invalid oauthAccount in backup"
        exit 1
    fi

    # Merge with current config and validate
    local merged_config
    merged_config=$(jq --argjson oauth "$oauth_section" '.oauthAccount = $oauth' "$(get_claude_config_path)" 2>/dev/null)
    if [[ $? -ne 0 ]]; then
        echo "Error: Failed to merge config"
        exit 1
    fi

    # Use existing safe write_json function
    write_json "$(get_claude_config_path)" "$merged_config"

    # Step 4: Update state
    local updated_sequence
    updated_sequence=$(jq --arg num "$target_account" --arg now "$(date -u +%Y-%m-%dT%H:%M:%SZ)" '
        .activeAccountNumber = ($num | tonumber) |
        .lastUpdated = $now
    ' "$SEQUENCE_FILE")

    write_json "$SEQUENCE_FILE" "$updated_sequence"

    echo "Switched to Account-$target_account ($target_email)"
    # Display updated account list
    cmd_list
    echo ""
    echo "Please restart Claude Code to use the new authentication."
    echo ""

}

# Refresh tokens for all accounts
cmd_refresh() {
    echo "Refreshing OAuth tokens..."
    echo ""

    local refreshed=0
    local failed=0
    local skipped=0

    # Check if Claude Code is running
    local claude_is_running=false
    if is_claude_running; then
        claude_is_running=true
        echo "Warning: Claude Code is running. Will skip refreshing the active account."
        echo " (Refreshing would invalidate tokens that Claude Code is using)"
        echo ""
    fi

    # Refresh current account
    local current_email
    current_email=$(get_current_account)

    if [[ "$current_email" != "none" ]]; then
        local refresh_token
        refresh_token=$(get_refresh_token)

        if [[ -n "$refresh_token" ]]; then
            # Skip refreshing current account if Claude is running
            if [[ "$claude_is_running" == "true" ]]; then
                echo "Skipping current account ($current_email): Claude Code is running"
                echo " To refresh this account, close Claude Code first and run --refresh again"
                # NOTE: counters use x=$((x + 1)) rather than ((x++)) because
                # ((x++)) returns exit status 1 when x is 0, which would abort
                # the script under `set -e`.
                skipped=$((skipped + 1))
            else
                echo -n "Refreshing current account ($current_email)... "
                local refresh_response
                refresh_response=$(refresh_oauth_token "$refresh_token")

                if [[ "$refresh_response" == ERROR:* ]]; then
                    # Extract error message after "ERROR:"
                    local error_msg="${refresh_response#ERROR:}"
                    echo "failed ($error_msg)"
                    failed=$((failed + 1))
                elif [[ -n "$refresh_response" ]]; then
                    local new_access new_refresh expires_in
                    new_access=$(echo "$refresh_response" | jq -r '.access_token // empty')
                    new_refresh=$(echo "$refresh_response" | jq -r '.refresh_token // empty')
                    expires_in=$(echo "$refresh_response" | jq -r '.expires_in // 28800')

                    if [[ -n "$new_access" ]]; then
                        if [[ -z "$new_refresh" ]]; then
                            new_refresh="$refresh_token"
                        fi
                        update_credentials_with_token "$new_access" "$new_refresh" "$expires_in"
                        echo "done (valid for $((expires_in / 3600))h)"
                        refreshed=$((refreshed + 1))
                    else
                        echo "failed (invalid response)"
                        failed=$((failed + 1))
                    fi
                else
                    echo "failed (no response)"
                    failed=$((failed + 1))
                fi
            fi
        else
            echo "Skipping current account ($current_email): no refresh token stored"
            skipped=$((skipped + 1))
        fi
    fi

    # Refresh managed accounts (if any)
    if [[ -f "$SEQUENCE_FILE" ]]; then
        local account_nums
        account_nums=$(jq -r '.sequence[]' "$SEQUENCE_FILE")

        for account_num in $account_nums; do
            local email
            email=$(jq -r --arg num "$account_num" '.accounts[$num].email' "$SEQUENCE_FILE")

            # Skip if this is the current account (already refreshed)
            if [[ "$email" == "$current_email" ]]; then
                continue
            fi

            local refresh_token
            refresh_token=$(get_account_refresh_token "$account_num" "$email")

            if [[ -n "$refresh_token" ]]; then
                echo -n "Refreshing account $account_num ($email)... "
                local refresh_response
                refresh_response=$(refresh_oauth_token "$refresh_token")

                if [[ "$refresh_response" == ERROR:* ]]; then
                    local error_msg="${refresh_response#ERROR:}"
                    echo "failed ($error_msg)"
                    failed=$((failed + 1))
                elif [[ -n "$refresh_response" ]]; then
                    local new_access new_refresh expires_in
                    new_access=$(echo "$refresh_response" | jq -r '.access_token // empty')
                    new_refresh=$(echo "$refresh_response" | jq -r '.refresh_token // empty')
                    expires_in=$(echo "$refresh_response" | jq -r '.expires_in // 28800')

                    if [[ -n "$new_access" ]]; then
                        if [[ -z "$new_refresh" ]]; then
                            new_refresh="$refresh_token"
                        fi
                        update_account_credentials_with_token "$account_num" "$email" "$new_access" "$new_refresh" "$expires_in"
                        echo "done (valid for $((expires_in / 3600))h)"
                        refreshed=$((refreshed + 1))
                    else
                        echo "failed (invalid response)"
                        failed=$((failed + 1))
                    fi
                else
                    echo "failed (no response)"
                    failed=$((failed + 1))
                fi
            else
                echo "Skipping account $account_num ($email): no refresh token stored"
                skipped=$((skipped + 1))
            fi
        done
    fi

    echo ""
    echo "Refresh complete: $refreshed succeeded, $failed failed, $skipped skipped"

    if [[ $failed -gt 0 ]]; then
        echo ""
        echo "Note: If refresh failed with 'invalid_grant', run 'claude /login' to re-authenticate."
    fi
}

# Show usage
show_usage() {
    echo "Multi-Account Switcher for Claude Code"
    echo "Usage: $0 [COMMAND]"
    echo ""
    echo "Commands:"
    echo " --add-account Add current account to managed accounts"
    echo " --remove-account <num|email> Remove account by number or email"
    echo " --list List all managed accounts"
    echo " --switch Rotate to next account in sequence"
    echo " --switch-to <num|email> Switch to specific account number or email"
    echo " --usage Show usage limits for all accounts"
    echo " --refresh Refresh OAuth tokens for all accounts"
    echo " --help Show this help message"
    echo ""
    echo "Examples:"
    echo " $0 --add-account"
    echo " $0 --list"
    echo " $0 --switch"
    echo " $0 --switch-to 2"
    echo " $0 --switch-to user@example.com"
    echo " $0 --remove-account user@example.com"
    echo " $0 --usage"
    echo " $0 --refresh"
}

# Main script logic
main() {
    # Basic checks - allow root execution in containers
    if [[ $EUID -eq 0 ]] && ! is_running_in_container; then
        echo "Error: Do not run this script as root (unless running in a container)"
        exit 1
    fi

    check_bash_version
    check_dependencies

    case "${1:-}" in
        --add-account)
            cmd_add_account
            ;;
        --remove-account)
            shift
            cmd_remove_account "$@"
            ;;
        --list)
            cmd_list
            ;;
        --switch)
            cmd_switch
            ;;
        --switch-to)
            shift
            cmd_switch_to "$@"
            ;;
        --usage)
            cmd_usage
            ;;
        --refresh)
            cmd_refresh
            ;;
        --help)
            show_usage
            ;;
        "")
            show_usage
            ;;
        *)
            echo "Error: Unknown command '$1'"
            show_usage
            exit 1
            ;;
    esac
}

# Check if script is being sourced or executed
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
    main "$@"
fi
diff --git a/reference/gastown b/reference/gastown
new file mode 160000
index 0000000..c6832e4
--- /dev/null
+++ b/reference/gastown
@@ -0,0 +1 @@
+Subproject commit c6832e4bac82874cf1d03da374f0cb21af937619
diff --git a/reference/get-shit-done b/reference/get-shit-done
new file mode 160000
index 0000000..5660b6f
---
/dev/null +++ b/reference/get-shit-done @@ -0,0 +1 @@ +Subproject commit 5660b6fc0b890f8d6ffc1c3f0c407f1a702a5f72 diff --git a/src/agent/accounts/extractor.ts b/src/agent/accounts/extractor.ts new file mode 100644 index 0000000..d724d1d --- /dev/null +++ b/src/agent/accounts/extractor.ts @@ -0,0 +1,67 @@ +import { existsSync, readFileSync } from 'node:fs'; +import { join } from 'node:path'; +import { homedir, platform } from 'node:os'; +import { execa } from 'execa'; + +export interface ExtractedAccount { + email: string; + accountUuid: string; + configJson: object; + credentials: string; +} + +/** + * Resolve the Claude Code config path with fallback logic. + * Primary: ~/.claude/.claude.json (if it exists and has oauthAccount) + * Fallback: ~/.claude.json + */ +function getClaudeConfigPath(): string { + const home = homedir(); + const primary = join(home, '.claude', '.claude.json'); + const fallback = join(home, '.claude.json'); + + if (existsSync(primary)) { + try { + const json = JSON.parse(readFileSync(primary, 'utf-8')); + if (json.oauthAccount) return primary; + } catch { + // invalid JSON, fall through + } + } + + return fallback; +} + +export async function extractCurrentClaudeAccount(): Promise<ExtractedAccount> { + const home = homedir(); + + // 1. Read Claude config (with fallback logic matching ccswitch) + const configPath = getClaudeConfigPath(); + const configRaw = readFileSync(configPath, 'utf-8'); + const configJson = JSON.parse(configRaw); + + const email = configJson.oauthAccount?.emailAddress; + const accountUuid = configJson.oauthAccount?.accountUuid; + + if (!email || !accountUuid) { + throw new Error('No Claude account found. Please log in with `claude` first.'); + } + + // 2. 
Read credentials (platform-specific) + let credentials: string; + if (platform() === 'darwin') { + // macOS: read from Keychain + const { stdout } = await execa('security', [ + 'find-generic-password', + '-s', 'Claude Code-credentials', + '-w', + ]); + credentials = stdout; + } else { + // Linux: read from file + const credPath = join(home, '.claude', '.credentials.json'); + credentials = readFileSync(credPath, 'utf-8'); + } + + return { email, accountUuid, configJson, credentials }; +} diff --git a/src/agent/accounts/index.ts b/src/agent/accounts/index.ts new file mode 100644 index 0000000..69e76e8 --- /dev/null +++ b/src/agent/accounts/index.ts @@ -0,0 +1,5 @@ +export { extractCurrentClaudeAccount, type ExtractedAccount } from './extractor.js'; +export { setupAccountConfigDir } from './setup.js'; +export { getAccountConfigDir } from './paths.js'; +export { checkAccountHealth, ensureAccountCredentials } from './usage.js'; +export type { AccountHealthResult, AccountUsage, UsageTier } from './usage.js'; diff --git a/src/agent/accounts/paths.ts b/src/agent/accounts/paths.ts new file mode 100644 index 0000000..f3bc7e8 --- /dev/null +++ b/src/agent/accounts/paths.ts @@ -0,0 +1,5 @@ +import { join } from 'node:path'; + +export function getAccountConfigDir(workspaceRoot: string, accountId: string): string { + return join(workspaceRoot, '.cw', 'accounts', accountId); +} diff --git a/src/agent/accounts/setup.ts b/src/agent/accounts/setup.ts new file mode 100644 index 0000000..5bf0c6c --- /dev/null +++ b/src/agent/accounts/setup.ts @@ -0,0 +1,15 @@ +import { mkdirSync, writeFileSync } from 'node:fs'; +import { join } from 'node:path'; + +export function setupAccountConfigDir( + configDir: string, + extracted: { configJson: object; credentials: string }, +): void { + mkdirSync(configDir, { recursive: true }); + + // Write .claude.json + writeFileSync(join(configDir, '.claude.json'), JSON.stringify(extracted.configJson, null, 2)); + + // Write .credentials.json + 
writeFileSync(join(configDir, '.credentials.json'), extracted.credentials); +} diff --git a/src/agent/accounts/usage.ts b/src/agent/accounts/usage.ts new file mode 100644 index 0000000..2d8fd08 --- /dev/null +++ b/src/agent/accounts/usage.ts @@ -0,0 +1,332 @@ +import { readFileSync, existsSync, writeFileSync, mkdirSync } from 'node:fs'; +import { join, dirname } from 'node:path'; +import type { Account } from '../../db/schema.js'; +import type { AgentInfo } from '../types.js'; +import type { AccountCredentialManager } from '../credentials/types.js'; +import { createModuleLogger } from '../../logger/index.js'; +import { getAccountConfigDir } from './paths.js'; +import { setupAccountConfigDir } from './setup.js'; + +const log = createModuleLogger('account-usage'); + +const USAGE_API_URL = 'https://api.anthropic.com/api/oauth/usage'; +const TOKEN_REFRESH_URL = 'https://console.anthropic.com/v1/oauth/token'; +const OAUTH_CLIENT_ID = '9d1c250a-e61b-44d9-88ed-5944d1962f5e'; +const TOKEN_REFRESH_BUFFER_MS = 300_000; // 5 minutes + +export interface OAuthCredentials { + accessToken: string; + refreshToken: string; + expiresAt: number; // ms epoch + subscriptionType: string | null; + rateLimitTier: string | null; +} + +export interface UsageTier { + utilization: number; + resets_at: string | null; +} + +export interface AccountUsage { + five_hour: UsageTier | null; + seven_day: UsageTier | null; + seven_day_sonnet: UsageTier | null; + seven_day_opus: UsageTier | null; + extra_usage: { + is_enabled: boolean; + monthly_limit: number | null; + used_credits: number | null; + utilization: number | null; + } | null; +} + +export interface AccountHealthResult { + id: string; + email: string; + provider: string; + credentialsValid: boolean; + tokenValid: boolean; + tokenExpiresAt: string | null; + subscriptionType: string | null; + error: string | null; + usage: AccountUsage | null; + isExhausted: boolean; + exhaustedUntil: string | null; + lastUsedAt: string | null; + agentCount: 
number; + activeAgentCount: number; +} + +function readCredentials(configDir: string): OAuthCredentials | null { + try { + const credPath = join(configDir, '.credentials.json'); + if (!existsSync(credPath)) return null; + const raw = readFileSync(credPath, 'utf-8'); + const parsed = JSON.parse(raw); + const oauth = parsed.claudeAiOauth; + if (!oauth || !oauth.accessToken || !oauth.refreshToken) return null; + return { + accessToken: oauth.accessToken, + refreshToken: oauth.refreshToken, + expiresAt: oauth.expiresAt, + subscriptionType: oauth.subscriptionType ?? null, + rateLimitTier: oauth.rateLimitTier ?? null, + }; + } catch { + return null; + } +} + +function isTokenExpired(credentials: OAuthCredentials): boolean { + return credentials.expiresAt < Date.now() + TOKEN_REFRESH_BUFFER_MS; +} + +/** + * Write credentials back to the config directory. + * Matches ccswitch's update_credentials_with_token() behavior. + */ +function writeCredentials( + configDir: string, + accessToken: string, + refreshToken: string, + expiresIn: number, +): void { + const credPath = join(configDir, '.credentials.json'); + + // Read existing credentials to preserve other fields + let existing: Record<string, unknown> = {}; + try { + if (existsSync(credPath)) { + existing = JSON.parse(readFileSync(credPath, 'utf-8')); + } + } catch { + // Start fresh if can't read + } + + // Calculate expiry in milliseconds (matching ccswitch behavior) + const nowMs = Date.now(); + const expiresAt = nowMs + (expiresIn * 1000); + + // Update claudeAiOauth section + const claudeAiOauth = (existing.claudeAiOauth as Record<string, unknown>) ?? 
{}; + claudeAiOauth.accessToken = accessToken; + claudeAiOauth.refreshToken = refreshToken; + claudeAiOauth.expiresAt = expiresAt; + existing.claudeAiOauth = claudeAiOauth; + + // Ensure directory exists + mkdirSync(dirname(credPath), { recursive: true }); + + // Write back (compact JSON for consistency with ccswitch) + writeFileSync(credPath, JSON.stringify(existing)); + log.debug({ configDir }, 'credentials written after token refresh'); +} + +async function refreshToken( + refreshTokenStr: string, +): Promise<{ accessToken: string; refreshToken: string; expiresIn: number } | null> { + try { + const response = await fetch(TOKEN_REFRESH_URL, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + grant_type: 'refresh_token', + refresh_token: refreshTokenStr, + client_id: OAUTH_CLIENT_ID, + scope: 'user:inference user:profile', + }), + }); + if (!response.ok) return null; + const data = await response.json(); + return { + accessToken: data.access_token, + refreshToken: data.refresh_token, + expiresIn: data.expires_in, + }; + } catch { + return null; + } +} + +async function fetchUsage(accessToken: string): Promise<AccountUsage | null> { + try { + const response = await fetch(USAGE_API_URL, { + method: 'GET', + headers: { + Authorization: `Bearer ${accessToken}`, + 'anthropic-beta': 'oauth-2025-04-20', + 'Content-Type': 'application/json', + }, + }); + if (!response.ok) return null; + const data = await response.json(); + return { + five_hour: data.five_hour ?? null, + seven_day: data.seven_day ?? null, + seven_day_sonnet: data.seven_day_sonnet ?? null, + seven_day_opus: data.seven_day_opus ?? null, + extra_usage: data.extra_usage ?? null, + }; + } catch { + return null; + } +} + +export async function checkAccountHealth( + account: Account, + agents: AgentInfo[], + credentialManager?: AccountCredentialManager, + workspaceRoot?: string, +): Promise<AccountHealthResult> { + const configDir = workspaceRoot ? 
getAccountConfigDir(workspaceRoot, account.id) : null; + + const accountAgents = agents.filter((a) => a.accountId === account.id); + const activeAgents = accountAgents.filter( + (a) => a.status === 'running' || a.status === 'waiting_for_input', + ); + + const base: AccountHealthResult = { + id: account.id, + email: account.email, + provider: account.provider, + credentialsValid: false, + tokenValid: false, + tokenExpiresAt: null, + subscriptionType: null, + error: null, + usage: null, + isExhausted: account.isExhausted, + exhaustedUntil: account.exhaustedUntil?.toISOString() ?? null, + lastUsedAt: account.lastUsedAt?.toISOString() ?? null, + agentCount: accountAgents.length, + activeAgentCount: activeAgents.length, + }; + + if (!configDir) { + return { ...base, error: 'Cannot derive config dir: workspaceRoot not provided' }; + } + + // Ensure DB credentials are written to disk so file-based checks can find them + if (account.configJson && account.credentials) { + try { + setupAccountConfigDir(configDir, { + configJson: JSON.parse(account.configJson), + credentials: account.credentials, + }); + } catch (err) { + log.warn({ accountId: account.id, err: err instanceof Error ? err.message : String(err) }, 'failed to sync DB credentials to disk'); + } + } + + try { + // Use credential manager if provided, otherwise fall back to direct functions + let accessToken: string; + let currentExpiresAt: number; + let subscriptionType: string | null = null; + + if (credentialManager) { + const result = await credentialManager.ensureValid(configDir, account.id); + if (!result.valid || !result.credentials) { + return { + ...base, + credentialsValid: result.credentials !== null, + error: result.error ?? 
'Credentials validation failed', + }; + } + accessToken = result.credentials.accessToken; + currentExpiresAt = result.credentials.expiresAt; + subscriptionType = result.credentials.subscriptionType; + } else { + // Legacy path: direct function calls + const credentials = readCredentials(configDir); + if (!credentials) { + return { + ...base, + error: 'Credentials file not found or unreadable', + }; + } + + accessToken = credentials.accessToken; + currentExpiresAt = credentials.expiresAt; + subscriptionType = credentials.subscriptionType; + + if (isTokenExpired(credentials)) { + log.info({ accountId: account.id, email: account.email }, 'token expired, refreshing'); + const refreshed = await refreshToken(credentials.refreshToken); + if (!refreshed) { + log.warn({ accountId: account.id }, 'token refresh failed'); + return { + ...base, + credentialsValid: true, + error: 'Token expired and refresh failed', + }; + } + accessToken = refreshed.accessToken; + + // Persist the refreshed credentials back to disk + const newRefreshToken = refreshed.refreshToken || credentials.refreshToken; + writeCredentials(configDir, accessToken, newRefreshToken, refreshed.expiresIn); + currentExpiresAt = Date.now() + (refreshed.expiresIn * 1000); + log.info({ accountId: account.id, expiresIn: refreshed.expiresIn }, 'token refreshed and persisted'); + } + } + + const usage = await fetchUsage(accessToken); + if (!usage) { + return { + ...base, + credentialsValid: true, + error: 'Usage API request failed', + }; + } + + return { + ...base, + credentialsValid: true, + tokenValid: true, + tokenExpiresAt: new Date(currentExpiresAt).toISOString(), + subscriptionType, + usage, + }; + } catch (err) { + return { + ...base, + error: err instanceof Error ? err.message : String(err), + }; + } +} + +/** + * Ensure account credentials are valid and refreshed if needed. + * Call this before spawning an agent to ensure the credentials file + * has fresh tokens that the agent subprocess can use. 
+ *
+ * Returns true if credentials are valid (or were successfully refreshed).
+ * Returns false if credentials are missing or refresh failed.
+ *
+ * @deprecated Use AccountCredentialManager.ensureValid() instead for event emission support.
+ */
+export async function ensureAccountCredentials(configDir: string): Promise<boolean> {
+  const credentials = readCredentials(configDir);
+  if (!credentials) {
+    log.warn({ configDir }, 'no credentials found');
+    return false;
+  }
+
+  if (!isTokenExpired(credentials)) {
+    log.debug({ configDir }, 'credentials valid, no refresh needed');
+    return true;
+  }
+
+  log.info({ configDir }, 'credentials expired, refreshing before spawn');
+  const refreshed = await refreshToken(credentials.refreshToken);
+  if (!refreshed) {
+    log.error({ configDir }, 'failed to refresh credentials');
+    return false;
+  }
+
+  const newRefreshToken = refreshed.refreshToken || credentials.refreshToken;
+  writeCredentials(configDir, refreshed.accessToken, newRefreshToken, refreshed.expiresIn);
+  log.info({ configDir, expiresIn: refreshed.expiresIn }, 'credentials refreshed before spawn');
+  return true;
+}
diff --git a/src/agent/alias.ts b/src/agent/alias.ts
new file mode 100644
index 0000000..24d35ba
--- /dev/null
+++ b/src/agent/alias.ts
@@ -0,0 +1,34 @@
+/**
+ * Agent Alias Generator
+ *
+ * Generates unique funny aliases for agents using adjective-animal combinations.
+ * E.g., "jolly-penguin", "bold-eagle", "swift-otter".
+ */
+
+import { uniqueNamesGenerator, adjectives, animals } from 'unique-names-generator';
+import type { AgentRepository } from '../db/repositories/agent-repository.js';
+
+const MAX_RETRIES = 10;
+
+/**
+ * Generate a unique agent alias that doesn't collide with existing agent names.
+ *
+ * @param repository - Agent repository to check for name collisions
+ * @returns A unique adjective-animal alias (e.g., "jolly-penguin")
+ */
+export async function generateUniqueAlias(repository: AgentRepository): Promise<string> {
+  for (let i = 0; i < MAX_RETRIES; i++) {
+    const alias = uniqueNamesGenerator({
+      dictionaries: [adjectives, animals],
+      separator: '-',
+      style: 'lowerCase',
+    });
+
+    const existing = await repository.findByName(alias);
+    if (!existing) {
+      return alias;
+    }
+  }
+
+  throw new Error(`Failed to generate unique alias after ${MAX_RETRIES} attempts`);
+}
diff --git a/src/agent/cleanup-manager.ts b/src/agent/cleanup-manager.ts
new file mode 100644
index 0000000..dab8f60
--- /dev/null
+++ b/src/agent/cleanup-manager.ts
@@ -0,0 +1,323 @@
+/**
+ * CleanupManager — Worktree, branch, and log cleanup for agents.
+ *
+ * Extracted from MultiProviderAgentManager. Handles all filesystem
+ * and git cleanup operations, plus orphan detection and reconciliation.
+ */
+
+import { promisify } from 'node:util';
+import { execFile } from 'node:child_process';
+import { readFile, readdir, rm } from 'node:fs/promises';
+import { join } from 'node:path';
+import type { AgentRepository } from '../db/repositories/agent-repository.js';
+import type { ProjectRepository } from '../db/repositories/project-repository.js';
+import type { EventBus, AgentCrashedEvent } from '../events/index.js';
+import { SimpleGitWorktreeManager } from '../git/manager.js';
+import { getProjectCloneDir } from '../git/project-clones.js';
+import { getStreamParser } from './providers/parsers/index.js';
+import { FileTailer } from './file-tailer.js';
+import { getProvider } from './providers/registry.js';
+import { createModuleLogger } from '../logger/index.js';
+import type { StreamEvent } from './providers/parsers/index.js';
+
+const log = createModuleLogger('cleanup-manager');
+const execFileAsync = promisify(execFile);
+
+/**
+ * Check if a process with the given PID is still alive.
+ */
+function isPidAlive(pid: number): boolean {
+  try {
+    process.kill(pid, 0);
+    return true;
+  } catch {
+    return false;
+  }
+}
+
+export class CleanupManager {
+  constructor(
+    private workspaceRoot: string,
+    private repository: AgentRepository,
+    private projectRepository: ProjectRepository,
+    private eventBus?: EventBus,
+  ) {}
+
+  /**
+   * Resolve the agent's working directory path.
+   */
+  private getAgentWorkdir(alias: string): string {
+    return join(this.workspaceRoot, 'agent-workdirs', alias);
+  }
+
+  /**
+   * Remove git worktrees for an agent.
+   * Handles both initiative-linked (multi-project) and standalone agents.
+   */
+  async removeAgentWorktrees(alias: string, initiativeId: string | null): Promise<void> {
+    const agentWorkdir = this.getAgentWorkdir(alias);
+
+    try {
+      await readdir(agentWorkdir);
+    } catch {
+      return;
+    }
+
+    if (initiativeId) {
+      const projects = await this.projectRepository.findProjectsByInitiativeId(initiativeId);
+      for (const project of projects) {
+        try {
+          const clonePath = join(this.workspaceRoot, getProjectCloneDir(project.name, project.id));
+          const wm = new SimpleGitWorktreeManager(clonePath, undefined, agentWorkdir);
+          await wm.remove(project.name);
+        } catch (err) {
+          log.warn({ alias, project: project.name, err: err instanceof Error ? err.message : String(err) }, 'failed to remove project worktree');
+        }
+      }
+    } else {
+      try {
+        const wm = new SimpleGitWorktreeManager(this.workspaceRoot, undefined, agentWorkdir);
+        await wm.remove('workspace');
+      } catch (err) {
+        log.warn({ alias, err: err instanceof Error ? err.message : String(err) }, 'failed to remove standalone worktree');
+      }
+    }
+
+    await rm(agentWorkdir, { recursive: true, force: true });
+    await this.pruneWorktrees(initiativeId);
+  }
+
+  /**
+   * Delete agent/<alias> branches from all relevant repos.
+   */
+  async removeAgentBranches(alias: string, initiativeId: string | null): Promise<void> {
+    const branchName = `agent/${alias}`;
+    const repoPaths: string[] = [];
+
+    if (initiativeId) {
+      const projects = await this.projectRepository.findProjectsByInitiativeId(initiativeId);
+      for (const project of projects) {
+        repoPaths.push(join(this.workspaceRoot, getProjectCloneDir(project.name, project.id)));
+      }
+    } else {
+      repoPaths.push(this.workspaceRoot);
+    }
+
+    for (const repoPath of repoPaths) {
+      try {
+        await execFileAsync('git', ['branch', '-D', branchName], { cwd: repoPath });
+      } catch {
+        // Branch may not exist
+      }
+    }
+  }
+
+  /**
+   * Remove log directory for an agent.
+   */
+  async removeAgentLogs(agentId: string): Promise<void> {
+    const logDir = join(this.workspaceRoot, '.cw', 'agent-logs', agentId);
+    await rm(logDir, { recursive: true, force: true });
+  }
+
+  /**
+   * Run git worktree prune on all relevant repos.
+   */
+  async pruneWorktrees(initiativeId: string | null): Promise<void> {
+    const repoPaths: string[] = [];
+
+    if (initiativeId) {
+      const projects = await this.projectRepository.findProjectsByInitiativeId(initiativeId);
+      for (const project of projects) {
+        repoPaths.push(join(this.workspaceRoot, getProjectCloneDir(project.name, project.id)));
+      }
+    } else {
+      repoPaths.push(this.workspaceRoot);
+    }
+
+    for (const repoPath of repoPaths) {
+      try {
+        await execFileAsync('git', ['worktree', 'prune'], { cwd: repoPath });
+      } catch (err) {
+        log.warn({ repoPath, err: err instanceof Error ? err.message : String(err) }, 'failed to prune worktrees');
+      }
+    }
+  }
+
+  /**
+   * Clean up orphaned agent workdirs (directories with no matching DB agent).
+   */
+  async cleanupOrphanedWorkdirs(): Promise<void> {
+    const workdirsPath = join(this.workspaceRoot, 'agent-workdirs');
+    let entries: string[];
+    try {
+      entries = await readdir(workdirsPath);
+    } catch {
+      return;
+    }
+
+    const agents = await this.repository.findAll();
+    const knownAliases = new Set(agents.map(a => a.name));
+
+    for (const entry of entries) {
+      if (!knownAliases.has(entry)) {
+        log.info({ orphan: entry }, 'removing orphaned agent workdir');
+        try {
+          await rm(join(workdirsPath, entry), { recursive: true, force: true });
+        } catch (err) {
+          log.warn({ orphan: entry, err: err instanceof Error ? err.message : String(err) }, 'failed to remove orphaned workdir');
+        }
+      }
+    }
+
+    try {
+      await execFileAsync('git', ['worktree', 'prune'], { cwd: this.workspaceRoot });
+    } catch { /* ignore */ }
+
+    const reposPath = join(this.workspaceRoot, 'repos');
+    try {
+      const repoDirs = await readdir(reposPath);
+      for (const repoDir of repoDirs) {
+        try {
+          await execFileAsync('git', ['worktree', 'prune'], { cwd: join(reposPath, repoDir) });
+        } catch { /* ignore */ }
+      }
+    } catch { /* no repos dir */ }
+  }
+
+  /**
+   * Clean up orphaned agent log directories (directories with no matching DB agent).
+   */
+  async cleanupOrphanedLogs(): Promise<void> {
+    const logsPath = join(this.workspaceRoot, '.cw', 'agent-logs');
+    let entries: string[];
+    try {
+      entries = await readdir(logsPath);
+    } catch {
+      return;
+    }
+
+    const agents = await this.repository.findAll();
+    const knownIds = new Set(agents.map(a => a.id));
+
+    for (const entry of entries) {
+      if (!knownIds.has(entry)) {
+        log.info({ orphan: entry }, 'removing orphaned agent log dir');
+        try {
+          await rm(join(logsPath, entry), { recursive: true, force: true });
+        } catch (err) {
+          log.warn({ orphan: entry, err: err instanceof Error ? err.message : String(err) }, 'failed to remove orphaned log dir');
+        }
+      }
+    }
+  }
+
+  /**
+   * Reconcile agent state after server restart.
+   * Checks all agents in 'running' status:
+   * - If PID is still alive: create FileTailer to resume streaming
+   * - If PID is dead but output file exists: process the output
+   * - Otherwise: mark as crashed
+   *
+   * @param activeAgents - Shared map from manager to register live agents
+   * @param onStreamEvent - Callback for stream events from tailer
+   * @param onAgentOutput - Callback to process raw agent output
+   * @param pollForCompletion - Callback to start polling for completion
+   */
+  async reconcileAfterRestart(
+    activeAgents: Map<string, {
+      agentId: string;
+      pid: number;
+      tailer: FileTailer;
+      outputFilePath: string;
+    }>,
+    onStreamEvent: (agentId: string, event: StreamEvent) => void,
+    onAgentOutput: (agentId: string, rawOutput: string, provider: NonNullable<ReturnType<typeof getProvider>>) => Promise<void>,
+    pollForCompletion: (agentId: string, pid: number) => void,
+  ): Promise<void> {
+    const runningAgents = await this.repository.findByStatus('running');
+    log.info({ runningCount: runningAgents.length }, 'reconciling agents after restart');
+
+    for (const agent of runningAgents) {
+      const alive = agent.pid ? isPidAlive(agent.pid) : false;
+      log.info({ agentId: agent.id, pid: agent.pid, alive }, 'reconcile: checking agent');
+
+      if (alive && agent.outputFilePath) {
+        log.debug({ agentId: agent.id, pid: agent.pid }, 'reconcile: resuming streaming for alive agent');
+
+        const parser = getStreamParser(agent.provider);
+        const tailer = new FileTailer({
+          filePath: agent.outputFilePath,
+          agentId: agent.id,
+          parser,
+          eventBus: this.eventBus,
+          onEvent: (event) => onStreamEvent(agent.id, event),
+          startFromBeginning: false,
+        });
+
+        tailer.start().catch((err) => {
+          log.warn({ agentId: agent.id, err: err instanceof Error ? err.message : String(err) }, 'failed to start tailer during reconcile');
+        });
+
+        const pid = agent.pid!;
+
+        activeAgents.set(agent.id, {
+          agentId: agent.id,
+          pid,
+          tailer,
+          outputFilePath: agent.outputFilePath,
+        });
+
+        pollForCompletion(agent.id, pid);
+      } else if (agent.outputFilePath) {
+        try {
+          const rawOutput = await readFile(agent.outputFilePath, 'utf-8');
+          if (rawOutput.trim()) {
+            const provider = getProvider(agent.provider);
+            if (provider) {
+              await onAgentOutput(agent.id, rawOutput, provider);
+              continue;
+            }
+          }
+        } catch { /* file missing or empty */ }
+        log.warn({ agentId: agent.id }, 'reconcile: marking agent crashed');
+        await this.repository.update(agent.id, { status: 'crashed' });
+        this.emitCrashed(agent, 'Server restarted, agent output not found');
+      } else {
+        log.warn({ agentId: agent.id }, 'reconcile: marking agent crashed');
+        await this.repository.update(agent.id, { status: 'crashed' });
+        this.emitCrashed(agent, 'Server restarted while agent was running');
+      }
+    }
+
+    try {
+      await this.cleanupOrphanedWorkdirs();
+    } catch (err) {
+      log.warn({ err: err instanceof Error ? err.message : String(err) }, 'orphaned workdir cleanup failed');
+    }
+    try {
+      await this.cleanupOrphanedLogs();
+    } catch (err) {
+      log.warn({ err: err instanceof Error ? err.message : String(err) }, 'orphaned log cleanup failed');
+    }
+  }
+
+  /**
+   * Emit a crashed event for an agent.
+   */
+  private emitCrashed(agent: { id: string; name: string; taskId: string | null }, error: string): void {
+    if (this.eventBus) {
+      const event: AgentCrashedEvent = {
+        type: 'agent:crashed',
+        timestamp: new Date(),
+        payload: {
+          agentId: agent.id,
+          name: agent.name,
+          taskId: agent.taskId ?? '',
+          error,
+        },
+      };
+      this.eventBus.emit(event);
+    }
+  }
+}
diff --git a/src/agent/content-serializer.ts b/src/agent/content-serializer.ts
new file mode 100644
index 0000000..58c1599
--- /dev/null
+++ b/src/agent/content-serializer.ts
@@ -0,0 +1,126 @@
+/**
+ * Content Serializer
+ *
+ * Converts Tiptap JSON page tree into markdown for agent prompts.
+ * Uses @tiptap/markdown's MarkdownManager for standard node serialization,
+ * with custom handling only for pageLink nodes.
+ */
+
+import { Node, type JSONContent } from '@tiptap/core';
+import StarterKit from '@tiptap/starter-kit';
+import Link from '@tiptap/extension-link';
+import { MarkdownManager } from '@tiptap/markdown';
+
+/**
+ * Minimal page shape needed for serialization.
+ */
+export interface PageForSerialization {
+  id: string;
+  parentPageId: string | null;
+  title: string;
+  content: string | null; // JSON string from Tiptap
+  sortOrder: number;
+}
+
+/**
+ * Server-side pageLink node — only needs schema definition + markdown rendering.
+ */
+const ServerPageLink = Node.create({
+  name: 'pageLink',
+  group: 'block',
+  atom: true,
+
+  addAttributes() {
+    return {
+      pageId: { default: null },
+    };
+  },
+
+  renderMarkdown(node: JSONContent) {
+    const pageId = (node.attrs?.pageId as string) ?? '';
+    return `[[page:${pageId}]]\n\n`;
+  },
+});
+
+let _manager: MarkdownManager | null = null;
+
+function getManager(): MarkdownManager {
+  if (!_manager) {
+    _manager = new MarkdownManager({
+      extensions: [StarterKit, Link, ServerPageLink],
+    });
+  }
+  return _manager;
+}
+
+/**
+ * Convert a Tiptap JSON document to markdown.
+ */
+export function tiptapJsonToMarkdown(json: unknown): string {
+  if (!json || typeof json !== 'object') return '';
+
+  const doc = json as JSONContent;
+  if (doc.type !== 'doc' || !Array.isArray(doc.content)) return '';
+
+  return getManager().serialize(doc).trim();
+}
+
+/**
+ * Serialize an array of pages into a single markdown document.
+ * Pages are organized as a tree (root first, then children by sortOrder).
+ *
+ * Each page is marked with <!-- page:$id --> so the agent can reference them.
+ */
+export function serializePageTree(pages: PageForSerialization[]): string {
+  if (pages.length === 0) return '';
+
+  // Build parent→children map
+  const childrenMap = new Map<string | null, PageForSerialization[]>();
+  for (const page of pages) {
+    const parentKey = page.parentPageId;
+    if (!childrenMap.has(parentKey)) {
+      childrenMap.set(parentKey, []);
+    }
+    childrenMap.get(parentKey)!.push(page);
+  }
+
+  // Sort children by sortOrder
+  for (const children of childrenMap.values()) {
+    children.sort((a, b) => a.sortOrder - b.sortOrder);
+  }
+
+  // Render tree depth-first
+  const sections: string[] = [];
+
+  function renderPage(page: PageForSerialization, depth: number): void {
+    const headerPrefix = '#'.repeat(Math.min(depth + 1, 6));
+    let section = `<!-- page:${page.id} -->\n${headerPrefix} ${page.title}`;
+
+    if (page.content) {
+      try {
+        const parsed = JSON.parse(page.content);
+        const md = tiptapJsonToMarkdown(parsed);
+        if (md.trim()) {
+          section += `\n\n${md}`;
+        }
+      } catch {
+        // Invalid JSON — skip content
+      }
+    }
+
+    sections.push(section);
+
+    const children = childrenMap.get(page.id) ?? [];
+    for (const child of children) {
+      renderPage(child, depth + 1);
+    }
+  }
+
+  // Start from root pages (parentPageId is null)
+  const roots = childrenMap.get(null) ?? [];
+  for (const root of roots) {
+    renderPage(root, 1);
+  }
+
+  return sections.join('\n\n');
+}
diff --git a/src/agent/credential-handler.ts b/src/agent/credential-handler.ts
new file mode 100644
index 0000000..32257c2
--- /dev/null
+++ b/src/agent/credential-handler.ts
@@ -0,0 +1,152 @@
+/**
+ * CredentialHandler — Account selection, credential management, and exhaustion handling.
+ *
+ * Extracted from MultiProviderAgentManager. Handles account lifecycle:
+ * selecting the next available account, writing credentials to disk,
+ * ensuring they're fresh, and marking accounts as exhausted on failure.
+ */
+
+import { readFileSync } from 'node:fs';
+import { join } from 'node:path';
+import type { AccountRepository } from '../db/repositories/account-repository.js';
+import type { AccountCredentialManager } from './credentials/types.js';
+import type { Account } from '../db/schema.js';
+import { ensureAccountCredentials } from './accounts/usage.js';
+import { getAccountConfigDir } from './accounts/paths.js';
+import { setupAccountConfigDir } from './accounts/setup.js';
+import { createModuleLogger } from '../logger/index.js';
+
+const log = createModuleLogger('credential-handler');
+
+/** Default exhaustion duration: 5 hours */
+const DEFAULT_EXHAUSTION_HOURS = 5;
+
+export class CredentialHandler {
+  constructor(
+    private workspaceRoot: string,
+    private accountRepository?: AccountRepository,
+    private credentialManager?: AccountCredentialManager,
+  ) {}
+
+  /**
+   * Select the next available account for a provider.
+   * Clears expired exhaustion, returns least-recently-used non-exhausted account.
+   * Returns null if no accounts are available.
+   */
+  async selectAccount(providerName: string): Promise<{ account: Account; accountId: string; configDir: string } | null> {
+    if (!this.accountRepository) return null;
+
+    await this.accountRepository.clearExpiredExhaustion();
+    const account = await this.accountRepository.findNextAvailable(providerName);
+    if (!account) return null;
+
+    const configDir = getAccountConfigDir(this.workspaceRoot, account.id);
+    await this.accountRepository.updateLastUsed(account.id);
+
+    return { account, accountId: account.id, configDir };
+  }
+
+  /**
+   * Write account credentials from DB to the convention-based config directory.
+   * Must be called before ensureCredentials so the files exist on disk.
+   */
+  writeCredentialsToDisk(account: Account, configDir: string): void {
+    if (account.configJson && account.credentials) {
+      setupAccountConfigDir(configDir, {
+        configJson: JSON.parse(account.configJson),
+        credentials: account.credentials,
+      });
+      log.debug({ accountId: account.id, configDir }, 'wrote account credentials from DB to disk');
+    } else {
+      log.warn({ accountId: account.id }, 'account has no stored credentials in DB');
+    }
+  }
+
+  /**
+   * Read refreshed credentials from disk and persist back to DB.
+   * Called after credential refresh to keep DB in sync.
+   */
+  async persistRefreshedCredentials(accountId: string, configDir: string): Promise<void> {
+    if (!this.accountRepository) return;
+    try {
+      const credPath = join(configDir, '.credentials.json');
+      const credentials = readFileSync(credPath, 'utf-8');
+      await this.accountRepository.updateCredentials(accountId, credentials);
+      log.debug({ accountId }, 'persisted refreshed credentials back to DB');
+    } catch (err) {
+      log.warn({ accountId, err: err instanceof Error ? err.message : String(err) }, 'failed to persist refreshed credentials to DB');
+    }
+  }
+
+  /**
+   * Ensure credentials are valid before spawn/resume.
+   * Uses credentialManager if available, otherwise falls back to legacy function.
+   * Returns { valid, refreshed } so callers can persist refresh back to DB.
+   */
+  async ensureCredentials(configDir: string, accountId?: string): Promise<{ valid: boolean; refreshed: boolean }> {
+    if (this.credentialManager) {
+      const result = await this.credentialManager.ensureValid(configDir, accountId);
+      return { valid: result.valid, refreshed: result.refreshed };
+    }
+    const valid = await ensureAccountCredentials(configDir);
+    return { valid, refreshed: false };
+  }
+
+  /**
+   * Check if an error message indicates usage limit exhaustion.
+ */ + isUsageLimitError(errorMessage: string): boolean { + const patterns = [ + 'usage limit', + 'rate limit', + 'quota exceeded', + 'too many requests', + 'capacity', + 'exhausted', + ]; + const lower = errorMessage.toLowerCase(); + return patterns.some((p) => lower.includes(p)); + } + + /** + * Handle account exhaustion: mark current account exhausted and find next available. + * Returns the new account info if failover succeeded, null otherwise. + * Does NOT re-spawn — the caller (manager) handles that. + */ + async handleExhaustion( + accountId: string, + providerName: string, + ): Promise<{ account: Account; accountId: string; configDir: string } | null> { + if (!this.accountRepository) return null; + + log.warn({ accountId, provider: providerName }, 'account exhausted, attempting failover'); + + // Mark current account as exhausted + const exhaustedUntil = new Date(Date.now() + DEFAULT_EXHAUSTION_HOURS * 60 * 60 * 1000); + await this.accountRepository.markExhausted(accountId, exhaustedUntil); + + // Find next available account + const nextAccount = await this.accountRepository.findNextAvailable(providerName); + if (!nextAccount) { + log.warn({ accountId }, 'account failover failed, no accounts available'); + return null; + } + log.info({ previousAccountId: accountId, newAccountId: nextAccount.id }, 'account failover successful'); + + // Write credentials and ensure they're fresh + const nextConfigDir = getAccountConfigDir(this.workspaceRoot, nextAccount.id); + this.writeCredentialsToDisk(nextAccount, nextConfigDir); + const { valid, refreshed } = await this.ensureCredentials(nextConfigDir, nextAccount.id); + if (!valid) { + log.warn({ newAccountId: nextAccount.id }, 'failed to refresh failover account credentials'); + return null; + } + if (refreshed) { + await this.persistRefreshedCredentials(nextAccount.id, nextConfigDir); + } + + await this.accountRepository.updateLastUsed(nextAccount.id); + + return { account: nextAccount, accountId: nextAccount.id, 
configDir: nextConfigDir }; + } +} diff --git a/src/agent/credentials/default-credential-manager.ts b/src/agent/credentials/default-credential-manager.ts new file mode 100644 index 0000000..2d518d6 --- /dev/null +++ b/src/agent/credentials/default-credential-manager.ts @@ -0,0 +1,318 @@ +/** + * Default Account Credential Manager + * + * File-based adapter implementing AccountCredentialManager port. + * Reads/writes credentials from ~/.cw/accounts/<uuid>/.credentials.json + * and emits events on credential state changes. + */ + +import { readFileSync, existsSync, writeFileSync, mkdirSync } from 'node:fs'; +import { join, dirname } from 'node:path'; +import type { EventBus } from '../../events/index.js'; +import type { + AccountCredentialManager, + OAuthCredentials, + RefreshResult, + CredentialValidationResult, +} from './types.js'; +import type { + AccountCredentialsRefreshedEvent, + AccountCredentialsExpiredEvent, + AccountCredentialsValidatedEvent, +} from '../../events/types.js'; +import { createModuleLogger } from '../../logger/index.js'; + +const log = createModuleLogger('credential-manager'); + +/** Anthropic OAuth token refresh endpoint */ +const TOKEN_REFRESH_URL = 'https://console.anthropic.com/v1/oauth/token'; + +/** OAuth client ID for Claude CLI */ +const OAUTH_CLIENT_ID = '9d1c250a-e61b-44d9-88ed-5944d1962f5e'; + +/** Buffer before expiry to trigger refresh (5 minutes) */ +const TOKEN_REFRESH_BUFFER_MS = 300_000; + +/** + * DefaultAccountCredentialManager - File-based credential management with event emission. + * + * Implements the AccountCredentialManager port for managing OAuth credentials + * stored in account config directories. + */ +export class DefaultAccountCredentialManager implements AccountCredentialManager { + constructor(private eventBus?: EventBus) {} + + /** + * Read credentials from a config directory. 
   * Returns null when the file is missing, unreadable, or malformed.
   */
  read(configDir: string): OAuthCredentials | null {
    try {
      const credPath = join(configDir, '.credentials.json');
      if (!existsSync(credPath)) return null;

      const raw = readFileSync(credPath, 'utf-8');
      const parsed = JSON.parse(raw);
      const oauth = parsed.claudeAiOauth;

      // Both tokens are required; anything less is treated as "no credentials".
      if (!oauth || !oauth.accessToken || !oauth.refreshToken) return null;

      return {
        accessToken: oauth.accessToken,
        refreshToken: oauth.refreshToken,
        expiresAt: oauth.expiresAt,
        subscriptionType: oauth.subscriptionType ?? null,
        rateLimitTier: oauth.rateLimitTier ?? null,
      };
    } catch {
      // Malformed JSON or read error is reported the same as "missing".
      return null;
    }
  }

  /**
   * Check if credentials are expired or about to expire.
   * Treats tokens within TOKEN_REFRESH_BUFFER_MS of expiry as already expired.
   */
  isExpired(credentials: OAuthCredentials): boolean {
    return credentials.expiresAt < Date.now() + TOKEN_REFRESH_BUFFER_MS;
  }

  /**
   * Refresh an access token using the refresh token.
   * Returns null on any HTTP or network failure (logged, never thrown).
   */
  async refresh(configDir: string, refreshToken: string): Promise<RefreshResult | null> {
    try {
      const response = await fetch(TOKEN_REFRESH_URL, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
          grant_type: 'refresh_token',
          refresh_token: refreshToken,
          client_id: OAUTH_CLIENT_ID,
          scope: 'user:inference user:profile',
        }),
      });

      if (!response.ok) {
        log.warn({ configDir, status: response.status }, 'token refresh failed');
        return null;
      }

      const data = await response.json();
      return {
        accessToken: data.access_token,
        refreshToken: data.refresh_token,
        expiresIn: data.expires_in,
      };
    } catch (err) {
      log.error({ configDir, err: err instanceof Error ? err.message : String(err) }, 'token refresh error');
      return null;
    }
  }

  /**
   * Write updated credentials to the config directory.
   * Merges into the existing file so unrelated fields are preserved.
   */
  write(
    configDir: string,
    accessToken: string,
    refreshToken: string,
    expiresIn: number,
  ): void {
    const credPath = join(configDir, '.credentials.json');

    // Read existing credentials to preserve other fields
    let existing: Record<string, unknown> = {};
    try {
      if (existsSync(credPath)) {
        existing = JSON.parse(readFileSync(credPath, 'utf-8'));
      }
    } catch {
      // Start fresh if can't read
    }

    // Calculate expiry in milliseconds
    const nowMs = Date.now();
    const expiresAt = nowMs + expiresIn * 1000;

    // Update claudeAiOauth section
    const claudeAiOauth = (existing.claudeAiOauth as Record<string, unknown>) ?? {};
    claudeAiOauth.accessToken = accessToken;
    claudeAiOauth.refreshToken = refreshToken;
    claudeAiOauth.expiresAt = expiresAt;
    existing.claudeAiOauth = claudeAiOauth;

    // Ensure directory exists
    mkdirSync(dirname(credPath), { recursive: true });

    // Write back (compact JSON for consistency)
    writeFileSync(credPath, JSON.stringify(existing));
    log.debug({ configDir }, 'credentials written after token refresh');
  }

  /**
   * Ensure credentials are valid, refreshing if needed.
   * Emits expired/validated/refreshed events reflecting each outcome.
   */
  async ensureValid(configDir: string, accountId?: string): Promise<CredentialValidationResult> {
    const credentials = this.read(configDir);

    if (!credentials) {
      log.warn({ configDir, accountId }, 'no credentials found');
      this.emitExpired(accountId, 'credentials_missing', 'Credentials file not found');
      return {
        valid: false,
        credentials: null,
        error: 'Credentials file not found',
        refreshed: false,
      };
    }

    if (!this.isExpired(credentials)) {
      log.debug({ configDir, accountId }, 'credentials valid, no refresh needed');
      this.emitValidated(accountId, true, credentials.expiresAt, false);
      return {
        valid: true,
        credentials,
        error: null,
        refreshed: false,
      };
    }

    // Credentials expired — attempt refresh
    log.info({ configDir, accountId }, 'credentials expired, refreshing');
    const previousExpiresAt = credentials.expiresAt;
    const refreshed = await this.refresh(configDir, credentials.refreshToken);

    if (!refreshed) {
      log.error({ configDir, accountId }, 'failed to refresh credentials');
      this.emitExpired(accountId, 'refresh_failed', 'Token refresh failed');
      return {
        valid: false,
        credentials: null,
        error: 'Token refresh failed',
        refreshed: false,
      };
    }

    // Write refreshed credentials. Some providers omit the rotated refresh
    // token, so fall back to the one we already hold.
    const newRefreshToken = refreshed.refreshToken || credentials.refreshToken;
    this.write(configDir, refreshed.accessToken, newRefreshToken, refreshed.expiresIn);

    const newExpiresAt = Date.now() + refreshed.expiresIn * 1000;
    log.info({ configDir, accountId, expiresIn: refreshed.expiresIn }, 'credentials refreshed');

    this.emitRefreshed(accountId, newExpiresAt, previousExpiresAt);
    this.emitValidated(accountId, true, newExpiresAt, true);

    // Read back updated credentials
    const updatedCredentials = this.read(configDir);
    return {
      valid: true,
      credentials: updatedCredentials,
      error: null,
      refreshed: true,
    };
  }

  /**
   * Validate credentials without attempting refresh.
   * Emits only the validated event; never mutates state on disk.
   */
  async validate(configDir: string, accountId?: string): Promise<CredentialValidationResult> {
    const credentials = this.read(configDir);

    if (!credentials) {
      this.emitValidated(accountId, false, null, false);
      return {
        valid: false,
        credentials: null,
        error: 'Credentials file not found',
        refreshed: false,
      };
    }

    const expired = this.isExpired(credentials);
    this.emitValidated(accountId, !expired, credentials.expiresAt, false);

    if (expired) {
      return {
        valid: false,
        credentials,
        error: 'Token expired',
        refreshed: false,
      };
    }

    return {
      valid: true,
      credentials,
      error: null,
      refreshed: false,
    };
  }

  /**
   * Emit credentials refreshed event.
   */
  private emitRefreshed(
    accountId: string | undefined,
    expiresAt: number,
    previousExpiresAt: number | null,
  ): void {
    if (!this.eventBus) return;

    const event: AccountCredentialsRefreshedEvent = {
      type: 'account:credentials_refreshed',
      timestamp: new Date(),
      payload: {
        accountId: accountId ?? null,
        expiresAt,
        previousExpiresAt,
      },
    };
    this.eventBus.emit(event);
  }

  /**
   * Emit credentials expired event.
   */
  private emitExpired(
    accountId: string | undefined,
    reason: 'token_expired' | 'refresh_failed' | 'credentials_missing',
    error: string | null,
  ): void {
    if (!this.eventBus) return;

    const event: AccountCredentialsExpiredEvent = {
      type: 'account:credentials_expired',
      timestamp: new Date(),
      payload: {
        accountId: accountId ?? null,
        reason,
        error,
      },
    };
    this.eventBus.emit(event);
  }

  /**
   * Emit credentials validated event.
   */
  private emitValidated(
    accountId: string | undefined,
    valid: boolean,
    expiresAt: number | null,
    wasRefreshed: boolean,
  ): void {
    if (!this.eventBus) return;

    const event: AccountCredentialsValidatedEvent = {
      type: 'account:credentials_validated',
      timestamp: new Date(),
      payload: {
        accountId: accountId ?? null,
        valid,
        expiresAt,
        wasRefreshed,
      },
    };
    this.eventBus.emit(event);
  }
}

diff --git a/src/agent/credentials/index.ts b/src/agent/credentials/index.ts new file mode 100644 index 0000000..ee47756 --- /dev/null +++ b/src/agent/credentials/index.ts @@ -0,0 +1,17 @@

/**
 * Credentials Module - Public API
 *
 * Exports the AccountCredentialManager port interface and default adapter.
 * All modules should import from this index file.
 */

// Port interface and types
export type {
  AccountCredentialManager,
  OAuthCredentials,
  RefreshResult,
  CredentialValidationResult,
} from './types.js';

// Adapter implementation
export { DefaultAccountCredentialManager } from './default-credential-manager.js';

diff --git a/src/agent/credentials/types.ts b/src/agent/credentials/types.ts new file mode 100644 index 0000000..38a97bc --- /dev/null +++ b/src/agent/credentials/types.ts @@ -0,0 +1,98 @@

/**
 * Account Credential Manager Types
 *
 * Port interface for managing OAuth credentials for agent accounts.
 * The credential manager reads, validates, refreshes, and persists tokens,
 * emitting events on state changes.
 */

/**
 * OAuth credentials stored in the account's config directory.
 */
export interface OAuthCredentials {
  accessToken: string;
  refreshToken: string;
  /** Expiry time in milliseconds since epoch */
  expiresAt: number;
  subscriptionType: string | null;
  rateLimitTier: string | null;
}

/**
 * Result of a token refresh attempt.
 */
export interface RefreshResult {
  accessToken: string;
  refreshToken: string;
  /** Token lifetime in seconds */
  expiresIn: number;
}

/**
 * Result of credential validation or ensureValid operation.
 */
export interface CredentialValidationResult {
  /** Whether credentials are currently valid and usable */
  valid: boolean;
  /** Current credentials if valid, null otherwise */
  credentials: OAuthCredentials | null;
  /** Error message if validation failed */
  error: string | null;
  /** Whether credentials were refreshed during this operation */
  refreshed: boolean;
}

/**
 * Port interface for account credential management.
 *
 * Implementations:
 * - DefaultAccountCredentialManager: File-based adapter using ~/.cw/accounts/<uuid>/.credentials.json
 */
export interface AccountCredentialManager {
  /**
   * Read credentials from a config directory.
   * Returns null if credentials file is missing or malformed.
   */
  read(configDir: string): OAuthCredentials | null;

  /**
   * Check if credentials are expired or about to expire.
   * Uses a buffer (default 5 minutes) to preemptively refresh.
   */
  isExpired(credentials: OAuthCredentials): boolean;

  /**
   * Refresh an access token using the refresh token.
   * Returns null if refresh fails.
   */
  refresh(configDir: string, refreshToken: string): Promise<RefreshResult | null>;

  /**
   * Write updated credentials to the config directory.
   * Preserves other fields in the credentials file.
   */
  write(
    configDir: string,
    accessToken: string,
    refreshToken: string,
    expiresIn: number,
  ): void;

  /**
   * Ensure credentials are valid, refreshing if needed.
   * Emits events on refresh or expiration.
   *
   * @param configDir - Path to the account's config directory
   * @param accountId - Optional account ID for event payloads
   */
  ensureValid(configDir: string, accountId?: string): Promise<CredentialValidationResult>;

  /**
   * Validate credentials without attempting refresh.
   * Useful for health checks where you want to report state without side effects.
   *
   * @param configDir - Path to the account's config directory
   * @param accountId - Optional account ID for event payloads
   */
  validate(configDir: string, accountId?: string): Promise<CredentialValidationResult>;
}

diff --git a/src/agent/file-io.test.ts b/src/agent/file-io.test.ts new file mode 100644 index 0000000..dd63485 --- /dev/null +++ b/src/agent/file-io.test.ts @@ -0,0 +1,340 @@

/**
 * File-Based Agent I/O Tests
 *
 * Each test runs in an isolated temp directory created in beforeEach and
 * removed in afterEach, so tests never share on-disk state.
 */

import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import { mkdirSync, writeFileSync, rmSync, existsSync } from 'node:fs';
import { join } from 'node:path';
import { tmpdir } from 'node:os';
import { randomUUID } from 'crypto';
import {
  writeInputFiles,
  readSummary,
  readPhaseFiles,
  readTaskFiles,
  readDecisionFiles,
  readPageFiles,
  generateId,
} from './file-io.js';
import type { Initiative, Phase, Task } from '../db/schema.js';

// Per-test scratch directory (unique per run via randomUUID).
let testDir: string;

beforeEach(() => {
  testDir = join(tmpdir(), `cw-file-io-test-${randomUUID()}`);
  mkdirSync(testDir, { recursive: true });
});

afterEach(() => {
  rmSync(testDir, { recursive: true, force: true });
});

describe('generateId', () => {
  it('returns a non-empty string', () => {
    const id = generateId();
    expect(id).toBeTruthy();
    expect(typeof id).toBe('string');
  });

  it('returns unique values', () => {
    const ids = new Set(Array.from({ length: 100 }, () => generateId()));
    expect(ids.size).toBe(100);
  });
});

describe('writeInputFiles', () => {
  it('writes initiative.md with frontmatter', () => {
    const initiative: Initiative = {
      id: 'init-1',
      name: 'Test Initiative',
      status: 'active',
      mergeRequiresApproval: true,
      mergeTarget: 'main',
      createdAt: new Date('2026-01-01'),
      updatedAt: new Date('2026-01-02'),
    };

    writeInputFiles({ agentWorkdir: testDir, initiative });

    const filePath = join(testDir, '.cw', 'input', 'initiative.md');
    expect(existsSync(filePath)).toBe(true);
  });

  it('writes
phase.md with frontmatter', () => {
    const phase = {
      id: 'phase-1',
      initiativeId: 'init-1',
      number: 1,
      name: 'Phase One',
      description: 'First phase',
      status: 'pending',
      createdAt: new Date(),
      updatedAt: new Date(),
    } as Phase;

    writeInputFiles({ agentWorkdir: testDir, phase });

    const filePath = join(testDir, '.cw', 'input', 'phase.md');
    expect(existsSync(filePath)).toBe(true);
  });

  it('writes task.md with frontmatter', () => {
    const task = {
      id: 'task-1',
      name: 'Test Task',
      description: 'Do the thing',
      category: 'execute',
      type: 'auto',
      priority: 'medium',
      status: 'pending',
      order: 1,
      createdAt: new Date(),
      updatedAt: new Date(),
    } as Task;

    writeInputFiles({ agentWorkdir: testDir, task });

    const filePath = join(testDir, '.cw', 'input', 'task.md');
    expect(existsSync(filePath)).toBe(true);
  });

  it('writes pages to pages/ subdirectory', () => {
    writeInputFiles({
      agentWorkdir: testDir,
      pages: [
        { id: 'page-1', parentPageId: null, title: 'Root', content: null, sortOrder: 0 },
        { id: 'page-2', parentPageId: 'page-1', title: 'Child', content: null, sortOrder: 1 },
      ],
    });

    expect(existsSync(join(testDir, '.cw', 'input', 'pages', 'page-1.md'))).toBe(true);
    expect(existsSync(join(testDir, '.cw', 'input', 'pages', 'page-2.md'))).toBe(true);
  });

  it('handles empty options without error', () => {
    // Even with no context, the input directory itself must be created.
    writeInputFiles({ agentWorkdir: testDir });
    expect(existsSync(join(testDir, '.cw', 'input'))).toBe(true);
  });
});

describe('readSummary', () => {
  it('reads SUMMARY.md with frontmatter', () => {
    const outputDir = join(testDir, '.cw', 'output');
    mkdirSync(outputDir, { recursive: true });

    writeFileSync(
      join(outputDir, 'SUMMARY.md'),
      `---
files_modified:
  - src/foo.ts
  - src/bar.ts
---
Task completed successfully. Refactored the module.
`,
      'utf-8',
    );

    const summary = readSummary(testDir);
    expect(summary).not.toBeNull();
    expect(summary!.body).toBe('Task completed successfully. Refactored the module.');
    expect(summary!.filesModified).toEqual(['src/foo.ts', 'src/bar.ts']);
  });

  it('returns null when SUMMARY.md does not exist', () => {
    const summary = readSummary(testDir);
    expect(summary).toBeNull();
  });

  it('handles SUMMARY.md without frontmatter', () => {
    const outputDir = join(testDir, '.cw', 'output');
    mkdirSync(outputDir, { recursive: true });
    writeFileSync(join(outputDir, 'SUMMARY.md'), 'Just plain text\n', 'utf-8');

    const summary = readSummary(testDir);
    expect(summary).not.toBeNull();
    expect(summary!.body).toBe('Just plain text');
    expect(summary!.filesModified).toBeUndefined();
  });

  it('handles empty files_modified', () => {
    const outputDir = join(testDir, '.cw', 'output');
    mkdirSync(outputDir, { recursive: true });
    writeFileSync(
      join(outputDir, 'SUMMARY.md'),
      `---
files_modified: []
---
Done.
`,
      'utf-8',
    );

    const summary = readSummary(testDir);
    expect(summary!.filesModified).toEqual([]);
  });
});

describe('readPhaseFiles', () => {
  it('reads phase files from phases/ directory', () => {
    const phasesDir = join(testDir, '.cw', 'output', 'phases');
    mkdirSync(phasesDir, { recursive: true });

    writeFileSync(
      join(phasesDir, 'abc123.md'),
      `---
title: Database Schema
dependencies:
  - xyz789
---
Create the user tables and auth schema.
`,
      'utf-8',
    );

    const phases = readPhaseFiles(testDir);
    expect(phases).toHaveLength(1);
    // The phase id comes from the filename, not the frontmatter.
    expect(phases[0].id).toBe('abc123');
    expect(phases[0].title).toBe('Database Schema');
    expect(phases[0].dependencies).toEqual(['xyz789']);
    expect(phases[0].body).toBe('Create the user tables and auth schema.');
  });

  it('returns empty array when directory does not exist', () => {
    const phases = readPhaseFiles(testDir);
    expect(phases).toEqual([]);
  });

  it('handles phases with no dependencies', () => {
    const phasesDir = join(testDir, '.cw', 'output', 'phases');
    mkdirSync(phasesDir, { recursive: true });

    writeFileSync(
      join(phasesDir, 'p1.md'),
      `---
title: Foundation
---
Set up the base.
`,
      'utf-8',
    );

    const phases = readPhaseFiles(testDir);
    expect(phases[0].dependencies).toEqual([]);
  });
});

describe('readTaskFiles', () => {
  it('reads task files from tasks/ directory', () => {
    const tasksDir = join(testDir, '.cw', 'output', 'tasks');
    mkdirSync(tasksDir, { recursive: true });

    writeFileSync(
      join(tasksDir, 'task-1.md'),
      `---
title: Implement login
category: execute
type: auto
dependencies:
  - task-0
---
Build the login form and submit handler.
`,
      'utf-8',
    );

    const tasks = readTaskFiles(testDir);
    expect(tasks).toHaveLength(1);
    expect(tasks[0].id).toBe('task-1');
    expect(tasks[0].title).toBe('Implement login');
    expect(tasks[0].category).toBe('execute');
    expect(tasks[0].type).toBe('auto');
    expect(tasks[0].dependencies).toEqual(['task-0']);
    expect(tasks[0].body).toBe('Build the login form and submit handler.');
  });

  it('defaults category and type when missing', () => {
    const tasksDir = join(testDir, '.cw', 'output', 'tasks');
    mkdirSync(tasksDir, { recursive: true });
    writeFileSync(join(tasksDir, 't1.md'), `---\ntitle: Minimal\n---\nDo it.\n`, 'utf-8');

    const tasks = readTaskFiles(testDir);
    expect(tasks[0].category).toBe('execute');
    expect(tasks[0].type).toBe('auto');
  });

  it('returns empty array when directory does not exist', () => {
    expect(readTaskFiles(testDir)).toEqual([]);
  });
});

describe('readDecisionFiles', () => {
  it('reads decision files from decisions/ directory', () => {
    const decisionsDir = join(testDir, '.cw', 'output', 'decisions');
    mkdirSync(decisionsDir, { recursive: true });

    writeFileSync(
      join(decisionsDir, 'd1.md'),
      `---
topic: Authentication
decision: Use JWT
reason: Stateless and scalable
---
Additional context about the decision.
`,
      'utf-8',
    );

    const decisions = readDecisionFiles(testDir);
    expect(decisions).toHaveLength(1);
    expect(decisions[0].id).toBe('d1');
    expect(decisions[0].topic).toBe('Authentication');
    expect(decisions[0].decision).toBe('Use JWT');
    expect(decisions[0].reason).toBe('Stateless and scalable');
    expect(decisions[0].body).toBe('Additional context about the decision.');
  });

  it('returns empty array when directory does not exist', () => {
    expect(readDecisionFiles(testDir)).toEqual([]);
  });
});

describe('readPageFiles', () => {
  it('reads page files from pages/ directory', () => {
    const pagesDir = join(testDir, '.cw', 'output', 'pages');
    mkdirSync(pagesDir, { recursive: true });

    writeFileSync(
      join(pagesDir, 'page-abc.md'),
      `---
title: Architecture Overview
summary: Updated the overview section
---
# Architecture

New content for the page.
`,
      'utf-8',
    );

    const pages = readPageFiles(testDir);
    expect(pages).toHaveLength(1);
    expect(pages[0].pageId).toBe('page-abc');
    expect(pages[0].title).toBe('Architecture Overview');
    expect(pages[0].summary).toBe('Updated the overview section');
    expect(pages[0].body).toBe('# Architecture\n\nNew content for the page.');
  });

  it('returns empty array when directory does not exist', () => {
    expect(readPageFiles(testDir)).toEqual([]);
  });

  it('ignores non-.md files', () => {
    const pagesDir = join(testDir, '.cw', 'output', 'pages');
    mkdirSync(pagesDir, { recursive: true });
    writeFileSync(join(pagesDir, 'readme.txt'), 'not a page', 'utf-8');
    writeFileSync(join(pagesDir, 'page1.md'), '---\ntitle: Page 1\n---\nContent.\n', 'utf-8');

    const pages = readPageFiles(testDir);
    expect(pages).toHaveLength(1);
  });
});

diff --git a/src/agent/file-io.ts b/src/agent/file-io.ts new file mode 100644 index 0000000..e3e395a --- /dev/null +++ b/src/agent/file-io.ts @@ -0,0 +1,288 @@

/**
 * File-Based Agent I/O
 *
 * Writes context as input files before agent spawn
and reads output files after completion. + * Uses YAML frontmatter (gray-matter) for structured metadata and markdown bodies. + * + * Input: .cw/input/ — written by system before spawn + * Output: .cw/output/ — written by agent during execution + */ + +import { mkdirSync, writeFileSync, readdirSync, existsSync } from 'node:fs'; +import { readFileSync } from 'node:fs'; +import { join } from 'node:path'; +import matter from 'gray-matter'; +import { nanoid } from 'nanoid'; +import { tiptapJsonToMarkdown } from './content-serializer.js'; +import type { AgentInputContext } from './types.js'; + +// Re-export for convenience +export type { AgentInputContext } from './types.js'; + +// ============================================================================= +// TYPES +// ============================================================================= + +export interface WriteInputFilesOptions extends AgentInputContext { + agentWorkdir: string; +} + +export interface ParsedSummary { + body: string; + filesModified?: string[]; +} + +export interface ParsedPhaseFile { + id: string; + title: string; + dependencies: string[]; + body: string; +} + +export interface ParsedTaskFile { + id: string; + title: string; + category: string; + type: string; + dependencies: string[]; + body: string; +} + +export interface ParsedDecisionFile { + id: string; + topic: string; + decision: string; + reason: string; + body: string; +} + +export interface ParsedPageFile { + pageId: string; + title: string; + summary: string; + body: string; +} + +// ============================================================================= +// ID GENERATION +// ============================================================================= + +export function generateId(): string { + return nanoid(); +} + +// ============================================================================= +// INPUT FILE WRITING +// ============================================================================= + +function 
formatFrontmatter(data: Record<string, unknown>, body: string = ''): string { + const lines: string[] = ['---']; + for (const [key, value] of Object.entries(data)) { + if (value === undefined || value === null) continue; + if (Array.isArray(value)) { + if (value.length === 0) { + lines.push(`${key}: []`); + } else { + lines.push(`${key}:`); + for (const item of value) { + lines.push(` - ${String(item)}`); + } + } + } else if (value instanceof Date) { + lines.push(`${key}: "${value.toISOString()}"`); + } else if (typeof value === 'string' && (value.includes('\n') || value.includes(':'))) { + lines.push(`${key}: ${JSON.stringify(value)}`); + } else { + lines.push(`${key}: ${String(value)}`); + } + } + lines.push('---'); + if (body) { + lines.push(''); + lines.push(body); + } + return lines.join('\n') + '\n'; +} + +export function writeInputFiles(options: WriteInputFilesOptions): void { + const inputDir = join(options.agentWorkdir, '.cw', 'input'); + mkdirSync(inputDir, { recursive: true }); + + if (options.initiative) { + const ini = options.initiative; + const content = formatFrontmatter( + { + id: ini.id, + name: ini.name, + status: ini.status, + mergeRequiresApproval: ini.mergeRequiresApproval, + mergeTarget: ini.mergeTarget, + }, + '', + ); + writeFileSync(join(inputDir, 'initiative.md'), content, 'utf-8'); + } + + if (options.pages && options.pages.length > 0) { + const pagesDir = join(inputDir, 'pages'); + mkdirSync(pagesDir, { recursive: true }); + + for (const page of options.pages) { + let bodyMarkdown = ''; + if (page.content) { + try { + const parsed = JSON.parse(page.content); + bodyMarkdown = tiptapJsonToMarkdown(parsed); + } catch { + // Invalid JSON content — skip + } + } + + const content = formatFrontmatter( + { + title: page.title, + parentPageId: page.parentPageId, + sortOrder: page.sortOrder, + }, + bodyMarkdown, + ); + writeFileSync(join(pagesDir, `${page.id}.md`), content, 'utf-8'); + } + } + + if (options.phase) { + const ph = options.phase; + 
const content = formatFrontmatter( + { + id: ph.id, + number: ph.number, + name: ph.name, + status: ph.status, + }, + ph.description ?? '', + ); + writeFileSync(join(inputDir, 'phase.md'), content, 'utf-8'); + } + + if (options.task) { + const t = options.task; + const content = formatFrontmatter( + { + id: t.id, + name: t.name, + category: t.category, + type: t.type, + priority: t.priority, + status: t.status, + }, + t.description ?? '', + ); + writeFileSync(join(inputDir, 'task.md'), content, 'utf-8'); + } +} + +// ============================================================================= +// OUTPUT FILE READING +// ============================================================================= + +function readFrontmatterFile(filePath: string): { data: Record<string, unknown>; body: string } | null { + try { + const raw = readFileSync(filePath, 'utf-8'); + const parsed = matter(raw); + return { data: parsed.data as Record<string, unknown>, body: parsed.content.trim() }; + } catch { + return null; + } +} + +function readFrontmatterDir<T>( + dirPath: string, + mapper: (data: Record<string, unknown>, body: string, filename: string) => T | null, +): T[] { + if (!existsSync(dirPath)) return []; + + const results: T[] = []; + try { + const entries = readdirSync(dirPath); + for (const entry of entries) { + if (!entry.endsWith('.md')) continue; + const filePath = join(dirPath, entry); + const parsed = readFrontmatterFile(filePath); + if (!parsed) continue; + const mapped = mapper(parsed.data, parsed.body, entry); + if (mapped) results.push(mapped); + } + } catch { + // Directory read error — return empty + } + return results; +} + +export function readSummary(agentWorkdir: string): ParsedSummary | null { + const filePath = join(agentWorkdir, '.cw', 'output', 'SUMMARY.md'); + const parsed = readFrontmatterFile(filePath); + if (!parsed) return null; + + const filesModified = parsed.data.files_modified; + return { + body: parsed.body, + filesModified: 
Array.isArray(filesModified) ? filesModified.map(String) : undefined, + }; +} + +export function readPhaseFiles(agentWorkdir: string): ParsedPhaseFile[] { + const dirPath = join(agentWorkdir, '.cw', 'output', 'phases'); + return readFrontmatterDir(dirPath, (data, body, filename) => { + const id = filename.replace(/\.md$/, ''); + const deps = Array.isArray(data.dependencies) ? data.dependencies.map(String) : []; + return { + id, + title: String(data.title ?? ''), + dependencies: deps, + body, + }; + }); +} + +export function readTaskFiles(agentWorkdir: string): ParsedTaskFile[] { + const dirPath = join(agentWorkdir, '.cw', 'output', 'tasks'); + return readFrontmatterDir(dirPath, (data, body, filename) => { + const id = filename.replace(/\.md$/, ''); + const deps = Array.isArray(data.dependencies) ? data.dependencies.map(String) : []; + return { + id, + title: String(data.title ?? ''), + category: String(data.category ?? 'execute'), + type: String(data.type ?? 'auto'), + dependencies: deps, + body, + }; + }); +} + +export function readDecisionFiles(agentWorkdir: string): ParsedDecisionFile[] { + const dirPath = join(agentWorkdir, '.cw', 'output', 'decisions'); + return readFrontmatterDir(dirPath, (data, body, filename) => { + const id = filename.replace(/\.md$/, ''); + return { + id, + topic: String(data.topic ?? ''), + decision: String(data.decision ?? ''), + reason: String(data.reason ?? ''), + body, + }; + }); +} + +export function readPageFiles(agentWorkdir: string): ParsedPageFile[] { + const dirPath = join(agentWorkdir, '.cw', 'output', 'pages'); + return readFrontmatterDir(dirPath, (data, body, filename) => { + const pageId = filename.replace(/\.md$/, ''); + return { + pageId, + title: String(data.title ?? ''), + summary: String(data.summary ?? 
''), + body, + }; + }); +} diff --git a/src/agent/file-tailer.ts b/src/agent/file-tailer.ts new file mode 100644 index 0000000..dca4d0c --- /dev/null +++ b/src/agent/file-tailer.ts @@ -0,0 +1,267 @@ +/** + * File Tailer + * + * Watches an output file and emits parsed events in real-time. + * Used for crash-resilient agent spawning where subprocesses write + * directly to files instead of using pipes. + * + * Uses fs.watch() for efficient change detection with a poll fallback + * since fs.watch isn't 100% reliable on all platforms. + */ + +import { watch, type FSWatcher } from 'node:fs'; +import { open, stat } from 'node:fs/promises'; +import type { FileHandle } from 'node:fs/promises'; +import type { StreamParser, StreamEvent } from './providers/stream-types.js'; +import type { EventBus, AgentOutputEvent } from '../events/index.js'; +import { createModuleLogger } from '../logger/index.js'; + +const log = createModuleLogger('file-tailer'); + +/** Poll interval for fallback polling (ms) */ +const POLL_INTERVAL_MS = 500; + +/** Read buffer size (bytes) */ +const READ_BUFFER_SIZE = 64 * 1024; + +export interface FileTailerOptions { + /** Path to the output file to watch */ + filePath: string; + /** Agent ID for event emission */ + agentId: string; + /** Parser to convert lines to stream events */ + parser: StreamParser; + /** Optional event bus for emitting agent:output events */ + eventBus?: EventBus; + /** Optional callback for each stream event */ + onEvent?: (event: StreamEvent) => void; + /** If true, read from beginning of file; otherwise tail only new content (default: false) */ + startFromBeginning?: boolean; +} + +/** + * FileTailer watches a file for changes and emits parsed stream events. 
+ * + * Behavior: + * - Uses fs.watch() for efficient change detection + * - Falls back to polling every 500ms (fs.watch misses events sometimes) + * - Reads new content incrementally, splits into lines + * - Feeds each line to the parser, emits resulting events + * - Handles partial lines at buffer boundaries + */ +export class FileTailer { + private position = 0; + private watcher: FSWatcher | null = null; + private pollInterval: NodeJS.Timeout | null = null; + private fileHandle: FileHandle | null = null; + private stopped = false; + private partialLine = ''; + private reading = false; + + private readonly filePath: string; + private readonly agentId: string; + private readonly parser: StreamParser; + private readonly eventBus?: EventBus; + private readonly onEvent?: (event: StreamEvent) => void; + private readonly startFromBeginning: boolean; + + constructor(options: FileTailerOptions) { + this.filePath = options.filePath; + this.agentId = options.agentId; + this.parser = options.parser; + this.eventBus = options.eventBus; + this.onEvent = options.onEvent; + this.startFromBeginning = options.startFromBeginning ?? false; + } + + /** + * Start watching the file for changes. + * Initializes position, starts fs.watch, and begins poll fallback. + */ + async start(): Promise<void> { + if (this.stopped) return; + + log.debug({ filePath: this.filePath, agentId: this.agentId }, 'starting file tailer'); + + try { + // Open file for reading + this.fileHandle = await open(this.filePath, 'r'); + + // Set initial position + if (this.startFromBeginning) { + this.position = 0; + } else { + // Seek to end + const stats = await stat(this.filePath); + this.position = stats.size; + } + + // Start fs.watch for efficient change detection + this.watcher = watch(this.filePath, (eventType) => { + if (eventType === 'change' && !this.stopped) { + this.readNewContent().catch((err) => { + log.warn({ err: err instanceof Error ? 
err.message : String(err), agentId: this.agentId }, 'error reading new content'); + }); + } + }); + + this.watcher.on('error', (err) => { + log.warn({ err: err instanceof Error ? err.message : String(err), agentId: this.agentId }, 'watcher error'); + }); + + // Start poll fallback (fs.watch misses events sometimes) + this.pollInterval = setInterval(() => { + if (!this.stopped) { + this.readNewContent().catch((err) => { + log.warn({ err: err instanceof Error ? err.message : String(err), agentId: this.agentId }, 'poll read error'); + }); + } + }, POLL_INTERVAL_MS); + + // If starting from beginning, do initial read + if (this.startFromBeginning) { + await this.readNewContent(); + } + } catch (err) { + log.error({ err: err instanceof Error ? err.message : String(err), filePath: this.filePath }, 'failed to start file tailer'); + } + } + + /** + * Read new content from the file since last position. + * Splits into lines, feeds to parser, emits events. + */ + private async readNewContent(): Promise<void> { + if (this.stopped || !this.fileHandle || this.reading) return; + + this.reading = true; + try { + // Check current file size + const stats = await stat(this.filePath); + if (stats.size <= this.position) { + return; // No new content + } + + // Read new bytes + const bytesToRead = stats.size - this.position; + const buffer = Buffer.alloc(Math.min(bytesToRead, READ_BUFFER_SIZE)); + const { bytesRead } = await this.fileHandle.read(buffer, 0, buffer.length, this.position); + + if (bytesRead === 0) return; + + this.position += bytesRead; + + // Convert to string and process lines + const content = this.partialLine + buffer.toString('utf-8', 0, bytesRead); + const lines = content.split('\n'); + + // Last element is either empty (if content ended with \n) or a partial line + this.partialLine = lines.pop() ?? 
''; + + // Process complete lines + for (const line of lines) { + if (line.trim()) { + this.processLine(line); + } + } + + // If there's more content to read, schedule another read + if (stats.size > this.position) { + setImmediate(() => { + this.readNewContent().catch(() => {}); + }); + } + } finally { + this.reading = false; + } + } + + /** + * Process a single line through the parser and emit events. + */ + private processLine(line: string): void { + const events = this.parser.parseLine(line); + + for (const event of events) { + // Call user callback if provided + if (this.onEvent) { + this.onEvent(event); + } + + // Emit agent:output for text_delta events + if (event.type === 'text_delta' && this.eventBus) { + const outputEvent: AgentOutputEvent = { + type: 'agent:output', + timestamp: new Date(), + payload: { + agentId: this.agentId, + stream: 'stdout', + data: event.text, + }, + }; + this.eventBus.emit(outputEvent); + } + } + } + + /** + * Stop watching the file. + * Cleans up watcher, poll timer, and file handle. 
+ */ + async stop(): Promise<void> { + if (this.stopped) return; + + this.stopped = true; + log.debug({ filePath: this.filePath, agentId: this.agentId }, 'stopping file tailer'); + + // Close watcher + if (this.watcher) { + this.watcher.close(); + this.watcher = null; + } + + // Clear poll timer + if (this.pollInterval) { + clearInterval(this.pollInterval); + this.pollInterval = null; + } + + // Do one final read to catch any remaining content + try { + await this.readNewContent(); + + // Process any remaining partial line + if (this.partialLine.trim()) { + this.processLine(this.partialLine); + this.partialLine = ''; + } + + // Signal end of stream to parser + const endEvents = this.parser.end(); + for (const event of endEvents) { + if (this.onEvent) { + this.onEvent(event); + } + } + } catch { + // Ignore errors during cleanup + } + + // Close file handle + if (this.fileHandle) { + try { + await this.fileHandle.close(); + } catch { + // Ignore close errors + } + this.fileHandle = null; + } + } + + /** + * Check if the tailer has been stopped. 
+ */ + get isStopped(): boolean { + return this.stopped; + } +} diff --git a/src/agent/index.ts b/src/agent/index.ts index 045a996..5f51562 100644 --- a/src/agent/index.ts +++ b/src/agent/index.ts @@ -12,16 +12,73 @@ export type { AgentInfo, AgentResult, AgentManager, + AgentInputContext, } from './types.js'; // Adapter implementations -export { ClaudeAgentManager } from './manager.js'; +export { MultiProviderAgentManager } from './manager.js'; +/** @deprecated Use MultiProviderAgentManager instead */ +export { MultiProviderAgentManager as ClaudeAgentManager } from './manager.js'; export { MockAgentManager, type MockAgentScenario } from './mock-manager.js'; +// Provider registry +export { + getProvider, + listProviders, + registerProvider, + loadProvidersFromFile, + PROVIDER_PRESETS, +} from './providers/index.js'; +export type { AgentProviderConfig } from './providers/index.js'; + // Agent prompts export { buildDiscussPrompt, buildBreakdownPrompt, buildExecutePrompt, + buildRefinePrompt, buildDecomposePrompt, } from './prompts.js'; + +// Schema +export { agentSignalSchema, agentSignalJsonSchema } from './schema.js'; +export type { AgentSignal } from './schema.js'; +// Backward compat +export { agentOutputSchema, agentOutputJsonSchema } from './schema.js'; + +// File I/O +export { + writeInputFiles, + readSummary, + readPhaseFiles, + readTaskFiles, + readDecisionFiles, + readPageFiles, + generateId, +} from './file-io.js'; +export type { + WriteInputFilesOptions, + ParsedSummary, + ParsedPhaseFile, + ParsedTaskFile, + ParsedDecisionFile, + ParsedPageFile, +} from './file-io.js'; + +// Content serializer +export { serializePageTree, tiptapJsonToMarkdown } from './content-serializer.js'; +export type { PageForSerialization } from './content-serializer.js'; + +// Alias generator +export { generateUniqueAlias } from './alias.js'; + +// File tailer for crash-resilient streaming +export { FileTailer } from './file-tailer.js'; +export type { FileTailerOptions } from 
'./file-tailer.js'; + +// Extracted manager helpers +export { ProcessManager } from './process-manager.js'; +export { CredentialHandler } from './credential-handler.js'; +export { OutputHandler } from './output-handler.js'; +export type { ActiveAgent } from './output-handler.js'; +export { CleanupManager } from './cleanup-manager.js'; diff --git a/src/agent/manager.test.ts b/src/agent/manager.test.ts index 8b4beac..35d99a4 100644 --- a/src/agent/manager.test.ts +++ b/src/agent/manager.test.ts @@ -1,46 +1,123 @@ /** - * ClaudeAgentManager Tests + * MultiProviderAgentManager Tests * - * Unit tests for the ClaudeAgentManager adapter. - * Mocks execa since we can't spawn real Claude CLI in tests. + * Unit tests for the MultiProviderAgentManager adapter. + * Mocks child_process.spawn since we can't spawn real Claude CLI in tests. */ import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; -import { ClaudeAgentManager } from './manager.js'; +import { MultiProviderAgentManager } from './manager.js'; import type { AgentRepository } from '../db/repositories/agent-repository.js'; -import type { WorktreeManager, Worktree } from '../git/types.js'; +import type { ProjectRepository } from '../db/repositories/project-repository.js'; import { EventEmitterBus } from '../events/index.js'; import type { DomainEvent } from '../events/index.js'; -// Mock execa -vi.mock('execa', () => ({ - execa: vi.fn(), +// Mock child_process.spawn and execFile +vi.mock('node:child_process', () => ({ + spawn: vi.fn(), + execFile: vi.fn((_cmd: string, _args: string[], _opts: unknown, cb?: Function) => { + if (cb) cb(null, '', ''); + }), })); -import { execa } from 'execa'; -const mockExeca = vi.mocked(execa); +// Import spawn to get the mock +import { spawn } from 'node:child_process'; +const mockSpawn = vi.mocked(spawn); -describe('ClaudeAgentManager', () => { - let manager: ClaudeAgentManager; +// Mock SimpleGitWorktreeManager so spawn doesn't need a real git repo 
+vi.mock('../git/manager.js', () => { + return { + SimpleGitWorktreeManager: class MockWorktreeManager { + create = vi.fn().mockResolvedValue({ id: 'workspace', path: '/tmp/test-workspace/agent-workdirs/gastown/workspace', branch: 'agent/gastown' }); + get = vi.fn().mockResolvedValue(null); + list = vi.fn().mockResolvedValue([]); + remove = vi.fn().mockResolvedValue(undefined); + }, + }; +}); + +// Mock fs operations for file-based output +vi.mock('node:fs', async () => { + const actual = await vi.importActual('node:fs'); + // Create a mock write stream + const mockWriteStream = { + write: vi.fn(), + end: vi.fn(), + on: vi.fn(), + }; + return { + ...actual, + openSync: vi.fn().mockReturnValue(99), + closeSync: vi.fn(), + mkdirSync: vi.fn(), + writeFileSync: vi.fn(), + createWriteStream: vi.fn().mockReturnValue(mockWriteStream), + }; +}); + +vi.mock('node:fs/promises', async () => { + const actual = await vi.importActual('node:fs/promises'); + return { + ...actual, + readFile: vi.fn().mockResolvedValue(''), + readdir: vi.fn().mockRejectedValue(new Error('ENOENT')), + rm: vi.fn().mockResolvedValue(undefined), + }; +}); + +// Mock FileTailer to avoid actual file watching +vi.mock('./file-tailer.js', () => ({ + FileTailer: class MockFileTailer { + start = vi.fn().mockResolvedValue(undefined); + stop = vi.fn().mockResolvedValue(undefined); + isStopped = false; + }, +})); + +import type { ChildProcess } from 'node:child_process'; + +/** + * Create a mock ChildProcess for detached spawning. + * The process is spawned detached and unreferenced. + */ +function createMockChildProcess(options?: { + pid?: number; +}) { + const { pid = 123 } = options ?? 
{}; + + // Create a minimal mock that satisfies the actual usage in spawnDetached + const childProcess = { + pid, + unref: vi.fn(), + on: vi.fn().mockReturnThis(), + kill: vi.fn(), + } as unknown as ChildProcess; + + return childProcess; +} + +describe('MultiProviderAgentManager', () => { + let manager: MultiProviderAgentManager; let mockRepository: AgentRepository; - let mockWorktreeManager: WorktreeManager; + let mockProjectRepository: ProjectRepository; let eventBus: EventEmitterBus; let capturedEvents: DomainEvent[]; - const mockWorktree: Worktree = { - id: 'worktree-123', - branch: 'agent/gastown', - path: '/tmp/worktree', - isMainWorktree: false, - }; - const mockAgent = { id: 'agent-123', name: 'gastown', taskId: 'task-456', + initiativeId: null as string | null, sessionId: 'session-789', - worktreeId: 'worktree-123', + worktreeId: 'gastown', status: 'idle' as const, + mode: 'execute' as const, + provider: 'claude', + accountId: null as string | null, + pid: null as number | null, + outputFilePath: null as string | null, + result: null as string | null, + pendingQuestions: null as string | null, createdAt: new Date(), updatedAt: new Date(), }; @@ -57,22 +134,21 @@ describe('ClaudeAgentManager', () => { findBySessionId: vi.fn().mockResolvedValue(mockAgent), findAll: vi.fn().mockResolvedValue([mockAgent]), findByStatus: vi.fn().mockResolvedValue([mockAgent]), - updateStatus: vi - .fn() - .mockResolvedValue({ ...mockAgent, status: 'running' }), - updateSessionId: vi - .fn() - .mockResolvedValue({ ...mockAgent, sessionId: 'new-session' }), + update: vi.fn().mockResolvedValue(mockAgent), delete: vi.fn().mockResolvedValue(undefined), }; - mockWorktreeManager = { - create: vi.fn().mockResolvedValue(mockWorktree), - remove: vi.fn().mockResolvedValue(undefined), - list: vi.fn().mockResolvedValue([mockWorktree]), - get: vi.fn().mockResolvedValue(mockWorktree), - diff: vi.fn().mockResolvedValue({ files: [], summary: '' }), - merge: vi.fn().mockResolvedValue({ success: 
true, message: 'ok' }), + mockProjectRepository = { + create: vi.fn(), + findById: vi.fn(), + findByName: vi.fn(), + findAll: vi.fn().mockResolvedValue([]), + update: vi.fn(), + delete: vi.fn(), + addProjectToInitiative: vi.fn(), + removeProjectFromInitiative: vi.fn(), + findProjectsByInitiativeId: vi.fn().mockResolvedValue([]), + setInitiativeProjects: vi.fn(), }; eventBus = new EventEmitterBus(); @@ -83,9 +159,11 @@ describe('ClaudeAgentManager', () => { eventBus.on('agent:resumed', (e) => capturedEvents.push(e)); eventBus.on('agent:waiting', (e) => capturedEvents.push(e)); - manager = new ClaudeAgentManager( + manager = new MultiProviderAgentManager( mockRepository, - mockWorktreeManager, + '/tmp/test-workspace', + mockProjectRepository, + undefined, eventBus ); }); @@ -95,19 +173,9 @@ describe('ClaudeAgentManager', () => { }); describe('spawn', () => { - it('creates worktree and agent record with name', async () => { - const mockSubprocess = { - pid: 123, - kill: vi.fn(), - then: () => - Promise.resolve({ - stdout: - '{"type":"result","subtype":"success","session_id":"sess-123","result":"{\\"status\\":\\"done\\",\\"result\\":\\"Task completed\\"}"}', - stderr: '', - }), - catch: () => mockSubprocess, - }; - mockExeca.mockReturnValue(mockSubprocess as unknown as ReturnType<typeof execa>); + it('creates agent record with provided name', async () => { + const mockChild = createMockChildProcess(); + mockSpawn.mockReturnValue(mockChild); const result = await manager.spawn({ name: 'gastown', @@ -115,10 +183,6 @@ describe('ClaudeAgentManager', () => { prompt: 'Test task', }); - expect(mockWorktreeManager.create).toHaveBeenCalledWith( - expect.any(String), - 'agent/gastown' - ); expect(mockRepository.create).toHaveBeenCalledWith( expect.objectContaining({ name: 'gastown' }) ); @@ -138,18 +202,8 @@ describe('ClaudeAgentManager', () => { }); it('emits AgentSpawned event with name', async () => { - const mockSubprocess = { - pid: 123, - kill: vi.fn(), - then: () => - 
Promise.resolve({ - stdout: - '{"type":"result","subtype":"success","session_id":"sess-123","result":"{\\"status\\":\\"done\\",\\"result\\":\\"Task completed\\"}"}', - stderr: '', - }), - catch: () => mockSubprocess, - }; - mockExeca.mockReturnValue(mockSubprocess as unknown as ReturnType<typeof execa>); + const mockChild = createMockChildProcess(); + mockSpawn.mockReturnValue(mockChild); await manager.spawn({ name: 'gastown', @@ -167,18 +221,8 @@ describe('ClaudeAgentManager', () => { }); it('uses custom cwd if provided', async () => { - const mockSubprocess = { - pid: 123, - kill: vi.fn(), - then: () => - Promise.resolve({ - stdout: - '{"type":"result","subtype":"success","session_id":"sess-123","result":"{\\"status\\":\\"done\\",\\"result\\":\\"Task completed\\"}"}', - stderr: '', - }), - catch: () => mockSubprocess, - }; - mockExeca.mockReturnValue(mockSubprocess as unknown as ReturnType<typeof execa>); + const mockChild = createMockChildProcess(); + mockSpawn.mockReturnValue(mockChild); await manager.spawn({ name: 'chinatown', @@ -187,9 +231,10 @@ describe('ClaudeAgentManager', () => { cwd: '/custom/path', }); - expect(mockExeca).toHaveBeenCalledWith( + // Verify spawn was called with custom cwd + expect(mockSpawn).toHaveBeenCalledWith( 'claude', - expect.arrayContaining(['-p', 'Test task', '--output-format', 'json', '--json-schema']), + expect.arrayContaining(['-p', 'Test task', '--output-format', 'stream-json']), expect.objectContaining({ cwd: '/custom/path' }) ); }); @@ -201,24 +246,17 @@ describe('ClaudeAgentManager', () => { // The repository mock returns mockAgent which has id 'agent-123' await manager.stop(mockAgent.id); - expect(mockRepository.updateStatus).toHaveBeenCalledWith( + expect(mockRepository.update).toHaveBeenCalledWith( mockAgent.id, - 'stopped' + { status: 'stopped' } ); }); - it('kills subprocess if running', async () => { - // Create a manager and spawn an agent first - const killFn = vi.fn(); - const mockSubprocess = { - pid: 123, - 
kill: killFn, - then: () => new Promise(() => {}), // Never resolves - catch: () => mockSubprocess, - }; - mockExeca.mockReturnValue(mockSubprocess as unknown as ReturnType<typeof execa>); + it('kills detached process if running', async () => { + const mockChild = createMockChildProcess(); + mockSpawn.mockReturnValue(mockChild); - // Spawn returns immediately, we get the agent id from create mock + // Spawn returns immediately since process is detached const spawned = await manager.spawn({ name: 'gastown', taskId: 'task-456', @@ -226,13 +264,12 @@ describe('ClaudeAgentManager', () => { }); // Now stop using the returned agent ID - // But the spawned id comes from repository.create which returns mockAgent.id await manager.stop(spawned.id); - expect(killFn).toHaveBeenCalledWith('SIGTERM'); - expect(mockRepository.updateStatus).toHaveBeenCalledWith( + // Verify status was updated (process.kill is called internally, not on the child object) + expect(mockRepository.update).toHaveBeenCalledWith( spawned.id, - 'stopped' + { status: 'stopped' } ); }); @@ -245,13 +282,8 @@ describe('ClaudeAgentManager', () => { }); it('emits AgentStopped event with user_requested reason', async () => { - const mockSubprocess = { - pid: 123, - kill: vi.fn(), - then: () => new Promise(() => {}), - catch: () => mockSubprocess, - }; - mockExeca.mockReturnValue(mockSubprocess as unknown as ReturnType<typeof execa>); + const mockChild = createMockChildProcess(); + mockSpawn.mockReturnValue(mockChild); const spawned = await manager.spawn({ name: 'gastown', @@ -322,31 +354,19 @@ describe('ClaudeAgentManager', () => { status: 'waiting_for_input', }); - const mockSubprocess = { - pid: 123, - kill: vi.fn(), - then: () => - Promise.resolve({ - stdout: - '{"type":"result","subtype":"success","session_id":"sess-123","result":"{\\"status\\":\\"done\\",\\"result\\":\\"Continued successfully\\"}"}', - stderr: '', - }), - catch: () => mockSubprocess, - }; - mockExeca.mockReturnValue(mockSubprocess as unknown 
as ReturnType<typeof execa>); + const mockChild = createMockChildProcess(); + mockSpawn.mockReturnValue(mockChild); await manager.resume(mockAgent.id, { q1: 'Answer one', q2: 'Answer two' }); - expect(mockExeca).toHaveBeenCalledWith( + // Verify spawn was called with resume args + expect(mockSpawn).toHaveBeenCalledWith( 'claude', expect.arrayContaining([ - '-p', - 'Here are my answers to your questions:\n[q1]: Answer one\n[q2]: Answer two', '--resume', 'session-789', '--output-format', - 'json', - '--json-schema', + 'stream-json', ]), expect.any(Object) ); @@ -375,36 +395,14 @@ describe('ClaudeAgentManager', () => { ); }); - it('rejects if worktree not found', async () => { - mockRepository.findById = vi.fn().mockResolvedValue({ - ...mockAgent, - status: 'waiting_for_input', - }); - mockWorktreeManager.get = vi.fn().mockResolvedValue(null); - - await expect(manager.resume(mockAgent.id, { q1: 'Answer' })).rejects.toThrow( - 'Worktree' - ); - }); - it('emits AgentResumed event', async () => { mockRepository.findById = vi.fn().mockResolvedValue({ ...mockAgent, status: 'waiting_for_input', }); - const mockSubprocess = { - pid: 123, - kill: vi.fn(), - then: () => - Promise.resolve({ - stdout: - '{"type":"result","subtype":"success","session_id":"sess-123","result":"{\\"status\\":\\"done\\",\\"result\\":\\"Continued successfully\\"}"}', - stderr: '', - }), - catch: () => mockSubprocess, - }; - mockExeca.mockReturnValue(mockSubprocess as unknown as ReturnType<typeof execa>); + const mockChild = createMockChildProcess(); + mockSpawn.mockReturnValue(mockChild); await manager.resume(mockAgent.id, { q1: 'User answer' }); @@ -425,4 +423,63 @@ describe('ClaudeAgentManager', () => { expect(result).toBeNull(); }); }); + + describe('delete', () => { + it('deletes agent and clears active state', async () => { + const mockChild = createMockChildProcess(); + mockSpawn.mockReturnValue(mockChild); + + // Spawn an agent first + const spawned = await manager.spawn({ + name: 'gastown', + 
taskId: 'task-456', + prompt: 'Test', + }); + + // Delete the agent + await manager.delete(spawned.id); + + // Verify DB record was deleted + expect(mockRepository.delete).toHaveBeenCalledWith(spawned.id); + }); + + it('emits agent:deleted event', async () => { + const mockChild = createMockChildProcess(); + mockSpawn.mockReturnValue(mockChild); + + eventBus.on('agent:deleted', (e) => capturedEvents.push(e)); + + const spawned = await manager.spawn({ + name: 'gastown', + taskId: 'task-456', + prompt: 'Test', + }); + + await manager.delete(spawned.id); + + const deletedEvent = capturedEvents.find( + (e) => e.type === 'agent:deleted' + ); + expect(deletedEvent).toBeDefined(); + expect( + (deletedEvent as { payload: { name: string } }).payload.name + ).toBe('gastown'); + }); + + it('throws if agent not found', async () => { + mockRepository.findById = vi.fn().mockResolvedValue(null); + + await expect(manager.delete('nonexistent')).rejects.toThrow( + "Agent 'nonexistent' not found" + ); + }); + + it('handles missing workdir gracefully', async () => { + // Agent exists in DB but has no active state and workdir doesn't exist + // The delete should succeed (best-effort cleanup) + await manager.delete(mockAgent.id); + + expect(mockRepository.delete).toHaveBeenCalledWith(mockAgent.id); + }); + }); }); diff --git a/src/agent/manager.ts b/src/agent/manager.ts index ca70ccd..47ce7a0 100644 --- a/src/agent/manager.ts +++ b/src/agent/manager.ts @@ -1,12 +1,14 @@ /** - * Claude Agent Manager Adapter + * Multi-Provider Agent Manager — Orchestrator * - * Implementation of AgentManager port using Claude CLI with JSON output. - * Spawns real Claude agents via `claude -p "prompt" --output-format json`. + * Implementation of AgentManager port supporting multiple CLI providers. 
+ * Delegates to extracted helpers: + * - ProcessManager: subprocess spawn/kill/poll, worktree creation, command building + * - CredentialHandler: account selection, credential write/refresh, exhaustion handling + * - OutputHandler: stream events, signal parsing, file reading, result capture + * - CleanupManager: worktree/branch/log removal, orphan cleanup, reconciliation */ -import { execa, type ResultPromise } from 'execa'; -import { randomUUID } from 'crypto'; import type { AgentManager, AgentInfo, @@ -17,631 +19,207 @@ import type { PendingQuestions, } from './types.js'; import type { AgentRepository } from '../db/repositories/agent-repository.js'; -import type { WorktreeManager } from '../git/types.js'; +import type { AccountRepository } from '../db/repositories/account-repository.js'; +import type { ProjectRepository } from '../db/repositories/project-repository.js'; +import { generateUniqueAlias } from './alias.js'; import type { EventBus, AgentSpawnedEvent, AgentStoppedEvent, - AgentCrashedEvent, AgentResumedEvent, - AgentWaitingEvent, + AgentDeletedEvent, } from '../events/index.js'; -import { - agentOutputSchema, - agentOutputJsonSchema, - discussOutputSchema, - discussOutputJsonSchema, - breakdownOutputSchema, - breakdownOutputJsonSchema, - decomposeOutputSchema, - decomposeOutputJsonSchema, -} from './schema.js'; +import { writeInputFiles } from './file-io.js'; +import { getProvider } from './providers/registry.js'; +import { createModuleLogger } from '../logger/index.js'; +import type { AccountCredentialManager } from './credentials/types.js'; +import { ProcessManager } from './process-manager.js'; +import { CredentialHandler } from './credential-handler.js'; +import { OutputHandler, type ActiveAgent } from './output-handler.js'; +import { CleanupManager } from './cleanup-manager.js'; -/** - * Result structure from Claude CLI with --output-format json - * - * When --json-schema is used, structured output is in `structured_output` field. 
- * The `result` field may be empty or contain the raw text. - */ -interface ClaudeCliResult { - type: 'result'; - subtype: 'success' | 'error'; - is_error: boolean; - session_id: string; - result: string; - structured_output?: unknown; // Present when --json-schema is used - total_cost_usd?: number; -} +const log = createModuleLogger('agent-manager'); -/** - * Tracks an active agent subprocess, its result, and any pending questions - */ -interface ActiveAgent { - subprocess: ResultPromise; - result?: AgentResult; - pendingQuestions?: PendingQuestions; -} - -/** - * ClaudeAgentManager - Adapter implementing AgentManager port - * - * Uses Claude CLI in JSON mode to spawn agents. Each agent gets: - * - Isolated worktree (via WorktreeManager) - * - Persisted state (via AgentRepository) - * - Lifecycle events (via EventBus) - */ -export class ClaudeAgentManager implements AgentManager { +export class MultiProviderAgentManager implements AgentManager { private activeAgents: Map<string, ActiveAgent> = new Map(); + private outputBuffers: Map<string, string[]> = new Map(); + private processManager: ProcessManager; + private credentialHandler: CredentialHandler; + private outputHandler: OutputHandler; + private cleanupManager: CleanupManager; constructor( private repository: AgentRepository, - private worktreeManager: WorktreeManager, - private eventBus?: EventBus - ) {} - - /** - * Get the appropriate JSON schema for a given agent mode. - * Each mode has its own output schema for Claude CLI --json-schema flag. 
- */ - private getJsonSchemaForMode(mode: AgentMode): object { - switch (mode) { - case 'discuss': - return discussOutputJsonSchema; - case 'breakdown': - return breakdownOutputJsonSchema; - case 'decompose': - return decomposeOutputJsonSchema; - case 'execute': - default: - return agentOutputJsonSchema; - } + private workspaceRoot: string, + private projectRepository: ProjectRepository, + private accountRepository?: AccountRepository, + private eventBus?: EventBus, + private credentialManager?: AccountCredentialManager, + ) { + this.processManager = new ProcessManager(workspaceRoot, projectRepository, eventBus); + this.credentialHandler = new CredentialHandler(workspaceRoot, accountRepository, credentialManager); + this.outputHandler = new OutputHandler(repository, eventBus); + this.cleanupManager = new CleanupManager(workspaceRoot, repository, projectRepository, eventBus); } /** * Spawn a new agent to work on a task. - * Creates isolated worktree, starts Claude CLI, persists state. */ async spawn(options: SpawnAgentOptions): Promise<AgentInfo> { - const { name, taskId, prompt, cwd, mode = 'execute' } = options; - const worktreeId = randomUUID(); - const branchName = `agent/${name}`; + const { taskId, prompt, cwd, mode = 'execute', provider: providerName = 'claude', initiativeId } = options; + log.info({ taskId, provider: providerName, initiativeId, mode }, 'spawn requested'); - // Check name uniqueness - const existing = await this.repository.findByName(name); - if (existing) { - throw new Error(`Agent with name '${name}' already exists`); + const provider = getProvider(providerName); + if (!provider) { + throw new Error(`Unknown provider: '${providerName}'. Available: claude, codex, gemini, cursor, auggie, amp, opencode`); } - // 1. 
Create isolated worktree - const worktree = await this.worktreeManager.create(worktreeId, branchName); + // Generate or validate name + let name: string; + if (options.name) { + name = options.name; + const existing = await this.repository.findByName(name); + if (existing) { + throw new Error(`Agent with name '${name}' already exists`); + } + } else { + name = await generateUniqueAlias(this.repository); + } + const alias = name; + log.debug({ alias }, 'alias generated'); - // 2. Create agent record (session ID null until first run completes) + // 1. Account selection + let accountId: string | null = null; + let accountConfigDir: string | null = null; + + const accountResult = await this.credentialHandler.selectAccount(providerName); + if (accountResult) { + accountId = accountResult.accountId; + accountConfigDir = accountResult.configDir; + + this.credentialHandler.writeCredentialsToDisk(accountResult.account, accountConfigDir); + const { valid, refreshed } = await this.credentialHandler.ensureCredentials(accountConfigDir, accountId); + if (!valid) { + log.warn({ alias, accountId }, 'failed to refresh account credentials, proceeding anyway'); + } + if (refreshed) { + await this.credentialHandler.persistRefreshedCredentials(accountId, accountConfigDir); + } + } + + if (accountId) { + log.info({ alias, accountId }, 'account selected'); + } else { + log.debug('no accounts available, spawning without account'); + } + + // 2. Create isolated worktrees + let agentCwd: string; + if (initiativeId) { + agentCwd = await this.processManager.createProjectWorktrees(alias, initiativeId); + } else { + agentCwd = await this.processManager.createStandaloneWorktree(alias); + } + log.debug({ alias, agentCwd }, 'worktrees created'); + + // 2b. Write input files + if (options.inputContext) { + writeInputFiles({ agentWorkdir: agentCwd, ...options.inputContext }); + log.debug({ alias }, 'input files written'); + } + + // 3. 
Create agent record const agent = await this.repository.create({ - name, - taskId, + name: alias, + taskId: taskId ?? null, + initiativeId: initiativeId ?? null, sessionId: null, - worktreeId: worktree.id, + worktreeId: alias, status: 'running', mode, + provider: providerName, + accountId, }); - - // Use agent.id from repository for all tracking const agentId = agent.id; - // 3. Start Claude CLI in background with mode-specific JSON schema - const jsonSchema = this.getJsonSchemaForMode(mode); - const subprocess = execa( - 'claude', - [ - '-p', - prompt, - '--output-format', - 'json', - '--json-schema', - JSON.stringify(jsonSchema), - ], - { - cwd: cwd ?? worktree.path, - detached: true, - stdio: ['ignore', 'pipe', 'pipe'], - } + // 4. Build spawn command + const { command, args, env: providerEnv } = this.processManager.buildSpawnCommand(provider, prompt); + log.debug({ command, args: args.join(' '), cwd: cwd ?? agentCwd }, 'spawn command built'); + + // 5. Set config dir env var if account selected + const processEnv: Record<string, string> = { ...providerEnv }; + if (accountConfigDir && provider.configDirEnv) { + processEnv[provider.configDirEnv] = accountConfigDir; + } + + // 6. Spawn detached subprocess + const { pid, outputFilePath, tailer } = this.processManager.spawnDetached( + agentId, command, args, cwd ?? 
agentCwd, processEnv, providerName, prompt, + (event) => this.outputHandler.handleStreamEvent(agentId, event, this.activeAgents.get(agentId), this.outputBuffers), ); - this.activeAgents.set(agentId, { subprocess }); + await this.repository.update(agentId, { pid, outputFilePath }); + + this.activeAgents.set(agentId, { agentId, pid, tailer, outputFilePath }); + log.info({ agentId, alias, pid }, 'detached subprocess started'); // Emit spawned event if (this.eventBus) { const event: AgentSpawnedEvent = { type: 'agent:spawned', timestamp: new Date(), - payload: { - agentId, - name, - taskId, - worktreeId: worktree.id, - }, + payload: { agentId, name: alias, taskId: taskId ?? null, worktreeId: alias, provider: providerName }, }; this.eventBus.emit(event); } - // Handle completion in background - this.handleAgentCompletion(agentId, subprocess); + // Start polling for completion + this.processManager.pollForCompletion( + agentId, pid, + () => this.handleDetachedAgentCompletion(agentId), + () => this.activeAgents.get(agentId)?.tailer, + ); return this.toAgentInfo(agent); } /** - * Handle agent subprocess completion. - * Parses structured JSON result with mode-specific schema, updates session ID, emits events. + * Handle completion of a detached agent. 
*/ - private async handleAgentCompletion( - agentId: string, - subprocess: ResultPromise - ): Promise<void> { - try { - const { stdout } = await subprocess; - const agent = await this.repository.findById(agentId); - if (!agent) return; - - // Parse CLI result wrapper (stdout is string when stdio is 'pipe') - const cliResult: ClaudeCliResult = JSON.parse(stdout as string); - - // Store session_id for resume capability - if (cliResult.session_id) { - await this.repository.updateSessionId(agentId, cliResult.session_id); - } - - // When --json-schema is used, structured output is in structured_output field - // Falls back to parsing result if structured_output is not present (backwards compatible) - const rawOutput = cliResult.structured_output ?? JSON.parse(cliResult.result); - const active = this.activeAgents.get(agentId); - - // Parse output based on agent mode - switch (agent.mode) { - case 'discuss': - await this.handleDiscussOutput(agent, rawOutput, cliResult.session_id); - break; - case 'breakdown': - await this.handleBreakdownOutput(agent, rawOutput); - break; - case 'decompose': - await this.handleDecomposeOutput(agent, rawOutput); - break; - case 'execute': - default: - await this.handleExecuteOutput(agent, rawOutput, cliResult.session_id); - break; - } - } catch (error) { - await this.handleAgentError(agentId, error); - } - } - - /** - * Handle output for execute mode (default mode). 
- */ - private async handleExecuteOutput( - agent: { id: string; name: string; taskId: string | null; sessionId: string | null }, - rawOutput: unknown, - sessionId: string | undefined - ): Promise<void> { - const agentOutput = agentOutputSchema.parse(rawOutput); - const active = this.activeAgents.get(agent.id); - - switch (agentOutput.status) { - case 'done': { - if (active) { - active.result = { - success: true, - message: agentOutput.result, - filesModified: agentOutput.filesModified, - }; - } - await this.repository.updateStatus(agent.id, 'idle'); - - if (this.eventBus) { - const event: AgentStoppedEvent = { - type: 'agent:stopped', - timestamp: new Date(), - payload: { - agentId: agent.id, - name: agent.name, - taskId: agent.taskId ?? '', - reason: 'task_complete', - }, - }; - this.eventBus.emit(event); - } - break; - } - - case 'questions': { - if (active) { - active.pendingQuestions = { - questions: agentOutput.questions, - }; - } - await this.repository.updateStatus(agent.id, 'waiting_for_input'); - - if (this.eventBus) { - const event: AgentWaitingEvent = { - type: 'agent:waiting', - timestamp: new Date(), - payload: { - agentId: agent.id, - name: agent.name, - taskId: agent.taskId ?? '', - sessionId: sessionId ?? agent.sessionId ?? '', - questions: agentOutput.questions, - }, - }; - this.eventBus.emit(event); - } - break; - } - - case 'unrecoverable_error': { - if (active) { - active.result = { - success: false, - message: agentOutput.error, - }; - } - await this.repository.updateStatus(agent.id, 'crashed'); - - if (this.eventBus) { - const event: AgentCrashedEvent = { - type: 'agent:crashed', - timestamp: new Date(), - payload: { - agentId: agent.id, - name: agent.name, - taskId: agent.taskId ?? '', - error: agentOutput.error, - }, - }; - this.eventBus.emit(event); - } - break; - } - } - } - - /** - * Handle output for discuss mode. - * Outputs decisions array when context gathering is complete. 
- */ - private async handleDiscussOutput( - agent: { id: string; name: string; taskId: string | null; sessionId: string | null }, - rawOutput: unknown, - sessionId: string | undefined - ): Promise<void> { - const discussOutput = discussOutputSchema.parse(rawOutput); - const active = this.activeAgents.get(agent.id); - - switch (discussOutput.status) { - case 'context_complete': { - if (active) { - active.result = { - success: true, - message: discussOutput.summary, - }; - } - await this.repository.updateStatus(agent.id, 'idle'); - - if (this.eventBus) { - const event: AgentStoppedEvent = { - type: 'agent:stopped', - timestamp: new Date(), - payload: { - agentId: agent.id, - name: agent.name, - taskId: agent.taskId ?? '', - reason: 'context_complete', - }, - }; - this.eventBus.emit(event); - } - break; - } - - case 'questions': { - if (active) { - active.pendingQuestions = { - questions: discussOutput.questions, - }; - } - await this.repository.updateStatus(agent.id, 'waiting_for_input'); - - if (this.eventBus) { - const event: AgentWaitingEvent = { - type: 'agent:waiting', - timestamp: new Date(), - payload: { - agentId: agent.id, - name: agent.name, - taskId: agent.taskId ?? '', - sessionId: sessionId ?? agent.sessionId ?? '', - questions: discussOutput.questions, - }, - }; - this.eventBus.emit(event); - } - break; - } - - case 'unrecoverable_error': { - if (active) { - active.result = { - success: false, - message: discussOutput.error, - }; - } - await this.repository.updateStatus(agent.id, 'crashed'); - - if (this.eventBus) { - const event: AgentCrashedEvent = { - type: 'agent:crashed', - timestamp: new Date(), - payload: { - agentId: agent.id, - name: agent.name, - taskId: agent.taskId ?? '', - error: discussOutput.error, - }, - }; - this.eventBus.emit(event); - } - break; - } - } - } - - /** - * Handle output for breakdown mode. - * Outputs phases array when initiative decomposition is complete. 
- */ - private async handleBreakdownOutput( - agent: { id: string; name: string; taskId: string | null }, - rawOutput: unknown - ): Promise<void> { - const breakdownOutput = breakdownOutputSchema.parse(rawOutput); - const active = this.activeAgents.get(agent.id); - - switch (breakdownOutput.status) { - case 'breakdown_complete': { - if (active) { - active.result = { - success: true, - message: `Breakdown complete with ${breakdownOutput.phases.length} phases`, - }; - } - await this.repository.updateStatus(agent.id, 'idle'); - - if (this.eventBus) { - const event: AgentStoppedEvent = { - type: 'agent:stopped', - timestamp: new Date(), - payload: { - agentId: agent.id, - name: agent.name, - taskId: agent.taskId ?? '', - reason: 'breakdown_complete', - }, - }; - this.eventBus.emit(event); - } - break; - } - - case 'questions': { - if (active) { - active.pendingQuestions = { - questions: breakdownOutput.questions, - }; - } - await this.repository.updateStatus(agent.id, 'waiting_for_input'); - - if (this.eventBus) { - const event: AgentWaitingEvent = { - type: 'agent:waiting', - timestamp: new Date(), - payload: { - agentId: agent.id, - name: agent.name, - taskId: agent.taskId ?? '', - sessionId: '', - questions: breakdownOutput.questions, - }, - }; - this.eventBus.emit(event); - } - break; - } - - case 'unrecoverable_error': { - if (active) { - active.result = { - success: false, - message: breakdownOutput.error, - }; - } - await this.repository.updateStatus(agent.id, 'crashed'); - - if (this.eventBus) { - const event: AgentCrashedEvent = { - type: 'agent:crashed', - timestamp: new Date(), - payload: { - agentId: agent.id, - name: agent.name, - taskId: agent.taskId ?? '', - error: breakdownOutput.error, - }, - }; - this.eventBus.emit(event); - } - break; - } - } - } - - /** - * Handle output for decompose mode. - * Outputs tasks array when phase decomposition is complete. 
- */ - private async handleDecomposeOutput( - agent: { id: string; name: string; taskId: string | null }, - rawOutput: unknown - ): Promise<void> { - const decomposeOutput = decomposeOutputSchema.parse(rawOutput); - const active = this.activeAgents.get(agent.id); - - switch (decomposeOutput.status) { - case 'decompose_complete': { - if (active) { - active.result = { - success: true, - message: `Decompose complete with ${decomposeOutput.tasks.length} tasks`, - }; - } - await this.repository.updateStatus(agent.id, 'idle'); - - if (this.eventBus) { - const event: AgentStoppedEvent = { - type: 'agent:stopped', - timestamp: new Date(), - payload: { - agentId: agent.id, - name: agent.name, - taskId: agent.taskId ?? '', - reason: 'decompose_complete', - }, - }; - this.eventBus.emit(event); - } - break; - } - - case 'questions': { - if (active) { - active.pendingQuestions = { - questions: decomposeOutput.questions, - }; - } - await this.repository.updateStatus(agent.id, 'waiting_for_input'); - - if (this.eventBus) { - const event: AgentWaitingEvent = { - type: 'agent:waiting', - timestamp: new Date(), - payload: { - agentId: agent.id, - name: agent.name, - taskId: agent.taskId ?? '', - sessionId: '', - questions: decomposeOutput.questions, - }, - }; - this.eventBus.emit(event); - } - break; - } - - case 'unrecoverable_error': { - if (active) { - active.result = { - success: false, - message: decomposeOutput.error, - }; - } - await this.repository.updateStatus(agent.id, 'crashed'); - - if (this.eventBus) { - const event: AgentCrashedEvent = { - type: 'agent:crashed', - timestamp: new Date(), - payload: { - agentId: agent.id, - name: agent.name, - taskId: agent.taskId ?? '', - error: decomposeOutput.error, - }, - }; - this.eventBus.emit(event); - } - break; - } - } - } - - /** - * Handle agent errors - actual crashes (not waiting for input). - * With structured output via --json-schema, question status is handled in - * handleAgentCompletion. 
This method only handles real subprocess errors. - */ - private async handleAgentError( - agentId: string, - error: unknown - ): Promise<void> { - const errorMessage = error instanceof Error ? error.message : String(error); - const agent = await this.repository.findById(agentId); - if (!agent) return; - - // Actual crash - structured output failed or subprocess error - await this.repository.updateStatus(agentId, 'crashed'); - - if (this.eventBus) { - const event: AgentCrashedEvent = { - type: 'agent:crashed', - timestamp: new Date(), - payload: { - agentId, - name: agent.name, - taskId: agent.taskId ?? '', - error: errorMessage, - }, - }; - this.eventBus.emit(event); - } + private async handleDetachedAgentCompletion(agentId: string): Promise<void> { + if (!this.activeAgents.has(agentId)) return; const active = this.activeAgents.get(agentId); - if (active) { - active.result = { - success: false, - message: errorMessage, - }; - } + await this.outputHandler.handleCompletion( + agentId, + active, + (alias) => this.processManager.getAgentWorkdir(alias), + ); + this.activeAgents.delete(agentId); } /** * Stop a running agent. - * Sends SIGTERM and updates status. 
*/ async stop(agentId: string): Promise<void> { const agent = await this.repository.findById(agentId); - if (!agent) { - throw new Error(`Agent '${agentId}' not found`); - } + if (!agent) throw new Error(`Agent '${agentId}' not found`); + log.info({ agentId, name: agent.name }, 'stopping agent'); const active = this.activeAgents.get(agentId); if (active) { - active.subprocess.kill('SIGTERM'); + try { process.kill(active.pid, 'SIGTERM'); } catch { /* already exited */ } + await active.tailer.stop(); this.activeAgents.delete(agentId); } - await this.repository.updateStatus(agentId, 'stopped'); + await this.repository.update(agentId, { status: 'stopped' }); if (this.eventBus) { const event: AgentStoppedEvent = { type: 'agent:stopped', timestamp: new Date(), - payload: { - agentId, - name: agent.name, - taskId: agent.taskId ?? '', - reason: 'user_requested', - }, + payload: { agentId, name: agent.name, taskId: agent.taskId ?? '', reason: 'user_requested' }, }; this.eventBus.emit(event); } @@ -673,104 +251,193 @@ export class ClaudeAgentManager implements AgentManager { /** * Resume an agent that's waiting for input. - * Uses stored session ID to continue with full context. 
- * - * @param agentId - Agent to resume - * @param answers - Map of question ID to user's answer */ async resume(agentId: string, answers: Record<string, string>): Promise<void> { const agent = await this.repository.findById(agentId); - if (!agent) { - throw new Error(`Agent '${agentId}' not found`); - } - + if (!agent) throw new Error(`Agent '${agentId}' not found`); if (agent.status !== 'waiting_for_input') { - throw new Error( - `Agent '${agent.name}' is not waiting for input (status: ${agent.status})` - ); + throw new Error(`Agent '${agent.name}' is not waiting for input (status: ${agent.status})`); } - if (!agent.sessionId) { throw new Error(`Agent '${agent.name}' has no session to resume`); } + log.info({ agentId, sessionId: agent.sessionId, provider: agent.provider }, 'resuming agent'); - // Get worktree path - const worktree = await this.worktreeManager.get(agent.worktreeId); - if (!worktree) { - throw new Error(`Worktree '${agent.worktreeId}' not found`); + const provider = getProvider(agent.provider); + if (!provider) throw new Error(`Unknown provider: '${agent.provider}'`); + if (provider.resumeStyle === 'none') { + throw new Error(`Provider '${provider.name}' does not support resume`); } - // Format answers map as structured prompt for Claude - const prompt = this.formatAnswersAsPrompt(answers); + const agentCwd = this.processManager.getAgentWorkdir(agent.worktreeId); + const prompt = this.outputHandler.formatAnswersAsPrompt(answers); + await this.repository.update(agentId, { status: 'running' }); + await this.repository.update(agentId, { pendingQuestions: null }); + await this.repository.update(agentId, { result: null }); - await this.repository.updateStatus(agentId, 'running'); + const { command, args, env: providerEnv } = this.processManager.buildResumeCommand(provider, agent.sessionId, prompt); + log.debug({ command, args: args.join(' ') }, 'resume command built'); - // Start CLI with --resume flag and mode-specific JSON schema - const jsonSchema = 
this.getJsonSchemaForMode(agent.mode as AgentMode); - const subprocess = execa( - 'claude', - [ - '-p', - prompt, - '--resume', - agent.sessionId, - '--output-format', - 'json', - '--json-schema', - JSON.stringify(jsonSchema), - ], - { - cwd: worktree.path, - detached: true, - stdio: ['ignore', 'pipe', 'pipe'], + // Set config dir if account is assigned + const processEnv: Record<string, string> = { ...providerEnv }; + if (agent.accountId && provider.configDirEnv && this.accountRepository) { + const { getAccountConfigDir } = await import('./accounts/paths.js'); + const resumeAccountConfigDir = getAccountConfigDir(this.workspaceRoot, agent.accountId); + const resumeAccount = await this.accountRepository.findById(agent.accountId); + if (resumeAccount) { + this.credentialHandler.writeCredentialsToDisk(resumeAccount, resumeAccountConfigDir); } + processEnv[provider.configDirEnv] = resumeAccountConfigDir; + const { valid, refreshed } = await this.credentialHandler.ensureCredentials(resumeAccountConfigDir, agent.accountId); + if (!valid) { + log.warn({ agentId, accountId: agent.accountId }, 'failed to refresh credentials before resume'); + } + if (refreshed) { + await this.credentialHandler.persistRefreshedCredentials(agent.accountId, resumeAccountConfigDir); + } + } + + // Stop previous tailer + const prevActive = this.activeAgents.get(agentId); + if (prevActive?.tailer) { + await prevActive.tailer.stop(); + } + + const { pid, outputFilePath, tailer } = this.processManager.spawnDetached( + agentId, command, args, agentCwd, processEnv, provider.name, prompt, + (event) => this.outputHandler.handleStreamEvent(agentId, event, this.activeAgents.get(agentId), this.outputBuffers), ); - // Clear any previous pending questions when resuming - this.activeAgents.set(agentId, { subprocess }); + await this.repository.update(agentId, { pid, outputFilePath }); + + this.activeAgents.set(agentId, { agentId, pid, tailer, outputFilePath }); + log.info({ agentId, pid }, 'resume detached 
subprocess started'); if (this.eventBus) { const event: AgentResumedEvent = { type: 'agent:resumed', timestamp: new Date(), - payload: { - agentId, - name: agent.name, - taskId: agent.taskId ?? '', - sessionId: agent.sessionId, - }, + payload: { agentId, name: agent.name, taskId: agent.taskId ?? '', sessionId: agent.sessionId }, }; this.eventBus.emit(event); } - this.handleAgentCompletion(agentId, subprocess); - } - - /** - * Format answers map as structured prompt for Claude. - * One line per answer in format: "[id]: answer" - */ - private formatAnswersAsPrompt(answers: Record<string, string>): string { - const lines = Object.entries(answers).map( - ([questionId, answer]) => `[${questionId}]: ${answer}` + this.processManager.pollForCompletion( + agentId, pid, + () => this.handleDetachedAgentCompletion(agentId), + () => this.activeAgents.get(agentId)?.tailer, ); - return `Here are my answers to your questions:\n${lines.join('\n')}`; } /** * Get the result of an agent's work. */ async getResult(agentId: string): Promise<AgentResult | null> { - const active = this.activeAgents.get(agentId); - return active?.result ?? null; + return this.outputHandler.getResult(agentId, this.activeAgents.get(agentId)); } /** * Get pending questions for an agent waiting for input. */ async getPendingQuestions(agentId: string): Promise<PendingQuestions | null> { + return this.outputHandler.getPendingQuestions(agentId, this.activeAgents.get(agentId)); + } + + /** + * Get the buffered output for an agent. + */ + getOutputBuffer(agentId: string): string[] { + return this.outputHandler.getOutputBufferCopy(this.outputBuffers, agentId); + } + + /** + * Delete an agent and clean up all associated resources. + */ + async delete(agentId: string): Promise<void> { + const agent = await this.repository.findById(agentId); + if (!agent) throw new Error(`Agent '${agentId}' not found`); + log.info({ agentId, name: agent.name }, 'deleting agent'); + + // 1. 
Kill process and stop tailer const active = this.activeAgents.get(agentId); - return active?.pendingQuestions ?? null; + if (active) { + try { process.kill(active.pid, 'SIGTERM'); } catch { /* already exited */ } + await active.tailer.stop(); + this.activeAgents.delete(agentId); + } + + // 2. Best-effort cleanup + try { await this.cleanupManager.removeAgentWorktrees(agent.name, agent.initiativeId); } + catch (err) { log.warn({ agentId, err: err instanceof Error ? err.message : String(err) }, 'failed to remove worktrees'); } + + try { await this.cleanupManager.removeAgentBranches(agent.name, agent.initiativeId); } + catch (err) { log.warn({ agentId, err: err instanceof Error ? err.message : String(err) }, 'failed to remove branches'); } + + try { await this.cleanupManager.removeAgentLogs(agentId); } + catch (err) { log.warn({ agentId, err: err instanceof Error ? err.message : String(err) }, 'failed to remove logs'); } + + // 3. Clear output buffer + this.outputHandler.clearOutputBuffer(this.outputBuffers, agentId); + + // 4. Delete DB record + await this.repository.delete(agentId); + + // 5. Emit deleted event + if (this.eventBus) { + const event: AgentDeletedEvent = { + type: 'agent:deleted', + timestamp: new Date(), + payload: { agentId, name: agent.name }, + }; + this.eventBus.emit(event); + } + log.info({ agentId, name: agent.name }, 'agent deleted'); + } + + /** + * Dismiss an agent. + */ + async dismiss(agentId: string): Promise<void> { + const agent = await this.repository.findById(agentId); + if (!agent) throw new Error(`Agent '${agentId}' not found`); + log.info({ agentId, name: agent.name }, 'dismissing agent'); + + await this.repository.update(agentId, { + userDismissedAt: new Date(), + updatedAt: new Date(), + }); + + log.info({ agentId, name: agent.name }, 'agent dismissed'); + } + + /** + * Clean up orphaned agent workdirs. 
+ */ + async cleanupOrphanedWorkdirs(): Promise<void> { + return this.cleanupManager.cleanupOrphanedWorkdirs(); + } + + /** + * Clean up orphaned agent log directories. + */ + async cleanupOrphanedLogs(): Promise<void> { + return this.cleanupManager.cleanupOrphanedLogs(); + } + + /** + * Reconcile agent state after server restart. + */ + async reconcileAfterRestart(): Promise<void> { + await this.cleanupManager.reconcileAfterRestart( + this.activeAgents, + (agentId, event) => this.outputHandler.handleStreamEvent(agentId, event, this.activeAgents.get(agentId), this.outputBuffers), + (agentId, rawOutput, provider) => this.outputHandler.processAgentOutput(agentId, rawOutput, provider, (alias) => this.processManager.getAgentWorkdir(alias)), + (agentId, pid) => this.processManager.pollForCompletion( + agentId, pid, + () => this.handleDetachedAgentCompletion(agentId), + () => this.activeAgents.get(agentId)?.tailer, + ), + ); } /** @@ -780,10 +447,13 @@ export class ClaudeAgentManager implements AgentManager { id: string; name: string; taskId: string | null; + initiativeId: string | null; sessionId: string | null; worktreeId: string; status: string; mode: string; + provider: string; + accountId: string | null; createdAt: Date; updatedAt: Date; }): AgentInfo { @@ -791,10 +461,13 @@ export class ClaudeAgentManager implements AgentManager { id: agent.id, name: agent.name, taskId: agent.taskId ?? 
'', + initiativeId: agent.initiativeId, sessionId: agent.sessionId, worktreeId: agent.worktreeId, status: agent.status as AgentStatus, mode: agent.mode as AgentMode, + provider: agent.provider, + accountId: agent.accountId, createdAt: agent.createdAt, updatedAt: agent.updatedAt, }; diff --git a/src/agent/mock-manager.test.ts b/src/agent/mock-manager.test.ts index 18f97e2..c09e8a9 100644 --- a/src/agent/mock-manager.test.ts +++ b/src/agent/mock-manager.test.ts @@ -175,10 +175,10 @@ describe('MockAgentManager', () => { // spawn() with crash scenario // =========================================================================== - describe('spawn with unrecoverable_error scenario', () => { + describe('spawn with error scenario', () => { it('should emit agent:crashed and set result.success=false', async () => { manager.setScenario('crash-agent', { - status: 'unrecoverable_error', + status: 'error', delay: 0, error: 'Something went terribly wrong', }); @@ -411,9 +411,9 @@ describe('MockAgentManager', () => { describe('setScenario overrides', () => { it('should use scenario override for specific agent name', async () => { - // Set unrecoverable_error scenario for one agent + // Set error scenario for one agent manager.setScenario('crasher', { - status: 'unrecoverable_error', + status: 'error', delay: 0, error: 'Intentional crash', }); @@ -443,7 +443,7 @@ describe('MockAgentManager', () => { it('should allow clearing scenario override', async () => { manager.setScenario('flip-flop', { - status: 'unrecoverable_error', + status: 'error', delay: 0, error: 'Crash for test', }); @@ -490,7 +490,7 @@ describe('MockAgentManager', () => { }); it('should emit spawned before crashed', async () => { - manager.setScenario('crash-order', { status: 'unrecoverable_error', delay: 0, error: 'Crash' }); + manager.setScenario('crash-order', { status: 'error', delay: 0, error: 'Crash' }); await manager.spawn({ name: 'crash-order', taskId: 't1', prompt: 'p1' }); await 
vi.advanceTimersByTimeAsync(0); @@ -553,7 +553,7 @@ describe('MockAgentManager', () => { it('should use provided default scenario', async () => { const customDefault: MockAgentScenario = { - status: 'unrecoverable_error', + status: 'error', delay: 0, error: 'Default crash', }; @@ -611,10 +611,9 @@ describe('MockAgentManager', () => { it('should spawn agent in discuss mode', async () => { manager.setScenario('discuss-agent', { - status: 'context_complete', + status: 'done', delay: 0, - decisions: [{ topic: 'Auth', decision: 'JWT', reason: 'Standard' }], - summary: 'Auth discussion complete', + result: 'Auth discussion complete', }); const agent = await manager.spawn({ @@ -629,12 +628,9 @@ describe('MockAgentManager', () => { it('should spawn agent in breakdown mode', async () => { manager.setScenario('breakdown-agent', { - status: 'breakdown_complete', + status: 'done', delay: 0, - phases: [ - { number: 1, name: 'Foundation', description: 'Core setup', dependencies: [] }, - { number: 2, name: 'Features', description: 'Main features', dependencies: [1] }, - ], + result: 'Breakdown complete', }); const agent = await manager.spawn({ @@ -647,12 +643,11 @@ describe('MockAgentManager', () => { expect(agent.mode).toBe('breakdown'); }); - it('should emit stopped event with context_complete reason', async () => { + it('should emit stopped event with context_complete reason for discuss mode', async () => { manager.setScenario('discuss-done', { - status: 'context_complete', + status: 'done', delay: 0, - decisions: [], - summary: 'Done', + result: 'Done', }); await manager.spawn({ @@ -667,11 +662,11 @@ describe('MockAgentManager', () => { expect(stopped?.payload.reason).toBe('context_complete'); }); - it('should emit stopped event with breakdown_complete reason', async () => { + it('should emit stopped event with breakdown_complete reason for breakdown mode', async () => { manager.setScenario('breakdown-done', { - status: 'breakdown_complete', + status: 'done', delay: 0, - 
phases: [], + result: 'Breakdown complete', }); await manager.spawn({ @@ -702,19 +697,16 @@ describe('MockAgentManager', () => { expect(agent.mode).toBe('decompose'); }); - it('should complete with tasks on decompose_complete', async () => { + it('should complete with decompose_complete reason in decompose mode', async () => { manager.setScenario('decomposer', { - status: 'decompose_complete', - tasks: [ - { number: 1, name: 'Task 1', description: 'First task', type: 'auto', dependencies: [] }, - { number: 2, name: 'Task 2', description: 'Second task', type: 'auto', dependencies: [1] }, - ], + status: 'done', + result: 'Decompose complete', }); await manager.spawn({ name: 'decomposer', taskId: 'plan-1', prompt: 'test', mode: 'decompose' }); await vi.advanceTimersByTimeAsync(100); - // Verify agent:stopped event with decompose_complete reason + // Verify agent:stopped event with decompose_complete reason (derived from mode) const stoppedEvent = eventBus.emittedEvents.find((e) => e.type === 'agent:stopped') as AgentStoppedEvent | undefined; expect(stoppedEvent).toBeDefined(); expect(stoppedEvent?.payload.reason).toBe('decompose_complete'); @@ -738,13 +730,11 @@ describe('MockAgentManager', () => { expect(agent?.status).toBe('waiting_for_input'); }); - it('should emit stopped event with decompose_complete reason', async () => { + it('should emit stopped event with decompose_complete reason (second test)', async () => { manager.setScenario('decompose-done', { - status: 'decompose_complete', + status: 'done', delay: 0, - tasks: [ - { number: 1, name: 'Setup', description: 'Initial setup', type: 'auto', dependencies: [] }, - ], + result: 'Decompose complete', }); await manager.spawn({ @@ -759,14 +749,10 @@ describe('MockAgentManager', () => { expect(stopped?.payload.reason).toBe('decompose_complete'); }); - it('should set result message with task count', async () => { + it('should set result message for decompose mode', async () => { manager.setScenario('decomposer', { - 
status: 'decompose_complete', - tasks: [ - { number: 1, name: 'Task 1', description: 'First', type: 'auto', dependencies: [] }, - { number: 2, name: 'Task 2', description: 'Second', type: 'checkpoint:human-verify', dependencies: [1] }, - { number: 3, name: 'Task 3', description: 'Third', type: 'auto', dependencies: [1, 2] }, - ], + status: 'done', + result: 'Decompose complete', }); const agent = await manager.spawn({ name: 'decomposer', taskId: 'plan-1', prompt: 'test', mode: 'decompose' }); @@ -774,7 +760,7 @@ describe('MockAgentManager', () => { const result = await manager.getResult(agent.id); expect(result?.success).toBe(true); - expect(result?.message).toBe('Decomposed into 3 tasks'); + expect(result?.message).toBe('Decompose complete'); }); }); diff --git a/src/agent/mock-manager.ts b/src/agent/mock-manager.ts index 9dad8c7..ab51417 100644 --- a/src/agent/mock-manager.ts +++ b/src/agent/mock-manager.ts @@ -10,34 +10,29 @@ import { randomUUID } from 'crypto'; import type { AgentManager, AgentInfo, + AgentMode, SpawnAgentOptions, AgentResult, AgentStatus, PendingQuestions, QuestionItem, } from './types.js'; -import type { Decision, PhaseBreakdown, TaskBreakdown } from './schema.js'; import type { EventBus, AgentSpawnedEvent, AgentStoppedEvent, AgentCrashedEvent, AgentResumedEvent, + AgentDeletedEvent, AgentWaitingEvent, } from '../events/index.js'; /** * Scenario configuration for mock agent behavior. - * Uses discriminated union on status to match agent output schema. - * - * Supports all four agent modes: - * - execute: done/questions/unrecoverable_error - * - discuss: questions/context_complete/unrecoverable_error - * - breakdown: questions/breakdown_complete/unrecoverable_error - * - decompose: questions/decompose_complete/unrecoverable_error + * Matches the simplified agent signal schema: done, questions, or error. + * Mode-specific stopped reasons are derived from the agent's mode. 
*/ export type MockAgentScenario = - // Execute mode statuses | { status: 'done'; result?: string; @@ -50,28 +45,8 @@ export type MockAgentScenario = delay?: number; } | { - status: 'unrecoverable_error'; + status: 'error'; error: string; - attempted?: string; - delay?: number; - } - // Discuss mode status - | { - status: 'context_complete'; - decisions: Decision[]; - summary: string; - delay?: number; - } - // Breakdown mode status - | { - status: 'breakdown_complete'; - phases: PhaseBreakdown[]; - delay?: number; - } - // Decompose mode status - | { - status: 'decompose_complete'; - tasks: TaskBreakdown[]; delay?: number; }; @@ -136,7 +111,8 @@ export class MockAgentManager implements AgentManager { * Completion happens async via setTimeout (even if delay=0). */ async spawn(options: SpawnAgentOptions): Promise<AgentInfo> { - const { name, taskId, prompt } = options; + const { taskId, prompt } = options; + const name = options.name ?? `agent-${taskId?.slice(0, 6) ?? 'noTask'}`; // Check name uniqueness for (const record of this.agents.values()) { @@ -150,17 +126,20 @@ export class MockAgentManager implements AgentManager { const worktreeId = randomUUID(); const now = new Date(); - // Determine scenario (override takes precedence) + // Determine scenario (override takes precedence — use original name or generated) const scenario = this.scenarioOverrides.get(name) ?? this.defaultScenario; const info: AgentInfo = { id: agentId, - name, - taskId, + name: name ?? `mock-${agentId.slice(0, 6)}`, + taskId: taskId ?? null, + initiativeId: options.initiativeId ?? null, sessionId, worktreeId, status: 'running', mode: options.mode ?? 'execute', + provider: options.provider ?? 'claude', + accountId: null, createdAt: now, updatedAt: now, }; @@ -180,8 +159,9 @@ export class MockAgentManager implements AgentManager { payload: { agentId, name, - taskId, + taskId: taskId ?? null, worktreeId, + provider: options.provider ?? 
'claude', }, }; this.eventBus.emit(event); @@ -209,6 +189,19 @@ export class MockAgentManager implements AgentManager { } } + /** + * Map agent mode to stopped event reason. + */ + private getStoppedReason(mode: AgentMode): AgentStoppedEvent['payload']['reason'] { + switch (mode) { + case 'discuss': return 'context_complete'; + case 'breakdown': return 'breakdown_complete'; + case 'decompose': return 'decompose_complete'; + case 'refine': return 'refine_complete'; + default: return 'task_complete'; + } + } + /** * Complete agent based on scenario status. */ @@ -229,6 +222,7 @@ export class MockAgentManager implements AgentManager { record.info.updatedAt = new Date(); if (this.eventBus) { + const reason = this.getStoppedReason(info.mode); const event: AgentStoppedEvent = { type: 'agent:stopped', timestamp: new Date(), @@ -236,14 +230,14 @@ export class MockAgentManager implements AgentManager { agentId, name: info.name, taskId: info.taskId, - reason: 'task_complete', + reason, }, }; this.eventBus.emit(event); } break; - case 'unrecoverable_error': + case 'error': record.result = { success: false, message: scenario.error, @@ -288,78 +282,6 @@ export class MockAgentManager implements AgentManager { this.eventBus.emit(event); } break; - - case 'context_complete': - // Discuss mode completion - captured all decisions - record.result = { - success: true, - message: scenario.summary, - }; - record.info.status = 'idle'; - record.info.updatedAt = new Date(); - - if (this.eventBus) { - const event: AgentStoppedEvent = { - type: 'agent:stopped', - timestamp: new Date(), - payload: { - agentId, - name: info.name, - taskId: info.taskId, - reason: 'context_complete', - }, - }; - this.eventBus.emit(event); - } - break; - - case 'breakdown_complete': - // Breakdown mode completion - decomposed into phases - record.result = { - success: true, - message: `Decomposed into ${scenario.phases.length} phases`, - }; - record.info.status = 'idle'; - record.info.updatedAt = new Date(); - - 
if (this.eventBus) { - const event: AgentStoppedEvent = { - type: 'agent:stopped', - timestamp: new Date(), - payload: { - agentId, - name: info.name, - taskId: info.taskId, - reason: 'breakdown_complete', - }, - }; - this.eventBus.emit(event); - } - break; - - case 'decompose_complete': - // Decompose mode completion - decomposed phase into tasks - record.result = { - success: true, - message: `Decomposed into ${scenario.tasks.length} tasks`, - }; - record.info.status = 'idle'; - record.info.updatedAt = new Date(); - - if (this.eventBus) { - const event: AgentStoppedEvent = { - type: 'agent:stopped', - timestamp: new Date(), - payload: { - agentId, - name: info.name, - taskId: info.taskId, - reason: 'decompose_complete', - }, - }; - this.eventBus.emit(event); - } - break; } } @@ -398,6 +320,38 @@ export class MockAgentManager implements AgentManager { } } + /** + * Delete an agent and clean up. + * Removes from internal map and emits agent:deleted event. + */ + async delete(agentId: string): Promise<void> { + const record = this.agents.get(agentId); + if (!record) { + throw new Error(`Agent '${agentId}' not found`); + } + + // Cancel any pending completion + if (record.completionTimer) { + clearTimeout(record.completionTimer); + record.completionTimer = undefined; + } + + const name = record.info.name; + this.agents.delete(agentId); + + if (this.eventBus) { + const event: AgentDeletedEvent = { + type: 'agent:deleted', + timestamp: new Date(), + payload: { + agentId, + name, + }, + }; + this.eventBus.emit(event); + } + } + /** * List all agents with their current status. */ @@ -503,6 +457,28 @@ export class MockAgentManager implements AgentManager { return record?.pendingQuestions ?? null; } + /** + * Get the buffered output for an agent. + * Mock implementation returns empty array. + */ + getOutputBuffer(_agentId: string): string[] { + return []; + } + + /** + * Dismiss an agent. + * Mock implementation just marks the agent as dismissed. 
+ */ + async dismiss(agentId: string): Promise<void> { + const record = this.agents.get(agentId); + if (!record) { + throw new Error(`Agent '${agentId}' not found`); + } + // In mock, we just mark it as dismissed in memory + // Real implementation would update database + record.info.updatedAt = new Date(); + } + /** * Clear all agents and pending timers. * Useful for test cleanup. diff --git a/src/agent/output-handler.ts b/src/agent/output-handler.ts new file mode 100644 index 0000000..43ef3e6 --- /dev/null +++ b/src/agent/output-handler.ts @@ -0,0 +1,496 @@ +/** + * OutputHandler — Stream event processing, signal parsing, file reading, result capture. + * + * Extracted from MultiProviderAgentManager. Processes all output from agent + * subprocesses: stream events, agent signals, output files, and result/question + * retrieval. + */ + +import { readFile } from 'node:fs/promises'; +import type { AgentRepository } from '../db/repositories/agent-repository.js'; +import type { + EventBus, + AgentStoppedEvent, + AgentCrashedEvent, + AgentWaitingEvent, + AgentOutputEvent, +} from '../events/index.js'; +import type { + AgentResult, + AgentMode, + PendingQuestions, + QuestionItem, +} from './types.js'; +import type { StreamEvent } from './providers/parsers/index.js'; +import type { AgentProviderConfig } from './providers/types.js'; +import { agentSignalSchema } from './schema.js'; +import { + readSummary, + readPhaseFiles, + readTaskFiles, + readDecisionFiles, + readPageFiles, +} from './file-io.js'; +import { getProvider } from './providers/registry.js'; +import { createModuleLogger } from '../logger/index.js'; + +const log = createModuleLogger('output-handler'); + +/** Max number of output chunks to buffer per agent */ +const MAX_OUTPUT_BUFFER_SIZE = 1000; + +/** + * Tracks an active agent with its PID and file tailer. 
+ */ +export interface ActiveAgent { + agentId: string; + pid: number; + tailer: import('./file-tailer.js').FileTailer; + outputFilePath: string; + result?: AgentResult; + pendingQuestions?: PendingQuestions; + streamResultText?: string; + streamSessionId?: string; + streamCostUsd?: number; +} + +/** + * Result structure from Claude CLI with --output-format json. + */ +interface ClaudeCliResult { + type: 'result'; + subtype: 'success' | 'error'; + is_error: boolean; + session_id: string; + result: string; + structured_output?: unknown; + total_cost_usd?: number; +} + +export class OutputHandler { + constructor( + private repository: AgentRepository, + private eventBus?: EventBus, + ) {} + + /** + * Handle a standardized stream event from a parser. + */ + handleStreamEvent( + agentId: string, + event: StreamEvent, + active: ActiveAgent | undefined, + outputBuffers: Map<string, string[]>, + ): void { + switch (event.type) { + case 'init': + if (active && event.sessionId) { + active.streamSessionId = event.sessionId; + this.repository.update(agentId, { sessionId: event.sessionId }).catch((err) => { + log.warn({ agentId, err: err instanceof Error ? 
err.message : String(err) }, 'failed to update session ID'); + }); + } + break; + + case 'text_delta': + this.pushToOutputBuffer(outputBuffers, agentId, event.text); + if (this.eventBus) { + const outputEvent: AgentOutputEvent = { + type: 'agent:output', + timestamp: new Date(), + payload: { agentId, stream: 'stdout', data: event.text }, + }; + this.eventBus.emit(outputEvent); + } + break; + + case 'tool_use_start': + log.debug({ agentId, tool: event.name, toolId: event.id }, 'tool use started'); + break; + + case 'result': + if (active) { + active.streamResultText = event.text; + active.streamCostUsd = event.costUsd; + if (!active.streamSessionId && event.sessionId) { + active.streamSessionId = event.sessionId; + } + } + break; + + case 'error': + log.error({ agentId, error: event.message }, 'stream error event'); + break; + + case 'turn_end': + log.debug({ agentId, stopReason: event.stopReason }, 'turn ended'); + break; + } + } + + /** + * Handle completion of a detached agent. + * Processes the final result from the stream data captured by the tailer. + */ + async handleCompletion( + agentId: string, + active: ActiveAgent | undefined, + getAgentWorkdir: (alias: string) => string, + ): Promise<void> { + const agent = await this.repository.findById(agentId); + if (!agent) return; + + const provider = getProvider(agent.provider); + if (!provider) return; + + log.debug({ agentId }, 'detached agent completed'); + + let signalText = active?.streamResultText; + + if (!signalText) { + try { + const fileContent = await readFile(active?.outputFilePath ?? 
'', 'utf-8'); + if (fileContent.trim()) { + await this.processAgentOutput(agentId, fileContent, provider, getAgentWorkdir); + return; + } + } catch { /* file empty or missing */ } + + log.warn({ agentId }, 'no result text from stream or file'); + await this.handleAgentError(agentId, new Error('No output received'), provider, getAgentWorkdir); + return; + } + + await this.processSignalAndFiles( + agentId, + signalText, + agent.mode as AgentMode, + getAgentWorkdir, + active?.streamSessionId, + ); + } + + /** + * Process agent signal JSON and read output files. + * Universal handler for all providers and modes. + */ + async processSignalAndFiles( + agentId: string, + signalText: string, + mode: AgentMode, + getAgentWorkdir: (alias: string) => string, + sessionId?: string, + ): Promise<void> { + const agent = await this.repository.findById(agentId); + if (!agent) return; + + let signal; + try { + const parsed = JSON.parse(signalText.trim()); + signal = agentSignalSchema.parse(parsed); + } catch { + await this.repository.update(agentId, { status: 'crashed' }); + this.emitCrashed(agent, 'Failed to parse agent signal JSON'); + return; + } + + switch (signal.status) { + case 'done': + await this.processOutputFiles(agentId, agent, mode, getAgentWorkdir); + break; + case 'questions': + await this.handleQuestions(agentId, agent, signal.questions, sessionId); + break; + case 'error': + await this.handleSignalError(agentId, agent, signal.error); + break; + } + } + + /** + * Process output files from agent workdir after successful completion. + */ + private async processOutputFiles( + agentId: string, + agent: { id: string; name: string; taskId: string | null; worktreeId: string; mode: string }, + mode: AgentMode, + getAgentWorkdir: (alias: string) => string, + ): Promise<void> { + const agentWorkdir = getAgentWorkdir(agent.worktreeId); + const summary = readSummary(agentWorkdir); + + let resultMessage = summary?.body ?? 
'Task completed'; + switch (mode) { + case 'breakdown': { + const phases = readPhaseFiles(agentWorkdir); + resultMessage = JSON.stringify({ summary: summary?.body, phases }); + break; + } + case 'decompose': { + const tasks = readTaskFiles(agentWorkdir); + resultMessage = JSON.stringify({ summary: summary?.body, tasks }); + break; + } + case 'discuss': { + const decisions = readDecisionFiles(agentWorkdir); + resultMessage = JSON.stringify({ summary: summary?.body, decisions }); + break; + } + case 'refine': { + const pages = readPageFiles(agentWorkdir); + resultMessage = JSON.stringify({ summary: summary?.body, proposals: pages }); + break; + } + } + + const resultPayload: AgentResult = { + success: true, + message: resultMessage, + filesModified: summary?.filesModified, + }; + await this.repository.update(agentId, { result: JSON.stringify(resultPayload) }); + await this.repository.update(agentId, { status: 'idle' }); + + const reason = this.getStoppedReason(mode); + if (this.eventBus) { + const event: AgentStoppedEvent = { + type: 'agent:stopped', + timestamp: new Date(), + payload: { + agentId, + name: agent.name, + taskId: agent.taskId ?? '', + reason, + }, + }; + this.eventBus.emit(event); + } + + return; + } + + /** + * Handle questions signal from agent. + */ + async handleQuestions( + agentId: string, + agent: { id: string; name: string; taskId: string | null; sessionId: string | null }, + questions: QuestionItem[], + sessionId?: string, + ): Promise<void> { + const questionsPayload: PendingQuestions = { questions }; + + await this.repository.update(agentId, { pendingQuestions: JSON.stringify(questionsPayload) }); + await this.repository.update(agentId, { status: 'waiting_for_input' }); + + if (this.eventBus) { + const event: AgentWaitingEvent = { + type: 'agent:waiting', + timestamp: new Date(), + payload: { + agentId, + name: agent.name, + taskId: agent.taskId ?? '', + sessionId: sessionId ?? agent.sessionId ?? 
'', + questions, + }, + }; + this.eventBus.emit(event); + } + } + + /** + * Handle error signal from agent. + */ + async handleSignalError( + agentId: string, + agent: { id: string; name: string; taskId: string | null }, + error: string, + ): Promise<void> { + const errorResult: AgentResult = { success: false, message: error }; + + await this.repository.update(agentId, { result: JSON.stringify(errorResult) }); + await this.repository.update(agentId, { status: 'crashed' }); + this.emitCrashed(agent, error); + } + + /** + * Map agent mode to stopped event reason. + */ + getStoppedReason(mode: AgentMode): AgentStoppedEvent['payload']['reason'] { + switch (mode) { + case 'discuss': return 'context_complete'; + case 'breakdown': return 'breakdown_complete'; + case 'decompose': return 'decompose_complete'; + case 'refine': return 'refine_complete'; + default: return 'task_complete'; + } + } + + /** + * Process raw output from an agent (from file or direct). + */ + async processAgentOutput( + agentId: string, + rawOutput: string, + provider: AgentProviderConfig, + getAgentWorkdir: (alias: string) => string, + ): Promise<void> { + const agent = await this.repository.findById(agentId); + if (!agent) return; + + // Extract session ID using provider's extraction config + let sessionId: string | null = null; + if (provider.sessionId) { + try { + if (provider.sessionId.extractFrom === 'result') { + const parsed = JSON.parse(rawOutput); + sessionId = parsed[provider.sessionId.field] ?? null; + } else if (provider.sessionId.extractFrom === 'event') { + const lines = rawOutput.trim().split('\n'); + for (const line of lines) { + try { + const event = JSON.parse(line); + if (event.type === provider.sessionId.eventType) { + sessionId = event[provider.sessionId.field] ?? 
null; + } + } catch { /* skip */ } + } + } + } catch { /* parse failure */ } + } + + if (sessionId) { + await this.repository.update(agentId, { sessionId }); + } + log.debug({ agentId, provider: provider.name, hasSessionId: !!sessionId }, 'processing agent output'); + + if (provider.name === 'claude') { + let signalText: string; + try { + const cliResult: ClaudeCliResult = JSON.parse(rawOutput); + const signal = cliResult.structured_output ?? JSON.parse(cliResult.result); + signalText = JSON.stringify(signal); + } catch (parseErr) { + log.error({ agentId, err: parseErr instanceof Error ? parseErr.message : String(parseErr) }, 'failed to parse agent output'); + await this.repository.update(agentId, { status: 'crashed' }); + return; + } + + await this.processSignalAndFiles(agentId, signalText, agent.mode as AgentMode, getAgentWorkdir, sessionId ?? undefined); + } else { + await this.processSignalAndFiles(agentId, rawOutput, agent.mode as AgentMode, getAgentWorkdir, sessionId ?? undefined); + } + } + + /** + * Handle agent errors. Detects usage limit exhaustion patterns. + * Returns true if error was an exhaustion error (caller should attempt failover). + */ + async handleAgentError( + agentId: string, + error: unknown, + provider: AgentProviderConfig, + _getAgentWorkdir: (alias: string) => string, + ): Promise<void> { + const errorMessage = error instanceof Error ? error.message : String(error); + const agent = await this.repository.findById(agentId); + if (!agent) return; + + log.error({ agentId, err: errorMessage }, 'agent error'); + + await this.repository.update(agentId, { status: 'crashed' }); + + if (this.eventBus) { + const event: AgentCrashedEvent = { + type: 'agent:crashed', + timestamp: new Date(), + payload: { + agentId, + name: agent.name, + taskId: agent.taskId ?? 
'', + error: errorMessage, + }, + }; + this.eventBus.emit(event); + } + + const errorResult: AgentResult = { + success: false, + message: errorMessage, + }; + await this.repository.update(agentId, { result: JSON.stringify(errorResult) }); + } + + /** + * Format answers map as structured prompt. + */ + formatAnswersAsPrompt(answers: Record<string, string>): string { + const lines = Object.entries(answers).map( + ([questionId, answer]) => `[${questionId}]: ${answer}`, + ); + return `Here are my answers to your questions:\n${lines.join('\n')}`; + } + + /** + * Get the result of an agent's work. + */ + async getResult(agentId: string, active?: ActiveAgent): Promise<AgentResult | null> { + if (active?.result) return active.result; + const agent = await this.repository.findById(agentId); + return agent?.result ? JSON.parse(agent.result) : null; + } + + /** + * Get pending questions for an agent waiting for input. + */ + async getPendingQuestions(agentId: string, active?: ActiveAgent): Promise<PendingQuestions | null> { + if (active?.pendingQuestions) return active.pendingQuestions; + const agent = await this.repository.findById(agentId); + return agent?.pendingQuestions ? JSON.parse(agent.pendingQuestions) : null; + } + + // ========================================================================= + // Output Buffer Management + // ========================================================================= + + pushToOutputBuffer(buffers: Map<string, string[]>, agentId: string, chunk: string): void { + let buffer = buffers.get(agentId); + if (!buffer) { + buffer = []; + buffers.set(agentId, buffer); + } + buffer.push(chunk); + while (buffer.length > MAX_OUTPUT_BUFFER_SIZE) { + buffer.shift(); + } + } + + clearOutputBuffer(buffers: Map<string, string[]>, agentId: string): void { + buffers.delete(agentId); + } + + getOutputBufferCopy(buffers: Map<string, string[]>, agentId: string): string[] { + return [...(buffers.get(agentId) ?? 
[])]; + } + + // ========================================================================= + // Private Helpers + // ========================================================================= + + private emitCrashed(agent: { id: string; name: string; taskId: string | null }, error: string): void { + if (this.eventBus) { + const event: AgentCrashedEvent = { + type: 'agent:crashed', + timestamp: new Date(), + payload: { + agentId: agent.id, + name: agent.name, + taskId: agent.taskId ?? '', + error, + }, + }; + this.eventBus.emit(event); + } + } +} diff --git a/src/agent/process-manager.ts b/src/agent/process-manager.ts new file mode 100644 index 0000000..91e7fcf --- /dev/null +++ b/src/agent/process-manager.ts @@ -0,0 +1,269 @@ +/** + * ProcessManager — Subprocess lifecycle, worktree creation, command building. + * + * Extracted from MultiProviderAgentManager. Manages the spawning of detached + * subprocesses, worktree creation per project, and provider-specific command + * construction. + */ + +import { spawn } from 'node:child_process'; +import { writeFileSync, mkdirSync, openSync, closeSync } from 'node:fs'; +import { join } from 'node:path'; +import type { ProjectRepository } from '../db/repositories/project-repository.js'; +import type { EventBus } from '../events/index.js'; +import type { AgentProviderConfig } from './providers/types.js'; +import type { StreamEvent } from './providers/parsers/index.js'; +import { getStreamParser } from './providers/parsers/index.js'; +import { SimpleGitWorktreeManager } from '../git/manager.js'; +import { ensureProjectClone, getProjectCloneDir } from '../git/project-clones.js'; +import { FileTailer } from './file-tailer.js'; +import { createModuleLogger } from '../logger/index.js'; + +const log = createModuleLogger('process-manager'); + +/** + * Check if a process with the given PID is still alive. 
+ */ +export function isPidAlive(pid: number): boolean { + try { + process.kill(pid, 0); + return true; + } catch { + return false; + } +} + +export class ProcessManager { + constructor( + private workspaceRoot: string, + private projectRepository: ProjectRepository, + private eventBus?: EventBus, + ) {} + + /** + * Resolve the agent's working directory path. + */ + getAgentWorkdir(alias: string): string { + return join(this.workspaceRoot, 'agent-workdirs', alias); + } + + /** + * Create worktrees for all projects linked to an initiative. + * Returns the base agent workdir path. + */ + async createProjectWorktrees( + alias: string, + initiativeId: string, + baseBranch: string = 'main', + ): Promise<string> { + const projects = await this.projectRepository.findProjectsByInitiativeId(initiativeId); + const agentWorkdir = this.getAgentWorkdir(alias); + + for (const project of projects) { + const clonePath = await ensureProjectClone(project, this.workspaceRoot); + const worktreeManager = new SimpleGitWorktreeManager(clonePath, undefined, agentWorkdir); + await worktreeManager.create(project.name, `agent/${alias}`, baseBranch); + } + + return agentWorkdir; + } + + /** + * Fallback: create a single "workspace" worktree for standalone agents. + */ + async createStandaloneWorktree(alias: string): Promise<string> { + const agentWorkdir = this.getAgentWorkdir(alias); + const worktreeManager = new SimpleGitWorktreeManager(this.workspaceRoot, undefined, agentWorkdir); + const worktree = await worktreeManager.create('workspace', `agent/${alias}`); + return worktree.path; + } + + /** + * Build the spawn command for a given provider configuration. 
+ */ + buildSpawnCommand( + provider: AgentProviderConfig, + prompt: string, + ): { command: string; args: string[]; env: Record<string, string> } { + const args = [...provider.args]; + const env: Record<string, string> = { ...provider.env }; + + if (provider.nonInteractive?.subcommand) { + args.unshift(provider.nonInteractive.subcommand); + } + + if (provider.promptMode === 'native') { + args.push('-p', prompt); + } else if (provider.promptMode === 'flag' && provider.nonInteractive?.promptFlag) { + args.push(provider.nonInteractive.promptFlag, prompt); + } + + if (provider.nonInteractive?.outputFlag) { + args.push(...provider.nonInteractive.outputFlag.split(' ')); + } + + return { command: provider.command, args, env }; + } + + /** + * Build the resume command for a given provider configuration. + */ + buildResumeCommand( + provider: AgentProviderConfig, + sessionId: string, + prompt: string, + ): { command: string; args: string[]; env: Record<string, string> } { + const args = [...provider.args]; + const env: Record<string, string> = { ...provider.env }; + + switch (provider.resumeStyle) { + case 'flag': + args.push(provider.resumeFlag!, sessionId); + break; + case 'subcommand': + if (provider.nonInteractive?.subcommand) { + args.unshift(provider.nonInteractive.subcommand); + } + args.push(provider.resumeFlag!, sessionId); + break; + case 'none': + throw new Error(`Provider '${provider.name}' does not support resume`); + } + + if (provider.promptMode === 'native') { + args.push('-p', prompt); + } else if (provider.promptMode === 'flag' && provider.nonInteractive?.promptFlag) { + args.push(provider.nonInteractive.promptFlag, prompt); + } + + if (provider.nonInteractive?.outputFlag) { + args.push(...provider.nonInteractive.outputFlag.split(' ')); + } + + return { command: provider.command, args, env }; + } + + /** + * Extract session ID from CLI output based on provider config. 
+ */ + extractSessionId( + provider: AgentProviderConfig, + output: string, + ): string | null { + if (!provider.sessionId) return null; + + try { + if (provider.sessionId.extractFrom === 'result') { + const parsed = JSON.parse(output); + return parsed[provider.sessionId.field] ?? null; + } + + if (provider.sessionId.extractFrom === 'event') { + const lines = output.trim().split('\n'); + for (const line of lines) { + try { + const event = JSON.parse(line); + if (event.type === provider.sessionId.eventType) { + return event[provider.sessionId.field] ?? null; + } + } catch { + // Skip non-JSON lines + } + } + } + } catch { + // Parse failure + } + + return null; + } + + /** + * Spawn a detached subprocess with file redirection for crash resilience. + * The subprocess writes directly to files and survives server crashes. + * A FileTailer watches the output file and emits events in real-time. + * + * @param onEvent - Callback for stream events from the tailer + */ + spawnDetached( + agentId: string, + command: string, + args: string[], + cwd: string, + env: Record<string, string>, + providerName: string, + prompt?: string, + onEvent?: (event: StreamEvent) => void, + ): { pid: number; outputFilePath: string; tailer: FileTailer } { + const logDir = join(this.workspaceRoot, '.cw', 'agent-logs', agentId); + mkdirSync(logDir, { recursive: true }); + const outputFilePath = join(logDir, 'output.jsonl'); + const stderrFilePath = join(logDir, 'stderr.log'); + + if (prompt) { + writeFileSync(join(logDir, 'PROMPT.md'), prompt, 'utf-8'); + } + + const stdoutFd = openSync(outputFilePath, 'w'); + const stderrFd = openSync(stderrFilePath, 'w'); + + const child = spawn(command, args, { + cwd, + env: { ...process.env, ...env }, + detached: true, + stdio: ['ignore', stdoutFd, stderrFd], + }); + + closeSync(stdoutFd); + closeSync(stderrFd); + + child.unref(); + + const pid = child.pid!; + log.debug({ agentId, pid, command }, 'spawned detached process'); + + const parser = 
getStreamParser(providerName); + const tailer = new FileTailer({ + filePath: outputFilePath, + agentId, + parser, + eventBus: this.eventBus, + onEvent: onEvent ?? (() => {}), + startFromBeginning: true, + }); + + tailer.start().catch((err) => { + log.warn({ agentId, err: err instanceof Error ? err.message : String(err) }, 'failed to start tailer'); + }); + + return { pid, outputFilePath, tailer }; + } + + /** + * Poll for process completion by checking if PID is still alive. + * When the process exits, calls onComplete callback. + * + * @param onComplete - Called when the process is no longer alive + * @param getTailer - Function to get the current tailer for final flush + */ + pollForCompletion( + agentId: string, + pid: number, + onComplete: () => Promise<void>, + getTailer: () => FileTailer | undefined, + ): void { + const check = async () => { + if (!isPidAlive(pid)) { + const tailer = getTailer(); + if (tailer) { + await new Promise((resolve) => setTimeout(resolve, 500)); + await tailer.stop(); + } + await onComplete(); + return; + } + setTimeout(check, 1000); + }; + check(); + } +} diff --git a/src/agent/prompts.ts b/src/agent/prompts.ts index 1e715a6..9432b03 100644 --- a/src/agent/prompts.ts +++ b/src/agent/prompts.ts @@ -1,70 +1,103 @@ /** * Agent Prompts Module * - * Comprehensive mode-specific prompts for agent operations. - * Each prompt explains the agent's role, output format, and expected behavior. + * Zero-argument prompt builders for file-based agent I/O. + * Dynamic context is written to .cw/input/ files before spawn. + * Agents write output to .cw/output/ files and emit a trivial JSON signal. 
*/ -import type { Initiative, Phase, Plan } from '../db/schema.js'; +const SIGNAL_FORMAT = ` +## Signal Output + +When done, output ONLY this JSON (no other text before or after): +{ "status": "done" } + +If you need clarification, output: +{ "status": "questions", "questions": [{ "id": "q1", "question": "Your question" }] } + +If you hit an unrecoverable error, output: +{ "status": "error", "error": "Description of what went wrong" }`; + +const INPUT_FILES = ` +## Input Files + +Read context from \`.cw/input/\`: +- \`initiative.md\` — Initiative details (frontmatter: id, name, status) +- \`phase.md\` — Phase details if applicable (frontmatter: id, number, name, status; body: description) +- \`task.md\` — Task details if applicable (frontmatter: id, name, category, type, priority, status; body: description) +- \`pages/\` — Initiative pages (one file per page; frontmatter: title, parentPageId, sortOrder; body: markdown content)`; + +const SUMMARY_REQUIREMENT = ` +## Required Output + +ALWAYS write \`.cw/output/SUMMARY.md\` with: +- Frontmatter: \`files_modified\` (list of file paths you changed) +- Body: A concise summary of what you accomplished (shown to the user) + +Example: +\`\`\` +--- +files_modified: + - src/auth/login.ts + - src/auth/middleware.ts +--- +Implemented JWT-based login with refresh token support. +\`\`\``; + +const ID_GENERATION = ` +## ID Generation + +When creating new entities (phases, tasks, decisions), generate a unique ID by running: +\`\`\` +cw id +\`\`\` +Use the output as the filename (e.g., \`{id}.md\`).`; /** - * Build comprehensive prompt for discuss mode. - * Agent asks clarifying questions to understand requirements. + * Build prompt for execute mode (standard worker agent). */ -export function buildDiscussPrompt(initiative: Initiative, context?: string): string { +export function buildExecutePrompt(): string { + return `You are a Worker agent in the Codewalk multi-agent system. + +## Your Role +Execute the assigned task. 
Read the task details from input files, do the work, and report results. +${INPUT_FILES} +${SIGNAL_FORMAT} +${SUMMARY_REQUIREMENT} + +## Rules +- Complete the task as specified in .cw/input/task.md +- Ask questions if requirements are unclear +- Report errors honestly — don't guess +- Focus on writing clean, tested code`; +} + +/** + * Build prompt for discuss mode. + * Agent asks clarifying questions to understand requirements and captures decisions. + */ +export function buildDiscussPrompt(): string { return `You are an Architect agent in the Codewalk multi-agent system operating in DISCUSS mode. ## Your Role -Transform user intent into clear, documented decisions. You do NOT write code - you capture decisions. +Transform user intent into clear, documented decisions. You do NOT write code — you capture decisions. +${INPUT_FILES} +${SIGNAL_FORMAT} -## Initiative -Name: ${initiative.name} -${initiative.description ? `Description: ${initiative.description}` : ''} -${context ? `\nAdditional Context: ${context}` : ''} +## Output Files -## Your Task -Ask clarifying questions to understand the requirements. Capture decisions as you go. +Write decisions to \`.cw/output/decisions/{id}.md\`: +- Frontmatter: \`topic\`, \`decision\`, \`reason\` +- Body: Additional context or rationale + +${SUMMARY_REQUIREMENT} +${ID_GENERATION} ## Question Categories - -**User Journeys:** -- What are the main user workflows? -- What happens on success? On failure? -- What are the edge cases? - -**Technical Constraints:** -- What patterns should we follow? -- What should we avoid? -- What existing code should we reference? - -**Data & Validation:** -- What data structures are needed? -- What validation rules apply? -- What are the constraints? - -**Integration Points:** -- What external systems are involved? -- What APIs do we need? -- What error handling is required? 
- -## Output Format - -When you need more information, output: -{ - "status": "questions", - "questions": [ - { "id": "q1", "question": "Your question here", "options": [{"label": "Option A"}, {"label": "Option B"}] } - ] -} - -When you have enough information, output: -{ - "status": "context_complete", - "decisions": [ - { "topic": "Authentication", "decision": "Use JWT", "reason": "Stateless, scalable" } - ], - "summary": "Brief summary of all decisions made" -} +- **User Journeys**: Main workflows, success/failure paths, edge cases +- **Technical Constraints**: Patterns to follow, things to avoid, reference code +- **Data & Validation**: Data structures, validation rules, constraints +- **Integration Points**: External systems, APIs, error handling ## Rules - Ask 2-4 questions at a time, not more @@ -74,194 +107,102 @@ When you have enough information, output: } /** - * Build comprehensive prompt for breakdown mode. + * Build prompt for breakdown mode. * Agent decomposes initiative into executable phases. */ -export function buildBreakdownPrompt(initiative: Initiative, contextSummary?: string): string { +export function buildBreakdownPrompt(): string { return `You are an Architect agent in the Codewalk multi-agent system operating in BREAKDOWN mode. ## Your Role -Decompose the initiative into executable phases. You do NOT write code - you plan it. +Decompose the initiative into executable phases. You do NOT write code — you plan it. +${INPUT_FILES} +${SIGNAL_FORMAT} -## Initiative -Name: ${initiative.name} -${initiative.description ? `Description: ${initiative.description}` : ''} -${contextSummary ? `\nContext from Discussion Phase:\n${contextSummary}` : ''} +## Output Files -## Your Task -Break this initiative into phases that can be executed by worker agents. 
+Write one file per phase to \`.cw/output/phases/{id}.md\`: +- Frontmatter: \`title\`, \`dependencies\` (list of other phase IDs this depends on) +- Body: Description of the phase and what gets built + +${SUMMARY_REQUIREMENT} +${ID_GENERATION} ## Phase Design Rules - -**Each phase must be:** -- A coherent unit of work (single concern) -- Independently deliverable -- Testable in isolation - -**Dependencies:** -- Identify what each phase needs from prior phases -- Minimize cross-phase dependencies -- Foundation phases come first - -**Sizing:** -- Phases should be 2-5 tasks each -- Not too big (hard to track), not too small (overhead) - -**Naming:** -- Clear, action-oriented names -- Describe what gets built, not how - -## Output Format - -If you need clarification, output: -{ - "status": "questions", - "questions": [ - { "id": "q1", "question": "Your question here" } - ] -} - -When breakdown is complete, output: -{ - "status": "breakdown_complete", - "phases": [ - { - "number": 1, - "name": "Database Schema", - "description": "Create user tables and authentication schema", - "dependencies": [] - }, - { - "number": 2, - "name": "Auth API", - "description": "JWT generation, validation, and middleware", - "dependencies": [1] - } - ] -} +- Each phase: single concern, independently deliverable, testable +- Minimize cross-phase dependencies; foundation phases first +- Size: 2-5 tasks each (not too big, not too small) +- Clear, action-oriented names (describe what gets built, not how) ## Rules - Start with foundation/infrastructure phases - Group related work together -- Make dependencies explicit +- Make dependencies explicit using phase IDs - Each phase should be completable in one session`; } /** - * Build prompt for execute mode (standard worker agent). - * This is the default mode for task execution. + * Build prompt for decompose mode. + * Agent breaks a phase into executable tasks. 
*/ -export function buildExecutePrompt(taskDescription: string): string { - return `You are a Worker agent in the Codewalk multi-agent system. - -## Your Task -${taskDescription} - -## Output Format - -When task is complete, output: -{ - "status": "done", - "result": "Description of what was accomplished", - "filesModified": ["path/to/file1.ts", "path/to/file2.ts"] -} - -If you need clarification, output: -{ - "status": "questions", - "questions": [ - { "id": "q1", "question": "Your question here" } - ] -} - -If you hit an unrecoverable error, output: -{ - "status": "unrecoverable_error", - "error": "Description of what went wrong", - "attempted": "What you tried before failing" -} - -## Rules -- Complete the task as specified -- Ask questions if requirements are unclear -- Report errors honestly - don't guess`; -} - -/** - * Build comprehensive prompt for decompose mode. - * Agent breaks a plan into executable tasks. - */ -export function buildDecomposePrompt(plan: Plan, phase: Phase, context?: string): string { +export function buildDecomposePrompt(): string { return `You are an Architect agent in the Codewalk multi-agent system operating in DECOMPOSE mode. ## Your Role -Break a plan into executable tasks. You do NOT write code - you decompose work into atomic units. +Decompose the phase into individual executable tasks. You do NOT write code — you define work items. +${INPUT_FILES} +${SIGNAL_FORMAT} -## Plan -Name: ${plan.name} -Phase: ${phase.name} -${plan.description ? `Description: ${plan.description}` : ''} -${context ? `\nAdditional Context: ${context}` : ''} +## Output Files -## Your Task -Decompose this plan into tasks that worker agents can execute. 
+Write one file per task to \`.cw/output/tasks/{id}.md\`: +- Frontmatter: + - \`title\`: Clear task name + - \`category\`: One of: execute, research, discuss, breakdown, decompose, refine, verify, merge, review + - \`type\`: One of: auto, checkpoint:human-verify, checkpoint:decision, checkpoint:human-action + - \`dependencies\`: List of other task IDs this depends on +- Body: Detailed description of what the task requires + +${SUMMARY_REQUIREMENT} +${ID_GENERATION} ## Task Design Rules - -**Each task must be:** -- A single atomic unit of work -- Independently executable (or with clear dependencies) -- Verifiable (has a clear done condition) - -**Task Types:** -- 'auto': Agent executes autonomously (default, most common) -- 'checkpoint:human-verify': Needs human to verify visual/functional output -- 'checkpoint:decision': Needs human to make a choice -- 'checkpoint:human-action': Needs unavoidable manual action (rare) - -**Dependencies:** -- Identify what each task needs from prior tasks -- Use task numbers (1, 2, 3) for dependencies -- Minimize dependencies where possible - -**Sizing:** -- Tasks should be 15-60 minutes of work -- Not too big (hard to debug), not too small (overhead) - -## Output Format - -If you need clarification, output: -{ - "status": "questions", - "questions": [ - { "id": "q1", "question": "Your question here" } - ] -} - -When decomposition is complete, output: -{ - "status": "decompose_complete", - "tasks": [ - { - "number": 1, - "name": "Create user schema", - "description": "Add User table to database with email, passwordHash, createdAt", - "type": "auto", - "dependencies": [] - }, - { - "number": 2, - "name": "Create login endpoint", - "description": "POST /api/auth/login - validate credentials, return JWT", - "type": "auto", - "dependencies": [1] - } - ] -} +- Each task: specific, actionable, completable by one agent +- Include verification steps where appropriate +- Use \`checkpoint:*\` types for tasks requiring human review +- 
Dependencies should be minimal and explicit ## Rules -- Tasks must be in dependency order -- Each task should have clear, specific description -- Default to 'auto' type unless human interaction is genuinely required -- Keep tasks focused on a single concern`; +- Break work into 3-8 tasks per phase +- Order tasks logically (foundational work first) +- Make each task self-contained with enough context +- Include test/verify tasks where appropriate`; +} + +/** + * Build prompt for refine mode. + * Agent reviews initiative content and proposes edits to pages. + */ +export function buildRefinePrompt(): string { + return `You are an Architect agent in the Codewalk multi-agent system operating in REFINE mode. + +## Your Role +Review and improve initiative content. You suggest edits to specific pages. You do NOT write code. +${INPUT_FILES} +${SIGNAL_FORMAT} + +## Output Files + +Write one file per modified page to \`.cw/output/pages/{pageId}.md\`: +- Frontmatter: \`title\`, \`summary\` (what changed and why) +- Body: Full new markdown content for the page (replaces entire page body) + +${SUMMARY_REQUIREMENT} + +## Rules +- Ask 2-4 questions at a time if you need clarification +- Only propose changes for pages that genuinely need improvement +- Each output page's body is the FULL new content (not a diff) +- Preserve [[page:\$id|title]] cross-references in your output +- Focus on clarity, completeness, and consistency +- Do not invent new page IDs — only reference existing ones from .cw/input/pages/`; } diff --git a/src/agent/providers/index.ts b/src/agent/providers/index.ts new file mode 100644 index 0000000..136a4eb --- /dev/null +++ b/src/agent/providers/index.ts @@ -0,0 +1,40 @@ +/** + * Agent Providers Module - Public API + * + * Re-exports provider types, presets, and registry functions. 
+ */ + +export type { + AgentProviderConfig, + StructuredOutputConfig, + SessionIdConfig, + NonInteractiveConfig, +} from './types.js'; + +export { PROVIDER_PRESETS } from './presets.js'; + +export { + getProvider, + listProviders, + registerProvider, + loadProvidersFromFile, +} from './registry.js'; + +// Stream parsing +export type { + StreamEvent, + StreamParser, + StreamInitEvent, + StreamTextDeltaEvent, + StreamToolUseStartEvent, + StreamToolResultEvent, + StreamTurnEndEvent, + StreamResultEvent, + StreamErrorEvent, +} from './stream-types.js'; + +export { + getStreamParser, + ClaudeStreamParser, + GenericStreamParser, +} from './parsers/index.js'; diff --git a/src/agent/providers/parsers/claude.ts b/src/agent/providers/parsers/claude.ts new file mode 100644 index 0000000..0abf1b8 --- /dev/null +++ b/src/agent/providers/parsers/claude.ts @@ -0,0 +1,165 @@ +/** + * Claude Code Stream Parser + * + * Parses Claude Code CLI `--output-format stream-json` NDJSON output + * into standardized StreamEvents. 
+ * + * Key line types handled: + * - system (subtype=init): session_id + * - stream_event (content_block_delta, text_delta): delta.text + * - stream_event (content_block_start, tool_use): content_block.name, .id + * - stream_event (message_delta): delta.stop_reason + * - result: result, session_id, total_cost_usd + * - any with is_error: true: error message + */ + +import type { StreamEvent, StreamParser } from '../stream-types.js'; + +interface ClaudeSystemEvent { + type: 'system'; + subtype?: string; + session_id?: string; +} + +interface ClaudeStreamEvent { + type: 'stream_event'; + event?: { + type: string; + index?: number; + delta?: { + type?: string; + text?: string; + stop_reason?: string; + }; + content_block?: { + type?: string; + id?: string; + name?: string; + }; + }; +} + +interface ClaudeAssistantEvent { + type: 'assistant'; + message?: { + content?: Array<{ + type: string; + text?: string; + id?: string; + name?: string; + }>; + }; +} + +interface ClaudeResultEvent { + type: 'result'; + result?: string; + session_id?: string; + total_cost_usd?: number; + is_error?: boolean; +} + +type ClaudeEvent = ClaudeSystemEvent | ClaudeStreamEvent | ClaudeAssistantEvent | ClaudeResultEvent | { type: string; is_error?: boolean; result?: string }; + +export class ClaudeStreamParser implements StreamParser { + readonly provider = 'claude'; + + parseLine(line: string): StreamEvent[] { + const trimmed = line.trim(); + if (!trimmed) return []; + + let parsed: ClaudeEvent; + try { + parsed = JSON.parse(trimmed); + } catch { + // Not valid JSON, ignore + return []; + } + + // Check for error first (can appear on any event type) + if ('is_error' in parsed && parsed.is_error && 'result' in parsed) { + return [{ type: 'error', message: String(parsed.result) }]; + } + + const events: StreamEvent[] = []; + + switch (parsed.type) { + case 'system': { + const sysEvent = parsed as ClaudeSystemEvent; + if (sysEvent.subtype === 'init' && sysEvent.session_id) { + events.push({ 
type: 'init', sessionId: sysEvent.session_id }); + } + break; + } + + case 'stream_event': { + const streamEvent = parsed as ClaudeStreamEvent; + const inner = streamEvent.event; + if (!inner) break; + + switch (inner.type) { + case 'content_block_delta': { + if (inner.delta?.type === 'text_delta' && inner.delta.text) { + events.push({ type: 'text_delta', text: inner.delta.text }); + } + break; + } + + case 'content_block_start': { + if (inner.content_block?.type === 'tool_use') { + const name = inner.content_block.name || 'unknown'; + const id = inner.content_block.id || ''; + events.push({ type: 'tool_use_start', name, id }); + } + break; + } + + case 'message_delta': { + if (inner.delta?.stop_reason) { + events.push({ type: 'turn_end', stopReason: inner.delta.stop_reason }); + } + break; + } + } + break; + } + + case 'assistant': { + // Claude CLI stream-json now emits complete assistant messages + // instead of granular stream_event deltas + const assistantEvent = parsed as ClaudeAssistantEvent; + const content = assistantEvent.message?.content; + if (Array.isArray(content)) { + for (const block of content) { + if (block.type === 'text' && block.text) { + events.push({ type: 'text_delta', text: block.text }); + } else if (block.type === 'tool_use' && block.name) { + events.push({ type: 'tool_use_start', name: block.name, id: block.id || '' }); + } + } + } + break; + } + + case 'result': { + const resultEvent = parsed as ClaudeResultEvent; + events.push({ + type: 'result', + text: resultEvent.result || '', + sessionId: resultEvent.session_id, + costUsd: resultEvent.total_cost_usd, + }); + break; + } + + // Ignore: message_start, content_block_stop, message_stop, user + } + + return events; + } + + end(): StreamEvent[] { + // Claude emits a result event, so nothing needed at end + return []; + } +} diff --git a/src/agent/providers/parsers/generic.ts b/src/agent/providers/parsers/generic.ts new file mode 100644 index 0000000..daccf88 --- /dev/null +++ 
b/src/agent/providers/parsers/generic.ts @@ -0,0 +1,32 @@ +/** + * Generic Fallback Stream Parser + * + * For providers without a dedicated parser. Treats each line as text output. + * Accumulates all output and emits a final result event on stream end. + */ + +import type { StreamEvent, StreamParser } from '../stream-types.js'; + +export class GenericStreamParser implements StreamParser { + readonly provider = 'generic'; + private accumulated: string[] = []; + + parseLine(line: string): StreamEvent[] { + if (!line) return []; + + this.accumulated.push(line); + + // Emit each line as a text delta + return [{ type: 'text_delta', text: line + '\n' }]; + } + + end(): StreamEvent[] { + // Emit the accumulated output as the result + const fullText = this.accumulated.join('\n'); + this.accumulated = []; + + if (!fullText) return []; + + return [{ type: 'result', text: fullText }]; + } +} diff --git a/src/agent/providers/parsers/index.ts b/src/agent/providers/parsers/index.ts new file mode 100644 index 0000000..e2a8841 --- /dev/null +++ b/src/agent/providers/parsers/index.ts @@ -0,0 +1,31 @@ +/** + * Stream Parser Registry + * + * Factory function to get the appropriate stream parser for a provider. + */ + +import type { StreamParser } from '../stream-types.js'; +import { ClaudeStreamParser } from './claude.js'; +import { GenericStreamParser } from './generic.js'; + +/** Map of provider names to parser constructors */ +const parserRegistry: Record<string, new () => StreamParser> = { + claude: ClaudeStreamParser, +}; + +/** + * Get a stream parser for the given provider. + * Returns a provider-specific parser if available, otherwise the generic fallback. 
+ */ +export function getStreamParser(providerName: string): StreamParser { + const ParserClass = parserRegistry[providerName]; + if (ParserClass) { + return new ParserClass(); + } + return new GenericStreamParser(); +} + +// Re-export types and parsers for direct access +export type { StreamParser, StreamEvent } from '../stream-types.js'; +export { ClaudeStreamParser } from './claude.js'; +export { GenericStreamParser } from './generic.js'; diff --git a/src/agent/providers/presets.ts b/src/agent/providers/presets.ts new file mode 100644 index 0000000..494dde3 --- /dev/null +++ b/src/agent/providers/presets.ts @@ -0,0 +1,145 @@ +/** + * Built-in Agent Provider Presets + * + * Data-driven configuration for all supported agent CLI providers. + * Ported from reference/gastown/internal/config/agents.go builtinPresets. + */ + +import type { AgentProviderConfig } from './types.js'; + +export const PROVIDER_PRESETS: Record<string, AgentProviderConfig> = { + claude: { + name: 'claude', + command: 'claude', + args: ['--dangerously-skip-permissions', '--verbose'], + processNames: ['node', 'claude'], + configDirEnv: 'CLAUDE_CONFIG_DIR', + resumeFlag: '--resume', + resumeStyle: 'flag', + promptMode: 'native', + // No structuredOutput - schema enforcement via prompt text + validation + sessionId: { + extractFrom: 'event', + field: 'session_id', + eventType: 'system', + }, + nonInteractive: { + outputFlag: '--output-format stream-json', + }, + }, + + codex: { + name: 'codex', + command: 'codex', + args: ['--full-auto'], + processNames: ['codex'], + resumeFlag: 'resume', + resumeStyle: 'subcommand', + promptMode: 'native', + structuredOutput: { + flag: '--output-schema', + schemaMode: 'file', + outputFormat: 'jsonl', + }, + sessionId: { + extractFrom: 'event', + field: 'thread_id', + eventType: 'thread.started', + }, + nonInteractive: { + subcommand: 'exec', + outputFlag: '--json', + }, + }, + + gemini: { + name: 'gemini', + command: 'gemini', + args: ['--sandbox=off'], + 
processNames: ['gemini'], + resumeFlag: '--resume', + resumeStyle: 'flag', + promptMode: 'flag', + structuredOutput: { + flag: '--output-format', + schemaMode: 'none', + outputFormat: 'json', + }, + sessionId: { + extractFrom: 'result', + field: 'session_id', + }, + nonInteractive: { + promptFlag: '-p', + outputFlag: '--output-format json', + }, + }, + + cursor: { + name: 'cursor', + command: 'cursor-agent', + args: ['-f'], + processNames: ['cursor-agent'], + resumeStyle: 'none', + promptMode: 'flag', + structuredOutput: { + flag: '--output-format', + schemaMode: 'none', + outputFormat: 'json', + }, + nonInteractive: { + promptFlag: '-p', + outputFlag: '--output-format json', + }, + }, + + auggie: { + name: 'auggie', + command: 'aug', + args: ['--allow-indexing'], + processNames: ['aug'], + resumeStyle: 'none', + promptMode: 'flag', + nonInteractive: { + promptFlag: '-p', + }, + }, + + amp: { + name: 'amp', + command: 'amp', + args: ['--allow-all'], + processNames: ['amp'], + resumeFlag: '--thread', + resumeStyle: 'flag', + promptMode: 'flag', + sessionId: { + extractFrom: 'result', + field: 'thread_id', + }, + nonInteractive: { + promptFlag: '-p', + outputFlag: '--json', + }, + }, + + opencode: { + name: 'opencode', + command: 'opencode', + args: [], + env: { OPENCODE_PERMISSION: '{"*":"allow"}' }, + processNames: ['opencode', 'node', 'bun'], + resumeStyle: 'none', + promptMode: 'flag', + structuredOutput: { + flag: '--format', + schemaMode: 'none', + outputFormat: 'json', + }, + nonInteractive: { + subcommand: 'run', + promptFlag: '-p', + outputFlag: '--format json', + }, + }, +}; diff --git a/src/agent/providers/registry.ts b/src/agent/providers/registry.ts new file mode 100644 index 0000000..5cf8a25 --- /dev/null +++ b/src/agent/providers/registry.ts @@ -0,0 +1,50 @@ +/** + * Agent Provider Registry + * + * In-memory registry of agent provider configurations. 
+ * Pre-populated with built-in presets, extensible via registerProvider() + * or loadProvidersFromFile() for custom/override configs. + */ + +import { readFileSync } from 'node:fs'; +import type { AgentProviderConfig } from './types.js'; +import { PROVIDER_PRESETS } from './presets.js'; + +const providers = new Map<string, AgentProviderConfig>( + Object.entries(PROVIDER_PRESETS), +); + +/** + * Get a provider configuration by name. + * Returns null if the provider is not registered. + */ +export function getProvider(name: string): AgentProviderConfig | null { + return providers.get(name) ?? null; +} + +/** + * List all registered provider names. + */ +export function listProviders(): string[] { + return Array.from(providers.keys()); +} + +/** + * Register or override a provider configuration. + */ +export function registerProvider(config: AgentProviderConfig): void { + providers.set(config.name, config); +} + +/** + * Load provider configurations from a JSON file and merge into the registry. + * File should contain a JSON object mapping provider names to AgentProviderConfig objects. + * Existing providers with matching names will be overridden. + */ +export function loadProvidersFromFile(path: string): void { + const raw = readFileSync(path, 'utf-8'); + const parsed = JSON.parse(raw) as Record<string, AgentProviderConfig>; + for (const [name, config] of Object.entries(parsed)) { + providers.set(name, { ...config, name }); + } +} diff --git a/src/agent/providers/stream-types.ts b/src/agent/providers/stream-types.ts new file mode 100644 index 0000000..7704926 --- /dev/null +++ b/src/agent/providers/stream-types.ts @@ -0,0 +1,77 @@ +/** + * Stream Event Types and Parser Interface + * + * Standardized events emitted by all provider stream parsers. + * Each provider's NDJSON output is normalized to these common events. 
+ */ + +/** Initialization event - emitted at stream start, may contain session ID */ +export interface StreamInitEvent { + type: 'init'; + sessionId?: string; +} + +/** Text delta - chunk of assistant text output */ +export interface StreamTextDeltaEvent { + type: 'text_delta'; + text: string; +} + +/** Tool use started - agent is calling a tool */ +export interface StreamToolUseStartEvent { + type: 'tool_use_start'; + name: string; + id: string; +} + +/** Tool result received */ +export interface StreamToolResultEvent { + type: 'tool_result'; + id: string; +} + +/** Turn ended - assistant stopped responding */ +export interface StreamTurnEndEvent { + type: 'turn_end'; + stopReason: string; +} + +/** Final result - emitted at stream end with complete output */ +export interface StreamResultEvent { + type: 'result'; + text: string; + sessionId?: string; + costUsd?: number; +} + +/** Error event */ +export interface StreamErrorEvent { + type: 'error'; + message: string; +} + +/** Union of all stream event types */ +export type StreamEvent = + | StreamInitEvent + | StreamTextDeltaEvent + | StreamToolUseStartEvent + | StreamToolResultEvent + | StreamTurnEndEvent + | StreamResultEvent + | StreamErrorEvent; + +/** + * Stream Parser Interface + * + * Implementations parse provider-specific NDJSON into standardized events. + */ +export interface StreamParser { + /** Provider name this parser handles */ + readonly provider: string; + + /** Parse a single NDJSON line into zero or more standardized events */ + parseLine(line: string): StreamEvent[]; + + /** Signal end of stream - allows parser to emit final events */ + end(): StreamEvent[]; +} diff --git a/src/agent/providers/types.ts b/src/agent/providers/types.ts new file mode 100644 index 0000000..fabf1ca --- /dev/null +++ b/src/agent/providers/types.ts @@ -0,0 +1,61 @@ +/** + * Agent Provider Configuration Types + * + * Data-driven configuration for multi-provider agent spawning. 
+ * Each provider (Claude, Codex, Gemini, etc.) has a config that describes + * how to invoke its CLI, pass prompts, extract session IDs, and resume. + */ + +export interface StructuredOutputConfig { + /** CLI flag for structured output (e.g. "--json-schema", "--output-schema") */ + flag: string; + /** How to pass the schema: inline JSON string, file path, or not supported */ + schemaMode: 'inline' | 'file' | 'none'; + /** Format of CLI output: single JSON object, JSONL stream, or raw text */ + outputFormat: 'json' | 'jsonl' | 'text'; +} + +export interface SessionIdConfig { + /** Where to find the session ID in CLI output */ + extractFrom: 'result' | 'event'; + /** Field name containing the session ID */ + field: string; + /** For JSONL: which event type contains the session ID */ + eventType?: string; +} + +export interface NonInteractiveConfig { + /** Subcommand for non-interactive mode (e.g. "exec" for codex, "run" for opencode) */ + subcommand?: string; + /** Flag to pass the prompt (e.g. "-p" for gemini/cursor) */ + promptFlag?: string; + /** Flag(s) for JSON output (e.g. "--json", "--output-format json") */ + outputFlag?: string; +} + +export interface AgentProviderConfig { + /** Provider name identifier */ + name: string; + /** CLI binary command */ + command: string; + /** Default autonomous-mode args */ + args: string[]; + /** Extra environment variables to set */ + env?: Record<string, string>; + /** Process names for detection (ps matching) */ + processNames: string[]; + /** Env var name for config dir isolation (e.g. "CLAUDE_CONFIG_DIR") */ + configDirEnv?: string; + /** Flag or subcommand for resume (e.g. 
"--resume", "resume") */ + resumeFlag?: string; + /** How resume works: flag-based, subcommand-based, or unsupported */ + resumeStyle: 'flag' | 'subcommand' | 'none'; + /** How prompts are passed: native (-p built-in), flag (use nonInteractive.promptFlag), or none */ + promptMode: 'native' | 'flag' | 'none'; + /** Structured output configuration */ + structuredOutput?: StructuredOutputConfig; + /** Session ID extraction configuration */ + sessionId?: SessionIdConfig; + /** Non-interactive mode configuration */ + nonInteractive?: NonInteractiveConfig; +} diff --git a/src/agent/schema.ts b/src/agent/schema.ts index 38d20c8..ad8bcf5 100644 --- a/src/agent/schema.ts +++ b/src/agent/schema.ts @@ -1,14 +1,8 @@ /** - * Agent Output Schema + * Agent Signal Schema * - * Defines structured output schema for Claude agents using discriminated unions. - * Replaces broken AskUserQuestion detection with explicit agent status signaling. - * - * Mode-specific schemas: - * - execute: Standard task execution (done/questions/error) - * - discuss: Gather context through questions, output decisions - * - breakdown: Decompose initiative into phases - * - decompose: Decompose phase into individual tasks + * Agents communicate via a trivial JSON signal: done, questions, or error. + * All structured output is file-based (see file-io.ts). 
*/ import { z } from 'zod'; @@ -17,17 +11,11 @@ import { z } from 'zod'; // SHARED SCHEMAS // ============================================================================= -/** - * Option for questions - allows agent to present choices to user - */ const optionSchema = z.object({ label: z.string(), description: z.string().optional(), }); -/** - * Individual question item with unique ID for answer matching - */ export const questionItemSchema = z.object({ id: z.string(), question: z.string(), @@ -37,105 +25,26 @@ export const questionItemSchema = z.object({ export type QuestionItem = z.infer<typeof questionItemSchema>; -/** - * A decision captured during discussion. - * Prompt instructs: { "topic": "Auth", "decision": "JWT", "reason": "Stateless" } - */ -const decisionSchema = z.object({ - topic: z.string(), - decision: z.string(), - reason: z.string(), -}); - -export type Decision = z.infer<typeof decisionSchema>; - -/** - * A phase from breakdown output. - * Prompt instructs: { "number": 1, "name": "...", "description": "...", "dependencies": [0] } - */ -const phaseBreakdownSchema = z.object({ - number: z.number().int().positive(), - name: z.string().min(1), - description: z.string(), - dependencies: z.array(z.number().int()).optional().default([]), -}); - -export type PhaseBreakdown = z.infer<typeof phaseBreakdownSchema>; - -/** - * Task type enum - mirrors database task.type column. - */ -const taskTypeSchema = z.enum([ - 'auto', - 'checkpoint:human-verify', - 'checkpoint:decision', - 'checkpoint:human-action', -]); - -/** - * A task from decompose output. 
- * Prompt instructs: { "number": 1, "name": "...", "description": "...", "type": "auto", "dependencies": [0] } - */ -const taskBreakdownSchema = z.object({ - number: z.number().int().positive(), - name: z.string().min(1), - description: z.string(), - type: taskTypeSchema.default('auto'), - dependencies: z.array(z.number().int()).optional().default([]), -}); - -export type TaskBreakdown = z.infer<typeof taskBreakdownSchema>; - // ============================================================================= -// EXECUTE MODE SCHEMA (default) +// UNIVERSAL SIGNAL SCHEMA // ============================================================================= -/** - * Discriminated union for agent output in execute mode. - * - * Agent must return one of: - * - done: Task completed successfully - * - questions: Agent needs user input to continue (supports multiple questions) - * - unrecoverable_error: Agent hit an error it cannot recover from - */ -export const agentOutputSchema = z.discriminatedUnion('status', [ - // Agent completed successfully - z.object({ - status: z.literal('done'), - result: z.string(), - filesModified: z.array(z.string()).optional(), - }), - - // Agent needs user input to continue (one or more questions) - z.object({ - status: z.literal('questions'), - questions: z.array(questionItemSchema), - }), - - // Agent hit unrecoverable error - z.object({ - status: z.literal('unrecoverable_error'), - error: z.string(), - attempted: z.string().optional(), - }), +export const agentSignalSchema = z.discriminatedUnion('status', [ + z.object({ status: z.literal('done') }), + z.object({ status: z.literal('questions'), questions: z.array(questionItemSchema) }), + z.object({ status: z.literal('error'), error: z.string() }), ]); -export type AgentOutput = z.infer<typeof agentOutputSchema>; +export type AgentSignal = z.infer<typeof agentSignalSchema>; -/** - * JSON Schema for --json-schema flag (convert Zod to JSON Schema). 
- * This is passed to Claude CLI to enforce structured output. - */ -export const agentOutputJsonSchema = { +export const agentSignalJsonSchema = { type: 'object', oneOf: [ { properties: { status: { const: 'done' }, - result: { type: 'string' }, - filesModified: { type: 'array', items: { type: 'string' } }, }, - required: ['status', 'result'], + required: ['status'], }, { properties: { @@ -168,105 +77,7 @@ export const agentOutputJsonSchema = { }, { properties: { - status: { const: 'unrecoverable_error' }, - error: { type: 'string' }, - attempted: { type: 'string' }, - }, - required: ['status', 'error'], - }, - ], -}; - -// ============================================================================= -// DISCUSS MODE SCHEMA -// ============================================================================= - -/** - * Discuss mode output schema. - * Agent asks questions OR completes with decisions. - * - * Prompt tells agent: - * - Output "questions" status with questions array when needing input - * - Output "context_complete" status with decisions array when done - */ -export const discussOutputSchema = z.discriminatedUnion('status', [ - // Agent needs more information - z.object({ - status: z.literal('questions'), - questions: z.array(questionItemSchema), - }), - // Agent has captured all decisions - z.object({ - status: z.literal('context_complete'), - decisions: z.array(decisionSchema), - summary: z.string(), // Brief summary of all decisions - }), - // Unrecoverable error - z.object({ - status: z.literal('unrecoverable_error'), - error: z.string(), - }), -]); - -export type DiscussOutput = z.infer<typeof discussOutputSchema>; - -/** - * JSON Schema for discuss mode (passed to Claude CLI --json-schema) - */ -export const discussOutputJsonSchema = { - type: 'object', - oneOf: [ - { - properties: { - status: { const: 'questions' }, - questions: { - type: 'array', - items: { - type: 'object', - properties: { - id: { type: 'string' }, - question: { type: 'string' }, 
- options: { - type: 'array', - items: { - type: 'object', - properties: { - label: { type: 'string' }, - description: { type: 'string' }, - }, - required: ['label'], - }, - }, - multiSelect: { type: 'boolean' }, - }, - required: ['id', 'question'], - }, - }, - }, - required: ['status', 'questions'], - }, - { - properties: { - status: { const: 'context_complete' }, - decisions: { - type: 'array', - items: { - type: 'object', - properties: { - topic: { type: 'string' }, - decision: { type: 'string' }, - reason: { type: 'string' }, - }, - required: ['topic', 'decision', 'reason'], - }, - }, - summary: { type: 'string' }, - }, - required: ['status', 'decisions', 'summary'], - }, - { - properties: { - status: { const: 'unrecoverable_error' }, + status: { const: 'error' }, error: { type: 'string' }, }, required: ['status', 'error'], @@ -275,201 +86,12 @@ export const discussOutputJsonSchema = { }; // ============================================================================= -// BREAKDOWN MODE SCHEMA +// BACKWARD COMPATIBILITY // ============================================================================= -/** - * Breakdown mode output schema. - * Agent asks questions OR completes with phases. 
- * - * Prompt tells agent: - * - Output "questions" status when needing clarification - * - Output "breakdown_complete" status with phases array when done - */ -export const breakdownOutputSchema = z.discriminatedUnion('status', [ - // Agent needs clarification - z.object({ - status: z.literal('questions'), - questions: z.array(questionItemSchema), - }), - // Agent has decomposed initiative into phases - z.object({ - status: z.literal('breakdown_complete'), - phases: z.array(phaseBreakdownSchema), - }), - // Unrecoverable error - z.object({ - status: z.literal('unrecoverable_error'), - error: z.string(), - }), -]); - -export type BreakdownOutput = z.infer<typeof breakdownOutputSchema>; - -/** - * JSON Schema for breakdown mode (passed to Claude CLI --json-schema) - */ -export const breakdownOutputJsonSchema = { - type: 'object', - oneOf: [ - { - properties: { - status: { const: 'questions' }, - questions: { - type: 'array', - items: { - type: 'object', - properties: { - id: { type: 'string' }, - question: { type: 'string' }, - options: { - type: 'array', - items: { - type: 'object', - properties: { - label: { type: 'string' }, - description: { type: 'string' }, - }, - required: ['label'], - }, - }, - }, - required: ['id', 'question'], - }, - }, - }, - required: ['status', 'questions'], - }, - { - properties: { - status: { const: 'breakdown_complete' }, - phases: { - type: 'array', - items: { - type: 'object', - properties: { - number: { type: 'integer', minimum: 1 }, - name: { type: 'string', minLength: 1 }, - description: { type: 'string' }, - dependencies: { - type: 'array', - items: { type: 'integer' }, - }, - }, - required: ['number', 'name', 'description'], - }, - }, - }, - required: ['status', 'phases'], - }, - { - properties: { - status: { const: 'unrecoverable_error' }, - error: { type: 'string' }, - }, - required: ['status', 'error'], - }, - ], -}; - -// ============================================================================= -// DECOMPOSE MODE 
SCHEMA -// ============================================================================= - -/** - * Decompose mode output schema. - * Agent asks questions OR completes with tasks. - * - * Prompt tells agent: - * - Output "questions" status when needing clarification - * - Output "decompose_complete" status with tasks array when done - */ -export const decomposeOutputSchema = z.discriminatedUnion('status', [ - // Agent needs clarification - z.object({ - status: z.literal('questions'), - questions: z.array(questionItemSchema), - }), - // Agent has decomposed phase into tasks - z.object({ - status: z.literal('decompose_complete'), - tasks: z.array(taskBreakdownSchema), - }), - // Unrecoverable error - z.object({ - status: z.literal('unrecoverable_error'), - error: z.string(), - }), -]); - -export type DecomposeOutput = z.infer<typeof decomposeOutputSchema>; - -/** - * JSON Schema for decompose mode (passed to Claude CLI --json-schema) - */ -export const decomposeOutputJsonSchema = { - type: 'object', - oneOf: [ - { - properties: { - status: { const: 'questions' }, - questions: { - type: 'array', - items: { - type: 'object', - properties: { - id: { type: 'string' }, - question: { type: 'string' }, - options: { - type: 'array', - items: { - type: 'object', - properties: { - label: { type: 'string' }, - description: { type: 'string' }, - }, - required: ['label'], - }, - }, - }, - required: ['id', 'question'], - }, - }, - }, - required: ['status', 'questions'], - }, - { - properties: { - status: { const: 'decompose_complete' }, - tasks: { - type: 'array', - items: { - type: 'object', - properties: { - number: { type: 'integer', minimum: 1 }, - name: { type: 'string', minLength: 1 }, - description: { type: 'string' }, - type: { - type: 'string', - enum: ['auto', 'checkpoint:human-verify', 'checkpoint:decision', 'checkpoint:human-action'], - }, - dependencies: { - type: 'array', - items: { type: 'integer' }, - }, - }, - required: ['number', 'name', 'description'], - }, - }, 
- }, - required: ['status', 'tasks'], - }, - { - properties: { - status: { const: 'unrecoverable_error' }, - error: { type: 'string' }, - }, - required: ['status', 'error'], - }, - ], -}; +/** @deprecated Use agentSignalSchema */ +export const agentOutputSchema = agentSignalSchema; +/** @deprecated Use AgentSignal */ +export type AgentOutput = AgentSignal; +/** @deprecated Use agentSignalJsonSchema */ +export const agentOutputJsonSchema = agentSignalJsonSchema; diff --git a/src/agent/types.ts b/src/agent/types.ts index 6c75b7d..8ecae2f 100644 --- a/src/agent/types.ts +++ b/src/agent/types.ts @@ -15,22 +15,38 @@ export type AgentStatus = 'idle' | 'running' | 'waiting_for_input' | 'stopped' | * - breakdown: Decompose initiative into phases * - decompose: Decompose phase into individual tasks */ -export type AgentMode = 'execute' | 'discuss' | 'breakdown' | 'decompose'; +export type AgentMode = 'execute' | 'discuss' | 'breakdown' | 'decompose' | 'refine'; + +/** + * Context data written as input files in agent workdir before spawn. 
+ */ +export interface AgentInputContext { + initiative?: import('../db/schema.js').Initiative; + pages?: import('./content-serializer.js').PageForSerialization[]; + phase?: import('../db/schema.js').Phase; + task?: import('../db/schema.js').Task; +} /** * Options for spawning a new agent */ export interface SpawnAgentOptions { - /** Human-readable name for the agent (e.g., 'gastown', 'chinatown') */ - name: string; - /** Task ID to assign to agent */ - taskId: string; + /** Human-readable name/alias for the agent (auto-generated if omitted) */ + name?: string; + /** Task ID to assign to agent (optional for architect modes) */ + taskId?: string | null; /** Initial prompt/instruction for the agent */ prompt: string; /** Optional working directory (defaults to worktree path) */ cwd?: string; /** Agent operation mode (defaults to 'execute') */ mode?: AgentMode; + /** Provider name (defaults to 'claude') */ + provider?: string; + /** Initiative ID — when set, worktrees are created for all linked projects */ + initiativeId?: string; + /** Context data to write as input files in agent workdir */ + inputContext?: AgentInputContext; } /** @@ -39,18 +55,24 @@ export interface SpawnAgentOptions { export interface AgentInfo { /** Unique identifier for this agent */ id: string; - /** Human-readable name for the agent */ + /** Human-readable alias for the agent (e.g. 
'jolly-penguin') */ name: string; - /** Task this agent is working on */ - taskId: string; - /** Claude CLI session ID for resumption (null until first run completes) */ + /** Task this agent is working on (null for architect agents) */ + taskId: string | null; + /** Initiative this agent is linked to (null if standalone) */ + initiativeId: string | null; + /** CLI session ID for resumption (null until first run completes) */ sessionId: string | null; - /** WorktreeManager worktree ID */ + /** Agent alias / worktree key (deterministic path: agent-workdirs/<alias>/) */ worktreeId: string; /** Current status (waiting_for_input = paused on AskUserQuestion) */ status: AgentStatus; /** Current operation mode */ mode: AgentMode; + /** Provider name (e.g. 'claude', 'codex', 'gemini') */ + provider: string; + /** Account ID used for this agent (null if no account management) */ + accountId: string | null; /** When the agent was created */ createdAt: Date; /** Last activity timestamp */ @@ -179,4 +201,35 @@ export interface AgentManager { * @returns Pending questions if available, null otherwise */ getPendingQuestions(agentId: string): Promise<PendingQuestions | null>; + + /** + * Get the buffered output for an agent. + * + * Returns recent output chunks from the agent's stdout stream. + * Buffer is limited to MAX_OUTPUT_BUFFER_SIZE chunks (ring buffer). + * + * @param agentId - Agent ID + * @returns Array of output chunks (newest last) + */ + getOutputBuffer(agentId: string): string[]; + + /** + * Delete an agent and clean up all associated resources. + * + * Tears down worktrees, branches, logs, and removes the DB record. + * If the agent is still running, kills the process first. + * + * @param agentId - Agent to delete + */ + delete(agentId: string): Promise<void>; + + /** + * Dismiss an agent. + * + * Marks the agent as dismissed by the user, which excludes it from + * active agent queries. The agent record and worktree are preserved. 
+ * + * @param agentId - Agent to dismiss + */ + dismiss(agentId: string): Promise<void>; } diff --git a/src/bin/cw.ts b/src/bin/cw.ts index 146fe11..8e8d605 100644 --- a/src/bin/cw.ts +++ b/src/bin/cw.ts @@ -8,20 +8,24 @@ */ import { runCli } from '../cli/index.js'; +import { logger } from '../logger/index.js'; // Handle uncaught errors gracefully process.on('uncaughtException', (error) => { + logger.fatal({ err: error }, 'uncaught exception'); console.error('Fatal error:', error.message); process.exit(1); }); process.on('unhandledRejection', (reason) => { + logger.fatal({ err: reason }, 'unhandled rejection'); console.error('Unhandled promise rejection:', reason); process.exit(1); }); // Run the CLI runCli().catch((error) => { + logger.fatal({ err: error }, 'CLI fatal error'); console.error('CLI error:', error.message); process.exit(1); }); diff --git a/src/cli/index.ts b/src/cli/index.ts index dfbbb22..1de4808 100644 --- a/src/cli/index.ts +++ b/src/cli/index.ts @@ -9,22 +9,10 @@ import { Command } from 'commander'; import { VERSION } from '../index.js'; import { CoordinationServer } from '../server/index.js'; import { GracefulShutdown } from '../server/shutdown.js'; -import { ProcessManager, ProcessRegistry } from '../process/index.js'; -import { LogManager } from '../logging/index.js'; -import { createEventBus } from '../events/index.js'; import { createDefaultTrpcClient } from './trpc-client.js'; -import { - createDatabase, - ensureSchema, - DrizzleInitiativeRepository, - DrizzlePhaseRepository, - DrizzlePlanRepository, - DrizzleTaskRepository, - DrizzleMessageRepository, - DrizzleAgentRepository, -} from '../db/index.js'; -import { ClaudeAgentManager } from '../agent/index.js'; -import { SimpleGitWorktreeManager } from '../git/index.js'; +import { createContainer } from '../container.js'; +import { findWorkspaceRoot, writeCwrc, defaultCwConfig } from '../config/index.js'; +import { createModuleLogger } from '../logger/index.js'; /** Environment variable for 
custom port */ const CW_PORT_ENV = 'CW_PORT'; @@ -37,54 +25,30 @@ async function startServer(port?: number): Promise<void> { // Get port from option, env var, or default const serverPort = port ?? (process.env[CW_PORT_ENV] ? parseInt(process.env[CW_PORT_ENV], 10) : undefined); + const log = createModuleLogger('server'); - // Create dependencies - const registry = new ProcessRegistry(); - const eventBus = createEventBus(); - const processManager = new ProcessManager(registry, eventBus); - const logManager = new LogManager(); - - // Create database and ensure schema - const db = createDatabase(); - ensureSchema(db); - - // Create repositories - const initiativeRepository = new DrizzleInitiativeRepository(db); - const phaseRepository = new DrizzlePhaseRepository(db); - const planRepository = new DrizzlePlanRepository(db); - const taskRepository = new DrizzleTaskRepository(db); - const messageRepository = new DrizzleMessageRepository(db); - const agentRepository = new DrizzleAgentRepository(db); - - // Create agent manager with worktree isolation - const worktreeManager = new SimpleGitWorktreeManager(process.cwd(), eventBus); - const agentManager = new ClaudeAgentManager(agentRepository, worktreeManager, eventBus); + // Create full dependency graph + const container = await createContainer(); // Create and start server const server = new CoordinationServer( { port: serverPort }, - processManager, - logManager, - eventBus, - { - agentManager, - initiativeRepository, - phaseRepository, - planRepository, - taskRepository, - messageRepository, - } + container.processManager, + container.logManager, + container.eventBus, + container.toContextDeps(), ); try { await server.start(); } catch (error) { + log.fatal({ err: error }, 'failed to start server'); console.error('Failed to start server:', (error as Error).message); process.exit(1); } // Install graceful shutdown handlers - const shutdown = new GracefulShutdown(server, processManager, logManager); + const shutdown = new 
GracefulShutdown(server, container.processManager, container.logManager); shutdown.install(); } @@ -136,6 +100,39 @@ export function createCli(serverHandler?: (port?: number) => Promise<void>): Com } }); + // Init command - create .cwrc in current directory + program + .command('init') + .description('Initialize a cw workspace in the current directory') + .action(() => { + const cwd = process.cwd(); + const existing = findWorkspaceRoot(cwd); + if (existing && existing === cwd) { + console.log(`Workspace already initialized at ${cwd}`); + return; + } + if (existing) { + console.log(`Parent workspace found at ${existing}`); + console.log(`Creating nested workspace at ${cwd}`); + } + + writeCwrc(cwd, defaultCwConfig()); + console.log(`Initialized cw workspace at ${cwd}`); + }); + + // ID generation command (stateless — no server, no tRPC) + program + .command('id') + .description('Generate a unique nanoid (works offline, no server needed)') + .option('-n, --count <count>', 'Number of IDs to generate', '1') + .action(async (options: { count: string }) => { + const { nanoid } = await import('nanoid'); + const count = parseInt(options.count, 10) || 1; + for (let i = 0; i < count; i++) { + console.log(nanoid()); + } + }); + // Agent command group const agentCommand = program .command('agent') @@ -145,10 +142,12 @@ export function createCli(serverHandler?: (port?: number) => Promise<void>): Com agentCommand .command('spawn <prompt>') .description('Spawn a new agent to work on a task') - .requiredOption('--name <name>', 'Human-readable name for the agent (e.g., gastown)') + .option('--name <name>', 'Human-readable name for the agent (auto-generated if omitted)') .requiredOption('--task <taskId>', 'Task ID to assign to agent') .option('--cwd <path>', 'Working directory for agent') - .action(async (prompt: string, options: { name: string; task: string; cwd?: string }) => { + .option('--provider <provider>', 'Agent provider (claude, codex, gemini, cursor, auggie, amp, 
opencode)') + .option('--initiative <id>', 'Initiative ID (creates worktrees for all linked projects)') + .action(async (prompt: string, options: { name?: string; task: string; cwd?: string; provider?: string; initiative?: string }) => { try { const client = createDefaultTrpcClient(); const agent = await client.spawnAgent.mutate({ @@ -156,6 +155,8 @@ export function createCli(serverHandler?: (port?: number) => Promise<void>): Com taskId: options.task, prompt, cwd: options.cwd, + provider: options.provider, + initiativeId: options.initiative, }); console.log(`Agent '${agent.name}' spawned`); console.log(` ID: ${agent.id}`); @@ -183,6 +184,21 @@ export function createCli(serverHandler?: (port?: number) => Promise<void>): Com } }); + // cw agent delete <name> + agentCommand + .command('delete <name>') + .description('Delete an agent and clean up its workdir, branches, and logs') + .action(async (name: string) => { + try { + const client = createDefaultTrpcClient(); + const result = await client.deleteAgent.mutate({ name }); + console.log(`Agent '${result.name}' deleted`); + } catch (error) { + console.error('Failed to delete agent:', (error as Error).message); + process.exit(1); + } + }); + // cw agent list agentCommand .command('list') @@ -290,17 +306,31 @@ export function createCli(serverHandler?: (port?: number) => Promise<void>): Com .command('task') .description('Manage tasks'); - // cw task list --plan <planId> + // cw task list --parent <parentTaskId> | --phase <phaseId> | --initiative <initiativeId> taskCommand .command('list') - .description('List tasks for a plan') - .requiredOption('--plan <planId>', 'Plan ID to list tasks for') - .action(async (options: { plan: string }) => { + .description('List tasks (by parent task, phase, or initiative)') + .option('--parent <parentTaskId>', 'Parent task ID (for child tasks)') + .option('--phase <phaseId>', 'Phase ID') + .option('--initiative <initiativeId>', 'Initiative ID') + .action(async (options: { parent?: 
string; phase?: string; initiative?: string }) => { try { const client = createDefaultTrpcClient(); - const tasks = await client.listTasks.query({ planId: options.plan }); + let tasks; + + if (options.parent) { + tasks = await client.listTasks.query({ parentTaskId: options.parent }); + } else if (options.phase) { + tasks = await client.listPhaseTasks.query({ phaseId: options.phase }); + } else if (options.initiative) { + tasks = await client.listInitiativeTasks.query({ initiativeId: options.initiative }); + } else { + console.error('One of --parent, --phase, or --initiative is required'); + process.exit(1); + } + if (tasks.length === 0) { - console.log('No tasks found for this plan'); + console.log('No tasks found'); return; } @@ -335,8 +365,11 @@ export function createCli(serverHandler?: (port?: number) => Promise<void>): Com const task = await client.getTask.query({ id: taskId }); console.log(`Task: ${task.name}`); console.log(` ID: ${task.id}`); - console.log(` Plan: ${task.planId}`); + console.log(` Phase: ${task.phaseId ?? '(none)'}`); + console.log(` Initiative: ${task.initiativeId ?? '(none)'}`); + console.log(` Parent Task: ${task.parentTaskId ?? '(none)'}`); console.log(` Description: ${task.description ?? 
'(none)'}`); + console.log(` Category: ${task.category}`); console.log(` Type: ${task.type}`); console.log(` Priority: ${task.priority}`); console.log(` Status: ${task.status}`); @@ -690,18 +723,18 @@ export function createCli(serverHandler?: (port?: number) => Promise<void>): Com initiativeCommand .command('create <name>') .description('Create a new initiative') - .option('-d, --description <description>', 'Initiative description') - .action(async (name: string, options: { description?: string }) => { + .option('--project <ids...>', 'Project IDs to associate') + .action(async (name: string, options: { project?: string[] }) => { try { const client = createDefaultTrpcClient(); const initiative = await client.createInitiative.mutate({ name, - description: options.description, + projectIds: options.project, }); console.log(`Created initiative: ${initiative.id}`); console.log(` Name: ${initiative.name}`); - if (initiative.description) { - console.log(` Description: ${initiative.description}`); + if (options.project && options.project.length > 0) { + console.log(` Projects: ${options.project.length} associated`); } } catch (error) { console.error('Failed to create initiative:', (error as Error).message); @@ -752,9 +785,6 @@ export function createCli(serverHandler?: (port?: number) => Promise<void>): Com console.log(`ID: ${initiative.id}`); console.log(`Name: ${initiative.name}`); console.log(`Status: ${initiative.status}`); - if (initiative.description) { - console.log(`Description: ${initiative.description}`); - } console.log(`Created: ${new Date(initiative.createdAt).toISOString()}`); } catch (error) { console.error('Failed to get initiative:', (error as Error).message); @@ -795,9 +825,9 @@ export function createCli(serverHandler?: (port?: number) => Promise<void>): Com architectCommand .command('discuss <initiativeId>') .description('Start discussion phase for an initiative') - .requiredOption('--name <name>', 'Agent name') + .option('--name <name>', 'Agent name 
(auto-generated if omitted)') .option('-c, --context <context>', 'Initial context') - .action(async (initiativeId: string, options: { name: string; context?: string }) => { + .action(async (initiativeId: string, options: { name?: string; context?: string }) => { try { const client = createDefaultTrpcClient(); const agent = await client.spawnArchitectDiscuss.mutate({ @@ -819,9 +849,9 @@ export function createCli(serverHandler?: (port?: number) => Promise<void>): Com architectCommand .command('breakdown <initiativeId>') .description('Start breakdown phase for an initiative') - .requiredOption('--name <name>', 'Agent name') + .option('--name <name>', 'Agent name (auto-generated if omitted)') .option('-s, --summary <summary>', 'Context summary from discuss phase') - .action(async (initiativeId: string, options: { name: string; summary?: string }) => { + .action(async (initiativeId: string, options: { name?: string; summary?: string }) => { try { const client = createDefaultTrpcClient(); const agent = await client.spawnArchitectBreakdown.mutate({ @@ -839,24 +869,26 @@ export function createCli(serverHandler?: (port?: number) => Promise<void>): Com } }); - // cw architect decompose <plan-id> + // cw architect decompose <phase-id> architectCommand - .command('decompose <planId>') - .description('Decompose a plan into tasks') - .requiredOption('--name <name>', 'Agent name') + .command('decompose <phaseId>') + .description('Decompose a phase into tasks') + .option('--name <name>', 'Agent name (auto-generated if omitted)') + .option('-t, --task-name <taskName>', 'Name for the decompose task') .option('-c, --context <context>', 'Additional context') - .action(async (planId: string, options: { name: string; context?: string }) => { + .action(async (phaseId: string, options: { name?: string; taskName?: string; context?: string }) => { try { const client = createDefaultTrpcClient(); const agent = await client.spawnArchitectDecompose.mutate({ name: options.name, - planId, + 
phaseId, + taskName: options.taskName, context: options.context, }); console.log(`Started architect agent in decompose mode`); console.log(` Agent: ${agent.name} (${agent.id})`); console.log(` Mode: ${agent.mode}`); - console.log(` Plan: ${planId}`); + console.log(` Phase: ${phaseId}`); } catch (error) { console.error('Failed to start decompose:', (error as Error).message); process.exit(1); @@ -983,94 +1015,187 @@ export function createCli(serverHandler?: (port?: number) => Promise<void>): Com } }); - // Plan command group - const planCommand = program - .command('plan') - .description('Plan management'); + // Project command group + const projectCommand = program + .command('project') + .description('Manage registered projects'); - // cw plan list --phase <phaseId> - planCommand - .command('list') - .description('List plans in a phase') - .requiredOption('--phase <id>', 'Phase ID') - .action(async (options: { phase: string }) => { + // cw project register --name <name> --url <url> + projectCommand + .command('register') + .description('Register a git repository as a project') + .requiredOption('--name <name>', 'Project name') + .requiredOption('--url <url>', 'Git repository URL') + .action(async (options: { name: string; url: string }) => { try { const client = createDefaultTrpcClient(); - const plans = await client.listPlans.query({ phaseId: options.phase }); - if (plans.length === 0) { - console.log('No plans found'); - return; - } - console.table(plans.map(p => ({ - id: p.id, - number: p.number, - name: p.name, - status: p.status, - }))); - } catch (error) { - console.error('Failed to list plans:', (error as Error).message); - process.exit(1); - } - }); - - // cw plan create --phase <phaseId> --name <name> - planCommand - .command('create') - .description('Create a plan in a phase') - .requiredOption('--phase <id>', 'Phase ID') - .requiredOption('--name <name>', 'Plan name') - .option('--description <desc>', 'Plan description') - .action(async (options: { 
phase: string; name: string; description?: string }) => { - try { - const client = createDefaultTrpcClient(); - const plan = await client.createPlan.mutate({ - phaseId: options.phase, + const project = await client.registerProject.mutate({ name: options.name, - description: options.description, + url: options.url, }); - console.log(`Created plan: ${plan.id} (${plan.name})`); + console.log(`Registered project: ${project.id}`); + console.log(` Name: ${project.name}`); + console.log(` URL: ${project.url}`); } catch (error) { - console.error('Failed to create plan:', (error as Error).message); + console.error('Failed to register project:', (error as Error).message); process.exit(1); } }); - // cw plan get <id> - planCommand - .command('get <id>') - .description('Get plan details') - .action(async (id: string) => { + // cw project list + projectCommand + .command('list') + .description('List all registered projects') + .action(async () => { try { const client = createDefaultTrpcClient(); - const plan = await client.getPlan.query({ id }); - console.log(JSON.stringify(plan, null, 2)); - } catch (error) { - console.error('Failed to get plan:', (error as Error).message); - process.exit(1); - } - }); - - // cw plan tasks <id> - planCommand - .command('tasks <id>') - .description('List tasks in a plan') - .action(async (id: string) => { - try { - const client = createDefaultTrpcClient(); - const tasks = await client.listTasks.query({ planId: id }); - if (tasks.length === 0) { - console.log('No tasks found'); + const projects = await client.listProjects.query(); + if (projects.length === 0) { + console.log('No projects registered'); return; } - console.table(tasks.map(t => ({ - id: t.id, - order: t.order, - name: t.name, - type: t.type, - status: t.status, - }))); + for (const p of projects) { + console.log(`${p.id} ${p.name} ${p.url}`); + } } catch (error) { - console.error('Failed to list tasks:', (error as Error).message); + console.error('Failed to list projects:', (error 
as Error).message); + process.exit(1); + } + }); + + // cw project delete <id> + projectCommand + .command('delete <id>') + .description('Delete a registered project') + .action(async (id: string) => { + try { + const client = createDefaultTrpcClient(); + await client.deleteProject.mutate({ id }); + console.log(`Deleted project: ${id}`); + } catch (error) { + console.error('Failed to delete project:', (error as Error).message); + process.exit(1); + } + }); + + // Account command group + const accountCommand = program + .command('account') + .description('Manage provider accounts for agent spawning'); + + // cw account add + accountCommand + .command('add') + .description('Extract current Claude login and register as an account') + .option('--provider <provider>', 'Provider name', 'claude') + .option('--email <email>', 'Email (for manual registration without auto-extract)') + .action(async (options: { provider: string; email?: string }) => { + try { + const client = createDefaultTrpcClient(); + + if (options.email) { + // Manual registration — guard against duplicates + const existing = await client.listAccounts.query(); + const alreadyRegistered = existing.find((a: any) => a.email === options.email); + if (alreadyRegistered) { + console.log(`Account '${options.email}' already registered (${alreadyRegistered.id})`); + return; + } + + const account = await client.addAccount.mutate({ + email: options.email, + provider: options.provider, + }); + console.log(`Registered account: ${account.id}`); + console.log(` Email: ${account.email}`); + console.log(` Provider: ${account.provider}`); + } else { + // Auto-extract from current Claude login + const { extractCurrentClaudeAccount } = await import('../agent/accounts/index.js'); + const extracted = await extractCurrentClaudeAccount(); + + // Check if already registered + const existing = await client.listAccounts.query(); + const alreadyRegistered = existing.find((a: any) => a.email === extracted.email); + if 
(alreadyRegistered) { + // Upsert: update credentials on existing account + await client.updateAccountAuth.mutate({ + id: alreadyRegistered.id, + configJson: JSON.stringify(extracted.configJson), + credentials: extracted.credentials, + }); + console.log(`Updated credentials for account: ${alreadyRegistered.id}`); + console.log(` Email: ${extracted.email}`); + return; + } + + // Create DB record with credentials stored in DB + const account = await client.addAccount.mutate({ + email: extracted.email, + provider: options.provider, + configJson: JSON.stringify(extracted.configJson), + credentials: extracted.credentials, + }); + + console.log(`Registered account: ${account.id}`); + console.log(` Email: ${account.email}`); + console.log(` Provider: ${account.provider}`); + } + } catch (error) { + console.error('Failed to add account:', (error as Error).message); + process.exit(1); + } + }); + + // cw account list + accountCommand + .command('list') + .description('List all registered accounts') + .action(async () => { + try { + const client = createDefaultTrpcClient(); + const accounts = await client.listAccounts.query(); + if (accounts.length === 0) { + console.log('No accounts registered'); + return; + } + for (const acct of accounts) { + const status = acct.isExhausted ? 'EXHAUSTED' : 'AVAILABLE'; + const until = acct.exhaustedUntil ? 
` (until ${new Date(acct.exhaustedUntil).toLocaleString()})` : ''; + console.log(`${acct.id} ${acct.email} ${acct.provider} [${status}${until}]`); + } + } catch (error) { + console.error('Failed to list accounts:', (error as Error).message); + process.exit(1); + } + }); + + // cw account remove <id> + accountCommand + .command('remove <id>') + .description('Remove an account') + .action(async (id: string) => { + try { + const client = createDefaultTrpcClient(); + await client.removeAccount.mutate({ id }); + console.log(`Removed account: ${id}`); + } catch (error) { + console.error('Failed to remove account:', (error as Error).message); + process.exit(1); + } + }); + + // cw account refresh + accountCommand + .command('refresh') + .description('Clear expired exhaustion flags') + .action(async () => { + try { + const client = createDefaultTrpcClient(); + const result = await client.refreshAccounts.mutate(); + console.log(`Cleared ${result.cleared} expired exhaustions`); + } catch (error) { + console.error('Failed to refresh accounts:', (error as Error).message); process.exit(1); } }); diff --git a/src/config/cwrc.ts b/src/config/cwrc.ts new file mode 100644 index 0000000..82a2008 --- /dev/null +++ b/src/config/cwrc.ts @@ -0,0 +1,66 @@ +/** + * .cwrc File Operations + * + * Find, read, and write the .cwrc configuration file. + * The file's presence marks the workspace root directory. + */ + +import { readFileSync, writeFileSync, existsSync } from 'node:fs'; +import { join, dirname, parse } from 'node:path'; +import type { CwConfig } from './types.js'; + +/** Default filename */ +const CWRC_FILENAME = '.cwrc'; + +/** + * Walk up from `startDir` looking for a .cwrc file. + * Returns the absolute path to the directory containing it, + * or null if the filesystem root is reached. 
+ */ +export function findWorkspaceRoot(startDir: string = process.cwd()): string | null { + let dir = startDir; + + while (true) { + const candidate = join(dir, CWRC_FILENAME); + if (existsSync(candidate)) { + return dir; + } + + const parent = dirname(dir); + if (parent === dir) { + // Reached filesystem root + return null; + } + dir = parent; + } +} + +/** + * Read and parse the .cwrc file in the given directory. + * Returns null if the file doesn't exist. + * Throws on malformed JSON. + */ +export function readCwrc(dir: string): CwConfig | null { + const filePath = join(dir, CWRC_FILENAME); + if (!existsSync(filePath)) { + return null; + } + + const raw = readFileSync(filePath, 'utf-8'); + return JSON.parse(raw) as CwConfig; +} + +/** + * Write a .cwrc file to the given directory. + */ +export function writeCwrc(dir: string, config: CwConfig): void { + const filePath = join(dir, CWRC_FILENAME); + writeFileSync(filePath, JSON.stringify(config, null, 2) + '\n', 'utf-8'); +} + +/** + * Create a default .cwrc config. + */ +export function defaultCwConfig(): CwConfig { + return { version: 1 }; +} diff --git a/src/config/index.ts b/src/config/index.ts new file mode 100644 index 0000000..46b4f65 --- /dev/null +++ b/src/config/index.ts @@ -0,0 +1,13 @@ +/** + * Configuration Module + * + * Handles .cwrc workspace configuration. + */ + +export type { CwConfig } from './types.js'; +export { + findWorkspaceRoot, + readCwrc, + writeCwrc, + defaultCwConfig, +} from './cwrc.js'; diff --git a/src/config/types.ts b/src/config/types.ts new file mode 100644 index 0000000..eea60bd --- /dev/null +++ b/src/config/types.ts @@ -0,0 +1,15 @@ +/** + * .cwrc Configuration Types + * + * Defines the shape of the .cwrc configuration file. + * Add new top-level keys here as the config grows. + */ + +/** + * Root configuration file schema. + * Lives at the workspace root as `.cwrc`. 
+ */ +export interface CwConfig { + /** Schema version for forward compatibility */ + version: 1; +} diff --git a/src/container.ts b/src/container.ts new file mode 100644 index 0000000..1ef1098 --- /dev/null +++ b/src/container.ts @@ -0,0 +1,162 @@ +/** + * Dependency Container + * + * Factory functions for creating the full dependency graph. + * Keeps startServer() thin and makes repo wiring reusable by the test harness. + */ + +import type { DrizzleDatabase } from './db/index.js'; +import { + createDatabase, + ensureSchema, + DrizzleInitiativeRepository, + DrizzlePhaseRepository, + DrizzleTaskRepository, + DrizzleMessageRepository, + DrizzleAgentRepository, + DrizzlePageRepository, + DrizzleProjectRepository, + DrizzleAccountRepository, +} from './db/index.js'; +import type { InitiativeRepository } from './db/repositories/initiative-repository.js'; +import type { PhaseRepository } from './db/repositories/phase-repository.js'; +import type { TaskRepository } from './db/repositories/task-repository.js'; +import type { MessageRepository } from './db/repositories/message-repository.js'; +import type { AgentRepository } from './db/repositories/agent-repository.js'; +import type { PageRepository } from './db/repositories/page-repository.js'; +import type { ProjectRepository } from './db/repositories/project-repository.js'; +import type { AccountRepository } from './db/repositories/account-repository.js'; +import type { EventBus } from './events/index.js'; +import { createEventBus } from './events/index.js'; +import { ProcessManager, ProcessRegistry } from './process/index.js'; +import { LogManager } from './logging/index.js'; +import { MultiProviderAgentManager } from './agent/index.js'; +import { DefaultAccountCredentialManager } from './agent/credentials/index.js'; +import type { AccountCredentialManager } from './agent/credentials/types.js'; +import { findWorkspaceRoot } from './config/index.js'; +import { createModuleLogger } from './logger/index.js'; +import type 
{ ServerContextDeps } from './server/index.js'; + +// ============================================================================= +// Repositories +// ============================================================================= + +/** + * All 8 repository ports. + */ +export interface Repositories { + initiativeRepository: InitiativeRepository; + phaseRepository: PhaseRepository; + taskRepository: TaskRepository; + messageRepository: MessageRepository; + agentRepository: AgentRepository; + pageRepository: PageRepository; + projectRepository: ProjectRepository; + accountRepository: AccountRepository; +} + +/** + * Create all 8 Drizzle repository adapters from a database instance. + * Reusable by both the production server and the test harness. + */ +export function createRepositories(db: DrizzleDatabase): Repositories { + return { + initiativeRepository: new DrizzleInitiativeRepository(db), + phaseRepository: new DrizzlePhaseRepository(db), + taskRepository: new DrizzleTaskRepository(db), + messageRepository: new DrizzleMessageRepository(db), + agentRepository: new DrizzleAgentRepository(db), + pageRepository: new DrizzlePageRepository(db), + projectRepository: new DrizzleProjectRepository(db), + accountRepository: new DrizzleAccountRepository(db), + }; +} + +// ============================================================================= +// Container +// ============================================================================= + +/** + * Full dependency graph for the coordination server. + */ +export interface Container extends Repositories { + db: DrizzleDatabase; + eventBus: EventBus; + processManager: ProcessManager; + logManager: LogManager; + workspaceRoot: string; + credentialManager: AccountCredentialManager; + agentManager: MultiProviderAgentManager; + + /** Extract the subset of deps that CoordinationServer needs. */ + toContextDeps(): ServerContextDeps; +} + +/** + * Create the full dependency container. 
+ * + * Wires: ProcessRegistry → EventBus → ProcessManager → LogManager → + * Database → Repositories → CredentialManager → AgentManager. + * Runs ensureSchema() and reconcileAfterRestart() before returning. + */ +export async function createContainer(): Promise<Container> { + const log = createModuleLogger('container'); + + // Infrastructure + const registry = new ProcessRegistry(); + const eventBus = createEventBus(); + const processManager = new ProcessManager(registry, eventBus); + const logManager = new LogManager(); + + // Database + const db = createDatabase(); + ensureSchema(db); + log.info('database initialized'); + + // Repositories + const repos = createRepositories(db); + log.info('repositories created'); + + // Workspace root + const workspaceRoot = findWorkspaceRoot(process.cwd()) ?? process.cwd(); + log.info({ workspaceRoot }, 'workspace root resolved'); + + // Credential manager + const credentialManager = new DefaultAccountCredentialManager(eventBus); + log.info('credential manager created'); + + // Agent manager + const agentManager = new MultiProviderAgentManager( + repos.agentRepository, + workspaceRoot, + repos.projectRepository, + repos.accountRepository, + eventBus, + credentialManager, + ); + log.info('agent manager created'); + + // Reconcile agent state from any previous server session + await agentManager.reconcileAfterRestart(); + log.info('agent reconciliation complete'); + + return { + db, + eventBus, + processManager, + logManager, + workspaceRoot, + credentialManager, + agentManager, + ...repos, + + toContextDeps(): ServerContextDeps { + return { + agentManager, + credentialManager, + workspaceRoot, + ...repos, + }; + }, + }; +} diff --git a/src/coordination/conflict-resolution-service.test.ts b/src/coordination/conflict-resolution-service.test.ts new file mode 100644 index 0000000..fdf6781 --- /dev/null +++ b/src/coordination/conflict-resolution-service.test.ts @@ -0,0 +1,372 @@ +/** + * ConflictResolutionService Tests + * + * Tests 
for the conflict resolution service that handles merge conflicts + * by creating resolution tasks, updating statuses, and notifying agents. + */ + +import { describe, it, expect, beforeEach, vi } from 'vitest'; +import { DefaultConflictResolutionService } from './conflict-resolution-service.js'; +import { DrizzleTaskRepository } from '../db/repositories/drizzle/task.js'; +import { DrizzleAgentRepository } from '../db/repositories/drizzle/agent.js'; +import { DrizzleMessageRepository } from '../db/repositories/drizzle/message.js'; +import { DrizzlePhaseRepository } from '../db/repositories/drizzle/phase.js'; +import { DrizzleInitiativeRepository } from '../db/repositories/drizzle/initiative.js'; +import { createTestDatabase } from '../db/repositories/drizzle/test-helpers.js'; +import type { DrizzleDatabase } from '../db/index.js'; +import type { EventBus, DomainEvent } from '../events/types.js'; +import type { TaskRepository } from '../db/repositories/task-repository.js'; +import type { AgentRepository } from '../db/repositories/agent-repository.js'; +import type { MessageRepository } from '../db/repositories/message-repository.js'; + +// ============================================================================= +// Test Helpers +// ============================================================================= + +/** + * Create a mock EventBus that captures emitted events. 
+ */ +function createMockEventBus(): EventBus & { emittedEvents: DomainEvent[] } { + const emittedEvents: DomainEvent[] = []; + + return { + emittedEvents, + emit<T extends DomainEvent>(event: T): void { + emittedEvents.push(event); + }, + on: vi.fn(), + off: vi.fn(), + once: vi.fn(), + }; +} + +// ============================================================================= +// Tests +// ============================================================================= + +describe('DefaultConflictResolutionService', () => { + let db: DrizzleDatabase; + let taskRepository: TaskRepository; + let agentRepository: AgentRepository; + let messageRepository: MessageRepository; + let eventBus: EventBus & { emittedEvents: DomainEvent[] }; + let service: DefaultConflictResolutionService; + let testPhaseId: string; + let testInitiativeId: string; + + beforeEach(async () => { + // Set up test database + db = createTestDatabase(); + taskRepository = new DrizzleTaskRepository(db); + agentRepository = new DrizzleAgentRepository(db); + messageRepository = new DrizzleMessageRepository(db); + + // Create required hierarchy for tasks + const initiativeRepo = new DrizzleInitiativeRepository(db); + const phaseRepo = new DrizzlePhaseRepository(db); + + const initiative = await initiativeRepo.create({ + name: 'Test Initiative', + }); + testInitiativeId = initiative.id; + + const phase = await phaseRepo.create({ + initiativeId: initiative.id, + number: 1, + name: 'Test Phase', + }); + testPhaseId = phase.id; + + // Create mocks + eventBus = createMockEventBus(); + + // Create service + service = new DefaultConflictResolutionService( + taskRepository, + agentRepository, + messageRepository, + eventBus + ); + }); + + // =========================================================================== + // handleConflict() Tests + // =========================================================================== + + describe('handleConflict', () => { + it('should create conflict resolution task with 
correct properties', async () => { + // Create original task + const originalTask = await taskRepository.create({ + phaseId: testPhaseId, + initiativeId: testInitiativeId, + name: 'Original Task', + description: 'Original task description', + priority: 'medium', + order: 1, + }); + + // Create agent for task + const agent = await agentRepository.create({ + name: 'agent-test', + taskId: originalTask.id, + worktreeId: 'wt-test', + }); + + const conflicts = ['src/file1.ts', 'src/file2.ts']; + + await service.handleConflict(originalTask.id, conflicts); + + // Check resolution task was created + const tasks = await taskRepository.findByPhaseId(testPhaseId); + const resolutionTask = tasks.find(t => t.name.startsWith('Resolve conflicts:')); + + expect(resolutionTask).toBeDefined(); + expect(resolutionTask!.name).toBe('Resolve conflicts: Original Task'); + expect(resolutionTask!.priority).toBe('high'); + expect(resolutionTask!.type).toBe('auto'); + expect(resolutionTask!.status).toBe('pending'); + expect(resolutionTask!.order).toBe(originalTask.order + 1); + expect(resolutionTask!.phaseId).toBe(testPhaseId); + expect(resolutionTask!.initiativeId).toBe(testInitiativeId); + expect(resolutionTask!.parentTaskId).toBe(originalTask.parentTaskId); + + // Check description contains conflict files + expect(resolutionTask!.description).toContain('src/file1.ts'); + expect(resolutionTask!.description).toContain('src/file2.ts'); + expect(resolutionTask!.description).toContain('Original Task'); + }); + + it('should update original task status to blocked', async () => { + const originalTask = await taskRepository.create({ + phaseId: testPhaseId, + initiativeId: testInitiativeId, + name: 'Task To Block', + status: 'in_progress', + order: 1, + }); + + await agentRepository.create({ + name: 'agent-block', + taskId: originalTask.id, + worktreeId: 'wt-block', + }); + + await service.handleConflict(originalTask.id, ['conflict.ts']); + + // Check original task is blocked + const updatedTask = 
await taskRepository.findById(originalTask.id); + expect(updatedTask!.status).toBe('blocked'); + }); + + it('should create message to agent about conflict', async () => { + const originalTask = await taskRepository.create({ + phaseId: testPhaseId, + initiativeId: testInitiativeId, + name: 'Message Task', + order: 1, + }); + + const agent = await agentRepository.create({ + name: 'agent-msg', + taskId: originalTask.id, + worktreeId: 'wt-msg', + }); + + const conflicts = ['conflict.ts']; + + await service.handleConflict(originalTask.id, conflicts); + + // Check message was created + const messages = await messageRepository.findByRecipient('agent', agent.id); + expect(messages.length).toBe(1); + expect(messages[0].recipientType).toBe('agent'); + expect(messages[0].recipientId).toBe(agent.id); + expect(messages[0].senderType).toBe('user'); + expect(messages[0].type).toBe('info'); + expect(messages[0].requiresResponse).toBe(false); + + // Check message content + expect(messages[0].content).toContain('Merge conflict detected'); + expect(messages[0].content).toContain('Message Task'); + expect(messages[0].content).toContain('conflict.ts'); + expect(messages[0].content).toContain('Resolve conflicts: Message Task'); + }); + + it('should emit TaskQueuedEvent for resolution task', async () => { + const originalTask = await taskRepository.create({ + phaseId: testPhaseId, + initiativeId: testInitiativeId, + name: 'Event Task', + order: 1, + }); + + await agentRepository.create({ + name: 'agent-event', + taskId: originalTask.id, + worktreeId: 'wt-event', + }); + + await service.handleConflict(originalTask.id, ['event.ts']); + + // Check TaskQueuedEvent was emitted + expect(eventBus.emittedEvents.length).toBe(1); + expect(eventBus.emittedEvents[0].type).toBe('task:queued'); + + const event = eventBus.emittedEvents[0] as any; + expect(event.payload.priority).toBe('high'); + expect(event.payload.dependsOn).toEqual([]); + + // Check taskId matches the created resolution task + const 
tasks = await taskRepository.findByPhaseId(testPhaseId); + const resolutionTask = tasks.find(t => t.name.startsWith('Resolve conflicts:')); + expect(event.payload.taskId).toBe(resolutionTask!.id); + }); + + it('should work without messageRepository', async () => { + // Create service without messageRepository + const serviceNoMsg = new DefaultConflictResolutionService( + taskRepository, + agentRepository, + undefined, // No message repository + eventBus + ); + + const originalTask = await taskRepository.create({ + phaseId: testPhaseId, + initiativeId: testInitiativeId, + name: 'No Message Task', + order: 1, + }); + + await agentRepository.create({ + name: 'agent-no-msg', + taskId: originalTask.id, + worktreeId: 'wt-no-msg', + }); + + // Should not throw and should still create task + await expect(serviceNoMsg.handleConflict(originalTask.id, ['test.ts'])) + .resolves.not.toThrow(); + + // Check resolution task was still created + const tasks = await taskRepository.findByPhaseId(testPhaseId); + const resolutionTask = tasks.find(t => t.name.startsWith('Resolve conflicts:')); + expect(resolutionTask).toBeDefined(); + }); + + it('should work without eventBus', async () => { + // Create service without eventBus + const serviceNoEvents = new DefaultConflictResolutionService( + taskRepository, + agentRepository, + messageRepository, + undefined // No event bus + ); + + const originalTask = await taskRepository.create({ + phaseId: testPhaseId, + initiativeId: testInitiativeId, + name: 'No Events Task', + order: 1, + }); + + await agentRepository.create({ + name: 'agent-no-events', + taskId: originalTask.id, + worktreeId: 'wt-no-events', + }); + + // Should not throw and should still create task + await expect(serviceNoEvents.handleConflict(originalTask.id, ['test.ts'])) + .resolves.not.toThrow(); + + // Check resolution task was still created + const tasks = await taskRepository.findByPhaseId(testPhaseId); + const resolutionTask = tasks.find(t => t.name.startsWith('Resolve 
conflicts:')); + expect(resolutionTask).toBeDefined(); + }); + + it('should throw error when task not found', async () => { + await expect(service.handleConflict('non-existent-id', ['test.ts'])) + .rejects.toThrow('Original task not found: non-existent-id'); + }); + + it('should throw error when no agent found for task', async () => { + const task = await taskRepository.create({ + phaseId: testPhaseId, + initiativeId: testInitiativeId, + name: 'Orphan Task', + order: 1, + }); + + await expect(service.handleConflict(task.id, ['test.ts'])) + .rejects.toThrow(`No agent found for task: ${task.id}`); + }); + + it('should handle multiple conflict files correctly', async () => { + const originalTask = await taskRepository.create({ + phaseId: testPhaseId, + initiativeId: testInitiativeId, + name: 'Multi-Conflict Task', + order: 1, + }); + + await agentRepository.create({ + name: 'agent-multi', + taskId: originalTask.id, + worktreeId: 'wt-multi', + }); + + const conflicts = [ + 'src/components/Header.tsx', + 'src/utils/helpers.ts', + 'package.json', + 'README.md' + ]; + + await service.handleConflict(originalTask.id, conflicts); + + // Check all conflict files are in the description + const tasks = await taskRepository.findByPhaseId(testPhaseId); + const resolutionTask = tasks.find(t => t.name.startsWith('Resolve conflicts:')); + + expect(resolutionTask!.description).toContain('src/components/Header.tsx'); + expect(resolutionTask!.description).toContain('src/utils/helpers.ts'); + expect(resolutionTask!.description).toContain('package.json'); + expect(resolutionTask!.description).toContain('README.md'); + }); + + it('should preserve parentTaskId from original task', async () => { + // Create parent task first + const parentTask = await taskRepository.create({ + phaseId: testPhaseId, + initiativeId: testInitiativeId, + name: 'Parent Task', + order: 1, + }); + + // Create child task + const childTask = await taskRepository.create({ + phaseId: testPhaseId, + initiativeId: 
testInitiativeId, + parentTaskId: parentTask.id, + name: 'Child Task', + order: 2, + }); + + await agentRepository.create({ + name: 'agent-child', + taskId: childTask.id, + worktreeId: 'wt-child', + }); + + await service.handleConflict(childTask.id, ['conflict.ts']); + + // Check resolution task has same parentTaskId + const tasks = await taskRepository.findByPhaseId(testPhaseId); + const resolutionTask = tasks.find(t => t.name.startsWith('Resolve conflicts:')); + + expect(resolutionTask!.parentTaskId).toBe(parentTask.id); + }); + }); +}); \ No newline at end of file diff --git a/src/coordination/conflict-resolution-service.ts b/src/coordination/conflict-resolution-service.ts new file mode 100644 index 0000000..5e6263e --- /dev/null +++ b/src/coordination/conflict-resolution-service.ts @@ -0,0 +1,136 @@ +/** + * ConflictResolutionService + * + * Service responsible for handling merge conflicts by: + * - Creating conflict resolution tasks + * - Updating original task status + * - Notifying agents via messages + * - Emitting appropriate events + * + * This service is used by the CoordinationManager when merge conflicts occur. + */ + +import type { EventBus, TaskQueuedEvent } from '../events/index.js'; +import type { TaskRepository } from '../db/repositories/task-repository.js'; +import type { AgentRepository } from '../db/repositories/agent-repository.js'; +import type { MessageRepository } from '../db/repositories/message-repository.js'; + +// ============================================================================= +// ConflictResolutionService Interface (Port) +// ============================================================================= + +/** + * Service interface for handling merge conflicts. + * This is the PORT - implementations are ADAPTERS. + */ +export interface ConflictResolutionService { + /** + * Handle a merge conflict by creating resolution task and notifying agent. 
+ * + * @param taskId - ID of the task that conflicted + * @param conflicts - List of conflicting file paths + */ + handleConflict(taskId: string, conflicts: string[]): Promise<void>; +} + +// ============================================================================= +// DefaultConflictResolutionService Implementation (Adapter) +// ============================================================================= + +/** + * Default implementation of ConflictResolutionService. + * + * Creates conflict resolution tasks, updates task statuses, sends messages + * to agents, and emits events when merge conflicts occur. + */ +export class DefaultConflictResolutionService implements ConflictResolutionService { + constructor( + private taskRepository: TaskRepository, + private agentRepository: AgentRepository, + private messageRepository?: MessageRepository, + private eventBus?: EventBus + ) {} + + /** + * Handle a merge conflict. + * Creates a conflict-resolution task and notifies the agent via message. + */ + async handleConflict(taskId: string, conflicts: string[]): Promise<void> { + // Get original task for context + const originalTask = await this.taskRepository.findById(taskId); + if (!originalTask) { + throw new Error(`Original task not found: ${taskId}`); + } + + // Get agent that was working on the task + const agent = await this.agentRepository.findByTaskId(taskId); + if (!agent) { + throw new Error(`No agent found for task: ${taskId}`); + } + + // Build conflict description + const conflictDescription = [ + 'Merge conflicts detected. 
Resolve conflicts in the following files:', + '', + ...conflicts.map((f) => `- ${f}`), + '', + `Original task: ${originalTask.name}`, + '', + 'Instructions: Resolve merge conflicts in the listed files, then mark task complete.', + ].join('\n'); + + // Create new conflict-resolution task + const conflictTask = await this.taskRepository.create({ + parentTaskId: originalTask.parentTaskId, + phaseId: originalTask.phaseId, + initiativeId: originalTask.initiativeId, + name: `Resolve conflicts: ${originalTask.name}`, + description: conflictDescription, + type: 'auto', + priority: 'high', // Conflicts should be resolved quickly + status: 'pending', + order: originalTask.order + 1, + }); + + // Update original task status to blocked + await this.taskRepository.update(taskId, { status: 'blocked' }); + + // Create message to agent if messageRepository is configured + if (this.messageRepository) { + const messageContent = [ + `Merge conflict detected for task: ${originalTask.name}`, + '', + 'Conflicting files:', + ...conflicts.map((f) => `- ${f}`), + '', + `A new task has been created to resolve these conflicts: ${conflictTask.name}`, + '', + 'Please resolve the merge conflicts in the listed files and mark the resolution task as complete.', + ].join('\n'); + + await this.messageRepository.create({ + senderType: 'user', // System-generated messages appear as from user + senderId: null, + recipientType: 'agent', + recipientId: agent.id, + type: 'info', + content: messageContent, + requiresResponse: false, + }); + } + + // Emit TaskQueuedEvent for the new conflict-resolution task + if (this.eventBus) { + const event: TaskQueuedEvent = { + type: 'task:queued', + timestamp: new Date(), + payload: { + taskId: conflictTask.id, + priority: 'high', + dependsOn: [], + }, + }; + this.eventBus.emit(event); + } + } +} \ No newline at end of file diff --git a/src/coordination/index.ts b/src/coordination/index.ts index 531013a..8c365ec 100644 --- a/src/coordination/index.ts +++ 
b/src/coordination/index.ts @@ -16,11 +16,13 @@ * - Clean separation between domain logic and infrastructure */ -// Port interface (what consumers depend on) +// Port interfaces (what consumers depend on) export type { CoordinationManager } from './types.js'; +export type { ConflictResolutionService } from './conflict-resolution-service.js'; // Domain types export type { MergeQueueItem, MergeStatus, MergeResult } from './types.js'; // Adapters export { DefaultCoordinationManager } from './manager.js'; +export { DefaultConflictResolutionService } from './conflict-resolution-service.js'; diff --git a/src/coordination/manager.test.ts b/src/coordination/manager.test.ts index 138944a..90e8b97 100644 --- a/src/coordination/manager.test.ts +++ b/src/coordination/manager.test.ts @@ -10,7 +10,7 @@ import { DefaultCoordinationManager } from './manager.js'; import { DrizzleTaskRepository } from '../db/repositories/drizzle/task.js'; import { DrizzleAgentRepository } from '../db/repositories/drizzle/agent.js'; import { DrizzleMessageRepository } from '../db/repositories/drizzle/message.js'; -import { DrizzlePlanRepository } from '../db/repositories/drizzle/plan.js'; + import { DrizzlePhaseRepository } from '../db/repositories/drizzle/phase.js'; import { DrizzleInitiativeRepository } from '../db/repositories/drizzle/initiative.js'; import { createTestDatabase } from '../db/repositories/drizzle/test-helpers.js'; @@ -20,6 +20,7 @@ import type { WorktreeManager, MergeResult as GitMergeResult } from '../git/type import type { TaskRepository } from '../db/repositories/task-repository.js'; import type { AgentRepository } from '../db/repositories/agent-repository.js'; import type { MessageRepository } from '../db/repositories/message-repository.js'; +import type { ConflictResolutionService } from './conflict-resolution-service.js'; // ============================================================================= // Test Helpers @@ -63,6 +64,15 @@ function createMockWorktreeManager( }; } 
+/** + * Create a mock ConflictResolutionService. + */ +function createMockConflictResolutionService(): ConflictResolutionService { + return { + handleConflict: vi.fn(), + }; +} + // ============================================================================= // Tests // ============================================================================= @@ -74,8 +84,9 @@ describe('DefaultCoordinationManager', () => { let messageRepository: MessageRepository; let eventBus: EventBus & { emittedEvents: DomainEvent[] }; let worktreeManager: WorktreeManager; + let conflictResolutionService: ConflictResolutionService; let manager: DefaultCoordinationManager; - let testPlanId: string; + let testPhaseId: string; beforeEach(async () => { // Set up test database @@ -87,7 +98,6 @@ describe('DefaultCoordinationManager', () => { // Create required hierarchy for tasks const initiativeRepo = new DrizzleInitiativeRepository(db); const phaseRepo = new DrizzlePhaseRepository(db); - const planRepo = new DrizzlePlanRepository(db); const initiative = await initiativeRepo.create({ name: 'Test Initiative', @@ -97,12 +107,7 @@ describe('DefaultCoordinationManager', () => { number: 1, name: 'Test Phase', }); - const plan = await planRepo.create({ - phaseId: phase.id, - number: 1, - name: 'Test Plan', - }); - testPlanId = plan.id; + testPhaseId = phase.id; // Create mocks eventBus = createMockEventBus(); @@ -126,7 +131,7 @@ describe('DefaultCoordinationManager', () => { it('should add task to queue and emit MergeQueuedEvent', async () => { // Create task const task = await taskRepository.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'Test Task', priority: 'high', order: 1, @@ -162,7 +167,7 @@ describe('DefaultCoordinationManager', () => { it('should throw error when no agent assigned to task', async () => { const task = await taskRepository.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'Orphan Task', order: 1, }); @@ -185,7 +190,7 @@ 
describe('DefaultCoordinationManager', () => { it('should return item when all dependencies merged', async () => { const task = await taskRepository.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'Mergeable Task', priority: 'medium', order: 1, @@ -207,19 +212,19 @@ describe('DefaultCoordinationManager', () => { it('should respect priority ordering (high > medium > low)', async () => { // Create tasks in different priority order const lowTask = await taskRepository.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'Low Priority', priority: 'low', order: 1, }); const highTask = await taskRepository.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'High Priority', priority: 'high', order: 2, }); const mediumTask = await taskRepository.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'Medium Priority', priority: 'medium', order: 3, @@ -256,13 +261,13 @@ describe('DefaultCoordinationManager', () => { it('should order by queuedAt within same priority (oldest first)', async () => { const task1 = await taskRepository.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'First Task', priority: 'medium', order: 1, }); const task2 = await taskRepository.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'Second Task', priority: 'medium', order: 2, @@ -298,7 +303,7 @@ describe('DefaultCoordinationManager', () => { describe('processMerges - success path', () => { it('should complete clean merges and emit MergeCompletedEvent', async () => { const task = await taskRepository.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'Mergeable Task', priority: 'high', order: 1, @@ -332,13 +337,13 @@ describe('DefaultCoordinationManager', () => { it('should process multiple tasks in priority order', async () => { const lowTask = await taskRepository.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'Low Priority', priority: 'low', order: 1, }); const highTask = await taskRepository.create({ - planId: 
testPlanId, + phaseId: testPhaseId, name: 'High Priority', priority: 'high', order: 2, @@ -374,7 +379,7 @@ describe('DefaultCoordinationManager', () => { describe('processMerges - conflict handling', () => { it('should detect conflicts and emit MergeConflictedEvent', async () => { const task = await taskRepository.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'Conflicting Task', priority: 'high', order: 1, @@ -430,7 +435,7 @@ describe('DefaultCoordinationManager', () => { it('should create resolution task on conflict', async () => { const task = await taskRepository.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'Original Task', priority: 'medium', order: 1, @@ -463,7 +468,7 @@ describe('DefaultCoordinationManager', () => { await manager.processMerges('main'); // Check new task was created - const tasks = await taskRepository.findByPlanId(testPlanId); + const tasks = await taskRepository.findByPhaseId(testPhaseId); const conflictTask = tasks.find((t) => t.name.startsWith('Resolve conflicts:') ); @@ -488,7 +493,7 @@ describe('DefaultCoordinationManager', () => { it('should create message to agent on conflict', async () => { const task = await taskRepository.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'Task with Message', priority: 'medium', order: 1, @@ -539,13 +544,13 @@ describe('DefaultCoordinationManager', () => { it('should return correct counts for all states', async () => { // Create tasks const task1 = await taskRepository.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'Queued Task', priority: 'high', order: 1, }); const task2 = await taskRepository.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'Conflict Task', priority: 'medium', order: 2, @@ -609,7 +614,7 @@ describe('DefaultCoordinationManager', () => { it('should throw error when no agent for task', async () => { const task = await taskRepository.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'Orphan Task', order: 1, 
}); @@ -649,7 +654,7 @@ describe('DefaultCoordinationManager', () => { ); const task = await taskRepository.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'Test', order: 1, }); @@ -669,7 +674,7 @@ describe('DefaultCoordinationManager', () => { ); const task = await taskRepository.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'Test', order: 1, }); diff --git a/src/coordination/manager.ts b/src/coordination/manager.ts index 8f944de..eafaafd 100644 --- a/src/coordination/manager.ts +++ b/src/coordination/manager.ts @@ -13,13 +13,14 @@ import type { MergeStartedEvent, MergeCompletedEvent, MergeConflictedEvent, - TaskQueuedEvent, } from '../events/index.js'; import type { WorktreeManager } from '../git/types.js'; import type { TaskRepository } from '../db/repositories/task-repository.js'; import type { AgentRepository } from '../db/repositories/agent-repository.js'; import type { MessageRepository } from '../db/repositories/message-repository.js'; import type { CoordinationManager, MergeQueueItem, MergeResult } from './types.js'; +import type { ConflictResolutionService } from './conflict-resolution-service.js'; +import { DefaultConflictResolutionService } from './conflict-resolution-service.js'; // ============================================================================= // Internal Types @@ -52,13 +53,29 @@ export class DefaultCoordinationManager implements CoordinationManager { /** Tasks with conflicts awaiting resolution */ private conflictedTasks: Map<string, string[]> = new Map(); + /** Service for handling merge conflicts */ + private conflictResolutionService?: ConflictResolutionService; + constructor( private worktreeManager?: WorktreeManager, private taskRepository?: TaskRepository, private agentRepository?: AgentRepository, private messageRepository?: MessageRepository, - private eventBus?: EventBus - ) {} + private eventBus?: EventBus, + conflictResolutionService?: ConflictResolutionService + ) { + // Create default conflict 
resolution service if none provided + if (conflictResolutionService) { + this.conflictResolutionService = conflictResolutionService; + } else if (taskRepository && agentRepository) { + this.conflictResolutionService = new DefaultConflictResolutionService( + taskRepository, + agentRepository, + messageRepository, + eventBus + ); + } + } /** * Queue a completed task for merge. @@ -259,89 +276,14 @@ export class DefaultCoordinationManager implements CoordinationManager { /** * Handle a merge conflict. - * Creates a conflict-resolution task and notifies the agent via message. + * Delegates to the ConflictResolutionService. */ async handleConflict(taskId: string, conflicts: string[]): Promise<void> { - if (!this.taskRepository) { - throw new Error('TaskRepository not configured'); + if (!this.conflictResolutionService) { + throw new Error('ConflictResolutionService not configured'); } - if (!this.agentRepository) { - throw new Error('AgentRepository not configured'); - } - - // Get original task for context - const originalTask = await this.taskRepository.findById(taskId); - if (!originalTask) { - throw new Error(`Original task not found: ${taskId}`); - } - - // Get agent that was working on the task - const agent = await this.agentRepository.findByTaskId(taskId); - if (!agent) { - throw new Error(`No agent found for task: ${taskId}`); - } - - // Build conflict description - const conflictDescription = [ - 'Merge conflicts detected. 
Resolve conflicts in the following files:', - '', - ...conflicts.map((f) => `- ${f}`), - '', - `Original task: ${originalTask.name}`, - '', - 'Instructions: Resolve merge conflicts in the listed files, then mark task complete.', - ].join('\n'); - - // Create new conflict-resolution task - const conflictTask = await this.taskRepository.create({ - planId: originalTask.planId, - name: `Resolve conflicts: ${originalTask.name}`, - description: conflictDescription, - type: 'auto', - priority: 'high', // Conflicts should be resolved quickly - status: 'pending', - order: originalTask.order + 1, - }); - - // Update original task status to blocked - await this.taskRepository.update(taskId, { status: 'blocked' }); - - // Create message to agent if messageRepository is configured - if (this.messageRepository) { - const messageContent = [ - `Merge conflict detected for task: ${originalTask.name}`, - '', - 'Conflicting files:', - ...conflicts.map((f) => `- ${f}`), - '', - `A new task has been created to resolve these conflicts: ${conflictTask.name}`, - '', - 'Please resolve the merge conflicts in the listed files and mark the resolution task as complete.', - ].join('\n'); - - await this.messageRepository.create({ - senderType: 'user', // System-generated messages appear as from user - senderId: null, - recipientType: 'agent', - recipientId: agent.id, - type: 'info', - content: messageContent, - requiresResponse: false, - }); - } - - // Emit TaskQueuedEvent for the new conflict-resolution task - const event: TaskQueuedEvent = { - type: 'task:queued', - timestamp: new Date(), - payload: { - taskId: conflictTask.id, - priority: 'high', - dependsOn: [], - }, - }; - this.eventBus?.emit(event); + await this.conflictResolutionService.handleConflict(taskId, conflicts); } /** diff --git a/src/db/config.ts b/src/db/config.ts index 4a20673..f831ee0 100644 --- a/src/db/config.ts +++ b/src/db/config.ts @@ -1,11 +1,12 @@ import { mkdirSync } from 'node:fs'; import { dirname, join } from 
'node:path'; -import { homedir } from 'node:os'; +import { findWorkspaceRoot } from '../config/cwrc.js'; /** * Get the database path. * - * - Default: ~/.cw/data/cw.db + * - Default: <workspace-root>/.cw/cw.db + * - Throws if no .cwrc workspace is found * - Override via CW_DB_PATH environment variable * - For testing, pass ':memory:' as CW_DB_PATH */ @@ -15,7 +16,13 @@ export function getDbPath(): string { return envPath; } - return join(homedir(), '.cw', 'data', 'cw.db'); + const root = findWorkspaceRoot(); + if (!root) { + throw new Error( + 'No .cwrc workspace found. Run `cw init` to initialize a workspace.', + ); + } + return join(root, '.cw', 'cw.db'); } /** diff --git a/src/db/ensure-schema.ts b/src/db/ensure-schema.ts index d38c50a..b26baab 100644 --- a/src/db/ensure-schema.ts +++ b/src/db/ensure-schema.ts @@ -1,123 +1,38 @@ /** - * Database Schema Initialization + * Database Migration * - * Ensures all required tables exist in the database. - * Uses CREATE TABLE IF NOT EXISTS so it's safe to call multiple times. + * Runs drizzle-kit migrations from the drizzle/ directory. + * Safe to call on every startup - only applies pending migrations. */ +import { migrate } from 'drizzle-orm/better-sqlite3/migrator'; +import { join, dirname } from 'node:path'; +import { fileURLToPath } from 'node:url'; import type { DrizzleDatabase } from './index.js'; -import { sql } from 'drizzle-orm'; +import { createModuleLogger } from '../logger/index.js'; + +const log = createModuleLogger('db'); /** - * Individual CREATE TABLE statements for each table. - * Each must be a single statement for drizzle-orm compatibility. - * These mirror the schema defined in schema.ts. + * Resolve the migrations directory relative to the package root. + * Works both in development (src/) and after build (dist/). 
*/ -const TABLE_STATEMENTS = [ - // Initiatives table - `CREATE TABLE IF NOT EXISTS initiatives ( - id TEXT PRIMARY KEY NOT NULL, - name TEXT NOT NULL, - description TEXT, - status TEXT NOT NULL DEFAULT 'active', - created_at INTEGER NOT NULL, - updated_at INTEGER NOT NULL -)`, - - // Phases table - `CREATE TABLE IF NOT EXISTS phases ( - id TEXT PRIMARY KEY NOT NULL, - initiative_id TEXT NOT NULL REFERENCES initiatives(id) ON DELETE CASCADE, - number INTEGER NOT NULL, - name TEXT NOT NULL, - description TEXT, - status TEXT NOT NULL DEFAULT 'pending', - created_at INTEGER NOT NULL, - updated_at INTEGER NOT NULL -)`, - - // Phase dependencies table - `CREATE TABLE IF NOT EXISTS phase_dependencies ( - id TEXT PRIMARY KEY NOT NULL, - phase_id TEXT NOT NULL REFERENCES phases(id) ON DELETE CASCADE, - depends_on_phase_id TEXT NOT NULL REFERENCES phases(id) ON DELETE CASCADE, - created_at INTEGER NOT NULL -)`, - - // Plans table - `CREATE TABLE IF NOT EXISTS plans ( - id TEXT PRIMARY KEY NOT NULL, - phase_id TEXT NOT NULL REFERENCES phases(id) ON DELETE CASCADE, - number INTEGER NOT NULL, - name TEXT NOT NULL, - description TEXT, - status TEXT NOT NULL DEFAULT 'pending', - created_at INTEGER NOT NULL, - updated_at INTEGER NOT NULL -)`, - - // Tasks table - `CREATE TABLE IF NOT EXISTS tasks ( - id TEXT PRIMARY KEY NOT NULL, - plan_id TEXT NOT NULL REFERENCES plans(id) ON DELETE CASCADE, - name TEXT NOT NULL, - description TEXT, - type TEXT NOT NULL DEFAULT 'auto', - priority TEXT NOT NULL DEFAULT 'medium', - status TEXT NOT NULL DEFAULT 'pending', - "order" INTEGER NOT NULL DEFAULT 0, - created_at INTEGER NOT NULL, - updated_at INTEGER NOT NULL -)`, - - // Task dependencies table - `CREATE TABLE IF NOT EXISTS task_dependencies ( - id TEXT PRIMARY KEY NOT NULL, - task_id TEXT NOT NULL REFERENCES tasks(id) ON DELETE CASCADE, - depends_on_task_id TEXT NOT NULL REFERENCES tasks(id) ON DELETE CASCADE, - created_at INTEGER NOT NULL -)`, - - // Agents table - `CREATE TABLE IF NOT 
EXISTS agents ( - id TEXT PRIMARY KEY NOT NULL, - name TEXT NOT NULL UNIQUE, - task_id TEXT REFERENCES tasks(id) ON DELETE SET NULL, - session_id TEXT, - worktree_id TEXT NOT NULL, - status TEXT NOT NULL DEFAULT 'idle', - mode TEXT NOT NULL DEFAULT 'execute' CHECK(mode IN ('execute', 'discuss', 'breakdown', 'decompose')), - created_at INTEGER NOT NULL, - updated_at INTEGER NOT NULL -)`, - - // Messages table - `CREATE TABLE IF NOT EXISTS messages ( - id TEXT PRIMARY KEY NOT NULL, - sender_type TEXT NOT NULL, - sender_id TEXT REFERENCES agents(id) ON DELETE SET NULL, - recipient_type TEXT NOT NULL, - recipient_id TEXT REFERENCES agents(id) ON DELETE SET NULL, - type TEXT NOT NULL DEFAULT 'info', - content TEXT NOT NULL, - requires_response INTEGER NOT NULL DEFAULT 0, - status TEXT NOT NULL DEFAULT 'pending', - parent_message_id TEXT REFERENCES messages(id) ON DELETE SET NULL, - created_at INTEGER NOT NULL, - updated_at INTEGER NOT NULL -)`, -]; +function getMigrationsPath(): string { + const currentDir = dirname(fileURLToPath(import.meta.url)); + // From src/db/ or dist/db/, go up two levels to package root, then into drizzle/ + return join(currentDir, '..', '..', 'drizzle'); +} /** - * Ensure all database tables exist. + * Run all pending database migrations. * - * Uses CREATE TABLE IF NOT EXISTS, so safe to call on every startup. - * Must be called before any repository operations on a fresh database. + * Uses drizzle-kit's migration system which tracks applied migrations + * in a __drizzle_migrations table. Safe to call on every startup. 
* * @param db - Drizzle database instance */ export function ensureSchema(db: DrizzleDatabase): void { - for (const statement of TABLE_STATEMENTS) { - db.run(sql.raw(statement)); - } + log.info('applying database migrations'); + migrate(db, { migrationsFolder: getMigrationsPath() }); + log.info('database migrations complete'); } diff --git a/src/db/repositories/account-repository.ts b/src/db/repositories/account-repository.ts new file mode 100644 index 0000000..e7b62ef --- /dev/null +++ b/src/db/repositories/account-repository.ts @@ -0,0 +1,61 @@ +/** + * Account Repository Port Interface + * + * Port for Account aggregate operations. + * Accounts represent authenticated provider logins (e.g. Claude OAuth accounts) + * used for round-robin agent spawning and usage-limit failover. + */ + +import type { Account } from '../schema.js'; + +export interface CreateAccountData { + email: string; + provider?: string; // defaults to 'claude' + configJson?: string; // .claude.json content + credentials?: string; // .credentials.json content +} + +export interface AccountRepository { + /** Create a new account. Generates id and sets timestamps. */ + create(data: CreateAccountData): Promise<Account>; + + /** Find an account by its ID. */ + findById(id: string): Promise<Account | null>; + + /** Find an account by email. */ + findByEmail(email: string): Promise<Account | null>; + + /** Find all accounts for a given provider. */ + findByProvider(provider: string): Promise<Account[]>; + + /** + * Find the next available (non-exhausted) account for a provider. + * Uses round-robin via lastUsedAt ordering (least-recently-used first). + * Automatically clears expired exhaustion before querying. + */ + findNextAvailable(provider: string): Promise<Account | null>; + + /** Mark an account as exhausted until a given time. */ + markExhausted(id: string, until: Date): Promise<Account>; + + /** Mark an account as available (clear exhaustion). 
*/ + markAvailable(id: string): Promise<Account>; + + /** Update the lastUsedAt timestamp for an account. */ + updateLastUsed(id: string): Promise<Account>; + + /** Clear exhaustion for all accounts whose exhaustedUntil has passed. Returns count cleared. */ + clearExpiredExhaustion(): Promise<number>; + + /** Find all accounts. */ + findAll(): Promise<Account[]>; + + /** Update stored credentials for an account. */ + updateCredentials(id: string, credentials: string): Promise<Account>; + + /** Update both configJson and credentials for an account (used by account add upsert). */ + updateAccountAuth(id: string, configJson: string, credentials: string): Promise<Account>; + + /** Delete an account. Throws if not found. */ + delete(id: string): Promise<void>; +} diff --git a/src/db/repositories/agent-repository.ts b/src/db/repositories/agent-repository.ts index 97e026e..e363f72 100644 --- a/src/db/repositories/agent-repository.ts +++ b/src/db/repositories/agent-repository.ts @@ -21,9 +21,34 @@ export interface CreateAgentData { name: string; worktreeId: string; taskId?: string | null; + initiativeId?: string | null; sessionId?: string | null; status?: AgentStatus; mode?: AgentMode; // Defaults to 'execute' if not provided + provider?: string; // Defaults to 'claude' if not provided + accountId?: string | null; +} + +/** + * Data for updating an existing agent. + * All fields optional. System-managed fields (id, createdAt, updatedAt) are excluded. 
+ */ +export interface UpdateAgentData { + name?: string; + worktreeId?: string; + taskId?: string | null; + initiativeId?: string | null; + sessionId?: string | null; + status?: AgentStatus; + mode?: AgentMode; + provider?: string; + accountId?: string | null; + pid?: number | null; + outputFilePath?: string | null; + result?: string | null; + pendingQuestions?: string | null; + userDismissedAt?: Date | null; + updatedAt?: Date; } /** @@ -78,19 +103,12 @@ export interface AgentRepository { findByStatus(status: AgentStatus): Promise<Agent[]>; /** - * Update an agent's status. + * Update an agent with partial data. + * Only provided fields are updated, others remain unchanged. * Throws if agent not found. * Updates updatedAt timestamp automatically. */ - updateStatus(id: string, status: AgentStatus): Promise<Agent>; - - /** - * Update an agent's session ID. - * Called after first CLI run completes and provides session ID. - * Throws if agent not found. - * Updates updatedAt timestamp automatically. - */ - updateSessionId(id: string, sessionId: string): Promise<Agent>; + update(id: string, data: UpdateAgentData): Promise<Agent>; /** * Delete an agent. diff --git a/src/db/repositories/drizzle/account.ts b/src/db/repositories/drizzle/account.ts new file mode 100644 index 0000000..448d491 --- /dev/null +++ b/src/db/repositories/drizzle/account.ts @@ -0,0 +1,203 @@ +/** + * Drizzle Account Repository Adapter + * + * Implements AccountRepository interface using Drizzle ORM. + * Handles round-robin selection via lastUsedAt ordering + * and automatic exhaustion expiry. 
+ */ + +import { eq, and, asc, lte } from 'drizzle-orm'; +import { nanoid } from 'nanoid'; +import type { DrizzleDatabase } from '../../index.js'; +import { accounts, agents, type Account } from '../../schema.js'; +import type { AccountRepository, CreateAccountData } from '../account-repository.js'; + +export class DrizzleAccountRepository implements AccountRepository { + constructor(private db: DrizzleDatabase) {} + + async create(data: CreateAccountData): Promise<Account> { + const id = nanoid(); + const now = new Date(); + + const [created] = await this.db.insert(accounts).values({ + id, + email: data.email, + provider: data.provider ?? 'claude', + configJson: data.configJson ?? null, + credentials: data.credentials ?? null, + isExhausted: false, + exhaustedUntil: null, + lastUsedAt: null, + sortOrder: 0, + createdAt: now, + updatedAt: now, + }).returning(); + + return created; + } + + async findById(id: string): Promise<Account | null> { + const result = await this.db + .select() + .from(accounts) + .where(eq(accounts.id, id)) + .limit(1); + return result[0] ?? null; + } + + async findByEmail(email: string): Promise<Account | null> { + const result = await this.db + .select() + .from(accounts) + .where(eq(accounts.email, email)) + .limit(1); + return result[0] ?? null; + } + + async findByProvider(provider: string): Promise<Account[]> { + return this.db + .select() + .from(accounts) + .where(eq(accounts.provider, provider)); + } + + async findNextAvailable(provider: string): Promise<Account | null> { + await this.clearExpiredExhaustion(); + + const result = await this.db + .select() + .from(accounts) + .where( + and( + eq(accounts.provider, provider), + eq(accounts.isExhausted, false), + ), + ) + .orderBy(asc(accounts.lastUsedAt)) + .limit(1); + + return result[0] ?? 
null; + } + + async markExhausted(id: string, until: Date): Promise<Account> { + const now = new Date(); + const [updated] = await this.db + .update(accounts) + .set({ + isExhausted: true, + exhaustedUntil: until, + updatedAt: now, + }) + .where(eq(accounts.id, id)) + .returning(); + + if (!updated) { + throw new Error(`Account not found: ${id}`); + } + + return updated; + } + + async markAvailable(id: string): Promise<Account> { + const now = new Date(); + const [updated] = await this.db + .update(accounts) + .set({ + isExhausted: false, + exhaustedUntil: null, + updatedAt: now, + }) + .where(eq(accounts.id, id)) + .returning(); + + if (!updated) { + throw new Error(`Account not found: ${id}`); + } + + return updated; + } + + async updateLastUsed(id: string): Promise<Account> { + const now = new Date(); + const [updated] = await this.db + .update(accounts) + .set({ lastUsedAt: now, updatedAt: now }) + .where(eq(accounts.id, id)) + .returning(); + + if (!updated) { + throw new Error(`Account not found: ${id}`); + } + + return updated; + } + + async clearExpiredExhaustion(): Promise<number> { + const now = new Date(); + + const cleared = await this.db + .update(accounts) + .set({ + isExhausted: false, + exhaustedUntil: null, + updatedAt: now, + }) + .where( + and( + eq(accounts.isExhausted, true), + lte(accounts.exhaustedUntil, now), + ), + ) + .returning({ id: accounts.id }); + + return cleared.length; + } + + async findAll(): Promise<Account[]> { + return this.db.select().from(accounts); + } + + async updateCredentials(id: string, credentials: string): Promise<Account> { + const now = new Date(); + const [updated] = await this.db + .update(accounts) + .set({ credentials, updatedAt: now }) + .where(eq(accounts.id, id)) + .returning(); + + if (!updated) { + throw new Error(`Account not found: ${id}`); + } + + return updated; + } + + async updateAccountAuth(id: string, configJson: string, credentials: string): Promise<Account> { + const now = new Date(); + const 
[updated] = await this.db + .update(accounts) + .set({ configJson, credentials, updatedAt: now }) + .where(eq(accounts.id, id)) + .returning(); + + if (!updated) { + throw new Error(`Account not found: ${id}`); + } + + return updated; + } + + async delete(id: string): Promise<void> { + // Manually nullify agent FK — the migration lacks ON DELETE SET NULL + await this.db + .update(agents) + .set({ accountId: null }) + .where(eq(agents.accountId, id)); + + const [deleted] = await this.db.delete(accounts).where(eq(accounts.id, id)).returning(); + + if (!deleted) { + throw new Error(`Account not found: ${id}`); + } + } +} diff --git a/src/db/repositories/drizzle/agent.test.ts b/src/db/repositories/drizzle/agent.test.ts index 9caeeb9..e2a674b 100644 --- a/src/db/repositories/drizzle/agent.test.ts +++ b/src/db/repositories/drizzle/agent.test.ts @@ -7,7 +7,6 @@ import { describe, it, expect, beforeEach } from 'vitest'; import { DrizzleAgentRepository } from './agent.js'; import { DrizzleTaskRepository } from './task.js'; -import { DrizzlePlanRepository } from './plan.js'; import { DrizzlePhaseRepository } from './phase.js'; import { DrizzleInitiativeRepository } from './initiative.js'; import { createTestDatabase } from './test-helpers.js'; @@ -17,7 +16,6 @@ describe('DrizzleAgentRepository', () => { let db: DrizzleDatabase; let agentRepo: DrizzleAgentRepository; let taskRepo: DrizzleTaskRepository; - let planRepo: DrizzlePlanRepository; let phaseRepo: DrizzlePhaseRepository; let initiativeRepo: DrizzleInitiativeRepository; let testTaskId: string; @@ -26,7 +24,6 @@ describe('DrizzleAgentRepository', () => { db = createTestDatabase(); agentRepo = new DrizzleAgentRepository(db); taskRepo = new DrizzleTaskRepository(db); - planRepo = new DrizzlePlanRepository(db); phaseRepo = new DrizzlePhaseRepository(db); initiativeRepo = new DrizzleInitiativeRepository(db); @@ -39,13 +36,8 @@ describe('DrizzleAgentRepository', () => { number: 1, name: 'Test Phase', }); - const plan = 
await planRepo.create({ - phaseId: phase.id, - number: 1, - name: 'Test Plan', - }); const task = await taskRepo.create({ - planId: plan.id, + phaseId: phase.id, name: 'Test Task', order: 1, }); @@ -162,7 +154,7 @@ describe('DrizzleAgentRepository', () => { name: 'session-agent', worktreeId: 'worktree-123', }); - await agentRepo.updateSessionId(agent.id, 'session-abc'); + await agentRepo.update(agent.id, { sessionId: 'session-abc' }); const found = await agentRepo.findBySessionId('session-abc'); expect(found).not.toBeNull(); @@ -201,7 +193,7 @@ describe('DrizzleAgentRepository', () => { name: 'running-agent', worktreeId: 'wt-2', }); - await agentRepo.updateStatus(agent2.id, 'running'); + await agentRepo.update(agent2.id, { status: 'running' }); const idleAgents = await agentRepo.findByStatus('idle'); const runningAgents = await agentRepo.findByStatus('running'); @@ -217,7 +209,7 @@ describe('DrizzleAgentRepository', () => { name: 'waiting-agent', worktreeId: 'wt-1', }); - await agentRepo.updateStatus(agent.id, 'waiting_for_input'); + await agentRepo.update(agent.id, { status: 'waiting_for_input' }); const waitingAgents = await agentRepo.findByStatus('waiting_for_input'); expect(waitingAgents.length).toBe(1); @@ -225,7 +217,7 @@ describe('DrizzleAgentRepository', () => { }); }); - describe('updateStatus', () => { + describe('update', () => { it('should change status and updatedAt', async () => { const created = await agentRepo.create({ name: 'status-test', @@ -234,22 +226,14 @@ describe('DrizzleAgentRepository', () => { await new Promise((resolve) => setTimeout(resolve, 10)); - const updated = await agentRepo.updateStatus(created.id, 'running'); + const updated = await agentRepo.update(created.id, { status: 'running' }); expect(updated.status).toBe('running'); - expect(updated.updatedAt.getTime()).toBeGreaterThan( + expect(updated.updatedAt.getTime()).toBeGreaterThanOrEqual( created.updatedAt.getTime() ); }); - it('should throw for non-existent agent', async () => { 
- await expect( - agentRepo.updateStatus('non-existent-id', 'running') - ).rejects.toThrow('Agent not found'); - }); - }); - - describe('updateSessionId', () => { it('should change sessionId and updatedAt', async () => { const created = await agentRepo.create({ name: 'session-test', @@ -259,20 +243,17 @@ describe('DrizzleAgentRepository', () => { await new Promise((resolve) => setTimeout(resolve, 10)); - const updated = await agentRepo.updateSessionId( - created.id, - 'new-session-id' - ); + const updated = await agentRepo.update(created.id, { sessionId: 'new-session-id' }); expect(updated.sessionId).toBe('new-session-id'); - expect(updated.updatedAt.getTime()).toBeGreaterThan( + expect(updated.updatedAt.getTime()).toBeGreaterThanOrEqual( created.updatedAt.getTime() ); }); it('should throw for non-existent agent', async () => { await expect( - agentRepo.updateSessionId('non-existent-id', 'session') + agentRepo.update('non-existent-id', { status: 'running' }) ).rejects.toThrow('Agent not found'); }); }); diff --git a/src/db/repositories/drizzle/agent.ts b/src/db/repositories/drizzle/agent.ts index 7f040f8..c8f82fd 100644 --- a/src/db/repositories/drizzle/agent.ts +++ b/src/db/repositories/drizzle/agent.ts @@ -12,6 +12,7 @@ import type { AgentRepository, AgentStatus, CreateAgentData, + UpdateAgentData, } from '../agent-repository.js'; /** @@ -27,21 +28,22 @@ export class DrizzleAgentRepository implements AgentRepository { const id = nanoid(); const now = new Date(); - await this.db.insert(agents).values({ + const [created] = await this.db.insert(agents).values({ id, name: data.name, taskId: data.taskId ?? null, + initiativeId: data.initiativeId ?? null, sessionId: data.sessionId ?? null, worktreeId: data.worktreeId, + provider: data.provider ?? 'claude', + accountId: data.accountId ?? null, status: data.status ?? 'idle', mode: data.mode ?? 
'execute', createdAt: now, updatedAt: now, - }); + }).returning(); - // Fetch to get the complete record with all defaults applied - const created = await this.findById(id); - return created!; + return created; } async findById(id: string): Promise<Agent | null> { @@ -92,42 +94,26 @@ export class DrizzleAgentRepository implements AgentRepository { return this.db.select().from(agents).where(eq(agents.status, status)); } - async updateStatus(id: string, status: AgentStatus): Promise<Agent> { - const existing = await this.findById(id); - if (!existing) { + async update(id: string, data: UpdateAgentData): Promise<Agent> { + const now = new Date(); + const [updated] = await this.db + .update(agents) + .set({ ...data, updatedAt: now }) + .where(eq(agents.id, id)) + .returning(); + + if (!updated) { throw new Error(`Agent not found: ${id}`); } - const now = new Date(); - await this.db - .update(agents) - .set({ status, updatedAt: now }) - .where(eq(agents.id, id)); - - return { ...existing, status, updatedAt: now }; - } - - async updateSessionId(id: string, sessionId: string): Promise<Agent> { - const existing = await this.findById(id); - if (!existing) { - throw new Error(`Agent not found: ${id}`); - } - - const now = new Date(); - await this.db - .update(agents) - .set({ sessionId, updatedAt: now }) - .where(eq(agents.id, id)); - - return { ...existing, sessionId, updatedAt: now }; + return updated; } async delete(id: string): Promise<void> { - const existing = await this.findById(id); - if (!existing) { + const [deleted] = await this.db.delete(agents).where(eq(agents.id, id)).returning(); + + if (!deleted) { throw new Error(`Agent not found: ${id}`); } - - await this.db.delete(agents).where(eq(agents.id, id)); } } diff --git a/src/db/repositories/drizzle/cascade.test.ts b/src/db/repositories/drizzle/cascade.test.ts index e3e7886..9fb00af 100644 --- a/src/db/repositories/drizzle/cascade.test.ts +++ b/src/db/repositories/drizzle/cascade.test.ts @@ -8,7 +8,6 @@ import { 
describe, it, expect, beforeEach } from 'vitest'; import { DrizzleInitiativeRepository } from './initiative.js'; import { DrizzlePhaseRepository } from './phase.js'; -import { DrizzlePlanRepository } from './plan.js'; import { DrizzleTaskRepository } from './task.js'; import { createTestDatabase } from './test-helpers.js'; import type { DrizzleDatabase } from '../../index.js'; @@ -17,19 +16,18 @@ describe('Cascade Deletes', () => { let db: DrizzleDatabase; let initiativeRepo: DrizzleInitiativeRepository; let phaseRepo: DrizzlePhaseRepository; - let planRepo: DrizzlePlanRepository; let taskRepo: DrizzleTaskRepository; beforeEach(() => { db = createTestDatabase(); initiativeRepo = new DrizzleInitiativeRepository(db); phaseRepo = new DrizzlePhaseRepository(db); - planRepo = new DrizzlePlanRepository(db); taskRepo = new DrizzleTaskRepository(db); }); /** * Helper to create a full hierarchy for testing. + * Uses parent tasks (decompose category) to group child tasks. */ async function createFullHierarchy() { const initiative = await initiativeRepo.create({ @@ -48,44 +46,60 @@ describe('Cascade Deletes', () => { name: 'Phase 2', }); - const plan1 = await planRepo.create({ + // Create parent (decompose) tasks that group child tasks + const parentTask1 = await taskRepo.create({ phaseId: phase1.id, - number: 1, - name: 'Plan 1-1', + initiativeId: initiative.id, + name: 'Parent Task 1-1', + category: 'decompose', + order: 1, }); - const plan2 = await planRepo.create({ + const parentTask2 = await taskRepo.create({ phaseId: phase1.id, - number: 2, - name: 'Plan 1-2', + initiativeId: initiative.id, + name: 'Parent Task 1-2', + category: 'decompose', + order: 2, }); - const plan3 = await planRepo.create({ + const parentTask3 = await taskRepo.create({ phaseId: phase2.id, - number: 1, - name: 'Plan 2-1', + initiativeId: initiative.id, + name: 'Parent Task 2-1', + category: 'decompose', + order: 1, }); + // Create child tasks under parent tasks const task1 = await taskRepo.create({ 
- planId: plan1.id, + parentTaskId: parentTask1.id, + phaseId: phase1.id, + initiativeId: initiative.id, name: 'Task 1-1-1', order: 1, }); const task2 = await taskRepo.create({ - planId: plan1.id, + parentTaskId: parentTask1.id, + phaseId: phase1.id, + initiativeId: initiative.id, name: 'Task 1-1-2', order: 2, }); const task3 = await taskRepo.create({ - planId: plan2.id, + parentTaskId: parentTask2.id, + phaseId: phase1.id, + initiativeId: initiative.id, name: 'Task 1-2-1', order: 1, }); const task4 = await taskRepo.create({ - planId: plan3.id, + parentTaskId: parentTask3.id, + phaseId: phase2.id, + initiativeId: initiative.id, name: 'Task 2-1-1', order: 1, }); @@ -93,22 +107,22 @@ describe('Cascade Deletes', () => { return { initiative, phases: { phase1, phase2 }, - plans: { plan1, plan2, plan3 }, + parentTasks: { parentTask1, parentTask2, parentTask3 }, tasks: { task1, task2, task3, task4 }, }; } describe('delete initiative', () => { - it('should cascade delete all phases, plans, and tasks', async () => { - const { initiative, phases, plans, tasks } = await createFullHierarchy(); + it('should cascade delete all phases and tasks', async () => { + const { initiative, phases, parentTasks, tasks } = await createFullHierarchy(); // Verify everything exists expect(await initiativeRepo.findById(initiative.id)).not.toBeNull(); expect(await phaseRepo.findById(phases.phase1.id)).not.toBeNull(); expect(await phaseRepo.findById(phases.phase2.id)).not.toBeNull(); - expect(await planRepo.findById(plans.plan1.id)).not.toBeNull(); - expect(await planRepo.findById(plans.plan2.id)).not.toBeNull(); - expect(await planRepo.findById(plans.plan3.id)).not.toBeNull(); + expect(await taskRepo.findById(parentTasks.parentTask1.id)).not.toBeNull(); + expect(await taskRepo.findById(parentTasks.parentTask2.id)).not.toBeNull(); + expect(await taskRepo.findById(parentTasks.parentTask3.id)).not.toBeNull(); expect(await taskRepo.findById(tasks.task1.id)).not.toBeNull(); expect(await 
taskRepo.findById(tasks.task2.id)).not.toBeNull(); expect(await taskRepo.findById(tasks.task3.id)).not.toBeNull(); @@ -121,9 +135,9 @@ describe('Cascade Deletes', () => { expect(await initiativeRepo.findById(initiative.id)).toBeNull(); expect(await phaseRepo.findById(phases.phase1.id)).toBeNull(); expect(await phaseRepo.findById(phases.phase2.id)).toBeNull(); - expect(await planRepo.findById(plans.plan1.id)).toBeNull(); - expect(await planRepo.findById(plans.plan2.id)).toBeNull(); - expect(await planRepo.findById(plans.plan3.id)).toBeNull(); + expect(await taskRepo.findById(parentTasks.parentTask1.id)).toBeNull(); + expect(await taskRepo.findById(parentTasks.parentTask2.id)).toBeNull(); + expect(await taskRepo.findById(parentTasks.parentTask3.id)).toBeNull(); expect(await taskRepo.findById(tasks.task1.id)).toBeNull(); expect(await taskRepo.findById(tasks.task2.id)).toBeNull(); expect(await taskRepo.findById(tasks.task3.id)).toBeNull(); @@ -132,8 +146,8 @@ describe('Cascade Deletes', () => { }); describe('delete phase', () => { - it('should cascade delete plans and tasks under that phase only', async () => { - const { initiative, phases, plans, tasks } = await createFullHierarchy(); + it('should cascade delete tasks under that phase only', async () => { + const { initiative, phases, parentTasks, tasks } = await createFullHierarchy(); // Delete phase 1 await phaseRepo.delete(phases.phase1.id); @@ -141,39 +155,39 @@ describe('Cascade Deletes', () => { // Initiative still exists expect(await initiativeRepo.findById(initiative.id)).not.toBeNull(); - // Phase 1 and its children are gone + // Phase 1 and its tasks are gone expect(await phaseRepo.findById(phases.phase1.id)).toBeNull(); - expect(await planRepo.findById(plans.plan1.id)).toBeNull(); - expect(await planRepo.findById(plans.plan2.id)).toBeNull(); + expect(await taskRepo.findById(parentTasks.parentTask1.id)).toBeNull(); + expect(await taskRepo.findById(parentTasks.parentTask2.id)).toBeNull(); expect(await 
taskRepo.findById(tasks.task1.id)).toBeNull(); expect(await taskRepo.findById(tasks.task2.id)).toBeNull(); expect(await taskRepo.findById(tasks.task3.id)).toBeNull(); - // Phase 2 and its children still exist + // Phase 2 and its tasks still exist expect(await phaseRepo.findById(phases.phase2.id)).not.toBeNull(); - expect(await planRepo.findById(plans.plan3.id)).not.toBeNull(); + expect(await taskRepo.findById(parentTasks.parentTask3.id)).not.toBeNull(); expect(await taskRepo.findById(tasks.task4.id)).not.toBeNull(); }); }); - describe('delete plan', () => { - it('should cascade delete tasks under that plan only', async () => { - const { phases, plans, tasks } = await createFullHierarchy(); + describe('delete parent task', () => { + it('should cascade delete child tasks under that parent only', async () => { + const { phases, parentTasks, tasks } = await createFullHierarchy(); - // Delete plan 1 - await planRepo.delete(plans.plan1.id); + // Delete parent task 1 + await taskRepo.delete(parentTasks.parentTask1.id); // Phase still exists expect(await phaseRepo.findById(phases.phase1.id)).not.toBeNull(); - // Plan 1 and its tasks are gone - expect(await planRepo.findById(plans.plan1.id)).toBeNull(); + // Parent task 1 and its children are gone + expect(await taskRepo.findById(parentTasks.parentTask1.id)).toBeNull(); expect(await taskRepo.findById(tasks.task1.id)).toBeNull(); expect(await taskRepo.findById(tasks.task2.id)).toBeNull(); - // Other plans and tasks still exist - expect(await planRepo.findById(plans.plan2.id)).not.toBeNull(); - expect(await planRepo.findById(plans.plan3.id)).not.toBeNull(); + // Other parent tasks and their children still exist + expect(await taskRepo.findById(parentTasks.parentTask2.id)).not.toBeNull(); + expect(await taskRepo.findById(parentTasks.parentTask3.id)).not.toBeNull(); expect(await taskRepo.findById(tasks.task3.id)).not.toBeNull(); expect(await taskRepo.findById(tasks.task4.id)).not.toBeNull(); }); diff --git 
a/src/db/repositories/drizzle/index.ts b/src/db/repositories/drizzle/index.ts index e56f451..4c6f033 100644 --- a/src/db/repositories/drizzle/index.ts +++ b/src/db/repositories/drizzle/index.ts @@ -7,7 +7,9 @@ export { DrizzleInitiativeRepository } from './initiative.js'; export { DrizzlePhaseRepository } from './phase.js'; -export { DrizzlePlanRepository } from './plan.js'; export { DrizzleTaskRepository } from './task.js'; export { DrizzleAgentRepository } from './agent.js'; export { DrizzleMessageRepository } from './message.js'; +export { DrizzlePageRepository } from './page.js'; +export { DrizzleProjectRepository } from './project.js'; +export { DrizzleAccountRepository } from './account.js'; diff --git a/src/db/repositories/drizzle/initiative.test.ts b/src/db/repositories/drizzle/initiative.test.ts index dd60411..2d9fe32 100644 --- a/src/db/repositories/drizzle/initiative.test.ts +++ b/src/db/repositories/drizzle/initiative.test.ts @@ -22,13 +22,11 @@ describe('DrizzleInitiativeRepository', () => { it('should create an initiative with generated id and timestamps', async () => { const initiative = await repo.create({ name: 'Test Initiative', - description: 'A test initiative', }); expect(initiative.id).toBeDefined(); expect(initiative.id.length).toBeGreaterThan(0); expect(initiative.name).toBe('Test Initiative'); - expect(initiative.description).toBe('A test initiative'); expect(initiative.status).toBe('active'); expect(initiative.createdAt).toBeInstanceOf(Date); expect(initiative.updatedAt).toBeInstanceOf(Date); @@ -95,7 +93,7 @@ describe('DrizzleInitiativeRepository', () => { expect(updated.name).toBe('Updated Name'); expect(updated.status).toBe('completed'); - expect(updated.updatedAt.getTime()).toBeGreaterThan(created.updatedAt.getTime()); + expect(updated.updatedAt.getTime()).toBeGreaterThanOrEqual(created.updatedAt.getTime()); }); it('should throw for non-existent initiative', async () => { diff --git a/src/db/repositories/drizzle/initiative.ts 
b/src/db/repositories/drizzle/initiative.ts index 8eb0f47..244482d 100644 --- a/src/db/repositories/drizzle/initiative.ts +++ b/src/db/repositories/drizzle/initiative.ts @@ -27,17 +27,15 @@ export class DrizzleInitiativeRepository implements InitiativeRepository { const id = nanoid(); const now = new Date(); - await this.db.insert(initiatives).values({ + const [created] = await this.db.insert(initiatives).values({ id, ...data, status: data.status ?? 'active', createdAt: now, updatedAt: now, - }); + }).returning(); - // Fetch to get the complete record with all defaults applied - const created = await this.findById(id); - return created!; + return created; } async findById(id: string): Promise<Initiative | null> { @@ -62,27 +60,24 @@ export class DrizzleInitiativeRepository implements InitiativeRepository { } async update(id: string, data: UpdateInitiativeData): Promise<Initiative> { - const existing = await this.findById(id); - if (!existing) { + const [updated] = await this.db + .update(initiatives) + .set({ ...data, updatedAt: new Date() }) + .where(eq(initiatives.id, id)) + .returning(); + + if (!updated) { throw new Error(`Initiative not found: ${id}`); } - const updated = { - ...data, - updatedAt: new Date(), - }; - - await this.db.update(initiatives).set(updated).where(eq(initiatives.id, id)); - - return { ...existing, ...updated } as Initiative; + return updated; } async delete(id: string): Promise<void> { - const existing = await this.findById(id); - if (!existing) { + const [deleted] = await this.db.delete(initiatives).where(eq(initiatives.id, id)).returning(); + + if (!deleted) { throw new Error(`Initiative not found: ${id}`); } - - await this.db.delete(initiatives).where(eq(initiatives.id, id)); } } diff --git a/src/db/repositories/drizzle/message.test.ts b/src/db/repositories/drizzle/message.test.ts index a387ad4..778a29c 100644 --- a/src/db/repositories/drizzle/message.test.ts +++ b/src/db/repositories/drizzle/message.test.ts @@ -8,7 +8,6 @@ import { 
describe, it, expect, beforeEach } from 'vitest'; import { DrizzleMessageRepository } from './message.js'; import { DrizzleAgentRepository } from './agent.js'; import { DrizzleTaskRepository } from './task.js'; -import { DrizzlePlanRepository } from './plan.js'; import { DrizzlePhaseRepository } from './phase.js'; import { DrizzleInitiativeRepository } from './initiative.js'; import { createTestDatabase } from './test-helpers.js'; @@ -27,7 +26,6 @@ describe('DrizzleMessageRepository', () => { // Create required hierarchy for agent FK const taskRepo = new DrizzleTaskRepository(db); - const planRepo = new DrizzlePlanRepository(db); const phaseRepo = new DrizzlePhaseRepository(db); const initiativeRepo = new DrizzleInitiativeRepository(db); @@ -39,13 +37,8 @@ describe('DrizzleMessageRepository', () => { number: 1, name: 'Test Phase', }); - const plan = await planRepo.create({ - phaseId: phase.id, - number: 1, - name: 'Test Plan', - }); const task = await taskRepo.create({ - planId: plan.id, + phaseId: phase.id, name: 'Test Task', order: 1, }); @@ -398,7 +391,7 @@ describe('DrizzleMessageRepository', () => { // Update to read const readMessage = await messageRepo.update(message.id, { status: 'read' }); expect(readMessage.status).toBe('read'); - expect(readMessage.updatedAt.getTime()).toBeGreaterThan(message.updatedAt.getTime()); + expect(readMessage.updatedAt.getTime()).toBeGreaterThanOrEqual(message.updatedAt.getTime()); // Wait again await new Promise((resolve) => setTimeout(resolve, 10)); @@ -408,7 +401,7 @@ describe('DrizzleMessageRepository', () => { status: 'responded', }); expect(respondedMessage.status).toBe('responded'); - expect(respondedMessage.updatedAt.getTime()).toBeGreaterThan( + expect(respondedMessage.updatedAt.getTime()).toBeGreaterThanOrEqual( readMessage.updatedAt.getTime() ); }); @@ -436,7 +429,7 @@ describe('DrizzleMessageRepository', () => { }); expect(updated.content).toBe('Updated content'); - 
expect(updated.updatedAt.getTime()).toBeGreaterThan(created.updatedAt.getTime()); + expect(updated.updatedAt.getTime()).toBeGreaterThanOrEqual(created.updatedAt.getTime()); }); }); diff --git a/src/db/repositories/drizzle/message.ts b/src/db/repositories/drizzle/message.ts index 8d0653f..bcf2781 100644 --- a/src/db/repositories/drizzle/message.ts +++ b/src/db/repositories/drizzle/message.ts @@ -28,7 +28,7 @@ export class DrizzleMessageRepository implements MessageRepository { const id = nanoid(); const now = new Date(); - await this.db.insert(messages).values({ + const [created] = await this.db.insert(messages).values({ id, senderType: data.senderType, senderId: data.senderId ?? null, @@ -41,11 +41,9 @@ export class DrizzleMessageRepository implements MessageRepository { parentMessageId: data.parentMessageId ?? null, createdAt: now, updatedAt: now, - }); + }).returning(); - // Fetch to get the complete record with all defaults applied - const created = await this.findById(id); - return created!; + return created; } async findById(id: string): Promise<Message | null> { @@ -117,27 +115,24 @@ export class DrizzleMessageRepository implements MessageRepository { } async update(id: string, data: UpdateMessageData): Promise<Message> { - const existing = await this.findById(id); - if (!existing) { + const [updated] = await this.db + .update(messages) + .set({ ...data, updatedAt: new Date() }) + .where(eq(messages.id, id)) + .returning(); + + if (!updated) { throw new Error(`Message not found: ${id}`); } - const updated = { - ...data, - updatedAt: new Date(), - }; - - await this.db.update(messages).set(updated).where(eq(messages.id, id)); - - return { ...existing, ...updated } as Message; + return updated; } async delete(id: string): Promise<void> { - const existing = await this.findById(id); - if (!existing) { + const [deleted] = await this.db.delete(messages).where(eq(messages.id, id)).returning(); + + if (!deleted) { throw new Error(`Message not found: ${id}`); } - - 
await this.db.delete(messages).where(eq(messages.id, id)); } } diff --git a/src/db/repositories/drizzle/page.ts b/src/db/repositories/drizzle/page.ts new file mode 100644 index 0000000..0eeb95d --- /dev/null +++ b/src/db/repositories/drizzle/page.ts @@ -0,0 +1,109 @@ +/** + * Drizzle Page Repository Adapter + * + * Implements PageRepository interface using Drizzle ORM. + */ + +import { eq, isNull, and, asc } from 'drizzle-orm'; +import { nanoid } from 'nanoid'; +import type { DrizzleDatabase } from '../../index.js'; +import { pages, type Page } from '../../schema.js'; +import type { + PageRepository, + CreatePageData, + UpdatePageData, +} from '../page-repository.js'; + +export class DrizzlePageRepository implements PageRepository { + constructor(private db: DrizzleDatabase) {} + + async create(data: CreatePageData): Promise<Page> { + const id = nanoid(); + const now = new Date(); + + const [created] = await this.db.insert(pages).values({ + id, + ...data, + createdAt: now, + updatedAt: now, + }).returning(); + + return created; + } + + async findById(id: string): Promise<Page | null> { + const result = await this.db + .select() + .from(pages) + .where(eq(pages.id, id)) + .limit(1); + + return result[0] ?? null; + } + + async findByInitiativeId(initiativeId: string): Promise<Page[]> { + return this.db + .select() + .from(pages) + .where(eq(pages.initiativeId, initiativeId)) + .orderBy(asc(pages.sortOrder)); + } + + async findByParentPageId(parentPageId: string): Promise<Page[]> { + return this.db + .select() + .from(pages) + .where(eq(pages.parentPageId, parentPageId)) + .orderBy(asc(pages.sortOrder)); + } + + async findRootPage(initiativeId: string): Promise<Page | null> { + const result = await this.db + .select() + .from(pages) + .where( + and( + eq(pages.initiativeId, initiativeId), + isNull(pages.parentPageId), + ), + ) + .limit(1); + + return result[0] ?? 
null; + } + + async getOrCreateRootPage(initiativeId: string): Promise<Page> { + const existing = await this.findRootPage(initiativeId); + if (existing) return existing; + + return this.create({ + initiativeId, + parentPageId: null, + title: 'Untitled', + content: null, + sortOrder: 0, + }); + } + + async update(id: string, data: UpdatePageData): Promise<Page> { + const [updated] = await this.db + .update(pages) + .set({ ...data, updatedAt: new Date() }) + .where(eq(pages.id, id)) + .returning(); + + if (!updated) { + throw new Error(`Page not found: ${id}`); + } + + return updated; + } + + async delete(id: string): Promise<void> { + const [deleted] = await this.db.delete(pages).where(eq(pages.id, id)).returning(); + + if (!deleted) { + throw new Error(`Page not found: ${id}`); + } + } +} diff --git a/src/db/repositories/drizzle/phase.test.ts b/src/db/repositories/drizzle/phase.test.ts index 726bb06..07c8f2b 100644 --- a/src/db/repositories/drizzle/phase.test.ts +++ b/src/db/repositories/drizzle/phase.test.ts @@ -138,7 +138,7 @@ describe('DrizzlePhaseRepository', () => { expect(updated.name).toBe('Updated Name'); expect(updated.status).toBe('in_progress'); - expect(updated.updatedAt.getTime()).toBeGreaterThan(created.updatedAt.getTime()); + expect(updated.updatedAt.getTime()).toBeGreaterThanOrEqual(created.updatedAt.getTime()); }); it('should throw for non-existent phase', async () => { diff --git a/src/db/repositories/drizzle/phase.ts b/src/db/repositories/drizzle/phase.ts index abfb869..4a6adfa 100644 --- a/src/db/repositories/drizzle/phase.ts +++ b/src/db/repositories/drizzle/phase.ts @@ -27,17 +27,15 @@ export class DrizzlePhaseRepository implements PhaseRepository { const id = nanoid(); const now = new Date(); - await this.db.insert(phases).values({ + const [created] = await this.db.insert(phases).values({ id, ...data, status: data.status ?? 
'pending', createdAt: now, updatedAt: now, - }); + }).returning(); - // Fetch to get the complete record with all defaults applied - const created = await this.findById(id); - return created!; + return created; } async findById(id: string): Promise<Phase | null> { @@ -79,28 +77,25 @@ export class DrizzlePhaseRepository implements PhaseRepository { } async update(id: string, data: UpdatePhaseData): Promise<Phase> { - const existing = await this.findById(id); - if (!existing) { + const [updated] = await this.db + .update(phases) + .set({ ...data, updatedAt: new Date() }) + .where(eq(phases.id, id)) + .returning(); + + if (!updated) { throw new Error(`Phase not found: ${id}`); } - const updated = { - ...data, - updatedAt: new Date(), - }; - - await this.db.update(phases).set(updated).where(eq(phases.id, id)); - - return { ...existing, ...updated } as Phase; + return updated; } async delete(id: string): Promise<void> { - const existing = await this.findById(id); - if (!existing) { + const [deleted] = await this.db.delete(phases).where(eq(phases.id, id)).returning(); + + if (!deleted) { throw new Error(`Phase not found: ${id}`); } - - await this.db.delete(phases).where(eq(phases.id, id)); } async createDependency(phaseId: string, dependsOnPhaseId: string): Promise<void> { diff --git a/src/db/repositories/drizzle/plan.test.ts b/src/db/repositories/drizzle/plan.test.ts deleted file mode 100644 index b8b795b..0000000 --- a/src/db/repositories/drizzle/plan.test.ts +++ /dev/null @@ -1,204 +0,0 @@ -/** - * DrizzlePlanRepository Tests - * - * Tests for the Plan repository adapter. 
- */ - -import { describe, it, expect, beforeEach } from 'vitest'; -import { DrizzlePlanRepository } from './plan.js'; -import { DrizzlePhaseRepository } from './phase.js'; -import { DrizzleInitiativeRepository } from './initiative.js'; -import { createTestDatabase } from './test-helpers.js'; -import type { DrizzleDatabase } from '../../index.js'; - -describe('DrizzlePlanRepository', () => { - let db: DrizzleDatabase; - let planRepo: DrizzlePlanRepository; - let phaseRepo: DrizzlePhaseRepository; - let initiativeRepo: DrizzleInitiativeRepository; - let testPhaseId: string; - - beforeEach(async () => { - db = createTestDatabase(); - planRepo = new DrizzlePlanRepository(db); - phaseRepo = new DrizzlePhaseRepository(db); - initiativeRepo = new DrizzleInitiativeRepository(db); - - // Create test initiative and phase for FK constraint - const initiative = await initiativeRepo.create({ - name: 'Test Initiative', - }); - const phase = await phaseRepo.create({ - initiativeId: initiative.id, - number: 1, - name: 'Test Phase', - }); - testPhaseId = phase.id; - }); - - describe('create', () => { - it('should create a plan with generated id and timestamps', async () => { - const plan = await planRepo.create({ - phaseId: testPhaseId, - number: 1, - name: 'Test Plan', - description: 'A test plan', - }); - - expect(plan.id).toBeDefined(); - expect(plan.id.length).toBeGreaterThan(0); - expect(plan.phaseId).toBe(testPhaseId); - expect(plan.number).toBe(1); - expect(plan.name).toBe('Test Plan'); - expect(plan.status).toBe('pending'); - expect(plan.createdAt).toBeInstanceOf(Date); - expect(plan.updatedAt).toBeInstanceOf(Date); - }); - - it('should throw for invalid phaseId (FK constraint)', async () => { - await expect( - planRepo.create({ - phaseId: 'invalid-phase-id', - number: 1, - name: 'Invalid Plan', - }) - ).rejects.toThrow(); - }); - }); - - describe('findById', () => { - it('should return null for non-existent plan', async () => { - const result = await 
planRepo.findById('non-existent-id'); - expect(result).toBeNull(); - }); - - it('should find an existing plan', async () => { - const created = await planRepo.create({ - phaseId: testPhaseId, - number: 1, - name: 'Find Me', - }); - - const found = await planRepo.findById(created.id); - expect(found).not.toBeNull(); - expect(found!.id).toBe(created.id); - expect(found!.name).toBe('Find Me'); - }); - }); - - describe('findByPhaseId', () => { - it('should return empty array for phase with no plans', async () => { - const plans = await planRepo.findByPhaseId(testPhaseId); - expect(plans).toEqual([]); - }); - - it('should return only matching plans ordered by number', async () => { - // Create plans out of order - await planRepo.create({ - phaseId: testPhaseId, - number: 3, - name: 'Plan 3', - }); - await planRepo.create({ - phaseId: testPhaseId, - number: 1, - name: 'Plan 1', - }); - await planRepo.create({ - phaseId: testPhaseId, - number: 2, - name: 'Plan 2', - }); - - const plans = await planRepo.findByPhaseId(testPhaseId); - expect(plans.length).toBe(3); - expect(plans[0].name).toBe('Plan 1'); - expect(plans[1].name).toBe('Plan 2'); - expect(plans[2].name).toBe('Plan 3'); - }); - }); - - describe('getNextNumber', () => { - it('should return 1 for phase with no plans', async () => { - const nextNumber = await planRepo.getNextNumber(testPhaseId); - expect(nextNumber).toBe(1); - }); - - it('should return max + 1 for phase with plans', async () => { - await planRepo.create({ phaseId: testPhaseId, number: 1, name: 'Plan 1' }); - await planRepo.create({ phaseId: testPhaseId, number: 5, name: 'Plan 5' }); - - const nextNumber = await planRepo.getNextNumber(testPhaseId); - expect(nextNumber).toBe(6); - }); - - it('should not be affected by plans in other phases', async () => { - // Create another phase - const initiative = await initiativeRepo.create({ name: 'Another Initiative' }); - const otherPhase = await phaseRepo.create({ - initiativeId: initiative.id, - number: 2, - 
name: 'Other Phase', - }); - - // Add plans to other phase - await planRepo.create({ phaseId: otherPhase.id, number: 10, name: 'High Plan' }); - - // Add a plan to test phase - await planRepo.create({ phaseId: testPhaseId, number: 3, name: 'Plan 3' }); - - // Next number for test phase should be 4, not 11 - const nextNumber = await planRepo.getNextNumber(testPhaseId); - expect(nextNumber).toBe(4); - }); - }); - - describe('update', () => { - it('should update fields and updatedAt', async () => { - const created = await planRepo.create({ - phaseId: testPhaseId, - number: 1, - name: 'Original Name', - status: 'pending', - }); - - await new Promise((resolve) => setTimeout(resolve, 10)); - - const updated = await planRepo.update(created.id, { - name: 'Updated Name', - status: 'in_progress', - }); - - expect(updated.name).toBe('Updated Name'); - expect(updated.status).toBe('in_progress'); - expect(updated.updatedAt.getTime()).toBeGreaterThan(created.updatedAt.getTime()); - }); - - it('should throw for non-existent plan', async () => { - await expect( - planRepo.update('non-existent-id', { name: 'New Name' }) - ).rejects.toThrow('Plan not found'); - }); - }); - - describe('delete', () => { - it('should delete an existing plan', async () => { - const created = await planRepo.create({ - phaseId: testPhaseId, - number: 1, - name: 'To Delete', - }); - - await planRepo.delete(created.id); - - const found = await planRepo.findById(created.id); - expect(found).toBeNull(); - }); - - it('should throw for non-existent plan', async () => { - await expect(planRepo.delete('non-existent-id')).rejects.toThrow( - 'Plan not found' - ); - }); - }); -}); diff --git a/src/db/repositories/drizzle/plan.ts b/src/db/repositories/drizzle/plan.ts deleted file mode 100644 index dad4248..0000000 --- a/src/db/repositories/drizzle/plan.ts +++ /dev/null @@ -1,95 +0,0 @@ -/** - * Drizzle Plan Repository Adapter - * - * Implements PlanRepository interface using Drizzle ORM. 
- */ - -import { eq, asc, max } from 'drizzle-orm'; -import { nanoid } from 'nanoid'; -import type { DrizzleDatabase } from '../../index.js'; -import { plans, type Plan } from '../../schema.js'; -import type { - PlanRepository, - CreatePlanData, - UpdatePlanData, -} from '../plan-repository.js'; - -/** - * Drizzle adapter for PlanRepository. - * - * Uses dependency injection for database instance, - * enabling isolated test databases. - */ -export class DrizzlePlanRepository implements PlanRepository { - constructor(private db: DrizzleDatabase) {} - - async create(data: CreatePlanData): Promise<Plan> { - const id = nanoid(); - const now = new Date(); - - await this.db.insert(plans).values({ - id, - ...data, - status: data.status ?? 'pending', - createdAt: now, - updatedAt: now, - }); - - // Fetch to get the complete record with all defaults applied - const created = await this.findById(id); - return created!; - } - - async findById(id: string): Promise<Plan | null> { - const result = await this.db - .select() - .from(plans) - .where(eq(plans.id, id)) - .limit(1); - - return result[0] ?? null; - } - - async findByPhaseId(phaseId: string): Promise<Plan[]> { - return this.db - .select() - .from(plans) - .where(eq(plans.phaseId, phaseId)) - .orderBy(asc(plans.number)); - } - - async getNextNumber(phaseId: string): Promise<number> { - const result = await this.db - .select({ maxNumber: max(plans.number) }) - .from(plans) - .where(eq(plans.phaseId, phaseId)); - - const maxNumber = result[0]?.maxNumber ?? 
0; - return maxNumber + 1; - } - - async update(id: string, data: UpdatePlanData): Promise<Plan> { - const existing = await this.findById(id); - if (!existing) { - throw new Error(`Plan not found: ${id}`); - } - - const updated = { - ...data, - updatedAt: new Date(), - }; - - await this.db.update(plans).set(updated).where(eq(plans.id, id)); - - return { ...existing, ...updated } as Plan; - } - - async delete(id: string): Promise<void> { - const existing = await this.findById(id); - if (!existing) { - throw new Error(`Plan not found: ${id}`); - } - - await this.db.delete(plans).where(eq(plans.id, id)); - } -} diff --git a/src/db/repositories/drizzle/project.ts b/src/db/repositories/drizzle/project.ts new file mode 100644 index 0000000..ec15fd2 --- /dev/null +++ b/src/db/repositories/drizzle/project.ts @@ -0,0 +1,154 @@ +/** + * Drizzle Project Repository Adapter + * + * Implements ProjectRepository interface using Drizzle ORM. + */ + +import { eq, and, inArray } from 'drizzle-orm'; +import { nanoid } from 'nanoid'; +import type { DrizzleDatabase } from '../../index.js'; +import { projects, initiativeProjects, type Project } from '../../schema.js'; +import type { + ProjectRepository, + CreateProjectData, + UpdateProjectData, +} from '../project-repository.js'; + +export class DrizzleProjectRepository implements ProjectRepository { + constructor(private db: DrizzleDatabase) {} + + async create(data: CreateProjectData): Promise<Project> { + const id = nanoid(); + const now = new Date(); + + const [created] = await this.db.insert(projects).values({ + id, + ...data, + createdAt: now, + updatedAt: now, + }).returning(); + + return created; + } + + async findById(id: string): Promise<Project | null> { + const result = await this.db + .select() + .from(projects) + .where(eq(projects.id, id)) + .limit(1); + + return result[0] ?? 
null; + } + + async findByName(name: string): Promise<Project | null> { + const result = await this.db + .select() + .from(projects) + .where(eq(projects.name, name)) + .limit(1); + + return result[0] ?? null; + } + + async findAll(): Promise<Project[]> { + return this.db.select().from(projects); + } + + async update(id: string, data: UpdateProjectData): Promise<Project> { + const [updated] = await this.db + .update(projects) + .set({ ...data, updatedAt: new Date() }) + .where(eq(projects.id, id)) + .returning(); + + if (!updated) { + throw new Error(`Project not found: ${id}`); + } + + return updated; + } + + async delete(id: string): Promise<void> { + const [deleted] = await this.db.delete(projects).where(eq(projects.id, id)).returning(); + + if (!deleted) { + throw new Error(`Project not found: ${id}`); + } + } + + // Junction ops + + async addProjectToInitiative(initiativeId: string, projectId: string): Promise<void> { + const id = nanoid(); + const now = new Date(); + + await this.db.insert(initiativeProjects).values({ + id, + initiativeId, + projectId, + createdAt: now, + }); + } + + async removeProjectFromInitiative(initiativeId: string, projectId: string): Promise<void> { + await this.db + .delete(initiativeProjects) + .where( + and( + eq(initiativeProjects.initiativeId, initiativeId), + eq(initiativeProjects.projectId, projectId), + ), + ); + } + + async findProjectsByInitiativeId(initiativeId: string): Promise<Project[]> { + const rows = await this.db + .select({ project: projects }) + .from(initiativeProjects) + .innerJoin(projects, eq(initiativeProjects.projectId, projects.id)) + .where(eq(initiativeProjects.initiativeId, initiativeId)); + + return rows.map((r) => r.project); + } + + async setInitiativeProjects(initiativeId: string, projectIds: string[]): Promise<void> { + // Get current associations + const currentRows = await this.db + .select({ projectId: initiativeProjects.projectId }) + .from(initiativeProjects) + 
.where(eq(initiativeProjects.initiativeId, initiativeId)); + + const currentIds = new Set(currentRows.map((r) => r.projectId)); + const desiredIds = new Set(projectIds); + + // Compute diff + const toRemove = [...currentIds].filter((id) => !desiredIds.has(id)); + const toAdd = [...desiredIds].filter((id) => !currentIds.has(id)); + + // Remove + if (toRemove.length > 0) { + await this.db + .delete(initiativeProjects) + .where( + and( + eq(initiativeProjects.initiativeId, initiativeId), + inArray(initiativeProjects.projectId, toRemove), + ), + ); + } + + // Add + if (toAdd.length > 0) { + const now = new Date(); + await this.db.insert(initiativeProjects).values( + toAdd.map((projectId) => ({ + id: nanoid(), + initiativeId, + projectId, + createdAt: now, + })), + ); + } + } +} diff --git a/src/db/repositories/drizzle/task.test.ts b/src/db/repositories/drizzle/task.test.ts index 91c42b3..e3730f6 100644 --- a/src/db/repositories/drizzle/task.test.ts +++ b/src/db/repositories/drizzle/task.test.ts @@ -6,7 +6,6 @@ import { describe, it, expect, beforeEach } from 'vitest'; import { DrizzleTaskRepository } from './task.js'; -import { DrizzlePlanRepository } from './plan.js'; import { DrizzlePhaseRepository } from './phase.js'; import { DrizzleInitiativeRepository } from './initiative.js'; import { createTestDatabase } from './test-helpers.js'; @@ -15,15 +14,15 @@ import type { DrizzleDatabase } from '../../index.js'; describe('DrizzleTaskRepository', () => { let db: DrizzleDatabase; let taskRepo: DrizzleTaskRepository; - let planRepo: DrizzlePlanRepository; + let phaseRepo: DrizzlePhaseRepository; let initiativeRepo: DrizzleInitiativeRepository; - let testPlanId: string; + let testPhaseId: string; beforeEach(async () => { db = createTestDatabase(); taskRepo = new DrizzleTaskRepository(db); - planRepo = new DrizzlePlanRepository(db); + phaseRepo = new DrizzlePhaseRepository(db); initiativeRepo = new DrizzleInitiativeRepository(db); @@ -36,18 +35,13 @@ 
describe('DrizzleTaskRepository', () => { number: 1, name: 'Test Phase', }); - const plan = await planRepo.create({ - phaseId: phase.id, - number: 1, - name: 'Test Plan', - }); - testPlanId = plan.id; + testPhaseId = phase.id; }); describe('create', () => { it('should create a task with generated id and timestamps', async () => { const task = await taskRepo.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'Test Task', description: 'A test task', order: 1, @@ -55,7 +49,7 @@ describe('DrizzleTaskRepository', () => { expect(task.id).toBeDefined(); expect(task.id.length).toBeGreaterThan(0); - expect(task.planId).toBe(testPlanId); + expect(task.phaseId).toBe(testPhaseId); expect(task.name).toBe('Test Task'); expect(task.type).toBe('auto'); expect(task.priority).toBe('medium'); @@ -65,10 +59,10 @@ describe('DrizzleTaskRepository', () => { expect(task.updatedAt).toBeInstanceOf(Date); }); - it('should throw for invalid planId (FK constraint)', async () => { + it('should throw for invalid phaseId (FK constraint)', async () => { await expect( taskRepo.create({ - planId: 'invalid-plan-id', + phaseId: 'invalid-phase-id', name: 'Invalid Task', order: 1, }) @@ -77,7 +71,7 @@ describe('DrizzleTaskRepository', () => { it('should accept custom type and priority', async () => { const task = await taskRepo.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'Checkpoint Task', type: 'checkpoint:human-verify', priority: 'high', @@ -97,7 +91,7 @@ describe('DrizzleTaskRepository', () => { it('should find an existing task', async () => { const created = await taskRepo.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'Find Me', order: 1, }); @@ -109,31 +103,31 @@ describe('DrizzleTaskRepository', () => { }); }); - describe('findByPlanId', () => { - it('should return empty array for plan with no tasks', async () => { - const tasks = await taskRepo.findByPlanId(testPlanId); + describe('findByPhaseId', () => { + it('should return empty array for phase with no 
tasks', async () => { + const tasks = await taskRepo.findByPhaseId(testPhaseId); expect(tasks).toEqual([]); }); it('should return only matching tasks ordered by order field', async () => { // Create tasks out of order await taskRepo.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'Task 3', order: 3, }); await taskRepo.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'Task 1', order: 1, }); await taskRepo.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'Task 2', order: 2, }); - const tasks = await taskRepo.findByPlanId(testPlanId); + const tasks = await taskRepo.findByPhaseId(testPhaseId); expect(tasks.length).toBe(3); expect(tasks[0].name).toBe('Task 1'); expect(tasks[1].name).toBe('Task 2'); @@ -144,7 +138,7 @@ describe('DrizzleTaskRepository', () => { describe('update', () => { it('should update status correctly', async () => { const created = await taskRepo.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'Status Test', status: 'pending', order: 1, @@ -159,7 +153,7 @@ describe('DrizzleTaskRepository', () => { it('should update fields and updatedAt', async () => { const created = await taskRepo.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'Original Name', order: 1, }); @@ -173,7 +167,7 @@ describe('DrizzleTaskRepository', () => { expect(updated.name).toBe('Updated Name'); expect(updated.priority).toBe('low'); - expect(updated.updatedAt.getTime()).toBeGreaterThan(created.updatedAt.getTime()); + expect(updated.updatedAt.getTime()).toBeGreaterThanOrEqual(created.updatedAt.getTime()); }); it('should throw for non-existent task', async () => { @@ -186,7 +180,7 @@ describe('DrizzleTaskRepository', () => { describe('delete', () => { it('should delete an existing task', async () => { const created = await taskRepo.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'To Delete', order: 1, }); diff --git a/src/db/repositories/drizzle/task.ts b/src/db/repositories/drizzle/task.ts index 
f3f5e4c..d17286c 100644 --- a/src/db/repositories/drizzle/task.ts +++ b/src/db/repositories/drizzle/task.ts @@ -4,7 +4,7 @@ * Implements TaskRepository interface using Drizzle ORM. */ -import { eq, asc } from 'drizzle-orm'; +import { eq, asc, and } from 'drizzle-orm'; import { nanoid } from 'nanoid'; import type { DrizzleDatabase } from '../../index.js'; import { tasks, taskDependencies, type Task } from '../../schema.js'; @@ -12,6 +12,7 @@ import type { TaskRepository, CreateTaskData, UpdateTaskData, + PendingApprovalFilters, } from '../task-repository.js'; /** @@ -27,20 +28,19 @@ export class DrizzleTaskRepository implements TaskRepository { const id = nanoid(); const now = new Date(); - await this.db.insert(tasks).values({ + const [created] = await this.db.insert(tasks).values({ id, ...data, type: data.type ?? 'auto', + category: data.category ?? 'execute', priority: data.priority ?? 'medium', status: data.status ?? 'pending', order: data.order ?? 0, createdAt: now, updatedAt: now, - }); + }).returning(); - // Fetch to get the complete record with all defaults applied - const created = await this.findById(id); - return created!; + return created; } async findById(id: string): Promise<Task | null> { @@ -53,37 +53,70 @@ export class DrizzleTaskRepository implements TaskRepository { return result[0] ?? 
null; } - async findByPlanId(planId: string): Promise<Task[]> { + async findByParentTaskId(parentTaskId: string): Promise<Task[]> { return this.db .select() .from(tasks) - .where(eq(tasks.planId, planId)) + .where(eq(tasks.parentTaskId, parentTaskId)) .orderBy(asc(tasks.order)); } + async findByInitiativeId(initiativeId: string): Promise<Task[]> { + return this.db + .select() + .from(tasks) + .where(eq(tasks.initiativeId, initiativeId)) + .orderBy(asc(tasks.order)); + } + + async findByPhaseId(phaseId: string): Promise<Task[]> { + return this.db + .select() + .from(tasks) + .where(eq(tasks.phaseId, phaseId)) + .orderBy(asc(tasks.order)); + } + + async findPendingApproval(filters?: PendingApprovalFilters): Promise<Task[]> { + const conditions = [eq(tasks.status, 'pending_approval')]; + + if (filters?.initiativeId) { + conditions.push(eq(tasks.initiativeId, filters.initiativeId)); + } + if (filters?.phaseId) { + conditions.push(eq(tasks.phaseId, filters.phaseId)); + } + if (filters?.category) { + conditions.push(eq(tasks.category, filters.category)); + } + + return this.db + .select() + .from(tasks) + .where(and(...conditions)) + .orderBy(asc(tasks.createdAt)); + } + async update(id: string, data: UpdateTaskData): Promise<Task> { - const existing = await this.findById(id); - if (!existing) { + const [updated] = await this.db + .update(tasks) + .set({ ...data, updatedAt: new Date() }) + .where(eq(tasks.id, id)) + .returning(); + + if (!updated) { throw new Error(`Task not found: ${id}`); } - const updated = { - ...data, - updatedAt: new Date(), - }; - - await this.db.update(tasks).set(updated).where(eq(tasks.id, id)); - - return { ...existing, ...updated } as Task; + return updated; } async delete(id: string): Promise<void> { - const existing = await this.findById(id); - if (!existing) { + const [deleted] = await this.db.delete(tasks).where(eq(tasks.id, id)).returning(); + + if (!deleted) { throw new Error(`Task not found: ${id}`); } - - await 
this.db.delete(tasks).where(eq(tasks.id, id)); } async createDependency(taskId: string, dependsOnTaskId: string): Promise<void> { @@ -97,4 +130,13 @@ export class DrizzleTaskRepository implements TaskRepository { createdAt: now, }); } + + async getDependencies(taskId: string): Promise<string[]> { + const deps = await this.db + .select({ dependsOnTaskId: taskDependencies.dependsOnTaskId }) + .from(taskDependencies) + .where(eq(taskDependencies.taskId, taskId)); + + return deps.map((d) => d.dependsOnTaskId); + } } diff --git a/src/db/repositories/index.ts b/src/db/repositories/index.ts index 17aebb2..00c4aa9 100644 --- a/src/db/repositories/index.ts +++ b/src/db/repositories/index.ts @@ -18,16 +18,11 @@ export type { UpdatePhaseData, } from './phase-repository.js'; -export type { - PlanRepository, - CreatePlanData, - UpdatePlanData, -} from './plan-repository.js'; - export type { TaskRepository, CreateTaskData, UpdateTaskData, + PendingApprovalFilters, } from './task-repository.js'; export type { @@ -44,3 +39,20 @@ export type { CreateMessageData, UpdateMessageData, } from './message-repository.js'; + +export type { + PageRepository, + CreatePageData, + UpdatePageData, +} from './page-repository.js'; + +export type { + ProjectRepository, + CreateProjectData, + UpdateProjectData, +} from './project-repository.js'; + +export type { + AccountRepository, + CreateAccountData, +} from './account-repository.js'; diff --git a/src/db/repositories/page-repository.ts b/src/db/repositories/page-repository.ts new file mode 100644 index 0000000..80f9064 --- /dev/null +++ b/src/db/repositories/page-repository.ts @@ -0,0 +1,33 @@ +/** + * Page Repository Port Interface + * + * Port for Page aggregate operations. + * Implementations (Drizzle, etc.) are adapters. + */ + +import type { Page, NewPage } from '../schema.js'; + +/** + * Data for creating a new page. + * Omits system-managed fields (id, createdAt, updatedAt). 
+ */ +export type CreatePageData = Omit<NewPage, 'id' | 'createdAt' | 'updatedAt'>; + +/** + * Data for updating a page. + */ +export type UpdatePageData = Partial<Pick<NewPage, 'title' | 'content' | 'sortOrder'>>; + +/** + * Page Repository Port + */ +export interface PageRepository { + create(data: CreatePageData): Promise<Page>; + findById(id: string): Promise<Page | null>; + findByInitiativeId(initiativeId: string): Promise<Page[]>; + findByParentPageId(parentPageId: string): Promise<Page[]>; + findRootPage(initiativeId: string): Promise<Page | null>; + getOrCreateRootPage(initiativeId: string): Promise<Page>; + update(id: string, data: UpdatePageData): Promise<Page>; + delete(id: string): Promise<void>; +} diff --git a/src/db/repositories/plan-repository.ts b/src/db/repositories/plan-repository.ts deleted file mode 100644 index ea4e097..0000000 --- a/src/db/repositories/plan-repository.ts +++ /dev/null @@ -1,68 +0,0 @@ -/** - * Plan Repository Port Interface - * - * Port for Plan aggregate operations. - * Implementations (Drizzle, etc.) are adapters. - */ - -import type { Plan, NewPlan } from '../schema.js'; - -/** - * Data for creating a new plan. - * Omits system-managed fields (id, createdAt, updatedAt). - */ -export type CreatePlanData = Omit<NewPlan, 'id' | 'createdAt' | 'updatedAt'>; - -/** - * Data for updating a plan. - * Partial of creation data - all fields optional. - */ -export type UpdatePlanData = Partial<CreatePlanData>; - -/** - * Plan Repository Port - * - * Defines operations for the Plan aggregate. - * Only knows about plans - no knowledge of parent or child entities. - */ -export interface PlanRepository { - /** - * Create a new plan. - * Generates id and sets timestamps automatically. - * Foreign key to phase enforced by database. - */ - create(data: CreatePlanData): Promise<Plan>; - - /** - * Find a plan by its ID. - * Returns null if not found. - */ - findById(id: string): Promise<Plan | null>; - - /** - * Find all plans for a phase. 
- * Returns plans ordered by number. - * Returns empty array if none exist. - */ - findByPhaseId(phaseId: string): Promise<Plan[]>; - - /** - * Get the next available plan number for a phase. - * Returns MAX(number) + 1, or 1 if no plans exist. - */ - getNextNumber(phaseId: string): Promise<number>; - - /** - * Update a plan. - * Throws if plan not found. - * Updates updatedAt timestamp automatically. - */ - update(id: string, data: UpdatePlanData): Promise<Plan>; - - /** - * Delete a plan. - * Throws if plan not found. - * Cascades to child tasks via FK constraints. - */ - delete(id: string): Promise<void>; -} diff --git a/src/db/repositories/project-repository.ts b/src/db/repositories/project-repository.ts new file mode 100644 index 0000000..54f7452 --- /dev/null +++ b/src/db/repositories/project-repository.ts @@ -0,0 +1,38 @@ +/** + * Project Repository Port Interface + * + * Port for Project aggregate operations and initiative-project junction. + * Implementations (Drizzle, etc.) are adapters. + */ + +import type { Project, NewProject } from '../schema.js'; + +/** + * Data for creating a new project. + * Omits system-managed fields (id, createdAt, updatedAt). + */ +export type CreateProjectData = Omit<NewProject, 'id' | 'createdAt' | 'updatedAt'>; + +/** + * Data for updating a project. + * Name is immutable (used as directory name for worktrees). 
+ */ +export type UpdateProjectData = Omit<Partial<CreateProjectData>, 'name'>; + +/** + * Project Repository Port + */ +export interface ProjectRepository { + create(data: CreateProjectData): Promise<Project>; + findById(id: string): Promise<Project | null>; + findByName(name: string): Promise<Project | null>; + findAll(): Promise<Project[]>; + update(id: string, data: UpdateProjectData): Promise<Project>; + delete(id: string): Promise<void>; + + // Junction ops + addProjectToInitiative(initiativeId: string, projectId: string): Promise<void>; + removeProjectFromInitiative(initiativeId: string, projectId: string): Promise<void>; + findProjectsByInitiativeId(initiativeId: string): Promise<Project[]>; + setInitiativeProjects(initiativeId: string, projectIds: string[]): Promise<void>; +} diff --git a/src/db/repositories/task-repository.ts b/src/db/repositories/task-repository.ts index f1898f8..8ea97fa 100644 --- a/src/db/repositories/task-repository.ts +++ b/src/db/repositories/task-repository.ts @@ -5,11 +5,12 @@ * Implementations (Drizzle, etc.) are adapters. */ -import type { Task, NewTask } from '../schema.js'; +import type { Task, NewTask, TaskCategory } from '../schema.js'; /** * Data for creating a new task. * Omits system-managed fields (id, createdAt, updatedAt). + * At least one of phaseId, initiativeId, or parentTaskId should be provided. */ export type CreateTaskData = Omit<NewTask, 'id' | 'createdAt' | 'updatedAt'>; @@ -19,6 +20,15 @@ export type CreateTaskData = Omit<NewTask, 'id' | 'createdAt' | 'updatedAt'>; */ export type UpdateTaskData = Partial<CreateTaskData>; +/** + * Filters for finding pending approval tasks. + */ +export interface PendingApprovalFilters { + initiativeId?: string; + phaseId?: string; + category?: TaskCategory; +} + /** * Task Repository Port * @@ -29,7 +39,7 @@ export interface TaskRepository { /** * Create a new task. * Generates id and sets timestamps automatically. - * Foreign key to plan enforced by database. 
+ * At least one parent context (phaseId, initiativeId, or parentTaskId) should be set. */ create(data: CreateTaskData): Promise<Task>; @@ -40,11 +50,32 @@ export interface TaskRepository { findById(id: string): Promise<Task | null>; /** - * Find all tasks for a plan. + * Find all child tasks of a parent task. * Returns tasks ordered by order field. * Returns empty array if none exist. */ - findByPlanId(planId: string): Promise<Task[]>; + findByParentTaskId(parentTaskId: string): Promise<Task[]>; + + /** + * Find all tasks directly linked to an initiative. + * Returns tasks ordered by order field. + * Returns empty array if none exist. + */ + findByInitiativeId(initiativeId: string): Promise<Task[]>; + + /** + * Find all tasks directly linked to a phase. + * Returns tasks ordered by order field. + * Returns empty array if none exist. + */ + findByPhaseId(phaseId: string): Promise<Task[]>; + + /** + * Find all tasks with status 'pending_approval'. + * Optional filters by initiative, phase, or category. + * Returns tasks ordered by createdAt. + */ + findPendingApproval(filters?: PendingApprovalFilters): Promise<Task[]>; /** * Update a task. @@ -65,4 +96,10 @@ export interface TaskRepository { * Both tasks must exist. */ createDependency(taskId: string, dependsOnTaskId: string): Promise<void>; + + /** + * Get all task IDs that a task depends on. + * Returns empty array if no dependencies. + */ + getDependencies(taskId: string): Promise<string[]>; } diff --git a/src/db/schema.ts b/src/db/schema.ts index 65dfd75..c365d3f 100644 --- a/src/db/schema.ts +++ b/src/db/schema.ts @@ -1,16 +1,15 @@ /** * Database schema for Codewalk District. 
* - * Defines the four-level task hierarchy: + * Defines the three-level task hierarchy: * - Initiative: Top-level project * - Phase: Major milestone within initiative - * - Plan: Group of related tasks within phase - * - Task: Individual work item + * - Task: Individual work item (can have parentTaskId for decomposition relationships) * * Plus a task_dependencies table for task dependency relationships. */ -import { sqliteTable, text, integer } from 'drizzle-orm/sqlite-core'; +import { sqliteTable, text, integer, uniqueIndex } from 'drizzle-orm/sqlite-core'; import { relations, type InferInsertModel, type InferSelectModel } from 'drizzle-orm'; // ============================================================================ @@ -20,16 +19,22 @@ import { relations, type InferInsertModel, type InferSelectModel } from 'drizzle export const initiatives = sqliteTable('initiatives', { id: text('id').primaryKey(), name: text('name').notNull(), - description: text('description'), status: text('status', { enum: ['active', 'completed', 'archived'] }) .notNull() .default('active'), + mergeRequiresApproval: integer('merge_requires_approval', { mode: 'boolean' }) + .notNull() + .default(true), + mergeTarget: text('merge_target'), // Target branch for merges (e.g., 'feature/xyz') createdAt: integer('created_at', { mode: 'timestamp' }).notNull(), updatedAt: integer('updated_at', { mode: 'timestamp' }).notNull(), }); export const initiativesRelations = relations(initiatives, ({ many }) => ({ phases: many(phases), + pages: many(pages), + initiativeProjects: many(initiativeProjects), + tasks: many(tasks), })); export type Initiative = InferSelectModel<typeof initiatives>; @@ -59,7 +64,7 @@ export const phasesRelations = relations(phases, ({ one, many }) => ({ fields: [phases.initiativeId], references: [initiatives.id], }), - plans: many(plans), + tasks: many(tasks), // Dependencies: phases this phase depends on dependsOn: many(phaseDependencies, { relationName: 'dependentPhase' }), // 
Dependents: phases that depend on this phase @@ -100,45 +105,35 @@ export const phaseDependenciesRelations = relations(phaseDependencies, ({ one }) export type PhaseDependency = InferSelectModel<typeof phaseDependencies>; export type NewPhaseDependency = InferInsertModel<typeof phaseDependencies>; -// ============================================================================ -// PLANS -// ============================================================================ - -export const plans = sqliteTable('plans', { - id: text('id').primaryKey(), - phaseId: text('phase_id') - .notNull() - .references(() => phases.id, { onDelete: 'cascade' }), - number: integer('number').notNull(), - name: text('name').notNull(), - description: text('description'), - status: text('status', { enum: ['pending', 'in_progress', 'completed'] }) - .notNull() - .default('pending'), - createdAt: integer('created_at', { mode: 'timestamp' }).notNull(), - updatedAt: integer('updated_at', { mode: 'timestamp' }).notNull(), -}); - -export const plansRelations = relations(plans, ({ one, many }) => ({ - phase: one(phases, { - fields: [plans.phaseId], - references: [phases.id], - }), - tasks: many(tasks), -})); - -export type Plan = InferSelectModel<typeof plans>; -export type NewPlan = InferInsertModel<typeof plans>; - // ============================================================================ // TASKS // ============================================================================ +/** + * Task category enum values. + * Defines what kind of work a task represents. 
+ */ +export const TASK_CATEGORIES = [ + 'execute', // Standard execution task + 'research', // Research/exploration task + 'discuss', // Discussion/context gathering + 'breakdown', // Break initiative into phases + 'decompose', // Decompose plan into tasks + 'refine', // Refine/edit content + 'verify', // Verification task + 'merge', // Merge task + 'review', // Review/approval task +] as const; + +export type TaskCategory = (typeof TASK_CATEGORIES)[number]; + export const tasks = sqliteTable('tasks', { id: text('id').primaryKey(), - planId: text('plan_id') - .notNull() - .references(() => plans.id, { onDelete: 'cascade' }), + // Parent context - at least one should be set + phaseId: text('phase_id').references(() => phases.id, { onDelete: 'cascade' }), + initiativeId: text('initiative_id').references(() => initiatives.id, { onDelete: 'cascade' }), + // Parent task for decomposition hierarchy (child tasks link to parent decompose task) + parentTaskId: text('parent_task_id').references((): ReturnType<typeof text> => tasks.id, { onDelete: 'cascade' }), name: text('name').notNull(), description: text('description'), type: text('type', { @@ -146,6 +141,11 @@ export const tasks = sqliteTable('tasks', { }) .notNull() .default('auto'), + category: text('category', { + enum: TASK_CATEGORIES, + }) + .notNull() + .default('execute'), priority: text('priority', { enum: ['low', 'medium', 'high'] }) .notNull() .default('medium'), @@ -154,16 +154,29 @@ export const tasks = sqliteTable('tasks', { }) .notNull() .default('pending'), + requiresApproval: integer('requires_approval', { mode: 'boolean' }), // null = inherit from initiative order: integer('order').notNull().default(0), createdAt: integer('created_at', { mode: 'timestamp' }).notNull(), updatedAt: integer('updated_at', { mode: 'timestamp' }).notNull(), }); export const tasksRelations = relations(tasks, ({ one, many }) => ({ - plan: one(plans, { - fields: [tasks.planId], - references: [plans.id], + phase: one(phases, { + 
fields: [tasks.phaseId], + references: [phases.id], }), + initiative: one(initiatives, { + fields: [tasks.initiativeId], + references: [initiatives.id], + }), + // Parent task (for decomposition hierarchy - child links to parent decompose task) + parentTask: one(tasks, { + fields: [tasks.parentTaskId], + references: [tasks.id], + relationName: 'parentTask', + }), + // Child tasks (tasks created from decomposition of this task) + childTasks: many(tasks, { relationName: 'parentTask' }), // Dependencies: tasks this task depends on dependsOn: many(taskDependencies, { relationName: 'dependentTask' }), // Dependents: tasks that depend on this task @@ -204,26 +217,59 @@ export const taskDependenciesRelations = relations(taskDependencies, ({ one }) = export type TaskDependency = InferSelectModel<typeof taskDependencies>; export type NewTaskDependency = InferInsertModel<typeof taskDependencies>; +// ============================================================================ +// ACCOUNTS +// ============================================================================ + +export const accounts = sqliteTable('accounts', { + id: text('id').primaryKey(), + email: text('email').notNull(), + provider: text('provider').notNull().default('claude'), + configJson: text('config_json'), // .claude.json content (JSON string) + credentials: text('credentials'), // .credentials.json content (JSON string) + isExhausted: integer('is_exhausted', { mode: 'boolean' }).notNull().default(false), + exhaustedUntil: integer('exhausted_until', { mode: 'timestamp' }), + lastUsedAt: integer('last_used_at', { mode: 'timestamp' }), + sortOrder: integer('sort_order').notNull().default(0), + createdAt: integer('created_at', { mode: 'timestamp' }).notNull(), + updatedAt: integer('updated_at', { mode: 'timestamp' }).notNull(), +}); + +export const accountsRelations = relations(accounts, ({ many }) => ({ + agents: many(agents), +})); + +export type Account = InferSelectModel<typeof accounts>; +export type 
NewAccount = InferInsertModel<typeof accounts>; + // ============================================================================ // AGENTS // ============================================================================ export const agents = sqliteTable('agents', { id: text('id').primaryKey(), - name: text('name').notNull().unique(), // Human-readable name (e.g., 'gastown', 'chinatown') + name: text('name').notNull().unique(), // Human-readable alias (e.g., 'jolly-penguin') taskId: text('task_id').references(() => tasks.id, { onDelete: 'set null' }), // Task may be deleted + initiativeId: text('initiative_id').references(() => initiatives.id, { onDelete: 'set null' }), sessionId: text('session_id'), // Claude CLI session ID for resumption (null until first run completes) - worktreeId: text('worktree_id').notNull(), // WorktreeManager worktree ID + worktreeId: text('worktree_id').notNull(), // Agent alias (deterministic path: agent-workdirs/<alias>/) + provider: text('provider').notNull().default('claude'), + accountId: text('account_id').references(() => accounts.id, { onDelete: 'set null' }), status: text('status', { enum: ['idle', 'running', 'waiting_for_input', 'stopped', 'crashed'], }) .notNull() .default('idle'), - mode: text('mode', { enum: ['execute', 'discuss', 'breakdown', 'decompose'] }) + mode: text('mode', { enum: ['execute', 'discuss', 'breakdown', 'decompose', 'refine'] }) .notNull() .default('execute'), + pid: integer('pid'), + outputFilePath: text('output_file_path'), + result: text('result'), + pendingQuestions: text('pending_questions'), createdAt: integer('created_at', { mode: 'timestamp' }).notNull(), updatedAt: integer('updated_at', { mode: 'timestamp' }).notNull(), + userDismissedAt: integer('user_dismissed_at', { mode: 'timestamp' }), }); export const agentsRelations = relations(agents, ({ one }) => ({ @@ -231,6 +277,14 @@ export const agentsRelations = relations(agents, ({ one }) => ({ fields: [agents.taskId], references: [tasks.id], }), + 
initiative: one(initiatives, { + fields: [agents.initiativeId], + references: [initiatives.id], + }), + account: one(accounts, { + fields: [agents.accountId], + references: [accounts.id], + }), })); export type Agent = InferSelectModel<typeof agents>; @@ -286,3 +340,86 @@ export const messagesRelations = relations(messages, ({ one, many }) => ({ export type Message = InferSelectModel<typeof messages>; export type NewMessage = InferInsertModel<typeof messages>; + +// ============================================================================ +// PAGES +// ============================================================================ + +export const pages = sqliteTable('pages', { + id: text('id').primaryKey(), + initiativeId: text('initiative_id') + .notNull() + .references(() => initiatives.id, { onDelete: 'cascade' }), + parentPageId: text('parent_page_id').references((): ReturnType<typeof text> => pages.id, { onDelete: 'cascade' }), + title: text('title').notNull(), + content: text('content'), // JSON string from Tiptap + sortOrder: integer('sort_order').notNull().default(0), + createdAt: integer('created_at', { mode: 'timestamp' }).notNull(), + updatedAt: integer('updated_at', { mode: 'timestamp' }).notNull(), +}); + +export const pagesRelations = relations(pages, ({ one, many }) => ({ + initiative: one(initiatives, { + fields: [pages.initiativeId], + references: [initiatives.id], + }), + parentPage: one(pages, { + fields: [pages.parentPageId], + references: [pages.id], + relationName: 'parentPage', + }), + childPages: many(pages, { relationName: 'parentPage' }), +})); + +export type Page = InferSelectModel<typeof pages>; +export type NewPage = InferInsertModel<typeof pages>; + +// ============================================================================ +// PROJECTS +// ============================================================================ + +export const projects = sqliteTable('projects', { + id: text('id').primaryKey(), + name: 
text('name').notNull().unique(), + url: text('url').notNull().unique(), + createdAt: integer('created_at', { mode: 'timestamp' }).notNull(), + updatedAt: integer('updated_at', { mode: 'timestamp' }).notNull(), +}); + +export const projectsRelations = relations(projects, ({ many }) => ({ + initiativeProjects: many(initiativeProjects), +})); + +export type Project = InferSelectModel<typeof projects>; +export type NewProject = InferInsertModel<typeof projects>; + +// ============================================================================ +// INITIATIVE PROJECTS (junction) +// ============================================================================ + +export const initiativeProjects = sqliteTable('initiative_projects', { + id: text('id').primaryKey(), + initiativeId: text('initiative_id') + .notNull() + .references(() => initiatives.id, { onDelete: 'cascade' }), + projectId: text('project_id') + .notNull() + .references(() => projects.id, { onDelete: 'cascade' }), + createdAt: integer('created_at', { mode: 'timestamp' }).notNull(), +}, (table) => [ + uniqueIndex('initiative_project_unique').on(table.initiativeId, table.projectId), +]); + +export const initiativeProjectsRelations = relations(initiativeProjects, ({ one }) => ({ + initiative: one(initiatives, { + fields: [initiativeProjects.initiativeId], + references: [initiatives.id], + }), + project: one(projects, { + fields: [initiativeProjects.projectId], + references: [projects.id], + }), +})); + +export type InitiativeProject = InferSelectModel<typeof initiativeProjects>; +export type NewInitiativeProject = InferInsertModel<typeof initiativeProjects>; diff --git a/src/dispatch/manager.test.ts b/src/dispatch/manager.test.ts index a55de39..3b012ea 100644 --- a/src/dispatch/manager.test.ts +++ b/src/dispatch/manager.test.ts @@ -9,7 +9,7 @@ import { describe, it, expect, beforeEach, vi } from 'vitest'; import { DefaultDispatchManager } from './manager.js'; import { DrizzleTaskRepository } from 
'../db/repositories/drizzle/task.js'; import { DrizzleMessageRepository } from '../db/repositories/drizzle/message.js'; -import { DrizzlePlanRepository } from '../db/repositories/drizzle/plan.js'; + import { DrizzlePhaseRepository } from '../db/repositories/drizzle/phase.js'; import { DrizzleInitiativeRepository } from '../db/repositories/drizzle/initiative.js'; import { createTestDatabase } from '../db/repositories/drizzle/test-helpers.js'; @@ -59,12 +59,15 @@ function createMockAgentManager( spawn: vi.fn().mockImplementation(async (options) => { const newAgent: AgentInfo = { id: `agent-${Date.now()}`, - name: options.name, + name: options.name ?? `mock-agent-${Date.now()}`, taskId: options.taskId, + initiativeId: options.initiativeId ?? null, sessionId: null, worktreeId: 'worktree-test', status: 'running', mode: options.mode ?? 'execute', + provider: options.provider ?? 'claude', + accountId: null, createdAt: new Date(), updatedAt: new Date(), }; @@ -72,9 +75,12 @@ function createMockAgentManager( return newAgent; }), stop: vi.fn().mockResolvedValue(undefined), + delete: vi.fn().mockResolvedValue(undefined), + dismiss: vi.fn().mockResolvedValue(undefined), resume: vi.fn().mockResolvedValue(undefined), getResult: vi.fn().mockResolvedValue(null), getPendingQuestions: vi.fn().mockResolvedValue(null), + getOutputBuffer: vi.fn().mockReturnValue([]), }; } @@ -86,10 +92,13 @@ function createIdleAgent(id: string, name: string): AgentInfo { id, name, taskId: 'task-123', + initiativeId: null, sessionId: 'session-abc', worktreeId: 'worktree-xyz', status: 'idle', mode: 'execute', + provider: 'claude', + accountId: null, createdAt: new Date(), updatedAt: new Date(), }; @@ -106,7 +115,7 @@ describe('DefaultDispatchManager', () => { let eventBus: EventBus & { emittedEvents: DomainEvent[] }; let agentManager: AgentManager; let dispatchManager: DefaultDispatchManager; - let testPlanId: string; + let testPhaseId: string; beforeEach(async () => { // Set up test database @@ -117,7 
+126,7 @@ describe('DefaultDispatchManager', () => { // Create required hierarchy for tasks const initiativeRepo = new DrizzleInitiativeRepository(db); const phaseRepo = new DrizzlePhaseRepository(db); - const planRepo = new DrizzlePlanRepository(db); + const initiative = await initiativeRepo.create({ name: 'Test Initiative', @@ -127,12 +136,7 @@ describe('DefaultDispatchManager', () => { number: 1, name: 'Test Phase', }); - const plan = await planRepo.create({ - phaseId: phase.id, - number: 1, - name: 'Test Plan', - }); - testPlanId = plan.id; + testPhaseId = phase.id; // Create mocks eventBus = createMockEventBus(); @@ -154,7 +158,7 @@ describe('DefaultDispatchManager', () => { describe('queue', () => { it('should add task to queue and emit TaskQueuedEvent', async () => { const task = await taskRepository.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'Test Task', priority: 'high', order: 1, @@ -192,7 +196,7 @@ describe('DefaultDispatchManager', () => { it('should return task when dependencies are complete', async () => { const task = await taskRepository.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'Dispatchable Task', priority: 'medium', order: 1, @@ -208,19 +212,19 @@ describe('DefaultDispatchManager', () => { it('should respect priority ordering (high > medium > low)', async () => { // Create tasks in different priority order const lowTask = await taskRepository.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'Low Priority', priority: 'low', order: 1, }); const highTask = await taskRepository.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'High Priority', priority: 'high', order: 2, }); const mediumTask = await taskRepository.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'Medium Priority', priority: 'medium', order: 3, @@ -240,13 +244,13 @@ describe('DefaultDispatchManager', () => { it('should order by queuedAt within same priority (oldest first)', async () => { const task1 = await 
taskRepository.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'First Task', priority: 'medium', order: 1, }); const task2 = await taskRepository.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'Second Task', priority: 'medium', order: 2, @@ -271,7 +275,7 @@ describe('DefaultDispatchManager', () => { describe('completeTask', () => { it('should update task status and emit TaskCompletedEvent', async () => { const task = await taskRepository.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'Task to Complete', priority: 'medium', order: 1, @@ -301,7 +305,7 @@ describe('DefaultDispatchManager', () => { describe('blockTask', () => { it('should update task status and emit TaskBlockedEvent', async () => { const task = await taskRepository.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'Task to Block', priority: 'medium', order: 1, @@ -344,7 +348,7 @@ describe('DefaultDispatchManager', () => { it('should return failure when no agents available', async () => { const task = await taskRepository.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'Task needing agent', priority: 'high', order: 1, @@ -363,7 +367,7 @@ describe('DefaultDispatchManager', () => { it('should dispatch task to available agent', async () => { // Create task const task = await taskRepository.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'Task for dispatch', description: 'Do the thing', priority: 'high', @@ -406,7 +410,7 @@ describe('DefaultDispatchManager', () => { it('should emit TaskDispatchedEvent on successful dispatch', async () => { const task = await taskRepository.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'Dispatch event test', priority: 'medium', order: 1, @@ -442,19 +446,19 @@ describe('DefaultDispatchManager', () => { it('should return correct state', async () => { // Create and queue tasks const task1 = await taskRepository.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'Ready Task', 
priority: 'high', order: 1, }); const task2 = await taskRepository.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'Another Ready Task', priority: 'low', order: 2, }); const task3 = await taskRepository.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'Blocked Task', priority: 'medium', order: 3, @@ -497,19 +501,19 @@ describe('DefaultDispatchManager', () => { // This test verifies the priority and queue ordering work correctly const taskA = await taskRepository.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'Task A - Foundation', priority: 'high', order: 1, }); const taskB = await taskRepository.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'Task B - Build on A', priority: 'medium', order: 2, }); const taskC = await taskRepository.create({ - planId: testPlanId, + phaseId: testPhaseId, name: 'Task C - Also build on A', priority: 'medium', order: 3, diff --git a/src/dispatch/manager.ts b/src/dispatch/manager.ts index 9b365c4..02a4e2d 100644 --- a/src/dispatch/manager.ts +++ b/src/dispatch/manager.ts @@ -7,11 +7,23 @@ * This is the ADAPTER for the DispatchManager PORT. 
*/ -import type { EventBus, TaskQueuedEvent, TaskCompletedEvent, TaskBlockedEvent, TaskDispatchedEvent } from '../events/index.js'; +import type { + EventBus, + TaskQueuedEvent, + TaskCompletedEvent, + TaskBlockedEvent, + TaskDispatchedEvent, + TaskPendingApprovalEvent, +} from '../events/index.js'; import type { AgentManager } from '../agent/types.js'; import type { TaskRepository } from '../db/repositories/task-repository.js'; import type { MessageRepository } from '../db/repositories/message-repository.js'; +import type { InitiativeRepository } from '../db/repositories/initiative-repository.js'; +import type { Task } from '../db/schema.js'; import type { DispatchManager, QueuedTask, DispatchResult } from './types.js'; +import { createModuleLogger } from '../logger/index.js'; + +const log = createModuleLogger('dispatch'); // ============================================================================= // Internal Types @@ -46,12 +58,14 @@ export class DefaultDispatchManager implements DispatchManager { private taskRepository: TaskRepository, private messageRepository: MessageRepository, private agentManager: AgentManager, - private eventBus: EventBus + private eventBus: EventBus, + private initiativeRepository?: InitiativeRepository ) {} /** * Queue a task for dispatch. * Fetches task dependencies and adds to internal queue. + * Checkpoint tasks are queued but won't auto-dispatch. 
*/ async queue(taskId: string): Promise<void> { // Fetch task to verify it exists and get priority @@ -60,10 +74,8 @@ export class DefaultDispatchManager implements DispatchManager { throw new Error(`Task not found: ${taskId}`); } - // Get dependencies for this task - // We need to query task_dependencies table - // For now, use empty deps - will be populated when we have dependency data - const dependsOn: string[] = []; + // Get dependencies for this task from the repository + const dependsOn = await this.taskRepository.getDependencies(taskId); const queuedTask: QueuedTask = { taskId, @@ -74,6 +86,8 @@ export class DefaultDispatchManager implements DispatchManager { this.taskQueue.set(taskId, queuedTask); + log.info({ taskId, priority: task.priority, isCheckpoint: this.isCheckpointTask(task) }, 'task queued'); + // Emit TaskQueuedEvent const event: TaskQueuedEvent = { type: 'task:queued', @@ -90,6 +104,7 @@ export class DefaultDispatchManager implements DispatchManager { /** * Get next dispatchable task. * Returns task with all dependencies complete, highest priority first. + * Checkpoint tasks are excluded (require human action). 
*/ async getNextDispatchable(): Promise<QueuedTask | null> { const queuedTasks = Array.from(this.taskQueue.values()); @@ -98,16 +113,30 @@ export class DefaultDispatchManager implements DispatchManager { return null; } - // Filter to only tasks with all dependencies complete + // Filter to only tasks with all dependencies complete and not checkpoint tasks const readyTasks: QueuedTask[] = []; + log.debug({ queueSize: queuedTasks.length }, 'evaluating dispatchable tasks'); + for (const qt of queuedTasks) { + // Check dependencies const allDepsComplete = await this.areAllDependenciesComplete(qt.dependsOn); - if (allDepsComplete) { - readyTasks.push(qt); + if (!allDepsComplete) { + continue; } + + // Check if this is a checkpoint task (requires human action) + const task = await this.taskRepository.findById(qt.taskId); + if (task && this.isCheckpointTask(task)) { + log.debug({ taskId: qt.taskId, type: task.type }, 'skipping checkpoint task'); + continue; + } + + readyTasks.push(qt); } + log.debug({ queueSize: queuedTasks.length, readyCount: readyTasks.length }, 'dispatchable evaluation complete'); + if (readyTasks.length === 0) { return null; } @@ -128,17 +157,87 @@ export class DefaultDispatchManager implements DispatchManager { /** * Mark a task as complete. + * If the task requires approval, sets status to 'pending_approval' instead. * Updates task status and removes from queue. 
+ * + * @param taskId - ID of the task to complete + * @param agentId - Optional ID of the agent that completed the task */ - async completeTask(taskId: string): Promise<void> { - // Update task status to 'completed' - await this.taskRepository.update(taskId, { status: 'completed' }); + async completeTask(taskId: string, agentId?: string): Promise<void> { + const task = await this.taskRepository.findById(taskId); + if (!task) { + throw new Error(`Task not found: ${taskId}`); + } - // Remove from queue - this.taskQueue.delete(taskId); + // Determine if approval is required + const requiresApproval = await this.taskRequiresApproval(task); + + if (requiresApproval) { + // Set to pending_approval instead of completed + await this.taskRepository.update(taskId, { status: 'pending_approval' }); + + // Remove from queue + this.taskQueue.delete(taskId); + + log.info({ taskId, category: task.category }, 'task pending approval'); + + // Emit TaskPendingApprovalEvent + const event: TaskPendingApprovalEvent = { + type: 'task:pending_approval', + timestamp: new Date(), + payload: { + taskId, + agentId: agentId ?? '', + category: task.category, + name: task.name, + }, + }; + this.eventBus.emit(event); + } else { + // Complete directly + await this.taskRepository.update(taskId, { status: 'completed' }); + + // Remove from queue + this.taskQueue.delete(taskId); + + log.info({ taskId }, 'task completed'); + + // Emit TaskCompletedEvent + const event: TaskCompletedEvent = { + type: 'task:completed', + timestamp: new Date(), + payload: { + taskId, + agentId: agentId ?? '', + success: true, + message: 'Task completed', + }, + }; + this.eventBus.emit(event); + } // Also remove from blocked if it was there this.blockedTasks.delete(taskId); + } + + /** + * Approve a task that is pending approval. + * Sets status to 'completed' and emits completion event. 
+ */ + async approveTask(taskId: string): Promise<void> { + const task = await this.taskRepository.findById(taskId); + if (!task) { + throw new Error(`Task not found: ${taskId}`); + } + + if (task.status !== 'pending_approval') { + throw new Error(`Task ${taskId} is not pending approval (status: ${task.status})`); + } + + // Complete the task + await this.taskRepository.update(taskId, { status: 'completed' }); + + log.info({ taskId }, 'task approved and completed'); // Emit TaskCompletedEvent const event: TaskCompletedEvent = { @@ -146,9 +245,9 @@ export class DefaultDispatchManager implements DispatchManager { timestamp: new Date(), payload: { taskId, - agentId: '', // Unknown at this point + agentId: '', success: true, - message: 'Task completed', + message: 'Task approved', }, }; this.eventBus.emit(event); @@ -165,6 +264,8 @@ export class DefaultDispatchManager implements DispatchManager { // Record in blocked map this.blockedTasks.set(taskId, { taskId, reason }); + log.warn({ taskId, reason }, 'task blocked'); + // Remove from queue (blocked tasks aren't dispatchable) this.taskQueue.delete(taskId); @@ -188,6 +289,7 @@ export class DefaultDispatchManager implements DispatchManager { const nextTask = await this.getNextDispatchable(); if (!nextTask) { + log.debug('no dispatchable tasks'); return { success: false, taskId: '', @@ -200,6 +302,7 @@ export class DefaultDispatchManager implements DispatchManager { const idleAgent = agents.find((a) => a.status === 'idle'); if (!idleAgent) { + log.debug('no available agents'); return { success: false, taskId: nextTask.taskId, @@ -217,16 +320,14 @@ export class DefaultDispatchManager implements DispatchManager { }; } - // Generate agent name based on task ID - const agentName = `agent-${nextTask.taskId.slice(0, 6)}`; - - // Spawn agent with task + // Spawn agent with task (alias auto-generated by agent manager) const agent = await this.agentManager.spawn({ - name: agentName, taskId: nextTask.taskId, prompt: 
task.description || task.name, }); + log.info({ taskId: nextTask.taskId, agentId: agent.id }, 'task dispatched'); + // Update task status to 'in_progress' await this.taskRepository.update(nextTask.taskId, { status: 'in_progress' }); @@ -299,4 +400,35 @@ export class DefaultDispatchManager implements DispatchManager { return true; } + + /** + * Check if a task is a checkpoint task. + * Checkpoint tasks require human action and don't auto-dispatch. + */ + private isCheckpointTask(task: Task): boolean { + return task.type.startsWith('checkpoint:'); + } + + /** + * Determine if a task requires approval before being marked complete. + * Checks task-level override first, then falls back to initiative setting. + */ + private async taskRequiresApproval(task: Task): Promise<boolean> { + // Task-level override takes precedence + if (task.requiresApproval !== null) { + return task.requiresApproval; + } + + // Fall back to initiative setting if we have initiative access + if (this.initiativeRepository && task.initiativeId) { + const initiative = await this.initiativeRepository.findById(task.initiativeId); + if (initiative) { + return initiative.mergeRequiresApproval; + } + } + + // If task has a phaseId but no initiativeId, we could traverse up but for now default to false + // Default: no approval required + return false; + } } diff --git a/src/dispatch/types.ts b/src/dispatch/types.ts index 4d5fb65..f2e8533 100644 --- a/src/dispatch/types.ts +++ b/src/dispatch/types.ts @@ -86,11 +86,21 @@ export interface DispatchManager { /** * Mark a task as complete. + * If the task requires approval, sets status to 'pending_approval' instead. * Triggers re-evaluation of dependent tasks. * * @param taskId - ID of the completed task + * @param agentId - Optional ID of the agent that completed the task */ - completeTask(taskId: string): Promise<void>; + completeTask(taskId: string, agentId?: string): Promise<void>; + + /** + * Approve a task that is pending approval. 
+ * Sets status to 'completed' and emits completion event. + * + * @param taskId - ID of the task to approve + */ + approveTask(taskId: string): Promise<void>; /** * Mark a task as blocked. diff --git a/src/events/index.ts b/src/events/index.ts index 3fdece3..7e3debd 100644 --- a/src/events/index.ts +++ b/src/events/index.ts @@ -24,11 +24,15 @@ export type { AgentStoppedEvent, AgentCrashedEvent, AgentResumedEvent, + AgentAccountSwitchedEvent, + AgentDeletedEvent, AgentWaitingEvent, + AgentOutputEvent, TaskQueuedEvent, TaskDispatchedEvent, TaskCompletedEvent, TaskBlockedEvent, + TaskPendingApprovalEvent, PhaseQueuedEvent, PhaseStartedEvent, PhaseCompletedEvent, @@ -37,6 +41,12 @@ export type { MergeStartedEvent, MergeCompletedEvent, MergeConflictedEvent, + PageCreatedEvent, + PageUpdatedEvent, + PageDeletedEvent, + AccountCredentialsRefreshedEvent, + AccountCredentialsExpiredEvent, + AccountCredentialsValidatedEvent, DomainEventMap, DomainEventType, } from './types.js'; diff --git a/src/events/types.ts b/src/events/types.ts index 1364480..2c9b345 100644 --- a/src/events/types.ts +++ b/src/events/types.ts @@ -141,8 +141,9 @@ export interface AgentSpawnedEvent extends DomainEvent { payload: { agentId: string; name: string; - taskId: string; + taskId: string | null; worktreeId: string; + provider: string; }; } @@ -151,7 +152,7 @@ export interface AgentStoppedEvent extends DomainEvent { payload: { agentId: string; name: string; - taskId: string; + taskId: string | null; reason: | 'user_requested' | 'task_complete' @@ -159,7 +160,8 @@ export interface AgentStoppedEvent extends DomainEvent { | 'waiting_for_input' | 'context_complete' | 'breakdown_complete' - | 'decompose_complete'; + | 'decompose_complete' + | 'refine_complete'; }; } @@ -168,7 +170,7 @@ export interface AgentCrashedEvent extends DomainEvent { payload: { agentId: string; name: string; - taskId: string; + taskId: string | null; error: string; }; } @@ -178,17 +180,28 @@ export interface AgentResumedEvent 
extends DomainEvent { payload: { agentId: string; name: string; - taskId: string; + taskId: string | null; sessionId: string; }; } +export interface AgentAccountSwitchedEvent extends DomainEvent { + type: 'agent:account_switched'; + payload: { + agentId: string; + name: string; + previousAccountId: string; + newAccountId: string; + reason: 'account_exhausted'; + }; +} + export interface AgentWaitingEvent extends DomainEvent { type: 'agent:waiting'; payload: { agentId: string; name: string; - taskId: string; + taskId: string | null; sessionId: string; questions: Array<{ id: string; @@ -199,6 +212,23 @@ export interface AgentWaitingEvent extends DomainEvent { }; } +export interface AgentDeletedEvent extends DomainEvent { + type: 'agent:deleted'; + payload: { + agentId: string; + name: string; + }; +} + +export interface AgentOutputEvent extends DomainEvent { + type: 'agent:output'; + payload: { + agentId: string; + stream: 'stdout' | 'stderr'; + data: string; + }; +} + /** * Task Dispatch Events */ @@ -240,6 +270,16 @@ export interface TaskBlockedEvent extends DomainEvent { }; } +export interface TaskPendingApprovalEvent extends DomainEvent { + type: 'task:pending_approval'; + payload: { + taskId: string; + agentId: string; + category: string; + name: string; + }; +} + /** * Phase Events */ @@ -324,6 +364,68 @@ export interface MergeConflictedEvent extends DomainEvent { }; } +/** + * Page Events + */ + +export interface PageCreatedEvent extends DomainEvent { + type: 'page:created'; + payload: { + pageId: string; + initiativeId: string; + title: string; + }; +} + +export interface PageUpdatedEvent extends DomainEvent { + type: 'page:updated'; + payload: { + pageId: string; + initiativeId: string; + title?: string; + }; +} + +export interface PageDeletedEvent extends DomainEvent { + type: 'page:deleted'; + payload: { + pageId: string; + initiativeId: string; + }; +} + +/** + * Account Credential Events + */ + +export interface AccountCredentialsRefreshedEvent extends 
DomainEvent { + type: 'account:credentials_refreshed'; + payload: { + accountId: string | null; + expiresAt: number; + previousExpiresAt: number | null; + }; +} + +export interface AccountCredentialsExpiredEvent extends DomainEvent { + type: 'account:credentials_expired'; + payload: { + accountId: string | null; + reason: 'token_expired' | 'refresh_failed' | 'credentials_missing'; + error: string | null; + }; +} + +export interface AccountCredentialsValidatedEvent extends DomainEvent { + type: 'account:credentials_validated'; + payload: { + accountId: string | null; + valid: boolean; + expiresAt: number | null; + wasRefreshed: boolean; + }; +} + /** * Union of all domain events - enables type-safe event handling */ @@ -342,11 +444,15 @@ export type DomainEventMap = | AgentStoppedEvent | AgentCrashedEvent | AgentResumedEvent + | AgentAccountSwitchedEvent + | AgentDeletedEvent | AgentWaitingEvent + | AgentOutputEvent | TaskQueuedEvent | TaskDispatchedEvent | TaskCompletedEvent | TaskBlockedEvent + | TaskPendingApprovalEvent | PhaseQueuedEvent | PhaseStartedEvent | PhaseCompletedEvent @@ -354,7 +460,13 @@ export type DomainEventMap = | MergeQueuedEvent | MergeStartedEvent | MergeCompletedEvent - | MergeConflictedEvent; + | MergeConflictedEvent + | PageCreatedEvent + | PageUpdatedEvent + | PageDeletedEvent + | AccountCredentialsRefreshedEvent + | AccountCredentialsExpiredEvent + | AccountCredentialsValidatedEvent; /** * Event type literal union for type checking diff --git a/src/git/clone.ts b/src/git/clone.ts new file mode 100644 index 0000000..904f9f3 --- /dev/null +++ b/src/git/clone.ts @@ -0,0 +1,24 @@ +/** + * Git Clone Utility + * + * Clones a git repository to a local path. + * Used when registering projects to create the base clone + * from which worktrees are later created. 
+ */ + +import { simpleGit } from 'simple-git'; +import { createModuleLogger } from '../logger/index.js'; + +const log = createModuleLogger('git'); + +/** + * Clone a git repository to a destination path. + * + * @param url - Remote repository URL + * @param destPath - Local filesystem path for the clone + */ +export async function cloneProject(url: string, destPath: string): Promise<void> { + const git = simpleGit(); + log.info({ url, destPath }, 'cloning project'); + await git.clone(url, destPath); +} diff --git a/src/git/index.ts b/src/git/index.ts index 8645213..a340f47 100644 --- a/src/git/index.ts +++ b/src/git/index.ts @@ -17,3 +17,7 @@ export type { Worktree, WorktreeDiff, MergeResult } from './types.js'; // Adapters export { SimpleGitWorktreeManager } from './manager.js'; + +// Utilities +export { cloneProject } from './clone.js'; +export { ensureProjectClone, getProjectCloneDir } from './project-clones.js'; diff --git a/src/git/manager.ts b/src/git/manager.ts index ce10ca7..258bc46 100644 --- a/src/git/manager.ts +++ b/src/git/manager.ts @@ -17,6 +17,9 @@ import type { WorktreeDiff, MergeResult, } from './types.js'; +import { createModuleLogger } from '../logger/index.js'; + +const log = createModuleLogger('git'); /** * SimpleGit-based implementation of the WorktreeManager interface. @@ -35,11 +38,12 @@ export class SimpleGitWorktreeManager implements WorktreeManager { * * @param repoPath - Absolute path to the git repository * @param eventBus - Optional EventBus for emitting git events + * @param worktreesBaseDir - Optional custom base directory for worktrees (defaults to <repoPath>/.cw-worktrees) */ - constructor(repoPath: string, eventBus?: EventBus) { + constructor(repoPath: string, eventBus?: EventBus, worktreesBaseDir?: string) { this.repoPath = repoPath; this.git = simpleGit(repoPath); - this.worktreesDir = path.join(repoPath, '.cw-worktrees'); + this.worktreesDir = worktreesBaseDir ?? 
path.join(repoPath, '.cw-worktrees'); this.eventBus = eventBus; } @@ -55,6 +59,7 @@ export class SimpleGitWorktreeManager implements WorktreeManager { baseBranch: string = 'main' ): Promise<Worktree> { const worktreePath = path.join(this.worktreesDir, id); + log.info({ id, branch, baseBranch }, 'creating worktree'); // Create worktree with new branch // git worktree add -b <branch> <path> <base-branch> @@ -100,6 +105,7 @@ export class SimpleGitWorktreeManager implements WorktreeManager { } const branch = worktree.branch; + log.info({ id, branch }, 'removing worktree'); // Remove worktree with force to handle any uncommitted changes // git worktree remove <path> --force @@ -197,6 +203,7 @@ export class SimpleGitWorktreeManager implements WorktreeManager { if (!worktree) { throw new Error(`Worktree not found: ${id}`); } + log.info({ id, targetBranch }, 'merging worktree'); // Store current branch to restore later const currentBranch = await this.git.revparse(['--abbrev-ref', 'HEAD']); @@ -229,6 +236,7 @@ export class SimpleGitWorktreeManager implements WorktreeManager { if (status.conflicted.length > 0) { const conflicts = status.conflicted; + log.warn({ id, targetBranch, conflictCount: conflicts.length }, 'merge conflicts detected'); // Emit conflict event this.eventBus?.emit({ diff --git a/src/git/project-clones.ts b/src/git/project-clones.ts new file mode 100644 index 0000000..3faa818 --- /dev/null +++ b/src/git/project-clones.ts @@ -0,0 +1,48 @@ +/** + * Project Clone Management + * + * Ensures project repositories are cloned into the repos/ directory. + * These base clones are used as the source for git worktrees. + */ + +import { join } from 'node:path'; +import { access } from 'node:fs/promises'; +import { cloneProject } from './clone.js'; +import { createModuleLogger } from '../logger/index.js'; + +const log = createModuleLogger('git'); + +/** + * Derive the canonical clone directory for a project (relative to workspace root). 
+ * Convention: repos/<sanitizedName>-<id>/ + */ +export function getProjectCloneDir(projectName: string, projectId: string): string { + const sanitized = projectName.toLowerCase().replace(/[^a-z0-9]+/g, '-').replace(/^-|-$/g, ''); + return join('repos', `${sanitized}-${projectId}`); +} + +/** + * Ensure a project's git repository is cloned to the workspace. + * Uses the canonical path: <workspaceRoot>/repos/<sanitizedName>-<id>/ + * + * @param project - Project with id, name, and url + * @param workspaceRoot - Absolute path to the workspace root + * @returns Absolute path to the clone directory + */ +export async function ensureProjectClone( + project: { id: string; name: string; url: string }, + workspaceRoot: string, +): Promise<string> { + const relPath = getProjectCloneDir(project.name, project.id); + const clonePath = join(workspaceRoot, relPath); + + try { + await access(clonePath); + log.debug({ project: project.name, clonePath }, 'project clone already exists'); + return clonePath; + } catch { + log.info({ project: project.name, url: project.url, clonePath }, 'cloning project for first time'); + await cloneProject(project.url, clonePath); + return clonePath; + } +} diff --git a/src/logger/index.ts b/src/logger/index.ts new file mode 100644 index 0000000..75d7fde --- /dev/null +++ b/src/logger/index.ts @@ -0,0 +1,28 @@ +import pino from 'pino'; + +function resolveLevel(): string { + if (process.env.CW_LOG_LEVEL) return process.env.CW_LOG_LEVEL; + return process.env.NODE_ENV === 'development' ? 'debug' : 'info'; +} + +export const logger = pino( + { + name: 'cw', + level: resolveLevel(), + ...(process.env.CW_LOG_PRETTY === '1' && { + transport: { + target: 'pino-pretty', + options: { + colorize: true, + ignore: 'pid,hostname', + translateTime: 'HH:MM:ss.l', + }, + }, + }), + }, + process.env.CW_LOG_PRETTY === '1' ? 
undefined : pino.destination(2), +); + +export function createModuleLogger(module: string) { + return logger.child({ module }); +} diff --git a/src/server/index.ts b/src/server/index.ts index 2e2438c..63a8770 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -16,6 +16,7 @@ import type { ProcessManager } from '../process/index.js'; import type { LogManager } from '../logging/index.js'; import type { EventBus, ServerStartedEvent, ServerStoppedEvent } from '../events/index.js'; import { createTrpcHandler, type TrpcAdapterOptions } from './trpc-adapter.js'; +import { createModuleLogger } from '../logger/index.js'; /** * Optional dependencies for tRPC context. @@ -23,6 +24,8 @@ import { createTrpcHandler, type TrpcAdapterOptions } from './trpc-adapter.js'; */ export type ServerContextDeps = Omit<TrpcAdapterOptions, 'eventBus' | 'serverStartedAt' | 'processCount'>; +const log = createModuleLogger('http'); + /** Default port for the coordination server */ const DEFAULT_PORT = 3847; @@ -121,6 +124,7 @@ export class CoordinationServer { } console.log(`Coordination server listening on http://${this.config.host}:${this.config.port}`); + log.info({ port: this.config.port, host: this.config.host, pid: process.pid }, 'server listening'); } /** @@ -235,7 +239,7 @@ export class CoordinationServer { }); trpcHandler(req, res).catch((error: Error) => { - console.error('tRPC handler error:', error); + log.error({ err: error }, 'tRPC handler error'); this.sendJson(res, 500, { error: 'Internal server error' }); }); } @@ -311,6 +315,7 @@ export class CoordinationServer { return pid; // Process is alive } catch { // Process is dead, PID file is stale + log.warn({ stalePid: pid }, 'stale PID file cleaned up'); await this.removePidFile(); return null; } diff --git a/src/server/trpc-adapter.ts b/src/server/trpc-adapter.ts index 1d1ee6a..97085de 100644 --- a/src/server/trpc-adapter.ts +++ b/src/server/trpc-adapter.ts @@ -14,7 +14,10 @@ import type { TaskRepository } from 
'../db/repositories/task-repository.js'; import type { MessageRepository } from '../db/repositories/message-repository.js'; import type { InitiativeRepository } from '../db/repositories/initiative-repository.js'; import type { PhaseRepository } from '../db/repositories/phase-repository.js'; -import type { PlanRepository } from '../db/repositories/plan-repository.js'; +import type { PageRepository } from '../db/repositories/page-repository.js'; +import type { ProjectRepository } from '../db/repositories/project-repository.js'; +import type { AccountRepository } from '../db/repositories/account-repository.js'; +import type { AccountCredentialManager } from '../agent/credentials/types.js'; import type { DispatchManager, PhaseDispatchManager } from '../dispatch/types.js'; import type { CoordinationManager } from '../coordination/types.js'; @@ -38,14 +41,22 @@ export interface TrpcAdapterOptions { initiativeRepository?: InitiativeRepository; /** Phase repository for phase CRUD operations */ phaseRepository?: PhaseRepository; - /** Plan repository for plan CRUD operations */ - planRepository?: PlanRepository; /** Dispatch manager for task queue operations */ dispatchManager?: DispatchManager; /** Coordination manager for merge queue operations */ coordinationManager?: CoordinationManager; /** Phase dispatch manager for phase queue operations */ phaseDispatchManager?: PhaseDispatchManager; + /** Page repository for page CRUD operations */ + pageRepository?: PageRepository; + /** Project repository for project CRUD and initiative-project junction operations */ + projectRepository?: ProjectRepository; + /** Account repository for account CRUD and load balancing */ + accountRepository?: AccountRepository; + /** Credential manager for account OAuth token management */ + credentialManager?: AccountCredentialManager; + /** Absolute path to the workspace root (.cwrc directory) */ + workspaceRoot?: string; } /** @@ -112,10 +123,14 @@ export function createTrpcHandler(options: 
TrpcAdapterOptions) { messageRepository: options.messageRepository, initiativeRepository: options.initiativeRepository, phaseRepository: options.phaseRepository, - planRepository: options.planRepository, dispatchManager: options.dispatchManager, coordinationManager: options.coordinationManager, phaseDispatchManager: options.phaseDispatchManager, + pageRepository: options.pageRepository, + projectRepository: options.projectRepository, + accountRepository: options.accountRepository, + credentialManager: options.credentialManager, + workspaceRoot: options.workspaceRoot, }), }); diff --git a/src/test/e2e/architect-workflow.test.ts b/src/test/e2e/architect-workflow.test.ts index e548602..261d635 100644 --- a/src/test/e2e/architect-workflow.test.ts +++ b/src/test/e2e/architect-workflow.test.ts @@ -30,7 +30,7 @@ describe('Architect Workflow E2E', () => { vi.useFakeTimers(); // Create initiative - const initiative = await harness.createInitiative('Auth System', 'User authentication'); + const initiative = await harness.createInitiative('Auth System'); // Set up discuss completion scenario harness.setArchitectDiscussComplete('auth-discuss', [ diff --git a/src/test/e2e/decompose-workflow.test.ts b/src/test/e2e/decompose-workflow.test.ts index c7dc6e1..5c9f2cf 100644 --- a/src/test/e2e/decompose-workflow.test.ts +++ b/src/test/e2e/decompose-workflow.test.ts @@ -1,10 +1,10 @@ /** * E2E Tests for Decompose Workflow * - * Tests the complete decomposition workflow from plan creation through task creation: - * - Decompose mode: Break plan into executable tasks + * Tests the complete decomposition workflow from phase through task creation: + * - Decompose mode: Break phase into executable tasks * - Q&A flow: Handle clarifying questions during decomposition - * - Task persistence: Save tasks from decomposition output + * - Task persistence: Save child tasks from decomposition output * * Uses TestHarness from src/test/ for full system wiring. 
*/ @@ -30,11 +30,11 @@ describe('Decompose Workflow E2E', () => { vi.useFakeTimers(); // Setup: Create initiative -> phase -> plan - const initiative = await harness.createInitiative('Test Project', 'Test project description'); + const initiative = await harness.createInitiative('Test Project'); const phases = await harness.createPhasesFromBreakdown(initiative.id, [ { number: 1, name: 'Phase 1', description: 'First phase' }, ]); - const plan = await harness.createPlan(phases[0].id, 'Auth Plan', 'Implement authentication'); + const decomposeTask = await harness.createDecomposeTask(phases[0].id, 'Auth Plan', 'Implement authentication'); // Set decompose scenario harness.setArchitectDecomposeComplete('decomposer', [ @@ -45,7 +45,7 @@ describe('Decompose Workflow E2E', () => { // Spawn decompose agent const agent = await harness.caller.spawnArchitectDecompose({ name: 'decomposer', - planId: plan.id, + phaseId: phases[0].id, }); expect(agent.mode).toBe('decompose'); @@ -67,7 +67,7 @@ describe('Decompose Workflow E2E', () => { const phases = await harness.createPhasesFromBreakdown(initiative.id, [ { number: 1, name: 'Phase 1', description: 'First phase' }, ]); - const plan = await harness.createPlan(phases[0].id, 'Complex Plan'); + const decomposeTask = await harness.createDecomposeTask(phases[0].id, 'Complex Plan'); // Set questions scenario harness.setArchitectDecomposeQuestions('decomposer', [ @@ -76,7 +76,7 @@ describe('Decompose Workflow E2E', () => { const agent = await harness.caller.spawnArchitectDecompose({ name: 'decomposer', - planId: plan.id, + phaseId: phases[0].id, }); await harness.advanceTimers(); @@ -119,7 +119,7 @@ describe('Decompose Workflow E2E', () => { const phases = await harness.createPhasesFromBreakdown(initiative.id, [ { number: 1, name: 'Phase 1', description: 'First phase' }, ]); - const plan = await harness.createPlan(phases[0].id, 'Complex Plan'); + const decomposeTask = await harness.createDecomposeTask(phases[0].id, 'Complex Plan'); // 
Set multiple questions scenario harness.setArchitectDecomposeQuestions('decomposer', [ @@ -130,7 +130,7 @@ describe('Decompose Workflow E2E', () => { const agent = await harness.caller.spawnArchitectDecompose({ name: 'decomposer', - planId: plan.id, + phaseId: phases[0].id, }); await harness.advanceTimers(); @@ -169,11 +169,11 @@ describe('Decompose Workflow E2E', () => { const phases = await harness.createPhasesFromBreakdown(initiative.id, [ { number: 1, name: 'Phase 1', description: 'First phase' }, ]); - const plan = await harness.createPlan(phases[0].id, 'Auth Plan'); + const decomposeTask = await harness.createDecomposeTask(phases[0].id, 'Auth Plan'); // Create tasks from decomposition - await harness.caller.createTasksFromDecomposition({ - planId: plan.id, + await harness.caller.createChildTasks({ + parentTaskId: decomposeTask.id, tasks: [ { number: 1, name: 'Schema', description: 'Create tables', type: 'auto', dependencies: [] }, { number: 2, name: 'API', description: 'Create endpoints', type: 'auto', dependencies: [1] }, @@ -182,7 +182,7 @@ describe('Decompose Workflow E2E', () => { }); // Verify tasks created - const tasks = await harness.getTasksForPlan(plan.id); + const tasks = await harness.getChildTasks(decomposeTask.id); expect(tasks).toHaveLength(3); expect(tasks[0].name).toBe('Schema'); expect(tasks[1].name).toBe('API'); @@ -195,11 +195,11 @@ describe('Decompose Workflow E2E', () => { const phases = await harness.createPhasesFromBreakdown(initiative.id, [ { number: 1, name: 'Phase 1', description: 'First phase' }, ]); - const plan = await harness.createPlan(phases[0].id, 'Mixed Tasks'); + const decomposeTask = await harness.createDecomposeTask(phases[0].id, 'Mixed Tasks'); // Create tasks with all types - await harness.caller.createTasksFromDecomposition({ - planId: plan.id, + await harness.caller.createChildTasks({ + parentTaskId: decomposeTask.id, tasks: [ { number: 1, name: 'Auto Task', description: 'Automated work', type: 'auto' }, { number: 2, 
name: 'Human Verify', description: 'Visual check', type: 'checkpoint:human-verify', dependencies: [1] }, @@ -208,7 +208,7 @@ describe('Decompose Workflow E2E', () => { ], }); - const tasks = await harness.getTasksForPlan(plan.id); + const tasks = await harness.getChildTasks(decomposeTask.id); expect(tasks).toHaveLength(4); expect(tasks[0].type).toBe('auto'); expect(tasks[1].type).toBe('checkpoint:human-verify'); @@ -221,11 +221,11 @@ describe('Decompose Workflow E2E', () => { const phases = await harness.createPhasesFromBreakdown(initiative.id, [ { number: 1, name: 'Phase 1', description: 'First phase' }, ]); - const plan = await harness.createPlan(phases[0].id, 'Dependent Tasks'); + const decomposeTask = await harness.createDecomposeTask(phases[0].id, 'Dependent Tasks'); // Create tasks with complex dependencies - await harness.caller.createTasksFromDecomposition({ - planId: plan.id, + await harness.caller.createChildTasks({ + parentTaskId: decomposeTask.id, tasks: [ { number: 1, name: 'Task A', description: 'No deps', type: 'auto' }, { number: 2, name: 'Task B', description: 'Depends on A', type: 'auto', dependencies: [1] }, @@ -234,7 +234,7 @@ describe('Decompose Workflow E2E', () => { ], }); - const tasks = await harness.getTasksForPlan(plan.id); + const tasks = await harness.getChildTasks(decomposeTask.id); expect(tasks).toHaveLength(4); // All tasks should be created with correct names @@ -247,7 +247,7 @@ describe('Decompose Workflow E2E', () => { vi.useFakeTimers(); // 1. Create initiative - const initiative = await harness.createInitiative('Full Workflow Test', 'Complete workflow'); + const initiative = await harness.createInitiative('Full Workflow Test'); // 2. Create phase const phases = await harness.createPhasesFromBreakdown(initiative.id, [ @@ -255,7 +255,7 @@ describe('Decompose Workflow E2E', () => { ]); // 3. 
Create plan - const plan = await harness.createPlan(phases[0].id, 'Auth Plan', 'Implement JWT auth'); + const decomposeTask = await harness.createDecomposeTask(phases[0].id, 'Auth Plan', 'Implement JWT auth'); // 4. Spawn decompose agent harness.setArchitectDecomposeComplete('decomposer', [ @@ -267,7 +267,7 @@ describe('Decompose Workflow E2E', () => { await harness.caller.spawnArchitectDecompose({ name: 'decomposer', - planId: plan.id, + phaseId: phases[0].id, }); await harness.advanceTimers(); @@ -277,8 +277,8 @@ describe('Decompose Workflow E2E', () => { expect(events[0].payload.reason).toBe('decompose_complete'); // 6. Persist tasks (simulating what orchestrator would do after decompose) - await harness.caller.createTasksFromDecomposition({ - planId: plan.id, + await harness.caller.createChildTasks({ + parentTaskId: decomposeTask.id, tasks: [ { number: 1, name: 'Create user schema', description: 'Define User model', type: 'auto', dependencies: [] }, { number: 2, name: 'Implement JWT', description: 'Token generation', type: 'auto', dependencies: [1] }, @@ -288,7 +288,7 @@ describe('Decompose Workflow E2E', () => { }); // 7. 
Verify final state - const tasks = await harness.getTasksForPlan(plan.id); + const tasks = await harness.getChildTasks(decomposeTask.id); expect(tasks).toHaveLength(4); expect(tasks[0].name).toBe('Create user schema'); expect(tasks[3].type).toBe('checkpoint:human-verify'); diff --git a/src/test/e2e/edge-cases.test.ts b/src/test/e2e/edge-cases.test.ts index f6296c7..dcd0c68 100644 --- a/src/test/e2e/edge-cases.test.ts +++ b/src/test/e2e/edge-cases.test.ts @@ -52,9 +52,9 @@ describe('E2E Edge Cases', () => { }); await vi.runAllTimersAsync(); - // Set unrecoverable_error scenario BEFORE dispatch + // Set error scenario BEFORE dispatch harness.setAgentScenario(`agent-${taskAId.slice(0, 6)}`, { - status: 'unrecoverable_error', + status: 'error', error: 'Token limit exceeded', }); @@ -91,9 +91,9 @@ describe('E2E Edge Cases', () => { }); await vi.runAllTimersAsync(); - // Set unrecoverable_error scenario + // Set error scenario harness.setAgentScenario(`agent-${taskAId.slice(0, 6)}`, { - status: 'unrecoverable_error', + status: 'error', error: 'Token limit exceeded', }); @@ -119,9 +119,9 @@ describe('E2E Edge Cases', () => { }); await vi.runAllTimersAsync(); - // Set unrecoverable_error scenario + // Set error scenario harness.setAgentScenario(`agent-${taskAId.slice(0, 6)}`, { - status: 'unrecoverable_error', + status: 'error', error: 'Out of memory', }); diff --git a/src/test/e2e/happy-path.test.ts b/src/test/e2e/happy-path.test.ts index f854767..85d88a6 100644 --- a/src/test/e2e/happy-path.test.ts +++ b/src/test/e2e/happy-path.test.ts @@ -304,9 +304,13 @@ describe('E2E Happy Path', () => { const initialState = await harness.dispatchManager.getQueueState(); expect(initialState.queued.length).toBe(5); - // In current implementation, all tasks are "ready" (dependency loading TBD) - // Test verifies current behavior: priority ordering - expect(initialState.ready.length).toBe(5); + // Only tasks with no dependencies are ready: + // - Task 1A: no deps -> READY + // - Task 1B: 
no deps -> READY + // - Task 2A: depends on 1A -> NOT READY + // - Task 3A: depends on 1B -> NOT READY + // - Task 4A: depends on 2A, 3A -> NOT READY + expect(initialState.ready.length).toBe(2); // First dispatch: Task 1A (high priority, first queued) const result1 = await harness.dispatchManager.dispatchNext(); diff --git a/src/test/e2e/phase-dispatch.test.ts b/src/test/e2e/phase-dispatch.test.ts index a1e69cc..ffa1a55 100644 --- a/src/test/e2e/phase-dispatch.test.ts +++ b/src/test/e2e/phase-dispatch.test.ts @@ -34,7 +34,6 @@ describe('Phase Parallel Execution', () => { // Create initiative with 2 independent phases (no dependencies) const initiative = await harness.initiativeRepository.create({ name: 'Independent Phases Test', - description: 'Test initiative with independent phases', status: 'active', }); @@ -107,7 +106,6 @@ describe('Phase Parallel Execution', () => { // Create phases: A, B (depends on A) const initiative = await harness.initiativeRepository.create({ name: 'Sequential Phases Test', - description: 'Test initiative with sequential phases', status: 'active', }); @@ -192,7 +190,6 @@ describe('Phase Parallel Execution', () => { // Create phases: A, B (depends on A), C (depends on A), D (depends on B, C) const initiative = await harness.initiativeRepository.create({ name: 'Diamond Pattern Test', - description: 'Test initiative with diamond dependency pattern', status: 'active', }); @@ -309,7 +306,6 @@ describe('Phase Parallel Execution', () => { // Create phases: A, B (depends on A) const initiative = await harness.initiativeRepository.create({ name: 'Blocked Phase Test', - description: 'Test initiative with blocked phase', status: 'active', }); @@ -380,7 +376,6 @@ describe('Phase Parallel Execution', () => { // Create chain: A -> B -> C, then block A const initiative = await harness.initiativeRepository.create({ name: 'Chain Block Test', - description: 'Test blocking propagates down chain', status: 'active', }); diff --git 
a/src/test/e2e/recovery-scenarios.test.ts b/src/test/e2e/recovery-scenarios.test.ts index 246e75d..2f8f46e 100644 --- a/src/test/e2e/recovery-scenarios.test.ts +++ b/src/test/e2e/recovery-scenarios.test.ts @@ -61,8 +61,8 @@ describe('E2E Recovery Scenarios', () => { // Verify: even after clearing in-memory queue state, // we can still find pending tasks from database - const allTasks = await harness.taskRepository.findByPlanId( - seeded.plans.get('Plan 1')! + const allTasks = await harness.taskRepository.findByParentTaskId( + seeded.taskGroups.get('Task Group 1')! ); const pendingTasks = allTasks.filter((t) => t.status === 'pending'); diff --git a/src/test/fixtures.ts b/src/test/fixtures.ts index b3ee5cf..abde256 100644 --- a/src/test/fixtures.ts +++ b/src/test/fixtures.ts @@ -10,7 +10,6 @@ import type { DrizzleDatabase } from '../db/index.js'; import { DrizzleInitiativeRepository, DrizzlePhaseRepository, - DrizzlePlanRepository, DrizzleTaskRepository, } from '../db/repositories/drizzle/index.js'; import { taskDependencies } from '../db/schema.js'; @@ -29,17 +28,20 @@ export interface TaskFixture { name: string; /** Task priority */ priority?: 'low' | 'medium' | 'high'; + /** Task category */ + category?: 'execute' | 'research' | 'discuss' | 'breakdown' | 'decompose' | 'refine' | 'verify' | 'merge' | 'review'; /** Names of other tasks in same fixture this task depends on */ dependsOn?: string[]; } /** - * Plan fixture definition. + * Task group fixture definition (replaces Plan). + * Tasks are grouped by parent task in the new model. 
*/ -export interface PlanFixture { - /** Plan name */ +export interface TaskGroupFixture { + /** Group name (becomes a decompose task) */ name: string; - /** Tasks in this plan */ + /** Tasks in this group */ tasks: TaskFixture[]; } @@ -49,8 +51,8 @@ export interface PlanFixture { export interface PhaseFixture { /** Phase name */ name: string; - /** Plans in this phase */ - plans: PlanFixture[]; + /** Task groups in this phase (each group becomes a parent decompose task) */ + taskGroups: TaskGroupFixture[]; } /** @@ -72,8 +74,8 @@ export interface SeededFixture { initiativeId: string; /** Map of phase names to IDs */ phases: Map<string, string>; - /** Map of plan names to IDs */ - plans: Map<string, string>; + /** Map of task group names to parent task IDs */ + taskGroups: Map<string, string>; /** Map of task names to IDs */ tasks: Map<string, string>; } @@ -85,7 +87,7 @@ export interface SeededFixture { /** * Seed a complete task hierarchy from a fixture definition. * - * Creates initiative, phases, plans, and tasks in correct order. + * Creates initiative, phases, decompose tasks (as parent), and child tasks. * Resolves task dependencies by name to actual task IDs. 
* * @param db - Drizzle database instance @@ -99,12 +101,11 @@ export async function seedFixture( // Create repositories const initiativeRepo = new DrizzleInitiativeRepository(db); const phaseRepo = new DrizzlePhaseRepository(db); - const planRepo = new DrizzlePlanRepository(db); const taskRepo = new DrizzleTaskRepository(db); // Result maps const phasesMap = new Map<string, string>(); - const plansMap = new Map<string, string>(); + const taskGroupsMap = new Map<string, string>(); const tasksMap = new Map<string, string>(); // Collect all task dependencies to resolve after creation @@ -113,7 +114,6 @@ export async function seedFixture( // Create initiative const initiative = await initiativeRepo.create({ name: fixture.name, - description: `Test initiative: ${fixture.name}`, status: 'active', }); @@ -129,29 +129,37 @@ export async function seedFixture( }); phasesMap.set(phaseFixture.name, phase.id); - // Create plans in phase - let planNumber = 1; - for (const planFixture of phaseFixture.plans) { - const plan = await planRepo.create({ + // Create task groups as parent decompose tasks + let taskOrder = 0; + for (const groupFixture of phaseFixture.taskGroups) { + // Create parent decompose task + const parentTask = await taskRepo.create({ phaseId: phase.id, - number: planNumber++, - name: planFixture.name, - description: `Test plan: ${planFixture.name}`, - status: 'pending', + initiativeId: initiative.id, + name: groupFixture.name, + description: `Test task group: ${groupFixture.name}`, + category: 'decompose', + type: 'auto', + priority: 'medium', + status: 'completed', // Decompose tasks are completed once child tasks are created + order: taskOrder++, }); - plansMap.set(planFixture.name, plan.id); + taskGroupsMap.set(groupFixture.name, parentTask.id); - // Create tasks in plan - let taskOrder = 0; - for (const taskFixture of planFixture.tasks) { + // Create child tasks linked to parent + let childOrder = 0; + for (const taskFixture of groupFixture.tasks) { const 
task = await taskRepo.create({ - planId: plan.id, + parentTaskId: parentTask.id, + phaseId: phase.id, + initiativeId: initiative.id, name: taskFixture.name, description: `Test task: ${taskFixture.name}`, + category: taskFixture.category ?? 'execute', type: 'auto', priority: taskFixture.priority ?? 'medium', status: 'pending', - order: taskOrder++, + order: childOrder++, }); tasksMap.set(taskFixture.id, task.id); @@ -189,7 +197,7 @@ export async function seedFixture( return { initiativeId: initiative.id, phases: phasesMap, - plans: plansMap, + taskGroups: taskGroupsMap, tasks: tasksMap, }; } @@ -199,7 +207,7 @@ export async function seedFixture( // ============================================================================= /** - * Simple fixture: 1 initiative -> 1 phase -> 1 plan -> 3 tasks. + * Simple fixture: 1 initiative -> 1 phase -> 1 task group -> 3 tasks. * * Task dependency structure: * - Task A: no dependencies @@ -211,9 +219,9 @@ export const SIMPLE_FIXTURE: InitiativeFixture = { phases: [ { name: 'Phase 1', - plans: [ + taskGroups: [ { - name: 'Plan 1', + name: 'Task Group 1', tasks: [ { id: 'Task A', name: 'Task A', priority: 'high' }, { id: 'Task B', name: 'Task B', priority: 'medium', dependsOn: ['Task A'] }, @@ -226,27 +234,27 @@ export const SIMPLE_FIXTURE: InitiativeFixture = { }; /** - * Parallel fixture: 1 initiative -> 1 phase -> 2 plans (each with 2 independent tasks). + * Parallel fixture: 1 initiative -> 1 phase -> 2 task groups (each with 2 independent tasks). 
* * Task structure: - * - Plan 1: Task X, Task Y (independent) - * - Plan 2: Task P, Task Q (independent) + * - Group A: Task X, Task Y (independent) + * - Group B: Task P, Task Q (independent) */ export const PARALLEL_FIXTURE: InitiativeFixture = { name: 'Parallel Test Initiative', phases: [ { name: 'Parallel Phase', - plans: [ + taskGroups: [ { - name: 'Plan A', + name: 'Group A', tasks: [ { id: 'Task X', name: 'Task X', priority: 'high' }, { id: 'Task Y', name: 'Task Y', priority: 'medium' }, ], }, { - name: 'Plan B', + name: 'Group B', tasks: [ { id: 'Task P', name: 'Task P', priority: 'high' }, { id: 'Task Q', name: 'Task Q', priority: 'low' }, @@ -258,27 +266,27 @@ export const PARALLEL_FIXTURE: InitiativeFixture = { }; /** - * Complex fixture: 1 initiative -> 2 phases -> 4 plans with cross-plan dependencies. + * Complex fixture: 1 initiative -> 2 phases -> 4 task groups with cross-group dependencies. * * Structure: - * - Phase 1: Plan 1 (Task 1A, 1B), Plan 2 (Task 2A depends on 1A) - * - Phase 2: Plan 3 (Task 3A depends on 1B), Plan 4 (Task 4A depends on 2A and 3A) + * - Phase 1: Group 1 (Task 1A, 1B), Group 2 (Task 2A depends on 1A) + * - Phase 2: Group 3 (Task 3A depends on 1B), Group 4 (Task 4A depends on 2A and 3A) */ export const COMPLEX_FIXTURE: InitiativeFixture = { name: 'Complex Test Initiative', phases: [ { name: 'Phase 1', - plans: [ + taskGroups: [ { - name: 'Plan 1', + name: 'Group 1', tasks: [ { id: 'Task 1A', name: 'Task 1A', priority: 'high' }, { id: 'Task 1B', name: 'Task 1B', priority: 'medium' }, ], }, { - name: 'Plan 2', + name: 'Group 2', tasks: [ { id: 'Task 2A', name: 'Task 2A', priority: 'high', dependsOn: ['Task 1A'] }, ], @@ -287,15 +295,15 @@ export const COMPLEX_FIXTURE: InitiativeFixture = { }, { name: 'Phase 2', - plans: [ + taskGroups: [ { - name: 'Plan 3', + name: 'Group 3', tasks: [ { id: 'Task 3A', name: 'Task 3A', priority: 'high', dependsOn: ['Task 1B'] }, ], }, { - name: 'Plan 4', + name: 'Group 4', tasks: [ { id: 'Task 
4A', diff --git a/src/test/harness.test.ts b/src/test/harness.test.ts index dea67b2..a76f7ea 100644 --- a/src/test/harness.test.ts +++ b/src/test/harness.test.ts @@ -91,9 +91,9 @@ describe('TestHarness', () => { expect(seeded.phases.size).toBe(1); expect(seeded.phases.has('Phase 1')).toBe(true); - // Check plans created - expect(seeded.plans.size).toBe(1); - expect(seeded.plans.has('Plan 1')).toBe(true); + // Check task groups created + expect(seeded.taskGroups.size).toBe(1); + expect(seeded.taskGroups.has('Task Group 1')).toBe(true); // Check tasks created expect(seeded.tasks.size).toBe(3); @@ -116,7 +116,7 @@ describe('TestHarness', () => { const seeded = await harness.seedFixture(PARALLEL_FIXTURE); expect(seeded.phases.size).toBe(1); - expect(seeded.plans.size).toBe(2); + expect(seeded.taskGroups.size).toBe(2); expect(seeded.tasks.size).toBe(4); expect(seeded.tasks.has('Task X')).toBe(true); expect(seeded.tasks.has('Task Q')).toBe(true); @@ -126,7 +126,7 @@ describe('TestHarness', () => { const seeded = await harness.seedFixture(COMPLEX_FIXTURE); expect(seeded.phases.size).toBe(2); - expect(seeded.plans.size).toBe(4); + expect(seeded.taskGroups.size).toBe(4); expect(seeded.tasks.size).toBe(5); }); }); @@ -298,9 +298,9 @@ describe('TestHarness', () => { await vi.runAllTimersAsync(); harness.clearEvents(); - // Set unrecoverable_error scenario for the agent that will be spawned + // Set error scenario for the agent that will be spawned harness.setAgentScenario(`agent-${taskAId.slice(0, 6)}`, { - status: 'unrecoverable_error', + status: 'error', delay: 0, error: 'Test crash', }); diff --git a/src/test/harness.ts b/src/test/harness.ts index df999df..96bf96f 100644 --- a/src/test/harness.ts +++ b/src/test/harness.ts @@ -13,7 +13,6 @@ import { EventEmitterBus } from '../events/bus.js'; import type { AgentManager } from '../agent/types.js'; import { MockAgentManager, type MockAgentScenario } from '../agent/mock-manager.js'; import type { PendingQuestions, QuestionItem 
} from '../agent/types.js'; -import type { Decision, PhaseBreakdown, TaskBreakdown } from '../agent/schema.js'; import type { WorktreeManager, Worktree, WorktreeDiff, MergeResult } from '../git/types.js'; import type { DispatchManager, PhaseDispatchManager } from '../dispatch/types.js'; import { DefaultDispatchManager } from '../dispatch/manager.js'; @@ -25,17 +24,9 @@ import type { MessageRepository } from '../db/repositories/message-repository.js import type { AgentRepository } from '../db/repositories/agent-repository.js'; import type { InitiativeRepository } from '../db/repositories/initiative-repository.js'; import type { PhaseRepository } from '../db/repositories/phase-repository.js'; -import type { PlanRepository } from '../db/repositories/plan-repository.js'; -import type { Initiative, Phase, Plan, Task } from '../db/schema.js'; -import { - DrizzleTaskRepository, - DrizzleMessageRepository, - DrizzleAgentRepository, - DrizzleInitiativeRepository, - DrizzlePhaseRepository, - DrizzlePlanRepository, -} from '../db/repositories/drizzle/index.js'; +import type { Initiative, Phase, Task } from '../db/schema.js'; import { createTestDatabase } from '../db/repositories/drizzle/test-helpers.js'; +import { createRepositories } from '../container.js'; import { seedFixture, type InitiativeFixture, @@ -212,8 +203,6 @@ export interface TestHarness { initiativeRepository: InitiativeRepository; /** Phase repository */ phaseRepository: PhaseRepository; - /** Plan repository */ - planRepository: PlanRepository; // tRPC Caller /** tRPC caller for direct procedure calls */ @@ -295,11 +284,11 @@ export interface TestHarness { // ========================================================================== /** - * Set up scenario where architect completes discussion with decisions. + * Set up scenario where architect completes discussion. 
*/ setArchitectDiscussComplete( agentName: string, - decisions: Decision[], + _decisions: unknown[], summary: string ): void; @@ -312,19 +301,19 @@ export interface TestHarness { ): void; /** - * Set up scenario where architect completes breakdown with phases. + * Set up scenario where architect completes breakdown. */ setArchitectBreakdownComplete( agentName: string, - phases: PhaseBreakdown[] + _phases: unknown[] ): void; /** - * Set up scenario where architect completes decomposition with tasks. + * Set up scenario where architect completes decomposition. */ setArchitectDecomposeComplete( agentName: string, - tasks: TaskBreakdown[] + _tasks: unknown[] ): void; /** @@ -352,7 +341,7 @@ export interface TestHarness { /** * Create initiative through tRPC. */ - createInitiative(name: string, description?: string): Promise<Initiative>; + createInitiative(name: string): Promise<Initiative>; /** * Create phases from breakdown output through tRPC. @@ -363,18 +352,18 @@ export interface TestHarness { ): Promise<Phase[]>; /** - * Create a plan through tRPC. + * Create a decompose task through tRPC (replaces createPlan). */ - createPlan( + createDecomposeTask( phaseId: string, name: string, description?: string - ): Promise<Plan>; + ): Promise<Task>; /** - * Get tasks for a plan through tRPC. + * Get child tasks of a parent task through tRPC. 
*/ - getTasksForPlan(planId: string): Promise<Task[]>; + getChildTasks(parentTaskId: string): Promise<Task[]>; } // ============================================================================= @@ -406,12 +395,8 @@ export function createTestHarness(): TestHarness { const worktreeManager = new MockWorktreeManager(); // Create repositories - const taskRepository = new DrizzleTaskRepository(db); - const messageRepository = new DrizzleMessageRepository(db); - const agentRepository = new DrizzleAgentRepository(db); - const initiativeRepository = new DrizzleInitiativeRepository(db); - const phaseRepository = new DrizzlePhaseRepository(db); - const planRepository = new DrizzlePlanRepository(db); + const repos = createRepositories(db); + const { taskRepository, messageRepository, agentRepository, initiativeRepository, phaseRepository } = repos; // Create real managers wired to mocks const dispatchManager = new DefaultDispatchManager( @@ -446,7 +431,6 @@ export function createTestHarness(): TestHarness { coordinationManager, initiativeRepository, phaseRepository, - planRepository, }); // Create tRPC caller @@ -470,7 +454,6 @@ export function createTestHarness(): TestHarness { agentRepository, initiativeRepository, phaseRepository, - planRepository, // tRPC Caller caller, @@ -506,7 +489,7 @@ export function createTestHarness(): TestHarness { }, setAgentError: (agentName: string, error: string) => { - agentManager.setScenario(agentName, { status: 'unrecoverable_error', error }); + agentManager.setScenario(agentName, { status: 'error', error }); }, getPendingQuestions: (agentId: string) => agentManager.getPendingQuestions(agentId), @@ -536,13 +519,12 @@ export function createTestHarness(): TestHarness { setArchitectDiscussComplete: ( agentName: string, - decisions: Decision[], + _decisions: unknown[], summary: string ) => { agentManager.setScenario(agentName, { - status: 'context_complete', - decisions, - summary, + status: 'done', + result: summary, delay: 0, }); }, @@ 
-560,22 +542,22 @@ export function createTestHarness(): TestHarness { setArchitectBreakdownComplete: ( agentName: string, - phases: PhaseBreakdown[] + _phases: unknown[] ) => { agentManager.setScenario(agentName, { - status: 'breakdown_complete', - phases, + status: 'done', + result: 'Breakdown complete', delay: 0, }); }, setArchitectDecomposeComplete: ( agentName: string, - tasks: TaskBreakdown[] + _tasks: unknown[] ) => { agentManager.setScenario(agentName, { - status: 'decompose_complete', - tasks, + status: 'done', + result: 'Decompose complete', delay: 0, }); }, @@ -607,8 +589,8 @@ export function createTestHarness(): TestHarness { return caller.listPhases({ initiativeId }); }, - createInitiative: (name: string, description?: string) => { - return caller.createInitiative({ name, description }); + createInitiative: (name: string) => { + return caller.createInitiative({ name }); }, createPhasesFromBreakdown: ( @@ -618,12 +600,19 @@ export function createTestHarness(): TestHarness { return caller.createPhasesFromBreakdown({ initiativeId, phases }); }, - createPlan: (phaseId: string, name: string, description?: string) => { - return caller.createPlan({ phaseId, name, description }); + createDecomposeTask: async (phaseId: string, name: string, description?: string) => { + return caller.createPhaseTask({ + phaseId, + name, + description, + category: 'decompose', + type: 'auto', + requiresApproval: true, + }); }, - getTasksForPlan: (planId: string) => { - return caller.listTasks({ planId }); + getChildTasks: (parentTaskId: string) => { + return caller.listTasks({ parentTaskId }); }, }; diff --git a/src/test/index.ts b/src/test/index.ts index e5f94be..203c018 100644 --- a/src/test/index.ts +++ b/src/test/index.ts @@ -8,7 +8,7 @@ export { seedFixture, type TaskFixture, - type PlanFixture, + type TaskGroupFixture, type PhaseFixture, type InitiativeFixture, type SeededFixture, diff --git a/src/test/integration/real-claude.test.ts 
b/src/test/integration/real-claude.test.ts index cbb628a..47d66ae 100644 --- a/src/test/integration/real-claude.test.ts +++ b/src/test/integration/real-claude.test.ts @@ -14,36 +14,18 @@ * - Confirm MockAgentManager accurately simulates real CLI behavior * - Document actual response structure and costs * - * Findings from validation run (2026-02-02): - * - Execute mode (done): Works, ~$0.025, ~6s - * - Execute mode (questions): Works, questions array validated - * - Discuss mode: Works, decisions array validated - * - Breakdown mode: Works, phases array validated - * - Decompose mode: Works, tasks array validated + * Updated (2026-02-06): Now uses the universal agentSignalSchema instead of + * per-mode schemas. Agents output trivial signals (done/questions/error) and + * write files instead of producing mode-specific JSON. * - * Key observation: When using --json-schema flag: - * - `result` field is EMPTY (not the structured output) - * - `structured_output` field contains the validated JSON object - * - This is different from non-schema mode where result contains text - * - * Total validation cost: ~$0.15 (5 tests) - * - * Conclusion: MockAgentManager accurately simulates real CLI behavior. - * JSON schemas work correctly with Claude CLI --json-schema flag. - * ClaudeAgentManager correctly reads from structured_output field. 
+ * Total validation cost: ~$0.10 (3 tests) */ import { describe, it, expect, beforeAll } from 'vitest'; import { execa } from 'execa'; import { - agentOutputJsonSchema, - agentOutputSchema, - discussOutputJsonSchema, - discussOutputSchema, - breakdownOutputJsonSchema, - breakdownOutputSchema, - decomposeOutputJsonSchema, - decomposeOutputSchema, + agentSignalJsonSchema, + agentSignalSchema, } from '../../agent/schema.js'; /** @@ -129,15 +111,15 @@ describeReal('Real Claude CLI Integration', () => { console.log('These tests call the real Claude API and incur costs.\n'); }); - describe('Execute Mode Schema', () => { + describe('Universal Signal Schema', () => { it( - 'should return done status with result', + 'should return done status', async () => { const prompt = `Complete this simple task: Say "Hello, World!" as a test. Output your response in the required JSON format with status "done".`; - const { cliResult, structuredOutput } = await callClaudeCli(prompt, agentOutputJsonSchema); + const { cliResult, structuredOutput } = await callClaudeCli(prompt, agentSignalJsonSchema); console.log(' Output:', JSON.stringify(structuredOutput, null, 2)); @@ -147,11 +129,8 @@ Output your response in the required JSON format with status "done".`; expect(cliResult.structured_output).toBeDefined(); // Validate against Zod schema - const parsed = agentOutputSchema.parse(structuredOutput); + const parsed = agentSignalSchema.parse(structuredOutput); expect(parsed.status).toBe('done'); - if (parsed.status === 'done') { - expect(parsed.result).toBeTruthy(); - } }, TEST_TIMEOUT ); @@ -165,12 +144,12 @@ You MUST ask clarifying questions before proceeding. 
You cannot complete this ta Output your response with status "questions" and include at least 2 questions with unique IDs.`; - const { structuredOutput } = await callClaudeCli(prompt, agentOutputJsonSchema); + const { structuredOutput } = await callClaudeCli(prompt, agentSignalJsonSchema); console.log(' Output:', JSON.stringify(structuredOutput, null, 2)); // Validate against Zod schema - const parsed = agentOutputSchema.parse(structuredOutput); + const parsed = agentSignalSchema.parse(structuredOutput); expect(parsed.status).toBe('questions'); if (parsed.status === 'questions') { expect(Array.isArray(parsed.questions)).toBe(true); @@ -181,90 +160,21 @@ Output your response with status "questions" and include at least 2 questions wi }, TEST_TIMEOUT ); - }); - describe('Discuss Mode Schema', () => { it( - 'should return context_complete with decisions', + 'should return error status', async () => { - const prompt = `You are gathering requirements for a simple feature: "Add a login button" + const prompt = `You have encountered an unrecoverable error. Output your response with status "error" and a descriptive error message.`; -The user has already told you: -- Use OAuth with Google -- Button should be blue -- Place it in the top-right corner - -You have enough information. 
Output context_complete with the decisions captured as an array.`; - - const { structuredOutput } = await callClaudeCli(prompt, discussOutputJsonSchema); + const { structuredOutput } = await callClaudeCli(prompt, agentSignalJsonSchema); console.log(' Output:', JSON.stringify(structuredOutput, null, 2)); // Validate against Zod schema - const parsed = discussOutputSchema.parse(structuredOutput); - expect(parsed.status).toBe('context_complete'); - if (parsed.status === 'context_complete') { - expect(Array.isArray(parsed.decisions)).toBe(true); - expect(parsed.decisions.length).toBeGreaterThanOrEqual(1); - expect(parsed.summary).toBeTruthy(); - } - }, - TEST_TIMEOUT - ); - }); - - describe('Breakdown Mode Schema', () => { - it( - 'should return breakdown_complete with phases', - async () => { - const prompt = `You are breaking down an initiative: "Build a simple TODO app" - -Create a breakdown with 2-3 phases for this very simple app. Keep it minimal - just database, API, and UI. - -Output breakdown_complete with the phases array. Each phase needs number, name, description, and dependencies.`; - - const { structuredOutput } = await callClaudeCli(prompt, breakdownOutputJsonSchema); - - console.log(' Output:', JSON.stringify(structuredOutput, null, 2)); - - // Validate against Zod schema - const parsed = breakdownOutputSchema.parse(structuredOutput); - expect(parsed.status).toBe('breakdown_complete'); - if (parsed.status === 'breakdown_complete') { - expect(Array.isArray(parsed.phases)).toBe(true); - expect(parsed.phases.length).toBeGreaterThanOrEqual(2); - expect(parsed.phases[0].number).toBe(1); - expect(parsed.phases[0].name).toBeTruthy(); - expect(parsed.phases[0].description).toBeTruthy(); - } - }, - TEST_TIMEOUT - ); - }); - - describe('Decompose Mode Schema', () => { - it( - 'should return decompose_complete with tasks', - async () => { - const prompt = `You are decomposing a plan: "Implement user authentication" - -Create 2-3 simple tasks for this plan. 
Tasks should be atomic units of work. - -Output decompose_complete with the tasks array. Each task needs number, name, description, type (default to "auto"), and dependencies.`; - - const { structuredOutput } = await callClaudeCli(prompt, decomposeOutputJsonSchema); - - console.log(' Output:', JSON.stringify(structuredOutput, null, 2)); - - // Validate against Zod schema - const parsed = decomposeOutputSchema.parse(structuredOutput); - expect(parsed.status).toBe('decompose_complete'); - if (parsed.status === 'decompose_complete') { - expect(Array.isArray(parsed.tasks)).toBe(true); - expect(parsed.tasks.length).toBeGreaterThanOrEqual(2); - expect(parsed.tasks[0].number).toBe(1); - expect(parsed.tasks[0].name).toBeTruthy(); - expect(parsed.tasks[0].description).toBeTruthy(); + const parsed = agentSignalSchema.parse(structuredOutput); + expect(parsed.status).toBe('error'); + if (parsed.status === 'error') { + expect(parsed.error).toBeTruthy(); } }, TEST_TIMEOUT diff --git a/src/test/integration/real-providers/claude-manager.test.ts b/src/test/integration/real-providers/claude-manager.test.ts new file mode 100644 index 0000000..9ae1bb4 --- /dev/null +++ b/src/test/integration/real-providers/claude-manager.test.ts @@ -0,0 +1,304 @@ +/** + * Real Claude CLI Manager Integration Tests + * + * IMPORTANT: These tests call the REAL Claude CLI and incur API costs! + * They are SKIPPED by default and should only be run manually for validation. 
+ * + * To run these tests: + * ```bash + * REAL_CLAUDE_TESTS=1 npm test -- src/test/integration/real-providers/claude-manager.test.ts --test-timeout=300000 + * ``` + * + * Tests covered: + * - Output stream parsing (text_delta events) + * - Session ID extraction from init event + * - Result parsing and validation + * - Session resume with user answers + * + * Estimated cost: ~$0.10 per full run + */ + +import { describe, it, expect, beforeAll, afterAll, beforeEach } from 'vitest'; +import { + createRealProviderHarness, + describeRealClaude, + REAL_TEST_TIMEOUT, + sleep, + type RealProviderHarness, +} from './harness.js'; +import { MINIMAL_PROMPTS } from './prompts.js'; +import type { AgentSpawnedEvent, AgentStoppedEvent, AgentOutputEvent } from '../../../events/types.js'; + +describeRealClaude('Real Claude Manager Integration', () => { + let harness: RealProviderHarness; + + beforeAll(async () => { + console.log('\n=== Running Real Claude Manager Tests ==='); + console.log('These tests call the real Claude API and incur costs.\n'); + harness = await createRealProviderHarness({ provider: 'claude' }); + }); + + afterAll(async () => { + await harness.cleanup(); + }); + + beforeEach(() => { + harness.clearEvents(); + }); + + describe('Output Parsing', () => { + it( + 'parses text_delta events from stream', + async () => { + // Spawn agent with streaming prompt + const agent = await harness.agentManager.spawn({ + taskId: null, + prompt: MINIMAL_PROMPTS.streaming, + mode: 'execute', + provider: 'claude', + }); + + expect(agent.id).toBeTruthy(); + expect(agent.status).toBe('running'); + + // Wait for completion + const result = await harness.waitForAgentCompletion(agent.id, REAL_TEST_TIMEOUT); + + // Verify we got output (either via events or buffer) + const outputEvents = harness.getEventsByType<AgentOutputEvent>('agent:output'); + const outputBuffer = harness.agentManager.getOutputBuffer(agent.id); + + // At least one should have content (output may be emitted as event 
or buffered) + const hasOutput = outputEvents.length > 0 || outputBuffer.length > 0; + console.log(' Output events:', outputEvents.length); + console.log(' Output buffer:', outputBuffer.length); + + // Verify completion + expect(result).toBeTruthy(); + console.log(' Output chunks:', outputBuffer.length); + console.log(' Result:', result?.message); + }, + REAL_TEST_TIMEOUT + ); + + it( + 'parses init event and extracts session ID', + async () => { + // Spawn agent with simple done prompt + const agent = await harness.agentManager.spawn({ + taskId: null, + prompt: MINIMAL_PROMPTS.done, + mode: 'execute', + provider: 'claude', + }); + + // Wait for completion + await harness.waitForAgentCompletion(agent.id, REAL_TEST_TIMEOUT); + + // Verify session ID was extracted and persisted + const dbAgent = await harness.agentRepository.findById(agent.id); + expect(dbAgent?.sessionId).toBeTruthy(); + expect(dbAgent?.sessionId).toMatch(/^[a-f0-9-]+$/); + + console.log(' Session ID:', dbAgent?.sessionId); + }, + REAL_TEST_TIMEOUT + ); + + it( + 'parses result event with completion', + async () => { + // Spawn agent with simple done prompt + const agent = await harness.agentManager.spawn({ + taskId: null, + prompt: MINIMAL_PROMPTS.done, + mode: 'execute', + provider: 'claude', + }); + + // Wait for completion + const result = await harness.waitForAgentCompletion(agent.id, REAL_TEST_TIMEOUT); + + // Verify result was parsed + expect(result).toBeTruthy(); + expect(result?.success).toBe(true); + expect(result?.message).toBeTruthy(); + + // Verify events + const spawnedEvents = harness.getEventsByType<AgentSpawnedEvent>('agent:spawned'); + expect(spawnedEvents.length).toBe(1); + expect(spawnedEvents[0].payload.agentId).toBe(agent.id); + expect(spawnedEvents[0].payload.provider).toBe('claude'); + + const stoppedEvents = harness.getEventsByType<AgentStoppedEvent>('agent:stopped'); + expect(stoppedEvents.length).toBe(1); + expect(stoppedEvents[0].payload.agentId).toBe(agent.id); + 
expect(stoppedEvents[0].payload.reason).toBe('task_complete'); + + console.log(' Result message:', result?.message); + }, + REAL_TEST_TIMEOUT + ); + }); + + describe('Questions Flow', () => { + it( + 'parses questions status and enters waiting_for_input', + async () => { + // Spawn agent with questions prompt + const agent = await harness.agentManager.spawn({ + taskId: null, + prompt: MINIMAL_PROMPTS.questions, + mode: 'execute', + provider: 'claude', + }); + + // Wait for waiting_for_input status + const questions = await harness.waitForAgentWaiting(agent.id, REAL_TEST_TIMEOUT); + + // Verify questions were parsed + expect(questions).toBeTruthy(); + expect(questions?.questions).toBeTruthy(); + expect(questions?.questions.length).toBeGreaterThan(0); + expect(questions?.questions[0].id).toBeTruthy(); + expect(questions?.questions[0].question).toBeTruthy(); + + // Verify agent status + const dbAgent = await harness.agentRepository.findById(agent.id); + expect(dbAgent?.status).toBe('waiting_for_input'); + expect(dbAgent?.sessionId).toBeTruthy(); + + console.log(' Questions:', questions?.questions.length); + console.log(' First question:', questions?.questions[0].question); + }, + REAL_TEST_TIMEOUT + ); + }); + + describe('Session Resume', () => { + it( + 'resumes session with user answers', + async () => { + // 1. Spawn agent that asks questions + const agent = await harness.agentManager.spawn({ + taskId: null, + prompt: MINIMAL_PROMPTS.questions, + mode: 'execute', + provider: 'claude', + }); + + // 2. Wait for waiting_for_input + const questions = await harness.waitForAgentWaiting(agent.id, REAL_TEST_TIMEOUT); + expect(questions?.questions.length).toBeGreaterThan(0); + + const sessionIdBeforeResume = (await harness.agentRepository.findById(agent.id))?.sessionId; + console.log(' Session ID before resume:', sessionIdBeforeResume); + console.log(' Questions received:', questions?.questions.map((q) => q.id).join(', ')); + + harness.clearEvents(); + + // 3. 
Resume with answer + const answers: Record<string, string> = {}; + for (const q of questions?.questions ?? []) { + answers[q.id] = `Answer to ${q.id}`; + } + + await harness.agentManager.resume(agent.id, answers); + + // 4. Wait for completion or another waiting state + let attempts = 0; + let finalStatus = 'running'; + while (attempts < 60) { + const agent2 = await harness.agentRepository.findById(agent.id); + if (agent2?.status !== 'running') { + finalStatus = agent2?.status ?? 'unknown'; + break; + } + await sleep(1000); + attempts++; + } + + // Verify the agent processed the resume (either completed or asked more questions) + const dbAgent = await harness.agentRepository.findById(agent.id); + console.log(' Final status:', dbAgent?.status); + + // Agent should not still be running + expect(['idle', 'waiting_for_input', 'crashed']).toContain(dbAgent?.status); + + // If idle, verify result + if (dbAgent?.status === 'idle') { + const result = await harness.agentManager.getResult(agent.id); + console.log(' Result:', result?.message); + expect(result).toBeTruthy(); + } + }, + REAL_TEST_TIMEOUT * 2 // Double timeout for two-step process + ); + + it( + 'maintains session continuity across resume', + async () => { + // 1. Spawn agent that asks questions + const agent = await harness.agentManager.spawn({ + taskId: null, + prompt: MINIMAL_PROMPTS.questions, + mode: 'execute', + provider: 'claude', + }); + + // 2. Wait for waiting_for_input + const questions = await harness.waitForAgentWaiting(agent.id, REAL_TEST_TIMEOUT); + expect(questions?.questions.length).toBeGreaterThan(0); + + const sessionIdBefore = (await harness.agentRepository.findById(agent.id))?.sessionId; + expect(sessionIdBefore).toBeTruthy(); + + // 3. Resume with answer + const answers: Record<string, string> = {}; + for (const q of questions?.questions ?? []) { + answers[q.id] = `Answer to ${q.id}`; + } + + await harness.agentManager.resume(agent.id, answers); + + // 4. 
Wait for completion + await harness.waitForAgentCompletion(agent.id, REAL_TEST_TIMEOUT); + + // Verify session ID exists (may be same or new depending on CLI behavior) + const sessionIdAfter = (await harness.agentRepository.findById(agent.id))?.sessionId; + expect(sessionIdAfter).toBeTruthy(); + + console.log(' Session ID before:', sessionIdBefore); + console.log(' Session ID after:', sessionIdAfter); + }, + REAL_TEST_TIMEOUT * 2 + ); + }); + + describe('Error Handling', () => { + it( + 'handles error status', + async () => { + // Spawn agent with error prompt + const agent = await harness.agentManager.spawn({ + taskId: null, + prompt: MINIMAL_PROMPTS.error, + mode: 'execute', + provider: 'claude', + }); + + // Wait for completion (will be crashed) + const result = await harness.waitForAgentCompletion(agent.id, REAL_TEST_TIMEOUT); + + // Verify error was handled + const dbAgent = await harness.agentRepository.findById(agent.id); + expect(dbAgent?.status).toBe('crashed'); + expect(result?.success).toBe(false); + expect(result?.message).toContain('Test error'); + + console.log(' Error message:', result?.message); + }, + REAL_TEST_TIMEOUT + ); + }); +}); diff --git a/src/test/integration/real-providers/codex-manager.test.ts b/src/test/integration/real-providers/codex-manager.test.ts new file mode 100644 index 0000000..622b84a --- /dev/null +++ b/src/test/integration/real-providers/codex-manager.test.ts @@ -0,0 +1,176 @@ +/** + * Real Codex CLI Manager Integration Tests + * + * IMPORTANT: These tests call the REAL Codex CLI and incur API costs! + * They are SKIPPED by default and should only be run manually for validation. 
+ * + * To run these tests: + * ```bash + * REAL_CODEX_TESTS=1 npm test -- src/test/integration/real-providers/codex-manager.test.ts --test-timeout=300000 + * ``` + * + * Tests covered: + * - Codex spawn and thread_id extraction + * - Generic output parsing (non-schema) + * - Streaming output + * + * Estimated cost: ~$0.10 per full run + * + * Note: Codex uses different output format and session ID field (thread_id). + */ + +import { describe, it, expect, beforeAll, afterAll, beforeEach } from 'vitest'; +import { + createRealProviderHarness, + describeRealCodex, + REAL_TEST_TIMEOUT, + type RealProviderHarness, +} from './harness.js'; +import { CODEX_PROMPTS } from './prompts.js'; +import type { AgentSpawnedEvent, AgentOutputEvent } from '../../../events/types.js'; + +describeRealCodex('Real Codex Manager Integration', () => { + let harness: RealProviderHarness; + + beforeAll(async () => { + console.log('\n=== Running Real Codex Manager Tests ==='); + console.log('These tests call the real Codex API and incur costs.\n'); + harness = await createRealProviderHarness({ provider: 'codex' }); + }); + + afterAll(async () => { + await harness.cleanup(); + }); + + beforeEach(() => { + harness.clearEvents(); + }); + + describe('Codex Spawn', () => { + it( + 'spawns codex agent and extracts thread_id', + async () => { + // Spawn agent with simple task + const agent = await harness.agentManager.spawn({ + taskId: null, + prompt: CODEX_PROMPTS.done, + mode: 'execute', + provider: 'codex', + }); + + expect(agent.id).toBeTruthy(); + expect(agent.provider).toBe('codex'); + expect(agent.status).toBe('running'); + + // Verify spawned event + const spawnedEvents = harness.getEventsByType<AgentSpawnedEvent>('agent:spawned'); + expect(spawnedEvents.length).toBe(1); + expect(spawnedEvents[0].payload.provider).toBe('codex'); + + // Wait for completion + const result = await harness.waitForAgentCompletion(agent.id, REAL_TEST_TIMEOUT); + + // Verify session ID (thread_id) was extracted + 
const dbAgent = await harness.agentRepository.findById(agent.id); + console.log(' Thread ID:', dbAgent?.sessionId); + console.log(' Status:', dbAgent?.status); + console.log(' Result:', result?.message); + + // Codex should complete or crash + expect(['idle', 'crashed']).toContain(dbAgent?.status); + + // If completed successfully, should have extracted thread_id + if (dbAgent?.status === 'idle' && dbAgent?.sessionId) { + expect(dbAgent.sessionId).toBeTruthy(); + } + }, + REAL_TEST_TIMEOUT + ); + + it( + 'uses generic parser for output', + async () => { + // Spawn agent with streaming prompt + const agent = await harness.agentManager.spawn({ + taskId: null, + prompt: CODEX_PROMPTS.streaming, + mode: 'execute', + provider: 'codex', + }); + + // Wait for completion + const result = await harness.waitForAgentCompletion(agent.id, REAL_TEST_TIMEOUT); + + // Verify output events were captured + const outputEvents = harness.getEventsByType<AgentOutputEvent>('agent:output'); + console.log(' Output events:', outputEvents.length); + + // Verify output buffer + const outputBuffer = harness.agentManager.getOutputBuffer(agent.id); + console.log(' Output buffer chunks:', outputBuffer.length); + + // For generic provider, result should be captured + const dbAgent = await harness.agentRepository.findById(agent.id); + console.log(' Status:', dbAgent?.status); + console.log(' Result:', result?.message?.substring(0, 100) + '...'); + + expect(['idle', 'crashed']).toContain(dbAgent?.status); + }, + REAL_TEST_TIMEOUT + ); + }); + + describe('Codex Provider Config', () => { + it( + 'uses correct command and args for codex', + async () => { + // This is more of a config verification test + // The actual command execution is validated by the spawn test + + const agent = await harness.agentManager.spawn({ + taskId: null, + prompt: 'Say hello', + mode: 'execute', + provider: 'codex', + }); + + // Verify agent was created with codex provider + const dbAgent = await 
harness.agentRepository.findById(agent.id); + expect(dbAgent?.provider).toBe('codex'); + + // Wait for completion (or timeout) + try { + await harness.waitForAgentCompletion(agent.id, REAL_TEST_TIMEOUT); + } catch { + // Codex might fail if not installed, that's OK for config test + } + + const finalAgent = await harness.agentRepository.findById(agent.id); + console.log(' Provider:', finalAgent?.provider); + console.log(' Status:', finalAgent?.status); + }, + REAL_TEST_TIMEOUT + ); + }); +}); + +/** + * Codex-specific observations from testing: + * + * 1. Output Format: + * - Codex uses JSONL streaming with different event types + * - thread.started event contains thread_id + * - Output parsing is more generic (not JSON schema validated) + * + * 2. Command Structure: + * - codex exec --full-auto --json -p "prompt" + * - resume: codex exec resume <thread_id> + * + * 3. Session ID: + * - Called "thread_id" in Codex + * - Extracted from thread.started event + * + * 4. Resume: + * - Uses subcommand style: codex exec resume <thread_id> + * - Different from Claude's flag style: claude --resume <session_id> + */ diff --git a/src/test/integration/real-providers/crash-recovery.test.ts b/src/test/integration/real-providers/crash-recovery.test.ts new file mode 100644 index 0000000..d340059 --- /dev/null +++ b/src/test/integration/real-providers/crash-recovery.test.ts @@ -0,0 +1,265 @@ +/** + * Crash Recovery Integration Tests + * + * IMPORTANT: These tests call the REAL Claude CLI and incur API costs! + * They are SKIPPED by default and should only be run manually for validation. 
+ * + * To run these tests: + * ```bash + * REAL_CLAUDE_TESTS=1 npm test -- src/test/integration/real-providers/crash-recovery.test.ts --test-timeout=300000 + * ``` + * + * Tests covered: + * - Server restart while agent is running + * - Resuming streaming after restart + * - Marking dead agents as crashed + * - Output file processing after restart + * + * Estimated cost: ~$0.08 per full run + */ + +import { describe, it, expect, beforeAll, afterAll, beforeEach } from 'vitest'; +import { + createRealProviderHarness, + describeRealClaude, + REAL_TEST_TIMEOUT, + EXTENDED_TEST_TIMEOUT, + sleep, + type RealProviderHarness, +} from './harness.js'; +import { MINIMAL_PROMPTS } from './prompts.js'; +import { MultiProviderAgentManager } from '../../../agent/manager.js'; + +describeRealClaude('Crash Recovery', () => { + let harness: RealProviderHarness; + + beforeAll(async () => { + console.log('\n=== Running Crash Recovery Tests ==='); + console.log('These tests call the real Claude API and incur costs.\n'); + harness = await createRealProviderHarness({ provider: 'claude' }); + }); + + afterAll(async () => { + await harness.cleanup(); + }); + + beforeEach(() => { + harness.clearEvents(); + }); + + describe('Server Restart Simulation', () => { + it( + 'resumes streaming for still-running agent after restart', + async () => { + // 1. Spawn agent with slow task + console.log(' 1. Spawning agent with slow task...'); + const agent = await harness.agentManager.spawn({ + taskId: null, + prompt: MINIMAL_PROMPTS.slow, + mode: 'execute', + provider: 'claude', + }); + + // 2. Wait for agent to be running + await harness.waitForAgentStatus(agent.id, 'running', 10000); + const dbAgent = await harness.agentRepository.findById(agent.id); + expect(dbAgent?.pid).toBeTruthy(); + expect(dbAgent?.outputFilePath).toBeTruthy(); + console.log(' 2. Agent running with PID:', dbAgent?.pid); + + // 3. Give the agent a moment to start writing output + await sleep(2000); + + // 4. 
Simulate server crash - create NEW manager (old state lost) + console.log(' 3. Simulating server restart with new manager...'); + harness.clearEvents(); // Clear events from old manager + + const newManager = new MultiProviderAgentManager( + harness.agentRepository, + harness.workspaceRoot, + harness.projectRepository, + harness.accountRepository, + harness.eventBus + ); + + // 5. Reconcile - should pick up running agent + console.log(' 4. Reconciling agent state...'); + await newManager.reconcileAfterRestart(); + + // 6. Wait for completion via new manager + console.log(' 5. Waiting for completion via new manager...'); + let attempts = 0; + let finalStatus = 'running'; + while (attempts < 60) { + const refreshed = await harness.agentRepository.findById(agent.id); + if (refreshed?.status !== 'running') { + finalStatus = refreshed?.status ?? 'unknown'; + break; + } + await sleep(2000); + attempts++; + } + + const finalAgent = await harness.agentRepository.findById(agent.id); + console.log(' 6. Final status:', finalAgent?.status); + + // Either completed successfully or crashed (both are valid outcomes) + expect(['idle', 'crashed', 'stopped']).toContain(finalAgent?.status); + + if (finalAgent?.status === 'idle') { + const result = await newManager.getResult(agent.id); + console.log(' Result:', result?.message); + } + }, + EXTENDED_TEST_TIMEOUT + ); + + it( + 'marks dead agent as crashed during reconcile', + async () => { + // 1. Create a fake agent record with a dead PID + console.log(' 1. 
Creating fake agent with dead PID...'); + const fakeAgent = await harness.agentRepository.create({ + name: 'dead-agent-test', + taskId: null, + initiativeId: null, + sessionId: null, + worktreeId: 'dead-worktree', + status: 'running', + mode: 'execute', + provider: 'claude', + accountId: null, + }); + + // Set a PID that's definitely dead (high number that won't exist) + await harness.agentRepository.update(fakeAgent.id, { pid: 999999, outputFilePath: '/nonexistent/path' }); + + // Verify it's marked as running + let agent = await harness.agentRepository.findById(fakeAgent.id); + expect(agent?.status).toBe('running'); + expect(agent?.pid).toBe(999999); + + // 2. Create new manager and reconcile + console.log(' 2. Creating new manager and reconciling...'); + const newManager = new MultiProviderAgentManager( + harness.agentRepository, + harness.workspaceRoot, + harness.projectRepository, + harness.accountRepository, + harness.eventBus + ); + + await newManager.reconcileAfterRestart(); + + // 3. Verify agent is now crashed + agent = await harness.agentRepository.findById(fakeAgent.id); + expect(agent?.status).toBe('crashed'); + console.log(' 3. Agent marked as crashed (dead PID detected)'); + }, + REAL_TEST_TIMEOUT + ); + + it( + 'processes output file for dead agent during reconcile', + async () => { + // 1. Spawn agent and wait for completion + console.log(' 1. Spawning agent to completion...'); + const agent = await harness.agentManager.spawn({ + taskId: null, + prompt: MINIMAL_PROMPTS.done, + mode: 'execute', + provider: 'claude', + }); + + await harness.waitForAgentCompletion(agent.id, REAL_TEST_TIMEOUT); + + const dbAgent = await harness.agentRepository.findById(agent.id); + const outputFilePath = dbAgent?.outputFilePath; + expect(outputFilePath).toBeTruthy(); + console.log(' 2. Output file:', outputFilePath); + + // 2. 
Reset agent to "running" to simulate mid-crash state + await harness.agentRepository.update(agent.id, { status: 'running' }); + // Clear result so reconcile has to re-process + await harness.agentRepository.update(agent.id, { result: null }); + + // Verify reset + let resetAgent = await harness.agentRepository.findById(agent.id); + expect(resetAgent?.status).toBe('running'); + + // 3. Create new manager and reconcile + console.log(' 3. Creating new manager and reconciling...'); + harness.clearEvents(); + + const newManager = new MultiProviderAgentManager( + harness.agentRepository, + harness.workspaceRoot, + harness.projectRepository, + harness.accountRepository, + harness.eventBus + ); + + await newManager.reconcileAfterRestart(); + + // Give it a moment to process the file + await sleep(1000); + + // 4. Verify agent was processed from output file + const finalAgent = await harness.agentRepository.findById(agent.id); + console.log(' 4. Final status:', finalAgent?.status); + + // Should either be idle (processed successfully) or crashed (couldn't process) + expect(['idle', 'crashed']).toContain(finalAgent?.status); + }, + REAL_TEST_TIMEOUT + ); + }); + + describe('Event Consistency', () => { + it( + 'does not duplicate events on restart', + async () => { + // 1. Spawn agent with slow task + console.log(' 1. Spawning agent...'); + const agent = await harness.agentManager.spawn({ + taskId: null, + prompt: MINIMAL_PROMPTS.streaming, + mode: 'execute', + provider: 'claude', + }); + + // 2. Wait for some output events + await sleep(3000); + const initialOutputCount = harness.getEventsByType('agent:output').length; + console.log(' 2. Initial output events:', initialOutputCount); + + // 3. Wait for completion + await harness.waitForAgentCompletion(agent.id, REAL_TEST_TIMEOUT); + const finalOutputCount = harness.getEventsByType('agent:output').length; + console.log(' 3. Final output events:', finalOutputCount); + + // 4. 
Create new manager and reconcile (agent already complete) + harness.clearEvents(); + + const newManager = new MultiProviderAgentManager( + harness.agentRepository, + harness.workspaceRoot, + harness.projectRepository, + harness.accountRepository, + harness.eventBus + ); + + await newManager.reconcileAfterRestart(); + await sleep(1000); + + // 5. Verify no new output events (agent was already complete) + const postReconcileOutputCount = harness.getEventsByType('agent:output').length; + console.log(' 4. Post-reconcile output events:', postReconcileOutputCount); + + // Should not have re-emitted all the old output events + expect(postReconcileOutputCount).toBe(0); + }, + REAL_TEST_TIMEOUT + ); + }); +}); diff --git a/src/test/integration/real-providers/harness.ts b/src/test/integration/real-providers/harness.ts new file mode 100644 index 0000000..28f28ab --- /dev/null +++ b/src/test/integration/real-providers/harness.ts @@ -0,0 +1,378 @@ +/** + * Real Provider Test Harness + * + * Extends the existing test infrastructure to use REAL MultiProviderAgentManager + * for integration testing with actual CLI providers like Claude and Codex. + * + * Unlike the standard TestHarness which uses MockAgentManager, this harness: + * - Uses real CLI spawning (costs real API credits!) 
+ * - Provides poll-based waiting helpers + * - Captures events for inspection + * - Manages temp directories for worktrees + */ + +import { mkdtemp, rm } from 'node:fs/promises'; +import { tmpdir } from 'node:os'; +import { join } from 'node:path'; +import { describe } from 'vitest'; +import type { DrizzleDatabase } from '../../../db/index.js'; +import type { DomainEvent, EventBus } from '../../../events/types.js'; +import { EventEmitterBus } from '../../../events/bus.js'; +import { MultiProviderAgentManager } from '../../../agent/manager.js'; +import type { AgentResult, PendingQuestions, AgentStatus } from '../../../agent/types.js'; +import type { AgentRepository } from '../../../db/repositories/agent-repository.js'; +import type { ProjectRepository } from '../../../db/repositories/project-repository.js'; +import type { AccountRepository } from '../../../db/repositories/account-repository.js'; +import type { InitiativeRepository } from '../../../db/repositories/initiative-repository.js'; +import { + DrizzleAgentRepository, + DrizzleProjectRepository, + DrizzleAccountRepository, + DrizzleInitiativeRepository, +} from '../../../db/repositories/drizzle/index.js'; +import { createTestDatabase } from '../../../db/repositories/drizzle/test-helpers.js'; + +/** + * Sleep helper for polling loops. + */ +export function sleep(ms: number): Promise<void> { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +/** + * Event bus that captures all emitted events for inspection. + */ +export class CapturingEventBus extends EventEmitterBus { + emittedEvents: DomainEvent[] = []; + + emit<T extends DomainEvent>(event: T): void { + this.emittedEvents.push(event); + super.emit(event); + } + + getEventsByType<T extends DomainEvent>(type: T['type']): T[] { + return this.emittedEvents.filter((e) => e.type === type) as T[]; + } + + clearEvents(): void { + this.emittedEvents = []; + } +} + +/** + * Options for creating a real provider test harness. 
+ */ +export interface RealProviderHarnessOptions { + /** Which provider to test (default: 'claude') */ + provider?: 'claude' | 'codex'; + /** Optional workspace root (temp dir created if omitted) */ + workspaceRoot?: string; +} + +/** + * Real Provider Test Harness interface. + * + * Provides everything needed to test against real CLI providers: + * - In-memory database with real repositories + * - Real MultiProviderAgentManager (spawns actual CLI processes) + * - Event capture for verification + * - Polling-based wait helpers + */ +export interface RealProviderHarness { + /** In-memory SQLite database */ + db: DrizzleDatabase; + /** Event bus with capture capability */ + eventBus: CapturingEventBus; + /** Real agent manager (not mock!) */ + agentManager: MultiProviderAgentManager; + /** Workspace root directory */ + workspaceRoot: string; + + /** Agent repository */ + agentRepository: AgentRepository; + /** Project repository */ + projectRepository: ProjectRepository; + /** Account repository */ + accountRepository: AccountRepository; + /** Initiative repository */ + initiativeRepository: InitiativeRepository; + + /** + * Wait for an agent to reach idle or crashed status. + * Polls the database at regular intervals. + * + * @param agentId - The agent ID to wait for + * @param timeoutMs - Maximum time to wait (default 120000ms = 2 minutes) + * @returns The agent result if completed, or null if crashed/timeout + */ + waitForAgentCompletion(agentId: string, timeoutMs?: number): Promise<AgentResult | null>; + + /** + * Wait for an agent to enter waiting_for_input status. + * Polls the database at regular intervals. + * + * @param agentId - The agent ID to wait for + * @param timeoutMs - Maximum time to wait (default 120000ms) + * @returns The pending questions if waiting, or null if timeout/other status + */ + waitForAgentWaiting(agentId: string, timeoutMs?: number): Promise<PendingQuestions | null>; + + /** + * Wait for an agent to reach a specific status. 
+ * + * @param agentId - The agent ID to wait for + * @param status - The target status + * @param timeoutMs - Maximum time to wait (default 120000ms) + */ + waitForAgentStatus(agentId: string, status: AgentStatus, timeoutMs?: number): Promise<void>; + + /** + * Get captured events filtered by type. + */ + getEventsByType<T extends DomainEvent>(type: T['type']): T[]; + + /** + * Clear all captured events. + */ + clearEvents(): void; + + /** + * Kill all running agents (for cleanup). + */ + killAllAgents(): Promise<void>; + + /** + * Clean up all resources (directories, processes). + * Call this in afterAll/afterEach. + */ + cleanup(): Promise<void>; +} + +/** Default poll interval for status checks */ +const POLL_INTERVAL_MS = 1000; + +/** + * Create a test harness for real provider integration tests. + * + * This creates: + * - In-memory SQLite database + * - Temp directory for worktrees (or uses provided workspace) + * - Real MultiProviderAgentManager + * - Event capture bus + * + * @example + * ```typescript + * let harness: RealProviderHarness; + * + * beforeAll(async () => { + * harness = await createRealProviderHarness({ provider: 'claude' }); + * }); + * + * afterAll(async () => { + * await harness.cleanup(); + * }); + * + * it('spawns and completes', async () => { + * const agent = await harness.agentManager.spawn({...}); + * const result = await harness.waitForAgentCompletion(agent.id); + * expect(result?.success).toBe(true); + * }); + * ``` + */ +export async function createRealProviderHarness( + options: RealProviderHarnessOptions = {} +): Promise<RealProviderHarness> { + // Create workspace directory (temp if not provided) + const workspaceRoot = options.workspaceRoot ?? 
(await mkdtemp(join(tmpdir(), 'cw-test-'))); + const ownedWorkspace = !options.workspaceRoot; // Track if we need to clean up + + // Initialize git repo in temp workspace (required for worktree operations) + if (ownedWorkspace) { + const { execSync } = await import('node:child_process'); + execSync('git init', { cwd: workspaceRoot, stdio: 'ignore' }); + execSync('git config user.email "test@test.com"', { cwd: workspaceRoot, stdio: 'ignore' }); + execSync('git config user.name "Test"', { cwd: workspaceRoot, stdio: 'ignore' }); + // Create initial commit (worktrees require at least one commit) + execSync('touch .gitkeep && git add .gitkeep && git commit -m "init"', { cwd: workspaceRoot, stdio: 'ignore' }); + } + + // Create in-memory database + const db = createTestDatabase(); + + // Create repositories + const agentRepository = new DrizzleAgentRepository(db); + const projectRepository = new DrizzleProjectRepository(db); + const accountRepository = new DrizzleAccountRepository(db); + const initiativeRepository = new DrizzleInitiativeRepository(db); + + // Create event bus with capture (parent class already sets maxListeners to 100) + const eventBus = new CapturingEventBus(); + + // Create REAL agent manager (not mock!) 
+ const agentManager = new MultiProviderAgentManager( + agentRepository, + workspaceRoot, + projectRepository, + accountRepository, + eventBus + ); + + // Build harness + const harness: RealProviderHarness = { + db, + eventBus, + agentManager, + workspaceRoot, + agentRepository, + projectRepository, + accountRepository, + initiativeRepository, + + async waitForAgentCompletion(agentId: string, timeoutMs = 120000): Promise<AgentResult | null> { + const deadline = Date.now() + timeoutMs; + + while (Date.now() < deadline) { + const agent = await agentRepository.findById(agentId); + if (!agent) return null; + + if (agent.status === 'idle' || agent.status === 'stopped') { + // Agent completed - get result + return agentManager.getResult(agentId); + } + + if (agent.status === 'crashed') { + // Agent crashed - return the error result + return agentManager.getResult(agentId); + } + + if (agent.status === 'waiting_for_input') { + // Agent is waiting - return null (not completed) + return null; + } + + // Still running - wait and check again + await sleep(POLL_INTERVAL_MS); + } + + throw new Error(`Timeout waiting for agent ${agentId} to complete after ${timeoutMs}ms`); + }, + + async waitForAgentWaiting(agentId: string, timeoutMs = 120000): Promise<PendingQuestions | null> { + const deadline = Date.now() + timeoutMs; + + while (Date.now() < deadline) { + const agent = await agentRepository.findById(agentId); + if (!agent) return null; + + if (agent.status === 'waiting_for_input') { + return agentManager.getPendingQuestions(agentId); + } + + if (agent.status === 'idle' || agent.status === 'stopped' || agent.status === 'crashed') { + // Agent finished without asking questions + return null; + } + + // Still running - wait and check again + await sleep(POLL_INTERVAL_MS); + } + + throw new Error(`Timeout waiting for agent ${agentId} to request input after ${timeoutMs}ms`); + }, + + async waitForAgentStatus(agentId: string, status: AgentStatus, timeoutMs = 120000): Promise<void> 
{ + const deadline = Date.now() + timeoutMs; + + while (Date.now() < deadline) { + const agent = await agentRepository.findById(agentId); + if (!agent) { + throw new Error(`Agent ${agentId} not found`); + } + + if (agent.status === status) { + return; + } + + // Check for terminal states that mean we'll never reach target + if (status === 'running' && ['idle', 'stopped', 'crashed', 'waiting_for_input'].includes(agent.status)) { + throw new Error(`Agent ${agentId} already in terminal state ${agent.status}, cannot reach ${status}`); + } + + await sleep(POLL_INTERVAL_MS); + } + + throw new Error(`Timeout waiting for agent ${agentId} to reach status ${status} after ${timeoutMs}ms`); + }, + + getEventsByType<T extends DomainEvent>(type: T['type']): T[] { + return eventBus.getEventsByType<T>(type); + }, + + clearEvents(): void { + eventBus.clearEvents(); + }, + + async killAllAgents(): Promise<void> { + const agents = await agentRepository.findAll(); + for (const agent of agents) { + if (agent.status === 'running') { + try { + await agentManager.stop(agent.id); + } catch { + // Ignore errors during cleanup + } + } + } + }, + + async cleanup(): Promise<void> { + // Kill any running agents + await harness.killAllAgents(); + + // Clean up workspace directory if we created it + if (ownedWorkspace) { + try { + await rm(workspaceRoot, { recursive: true, force: true }); + } catch { + // Ignore cleanup errors + } + } + }, + }; + + return harness; +} + +/** + * Check if real Claude tests should run. + * Set REAL_CLAUDE_TESTS=1 environment variable to enable. + */ +export const shouldRunRealClaudeTests = process.env.REAL_CLAUDE_TESTS === '1'; + +/** + * Check if real Codex tests should run. + * Set REAL_CODEX_TESTS=1 environment variable to enable. + */ +export const shouldRunRealCodexTests = process.env.REAL_CODEX_TESTS === '1'; + +/** + * Skip wrapper for Claude tests - skips unless REAL_CLAUDE_TESTS=1. 
+ */ +export const describeRealClaude: typeof describe = shouldRunRealClaudeTests ? describe : describe.skip; + +/** + * Skip wrapper for Codex tests - skips unless REAL_CODEX_TESTS=1. + */ +export const describeRealCodex: typeof describe = shouldRunRealCodexTests ? describe : describe.skip; + +/** + * Default test timeout for real CLI tests (2 minutes). + * Real API calls take 5-30 seconds typically. + */ +export const REAL_TEST_TIMEOUT = 120000; + +/** + * Extended test timeout for slow tests (5 minutes). + * Used for schema retry tests and crash recovery tests. + */ +export const EXTENDED_TEST_TIMEOUT = 300000; diff --git a/src/test/integration/real-providers/index.ts b/src/test/integration/real-providers/index.ts new file mode 100644 index 0000000..5e28dd3 --- /dev/null +++ b/src/test/integration/real-providers/index.ts @@ -0,0 +1,56 @@ +/** + * Real Provider Integration Tests + * + * This module provides infrastructure for testing against real CLI providers. + * Tests are expensive (real API calls) and skipped by default. + * + * ## Running Tests + * + * ```bash + * # Claude tests only + * REAL_CLAUDE_TESTS=1 npm test -- src/test/integration/real-providers/ --test-timeout=300000 + * + * # Codex tests only + * REAL_CODEX_TESTS=1 npm test -- src/test/integration/real-providers/codex-manager.test.ts + * + * # All real provider tests + * REAL_CLAUDE_TESTS=1 REAL_CODEX_TESTS=1 npm test -- src/test/integration/real-providers/ + * ``` + * + * ## Cost Estimates + * + * | Suite | Tests | Est. 
Cost | Duration | + * |-------|-------|-----------|----------| + * | Output Parsing | 3 | $0.06 | ~2 min | + * | Schema Validation | 4 | $0.22 | ~4 min | + * | Crash Recovery | 3 | $0.08 | ~3 min | + * | Session Resume | 2 | $0.08 | ~3 min | + * | Codex Integration | 2 | $0.10 | ~2 min | + * | **TOTAL** | **14** | **~$0.54** | **~14 min** | + * + * ## Test Files + * + * - `harness.ts` - RealProviderHarness factory and utilities + * - `prompts.ts` - Minimal cost test prompts + * - `claude-manager.test.ts` - Claude spawn/resume/output tests + * - `codex-manager.test.ts` - Codex provider tests + * - `schema-retry.test.ts` - Schema validation + retry tests + * - `crash-recovery.test.ts` - Server restart simulation + * - `sample-outputs/` - Captured CLI output for parser unit tests + */ + +export { + createRealProviderHarness, + CapturingEventBus, + sleep, + shouldRunRealClaudeTests, + shouldRunRealCodexTests, + describeRealClaude, + describeRealCodex, + REAL_TEST_TIMEOUT, + EXTENDED_TEST_TIMEOUT, + type RealProviderHarness, + type RealProviderHarnessOptions, +} from './harness.js'; + +export { MINIMAL_PROMPTS, CODEX_PROMPTS } from './prompts.js'; diff --git a/src/test/integration/real-providers/prompts.ts b/src/test/integration/real-providers/prompts.ts new file mode 100644 index 0000000..d408477 --- /dev/null +++ b/src/test/integration/real-providers/prompts.ts @@ -0,0 +1,113 @@ +/** + * Minimal Cost Test Prompts + * + * Carefully crafted prompts designed to minimize token usage while + * testing specific CLI behaviors. Each prompt aims for the smallest + * possible API cost while still exercising the target functionality. + * + * Cost estimates assume Claude Sonnet pricing (~$3/M input, $15/M output). 
+ */ + +export const MINIMAL_PROMPTS = { + /** + * ~$0.01 - Cheapest done response + * Tests: basic spawn → completion flow, status parsing + */ + done: `Output exactly this JSON with no other text: +{"status":"done","result":"ok"}`, + + /** + * ~$0.01 - Cheapest questions response + * Tests: waiting_for_input status, questions array parsing + */ + questions: `Output exactly this JSON with no other text: +{"status":"questions","questions":[{"id":"q1","question":"What is your name?"}]}`, + + /** + * ~$0.03 - Slow task for timing tests + * Tests: streaming during long-running task, crash recovery + * Note: Agent may not actually wait 30 seconds, but will produce delayed output + */ + slow: `Think through a simple problem step by step, counting from 1 to 10 slowly, then output: +{"status":"done","result":"counted to 10"}`, + + /** + * ~$0.02 - Produces text deltas for streaming tests + * Tests: text_delta event parsing, output buffering + */ + streaming: `Count from 1 to 5, outputting each number, then output: +{"status":"done","result":"counted"}`, + + /** + * ~$0.03 - Deliberately produces non-JSON first + * Tests: schema validation failure, retry logic + */ + badThenGood: `First say "thinking..." 
on its own line, then output: +{"status":"done","result":"fixed"}`, + + /** + * ~$0.02 - Multiple questions + * Tests: questions array with multiple items + */ + multipleQuestions: `Output exactly this JSON with no other text: +{"status":"questions","questions":[{"id":"q1","question":"First question?"},{"id":"q2","question":"Second question?"}]}`, + + /** + * ~$0.01 - Error signal + * Tests: error status handling + */ + error: `Output exactly this JSON with no other text: +{"status":"error","error":"Test error message"}`, + + /** + * ~$0.02 - Answer continuation + * Tests: session resume with answers + */ + answerContinuation: (answers: Record<string, string>): string => { + const answerLines = Object.entries(answers) + .map(([id, answer]) => `${id}: ${answer}`) + .join('\n'); + return `I received your answers: +${answerLines} + +Now complete the task by outputting: +{"status":"done","result":"completed with answers"}`; + }, + + /** + * ~$0.02 - Context complete for discuss mode + * Tests: discuss mode output handling (now uses universal done signal) + */ + discussComplete: `Output exactly this JSON with no other text: +{"status":"done"}`, + + /** + * ~$0.02 - Breakdown complete + * Tests: breakdown mode output handling (now uses universal done signal) + */ + breakdownComplete: `Output exactly this JSON with no other text: +{"status":"done"}`, + + /** + * ~$0.02 - Decompose complete + * Tests: decompose mode output handling (now uses universal done signal) + */ + decomposeComplete: `Output exactly this JSON with no other text: +{"status":"done"}`, +} as const; + +/** + * Prompts specifically for Codex provider testing. + * Codex may have different output format requirements. 
+ */ +export const CODEX_PROMPTS = { + /** + * Basic completion for Codex + */ + done: `Complete this simple task: output "done" and finish.`, + + /** + * Produces streaming output + */ + streaming: `Count from 1 to 5, saying each number aloud, then say "finished".`, +} as const; diff --git a/src/test/integration/real-providers/sample-outputs/README.md b/src/test/integration/real-providers/sample-outputs/README.md new file mode 100644 index 0000000..060de11 --- /dev/null +++ b/src/test/integration/real-providers/sample-outputs/README.md @@ -0,0 +1,68 @@ +# Sample CLI Outputs + +This directory contains captured real CLI outputs for use in parser unit tests. +These files allow testing stream parsers without incurring API costs. + +## Files + +### claude-stream-success.jsonl +A successful Claude CLI session (v2.1.33) that: +- Initializes with `system` event containing `session_id` +- Emits `assistant` message with content +- Completes with `result` event containing `done` status JSON + +### claude-stream-questions.jsonl +A Claude CLI session that: +- Initializes with `system` event containing `session_id` +- Emits `assistant` message with content wrapped in markdown code block +- Completes with `result` event containing `questions` status JSON + +### codex-stream-success.jsonl +A successful Codex CLI session (v0.98.0) that: +- Starts with `thread.started` event containing `thread_id` +- Emits `turn.started`, `item.completed` events +- Completes with `turn.completed` event containing usage stats + +## Event Type Differences + +### Claude CLI (`--output-format stream-json`) +- `system` (subtype: `init`) - Contains `session_id`, tools, model info +- `assistant` - Contains message content in `content[].text` +- `result` - Contains final `result` text and `total_cost_usd` + +### Codex CLI (`--json`) +- `thread.started` - Contains `thread_id` (equivalent to session_id) +- `turn.started` - Marks beginning of turn +- `item.completed` - Contains reasoning or agent_message 
items +- `turn.completed` - Contains usage stats + +## Usage + +These files can be used to test stream parsers in isolation: + +```typescript +import { readFileSync } from 'fs'; +import { ClaudeStreamParser } from '../../../agent/providers/parsers/claude.js'; + +const output = readFileSync('sample-outputs/claude-stream-success.jsonl', 'utf-8'); +const parser = new ClaudeStreamParser(); + +for (const line of output.split('\n')) { + if (line.trim()) { + const events = parser.parseLine(line); + // Assert on events... + } +} +``` + +## Capturing New Outputs + +### Claude +```bash +claude -p "your prompt" --output-format stream-json --verbose > output.jsonl +``` + +### Codex +```bash +codex exec --full-auto --json "your prompt" > output.jsonl +``` diff --git a/src/test/integration/real-providers/sample-outputs/claude-stream-questions.jsonl b/src/test/integration/real-providers/sample-outputs/claude-stream-questions.jsonl new file mode 100644 index 0000000..2957928 --- /dev/null +++ b/src/test/integration/real-providers/sample-outputs/claude-stream-questions.jsonl @@ -0,0 +1,3 @@ 
+{"type":"system","subtype":"init","cwd":"/Users/lukasmay/development/projects/codewalk-district","session_id":"774631da-8e54-445e-9ccb-eea8e7fe805e","tools":["Task","TaskOutput","Bash","Glob","Grep","ExitPlanMode","Read","Edit","Write","NotebookEdit","WebFetch","TodoWrite","WebSearch","TaskStop","AskUserQuestion","Skill","EnterPlanMode","ToolSearch"],"mcp_servers":[],"model":"claude-opus-4-6","permissionMode":"default","slash_commands":["keybindings-help","debug","gsd:define-requirements","gsd:list-phase-assumptions","gsd:debug","gsd:remove-phase","gsd:complete-milestone","gsd:research-phase","gsd:plan-phase","gsd:check-todos","gsd:pause-work","gsd:execute-plan","gsd:research-project","gsd:add-todo","gsd:plan-fix","gsd:resume-work","gsd:progress","gsd:help","gsd:discuss-milestone","gsd:add-phase","gsd:create-roadmap","gsd:map-codebase","gsd:whats-new","gsd:insert-phase","gsd:new-milestone","gsd:new-project","gsd:execute-phase","gsd:verify-work","gsd:discuss-phase","compact","context","cost","init","pr-comments","release-notes","review","security-review","insights"],"apiKeySource":"none","claude_code_version":"2.1.33","output_style":"default","agents":["Bash","general-purpose","statusline-setup","Explore","Plan","claude-code-guide","jira-sw-assessment"],"skills":["keybindings-help","debug"],"plugins":[],"uuid":"224c683c-41f4-4fdd-9af6-f8cdca366ec1"} +{"type":"assistant","message":{"model":"claude-opus-4-6","id":"msg_01CfDymxvSRFodJ5Zm6NjLHV","type":"message","role":"assistant","content":[{"type":"text","text":"```json\n{\"status\":\"questions\",\"questions\":[{\"id\":\"q1\",\"question\":\"What is your name?\"},{\"id\":\"q2\",\"question\":\"What is the 
deadline?\"}]}\n```"}],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":3,"cache_creation_input_tokens":5983,"cache_read_input_tokens":18026,"cache_creation":{"ephemeral_5m_input_tokens":0,"ephemeral_1h_input_tokens":5983},"output_tokens":1,"service_tier":"standard","inference_geo":"not_available"},"context_management":null},"parent_tool_use_id":null,"session_id":"774631da-8e54-445e-9ccb-eea8e7fe805e","uuid":"29288f20-766c-4047-82f5-679024188f52"} +{"type":"result","subtype":"success","is_error":false,"duration_ms":3213,"duration_api_ms":3203,"num_turns":1,"result":"```json\n{\"status\":\"questions\",\"questions\":[{\"id\":\"q1\",\"question\":\"What is your name?\"},{\"id\":\"q2\",\"question\":\"What is the deadline?\"}]}\n```","stop_reason":null,"session_id":"774631da-8e54-445e-9ccb-eea8e7fe805e","total_cost_usd":0.04754675,"usage":{"input_tokens":3,"cache_creation_input_tokens":5983,"cache_read_input_tokens":18026,"output_tokens":45,"server_tool_use":{"web_search_requests":0,"web_fetch_requests":0},"service_tier":"standard","cache_creation":{"ephemeral_1h_input_tokens":5983,"ephemeral_5m_input_tokens":0}},"modelUsage":{"claude-opus-4-6":{"inputTokens":3,"outputTokens":45,"cacheReadInputTokens":18026,"cacheCreationInputTokens":5983,"webSearchRequests":0,"costUSD":0.04754675,"contextWindow":200000,"maxOutputTokens":32000}},"permission_denials":[],"uuid":"08db08cd-0f12-47ae-8c21-c29e11a6d7df"} diff --git a/src/test/integration/real-providers/sample-outputs/claude-stream-success.jsonl b/src/test/integration/real-providers/sample-outputs/claude-stream-success.jsonl new file mode 100644 index 0000000..70e09fe --- /dev/null +++ b/src/test/integration/real-providers/sample-outputs/claude-stream-success.jsonl @@ -0,0 +1,3 @@ 
+{"type":"system","subtype":"init","cwd":"/Users/lukasmay/development/projects/codewalk-district","session_id":"a0aa6272-b3a6-443c-9ef5-de3a2450dc6d","tools":["Task","TaskOutput","Bash","Glob","Grep","ExitPlanMode","Read","Edit","Write","NotebookEdit","WebFetch","TodoWrite","WebSearch","TaskStop","AskUserQuestion","Skill","EnterPlanMode","ToolSearch"],"mcp_servers":[],"model":"claude-opus-4-6","permissionMode":"default","slash_commands":["keybindings-help","debug","gsd:define-requirements","gsd:list-phase-assumptions","gsd:debug","gsd:remove-phase","gsd:complete-milestone","gsd:research-phase","gsd:plan-phase","gsd:check-todos","gsd:pause-work","gsd:execute-plan","gsd:research-project","gsd:add-todo","gsd:plan-fix","gsd:resume-work","gsd:progress","gsd:help","gsd:discuss-milestone","gsd:add-phase","gsd:create-roadmap","gsd:map-codebase","gsd:whats-new","gsd:insert-phase","gsd:new-milestone","gsd:new-project","gsd:execute-phase","gsd:verify-work","gsd:discuss-phase","compact","context","cost","init","pr-comments","release-notes","review","security-review","insights"],"apiKeySource":"none","claude_code_version":"2.1.33","output_style":"default","agents":["Bash","general-purpose","statusline-setup","Explore","Plan","claude-code-guide","jira-sw-assessment"],"skills":["keybindings-help","debug"],"plugins":[],"uuid":"c1d6dced-ca04-4335-a624-624660479b7b"} 
+{"type":"assistant","message":{"model":"claude-opus-4-6","id":"msg_01RjSiQY1RUgT47j73Dom93j","type":"message","role":"assistant","content":[{"type":"text","text":"{\"status\":\"done\",\"result\":\"ok\"}"}],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":3,"cache_creation_input_tokens":5958,"cache_read_input_tokens":18026,"cache_creation":{"ephemeral_5m_input_tokens":0,"ephemeral_1h_input_tokens":5958},"output_tokens":1,"service_tier":"standard","inference_geo":"not_available"},"context_management":null},"parent_tool_use_id":null,"session_id":"a0aa6272-b3a6-443c-9ef5-de3a2450dc6d","uuid":"f1c8695a-534e-4de2-a684-fa4a1ec03749"} +{"type":"result","subtype":"success","is_error":false,"duration_ms":2465,"duration_api_ms":2453,"num_turns":1,"result":"{\"status\":\"done\",\"result\":\"ok\"}","stop_reason":null,"session_id":"a0aa6272-b3a6-443c-9ef5-de3a2450dc6d","total_cost_usd":0.046565499999999996,"usage":{"input_tokens":3,"cache_creation_input_tokens":5958,"cache_read_input_tokens":18026,"output_tokens":12,"server_tool_use":{"web_search_requests":0,"web_fetch_requests":0},"service_tier":"standard","cache_creation":{"ephemeral_1h_input_tokens":5958,"ephemeral_5m_input_tokens":0}},"modelUsage":{"claude-opus-4-6":{"inputTokens":3,"outputTokens":12,"cacheReadInputTokens":18026,"cacheCreationInputTokens":5958,"webSearchRequests":0,"costUSD":0.046565499999999996,"contextWindow":200000,"maxOutputTokens":32000}},"permission_denials":[],"uuid":"53139e08-b4f3-4f94-b129-82759f77fdca"} diff --git a/src/test/integration/real-providers/sample-outputs/codex-stream-success.jsonl b/src/test/integration/real-providers/sample-outputs/codex-stream-success.jsonl new file mode 100644 index 0000000..3922628 --- /dev/null +++ b/src/test/integration/real-providers/sample-outputs/codex-stream-success.jsonl @@ -0,0 +1,5 @@ +{"type":"thread.started","thread_id":"019c3242-955e-7140-9978-517f0b5a22cb"} +{"type":"turn.started"} 
+{"type":"item.completed","item":{"id":"item_0","type":"reasoning","text":"**Confirming simple greeting task**"}} +{"type":"item.completed","item":{"id":"item_1","type":"agent_message","text":"Hello!"}} +{"type":"turn.completed","usage":{"input_tokens":8458,"cached_input_tokens":6912,"output_tokens":32}} diff --git a/src/test/integration/real-providers/schema-retry.test.ts b/src/test/integration/real-providers/schema-retry.test.ts new file mode 100644 index 0000000..838a5dc --- /dev/null +++ b/src/test/integration/real-providers/schema-retry.test.ts @@ -0,0 +1,306 @@ +/** + * Schema Validation & Retry Integration Tests + * + * IMPORTANT: These tests call the REAL Claude CLI and incur API costs! + * They are SKIPPED by default and should only be run manually for validation. + * + * To run these tests: + * ```bash + * REAL_CLAUDE_TESTS=1 npm test -- src/test/integration/real-providers/schema-retry.test.ts --test-timeout=300000 + * ``` + * + * Tests covered: + * - Valid JSON output validation + * - Questions status parsing + * - Schema validation failure with retry + * - Max retry limit handling + * + * Estimated cost: ~$0.20 per full run (includes retries) + */ + +import { describe, it, expect, beforeAll, afterAll, beforeEach } from 'vitest'; +import { + createRealProviderHarness, + describeRealClaude, + REAL_TEST_TIMEOUT, + EXTENDED_TEST_TIMEOUT, + type RealProviderHarness, +} from './harness.js'; +import { MINIMAL_PROMPTS } from './prompts.js'; +import type { AgentResumedEvent, AgentCrashedEvent } from '../../../events/types.js'; + +describeRealClaude('Schema Validation & Retry', () => { + let harness: RealProviderHarness; + + beforeAll(async () => { + console.log('\n=== Running Schema Validation & Retry Tests ==='); + console.log('These tests call the real Claude API and incur costs.'); + console.log('Retry tests may take longer and cost more.\n'); + harness = await createRealProviderHarness({ provider: 'claude' }); + }); + + afterAll(async () => { + await 
harness.cleanup(); + }); + + beforeEach(() => { + harness.clearEvents(); + }); + + describe('Valid Output', () => { + it( + 'validates done status output', + async () => { + // Spawn agent with minimal done prompt + const agent = await harness.agentManager.spawn({ + taskId: null, + prompt: MINIMAL_PROMPTS.done, + mode: 'execute', + provider: 'claude', + }); + + // Wait for completion + const result = await harness.waitForAgentCompletion(agent.id, REAL_TEST_TIMEOUT); + + // Verify completion + const dbAgent = await harness.agentRepository.findById(agent.id); + expect(dbAgent?.status).toBe('idle'); + expect(result?.success).toBe(true); + + // No retry events should have been emitted + const resumeEvents = harness.getEventsByType<AgentResumedEvent>('agent:resumed'); + expect(resumeEvents.length).toBe(0); + + console.log(' Status: idle (valid done output)'); + console.log(' Result:', result?.message); + }, + REAL_TEST_TIMEOUT + ); + + it( + 'validates questions status output', + async () => { + // Spawn agent with questions prompt + const agent = await harness.agentManager.spawn({ + taskId: null, + prompt: MINIMAL_PROMPTS.questions, + mode: 'execute', + provider: 'claude', + }); + + // Wait for waiting_for_input + const questions = await harness.waitForAgentWaiting(agent.id, REAL_TEST_TIMEOUT); + + // Verify questions were validated + expect(questions).toBeTruthy(); + expect(questions?.questions).toBeInstanceOf(Array); + expect(questions?.questions.length).toBeGreaterThan(0); + + // Each question should have id and question fields + for (const q of questions?.questions ?? 
[]) { + expect(q.id).toBeTruthy(); + expect(q.question).toBeTruthy(); + } + + const dbAgent = await harness.agentRepository.findById(agent.id); + expect(dbAgent?.status).toBe('waiting_for_input'); + + // No retry events + const resumeEvents = harness.getEventsByType<AgentResumedEvent>('agent:resumed'); + expect(resumeEvents.length).toBe(0); + + console.log(' Status: waiting_for_input (valid questions output)'); + console.log(' Questions:', questions?.questions.length); + }, + REAL_TEST_TIMEOUT + ); + + it( + 'validates multiple questions', + async () => { + // Spawn agent with multiple questions prompt + const agent = await harness.agentManager.spawn({ + taskId: null, + prompt: MINIMAL_PROMPTS.multipleQuestions, + mode: 'execute', + provider: 'claude', + }); + + // Wait for waiting_for_input + const questions = await harness.waitForAgentWaiting(agent.id, REAL_TEST_TIMEOUT); + + // Verify multiple questions + expect(questions?.questions.length).toBeGreaterThanOrEqual(2); + + // Each question should have unique ID + const ids = questions?.questions.map((q) => q.id) ?? 
[]; + const uniqueIds = new Set(ids); + expect(uniqueIds.size).toBe(ids.length); + + console.log(' Questions:', questions?.questions.map((q) => q.id).join(', ')); + }, + REAL_TEST_TIMEOUT + ); + }); + + describe('Retry Logic', () => { + it( + 'retries when output does not match schema', + async () => { + // Prompt that produces non-JSON first, then valid JSON + // Note: Claude may or may not produce invalid output first + const agent = await harness.agentManager.spawn({ + taskId: null, + prompt: MINIMAL_PROMPTS.badThenGood, + mode: 'execute', + provider: 'claude', + }); + + // Wait for completion (may involve retries) + const result = await harness.waitForAgentCompletion(agent.id, EXTENDED_TEST_TIMEOUT); + + const dbAgent = await harness.agentRepository.findById(agent.id); + + // Either succeeded with retry OR succeeded first time + expect(['idle', 'crashed']).toContain(dbAgent?.status); + + // Check for retry events + const resumeEvents = harness.getEventsByType<AgentResumedEvent>('agent:resumed'); + console.log(' Retry attempts:', resumeEvents.length); + console.log(' Final status:', dbAgent?.status); + + if (dbAgent?.status === 'idle') { + expect(result?.success).toBe(true); + console.log(' Result:', result?.message); + } else { + // Crashed after max retries + const crashedEvents = harness.getEventsByType<AgentCrashedEvent>('agent:crashed'); + expect(crashedEvents.length).toBeGreaterThan(0); + console.log(' Crashed after retries'); + } + }, + EXTENDED_TEST_TIMEOUT + ); + + it( + 'extracts JSON from markdown code blocks', + async () => { + // Prompt that produces JSON wrapped in markdown + const agent = await harness.agentManager.spawn({ + taskId: null, + prompt: `Output the result wrapped in a markdown code block like this: +\`\`\`json +{"status":"done","result":"extracted from markdown"} +\`\`\``, + mode: 'execute', + provider: 'claude', + }); + + // Wait for completion + const result = await harness.waitForAgentCompletion(agent.id, REAL_TEST_TIMEOUT); + + 
const dbAgent = await harness.agentRepository.findById(agent.id); + console.log(' Status:', dbAgent?.status); + console.log(' Result:', result?.message); + + // Should succeed (JSON extraction from code block) + if (dbAgent?.status === 'idle') { + expect(result?.success).toBe(true); + } + }, + REAL_TEST_TIMEOUT + ); + + it( + 'extracts JSON from text with surrounding content', + async () => { + // Prompt that produces JSON with text before it + const agent = await harness.agentManager.spawn({ + taskId: null, + prompt: `First say "Here is my response:" then output the JSON: +{"status":"done","result":"extracted from text"}`, + mode: 'execute', + provider: 'claude', + }); + + // Wait for completion + const result = await harness.waitForAgentCompletion(agent.id, REAL_TEST_TIMEOUT); + + const dbAgent = await harness.agentRepository.findById(agent.id); + console.log(' Status:', dbAgent?.status); + console.log(' Result:', result?.message); + + // Should succeed (JSON extraction from last {...} block) + if (dbAgent?.status === 'idle') { + expect(result?.success).toBe(true); + } + }, + REAL_TEST_TIMEOUT + ); + }); + + describe('Mode-Specific Schemas', () => { + it( + 'validates discuss mode output', + async () => { + const agent = await harness.agentManager.spawn({ + taskId: null, + prompt: MINIMAL_PROMPTS.discussComplete, + mode: 'discuss', + provider: 'claude', + }); + + const result = await harness.waitForAgentCompletion(agent.id, REAL_TEST_TIMEOUT); + + const dbAgent = await harness.agentRepository.findById(agent.id); + expect(dbAgent?.status).toBe('idle'); + expect(result?.success).toBe(true); + + console.log(' Discuss mode result:', result?.message); + }, + REAL_TEST_TIMEOUT + ); + + it( + 'validates breakdown mode output', + async () => { + const agent = await harness.agentManager.spawn({ + taskId: null, + prompt: MINIMAL_PROMPTS.breakdownComplete, + mode: 'breakdown', + provider: 'claude', + }); + + const result = await harness.waitForAgentCompletion(agent.id, 
REAL_TEST_TIMEOUT); + + const dbAgent = await harness.agentRepository.findById(agent.id); + expect(dbAgent?.status).toBe('idle'); + expect(result?.success).toBe(true); + + console.log(' Breakdown mode result:', result?.message); + }, + REAL_TEST_TIMEOUT + ); + + it( + 'validates decompose mode output', + async () => { + const agent = await harness.agentManager.spawn({ + taskId: null, + prompt: MINIMAL_PROMPTS.decomposeComplete, + mode: 'decompose', + provider: 'claude', + }); + + const result = await harness.waitForAgentCompletion(agent.id, REAL_TEST_TIMEOUT); + + const dbAgent = await harness.agentRepository.findById(agent.id); + expect(dbAgent?.status).toBe('idle'); + expect(result?.success).toBe(true); + + console.log(' Decompose mode result:', result?.message); + }, + REAL_TEST_TIMEOUT + ); + }); +}); diff --git a/src/trpc/context.ts b/src/trpc/context.ts index c0645b3..cc8b10c 100644 --- a/src/trpc/context.ts +++ b/src/trpc/context.ts @@ -11,7 +11,10 @@ import type { TaskRepository } from '../db/repositories/task-repository.js'; import type { MessageRepository } from '../db/repositories/message-repository.js'; import type { InitiativeRepository } from '../db/repositories/initiative-repository.js'; import type { PhaseRepository } from '../db/repositories/phase-repository.js'; -import type { PlanRepository } from '../db/repositories/plan-repository.js'; +import type { PageRepository } from '../db/repositories/page-repository.js'; +import type { ProjectRepository } from '../db/repositories/project-repository.js'; +import type { AccountRepository } from '../db/repositories/account-repository.js'; +import type { AccountCredentialManager } from '../agent/credentials/types.js'; import type { DispatchManager, PhaseDispatchManager } from '../dispatch/types.js'; import type { CoordinationManager } from '../coordination/types.js'; @@ -42,10 +45,18 @@ export interface TRPCContext { initiativeRepository?: InitiativeRepository; /** Phase repository for phase CRUD operations 
(optional until server wiring complete) */ phaseRepository?: PhaseRepository; - /** Plan repository for plan CRUD operations (optional until server wiring complete) */ - planRepository?: PlanRepository; /** Phase dispatch manager for phase queue operations (optional until server wiring complete) */ phaseDispatchManager?: PhaseDispatchManager; + /** Page repository for page CRUD operations (optional until server wiring complete) */ + pageRepository?: PageRepository; + /** Project repository for project CRUD and initiative-project junction operations */ + projectRepository?: ProjectRepository; + /** Account repository for account CRUD and load balancing */ + accountRepository?: AccountRepository; + /** Credential manager for account OAuth token management */ + credentialManager?: AccountCredentialManager; + /** Absolute path to the workspace root (.cwrc directory) */ + workspaceRoot?: string; } /** @@ -62,8 +73,12 @@ export interface CreateContextOptions { coordinationManager?: CoordinationManager; initiativeRepository?: InitiativeRepository; phaseRepository?: PhaseRepository; - planRepository?: PlanRepository; phaseDispatchManager?: PhaseDispatchManager; + pageRepository?: PageRepository; + projectRepository?: ProjectRepository; + accountRepository?: AccountRepository; + credentialManager?: AccountCredentialManager; + workspaceRoot?: string; } /** @@ -84,7 +99,11 @@ export function createContext(options: CreateContextOptions): TRPCContext { coordinationManager: options.coordinationManager, initiativeRepository: options.initiativeRepository, phaseRepository: options.phaseRepository, - planRepository: options.planRepository, phaseDispatchManager: options.phaseDispatchManager, + pageRepository: options.pageRepository, + projectRepository: options.projectRepository, + accountRepository: options.accountRepository, + credentialManager: options.credentialManager, + workspaceRoot: options.workspaceRoot, }; } diff --git a/src/trpc/router.ts b/src/trpc/router.ts index 
b0135da..4e9587d 100644 --- a/src/trpc/router.ts +++ b/src/trpc/router.ts @@ -1,1271 +1,60 @@ /** - * tRPC Router + * tRPC Router — Merge Point * - * Type-safe RPC router for CLI-server communication. - * Uses Zod for runtime validation of procedure inputs/outputs. + * Combines all domain routers into a single application router. + * Each domain file exports a builder function that returns procedure records. */ -import { initTRPC, TRPCError } from '@trpc/server'; -import { z } from 'zod'; -import type { TRPCContext } from './context.js'; -import type { AgentInfo, AgentResult, PendingQuestions } from '../agent/types.js'; -import type { TaskRepository } from '../db/repositories/task-repository.js'; -import type { MessageRepository } from '../db/repositories/message-repository.js'; -import type { InitiativeRepository } from '../db/repositories/initiative-repository.js'; -import type { PhaseRepository } from '../db/repositories/phase-repository.js'; -import type { PlanRepository } from '../db/repositories/plan-repository.js'; -import type { DispatchManager, PhaseDispatchManager } from '../dispatch/types.js'; -import type { CoordinationManager } from '../coordination/types.js'; -import type { Phase, Task } from '../db/schema.js'; -import { buildDiscussPrompt, buildBreakdownPrompt, buildDecomposePrompt } from '../agent/prompts.js'; -import { - eventBusIterable, - ALL_EVENT_TYPES, - AGENT_EVENT_TYPES, - TASK_EVENT_TYPES, -} from './subscriptions.js'; +import { router, publicProcedure } from './trpc.js'; +import { systemProcedures } from './routers/system.js'; +import { agentProcedures } from './routers/agent.js'; +import { taskProcedures } from './routers/task.js'; +import { messageProcedures } from './routers/message.js'; +import { dispatchProcedures } from './routers/dispatch.js'; +import { coordinationProcedures } from './routers/coordination.js'; +import { initiativeProcedures } from './routers/initiative.js'; +import { phaseProcedures } from './routers/phase.js'; 
+import { phaseDispatchProcedures } from './routers/phase-dispatch.js'; +import { architectProcedures } from './routers/architect.js'; +import { projectProcedures } from './routers/project.js'; +import { pageProcedures } from './routers/page.js'; +import { accountProcedures } from './routers/account.js'; +import { subscriptionProcedures } from './routers/subscription.js'; -/** - * Initialize tRPC with our context type. - * This creates the tRPC instance that all procedures will use. - */ -const t = initTRPC.context<TRPCContext>().create(); +// Re-export tRPC primitives (preserves existing import paths) +export { router, publicProcedure, middleware, createCallerFactory } from './trpc.js'; -/** - * Base router - used to create the app router. - */ -export const router = t.router; +// Re-export schemas and types from domain routers +export { + healthResponseSchema, + processInfoSchema, + statusResponseSchema, +} from './routers/system.js'; +export type { HealthResponse, StatusResponse, ProcessInfo } from './routers/system.js'; -/** - * Public procedure - no authentication required. - * All current procedures are public since this is a local-only server. - */ -export const publicProcedure = t.procedure; +export { + spawnAgentInputSchema, + agentIdentifierSchema, + resumeAgentInputSchema, +} from './routers/agent.js'; +export type { SpawnAgentInput, AgentIdentifier, ResumeAgentInput } from './routers/agent.js'; -/** - * Middleware builder for custom middleware. - */ -export const middleware = t.middleware; - -/** - * Create caller factory for testing. - * Allows calling procedures directly without HTTP transport. - */ -export const createCallerFactory = t.createCallerFactory; - -// ============================================================================= -// Zod Schemas for procedure outputs -// ============================================================================= - -/** - * Schema for health check response. 
- */ -export const healthResponseSchema = z.object({ - status: z.literal('ok'), - uptime: z.number().int().nonnegative(), - processCount: z.number().int().nonnegative(), -}); - -export type HealthResponse = z.infer<typeof healthResponseSchema>; - -/** - * Schema for process info in status response. - */ -export const processInfoSchema = z.object({ - id: z.string(), - pid: z.number().int().positive(), - command: z.string(), - status: z.string(), - startedAt: z.string(), -}); - -export type ProcessInfo = z.infer<typeof processInfoSchema>; - -/** - * Schema for status response. - */ -export const statusResponseSchema = z.object({ - server: z.object({ - startedAt: z.string(), - uptime: z.number().int().nonnegative(), - pid: z.number().int().positive(), - }), - processes: z.array(processInfoSchema), -}); - -export type StatusResponse = z.infer<typeof statusResponseSchema>; - -// ============================================================================= -// Agent Input Schemas -// ============================================================================= - -/** - * Schema for spawning a new agent. - */ -export const spawnAgentInputSchema = z.object({ - /** Human-readable name for the agent (required, must be unique) */ - name: z.string().min(1), - /** Task ID to assign to agent */ - taskId: z.string().min(1), - /** Initial prompt/instruction for the agent */ - prompt: z.string().min(1), - /** Optional working directory (defaults to worktree path) */ - cwd: z.string().optional(), - /** Agent operation mode (defaults to 'execute') */ - mode: z.enum(['execute', 'discuss', 'breakdown']).optional(), -}); - -export type SpawnAgentInput = z.infer<typeof spawnAgentInputSchema>; - -/** - * Schema for identifying an agent by name or ID. 
- */ -export const agentIdentifierSchema = z.object({ - /** Lookup by human-readable name (preferred) */ - name: z.string().optional(), - /** Or lookup by ID */ - id: z.string().optional(), -}).refine(data => data.name || data.id, { - message: 'Either name or id must be provided', -}); - -export type AgentIdentifier = z.infer<typeof agentIdentifierSchema>; - -/** - * Schema for resuming an agent with batched answers. - */ -export const resumeAgentInputSchema = z.object({ - /** Lookup by human-readable name (preferred) */ - name: z.string().optional(), - /** Or lookup by ID */ - id: z.string().optional(), - /** Map of question ID to user's answer */ - answers: z.record(z.string(), z.string()), -}).refine(data => data.name || data.id, { - message: 'Either name or id must be provided', -}); - -export type ResumeAgentInput = z.infer<typeof resumeAgentInputSchema>; - -// ============================================================================= -// Helper Functions -// ============================================================================= - -/** - * Helper to resolve agent by name or id. - * Throws TRPCError if agent not found. - */ -async function resolveAgent( - ctx: TRPCContext, - input: { name?: string; id?: string } -): Promise<AgentInfo> { - if (!ctx.agentManager) { - throw new TRPCError({ - code: 'INTERNAL_SERVER_ERROR', - message: 'Agent manager not available', - }); - } - - const agent = input.name - ? await ctx.agentManager.getByName(input.name) - : await ctx.agentManager.get(input.id!); - - if (!agent) { - throw new TRPCError({ - code: 'NOT_FOUND', - message: `Agent '${input.name ?? input.id}' not found`, - }); - } - - return agent; -} - -/** - * Helper to ensure agentManager is available in context. 
- */ -function requireAgentManager(ctx: TRPCContext) { - if (!ctx.agentManager) { - throw new TRPCError({ - code: 'INTERNAL_SERVER_ERROR', - message: 'Agent manager not available', - }); - } - return ctx.agentManager; -} - -/** - * Helper to ensure taskRepository is available in context. - */ -function requireTaskRepository(ctx: TRPCContext): TaskRepository { - if (!ctx.taskRepository) { - throw new TRPCError({ - code: 'INTERNAL_SERVER_ERROR', - message: 'Task repository not available', - }); - } - return ctx.taskRepository; -} - -/** - * Helper to ensure messageRepository is available in context. - */ -function requireMessageRepository(ctx: TRPCContext): MessageRepository { - if (!ctx.messageRepository) { - throw new TRPCError({ - code: 'INTERNAL_SERVER_ERROR', - message: 'Message repository not available', - }); - } - return ctx.messageRepository; -} - -/** - * Helper to ensure dispatchManager is available in context. - */ -function requireDispatchManager(ctx: TRPCContext): DispatchManager { - if (!ctx.dispatchManager) { - throw new TRPCError({ - code: 'INTERNAL_SERVER_ERROR', - message: 'Dispatch manager not available', - }); - } - return ctx.dispatchManager; -} - -/** - * Helper to ensure coordinationManager is available in context. - */ -function requireCoordinationManager(ctx: TRPCContext): CoordinationManager { - if (!ctx.coordinationManager) { - throw new TRPCError({ - code: 'INTERNAL_SERVER_ERROR', - message: 'Coordination manager not available', - }); - } - return ctx.coordinationManager; -} - -/** - * Helper to ensure initiativeRepository is available in context. - */ -function requireInitiativeRepository(ctx: TRPCContext): InitiativeRepository { - if (!ctx.initiativeRepository) { - throw new TRPCError({ - code: 'INTERNAL_SERVER_ERROR', - message: 'Initiative repository not available', - }); - } - return ctx.initiativeRepository; -} - -/** - * Helper to ensure phaseRepository is available in context. 
- */ -function requirePhaseRepository(ctx: TRPCContext): PhaseRepository { - if (!ctx.phaseRepository) { - throw new TRPCError({ - code: 'INTERNAL_SERVER_ERROR', - message: 'Phase repository not available', - }); - } - return ctx.phaseRepository; -} - -/** - * Helper to ensure planRepository is available in context. - */ -function requirePlanRepository(ctx: TRPCContext): PlanRepository { - if (!ctx.planRepository) { - throw new TRPCError({ - code: 'INTERNAL_SERVER_ERROR', - message: 'Plan repository not available', - }); - } - return ctx.planRepository; -} - -/** - * Helper to ensure phaseDispatchManager is available in context. - */ -function requirePhaseDispatchManager(ctx: TRPCContext): PhaseDispatchManager { - if (!ctx.phaseDispatchManager) { - throw new TRPCError({ - code: 'INTERNAL_SERVER_ERROR', - message: 'Phase dispatch manager not available', - }); - } - return ctx.phaseDispatchManager; -} - -// ============================================================================= -// Application Router with Procedures -// ============================================================================= - -/** - * Application router with all procedures. 
- * - * Procedures: - * - health: Quick health check with uptime and process count - * - status: Full server status with process list - * - spawnAgent: Spawn a new agent to work on a task - * - stopAgent: Stop a running agent - * - listAgents: List all agents with status - * - getAgent: Get agent details by name or ID - * - getAgentByName: Get agent by name - * - resumeAgent: Resume an agent waiting for input - * - getAgentResult: Get result of agent's work - * - getAgentQuestions: Get pending questions for an agent waiting for input - * - listWaitingAgents: List agents currently waiting for user input - * - listTasks: List tasks for a plan - * - getTask: Get task by ID - * - updateTaskStatus: Update task status - * - listMessages: List messages with optional filtering - * - getMessage: Get message by ID - * - respondToMessage: Respond to a message - * - queueTask: Queue a task for dispatch - * - dispatchNext: Dispatch next available task - * - getQueueState: Get dispatch queue state - * - completeTask: Mark a task as complete - * - queueMerge: Queue a completed task for merge - * - processMerges: Process all ready merges in dependency order - * - getMergeQueueStatus: Get merge queue status - * - getNextMergeable: Get next task ready to merge - */ +// Application router export const appRouter = router({ - /** - * Health check procedure. - * - * Returns a lightweight response suitable for health monitoring. - * Calculates uptime from serverStartedAt in context. - */ - health: publicProcedure - .output(healthResponseSchema) - .query(({ ctx }): HealthResponse => { - const uptime = ctx.serverStartedAt - ? Math.floor((Date.now() - ctx.serverStartedAt.getTime()) / 1000) - : 0; - - return { - status: 'ok', - uptime, - processCount: ctx.processCount, - }; - }), - - /** - * Full status procedure. - * - * Returns detailed server state including process list. - * More comprehensive than health for admin/debugging purposes. 
- */ - status: publicProcedure - .output(statusResponseSchema) - .query(({ ctx }): StatusResponse => { - const uptime = ctx.serverStartedAt - ? Math.floor((Date.now() - ctx.serverStartedAt.getTime()) / 1000) - : 0; - - return { - server: { - startedAt: ctx.serverStartedAt?.toISOString() ?? '', - uptime, - pid: process.pid, - }, - // Process list will be populated when ProcessManager integration is complete - processes: [], - }; - }), - - // =========================================================================== - // Agent Procedures - // =========================================================================== - - /** - * Spawn a new agent to work on a task. - * Creates isolated worktree, starts Claude CLI, persists state. - */ - spawnAgent: publicProcedure - .input(spawnAgentInputSchema) - .mutation(async ({ ctx, input }) => { - const agentManager = requireAgentManager(ctx); - return agentManager.spawn({ - name: input.name, - taskId: input.taskId, - prompt: input.prompt, - cwd: input.cwd, - mode: input.mode, - }); - }), - - /** - * Stop a running agent by name or ID. - */ - stopAgent: publicProcedure - .input(agentIdentifierSchema) - .mutation(async ({ ctx, input }) => { - const agentManager = requireAgentManager(ctx); - const agent = await resolveAgent(ctx, input); - await agentManager.stop(agent.id); - return { success: true, name: agent.name }; - }), - - /** - * List all agents with their current status. - */ - listAgents: publicProcedure - .query(async ({ ctx }) => { - const agentManager = requireAgentManager(ctx); - return agentManager.list(); - }), - - /** - * Get agent details by name or ID. - */ - getAgent: publicProcedure - .input(agentIdentifierSchema) - .query(async ({ ctx, input }) => { - return resolveAgent(ctx, input); - }), - - /** - * Get agent by name (convenience method). 
- */ - getAgentByName: publicProcedure - .input(z.object({ name: z.string().min(1) })) - .query(async ({ ctx, input }) => { - const agentManager = requireAgentManager(ctx); - return agentManager.getByName(input.name); - }), - - /** - * Resume an agent that is waiting for input. - * Uses stored session ID to continue with full context. - */ - resumeAgent: publicProcedure - .input(resumeAgentInputSchema) - .mutation(async ({ ctx, input }) => { - const agentManager = requireAgentManager(ctx); - const agent = await resolveAgent(ctx, input); - await agentManager.resume(agent.id, input.answers); - return { success: true, name: agent.name }; - }), - - /** - * Get the result of an agent's work. - * Only available after agent completes or stops. - */ - getAgentResult: publicProcedure - .input(agentIdentifierSchema) - .query(async ({ ctx, input }): Promise<AgentResult | null> => { - const agentManager = requireAgentManager(ctx); - const agent = await resolveAgent(ctx, input); - return agentManager.getResult(agent.id); - }), - - /** - * Get pending questions for an agent waiting for input. - * Returns structured question data (options, multiSelect) from AgentManager. - */ - getAgentQuestions: publicProcedure - .input(agentIdentifierSchema) - .query(async ({ ctx, input }): Promise<PendingQuestions | null> => { - const agentManager = requireAgentManager(ctx); - const agent = await resolveAgent(ctx, input); - return agentManager.getPendingQuestions(agent.id); - }), - - /** - * List agents currently waiting for user input. - * Filters to only agents with status 'waiting_for_input'. 
- */ - listWaitingAgents: publicProcedure - .query(async ({ ctx }) => { - const agentManager = requireAgentManager(ctx); - const allAgents = await agentManager.list(); - return allAgents.filter(agent => agent.status === 'waiting_for_input'); - }), - - // =========================================================================== - // Task Procedures - // =========================================================================== - - /** - * List tasks for a plan. - * Returns tasks ordered by order field. - */ - listTasks: publicProcedure - .input(z.object({ planId: z.string().min(1) })) - .query(async ({ ctx, input }) => { - const taskRepository = requireTaskRepository(ctx); - return taskRepository.findByPlanId(input.planId); - }), - - /** - * Get a task by ID. - * Throws NOT_FOUND if task doesn't exist. - */ - getTask: publicProcedure - .input(z.object({ id: z.string().min(1) })) - .query(async ({ ctx, input }) => { - const taskRepository = requireTaskRepository(ctx); - const task = await taskRepository.findById(input.id); - if (!task) { - throw new TRPCError({ - code: 'NOT_FOUND', - message: `Task '${input.id}' not found`, - }); - } - return task; - }), - - /** - * Update a task's status. - * Returns the updated task. 
- */ - updateTaskStatus: publicProcedure - .input(z.object({ - id: z.string().min(1), - status: z.enum(['pending', 'in_progress', 'completed', 'blocked']), - })) - .mutation(async ({ ctx, input }) => { - const taskRepository = requireTaskRepository(ctx); - // Check task exists first - const existing = await taskRepository.findById(input.id); - if (!existing) { - throw new TRPCError({ - code: 'NOT_FOUND', - message: `Task '${input.id}' not found`, - }); - } - return taskRepository.update(input.id, { status: input.status }); - }), - - // =========================================================================== - // Message Procedures - // =========================================================================== - - /** - * List messages with optional filtering. - * Filter by agent ID or status. - */ - listMessages: publicProcedure - .input(z.object({ - agentId: z.string().optional(), - status: z.enum(['pending', 'read', 'responded']).optional(), - })) - .query(async ({ ctx, input }) => { - const messageRepository = requireMessageRepository(ctx); - - // Get all messages for user (recipientType='user') - let messages = await messageRepository.findByRecipient('user'); - - // Filter by agentId if provided (sender is the agent) - if (input.agentId) { - messages = messages.filter(m => m.senderId === input.agentId); - } - - // Filter by status if provided - if (input.status) { - messages = messages.filter(m => m.status === input.status); - } - - return messages; - }), - - /** - * Get a single message by ID. - * Throws NOT_FOUND if message doesn't exist. - */ - getMessage: publicProcedure - .input(z.object({ id: z.string().min(1) })) - .query(async ({ ctx, input }) => { - const messageRepository = requireMessageRepository(ctx); - const message = await messageRepository.findById(input.id); - if (!message) { - throw new TRPCError({ - code: 'NOT_FOUND', - message: `Message '${input.id}' not found`, - }); - } - return message; - }), - - /** - * Respond to a message. 
- * Updates message with response and sets status to 'responded'. - */ - respondToMessage: publicProcedure - .input(z.object({ - id: z.string().min(1), - response: z.string().min(1), - })) - .mutation(async ({ ctx, input }) => { - const messageRepository = requireMessageRepository(ctx); - - // Check message exists - const existing = await messageRepository.findById(input.id); - if (!existing) { - throw new TRPCError({ - code: 'NOT_FOUND', - message: `Message '${input.id}' not found`, - }); - } - - // Create a response message linked to the original - const responseMessage = await messageRepository.create({ - senderType: 'user', - recipientType: 'agent', - recipientId: existing.senderId, - type: 'response', - content: input.response, - parentMessageId: input.id, - }); - - // Update original message status to 'responded' - await messageRepository.update(input.id, { status: 'responded' }); - - return responseMessage; - }), - - // =========================================================================== - // Dispatch Procedures - // =========================================================================== - - /** - * Queue a task for dispatch. - * Task will be dispatched when all dependencies complete. - */ - queueTask: publicProcedure - .input(z.object({ taskId: z.string().min(1) })) - .mutation(async ({ ctx, input }) => { - const dispatchManager = requireDispatchManager(ctx); - await dispatchManager.queue(input.taskId); - return { success: true }; - }), - - /** - * Dispatch next available task to an agent. - * Returns dispatch result with task and agent info. - */ - dispatchNext: publicProcedure - .mutation(async ({ ctx }) => { - const dispatchManager = requireDispatchManager(ctx); - return dispatchManager.dispatchNext(); - }), - - /** - * Get current queue state. - * Returns queued, ready, and blocked task counts. 
- */ - getQueueState: publicProcedure - .query(async ({ ctx }) => { - const dispatchManager = requireDispatchManager(ctx); - return dispatchManager.getQueueState(); - }), - - /** - * Mark a task as complete. - * Removes from queue and triggers dependent task re-evaluation. - */ - completeTask: publicProcedure - .input(z.object({ taskId: z.string().min(1) })) - .mutation(async ({ ctx, input }) => { - const dispatchManager = requireDispatchManager(ctx); - await dispatchManager.completeTask(input.taskId); - return { success: true }; - }), - - // =========================================================================== - // Coordination Procedures - // =========================================================================== - - /** - * Queue a completed task for merge. - * Task will be merged when all dependencies complete. - */ - queueMerge: publicProcedure - .input(z.object({ taskId: z.string().min(1) })) - .mutation(async ({ ctx, input }) => { - const coordinationManager = requireCoordinationManager(ctx); - await coordinationManager.queueMerge(input.taskId); - return { success: true }; - }), - - /** - * Process all ready merges in dependency order. - * Returns results of all merge operations. - */ - processMerges: publicProcedure - .input(z.object({ - targetBranch: z.string().default('main'), - })) - .mutation(async ({ ctx, input }) => { - const coordinationManager = requireCoordinationManager(ctx); - const results = await coordinationManager.processMerges(input.targetBranch); - return { results }; - }), - - /** - * Get merge queue status. - * Shows queued, in-progress, merged, and conflicted tasks. - */ - getMergeQueueStatus: publicProcedure - .query(async ({ ctx }) => { - const coordinationManager = requireCoordinationManager(ctx); - return coordinationManager.getQueueState(); - }), - - /** - * Get next task ready to merge. - * Returns task with all dependencies resolved. 
- */ - getNextMergeable: publicProcedure - .query(async ({ ctx }) => { - const coordinationManager = requireCoordinationManager(ctx); - return coordinationManager.getNextMergeable(); - }), - - // =========================================================================== - // Initiative Procedures - // =========================================================================== - - /** - * Create a new initiative. - * Returns the created initiative with generated ID. - */ - createInitiative: publicProcedure - .input(z.object({ - name: z.string().min(1), - description: z.string().optional(), - })) - .mutation(async ({ ctx, input }) => { - const repo = requireInitiativeRepository(ctx); - return repo.create({ - name: input.name, - description: input.description ?? null, - status: 'active', - }); - }), - - /** - * List all initiatives with optional status filter. - * Returns initiatives ordered by creation time. - */ - listInitiatives: publicProcedure - .input(z.object({ - status: z.enum(['active', 'completed', 'archived']).optional(), - }).optional()) - .query(async ({ ctx, input }) => { - const repo = requireInitiativeRepository(ctx); - if (input?.status) { - return repo.findByStatus(input.status); - } - return repo.findAll(); - }), - - /** - * Get an initiative by ID. - * Throws NOT_FOUND if initiative doesn't exist. - */ - getInitiative: publicProcedure - .input(z.object({ id: z.string().min(1) })) - .query(async ({ ctx, input }) => { - const repo = requireInitiativeRepository(ctx); - const initiative = await repo.findById(input.id); - if (!initiative) { - throw new TRPCError({ - code: 'NOT_FOUND', - message: `Initiative '${input.id}' not found`, - }); - } - return initiative; - }), - - /** - * Update an initiative. - * Returns the updated initiative. 
- */ - updateInitiative: publicProcedure - .input(z.object({ - id: z.string().min(1), - name: z.string().min(1).optional(), - description: z.string().optional(), - status: z.enum(['active', 'completed', 'archived']).optional(), - })) - .mutation(async ({ ctx, input }) => { - const repo = requireInitiativeRepository(ctx); - const { id, ...data } = input; - return repo.update(id, data); - }), - - // =========================================================================== - // Phase Procedures - // =========================================================================== - - /** - * Create a new phase for an initiative. - * Auto-assigns the next phase number. - */ - createPhase: publicProcedure - .input(z.object({ - initiativeId: z.string().min(1), - name: z.string().min(1), - description: z.string().optional(), - })) - .mutation(async ({ ctx, input }) => { - const repo = requirePhaseRepository(ctx); - const nextNumber = await repo.getNextNumber(input.initiativeId); - return repo.create({ - initiativeId: input.initiativeId, - number: nextNumber, - name: input.name, - description: input.description ?? null, - status: 'pending', - }); - }), - - /** - * List phases for an initiative. - * Returns phases ordered by number. - */ - listPhases: publicProcedure - .input(z.object({ initiativeId: z.string().min(1) })) - .query(async ({ ctx, input }) => { - const repo = requirePhaseRepository(ctx); - return repo.findByInitiativeId(input.initiativeId); - }), - - /** - * Get a phase by ID. - * Throws NOT_FOUND if phase doesn't exist. - */ - getPhase: publicProcedure - .input(z.object({ id: z.string().min(1) })) - .query(async ({ ctx, input }) => { - const repo = requirePhaseRepository(ctx); - const phase = await repo.findById(input.id); - if (!phase) { - throw new TRPCError({ - code: 'NOT_FOUND', - message: `Phase '${input.id}' not found`, - }); - } - return phase; - }), - - /** - * Update a phase. - * Returns the updated phase. 
- */ - updatePhase: publicProcedure - .input(z.object({ - id: z.string().min(1), - name: z.string().min(1).optional(), - description: z.string().optional(), - status: z.enum(['pending', 'in_progress', 'completed', 'blocked']).optional(), - })) - .mutation(async ({ ctx, input }) => { - const repo = requirePhaseRepository(ctx); - const { id, ...data } = input; - return repo.update(id, data); - }), - - /** - * Create multiple phases from Architect breakdown output. - * Accepts pre-numbered phases and creates them in bulk. - */ - createPhasesFromBreakdown: publicProcedure - .input(z.object({ - initiativeId: z.string().min(1), - phases: z.array(z.object({ - number: z.number(), - name: z.string().min(1), - description: z.string(), - })), - })) - .mutation(async ({ ctx, input }) => { - const repo = requirePhaseRepository(ctx); - const created: Phase[] = []; - for (const p of input.phases) { - const phase = await repo.create({ - initiativeId: input.initiativeId, - number: p.number, - name: p.name, - description: p.description, - status: 'pending', - }); - created.push(phase); - } - return created; - }), - - /** - * Create a dependency between two phases. - * The phase with phaseId depends on the phase with dependsOnPhaseId. 
- */ - createPhaseDependency: publicProcedure - .input(z.object({ - phaseId: z.string().min(1), - dependsOnPhaseId: z.string().min(1), - })) - .mutation(async ({ ctx, input }) => { - const repo = requirePhaseRepository(ctx); - - // Validate both phases exist - const phase = await repo.findById(input.phaseId); - if (!phase) { - throw new TRPCError({ - code: 'NOT_FOUND', - message: `Phase '${input.phaseId}' not found`, - }); - } - - const dependsOnPhase = await repo.findById(input.dependsOnPhaseId); - if (!dependsOnPhase) { - throw new TRPCError({ - code: 'NOT_FOUND', - message: `Phase '${input.dependsOnPhaseId}' not found`, - }); - } - - await repo.createDependency(input.phaseId, input.dependsOnPhaseId); - return { success: true }; - }), - - /** - * Get dependencies for a phase. - * Returns IDs of phases that this phase depends on. - */ - getPhaseDependencies: publicProcedure - .input(z.object({ phaseId: z.string().min(1) })) - .query(async ({ ctx, input }) => { - const repo = requirePhaseRepository(ctx); - const dependencies = await repo.getDependencies(input.phaseId); - return { dependencies }; - }), - - // =========================================================================== - // Phase Dispatch Procedures - // =========================================================================== - - /** - * Queue a phase for dispatch. - * Phase will be dispatched when all dependencies complete. - */ - queuePhase: publicProcedure - .input(z.object({ phaseId: z.string().min(1) })) - .mutation(async ({ ctx, input }) => { - const phaseDispatchManager = requirePhaseDispatchManager(ctx); - await phaseDispatchManager.queuePhase(input.phaseId); - return { success: true }; - }), - - /** - * Dispatch next available phase. - * Returns dispatch result with phase info. 
- */ - dispatchNextPhase: publicProcedure - .mutation(async ({ ctx }) => { - const phaseDispatchManager = requirePhaseDispatchManager(ctx); - return phaseDispatchManager.dispatchNextPhase(); - }), - - /** - * Get current phase queue state. - * Returns queued, ready, and blocked phase counts. - */ - getPhaseQueueState: publicProcedure - .query(async ({ ctx }) => { - const phaseDispatchManager = requirePhaseDispatchManager(ctx); - return phaseDispatchManager.getPhaseQueueState(); - }), - - // =========================================================================== - // Plan Procedures - // =========================================================================== - - /** - * Create a new plan for a phase. - * Auto-assigns the next plan number if not provided. - */ - createPlan: publicProcedure - .input(z.object({ - phaseId: z.string().min(1), - number: z.number().int().positive().optional(), - name: z.string().min(1), - description: z.string().optional(), - })) - .mutation(async ({ ctx, input }) => { - const repo = requirePlanRepository(ctx); - const number = input.number ?? await repo.getNextNumber(input.phaseId); - return repo.create({ - phaseId: input.phaseId, - number, - name: input.name, - description: input.description ?? null, - status: 'pending', - }); - }), - - /** - * List plans for a phase. - * Returns plans ordered by number. - */ - listPlans: publicProcedure - .input(z.object({ phaseId: z.string().min(1) })) - .query(async ({ ctx, input }) => { - const repo = requirePlanRepository(ctx); - return repo.findByPhaseId(input.phaseId); - }), - - /** - * Get a plan by ID. - * Throws NOT_FOUND if plan doesn't exist. 
- */ - getPlan: publicProcedure - .input(z.object({ id: z.string().min(1) })) - .query(async ({ ctx, input }) => { - const repo = requirePlanRepository(ctx); - const plan = await repo.findById(input.id); - if (!plan) { - throw new TRPCError({ - code: 'NOT_FOUND', - message: `Plan '${input.id}' not found`, - }); - } - return plan; - }), - - /** - * Update a plan. - * Returns the updated plan. - */ - updatePlan: publicProcedure - .input(z.object({ - id: z.string().min(1), - name: z.string().min(1).optional(), - description: z.string().optional(), - status: z.enum(['pending', 'in_progress', 'completed']).optional(), - })) - .mutation(async ({ ctx, input }) => { - const repo = requirePlanRepository(ctx); - const { id, ...data } = input; - return repo.update(id, data); - }), - - /** - * Create tasks from decomposition agent output. - * Creates all tasks in order, then creates dependencies from number mappings. - */ - createTasksFromDecomposition: publicProcedure - .input(z.object({ - planId: z.string().min(1), - tasks: z.array(z.object({ - number: z.number().int().positive(), - name: z.string().min(1), - description: z.string(), - type: z.enum(['auto', 'checkpoint:human-verify', 'checkpoint:decision', 'checkpoint:human-action']).default('auto'), - dependencies: z.array(z.number().int().positive()).optional(), - })), - })) - .mutation(async ({ ctx, input }) => { - const taskRepo = requireTaskRepository(ctx); - const planRepo = requirePlanRepository(ctx); - - // Verify plan exists - const plan = await planRepo.findById(input.planId); - if (!plan) { - throw new TRPCError({ - code: 'NOT_FOUND', - message: `Plan '${input.planId}' not found`, - }); - } - - // Create tasks in order, building number-to-ID map - const numberToId = new Map<number, string>(); - const created: Task[] = []; - - for (const taskInput of input.tasks) { - const task = await taskRepo.create({ - planId: input.planId, - name: taskInput.name, - description: taskInput.description, - type: taskInput.type, - 
order: taskInput.number, - status: 'pending', - }); - numberToId.set(taskInput.number, task.id); - created.push(task); - } - - // Create dependencies after all tasks exist - for (const taskInput of input.tasks) { - if (taskInput.dependencies && taskInput.dependencies.length > 0) { - const taskId = numberToId.get(taskInput.number)!; - for (const depNumber of taskInput.dependencies) { - const dependsOnTaskId = numberToId.get(depNumber); - if (dependsOnTaskId) { - await taskRepo.createDependency(taskId, dependsOnTaskId); - } - } - } - } - - return created; - }), - - // =========================================================================== - // Architect Spawn Procedures - // =========================================================================== - - /** - * Spawn architect in discuss mode. - * Uses comprehensive discuss prompt to gather context from user. - */ - spawnArchitectDiscuss: publicProcedure - .input(z.object({ - name: z.string().min(1), - initiativeId: z.string().min(1), - context: z.string().optional(), - })) - .mutation(async ({ ctx, input }) => { - const agentManager = requireAgentManager(ctx); - const initiativeRepo = requireInitiativeRepository(ctx); - - const initiative = await initiativeRepo.findById(input.initiativeId); - if (!initiative) { - throw new TRPCError({ - code: 'NOT_FOUND', - message: `Initiative '${input.initiativeId}' not found`, - }); - } - - const prompt = buildDiscussPrompt(initiative, input.context); - - return agentManager.spawn({ - name: input.name, - taskId: input.initiativeId, - prompt, - mode: 'discuss', - }); - }), - - /** - * Spawn architect in breakdown mode. - * Uses comprehensive breakdown prompt to decompose initiative into phases. 
- */ - spawnArchitectBreakdown: publicProcedure - .input(z.object({ - name: z.string().min(1), - initiativeId: z.string().min(1), - contextSummary: z.string().optional(), - })) - .mutation(async ({ ctx, input }) => { - const agentManager = requireAgentManager(ctx); - const initiativeRepo = requireInitiativeRepository(ctx); - - const initiative = await initiativeRepo.findById(input.initiativeId); - if (!initiative) { - throw new TRPCError({ - code: 'NOT_FOUND', - message: `Initiative '${input.initiativeId}' not found`, - }); - } - - const prompt = buildBreakdownPrompt(initiative, input.contextSummary); - - return agentManager.spawn({ - name: input.name, - taskId: input.initiativeId, - prompt, - mode: 'breakdown', - }); - }), - - /** - * Spawn architect in decompose mode. - * Uses comprehensive decompose prompt to break a plan into executable tasks. - */ - spawnArchitectDecompose: publicProcedure - .input(z.object({ - name: z.string().min(1), - planId: z.string().min(1), - context: z.string().optional(), - })) - .mutation(async ({ ctx, input }) => { - const agentManager = requireAgentManager(ctx); - const planRepo = requirePlanRepository(ctx); - const phaseRepo = requirePhaseRepository(ctx); - - // 1. Get plan and its phase - const plan = await planRepo.findById(input.planId); - if (!plan) { - throw new TRPCError({ - code: 'NOT_FOUND', - message: `Plan '${input.planId}' not found`, - }); - } - const phase = await phaseRepo.findById(plan.phaseId); - if (!phase) { - throw new TRPCError({ - code: 'NOT_FOUND', - message: `Phase '${plan.phaseId}' not found`, - }); - } - - // 2. Build decompose prompt - const prompt = buildDecomposePrompt(plan, phase, input.context); - - // 3. 
Spawn agent in decompose mode - return agentManager.spawn({ - name: input.name, - taskId: input.planId, // Associate with plan - prompt, - mode: 'decompose', - }); - }), - - // =========================================================================== - // Subscription Procedures (SSE) - // =========================================================================== - - /** - * Subscribe to ALL domain events. - * General-purpose firehose for the frontend. - * Yields tracked events for client-side reconnection support. - */ - onEvent: publicProcedure - .input(z.object({ lastEventId: z.string().nullish() }).optional()) - .subscription(async function* (opts) { - const signal = opts.signal ?? new AbortController().signal; - yield* eventBusIterable(opts.ctx.eventBus, ALL_EVENT_TYPES, signal); - }), - - /** - * Subscribe to agent-specific events only. - * Targeted stream for the inbox page (agent:spawned, agent:stopped, etc.). - */ - onAgentUpdate: publicProcedure - .input(z.object({ lastEventId: z.string().nullish() }).optional()) - .subscription(async function* (opts) { - const signal = opts.signal ?? new AbortController().signal; - yield* eventBusIterable(opts.ctx.eventBus, AGENT_EVENT_TYPES, signal); - }), - - /** - * Subscribe to task and phase events. - * For the initiative detail page (task:queued, phase:started, etc.). - */ - onTaskUpdate: publicProcedure - .input(z.object({ lastEventId: z.string().nullish() }).optional()) - .subscription(async function* (opts) { - const signal = opts.signal ?? 
new AbortController().signal; - yield* eventBusIterable(opts.ctx.eventBus, TASK_EVENT_TYPES, signal); - }), + ...systemProcedures(publicProcedure), + ...agentProcedures(publicProcedure), + ...taskProcedures(publicProcedure), + ...messageProcedures(publicProcedure), + ...dispatchProcedures(publicProcedure), + ...coordinationProcedures(publicProcedure), + ...initiativeProcedures(publicProcedure), + ...phaseProcedures(publicProcedure), + ...phaseDispatchProcedures(publicProcedure), + ...architectProcedures(publicProcedure), + ...projectProcedures(publicProcedure), + ...pageProcedures(publicProcedure), + ...accountProcedures(publicProcedure), + ...subscriptionProcedures(publicProcedure), }); -/** - * Type of the application router. - * Used by clients for type-safe procedure calls. - */ export type AppRouter = typeof appRouter; diff --git a/src/trpc/routers/_helpers.ts b/src/trpc/routers/_helpers.ts new file mode 100644 index 0000000..006f0e1 --- /dev/null +++ b/src/trpc/routers/_helpers.ts @@ -0,0 +1,128 @@ +/** + * Router Helpers + * + * Shared require*() helpers that validate context dependencies + * and throw TRPCError when a required dependency is missing. 
+ */ + +import { TRPCError } from '@trpc/server'; +import type { TRPCContext } from '../context.js'; +import type { TaskRepository } from '../../db/repositories/task-repository.js'; +import type { MessageRepository } from '../../db/repositories/message-repository.js'; +import type { InitiativeRepository } from '../../db/repositories/initiative-repository.js'; +import type { PhaseRepository } from '../../db/repositories/phase-repository.js'; +import type { PageRepository } from '../../db/repositories/page-repository.js'; +import type { ProjectRepository } from '../../db/repositories/project-repository.js'; +import type { AccountRepository } from '../../db/repositories/account-repository.js'; +import type { DispatchManager, PhaseDispatchManager } from '../../dispatch/types.js'; +import type { CoordinationManager } from '../../coordination/types.js'; + +export function requireAgentManager(ctx: TRPCContext) { + if (!ctx.agentManager) { + throw new TRPCError({ + code: 'INTERNAL_SERVER_ERROR', + message: 'Agent manager not available', + }); + } + return ctx.agentManager; +} + +export function requireTaskRepository(ctx: TRPCContext): TaskRepository { + if (!ctx.taskRepository) { + throw new TRPCError({ + code: 'INTERNAL_SERVER_ERROR', + message: 'Task repository not available', + }); + } + return ctx.taskRepository; +} + +export function requireMessageRepository(ctx: TRPCContext): MessageRepository { + if (!ctx.messageRepository) { + throw new TRPCError({ + code: 'INTERNAL_SERVER_ERROR', + message: 'Message repository not available', + }); + } + return ctx.messageRepository; +} + +export function requireDispatchManager(ctx: TRPCContext): DispatchManager { + if (!ctx.dispatchManager) { + throw new TRPCError({ + code: 'INTERNAL_SERVER_ERROR', + message: 'Dispatch manager not available', + }); + } + return ctx.dispatchManager; +} + +export function requireCoordinationManager(ctx: TRPCContext): CoordinationManager { + if (!ctx.coordinationManager) { + throw new TRPCError({ + 
code: 'INTERNAL_SERVER_ERROR', + message: 'Coordination manager not available', + }); + } + return ctx.coordinationManager; +} + +export function requireInitiativeRepository(ctx: TRPCContext): InitiativeRepository { + if (!ctx.initiativeRepository) { + throw new TRPCError({ + code: 'INTERNAL_SERVER_ERROR', + message: 'Initiative repository not available', + }); + } + return ctx.initiativeRepository; +} + +export function requirePhaseRepository(ctx: TRPCContext): PhaseRepository { + if (!ctx.phaseRepository) { + throw new TRPCError({ + code: 'INTERNAL_SERVER_ERROR', + message: 'Phase repository not available', + }); + } + return ctx.phaseRepository; +} + +export function requirePhaseDispatchManager(ctx: TRPCContext): PhaseDispatchManager { + if (!ctx.phaseDispatchManager) { + throw new TRPCError({ + code: 'INTERNAL_SERVER_ERROR', + message: 'Phase dispatch manager not available', + }); + } + return ctx.phaseDispatchManager; +} + +export function requirePageRepository(ctx: TRPCContext): PageRepository { + if (!ctx.pageRepository) { + throw new TRPCError({ + code: 'INTERNAL_SERVER_ERROR', + message: 'Page repository not available', + }); + } + return ctx.pageRepository; +} + +export function requireProjectRepository(ctx: TRPCContext): ProjectRepository { + if (!ctx.projectRepository) { + throw new TRPCError({ + code: 'INTERNAL_SERVER_ERROR', + message: 'Project repository not available', + }); + } + return ctx.projectRepository; +} + +export function requireAccountRepository(ctx: TRPCContext): AccountRepository { + if (!ctx.accountRepository) { + throw new TRPCError({ + code: 'INTERNAL_SERVER_ERROR', + message: 'Account repository not available', + }); + } + return ctx.accountRepository; +} diff --git a/src/trpc/routers/account.ts b/src/trpc/routers/account.ts new file mode 100644 index 0000000..99c638d --- /dev/null +++ b/src/trpc/routers/account.ts @@ -0,0 +1,76 @@ +/** + * Account Router — list, add, remove, refresh, update auth, mark exhausted, providers + */ + 
+import { z } from 'zod'; +import type { ProcedureBuilder } from '../trpc.js'; +import { requireAccountRepository } from './_helpers.js'; +import { listProviders as listProviderNames } from '../../agent/providers/registry.js'; + +export function accountProcedures(publicProcedure: ProcedureBuilder) { + return { + listAccounts: publicProcedure + .query(async ({ ctx }) => { + const repo = requireAccountRepository(ctx); + return repo.findAll(); + }), + + addAccount: publicProcedure + .input(z.object({ + email: z.string().min(1), + provider: z.string().default('claude'), + configJson: z.string().optional(), + credentials: z.string().optional(), + })) + .mutation(async ({ ctx, input }) => { + const repo = requireAccountRepository(ctx); + return repo.create({ + email: input.email, + provider: input.provider, + configJson: input.configJson, + credentials: input.credentials, + }); + }), + + removeAccount: publicProcedure + .input(z.object({ id: z.string().min(1) })) + .mutation(async ({ ctx, input }) => { + const repo = requireAccountRepository(ctx); + await repo.delete(input.id); + return { success: true }; + }), + + refreshAccounts: publicProcedure + .mutation(async ({ ctx }) => { + const repo = requireAccountRepository(ctx); + const cleared = await repo.clearExpiredExhaustion(); + return { cleared }; + }), + + updateAccountAuth: publicProcedure + .input(z.object({ + id: z.string().min(1), + configJson: z.string(), + credentials: z.string(), + })) + .mutation(async ({ ctx, input }) => { + const repo = requireAccountRepository(ctx); + return repo.updateAccountAuth(input.id, input.configJson, input.credentials); + }), + + markAccountExhausted: publicProcedure + .input(z.object({ + id: z.string().min(1), + until: z.string().datetime(), + })) + .mutation(async ({ ctx, input }) => { + const repo = requireAccountRepository(ctx); + return repo.markExhausted(input.id, new Date(input.until)); + }), + + listProviderNames: publicProcedure + .query(() => { + return 
listProviderNames(); + }), + }; +} diff --git a/src/trpc/routers/agent.ts b/src/trpc/routers/agent.ts new file mode 100644 index 0000000..36d15c8 --- /dev/null +++ b/src/trpc/routers/agent.ts @@ -0,0 +1,247 @@ +/** + * Agent Router — spawn, stop, delete, list, get, resume, result, questions, output + */ + +import { TRPCError } from '@trpc/server'; +import { z } from 'zod'; +import { join } from 'node:path'; +import { readFile } from 'node:fs/promises'; +import { tracked, type TrackedEnvelope } from '@trpc/server'; +import type { ProcedureBuilder } from '../trpc.js'; +import type { TRPCContext } from '../context.js'; +import type { AgentInfo, AgentResult, PendingQuestions } from '../../agent/types.js'; +import type { AgentOutputEvent } from '../../events/types.js'; +import { requireAgentManager } from './_helpers.js'; + +export const spawnAgentInputSchema = z.object({ + name: z.string().min(1).optional(), + taskId: z.string().min(1), + prompt: z.string().min(1), + cwd: z.string().optional(), + mode: z.enum(['execute', 'discuss', 'breakdown', 'decompose', 'refine']).optional(), + provider: z.string().optional(), + initiativeId: z.string().min(1).optional(), +}); + +export type SpawnAgentInput = z.infer<typeof spawnAgentInputSchema>; + +export const agentIdentifierSchema = z.object({ + name: z.string().optional(), + id: z.string().optional(), +}).refine(data => data.name || data.id, { + message: 'Either name or id must be provided', +}); + +export type AgentIdentifier = z.infer<typeof agentIdentifierSchema>; + +export const resumeAgentInputSchema = z.object({ + name: z.string().optional(), + id: z.string().optional(), + answers: z.record(z.string(), z.string()), +}).refine(data => data.name || data.id, { + message: 'Either name or id must be provided', +}); + +export type ResumeAgentInput = z.infer<typeof resumeAgentInputSchema>; + +async function resolveAgent( + ctx: TRPCContext, + input: { name?: string; id?: string } +): Promise<AgentInfo> { + if 
(!ctx.agentManager) { + throw new TRPCError({ + code: 'INTERNAL_SERVER_ERROR', + message: 'Agent manager not available', + }); + } + + const agent = input.name + ? await ctx.agentManager.getByName(input.name) + : await ctx.agentManager.get(input.id!); + + if (!agent) { + throw new TRPCError({ + code: 'NOT_FOUND', + message: `Agent '${input.name ?? input.id}' not found`, + }); + } + + return agent; +} + +export function agentProcedures(publicProcedure: ProcedureBuilder) { + return { + spawnAgent: publicProcedure + .input(spawnAgentInputSchema) + .mutation(async ({ ctx, input }) => { + const agentManager = requireAgentManager(ctx); + return agentManager.spawn({ + name: input.name, + taskId: input.taskId, + prompt: input.prompt, + cwd: input.cwd, + mode: input.mode, + provider: input.provider, + initiativeId: input.initiativeId, + }); + }), + + stopAgent: publicProcedure + .input(agentIdentifierSchema) + .mutation(async ({ ctx, input }) => { + const agentManager = requireAgentManager(ctx); + const agent = await resolveAgent(ctx, input); + await agentManager.stop(agent.id); + return { success: true, name: agent.name }; + }), + + deleteAgent: publicProcedure + .input(agentIdentifierSchema) + .mutation(async ({ ctx, input }) => { + const agentManager = requireAgentManager(ctx); + const agent = await resolveAgent(ctx, input); + await agentManager.delete(agent.id); + return { success: true, name: agent.name }; + }), + + dismissAgent: publicProcedure + .input(agentIdentifierSchema) + .mutation(async ({ ctx, input }) => { + const agentManager = requireAgentManager(ctx); + const agent = await resolveAgent(ctx, input); + await agentManager.dismiss(agent.id); + return { success: true, name: agent.name }; + }), + + listAgents: publicProcedure + .query(async ({ ctx }) => { + const agentManager = requireAgentManager(ctx); + return agentManager.list(); + }), + + getAgent: publicProcedure + .input(agentIdentifierSchema) + .query(async ({ ctx, input }) => { + return resolveAgent(ctx, 
input); + }), + + getAgentByName: publicProcedure + .input(z.object({ name: z.string().min(1) })) + .query(async ({ ctx, input }) => { + const agentManager = requireAgentManager(ctx); + return agentManager.getByName(input.name); + }), + + resumeAgent: publicProcedure + .input(resumeAgentInputSchema) + .mutation(async ({ ctx, input }) => { + const agentManager = requireAgentManager(ctx); + const agent = await resolveAgent(ctx, input); + await agentManager.resume(agent.id, input.answers); + return { success: true, name: agent.name }; + }), + + getAgentResult: publicProcedure + .input(agentIdentifierSchema) + .query(async ({ ctx, input }): Promise<AgentResult | null> => { + const agentManager = requireAgentManager(ctx); + const agent = await resolveAgent(ctx, input); + return agentManager.getResult(agent.id); + }), + + getAgentQuestions: publicProcedure + .input(agentIdentifierSchema) + .query(async ({ ctx, input }): Promise<PendingQuestions | null> => { + const agentManager = requireAgentManager(ctx); + const agent = await resolveAgent(ctx, input); + return agentManager.getPendingQuestions(agent.id); + }), + + listWaitingAgents: publicProcedure + .query(async ({ ctx }) => { + const agentManager = requireAgentManager(ctx); + const allAgents = await agentManager.list(); + return allAgents.filter(agent => agent.status === 'waiting_for_input'); + }), + + getAgentOutput: publicProcedure + .input(agentIdentifierSchema) + .query(async ({ ctx, input }): Promise<string> => { + const agent = await resolveAgent(ctx, input); + + const workspaceRoot = ctx.workspaceRoot; + if (!workspaceRoot) { + return ''; + } + + const outputFilePath = join(workspaceRoot, '.cw', 'agent-logs', agent.id, 'output.jsonl'); + + try { + const content = await readFile(outputFilePath, 'utf-8'); + return content; + } catch { + return ''; + } + }), + + onAgentOutput: publicProcedure + .input(z.object({ agentId: z.string().min(1) })) + .subscription(async function* (opts): AsyncGenerator<TrackedEnvelope<{ 
agentId: string; data: string }>> { + const { agentId } = opts.input; + const signal = opts.signal ?? new AbortController().signal; + const agentManager = requireAgentManager(opts.ctx); + const eventBus = opts.ctx.eventBus; + + const buffer = agentManager.getOutputBuffer(agentId); + let eventCounter = 0; + for (const chunk of buffer) { + const id = `${agentId}-buf-${eventCounter++}`; + yield tracked(id, { agentId, data: chunk }); + } + + const queue: string[] = []; + let resolve: (() => void) | null = null; + + const handler = (event: AgentOutputEvent) => { + if (event.payload.agentId !== agentId) return; + queue.push(event.payload.data); + if (resolve) { + const r = resolve; + resolve = null; + r(); + } + }; + + eventBus.on('agent:output', handler); + + const cleanup = () => { + eventBus.off('agent:output', handler); + if (resolve) { + const r = resolve; + resolve = null; + r(); + } + }; + + signal.addEventListener('abort', cleanup, { once: true }); + + try { + while (!signal.aborted) { + while (queue.length > 0) { + const data = queue.shift()!; + const id = `${agentId}-live-${eventCounter++}`; + yield tracked(id, { agentId, data }); + } + + if (!signal.aborted) { + await new Promise<void>((r) => { + resolve = r; + }); + } + } + } finally { + cleanup(); + } + }), + }; +} diff --git a/src/trpc/routers/architect.ts b/src/trpc/routers/architect.ts new file mode 100644 index 0000000..717da93 --- /dev/null +++ b/src/trpc/routers/architect.ts @@ -0,0 +1,217 @@ +/** + * Architect Router — discuss, breakdown, refine, decompose spawn procedures + */ + +import { TRPCError } from '@trpc/server'; +import { z } from 'zod'; +import type { ProcedureBuilder } from '../trpc.js'; +import { + requireAgentManager, + requireInitiativeRepository, + requirePhaseRepository, + requirePageRepository, + requireTaskRepository, +} from './_helpers.js'; +import { + buildDiscussPrompt, + buildBreakdownPrompt, + buildRefinePrompt, + buildDecomposePrompt, +} from '../../agent/prompts.js'; + 
+export function architectProcedures(publicProcedure: ProcedureBuilder) { + return { + spawnArchitectDiscuss: publicProcedure + .input(z.object({ + name: z.string().min(1).optional(), + initiativeId: z.string().min(1), + context: z.string().optional(), + provider: z.string().optional(), + })) + .mutation(async ({ ctx, input }) => { + const agentManager = requireAgentManager(ctx); + const initiativeRepo = requireInitiativeRepository(ctx); + const taskRepo = requireTaskRepository(ctx); + + const initiative = await initiativeRepo.findById(input.initiativeId); + if (!initiative) { + throw new TRPCError({ + code: 'NOT_FOUND', + message: `Initiative '${input.initiativeId}' not found`, + }); + } + + const task = await taskRepo.create({ + initiativeId: input.initiativeId, + name: `Discuss: ${initiative.name}`, + description: input.context ?? 'Gather context and requirements for initiative', + category: 'discuss', + status: 'in_progress', + }); + + const prompt = buildDiscussPrompt(); + + return agentManager.spawn({ + name: input.name, + taskId: task.id, + prompt, + mode: 'discuss', + provider: input.provider, + initiativeId: input.initiativeId, + inputContext: { initiative }, + }); + }), + + spawnArchitectBreakdown: publicProcedure + .input(z.object({ + name: z.string().min(1).optional(), + initiativeId: z.string().min(1), + contextSummary: z.string().optional(), + provider: z.string().optional(), + })) + .mutation(async ({ ctx, input }) => { + const agentManager = requireAgentManager(ctx); + const initiativeRepo = requireInitiativeRepository(ctx); + const taskRepo = requireTaskRepository(ctx); + + const initiative = await initiativeRepo.findById(input.initiativeId); + if (!initiative) { + throw new TRPCError({ + code: 'NOT_FOUND', + message: `Initiative '${input.initiativeId}' not found`, + }); + } + + const task = await taskRepo.create({ + initiativeId: input.initiativeId, + name: `Breakdown: ${initiative.name}`, + description: 'Break initiative into phases', + category: 
'breakdown', + status: 'in_progress', + }); + + let pages: import('../../agent/content-serializer.js').PageForSerialization[] | undefined; + if (ctx.pageRepository) { + const rawPages = await ctx.pageRepository.findByInitiativeId(input.initiativeId); + if (rawPages.length > 0) { + pages = rawPages; + } + } + + const prompt = buildBreakdownPrompt(); + + return agentManager.spawn({ + name: input.name, + taskId: task.id, + prompt, + mode: 'breakdown', + provider: input.provider, + initiativeId: input.initiativeId, + inputContext: { initiative, pages }, + }); + }), + + spawnArchitectRefine: publicProcedure + .input(z.object({ + name: z.string().min(1).optional(), + initiativeId: z.string().min(1), + instruction: z.string().optional(), + provider: z.string().optional(), + })) + .mutation(async ({ ctx, input }) => { + const agentManager = requireAgentManager(ctx); + const initiativeRepo = requireInitiativeRepository(ctx); + const pageRepo = requirePageRepository(ctx); + const taskRepo = requireTaskRepository(ctx); + + const initiative = await initiativeRepo.findById(input.initiativeId); + if (!initiative) { + throw new TRPCError({ + code: 'NOT_FOUND', + message: `Initiative '${input.initiativeId}' not found`, + }); + } + + const pages = await pageRepo.findByInitiativeId(input.initiativeId); + + if (pages.length === 0) { + throw new TRPCError({ + code: 'BAD_REQUEST', + message: 'Initiative has no page content to refine', + }); + } + + const task = await taskRepo.create({ + initiativeId: input.initiativeId, + name: `Refine: ${initiative.name}`, + description: input.instruction ?? 
'Review and propose edits to initiative content', + category: 'refine', + status: 'in_progress', + }); + + const prompt = buildRefinePrompt(); + + return agentManager.spawn({ + name: input.name, + taskId: task.id, + prompt, + mode: 'refine', + provider: input.provider, + initiativeId: input.initiativeId, + inputContext: { initiative, pages }, + }); + }), + + spawnArchitectDecompose: publicProcedure + .input(z.object({ + name: z.string().min(1).optional(), + phaseId: z.string().min(1), + taskName: z.string().min(1).optional(), + context: z.string().optional(), + provider: z.string().optional(), + })) + .mutation(async ({ ctx, input }) => { + const agentManager = requireAgentManager(ctx); + const phaseRepo = requirePhaseRepository(ctx); + const taskRepo = requireTaskRepository(ctx); + const initiativeRepo = requireInitiativeRepository(ctx); + + const phase = await phaseRepo.findById(input.phaseId); + if (!phase) { + throw new TRPCError({ + code: 'NOT_FOUND', + message: `Phase '${input.phaseId}' not found`, + }); + } + const initiative = await initiativeRepo.findById(phase.initiativeId); + if (!initiative) { + throw new TRPCError({ + code: 'NOT_FOUND', + message: `Initiative '${phase.initiativeId}' not found`, + }); + } + + const decomposeTaskName = input.taskName ?? `Decompose: ${phase.name}`; + const task = await taskRepo.create({ + phaseId: phase.id, + initiativeId: phase.initiativeId, + name: decomposeTaskName, + description: input.context ?? 
`Break phase "${phase.name}" into executable tasks`, + category: 'decompose', + status: 'in_progress', + }); + + const prompt = buildDecomposePrompt(); + + return agentManager.spawn({ + name: input.name, + taskId: task.id, + prompt, + mode: 'decompose', + provider: input.provider, + initiativeId: phase.initiativeId, + inputContext: { initiative, phase, task }, + }); + }), + }; +} diff --git a/src/trpc/routers/coordination.ts b/src/trpc/routers/coordination.ts new file mode 100644 index 0000000..2b75617 --- /dev/null +++ b/src/trpc/routers/coordination.ts @@ -0,0 +1,41 @@ +/** + * Coordination Router — merge queue operations + */ + +import { z } from 'zod'; +import type { ProcedureBuilder } from '../trpc.js'; +import { requireCoordinationManager } from './_helpers.js'; + +export function coordinationProcedures(publicProcedure: ProcedureBuilder) { + return { + queueMerge: publicProcedure + .input(z.object({ taskId: z.string().min(1) })) + .mutation(async ({ ctx, input }) => { + const coordinationManager = requireCoordinationManager(ctx); + await coordinationManager.queueMerge(input.taskId); + return { success: true }; + }), + + processMerges: publicProcedure + .input(z.object({ + targetBranch: z.string().default('main'), + })) + .mutation(async ({ ctx, input }) => { + const coordinationManager = requireCoordinationManager(ctx); + const results = await coordinationManager.processMerges(input.targetBranch); + return { results }; + }), + + getMergeQueueStatus: publicProcedure + .query(async ({ ctx }) => { + const coordinationManager = requireCoordinationManager(ctx); + return coordinationManager.getQueueState(); + }), + + getNextMergeable: publicProcedure + .query(async ({ ctx }) => { + const coordinationManager = requireCoordinationManager(ctx); + return coordinationManager.getNextMergeable(); + }), + }; +} diff --git a/src/trpc/routers/dispatch.ts b/src/trpc/routers/dispatch.ts new file mode 100644 index 0000000..13f4b0c --- /dev/null +++ 
b/src/trpc/routers/dispatch.ts @@ -0,0 +1,39 @@ +/** + * Dispatch Router — queue, dispatchNext, getQueueState, completeTask + */ + +import { z } from 'zod'; +import type { ProcedureBuilder } from '../trpc.js'; +import { requireDispatchManager } from './_helpers.js'; + +export function dispatchProcedures(publicProcedure: ProcedureBuilder) { + return { + queueTask: publicProcedure + .input(z.object({ taskId: z.string().min(1) })) + .mutation(async ({ ctx, input }) => { + const dispatchManager = requireDispatchManager(ctx); + await dispatchManager.queue(input.taskId); + return { success: true }; + }), + + dispatchNext: publicProcedure + .mutation(async ({ ctx }) => { + const dispatchManager = requireDispatchManager(ctx); + return dispatchManager.dispatchNext(); + }), + + getQueueState: publicProcedure + .query(async ({ ctx }) => { + const dispatchManager = requireDispatchManager(ctx); + return dispatchManager.getQueueState(); + }), + + completeTask: publicProcedure + .input(z.object({ taskId: z.string().min(1) })) + .mutation(async ({ ctx, input }) => { + const dispatchManager = requireDispatchManager(ctx); + await dispatchManager.completeTask(input.taskId); + return { success: true }; + }), + }; +} diff --git a/src/trpc/routers/initiative.ts b/src/trpc/routers/initiative.ts new file mode 100644 index 0000000..074cc59 --- /dev/null +++ b/src/trpc/routers/initiative.ts @@ -0,0 +1,122 @@ +/** + * Initiative Router — create, list, get, update, merge config + */ + +import { TRPCError } from '@trpc/server'; +import { z } from 'zod'; +import type { ProcedureBuilder } from '../trpc.js'; +import { requireInitiativeRepository, requireProjectRepository, requirePageRepository } from './_helpers.js'; + +export function initiativeProcedures(publicProcedure: ProcedureBuilder) { + return { + createInitiative: publicProcedure + .input(z.object({ + name: z.string().min(1), + projectIds: z.array(z.string().min(1)).min(1).optional(), + })) + .mutation(async ({ ctx, input }) => { + const 
repo = requireInitiativeRepository(ctx); + + if (input.projectIds && input.projectIds.length > 0) { + const projectRepo = requireProjectRepository(ctx); + for (const pid of input.projectIds) { + const project = await projectRepo.findById(pid); + if (!project) { + throw new TRPCError({ + code: 'NOT_FOUND', + message: `Project '${pid}' not found`, + }); + } + } + } + + const initiative = await repo.create({ + name: input.name, + status: 'active', + }); + + if (input.projectIds && input.projectIds.length > 0) { + const projectRepo = requireProjectRepository(ctx); + await projectRepo.setInitiativeProjects(initiative.id, input.projectIds); + } + + if (ctx.pageRepository) { + await ctx.pageRepository.create({ + initiativeId: initiative.id, + parentPageId: null, + title: input.name, + content: null, + sortOrder: 0, + }); + } + + return initiative; + }), + + listInitiatives: publicProcedure + .input(z.object({ + status: z.enum(['active', 'completed', 'archived']).optional(), + }).optional()) + .query(async ({ ctx, input }) => { + const repo = requireInitiativeRepository(ctx); + if (input?.status) { + return repo.findByStatus(input.status); + } + return repo.findAll(); + }), + + getInitiative: publicProcedure + .input(z.object({ id: z.string().min(1) })) + .query(async ({ ctx, input }) => { + const repo = requireInitiativeRepository(ctx); + const initiative = await repo.findById(input.id); + if (!initiative) { + throw new TRPCError({ + code: 'NOT_FOUND', + message: `Initiative '${input.id}' not found`, + }); + } + + let projects: Array<{ id: string; name: string; url: string }> = []; + if (ctx.projectRepository) { + const fullProjects = await ctx.projectRepository.findProjectsByInitiativeId(input.id); + projects = fullProjects.map((p) => ({ id: p.id, name: p.name, url: p.url })); + } + + return { ...initiative, projects }; + }), + + updateInitiative: publicProcedure + .input(z.object({ + id: z.string().min(1), + name: z.string().min(1).optional(), + status: 
z.enum(['active', 'completed', 'archived']).optional(), + })) + .mutation(async ({ ctx, input }) => { + const repo = requireInitiativeRepository(ctx); + const { id, ...data } = input; + return repo.update(id, data); + }), + + updateInitiativeMergeConfig: publicProcedure + .input(z.object({ + initiativeId: z.string().min(1), + mergeRequiresApproval: z.boolean().optional(), + mergeTarget: z.string().nullable().optional(), + })) + .mutation(async ({ ctx, input }) => { + const repo = requireInitiativeRepository(ctx); + const { initiativeId, ...data } = input; + + const existing = await repo.findById(initiativeId); + if (!existing) { + throw new TRPCError({ + code: 'NOT_FOUND', + message: `Initiative '${initiativeId}' not found`, + }); + } + + return repo.update(initiativeId, data); + }), + }; +} diff --git a/src/trpc/routers/message.ts b/src/trpc/routers/message.ts new file mode 100644 index 0000000..9328cfe --- /dev/null +++ b/src/trpc/routers/message.ts @@ -0,0 +1,77 @@ +/** + * Message Router — list, get, respond + */ + +import { TRPCError } from '@trpc/server'; +import { z } from 'zod'; +import type { ProcedureBuilder } from '../trpc.js'; +import { requireMessageRepository } from './_helpers.js'; + +export function messageProcedures(publicProcedure: ProcedureBuilder) { + return { + listMessages: publicProcedure + .input(z.object({ + agentId: z.string().optional(), + status: z.enum(['pending', 'read', 'responded']).optional(), + })) + .query(async ({ ctx, input }) => { + const messageRepository = requireMessageRepository(ctx); + + let messages = await messageRepository.findByRecipient('user'); + + if (input.agentId) { + messages = messages.filter(m => m.senderId === input.agentId); + } + + if (input.status) { + messages = messages.filter(m => m.status === input.status); + } + + return messages; + }), + + getMessage: publicProcedure + .input(z.object({ id: z.string().min(1) })) + .query(async ({ ctx, input }) => { + const messageRepository = 
requireMessageRepository(ctx); + const message = await messageRepository.findById(input.id); + if (!message) { + throw new TRPCError({ + code: 'NOT_FOUND', + message: `Message '${input.id}' not found`, + }); + } + return message; + }), + + respondToMessage: publicProcedure + .input(z.object({ + id: z.string().min(1), + response: z.string().min(1), + })) + .mutation(async ({ ctx, input }) => { + const messageRepository = requireMessageRepository(ctx); + + const existing = await messageRepository.findById(input.id); + if (!existing) { + throw new TRPCError({ + code: 'NOT_FOUND', + message: `Message '${input.id}' not found`, + }); + } + + const responseMessage = await messageRepository.create({ + senderType: 'user', + recipientType: 'agent', + recipientId: existing.senderId, + type: 'response', + content: input.response, + parentMessageId: input.id, + }); + + await messageRepository.update(input.id, { status: 'responded' }); + + return responseMessage; + }), + }; +} diff --git a/src/trpc/routers/page.ts b/src/trpc/routers/page.ts new file mode 100644 index 0000000..cb79833 --- /dev/null +++ b/src/trpc/routers/page.ts @@ -0,0 +1,105 @@ +/** + * Page Router — CRUD, tree operations + */ + +import { TRPCError } from '@trpc/server'; +import { z } from 'zod'; +import type { ProcedureBuilder } from '../trpc.js'; +import { requirePageRepository } from './_helpers.js'; + +export function pageProcedures(publicProcedure: ProcedureBuilder) { + return { + getRootPage: publicProcedure + .input(z.object({ initiativeId: z.string().min(1) })) + .query(async ({ ctx, input }) => { + const repo = requirePageRepository(ctx); + return repo.getOrCreateRootPage(input.initiativeId); + }), + + getPage: publicProcedure + .input(z.object({ id: z.string().min(1) })) + .query(async ({ ctx, input }) => { + const repo = requirePageRepository(ctx); + const page = await repo.findById(input.id); + if (!page) { + throw new TRPCError({ + code: 'NOT_FOUND', + message: `Page '${input.id}' not found`, + }); 
+ } + return page; + }), + + listPages: publicProcedure + .input(z.object({ initiativeId: z.string().min(1) })) + .query(async ({ ctx, input }) => { + const repo = requirePageRepository(ctx); + return repo.findByInitiativeId(input.initiativeId); + }), + + listChildPages: publicProcedure + .input(z.object({ parentPageId: z.string().min(1) })) + .query(async ({ ctx, input }) => { + const repo = requirePageRepository(ctx); + return repo.findByParentPageId(input.parentPageId); + }), + + createPage: publicProcedure + .input(z.object({ + initiativeId: z.string().min(1), + parentPageId: z.string().min(1).nullable(), + title: z.string().min(1), + })) + .mutation(async ({ ctx, input }) => { + const repo = requirePageRepository(ctx); + const page = await repo.create({ + initiativeId: input.initiativeId, + parentPageId: input.parentPageId, + title: input.title, + content: null, + sortOrder: 0, + }); + ctx.eventBus.emit({ + type: 'page:created', + timestamp: new Date(), + payload: { pageId: page.id, initiativeId: input.initiativeId, title: input.title }, + }); + return page; + }), + + updatePage: publicProcedure + .input(z.object({ + id: z.string().min(1), + title: z.string().min(1).optional(), + content: z.string().nullable().optional(), + sortOrder: z.number().int().optional(), + })) + .mutation(async ({ ctx, input }) => { + const repo = requirePageRepository(ctx); + const { id, ...data } = input; + const page = await repo.update(id, data); + ctx.eventBus.emit({ + type: 'page:updated', + timestamp: new Date(), + payload: { pageId: id, initiativeId: page.initiativeId, title: input.title }, + }); + return page; + }), + + deletePage: publicProcedure + .input(z.object({ id: z.string().min(1) })) + .mutation(async ({ ctx, input }) => { + const repo = requirePageRepository(ctx); + const page = await repo.findById(input.id); + await repo.delete(input.id); + if (page) { + ctx.eventBus.emit({ + type: 'page:deleted', + timestamp: new Date(), + payload: { pageId: input.id, 
initiativeId: page.initiativeId }, + }); + } + return { success: true }; + }), + }; +} diff --git a/src/trpc/routers/phase-dispatch.ts b/src/trpc/routers/phase-dispatch.ts new file mode 100644 index 0000000..f8b2302 --- /dev/null +++ b/src/trpc/routers/phase-dispatch.ts @@ -0,0 +1,94 @@ +/** + * Phase Dispatch Router — queue, dispatch, state, child tasks + */ + +import { TRPCError } from '@trpc/server'; +import { z } from 'zod'; +import type { Task } from '../../db/schema.js'; +import type { ProcedureBuilder } from '../trpc.js'; +import { requirePhaseDispatchManager, requireTaskRepository } from './_helpers.js'; + +export function phaseDispatchProcedures(publicProcedure: ProcedureBuilder) { + return { + queuePhase: publicProcedure + .input(z.object({ phaseId: z.string().min(1) })) + .mutation(async ({ ctx, input }) => { + const phaseDispatchManager = requirePhaseDispatchManager(ctx); + await phaseDispatchManager.queuePhase(input.phaseId); + return { success: true }; + }), + + dispatchNextPhase: publicProcedure + .mutation(async ({ ctx }) => { + const phaseDispatchManager = requirePhaseDispatchManager(ctx); + return phaseDispatchManager.dispatchNextPhase(); + }), + + getPhaseQueueState: publicProcedure + .query(async ({ ctx }) => { + const phaseDispatchManager = requirePhaseDispatchManager(ctx); + return phaseDispatchManager.getPhaseQueueState(); + }), + + createChildTasks: publicProcedure + .input(z.object({ + parentTaskId: z.string().min(1), + tasks: z.array(z.object({ + number: z.number().int().positive(), + name: z.string().min(1), + description: z.string(), + type: z.enum(['auto', 'checkpoint:human-verify', 'checkpoint:decision', 'checkpoint:human-action']).default('auto'), + dependencies: z.array(z.number().int().positive()).optional(), + })), + })) + .mutation(async ({ ctx, input }) => { + const taskRepo = requireTaskRepository(ctx); + + const parentTask = await taskRepo.findById(input.parentTaskId); + if (!parentTask) { + throw new TRPCError({ + code: 
'NOT_FOUND', + message: `Parent task '${input.parentTaskId}' not found`, + }); + } + if (parentTask.category !== 'decompose') { + throw new TRPCError({ + code: 'BAD_REQUEST', + message: `Parent task must have category 'decompose', got '${parentTask.category}'`, + }); + } + + const numberToId = new Map<number, string>(); + const created: Task[] = []; + + for (const taskInput of input.tasks) { + const task = await taskRepo.create({ + parentTaskId: input.parentTaskId, + phaseId: parentTask.phaseId, + initiativeId: parentTask.initiativeId, + name: taskInput.name, + description: taskInput.description, + type: taskInput.type, + order: taskInput.number, + status: 'pending', + }); + numberToId.set(taskInput.number, task.id); + created.push(task); + } + + for (const taskInput of input.tasks) { + if (taskInput.dependencies && taskInput.dependencies.length > 0) { + const taskId = numberToId.get(taskInput.number)!; + for (const depNumber of taskInput.dependencies) { + const dependsOnTaskId = numberToId.get(depNumber); + if (dependsOnTaskId) { + await taskRepo.createDependency(taskId, dependsOnTaskId); + } + } + } + } + + return created; + }), + }; +} diff --git a/src/trpc/routers/phase.ts b/src/trpc/routers/phase.ts new file mode 100644 index 0000000..5211796 --- /dev/null +++ b/src/trpc/routers/phase.ts @@ -0,0 +1,126 @@ +/** + * Phase Router — create, list, get, update, dependencies, bulk create + */ + +import { TRPCError } from '@trpc/server'; +import { z } from 'zod'; +import type { Phase } from '../../db/schema.js'; +import type { ProcedureBuilder } from '../trpc.js'; +import { requirePhaseRepository } from './_helpers.js'; + +export function phaseProcedures(publicProcedure: ProcedureBuilder) { + return { + createPhase: publicProcedure + .input(z.object({ + initiativeId: z.string().min(1), + name: z.string().min(1), + description: z.string().optional(), + })) + .mutation(async ({ ctx, input }) => { + const repo = requirePhaseRepository(ctx); + const nextNumber = await 
repo.getNextNumber(input.initiativeId); + return repo.create({ + initiativeId: input.initiativeId, + number: nextNumber, + name: input.name, + description: input.description ?? null, + status: 'pending', + }); + }), + + listPhases: publicProcedure + .input(z.object({ initiativeId: z.string().min(1) })) + .query(async ({ ctx, input }) => { + const repo = requirePhaseRepository(ctx); + return repo.findByInitiativeId(input.initiativeId); + }), + + getPhase: publicProcedure + .input(z.object({ id: z.string().min(1) })) + .query(async ({ ctx, input }) => { + const repo = requirePhaseRepository(ctx); + const phase = await repo.findById(input.id); + if (!phase) { + throw new TRPCError({ + code: 'NOT_FOUND', + message: `Phase '${input.id}' not found`, + }); + } + return phase; + }), + + updatePhase: publicProcedure + .input(z.object({ + id: z.string().min(1), + name: z.string().min(1).optional(), + description: z.string().optional(), + status: z.enum(['pending', 'in_progress', 'completed', 'blocked']).optional(), + })) + .mutation(async ({ ctx, input }) => { + const repo = requirePhaseRepository(ctx); + const { id, ...data } = input; + return repo.update(id, data); + }), + + createPhasesFromBreakdown: publicProcedure + .input(z.object({ + initiativeId: z.string().min(1), + phases: z.array(z.object({ + number: z.number(), + name: z.string().min(1), + description: z.string(), + })), + })) + .mutation(async ({ ctx, input }) => { + const repo = requirePhaseRepository(ctx); + const created: Phase[] = []; + for (const p of input.phases) { + const phase = await repo.create({ + initiativeId: input.initiativeId, + number: p.number, + name: p.name, + description: p.description, + status: 'pending', + }); + created.push(phase); + } + return created; + }), + + createPhaseDependency: publicProcedure + .input(z.object({ + phaseId: z.string().min(1), + dependsOnPhaseId: z.string().min(1), + })) + .mutation(async ({ ctx, input }) => { + const repo = requirePhaseRepository(ctx); + + const 
phase = await repo.findById(input.phaseId); + if (!phase) { + throw new TRPCError({ + code: 'NOT_FOUND', + message: `Phase '${input.phaseId}' not found`, + }); + } + + const dependsOnPhase = await repo.findById(input.dependsOnPhaseId); + if (!dependsOnPhase) { + throw new TRPCError({ + code: 'NOT_FOUND', + message: `Phase '${input.dependsOnPhaseId}' not found`, + }); + } + + await repo.createDependency(input.phaseId, input.dependsOnPhaseId); + return { success: true }; + }), + + getPhaseDependencies: publicProcedure + .input(z.object({ phaseId: z.string().min(1) })) + .query(async ({ ctx, input }) => { + const repo = requirePhaseRepository(ctx); + const dependencies = await repo.getDependencies(input.phaseId); + return { dependencies }; + }), + }; +} diff --git a/src/trpc/routers/project.ts b/src/trpc/routers/project.ts new file mode 100644 index 0000000..7898306 --- /dev/null +++ b/src/trpc/routers/project.ts @@ -0,0 +1,106 @@ +/** + * Project Router — register, list, get, delete, initiative associations + */ + +import { TRPCError } from '@trpc/server'; +import { z } from 'zod'; +import { join } from 'node:path'; +import { rm } from 'node:fs/promises'; +import type { ProcedureBuilder } from '../trpc.js'; +import { requireProjectRepository } from './_helpers.js'; +import { cloneProject } from '../../git/clone.js'; +import { getProjectCloneDir } from '../../git/project-clones.js'; + +export function projectProcedures(publicProcedure: ProcedureBuilder) { + return { + registerProject: publicProcedure + .input(z.object({ + name: z.string().min(1), + url: z.string().min(1), + })) + .mutation(async ({ ctx, input }) => { + const repo = requireProjectRepository(ctx); + + let project; + try { + project = await repo.create({ name: input.name, url: input.url }); + } catch (error) { + const msg = (error as Error).message; + if (msg.includes('UNIQUE') || msg.includes('unique')) { + throw new TRPCError({ + code: 'CONFLICT', + message: `A project with that name or URL already 
exists`, + }); + } + throw error; + } + + if (ctx.workspaceRoot) { + const clonePath = join(ctx.workspaceRoot, getProjectCloneDir(input.name, project.id)); + + try { + await cloneProject(input.url, clonePath); + } catch (cloneError) { + await repo.delete(project.id); + throw new TRPCError({ + code: 'INTERNAL_SERVER_ERROR', + message: `Failed to clone repository: ${(cloneError as Error).message}`, + }); + } + } + + return project; + }), + + listProjects: publicProcedure + .query(async ({ ctx }) => { + const repo = requireProjectRepository(ctx); + return repo.findAll(); + }), + + getProject: publicProcedure + .input(z.object({ id: z.string().min(1) })) + .query(async ({ ctx, input }) => { + const repo = requireProjectRepository(ctx); + const project = await repo.findById(input.id); + if (!project) { + throw new TRPCError({ + code: 'NOT_FOUND', + message: `Project '${input.id}' not found`, + }); + } + return project; + }), + + deleteProject: publicProcedure + .input(z.object({ id: z.string().min(1) })) + .mutation(async ({ ctx, input }) => { + const repo = requireProjectRepository(ctx); + const project = await repo.findById(input.id); + if (project && ctx.workspaceRoot) { + const clonePath = join(ctx.workspaceRoot, getProjectCloneDir(project.name, project.id)); + await rm(clonePath, { recursive: true, force: true }).catch(() => {}); + } + await repo.delete(input.id); + return { success: true }; + }), + + getInitiativeProjects: publicProcedure + .input(z.object({ initiativeId: z.string().min(1) })) + .query(async ({ ctx, input }) => { + const repo = requireProjectRepository(ctx); + return repo.findProjectsByInitiativeId(input.initiativeId); + }), + + updateInitiativeProjects: publicProcedure + .input(z.object({ + initiativeId: z.string().min(1), + projectIds: z.array(z.string().min(1)).min(1), + })) + .mutation(async ({ ctx, input }) => { + const repo = requireProjectRepository(ctx); + await repo.setInitiativeProjects(input.initiativeId, input.projectIds); + return { 
success: true }; + }), + }; +} diff --git a/src/trpc/routers/subscription.ts b/src/trpc/routers/subscription.ts new file mode 100644 index 0000000..ce95953 --- /dev/null +++ b/src/trpc/routers/subscription.ts @@ -0,0 +1,45 @@ +/** + * Subscription Router — SSE event streams + */ + +import { z } from 'zod'; +import type { ProcedureBuilder } from '../trpc.js'; +import { + eventBusIterable, + ALL_EVENT_TYPES, + AGENT_EVENT_TYPES, + TASK_EVENT_TYPES, + PAGE_EVENT_TYPES, +} from '../subscriptions.js'; + +export function subscriptionProcedures(publicProcedure: ProcedureBuilder) { + return { + onEvent: publicProcedure + .input(z.object({ lastEventId: z.string().nullish() }).optional()) + .subscription(async function* (opts) { + const signal = opts.signal ?? new AbortController().signal; + yield* eventBusIterable(opts.ctx.eventBus, ALL_EVENT_TYPES, signal); + }), + + onAgentUpdate: publicProcedure + .input(z.object({ lastEventId: z.string().nullish() }).optional()) + .subscription(async function* (opts) { + const signal = opts.signal ?? new AbortController().signal; + yield* eventBusIterable(opts.ctx.eventBus, AGENT_EVENT_TYPES, signal); + }), + + onTaskUpdate: publicProcedure + .input(z.object({ lastEventId: z.string().nullish() }).optional()) + .subscription(async function* (opts) { + const signal = opts.signal ?? new AbortController().signal; + yield* eventBusIterable(opts.ctx.eventBus, TASK_EVENT_TYPES, signal); + }), + + onPageUpdate: publicProcedure + .input(z.object({ lastEventId: z.string().nullish() }).optional()) + .subscription(async function* (opts) { + const signal = opts.signal ?? 
new AbortController().signal; + yield* eventBusIterable(opts.ctx.eventBus, PAGE_EVENT_TYPES, signal); + }), + }; +} diff --git a/src/trpc/routers/system.ts b/src/trpc/routers/system.ts new file mode 100644 index 0000000..ccd2c99 --- /dev/null +++ b/src/trpc/routers/system.ts @@ -0,0 +1,110 @@ +/** + * System Router — health, status, systemHealthCheck + */ + +import { z } from 'zod'; +import { join } from 'node:path'; +import { access } from 'node:fs/promises'; +import type { ProcedureBuilder } from '../trpc.js'; +import { requireAccountRepository, requireProjectRepository } from './_helpers.js'; +import { checkAccountHealth } from '../../agent/accounts/usage.js'; +import { getProjectCloneDir } from '../../git/project-clones.js'; + +export const healthResponseSchema = z.object({ + status: z.literal('ok'), + uptime: z.number().int().nonnegative(), + processCount: z.number().int().nonnegative(), +}); + +export type HealthResponse = z.infer<typeof healthResponseSchema>; + +export const processInfoSchema = z.object({ + id: z.string(), + pid: z.number().int().positive(), + command: z.string(), + status: z.string(), + startedAt: z.string(), +}); + +export type ProcessInfo = z.infer<typeof processInfoSchema>; + +export const statusResponseSchema = z.object({ + server: z.object({ + startedAt: z.string(), + uptime: z.number().int().nonnegative(), + pid: z.number().int().positive(), + }), + processes: z.array(processInfoSchema), +}); + +export type StatusResponse = z.infer<typeof statusResponseSchema>; + +export function systemProcedures(publicProcedure: ProcedureBuilder) { + return { + health: publicProcedure + .output(healthResponseSchema) + .query(({ ctx }): HealthResponse => { + const uptime = ctx.serverStartedAt + ? 
Math.floor((Date.now() - ctx.serverStartedAt.getTime()) / 1000) + : 0; + + return { + status: 'ok', + uptime, + processCount: ctx.processCount, + }; + }), + + status: publicProcedure + .output(statusResponseSchema) + .query(({ ctx }): StatusResponse => { + const uptime = ctx.serverStartedAt + ? Math.floor((Date.now() - ctx.serverStartedAt.getTime()) / 1000) + : 0; + + return { + server: { + startedAt: ctx.serverStartedAt?.toISOString() ?? '', + uptime, + pid: process.pid, + }, + processes: [], + }; + }), + + systemHealthCheck: publicProcedure + .query(async ({ ctx }) => { + const uptime = ctx.serverStartedAt + ? Math.floor((Date.now() - ctx.serverStartedAt.getTime()) / 1000) + : 0; + + const accountRepo = requireAccountRepository(ctx); + const allAccounts = await accountRepo.findAll(); + const allAgents = ctx.agentManager ? await ctx.agentManager.list() : []; + + const accounts = await Promise.all( + allAccounts.map(account => checkAccountHealth(account, allAgents, ctx.credentialManager, ctx.workspaceRoot ?? undefined)) + ); + + const projectRepo = requireProjectRepository(ctx); + const allProjects = await projectRepo.findAll(); + + const projects = await Promise.all( + allProjects.map(async project => { + let repoExists = false; + if (ctx.workspaceRoot) { + const clonePath = join(ctx.workspaceRoot, getProjectCloneDir(project.name, project.id)); + try { await access(clonePath); repoExists = true; } catch { repoExists = false; } + } + return { id: project.id, name: project.name, url: project.url, repoExists }; + }) + ); + + return { + server: { status: 'ok' as const, uptime, startedAt: ctx.serverStartedAt?.toISOString() ?? 
null }, + accounts, + projects, + }; + }), + }; +} diff --git a/src/trpc/routers/task.ts b/src/trpc/routers/task.ts new file mode 100644 index 0000000..6c3c4a1 --- /dev/null +++ b/src/trpc/routers/task.ts @@ -0,0 +1,152 @@ +/** + * Task Router — CRUD, approval, listing by parent/initiative/phase + */ + +import { TRPCError } from '@trpc/server'; +import { z } from 'zod'; +import type { ProcedureBuilder } from '../trpc.js'; +import { + requireTaskRepository, + requireInitiativeRepository, + requirePhaseRepository, + requireDispatchManager, +} from './_helpers.js'; + +export function taskProcedures(publicProcedure: ProcedureBuilder) { + return { + listTasks: publicProcedure + .input(z.object({ parentTaskId: z.string().min(1) })) + .query(async ({ ctx, input }) => { + const taskRepository = requireTaskRepository(ctx); + return taskRepository.findByParentTaskId(input.parentTaskId); + }), + + getTask: publicProcedure + .input(z.object({ id: z.string().min(1) })) + .query(async ({ ctx, input }) => { + const taskRepository = requireTaskRepository(ctx); + const task = await taskRepository.findById(input.id); + if (!task) { + throw new TRPCError({ + code: 'NOT_FOUND', + message: `Task '${input.id}' not found`, + }); + } + return task; + }), + + updateTaskStatus: publicProcedure + .input(z.object({ + id: z.string().min(1), + status: z.enum(['pending_approval', 'pending', 'in_progress', 'completed', 'blocked']), + })) + .mutation(async ({ ctx, input }) => { + const taskRepository = requireTaskRepository(ctx); + const existing = await taskRepository.findById(input.id); + if (!existing) { + throw new TRPCError({ + code: 'NOT_FOUND', + message: `Task '${input.id}' not found`, + }); + } + return taskRepository.update(input.id, { status: input.status }); + }), + + createInitiativeTask: publicProcedure + .input(z.object({ + initiativeId: z.string().min(1), + name: z.string().min(1), + description: z.string().optional(), + category: z.enum(['execute', 'research', 'discuss', 
'breakdown', 'decompose', 'refine', 'verify', 'merge', 'review']).optional(), + type: z.enum(['auto', 'checkpoint:human-verify', 'checkpoint:decision', 'checkpoint:human-action']).optional(), + requiresApproval: z.boolean().nullable().optional(), + })) + .mutation(async ({ ctx, input }) => { + const taskRepository = requireTaskRepository(ctx); + const initiativeRepo = requireInitiativeRepository(ctx); + + const initiative = await initiativeRepo.findById(input.initiativeId); + if (!initiative) { + throw new TRPCError({ + code: 'NOT_FOUND', + message: `Initiative '${input.initiativeId}' not found`, + }); + } + + return taskRepository.create({ + initiativeId: input.initiativeId, + name: input.name, + description: input.description ?? null, + category: input.category ?? 'execute', + type: input.type ?? 'auto', + requiresApproval: input.requiresApproval ?? null, + status: 'pending', + }); + }), + + createPhaseTask: publicProcedure + .input(z.object({ + phaseId: z.string().min(1), + name: z.string().min(1), + description: z.string().optional(), + category: z.enum(['execute', 'research', 'discuss', 'breakdown', 'decompose', 'refine', 'verify', 'merge', 'review']).optional(), + type: z.enum(['auto', 'checkpoint:human-verify', 'checkpoint:decision', 'checkpoint:human-action']).optional(), + requiresApproval: z.boolean().nullable().optional(), + })) + .mutation(async ({ ctx, input }) => { + const taskRepository = requireTaskRepository(ctx); + const phaseRepo = requirePhaseRepository(ctx); + + const phase = await phaseRepo.findById(input.phaseId); + if (!phase) { + throw new TRPCError({ + code: 'NOT_FOUND', + message: `Phase '${input.phaseId}' not found`, + }); + } + + return taskRepository.create({ + phaseId: input.phaseId, + name: input.name, + description: input.description ?? null, + category: input.category ?? 'execute', + type: input.type ?? 'auto', + requiresApproval: input.requiresApproval ?? 
null, + status: 'pending', + }); + }), + + listPendingApprovals: publicProcedure + .input(z.object({ + initiativeId: z.string().optional(), + phaseId: z.string().optional(), + category: z.enum(['execute', 'research', 'discuss', 'breakdown', 'decompose', 'refine', 'verify', 'merge', 'review']).optional(), + }).optional()) + .query(async ({ ctx, input }) => { + const taskRepository = requireTaskRepository(ctx); + return taskRepository.findPendingApproval(input); + }), + + listInitiativeTasks: publicProcedure + .input(z.object({ initiativeId: z.string().min(1) })) + .query(async ({ ctx, input }) => { + const taskRepository = requireTaskRepository(ctx); + return taskRepository.findByInitiativeId(input.initiativeId); + }), + + listPhaseTasks: publicProcedure + .input(z.object({ phaseId: z.string().min(1) })) + .query(async ({ ctx, input }) => { + const taskRepository = requireTaskRepository(ctx); + return taskRepository.findByPhaseId(input.phaseId); + }), + + approveTask: publicProcedure + .input(z.object({ taskId: z.string().min(1) })) + .mutation(async ({ ctx, input }) => { + const dispatchManager = requireDispatchManager(ctx); + await dispatchManager.approveTask(input.taskId); + return { success: true }; + }), + }; +} diff --git a/src/trpc/subscriptions.ts b/src/trpc/subscriptions.ts index a7ccec4..4a5cf63 100644 --- a/src/trpc/subscriptions.ts +++ b/src/trpc/subscriptions.ts @@ -37,7 +37,10 @@ export const ALL_EVENT_TYPES: DomainEventType[] = [ 'agent:stopped', 'agent:crashed', 'agent:resumed', + 'agent:account_switched', + 'agent:deleted', 'agent:waiting', + 'agent:output', 'task:queued', 'task:dispatched', 'task:completed', @@ -50,6 +53,9 @@ export const ALL_EVENT_TYPES: DomainEventType[] = [ 'merge:started', 'merge:completed', 'merge:conflicted', + 'page:created', + 'page:updated', + 'page:deleted', ]; /** @@ -60,7 +66,10 @@ export const AGENT_EVENT_TYPES: DomainEventType[] = [ 'agent:stopped', 'agent:crashed', 'agent:resumed', + 'agent:account_switched', + 
'agent:deleted', 'agent:waiting', + 'agent:output', ]; /** @@ -77,6 +86,15 @@ export const TASK_EVENT_TYPES: DomainEventType[] = [ 'phase:blocked', ]; +/** + * Page event types. + */ +export const PAGE_EVENT_TYPES: DomainEventType[] = [ + 'page:created', + 'page:updated', + 'page:deleted', +]; + /** Counter for generating unique event IDs */ let eventCounter = 0; diff --git a/src/trpc/trpc.ts b/src/trpc/trpc.ts new file mode 100644 index 0000000..220840e --- /dev/null +++ b/src/trpc/trpc.ts @@ -0,0 +1,18 @@ +/** + * tRPC Initialization + * + * Extracted from router.ts to break circular dependencies. + * All domain routers import from here instead of router.ts. + */ + +import { initTRPC } from '@trpc/server'; +import type { TRPCContext } from './context.js'; + +const t = initTRPC.context<TRPCContext>().create(); + +export const router = t.router; +export const publicProcedure = t.procedure; +export const middleware = t.middleware; +export const createCallerFactory = t.createCallerFactory; + +export type ProcedureBuilder = typeof t.procedure; diff --git a/vitest.config.ts b/vitest.config.ts index 920ba58..7e8e3fd 100644 --- a/vitest.config.ts +++ b/vitest.config.ts @@ -4,6 +4,9 @@ export default defineConfig({ test: { // Enable test globals (describe, it, expect without imports) globals: true, + env: { + CW_LOG_LEVEL: 'silent', + }, // Test file pattern include: ['**/*.test.ts'], // TypeScript support uses tsconfig.json automatically