feat: add backfill-metrics script and cw backfill-metrics CLI command
Populates the agent_metrics table from existing agent_log_chunks data after the schema migration. Reads chunks in batches of 500, accumulates per-agent counts in memory, then upserts with additive ON CONFLICT DO UPDATE to match the ongoing insertChunk write-path behavior.

- apps/server/scripts/backfill-metrics.ts: core backfillMetrics(db) + CLI wrapper backfillMetricsFromPath(dbPath)
- apps/server/scripts/backfill-metrics.test.ts: 8 tests covering all chunk types, malformed JSON, isolation, empty DB, and re-run double-count behavior
- apps/server/cli/index.ts: new top-level `cw backfill-metrics [--db <path>]` command
- docs/database-migrations.md: "Post-migration backfill scripts" section documenting when and how to run the script

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -13,6 +13,8 @@ import { createDefaultTrpcClient } from './trpc-client.js';
|
|||||||
import { createContainer } from '../container.js';
|
import { createContainer } from '../container.js';
|
||||||
import { findWorkspaceRoot, writeCwrc, defaultCwConfig } from '../config/index.js';
|
import { findWorkspaceRoot, writeCwrc, defaultCwConfig } from '../config/index.js';
|
||||||
import { createModuleLogger } from '../logger/index.js';
|
import { createModuleLogger } from '../logger/index.js';
|
||||||
|
import { backfillMetricsFromPath } from '../scripts/backfill-metrics.js';
|
||||||
|
import { getDbPath } from '../db/index.js';
|
||||||
|
|
||||||
/** Environment variable for custom port */
|
/** Environment variable for custom port */
|
||||||
const CW_PORT_ENV = 'CW_PORT';
|
const CW_PORT_ENV = 'CW_PORT';
|
||||||
@@ -134,6 +136,22 @@ export function createCli(serverHandler?: (port?: number) => Promise<void>): Com
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// Backfill metrics command (standalone — no server, no tRPC)
|
||||||
|
program
|
||||||
|
.command('backfill-metrics')
|
||||||
|
.description('Populate agent_metrics table from existing agent_log_chunks (run once after upgrading)')
|
||||||
|
.option('--db <path>', 'Path to the SQLite database file (defaults to configured DB path)')
|
||||||
|
.action(async (options: { db?: string }) => {
|
||||||
|
const dbPath = options.db ?? getDbPath();
|
||||||
|
console.log(`Backfilling metrics from ${dbPath}...`);
|
||||||
|
try {
|
||||||
|
await backfillMetricsFromPath(dbPath);
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Backfill failed:', (error as Error).message);
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
// Agent command group
|
// Agent command group
|
||||||
const agentCommand = program
|
const agentCommand = program
|
||||||
.command('agent')
|
.command('agent')
|
||||||
|
|||||||
131
apps/server/scripts/backfill-metrics.test.ts
Normal file
131
apps/server/scripts/backfill-metrics.test.ts
Normal file
@@ -0,0 +1,131 @@
|
|||||||
|
/**
 * Tests for the backfill-metrics script.
 *
 * Uses an in-memory test database to verify that backfillMetrics correctly
 * accumulates counts from agent_log_chunks and upserts into agent_metrics.
 */
|
||||||
|
|
||||||
|
import { describe, it, expect, beforeEach } from 'vitest';
|
||||||
|
import { createTestDatabase } from '../db/repositories/drizzle/test-helpers.js';
|
||||||
|
import type { DrizzleDatabase } from '../db/index.js';
|
||||||
|
import { agentLogChunks, agentMetrics } from '../db/index.js';
|
||||||
|
import { backfillMetrics } from './backfill-metrics.js';
|
||||||
|
import { nanoid } from 'nanoid';
|
||||||
|
import { eq } from 'drizzle-orm';
|
||||||
|
|
||||||
|
async function insertChunk(db: DrizzleDatabase, agentId: string, content: object | string) {
|
||||||
|
await db.insert(agentLogChunks).values({
|
||||||
|
id: nanoid(),
|
||||||
|
agentId,
|
||||||
|
agentName: 'test-agent',
|
||||||
|
sessionNumber: 1,
|
||||||
|
content: typeof content === 'string' ? content : JSON.stringify(content),
|
||||||
|
createdAt: new Date(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
describe('backfillMetrics', () => {
|
||||||
|
let db: DrizzleDatabase;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
db = createTestDatabase();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('AskUserQuestion chunks — questionsCount correct', async () => {
|
||||||
|
await insertChunk(db, 'agent-a', { type: 'tool_use', name: 'AskUserQuestion', input: { questions: [{}, {}] } });
|
||||||
|
await insertChunk(db, 'agent-a', { type: 'tool_use', name: 'AskUserQuestion', input: { questions: [{}] } });
|
||||||
|
|
||||||
|
await backfillMetrics(db);
|
||||||
|
|
||||||
|
const rows = await db.select().from(agentMetrics).where(eq(agentMetrics.agentId, 'agent-a'));
|
||||||
|
expect(rows).toHaveLength(1);
|
||||||
|
expect(rows[0].questionsCount).toBe(3);
|
||||||
|
expect(rows[0].subagentsCount).toBe(0);
|
||||||
|
expect(rows[0].compactionsCount).toBe(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Agent tool chunks — subagentsCount correct', async () => {
|
||||||
|
await insertChunk(db, 'agent-b', { type: 'tool_use', name: 'Agent' });
|
||||||
|
await insertChunk(db, 'agent-b', { type: 'tool_use', name: 'Agent' });
|
||||||
|
|
||||||
|
await backfillMetrics(db);
|
||||||
|
|
||||||
|
const rows = await db.select().from(agentMetrics).where(eq(agentMetrics.agentId, 'agent-b'));
|
||||||
|
expect(rows).toHaveLength(1);
|
||||||
|
expect(rows[0].questionsCount).toBe(0);
|
||||||
|
expect(rows[0].subagentsCount).toBe(2);
|
||||||
|
expect(rows[0].compactionsCount).toBe(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Compaction chunks — compactionsCount correct', async () => {
|
||||||
|
await insertChunk(db, 'agent-c', { type: 'system', subtype: 'init', source: 'compact' });
|
||||||
|
|
||||||
|
await backfillMetrics(db);
|
||||||
|
|
||||||
|
const rows = await db.select().from(agentMetrics).where(eq(agentMetrics.agentId, 'agent-c'));
|
||||||
|
expect(rows).toHaveLength(1);
|
||||||
|
expect(rows[0].questionsCount).toBe(0);
|
||||||
|
expect(rows[0].subagentsCount).toBe(0);
|
||||||
|
expect(rows[0].compactionsCount).toBe(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Irrelevant chunk type — no metrics row created', async () => {
|
||||||
|
await insertChunk(db, 'agent-d', { type: 'text', text: 'hello' });
|
||||||
|
|
||||||
|
await backfillMetrics(db);
|
||||||
|
|
||||||
|
const rows = await db.select().from(agentMetrics).where(eq(agentMetrics.agentId, 'agent-d'));
|
||||||
|
expect(rows).toEqual([]);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Malformed JSON chunk — skipped, no crash', async () => {
|
||||||
|
await insertChunk(db, 'agent-e', 'not-valid-json');
|
||||||
|
await insertChunk(db, 'agent-e', { type: 'tool_use', name: 'Agent' });
|
||||||
|
|
||||||
|
await expect(backfillMetrics(db)).resolves.not.toThrow();
|
||||||
|
|
||||||
|
const rows = await db.select().from(agentMetrics).where(eq(agentMetrics.agentId, 'agent-e'));
|
||||||
|
expect(rows).toHaveLength(1);
|
||||||
|
expect(rows[0].subagentsCount).toBe(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Multiple agents — counts isolated per agent', async () => {
|
||||||
|
await insertChunk(db, 'agent-f', { type: 'tool_use', name: 'AskUserQuestion', input: { questions: [{}, {}, {}] } });
|
||||||
|
await insertChunk(db, 'agent-f', { type: 'tool_use', name: 'AskUserQuestion', input: { questions: [{}, {}, {}] } });
|
||||||
|
await insertChunk(db, 'agent-g', { type: 'tool_use', name: 'Agent' });
|
||||||
|
|
||||||
|
await backfillMetrics(db);
|
||||||
|
|
||||||
|
const rowsF = await db.select().from(agentMetrics).where(eq(agentMetrics.agentId, 'agent-f'));
|
||||||
|
expect(rowsF).toHaveLength(1);
|
||||||
|
expect(rowsF[0].questionsCount).toBe(6);
|
||||||
|
expect(rowsF[0].subagentsCount).toBe(0);
|
||||||
|
expect(rowsF[0].compactionsCount).toBe(0);
|
||||||
|
|
||||||
|
const rowsG = await db.select().from(agentMetrics).where(eq(agentMetrics.agentId, 'agent-g'));
|
||||||
|
expect(rowsG).toHaveLength(1);
|
||||||
|
expect(rowsG[0].questionsCount).toBe(0);
|
||||||
|
expect(rowsG[0].subagentsCount).toBe(1);
|
||||||
|
expect(rowsG[0].compactionsCount).toBe(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Empty database — completes without error', async () => {
|
||||||
|
await expect(backfillMetrics(db)).resolves.not.toThrow();
|
||||||
|
|
||||||
|
const rows = await db.select().from(agentMetrics);
|
||||||
|
expect(rows).toEqual([]);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Re-run idempotency note — second run doubles counts', async () => {
|
||||||
|
// Documented behavior: run only once against a fresh agent_metrics table
|
||||||
|
await insertChunk(db, 'agent-h', { type: 'tool_use', name: 'Agent' });
|
||||||
|
|
||||||
|
await backfillMetrics(db);
|
||||||
|
const rowsAfterFirst = await db.select().from(agentMetrics).where(eq(agentMetrics.agentId, 'agent-h'));
|
||||||
|
expect(rowsAfterFirst[0].subagentsCount).toBe(1);
|
||||||
|
|
||||||
|
await backfillMetrics(db);
|
||||||
|
const rowsAfterSecond = await db.select().from(agentMetrics).where(eq(agentMetrics.agentId, 'agent-h'));
|
||||||
|
expect(rowsAfterSecond[0].subagentsCount).toBe(2);
|
||||||
|
});
|
||||||
|
});
|
||||||
128
apps/server/scripts/backfill-metrics.ts
Normal file
128
apps/server/scripts/backfill-metrics.ts
Normal file
@@ -0,0 +1,128 @@
|
|||||||
|
/**
 * Backfill script for agent_metrics table.
 *
 * Reads all existing agent_log_chunks rows and populates agent_metrics with
 * accumulated counts of questions, subagent spawns, and compaction events.
 *
 * Intended to be run once per production database after applying the migration
 * that introduces the agent_metrics table.
 *
 * Idempotency note: Uses ON CONFLICT DO UPDATE with additive increments to match
 * the ongoing insertChunk write-path behavior. Running against an empty
 * agent_metrics table is fully safe. Running a second time will double-count —
 * only run this script once per database, immediately after applying the migration.
 */
||||||
|
|
||||||
|
import { asc, sql } from 'drizzle-orm';
|
||||||
|
import { createDatabase, DrizzleDatabase, agentLogChunks, agentMetrics } from '../db/index.js';
|
||||||
|
|
||||||
|
const BATCH_SIZE = 500;
|
||||||
|
const LOG_EVERY = 1000;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Core backfill function. Accepts a DrizzleDatabase for testability.
|
||||||
|
*/
|
||||||
|
export async function backfillMetrics(db: DrizzleDatabase): Promise<void> {
|
||||||
|
const accumulator = new Map<string, { questionsCount: number; subagentsCount: number; compactionsCount: number }>();
|
||||||
|
let offset = 0;
|
||||||
|
let totalChunks = 0;
|
||||||
|
let malformedCount = 0;
|
||||||
|
|
||||||
|
while (true) {
|
||||||
|
const batch = await db
|
||||||
|
.select({ agentId: agentLogChunks.agentId, content: agentLogChunks.content })
|
||||||
|
.from(agentLogChunks)
|
||||||
|
.orderBy(asc(agentLogChunks.createdAt))
|
||||||
|
.limit(BATCH_SIZE)
|
||||||
|
.offset(offset);
|
||||||
|
|
||||||
|
if (batch.length === 0) break;
|
||||||
|
|
||||||
|
for (const chunk of batch) {
|
||||||
|
let parsed: unknown;
|
||||||
|
try {
|
||||||
|
parsed = JSON.parse(chunk.content);
|
||||||
|
} catch {
|
||||||
|
malformedCount++;
|
||||||
|
totalChunks++;
|
||||||
|
if (totalChunks % LOG_EVERY === 0) {
|
||||||
|
console.log(`Processed ${totalChunks} chunks...`);
|
||||||
|
}
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof parsed !== 'object' || parsed === null) {
|
||||||
|
totalChunks++;
|
||||||
|
if (totalChunks % LOG_EVERY === 0) {
|
||||||
|
console.log(`Processed ${totalChunks} chunks...`);
|
||||||
|
}
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
const obj = parsed as Record<string, unknown>;
|
||||||
|
const type = obj['type'];
|
||||||
|
const name = obj['name'];
|
||||||
|
|
||||||
|
if (type === 'tool_use' && name === 'AskUserQuestion') {
|
||||||
|
const input = obj['input'] as Record<string, unknown> | undefined;
|
||||||
|
const questions = input?.['questions'];
|
||||||
|
const count = Array.isArray(questions) ? questions.length : 0;
|
||||||
|
if (count > 0) {
|
||||||
|
const entry = accumulator.get(chunk.agentId) ?? { questionsCount: 0, subagentsCount: 0, compactionsCount: 0 };
|
||||||
|
entry.questionsCount += count;
|
||||||
|
accumulator.set(chunk.agentId, entry);
|
||||||
|
}
|
||||||
|
} else if (type === 'tool_use' && name === 'Agent') {
|
||||||
|
const entry = accumulator.get(chunk.agentId) ?? { questionsCount: 0, subagentsCount: 0, compactionsCount: 0 };
|
||||||
|
entry.subagentsCount += 1;
|
||||||
|
accumulator.set(chunk.agentId, entry);
|
||||||
|
} else if (type === 'system' && obj['subtype'] === 'init' && obj['source'] === 'compact') {
|
||||||
|
const entry = accumulator.get(chunk.agentId) ?? { questionsCount: 0, subagentsCount: 0, compactionsCount: 0 };
|
||||||
|
entry.compactionsCount += 1;
|
||||||
|
accumulator.set(chunk.agentId, entry);
|
||||||
|
}
|
||||||
|
|
||||||
|
totalChunks++;
|
||||||
|
if (totalChunks % LOG_EVERY === 0) {
|
||||||
|
console.log(`Processed ${totalChunks} chunks...`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
offset += BATCH_SIZE;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Upsert accumulated counts into agent_metrics.
|
||||||
|
// Uses additive ON CONFLICT DO UPDATE to match the ongoing insertChunk behavior.
|
||||||
|
for (const [agentId, counts] of accumulator) {
|
||||||
|
await db
|
||||||
|
.insert(agentMetrics)
|
||||||
|
.values({
|
||||||
|
agentId,
|
||||||
|
questionsCount: counts.questionsCount,
|
||||||
|
subagentsCount: counts.subagentsCount,
|
||||||
|
compactionsCount: counts.compactionsCount,
|
||||||
|
updatedAt: new Date(),
|
||||||
|
})
|
||||||
|
.onConflictDoUpdate({
|
||||||
|
target: agentMetrics.agentId,
|
||||||
|
set: {
|
||||||
|
questionsCount: sql`${agentMetrics.questionsCount} + ${counts.questionsCount}`,
|
||||||
|
subagentsCount: sql`${agentMetrics.subagentsCount} + ${counts.subagentsCount}`,
|
||||||
|
compactionsCount: sql`${agentMetrics.compactionsCount} + ${counts.compactionsCount}`,
|
||||||
|
updatedAt: new Date(),
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(
|
||||||
|
`Backfill complete: ${accumulator.size} agents updated, ${totalChunks} chunks processed, ${malformedCount} malformed chunks skipped`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* CLI wrapper — opens a database from a path, then delegates to backfillMetrics.
|
||||||
|
*/
|
||||||
|
export async function backfillMetricsFromPath(dbPath: string): Promise<void> {
|
||||||
|
const db = createDatabase(dbPath);
|
||||||
|
await backfillMetrics(db);
|
||||||
|
}
|
||||||
@@ -55,3 +55,27 @@ Migrations 0000–0007 were generated by `drizzle-kit generate`. Migrations 0008
|
|||||||
- **Migration files are immutable.** Once committed, never edit them. Make a new migration instead.
|
- **Migration files are immutable.** Once committed, never edit them. Make a new migration instead.
|
||||||
- **Keep schema.ts in sync.** The schema file is the source of truth for TypeScript types; migrations are the source of truth for database DDL. Both must reflect the same structure.
|
- **Keep schema.ts in sync.** The schema file is the source of truth for TypeScript types; migrations are the source of truth for database DDL. Both must reflect the same structure.
|
||||||
- **Test with `npm test`** after generating migrations to verify they work with in-memory databases.
|
- **Test with `npm test`** after generating migrations to verify they work with in-memory databases.
|
||||||
|
|
||||||
|
## Post-migration backfill scripts

Some schema additions require a one-time data backfill because SQLite migrations cannot execute Node.js logic (e.g., JSON parsing). In these cases, the migration creates the table structure, and a separate Node.js script populates it from existing data.

### agent_metrics backfill

**When to run:** After deploying the migration that creates the `agent_metrics` table (introduced in the Radar Screen Performance initiative). Run this once per production database after upgrading.

**Command:**

```sh
cw backfill-metrics
# Or with a custom DB path:
cw backfill-metrics --db /path/to/codewalkers.db
```

**What it does:**

- Reads all existing `agent_log_chunks` rows in batches of 500 (ordered by `createdAt ASC`)
- Parses each chunk's `content` JSON to count `AskUserQuestion` tool calls, `Agent` spawns, and compaction events
- Upserts the accumulated counts into `agent_metrics` using additive conflict resolution

**Idempotency:** The script uses `ON CONFLICT DO UPDATE` with additive increments, matching the ongoing write-path behavior. Running it against an empty `agent_metrics` table is fully safe. Running it a second time will double-count — only run it once per database, immediately after applying the migration.

**Batch size:** 500 rows per query, to avoid loading the full `agent_log_chunks` table into memory. Progress is logged every 1,000 chunks.
||||||
|
|||||||
Reference in New Issue
Block a user