Merge branch 'cw/review-tab-performance' into cw-merge-1772826318787
This commit is contained in:
@@ -46,6 +46,9 @@ function createMocks() {
|
||||
ensureBranch: vi.fn(),
|
||||
mergeBranch: vi.fn().mockResolvedValue({ success: true, message: 'merged', previousRef: 'abc000' }),
|
||||
diffBranches: vi.fn().mockResolvedValue(''),
|
||||
diffBranchesStat: vi.fn().mockResolvedValue([]),
|
||||
diffFileSingle: vi.fn().mockResolvedValue(''),
|
||||
getHeadCommitHash: vi.fn().mockResolvedValue('deadbeef00000000000000000000000000000000'),
|
||||
deleteBranch: vi.fn(),
|
||||
branchExists: vi.fn().mockResolvedValue(true),
|
||||
remoteBranchExists: vi.fn().mockResolvedValue(false),
|
||||
|
||||
@@ -23,6 +23,7 @@ import type { ConflictResolutionService } from '../coordination/conflict-resolut
|
||||
import { phaseBranchName, taskBranchName } from '../git/branch-naming.js';
|
||||
import { ensureProjectClone } from '../git/project-clones.js';
|
||||
import { createModuleLogger } from '../logger/index.js';
|
||||
import { phaseMetaCache, fileDiffCache } from '../review/diff-cache.js';
|
||||
|
||||
const log = createModuleLogger('execution-orchestrator');
|
||||
|
||||
@@ -253,6 +254,10 @@ export class ExecutionOrchestrator {
|
||||
log.info({ taskId, taskBranch, phaseBranch, project: project.name }, 'task branch merged into phase branch');
|
||||
}
|
||||
|
||||
// Invalidate diff cache — phase branch HEAD has advanced after merges
|
||||
phaseMetaCache.invalidateByPrefix(`${phaseId}:`);
|
||||
fileDiffCache.invalidateByPrefix(`${phaseId}:`);
|
||||
|
||||
// Emit task:merged event
|
||||
const mergedEvent: TaskMergedEvent = {
|
||||
type: 'task:merged',
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
* a worktree to be checked out.
|
||||
*/
|
||||
|
||||
import type { MergeResult, MergeabilityResult, BranchCommit } from './types.js';
|
||||
import type { MergeResult, MergeabilityResult, BranchCommit, FileStatEntry } from './types.js';
|
||||
|
||||
export interface BranchManager {
|
||||
/**
|
||||
@@ -29,6 +29,27 @@ export interface BranchManager {
|
||||
*/
|
||||
diffBranches(repoPath: string, baseBranch: string, headBranch: string): Promise<string>;
|
||||
|
||||
/**
|
||||
* Get per-file metadata for changes between two branches.
|
||||
* Uses three-dot diff (baseBranch...headBranch) — same divergence model as diffBranches.
|
||||
* Binary files are included with status 'binary' and additions/deletions both 0.
|
||||
* Does NOT return hunk content.
|
||||
*/
|
||||
diffBranchesStat(repoPath: string, baseBranch: string, headBranch: string): Promise<FileStatEntry[]>;
|
||||
|
||||
/**
|
||||
* Get the raw unified diff for a single file between two branches.
|
||||
* Uses three-dot diff (baseBranch...headBranch).
|
||||
* Returns empty string for binary files (caller must detect binary separately).
|
||||
* filePath must be URL-decoded before being passed here.
|
||||
*/
|
||||
diffFileSingle(repoPath: string, baseBranch: string, headBranch: string, filePath: string): Promise<string>;
|
||||
|
||||
/**
|
||||
* Returns the current HEAD commit hash (40-char SHA) for the given branch in the repo.
|
||||
*/
|
||||
getHeadCommitHash(repoPath: string, branch: string): Promise<string>;
|
||||
|
||||
/**
|
||||
* Delete a branch. No-op if the branch doesn't exist.
|
||||
*/
|
||||
|
||||
@@ -65,6 +65,69 @@ async function createTestRepoWithRemote(): Promise<{
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a repo pair for testing diff operations.
|
||||
* Sets up bare + clone with a 'feature' branch that has known changes vs 'main'.
|
||||
*/
|
||||
async function createTestRepoForDiff(): Promise<{
|
||||
clonePath: string;
|
||||
cleanup: () => Promise<void>;
|
||||
}> {
|
||||
const tmpBase = await mkdtemp(path.join(tmpdir(), 'cw-diff-test-'));
|
||||
const barePath = path.join(tmpBase, 'bare.git');
|
||||
const workPath = path.join(tmpBase, 'work');
|
||||
const clonePath = path.join(tmpBase, 'clone');
|
||||
|
||||
// Create bare repo
|
||||
await simpleGit().init([barePath, '--bare']);
|
||||
|
||||
// Set up main branch in work dir
|
||||
await simpleGit().clone(barePath, workPath);
|
||||
const workGit = simpleGit(workPath);
|
||||
await workGit.addConfig('user.email', 'test@example.com');
|
||||
await workGit.addConfig('user.name', 'Test User');
|
||||
await writeFile(path.join(workPath, 'README.md'), '# README\n');
|
||||
await writeFile(path.join(workPath, 'to-delete.txt'), 'delete me\n');
|
||||
await workGit.add(['README.md', 'to-delete.txt']);
|
||||
await workGit.commit('Initial commit');
|
||||
await workGit.push('origin', 'main');
|
||||
|
||||
// Clone and create feature branch with changes
|
||||
await simpleGit().clone(barePath, clonePath);
|
||||
const cloneGit = simpleGit(clonePath);
|
||||
await cloneGit.addConfig('user.email', 'test@example.com');
|
||||
await cloneGit.addConfig('user.name', 'Test User');
|
||||
await cloneGit.checkoutLocalBranch('feature');
|
||||
|
||||
// Add new text file
|
||||
await writeFile(path.join(clonePath, 'added.txt'), 'new content\n');
|
||||
await cloneGit.add('added.txt');
|
||||
|
||||
// Modify existing file
|
||||
await writeFile(path.join(clonePath, 'README.md'), '# README\n\nModified content\n');
|
||||
await cloneGit.add('README.md');
|
||||
|
||||
// Delete a file
|
||||
await cloneGit.rm(['to-delete.txt']);
|
||||
|
||||
// Add binary file
|
||||
await writeFile(path.join(clonePath, 'image.bin'), Buffer.alloc(16));
|
||||
await cloneGit.add('image.bin');
|
||||
|
||||
// Add file with space in name
|
||||
await writeFile(path.join(clonePath, 'has space.txt'), 'content\n');
|
||||
await cloneGit.add('has space.txt');
|
||||
|
||||
await cloneGit.commit('Feature branch changes');
|
||||
|
||||
return {
|
||||
clonePath,
|
||||
cleanup: async () => {
|
||||
await rm(tmpBase, { recursive: true, force: true });
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
describe('SimpleGitBranchManager', () => {
|
||||
let clonePath: string;
|
||||
let cleanup: () => Promise<void>;
|
||||
@@ -108,3 +171,80 @@ describe('SimpleGitBranchManager', () => {
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('SimpleGitBranchManager - diffBranchesStat and diffFileSingle', () => {
|
||||
let clonePath: string;
|
||||
let cleanup: () => Promise<void>;
|
||||
let branchManager: SimpleGitBranchManager;
|
||||
|
||||
beforeEach(async () => {
|
||||
const setup = await createTestRepoForDiff();
|
||||
clonePath = setup.clonePath;
|
||||
cleanup = setup.cleanup;
|
||||
branchManager = new SimpleGitBranchManager();
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await cleanup();
|
||||
});
|
||||
|
||||
describe('diffBranchesStat', () => {
|
||||
it('returns correct entries for added, modified, and deleted text files', async () => {
|
||||
const entries = await branchManager.diffBranchesStat(clonePath, 'main', 'feature');
|
||||
const added = entries.find(e => e.path === 'added.txt');
|
||||
const modified = entries.find(e => e.path === 'README.md');
|
||||
const deleted = entries.find(e => e.path === 'to-delete.txt');
|
||||
|
||||
expect(added?.status).toBe('added');
|
||||
expect(added?.additions).toBeGreaterThan(0);
|
||||
expect(modified?.status).toBe('modified');
|
||||
expect(deleted?.status).toBe('deleted');
|
||||
expect(deleted?.deletions).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
it('marks binary files as status=binary with additions=0, deletions=0', async () => {
|
||||
const entries = await branchManager.diffBranchesStat(clonePath, 'main', 'feature');
|
||||
const binary = entries.find(e => e.path === 'image.bin');
|
||||
expect(binary?.status).toBe('binary');
|
||||
expect(binary?.additions).toBe(0);
|
||||
expect(binary?.deletions).toBe(0);
|
||||
});
|
||||
|
||||
it('returns empty array when there are no changes', async () => {
|
||||
const entries = await branchManager.diffBranchesStat(clonePath, 'main', 'main');
|
||||
expect(entries).toEqual([]);
|
||||
});
|
||||
|
||||
it('handles files with spaces in their names', async () => {
|
||||
const entries = await branchManager.diffBranchesStat(clonePath, 'main', 'feature');
|
||||
const spaced = entries.find(e => e.path === 'has space.txt');
|
||||
expect(spaced).toBeDefined();
|
||||
expect(spaced?.status).toBe('added');
|
||||
});
|
||||
});
|
||||
|
||||
describe('diffFileSingle', () => {
|
||||
it('returns unified diff containing addition hunks for an added file', async () => {
|
||||
const diff = await branchManager.diffFileSingle(clonePath, 'main', 'feature', 'added.txt');
|
||||
expect(diff).toContain('+');
|
||||
expect(diff).toContain('added.txt');
|
||||
});
|
||||
|
||||
it('returns unified diff with removal hunks for a deleted file', async () => {
|
||||
const diff = await branchManager.diffFileSingle(clonePath, 'main', 'feature', 'to-delete.txt');
|
||||
expect(diff).toContain('-');
|
||||
expect(diff).toContain('to-delete.txt');
|
||||
});
|
||||
|
||||
it('returns string for a binary file', async () => {
|
||||
const diff = await branchManager.diffFileSingle(clonePath, 'main', 'feature', 'image.bin');
|
||||
// git diff returns empty or a "Binary files differ" line — no hunk content
|
||||
expect(typeof diff).toBe('string');
|
||||
});
|
||||
|
||||
it('handles file paths with spaces', async () => {
|
||||
const diff = await branchManager.diffFileSingle(clonePath, 'main', 'feature', 'has space.txt');
|
||||
expect(diff).toContain('has space.txt');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -11,11 +11,31 @@ import { mkdtempSync, rmSync } from 'node:fs';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { simpleGit } from 'simple-git';
|
||||
import type { BranchManager } from './branch-manager.js';
|
||||
import type { MergeResult, MergeabilityResult, BranchCommit } from './types.js';
|
||||
import type { MergeResult, MergeabilityResult, BranchCommit, FileStatEntry } from './types.js';
|
||||
import { createModuleLogger } from '../logger/index.js';
|
||||
|
||||
const log = createModuleLogger('branch-manager');
|
||||
|
||||
/**
|
||||
* Normalize a numstat path to the new path for rename entries.
|
||||
* Handles patterns like:
|
||||
* - "{old.txt => new.txt}" → "new.txt"
|
||||
* - "dir/{old.txt => new.txt}" → "dir/new.txt"
|
||||
* - "old_dir/file.txt => new_dir/file.txt" → "new_dir/file.txt"
|
||||
*/
|
||||
function normalizeNumstatPath(pathStr: string): string {
|
||||
const braceMatch = pathStr.match(/^(.*)\{(.*) => (.*)\}(.*)$/);
|
||||
if (braceMatch) {
|
||||
const [, prefix, , newPart, suffix] = braceMatch;
|
||||
return `${prefix}${newPart}${suffix}`;
|
||||
}
|
||||
const arrowMatch = pathStr.match(/^.* => (.+)$/);
|
||||
if (arrowMatch) {
|
||||
return arrowMatch[1];
|
||||
}
|
||||
return pathStr;
|
||||
}
|
||||
|
||||
export class SimpleGitBranchManager implements BranchManager {
|
||||
async ensureBranch(repoPath: string, branch: string, baseBranch: string): Promise<void> {
|
||||
const git = simpleGit(repoPath);
|
||||
@@ -97,6 +117,97 @@ export class SimpleGitBranchManager implements BranchManager {
|
||||
return diff;
|
||||
}
|
||||
|
||||
async diffBranchesStat(repoPath: string, baseBranch: string, headBranch: string): Promise<FileStatEntry[]> {
|
||||
const git = simpleGit(repoPath);
|
||||
const range = `${baseBranch}...${headBranch}`;
|
||||
|
||||
const [nameStatusRaw, numStatRaw] = await Promise.all([
|
||||
git.raw(['diff', '--name-status', range]),
|
||||
git.raw(['diff', '--numstat', range]),
|
||||
]);
|
||||
|
||||
if (!nameStatusRaw.trim()) return [];
|
||||
|
||||
// Parse numstat: "<additions>\t<deletions>\t<path>"
|
||||
// Binary files: "-\t-\t<path>"
|
||||
const numStatMap = new Map<string, { additions: number; deletions: number; binary: boolean }>();
|
||||
for (const line of numStatRaw.split('\n')) {
|
||||
if (!line.trim()) continue;
|
||||
const tabIdx1 = line.indexOf('\t');
|
||||
const tabIdx2 = line.indexOf('\t', tabIdx1 + 1);
|
||||
if (tabIdx1 === -1 || tabIdx2 === -1) continue;
|
||||
const addStr = line.slice(0, tabIdx1);
|
||||
const delStr = line.slice(tabIdx1 + 1, tabIdx2);
|
||||
const pathStr = line.slice(tabIdx2 + 1);
|
||||
const binary = addStr === '-' && delStr === '-';
|
||||
// Normalize rename paths like "{old => new}" or "dir/{old => new}/file" to new path
|
||||
const newPath = normalizeNumstatPath(pathStr);
|
||||
numStatMap.set(newPath, {
|
||||
additions: binary ? 0 : parseInt(addStr, 10),
|
||||
deletions: binary ? 0 : parseInt(delStr, 10),
|
||||
binary,
|
||||
});
|
||||
}
|
||||
|
||||
// Parse name-status: "<status>\t<path>" or "<Rxx>\t<oldPath>\t<newPath>"
|
||||
const entries: FileStatEntry[] = [];
|
||||
for (const line of nameStatusRaw.split('\n')) {
|
||||
if (!line.trim()) continue;
|
||||
const parts = line.split('\t');
|
||||
if (parts.length < 2) continue;
|
||||
|
||||
const statusCode = parts[0];
|
||||
let status: FileStatEntry['status'];
|
||||
let filePath: string;
|
||||
let oldPath: string | undefined;
|
||||
|
||||
if (statusCode.startsWith('R')) {
|
||||
status = 'renamed';
|
||||
oldPath = parts[1];
|
||||
filePath = parts[2];
|
||||
} else if (statusCode === 'A') {
|
||||
status = 'added';
|
||||
filePath = parts[1];
|
||||
} else if (statusCode === 'M') {
|
||||
status = 'modified';
|
||||
filePath = parts[1];
|
||||
} else if (statusCode === 'D') {
|
||||
status = 'deleted';
|
||||
filePath = parts[1];
|
||||
} else {
|
||||
status = 'modified';
|
||||
filePath = parts[1];
|
||||
}
|
||||
|
||||
const numStat = numStatMap.get(filePath);
|
||||
if (numStat?.binary) {
|
||||
status = 'binary';
|
||||
}
|
||||
|
||||
const entry: FileStatEntry = {
|
||||
path: filePath,
|
||||
status,
|
||||
additions: numStat?.additions ?? 0,
|
||||
deletions: numStat?.deletions ?? 0,
|
||||
};
|
||||
if (oldPath !== undefined) entry.oldPath = oldPath;
|
||||
entries.push(entry);
|
||||
}
|
||||
|
||||
return entries;
|
||||
}
|
||||
|
||||
async diffFileSingle(repoPath: string, baseBranch: string, headBranch: string, filePath: string): Promise<string> {
|
||||
const git = simpleGit(repoPath);
|
||||
return git.diff([`${baseBranch}...${headBranch}`, '--', filePath]);
|
||||
}
|
||||
|
||||
async getHeadCommitHash(repoPath: string, branch: string): Promise<string> {
|
||||
const git = simpleGit(repoPath);
|
||||
const result = await git.raw(['rev-parse', branch]);
|
||||
return result.trim();
|
||||
}
|
||||
|
||||
async deleteBranch(repoPath: string, branch: string): Promise<void> {
|
||||
const git = simpleGit(repoPath);
|
||||
const exists = await this.branchExists(repoPath, branch);
|
||||
|
||||
@@ -100,6 +100,29 @@ export interface BranchCommit {
|
||||
deletions: number;
|
||||
}
|
||||
|
||||
// =============================================================================
|
||||
// File Stat Entry (per-file diff metadata)
|
||||
// =============================================================================
|
||||
|
||||
/**
 * Metadata for a single file changed between two branches.
 * No hunk content — only path, status, and line-count statistics.
 *
 * Produced by BranchManager.diffBranchesStat.
 */
export interface FileStatEntry {
  /** New path (or old path for deletions) */
  path: string;
  /** Only set for renames — the path before the rename */
  oldPath?: string;
  /** Nature of the change; 'binary' is reported in place of the textual statuses */
  status: 'added' | 'modified' | 'deleted' | 'renamed' | 'binary';
  /** Lines added (0 for binary files) */
  additions: number;
  /** Lines deleted (0 for binary files) */
  deletions: number;
  /** Which project clone this file belongs to (populated by callers in multi-project scenarios) */
  projectId?: string;
}
|
||||
|
||||
// =============================================================================
|
||||
// WorktreeManager Port Interface
|
||||
// =============================================================================
|
||||
|
||||
76
apps/server/review/diff-cache.test.ts
Normal file
76
apps/server/review/diff-cache.test.ts
Normal file
@@ -0,0 +1,76 @@
|
||||
/**
|
||||
* Unit tests for DiffCache class.
|
||||
*
|
||||
* Tests TTL expiry, prefix invalidation, and env-var TTL configuration.
|
||||
*/
|
||||
|
||||
import { describe, it, expect, vi, afterEach } from 'vitest';
|
||||
import { DiffCache } from './diff-cache.js';
|
||||
|
||||
afterEach(() => {
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
|
||||
describe('DiffCache', () => {
|
||||
it('returns undefined for a key that was never set', () => {
|
||||
const cache = new DiffCache<string>(5000);
|
||||
expect(cache.get('nonexistent')).toBeUndefined();
|
||||
});
|
||||
|
||||
it('returns value when entry has not expired', () => {
|
||||
vi.spyOn(Date, 'now').mockReturnValue(1000);
|
||||
const cache = new DiffCache<string>(5000);
|
||||
cache.set('key', 'value');
|
||||
vi.spyOn(Date, 'now').mockReturnValue(5999); // 4999ms elapsed, TTL=5000
|
||||
expect(cache.get('key')).toBe('value');
|
||||
});
|
||||
|
||||
it('returns undefined and deletes the entry when TTL has elapsed', () => {
|
||||
vi.spyOn(Date, 'now').mockReturnValue(1000);
|
||||
const cache = new DiffCache<string>(5000);
|
||||
cache.set('key', 'value');
|
||||
vi.spyOn(Date, 'now').mockReturnValue(6001); // 5001ms elapsed, TTL=5000
|
||||
expect(cache.get('key')).toBeUndefined();
|
||||
// Verify the key is no longer stored (second get also returns undefined)
|
||||
vi.spyOn(Date, 'now').mockReturnValue(6001);
|
||||
expect(cache.get('key')).toBeUndefined();
|
||||
});
|
||||
|
||||
it('overwrites an existing entry and resets its TTL', () => {
|
||||
vi.spyOn(Date, 'now').mockReturnValue(1000);
|
||||
const cache = new DiffCache<string>(5000);
|
||||
cache.set('key', 'first');
|
||||
vi.spyOn(Date, 'now').mockReturnValue(4000); // overwrite before expiry
|
||||
cache.set('key', 'second');
|
||||
vi.spyOn(Date, 'now').mockReturnValue(8999); // 4999ms after overwrite, TTL=5000
|
||||
expect(cache.get('key')).toBe('second');
|
||||
vi.spyOn(Date, 'now').mockReturnValue(9001); // 5001ms after overwrite
|
||||
expect(cache.get('key')).toBeUndefined();
|
||||
});
|
||||
|
||||
it('invalidateByPrefix removes all matching keys and preserves others', () => {
|
||||
const cache = new DiffCache<string>(60_000);
|
||||
cache.set('phase-1:abc', 'a');
|
||||
cache.set('phase-1:abc:file.ts', 'b');
|
||||
cache.set('phase-2:xyz', 'c');
|
||||
cache.invalidateByPrefix('phase-1:');
|
||||
expect(cache.get('phase-1:abc')).toBeUndefined();
|
||||
expect(cache.get('phase-1:abc:file.ts')).toBeUndefined();
|
||||
expect(cache.get('phase-2:xyz')).toBe('c');
|
||||
});
|
||||
|
||||
it('singleton instances use REVIEW_DIFF_CACHE_TTL_MS env var for TTL', async () => {
|
||||
vi.resetModules();
|
||||
vi.stubEnv('REVIEW_DIFF_CACHE_TTL_MS', '1000');
|
||||
const { phaseMetaCache } = await import('./diff-cache.js');
|
||||
|
||||
vi.spyOn(Date, 'now').mockReturnValue(0);
|
||||
phaseMetaCache.set('key', {} as any);
|
||||
vi.spyOn(Date, 'now').mockReturnValue(999);
|
||||
expect(phaseMetaCache.get('key')).toBeDefined();
|
||||
vi.spyOn(Date, 'now').mockReturnValue(1001);
|
||||
expect(phaseMetaCache.get('key')).toBeUndefined();
|
||||
|
||||
vi.unstubAllEnvs();
|
||||
});
|
||||
});
|
||||
70
apps/server/review/diff-cache.ts
Normal file
70
apps/server/review/diff-cache.ts
Normal file
@@ -0,0 +1,70 @@
|
||||
/**
|
||||
* DiffCache — in-memory TTL cache for git diff results.
|
||||
*
|
||||
* Keyed by `phaseId:headHash` (or `phaseId:headHash:filePath` for per-file diffs).
|
||||
* TTL defaults to 5 minutes, configurable via REVIEW_DIFF_CACHE_TTL_MS env var.
|
||||
* Prefix-based invalidation clears all entries for a phase when a new commit lands.
|
||||
*/
|
||||
|
||||
import type { FileStatEntry } from '../git/types.js';
|
||||
|
||||
interface CacheEntry<T> {
|
||||
value: T;
|
||||
expiresAt: number;
|
||||
}
|
||||
|
||||
export class DiffCache<T> {
|
||||
private store = new Map<string, CacheEntry<T>>();
|
||||
private ttlMs: number;
|
||||
|
||||
constructor(ttlMs: number) {
|
||||
this.ttlMs = ttlMs;
|
||||
}
|
||||
|
||||
get(key: string): T | undefined {
|
||||
const entry = this.store.get(key);
|
||||
if (!entry) return undefined;
|
||||
if (Date.now() > entry.expiresAt) {
|
||||
this.store.delete(key);
|
||||
return undefined;
|
||||
}
|
||||
return entry.value;
|
||||
}
|
||||
|
||||
set(key: string, value: T): void {
|
||||
this.store.set(key, { value, expiresAt: Date.now() + this.ttlMs });
|
||||
}
|
||||
|
||||
invalidateByPrefix(prefix: string): void {
|
||||
for (const key of this.store.keys()) {
|
||||
if (key.startsWith(prefix)) this.store.delete(key);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Response shapes (mirror the return types of getPhaseReviewDiff / getFileDiff)
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export interface PhaseMetaResponse {
|
||||
phaseName: string;
|
||||
sourceBranch: string;
|
||||
targetBranch: string;
|
||||
files: FileStatEntry[];
|
||||
totalAdditions: number;
|
||||
totalDeletions: number;
|
||||
}
|
||||
|
||||
export interface FileDiffResponse {
|
||||
binary: boolean;
|
||||
rawDiff: string;
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Singleton instances — TTL is read once at module load time
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
const TTL = parseInt(process.env.REVIEW_DIFF_CACHE_TTL_MS ?? '300000', 10);
|
||||
|
||||
export const phaseMetaCache = new DiffCache<PhaseMetaResponse>(TTL);
|
||||
export const fileDiffCache = new DiffCache<FileDiffResponse>(TTL);
|
||||
256
apps/server/test/integration/phase-review-diff.test.ts
Normal file
256
apps/server/test/integration/phase-review-diff.test.ts
Normal file
@@ -0,0 +1,256 @@
|
||||
/**
|
||||
* Integration tests for getPhaseReviewDiff and getFileDiff tRPC procedures.
|
||||
*
|
||||
* Uses real git repos on disk (no cassettes) + an in-memory SQLite database.
|
||||
* No network calls — purely local git operations.
|
||||
*/
|
||||
|
||||
import { describe, it, expect, beforeAll, afterAll } from 'vitest';
|
||||
import { mkdtemp, rm, writeFile, mkdir } from 'node:fs/promises';
|
||||
import { tmpdir } from 'node:os';
|
||||
import path from 'node:path';
|
||||
import { simpleGit } from 'simple-git';
|
||||
import { router, publicProcedure, createCallerFactory } from '../../trpc/trpc.js';
|
||||
import { phaseProcedures } from '../../trpc/routers/phase.js';
|
||||
import { createTestDatabase } from '../../db/repositories/drizzle/test-helpers.js';
|
||||
import {
|
||||
DrizzleInitiativeRepository,
|
||||
DrizzlePhaseRepository,
|
||||
DrizzleProjectRepository,
|
||||
} from '../../db/repositories/drizzle/index.js';
|
||||
import { SimpleGitBranchManager } from '../../git/simple-git-branch-manager.js';
|
||||
import { getProjectCloneDir } from '../../git/project-clones.js';
|
||||
import type { TRPCContext } from '../../trpc/context.js';
|
||||
|
||||
// ============================================================================
|
||||
// Test router & caller factory
|
||||
// ============================================================================
|
||||
|
||||
const testRouter = router({
|
||||
...phaseProcedures(publicProcedure),
|
||||
});
|
||||
|
||||
const createCaller = createCallerFactory(testRouter);
|
||||
|
||||
// ============================================================================
|
||||
// Shared test state (set up once for the whole suite)
|
||||
// ============================================================================
|
||||
|
||||
let workspaceRoot: string;
|
||||
let cleanup: () => Promise<void>;
|
||||
let phaseId: string;
|
||||
let pendingPhaseId: string;
|
||||
|
||||
/**
 * Build the test git repo with the required branches and files.
 *
 * Repo layout on the phase branch vs main:
 *   file1.txt through file5.txt — added (10–20 lines each)
 *   photo.bin — binary file (Buffer.alloc(32))
 *   gone.txt — deleted (existed on main)
 *   has space.txt — added (contains text)
 *
 * NOTE(review): the base branch produced by `git init` is assumed to be
 * 'main' (the phase branch name encodes it) — confirm git's
 * init.defaultBranch in CI, older git defaults to 'master'.
 */
async function setupGitRepo(clonePath: string): Promise<void> {
  await mkdir(clonePath, { recursive: true });

  const git = simpleGit(clonePath);
  await git.init();
  await git.addConfig('user.email', 'test@example.com');
  await git.addConfig('user.name', 'Test');

  // Commit gone.txt on main so it can be deleted on the phase branch
  await writeFile(path.join(clonePath, 'gone.txt'), Array.from({ length: 5 }, (_, i) => `line ${i + 1}`).join('\n') + '\n');
  await git.add('gone.txt');
  await git.commit('Initial commit on main');

  // Create phase branch from main
  // phaseBranchName('main', 'test-phase') => 'main-phase-test-phase'
  await git.checkoutLocalBranch('main-phase-test-phase');

  // Add 5 text files (15 lines each — gives deterministic addition counts)
  for (let i = 1; i <= 5; i++) {
    const lines = Array.from({ length: 15 }, (_, j) => `Line ${j + 1} in file${i}`).join('\n') + '\n';
    await writeFile(path.join(clonePath, `file${i}.txt`), lines);
  }
  await git.add(['file1.txt', 'file2.txt', 'file3.txt', 'file4.txt', 'file5.txt']);

  // Add binary file (32 NUL bytes — git classifies it as binary)
  await writeFile(path.join(clonePath, 'photo.bin'), Buffer.alloc(32));
  await git.add('photo.bin');

  // Delete gone.txt — exercises the 'deleted' status path
  await git.rm(['gone.txt']);

  // Add file with space in name — exercises path quoting/URL-decoding
  await writeFile(path.join(clonePath, 'has space.txt'), 'content with spaces\n');
  await git.add('has space.txt');

  // Single commit carrying all phase-branch changes
  await git.commit('Phase branch changes');
}
|
||||
|
||||
beforeAll(async () => {
|
||||
// Create workspace root temp dir
|
||||
workspaceRoot = await mkdtemp(path.join(tmpdir(), 'cw-phase-diff-test-'));
|
||||
cleanup = async () => {
|
||||
await rm(workspaceRoot, { recursive: true, force: true });
|
||||
};
|
||||
|
||||
// Set up in-memory database
|
||||
const db = createTestDatabase();
|
||||
const initiativeRepo = new DrizzleInitiativeRepository(db);
|
||||
const phaseRepo = new DrizzlePhaseRepository(db);
|
||||
const projectRepo = new DrizzleProjectRepository(db);
|
||||
|
||||
// Create initiative with branch='main'
|
||||
const initiative = await initiativeRepo.create({
|
||||
name: 'Test Initiative',
|
||||
branch: 'main',
|
||||
});
|
||||
|
||||
// Create project — we'll set up the git repo at the expected clone path
|
||||
const project = await projectRepo.create({
|
||||
name: 'test-repo',
|
||||
url: 'file:///dev/null', // won't be cloned — we create the repo directly
|
||||
defaultBranch: 'main',
|
||||
});
|
||||
|
||||
// Link project to initiative
|
||||
await projectRepo.addProjectToInitiative(initiative.id, project.id);
|
||||
|
||||
// Set up git repo at the expected clone path
|
||||
const relPath = getProjectCloneDir(project.name, project.id);
|
||||
const clonePath = path.join(workspaceRoot, relPath);
|
||||
await setupGitRepo(clonePath);
|
||||
|
||||
// Create reviewable phase (pending_review)
|
||||
const phase = await phaseRepo.create({
|
||||
initiativeId: initiative.id,
|
||||
name: 'test-phase',
|
||||
status: 'pending_review',
|
||||
});
|
||||
phaseId = phase.id;
|
||||
|
||||
// Create a non-reviewable phase (pending) for error test
|
||||
const pendingPhase = await phaseRepo.create({
|
||||
initiativeId: initiative.id,
|
||||
name: 'pending-phase',
|
||||
status: 'pending',
|
||||
});
|
||||
pendingPhaseId = pendingPhase.id;
|
||||
|
||||
// Store db and repos so the caller can use them
|
||||
// (stored in module-level vars to be accessed in test helper)
|
||||
Object.assign(sharedCtx, {
|
||||
initiativeRepository: initiativeRepo,
|
||||
phaseRepository: phaseRepo,
|
||||
projectRepository: projectRepo,
|
||||
branchManager: new SimpleGitBranchManager(),
|
||||
workspaceRoot,
|
||||
});
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
await cleanup?.();
|
||||
});
|
||||
|
||||
// ============================================================================
|
||||
// Shared context (filled in beforeAll)
|
||||
// ============================================================================
|
||||
|
||||
const sharedCtx: Partial<TRPCContext> = {
|
||||
eventBus: { emit: () => {}, on: () => {}, off: () => {}, once: () => {} } as any,
|
||||
serverStartedAt: null,
|
||||
processCount: 0,
|
||||
};
|
||||
|
||||
function getCaller() {
|
||||
return createCaller(sharedCtx as TRPCContext);
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Tests: getPhaseReviewDiff
|
||||
// ============================================================================
|
||||
|
||||
describe('getPhaseReviewDiff', () => {
|
||||
it('returns files array with correct metadata and no rawDiff field', async () => {
|
||||
const start = Date.now();
|
||||
const result = await getCaller().getPhaseReviewDiff({ phaseId });
|
||||
const elapsed = Date.now() - start;
|
||||
|
||||
expect(result).not.toHaveProperty('rawDiff');
|
||||
expect(result.files).toBeInstanceOf(Array);
|
||||
// 5 text + 1 binary + 1 deleted + 1 spaced = 8 files
|
||||
expect(result.files.length).toBeGreaterThanOrEqual(7);
|
||||
expect(elapsed).toBeLessThan(3000);
|
||||
});
|
||||
|
||||
it('includes binary file with status=binary, additions=0, deletions=0', async () => {
|
||||
const result = await getCaller().getPhaseReviewDiff({ phaseId });
|
||||
const bin = result.files.find((f) => f.path === 'photo.bin');
|
||||
expect(bin?.status).toBe('binary');
|
||||
expect(bin?.additions).toBe(0);
|
||||
expect(bin?.deletions).toBe(0);
|
||||
});
|
||||
|
||||
it('includes deleted file with status=deleted and nonzero deletions', async () => {
|
||||
const result = await getCaller().getPhaseReviewDiff({ phaseId });
|
||||
const del = result.files.find((f) => f.path === 'gone.txt');
|
||||
expect(del?.status).toBe('deleted');
|
||||
expect(del?.deletions).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
it('computes totalAdditions and totalDeletions as sums over files', async () => {
|
||||
const result = await getCaller().getPhaseReviewDiff({ phaseId });
|
||||
const sumAdd = result.files.reduce((s, f) => s + f.additions, 0);
|
||||
const sumDel = result.files.reduce((s, f) => s + f.deletions, 0);
|
||||
expect(result.totalAdditions).toBe(sumAdd);
|
||||
expect(result.totalDeletions).toBe(sumDel);
|
||||
});
|
||||
|
||||
it('throws NOT_FOUND for unknown phaseId', async () => {
|
||||
const err = await getCaller().getPhaseReviewDiff({ phaseId: 'nonexistent' }).catch((e) => e);
|
||||
expect(err.code).toBe('NOT_FOUND');
|
||||
});
|
||||
|
||||
it('throws BAD_REQUEST for phase not in reviewable status', async () => {
|
||||
const err = await getCaller().getPhaseReviewDiff({ phaseId: pendingPhaseId }).catch((e) => e);
|
||||
expect(err.code).toBe('BAD_REQUEST');
|
||||
});
|
||||
});
|
||||
|
||||
// ============================================================================
|
||||
// Tests: getFileDiff
|
||||
// ============================================================================
|
||||
|
||||
describe('getFileDiff', () => {
|
||||
it('returns rawDiff with unified diff for a normal file, under 1 second', async () => {
|
||||
const start = Date.now();
|
||||
const result = await getCaller().getFileDiff({ phaseId, filePath: 'file1.txt' });
|
||||
const elapsed = Date.now() - start;
|
||||
|
||||
expect(result.binary).toBe(false);
|
||||
expect(result.rawDiff).toContain('+');
|
||||
expect(elapsed).toBeLessThan(1000);
|
||||
});
|
||||
|
||||
it('returns binary=true and rawDiff="" for binary file', async () => {
|
||||
const result = await getCaller().getFileDiff({ phaseId, filePath: 'photo.bin' });
|
||||
expect(result.binary).toBe(true);
|
||||
expect(result.rawDiff).toBe('');
|
||||
});
|
||||
|
||||
it('returns removal hunks for a deleted file', async () => {
|
||||
const result = await getCaller().getFileDiff({ phaseId, filePath: 'gone.txt' });
|
||||
expect(result.binary).toBe(false);
|
||||
expect(result.rawDiff).toContain('-');
|
||||
});
|
||||
|
||||
it('handles URL-encoded file path with space', async () => {
|
||||
const result = await getCaller().getFileDiff({
|
||||
phaseId,
|
||||
filePath: encodeURIComponent('has space.txt'),
|
||||
});
|
||||
expect(result.binary).toBe(false);
|
||||
expect(result.rawDiff).toContain('has space.txt');
|
||||
});
|
||||
});
|
||||
249
apps/server/trpc/routers/phase.test.ts
Normal file
249
apps/server/trpc/routers/phase.test.ts
Normal file
@@ -0,0 +1,249 @@
|
||||
/**
|
||||
* Integration tests for getPhaseReviewDiff caching behaviour.
|
||||
*
|
||||
* Verifies that git diff is only invoked once per HEAD hash and that
|
||||
* cache invalidation after a task merge triggers a re-run.
|
||||
*/
|
||||
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { router, publicProcedure, createCallerFactory } from '../trpc.js';
|
||||
import { phaseProcedures } from './phase.js';
|
||||
import type { TRPCContext } from '../context.js';
|
||||
import type { BranchManager } from '../../git/branch-manager.js';
|
||||
import { createTestDatabase } from '../../db/repositories/drizzle/test-helpers.js';
|
||||
import {
|
||||
DrizzleInitiativeRepository,
|
||||
DrizzlePhaseRepository,
|
||||
DrizzleProjectRepository,
|
||||
} from '../../db/repositories/drizzle/index.js';
|
||||
import { phaseMetaCache, fileDiffCache } from '../../review/diff-cache.js';
|
||||
|
||||
// =============================================================================
|
||||
// Mock ensureProjectClone — prevents actual git cloning
|
||||
// =============================================================================
|
||||
|
||||
vi.mock('../../git/project-clones.js', () => ({
|
||||
ensureProjectClone: vi.fn().mockResolvedValue('/fake/clone/path'),
|
||||
getProjectCloneDir: vi.fn().mockReturnValue('repos/fake-project-id'),
|
||||
}));
|
||||
|
||||
// =============================================================================
|
||||
// Test router
|
||||
// =============================================================================
|
||||
|
||||
const testRouter = router({
|
||||
...phaseProcedures(publicProcedure),
|
||||
});
|
||||
|
||||
const createCaller = createCallerFactory(testRouter);
|
||||
|
||||
// =============================================================================
|
||||
// MockBranchManager
|
||||
// =============================================================================
|
||||
|
||||
function makeMockBranchManager(): BranchManager {
|
||||
return {
|
||||
ensureBranch: vi.fn().mockResolvedValue(undefined),
|
||||
mergeBranch: vi.fn().mockResolvedValue({ success: true, conflictFiles: [] }),
|
||||
diffBranches: vi.fn().mockResolvedValue('diff --git a/file.ts'),
|
||||
diffBranchesStat: vi.fn().mockResolvedValue([]),
|
||||
diffFileSingle: vi.fn().mockResolvedValue('diff --git a/file.ts'),
|
||||
deleteBranch: vi.fn().mockResolvedValue(undefined),
|
||||
branchExists: vi.fn().mockResolvedValue(true),
|
||||
remoteBranchExists: vi.fn().mockResolvedValue(true),
|
||||
listCommits: vi.fn().mockResolvedValue([]),
|
||||
diffCommit: vi.fn().mockResolvedValue(''),
|
||||
getMergeBase: vi.fn().mockResolvedValue('mergebase123'),
|
||||
pushBranch: vi.fn().mockResolvedValue(undefined),
|
||||
checkMergeability: vi.fn().mockResolvedValue({ canMerge: true, conflicts: [] }),
|
||||
fetchRemote: vi.fn().mockResolvedValue(undefined),
|
||||
fastForwardBranch: vi.fn().mockResolvedValue(undefined),
|
||||
updateRef: vi.fn().mockResolvedValue(undefined),
|
||||
getHeadCommitHash: vi.fn().mockResolvedValue('abc123def456'),
|
||||
};
|
||||
}
|
||||
|
||||
// =============================================================================
|
||||
// Helpers
|
||||
// =============================================================================
|
||||
|
||||
function createMockEventBus(): TRPCContext['eventBus'] {
|
||||
return {
|
||||
emit: vi.fn(),
|
||||
on: vi.fn(),
|
||||
off: vi.fn(),
|
||||
once: vi.fn(),
|
||||
};
|
||||
}
|
||||
|
||||
interface SeedResult {
|
||||
phaseId: string;
|
||||
initiativeId: string;
|
||||
projectId: string;
|
||||
}
|
||||
|
||||
async function seedDatabase(): Promise<{
|
||||
repos: {
|
||||
initiativeRepo: DrizzleInitiativeRepository;
|
||||
phaseRepo: DrizzlePhaseRepository;
|
||||
projectRepo: DrizzleProjectRepository;
|
||||
};
|
||||
data: SeedResult;
|
||||
}> {
|
||||
const db = createTestDatabase();
|
||||
const initiativeRepo = new DrizzleInitiativeRepository(db);
|
||||
const phaseRepo = new DrizzlePhaseRepository(db);
|
||||
const projectRepo = new DrizzleProjectRepository(db);
|
||||
|
||||
const initiative = await initiativeRepo.create({
|
||||
name: 'Test Initiative',
|
||||
status: 'active',
|
||||
branch: 'main',
|
||||
});
|
||||
|
||||
const phase = await phaseRepo.create({
|
||||
initiativeId: initiative.id,
|
||||
name: 'Test Phase',
|
||||
status: 'pending_review',
|
||||
});
|
||||
|
||||
const project = await projectRepo.create({
|
||||
name: 'Test Project',
|
||||
url: 'https://github.com/test/repo',
|
||||
});
|
||||
|
||||
await projectRepo.addProjectToInitiative(initiative.id, project.id);
|
||||
|
||||
return {
|
||||
repos: { initiativeRepo, phaseRepo, projectRepo },
|
||||
data: { phaseId: phase.id, initiativeId: initiative.id, projectId: project.id },
|
||||
};
|
||||
}
|
||||
|
||||
async function seedDatabaseNoProjects(): Promise<{
|
||||
repos: {
|
||||
initiativeRepo: DrizzleInitiativeRepository;
|
||||
phaseRepo: DrizzlePhaseRepository;
|
||||
projectRepo: DrizzleProjectRepository;
|
||||
};
|
||||
data: { phaseId: string };
|
||||
}> {
|
||||
const db = createTestDatabase();
|
||||
const initiativeRepo = new DrizzleInitiativeRepository(db);
|
||||
const phaseRepo = new DrizzlePhaseRepository(db);
|
||||
const projectRepo = new DrizzleProjectRepository(db);
|
||||
|
||||
const initiative = await initiativeRepo.create({
|
||||
name: 'Test Initiative No Projects',
|
||||
status: 'active',
|
||||
branch: 'main',
|
||||
});
|
||||
|
||||
const phase = await phaseRepo.create({
|
||||
initiativeId: initiative.id,
|
||||
name: 'Empty Phase',
|
||||
status: 'pending_review',
|
||||
});
|
||||
|
||||
return {
|
||||
repos: { initiativeRepo, phaseRepo, projectRepo },
|
||||
data: { phaseId: phase.id },
|
||||
};
|
||||
}
|
||||
|
||||
function makeCaller(
|
||||
branchManager: BranchManager,
|
||||
repos: {
|
||||
initiativeRepo: DrizzleInitiativeRepository;
|
||||
phaseRepo: DrizzlePhaseRepository;
|
||||
projectRepo: DrizzleProjectRepository;
|
||||
},
|
||||
) {
|
||||
const ctx: TRPCContext = {
|
||||
eventBus: createMockEventBus(),
|
||||
serverStartedAt: null,
|
||||
processCount: 0,
|
||||
branchManager,
|
||||
initiativeRepository: repos.initiativeRepo,
|
||||
phaseRepository: repos.phaseRepo,
|
||||
projectRepository: repos.projectRepo,
|
||||
workspaceRoot: '/fake/workspace',
|
||||
};
|
||||
return createCaller(ctx);
|
||||
}
|
||||
|
||||
// =============================================================================
|
||||
// Tests
|
||||
// =============================================================================
|
||||
|
||||
beforeEach(() => {
|
||||
// Clear caches between tests to ensure isolation
|
||||
phaseMetaCache.invalidateByPrefix('');
|
||||
fileDiffCache.invalidateByPrefix('');
|
||||
});
|
||||
|
||||
describe('getPhaseReviewDiff caching', () => {
|
||||
it('second call for same phase/HEAD returns cached result without calling git again', async () => {
|
||||
const { repos, data } = await seedDatabase();
|
||||
const branchManager = makeMockBranchManager();
|
||||
const diffBranchesSpy = vi.spyOn(branchManager, 'diffBranchesStat');
|
||||
const caller = makeCaller(branchManager, repos);
|
||||
|
||||
await caller.getPhaseReviewDiff({ phaseId: data.phaseId });
|
||||
await caller.getPhaseReviewDiff({ phaseId: data.phaseId });
|
||||
|
||||
expect(diffBranchesSpy).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('after cache invalidation, next call re-runs git diff', async () => {
|
||||
const { repos, data } = await seedDatabase();
|
||||
const branchManager = makeMockBranchManager();
|
||||
const diffBranchesSpy = vi.spyOn(branchManager, 'diffBranchesStat');
|
||||
const caller = makeCaller(branchManager, repos);
|
||||
|
||||
await caller.getPhaseReviewDiff({ phaseId: data.phaseId });
|
||||
expect(diffBranchesSpy).toHaveBeenCalledTimes(1);
|
||||
|
||||
// Simulate a task merge → cache invalidated
|
||||
phaseMetaCache.invalidateByPrefix(`${data.phaseId}:`);
|
||||
|
||||
await caller.getPhaseReviewDiff({ phaseId: data.phaseId });
|
||||
expect(diffBranchesSpy).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
|
||||
it('different HEAD hashes for same phase are treated as distinct cache entries', async () => {
|
||||
const { repos, data } = await seedDatabase();
|
||||
const branchManager = makeMockBranchManager();
|
||||
const diffBranchesSpy = vi.spyOn(branchManager, 'diffBranchesStat');
|
||||
const caller = makeCaller(branchManager, repos);
|
||||
|
||||
// First call with headHash = 'abc123'
|
||||
vi.spyOn(branchManager, 'getHeadCommitHash').mockResolvedValueOnce('abc123');
|
||||
await caller.getPhaseReviewDiff({ phaseId: data.phaseId });
|
||||
|
||||
// Second call with headHash = 'def456' (simulates a new commit)
|
||||
vi.spyOn(branchManager, 'getHeadCommitHash').mockResolvedValueOnce('def456');
|
||||
await caller.getPhaseReviewDiff({ phaseId: data.phaseId });
|
||||
|
||||
expect(diffBranchesSpy).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
|
||||
it('throws NOT_FOUND for nonexistent phaseId', async () => {
|
||||
const { repos } = await seedDatabase();
|
||||
const caller = makeCaller(makeMockBranchManager(), repos);
|
||||
|
||||
await expect(caller.getPhaseReviewDiff({ phaseId: 'nonexistent' }))
|
||||
.rejects.toMatchObject({ code: 'NOT_FOUND' });
|
||||
});
|
||||
|
||||
it('phase with no projects returns empty result without calling git', async () => {
|
||||
const { repos, data } = await seedDatabaseNoProjects();
|
||||
const branchManager = makeMockBranchManager();
|
||||
const diffBranchesSpy = vi.spyOn(branchManager, 'diffBranchesStat');
|
||||
const caller = makeCaller(branchManager, repos);
|
||||
|
||||
const result = await caller.getPhaseReviewDiff({ phaseId: data.phaseId });
|
||||
expect(diffBranchesSpy).not.toHaveBeenCalled();
|
||||
expect(result).toHaveProperty('phaseName');
|
||||
});
|
||||
});
|
||||
@@ -4,11 +4,14 @@
|
||||
|
||||
import { TRPCError } from '@trpc/server';
|
||||
import { z } from 'zod';
|
||||
import { simpleGit } from 'simple-git';
|
||||
import type { Phase } from '../../db/schema.js';
|
||||
import type { ProcedureBuilder } from '../trpc.js';
|
||||
import { requirePhaseRepository, requireTaskRepository, requireBranchManager, requireInitiativeRepository, requireProjectRepository, requireExecutionOrchestrator, requireReviewCommentRepository, requireChangeSetRepository } from './_helpers.js';
|
||||
import { phaseBranchName } from '../../git/branch-naming.js';
|
||||
import { ensureProjectClone } from '../../git/project-clones.js';
|
||||
import type { FileStatEntry } from '../../git/types.js';
|
||||
import { phaseMetaCache, fileDiffCache } from '../../review/diff-cache.js';
|
||||
|
||||
export function phaseProcedures(publicProcedure: ProcedureBuilder) {
|
||||
return {
|
||||
@@ -230,26 +233,124 @@ export function phaseProcedures(publicProcedure: ProcedureBuilder) {
|
||||
|
||||
const initBranch = initiative.branch;
|
||||
const phBranch = phaseBranchName(initBranch, phase.name);
|
||||
// For completed phases, use stored merge base; for pending_review, use initiative branch
|
||||
const diffBase = (phase.status === 'completed' && phase.mergeBase) ? phase.mergeBase : initBranch;
|
||||
|
||||
const projects = await projectRepo.findProjectsByInitiativeId(phase.initiativeId);
|
||||
let rawDiff = '';
|
||||
|
||||
for (const project of projects) {
|
||||
const clonePath = await ensureProjectClone(project, ctx.workspaceRoot!);
|
||||
const diff = await branchManager.diffBranches(clonePath, diffBase, phBranch);
|
||||
if (diff) {
|
||||
rawDiff += diff + '\n';
|
||||
}
|
||||
}
|
||||
|
||||
if (projects.length === 0) {
|
||||
return {
|
||||
phaseName: phase.name,
|
||||
sourceBranch: phBranch,
|
||||
targetBranch: initBranch,
|
||||
rawDiff,
|
||||
files: [],
|
||||
totalAdditions: 0,
|
||||
totalDeletions: 0,
|
||||
};
|
||||
}
|
||||
|
||||
const firstClone = await ensureProjectClone(projects[0], ctx.workspaceRoot!);
|
||||
const headHash = await branchManager.getHeadCommitHash(firstClone, phBranch);
|
||||
const cacheKey = `${input.phaseId}:${headHash}`;
|
||||
|
||||
type PhaseReviewDiffResult = { phaseName: string; sourceBranch: string; targetBranch: string; files: FileStatEntry[]; totalAdditions: number; totalDeletions: number };
|
||||
const cached = phaseMetaCache.get(cacheKey) as PhaseReviewDiffResult | undefined;
|
||||
if (cached) return cached;
|
||||
|
||||
const files: FileStatEntry[] = [];
|
||||
|
||||
for (const project of projects) {
|
||||
const clonePath = await ensureProjectClone(project, ctx.workspaceRoot!);
|
||||
const entries = await branchManager.diffBranchesStat(clonePath, diffBase, phBranch);
|
||||
for (const entry of entries) {
|
||||
const tagged: FileStatEntry = { ...entry, projectId: project.id };
|
||||
if (projects.length > 1) {
|
||||
tagged.path = `${project.name}/${entry.path}`;
|
||||
if (entry.oldPath) {
|
||||
tagged.oldPath = `${project.name}/${entry.oldPath}`;
|
||||
}
|
||||
}
|
||||
files.push(tagged);
|
||||
}
|
||||
}
|
||||
|
||||
const totalAdditions = files.reduce((sum, f) => sum + f.additions, 0);
|
||||
const totalDeletions = files.reduce((sum, f) => sum + f.deletions, 0);
|
||||
|
||||
const result = {
|
||||
phaseName: phase.name,
|
||||
sourceBranch: phBranch,
|
||||
targetBranch: initBranch,
|
||||
files,
|
||||
totalAdditions,
|
||||
totalDeletions,
|
||||
};
|
||||
phaseMetaCache.set(cacheKey, result);
|
||||
return result;
|
||||
}),
|
||||
|
||||
getFileDiff: publicProcedure
  .input(z.object({
    phaseId: z.string().min(1),
    filePath: z.string().min(1),
    projectId: z.string().optional(),
  }))
  .query(async ({ ctx, input }) => {
    // Returns the raw unified diff for one file on the phase branch, with a
    // per-(phase, HEAD, file) cache so repeated viewer requests skip git.
    const phaseRepo = requirePhaseRepository(ctx);
    const initiativeRepo = requireInitiativeRepository(ctx);
    const projectRepo = requireProjectRepository(ctx);
    const branchManager = requireBranchManager(ctx);

    const phase = await phaseRepo.findById(input.phaseId);
    if (!phase) {
      throw new TRPCError({ code: 'NOT_FOUND', message: `Phase '${input.phaseId}' not found` });
    }
    if (phase.status !== 'pending_review' && phase.status !== 'completed') {
      throw new TRPCError({ code: 'BAD_REQUEST', message: `Phase is not reviewable (status: ${phase.status})` });
    }

    const initiative = await initiativeRepo.findById(phase.initiativeId);
    if (!initiative?.branch) {
      throw new TRPCError({ code: 'BAD_REQUEST', message: 'Initiative has no branch configured' });
    }

    const initBranch = initiative.branch;
    const phBranch = phaseBranchName(initBranch, phase.name);
    // For completed phases diff against the stored merge base; otherwise
    // against the live initiative branch (mirrors getPhaseReviewDiff).
    const diffBase = (phase.status === 'completed' && phase.mergeBase) ? phase.mergeBase : initBranch;

    // filePath arrives URL-encoded from the client.
    const decodedPath = decodeURIComponent(input.filePath);

    const projects = await projectRepo.findProjectsByInitiativeId(phase.initiativeId);
    // Bug fix: with zero linked projects, projects[0] below was undefined and
    // ensureProjectClone crashed with a TypeError. Fail with a clear error
    // instead (getPhaseReviewDiff already guards this case).
    if (projects.length === 0) {
      throw new TRPCError({ code: 'NOT_FOUND', message: 'No projects found for this phase' });
    }

    const firstClone = await ensureProjectClone(projects[0], ctx.workspaceRoot!);
    const headHash = await branchManager.getHeadCommitHash(firstClone, phBranch);
    // Key includes the HEAD hash so a merge (new commit) naturally misses.
    const cacheKey = `${input.phaseId}:${headHash}:${input.filePath}`;
    const cached = fileDiffCache.get(cacheKey);
    if (cached) return cached;

    let clonePath: string;
    if (input.projectId) {
      const project = projects.find((p) => p.id === input.projectId);
      if (!project) {
        throw new TRPCError({ code: 'NOT_FOUND', message: `Project '${input.projectId}' not found for this phase` });
      }
      clonePath = await ensureProjectClone(project, ctx.workspaceRoot!);
    } else {
      clonePath = firstClone;
    }

    const git = simpleGit(clonePath);
    // Binary files appear as "-\t-\t<path>" in --numstat output
    const numstatOut = await git.raw(['diff', '--numstat', `${diffBase}...${phBranch}`, '--', decodedPath]);
    if (numstatOut.trim() && numstatOut.startsWith('-\t-\t')) {
      const binaryResult = { binary: true, rawDiff: '' };
      fileDiffCache.set(cacheKey, binaryResult);
      return binaryResult;
    }

    const rawDiff = await branchManager.diffFileSingle(clonePath, diffBase, phBranch, decodedPath);
    const result = { binary: false, rawDiff };
    fileDiffCache.set(cacheKey, result);
    return result;
  }),
|
||||
|
||||
approvePhaseReview: publicProcedure
|
||||
|
||||
@@ -27,12 +27,14 @@
|
||||
"@tiptap/suggestion": "^3.19.0",
|
||||
"@trpc/client": "^11.9.0",
|
||||
"@trpc/react-query": "^11.9.0",
|
||||
"@types/react-window": "^1.8.8",
|
||||
"class-variance-authority": "^0.7.1",
|
||||
"clsx": "^2.1.1",
|
||||
"geist": "^1.7.0",
|
||||
"lucide-react": "^0.563.0",
|
||||
"react": "^19.0.0",
|
||||
"react-dom": "^19.0.0",
|
||||
"react-window": "^2.2.7",
|
||||
"sonner": "^2.0.7",
|
||||
"tailwind-merge": "^3.4.0",
|
||||
"tippy.js": "^6.3.7"
|
||||
|
||||
@@ -11,7 +11,7 @@ interface ConflictResolutionPanelProps {
|
||||
}
|
||||
|
||||
export function ConflictResolutionPanel({ initiativeId, conflicts, onResolved }: ConflictResolutionPanelProps) {
|
||||
const { state, agent, questions, spawn, resume, stop, dismiss } = useConflictAgent(initiativeId);
|
||||
const { state, agent: _agent, questions, spawn, resume, stop, dismiss } = useConflictAgent(initiativeId);
|
||||
const [showManual, setShowManual] = useState(false);
|
||||
const prevStateRef = useRef<string | null>(null);
|
||||
|
||||
|
||||
192
apps/web/src/components/review/DiffViewer.test.tsx
Normal file
192
apps/web/src/components/review/DiffViewer.test.tsx
Normal file
@@ -0,0 +1,192 @@
|
||||
// @vitest-environment happy-dom
|
||||
import "@testing-library/jest-dom/vitest";
|
||||
import { render, screen, act } from "@testing-library/react";
|
||||
import { vi, describe, it, expect, beforeEach, afterEach } from "vitest";
|
||||
import { DiffViewer } from "./DiffViewer";
|
||||
import type { FileDiff } from "./types";
|
||||
|
||||
// ── Module mocks ──────────────────────────────────────────────────────────────
|
||||
|
||||
vi.mock("./FileCard", () => ({
|
||||
FileCard: ({ file }: { file: FileDiff }) => (
|
||||
<div data-testid="file-card" data-path={file.newPath} />
|
||||
),
|
||||
}));
|
||||
|
||||
// Hoist the fetch mock so it can be referenced inside vi.mock factories
|
||||
const { mockGetFileDiffFetch } = vi.hoisted(() => ({
|
||||
mockGetFileDiffFetch: vi.fn().mockResolvedValue({ rawDiff: "" }),
|
||||
}));
|
||||
|
||||
vi.mock("@/lib/trpc", () => ({
|
||||
trpc: {
|
||||
useUtils: () => ({
|
||||
getFileDiff: { fetch: mockGetFileDiffFetch },
|
||||
}),
|
||||
},
|
||||
}));
|
||||
|
||||
// DiffViewer calls useQueryClient() (even though the return value is unused).
|
||||
// Provide a minimal mock so the hook doesn't throw outside a QueryClientProvider.
|
||||
vi.mock("@tanstack/react-query", async (importOriginal) => {
|
||||
const actual =
|
||||
await importOriginal<typeof import("@tanstack/react-query")>();
|
||||
return { ...actual, useQueryClient: () => ({}) };
|
||||
});
|
||||
|
||||
// ── IntersectionObserver mock ─────────────────────────────────────────────────
|
||||
|
||||
let observerCallback: IntersectionObserverCallback | null = null;
|
||||
const observedElements = new Set<Element>();
|
||||
|
||||
// Class (not arrow function) so it can be used with `new IntersectionObserver(...)`
|
||||
class MockIntersectionObserver {
|
||||
constructor(cb: IntersectionObserverCallback) {
|
||||
observerCallback = cb;
|
||||
}
|
||||
observe(el: Element) {
|
||||
observedElements.add(el);
|
||||
}
|
||||
unobserve(el: Element) {
|
||||
observedElements.delete(el);
|
||||
}
|
||||
disconnect() {
|
||||
observedElements.clear();
|
||||
}
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
vi.stubGlobal("IntersectionObserver", MockIntersectionObserver);
|
||||
observedElements.clear();
|
||||
observerCallback = null;
|
||||
mockGetFileDiffFetch.mockClear();
|
||||
mockGetFileDiffFetch.mockResolvedValue({ rawDiff: "" });
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.unstubAllGlobals();
|
||||
});
|
||||
|
||||
// ── Helpers ───────────────────────────────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Fire the IntersectionObserver callback with a set of intersecting and
|
||||
* non-intersecting file paths. The target element is simulated by an object
|
||||
* whose dataset.filePath matches the DiffViewer's data-file-path attribute.
|
||||
*/
|
||||
function fireIntersection(
|
||||
intersectingPaths: string[],
|
||||
nonIntersectingPaths: string[] = [],
|
||||
) {
|
||||
if (!observerCallback) return;
|
||||
const entries = [
|
||||
...intersectingPaths.map((p) => ({
|
||||
isIntersecting: true,
|
||||
target: { dataset: { filePath: p } } as unknown as Element,
|
||||
})),
|
||||
...nonIntersectingPaths.map((p) => ({
|
||||
isIntersecting: false,
|
||||
target: { dataset: { filePath: p } } as unknown as Element,
|
||||
})),
|
||||
] as IntersectionObserverEntry[];
|
||||
act(() => {
|
||||
observerCallback!(entries, {} as IntersectionObserver);
|
||||
});
|
||||
}
|
||||
|
||||
function makeFiles(count: number): FileDiff[] {
|
||||
return Array.from({ length: count }, (_, i) => ({
|
||||
oldPath: `file${i}.ts`,
|
||||
newPath: `file${i}.ts`,
|
||||
status: "modified" as const,
|
||||
additions: 1,
|
||||
deletions: 1,
|
||||
}));
|
||||
}
|
||||
|
||||
// Baseline DiffViewer props shared by every test; all callbacks are inert
// vi.fn() spies and the comment map starts empty. Tests spread these and
// override individual props (e.g. expandAll, onRegisterRef) as needed.
const defaultProps = {
  phaseId: "phase-1",
  commitMode: false,
  commentsByLine: new Map(),
  onAddComment: vi.fn(),
  onResolveComment: vi.fn(),
  onUnresolveComment: vi.fn(),
};
|
||||
|
||||
// ── Tests ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
describe("DiffViewer", () => {
  it("renders all FileCards when 5 files are all in viewport", () => {
    const files = makeFiles(5);
    render(<DiffViewer files={files} {...defaultProps} />);

    // Every file enters the viewport at once.
    fireIntersection(files.map((f) => f.newPath));

    expect(screen.getAllByTestId("file-card")).toHaveLength(5);
  });

  it("shows only intersecting FileCards for 300 files, placeholders for the rest", () => {
    const files = makeFiles(300);
    render(<DiffViewer files={files} {...defaultProps} />);

    // Only the first five files scroll into view.
    const visible = files.slice(0, 5).map((f) => f.newPath);
    fireIntersection(visible);

    expect(screen.getAllByTestId("file-card")).toHaveLength(5);

    // The remaining 295 should be 48px placeholder divs marked aria-hidden
    const placeholders = document.querySelectorAll(
      '[aria-hidden][style*="height: 48px"]',
    );
    expect(placeholders.length).toBeGreaterThanOrEqual(295);
  });

  it("skips IntersectionObserver for single-file diff and renders FileCard directly", () => {
    render(<DiffViewer files={makeFiles(1)} {...defaultProps} />);

    // Single-file path: isVisible is always true, no intersection event needed
    expect(screen.getAllByTestId("file-card")).toHaveLength(1);
  });

  it("calls scrollIntoView on the wrapper div when onRegisterRef is used for sidebar navigation", () => {
    const files = makeFiles(5);
    const refs = new Map<string, HTMLDivElement>();
    const onRegisterRef = (filePath: string, el: HTMLDivElement | null) => {
      if (el) refs.set(filePath, el);
    };

    render(<DiffViewer files={files} {...defaultProps} onRegisterRef={onRegisterRef} />);

    // All wrapper divs should have been registered (including the last one)
    const targetPath = files[4].newPath;
    expect(refs.has(targetPath)).toBe(true);

    const wrapper = refs.get(targetPath)!;
    const scrollSpy = vi.fn();
    Object.defineProperty(wrapper, "scrollIntoView", { value: scrollSpy });

    // Simulate a sidebar click that calls scrollIntoView on the wrapper
    act(() => {
      wrapper.scrollIntoView({ behavior: "smooth", block: "start" });
    });
    expect(scrollSpy).toHaveBeenCalledOnce();
  });

  it("fires getFileDiff queries in batches of 10 when expandAll is toggled", async () => {
    const files = makeFiles(25); // 3 batches: 10, 10, 5
    const { rerender } = render(
      <DiffViewer files={files} {...defaultProps} expandAll={false} />,
    );

    rerender(<DiffViewer files={files} {...defaultProps} expandAll={true} />);

    // Wait for all async batch iterations to complete
    await act(async () => {
      await new Promise((resolve) => setTimeout(resolve, 100));
    });

    // All 25 non-binary files should have been prefetched
    expect(mockGetFileDiffFetch).toHaveBeenCalledTimes(25);
  });
});
|
||||
@@ -1,9 +1,25 @@
|
||||
import type { FileDiff, DiffLine, ReviewComment } from "./types";
|
||||
import { useCallback, useEffect, useRef, useState } from "react";
|
||||
import { useQueryClient } from "@tanstack/react-query";
|
||||
import type { FileDiff, FileDiffDetail, DiffLine, ReviewComment } from "./types";
|
||||
import { FileCard } from "./FileCard";
|
||||
import { trpc } from "@/lib/trpc";
|
||||
|
||||
function getFileCommentMap(
|
||||
commentsByLine: Map<string, ReviewComment[]>,
|
||||
filePath: string,
|
||||
): Map<string, ReviewComment[]> {
|
||||
const result = new Map<string, ReviewComment[]>();
|
||||
for (const [key, val] of commentsByLine) {
|
||||
if (key.startsWith(`${filePath}:`)) result.set(key, val);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
interface DiffViewerProps {
|
||||
files: FileDiff[];
|
||||
comments: ReviewComment[];
|
||||
files: (FileDiff | FileDiffDetail)[];
|
||||
phaseId: string;
|
||||
commitMode: boolean;
|
||||
commentsByLine: Map<string, ReviewComment[]>;
|
||||
onAddComment: (
|
||||
filePath: string,
|
||||
lineNumber: number,
|
||||
@@ -17,11 +33,14 @@ interface DiffViewerProps {
|
||||
viewedFiles?: Set<string>;
|
||||
onToggleViewed?: (filePath: string) => void;
|
||||
onRegisterRef?: (filePath: string, el: HTMLDivElement | null) => void;
|
||||
expandAll?: boolean;
|
||||
}
|
||||
|
||||
export function DiffViewer({
|
||||
files,
|
||||
comments,
|
||||
phaseId,
|
||||
commitMode,
|
||||
commentsByLine,
|
||||
onAddComment,
|
||||
onResolveComment,
|
||||
onUnresolveComment,
|
||||
@@ -30,14 +49,142 @@ export function DiffViewer({
|
||||
viewedFiles,
|
||||
onToggleViewed,
|
||||
onRegisterRef,
|
||||
expandAll,
|
||||
}: DiffViewerProps) {
|
||||
// Set of file paths currently intersecting (or near) the viewport
|
||||
const visibleFiles = useRef<Set<string>>(new Set());
|
||||
// Map from filePath → wrapper div ref
|
||||
const wrapperRefs = useRef<Map<string, HTMLDivElement>>(new Map());
|
||||
// Increment to trigger re-render when visibility changes
|
||||
const [visibilityVersion, setVisibilityVersion] = useState(0);
|
||||
|
||||
// Single IntersectionObserver for all wrappers
|
||||
const observerRef = useRef<IntersectionObserver | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
if (files.length === 1) return; // skip for single file
|
||||
|
||||
observerRef.current = new IntersectionObserver(
|
||||
(entries) => {
|
||||
let changed = false;
|
||||
for (const entry of entries) {
|
||||
const filePath = (entry.target as HTMLDivElement).dataset['filePath'];
|
||||
if (!filePath) continue;
|
||||
if (entry.isIntersecting) {
|
||||
if (!visibleFiles.current.has(filePath)) {
|
||||
visibleFiles.current.add(filePath);
|
||||
changed = true;
|
||||
}
|
||||
} else {
|
||||
if (visibleFiles.current.has(filePath)) {
|
||||
visibleFiles.current.delete(filePath);
|
||||
changed = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (changed) setVisibilityVersion((v) => v + 1);
|
||||
},
|
||||
{ rootMargin: '100% 0px 100% 0px' }, // 1× viewport above and below
|
||||
);
|
||||
|
||||
// Observe all current wrapper divs
|
||||
for (const el of wrapperRefs.current.values()) {
|
||||
observerRef.current.observe(el);
|
||||
}
|
||||
|
||||
return () => {
|
||||
observerRef.current?.disconnect();
|
||||
};
|
||||
}, [files]); // re-create observer when file list changes
|
||||
|
||||
// Register wrapper ref — observes the div, registers with parent
|
||||
const registerWrapper = useCallback(
|
||||
(filePath: string, el: HTMLDivElement | null) => {
|
||||
if (el) {
|
||||
wrapperRefs.current.set(filePath, el);
|
||||
observerRef.current?.observe(el);
|
||||
} else {
|
||||
const prev = wrapperRefs.current.get(filePath);
|
||||
if (prev) observerRef.current?.unobserve(prev);
|
||||
wrapperRefs.current.delete(filePath);
|
||||
}
|
||||
onRegisterRef?.(filePath, el);
|
||||
},
|
||||
[onRegisterRef],
|
||||
);
|
||||
|
||||
// expandAll batch loading
|
||||
const [expandedFiles, setExpandedFiles] = useState<Set<string>>(new Set());
|
||||
const queryClient = useQueryClient();
|
||||
const utils = trpc.useUtils();
|
||||
|
||||
useEffect(() => {
|
||||
if (!expandAll || files.length === 0) return;
|
||||
|
||||
const BATCH = 10;
|
||||
let cancelled = false;
|
||||
|
||||
async function batchExpand() {
|
||||
const chunks: (FileDiff | FileDiffDetail)[][] = [];
|
||||
for (let i = 0; i < files.length; i += BATCH) {
|
||||
chunks.push(files.slice(i, i + BATCH));
|
||||
}
|
||||
|
||||
for (const chunk of chunks) {
|
||||
if (cancelled) break;
|
||||
// Mark this batch as expanded (triggers FileCard renders + queries)
|
||||
setExpandedFiles((prev) => {
|
||||
const next = new Set(prev);
|
||||
for (const f of chunk) {
|
||||
if (f.status !== 'binary') next.add(f.newPath);
|
||||
}
|
||||
return next;
|
||||
});
|
||||
// Eagerly prefetch via React Query to saturate network
|
||||
await Promise.all(
|
||||
chunk
|
||||
.filter((f) => f.status !== 'binary' && !('hunks' in f))
|
||||
.map((f) =>
|
||||
utils.getFileDiff
|
||||
.fetch({ phaseId, filePath: encodeURIComponent(f.newPath) })
|
||||
.catch(() => null), // swallow per-file errors; FileCard shows its own error state
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
batchExpand();
|
||||
return () => {
|
||||
cancelled = true;
|
||||
};
|
||||
}, [expandAll]); // only re-run when expandAll toggles
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps -- intentionally only on expandAll
|
||||
|
||||
// Suppress unused variable warning — used only to force re-render on visibility change
|
||||
void visibilityVersion;
|
||||
void queryClient; // imported for type alignment; actual prefetch goes through trpc utils
|
||||
|
||||
const isSingleFile = files.length === 1;
|
||||
|
||||
return (
|
||||
<div className="space-y-4">
|
||||
{files.map((file) => (
|
||||
<div key={file.newPath} ref={(el) => onRegisterRef?.(file.newPath, el)}>
|
||||
{files.map((file) => {
|
||||
const isVisible = isSingleFile || visibleFiles.current.has(file.newPath);
|
||||
const isExpandedOverride = expandedFiles.has(file.newPath) ? true : undefined;
|
||||
return (
|
||||
<div
|
||||
key={file.newPath}
|
||||
ref={(el) => registerWrapper(file.newPath, el)}
|
||||
data-file-path={file.newPath}
|
||||
>
|
||||
{isVisible ? (
|
||||
<FileCard
|
||||
file={file}
|
||||
comments={comments.filter((c) => c.filePath === file.newPath)}
|
||||
file={file as FileDiff}
|
||||
detail={'hunks' in file ? (file as FileDiffDetail) : undefined}
|
||||
phaseId={phaseId}
|
||||
commitMode={commitMode}
|
||||
commentsByLine={getFileCommentMap(commentsByLine, file.newPath)}
|
||||
isExpandedOverride={isExpandedOverride}
|
||||
onAddComment={onAddComment}
|
||||
onResolveComment={onResolveComment}
|
||||
onUnresolveComment={onUnresolveComment}
|
||||
@@ -46,8 +193,12 @@ export function DiffViewer({
|
||||
isViewed={viewedFiles?.has(file.newPath) ?? false}
|
||||
onToggleViewed={() => onToggleViewed?.(file.newPath)}
|
||||
/>
|
||||
) : (
|
||||
<div style={{ height: '48px' }} aria-hidden />
|
||||
)}
|
||||
</div>
|
||||
))}
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
270
apps/web/src/components/review/FileCard.test.tsx
Normal file
270
apps/web/src/components/review/FileCard.test.tsx
Normal file
@@ -0,0 +1,270 @@
|
||||
// @vitest-environment happy-dom
|
||||
import "@testing-library/jest-dom/vitest";
|
||||
import { render, screen, fireEvent, waitFor } from "@testing-library/react";
|
||||
import { vi, describe, it, expect, beforeEach } from "vitest";
|
||||
import { FileCard } from "./FileCard";
|
||||
import type { FileDiff, FileDiffDetail } from "./types";
|
||||
|
||||
// ── Module mocks ──────────────────────────────────────────────────────────────
|
||||
|
||||
vi.mock("./HunkRows", () => ({
|
||||
HunkRows: ({ hunk }: { hunk: { header: string } }) => (
|
||||
<tr data-testid="hunk-row">
|
||||
<td>{hunk.header}</td>
|
||||
</tr>
|
||||
),
|
||||
}));
|
||||
|
||||
vi.mock("./use-syntax-highlight", () => ({
|
||||
useHighlightedFile: () => null,
|
||||
}));
|
||||
|
||||
// Hoist mocks so they can be referenced in vi.mock factories
|
||||
const { mockGetFileDiff, mockParseUnifiedDiff } = vi.hoisted(() => ({
|
||||
mockGetFileDiff: vi.fn(),
|
||||
mockParseUnifiedDiff: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock("@/lib/trpc", () => ({
|
||||
trpc: {
|
||||
getFileDiff: {
|
||||
useQuery: (
|
||||
input: unknown,
|
||||
opts: { enabled: boolean; staleTime?: number },
|
||||
) => mockGetFileDiff(input, opts),
|
||||
},
|
||||
},
|
||||
}));
|
||||
|
||||
vi.mock("./parse-diff", () => ({
|
||||
parseUnifiedDiff: (rawDiff: string) => mockParseUnifiedDiff(rawDiff),
|
||||
}));
|
||||
|
||||
// ── Helpers ───────────────────────────────────────────────────────────────────
|
||||
|
||||
function makeFile(overrides: Partial<FileDiff> = {}): FileDiff {
|
||||
return {
|
||||
oldPath: "src/foo.ts",
|
||||
newPath: "src/foo.ts",
|
||||
status: "modified",
|
||||
additions: 10,
|
||||
deletions: 5,
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
const defaultProps = {
|
||||
phaseId: "phase-1",
|
||||
commitMode: false,
|
||||
commentsByLine: new Map(),
|
||||
onAddComment: vi.fn(),
|
||||
onResolveComment: vi.fn(),
|
||||
onUnresolveComment: vi.fn(),
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
mockGetFileDiff.mockReturnValue({
|
||||
data: undefined,
|
||||
isLoading: false,
|
||||
isError: false,
|
||||
refetch: vi.fn(),
|
||||
});
|
||||
// Default: return empty parse result
|
||||
mockParseUnifiedDiff.mockReturnValue([]);
|
||||
});
|
||||
|
||||
// ── Tests ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
describe("FileCard", () => {
|
||||
it("starts collapsed and does not enable getFileDiff query", () => {
|
||||
render(<FileCard file={makeFile()} {...defaultProps} />);
|
||||
|
||||
// Query must be called with enabled: false while card is collapsed
|
||||
expect(mockGetFileDiff).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
filePath: encodeURIComponent("src/foo.ts"),
|
||||
}),
|
||||
expect.objectContaining({ enabled: false }),
|
||||
);
|
||||
|
||||
// No hunk rows rendered in the collapsed state
|
||||
expect(screen.queryByTestId("hunk-row")).toBeNull();
|
||||
});
|
||||
|
||||
it("enables query and shows loading spinner when expanded", () => {
|
||||
mockGetFileDiff.mockReturnValue({
|
||||
data: undefined,
|
||||
isLoading: true,
|
||||
isError: false,
|
||||
refetch: vi.fn(),
|
||||
});
|
||||
|
||||
render(<FileCard file={makeFile()} {...defaultProps} />);
|
||||
fireEvent.click(screen.getByRole("button"));
|
||||
|
||||
// After expanding, query should be called with enabled: true
|
||||
expect(mockGetFileDiff).toHaveBeenLastCalledWith(
|
||||
expect.anything(),
|
||||
expect.objectContaining({ enabled: true }),
|
||||
);
|
||||
|
||||
// Loading spinner should be visible
|
||||
expect(screen.getByText(/Loading diff/i)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it("renders HunkRows when query succeeds", async () => {
|
||||
mockGetFileDiff.mockReturnValue({
|
||||
data: {
|
||||
binary: false,
|
||||
rawDiff:
|
||||
"diff --git a/src/foo.ts b/src/foo.ts\n@@ -1,3 +1,3 @@\n context\n",
|
||||
},
|
||||
isLoading: false,
|
||||
isError: false,
|
||||
refetch: vi.fn(),
|
||||
});
|
||||
|
||||
mockParseUnifiedDiff.mockReturnValue([
|
||||
{
|
||||
oldPath: "src/foo.ts",
|
||||
newPath: "src/foo.ts",
|
||||
status: "modified",
|
||||
additions: 0,
|
||||
deletions: 0,
|
||||
hunks: [
|
||||
{
|
||||
header: "@@ -1,3 +1,3 @@",
|
||||
oldStart: 1,
|
||||
oldCount: 3,
|
||||
newStart: 1,
|
||||
newCount: 3,
|
||||
lines: [],
|
||||
},
|
||||
],
|
||||
},
|
||||
]);
|
||||
|
||||
render(<FileCard file={makeFile()} {...defaultProps} />);
|
||||
fireEvent.click(screen.getByRole("button"));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId("hunk-row")).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it("shows error state with Retry button; clicking retry calls refetch", () => {
|
||||
const refetch = vi.fn();
|
||||
mockGetFileDiff.mockReturnValue({
|
||||
data: undefined,
|
||||
isLoading: false,
|
||||
isError: true,
|
||||
refetch,
|
||||
});
|
||||
|
||||
render(<FileCard file={makeFile()} {...defaultProps} />);
|
||||
fireEvent.click(screen.getByRole("button"));
|
||||
|
||||
expect(screen.getByText(/Failed to load diff/i)).toBeInTheDocument();
|
||||
const retryBtn = screen.getByRole("button", { name: /retry/i });
|
||||
fireEvent.click(retryBtn);
|
||||
expect(refetch).toHaveBeenCalledOnce();
|
||||
});
|
||||
|
||||
it("shows binary message on expand and does not enable getFileDiff query", () => {
|
||||
render(<FileCard file={makeFile({ status: "binary" })} {...defaultProps} />);
|
||||
fireEvent.click(screen.getByRole("button"));
|
||||
|
||||
expect(screen.getByText(/Binary file/i)).toBeInTheDocument();
|
||||
|
||||
// Query must never be enabled for binary files
|
||||
expect(mockGetFileDiff).toHaveBeenCalledWith(
|
||||
expect.anything(),
|
||||
expect.objectContaining({ enabled: false }),
|
||||
);
|
||||
});
|
||||
|
||||
it("shows No content changes when parsed hunks array is empty", async () => {
|
||||
mockGetFileDiff.mockReturnValue({
|
||||
data: {
|
||||
binary: false,
|
||||
rawDiff: "diff --git a/src/foo.ts b/src/foo.ts\nsome content\n",
|
||||
},
|
||||
isLoading: false,
|
||||
isError: false,
|
||||
refetch: vi.fn(),
|
||||
});
|
||||
|
||||
mockParseUnifiedDiff.mockReturnValue([
|
||||
{
|
||||
oldPath: "src/foo.ts",
|
||||
newPath: "src/foo.ts",
|
||||
status: "modified",
|
||||
additions: 0,
|
||||
deletions: 0,
|
||||
hunks: [],
|
||||
},
|
||||
]);
|
||||
|
||||
render(<FileCard file={makeFile()} {...defaultProps} />);
|
||||
fireEvent.click(screen.getByRole("button"));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText(/No content changes/i)).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it("renders pre-parsed hunks from detail prop without fetching", () => {
|
||||
const detail: FileDiffDetail = {
|
||||
oldPath: "src/foo.ts",
|
||||
newPath: "src/foo.ts",
|
||||
status: "modified",
|
||||
additions: 5,
|
||||
deletions: 2,
|
||||
hunks: [
|
||||
{
|
||||
header: "@@ -1 +1 @@",
|
||||
oldStart: 1,
|
||||
oldCount: 1,
|
||||
newStart: 1,
|
||||
newCount: 1,
|
||||
lines: [],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
render(<FileCard file={makeFile()} detail={detail} {...defaultProps} />);
|
||||
|
||||
// Should start expanded because detail prop is provided
|
||||
expect(screen.getByTestId("hunk-row")).toBeInTheDocument();
|
||||
|
||||
// Query must not be enabled when detail prop is present
|
||||
expect(mockGetFileDiff).toHaveBeenCalledWith(
|
||||
expect.anything(),
|
||||
expect.objectContaining({ enabled: false }),
|
||||
);
|
||||
});
|
||||
|
||||
it("does not refetch when collapsing and re-expanding", () => {
|
||||
// Simulate data already available (as if previously fetched and cached)
|
||||
mockGetFileDiff.mockReturnValue({
|
||||
data: { binary: false, rawDiff: "" },
|
||||
isLoading: false,
|
||||
isError: false,
|
||||
refetch: vi.fn(),
|
||||
});
|
||||
|
||||
render(<FileCard file={makeFile()} {...defaultProps} />);
|
||||
const headerBtn = screen.getByRole("button");
|
||||
|
||||
// Expand: query enabled, data shown immediately (no loading)
|
||||
fireEvent.click(headerBtn);
|
||||
expect(screen.queryByText(/Loading diff/i)).toBeNull();
|
||||
|
||||
// Collapse
|
||||
fireEvent.click(headerBtn);
|
||||
|
||||
// Re-expand: should not enter loading state (data still available)
|
||||
fireEvent.click(headerBtn);
|
||||
expect(screen.queryByText(/Loading diff/i)).toBeNull();
|
||||
});
|
||||
});
|
||||
@@ -6,16 +6,16 @@ import {
|
||||
Minus,
|
||||
CheckCircle2,
|
||||
Circle,
|
||||
Loader2,
|
||||
} from "lucide-react";
|
||||
import { Badge } from "@/components/ui/badge";
|
||||
import type { FileDiff, FileChangeType, DiffLine, ReviewComment } from "./types";
|
||||
import type { FileDiff, FileDiffDetail, DiffLine, ReviewComment } from "./types";
|
||||
import { HunkRows } from "./HunkRows";
|
||||
import { useHighlightedFile } from "./use-syntax-highlight";
|
||||
import { parseUnifiedDiff } from "./parse-diff";
|
||||
import { trpc } from "@/lib/trpc";
|
||||
|
||||
const changeTypeBadge: Record<
|
||||
FileChangeType,
|
||||
{ label: string; classes: string } | null
|
||||
> = {
|
||||
const statusBadge: Record<FileDiff['status'], { label: string; classes: string } | null> = {
|
||||
added: {
|
||||
label: "NEW",
|
||||
classes:
|
||||
@@ -32,18 +32,27 @@ const changeTypeBadge: Record<
|
||||
"bg-status-active-bg text-status-active-fg border-status-active-border",
|
||||
},
|
||||
modified: null,
|
||||
binary: {
|
||||
label: "BINARY",
|
||||
classes: "bg-muted text-muted-foreground border-border",
|
||||
},
|
||||
};
|
||||
|
||||
const leftBorderClass: Record<FileChangeType, string> = {
|
||||
const leftBorderClass: Record<FileDiff['status'], string> = {
|
||||
added: "border-l-2 border-l-status-success-fg",
|
||||
deleted: "border-l-2 border-l-status-error-fg",
|
||||
renamed: "border-l-2 border-l-status-active-fg",
|
||||
modified: "border-l-2 border-l-primary/40",
|
||||
binary: "border-l-2 border-l-primary/40",
|
||||
};
|
||||
|
||||
interface FileCardProps {
|
||||
file: FileDiff;
|
||||
comments: ReviewComment[];
|
||||
detail?: FileDiffDetail;
|
||||
phaseId: string;
|
||||
commitMode: boolean;
|
||||
commentsByLine: Map<string, ReviewComment[]>;
|
||||
isExpandedOverride?: boolean;
|
||||
onAddComment: (
|
||||
filePath: string,
|
||||
lineNumber: number,
|
||||
@@ -60,7 +69,11 @@ interface FileCardProps {
|
||||
|
||||
export function FileCard({
|
||||
file,
|
||||
comments,
|
||||
detail,
|
||||
phaseId,
|
||||
commitMode,
|
||||
commentsByLine,
|
||||
isExpandedOverride,
|
||||
onAddComment,
|
||||
onResolveComment,
|
||||
onUnresolveComment,
|
||||
@@ -69,26 +82,65 @@ export function FileCard({
|
||||
isViewed = false,
|
||||
onToggleViewed = () => {},
|
||||
}: FileCardProps) {
|
||||
const [expanded, setExpanded] = useState(true);
|
||||
const commentCount = comments.length;
|
||||
const badge = changeTypeBadge[file.changeType];
|
||||
// Uncontrolled expand for normal file clicks.
|
||||
// Start expanded if detail prop is provided (commit mode).
|
||||
const [isExpandedLocal, setIsExpandedLocal] = useState(() => !!detail);
|
||||
|
||||
// Flatten all hunk lines for syntax highlighting
|
||||
const allLines = useMemo(
|
||||
() => file.hunks.flatMap((h) => h.lines),
|
||||
[file.hunks],
|
||||
// Merge with override from DiffViewer expandAll
|
||||
const isExpanded = isExpandedOverride ?? isExpandedLocal;
|
||||
|
||||
const fileDiffQuery = trpc.getFileDiff.useQuery(
|
||||
{ phaseId, filePath: encodeURIComponent(file.newPath) },
|
||||
{
|
||||
enabled: isExpanded && !commitMode && file.status !== 'binary' && !detail,
|
||||
staleTime: Infinity,
|
||||
},
|
||||
);
|
||||
|
||||
// Compute hunks from query data (phase mode)
|
||||
const parsedHunks = useMemo(() => {
|
||||
if (!fileDiffQuery.data?.rawDiff) return null;
|
||||
const parsed = parseUnifiedDiff(fileDiffQuery.data.rawDiff);
|
||||
return parsed[0] ?? null;
|
||||
}, [fileDiffQuery.data]);
|
||||
|
||||
// Collect all lines for syntax highlighting
|
||||
const allLines = useMemo(() => {
|
||||
if (detail) return detail.hunks.flatMap((h) => h.lines);
|
||||
if (parsedHunks) return parsedHunks.hunks.flatMap((h) => h.lines);
|
||||
return [];
|
||||
}, [detail, parsedHunks]);
|
||||
|
||||
const tokenMap = useHighlightedFile(file.newPath, allLines);
|
||||
|
||||
const commentCount = useMemo(() => {
|
||||
let count = 0;
|
||||
for (const [key, arr] of commentsByLine) {
|
||||
if (key.startsWith(`${file.newPath}:`)) count += arr.length;
|
||||
}
|
||||
return count;
|
||||
}, [commentsByLine, file.newPath]);
|
||||
|
||||
const badge = statusBadge[file.status];
|
||||
|
||||
const handlers = {
|
||||
onAddComment,
|
||||
onResolveComment,
|
||||
onUnresolveComment,
|
||||
onReplyComment,
|
||||
onEditComment,
|
||||
tokenMap,
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="rounded-lg border border-border overflow-clip">
|
||||
{/* File header — sticky so it stays visible when scrolling */}
|
||||
{/* File header */}
|
||||
<button
|
||||
className={`sticky z-10 flex w-full items-center gap-2 px-3 py-2 bg-muted hover:bg-muted/90 text-left text-sm font-mono transition-colors ${leftBorderClass[file.changeType]}`}
|
||||
className={`sticky z-10 flex w-full items-center gap-2 px-3 py-2 bg-muted hover:bg-muted/90 text-left text-sm font-mono transition-colors ${leftBorderClass[file.status]}`}
|
||||
style={{ top: 'var(--review-header-h, 0px)' }}
|
||||
onClick={() => setExpanded(!expanded)}
|
||||
onClick={() => setIsExpandedLocal(!isExpandedLocal)}
|
||||
>
|
||||
{expanded ? (
|
||||
{isExpanded ? (
|
||||
<ChevronDown className="h-3.5 w-3.5 shrink-0 text-muted-foreground" />
|
||||
) : (
|
||||
<ChevronRight className="h-3.5 w-3.5 shrink-0 text-muted-foreground" />
|
||||
@@ -149,26 +201,63 @@ export function FileCard({
|
||||
</button>
|
||||
|
||||
{/* Diff content */}
|
||||
{expanded && (
|
||||
{isExpanded && (
|
||||
<div className="overflow-x-auto">
|
||||
{detail ? (
|
||||
// Commit mode: pre-parsed hunks from detail prop
|
||||
detail.hunks.length === 0 ? (
|
||||
<div className="px-4 py-3 text-xs text-muted-foreground">No content changes</div>
|
||||
) : (
|
||||
<table className="w-full text-xs font-mono border-collapse">
|
||||
<tbody>
|
||||
{file.hunks.map((hunk, hi) => (
|
||||
{detail.hunks.map((hunk, hi) => (
|
||||
<HunkRows
|
||||
key={hi}
|
||||
hunk={hunk}
|
||||
filePath={file.newPath}
|
||||
comments={comments}
|
||||
onAddComment={onAddComment}
|
||||
onResolveComment={onResolveComment}
|
||||
onUnresolveComment={onUnresolveComment}
|
||||
onReplyComment={onReplyComment}
|
||||
onEditComment={onEditComment}
|
||||
tokenMap={tokenMap}
|
||||
commentsByLine={commentsByLine}
|
||||
{...handlers}
|
||||
/>
|
||||
))}
|
||||
</tbody>
|
||||
</table>
|
||||
)
|
||||
) : file.status === 'binary' ? (
|
||||
<div className="px-4 py-3 text-xs text-muted-foreground">Binary file — diff not shown</div>
|
||||
) : fileDiffQuery.isLoading ? (
|
||||
<div className="flex items-center gap-2 px-4 py-3 text-xs text-muted-foreground">
|
||||
<Loader2 className="h-3.5 w-3.5 animate-spin" />
|
||||
Loading diff…
|
||||
</div>
|
||||
) : fileDiffQuery.isError ? (
|
||||
<div className="flex items-center gap-2 px-4 py-3 text-xs text-destructive">
|
||||
Failed to load diff.
|
||||
<button
|
||||
className="underline hover:no-underline"
|
||||
onClick={() => fileDiffQuery.refetch()}
|
||||
>
|
||||
Retry
|
||||
</button>
|
||||
</div>
|
||||
) : fileDiffQuery.data ? (
|
||||
!parsedHunks || parsedHunks.hunks.length === 0 ? (
|
||||
<div className="px-4 py-3 text-xs text-muted-foreground">No content changes</div>
|
||||
) : (
|
||||
<table className="w-full text-xs font-mono border-collapse">
|
||||
<tbody>
|
||||
{parsedHunks.hunks.map((hunk, hi) => (
|
||||
<HunkRows
|
||||
key={hi}
|
||||
hunk={hunk}
|
||||
filePath={file.newPath}
|
||||
commentsByLine={commentsByLine}
|
||||
{...handlers}
|
||||
/>
|
||||
))}
|
||||
</tbody>
|
||||
</table>
|
||||
)
|
||||
) : null}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
@@ -6,7 +6,7 @@ import type { LineTokenMap } from "./use-syntax-highlight";
|
||||
interface HunkRowsProps {
|
||||
hunk: { header: string; lines: DiffLine[] };
|
||||
filePath: string;
|
||||
comments: ReviewComment[];
|
||||
commentsByLine: Map<string, ReviewComment[]>;
|
||||
onAddComment: (
|
||||
filePath: string,
|
||||
lineNumber: number,
|
||||
@@ -23,7 +23,7 @@ interface HunkRowsProps {
|
||||
export function HunkRows({
|
||||
hunk,
|
||||
filePath,
|
||||
comments,
|
||||
commentsByLine,
|
||||
onAddComment,
|
||||
onResolveComment,
|
||||
onUnresolveComment,
|
||||
@@ -81,9 +81,9 @@ export function HunkRows({
|
||||
|
||||
{hunk.lines.map((line, li) => {
|
||||
const lineKey = line.newLineNumber ?? line.oldLineNumber ?? li;
|
||||
const lineComments = comments.filter(
|
||||
(c) => c.lineNumber === lineKey && c.lineType === line.type,
|
||||
);
|
||||
// O(1) map lookup — replaces the previous O(n) filter
|
||||
const lineComments =
|
||||
commentsByLine.get(`${filePath}:${lineKey}:${line.type}`) ?? [];
|
||||
const isCommenting =
|
||||
commentingLine?.lineNumber === lineKey &&
|
||||
commentingLine?.lineType === line.type;
|
||||
|
||||
@@ -308,7 +308,7 @@ export function InitiativeReview({ initiativeId, onCompleted }: InitiativeReview
|
||||
) : (
|
||||
<DiffViewer
|
||||
files={files}
|
||||
comments={[]}
|
||||
commentsByLine={new Map()}
|
||||
onAddComment={() => {}}
|
||||
onResolveComment={() => {}}
|
||||
onUnresolveComment={() => {}}
|
||||
|
||||
@@ -42,6 +42,9 @@ interface ReviewHeaderProps {
|
||||
preview: PreviewState | null;
|
||||
viewedCount?: number;
|
||||
totalCount?: number;
|
||||
totalAdditions?: number;
|
||||
totalDeletions?: number;
|
||||
onExpandAll?: () => void;
|
||||
}
|
||||
|
||||
export function ReviewHeader({
|
||||
@@ -62,9 +65,12 @@ export function ReviewHeader({
|
||||
preview,
|
||||
viewedCount,
|
||||
totalCount,
|
||||
totalAdditions: totalAdditionsProp,
|
||||
totalDeletions: totalDeletionsProp,
|
||||
onExpandAll,
|
||||
}: ReviewHeaderProps) {
|
||||
const totalAdditions = files.reduce((s, f) => s + f.additions, 0);
|
||||
const totalDeletions = files.reduce((s, f) => s + f.deletions, 0);
|
||||
const totalAdditions = totalAdditionsProp ?? files.reduce((s, f) => s + f.additions, 0);
|
||||
const totalDeletions = totalDeletionsProp ?? files.reduce((s, f) => s + f.deletions, 0);
|
||||
const [showConfirmation, setShowConfirmation] = useState(false);
|
||||
const [showRequestConfirm, setShowRequestConfirm] = useState(false);
|
||||
const confirmRef = useRef<HTMLDivElement>(null);
|
||||
@@ -186,6 +192,16 @@ export function ReviewHeader({
|
||||
|
||||
{/* Right: preview + actions */}
|
||||
<div className="flex items-center gap-3 shrink-0">
|
||||
{onExpandAll && (
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
onClick={onExpandAll}
|
||||
className="h-7 text-xs px-2 text-muted-foreground"
|
||||
>
|
||||
Expand all
|
||||
</Button>
|
||||
)}
|
||||
{/* Preview controls */}
|
||||
{preview && <PreviewControls preview={preview} />}
|
||||
|
||||
|
||||
193
apps/web/src/components/review/ReviewSidebar.test.tsx
Normal file
193
apps/web/src/components/review/ReviewSidebar.test.tsx
Normal file
@@ -0,0 +1,193 @@
|
||||
// @vitest-environment happy-dom
|
||||
import '@testing-library/jest-dom/vitest';
|
||||
import { render, screen, fireEvent, act } from '@testing-library/react';
|
||||
import { vi, describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import { ReviewSidebar } from './ReviewSidebar';
|
||||
import type { FileDiff, ReviewComment, CommitInfo } from './types';
|
||||
|
||||
// Mock ResizeObserver — not provided by happy-dom.
|
||||
// react-window 2.x uses `new ResizeObserver()` internally.
|
||||
class MockResizeObserver {
|
||||
observe() {}
|
||||
unobserve() {}
|
||||
disconnect() {}
|
||||
}
|
||||
vi.stubGlobal('ResizeObserver', MockResizeObserver);
|
||||
|
||||
// Mock react-window to avoid ESM/CJS duplicate-React-instance errors in Vitest.
|
||||
// The mock renders only the first 15 rows, simulating windowed rendering.
|
||||
// It also exposes a `listRef`-compatible imperative handle so scroll-save/restore logic runs.
|
||||
vi.mock('react-window', () => ({
|
||||
List: vi.fn(({ rowComponent: RowComponent, rowCount, rowProps, listRef }: any) => {
|
||||
// Expose the imperative API via the ref (synchronous assignment is safe in tests).
|
||||
if (listRef && typeof listRef === 'object' && 'current' in listRef) {
|
||||
listRef.current = { element: { scrollTop: 0 }, scrollToRow: vi.fn() };
|
||||
}
|
||||
const renderCount = Math.min(rowCount ?? 0, 15);
|
||||
return (
|
||||
<div data-testid="virtual-list">
|
||||
{Array.from({ length: renderCount }, (_, i) => (
|
||||
<RowComponent key={i} index={i} style={{}} ariaAttributes={{}} {...rowProps} />
|
||||
))}
|
||||
</div>
|
||||
);
|
||||
}),
|
||||
}));
|
||||
|
||||
// ─── Helpers ──────────────────────────────────────────────────────────────────
|
||||
|
||||
function makeFile(path: string): FileDiff {
|
||||
return {
|
||||
oldPath: path,
|
||||
newPath: path,
|
||||
hunks: [],
|
||||
additions: 1,
|
||||
deletions: 0,
|
||||
changeType: 'modified',
|
||||
};
|
||||
}
|
||||
|
||||
function makeFiles(count: number, prefix = 'src/components/'): FileDiff[] {
|
||||
return Array.from({ length: count }, (_, i) =>
|
||||
makeFile(`${prefix}file${String(i).padStart(4, '0')}.ts`),
|
||||
);
|
||||
}
|
||||
|
||||
const NO_COMMENTS: ReviewComment[] = [];
|
||||
const NO_COMMITS: CommitInfo[] = [];
|
||||
|
||||
function renderSidebar(files: FileDiff[]) {
|
||||
return render(
|
||||
<ReviewSidebar
|
||||
files={files}
|
||||
comments={NO_COMMENTS}
|
||||
onFileClick={vi.fn()}
|
||||
selectedCommit={null}
|
||||
activeFiles={files}
|
||||
commits={NO_COMMITS}
|
||||
onSelectCommit={vi.fn()}
|
||||
/>,
|
||||
);
|
||||
}
|
||||
|
||||
// ─── Tests ────────────────────────────────────────────────────────────────────
|
||||
|
||||
describe('ReviewSidebar FilesView virtualization', () => {
|
||||
beforeEach(() => vi.clearAllMocks());
|
||||
afterEach(() => vi.restoreAllMocks());
|
||||
|
||||
// 1. Virtual list NOT used for ≤50 files (fallback path)
|
||||
it('does not use virtual list when files count is ≤50', () => {
|
||||
renderSidebar(makeFiles(10));
|
||||
|
||||
expect(screen.queryByTestId('virtual-list')).not.toBeInTheDocument();
|
||||
// All 10 file rows are in the DOM directly
|
||||
expect(document.querySelectorAll('[data-testid="file-row"]').length).toBe(10);
|
||||
});
|
||||
|
||||
// 2. Virtual list IS used for >50 files (virtualized path)
|
||||
it('uses virtual list when files count is >50', () => {
|
||||
renderSidebar(makeFiles(1000));
|
||||
|
||||
expect(screen.getByTestId('virtual-list')).toBeInTheDocument();
|
||||
// Mock renders only 15 rows — far fewer than 1000
|
||||
expect(document.querySelectorAll('[data-testid="file-row"]').length).toBeLessThan(50);
|
||||
});
|
||||
|
||||
// 3. Directory collapse removes file rows from the virtual list
|
||||
it('removes file rows from virtual list when directory is collapsed', async () => {
|
||||
// 100 files all in "src/" — produces 101 rows (1 dir-header + 100 files), which is >50
|
||||
const files = Array.from({ length: 100 }, (_, i) => makeFile(`src/file${i}.ts`));
|
||||
renderSidebar(files);
|
||||
|
||||
expect(screen.getByTestId('virtual-list')).toBeInTheDocument();
|
||||
|
||||
const dirHeader = screen.getByRole('button', { name: /src\// });
|
||||
expect(document.querySelectorAll('[data-testid="file-row"]').length).toBeGreaterThan(0);
|
||||
|
||||
await act(async () => {
|
||||
fireEvent.click(dirHeader);
|
||||
});
|
||||
|
||||
// After collapse: only the dir-header row remains in the virtual list
|
||||
expect(document.querySelectorAll('[data-testid="file-row"]').length).toBe(0);
|
||||
expect(document.querySelectorAll('[data-testid="dir-header"]').length).toBe(1);
|
||||
});
|
||||
|
||||
// 3a. Expanding a collapsed directory restores file rows
|
||||
it('restores file rows when a collapsed directory is expanded again', async () => {
|
||||
const files = makeFiles(60, 'src/components/');
|
||||
renderSidebar(files);
|
||||
|
||||
const dirHeader = screen.getByRole('button', { name: /src\/components\// });
|
||||
|
||||
// Collapse
|
||||
await act(async () => {
|
||||
fireEvent.click(dirHeader);
|
||||
});
|
||||
expect(document.querySelectorAll('[data-testid="file-row"]').length).toBe(0);
|
||||
|
||||
// Expand again
|
||||
const freshDirHeader = screen.getByRole('button', { name: /src\/components\// });
|
||||
await act(async () => {
|
||||
fireEvent.click(freshDirHeader);
|
||||
});
|
||||
|
||||
// File rows are back (virtual list renders up to 15)
|
||||
expect(document.querySelectorAll('[data-testid="file-row"]').length).toBeGreaterThan(0);
|
||||
expect(document.querySelectorAll('[data-testid="dir-header"]').length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
// 4. Scroll position saved and restored on Files ↔ Commits tab switch
|
||||
it('restores file rows when returning to Files tab after switching to Commits tab', async () => {
|
||||
renderSidebar(makeFiles(200));
|
||||
|
||||
// Files tab is default — file rows are visible
|
||||
expect(document.querySelectorAll('[data-testid="file-row"]').length).toBeGreaterThan(0);
|
||||
|
||||
// Switch to Commits tab — FilesView unmounts (scroll offset is saved)
|
||||
await act(async () => {
|
||||
fireEvent.click(screen.getByTitle('Commits'));
|
||||
});
|
||||
expect(document.querySelectorAll('[data-testid="file-row"]').length).toBe(0);
|
||||
|
||||
// Switch back to Files tab — FilesView remounts (scroll offset is restored)
|
||||
await act(async () => {
|
||||
fireEvent.click(screen.getByTitle('Files'));
|
||||
});
|
||||
expect(document.querySelectorAll('[data-testid="file-row"]').length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
// 5. Clicking a file calls onFileClick with the correct path
|
||||
it('calls onFileClick when a file row is clicked', () => {
|
||||
const onFileClick = vi.fn();
|
||||
const files = makeFiles(5);
|
||||
render(
|
||||
<ReviewSidebar
|
||||
files={files}
|
||||
comments={NO_COMMENTS}
|
||||
onFileClick={onFileClick}
|
||||
selectedCommit={null}
|
||||
activeFiles={files}
|
||||
commits={NO_COMMITS}
|
||||
onSelectCommit={vi.fn()}
|
||||
/>,
|
||||
);
|
||||
|
||||
const fileButtons = document.querySelectorAll('[data-testid="file-row"]');
|
||||
expect(fileButtons.length).toBeGreaterThan(0);
|
||||
fireEvent.click(fileButtons[0]);
|
||||
|
||||
// First file after alphabetical sort within the directory
|
||||
expect(onFileClick).toHaveBeenCalledWith(files[0].newPath);
|
||||
});
|
||||
|
||||
// 6. Root-level files (no subdirectory) render without a directory header
|
||||
it('root-level files render without a directory header', () => {
|
||||
const files = makeFiles(10, ''); // no prefix → root-level files
|
||||
renderSidebar(files);
|
||||
|
||||
expect(document.querySelectorAll('[data-testid="file-row"]').length).toBe(10);
|
||||
expect(document.querySelectorAll('[data-testid="dir-header"]').length).toBe(0);
|
||||
});
|
||||
});
|
||||
@@ -1,8 +1,15 @@
|
||||
import { useMemo, useState } from "react";
|
||||
import { useMemo, useState, useRef, useEffect, useCallback } from "react";
|
||||
// Using react-window 2.x (installed version). The task spec was written for react-window 1.x
|
||||
// (VariableSizeList API). react-window 2.x provides a `List` component with a different but
|
||||
// equivalent API: it handles ResizeObserver internally (no explicit height/width props needed),
|
||||
// uses `rowComponent`/`rowProps` for rendering, and exposes `scrollToRow` via `listRef`.
|
||||
import { List } from "react-window";
|
||||
import type { RowComponentProps, ListImperativeAPI } from "react-window";
|
||||
import {
|
||||
MessageSquare,
|
||||
FileCode,
|
||||
FolderOpen,
|
||||
ChevronRight,
|
||||
Plus,
|
||||
Minus,
|
||||
Circle,
|
||||
@@ -38,6 +45,8 @@ export function ReviewSidebar({
|
||||
viewedFiles = new Set(),
|
||||
}: ReviewSidebarProps) {
|
||||
const [view, setView] = useState<SidebarView>("files");
|
||||
// Persist Files-tab scroll offset across Files ↔ Commits switches
|
||||
const filesScrollOffsetRef = useRef<number>(0);
|
||||
|
||||
return (
|
||||
<div className="flex h-full">
|
||||
@@ -58,8 +67,8 @@ export function ReviewSidebar({
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* Content panel */}
|
||||
<div className="flex-1 min-w-0 overflow-y-auto p-4">
|
||||
{/* Content panel — flex column so FilesView can stretch and manage its own scroll */}
|
||||
<div className="flex-1 min-w-0 flex flex-col min-h-0">
|
||||
{view === "files" ? (
|
||||
<FilesView
|
||||
files={files}
|
||||
@@ -69,13 +78,16 @@ export function ReviewSidebar({
|
||||
selectedCommit={selectedCommit}
|
||||
activeFiles={activeFiles}
|
||||
viewedFiles={viewedFiles}
|
||||
scrollOffsetRef={filesScrollOffsetRef}
|
||||
/>
|
||||
) : (
|
||||
<div className="overflow-y-auto p-4 flex-1">
|
||||
<CommitsView
|
||||
commits={commits}
|
||||
selectedCommit={selectedCommit}
|
||||
onSelectCommit={onSelectCommit}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
@@ -171,6 +183,109 @@ const changeTypeDotColor: Record<string, string> = {
|
||||
renamed: "bg-status-active-fg",
|
||||
};
|
||||
|
||||
// ─── Row type for virtualized list ───
|
||||
|
||||
type Row =
|
||||
| { kind: "dir-header"; dirName: string; fileCount: number; isCollapsed: boolean }
|
||||
| { kind: "file"; file: FileDiff; dirName: string; isViewed: boolean; commentCount: number };
|
||||
|
||||
// Item heights: dir-header ≈ 32px (py-0.5 + icon), file row ≈ 40px (py-1 + text)
|
||||
const DIR_HEADER_HEIGHT = 32;
|
||||
const FILE_ROW_HEIGHT = 40;
|
||||
|
||||
// ─── Virtualized row component (must be stable — defined outside FilesView) ───
|
||||
|
||||
type VirtualRowProps = {
|
||||
rows: Row[];
|
||||
selectedCommit: string | null;
|
||||
activeFilePaths: Set<string>;
|
||||
onFileClick: (filePath: string) => void;
|
||||
onToggleDir: (dirName: string) => void;
|
||||
};
|
||||
|
||||
function VirtualRowItem({
|
||||
index,
|
||||
style,
|
||||
rows,
|
||||
selectedCommit,
|
||||
activeFilePaths,
|
||||
onFileClick,
|
||||
onToggleDir,
|
||||
}: RowComponentProps<VirtualRowProps>) {
|
||||
const row = rows[index];
|
||||
if (!row) return null;
|
||||
|
||||
if (row.kind === "dir-header") {
|
||||
return (
|
||||
<button
|
||||
data-testid="dir-header"
|
||||
style={style}
|
||||
className="flex w-full items-center gap-1 text-[10px] font-mono text-muted-foreground/70 px-2 hover:bg-accent/30 transition-colors"
|
||||
onClick={() => onToggleDir(row.dirName)}
|
||||
title={row.isCollapsed ? "Expand directory" : "Collapse directory"}
|
||||
>
|
||||
<ChevronRight
|
||||
className={`h-3 w-3 shrink-0 transition-transform ${row.isCollapsed ? "" : "rotate-90"}`}
|
||||
/>
|
||||
<FolderOpen className="h-3 w-3 shrink-0" />
|
||||
<span className="truncate">{row.dirName}</span>
|
||||
</button>
|
||||
);
|
||||
}
|
||||
|
||||
// kind === "file"
|
||||
const { file, dirName, isViewed, commentCount } = row;
|
||||
const isInView = activeFilePaths.has(file.newPath);
|
||||
const dimmed = selectedCommit && !isInView;
|
||||
const dotColor = changeTypeDotColor[file.changeType];
|
||||
|
||||
return (
|
||||
<button
|
||||
data-testid="file-row"
|
||||
style={style}
|
||||
className={`
|
||||
flex w-full items-center gap-1.5 rounded py-1 text-left text-[11px]
|
||||
hover:bg-accent/50 transition-colors group
|
||||
${dirName ? "pl-4 pr-2" : "px-2"}
|
||||
${dimmed ? "opacity-35" : ""}
|
||||
`}
|
||||
onClick={() => onFileClick(file.newPath)}
|
||||
>
|
||||
{isViewed ? (
|
||||
<CheckCircle2 className="h-3 w-3 text-status-success-fg shrink-0" />
|
||||
) : (
|
||||
<FileCode className="h-3 w-3 text-muted-foreground shrink-0" />
|
||||
)}
|
||||
{dotColor && (
|
||||
<span className={`h-1.5 w-1.5 rounded-full shrink-0 ${dotColor}`} />
|
||||
)}
|
||||
<span className="truncate flex-1 font-mono">
|
||||
{getFileName(file.newPath)}
|
||||
</span>
|
||||
<span className="flex items-center gap-1 shrink-0">
|
||||
{commentCount > 0 && (
|
||||
<span className="flex items-center gap-0.5 text-muted-foreground">
|
||||
<MessageSquare className="h-2.5 w-2.5" />
|
||||
{commentCount}
|
||||
</span>
|
||||
)}
|
||||
{file.additions > 0 && (
|
||||
<span className="text-diff-add-fg text-[10px]">
|
||||
<Plus className="h-2.5 w-2.5 inline" />
|
||||
{file.additions}
|
||||
</span>
|
||||
)}
|
||||
{file.deletions > 0 && (
|
||||
<span className="text-diff-remove-fg text-[10px]">
|
||||
<Minus className="h-2.5 w-2.5 inline" />
|
||||
{file.deletions}
|
||||
</span>
|
||||
)}
|
||||
</span>
|
||||
</button>
|
||||
);
|
||||
}
|
||||
|
||||
function FilesView({
|
||||
files,
|
||||
comments,
|
||||
@@ -179,6 +294,7 @@ function FilesView({
|
||||
selectedCommit,
|
||||
activeFiles,
|
||||
viewedFiles,
|
||||
scrollOffsetRef,
|
||||
}: {
|
||||
files: FileDiff[];
|
||||
comments: ReviewComment[];
|
||||
@@ -187,10 +303,14 @@ function FilesView({
|
||||
selectedCommit: string | null;
|
||||
activeFiles: FileDiff[];
|
||||
viewedFiles: Set<string>;
|
||||
scrollOffsetRef: React.MutableRefObject<number>;
|
||||
}) {
|
||||
const unresolvedCount = comments.filter((c) => !c.resolved && !c.parentCommentId).length;
|
||||
const resolvedCount = comments.filter((c) => c.resolved && !c.parentCommentId).length;
|
||||
const activeFilePaths = new Set(activeFiles.map((f) => f.newPath));
|
||||
const activeFilePaths = useMemo(
|
||||
() => new Set(activeFiles.map((f) => f.newPath)),
|
||||
[activeFiles],
|
||||
);
|
||||
|
||||
const directoryGroups = useMemo(() => groupFilesByDirectory(files), [files]);
|
||||
|
||||
@@ -198,8 +318,119 @@ function FilesView({
|
||||
const totalCount = files.length;
|
||||
const progressPercent = totalCount > 0 ? (viewedCount / totalCount) * 100 : 0;
|
||||
|
||||
// ─── Collapse state ───
|
||||
const [collapsedDirs, setCollapsedDirs] = useState<Set<string>>(new Set());
|
||||
|
||||
const toggleDir = useCallback((dirName: string) => {
|
||||
setCollapsedDirs((prev) => {
|
||||
const next = new Set(prev);
|
||||
if (next.has(dirName)) next.delete(dirName);
|
||||
else next.add(dirName);
|
||||
return next;
|
||||
});
|
||||
}, []);
|
||||
|
||||
// ─── Flat row list for virtualization ───
|
||||
const rows = useMemo<Row[]>(() => {
|
||||
const result: Row[] = [];
|
||||
for (const group of directoryGroups) {
|
||||
const isCollapsed = collapsedDirs.has(group.directory);
|
||||
// Root-level files (directory === "") get no dir-header, preserving existing behavior
|
||||
if (group.directory) {
|
||||
result.push({
|
||||
kind: "dir-header",
|
||||
dirName: group.directory,
|
||||
fileCount: group.files.length,
|
||||
isCollapsed,
|
||||
});
|
||||
}
|
||||
if (!isCollapsed) {
|
||||
for (const file of group.files) {
|
||||
const commentCount = comments.filter(
|
||||
(c) => c.filePath === file.newPath && !c.parentCommentId,
|
||||
).length;
|
||||
result.push({
|
||||
kind: "file",
|
||||
file,
|
||||
dirName: group.directory,
|
||||
isViewed: viewedFiles.has(file.newPath),
|
||||
commentCount,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}, [directoryGroups, collapsedDirs, comments, viewedFiles]);
|
||||
|
||||
const isVirtualized = rows.length > 50;
|
||||
|
||||
// ─── react-window 2.x imperative ref ───
|
||||
const listRef = useRef<ListImperativeAPI | null>(null);
|
||||
// Fallback container ref for non-virtualized path
|
||||
const containerRef = useRef<HTMLDivElement | null>(null);
|
||||
|
||||
// Restore scroll position on mount (both paths)
|
||||
useEffect(() => {
|
||||
const offset = scrollOffsetRef.current;
|
||||
if (!offset) return;
|
||||
if (isVirtualized) {
|
||||
// react-window 2.x: scroll via the outermost DOM element
|
||||
const el = listRef.current?.element;
|
||||
if (el) el.scrollTop = offset;
|
||||
} else if (containerRef.current) {
|
||||
containerRef.current.scrollTop = offset;
|
||||
}
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, []); // only on mount
|
||||
|
||||
// Save scroll position on unmount (both paths)
|
||||
useEffect(() => {
|
||||
return () => {
|
||||
if (isVirtualized) {
|
||||
scrollOffsetRef.current = listRef.current?.element?.scrollTop ?? 0;
|
||||
} else {
|
||||
scrollOffsetRef.current = containerRef.current?.scrollTop ?? 0;
|
||||
}
|
||||
};
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [isVirtualized]);
|
||||
|
||||
// Row height function for react-window 2.x List
|
||||
const rowHeight = useCallback(
|
||||
(index: number) => (rows[index]?.kind === "dir-header" ? DIR_HEADER_HEIGHT : FILE_ROW_HEIGHT),
|
||||
[rows],
|
||||
);
|
||||
|
||||
// Handle file click: call onFileClick and scroll virtual list to row
|
||||
const handleFileClick = useCallback(
|
||||
(filePath: string) => {
|
||||
onFileClick(filePath);
|
||||
const rowIndex = rows.findIndex(
|
||||
(r) => r.kind === "file" && r.file.newPath === filePath,
|
||||
);
|
||||
if (rowIndex >= 0) {
|
||||
listRef.current?.scrollToRow({ index: rowIndex, align: "smart" });
|
||||
}
|
||||
},
|
||||
[onFileClick, rows, listRef],
|
||||
);
|
||||
|
||||
// Stable row props for the virtual row component
|
||||
const rowProps = useMemo<VirtualRowProps>(
|
||||
() => ({
|
||||
rows,
|
||||
selectedCommit,
|
||||
activeFilePaths,
|
||||
onFileClick: handleFileClick,
|
||||
onToggleDir: toggleDir,
|
||||
}),
|
||||
[rows, selectedCommit, activeFilePaths, handleFileClick, toggleDir],
|
||||
);
|
||||
|
||||
return (
|
||||
<div className="space-y-4">
|
||||
<div className="flex flex-col h-full min-h-0">
|
||||
{/* Fixed header — review progress + discussions */}
|
||||
<div className="p-4 space-y-4 shrink-0">
|
||||
{/* Review progress */}
|
||||
{totalCount > 0 && (
|
||||
<div className="space-y-1.5">
|
||||
@@ -282,7 +513,7 @@ function FilesView({
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Directory-grouped file tree */}
|
||||
{/* Files section heading */}
|
||||
<div>
|
||||
<h4 className="text-[10px] font-semibold text-muted-foreground uppercase tracking-wider mb-1.5">
|
||||
Files
|
||||
@@ -292,16 +523,41 @@ function FilesView({
|
||||
</span>
|
||||
)}
|
||||
</h4>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Scrollable file tree — virtualized (react-window 2.x List) when >50 rows */}
|
||||
{isVirtualized ? (
|
||||
<List
|
||||
listRef={listRef}
|
||||
rowCount={rows.length}
|
||||
rowHeight={rowHeight}
|
||||
rowComponent={VirtualRowItem}
|
||||
rowProps={rowProps}
|
||||
defaultHeight={600}
|
||||
style={{ flex: 1, minHeight: 0 }}
|
||||
/>
|
||||
) : (
|
||||
<div ref={containerRef} className="overflow-y-auto px-4 pb-4">
|
||||
{directoryGroups.map((group) => (
|
||||
<div key={group.directory}>
|
||||
{/* Directory header */}
|
||||
{/* Directory header — collapsible */}
|
||||
{group.directory && (
|
||||
<div className="text-[10px] font-mono text-muted-foreground/70 mt-2 first:mt-0 px-2 py-0.5 flex items-center gap-1">
|
||||
<button
|
||||
data-testid="dir-header"
|
||||
className="flex w-full items-center gap-1 text-[10px] font-mono text-muted-foreground/70 mt-2 first:mt-0 px-2 py-0.5 hover:bg-accent/30 transition-colors"
|
||||
onClick={() => toggleDir(group.directory)}
|
||||
title={collapsedDirs.has(group.directory) ? "Expand directory" : "Collapse directory"}
|
||||
>
|
||||
<ChevronRight
|
||||
className={`h-3 w-3 shrink-0 transition-transform ${collapsedDirs.has(group.directory) ? "" : "rotate-90"}`}
|
||||
/>
|
||||
<FolderOpen className="h-3 w-3 shrink-0" />
|
||||
<span className="truncate">{group.directory}</span>
|
||||
</div>
|
||||
</button>
|
||||
)}
|
||||
{/* Files in directory */}
|
||||
{!collapsedDirs.has(group.directory) && (
|
||||
<div className="space-y-0.5">
|
||||
{group.files.map((file) => {
|
||||
const fileCommentCount = comments.filter(
|
||||
@@ -315,6 +571,7 @@ function FilesView({
|
||||
return (
|
||||
<button
|
||||
key={file.newPath}
|
||||
data-testid="file-row"
|
||||
className={`
|
||||
flex w-full items-center gap-1.5 rounded py-1 text-left text-[11px]
|
||||
hover:bg-accent/50 transition-colors group
|
||||
@@ -358,9 +615,11 @@ function FilesView({
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
226
apps/web/src/components/review/ReviewTab.test.tsx
Normal file
226
apps/web/src/components/review/ReviewTab.test.tsx
Normal file
@@ -0,0 +1,226 @@
|
||||
// @vitest-environment happy-dom
|
||||
import "@testing-library/jest-dom/vitest";
|
||||
import { render, screen, act } from "@testing-library/react";
|
||||
import { vi, describe, it, expect, beforeEach } from "vitest";
|
||||
|
||||
// ── Capture props passed to stubs ─────────────────────────────────────────────
|
||||
// These are module-level so the vi.mock factories (which are hoisted) can close over them.
|
||||
let diffViewerProps: Record<string, unknown> = {};
|
||||
let reviewSidebarProps: Record<string, unknown> = {};
|
||||
|
||||
vi.mock("./DiffViewer", () => ({
|
||||
DiffViewer: (props: Record<string, unknown>) => {
|
||||
diffViewerProps = props;
|
||||
return <div data-testid="diff-viewer" />;
|
||||
},
|
||||
}));
|
||||
|
||||
vi.mock("./ReviewSidebar", () => ({
|
||||
ReviewSidebar: (props: Record<string, unknown>) => {
|
||||
reviewSidebarProps = props;
|
||||
return <div data-testid="review-sidebar" />;
|
||||
},
|
||||
}));
|
||||
|
||||
vi.mock("./ReviewHeader", () => ({
|
||||
ReviewHeader: (props: Record<string, unknown>) => (
|
||||
<div data-testid="review-header">
|
||||
{props.onExpandAll && (
|
||||
<button
|
||||
data-testid="expand-all-btn"
|
||||
onClick={props.onExpandAll as () => void}
|
||||
>
|
||||
Expand all
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
),
|
||||
}));
|
||||
|
||||
vi.mock("./InitiativeReview", () => ({
|
||||
InitiativeReview: () => <div data-testid="initiative-review" />,
|
||||
}));
|
||||
|
||||
vi.mock("./comment-index", () => ({
|
||||
buildCommentIndex: vi.fn(() => new Map()),
|
||||
}));
|
||||
|
||||
vi.mock("sonner", () => ({
|
||||
toast: { success: vi.fn(), error: vi.fn() },
|
||||
}));
|
||||
|
||||
// ── parseUnifiedDiff spy ───────────────────────────────────────────────────────
|
||||
const mockParseUnifiedDiff = vi.fn((_raw: string) => [
|
||||
{
|
||||
oldPath: "a.ts",
|
||||
newPath: "a.ts",
|
||||
status: "modified" as const,
|
||||
additions: 3,
|
||||
deletions: 1,
|
||||
hunks: [],
|
||||
},
|
||||
]);
|
||||
|
||||
vi.mock("./parse-diff", () => ({
|
||||
get parseUnifiedDiff() {
|
||||
return mockParseUnifiedDiff;
|
||||
},
|
||||
}));
|
||||
|
||||
// ── tRPC mock factory ─────────────────────────────────────────────────────────
|
||||
|
||||
const noopMutation = () => ({
|
||||
mutate: vi.fn(),
|
||||
isPending: false,
|
||||
});
|
||||
|
||||
const noopQuery = (data: unknown = undefined) => ({
|
||||
data,
|
||||
isLoading: false,
|
||||
isError: false,
|
||||
refetch: vi.fn(),
|
||||
});
|
||||
|
||||
const mockUtils = {
|
||||
listReviewComments: { invalidate: vi.fn() },
|
||||
};
|
||||
|
||||
// Server format (FileStatEntry): uses `path` not `newPath`
|
||||
const PHASE_FILES = [
|
||||
{
|
||||
path: "a.ts",
|
||||
status: "modified" as const,
|
||||
additions: 5,
|
||||
deletions: 2,
|
||||
},
|
||||
];
|
||||
|
||||
// trpcMock is a let so tests can override it. The getter in the mock reads the current value.
|
||||
let trpcMock = buildTrpcMock();
|
||||
|
||||
function buildTrpcMock(overrides: Record<string, unknown> = {}) {
|
||||
return {
|
||||
getInitiative: { useQuery: vi.fn(() => noopQuery({ status: "in_progress" })) },
|
||||
listPhases: {
|
||||
useQuery: vi.fn(() =>
|
||||
noopQuery([{ id: "phase-1", name: "Phase 1", status: "pending_review" }])
|
||||
),
|
||||
},
|
||||
getInitiativeProjects: { useQuery: vi.fn(() => noopQuery([{ id: "proj-1" }])) },
|
||||
getPhaseReviewDiff: {
|
||||
useQuery: vi.fn(() =>
|
||||
noopQuery({
|
||||
phaseName: "Phase 1",
|
||||
sourceBranch: "cw/phase-1",
|
||||
targetBranch: "main",
|
||||
files: PHASE_FILES,
|
||||
totalAdditions: 5,
|
||||
totalDeletions: 2,
|
||||
})
|
||||
),
|
||||
},
|
||||
getPhaseReviewCommits: {
|
||||
useQuery: vi.fn(() =>
|
||||
noopQuery({ commits: [], sourceBranch: "cw/phase-1", targetBranch: "main" })
|
||||
),
|
||||
},
|
||||
getCommitDiff: {
|
||||
useQuery: vi.fn(() => noopQuery({ rawDiff: "" })),
|
||||
},
|
||||
listPreviews: { useQuery: vi.fn(() => noopQuery([])) },
|
||||
getPreviewStatus: { useQuery: vi.fn(() => noopQuery(null)) },
|
||||
listReviewComments: { useQuery: vi.fn(() => noopQuery([])) },
|
||||
startPreview: { useMutation: vi.fn(() => noopMutation()) },
|
||||
stopPreview: { useMutation: vi.fn(() => noopMutation()) },
|
||||
createReviewComment: { useMutation: vi.fn(() => noopMutation()) },
|
||||
resolveReviewComment: { useMutation: vi.fn(() => noopMutation()) },
|
||||
unresolveReviewComment: { useMutation: vi.fn(() => noopMutation()) },
|
||||
replyToReviewComment: { useMutation: vi.fn(() => noopMutation()) },
|
||||
updateReviewComment: { useMutation: vi.fn(() => noopMutation()) },
|
||||
approvePhaseReview: { useMutation: vi.fn(() => noopMutation()) },
|
||||
requestPhaseChanges: { useMutation: vi.fn(() => noopMutation()) },
|
||||
useUtils: vi.fn(() => mockUtils),
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
vi.mock("@/lib/trpc", () => ({
|
||||
get trpc() {
|
||||
return trpcMock;
|
||||
},
|
||||
}));
|
||||
|
||||
// ── Import component after mocks ──────────────────────────────────────────────
|
||||
import { ReviewTab } from "./ReviewTab";
|
||||
|
||||
// ── Tests ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
describe("ReviewTab", () => {
|
||||
beforeEach(() => {
|
||||
diffViewerProps = {};
|
||||
reviewSidebarProps = {};
|
||||
mockParseUnifiedDiff.mockClear();
|
||||
trpcMock = buildTrpcMock();
|
||||
});
|
||||
|
||||
it("1. phase diff loads metadata: DiffViewer receives files array and commitMode=false", () => {
|
||||
render(<ReviewTab initiativeId="init-1" />);
|
||||
|
||||
expect(screen.getByTestId("diff-viewer")).toBeInTheDocument();
|
||||
const files = diffViewerProps.files as unknown[];
|
||||
expect(files).toHaveLength(1);
|
||||
expect(diffViewerProps.commitMode).toBe(false);
|
||||
});
|
||||
|
||||
it("2. no rawDiff parsing in phase mode: parseUnifiedDiff is NOT called", () => {
|
||||
render(<ReviewTab initiativeId="init-1" />);
|
||||
|
||||
expect(mockParseUnifiedDiff).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("3. commit view parses rawDiff: parseUnifiedDiff called and DiffViewer gets commitMode=true", async () => {
|
||||
trpcMock = buildTrpcMock({
|
||||
getCommitDiff: {
|
||||
useQuery: vi.fn(() =>
|
||||
noopQuery({ rawDiff: "diff --git a/a.ts b/a.ts\nindex 000..111 100644\n--- a/a.ts\n+++ b/a.ts\n@@ -1,1 +1,1 @@\n-old\n+new\n" })
|
||||
),
|
||||
},
|
||||
});
|
||||
|
||||
render(<ReviewTab initiativeId="init-1" />);
|
||||
|
||||
// Select a commit via the sidebar stub's onSelectCommit prop
|
||||
const { onSelectCommit } = reviewSidebarProps as {
|
||||
onSelectCommit: (hash: string | null) => void;
|
||||
};
|
||||
|
||||
await act(async () => {
|
||||
onSelectCommit("abc123");
|
||||
});
|
||||
|
||||
expect(diffViewerProps.commitMode).toBe(true);
|
||||
expect(mockParseUnifiedDiff).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("4. allFiles uses metadata for sidebar: ReviewSidebar receives files from diffQuery.data.files", () => {
|
||||
render(<ReviewTab initiativeId="init-1" />);
|
||||
|
||||
const sidebarFiles = reviewSidebarProps.files as Array<{ newPath: string }>;
|
||||
expect(sidebarFiles).toHaveLength(1);
|
||||
expect(sidebarFiles[0].newPath).toBe("a.ts");
|
||||
});
|
||||
|
||||
it("5. expandAll prop passed: clicking Expand all button causes DiffViewer to receive expandAll=true", async () => {
|
||||
render(<ReviewTab initiativeId="init-1" />);
|
||||
|
||||
// Before clicking, expandAll should be false
|
||||
expect(diffViewerProps.expandAll).toBe(false);
|
||||
|
||||
const expandBtn = screen.getByTestId("expand-all-btn");
|
||||
await act(async () => {
|
||||
expandBtn.click();
|
||||
});
|
||||
|
||||
expect(diffViewerProps.expandAll).toBe(true);
|
||||
});
|
||||
});
|
||||
@@ -7,7 +7,8 @@ import { DiffViewer } from "./DiffViewer";
|
||||
import { ReviewSidebar } from "./ReviewSidebar";
|
||||
import { ReviewHeader } from "./ReviewHeader";
|
||||
import { InitiativeReview } from "./InitiativeReview";
|
||||
import type { ReviewStatus, DiffLine } from "./types";
|
||||
import { buildCommentIndex } from "./comment-index";
|
||||
import type { ReviewStatus, DiffLine, FileDiff, FileDiffDetail } from "./types";
|
||||
|
||||
interface ReviewTabProps {
|
||||
initiativeId: string;
|
||||
@@ -17,6 +18,7 @@ export function ReviewTab({ initiativeId }: ReviewTabProps) {
|
||||
const [status, setStatus] = useState<ReviewStatus>("pending");
|
||||
const [selectedCommit, setSelectedCommit] = useState<string | null>(null);
|
||||
const [viewedFiles, setViewedFiles] = useState<Set<string>>(new Set());
|
||||
const [expandAll, setExpandAll] = useState(false);
|
||||
const fileRefs = useRef<Map<string, HTMLDivElement>>(new Map());
|
||||
const headerRef = useRef<HTMLDivElement>(null);
|
||||
const [headerHeight, setHeaderHeight] = useState(0);
|
||||
@@ -73,7 +75,7 @@ export function ReviewTab({ initiativeId }: ReviewTabProps) {
|
||||
const projectsQuery = trpc.getInitiativeProjects.useQuery({ initiativeId });
|
||||
const firstProjectId = projectsQuery.data?.[0]?.id ?? null;
|
||||
|
||||
// Fetch full branch diff for active phase
|
||||
// Fetch full branch diff for active phase (metadata only, no rawDiff)
|
||||
const diffQuery = trpc.getPhaseReviewDiff.useQuery(
|
||||
{ phaseId: activePhaseId! },
|
||||
{ enabled: !!activePhaseId && !isInitiativePendingReview },
|
||||
@@ -95,7 +97,7 @@ export function ReviewTab({ initiativeId }: ReviewTabProps) {
|
||||
// Preview state
|
||||
const previewsQuery = trpc.listPreviews.useQuery({ initiativeId });
|
||||
const existingPreview = previewsQuery.data?.find(
|
||||
(p) => p.phaseId === activePhaseId || p.initiativeId === initiativeId,
|
||||
(p: { phaseId?: string; initiativeId?: string }) => p.phaseId === activePhaseId || p.initiativeId === initiativeId,
|
||||
);
|
||||
const [activePreviewId, setActivePreviewId] = useState<string | null>(null);
|
||||
const previewStatusQuery = trpc.getPreviewStatus.useQuery(
|
||||
@@ -106,12 +108,12 @@ export function ReviewTab({ initiativeId }: ReviewTabProps) {
|
||||
const sourceBranch = diffQuery.data?.sourceBranch ?? commitsQuery.data?.sourceBranch ?? "";
|
||||
|
||||
const startPreview = trpc.startPreview.useMutation({
|
||||
onSuccess: (data) => {
|
||||
onSuccess: (data: { id: string; url: string }) => {
|
||||
setActivePreviewId(data.id);
|
||||
previewsQuery.refetch();
|
||||
toast.success(`Preview running at ${data.url}`);
|
||||
},
|
||||
onError: (err) => toast.error(`Preview failed: ${err.message}`),
|
||||
onError: (err: { message: string }) => toast.error(`Preview failed: ${err.message}`),
|
||||
});
|
||||
|
||||
const stopPreview = trpc.stopPreview.useMutation({
|
||||
@@ -120,7 +122,7 @@ export function ReviewTab({ initiativeId }: ReviewTabProps) {
|
||||
toast.success("Preview stopped");
|
||||
previewsQuery.refetch();
|
||||
},
|
||||
onError: (err) => toast.error(`Failed to stop: ${err.message}`),
|
||||
onError: (err: { message: string }) => toast.error(`Failed to stop: ${err.message}`),
|
||||
});
|
||||
|
||||
const previewState = firstProjectId && sourceBranch
|
||||
@@ -156,7 +158,17 @@ export function ReviewTab({ initiativeId }: ReviewTabProps) {
|
||||
{ enabled: !!activePhaseId && !isInitiativePendingReview },
|
||||
);
|
||||
const comments = useMemo(() => {
|
||||
return (commentsQuery.data ?? []).map((c) => ({
|
||||
return (commentsQuery.data ?? []).map((c: {
|
||||
id: string;
|
||||
filePath: string;
|
||||
lineNumber: number | null;
|
||||
lineType: string;
|
||||
body: string;
|
||||
author: string;
|
||||
createdAt: string | number;
|
||||
resolved: boolean;
|
||||
parentCommentId?: string | null;
|
||||
}) => ({
|
||||
id: c.id,
|
||||
filePath: c.filePath,
|
||||
lineNumber: c.lineNumber,
|
||||
@@ -169,11 +181,16 @@ export function ReviewTab({ initiativeId }: ReviewTabProps) {
|
||||
}));
|
||||
}, [commentsQuery.data]);
|
||||
|
||||
const commentsByLine = useMemo(
|
||||
() => buildCommentIndex(comments),
|
||||
[comments],
|
||||
);
|
||||
|
||||
const createCommentMutation = trpc.createReviewComment.useMutation({
|
||||
onSuccess: () => {
|
||||
utils.listReviewComments.invalidate({ phaseId: activePhaseId! });
|
||||
},
|
||||
onError: (err) => toast.error(`Failed to save comment: ${err.message}`),
|
||||
onError: (err: { message: string }) => toast.error(`Failed to save comment: ${err.message}`),
|
||||
});
|
||||
|
||||
const resolveCommentMutation = trpc.resolveReviewComment.useMutation({
|
||||
@@ -192,14 +209,14 @@ export function ReviewTab({ initiativeId }: ReviewTabProps) {
|
||||
onSuccess: () => {
|
||||
utils.listReviewComments.invalidate({ phaseId: activePhaseId! });
|
||||
},
|
||||
onError: (err) => toast.error(`Failed to post reply: ${err.message}`),
|
||||
onError: (err: { message: string }) => toast.error(`Failed to post reply: ${err.message}`),
|
||||
});
|
||||
|
||||
const editCommentMutation = trpc.updateReviewComment.useMutation({
|
||||
onSuccess: () => {
|
||||
utils.listReviewComments.invalidate({ phaseId: activePhaseId! });
|
||||
},
|
||||
onError: (err) => toast.error(`Failed to update comment: ${err.message}`),
|
||||
onError: (err: { message: string }) => toast.error(`Failed to update comment: ${err.message}`),
|
||||
});
|
||||
|
||||
const approveMutation = trpc.approvePhaseReview.useMutation({
|
||||
@@ -208,23 +225,48 @@ export function ReviewTab({ initiativeId }: ReviewTabProps) {
|
||||
toast.success("Phase approved and merged");
|
||||
phasesQuery.refetch();
|
||||
},
|
||||
onError: (err) => toast.error(err.message),
|
||||
onError: (err: { message: string }) => toast.error(err.message),
|
||||
});
|
||||
|
||||
// Determine which diff to display
|
||||
const activeDiffRaw = selectedCommit
|
||||
? commitDiffQuery.data?.rawDiff
|
||||
: diffQuery.data?.rawDiff;
|
||||
// Phase branch diff — metadata only, no parsing
|
||||
const phaseFiles: FileDiff[] = useMemo(
|
||||
() => {
|
||||
const serverFiles = diffQuery.data?.files ?? [];
|
||||
// Map server FileStatEntry (path) to frontend FileDiff (newPath)
|
||||
return serverFiles.map((f: {
|
||||
path: string;
|
||||
oldPath?: string;
|
||||
status: FileDiff['status'];
|
||||
additions: number;
|
||||
deletions: number;
|
||||
projectId?: string;
|
||||
}) => ({
|
||||
newPath: f.path,
|
||||
oldPath: f.oldPath ?? f.path,
|
||||
status: f.status,
|
||||
additions: f.additions,
|
||||
deletions: f.deletions,
|
||||
projectId: f.projectId,
|
||||
}));
|
||||
},
|
||||
[diffQuery.data?.files],
|
||||
);
|
||||
|
||||
const files = useMemo(() => {
|
||||
if (!activeDiffRaw) return [];
|
||||
return parseUnifiedDiff(activeDiffRaw);
|
||||
}, [activeDiffRaw]);
|
||||
// Commit diff — still raw, parse client-side
|
||||
const commitFiles: FileDiffDetail[] = useMemo(() => {
|
||||
if (!commitDiffQuery.data?.rawDiff) return [];
|
||||
return parseUnifiedDiff(commitDiffQuery.data.rawDiff);
|
||||
}, [commitDiffQuery.data?.rawDiff]);
|
||||
|
||||
const isDiffLoading = selectedCommit
|
||||
? commitDiffQuery.isLoading
|
||||
: diffQuery.isLoading;
|
||||
|
||||
// All files for sidebar — always from phase metadata
|
||||
const allFiles = phaseFiles;
|
||||
|
||||
const activeFiles: FileDiff[] | FileDiffDetail[] = selectedCommit ? commitFiles : phaseFiles;
|
||||
|
||||
const handleAddComment = useCallback(
|
||||
(filePath: string, lineNumber: number, lineType: DiffLine["type"], body: string) => {
|
||||
if (!activePhaseId) return;
|
||||
@@ -267,7 +309,7 @@ export function ReviewTab({ initiativeId }: ReviewTabProps) {
|
||||
toast.success("Changes requested — revision task dispatched");
|
||||
phasesQuery.refetch();
|
||||
},
|
||||
onError: (err) => toast.error(err.message),
|
||||
onError: (err: { message: string }) => toast.error(err.message),
|
||||
});
|
||||
|
||||
const handleRequestChanges = useCallback(() => {
|
||||
@@ -297,6 +339,11 @@ export function ReviewTab({ initiativeId }: ReviewTabProps) {
|
||||
setSelectedCommit(null);
|
||||
setStatus("pending");
|
||||
setViewedFiles(new Set());
|
||||
setExpandAll(false);
|
||||
}, []);
|
||||
|
||||
const handleExpandAll = useCallback(() => {
|
||||
setExpandAll(v => !v);
|
||||
}, []);
|
||||
|
||||
const unresolvedCount = comments.filter((c) => !c.resolved && !c.parentCommentId).length;
|
||||
@@ -306,12 +353,6 @@ export function ReviewTab({ initiativeId }: ReviewTabProps) {
|
||||
reviewablePhases.find((p) => p.id === activePhaseId)?.name ??
|
||||
"Phase";
|
||||
|
||||
// All files from the full branch diff (for sidebar file list)
|
||||
const allFiles = useMemo(() => {
|
||||
if (!diffQuery.data?.rawDiff) return [];
|
||||
return parseUnifiedDiff(diffQuery.data.rawDiff);
|
||||
}, [diffQuery.data?.rawDiff]);
|
||||
|
||||
// Initiative-level review takes priority
|
||||
if (isInitiativePendingReview) {
|
||||
return (
|
||||
@@ -357,6 +398,9 @@ export function ReviewTab({ initiativeId }: ReviewTabProps) {
|
||||
preview={previewState}
|
||||
viewedCount={viewedFiles.size}
|
||||
totalCount={allFiles.length}
|
||||
totalAdditions={selectedCommit ? undefined : diffQuery.data?.totalAdditions}
|
||||
totalDeletions={selectedCommit ? undefined : diffQuery.data?.totalDeletions}
|
||||
onExpandAll={handleExpandAll}
|
||||
/>
|
||||
|
||||
{/* Main content area — sidebar always rendered to preserve state */}
|
||||
@@ -376,7 +420,7 @@ export function ReviewTab({ initiativeId }: ReviewTabProps) {
|
||||
onFileClick={handleFileClick}
|
||||
onCommentClick={handleCommentClick}
|
||||
selectedCommit={selectedCommit}
|
||||
activeFiles={files}
|
||||
activeFiles={activeFiles}
|
||||
commits={commits}
|
||||
onSelectCommit={setSelectedCommit}
|
||||
viewedFiles={viewedFiles}
|
||||
@@ -391,7 +435,7 @@ export function ReviewTab({ initiativeId }: ReviewTabProps) {
|
||||
<Loader2 className="h-4 w-4 animate-spin" />
|
||||
Loading diff...
|
||||
</div>
|
||||
) : files.length === 0 ? (
|
||||
) : activeFiles.length === 0 ? (
|
||||
<div className="flex h-32 items-center justify-center text-muted-foreground text-sm">
|
||||
{selectedCommit
|
||||
? "No changes in this commit"
|
||||
@@ -399,8 +443,10 @@ export function ReviewTab({ initiativeId }: ReviewTabProps) {
|
||||
</div>
|
||||
) : (
|
||||
<DiffViewer
|
||||
files={files}
|
||||
comments={comments}
|
||||
files={activeFiles}
|
||||
phaseId={activePhaseId!}
|
||||
commitMode={!!selectedCommit}
|
||||
commentsByLine={commentsByLine}
|
||||
onAddComment={handleAddComment}
|
||||
onResolveComment={handleResolveComment}
|
||||
onUnresolveComment={handleUnresolveComment}
|
||||
@@ -409,6 +455,7 @@ export function ReviewTab({ initiativeId }: ReviewTabProps) {
|
||||
viewedFiles={viewedFiles}
|
||||
onToggleViewed={toggleViewed}
|
||||
onRegisterRef={registerFileRef}
|
||||
expandAll={expandAll}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
|
||||
134
apps/web/src/components/review/comment-index.test.tsx
Normal file
134
apps/web/src/components/review/comment-index.test.tsx
Normal file
@@ -0,0 +1,134 @@
|
||||
// @vitest-environment happy-dom
|
||||
import "@testing-library/jest-dom/vitest";
|
||||
import { render, screen } from "@testing-library/react";
|
||||
import { vi, describe, it, expect } from "vitest";
|
||||
import { buildCommentIndex } from "./comment-index";
|
||||
import type { ReviewComment } from "./types";
|
||||
|
||||
// ── Stub CommentThread and CommentForm so LineWithComments renders without deps ──
|
||||
vi.mock("./CommentThread", () => ({
|
||||
CommentThread: () => <div data-testid="comment-thread" />,
|
||||
}));
|
||||
vi.mock("./CommentForm", () => ({
|
||||
CommentForm: vi.fn().mockReturnValue(<div data-testid="comment-form" />),
|
||||
}));
|
||||
vi.mock("./use-syntax-highlight", () => ({
|
||||
useHighlightedFile: () => null,
|
||||
}));
|
||||
|
||||
// ── Helpers ──────────────────────────────────────────────────────────────────
|
||||
|
||||
function makeComment(overrides: Partial<ReviewComment> & { id: string }): ReviewComment {
|
||||
return {
|
||||
id: overrides.id,
|
||||
filePath: overrides.filePath ?? "src/foo.ts",
|
||||
lineNumber: overrides.lineNumber !== undefined ? overrides.lineNumber : 1,
|
||||
lineType: overrides.lineType ?? "added",
|
||||
body: overrides.body ?? "comment body",
|
||||
author: overrides.author ?? "alice",
|
||||
createdAt: overrides.createdAt ?? "2024-01-01T00:00:00Z",
|
||||
resolved: overrides.resolved ?? false,
|
||||
parentCommentId: overrides.parentCommentId ?? null,
|
||||
};
|
||||
}
|
||||
|
||||
// ── buildCommentIndex — pure function tests ───────────────────────────────────
|
||||
|
||||
describe("buildCommentIndex", () => {
|
||||
it("happy path — basic indexing", () => {
|
||||
const c1 = makeComment({ id: "1", filePath: "src/foo.ts", lineNumber: 10, lineType: "added" });
|
||||
const c2 = makeComment({ id: "2", filePath: "src/bar.ts", lineNumber: 5, lineType: "context" });
|
||||
const map = buildCommentIndex([c1, c2]);
|
||||
expect(map.get("src/foo.ts:10:added")).toEqual([c1]);
|
||||
expect(map.get("src/bar.ts:5:context")).toEqual([c2]);
|
||||
expect(map.size).toBe(2);
|
||||
});
|
||||
|
||||
it("same-line accumulation — two comments land in same array", () => {
|
||||
const c1 = makeComment({ id: "a", filePath: "src/x.ts", lineNumber: 20, lineType: "added" });
|
||||
const c2 = makeComment({ id: "b", filePath: "src/x.ts", lineNumber: 20, lineType: "added" });
|
||||
const map = buildCommentIndex([c1, c2]);
|
||||
expect(map.get("src/x.ts:20:added")).toEqual([c1, c2]);
|
||||
expect(map.size).toBe(1);
|
||||
});
|
||||
|
||||
it("cross-type isolation — same lineNumber but different lineType produces separate entries", () => {
|
||||
const added = makeComment({ id: "a", filePath: "src/x.ts", lineNumber: 10, lineType: "added" });
|
||||
const removed = makeComment({ id: "r", filePath: "src/x.ts", lineNumber: 10, lineType: "removed" });
|
||||
const map = buildCommentIndex([added, removed]);
|
||||
expect(map.get("src/x.ts:10:added")).toEqual([added]);
|
||||
expect(map.get("src/x.ts:10:removed")).toEqual([removed]);
|
||||
expect(map.size).toBe(2);
|
||||
});
|
||||
|
||||
it("null lineNumber — file-level comment stored under filePath:file", () => {
|
||||
const fileComment = makeComment({ id: "f", filePath: "src/z.ts", lineNumber: null, lineType: "context" });
|
||||
const map = buildCommentIndex([fileComment]);
|
||||
expect(map.get("src/z.ts:file")).toEqual([fileComment]);
|
||||
});
|
||||
|
||||
it("empty input — returns empty map", () => {
|
||||
expect(buildCommentIndex([])).toEqual(new Map());
|
||||
});
|
||||
});
|
||||
|
||||
// ── LineWithComments — component tests ───────────────────────────────────────
|
||||
|
||||
import { LineWithComments } from "./LineWithComments";
|
||||
import type { DiffLine } from "./types";
|
||||
|
||||
const addedLine: DiffLine = {
|
||||
type: "added",
|
||||
content: "const x = 1;",
|
||||
oldLineNumber: null,
|
||||
newLineNumber: 5,
|
||||
};
|
||||
|
||||
const noop = () => {};
|
||||
|
||||
describe("LineWithComments", () => {
|
||||
it("renders comment button with title when lineComments is non-empty", () => {
|
||||
const lineComments = [
|
||||
makeComment({ id: "c1", filePath: "src/foo.ts", lineNumber: 5, lineType: "added" }),
|
||||
];
|
||||
render(
|
||||
<table>
|
||||
<tbody>
|
||||
<LineWithComments
|
||||
line={addedLine}
|
||||
lineKey={5}
|
||||
lineComments={lineComments}
|
||||
isCommenting={false}
|
||||
onStartComment={noop}
|
||||
onCancelComment={noop}
|
||||
onSubmitComment={noop}
|
||||
onResolveComment={noop}
|
||||
onUnresolveComment={noop}
|
||||
/>
|
||||
</tbody>
|
||||
</table>,
|
||||
);
|
||||
expect(screen.getByTitle(/1 comment/)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it("does not render comment thread row when lineComments is empty", () => {
|
||||
render(
|
||||
<table>
|
||||
<tbody>
|
||||
<LineWithComments
|
||||
line={addedLine}
|
||||
lineKey={5}
|
||||
lineComments={[]}
|
||||
isCommenting={false}
|
||||
onStartComment={noop}
|
||||
onCancelComment={noop}
|
||||
onSubmitComment={noop}
|
||||
onResolveComment={noop}
|
||||
onUnresolveComment={noop}
|
||||
/>
|
||||
</tbody>
|
||||
</table>,
|
||||
);
|
||||
expect(document.querySelector("[data-comment-id]")).toBeNull();
|
||||
});
|
||||
});
|
||||
25
apps/web/src/components/review/comment-index.ts
Normal file
25
apps/web/src/components/review/comment-index.ts
Normal file
@@ -0,0 +1,25 @@
|
||||
import type { ReviewComment } from "./types";
|
||||
|
||||
/**
|
||||
* Build a Map keyed by `"${filePath}:${lineNumber}:${lineType}"` for line-level
|
||||
* comments, or `"${filePath}:file"` for file-level comments (lineNumber === null).
|
||||
*
|
||||
* The compound key (filePath + lineNumber + lineType) is required because
|
||||
* added and removed lines can share the same numeric position in a replacement
|
||||
* hunk (e.g., old line 10 removed, new line 10 added).
|
||||
*/
|
||||
export function buildCommentIndex(
|
||||
comments: ReviewComment[],
|
||||
): Map<string, ReviewComment[]> {
|
||||
const map = new Map<string, ReviewComment[]>();
|
||||
for (const comment of comments) {
|
||||
const key =
|
||||
comment.lineNumber != null
|
||||
? `${comment.filePath}:${comment.lineNumber}:${comment.lineType}`
|
||||
: `${comment.filePath}:file`;
|
||||
const existing = map.get(key);
|
||||
if (existing) existing.push(comment);
|
||||
else map.set(key, [comment]);
|
||||
}
|
||||
return map;
|
||||
}
|
||||
39
apps/web/src/components/review/highlight-worker.ts
Normal file
39
apps/web/src/components/review/highlight-worker.ts
Normal file
@@ -0,0 +1,39 @@
|
||||
import type { ThemedToken } from 'shiki';
|
||||
|
||||
export interface HighlightRequest {
|
||||
id: string;
|
||||
filePath: string;
|
||||
language: string; // resolved lang name (e.g. "typescript") or "text"
|
||||
code: string; // full joined content of new-side lines to highlight
|
||||
lineNumbers: number[]; // new-side line numbers to map tokens back to
|
||||
}
|
||||
|
||||
export interface HighlightResponse {
|
||||
id: string;
|
||||
tokens: Array<{ lineNumber: number; tokens: ThemedToken[] }>;
|
||||
error?: string;
|
||||
}
|
||||
|
||||
self.addEventListener('message', async (event: MessageEvent<HighlightRequest>) => {
|
||||
const { id, language, code, lineNumbers } = event.data;
|
||||
try {
|
||||
const { codeToTokens } = await import('shiki');
|
||||
const result = await codeToTokens(code, {
|
||||
lang: language as Parameters<typeof codeToTokens>[1]['lang'],
|
||||
theme: 'github-dark-default',
|
||||
});
|
||||
const tokens: HighlightResponse['tokens'] = result.tokens.map((lineTokens, idx) => ({
|
||||
lineNumber: lineNumbers[idx] ?? idx,
|
||||
tokens: lineTokens,
|
||||
}));
|
||||
const response: HighlightResponse = { id, tokens };
|
||||
self.postMessage(response);
|
||||
} catch (err) {
|
||||
const response: HighlightResponse = {
|
||||
id,
|
||||
tokens: [],
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
};
|
||||
self.postMessage(response);
|
||||
}
|
||||
});
|
||||
@@ -1,10 +1,10 @@
|
||||
import type { FileDiff, FileChangeType, DiffHunk, DiffLine } from "./types";
|
||||
import type { FileDiffDetail, FileDiff, DiffHunk, DiffLine } from "./types";
|
||||
|
||||
/**
|
||||
* Parse a unified diff string into structured FileDiff objects.
|
||||
* Parse a unified diff string into structured FileDiffDetail objects.
|
||||
*/
|
||||
export function parseUnifiedDiff(raw: string): FileDiff[] {
|
||||
const files: FileDiff[] = [];
|
||||
export function parseUnifiedDiff(raw: string): FileDiffDetail[] {
|
||||
const files: FileDiffDetail[] = [];
|
||||
const fileChunks = raw.split(/^diff --git /m).filter(Boolean);
|
||||
|
||||
for (const chunk of fileChunks) {
|
||||
@@ -90,19 +90,19 @@ export function parseUnifiedDiff(raw: string): FileDiff[] {
|
||||
hunks.push({ header, oldStart, oldCount, newStart, newCount, lines: hunkLines });
|
||||
}
|
||||
|
||||
// Derive changeType from header markers and path comparison
|
||||
let changeType: FileChangeType;
|
||||
// Derive status from header markers and path comparison
|
||||
let status: FileDiff['status'];
|
||||
if (hasOldDevNull) {
|
||||
changeType = "added";
|
||||
status = "added";
|
||||
} else if (hasNewDevNull) {
|
||||
changeType = "deleted";
|
||||
status = "deleted";
|
||||
} else if (oldPath !== newPath) {
|
||||
changeType = "renamed";
|
||||
status = "renamed";
|
||||
} else {
|
||||
changeType = "modified";
|
||||
status = "modified";
|
||||
}
|
||||
|
||||
files.push({ oldPath, newPath, hunks, additions, deletions, changeType });
|
||||
files.push({ oldPath, newPath, hunks, additions, deletions, status });
|
||||
}
|
||||
|
||||
return files;
|
||||
|
||||
29
apps/web/src/components/review/types.test.ts
Normal file
29
apps/web/src/components/review/types.test.ts
Normal file
@@ -0,0 +1,29 @@
|
||||
// @vitest-environment happy-dom
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import type { FileDiff, FileDiffDetail } from './types';
|
||||
|
||||
describe('FileDiff types', () => {
|
||||
it('FileDiff accepts binary status', () => {
|
||||
const f: FileDiff = {
|
||||
oldPath: 'a.png',
|
||||
newPath: 'a.png',
|
||||
status: 'binary',
|
||||
additions: 0,
|
||||
deletions: 0,
|
||||
};
|
||||
expect(f.status).toBe('binary');
|
||||
});
|
||||
|
||||
it('FileDiffDetail extends FileDiff with hunks', () => {
|
||||
const d: FileDiffDetail = {
|
||||
oldPath: 'a.ts',
|
||||
newPath: 'a.ts',
|
||||
status: 'modified',
|
||||
additions: 5,
|
||||
deletions: 2,
|
||||
hunks: [],
|
||||
};
|
||||
expect(d.hunks).toEqual([]);
|
||||
expect(d.additions).toBe(5);
|
||||
});
|
||||
});
|
||||
@@ -14,21 +14,26 @@ export interface DiffLine {
|
||||
newLineNumber: number | null;
|
||||
}
|
||||
|
||||
export type FileChangeType = 'added' | 'modified' | 'deleted' | 'renamed';
|
||||
|
||||
/** Metadata returned by getPhaseReviewDiff — no hunk content */
|
||||
export interface FileDiff {
|
||||
oldPath: string;
|
||||
newPath: string;
|
||||
hunks: DiffHunk[];
|
||||
/** 'binary' is new — prior changeType used FileChangeType which had no 'binary' */
|
||||
status: 'added' | 'modified' | 'deleted' | 'renamed' | 'binary';
|
||||
additions: number;
|
||||
deletions: number;
|
||||
changeType: FileChangeType;
|
||||
projectId?: string; // present in multi-project initiatives
|
||||
}
|
||||
|
||||
/** Full diff with parsed hunks — returned by getFileDiff, parsed client-side */
|
||||
export interface FileDiffDetail extends FileDiff {
|
||||
hunks: DiffHunk[];
|
||||
}
|
||||
|
||||
export interface ReviewComment {
|
||||
id: string;
|
||||
filePath: string;
|
||||
lineNumber: number; // new-side line number (or old-side for deletions)
|
||||
lineNumber: number | null; // null = file-level comment
|
||||
lineType: "added" | "removed" | "context";
|
||||
body: string;
|
||||
author: string;
|
||||
|
||||
@@ -0,0 +1,131 @@
|
||||
// @vitest-environment happy-dom
|
||||
// This file tests the chunked main-thread fallback path when Worker
|
||||
// construction is blocked (e.g. by CSP). It runs in isolation from the
|
||||
// worker-path tests so that module-level state (workersInitialized, workers)
|
||||
// starts clean.
|
||||
import '@testing-library/jest-dom/vitest'
|
||||
import { renderHook, waitFor } from '@testing-library/react'
|
||||
import { vi, describe, it, expect, beforeAll, afterAll } from 'vitest'
|
||||
|
||||
const MOCK_TOKEN_A = { content: 'const', color: '#569cd6', offset: 0 }
|
||||
const MOCK_TOKEN_B = { content: 'x', color: '#9cdcfe', offset: 0 }
|
||||
|
||||
// Mock shiki's createHighlighter for the fallback path
|
||||
const mockCodeToTokens = vi.fn()
|
||||
|
||||
vi.mock('shiki', () => ({
|
||||
createHighlighter: vi.fn().mockResolvedValue({
|
||||
codeToTokens: mockCodeToTokens,
|
||||
}),
|
||||
}))
|
||||
|
||||
// Stub Worker to throw (simulating CSP) BEFORE the hook module is loaded.
|
||||
// initWorkers() catches the exception and leaves workers = [].
|
||||
beforeAll(() => {
|
||||
// Use a class so Vitest doesn't warn about constructing vi.fn() without a class impl
|
||||
class BlockedWorker {
|
||||
constructor() {
|
||||
throw new Error('CSP blocks workers')
|
||||
}
|
||||
}
|
||||
vi.stubGlobal('Worker', BlockedWorker)
|
||||
|
||||
mockCodeToTokens.mockReturnValue({
|
||||
tokens: [[MOCK_TOKEN_A], [MOCK_TOKEN_B]],
|
||||
})
|
||||
})
|
||||
|
||||
afterAll(() => {
|
||||
vi.unstubAllGlobals()
|
||||
})
|
||||
|
||||
// Dynamic import ensures this file's module instance is fresh (workersInitialized = false).
|
||||
// We import inside tests below rather than at the top level.
|
||||
|
||||
describe('useHighlightedFile — fallback path (Worker unavailable)', () => {
|
||||
it('falls back to chunked main-thread highlighting when Worker construction throws', async () => {
|
||||
const { useHighlightedFile } = await import('./use-syntax-highlight')
|
||||
|
||||
const lines = [
|
||||
{ content: 'const x = 1', newLineNumber: 1, type: 'added' as const },
|
||||
{ content: 'let y = 2', newLineNumber: 2, type: 'context' as const },
|
||||
]
|
||||
|
||||
const { result } = renderHook(() => useHighlightedFile('app.ts', lines))
|
||||
|
||||
// Initially null while chunked highlighting runs
|
||||
expect(result.current).toBeNull()
|
||||
|
||||
// Fallback createHighlighter path eventually resolves tokens
|
||||
await waitFor(
|
||||
() => {
|
||||
expect(result.current).not.toBeNull()
|
||||
},
|
||||
{ timeout: 5000 },
|
||||
)
|
||||
|
||||
expect(result.current?.get(1)).toEqual([MOCK_TOKEN_A])
|
||||
expect(result.current?.get(2)).toEqual([MOCK_TOKEN_B])
|
||||
})
|
||||
|
||||
it('returns a complete token map with no lines missing for ≤200-line input (single-chunk equivalence)', async () => {
|
||||
const { useHighlightedFile } = await import('./use-syntax-highlight')
|
||||
|
||||
// 5 lines — well within the 200-line chunk size, so a single codeToTokens call handles all
|
||||
const MOCK_TOKENS = [
|
||||
[{ content: 'line1', color: '#fff', offset: 0 }],
|
||||
[{ content: 'line2', color: '#fff', offset: 0 }],
|
||||
[{ content: 'line3', color: '#fff', offset: 0 }],
|
||||
[{ content: 'line4', color: '#fff', offset: 0 }],
|
||||
[{ content: 'line5', color: '#fff', offset: 0 }],
|
||||
]
|
||||
mockCodeToTokens.mockReturnValueOnce({ tokens: MOCK_TOKENS })
|
||||
|
||||
const lines = [1, 2, 3, 4, 5].map((n) => ({
|
||||
content: `line${n}`,
|
||||
newLineNumber: n,
|
||||
type: 'context' as const,
|
||||
}))
|
||||
|
||||
const { result } = renderHook(() => useHighlightedFile('src/bar.ts', lines))
|
||||
|
||||
await waitFor(
|
||||
() => {
|
||||
expect(result.current).not.toBeNull()
|
||||
},
|
||||
{ timeout: 5000 },
|
||||
)
|
||||
|
||||
// All 5 line numbers must be present — no lines missing
|
||||
expect(result.current!.size).toBe(5)
|
||||
for (let n = 1; n <= 5; n++) {
|
||||
expect(result.current!.get(n)).toEqual(MOCK_TOKENS[n - 1])
|
||||
}
|
||||
})
|
||||
|
||||
it('calls AbortController.abort() when component unmounts during chunked fallback', async () => {
|
||||
const { useHighlightedFile } = await import('./use-syntax-highlight')
|
||||
|
||||
const abortSpy = vi.spyOn(AbortController.prototype, 'abort')
|
||||
|
||||
// Delay the mock so the hook is still in-flight when we unmount
|
||||
mockCodeToTokens.mockImplementationOnce(
|
||||
() =>
|
||||
new Promise((resolve) =>
|
||||
setTimeout(() => resolve({ tokens: [[MOCK_TOKEN_A]] }), 500),
|
||||
),
|
||||
)
|
||||
|
||||
const lines = [{ content: 'const x = 1', newLineNumber: 1, type: 'added' as const }]
|
||||
|
||||
const { unmount } = renderHook(() => useHighlightedFile('unmount.ts', lines))
|
||||
|
||||
// Unmount while the async chunked highlight is still pending
|
||||
unmount()
|
||||
|
||||
// The cleanup function calls abortController.abort()
|
||||
expect(abortSpy).toHaveBeenCalled()
|
||||
|
||||
abortSpy.mockRestore()
|
||||
})
|
||||
})
|
||||
240
apps/web/src/components/review/use-syntax-highlight.test.ts
Normal file
240
apps/web/src/components/review/use-syntax-highlight.test.ts
Normal file
@@ -0,0 +1,240 @@
|
||||
// @vitest-environment happy-dom
|
||||
import '@testing-library/jest-dom/vitest'
|
||||
import { renderHook, waitFor, act } from '@testing-library/react'
|
||||
import { vi, describe, it, expect, beforeAll, beforeEach, afterAll } from 'vitest'
|
||||
|
||||
// ── Worker mock infrastructure ─────────────────────────────────────────────
|
||||
//
|
||||
// We stub Worker BEFORE importing use-syntax-highlight so that initWorkers()
|
||||
// (called from useEffect on first render) picks up our mock.
|
||||
// Module-level state (workers, pending, workersInitialized) is shared across
|
||||
// all tests in this file — we control behaviour through the mock instances.
|
||||
|
||||
type WorkerHandler = (event: { data: unknown }) => void
|
||||
|
||||
class MockWorker {
|
||||
static instances: MockWorker[] = []
|
||||
|
||||
messageHandler: WorkerHandler | null = null
|
||||
postMessage = vi.fn()
|
||||
|
||||
constructor() {
|
||||
MockWorker.instances.push(this)
|
||||
}
|
||||
|
||||
addEventListener(type: string, handler: WorkerHandler) {
|
||||
if (type === 'message') this.messageHandler = handler
|
||||
}
|
||||
|
||||
/** Simulate a message arriving from the worker thread */
|
||||
simulateResponse(data: unknown) {
|
||||
this.messageHandler?.({ data })
|
||||
}
|
||||
}
|
||||
|
||||
// Stub Worker before the hook module is loaded.
|
||||
// initWorkers() is lazy (called inside useEffect), so the stub is in place
|
||||
// by the time any test renders a hook.
|
||||
beforeAll(() => {
|
||||
vi.stubGlobal('Worker', MockWorker)
|
||||
})
|
||||
|
||||
afterAll(() => {
|
||||
vi.unstubAllGlobals()
|
||||
})
|
||||
|
||||
beforeEach(() => {
|
||||
// Reset call history between tests; keep instances (pool is created once)
|
||||
MockWorker.instances.forEach((w) => w.postMessage.mockClear())
|
||||
})
|
||||
|
||||
// Import the hook AFTER the beforeAll stub is registered (hoisted evaluation
|
||||
// of the module will not call initWorkers() — that happens in useEffect).
|
||||
import { useHighlightedFile } from './use-syntax-highlight'
|
||||
|
||||
// ── Helpers ────────────────────────────────────────────────────────────────
|
||||
|
||||
const MOCK_TOKEN_A = { content: 'const', color: '#569cd6', offset: 0 }
|
||||
const MOCK_TOKEN_B = { content: 'x', color: '#9cdcfe', offset: 0 }
|
||||
|
||||
function makeLine(
|
||||
content: string,
|
||||
newLineNumber: number,
|
||||
type: 'added' | 'context' | 'removed' = 'added',
|
||||
) {
|
||||
return { content, newLineNumber, type } as const
|
||||
}
|
||||
|
||||
// ── Tests ──────────────────────────────────────────────────────────────────
|
||||
|
||||
describe('useHighlightedFile — worker path', () => {
|
||||
// ── Test 1: Correct message format ───────────────────────────────────────
|
||||
|
||||
it('posts a message to a worker with filePath, language, code, and lineNumbers', async () => {
|
||||
const lines = [
|
||||
makeLine('const x = 1', 1, 'added'),
|
||||
makeLine('const y = 2', 2, 'context'),
|
||||
]
|
||||
|
||||
renderHook(() => useHighlightedFile('src/index.ts', lines))
|
||||
|
||||
// Wait for initWorkers() to fire and postMessage to be called
|
||||
await waitFor(() => {
|
||||
const totalCalls = MockWorker.instances.reduce(
|
||||
(n, w) => n + w.postMessage.mock.calls.length,
|
||||
0,
|
||||
)
|
||||
expect(totalCalls).toBeGreaterThan(0)
|
||||
})
|
||||
|
||||
// Find which worker received the message
|
||||
const calledWorker = MockWorker.instances.find((w) => w.postMessage.mock.calls.length > 0)
|
||||
expect(calledWorker).toBeDefined()
|
||||
expect(calledWorker!.postMessage).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
filePath: 'src/index.ts',
|
||||
language: 'typescript',
|
||||
code: 'const x = 1\nconst y = 2',
|
||||
lineNumbers: [1, 2],
|
||||
}),
|
||||
)
|
||||
})
|
||||
|
||||
// ── Test 2: Response builds token map ─────────────────────────────────────
|
||||
|
||||
it('returns null initially and a LineTokenMap after worker responds', async () => {
|
||||
const lines = [makeLine('const x = 1', 10, 'added')]
|
||||
|
||||
const { result } = renderHook(() => useHighlightedFile('component.ts', lines))
|
||||
|
||||
// Immediately null while worker is pending
|
||||
expect(result.current).toBeNull()
|
||||
|
||||
// Capture the request id from whichever worker received it
|
||||
let requestId = ''
|
||||
let respondingWorker: MockWorker | undefined
|
||||
|
||||
await waitFor(() => {
|
||||
respondingWorker = MockWorker.instances.find((w) => w.postMessage.mock.calls.length > 0)
|
||||
expect(respondingWorker).toBeDefined()
|
||||
requestId = respondingWorker!.postMessage.mock.calls[0][0].id as string
|
||||
expect(requestId).not.toBe('')
|
||||
})
|
||||
|
||||
// Simulate the worker responding
|
||||
act(() => {
|
||||
respondingWorker!.simulateResponse({
|
||||
id: requestId,
|
||||
tokens: [{ lineNumber: 10, tokens: [MOCK_TOKEN_A] }],
|
||||
})
|
||||
})
|
||||
|
||||
await waitFor(() => {
|
||||
expect(result.current).not.toBeNull()
|
||||
expect(result.current?.get(10)).toEqual([MOCK_TOKEN_A])
|
||||
})
|
||||
})
|
||||
|
||||
// ── Test 3: Worker error response → null ──────────────────────────────────
|
||||
|
||||
it('returns null when worker responds with an error field', async () => {
|
||||
const lines = [makeLine('code here', 1, 'added')]
|
||||
|
||||
const { result } = renderHook(() => useHighlightedFile('bad.ts', lines))
|
||||
|
||||
let requestId = ''
|
||||
let respondingWorker: MockWorker | undefined
|
||||
|
||||
await waitFor(() => {
|
||||
respondingWorker = MockWorker.instances.find((w) => w.postMessage.mock.calls.length > 0)
|
||||
expect(respondingWorker).toBeDefined()
|
||||
requestId = respondingWorker!.postMessage.mock.calls[0][0].id as string
|
||||
})
|
||||
|
||||
act(() => {
|
||||
respondingWorker!.simulateResponse({
|
||||
id: requestId,
|
||||
tokens: [],
|
||||
error: 'Worker crashed',
|
||||
})
|
||||
})
|
||||
|
||||
// Error → stays null (plain text fallback in the UI)
|
||||
await new Promise<void>((r) => setTimeout(r, 20))
|
||||
expect(result.current).toBeNull()
|
||||
})
|
||||
|
||||
// ── Test 4: Unmount before response — no state update ────────────────────
|
||||
|
||||
it('silently discards a late worker response after unmount', async () => {
|
||||
const lines = [makeLine('const z = 3', 5, 'added')]
|
||||
|
||||
const { result, unmount } = renderHook(() => useHighlightedFile('late.ts', lines))
|
||||
|
||||
let requestId = ''
|
||||
let respondingWorker: MockWorker | undefined
|
||||
|
||||
await waitFor(() => {
|
||||
respondingWorker = MockWorker.instances.find((w) => w.postMessage.mock.calls.length > 0)
|
||||
expect(respondingWorker).toBeDefined()
|
||||
requestId = respondingWorker!.postMessage.mock.calls[0][0].id as string
|
||||
})
|
||||
|
||||
// Unmount before the response arrives
|
||||
unmount()
|
||||
|
||||
// Simulate the late response — should be silently dropped
|
||||
act(() => {
|
||||
respondingWorker!.simulateResponse({
|
||||
id: requestId,
|
||||
tokens: [{ lineNumber: 5, tokens: [MOCK_TOKEN_B] }],
|
||||
})
|
||||
})
|
||||
|
||||
// result.current is frozen at last rendered value (null) — no update fired
|
||||
expect(result.current).toBeNull()
|
||||
})
|
||||
|
||||
// ── Test 5: Round-robin — two simultaneous requests go to different workers
|
||||
|
||||
it('distributes two simultaneous requests across both pool workers', async () => {
|
||||
// Ensure the pool has been initialised (first test may have done this)
|
||||
// and reset call counts for clean measurement.
|
||||
MockWorker.instances.forEach((w) => w.postMessage.mockClear())
|
||||
|
||||
const lines1 = [makeLine('alpha', 1, 'added')]
|
||||
const lines2 = [makeLine('beta', 1, 'added')]
|
||||
|
||||
// Render two hook instances at the same time
|
||||
renderHook(() => useHighlightedFile('file1.ts', lines1))
|
||||
renderHook(() => useHighlightedFile('file2.ts', lines2))
|
||||
|
||||
await waitFor(() => {
|
||||
const total = MockWorker.instances.reduce((n, w) => n + w.postMessage.mock.calls.length, 0)
|
||||
expect(total).toBe(2)
|
||||
})
|
||||
|
||||
// Both pool workers should each have received exactly one request
|
||||
// (round-robin: even requestCount → workers[0], odd → workers[1])
|
||||
const counts = MockWorker.instances.map((w) => w.postMessage.mock.calls.length)
|
||||
// Pool has 2 workers; each should have received 1 of the 2 requests
|
||||
expect(counts[0]).toBe(1)
|
||||
expect(counts[1]).toBe(1)
|
||||
})
|
||||
|
||||
// ── Test 6: Unknown language → no request ────────────────────────────────
|
||||
|
||||
it('returns null immediately for files with no detectable language', async () => {
|
||||
MockWorker.instances.forEach((w) => w.postMessage.mockClear())
|
||||
|
||||
const lines = [makeLine('raw data', 1, 'added')]
|
||||
|
||||
const { result } = renderHook(() => useHighlightedFile('data.xyz', lines))
|
||||
|
||||
await new Promise<void>((r) => setTimeout(r, 50))
|
||||
|
||||
expect(result.current).toBeNull()
|
||||
const total = MockWorker.instances.reduce((n, w) => n + w.postMessage.mock.calls.length, 0)
|
||||
expect(total).toBe(0)
|
||||
})
|
||||
})
|
||||
@@ -1,7 +1,59 @@
|
||||
import { useState, useEffect, useMemo } from "react";
|
||||
import type { ThemedToken } from "shiki";
|
||||
import type { HighlightRequest, HighlightResponse } from "./highlight-worker";
|
||||
|
||||
/* ── Lazy singleton highlighter ─────────────────────────── */
|
||||
/* ── Worker pool (module-level, shared across all hook instances) ─────── */
|
||||
|
||||
type PendingResolve = (response: HighlightResponse) => void;
|
||||
|
||||
let workers: Worker[] = [];
|
||||
let requestCount = 0;
|
||||
const MAX_WORKERS = 2;
|
||||
const pending = new Map<string, PendingResolve>();
|
||||
|
||||
let workersInitialized = false;
|
||||
|
||||
function initWorkers(): void {
|
||||
if (workersInitialized) return;
|
||||
workersInitialized = true;
|
||||
try {
|
||||
workers = Array.from({ length: MAX_WORKERS }, () => {
|
||||
const w = new Worker(
|
||||
new URL("./highlight-worker.ts", import.meta.url),
|
||||
{ type: "module" },
|
||||
);
|
||||
w.addEventListener("message", (event: MessageEvent<HighlightResponse>) => {
|
||||
const resolve = pending.get(event.data.id);
|
||||
if (resolve) {
|
||||
pending.delete(event.data.id);
|
||||
resolve(event.data);
|
||||
}
|
||||
});
|
||||
return w;
|
||||
});
|
||||
} catch {
|
||||
// CSP or browser compat — fall back to chunked main-thread highlighting
|
||||
workers = [];
|
||||
}
|
||||
}
|
||||
|
||||
function highlightWithWorker(
|
||||
id: string,
|
||||
language: string,
|
||||
code: string,
|
||||
lineNumbers: number[],
|
||||
filePath: string,
|
||||
): Promise<HighlightResponse> {
|
||||
return new Promise<HighlightResponse>((resolve) => {
|
||||
pending.set(id, resolve);
|
||||
const worker = workers[requestCount % MAX_WORKERS];
|
||||
requestCount++;
|
||||
const req: HighlightRequest = { id, filePath, language, code, lineNumbers };
|
||||
worker.postMessage(req);
|
||||
});
|
||||
}
|
||||
|
||||
/* ── Lazy singleton highlighter (for main-thread fallback) ───────────── */
|
||||
|
||||
let highlighterPromise: Promise<Awaited<
|
||||
ReturnType<typeof import("shiki")["createHighlighter"]>
|
||||
@@ -40,10 +92,59 @@ function getHighlighter() {
|
||||
return highlighterPromise;
|
||||
}
|
||||
|
||||
// Pre-warm on module load (non-blocking)
|
||||
getHighlighter();
|
||||
/* ── Chunked main-thread fallback ────────────────────────────────────── */
|
||||
|
||||
/* ── Language detection ──────────────────────────────────── */
|
||||
async function highlightChunked(
|
||||
code: string,
|
||||
language: string,
|
||||
lineNumbers: number[],
|
||||
signal: AbortSignal,
|
||||
): Promise<LineTokenMap> {
|
||||
const CHUNK = 200;
|
||||
const result: LineTokenMap = new Map();
|
||||
const lines = code.split("\n");
|
||||
const highlighter = await getHighlighter();
|
||||
if (!highlighter) return result;
|
||||
|
||||
for (let i = 0; i < lines.length; i += CHUNK) {
|
||||
if (signal.aborted) break;
|
||||
const chunkLines = lines.slice(i, i + CHUNK);
|
||||
const chunkCode = chunkLines.join("\n");
|
||||
try {
|
||||
const tokenized = highlighter.codeToTokens(chunkCode, {
|
||||
lang: language as Parameters<typeof highlighter.codeToTokens>[1]["lang"],
|
||||
theme: "github-dark-default",
|
||||
});
|
||||
tokenized.tokens.forEach((lineTokens: ThemedToken[], idx: number) => {
|
||||
const lineNum = lineNumbers[i + idx];
|
||||
if (lineNum !== undefined) result.set(lineNum, lineTokens);
|
||||
});
|
||||
} catch {
|
||||
// Skip unparseable chunk
|
||||
}
|
||||
|
||||
// Yield between chunks to avoid blocking the main thread
|
||||
await new Promise<void>((r) => {
|
||||
if (
|
||||
"scheduler" in globalThis &&
|
||||
"yield" in (globalThis as Record<string, unknown>).scheduler
|
||||
) {
|
||||
(
|
||||
(globalThis as Record<string, unknown>).scheduler as {
|
||||
yield: () => Promise<void>;
|
||||
}
|
||||
)
|
||||
.yield()
|
||||
.then(r);
|
||||
} else {
|
||||
setTimeout(r, 0);
|
||||
}
|
||||
});
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/* ── Language detection ──────────────────────────────────────────────── */
|
||||
|
||||
const EXT_TO_LANG: Record<string, string> = {
|
||||
ts: "typescript",
|
||||
@@ -77,7 +178,7 @@ function detectLang(path: string): string | null {
|
||||
return EXT_TO_LANG[ext] ?? null;
|
||||
}
|
||||
|
||||
/* ── Types ───────────────────────────────────────────────── */
|
||||
/* ── Types ───────────────────────────────────────────────────────────── */
|
||||
|
||||
export type TokenizedLine = ThemedToken[];
|
||||
/** Maps newLineNumber → highlighted tokens for that line */
|
||||
@@ -89,12 +190,23 @@ interface DiffLineInput {
|
||||
type: "added" | "removed" | "context";
|
||||
}
|
||||
|
||||
/* ── Hook ────────────────────────────────────────────────── */
|
||||
/* ── Hook ────────────────────────────────────────────────────────────── */
|
||||
|
||||
/**
|
||||
* Highlights the "new-side" content of a file diff.
|
||||
* Returns null until highlighting is ready (progressive enhancement).
|
||||
* Only context + added lines are highlighted (removed lines fall back to plain text).
|
||||
* Highlights the "new-side" content of a file diff, returning a map of
|
||||
* line number → syntax tokens.
|
||||
*
|
||||
* Progressive rendering: returns `null` while highlighting is in progress.
|
||||
* Callers (HunkRows → LineWithComments) render plain text when `null` and
|
||||
* patch in highlighted tokens on re-render once the worker or chunked call
|
||||
* resolves.
|
||||
*
|
||||
* Worker path: uses a module-level pool of 2 Web Workers. Round-robin
|
||||
* assignment. Late responses after unmount are silently discarded.
|
||||
*
|
||||
* Fallback path: if Worker construction fails (CSP, browser compat),
|
||||
* falls back to chunked main-thread highlighting via codeToTokens (200
|
||||
* lines/chunk) with scheduler.yield()/setTimeout(0) between chunks.
|
||||
*/
|
||||
export function useHighlightedFile(
|
||||
filePath: string,
|
||||
@@ -129,32 +241,37 @@ export function useHighlightedFile(
|
||||
return;
|
||||
}
|
||||
|
||||
let cancelled = false;
|
||||
initWorkers(); // no-op after first call
|
||||
|
||||
getHighlighter().then((highlighter) => {
|
||||
if (cancelled || !highlighter) return;
|
||||
const id = crypto.randomUUID();
|
||||
let unmounted = false;
|
||||
const abortController = new AbortController();
|
||||
|
||||
try {
|
||||
const result = highlighter.codeToTokens(code, {
|
||||
lang: lang as Parameters<typeof highlighter.codeToTokens>[1]["lang"],
|
||||
theme: "github-dark-default",
|
||||
});
|
||||
if (workers.length > 0) {
|
||||
highlightWithWorker(id, lang, code, lineNums, filePath).then((response) => {
|
||||
if (unmounted) return; // ignore late responses after unmount
|
||||
if (response.error || response.tokens.length === 0) {
|
||||
setTokenMap(null);
|
||||
return;
|
||||
}
|
||||
const map: LineTokenMap = new Map();
|
||||
|
||||
result.tokens.forEach((lineTokens: ThemedToken[], idx: number) => {
|
||||
if (idx < lineNums.length) {
|
||||
map.set(lineNums[idx], lineTokens);
|
||||
for (const { lineNumber, tokens } of response.tokens) {
|
||||
map.set(lineNumber, tokens);
|
||||
}
|
||||
setTokenMap(map);
|
||||
});
|
||||
} else {
|
||||
highlightChunked(code, lang, lineNums, abortController.signal).then((map) => {
|
||||
if (unmounted) return;
|
||||
setTokenMap(map.size > 0 ? map : null);
|
||||
});
|
||||
|
||||
if (!cancelled) setTokenMap(map);
|
||||
} catch {
|
||||
// Language not loaded or parse error — no highlighting
|
||||
}
|
||||
});
|
||||
|
||||
return () => {
|
||||
cancelled = true;
|
||||
unmounted = true;
|
||||
abortController.abort();
|
||||
// Remove pending resolver so a late worker response is silently dropped
|
||||
pending.delete(id);
|
||||
};
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [cacheKey]);
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -16,6 +16,11 @@ export default defineConfig({
|
||||
"@": path.resolve(__dirname, "./src"),
|
||||
},
|
||||
},
|
||||
worker: {
|
||||
// ES module workers are required when the app uses code-splitting (Rollup
|
||||
// can't bundle IIFE workers alongside dynamic imports).
|
||||
format: "es",
|
||||
},
|
||||
server: {
|
||||
watch: {
|
||||
ignored: ['**/routeTree.gen.ts'],
|
||||
|
||||
@@ -14,6 +14,7 @@
|
||||
| Tiptap | Rich text editor (ProseMirror-based) |
|
||||
| Lucide | Icon library |
|
||||
| Geist Sans/Mono | Typography (variable fonts in `public/fonts/`) |
|
||||
| react-window 2.x | Virtualized list rendering for large file trees in ReviewSidebar |
|
||||
|
||||
## Design System (v2)
|
||||
|
||||
@@ -114,15 +115,26 @@ The initiative detail page has three tabs managed via local state (not URL param
|
||||
### Review Components (`src/components/review/`)
|
||||
| Component | Purpose |
|
||||
|-----------|---------|
|
||||
| `ReviewTab` | Review tab container — orchestrates header, diff, sidebar, and preview. Phase-level review has threaded inline comments (with reply support) + Request Changes; initiative-level review has Request Changes (summary prompt) + Push Branch / Merge & Push |
|
||||
| `ReviewHeader` | Consolidated toolbar: phase selector pills, branch info, stats, preview controls, approve/reject actions |
|
||||
| `ReviewSidebar` | VSCode-style icon strip (Files/Commits views) with file list, root-only comment counts, and commit navigation |
|
||||
| `DiffViewer` | Unified diff renderer with threaded inline comments (root + reply threads) |
|
||||
| `ReviewTab` | Review tab container — orchestrates header, diff, sidebar, and preview. Phase-level review has threaded inline comments (with reply support) + Request Changes; initiative-level review has Request Changes (summary prompt) + Push Branch / Merge & Push. Phase diff uses metadata-only `FileDiff[]` from `getPhaseReviewDiff`; commit diff parses `rawDiff` via `parseUnifiedDiff` → `FileDiffDetail[]`. Passes `commitMode`, `phaseId`, `expandAll` to DiffViewer |
|
||||
| `ReviewHeader` | Consolidated toolbar: phase selector pills, branch info, stats (uses `totalAdditions`/`totalDeletions` props when available, falls back to summing files), preview controls, Expand all button, approve/reject actions |
|
||||
| `ReviewSidebar` | VSCode-style icon strip (Files/Commits views) with file list, root-only comment counts, and commit navigation. FilesView uses react-window 2.x `List` for virtualized rendering when the row count exceeds 50 (dir-headers + file rows). Scroll position is preserved across Files ↔ Commits tab switches. Directories are collapsible. Clicking a file scrolls the virtual list to that row. |
|
||||
| `DiffViewer` | Unified diff renderer with threaded inline comments (root + reply threads). Accepts `FileDiff[] | FileDiffDetail[]`, `phaseId`, `commitMode`, `expandAll` props |
|
||||
| `CommentThread` | Renders root comment with resolve/reopen + nested reply threads (agent replies styled with primary border). Inline reply form |
|
||||
| `ConflictResolutionPanel` | Merge conflict detection + agent resolution in initiative review. Shows conflict files, spawns conflict agent, inline questions, re-check on completion |
|
||||
| `PreviewPanel` | Docker preview status: building/running/failed with start/stop (legacy, now integrated into ReviewHeader) |
|
||||
| `ProposalCard` | Individual proposal display |
|
||||
|
||||
#### Syntax Highlighting (`use-syntax-highlight.ts` + `highlight-worker.ts`)
|
||||
|
||||
`useHighlightedFile(filePath, allLines)` returns `LineTokenMap | null`. Tokenisation runs off the main thread:
|
||||
|
||||
- **Worker path** (default): a module-level pool of 2 ES module Web Workers (`highlight-worker.ts`) each import shiki's `codeToTokens` dynamically. Requests are round-robined by `requestCount % 2`. Responses are correlated by UUID. Late responses after unmount are silently discarded via the `pending` Map.
|
||||
- **Fallback path** (CSP / browser-compat): if `Worker` construction throws, `createHighlighter` is used on the main thread but processes 200 lines per chunk, yielding between chunks via `scheduler.yield()` or `setTimeout(0)`.
|
||||
|
||||
Callers receive `null` while highlighting is in progress and a populated `Map<lineNumber, ThemedToken[]>` once it resolves. `LineWithComments` already renders plain text when `null`, so no caller changes are needed.
|
||||
|
||||
Vite must be configured with `worker.format: 'es'` (added to `vite.config.ts`) for the worker chunk to bundle correctly alongside code-split app chunks.
|
||||
|
||||
### UI Primitives (`src/components/ui/`)
|
||||
shadcn/ui components: badge (6 status variants + xs size), button, card, dialog, dropdown-menu, input, label, select, sonner, textarea, tooltip.
|
||||
|
||||
|
||||
@@ -40,6 +40,8 @@ Worktrees stored in `.cw-worktrees/` subdirectory of the repo. Each agent gets a
|
||||
| `ensureBranch(repoPath, branch, baseBranch)` | Create branch from base if it doesn't exist (idempotent) |
|
||||
| `mergeBranch(repoPath, source, target)` | Merge via ephemeral worktree, returns conflict info |
|
||||
| `diffBranches(repoPath, base, head)` | Three-dot diff between branches |
|
||||
| `diffBranchesStat(repoPath, base, head)` | Per-file metadata (path, status, additions, deletions) — no hunk content. Binary files included with `status: 'binary'` and counts of 0. Returns `FileStatEntry[]`. |
|
||||
| `diffFileSingle(repoPath, base, head, filePath)` | Raw unified diff for a single file (three-dot diff). `filePath` must be URL-decoded. Returns empty string for binary files. |
|
||||
| `deleteBranch(repoPath, branch)` | Delete local branch (no-op if missing) |
|
||||
| `branchExists(repoPath, branch)` | Check local branches |
|
||||
| `remoteBranchExists(repoPath, branch)` | Check remote tracking branches (`origin/<branch>`) |
|
||||
|
||||
@@ -122,7 +122,8 @@ Each procedure uses `require*Repository(ctx)` helpers that throw `TRPCError(INTE
|
||||
| listInitiativePhaseDependencies | query | All dependency edges |
|
||||
| getPhaseDependencies | query | What this phase depends on |
|
||||
| getPhaseDependents | query | What depends on this phase |
|
||||
| getPhaseReviewDiff | query | Full branch diff for pending_review phase |
|
||||
| getPhaseReviewDiff | query | File-level metadata for pending_review phase: `{phaseName, sourceBranch, targetBranch, files: FileStatEntry[], totalAdditions, totalDeletions}` — no hunk content. Results are cached in-memory by `phaseId:headHash` (TTL: `REVIEW_DIFF_CACHE_TTL_MS`, default 5 min). Cache is invalidated when a task merges into the phase branch. |
|
||||
| getFileDiff | query | Per-file unified diff on demand: `{phaseId, filePath, projectId?}` → `{binary: boolean, rawDiff: string}`; `filePath` must be URL-encoded; binary files return `{binary: true, rawDiff: ''}` |
|
||||
| getPhaseReviewCommits | query | List commits between initiative and phase branch |
|
||||
| getCommitDiff | query | Diff for a single commit (by hash) in a phase |
|
||||
| approvePhaseReview | mutation | Approve and merge phase branch |
|
||||
|
||||
21
package-lock.json
generated
21
package-lock.json
generated
@@ -73,12 +73,14 @@
|
||||
"@tiptap/suggestion": "^3.19.0",
|
||||
"@trpc/client": "^11.9.0",
|
||||
"@trpc/react-query": "^11.9.0",
|
||||
"@types/react-window": "^1.8.8",
|
||||
"class-variance-authority": "^0.7.1",
|
||||
"clsx": "^2.1.1",
|
||||
"geist": "^1.7.0",
|
||||
"lucide-react": "^0.563.0",
|
||||
"react": "^19.0.0",
|
||||
"react-dom": "^19.0.0",
|
||||
"react-window": "^2.2.7",
|
||||
"sonner": "^2.0.7",
|
||||
"tailwind-merge": "^3.4.0",
|
||||
"tippy.js": "^6.3.7"
|
||||
@@ -5198,6 +5200,15 @@
|
||||
"@types/react": "^19.2.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/react-window": {
|
||||
"version": "1.8.8",
|
||||
"resolved": "https://registry.npmjs.org/@types/react-window/-/react-window-1.8.8.tgz",
|
||||
"integrity": "sha512-8Ls660bHR1AUA2kuRvVG9D/4XpRC6wjAaPT9dil7Ckc76eP9TKWZwwmgfq8Q1LANX3QNDnoU4Zp48A3w+zK69Q==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/react": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/unist": {
|
||||
"version": "3.0.3",
|
||||
"resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
|
||||
@@ -9131,6 +9142,16 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/react-window": {
|
||||
"version": "2.2.7",
|
||||
"resolved": "https://registry.npmjs.org/react-window/-/react-window-2.2.7.tgz",
|
||||
"integrity": "sha512-SH5nvfUQwGHYyriDUAOt7wfPsfG9Qxd6OdzQxl5oQ4dsSsUicqQvjV7dR+NqZ4coY0fUn3w1jnC5PwzIUWEg5w==",
|
||||
"license": "MIT",
|
||||
"peerDependencies": {
|
||||
"react": "^18.0.0 || ^19.0.0",
|
||||
"react-dom": "^18.0.0 || ^19.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/read-cache": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/read-cache/-/read-cache-1.0.0.tgz",
|
||||
|
||||
@@ -5,11 +5,24 @@ import path from 'node:path';
|
||||
export default defineConfig({
|
||||
plugins: [react()],
|
||||
resolve: {
|
||||
// Alias react to the parent monorepo's copy, matching what @testing-library
|
||||
// loads react-dom from. This ensures React DOM and our components share the
|
||||
// same ReactSharedInternals and hook dispatcher — preventing null-dispatcher
|
||||
// errors when running tests from a git worktree.
|
||||
alias: {
|
||||
'@': path.resolve(__dirname, './apps/web/src'),
|
||||
react: path.resolve(__dirname, '../../../../node_modules/react'),
|
||||
'react-dom': path.resolve(__dirname, '../../../../node_modules/react-dom'),
|
||||
},
|
||||
dedupe: ['react', 'react-dom'],
|
||||
},
|
||||
test: {
|
||||
// Force react-dom and @testing-library through Vite's module graph so that
|
||||
// the resolve.alias for 'react-dom' applies (prevents parent-monorepo
|
||||
// react-dom loading a different React instance than our source files).
|
||||
deps: {
|
||||
inline: ['react-dom', '@testing-library/react'],
|
||||
},
|
||||
// Enable test globals (describe, it, expect without imports)
|
||||
globals: true,
|
||||
env: {
|
||||
|
||||
Reference in New Issue
Block a user