chore(repo): add parallel worktree hygiene guardrail

This commit is contained in:
2026-04-01 08:53:14 +02:00
parent 90f2f3c123
commit 6249f61ce1
4 changed files with 253 additions and 0 deletions
+2
View File
@@ -9,7 +9,9 @@
|---|---|---| |---|---|---|
| AI excellence due diligence | [ai-excellence-due-diligence-roadmap.md](/home/hartmut/Documents/Copilot/capakraken/docs/ai-excellence-due-diligence-roadmap.md) | Frank quality assessment and cleanup roadmap toward a showcase AI-built project | | AI excellence due diligence | [ai-excellence-due-diligence-roadmap.md](/home/hartmut/Documents/Copilot/capakraken/docs/ai-excellence-due-diligence-roadmap.md) | Frank quality assessment and cleanup roadmap toward a showcase AI-built project |
| Showcase quality backlog | [showcase-quality-backlog.md](/home/hartmut/Documents/Copilot/capakraken/docs/showcase-quality-backlog.md) | Consolidated working backlog for the current quality and maintainability north star | | Showcase quality backlog | [showcase-quality-backlog.md](/home/hartmut/Documents/Copilot/capakraken/docs/showcase-quality-backlog.md) | Consolidated working backlog for the current quality and maintainability north star |
| Parallel worktree hygiene | [parallel-worktree-hygiene.md](/home/hartmut/Documents/Copilot/capakraken/docs/parallel-worktree-hygiene.md) | Keep parallel worker slices isolated and the worktree reviewable | <!-- NOTE(review): this row is added twice in this change (see the second "Parallel worktree hygiene" row below) with two different descriptions — keep a single row with one description -->
| Target CI/CD architecture | [cicd-target-architecture.md](/home/hartmut/Documents/Copilot/capakraken/docs/cicd-target-architecture.md) | Canonical image-based build, deploy, and rollback flow | | Target CI/CD architecture | [cicd-target-architecture.md](/home/hartmut/Documents/Copilot/capakraken/docs/cicd-target-architecture.md) | Canonical image-based build, deploy, and rollback flow |
| Parallel worktree hygiene | [parallel-worktree-hygiene.md](/home/hartmut/Documents/Copilot/capakraken/docs/parallel-worktree-hygiene.md) | Lightweight guardrail for shared dirty worktrees and parallel worker scopes |
| Active roadmap and open gaps | [product-roadmap.md](/home/hartmut/Documents/Copilot/capakraken/docs/product-roadmap.md) | Primary backlog and current delivery order | | Active roadmap and open gaps | [product-roadmap.md](/home/hartmut/Documents/Copilot/capakraken/docs/product-roadmap.md) | Primary backlog and current delivery order |
| Estimating system design | [estimating-extension-design.md](/home/hartmut/Documents/Copilot/capakraken/docs/estimating-extension-design.md) | Workbook analysis, field mapping, and implementation plan | | Estimating system design | [estimating-extension-design.md](/home/hartmut/Documents/Copilot/capakraken/docs/estimating-extension-design.md) | Workbook analysis, field mapping, and implementation plan |
| Dispo import implementation | [dispo-import-implementation.md](/home/hartmut/Documents/Copilot/capakraken/docs/dispo-import-implementation.md) | Clean-slate Dispo v2 import design, mapping rules, staging flow, and commit policy | | Dispo import implementation | [dispo-import-implementation.md](/home/hartmut/Documents/Copilot/capakraken/docs/dispo-import-implementation.md) | Clean-slate Dispo v2 import design, mapping rules, staging flow, and commit policy |
+64
View File
@@ -0,0 +1,64 @@
# Parallel Worktree Hygiene
This repository is now opinionated about keeping the worktree reviewable during parallel AI-worker runs.
## Goal
Keep every slice narrow, make unrelated drift visible immediately, and avoid "who touched what?" cleanup phases before reviews or commits.
## Command
Use the root helper:
```bash
pnpm worktree:hygiene
```
It prints the current worktree and, when scopes are declared, splits dirty files into:
- `staged`
- `unstaged`
- `untracked`
- `in scope`
- `outside scope`
## Recommended Flow
Before starting a slice:
```bash
pnpm worktree:hygiene
```
When you intentionally work in a narrow area:
```bash
pnpm worktree:hygiene -- --scope apps/web/e2e --scope apps/web/src/components/timeline --fail-outside-scope
```
If the worktree is intentionally shared for a moment, keep the signal but suppress the hard failure:
```bash
pnpm worktree:hygiene -- --scope docs --scope scripts --allow-outside-scope
```
Before committing:
```bash
pnpm worktree:hygiene -- --fail-on-dirty
```
Use `--json` when another tool or agent should consume the output programmatically.
## Parallel-Worker Rules
1. Pick an explicit ownership scope before editing files.
2. Run the scoped hygiene check before and after each slice.
3. Do not mix unrelated files into a commit just because they are already dirty.
4. If `outside-scope` is non-empty, either hand off that scope or finish and commit your own slice first.
5. Prefer multiple small commits over one shared dirty worktree.
## Notes
- `git status --short` remains the fastest human spot check.
- `pnpm worktree:hygiene` is the stricter guard when multiple workers operate in parallel.
+1
View File
@@ -14,6 +14,7 @@
"check:architecture": "node ./scripts/check-architecture-guardrails.mjs", "check:architecture": "node ./scripts/check-architecture-guardrails.mjs",
"check:exports": "node ./scripts/check-workspace-exports.mjs", "check:exports": "node ./scripts/check-workspace-exports.mjs",
"check:imports": "node ./scripts/check-workspace-imports.mjs", "check:imports": "node ./scripts/check-workspace-imports.mjs",
"worktree:hygiene": "node ./scripts/worktree-hygiene.mjs",
"clean:next": "node ./scripts/clean-next-artifacts.mjs", "clean:next": "node ./scripts/clean-next-artifacts.mjs",
"db:doctor": "node ./scripts/db-doctor.mjs capakraken", "db:doctor": "node ./scripts/db-doctor.mjs capakraken",
"db:prisma": "node ./scripts/prisma-with-env.mjs", "db:prisma": "node ./scripts/prisma-with-env.mjs",
+186
View File
@@ -0,0 +1,186 @@
#!/usr/bin/env node
import { execFileSync } from "node:child_process";
import process from "node:process";
/**
 * Print CLI usage and worked examples for the hygiene script to stdout.
 * Invoked for `--help`/`-h` and after any argument-parsing error.
 */
function printUsage() {
console.log(`Usage:
node scripts/worktree-hygiene.mjs [--scope <path>]... [--allow-outside-scope] [--fail-outside-scope] [--fail-on-dirty] [--json]
Examples:
node scripts/worktree-hygiene.mjs --scope docs/ --scope scripts/
node scripts/worktree-hygiene.mjs --scope apps/web/src/components/timeline/ --scope apps/web/e2e/timeline.spec.ts
node scripts/worktree-hygiene.mjs --scope docs/ --scope scripts/ --fail-outside-scope
node scripts/worktree-hygiene.mjs --fail-on-dirty
node scripts/worktree-hygiene.mjs --scope packages/api/src/router/ --allow-outside-scope
`);
}
/**
 * Normalize a user-supplied scope path so it can be compared against the
 * slash-separated paths git reports: backslashes become forward slashes and
 * all leading "./" segments are stripped (e.g. "././docs" -> "docs").
 *
 * @param {string} scope - Raw scope value from the CLI.
 * @returns {string} Normalized, slash-separated path.
 */
function normalizeScope(scope) {
  // (?:\.\/+)+ removes REPEATED "./" prefixes in one pass; the previous
  // /^\.\/+/u stripped only one, so "././docs" stayed "./docs" and never
  // matched git's "docs/..." paths.
  return scope.replace(/\\/gu, "/").replace(/^(?:\.\/+)+/u, "");
}
/**
 * Parse CLI arguments for the worktree hygiene check.
 *
 * Supported flags:
 *   --scope <path>         (repeatable) declare an owned path prefix
 *   --allow-outside-scope  tolerate dirty files outside the declared scopes
 *   --fail-outside-scope   exit non-zero on dirty files outside the scopes
 *   --fail-on-dirty        exit non-zero when the worktree is dirty at all
 *   --json                 emit machine-readable output
 *   --help | -h            print usage and exit 0
 *
 * @param {string[]} argv - Arguments after the script path (process.argv.slice(2)).
 * @returns {{scopes: string[], allowOutsideScope: boolean, failOutsideScope: boolean, failOnDirty: boolean, json: boolean}}
 * @throws {Error} On an unknown argument or a missing/invalid --scope value.
 */
function parseArgs(argv) {
  const scopes = [];
  let allowOutsideScope = false;
  let failOutsideScope = false;
  let failOnDirty = false;
  let json = false;
  for (let index = 0; index < argv.length; index += 1) {
    const arg = argv[index];
    if (arg === "--") {
      // pnpm forwards a literal "--" separator before script args; skip it.
      continue;
    }
    if (arg === "--scope") {
      const value = argv[index + 1];
      // Reject a following flag as the value: "--scope --json" is almost
      // certainly a forgotten path, not a scope literally named "--json".
      // (Previously the flag was silently consumed as a scope path.)
      if (!value || value.startsWith("--")) {
        throw new Error("--scope requires a path value.");
      }
      scopes.push(normalizeScope(value));
      index += 1; // consume the value token
      continue;
    }
    if (arg === "--allow-outside-scope") {
      allowOutsideScope = true;
      continue;
    }
    if (arg === "--fail-outside-scope") {
      failOutsideScope = true;
      continue;
    }
    if (arg === "--fail-on-dirty") {
      failOnDirty = true;
      continue;
    }
    if (arg === "--json") {
      json = true;
      continue;
    }
    if (arg === "--help" || arg === "-h") {
      printUsage();
      process.exit(0);
    }
    throw new Error(`Unknown argument: ${arg}`);
  }
  return { scopes, allowOutsideScope, failOutsideScope, failOnDirty, json };
}
/**
 * Run a git subcommand and return its stdout as UTF-8 text.
 * stdin is ignored; stderr is piped so a failing git call throws with
 * the captured error output attached by execFileSync.
 *
 * @param {string[]} args - Arguments passed to the `git` executable.
 * @returns {string} Captured stdout.
 */
function runGit(args) {
  const options = {
    encoding: "utf8",
    stdio: ["ignore", "pipe", "pipe"],
  };
  return execFileSync("git", args, options);
}
/**
 * Strip git's C-style quoting from a path in `git status --short` output.
 * Git wraps paths containing spaces or special characters in double quotes
 * and escapes embedded quotes/backslashes (e.g. `?? "new file.txt"`).
 *
 * @param {string} rawPath - Path token as emitted by git.
 * @returns {string} Unquoted path, or the input unchanged if not quoted.
 */
function unquoteGitPath(rawPath) {
  if (rawPath.length >= 2 && rawPath.startsWith('"') && rawPath.endsWith('"')) {
    return rawPath.slice(1, -1).replace(/\\(["\\])/gu, "$1");
  }
  return rawPath;
}

/**
 * Parse `git status --short` output into structured entries.
 *
 * @param {string} output - Raw status output, one entry per line.
 * @returns {{xy: string, path: string}[]} Two-letter XY status code plus the
 *   affected path (for renames/copies, the destination side of "old -> new").
 */
function parsePorcelain(output) {
  return output
    .split("\n")
    .filter(Boolean)
    .map((line) => {
      const xy = line.slice(0, 2);
      const rawPath = line.slice(3);
      // Renames/copies are reported as "old -> new"; keep the destination.
      const renamed = xy.includes("R") || xy.includes("C");
      const path = renamed ? rawPath.split(" -> ").at(-1) ?? rawPath : rawPath;
      return {
        xy,
        // Unquote BEFORE slash-normalizing so quoted paths (spaces, special
        // chars) match scopes; previously the surrounding quotes were kept
        // in `path`, making such files never match any scope.
        path: unquoteGitPath(path).replace(/\\/gu, "/"),
      };
    });
}
/**
 * Decide whether a dirty path falls under one of the declared scopes.
 * A path matches when it equals a scope exactly or lives beneath the
 * scope treated as a directory prefix.
 *
 * @param {string} path - Slash-normalized, repo-relative path.
 * @param {string[]} scopes - Normalized scope prefixes.
 * @returns {boolean} True when the path belongs to at least one scope.
 */
function matchesScope(path, scopes) {
  for (const scope of scopes) {
    if (path === scope) {
      return true;
    }
    const prefix = scope.endsWith("/") ? scope : `${scope}/`;
    if (path.startsWith(prefix)) {
      return true;
    }
  }
  return false;
}
/**
 * Tally dirty entries into staged / unstaged / untracked counts.
 * A single entry may count as both staged and unstaged (e.g. XY "MM").
 *
 * @param {{xy: string}[]} entries - Parsed status entries.
 * @returns {{staged: number, unstaged: number, untracked: number}}
 */
function summarize(entries) {
  let staged = 0;
  let unstaged = 0;
  let untracked = 0;
  for (const { xy } of entries) {
    if (xy === "??") {
      untracked += 1;
    } else {
      const indexState = xy[0];
      const worktreeState = xy[1];
      if (indexState && indexState !== " ") {
        staged += 1;
      }
      if (worktreeState && worktreeState !== " ") {
        unstaged += 1;
      }
    }
  }
  return { staged, unstaged, untracked };
}
/**
 * Render a titled list of status entries for human-readable output.
 *
 * @param {string} title - Section heading.
 * @param {{xy: string, path: string}[]} entries - Entries to list.
 * @returns {string} `"<title>: none"` when empty, otherwise a multi-line
 *   bulleted listing of `- <XY> <path>` rows under the heading.
 */
function formatEntries(title, entries) {
  if (entries.length === 0) {
    return `${title}: none`;
  }
  const lines = [`${title}:`];
  for (const { xy, path } of entries) {
    lines.push(`- ${xy} ${path}`);
  }
  return lines.join("\n");
}
/**
 * Entry point: report worktree dirtiness, optionally relative to declared
 * ownership scopes, and translate the findings into exit codes.
 *
 * Exit codes:
 * - 0: clean, or all findings tolerated by flags
 * - 1: --fail-on-dirty was set and any dirty entries exist
 * - 2: scopes were declared, outside-scope entries exist, and either
 *      --fail-outside-scope was set or --allow-outside-scope was NOT
 *      (so --fail-outside-scope takes precedence when both flags are given)
 */
function main() {
const { scopes, allowOutsideScope, failOutsideScope, failOnDirty, json } = parseArgs(process.argv.slice(2));
// Repo identity for the report header.
const root = runGit(["rev-parse", "--show-toplevel"]).trim();
const branch = runGit(["rev-parse", "--abbrev-ref", "HEAD"]).trim();
const entries = parsePorcelain(runGit(["status", "--short"]));
// With no scopes declared, everything counts as "in scope" and the
// outside-scope list stays empty (so the scope exit path cannot trigger).
const inScope = scopes.length === 0 ? entries : entries.filter((entry) => matchesScope(entry.path, scopes));
const outOfScope = scopes.length === 0 ? [] : entries.filter((entry) => !matchesScope(entry.path, scopes));
const status = {
root,
branch,
scopes,
totals: summarize(entries),
inScope: inScope.length,
outOfScope: outOfScope.length,
allowOutsideScope,
failOutsideScope,
failOnDirty,
};
if (json) {
// Machine-readable mode: counts plus the full entry lists.
console.log(JSON.stringify({ ...status, entries: { inScope, outOfScope } }, null, 2));
} else {
console.log(`Repository: ${root}`);
console.log(`Branch: ${branch}`);
console.log(`Dirty entries: ${entries.length}`);
console.log(
`Summary: staged=${status.totals.staged}, unstaged=${status.totals.unstaged}, untracked=${status.totals.untracked}`,
);
if (scopes.length > 0) {
console.log(`Owned scope: ${scopes.join(", ")}`);
console.log(formatEntries("In scope", inScope));
console.log(formatEntries("Outside scope", outOfScope));
if (outOfScope.length > 0 && !allowOutsideScope) {
console.log("\nResult: outside-scope changes detected.");
} else if (outOfScope.length > 0) {
// NOTE(review): with BOTH --allow-outside-scope and --fail-outside-scope
// this prints "tolerated" yet the process still exits 2 below — confirm
// that precedence is intended.
console.log("\nResult: outside-scope changes detected but tolerated by flag.");
} else {
console.log("\nResult: all dirty files are inside the declared scope.");
}
} else {
console.log(formatEntries("Dirty files", entries));
}
}
// Exit-code checks run AFTER reporting so the user always sees the details.
// --fail-on-dirty (exit 1) is checked first and wins over the scope check.
if (failOnDirty && entries.length > 0) {
process.exit(1);
}
// Default behavior with scopes is strict: outside-scope entries fail unless
// --allow-outside-scope is given; --fail-outside-scope forces the failure.
if (scopes.length > 0 && outOfScope.length > 0 && (failOutsideScope || !allowOutsideScope)) {
process.exit(2);
}
}
// Top-level guard: surface a readable one-line message plus usage (most
// failures here are argument mistakes) and exit 1 instead of dumping a raw
// stack trace. NOTE(review): git failures (e.g. running outside a repo)
// also trigger the usage print — acceptable for a helper script, confirm.
try {
main();
} catch (error) {
// The throw may be a non-Error value (string, etc.); stringify defensively.
console.error(error instanceof Error ? error.message : String(error));
printUsage();
process.exit(1);
}