test(repo): guard critical ownership surfaces
This commit is contained in:
@@ -1,11 +1,12 @@
|
||||
import { readFile } from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import process from "node:process";
|
||||
import { pathToFileURL } from "node:url";
|
||||
import { resolveRealWorkspaceRoot } from "./load-env.mjs";
|
||||
|
||||
const rootDir = resolveRealWorkspaceRoot();
|
||||
|
||||
const rules = [
|
||||
export const rules = [
|
||||
{
|
||||
file: "apps/web/src/server/auth.ts",
|
||||
required: [
|
||||
@@ -60,6 +61,7 @@ const rules = [
|
||||
},
|
||||
{
|
||||
file: "packages/api/src/sse/subscription-policy.ts",
|
||||
maxLines: 80,
|
||||
required: [
|
||||
{
|
||||
pattern: /\bderiveUserSseSubscription\b/,
|
||||
@@ -83,6 +85,21 @@ const rules = [
|
||||
{ pattern: /\baudience\b/, message: "timeline SSE route must not parse raw audience values from the client" },
|
||||
],
|
||||
},
|
||||
{
|
||||
file: "apps/web/src/hooks/useTimelineSSE.ts",
|
||||
maxLines: 120,
|
||||
required: [
|
||||
{
|
||||
pattern: /\bgetTimelineSseInvalidationKeys\s*\(/,
|
||||
message: "timeline SSE hook must keep invalidation policy delegated to the extracted policy helper",
|
||||
},
|
||||
{
|
||||
pattern: /\bparseTimelineSseEvent\s*\(/,
|
||||
message: "timeline SSE hook must keep event parsing delegated to the extracted policy helper",
|
||||
},
|
||||
],
|
||||
forbidden: [],
|
||||
},
|
||||
{
|
||||
file: "docker-compose.prod.yml",
|
||||
required: [
|
||||
@@ -184,11 +201,12 @@ const rules = [
|
||||
},
|
||||
];
|
||||
|
||||
const violations = [];
|
||||
/**
 * Count the number of lines in a source string.
 *
 * Equivalent to `source.split("\n").length`: the count is the number of
 * newline characters plus one, so the empty string counts as one line and
 * a string with a trailing newline counts that trailing segment too.
 *
 * @param {string} source - Full file contents.
 * @returns {number} Line count (always >= 1).
 */
export function countLines(source) {
  let total = 1;
  for (const ch of source) {
    if (ch === "\n") {
      total += 1;
    }
  }
  return total;
}
|
||||
|
||||
for (const rule of rules) {
|
||||
const absolutePath = path.join(rootDir, rule.file);
|
||||
const source = await readFile(absolutePath, "utf8");
|
||||
export function evaluateRule(rule, source) {
|
||||
const violations = [];
|
||||
|
||||
for (const requirement of rule.required) {
|
||||
if (!requirement.pattern.test(source)) {
|
||||
@@ -201,14 +219,48 @@ for (const rule of rules) {
|
||||
violations.push(`${rule.file}: forbidden pattern matched: ${forbidden.message}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (violations.length > 0) {
|
||||
console.error("Architecture guardrail check failed:");
|
||||
for (const violation of violations) {
|
||||
console.error(`- ${violation}`);
|
||||
if (typeof rule.maxLines === "number") {
|
||||
const lines = countLines(source);
|
||||
if (lines > rule.maxLines) {
|
||||
violations.push(
|
||||
`${rule.file}: file grew to ${lines} lines and exceeds maxLines=${rule.maxLines}; split the ownership surface before expanding it further`,
|
||||
);
|
||||
}
|
||||
}
|
||||
process.exit(1);
|
||||
|
||||
return violations;
|
||||
}
|
||||
|
||||
console.log("Architecture guardrails passed.");
|
||||
/**
 * Read each guarded file and collect every guardrail violation.
 *
 * Files are read sequentially, in rule order, so violation messages come out
 * grouped per rule.
 *
 * @param {Array<object>} [architectureRules] - Rules to check; defaults to the
 *   module-level `rules` list.
 * @param {object} [options]
 * @param {Function} [options.readSource] - File reader (injectable for tests);
 *   defaults to `fs/promises.readFile`.
 * @param {string} [options.workspaceRoot] - Base directory rule paths are
 *   resolved against; defaults to the module-level `rootDir`.
 * @returns {Promise<string[]>} Flat list of human-readable violation messages
 *   (empty when every rule passes).
 */
export async function collectArchitectureGuardrailViolations(
  architectureRules = rules,
  { readSource = readFile, workspaceRoot = rootDir } = {},
) {
  const collected = [];

  for (const rule of architectureRules) {
    const source = await readSource(path.join(workspaceRoot, rule.file), "utf8");
    for (const violation of evaluateRule(rule, source)) {
      collected.push(violation);
    }
  }

  return collected;
}
|
||||
|
||||
/**
 * CLI entry point: run all guardrail checks, report the outcome, and exit
 * non-zero when any rule is violated.
 *
 * On success prints a single confirmation line; on failure prints one line
 * per violation to stderr and terminates the process with exit code 1.
 */
async function main() {
  const violations = await collectArchitectureGuardrailViolations();

  if (violations.length === 0) {
    console.log("Architecture guardrails passed.");
    return;
  }

  console.error("Architecture guardrail check failed:");
  for (const violation of violations) {
    console.error(`- ${violation}`);
  }
  process.exit(1);
}
|
||||
|
||||
// Run the check only when this module is executed directly
// (`node <script>`), not when it is imported for its exports.
const invokedAsScript = import.meta.url === pathToFileURL(process.argv[1]).href;
if (invokedAsScript) {
  await main();
}
|
||||
|
||||
Reference in New Issue
Block a user