chore(ci): add workspace and db guardrails

This commit is contained in:
2026-03-31 22:36:12 +02:00
parent cb8669c489
commit 0b192efdb1
10 changed files with 563 additions and 38 deletions
+27 -6
View File
@@ -13,6 +13,8 @@ concurrency:
env:
NODE_VERSION: "20"
PNPM_VERSION: "9.14.2"
CI_AUTH_URL: http://localhost:3100
CI_AUTH_SECRET: capakraken-ci-build-secret-rotate-if-shared
jobs:
guardrails:
@@ -36,6 +38,12 @@ jobs:
- name: Check architecture guardrails
run: pnpm check:architecture
- name: Check workspace exports
run: pnpm check:exports
- name: Check workspace imports
run: pnpm check:imports
# ──────────────────────────────────────────────
# Typecheck — ~40s, no services needed
# ──────────────────────────────────────────────
@@ -58,7 +66,7 @@ jobs:
run: pnpm install --frozen-lockfile
- name: Generate Prisma client
run: pnpm --filter @capakraken/db exec prisma generate
run: pnpm db:generate
- name: Cache Turborepo
uses: actions/cache@v4
@@ -68,7 +76,7 @@ jobs:
restore-keys: turbo-typecheck-
- name: Run typecheck
run: pnpm --filter @capakraken/web exec tsc --noEmit
run: pnpm typecheck
# ──────────────────────────────────────────────
# Lint — ~20s, no services needed
@@ -92,7 +100,7 @@ jobs:
run: pnpm install --frozen-lockfile
- name: Generate Prisma client
run: pnpm --filter @capakraken/db exec prisma generate
run: pnpm db:generate
- name: Cache Turborepo
uses: actions/cache@v4
@@ -136,6 +144,10 @@ jobs:
env:
DATABASE_URL: postgresql://capakraken:capakraken_test@localhost:5432/capakraken_test
REDIS_URL: redis://localhost:6379
NEXTAUTH_URL: ${{ env.CI_AUTH_URL }}
AUTH_URL: ${{ env.CI_AUTH_URL }}
NEXTAUTH_SECRET: ${{ env.CI_AUTH_SECRET }}
AUTH_SECRET: ${{ env.CI_AUTH_SECRET }}
steps:
- uses: actions/checkout@v4
@@ -152,10 +164,11 @@ jobs:
run: pnpm install --frozen-lockfile
- name: Generate Prisma client
run: pnpm --filter @capakraken/db exec prisma generate
run: pnpm db:generate
- name: Run unit tests with coverage
run: |
pnpm --filter @capakraken/web test:unit -- --coverage
pnpm --filter @capakraken/engine exec vitest run --coverage
pnpm --filter @capakraken/staffing exec vitest run --coverage
pnpm --filter @capakraken/api exec vitest run --coverage
@@ -172,6 +185,10 @@ jobs:
runs-on: ubuntu-latest
env:
DATABASE_URL: postgresql://placeholder:placeholder@localhost:5432/placeholder
NEXTAUTH_URL: ${{ env.CI_AUTH_URL }}
AUTH_URL: ${{ env.CI_AUTH_URL }}
NEXTAUTH_SECRET: ${{ env.CI_AUTH_SECRET }}
AUTH_SECRET: ${{ env.CI_AUTH_SECRET }}
steps:
- uses: actions/checkout@v4
@@ -188,7 +205,7 @@ jobs:
run: pnpm install --frozen-lockfile
- name: Generate Prisma client
run: pnpm --filter @capakraken/db exec prisma generate
run: pnpm db:generate
- name: Cache Turborepo
uses: actions/cache@v4
@@ -243,6 +260,10 @@ jobs:
CONFIRM_DESTRUCTIVE_DB_NAME: capakraken_test
REDIS_URL: redis://localhost:6379
PORT: 3100
NEXTAUTH_URL: ${{ env.CI_AUTH_URL }}
AUTH_URL: ${{ env.CI_AUTH_URL }}
NEXTAUTH_SECRET: ${{ env.CI_AUTH_SECRET }}
AUTH_SECRET: ${{ env.CI_AUTH_SECRET }}
steps:
- uses: actions/checkout@v4
@@ -259,7 +280,7 @@ jobs:
run: pnpm install --frozen-lockfile
- name: Generate Prisma client
run: pnpm --filter @capakraken/db exec prisma generate
run: pnpm db:generate
- name: Cache Playwright browsers
id: playwright-cache
+22 -11
View File
@@ -3,26 +3,31 @@
"version": "0.1.0",
"private": true,
"scripts": {
"dev": "turbo dev",
"build": "turbo build",
"lint": "turbo lint",
"test": "turbo run test:unit",
"test:unit": "turbo test:unit",
"test:e2e": "turbo test:e2e",
"predev": "pnpm check:exports && pnpm check:imports",
"dev": "node ./scripts/run-from-workspace-root.mjs turbo dev",
"prebuild": "pnpm check:exports && pnpm check:imports",
"build": "node ./scripts/run-from-workspace-root.mjs turbo build",
"lint": "node ./scripts/run-from-workspace-root.mjs turbo lint",
"test": "node ./scripts/run-from-workspace-root.mjs turbo run test:unit",
"test:unit": "node ./scripts/run-from-workspace-root.mjs turbo test:unit",
"test:e2e": "node ./scripts/run-from-workspace-root.mjs turbo test:e2e",
"check:architecture": "node ./scripts/check-architecture-guardrails.mjs",
"check:exports": "node ./scripts/check-workspace-exports.mjs",
"check:imports": "node ./scripts/check-workspace-imports.mjs",
"clean:next": "node ./scripts/clean-next-artifacts.mjs",
"db:doctor": "node ./scripts/db-doctor.mjs capakraken",
"db:prisma": "node ./scripts/prisma-with-env.mjs",
"db:push": "node ./scripts/with-env.mjs pnpm --filter @capakraken/db db:push",
"db:migrate": "node ./scripts/with-env.mjs pnpm --filter @capakraken/db db:migrate",
"db:generate": "node ./scripts/with-env.mjs pnpm --filter @capakraken/db db:generate",
"db:validate": "node ./scripts/with-env.mjs pnpm --filter @capakraken/db db:validate",
"db:push": "node ./scripts/prisma-with-env.mjs db push",
"db:migrate": "node ./scripts/prisma-with-env.mjs migrate dev",
"db:generate": "node ./scripts/prisma-with-env.mjs generate",
"db:validate": "node ./scripts/prisma-with-env.mjs validate",
"db:seed": "node ./scripts/with-env.mjs pnpm --filter @capakraken/db db:seed",
"db:studio": "node ./scripts/with-env.mjs pnpm --filter @capakraken/db db:studio",
"db:reset:dispo": "pnpm --filter @capakraken/db db:reset:dispo",
"db:import:dispo": "pnpm --filter @capakraken/db db:import:dispo",
"db:readiness:demand-assignment": "pnpm --filter @capakraken/db db:readiness:demand-assignment",
"format": "prettier --write \"**/*.{ts,tsx,md,json}\"",
"typecheck": "turbo typecheck"
"typecheck": "node ./scripts/run-from-workspace-root.mjs turbo typecheck"
},
"devDependencies": {
"@capakraken/eslint-config": "workspace:*",
@@ -35,6 +40,12 @@
"node": ">=20.0.0",
"pnpm": ">=9.0.0"
},
"pnpm": {
"overrides": {
"flatted": "^3.4.2",
"picomatch": "^4.0.4"
}
},
"packageManager": "pnpm@9.14.2",
"dependencies": {
"framer-motion": "^12.38.0"
+108
View File
@@ -0,0 +1,108 @@
import { readdir, readFile, stat } from "node:fs/promises";
import path from "node:path";
import process from "node:process";
import { resolveRealWorkspaceRoot } from "./load-env.mjs";
// Repository root (resolved through any symlinks) that all paths are checked against.
const rootDir = resolveRealWorkspaceRoot();
// Workspace folders whose direct children are packages with a package.json to audit.
const workspaceDirs = ["packages", "tooling"];
// Human-readable problems collected across all packages; reported in one batch at the end.
const violations = [];
// Returns true when `targetPath` exists on disk (any file type), false otherwise.
async function pathExists(targetPath) {
  return stat(targetPath).then(
    () => true,
    () => false,
  );
}
// Lists relative package.json paths for every direct subdirectory of `baseDir`
// (e.g. "packages/db/package.json"). Returns [] when the base dir is absent.
async function listPackageJsonFiles(baseDir) {
  const absoluteBaseDir = path.join(rootDir, baseDir);
  if (!(await pathExists(absoluteBaseDir))) {
    return [];
  }
  const candidates = [];
  for (const entry of await readdir(absoluteBaseDir, { withFileTypes: true })) {
    if (entry.isDirectory()) {
      candidates.push(path.join(baseDir, entry.name, "package.json"));
    }
  }
  return candidates;
}
// Walks a package.json "exports" value depth-first, recording every string leaf
// together with its dotted key path (e.g. "exports...import") into `targets`.
// Non-object, non-string values (numbers, null, arrays of them) are ignored.
function collectExportTargets(value, keyPath, targets) {
  if (typeof value === "string") {
    targets.push({ keyPath, target: value });
  } else if (value && typeof value === "object") {
    for (const [key, nestedValue] of Object.entries(value)) {
      collectExportTargets(nestedValue, `${keyPath}.${key}`, targets);
    }
  }
}
// For a wildcard export target like "./dist/*.js", returns the deepest
// directory before the "*" ("./dist"); non-wildcard targets pass through
// unchanged, and a wildcard with no preceding slash maps to ".".
function getWildcardBaseDir(target) {
  const wildcardIndex = target.indexOf("*");
  if (wildcardIndex < 0) {
    return target;
  }
  const prefix = target.slice(0, wildcardIndex);
  const slashIndex = prefix.lastIndexOf("/");
  return slashIndex < 0 ? "." : prefix.slice(0, slashIndex);
}
// Gather every candidate package.json across the audited workspace folders.
const packageJsonFiles = (
  await Promise.all(workspaceDirs.map((workspaceDir) => listPackageJsonFiles(workspaceDir)))
).flat();
for (const packageJsonFile of packageJsonFiles) {
  const packageJsonPath = path.join(rootDir, packageJsonFile);
  // Workspace folders may contain non-package directories; skip them silently.
  if (!(await pathExists(packageJsonPath))) {
    continue;
  }
  const packageDir = path.dirname(packageJsonPath);
  const packageJson = JSON.parse(await readFile(packageJsonPath, "utf8"));
  const exportsField = packageJson.exports;
  // Only object-style "exports" maps are validated; packages without one pass.
  if (!exportsField || typeof exportsField !== "object") {
    continue;
  }
  const exportTargets = [];
  collectExportTargets(exportsField, "exports", exportTargets);
  for (const exportTarget of exportTargets) {
    const exportPath = exportTarget.target;
    // Wildcard targets ("./dist/*.js") can only be checked down to the
    // directory that precedes the "*"; exact targets are checked verbatim.
    const containsWildcard = exportPath.includes("*");
    const relativeCheckPath = containsWildcard ? getWildcardBaseDir(exportPath) : exportPath;
    const absoluteCheckPath = path.resolve(packageDir, relativeCheckPath);
    const relativeFromRoot = path.relative(rootDir, absoluteCheckPath) || ".";
    // Reject targets that escape the repository (e.g. "../../outside").
    if (!relativeFromRoot || relativeFromRoot.startsWith("..")) {
      violations.push(
        `${packageJsonFile}: ${exportTarget.keyPath} points outside the repository: ${exportPath}`,
      );
      continue;
    }
    if (!(await pathExists(absoluteCheckPath))) {
      violations.push(
        `${packageJsonFile}: ${exportTarget.keyPath} references a missing path: ${exportPath}`,
      );
    }
  }
}
// Report all violations in one batch and fail the process so CI blocks the change.
if (violations.length > 0) {
  console.error("Workspace export check failed:");
  for (const violation of violations) {
    console.error(`- ${violation}`);
  }
  process.exit(1);
}
console.log("Workspace exports passed.");
+150
View File
@@ -0,0 +1,150 @@
import { readdir, readFile, stat } from "node:fs/promises";
import path from "node:path";
import process from "node:process";
import { resolveRealWorkspaceRoot } from "./load-env.mjs";
// Repository root every reported path is made relative to.
const rootDir = resolveRealWorkspaceRoot();
// Top-level folders whose source trees are scanned for relative imports.
const workspaceDirs = ["apps", "packages", "tooling"];
// Files with one of these extensions are scanned for import/export statements.
const sourceFileExtensions = new Set([
  ".ts",
  ".tsx",
  ".mts",
  ".cts",
  ".js",
  ".jsx",
  ".mjs",
  ".cjs",
]);
// Extensions tried, in order, when resolving an import; the leading "" entry
// lets an import that already names an existing file match as-is.
const resolutionExtensions = [
  "",
  ".ts",
  ".tsx",
  ".mts",
  ".cts",
  ".js",
  ".jsx",
  ".mjs",
  ".cjs",
];
// Problems collected across the whole scan; reported in one batch at the end.
const violations = [];
// Matches static `import ... from "./x"` / `export ... from "./x"` / bare
// `import "./x"` (group 1) and dynamic `import("./x")` (group 2), capturing
// only relative specifiers ("./", "../"). Heuristic regex: it can also match
// inside comments or string literals, which is acceptable for a CI lint.
const importPattern =
  /(?:import|export)\s+(?:[^"'`]*?\s+from\s+)?["'`](\.{1,2}\/[^"'`]+)["'`]|import\s*\(\s*["'`](\.{1,2}\/[^"'`]+)["'`]\s*\)/g;
// Existence probe: true when `targetPath` can be stat'ed, false on any error
// (missing path, permission denied, ...).
async function pathExists(targetPath) {
  return stat(targetPath).then(
    () => true,
    () => false,
  );
}
// Recursively gathers source files under `directoryPath` into `result`,
// skipping dependency and build-output directories. Directories whose name
// starts with ".next" are skipped too (covers ".next" and stale ".next.*").
async function collectSourceFiles(directoryPath, result) {
  const skippedNames = [
    "node_modules",
    ".turbo",
    "dist",
    "coverage",
    "playwright-report",
    "test-results",
  ];
  for (const entry of await readdir(directoryPath, { withFileTypes: true })) {
    const absolutePath = path.join(directoryPath, entry.name);
    if (entry.isDirectory()) {
      const skip = skippedNames.includes(entry.name) || entry.name.startsWith(".next");
      if (!skip) {
        await collectSourceFiles(absolutePath, result);
      }
    } else if (entry.isFile() && sourceFileExtensions.has(path.extname(entry.name))) {
      result.push(absolutePath);
    }
  }
}
// Candidate files for an extensionless import: for each known extension, the
// path with that extension appended plus an index file inside the path.
function getExtensionlessCandidates(basePath) {
  const candidates = [];
  for (const extension of resolutionExtensions) {
    candidates.push(`${basePath}${extension}`, path.join(basePath, `index${extension}`));
  }
  return candidates;
}
// Candidate files for an import that spells an extension. TypeScript sources
// import emitted ".js"/".mjs"/".cjs" names, so those also resolve to their TS
// siblings (".ts", ".tsx", ".mts", ".cts"); other extensions must match exactly.
// Index files inside the extension-stripped path are tried as well.
function getExplicitExtensionCandidates(basePath, importExtension) {
  const baseWithoutExtension = basePath.slice(0, -importExtension.length);
  const jsStyleExtensions = new Set([".js", ".mjs", ".cjs"]);
  const siblingExtensions = jsStyleExtensions.has(importExtension)
    ? [importExtension, ".ts", ".tsx", ".mts", ".cts"]
    : [importExtension];
  const candidates = [];
  for (const extension of siblingExtensions) {
    candidates.push(
      `${baseWithoutExtension}${extension}`,
      path.join(baseWithoutExtension, `index${extension}`),
    );
  }
  return candidates;
}
// True when `importPath`, as written in `fromFilePath`, resolves to a real
// file: explicit-extension candidates when the import spells a known
// extension, otherwise extension probing plus index-file lookups. Unknown
// extensions (e.g. ".json") fall through to the extensionless path, whose
// "" entry checks the literal path itself.
async function resolvesImport(fromFilePath, importPath) {
  const resolvedBasePath = path.resolve(path.dirname(fromFilePath), importPath);
  const rawExtension = path.extname(importPath);
  const hasKnownExtension = rawExtension !== "" && resolutionExtensions.includes(rawExtension);
  const candidates = hasKnownExtension
    ? getExplicitExtensionCandidates(resolvedBasePath, rawExtension)
    : getExtensionlessCandidates(resolvedBasePath);
  for (const candidate of candidates) {
    if (await pathExists(candidate)) {
      return true;
    }
  }
  return false;
}
// Collect every source file across the scanned workspace folders.
const sourceFiles = [];
for (const workspaceDir of workspaceDirs) {
  const absoluteWorkspaceDir = path.join(rootDir, workspaceDir);
  if (!(await pathExists(absoluteWorkspaceDir))) {
    continue;
  }
  await collectSourceFiles(absoluteWorkspaceDir, sourceFiles);
}
for (const sourceFile of sourceFiles) {
  const content = await readFile(sourceFile, "utf8");
  const relativeSourceFile = path.relative(rootDir, sourceFile);
  // De-duplicate specifiers so each unresolved import is reported once per file.
  const imports = new Set();
  for (const match of content.matchAll(importPattern)) {
    // Group 1: static import/export form; group 2: dynamic import() form.
    const importPath = match[1] ?? match[2];
    if (importPath) {
      imports.add(importPath.trim());
    }
  }
  for (const importPath of imports) {
    if (!(await resolvesImport(sourceFile, importPath))) {
      violations.push(`${relativeSourceFile}: unresolved relative import ${importPath}`);
    }
  }
}
// Report everything in one batch and fail so CI blocks unresolved imports.
if (violations.length > 0) {
  console.error("Workspace import check failed:");
  for (const violation of violations) {
    console.error(`- ${violation}`);
  }
  process.exit(1);
}
console.log("Workspace imports passed.");
+86
View File
@@ -0,0 +1,86 @@
#!/usr/bin/env node
import { existsSync, readdirSync, rmSync } from "node:fs";
import { join, resolve } from "node:path";
import { spawnSync } from "node:child_process";
const webDir = resolve("apps/web");
// Names of stale ".next.<suffix>" directories directly under apps/web,
// sorted for stable output; empty when apps/web does not exist.
function listStaleNextArtifacts() {
  if (!existsSync(webDir)) {
    return [];
  }
  const staleNames = [];
  for (const entry of readdirSync(webDir, { withFileTypes: true })) {
    if (entry.isDirectory() && entry.name.startsWith(".next.")) {
      staleNames.push(entry.name);
    }
  }
  return staleNames.sort();
}
// Best-effort local removal of the named directories under apps/web;
// `force: true` keeps this silent when a path has already vanished.
function removeLocally(names) {
  names.forEach((name) => {
    rmSync(join(webDir, name), { recursive: true, force: true });
  });
}
// Fallback removal for artifacts the current user cannot delete (e.g. files
// created root-owned by a containerized build): mounts apps/web into an
// Alpine container and removes the named paths from inside it.
// Returns false when Docker is unavailable or the removal command fails.
function removeWithDocker(names) {
  // Probe for a working `docker` binary; a spawn error leaves status !== 0.
  const dockerCheck = spawnSync("docker", ["--version"], {
    stdio: "ignore",
  });
  if (dockerCheck.status !== 0) {
    return false;
  }
  // POSIX single-quote escaping: close the quote, emit '"'"' , reopen it —
  // keeps arbitrary directory names safe inside the `sh -lc` command string.
  const shellQuotedPaths = names
    .map((name) => `/work/${name}`)
    .map((path) => `'${path.replaceAll("'", "'\"'\"'")}'`)
    .join(" ");
  const result = spawnSync(
    "docker",
    [
      "run",
      "--rm",
      "-v",
      `${webDir}:/work`,
      "alpine:3.20",
      "sh",
      "-lc",
      `rm -rf -- ${shellQuotedPaths}`,
    ],
    {
      stdio: "inherit",
    },
  );
  return result.status === 0;
}
// Entry point: detect stale artifacts, try local removal first, then fall
// back to Docker for anything the current user could not delete.
const initialArtifacts = listStaleNextArtifacts();
if (initialArtifacts.length === 0) {
  console.log("No stale Next.js artifacts found.");
  process.exit(0);
}
removeLocally(initialArtifacts);
const remainingArtifacts = listStaleNextArtifacts();
if (remainingArtifacts.length === 0) {
  console.log(`Removed stale Next.js artifacts: ${initialArtifacts.join(", ")}`);
  process.exit(0);
}
// Local removal left survivors (typically ownership problems); retry via Docker.
const dockerRemoved = removeWithDocker(remainingArtifacts);
const finalArtifacts = listStaleNextArtifacts();
if (!dockerRemoved || finalArtifacts.length > 0) {
  console.error("Failed to remove stale Next.js artifacts:");
  for (const artifact of finalArtifacts) {
    console.error(`- ${artifact}`);
  }
  console.error("Run the cleanup in an environment with Docker access or fix ownership before retrying.");
  process.exit(1);
}
console.log(`Removed stale Next.js artifacts: ${initialArtifacts.join(", ")}`);
+5 -20
View File
@@ -1,10 +1,10 @@
#!/usr/bin/env node
import { URL } from "node:url";
import { loadWorkspaceEnv, resolveWorkspaceEnvPaths } from "./load-env.mjs";
import { getExpectedDatabaseName, inspectDatabaseUrl } from "./db-target-guard.mjs";
const loadedEnvPaths = loadWorkspaceEnv();
const expectedDatabase = process.argv[2] ?? "capakraken";
const expectedDatabase = process.argv[2] ?? getExpectedDatabaseName();
const rawUrl = process.env.DATABASE_URL;
const expectedEnvSources = loadedEnvPaths.length > 0
? loadedEnvPaths.join(", ")
@@ -15,25 +15,10 @@ if (!rawUrl) {
process.exit(1);
}
let parsed;
try {
parsed = new URL(rawUrl);
const result = inspectDatabaseUrl(rawUrl, expectedDatabase);
console.log(`DB target OK: ${result.target}`);
} catch (error) {
console.error(`DATABASE_URL is invalid: ${error instanceof Error ? error.message : String(error)}`);
console.error(error instanceof Error ? error.message : String(error));
process.exit(1);
}
const databaseName = parsed.pathname.replace(/^\/+/, "");
const target = `${parsed.protocol}//${decodeURIComponent(parsed.username)}@${parsed.hostname}${parsed.port ? `:${parsed.port}` : ""}/${databaseName}`;
if (!databaseName) {
console.error(`DATABASE_URL does not contain a database name. Target=${target}`);
process.exit(1);
}
if (databaseName !== expectedDatabase) {
console.error(`Unexpected database target '${databaseName}'. Expected '${expectedDatabase}'. Target=${target}`);
process.exit(1);
}
console.log(`DB target OK: ${target}`);
+86
View File
@@ -0,0 +1,86 @@
import { URL } from "node:url";
// Renders a password-free "protocol//user@host[:port]/db" label for log and
// error output; the URL's password component is deliberately omitted.
export function formatDatabaseTarget(parsedUrl, databaseName) {
  const portSuffix = parsedUrl.port ? `:${parsedUrl.port}` : "";
  const user = decodeURIComponent(parsedUrl.username);
  return `${parsedUrl.protocol}//${user}@${parsedUrl.hostname}${portSuffix}/${databaseName}`;
}
// Parses `rawUrl` (a DATABASE_URL), verifies it names `expectedDatabase`, and
// returns { databaseName, expectedDatabase, target } where `target` is a
// password-free connection label. Throws with an actionable message when the
// URL is missing, unparseable, lacks a database name, or names another database.
export function inspectDatabaseUrl(rawUrl, expectedDatabase = "capakraken") {
  if (!rawUrl) {
    throw new Error("DATABASE_URL is not configured.");
  }
  let parsedUrl;
  try {
    parsedUrl = new URL(rawUrl);
  } catch (error) {
    const reason = error instanceof Error ? error.message : String(error);
    throw new Error(`DATABASE_URL is invalid: ${reason}`);
  }
  // The pathname carries the database name; strip the leading slash(es).
  const databaseName = parsedUrl.pathname.replace(/^\/+/, "");
  const portSuffix = parsedUrl.port ? `:${parsedUrl.port}` : "";
  const target = `${parsedUrl.protocol}//${decodeURIComponent(parsedUrl.username)}@${parsedUrl.hostname}${portSuffix}/${databaseName}`;
  if (!databaseName) {
    throw new Error(`DATABASE_URL does not contain a database name. Target=${target}`);
  }
  if (databaseName !== expectedDatabase) {
    throw new Error(`Unexpected database target '${databaseName}'. Expected '${expectedDatabase}'. Target=${target}`);
  }
  return {
    databaseName,
    expectedDatabase,
    target,
  };
}
// Extracts the positional command tokens (e.g. ["db", "push"]) from a prisma
// CLI argument list. Flags are skipped; flags that consume a separate value
// argument skip that value too; parsing stops at the "--" passthrough
// separator. Note: "--flag=value" forms start with "-" and are skipped whole.
function collectPrismaCommandTokens(args) {
  const tokens = [];
  for (let index = 0; index < args.length; index += 1) {
    const arg = args[index];
    if (arg === "--") {
      break;
    }
    // These flags take their value as the next argument; skip both.
    if (arg === "--schema" || arg === "--url" || arg === "--telemetry-information") {
      index += 1;
      continue;
    }
    if (arg.startsWith("-")) {
      continue;
    }
    tokens.push(arg);
  }
  return tokens;
}
// Returns true when the prisma invocation actually connects to the configured
// database and therefore must pass the DATABASE_URL target guard. Schema-only
// commands (generate, validate, format) stay unguarded so they run without a
// live database; help invocations never touch the database and are exempt.
export function shouldGuardPrismaCommand(args) {
  if (args.includes("--help") || args.includes("-h")) {
    return false;
  }
  const [command, subcommand] = collectPrismaCommandTokens(args);
  if (!command) {
    return false;
  }
  if (command === "db") {
    // "db seed" executes the seed script against DATABASE_URL, exactly the
    // kind of destructive run the guard exists to catch, so it is guarded
    // alongside push/pull/execute.
    return (
      subcommand === "push"
      || subcommand === "pull"
      || subcommand === "execute"
      || subcommand === "seed"
    );
  }
  if (command === "migrate") {
    return true;
  }
  return command === "studio" || command === "introspect";
}
// Expected database name for the guard: the trimmed
// CAPAKRAKEN_EXPECTED_DB_NAME override when set and non-blank,
// otherwise the default "capakraken".
export function getExpectedDatabaseName() {
  const override = process.env.CAPAKRAKEN_EXPECTED_DB_NAME;
  const trimmed = override?.trim();
  return trimmed ? trimmed : "capakraken";
}
+35
View File
@@ -0,0 +1,35 @@
import assert from "node:assert/strict";
import { describe, it } from "node:test";
import {
inspectDatabaseUrl,
shouldGuardPrismaCommand,
} from "./db-target-guard.mjs";
// Unit tests (node:test) for the Prisma database-target guard helpers.
describe("db target guard", () => {
  it("accepts the expected capakraken database target", () => {
    const result = inspectDatabaseUrl(
      "postgresql://capakraken:secret@localhost:5432/capakraken",
      "capakraken",
    );
    assert.equal(result.databaseName, "capakraken");
    assert.equal(result.expectedDatabase, "capakraken");
    // The reported target label must omit the password component.
    assert.equal(result.target, "postgresql://capakraken@localhost:5432/capakraken");
  });
  it("rejects a mismatched database target", () => {
    assert.throws(
      () => inspectDatabaseUrl("postgresql://capakraken:secret@localhost:5432/planarchy", "capakraken"),
      /Unexpected database target 'planarchy'\. Expected 'capakraken'\./,
    );
  });
  it("guards only prisma commands that actually target a database", () => {
    // Schema-only commands must stay unguarded so they run without a live DB.
    assert.equal(shouldGuardPrismaCommand(["generate"]), false);
    assert.equal(shouldGuardPrismaCommand(["validate", "--schema", "./prisma/schema.prisma"]), false);
    assert.equal(shouldGuardPrismaCommand(["db", "push", "--schema", "./prisma/schema.prisma"]), true);
    assert.equal(shouldGuardPrismaCommand(["migrate", "deploy", "--schema", "./prisma/schema.prisma"]), true);
    assert.equal(shouldGuardPrismaCommand(["studio", "--schema", "./prisma/schema.prisma"]), true);
    // Help invocations never touch the database.
    assert.equal(shouldGuardPrismaCommand(["db", "push", "--help"]), false);
  });
});
+16 -1
View File
@@ -2,9 +2,12 @@
import { spawnSync } from "node:child_process";
import { resolve } from "node:path";
import { loadWorkspaceEnv } from "./load-env.mjs";
import { getExpectedDatabaseName, inspectDatabaseUrl, shouldGuardPrismaCommand } from "./db-target-guard.mjs";
import { loadWorkspaceEnv, resolveRealWorkspaceRoot } from "./load-env.mjs";
loadWorkspaceEnv();
const workspaceRoot = resolveRealWorkspaceRoot();
process.chdir(workspaceRoot);
const args = process.argv.slice(2);
@@ -18,9 +21,21 @@ const prismaArgs = hasSchemaArg
? args
: [...args, "--schema", resolve("packages/db/prisma/schema.prisma")];
if (shouldGuardPrismaCommand(prismaArgs)) {
try {
inspectDatabaseUrl(process.env.DATABASE_URL, getExpectedDatabaseName());
} catch (error) {
console.error(error instanceof Error ? error.message : String(error));
console.error("Refusing to run Prisma against an unexpected database target.");
console.error("Use the repo env files for CapaKraken, or set CAPAKRAKEN_EXPECTED_DB_NAME explicitly if you intentionally target another database.");
process.exit(1);
}
}
const result = spawnSync("pnpm", ["--filter", "@capakraken/db", "exec", "prisma", ...prismaArgs], {
stdio: "inherit",
env: process.env,
cwd: workspaceRoot,
});
if (result.error) {
+28
View File
@@ -0,0 +1,28 @@
#!/usr/bin/env node
import { spawnSync } from "node:child_process";
import process from "node:process";
import { resolveRealWorkspaceRoot } from "./load-env.mjs";
// Re-runs the given command from the real workspace root so tools that key
// off process.cwd() (turbo, pnpm) behave identically from any subdirectory.
const [command, ...commandArgs] = process.argv.slice(2);
if (command === undefined) {
  console.error("Usage: node scripts/run-from-workspace-root.mjs <command> [args...]");
  process.exit(1);
}
const workspaceRoot = resolveRealWorkspaceRoot();
process.chdir(workspaceRoot);
const result = spawnSync(command, commandArgs, {
  stdio: "inherit",
  env: process.env,
  cwd: workspaceRoot,
});
if (result.error) {
  // Spawn failure (e.g. command not found) — surface the reason and fail.
  console.error(result.error.message);
  process.exit(1);
}
// Propagate the child's exit code; a null status (signal kill) becomes 1.
process.exit(result.status ?? 1);