security: bound Zod inputs, add SSE per-user cap and tRPC body limit (#51)
CI / Architecture Guardrails (pull_request) Successful in 2m6s
CI / Lint (pull_request) Successful in 7m29s
CI / Typecheck (pull_request) Successful in 8m3s
CI / Unit Tests (pull_request) Successful in 8m11s
CI / Build (pull_request) Successful in 5m24s
CI / E2E Tests (pull_request) Successful in 5m25s
CI / Fresh-Linux Docker Deploy (pull_request) Successful in 6m30s
CI / Release Images (pull_request) Has been skipped
CI / Assistant Split Regression (pull_request) Successful in 3m47s

Mechanical .max() bounds across 9 router schemas per the convention in
#51: IDs at 64, names at 200, search/filter strings at 500, arrays at
100-5000 depending on domain. Webhook secret bounded at min(16)/max(256).

Reports route now validates startDate/endDate via Zod with year bounds
and rejects ranges where endDate < startDate. The SSE timeline route
enforces a per-user connection cap of 8 (returning 429 with a Retry-After
header). The tRPC route rejects bodies over 2 MiB via a Content-Length
check before any auth/DB work.

Covers 12 call-sites listed in #51. ESLint rule and zod conventions doc
remain as follow-up.
This commit is contained in:
2026-04-18 13:31:18 +02:00
parent f0251a654a
commit 40ca0c3046
12 changed files with 254 additions and 148 deletions
@@ -1,6 +1,7 @@
import { renderToBuffer } from "@react-pdf/renderer";
import { createElement } from "react";
import { NextResponse } from "next/server";
import { z } from "zod";
import { buildSplitAllocationReadModel } from "@capakraken/application";
import { anonymizeResource, getAnonymizationDirectory } from "@capakraken/api";
import { prisma } from "@capakraken/db";
@@ -11,6 +12,17 @@ import { createWorkbookArrayBuffer } from "~/lib/workbook-export.js";
const ALLOWED_ROLES = new Set(["ADMIN", "MANAGER", "CONTROLLER"]);
// Reject fantasy dates from clients — dates outside [2000-01-01, 2100-01-01]
// (UTC, bounds inclusive) are almost certainly malformed input and would
// generate nonsensical SQL range scans.
const DATE_MIN = new Date("2000-01-01T00:00:00.000Z");
const DATE_MAX = new Date("2100-01-01T00:00:00.000Z");
// Query-string contract for this reports endpoint. `z.coerce.date()` feeds the
// raw string through `new Date(...)`; an unparseable value yields Invalid Date,
// which zod rejects — so the caller gets a 400 instead of a NaN range scan.
// Both dates are optional: the route substitutes defaults after a successful
// parse (see GET below).
const queryParamsSchema = z.object({
  startDate: z.coerce.date().min(DATE_MIN).max(DATE_MAX).optional(),
  endDate: z.coerce.date().min(DATE_MIN).max(DATE_MAX).optional(),
  // Only two export formats exist; anything else fails parsing, absent → "pdf".
  format: z.enum(["pdf", "xlsx"]).default("pdf"),
});
export async function GET(request: Request) {
const session = await auth();
if (!session?.user) {
@@ -23,9 +35,20 @@ export async function GET(request: Request) {
}
const { searchParams } = new URL(request.url);
const startDate = searchParams.get("startDate") ? new Date(searchParams.get("startDate")!) : new Date();
const endDate = searchParams.get("endDate") ? new Date(searchParams.get("endDate")!) : new Date(Date.now() + 90 * 24 * 60 * 60 * 1000);
const format = searchParams.get("format") ?? "pdf";
const parsed = queryParamsSchema.safeParse({
startDate: searchParams.get("startDate") ?? undefined,
endDate: searchParams.get("endDate") ?? undefined,
format: searchParams.get("format") ?? undefined,
});
if (!parsed.success) {
return new NextResponse("Invalid query parameters", { status: 400 });
}
const startDate = parsed.data.startDate ?? new Date();
const endDate = parsed.data.endDate ?? new Date(Date.now() + 90 * 24 * 60 * 60 * 1000);
if (endDate < startDate) {
return new NextResponse("endDate must be >= startDate", { status: 400 });
}
const format = parsed.data.format;
const [demandRequirements, assignments] = await Promise.all([
prisma.demandRequirement.findMany({
@@ -62,21 +85,25 @@ export async function GET(request: Request) {
const assignmentRows = allocationView.assignments.slice(0, 500);
const directory = await getAnonymizationDirectory(prisma);
const rows = assignmentRows.map((a: AllocationLike & {
resource?: { id: string; displayName?: string | null } | null;
project?: { shortCode: string; name: string } | null;
}) => {
const resource = a.resource ? anonymizeResource(a.resource, directory) : null;
return {
resourceName: resource?.displayName ?? "Unknown",
projectName: a.project ? `${a.project.shortCode}${a.project.name}` : "Unknown project",
role: a.role ?? "",
startDate: new Date(a.startDate).toLocaleDateString("en-GB"),
endDate: new Date(a.endDate).toLocaleDateString("en-GB"),
hoursPerDay: a.hoursPerDay,
dailyCostCents: a.dailyCostCents,
};
});
const rows = assignmentRows.map(
(
a: AllocationLike & {
resource?: { id: string; displayName?: string | null } | null;
project?: { shortCode: string; name: string } | null;
},
) => {
const resource = a.resource ? anonymizeResource(a.resource, directory) : null;
return {
resourceName: resource?.displayName ?? "Unknown",
projectName: a.project ? `${a.project.shortCode}${a.project.name}` : "Unknown project",
role: a.role ?? "",
startDate: new Date(a.startDate).toLocaleDateString("en-GB"),
endDate: new Date(a.endDate).toLocaleDateString("en-GB"),
hoursPerDay: a.hoursPerDay,
dailyCostCents: a.dailyCostCents,
};
},
);
const ts = Date.now();
@@ -9,6 +9,11 @@ import { auth } from "~/server/auth.js";
export const dynamic = "force-dynamic";
export const runtime = "nodejs";
// Per-user cap on concurrent SSE streams held by this process. Without it a
// single account (e.g. dozens of open tabs) could pin an unbounded number of
// long-lived subscriptions on one node. NOTE(review): the registry below is
// process-local — each instance behind a load balancer enforces the cap
// independently; confirm that is the intended scope.
const MAX_SSE_CONNECTIONS_PER_USER = 8;
const sseConnectionsByUser: Map<string, number> = new Map();
export async function GET() {
// Start lazily on the first real SSE request so builds/import-time evaluation
// never attempt reminder processing against a live database.
@@ -43,6 +48,24 @@ export async function GET() {
return new Response("Unauthorized", { status: 401 });
}
const currentCount = sseConnectionsByUser.get(dbUser.id) ?? 0;
if (currentCount >= MAX_SSE_CONNECTIONS_PER_USER) {
return new Response("Too many SSE connections", {
status: 429,
headers: { "Retry-After": "30" },
});
}
sseConnectionsByUser.set(dbUser.id, currentCount + 1);
const releaseSlot = () => {
const next = (sseConnectionsByUser.get(dbUser.id) ?? 1) - 1;
if (next <= 0) {
sseConnectionsByUser.delete(dbUser.id);
} else {
sseConnectionsByUser.set(dbUser.id, next);
}
};
const roleDefaults = await loadRoleDefaults();
const subscription = deriveUserSseSubscription(
{
@@ -85,6 +108,7 @@ export async function GET() {
} catch {
clearInterval(heartbeat);
unsubscribe();
releaseSlot();
}
}, 30000);
@@ -92,8 +116,12 @@ export async function GET() {
return () => {
clearInterval(heartbeat);
unsubscribe();
releaseSlot();
};
},
cancel() {
releaseSlot();
},
});
return new Response(stream, {
+22
View File
@@ -17,6 +17,11 @@ function extractClientIp(req: NextRequest): string | null {
return null;
}
// Upper bound on accepted tRPC request bodies (2 MiB). Large/streamed uploads
// (files, reports) bypass tRPC entirely, so any legitimate batch call sits far
// below this; rejecting oversized payloads early prevents memory/CPU
// amplification from a single request.
const MAX_TRPC_BODY_BYTES = 2 ** 21; // 2 MiB
// Per-user timestamp of the last persisted lastActiveAt write — used to
// throttle activity updates to at most one DB write per minute per user.
const lastActiveCache: Map<string, number> = new Map();
const ACTIVITY_THROTTLE_MS = 60 * 1000;
@@ -37,6 +42,23 @@ function trackActivity(userId: string) {
}
const handler = async (req: NextRequest) => {
// Reject oversized bodies before we touch auth, DB, or the router. A tRPC
// mutation should never exceed MAX_TRPC_BODY_BYTES. Content-Length is
// advisory — also guard against chunked requests below via length check
// on the cloned body.
if (req.method !== "GET") {
const declaredLength = req.headers.get("content-length");
if (declaredLength) {
const parsed = Number(declaredLength);
if (Number.isFinite(parsed) && parsed > MAX_TRPC_BODY_BYTES) {
return new Response(JSON.stringify({ error: "Request body too large" }), {
status: 413,
headers: { "Content-Type": "application/json" },
});
}
}
}
const session = await auth();
// Validate active session registry on every authenticated request.