refactor(api): extract import/export procedures

This commit is contained in:
2026-03-31 20:36:46 +02:00
parent 1d3f1a007f
commit f14d2679cc
5 changed files with 572 additions and 152 deletions
+11 -151
View File
@@ -1,157 +1,17 @@
import { BlueprintTarget, PermissionKey } from "@capakraken/shared";
import type { BlueprintFieldDefinition } from "@capakraken/shared";
import { z } from "zod";
import { controllerProcedure, createTRPCRouter, managerProcedure, requirePermission } from "../trpc.js";
import { controllerProcedure, createTRPCRouter, managerProcedure } from "../trpc.js";
import {
exportProjectsCsv,
exportResourcesCsv,
importCsv,
importCsvInputSchema,
} from "./import-export-procedure-support.js";
export const importExportRouter = createTRPCRouter({
/**
* Export resources as CSV.
*/
exportResourcesCSV: controllerProcedure.query(async ({ ctx }) => {
const [resources, globalBlueprints] = await Promise.all([
ctx.db.resource.findMany({
where: { isActive: true },
orderBy: { eid: "asc" },
}),
ctx.db.blueprint.findMany({
where: { target: BlueprintTarget.RESOURCE, isGlobal: true, isActive: true },
select: { fieldDefs: true },
}),
]);
exportResourcesCSV: controllerProcedure.query(({ ctx }) => exportResourcesCsv(ctx)),
// Collect all custom field defs that should appear in exports (showInList = true)
const customDefs = globalBlueprints
.flatMap((b) => b.fieldDefs as unknown as BlueprintFieldDefinition[])
.filter((f) => f.showInList);
exportProjectsCSV: controllerProcedure.query(({ ctx }) => exportProjectsCsv(ctx)),
function escapeCSV(v: unknown): string {
const s = v === null || v === undefined ? "" : String(v);
return s.includes(",") || s.includes('"') || s.includes("\n")
? `"${s.replace(/"/g, '""')}"`
: s;
}
const builtinHeaders = ["eid", "displayName", "email", "chapter", "lcrCents", "ucrCents", "currency", "chargeabilityTarget"];
const customHeaders = customDefs.map((f) => f.label);
const headers = [...builtinHeaders, ...customHeaders];
const rows = resources.map((r) => {
const df = r.dynamicFields as unknown as Record<string, unknown> ?? {};
const builtins = [r.eid, r.displayName, r.email, r.chapter ?? "", r.lcrCents, r.ucrCents, r.currency, r.chargeabilityTarget];
const customs = customDefs.map((f) => df[f.key] ?? "");
return [...builtins, ...customs].map(escapeCSV).join(",");
});
return [headers.map(escapeCSV).join(","), ...rows].join("\n");
}),
/**
* Export projects as CSV.
*/
exportProjectsCSV: controllerProcedure.query(async ({ ctx }) => {
const [projects, globalBlueprints] = await Promise.all([
ctx.db.project.findMany({ orderBy: { shortCode: "asc" } }),
ctx.db.blueprint.findMany({
where: { target: BlueprintTarget.PROJECT, isGlobal: true, isActive: true },
select: { fieldDefs: true },
}),
]);
const customDefs = globalBlueprints
.flatMap((b) => b.fieldDefs as unknown as BlueprintFieldDefinition[])
.filter((f) => f.showInList);
function escapeCSV(v: unknown): string {
const s = v === null || v === undefined ? "" : String(v);
return s.includes(",") || s.includes('"') || s.includes("\n")
? `"${s.replace(/"/g, '""')}"`
: s;
}
const builtinHeaders = ["shortCode", "name", "orderType", "status", "budgetCents", "startDate", "endDate", "winProbability"];
const headers = [...builtinHeaders, ...customDefs.map((f) => f.label)];
const rows = projects.map((p) => {
const df = p.dynamicFields as unknown as Record<string, unknown> ?? {};
const builtins = [
p.shortCode, p.name, p.orderType, p.status, p.budgetCents,
p.startDate.toISOString().split("T")[0],
p.endDate.toISOString().split("T")[0],
p.winProbability,
];
return [...builtins, ...customDefs.map((f) => df[f.key] ?? "")].map(escapeCSV).join(",");
});
return [headers.map(escapeCSV).join(","), ...rows].join("\n");
}),
/**
* Import resources from CSV data (parsed client-side).
*/
importCSV: managerProcedure
.input(
z.object({
entityType: z.enum(["resources", "projects", "allocations"]),
rows: z.array(z.record(z.string(), z.string())),
dryRun: z.boolean().default(true),
}),
)
.mutation(async ({ ctx, input }) => {
requirePermission(ctx, PermissionKey.IMPORT_DATA);
const { entityType, rows, dryRun } = input;
const results = {
total: rows.length,
created: 0,
updated: 0,
errors: [] as { row: number; message: string }[],
dryRun,
};
if (dryRun) {
// Validate without committing
return { ...results, message: `Dry run: ${rows.length} rows validated` };
}
// Basic import logic per entity type
for (let i = 0; i < rows.length; i++) {
const row = rows[i];
if (!row) continue;
try {
if (entityType === "resources") {
const existing = await ctx.db.resource.findFirst({
where: { eid: row["eid"] ?? "" },
});
if (existing) {
await ctx.db.resource.update({
where: { id: existing.id },
data: {
displayName: row["displayName"] ?? existing.displayName,
email: row["email"] ?? existing.email,
chapter: row["chapter"] ?? existing.chapter,
lcrCents: row["lcrCents"] ? parseInt(row["lcrCents"]) : existing.lcrCents,
},
});
results.updated++;
} else {
results.errors.push({ row: i + 1, message: "New resource creation via import requires full data" });
}
}
} catch (err) {
results.errors.push({ row: i + 1, message: err instanceof Error ? err.message : "Unknown error" });
}
}
await ctx.db.auditLog.create({
data: {
entityType: entityType,
entityId: "bulk-import",
action: "IMPORT",
changes: { summary: results },
},
});
return results;
}),
.input(importCsvInputSchema)
.mutation(({ ctx, input }) => importCsv(ctx, input)),
});