chore(repo): initialize planarchy workspace

This commit is contained in:
2026-03-14 14:31:09 +01:00
commit dd55d0e78b
769 changed files with 166461 additions and 0 deletions
+46
View File
@@ -0,0 +1,46 @@
import * as XLSX from "xlsx";
/**
 * Read an Excel (.xlsx, .xls) or CSV file and resolve with the first
 * sheet's contents as an array of row objects.
 * Keys come from the first row (headers). Empty cells become "".
 * Rejects when the FileReader errors or XLSX fails to parse the buffer.
 */
export function parseSpreadsheet(file: File): Promise<Record<string, string>[]> {
  return new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.onerror = () => reject(reader.error);
    reader.onload = (event) => {
      try {
        const buffer = event.target!.result as ArrayBuffer;
        const workbook = XLSX.read(new Uint8Array(buffer), { type: "array" });
        // Only the first sheet is imported; a workbook with no sheets yields [].
        const firstSheetName = workbook.SheetNames[0];
        const firstSheet = firstSheetName ? workbook.Sheets[firstSheetName] : undefined;
        if (!firstSheet) {
          resolve([]);
          return;
        }
        resolve(
          XLSX.utils.sheet_to_json<Record<string, string>>(firstSheet, {
            raw: false, // format all cell values as strings
            defval: "", // keep empty cells so every row has every header key
          }),
        );
      } catch (err) {
        reject(err);
      }
    };
    reader.readAsArrayBuffer(file);
  });
}
/**
 * Heuristic check whether a file is an importable spreadsheet, by MIME
 * type or file extension.
 * Fixes: extension match is now case-insensitive (e.g. "REPORT.XLSX"),
 * and the standard "text/csv" MIME type is recognized.
 */
export function isSpreadsheetFile(file: File): boolean {
  // Browsers report inconsistent MIME types for spreadsheets, so fall back
  // to the (lowercased) file extension.
  const name = file.name.toLowerCase();
  return (
    file.type === "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" ||
    file.type === "application/vnd.ms-excel" ||
    file.type === "text/csv" ||
    name.endsWith(".xlsx") ||
    name.endsWith(".xls") ||
    name.endsWith(".csv")
  );
}
+29
View File
@@ -0,0 +1,29 @@
/**
 * Format a date as dd/mm/yyyy for display in the UI.
 * Input date inputs (type="date") still use yyyy-mm-dd — this is for rendered text only.
 * Returns "" for null/undefined and for unparseable date strings
 * (previously those rendered as the literal "Invalid Date").
 */
export function formatDate(d: Date | string | null | undefined): string {
  if (!d) return "";
  const date = new Date(d);
  // Guard: new Date("garbage") yields an Invalid Date object, which
  // toLocaleDateString would render as "Invalid Date" in the UI.
  if (Number.isNaN(date.getTime())) return "";
  return date.toLocaleDateString("en-GB"); // en-GB → dd/mm/yyyy
}
/**
 * Format a date as "DD MMM" (e.g. "04 Mar") for compact timeline labels.
 */
export function formatDateShort(d: Date | string): string {
  const options: Intl.DateTimeFormatOptions = { day: "2-digit", month: "short" };
  return new Date(d).toLocaleDateString("en-GB", options);
}
/**
 * Format a date as "MMM YY" (e.g. "Mar 26") for timeline month headers.
 */
export function formatMonthYear(d: Date | string): string {
  const options: Intl.DateTimeFormatOptions = { month: "short", year: "2-digit" };
  return new Date(d).toLocaleDateString("en-GB", options);
}
/**
 * Format a date in long form (e.g. "4 March 2026") for descriptive contexts.
 */
export function formatDateLong(d: Date | string): string {
  const options: Intl.DateTimeFormatOptions = {
    day: "numeric",
    month: "long",
    year: "numeric",
  };
  return new Date(d).toLocaleDateString("en-GB", options);
}
+9
View File
@@ -0,0 +1,9 @@
type PlanningEntryIdentity = {
  id: string;
  entityId?: string | null;
  sourceAllocationId?: string | null;
};

/**
 * Resolve the identifier to use when mutating a planning entry:
 * the underlying entity id when present, else the source allocation id,
 * else the entry's own id.
 */
export function getPlanningEntryMutationId(entry: PlanningEntryIdentity): string {
  if (entry.entityId != null) return entry.entityId;
  if (entry.sourceAllocationId != null) return entry.sourceAllocationId;
  return entry.id;
}
+106
View File
@@ -0,0 +1,106 @@
import { parseSpreadsheet } from "./excel.js";
/** One normalized row of an imported scope spreadsheet. */
export interface ParsedScopeRow {
  // 1-based sequence number (parsed from the sheet or auto-assigned).
  sequenceNo: number;
  scopeType: string;
  packageCode: string;
  name: string;
  description: string;
}
/**
 * Known column header aliases for each field.
 * Case-insensitive; exact alias matches win over substring matches.
 */
const FIELD_ALIASES: Record<keyof ParsedScopeRow, string[]> = {
  sequenceNo: ["sequence", "seq", "no", "nr", "#", "number", "sequenceno", "sequence_no"],
  scopeType: ["type", "scope_type", "scopetype", "category", "asset_type", "assettype"],
  packageCode: ["package", "pkg", "package_code", "packagecode", "code"],
  name: ["name", "title", "shot", "asset", "scene", "item", "scope_item", "scopeitem"],
  description: ["description", "desc", "details", "notes", "comment"],
};
/** Lowercase a header and collapse whitespace/underscores/dashes to "_". */
function normalizeHeader(header: string): string {
  return header.trim().toLowerCase().replace(/[\s_-]+/g, "_");
}
/**
 * Map each ParsedScopeRow field to the spreadsheet header that supplies it
 * (or null if no header matches). Each header is claimed by at most one field.
 *
 * Fix: exact alias matches are resolved in a first pass, before any substring
 * matching. Previously a single substring pass ran field-by-field in
 * declaration order, so e.g. a "Notes" header was claimed by sequenceNo
 * (alias "no" is a substring of "notes") instead of description.
 */
function resolveFieldMapping(
  headers: string[],
): Record<keyof ParsedScopeRow, string | null> {
  const mapping: Record<keyof ParsedScopeRow, string | null> = {
    sequenceNo: null,
    scopeType: null,
    packageCode: null,
    name: null,
    description: null,
  };
  const fields = Object.keys(FIELD_ALIASES) as (keyof ParsedScopeRow)[];
  const claimed = new Set<string>();
  // Pass 1: exact alias matches.
  for (const header of headers) {
    if (claimed.has(header)) continue;
    const normalized = normalizeHeader(header);
    for (const field of fields) {
      if (mapping[field] !== null) continue;
      if (FIELD_ALIASES[field].includes(normalized)) {
        mapping[field] = header;
        claimed.add(header);
        break;
      }
    }
  }
  // Pass 2: substring matches for fields still unmapped, using unclaimed headers.
  for (const header of headers) {
    if (claimed.has(header)) continue;
    const normalized = normalizeHeader(header);
    for (const field of fields) {
      if (mapping[field] !== null) continue;
      if (FIELD_ALIASES[field].some((alias) => normalized.includes(alias))) {
        mapping[field] = header;
        claimed.add(header);
        break;
      }
    }
  }
  return mapping;
}
/** Outcome of parsing a scope import spreadsheet. */
export interface ScopeImportResult {
  // Parsed rows; empty when the file had no data or no name column was found.
  rows: ParsedScopeRow[];
  // Human-readable issues encountered during parsing (missing columns, etc.).
  warnings: string[];
  // Which source header (if any) was chosen for each target field.
  mapping: Record<keyof ParsedScopeRow, string | null>;
}
/**
 * Parse a scope spreadsheet into structured rows.
 * Rows without a name are skipped. Missing optional columns fall back to
 * defaults: scopeType "SHOT", auto-numbered sequence, empty strings.
 * A missing "Name" column aborts the import with a warning.
 */
export async function parseScopeImport(file: File): Promise<ScopeImportResult> {
  const rawRows = await parseSpreadsheet(file);
  const warnings: string[] = [];
  if (rawRows.length === 0) {
    const emptyMapping: Record<keyof ParsedScopeRow, string | null> = {
      sequenceNo: null,
      scopeType: null,
      packageCode: null,
      name: null,
      description: null,
    };
    return { rows: [], warnings: ["File contains no data rows."], mapping: emptyMapping };
  }
  const headers = Object.keys(rawRows[0] ?? {});
  const mapping = resolveFieldMapping(headers);
  if (!mapping.name) {
    warnings.push(
      `Could not identify a "Name" column. Available headers: ${headers.join(", ")}`,
    );
    return { rows: [], warnings, mapping };
  }
  // Read a trimmed cell value for a mapped column ("" when unmapped/missing).
  const cell = (raw: Record<string, string>, column: string | null): string =>
    column ? (raw[column] ?? "").trim() : "";
  const rows: ParsedScopeRow[] = [];
  for (const raw of rawRows) {
    const name = cell(raw, mapping.name);
    if (!name) continue; // skip rows with no name
    const seqParsed = parseInt(cell(raw, mapping.sequenceNo), 10);
    rows.push({
      // Fall back to auto-numbering when the cell is absent or not a positive int.
      sequenceNo: Number.isFinite(seqParsed) && seqParsed > 0 ? seqParsed : rows.length + 1,
      scopeType: cell(raw, mapping.scopeType) || "SHOT",
      packageCode: cell(raw, mapping.packageCode),
      name,
      description: cell(raw, mapping.description),
    });
  }
  if (rows.length === 0) {
    warnings.push("No rows with a non-empty name found.");
  }
  if (!mapping.scopeType) warnings.push("No scope type column detected — defaulting to SHOT.");
  if (!mapping.sequenceNo) warnings.push("No sequence number column detected — auto-numbering.");
  return { rows, warnings, mapping };
}
+141
View File
@@ -0,0 +1,141 @@
import * as XLSX from "xlsx";
import type { SkillEntry } from "@planarchy/shared";
/** Employee details extracted from the "Employee Information" sheet. */
export interface ParsedEmployeeInfo {
  displayName?: string;
  areaOfExpertise?: string;
  // Rounded to the nearest whole year.
  yearsOfExperience?: number;
  // Only set when the cell value starts with "http".
  portfolioUrl?: string;
}
/** Combined result of parsing a skill matrix workbook. */
export interface ParsedSkillMatrix {
  employeeInfo: ParsedEmployeeInfo;
  skills: SkillEntry[];
}
/**
 * Maps Excel proficiency (1-4) → Planarchy proficiency (2-5).
 * Returns null for 0, blanks, non-numeric, or out-of-range values
 * (meaning "no experience" — the row is skipped by callers).
 */
function mapProficiency(raw: string): 1 | 2 | 3 | 4 | 5 | null {
  const n = parseInt(raw, 10);
  switch (n) {
    case 1:
      return 2;
    case 2:
      return 3;
    case 3:
      return 4;
    case 4:
      return 5;
    default:
      // NaN, 0, or any other value → no experience.
      return null;
  }
}
/**
 * Convert one skill sheet's rows into SkillEntry objects.
 * Rows with no item name or no experience (proficiency null) are skipped.
 * Rows marked "1" or "2" in the "main skillset" column become main skills,
 * capped at 2 total across ALL sheets via the shared mainSkillSet.
 *
 * Fixes:
 * - The pushed entry previously spread `isMainSkillCandidate` instead of
 *   `isMainSkill`, so entries past the cap were still flagged as main skills.
 * - The cap counter was local to each call; it now uses mainSkillSet.size so
 *   the 2-skill limit holds across sheets (as the call site intends).
 */
function parseSkillSheet(rows: Record<string, string>[], mainSkillSet: Set<string>): SkillEntry[] {
  const skills: SkillEntry[] = [];
  for (const row of rows) {
    const category = (row["category"] ?? "").trim();
    const item = (row["item"] ?? "").trim();
    const property = (row["property"] ?? "").trim();
    const mainSkillRaw = (row["main skillset"] ?? "").trim();
    if (!item) continue;
    const proficiency = mapProficiency(property);
    if (proficiency === null) continue; // skip 0 / no experience
    const isMainSkillCandidate = mainSkillRaw === "1" || mainSkillRaw === "2";
    // Honor the cross-sheet cap; an item already registered stays a main skill.
    const isMainSkill =
      isMainSkillCandidate && (mainSkillSet.has(item) || mainSkillSet.size < 2);
    if (isMainSkill) {
      mainSkillSet.add(item);
    }
    skills.push({
      skill: item,
      ...(category ? { category } : {}),
      proficiency,
      ...(isMainSkill ? { isMainSkill: true } : {}),
    });
  }
  return skills;
}
/**
 * Parse a skill matrix workbook (xlsx ArrayBuffer) into structured data.
 * Returns ParsedSkillMatrix with employeeInfo and merged skills array.
 */
export function parseSkillMatrixWorkbook(data: ArrayBuffer): ParsedSkillMatrix {
  const workbook = XLSX.read(new Uint8Array(data), { type: "array" });
  // Read a named sheet as row objects; a missing sheet yields [].
  const sheetRows = (name: string): Record<string, string>[] => {
    const sheet = workbook.Sheets[name];
    return sheet
      ? XLSX.utils.sheet_to_json<Record<string, string>>(sheet, { raw: false, defval: "" })
      : [];
  };
  const employeeInfo = parseEmployeeInfo(sheetRows("Employee Information"));
  // Shared set so main skills are tracked across both sheets (max 2 total).
  const mainSkillSet = new Set<string>();
  const parsed = [
    ...parseSkillSheet(sheetRows("Software Skills"), mainSkillSet),
    ...parseSkillSheet(sheetRows("Technical Skillset"), mainSkillSet),
  ];
  // Merge: deduplicate by skill name, keeping the higher-proficiency entry.
  const merged = new Map<string, SkillEntry>();
  for (const entry of parsed) {
    const prior = merged.get(entry.skill);
    if (!prior || entry.proficiency > prior.proficiency) {
      merged.set(entry.skill, entry);
    }
  }
  return {
    employeeInfo,
    skills: Array.from(merged.values()),
  };
}
/**
 * Fuzzy match an areaOfExpertise string against a list of role names.
 * Exact (case-insensitive) matches win; otherwise the first role where
 * either string contains the other. Returns null when nothing matches.
 */
export function matchRoleName(areaOfExpertise: string, roleNames: string[]): string | null {
  if (!areaOfExpertise) return null;
  const needle = areaOfExpertise.toLowerCase().trim();
  for (const role of roleNames) {
    if (role.toLowerCase() === needle) return role;
  }
  for (const role of roleNames) {
    const haystack = role.toLowerCase();
    if (needle.includes(haystack) || haystack.includes(needle)) return role;
  }
  return null;
}
+6
View File
@@ -0,0 +1,6 @@
"use client";
import type { AppRouter } from "@planarchy/api/router";
import { createTRPCReact } from "@trpc/react-query";
/** Typed tRPC React hooks bound to the server's AppRouter. */
export const trpc = createTRPCReact<AppRouter>();
+50
View File
@@ -0,0 +1,50 @@
"use client";
import { QueryClient, QueryClientProvider } from "@tanstack/react-query";
import { httpBatchLink, loggerLink } from "@trpc/client";
import { SessionProvider } from "next-auth/react";
import { useState } from "react";
import { trpc } from "./client.js";
/** Resolve the base URL for the tRPC endpoint in browser, Vercel, and local dev. */
function getBaseUrl() {
  // In the browser, use a URL relative to the current origin.
  if (typeof window !== "undefined") return window.location.origin;
  // On Vercel the deployment host is provided without a protocol.
  const vercelUrl = process.env["VERCEL_URL"];
  if (vercelUrl) return `https://${vercelUrl}`;
  // Local dev server fallback.
  return `http://localhost:${process.env["PORT"] ?? 3100}`;
}
/**
 * Client-side provider wiring next-auth session, tRPC, and react-query.
 * Both clients are created once per mount via useState initializers so
 * they survive re-renders.
 */
export function TRPCProvider({ children }: { children: React.ReactNode }) {
  const [queryClient] = useState(() => {
    return new QueryClient({
      defaultOptions: {
        queries: {
          staleTime: 30 * 1000, // 30 seconds
          retry: 1,
        },
      },
    });
  });
  const [trpcClient] = useState(() => {
    // Log all requests in development; otherwise only failed responses.
    const logger = loggerLink({
      enabled: (opts) =>
        process.env["NODE_ENV"] === "development" ||
        (opts.direction === "down" && opts.result instanceof Error),
    });
    const http = httpBatchLink({ url: `${getBaseUrl()}/api/trpc` });
    return trpc.createClient({ links: [logger, http] });
  });
  return (
    <SessionProvider>
      <trpc.Provider client={trpcClient} queryClient={queryClient}>
        <QueryClientProvider client={queryClient}>{children}</QueryClientProvider>
      </trpc.Provider>
    </SessionProvider>
  );
}