chore(repo): initialize planarchy workspace
This commit is contained in:
@@ -0,0 +1,106 @@
|
||||
import { parseSpreadsheet } from "./excel.js";
|
||||
|
||||
/** One scope line item extracted from an imported spreadsheet row. */
export interface ParsedScopeRow {
  /** 1-based ordering number; auto-assigned by the parser when the sheet has no usable sequence column. */
  sequenceNo: number;
  /** Scope category; "SHOT" is used as the fallback elsewhere in this module when the column is missing or blank. */
  scopeType: string;
  /** Package identifier; empty string when the column is absent. */
  packageCode: string;
  /** Required display name; rows with a blank name are skipped by the parser. */
  name: string;
  /** Free-text description; empty string when the column is absent. */
  description: string;
}
|
||||
|
||||
/**
 * Known column header aliases for each {@link ParsedScopeRow} field.
 * Headers are compared after normalizeHeader() (trimmed, lowercased,
 * whitespace/hyphen/underscore runs collapsed to "_"), so aliases here
 * are written in that normalized form. Matching is case-insensitive and
 * the first header that matches a field wins.
 */
const FIELD_ALIASES: Record<keyof ParsedScopeRow, string[]> = {
  sequenceNo: ["sequence", "seq", "no", "nr", "#", "number", "sequenceno", "sequence_no"],
  scopeType: ["type", "scope_type", "scopetype", "category", "asset_type", "assettype"],
  packageCode: ["package", "pkg", "package_code", "packagecode", "code"],
  name: ["name", "title", "shot", "asset", "scene", "item", "scope_item", "scopeitem"],
  description: ["description", "desc", "details", "notes", "comment"],
};
|
||||
|
||||
function normalizeHeader(header: string): string {
|
||||
return header.trim().toLowerCase().replace(/[\s_-]+/g, "_");
|
||||
}
|
||||
|
||||
function resolveFieldMapping(
|
||||
headers: string[],
|
||||
): Record<keyof ParsedScopeRow, string | null> {
|
||||
const mapping: Record<keyof ParsedScopeRow, string | null> = {
|
||||
sequenceNo: null,
|
||||
scopeType: null,
|
||||
packageCode: null,
|
||||
name: null,
|
||||
description: null,
|
||||
};
|
||||
|
||||
for (const header of headers) {
|
||||
const normalized = normalizeHeader(header);
|
||||
for (const [field, aliases] of Object.entries(FIELD_ALIASES)) {
|
||||
if (mapping[field as keyof ParsedScopeRow] !== null) continue;
|
||||
if (aliases.some((alias) => normalized === alias || normalized.includes(alias))) {
|
||||
mapping[field as keyof ParsedScopeRow] = header;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return mapping;
|
||||
}
|
||||
|
||||
/** Outcome of parsing a scope-import spreadsheet. */
export interface ScopeImportResult {
  /** Successfully parsed rows, in sheet order; empty when parsing fails or no usable rows exist. */
  rows: ParsedScopeRow[];
  /** Human-readable notes: missing columns, skipped rows, applied defaults. */
  warnings: string[];
  /** Which spreadsheet header (if any) supplied each field; null when undetected. */
  mapping: Record<keyof ParsedScopeRow, string | null>;
}
|
||||
|
||||
export async function parseScopeImport(file: File): Promise<ScopeImportResult> {
|
||||
const rawRows = await parseSpreadsheet(file);
|
||||
const warnings: string[] = [];
|
||||
|
||||
if (rawRows.length === 0) {
|
||||
return { rows: [], warnings: ["File contains no data rows."], mapping: { sequenceNo: null, scopeType: null, packageCode: null, name: null, description: null } };
|
||||
}
|
||||
|
||||
const headers = Object.keys(rawRows[0] ?? {});
|
||||
const mapping = resolveFieldMapping(headers);
|
||||
|
||||
if (!mapping.name) {
|
||||
warnings.push(
|
||||
`Could not identify a "Name" column. Available headers: ${headers.join(", ")}`,
|
||||
);
|
||||
return { rows: [], warnings, mapping };
|
||||
}
|
||||
|
||||
const rows: ParsedScopeRow[] = [];
|
||||
|
||||
for (let i = 0; i < rawRows.length; i++) {
|
||||
const raw = rawRows[i]!;
|
||||
const name = mapping.name ? (raw[mapping.name] ?? "").trim() : "";
|
||||
|
||||
if (!name) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const seqRaw = mapping.sequenceNo ? (raw[mapping.sequenceNo] ?? "").trim() : "";
|
||||
const seqParsed = parseInt(seqRaw, 10);
|
||||
const sequenceNo = Number.isFinite(seqParsed) && seqParsed > 0 ? seqParsed : rows.length + 1;
|
||||
|
||||
rows.push({
|
||||
sequenceNo,
|
||||
scopeType: mapping.scopeType ? (raw[mapping.scopeType] ?? "").trim() || "SHOT" : "SHOT",
|
||||
packageCode: mapping.packageCode ? (raw[mapping.packageCode] ?? "").trim() : "",
|
||||
name,
|
||||
description: mapping.description ? (raw[mapping.description] ?? "").trim() : "",
|
||||
});
|
||||
}
|
||||
|
||||
if (rows.length === 0) {
|
||||
warnings.push("No rows with a non-empty name found.");
|
||||
}
|
||||
|
||||
if (!mapping.scopeType) warnings.push("No scope type column detected — defaulting to SHOT.");
|
||||
if (!mapping.sequenceNo) warnings.push("No sequence number column detected — auto-numbering.");
|
||||
|
||||
return { rows, warnings, mapping };
|
||||
}
|
||||
Reference in New Issue
Block a user