import { createAiClient, isAiConfigured, loggedAiCall } from "../ai-client.js";
import { TRPCError } from "@trpc/server";
import { z } from "zod";
import { findUniqueOrThrow } from "../db/helpers.js";
import { logger } from "../lib/logger.js";
import { managerProcedure } from "../trpc.js";
/**
 * Default prompt template for generating an artist's AI profile summary.
 *
 * The placeholders `{role}`, `{chapter}`, `{mainSkills}` and `{topSkills}`
 * are substituted with resource data before the prompt is sent to the model.
 * Admins can override this template via `systemSettings.aiSummaryPrompt`;
 * this constant is only the fallback when no custom template is set.
 */
export const DEFAULT_SUMMARY_PROMPT = `You are writing a short professional profile for an internal resource planning tool.

Artist profile:
- Role: {role}
- Chapter: {chapter}
- Main skills: {mainSkills}
- Top skills: {topSkills}

Write a 2–3 sentence professional bio. Be specific, use skill names. No fluff.`;
/**
 * Shape of one entry in the resource's `skills` JSON column.
 * The value is cast from the raw DB JSON (see the unchecked cast in
 * generateAiSummary), so these fields are best-effort typed, not validated.
 */
type SkillRow = {
  // Display name of the skill; used verbatim in the generated prompt.
  skill: string;
  // Optional grouping — not read anywhere in this file's visible code.
  category?: string;
  // Numeric proficiency; rendered as "<n>/5" in the prompt, so presumably
  // on a 1–5 scale — TODO confirm against where skills are written.
  proficiency: number;
  // Marks the resource's headline skills, listed separately in the prompt.
  isMainSkill?: boolean;
};
export const resourceAiSummaryProcedures = {
|
||
generateAiSummary: managerProcedure
|
||
.input(z.object({ resourceId: z.string() }))
|
||
.mutation(async ({ ctx, input }) => {
|
||
const [resource, settings] = await Promise.all([
|
||
findUniqueOrThrow(
|
||
ctx.db.resource.findUnique({
|
||
where: { id: input.resourceId },
|
||
include: { areaRole: { select: { name: true } } },
|
||
}),
|
||
"Resource",
|
||
),
|
||
ctx.db.systemSettings.findUnique({ where: { id: "singleton" } }),
|
||
]);
|
||
|
||
if (!isAiConfigured(settings)) {
|
||
throw new TRPCError({
|
||
code: "PRECONDITION_FAILED",
|
||
message: "AI is not configured. Please set credentials in Admin → Settings.",
|
||
});
|
||
}
|
||
|
||
const skills = (resource.skills as unknown as SkillRow[]) ?? [];
|
||
const mainSkills = skills.filter((skill) => skill.isMainSkill).map((skill) => skill.skill);
|
||
const top10 = [...skills]
|
||
.sort((left, right) => right.proficiency - left.proficiency)
|
||
.slice(0, 10)
|
||
.map((skill) => `${skill.skill} (${skill.proficiency}/5)`);
|
||
|
||
const vars = {
|
||
role: resource.areaRole?.name ?? "Not specified",
|
||
chapter: resource.chapter ?? "Not specified",
|
||
mainSkills: mainSkills.length > 0 ? mainSkills.join(", ") : "Not specified",
|
||
topSkills: top10.join(", "),
|
||
};
|
||
|
||
const templateStr = settings!.aiSummaryPrompt ?? DEFAULT_SUMMARY_PROMPT;
|
||
const prompt = templateStr
|
||
.replace("{role}", vars.role)
|
||
.replace("{chapter}", vars.chapter)
|
||
.replace("{mainSkills}", vars.mainSkills)
|
||
.replace("{topSkills}", vars.topSkills);
|
||
|
||
const client = createAiClient(settings!);
|
||
const model = settings!.azureOpenAiDeployment!;
|
||
const maxTokens = settings!.aiMaxCompletionTokens ?? 300;
|
||
const temperature = settings!.aiTemperature ?? 1;
|
||
const provider = settings!.aiProvider ?? "openai";
|
||
|
||
async function callChatCompletions(withTemperature: boolean) {
|
||
return loggedAiCall(provider, model, prompt.length, () =>
|
||
client.chat.completions.create({
|
||
messages: [{ role: "user", content: prompt }],
|
||
max_completion_tokens: maxTokens,
|
||
model,
|
||
...(withTemperature && temperature !== 1 ? { temperature } : {}),
|
||
}),
|
||
);
|
||
}
|
||
|
||
let summary = "";
|
||
try {
|
||
let completion;
|
||
try {
|
||
completion = await callChatCompletions(true);
|
||
logger.debug(
|
||
{
|
||
provider,
|
||
model,
|
||
choiceCount: completion.choices?.length ?? 0,
|
||
},
|
||
"AI summary chat completion succeeded",
|
||
);
|
||
} catch (tempErr) {
|
||
const status = (tempErr as { status?: number }).status;
|
||
const msg = (tempErr as Error).message ?? "";
|
||
if (status === 400 && msg.includes("temperature")) {
|
||
logger.info(
|
||
{ provider, model, status },
|
||
"Retrying AI summary generation without temperature override",
|
||
);
|
||
completion = await callChatCompletions(false);
|
||
} else if (status === 404) {
|
||
logger.info(
|
||
{ provider, model, status },
|
||
"Falling back to AI responses API for summary generation",
|
||
);
|
||
const resp = await client.responses.create({ model, input: prompt, max_output_tokens: maxTokens });
|
||
logger.debug(
|
||
{
|
||
provider,
|
||
model,
|
||
summaryLength: resp.output_text?.trim().length ?? 0,
|
||
},
|
||
"AI summary responses API call succeeded",
|
||
);
|
||
summary = resp.output_text?.trim() ?? "";
|
||
completion = null;
|
||
} else {
|
||
throw tempErr;
|
||
}
|
||
}
|
||
if (completion) {
|
||
summary = completion.choices[0]?.message?.content?.trim() ?? "";
|
||
}
|
||
} catch (error) {
|
||
throw error;
|
||
}
|
||
|
||
await ctx.db.resource.update({
|
||
where: { id: input.resourceId },
|
||
data: { aiSummary: summary, aiSummaryUpdatedAt: new Date() },
|
||
});
|
||
|
||
return { summary };
|
||
}),
|
||
};