fix: cap AI tool result size to prevent JSON parse errors
When generating multiple cover images in one conversation, the accumulated tool results (each ~400KB of base64 data) caused the OpenAI conversation payload to exceed JSON parsing limits in the browser. Fix: (1) strip coverImageUrl from invalidate action results, since the AI does not need it; (2) cap invalidate results at 4KB; (3) cap all tool results at 8KB. This prevents "JSON.parse: unexpected character" errors during batch operations. Co-Authored-By: claude-flow <ruv@ruv.net>
This commit is contained in:
@@ -5564,16 +5564,19 @@ export async function executeTool(
|
||||
|
||||
if (actionType === "invalidate") {
|
||||
const scope = actionResult.scope as string[];
|
||||
// Strip __action and scope from the result sent back to the AI
|
||||
const { __action: _, scope: _s, ...rest } = actionResult;
|
||||
// Strip __action, scope, and large data from the result sent back to the AI
|
||||
const { __action: _, scope: _s, coverImageUrl: _img, ...rest } = actionResult;
|
||||
const content = JSON.stringify(rest);
|
||||
return {
|
||||
content: JSON.stringify(rest),
|
||||
content: content.length > 4000 ? content.slice(0, 4000) + '..."' : content,
|
||||
action: { type: "invalidate", scope },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
return { content: JSON.stringify(result) };
|
||||
// Cap tool result size to prevent oversized OpenAI conversation payloads
|
||||
const content = typeof result === "string" ? result : JSON.stringify(result);
|
||||
return { content: content.length > 8000 ? content.slice(0, 8000) + '..."' : content };
|
||||
} catch (err) {
|
||||
const msg = err instanceof Error ? err.message : String(err);
|
||||
return { content: JSON.stringify({ error: msg }) };
|
||||
|
||||
Reference in New Issue
Block a user