
Commit 0544c6d

fix: suppress raw JSON parse errors from leaking to Discord channels (#59076) [AI-assisted] (#59118)
Merged via squash (head SHA b8b3686).
Co-authored-by: singleGanghood <156392444+singleGanghood@users.noreply.github.com>
Co-authored-by: hxy91819 <8814856+hxy91819@users.noreply.github.com>
Reviewed-by: @hxy91819
1 parent 03e17d1 commit 0544c6d

13 files changed: 229 additions & 4 deletions
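
In outline, the change threads a single sentinel string from the transport layer to every chat-facing formatter, so a truncated streaming fragment is never echoed verbatim into a channel. A minimal sketch of that contract; the constant values match the diffs below, while `toUserFacingError` is a simplified stand-in for the real formatters:

```ts
// Sentinel and user-facing copy, as defined in src/shared/assistant-error-format.ts:
const MALFORMED_STREAMING_FRAGMENT_ERROR_MESSAGE =
  "OpenClaw transport error: malformed_streaming_fragment";
const MALFORMED_STREAMING_FRAGMENT_USER_MESSAGE =
  "LLM streaming response contained a malformed fragment. Please try again.";

// Simplified formatter contract: exact-match the sentinel and swap in safe copy;
// any other error text (e.g. provider request-validation JSON) passes through to
// the existing handling so it stays actionable.
function toUserFacingError(raw: string): string {
  return raw.trim() === MALFORMED_STREAMING_FRAGMENT_ERROR_MESSAGE
    ? MALFORMED_STREAMING_FRAGMENT_USER_MESSAGE
    : raw;
}

console.log(toUserFacingError(MALFORMED_STREAMING_FRAGMENT_ERROR_MESSAGE));
// => LLM streaming response contained a malformed fragment. Please try again.
console.log(toUserFacingError("Unexpected token < in JSON at position 0"));
// => Unexpected token < in JSON at position 0  (left untouched)
```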

CHANGELOG.md

Lines changed: 1 addition & 0 deletions

@@ -20,6 +20,7 @@ Docs: https://docs.openclaw.ai
 
 ### Fixes
 
+- Agents/errors: suppress malformed streaming tool-call JSON fragments before they reach chat surfaces while preserving provider request-validation diagnostics. Fixes #59076; keeps #59080 as duplicate coverage. (#59118) Thanks @singleGanghood.
 - CLI/models: restore provider-filtered `models list --all --provider <id>` rows for providers without manifest/static catalog coverage, including Anthropic and Amazon Bedrock, while keeping the compatibility fallback off expensive availability and resolver paths. Thanks @shakkernerd.
 - CLI/tools: keep the Gateway `tools.*` RPC namespace out of plugin command discovery and managed proxy startup, so stray commands like `openclaw tools effective` fail quickly instead of cold-loading plugin metadata. Refs #73477. Thanks @oromeis.
 - CLI/status: keep default text `openclaw status --usage` on metadata-only channel scans unless `--deep` or `--all` is set, and send stray `openclaw tools --help` through the precomputed root-help fast path so latency-triage commands avoid plugin/runtime cold loads before printing. Refs #73477 and #74220. Thanks @oromeis and @NianJiuZst.

src/agents/anthropic-transport-stream.test.ts

Lines changed: 25 additions & 0 deletions

@@ -41,6 +41,14 @@ function createStalledSseResponse(params: { onCancel: (reason: unknown) => void
       params.onCancel(reason);
     },
   });
+
+  return new Response(body, {
+    status: 200,
+    headers: { "content-type": "text/event-stream" },
+  });
+}
+
+function createRawSseResponse(body: string): Response {
   return new Response(body, {
     status: 200,
     headers: { "content-type": "text/event-stream" },
@@ -339,6 +347,23 @@ describe("anthropic transport stream", () => {
     expect(guardedFetchMock).not.toHaveBeenCalled();
   });
 
+  it("classifies malformed Anthropic SSE data as a stable transport error", async () => {
+    guardedFetchMock.mockResolvedValueOnce(createRawSseResponse('data: {"type":\n\n'));
+
+    const result = await runTransportStream(
+      makeAnthropicTransportModel(),
+      {
+        messages: [{ role: "user", content: "hello" }],
+      } as AnthropicStreamContext,
+      {
+        apiKey: "sk-ant-api",
+      } as AnthropicStreamOptions,
+    );
+
+    expect(result.stopReason).toBe("error");
+    expect(result.errorMessage).toBe("OpenClaw transport error: malformed_streaming_fragment");
+  });
+
   it("preserves Anthropic OAuth identity and tool-name remapping with transport overrides", async () => {
     guardedFetchMock.mockResolvedValueOnce(
       createSseResponse([

src/agents/anthropic-transport-stream.ts

Lines changed: 14 additions & 2 deletions

@@ -9,6 +9,7 @@ import {
   type SimpleStreamOptions,
   type ThinkingLevel,
 } from "@mariozechner/pi-ai";
+import { MALFORMED_STREAMING_FRAGMENT_ERROR_MESSAGE } from "../shared/assistant-error-format.js";
 import { normalizeLowercaseStringOrEmpty } from "../shared/string-coerce.js";
 import {
   applyAnthropicPayloadPolicyToParams,
@@ -534,6 +535,17 @@ function readAnthropicSseChunk(
   });
 }
 
+function parseAnthropicSseEventData(data: string): Record<string, unknown> {
+  try {
+    return JSON.parse(data) as Record<string, unknown>;
+  } catch (error) {
+    if (error instanceof SyntaxError) {
+      throw new Error(MALFORMED_STREAMING_FRAGMENT_ERROR_MESSAGE, { cause: error });
+    }
+    throw error;
+  }
+}
+
 async function* parseAnthropicSseBody(
   body: ReadableStream<Uint8Array>,
   signal?: AbortSignal,
@@ -558,7 +570,7 @@
         .map((line) => line.slice(5).trimStart())
         .join("\n");
       if (data && data !== "[DONE]") {
-        yield JSON.parse(data) as Record<string, unknown>;
+        yield parseAnthropicSseEventData(data);
       }
       frameEnd = buffer.indexOf("\n\n");
     }
@@ -571,7 +583,7 @@
         .map((line) => line.slice(5).trimStart())
         .join("\n");
       if (data && data !== "[DONE]") {
-        yield JSON.parse(data) as Record<string, unknown>;
+        yield parseAnthropicSseEventData(data);
      }
     }
   } finally {
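
The classifier is scoped to SyntaxError on purpose: JSON.parse is the only call in the frame loop that throws it for bad data, so parse failures can be converted to the sentinel without swallowing aborts or reader errors, and the { cause } option keeps the original SyntaxError attached for diagnostics. A self-contained demo of that behavior, assuming only the constant from the diff:

```ts
const MALFORMED_STREAMING_FRAGMENT_ERROR_MESSAGE =
  "OpenClaw transport error: malformed_streaming_fragment";

function parseSseEventData(data: string): Record<string, unknown> {
  try {
    return JSON.parse(data) as Record<string, unknown>;
  } catch (error) {
    if (error instanceof SyntaxError) {
      throw new Error(MALFORMED_STREAMING_FRAGMENT_ERROR_MESSAGE, { cause: error });
    }
    throw error; // AbortError and friends keep their original identity
  }
}

try {
  // The kind of fragment a flaky provider stream leaves behind mid-event:
  parseSseEventData('{"type":');
} catch (err) {
  const e = err as Error;
  console.log(e.message); // OpenClaw transport error: malformed_streaming_fragment
  console.log((e.cause as Error).name); // SyntaxError, preserved for logs
}
```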

src/agents/pi-embedded-helpers.formatassistanterrortext.test.ts

Lines changed: 48 additions & 0 deletions

@@ -1,12 +1,14 @@
 import type { AssistantMessage } from "@mariozechner/pi-ai";
 import { describe, expect, it } from "vitest";
+import { MALFORMED_STREAMING_FRAGMENT_ERROR_MESSAGE } from "../shared/assistant-error-format.js";
 import {
   BILLING_ERROR_USER_MESSAGE,
   formatBillingErrorMessage,
   formatAssistantErrorText,
   getApiErrorPayloadFingerprint,
   formatRawAssistantErrorForUi,
   isRawApiErrorPayload,
+  sanitizeUserFacingText,
 } from "./pi-embedded-helpers.js";
 import { makeAssistantMessageFixture } from "./test-helpers/assistant-message-fixtures.js";
 
@@ -349,6 +351,34 @@ describe("formatAssistantErrorText", () => {
       "LLM request failed: provider returned an invalid streaming response. Please try again.",
     );
   });
+
+  it("sanitizes transport-classified malformed streaming fragments (#59076)", () => {
+    const msg = makeAssistantError(MALFORMED_STREAMING_FRAGMENT_ERROR_MESSAGE);
+    expect(formatAssistantErrorText(msg)).toBe(
+      "LLM streaming response contained a malformed fragment. Please try again.",
+    );
+  });
+
+  it("does not broadly rewrite non-streaming 'Unexpected token' JSON parse errors", () => {
+    const msg = makeAssistantError("Unexpected token < in JSON at position 0");
+    expect(formatAssistantErrorText(msg)).toBe("Unexpected token < in JSON at position 0");
+  });
+
+  it("does not rewrite non-streaming provider JSON request-validation diagnostics", () => {
+    const msg = makeAssistantError("Expected value in JSON at position 12 for messages.0.content");
+    expect(formatAssistantErrorText(msg)).toBe(
+      "Expected value in JSON at position 12 for messages.0.content",
+    );
+  });
+
+  it("keeps provider request-validation JSON diagnostics actionable", () => {
+    const msg = makeAssistantError(
+      '{"type":"error","error":{"type":"invalid_request_error","message":"Expected value in JSON at position 12 for messages.0.content"}}',
+    );
+    expect(formatAssistantErrorText(msg)).toBe(
+      "LLM request rejected: Expected value in JSON at position 12 for messages.0.content",
+    );
+  });
 });
 
 describe("formatRawAssistantErrorForUi", () => {
@@ -424,3 +454,21 @@ describe("raw API error payload helpers", () => {
     );
   });
 });
+
+describe("sanitizeUserFacingText — streaming JSON parse error (#59076)", () => {
+  it("rewrites transport-classified malformed streaming fragments in error context", () => {
+    const result = sanitizeUserFacingText(MALFORMED_STREAMING_FRAGMENT_ERROR_MESSAGE, {
+      errorContext: true,
+    });
+    expect(result).toBe("LLM streaming response contained a malformed fragment. Please try again.");
+  });
+
+  it("does not rewrite JSON parse error when not in error context", () => {
+    // When not in error context, the text could be legitimate assistant content
+    // mentioning JSON errors. Don't rewrite.
+    const text =
+      "Expected ',' or '}' after property value in JSON at position 334 (line 1 column 335)";
+    const result = sanitizeUserFacingText(text, { errorContext: false });
+    expect(result).toBe(text);
+  });
+});
src/agents/pi-embedded-helpers/ (new test file; exact filename not shown in this view)

Lines changed: 38 additions & 0 deletions

@@ -0,0 +1,38 @@
+import type { AssistantMessage } from "@mariozechner/pi-ai";
+import { describe, expect, it } from "vitest";
+import { MALFORMED_STREAMING_FRAGMENT_ERROR_MESSAGE } from "../../shared/assistant-error-format.js";
+import { makeAssistantMessageFixture } from "../test-helpers/assistant-message-fixtures.js";
+import { formatAssistantErrorText } from "./errors.js";
+
+describe("formatAssistantErrorText streaming JSON parse classification", () => {
+  const makeAssistantError = (errorMessage: string): AssistantMessage =>
+    makeAssistantMessageFixture({
+      errorMessage,
+      content: [{ type: "text", text: errorMessage }],
+    });
+
+  it("suppresses transport-classified malformed streaming fragments", () => {
+    const msg = makeAssistantError(MALFORMED_STREAMING_FRAGMENT_ERROR_MESSAGE);
+    expect(formatAssistantErrorText(msg)).toBe(
+      "LLM streaming response contained a malformed fragment. Please try again.",
+    );
+  });
+
+  it("does not suppress unclassified JSON.parse text", () => {
+    const msg = makeAssistantError(
+      "Expected ',' or '}' after property value in JSON at position 334 (line 1 column 335)",
+    );
+    expect(formatAssistantErrorText(msg)).toBe(
+      "Expected ',' or '}' after property value in JSON at position 334 (line 1 column 335)",
+    );
+  });
+
+  it("keeps non-streaming provider request-validation syntax diagnostics", () => {
+    const msg = makeAssistantError(
+      '{"type":"error","error":{"type":"invalid_request_error","message":"Expected value in JSON at position 12 for messages.0.content"}}',
+    );
+    expect(formatAssistantErrorText(msg)).toBe(
+      "LLM request rejected: Expected value in JSON at position 12 for messages.0.content",
+    );
+  });
+});

src/agents/pi-embedded-helpers/errors.ts

Lines changed: 5 additions & 0 deletions

@@ -46,6 +46,7 @@ import {
   isInvalidStreamingEventOrderError,
   isLikelyHttpErrorText,
   isRawApiErrorPayload,
+  isStreamingJsonParseError,
   sanitizeUserFacingText,
 } from "./sanitize-user-facing-text.js";
 import type { FailoverReason } from "./types.js";
@@ -1139,6 +1140,10 @@ export function formatAssistantErrorText(
     return formatRawAssistantErrorForUi(raw);
   }
 
+  if (isStreamingJsonParseError(raw)) {
+    return "LLM streaming response contained a malformed fragment. Please try again.";
+  }
+
   // Never return raw unhandled errors - log for debugging but return safe message
   if (raw.length > 600) {
     log.warn(`Long error truncated: ${raw.slice(0, 200)}`);
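
Placement matters here: the new branch runs after the raw API-payload check, so a structured provider envelope still takes the existing "LLM request rejected: …" path and only the transport sentinel is rewritten. A condensed, vitest-style restatement of that guarantee, reusing the fixture helper the test files above already import:

```ts
import { expect, it } from "vitest";
import { makeAssistantMessageFixture } from "../test-helpers/assistant-message-fixtures.js";
import { formatAssistantErrorText } from "./errors.js";

it("rewrites only the transport sentinel, not provider envelopes", () => {
  const make = (errorMessage: string) =>
    makeAssistantMessageFixture({ errorMessage, content: [{ type: "text", text: errorMessage }] });

  // Transport sentinel -> safe, generic copy:
  expect(
    formatAssistantErrorText(make("OpenClaw transport error: malformed_streaming_fragment")),
  ).toBe("LLM streaming response contained a malformed fragment. Please try again.");

  // Provider request-validation envelope -> diagnostic stays actionable:
  expect(
    formatAssistantErrorText(
      make(
        '{"type":"error","error":{"type":"invalid_request_error","message":"Expected value in JSON at position 12 for messages.0.content"}}',
      ),
    ),
  ).toBe("LLM request rejected: Expected value in JSON at position 12 for messages.0.content");
});
```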

src/agents/pi-embedded-helpers/sanitize-user-facing-text.ts

Lines changed: 16 additions & 0 deletions

@@ -3,6 +3,7 @@ import {
   extractLeadingHttpStatus,
   formatRawAssistantErrorForUi,
   isCloudflareOrHtmlErrorPage,
+  MALFORMED_STREAMING_FRAGMENT_ERROR_MESSAGE,
   parseApiErrorInfo,
   parseApiErrorPayload,
 } from "../../shared/assistant-error-format.js";
@@ -209,6 +210,17 @@ export function isInvalidStreamingEventOrderError(raw: string): boolean {
   );
 }
 
+export function isStreamingJsonParseError(raw: string): boolean {
+  if (!raw) {
+    return false;
+  }
+  const trimmed = raw.trim();
+  if (trimmed === MALFORMED_STREAMING_FRAGMENT_ERROR_MESSAGE) {
+    return true;
+  }
+  return false;
+}
+
 function hasRateLimitTpmHint(raw: string): boolean {
   const lower = normalizeLowercaseStringOrEmpty(raw);
   return /\btpm\b/i.test(lower) || lower.includes("tokens per minute");
@@ -419,6 +431,10 @@ export function sanitizeUserFacingText(text: unknown, opts?: { errorContext?: bo
     return formatRawAssistantErrorForUi(trimmed);
   }
 
+  if (isStreamingJsonParseError(trimmed)) {
+    return "LLM streaming response contained a malformed fragment. Please try again.";
+  }
+
   if (ERROR_PREFIX_RE.test(trimmed)) {
     const prefixedCopy = formatRateLimitOrOverloadedErrorCopy(trimmed);
     if (prefixedCopy) {
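
isStreamingJsonParseError deliberately requires an exact match on the sentinel (after trimming) rather than pattern-matching generic JSON.parse wording; anything looser would also rewrite provider diagnostics that merely mention JSON positions. The boundary, shown with the exported helper:

```ts
import { isStreamingJsonParseError } from "./sanitize-user-facing-text.js";

console.log(isStreamingJsonParseError("OpenClaw transport error: malformed_streaming_fragment"));
// => true
console.log(isStreamingJsonParseError("  OpenClaw transport error: malformed_streaming_fragment\n"));
// => true (input is trimmed before the comparison)
console.log(isStreamingJsonParseError("Unexpected token < in JSON at position 0"));
// => false (generic parse wording is not classified)
console.log(isStreamingJsonParseError(""));
// => false
```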

src/agents/transport-stream-shared.ts

Lines changed: 0 additions & 1 deletion

@@ -20,7 +20,6 @@ type TransportOutputShape = {
 };
 
 export const EMPTY_TOOL_RESULT_TEXT = "(no output)";
-
 export function sanitizeTransportPayloadText(text: string): string {
   return text.replace(
     /[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?<![\uD800-\uDBFF])[\uDC00-\uDFFF]/g,

src/shared/assistant-error-format.ts

Lines changed: 9 additions & 0 deletions

@@ -15,6 +15,11 @@ const CLOUDFLARE_HTML_ERROR_CODES = new Set([521, 522, 523, 524, 525, 526, 530])
 const STANDALONE_HTML_ERROR_HINT_RE =
   /\bcloudflare\b|cdn-cgi\/challenge-platform|challenge-error-text|enable javascript and cookies to continue|access denied|forbidden|service unavailable|bad gateway|web server is down|captcha|attention required/i;
 
+export const MALFORMED_STREAMING_FRAGMENT_ERROR_MESSAGE =
+  "OpenClaw transport error: malformed_streaming_fragment";
+export const MALFORMED_STREAMING_FRAGMENT_USER_MESSAGE =
+  "LLM streaming response contained a malformed fragment. Please try again.";
+
 type ErrorPayload = Record<string, unknown>;
 
 export type ApiErrorInfo = {
@@ -188,6 +193,10 @@ export function formatRawAssistantErrorForUi(raw?: string): string {
     return "LLM request failed with an unknown error.";
   }
 
+  if (trimmed === MALFORMED_STREAMING_FRAGMENT_ERROR_MESSAGE) {
+    return MALFORMED_STREAMING_FRAGMENT_USER_MESSAGE;
+  }
+
   const leadingStatus = extractLeadingHttpStatus(trimmed);
   const isHtmlChallenge = isCloudflareOrHtmlErrorPage(trimmed);
   if (leadingStatus && isHtmlChallenge) {
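
Defining both strings in src/shared/assistant-error-format.ts gives the transport, the pi-embedded helpers, and the TUI one constant to import instead of three copies of the literal, and formatRawAssistantErrorForUi folds the mapping into the funnel that already handles empty and Cloudflare-style errors. Expected outputs per the hunk above:

```ts
import {
  MALFORMED_STREAMING_FRAGMENT_ERROR_MESSAGE,
  formatRawAssistantErrorForUi,
} from "./assistant-error-format.js";

console.log(formatRawAssistantErrorForUi(MALFORMED_STREAMING_FRAGMENT_ERROR_MESSAGE));
// => "LLM streaming response contained a malformed fragment. Please try again."

console.log(formatRawAssistantErrorForUi(undefined));
// => "LLM request failed with an unknown error."  (empty-input branch)
```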

src/tui/tui-event-handlers.test.ts

Lines changed: 37 additions & 0 deletions

@@ -1,4 +1,5 @@
 import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
+import { MALFORMED_STREAMING_FRAGMENT_ERROR_MESSAGE } from "../shared/assistant-error-format.js";
 import { createEventHandlers } from "./tui-event-handlers.js";
 import type { AgentEvent, BtwEvent, ChatEvent, TuiStateAccess } from "./tui-types.js";
 
@@ -753,6 +754,42 @@ describe("tui-event-handlers: handleAgentEvent", () => {
     expect(chatLog.dropAssistant).not.toHaveBeenCalledWith("run-error-envelope");
   });
 
+  it("renders malformed streaming fragment text when chat final only has event errorMessage", () => {
+    const { state, chatLog, handleChatEvent } = createHandlersHarness({
+      state: { activeChatRunId: null },
+    });
+
+    handleChatEvent({
+      runId: "run-malformed-final",
+      sessionKey: state.currentSessionKey,
+      state: "final",
+      message: { content: [] },
+      errorMessage: MALFORMED_STREAMING_FRAGMENT_ERROR_MESSAGE,
+    });
+
+    expect(chatLog.finalizeAssistant).toHaveBeenCalledWith(
+      "LLM streaming response contained a malformed fragment. Please try again.",
+      "run-malformed-final",
+    );
+  });
+
+  it("renders malformed streaming fragment text for chat error events", () => {
+    const { state, chatLog, handleChatEvent } = createHandlersHarness({
+      state: { activeChatRunId: null },
+    });
+
+    handleChatEvent({
+      runId: "run-malformed-error",
+      sessionKey: state.currentSessionKey,
+      state: "error",
+      errorMessage: MALFORMED_STREAMING_FRAGMENT_ERROR_MESSAGE,
+    });
+
+    expect(chatLog.addSystem).toHaveBeenCalledWith(
+      "run error: LLM streaming response contained a malformed fragment. Please try again.",
+    );
+  });
+
   it("shows a concise /auth hint for local auth failures", () => {
     const { chatLog, handleChatEvent } = createHandlersHarness({
       localMode: true,
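
For the TUI, both failure shapes converge on the same copy: a final event carrying only errorMessage finalizes the assistant entry with sanitized text, while a dedicated error event becomes a system line prefixed with "run error:". A sketch of that dispatch; the chatLog method names and the copy come from the tests, but the handler itself is an illustrative stand-in for createEventHandlers:

```ts
type ChatLog = {
  finalizeAssistant(text: string, runId: string): void;
  addSystem(text: string): void;
};

// Illustrative dispatch only; sanitize stands in for the shared error formatting.
function renderChatFailure(
  chatLog: ChatLog,
  ev: { runId: string; state: "final" | "error"; errorMessage: string },
  sanitize: (raw: string) => string,
): void {
  const text = sanitize(ev.errorMessage);
  if (ev.state === "final") {
    chatLog.finalizeAssistant(text, ev.runId); // empty-content final -> safe copy in the bubble
  } else {
    chatLog.addSystem(`run error: ${text}`); // hard error -> system line
  }
}
```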
