Skip to content

Commit ab286f1

Browse files
Backport: fix(ai): doStream should reflect transformed values (#13133)
This is an automated backport of #13113 to the release-v6.0 branch. FYI @aayush-kapoor.

Co-authored-by: Aayush Kapoor <83492835+aayush-kapoor@users.noreply.github.com>
1 parent 4c1613a commit ab286f1

File tree

3 files changed

+35
-24
lines changed

3 files changed

+35
-24
lines changed

.changeset/calm-squids-sparkle.md

Lines changed: 5 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -0,0 +1,5 @@
1 + ---
2 + 'ai': patch
3 + ---
4 +
5 + fix(ai): doStream should reflect transformed values

packages/ai/src/generate-text/__snapshots__/stream-text.test.ts.snap

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -240,8 +240,8 @@ exports[`streamText > options.transform > with base transformation > telemetry s
240 240         "ai.response.model": "mock-model-id",
241 241         "ai.response.msToFinish": 500,
242 242         "ai.response.msToFirstChunk": 100,
243     -       "ai.response.providerMetadata": "{"testProvider":{"testKey":"testValue"}}",
244     -       "ai.response.text": "Hello, world!",
    243 +       "ai.response.providerMetadata": "{"testProvider":{"testKey":"TEST VALUE"}}",
    244 +       "ai.response.text": "HELLO, WORLD!",
245 245         "ai.response.timestamp": "1970-01-01T00:00:00.000Z",
246 246         "ai.response.toolCalls": "[{"type":"tool-call","toolCallId":"call-1","toolName":"tool1","input":{"value":"VALUE"}}]",
247 247         "ai.settings.maxRetries": 2,

packages/ai/src/generate-text/stream-text.ts

Lines changed: 28 additions & 22 deletions
Original file line number | Diff line number | Diff line change
@@ -1946,38 +1946,20 @@ class DefaultStreamTextResult<TOOLS extends ToolSet, OUTPUT extends Output>
1946 1946           ? JSON.stringify(stepToolCalls)
1947 1947           : undefined;
1948 1948
1949      -       // record telemetry information first to ensure best effort timing
     1949 +       // record telemetry attributes that don't depend on transforms:
1950 1950         try {
1951 1951           doStreamSpan.setAttributes(
1952 1952             await selectTelemetryAttributes({
1953 1953               telemetry,
1954 1954               attributes: {
1955 1955                 'ai.response.finishReason': stepFinishReason,
1956      -             'ai.response.text': {
1957      -               output: () => activeText,
1958      -             },
1959      -             'ai.response.reasoning': {
1960      -               output: () => {
1961      -                 const reasoningParts = recordedContent.filter(
1962      -                   (
1963      -                     c,
1964      -                   ): c is { type: 'reasoning'; text: string } =>
1965      -                     c.type === 'reasoning',
1966      -                 );
1967      -                 return reasoningParts.length > 0
1968      -                   ? reasoningParts.map(r => r.text).join('\n')
1969      -                   : undefined;
1970      -               },
1971      -             },
1972 1956               'ai.response.toolCalls': {
1973 1957                 output: () => stepToolCallsJson,
1974 1958               },
1975 1959               'ai.response.id': stepResponse.id,
1976 1960               'ai.response.model': stepResponse.modelId,
1977 1961               'ai.response.timestamp':
1978 1962                 stepResponse.timestamp.toISOString(),
1979      -             'ai.response.providerMetadata':
1980      -               JSON.stringify(stepProviderMetadata),
1981 1963
1982 1964               'ai.usage.inputTokens': stepUsage.inputTokens,
1983 1965               'ai.usage.outputTokens': stepUsage.outputTokens,

@@ -2001,9 +1983,6 @@ class DefaultStreamTextResult<TOOLS extends ToolSet, OUTPUT extends Output>
2001 1983           );
2002 1984         } catch (error) {
2003 1985           // ignore error setting telemetry attributes
2004      -       } finally {
2005      -         // finish doStreamSpan before other operations for correct timing:
2006      -         doStreamSpan.end();
2007 1986         }
2008 1987
2009 1988         controller.enqueue({

@@ -2027,6 +2006,33 @@ class DefaultStreamTextResult<TOOLS extends ToolSet, OUTPUT extends Output>
2027 2006         // to ensure that the recorded steps are complete:
2028 2007         await stepFinish.promise;
2029 2008
     2009 +       // set transform-dependent attributes after the step has been
     2010 +       // fully processed (post-transform) by the event processor:
     2011 +       const processedStep =
     2012 +         recordedSteps[recordedSteps.length - 1];
     2013 +       try {
     2014 +         doStreamSpan.setAttributes(
     2015 +           await selectTelemetryAttributes({
     2016 +             telemetry,
     2017 +             attributes: {
     2018 +               'ai.response.text': {
     2019 +                 output: () => processedStep.text,
     2020 +               },
     2021 +               'ai.response.reasoning': {
     2022 +                 output: () => processedStep.reasoningText,
     2023 +               },
     2024 +               'ai.response.providerMetadata': JSON.stringify(
     2025 +                 processedStep.providerMetadata,
     2026 +               ),
     2027 +             },
     2028 +           }),
     2029 +         );
     2030 +       } catch (error) {
     2031 +         // ignore error setting telemetry attributes
     2032 +       } finally {
     2033 +         doStreamSpan.end();
     2034 +       }
     2035 +
2030 2036         const clientToolCalls = stepToolCalls.filter(
2031 2037           toolCall => toolCall.providerExecuted !== true,
2032 2038         );

0 commit comments

Comments (0)