Skip to content

Commit 772a2d7

Browse files
feat (core): Add finishReason field to NoObjectGeneratedError (#5541)
Co-authored-by: Bram Meerten <bram.meerten@acagroup.be>
1 parent 5952cfb commit 772a2d7

11 files changed

Lines changed: 53 additions & 2 deletions

File tree

.changeset/flat-plums-bake.md

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
---
2+
'ai': minor
3+
---
4+
5+
feat (core): Add finishReason field to NoObjectGeneratedError

content/docs/07-reference/05-ai-sdk-errors/ai-no-object-generated-error.mdx

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,7 @@ It can arise due to the following reasons:
1818
- `text`: The text that was generated by the model. This can be the raw text or the tool call text, depending on the object generation mode.
1919
- `response`: Metadata about the language model response, including response id, timestamp, and model.
2020
- `usage`: Request token usage.
21+
- `finishReason`: Request finish reason. For example, 'length' if the model generated the maximum number of tokens; this could result in a JSON parsing error.
2122
- `cause`: The cause of the error (e.g. a JSON parsing error). You can use this for more detailed error handling.
2223

2324
## Checking for this Error
@@ -36,6 +37,7 @@ try {
3637
console.log('Text:', error.text);
3738
console.log('Response:', error.response);
3839
console.log('Usage:', error.usage);
40+
console.log('Finish Reason:', error.finishReason);
3941
}
4042
}
4143
```

packages/ai/core/generate-object/generate-object.test.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -801,6 +801,7 @@ describe('output = "object"', () => {
801801
promptTokens: 10,
802802
totalTokens: 30,
803803
},
804+
finishReason: 'stop',
804805
});
805806
}
806807

packages/ai/core/generate-object/generate-object.ts

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -556,6 +556,7 @@ export async function generateObject<SCHEMA, RESULT>({
556556
'No object generated: the model did not return a response.',
557557
response: responseData,
558558
usage: calculateLanguageModelUsage(result.usage),
559+
finishReason: result.finishReason,
559560
});
560561
}
561562

@@ -681,6 +682,7 @@ export async function generateObject<SCHEMA, RESULT>({
681682
message: 'No object generated: the tool was not called.',
682683
response: responseData,
683684
usage: calculateLanguageModelUsage(result.usage),
685+
finishReason: result.finishReason,
684686
});
685687
}
686688

@@ -751,6 +753,7 @@ export async function generateObject<SCHEMA, RESULT>({
751753
text: result,
752754
response,
753755
usage: calculateLanguageModelUsage(usage),
756+
finishReason: finishReason,
754757
});
755758
}
756759

@@ -770,6 +773,7 @@ export async function generateObject<SCHEMA, RESULT>({
770773
text: result,
771774
response,
772775
usage: calculateLanguageModelUsage(usage),
776+
finishReason: finishReason,
773777
});
774778
}
775779

packages/ai/core/generate-object/output-strategy.ts

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,11 @@ import {
1616
createAsyncIterableStream,
1717
} from '../util/async-iterable-stream';
1818
import { ObjectStreamPart } from './stream-object-result';
19-
import { LanguageModelResponseMetadata, LanguageModelUsage } from '../types';
19+
import {
20+
FinishReason,
21+
LanguageModelResponseMetadata,
22+
LanguageModelUsage,
23+
} from '../types';
2024

2125
export interface OutputStrategy<PARTIAL, RESULT, ELEMENT_STREAM> {
2226
readonly type: 'object' | 'array' | 'enum' | 'no-schema';
@@ -64,6 +68,7 @@ const noSchemaOutputStrategy: OutputStrategy<JSONValue, JSONValue, never> = {
6468
text: string;
6569
response: LanguageModelResponseMetadata;
6670
usage: LanguageModelUsage;
71+
finishReason: FinishReason;
6772
},
6873
): ValidationResult<JSONValue> {
6974
return value === undefined
@@ -74,6 +79,7 @@ const noSchemaOutputStrategy: OutputStrategy<JSONValue, JSONValue, never> = {
7479
text: context.text,
7580
response: context.response,
7681
usage: context.usage,
82+
finishReason: context.finishReason,
7783
}),
7884
}
7985
: { success: true, value };

packages/ai/core/generate-object/stream-object.test.ts

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1311,6 +1311,7 @@ describe('streamObject', () => {
13111311
modelId: 'model-1',
13121312
},
13131313
usage: { completionTokens: 10, promptTokens: 3, totalTokens: 13 },
1314+
finishReason: 'stop',
13141315
});
13151316
}
13161317
});
@@ -1354,6 +1355,7 @@ describe('streamObject', () => {
13541355
modelId: 'model-1',
13551356
},
13561357
usage: { completionTokens: 10, promptTokens: 3, totalTokens: 13 },
1358+
finishReason: 'stop',
13571359
});
13581360
}
13591361
});
@@ -1403,6 +1405,7 @@ describe('streamObject', () => {
14031405
modelId: 'model-1',
14041406
},
14051407
usage: { completionTokens: 10, promptTokens: 3, totalTokens: 13 },
1408+
finishReason: 'stop',
14061409
});
14071410
}
14081411
});
@@ -1446,6 +1449,7 @@ describe('streamObject', () => {
14461449
modelId: 'model-1',
14471450
},
14481451
usage: { completionTokens: 10, promptTokens: 3, totalTokens: 13 },
1452+
finishReason: 'stop',
14491453
});
14501454
}
14511455
});
@@ -1488,6 +1492,7 @@ describe('streamObject', () => {
14881492
modelId: 'model-1',
14891493
},
14901494
usage: { completionTokens: 10, promptTokens: 3, totalTokens: 13 },
1495+
finishReason: 'stop',
14911496
});
14921497
}
14931498
});
@@ -1530,6 +1535,7 @@ describe('streamObject', () => {
15301535
modelId: 'model-1',
15311536
},
15321537
usage: { completionTokens: 10, promptTokens: 3, totalTokens: 13 },
1538+
finishReason: 'stop',
15331539
});
15341540
}
15351541
});

packages/ai/core/generate-object/stream-object.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -898,6 +898,7 @@ class DefaultStreamObjectResult<PARTIAL, RESULT, ELEMENT_STREAM>
898898
text: accumulatedText,
899899
response,
900900
usage,
901+
finishReason: finishReason,
901902
});
902903
self.objectPromise.reject(error);
903904
}

packages/ai/core/generate-text/generate-text.ts

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -576,7 +576,11 @@ A function that attempts to repair a tool call that failed to parse.
576576

577577
return output.parseOutput(
578578
{ text },
579-
{ response: currentModelResponse.response, usage },
579+
{
580+
response: currentModelResponse.response,
581+
usage,
582+
finishReason: currentModelResponse.finishReason,
583+
},
580584
);
581585
},
582586
toolCalls: currentToolCalls,

packages/ai/core/generate-text/output.test.ts

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@ import { fail } from 'assert';
22
import { z } from 'zod';
33
import { verifyNoObjectGeneratedError } from '../../errors/no-object-generated-error';
44
import { object } from './output';
5+
import { FinishReason } from '../types';
56

67
const context = {
78
response: {
@@ -14,6 +15,7 @@ const context = {
1415
completionTokens: 2,
1516
totalTokens: 3,
1617
},
18+
finishReason: 'length' as FinishReason,
1719
};
1820

1921
describe('Output.object', () => {
@@ -37,6 +39,7 @@ describe('Output.object', () => {
3739
message: 'No object generated: could not parse the response.',
3840
response: context.response,
3941
usage: context.usage,
42+
finishReason: context.finishReason,
4043
});
4144
}
4245
});
@@ -50,6 +53,7 @@ describe('Output.object', () => {
5053
message: 'No object generated: response did not match schema.',
5154
response: context.response,
5255
usage: context.usage,
56+
finishReason: context.finishReason,
5357
});
5458
}
5559
});

packages/ai/core/generate-text/output.ts

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@ import { z } from 'zod';
99
import { NoObjectGeneratedError } from '../../errors';
1010
import { injectJsonInstruction } from '../generate-object/inject-json-instruction';
1111
import {
12+
FinishReason,
1213
LanguageModel,
1314
LanguageModelV1CallOptions,
1415
} from '../types/language-model';
@@ -33,6 +34,7 @@ export interface Output<OUTPUT, PARTIAL> {
3334
context: {
3435
response: LanguageModelResponseMetadata;
3536
usage: LanguageModelUsage;
37+
finishReason: FinishReason;
3638
},
3739
): OUTPUT;
3840
}
@@ -108,6 +110,7 @@ export const object = <OUTPUT>({
108110
context: {
109111
response: LanguageModelResponseMetadata;
110112
usage: LanguageModelUsage;
113+
finishReason: FinishReason;
111114
},
112115
) {
113116
const parseResult = safeParseJSON({ text });
@@ -119,6 +122,7 @@ export const object = <OUTPUT>({
119122
text,
120123
response: context.response,
121124
usage: context.usage,
125+
finishReason: context.finishReason,
122126
});
123127
}
124128

@@ -134,6 +138,7 @@ export const object = <OUTPUT>({
134138
text,
135139
response: context.response,
136140
usage: context.usage,
141+
finishReason: context.finishReason,
137142
});
138143
}
139144

0 commit comments

Comments
 (0)