Skip to content

Commit 928fadf

Browse files
authored
fix(providers/openai): logprobs for stream alongside completion model (#6091)
Follow-up to #6049: streaming logprobs were missing — they were implemented for the completion model but accidentally omitted for this one.
1 parent 332167b commit 928fadf

3 files changed

Lines changed: 121 additions & 1 deletion

File tree

.changeset/swift-geckos-joke.md

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
---
2+
'@ai-sdk/openai': patch
3+
---
4+
5+
fix(providers/openai): logprobs for stream alongside completion model

packages/openai/src/openai-chat-language-model.test.ts

Lines changed: 94 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1444,7 +1444,100 @@ describe('doStream', () => {
14441444
{
14451445
"finishReason": "stop",
14461446
"providerMetadata": {
1447-
"openai": {},
1447+
"openai": {
1448+
"logprobs": [
1449+
{
1450+
"logprob": -0.0009994634,
1451+
"token": "Hello",
1452+
"top_logprobs": [
1453+
{
1454+
"logprob": -0.0009994634,
1455+
"token": "Hello",
1456+
},
1457+
],
1458+
},
1459+
{
1460+
"logprob": -0.13410144,
1461+
"token": "!",
1462+
"top_logprobs": [
1463+
{
1464+
"logprob": -0.13410144,
1465+
"token": "!",
1466+
},
1467+
],
1468+
},
1469+
{
1470+
"logprob": -0.0009250381,
1471+
"token": " How",
1472+
"top_logprobs": [
1473+
{
1474+
"logprob": -0.0009250381,
1475+
"token": " How",
1476+
},
1477+
],
1478+
},
1479+
{
1480+
"logprob": -0.047709424,
1481+
"token": " can",
1482+
"top_logprobs": [
1483+
{
1484+
"logprob": -0.047709424,
1485+
"token": " can",
1486+
},
1487+
],
1488+
},
1489+
{
1490+
"logprob": -0.000009014684,
1491+
"token": " I",
1492+
"top_logprobs": [
1493+
{
1494+
"logprob": -0.000009014684,
1495+
"token": " I",
1496+
},
1497+
],
1498+
},
1499+
{
1500+
"logprob": -0.009125131,
1501+
"token": " assist",
1502+
"top_logprobs": [
1503+
{
1504+
"logprob": -0.009125131,
1505+
"token": " assist",
1506+
},
1507+
],
1508+
},
1509+
{
1510+
"logprob": -0.0000066306106,
1511+
"token": " you",
1512+
"top_logprobs": [
1513+
{
1514+
"logprob": -0.0000066306106,
1515+
"token": " you",
1516+
},
1517+
],
1518+
},
1519+
{
1520+
"logprob": -0.00011093382,
1521+
"token": " today",
1522+
"top_logprobs": [
1523+
{
1524+
"logprob": -0.00011093382,
1525+
"token": " today",
1526+
},
1527+
],
1528+
},
1529+
{
1530+
"logprob": -0.00004596782,
1531+
"token": "?",
1532+
"top_logprobs": [
1533+
{
1534+
"logprob": -0.00004596782,
1535+
"token": "?",
1536+
},
1537+
],
1538+
},
1539+
],
1540+
},
14481541
},
14491542
"type": "finish",
14501543
"usage": {

packages/openai/src/openai-chat-language-model.ts

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -487,6 +487,10 @@ export class OpenAIChatLanguageModel implements LanguageModelV2 {
487487
finishReason = mapOpenAIFinishReason(choice.finish_reason);
488488
}
489489

490+
if (choice?.logprobs?.content != null) {
491+
providerMetadata.openai.logprobs = choice.logprobs.content;
492+
}
493+
490494
if (choice?.delta == null) {
491495
return;
492496
}
@@ -723,6 +727,24 @@ const openaiChatChunkSchema = z.union([
723727
.nullish(),
724728
})
725729
.nullish(),
730+
logprobs: z
731+
.object({
732+
content: z
733+
.array(
734+
z.object({
735+
token: z.string(),
736+
logprob: z.number(),
737+
top_logprobs: z.array(
738+
z.object({
739+
token: z.string(),
740+
logprob: z.number(),
741+
}),
742+
),
743+
}),
744+
)
745+
.nullish(),
746+
})
747+
.nullish(),
726748
finish_reason: z.string().nullish(),
727749
index: z.number(),
728750
}),

0 commit comments

Comments (0)