Skip to content

Commit 72ebb54

Browse files
Backport: fix (provider/xai): handle error chunks in responses api (#14240)
This is an automated backport of #14223 to the release-v6.0 branch. FYI @shaper. Co-authored-by: Walter Korman <shaper@vercel.com>
1 parent 37a378e commit 72ebb54

File tree

4 files changed

+57
-0
lines changed

4 files changed

+57
-0
lines changed

.changeset/dirty-moles-jog.md

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
---
2+
"@ai-sdk/xai": patch
3+
---
4+
5+
fix (provider/xai): handle mid-stream error chunks

packages/xai/src/responses/xai-responses-api.ts

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -538,6 +538,12 @@ export const xaiResponsesChunkSchema = z.union([
538538
usage: xaiResponsesUsageSchema.nullish(),
539539
}),
540540
}),
541+
z.object({
542+
type: z.literal('error'),
543+
code: z.string().nullish(),
544+
message: z.string(),
545+
param: z.string().nullish(),
546+
}),
541547
z.object({
542548
type: z.literal('response.done'),
543549
response: xaiResponsesResponseSchema,

packages/xai/src/responses/xai-responses-language-model.test.ts

Lines changed: 41 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3061,4 +3061,45 @@ describe('XaiResponsesLanguageModel', () => {
30613061
});
30623062
});
30633063
});
3064+
3065+
describe('error event handling', () => {
3066+
it('should emit error chunk for server error events', async () => {
3067+
prepareStreamChunks([
3068+
JSON.stringify({
3069+
type: 'response.created',
3070+
response: {
3071+
id: 'resp_123',
3072+
object: 'response',
3073+
model: 'grok-4-fast-non-reasoning',
3074+
output: [],
3075+
},
3076+
}),
3077+
JSON.stringify({
3078+
type: 'error',
3079+
code: null,
3080+
message:
3081+
'Service temporarily unavailable. The model did not respond to this request.',
3082+
param: null,
3083+
}),
3084+
]);
3085+
3086+
const { stream } = await createModel().doStream({
3087+
prompt: TEST_PROMPT,
3088+
});
3089+
3090+
const parts = await convertReadableStreamToArray(stream);
3091+
const errorPart = parts.find(part => part.type === 'error');
3092+
3093+
expect(errorPart).toMatchObject({
3094+
type: 'error',
3095+
error: {
3096+
type: 'error',
3097+
code: null,
3098+
message:
3099+
'Service temporarily unavailable. The model did not respond to this request.',
3100+
param: null,
3101+
},
3102+
});
3103+
});
3104+
});
30643105
});

packages/xai/src/responses/xai-responses-language-model.ts

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -684,6 +684,11 @@ export class XaiResponsesLanguageModel implements LanguageModelV3 {
684684
return;
685685
}
686686

687+
if (event.type === 'error') {
688+
controller.enqueue({ type: 'error', error: event });
689+
return;
690+
}
691+
687692
// Custom tool call input streaming - already handled by output_item events
688693
if (
689694
event.type === 'response.custom_tool_call_input.delta' ||

0 commit comments

Comments (0)