Skip to content

Commit 6da8e42

Browse files
committed
fix(deepseek): add Azure AI Inference path handling for DeepSeek via Azure
- Change _isAzureAiInference and _getUrlHost from private to protected in OpenAiHandler
- Add OPENAI_AZURE_AI_INFERENCE_PATH import and path handling in DeepSeekHandler.createMessage()
- Update test to expect path options argument
1 parent 9c2372b commit 6da8e42

File tree

3 files changed

+17
-4
lines changed

3 files changed

+17
-4
lines changed

src/api/providers/__tests__/deepseek.spec.ts

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -455,10 +455,12 @@ describe("DeepSeekHandler", () => {
455455
}
456456

457457
// Verify that the thinking parameter was passed to the API
458+
// Note: mockCreate receives two arguments - request options and path options
458459
expect(mockCreate).toHaveBeenCalledWith(
459460
expect.objectContaining({
460461
thinking: { type: "enabled" },
461462
}),
463+
{}, // Empty path options for non-Azure URLs
462464
)
463465
})
464466

src/api/providers/deepseek.ts

Lines changed: 13 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,12 @@
11
import { Anthropic } from "@anthropic-ai/sdk"
22
import OpenAI from "openai"
33

4-
import { deepSeekModels, deepSeekDefaultModelId, DEEP_SEEK_DEFAULT_TEMPERATURE } from "@roo-code/types"
4+
import {
5+
deepSeekModels,
6+
deepSeekDefaultModelId,
7+
DEEP_SEEK_DEFAULT_TEMPERATURE,
8+
OPENAI_AZURE_AI_INFERENCE_PATH,
9+
} from "@roo-code/types"
510

611
import type { ApiHandlerOptions } from "../../shared/api"
712

@@ -84,9 +89,15 @@ export class DeepSeekHandler extends OpenAiHandler {
8489
// Add max_tokens if needed
8590
this.addMaxTokensIfNeeded(requestOptions, modelInfo)
8691

92+
// Check if base URL is Azure AI Inference (for DeepSeek via Azure)
93+
const isAzureAiInference = this._isAzureAiInference(this.options.deepSeekBaseUrl)
94+
8795
let stream
8896
try {
89-
stream = await this.client.chat.completions.create(requestOptions)
97+
stream = await this.client.chat.completions.create(
98+
requestOptions,
99+
isAzureAiInference ? { path: OPENAI_AZURE_AI_INFERENCE_PATH } : {},
100+
)
90101
} catch (error) {
91102
const { handleOpenAIError } = await import("./utils/openai-error-handler")
92103
throw handleOpenAIError(error, "DeepSeek")

src/api/providers/openai.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -478,7 +478,7 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
478478
}
479479
}
480480

481-
private _getUrlHost(baseUrl?: string): string {
481+
protected _getUrlHost(baseUrl?: string): string {
482482
try {
483483
return new URL(baseUrl ?? "").host
484484
} catch (error) {
@@ -491,7 +491,7 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
491491
return urlHost.includes("x.ai")
492492
}
493493

494-
private _isAzureAiInference(baseUrl?: string): boolean {
494+
protected _isAzureAiInference(baseUrl?: string): boolean {
495495
const urlHost = this._getUrlHost(baseUrl)
496496
return urlHost.endsWith(".services.ai.azure.com")
497497
}

0 commit comments

Comments (0)