|
1 | 1 | import { execSync } from "node:child_process"; |
2 | | -import { readdirSync, readFileSync } from "node:fs"; |
| 2 | +import { existsSync, readdirSync, readFileSync } from "node:fs"; |
3 | 3 | import { homedir } from "node:os"; |
4 | 4 | import { join } from "node:path"; |
5 | 5 | import type { BashArgs, ReadArgs } from "@agent-kanban/shared"; |
6 | 6 | import { ToolName } from "@agent-kanban/shared"; |
7 | 7 | import { Codex, type ThreadEvent } from "@openai/codex-sdk"; |
8 | | -import type { AgentEvent, AgentHandle, AgentProvider, ContentBlock, ExecuteOpts, HistoryEvent, UsageInfo, UsageWindow } from "./types.js"; |
| 8 | +import type { |
| 9 | + AgentEvent, |
| 10 | + AgentHandle, |
| 11 | + AgentProvider, |
| 12 | + ContentBlock, |
| 13 | + ExecuteOpts, |
| 14 | + HistoryEvent, |
| 15 | + RuntimeModel, |
| 16 | + UsageInfo, |
| 17 | + UsageWindow, |
| 18 | +} from "./types.js"; |
9 | 19 | import { availabilityFromUsage, availabilityFromUsageError, parseRetryAfterMs, UsageFetchError } from "./types.js"; |
10 | 20 |
|
11 | 21 | const AUTH_PATH = join(homedir(), ".codex", "auth.json"); |
12 | 22 | const CODEX_SESSIONS_DIR = join(homedir(), ".codex", "sessions"); |
| 23 | +const CODEX_MODELS_CACHE_PATH = join(homedir(), ".codex", "models_cache.json"); |
13 | 24 | const USAGE_API = "https://chatgpt.com/backend-api/wham/usage"; |
14 | 25 |
|
15 | 26 | function readAccessToken(): string | null { |
@@ -78,6 +89,39 @@ function resolveCodexModel(opts: ExecuteOpts): string | undefined { |
78 | 89 | return opts.model; |
79 | 90 | } |
80 | 91 |
|
| 92 | +type CodexCachedModel = { |
| 93 | + slug: string; |
| 94 | + display_name?: string; |
| 95 | + description?: string; |
| 96 | + visibility?: string; |
| 97 | + priority?: number; |
| 98 | + context_window?: number; |
| 99 | + max_context_window?: number; |
| 100 | + supported_reasoning_levels?: { effort: string }[]; |
| 101 | + default_reasoning_level?: string; |
| 102 | + support_verbosity?: boolean; |
| 103 | +}; |
| 104 | + |
| 105 | +function readCodexModelCache(): CodexCachedModel[] { |
| 106 | +  if (!existsSync(CODEX_MODELS_CACHE_PATH)) throw new Error("Codex models cache not found; start Codex CLI once to populate it"); |
| 107 | +  const data = JSON.parse(readFileSync(CODEX_MODELS_CACHE_PATH, "utf-8")) as { models?: CodexCachedModel[] }; |
| 108 | +  return data.models ?? []; |
| 109 | +} |
| 110 | + |
| 111 | +function normalizeCodexCachedModel(model: CodexCachedModel): RuntimeModel { |
| 112 | + return { |
| 113 | + id: model.slug, |
| 114 | + name: model.display_name, |
| 115 | + description: model.description, |
| 116 | + context_window: model.context_window, |
| 117 | + supports: { |
| 118 | + verbosity: model.support_verbosity ?? false, |
| 119 | + }, |
| 120 | + supported_reasoning_efforts: model.supported_reasoning_levels?.map((level) => level.effort), |
| 121 | + default_reasoning_effort: model.default_reasoning_level, |
| 122 | + }; |
| 123 | +} |
| 124 | + |
81 | 125 | /** Map a single Codex thread event to an AgentEvent (or null to skip). */ |
82 | 126 | export function mapThreadEvent(event: ThreadEvent, model = "o3"): AgentEvent | null { |
83 | 127 | switch (event.type) { |
@@ -218,6 +262,13 @@ export const codexProvider: AgentProvider = { |
218 | 262 | } |
219 | 263 | }, |
220 | 264 |
|
| 265 | + async listModels(): Promise<RuntimeModel[]> { |
| 266 | + return readCodexModelCache() |
| 267 | + .filter((model) => model.visibility !== "hide") |
| 268 | + .sort((a, b) => (a.priority ?? 0) - (b.priority ?? 0)) |
| 269 | + .map(normalizeCodexCachedModel); |
| 270 | + }, |
| 271 | + |
221 | 272 | async execute(opts: ExecuteOpts): Promise<AgentHandle> { |
222 | 273 | const model = resolveCodexModel(opts) ?? "o3"; |
223 | 274 | let resumeToken: string | undefined = opts.resumeToken; |
|
0 commit comments