diff --git a/README.md b/README.md index 1db6279e..79814b95 100644 --- a/README.md +++ b/README.md @@ -11,7 +11,7 @@ OAuth plugin for OpenCode that lets you use ChatGPT Plus/Pro rate limits with mo ## What You Get - **GPT-5.4, GPT-5 Codex, GPT-5.1 Codex Max** and all GPT-5.x variants via ChatGPT OAuth -- **Multi-account support** — Add up to 20 ChatGPT accounts, health-aware rotation with automatic failover +- **Multi-account support** — Add as many ChatGPT accounts as you need, health-aware rotation with automatic failover - **Per-project accounts** — Each project gets its own account storage (new in v4.10.0) - **Workspace-aware identity persistence** — Keeps workspace/org identity stable across token refresh and verify-flagged restore flows - **Click-to-switch** — Switch accounts directly from the OpenCode TUI @@ -68,6 +68,8 @@ npx -y oc-chatgpt-multi-auth@latest This writes the config to `~/.config/opencode/opencode.json`, backs up existing config, and clears the plugin cache. +After install, run `codex-doctor` once to confirm your local auth and account health are ready. + > Want legacy config (OpenCode v1.0.209 and below)? Add `--legacy` flag. **Option C: Manual setup** @@ -275,7 +277,7 @@ For legacy OpenCode (v1.0.209 and below), use `config/opencode-legacy.json` whic ## Multi-Account Setup -Add multiple ChatGPT accounts for higher combined quotas. The plugin uses **health-aware rotation** with automatic failover and supports up to 20 accounts. +Add multiple ChatGPT accounts for higher combined quotas. The plugin uses **health-aware rotation** with automatic failover and supports unlimited accounts. 
```bash opencode auth login # Run again to add more accounts @@ -836,6 +838,30 @@ Create `~/.opencode/openai-codex-auth-config.json` for optional settings: | `toastDurationMs` | `5000` | How long toast notifications stay visible (ms) | | `beginnerSafeMode` | `false` | Beginner-safe retry profile: conservative retry budget, disables all-accounts wait/retry, and caps all-accounts retries | +### Experimental Settings + +The auth dashboard now includes `Experimental settings` with a manual sync option for `codex-multi-auth`. + +Persist the toggle in `~/.opencode/openai-codex-auth-config.json`: + +```json +{ + "experimental": { + "syncFromCodexMultiAuth": { + "enabled": true + } + } +} +``` + +When enabled, `Sync now` will auto-discover a `codex-multi-auth` account store from: +- `CODEX_MULTI_AUTH_DIR` +- `CODEX_HOME/multi-auth` +- `~/DevTools/config/codex/multi-auth` +- `~/.codex/multi-auth` + +It previews import impact first and skips duplicate overlaps using the existing dedupe-aware import flow. 
+ ### Retry Behavior | Option | Default | What It Does | diff --git a/docs/configuration.md b/docs/configuration.md index 57b76e9e..50b8384f 100644 --- a/docs/configuration.md +++ b/docs/configuration.md @@ -168,6 +168,26 @@ The sample above intentionally sets `"retryAllAccountsMaxRetries": 3` as a bound | `fetchTimeoutMs` | `60000` | upstream fetch timeout in ms | | `streamStallTimeoutMs` | `45000` | max time to wait for next SSE chunk before aborting | +### Experimental Settings + +Enable manual sync from `codex-multi-auth`: + +```json +{ + "experimental": { + "syncFromCodexMultiAuth": { + "enabled": true + } + } +} +``` + +When enabled, the auth dashboard can discover `codex-multi-auth` storage from: +- `CODEX_MULTI_AUTH_DIR` +- `CODEX_HOME/multi-auth` +- `~/DevTools/config/codex/multi-auth` +- `~/.codex/multi-auth` + ### beginner safe mode behavior when `beginnerSafeMode` is enabled (`true` or `CODEX_AUTH_BEGINNER_SAFE_MODE=1`), the plugin applies a safer retry profile automatically: diff --git a/index.ts b/index.ts index 5ff94c0c..a2ed0716 100644 --- a/index.ts +++ b/index.ts @@ -24,6 +24,9 @@ */ import { tool } from "@opencode-ai/plugin/tool"; +import { promises as fsPromises } from "node:fs"; +import { createInterface } from "node:readline/promises"; +import { dirname } from "node:path"; import type { Plugin, PluginInput } from "@opencode-ai/plugin"; import type { Auth } from "@opencode-ai/sdk"; import { @@ -35,7 +38,7 @@ import { import { queuedRefresh, getRefreshQueueMetrics } from "./lib/refresh-queue.js"; import { openBrowserUrl } from "./lib/auth/browser.js"; import { startLocalOAuthServer } from "./lib/auth/server.js"; -import { promptAddAnotherAccount, promptLoginMode } from "./lib/cli.js"; +import { promptAddAnotherAccount, promptCodexMultiAuthSyncPrune, promptLoginMode } from "./lib/cli.js"; import { getCodexMode, getRequestTransformMode, @@ -65,7 +68,9 @@ import { getCodexTuiColorProfile, getCodexTuiGlyphMode, getBeginnerSafeMode, + 
getSyncFromCodexMultiAuthEnabled, loadPluginConfig, + setSyncFromCodexMultiAuthEnabled, } from "./lib/config.js"; import { AUTH_LABELS, @@ -109,17 +114,23 @@ import { loadAccounts, saveAccounts, withAccountStorageTransaction, + cleanupDuplicateEmailAccounts, + previewDuplicateEmailCleanup, clearAccounts, setStoragePath, + backupRawAccountsFile, exportAccounts, importAccounts, previewImportAccounts, createTimestampedBackupPath, loadFlaggedAccounts, + loadAccountAndFlaggedStorageSnapshot, saveFlaggedAccounts, clearFlaggedAccounts, StorageError, formatStorageErrorHint, + normalizeAccountStorage, + withFlaggedAccountsTransaction, type AccountStorageV3, type FlaggedAccountMetadataV1, } from "./lib/storage.js"; @@ -152,6 +163,8 @@ import { addJitter } from "./lib/rotation.js"; import { buildTableHeader, buildTableRow, type TableOptions } from "./lib/table-formatter.js"; import { setUiRuntimeOptions, type UiRuntimeOptions } from "./lib/ui/runtime.js"; import { paintUiText, formatUiBadge, formatUiHeader, formatUiItem, formatUiKeyValue, formatUiSection } from "./lib/ui/format.js"; +import { confirm } from "./lib/ui/confirm.js"; +import { ANSI, ANSI_CSI_REGEX, CONTROL_CHAR_REGEX } from "./lib/ui/ansi.js"; import { buildBeginnerChecklist, buildBeginnerDoctorFindings, @@ -182,6 +195,16 @@ import { detectErrorType, getRecoveryToastContent, } from "./lib/recovery.js"; +import { + CodexMultiAuthSyncCapacityError, + cleanupCodexMultiAuthSyncedOverlaps, + isCodexMultiAuthSourceTooLargeForCapacity, + loadCodexMultiAuthSourceStorage, + previewCodexMultiAuthSyncedOverlapCleanup, + previewSyncFromCodexMultiAuth, + syncFromCodexMultiAuth, +} from "./lib/codex-multi-auth-sync.js"; +import { createSyncPruneBackupPayload } from "./lib/sync-prune-backup.js"; /** * OpenAI Codex OAuth authentication plugin for opencode @@ -1216,6 +1239,288 @@ export const OpenAIOAuthPlugin: Plugin = async ({ client }: PluginInput) => { return applyUiRuntimeFromConfig(loadPluginConfig()); }; + const 
sanitizeScreenText = (value: string): string => + value.replace(ANSI_CSI_REGEX, "").replace(CONTROL_CHAR_REGEX, "").trim(); + type OperationTone = "normal" | "muted" | "success" | "warning" | "danger" | "accent"; + + const styleOperationText = ( + ui: UiRuntimeOptions, + text: string, + tone: OperationTone, + ): string => { + if (ui.v2Enabled) { + return paintUiText(ui, text, tone); + } + const ansiCode = + tone === "accent" + ? ANSI.cyan + : tone === "success" + ? ANSI.green + : tone === "warning" + ? ANSI.yellow + : tone === "danger" + ? ANSI.red + : tone === "muted" + ? ANSI.dim + : ""; + return ansiCode ? `${ansiCode}${text}${ANSI.reset}` : text; + }; + + const isAbortError = (error: unknown): boolean => { + if (!(error instanceof Error)) return false; + const maybe = error as Error & { code?: string }; + return maybe.name === "AbortError" || maybe.code === "ABORT_ERR"; + }; + + const waitForMenuReturn = async ( + ui: UiRuntimeOptions, + options: { + promptText?: string; + autoReturnMs?: number; + pauseOnAnyKey?: boolean; + } = {}, + ): Promise => { + if (!process.stdin.isTTY || !process.stdout.isTTY) { + return; + } + + const promptText = options.promptText ?? "Press Enter to return to the dashboard."; + const autoReturnMs = options.autoReturnMs ?? 0; + const pauseOnAnyKey = options.pauseOnAnyKey ?? true; + + try { + let chunk: Buffer | string | null; + do { + chunk = process.stdin.read(); + } while (chunk !== null); + } catch { + // best effort drain + } + + const writeInlineStatus = (message: string) => { + process.stdout.write(`\r${ANSI.clearLine}${styleOperationText(ui, message, "muted")}`); + }; + const clearInlineStatus = () => { + process.stdout.write(`\r${ANSI.clearLine}`); + }; + + if (autoReturnMs > 0) { + if (!pauseOnAnyKey) { + await new Promise((resolve) => setTimeout(resolve, autoReturnMs)); + return; + } + + const wasRaw = process.stdin.isRaw ?? 
false; + const endAt = Date.now() + autoReturnMs; + let lastShownSeconds: number | null = null; + const renderCountdown = () => { + const remainingMs = Math.max(0, endAt - Date.now()); + const remainingSeconds = Math.max(1, Math.ceil(remainingMs / 1000)); + if (lastShownSeconds === remainingSeconds) return; + lastShownSeconds = remainingSeconds; + writeInlineStatus(`Returning to dashboard in ${remainingSeconds}s. Press any key to pause.`); + }; + + renderCountdown(); + const pinned = await new Promise((resolve) => { + let done = false; + const interval = setInterval(renderCountdown, 80); + let timeout: NodeJS.Timeout | null = setTimeout(() => { + timeout = null; + if (!done) { + done = true; + cleanup(); + resolve(false); + } + }, autoReturnMs); + const onData = () => { + if (done) return; + done = true; + cleanup(); + resolve(true); + }; + const cleanup = () => { + clearInterval(interval); + if (timeout) { + clearTimeout(timeout); + timeout = null; + } + process.stdin.removeListener("data", onData); + try { + process.stdin.setRawMode(wasRaw); + } catch { + // best effort restore + } + }; + + try { + process.stdin.setRawMode(true); + } catch { + // best effort + } + process.stdin.on("data", onData); + process.stdin.resume(); + }); + + clearInlineStatus(); + if (!pinned) { + return; + } + + writeInlineStatus("Paused. 
Press any key to return."); + await new Promise((resolve) => { + const onData = () => { + cleanup(); + resolve(); + }; + const cleanup = () => { + process.stdin.removeListener("data", onData); + try { + process.stdin.setRawMode(wasRaw); + } catch { + // best effort restore + } + }; + + try { + process.stdin.setRawMode(true); + } catch { + // best effort fallback + } + process.stdin.on("data", onData); + process.stdin.resume(); + }); + clearInlineStatus(); + return; + } + + const rl = createInterface({ + input: process.stdin, + output: process.stdout, + }); + try { + process.stdout.write(`\r${ANSI.clearLine}`); + await rl.question(`${styleOperationText(ui, promptText, "muted")} `); + } catch (error) { + if (!isAbortError(error)) { + throw error; + } + } finally { + rl.close(); + clearInlineStatus(); + } + }; + + const createOperationScreen = ( + ui: UiRuntimeOptions, + title: string, + subtitle?: string, + ): { + push: (line: string, tone?: OperationTone) => void; + finish: ( + summaryLines?: Array<{ line: string; tone?: OperationTone }>, + options?: { failed?: boolean }, + ) => Promise; + abort: () => void; + } | null => { + if (!supportsInteractiveMenus()) { + return null; + } + + const entries: Array<{ line: string; tone: OperationTone }> = []; + const spinnerFrames = ["-", "\\", "|", "/"]; + let frame = 0; + let running = true; + let failed = false; + let initialized = false; + let timer: NodeJS.Timeout | null = null; + let closed = false; + + const dispose = () => { + if (closed) return; + closed = true; + running = false; + if (timer) { + clearInterval(timer); + timer = null; + } + process.stdout.write(ANSI.altScreenOff + ANSI.show + ANSI.clearScreen + ANSI.moveTo(1, 1)); + }; + + const render = () => { + const lines: string[] = []; + const maxVisibleLines = Math.max(8, (process.stdout.rows ?? 24) - 8); + const visibleEntries = entries.slice(-maxVisibleLines); + const spinner = running + ? `${spinnerFrames[frame % spinnerFrames.length] ?? "-"} ` + : failed + ? 
"x " + : "+ "; + const stageTone: OperationTone = failed ? "danger" : running ? "accent" : "success"; + const stageText = running + ? `${spinner}${sanitizeScreenText(subtitle ?? "Working")}` + : failed + ? "Action failed" + : "Done"; + + lines.push(styleOperationText(ui, sanitizeScreenText(title), "accent")); + lines.push(styleOperationText(ui, stageText, stageTone)); + lines.push(""); + for (const entry of visibleEntries) { + lines.push(styleOperationText(ui, sanitizeScreenText(entry.line), entry.tone)); + } + for (let i = visibleEntries.length; i < maxVisibleLines; i += 1) { + lines.push(""); + } + lines.push(""); + if (running) lines.push(styleOperationText(ui, "Working...", "muted")); + process.stdout.write(ANSI.clearScreen + ANSI.moveTo(1, 1) + lines.join("\n")); + frame += 1; + }; + + const ensureScreen = () => { + if (initialized) return; + process.stdout.write(ANSI.altScreenOn + ANSI.hide + ANSI.clearScreen + ANSI.moveTo(1, 1)); + render(); + timer = setInterval(() => { + if (!running) return; + render(); + }, 120); + initialized = true; + }; + + ensureScreen(); + return { + push: (line: string, tone = "normal") => { + ensureScreen(); + entries.push({ line: sanitizeScreenText(line), tone }); + render(); + }, + finish: async (summaryLines, options) => { + ensureScreen(); + if (summaryLines && summaryLines.length > 0) { + entries.push({ line: "", tone: "normal" }); + for (const entry of summaryLines) { + entries.push({ line: sanitizeScreenText(entry.line), tone: entry.tone ?? "normal" }); + } + } + failed = options?.failed === true; + running = false; + if (timer) { + clearInterval(timer); + timer = null; + } + render(); + await waitForMenuReturn(ui, failed + ? { promptText: "Press Enter to return to the dashboard." 
} + : { autoReturnMs: 2_000, pauseOnAnyKey: true }); + dispose(); + }, + abort: dispose, + }; + }; + type DashboardOperationScreen = NonNullable>; + const getStatusMarker = ( ui: UiRuntimeOptions, status: "ok" | "warning" | "error", @@ -1270,6 +1575,82 @@ export const OpenAIOAuthPlugin: Plugin = async ({ client }: PluginInput) => { return `Account ${index + 1} (${details.join(", ")})`; }; + const buildEmailCountMap = ( + accounts: Array<{ email?: string }>, + ): Map => { + const counts = new Map(); + for (const account of accounts) { + const normalizedEmail = sanitizeEmail(account.email); + if (!normalizedEmail) continue; + counts.set(normalizedEmail, (counts.get(normalizedEmail) ?? 0) + 1); + } + return counts; + }; + + const updateEmailCountMap = ( + emailCounts: Map, + previousEmail: string | undefined, + nextEmail: string | undefined, + ): void => { + const previousNormalized = sanitizeEmail(previousEmail); + const nextNormalized = sanitizeEmail(nextEmail); + if (previousNormalized === nextNormalized) { + return; + } + if (previousNormalized) { + const nextCount = (emailCounts.get(previousNormalized) ?? 0) - 1; + if (nextCount > 0) { + emailCounts.set(previousNormalized, nextCount); + } else { + emailCounts.delete(previousNormalized); + } + } + if (nextNormalized) { + emailCounts.set(nextNormalized, (emailCounts.get(nextNormalized) ?? 0) + 1); + } + }; + + const canHydrateCachedTokenForAccount = ( + emailCounts: Map, + account: { email?: string; accountId?: string }, + tokenAccountId: string | undefined, + ): boolean => { + const normalizedAccountId = account.accountId?.trim(); + if (normalizedAccountId) { + return tokenAccountId === normalizedAccountId; + } + const normalizedEmail = sanitizeEmail(account.email); + if (normalizedEmail && (emailCounts.get(normalizedEmail) ?? 
0) <= 1) { + return true; + } + return false; + }; + + type SyncRemovalTarget = { + refreshToken: string; + organizationId?: string; + accountId?: string; + }; + + const getSyncRemovalTargetKey = (target: SyncRemovalTarget): string => { + return `${target.organizationId ?? ""}|${target.accountId ?? ""}|${target.refreshToken}`; + }; + + const findAccountIndexByExactIdentity = ( + accounts: AccountStorageV3["accounts"], + target: SyncRemovalTarget | null | undefined, + ): number => { + if (!target || !target.refreshToken) return -1; + const targetKey = getSyncRemovalTargetKey(target); + return accounts.findIndex((account) => + getSyncRemovalTargetKey({ + refreshToken: account.refreshToken, + organizationId: account.organizationId, + accountId: account.accountId, + }) === targetKey, + ); + }; + const normalizeAccountTags = (raw: string): string[] => { return Array.from( new Set( @@ -2857,6 +3238,14 @@ while (attempted.size < Math.max(1, accountCount)) { }; const formatCodexQuotaLine = (snapshot: CodexQuotaSnapshot): string => { + const quotaBar = (usedPercent: number | undefined): string => { + if (typeof usedPercent !== "number" || !Number.isFinite(usedPercent)) { + return "▒▒▒▒▒▒▒▒▒▒"; + } + const left = Math.max(0, Math.min(100, Math.round(100 - usedPercent))); + const filled = Math.max(0, Math.min(10, Math.round(left / 10))); + return `${"█".repeat(filled)}${"▒".repeat(10 - filled)}`; + }; const summarizeWindow = (label: string, window: CodexQuotaWindow): string => { const used = window.usedPercent; const left = @@ -2865,7 +3254,7 @@ while (attempted.size < Math.max(1, accountCount)) { : undefined; const reset = formatResetAt(window.resetAtMs); let summary = label; - if (left !== undefined) summary = `${summary} ${left}% left`; + if (left !== undefined) summary = `${summary} ${quotaBar(used)} ${left}% left`; if (reset) summary = `${summary} (resets ${reset})`; return summary; }; @@ -2977,6 +3366,7 @@ while (attempted.size < Math.max(1, accountCount)) { }; const 
runAccountCheck = async (deepProbe: boolean): Promise => { + const ui = resolveUiRuntime(); const loadedStorage = await hydrateEmails(await loadAccounts()); const workingStorage = loadedStorage ? { @@ -2987,36 +3377,68 @@ while (attempted.size < Math.max(1, accountCount)) { : {}, } : { version: 3 as const, accounts: [], activeIndex: 0, activeIndexByFamily: {} }; - - if (workingStorage.accounts.length === 0) { - console.log("\nNo accounts to check.\n"); - return; - } - - const flaggedStorage = await loadFlaggedAccounts(); - let storageChanged = false; - let flaggedChanged = false; - const removeFromActive = new Set(); - const total = workingStorage.accounts.length; - let ok = 0; - let disabled = 0; - let errors = 0; - - console.log( - `\nChecking ${deepProbe ? "full account health" : "quotas"} for all accounts...\n`, + const screen = createOperationScreen( + ui, + "Health check", + deepProbe + ? `Checking ${workingStorage.accounts.length} account(s) with full refresh + live validation` + : `Checking ${workingStorage.accounts.length} account(s) with quota validation`, ); + let screenFinished = false; + const emit = ( + index: number, + detail: string, + tone: "normal" | "muted" | "success" | "warning" | "danger" | "accent" = "normal", + ) => { + const account = workingStorage.accounts[index]; + const label = sanitizeScreenText(formatCommandAccountLabel(account, index)); + const safeDetail = sanitizeScreenText(detail); + const prefix = + tone === "danger" + ? getStatusMarker(ui, "error") + : tone === "warning" + ? getStatusMarker(ui, "warning") + : getStatusMarker(ui, "ok"); + const line = sanitizeScreenText(`${prefix} ${label} | ${safeDetail}`); + if (screen) { + screen.push(line, tone); + return; + } + console.log(line); + }; - for (let i = 0; i < total; i += 1) { - const account = workingStorage.accounts[i]; - if (!account) continue; - const label = account.email ?? account.accountLabel ?? 
`Account ${i + 1}`; - if (account.enabled === false) { - disabled += 1; - console.log(`[${i + 1}/${total}] ${label}: DISABLED`); - continue; + try { + if (workingStorage.accounts.length === 0) { + if (screen) { + screen.push("No accounts to check.", "warning"); + await screen.finish(); + screenFinished = true; + } else { + console.log("No accounts to check."); + } + return; } - try { + const flaggedStorage = await loadFlaggedAccounts(); + let storageChanged = false; + let flaggedChanged = false; + const removeFromActive = new Set(); + const total = workingStorage.accounts.length; + let ok = 0; + let disabled = 0; + let errors = 0; + const workingEmailCounts = buildEmailCountMap(workingStorage.accounts); + + for (let i = 0; i < total; i += 1) { + const account = workingStorage.accounts[i]; + if (!account) continue; + if (account.enabled === false) { + disabled += 1; + emit(i, "disabled", "warning"); + continue; + } + + try { // If we already have a valid cached access token, don't force-refresh. // This avoids flagging accounts where the refresh token has been burned // but the access token is still valid (same behavior as Codex CLI). @@ -3050,8 +3472,14 @@ while (attempted.size < Math.max(1, accountCount)) { // instead of forcing a refresh. if (!accessToken) { const cached = await lookupCodexCliTokensByEmail(account.email); + const cachedTokenAccountId = cached ? 
extractAccountId(cached.accessToken) : undefined; if ( - cached && + cached && + canHydrateCachedTokenForAccount( + workingEmailCounts, + account, + cachedTokenAccountId, + ) && (typeof cached.expiresAt !== "number" || !Number.isFinite(cached.expiresAt) || cached.expiresAt > nowMs) @@ -3076,11 +3504,12 @@ while (attempted.size < Math.max(1, accountCount)) { extractAccountEmail(cached.accessToken), ); if (hydratedEmail && hydratedEmail !== account.email) { + updateEmailCountMap(workingEmailCounts, account.email, hydratedEmail); account.email = hydratedEmail; storageChanged = true; } - tokenAccountId = extractAccountId(cached.accessToken); + tokenAccountId = cachedTokenAccountId; if ( tokenAccountId && shouldUpdateAccountIdFromToken(account.accountIdSource, account.accountId) && @@ -3099,10 +3528,20 @@ while (attempted.size < Math.max(1, accountCount)) { errors += 1; const message = refreshResult.message ?? refreshResult.reason ?? "refresh failed"; - console.log(`[${i + 1}/${total}] ${label}: ERROR (${message})`); + emit(i, `error: ${message}`, "danger"); if (deepProbe && isFlaggableFailure(refreshResult)) { + const flaggedKey = getSyncRemovalTargetKey({ + refreshToken: account.refreshToken, + organizationId: account.organizationId, + accountId: account.accountId, + }); const existingIndex = flaggedStorage.accounts.findIndex( - (flagged) => flagged.refreshToken === account.refreshToken, + (flagged) => + getSyncRemovalTargetKey({ + refreshToken: flagged.refreshToken, + organizationId: flagged.organizationId, + accountId: flagged.accountId, + }) === flaggedKey, ); const flaggedRecord: FlaggedAccountMetadataV1 = { ...account, @@ -3115,7 +3554,7 @@ while (attempted.size < Math.max(1, accountCount)) { } else { flaggedStorage.accounts.push(flaggedRecord); } - removeFromActive.add(account.refreshToken); + removeFromActive.add(flaggedKey); flaggedChanged = true; } continue; @@ -3142,6 +3581,7 @@ while (attempted.size < Math.max(1, accountCount)) { 
extractAccountEmail(refreshResult.access, refreshResult.idToken), ); if (hydratedEmail && hydratedEmail !== account.email) { + updateEmailCountMap(workingEmailCounts, account.email, hydratedEmail); account.email = hydratedEmail; storageChanged = true; } @@ -3167,7 +3607,7 @@ while (attempted.size < Math.max(1, accountCount)) { tokenAccountId ? `${authDetail} (id:${tokenAccountId.slice(-6)})` : authDetail; - console.log(`[${i + 1}/${total}] ${label}: ${detail}`); + emit(i, detail, "success"); continue; } @@ -3191,69 +3631,151 @@ while (attempted.size < Math.max(1, accountCount)) { organizationId: account.organizationId, }); ok += 1; - console.log( - `[${i + 1}/${total}] ${label}: ${formatCodexQuotaLine(snapshot)}`, - ); + emit(i, formatCodexQuotaLine(snapshot), snapshot.status === 429 ? "warning" : "success"); } catch (error) { errors += 1; const message = error instanceof Error ? error.message : String(error); - console.log( - `[${i + 1}/${total}] ${label}: ERROR (${message.slice(0, 160)})`, - ); + emit(i, `error: ${message.slice(0, 160)}`, "danger"); + } + } catch (error) { + errors += 1; + const message = error instanceof Error ? error.message : String(error); + emit(i, `error: ${message.slice(0, 120)}`, "danger"); } - } catch (error) { - errors += 1; - const message = error instanceof Error ? 
error.message : String(error); - console.log(`[${i + 1}/${total}] ${label}: ERROR (${message.slice(0, 120)})`); } - } - if (removeFromActive.size > 0) { - workingStorage.accounts = workingStorage.accounts.filter( - (account) => !removeFromActive.has(account.refreshToken), - ); - clampActiveIndices(workingStorage); - storageChanged = true; - } + if (removeFromActive.size > 0) { + workingStorage.accounts = workingStorage.accounts.filter( + (account) => + !removeFromActive.has( + getSyncRemovalTargetKey({ + refreshToken: account.refreshToken, + organizationId: account.organizationId, + accountId: account.accountId, + }), + ), + ); + clampActiveIndices(workingStorage); + storageChanged = true; + } - if (storageChanged) { - await saveAccounts(workingStorage); - invalidateAccountManagerCache(); - } - if (flaggedChanged) { - await saveFlaggedAccounts(flaggedStorage); - } + if (flaggedChanged) { + await saveFlaggedAccounts(flaggedStorage); + } + if (storageChanged) { + await saveAccounts(workingStorage); + invalidateAccountManagerCache(); + } - console.log(""); - console.log(`Results: ${ok} ok, ${errors} error, ${disabled} disabled`); - if (removeFromActive.size > 0) { - console.log( - `Moved ${removeFromActive.size} account(s) to flagged pool (invalid refresh token).`, - ); + const summaryLines: Array<{ + line: string; + tone?: "normal" | "muted" | "success" | "warning" | "danger" | "accent"; + }> = [{ line: `Results: ${ok} ok, ${errors} error, ${disabled} disabled`, tone: errors > 0 ? "warning" : "success" }]; + if (removeFromActive.size > 0) { + summaryLines.push({ line: `Moved ${removeFromActive.size} account(s) to flagged pool (invalid refresh token).`, tone: "warning" as const }); + } + if (screen) { + await screen.finish(summaryLines); + screenFinished = true; + return; + } + console.log(""); + for (const line of summaryLines) { + console.log(line.line); + } + console.log(""); + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error); + if (screen) { + screen.push(`Health check failed: ${message}`, "danger"); + await screen.finish(undefined, { failed: true }); + screenFinished = true; + } else { + console.log(`\nHealth check failed: ${message}\n`); + } + } finally { + if (screen && !screenFinished) { + screen.abort(); + } } - console.log(""); }; - const verifyFlaggedAccounts = async (): Promise => { - const flaggedStorage = await loadFlaggedAccounts(); - if (flaggedStorage.accounts.length === 0) { - console.log("\nNo flagged accounts to verify.\n"); - return; - } + const verifyFlaggedAccounts = async ( + screenOverride?: DashboardOperationScreen | null, + ): Promise => { + const ui = resolveUiRuntime(); + const screen = + screenOverride ?? + createOperationScreen( + ui, + "Check Problem Accounts", + "Checking flagged accounts and attempting restore", + ); + const emit = ( + line: string, + tone: OperationTone = "normal", + ) => { + const safeLine = sanitizeScreenText(line); + if (screen) { + screen.push(safeLine, tone); + return; + } + console.log(safeLine); + }; + let screenFinished = false; + try { + const flaggedStorage = await loadFlaggedAccounts(); + const activeStorage = await loadAccounts(); + if (flaggedStorage.accounts.length === 0) { + emit("No flagged accounts to verify."); + if (screen && !screenOverride) { + await screen.finish(); + screenFinished = true; + } + return; + } - console.log("\nVerifying flagged accounts...\n"); - const remaining: FlaggedAccountMetadataV1[] = []; - const restored: TokenSuccessWithAccount[] = []; + emit(`Checking ${flaggedStorage.accounts.length} problem account(s)...`, "muted"); + const remaining: FlaggedAccountMetadataV1[] = []; + const restored: TokenSuccessWithAccount[] = []; + const flaggedLabelWidth = Math.min( + 72, + Math.max( + 18, + ...flaggedStorage.accounts.map((flagged, index) => + (flagged.email ?? flagged.accountLabel ?? 
`Flagged ${index + 1}`).length, + ), + ), + ); + const padFlaggedLabel = (value: string): string => + value.length >= flaggedLabelWidth ? value : `${value}${" ".repeat(flaggedLabelWidth - value.length)}`; - for (let i = 0; i < flaggedStorage.accounts.length; i += 1) { - const flagged = flaggedStorage.accounts[i]; - if (!flagged) continue; - const label = flagged.email ?? flagged.accountLabel ?? `Flagged ${i + 1}`; + for (let i = 0; i < flaggedStorage.accounts.length; i += 1) { + const flagged = flaggedStorage.accounts[i]; + if (!flagged) continue; + const label = padFlaggedLabel(flagged.email ?? flagged.accountLabel ?? `Flagged ${i + 1}`); try { const cached = await lookupCodexCliTokensByEmail(flagged.email); const now = Date.now(); + const cachedTokenAccountId = cached ? extractAccountId(cached.accessToken) : undefined; + const restoredIdentityContext = restored.map((entry) => ({ + email: sanitizeEmail(extractAccountEmail(entry.access, entry.idToken)), + accountId: entry.accountIdOverride ?? extractAccountId(entry.access), + })); + const restoreEmailCounts = buildEmailCountMap([ + ...(activeStorage?.accounts ?? 
[]), + ...restoredIdentityContext, + ...remaining, + flagged, + ...flaggedStorage.accounts.slice(i + 1).filter(Boolean), + ]); if ( cached && + canHydrateCachedTokenForAccount( + restoreEmailCounts, + flagged, + cachedTokenAccountId, + ) && typeof cached.expiresAt === "number" && Number.isFinite(cached.expiresAt) && cached.expiresAt > now @@ -3281,17 +3803,13 @@ while (attempted.size < Math.max(1, accountCount)) { resolved.primary.accountLabel = flagged.accountLabel; } restored.push(...resolved.variantsForPersistence); - console.log( - `[${i + 1}/${flaggedStorage.accounts.length}] ${label}: RESTORED (Codex CLI cache)`, - ); + emit(`${getStatusMarker(ui, "ok")} ${label} | restored (cache)`, "success"); continue; } const refreshResult = await queuedRefresh(flagged.refreshToken); if (refreshResult.type !== "success") { - console.log( - `[${i + 1}/${flaggedStorage.accounts.length}] ${label}: STILL FLAGGED (${refreshResult.message ?? refreshResult.reason ?? "refresh failed"})`, - ); + emit(`${getStatusMarker(ui, "warning")} ${label} | still flagged: ${refreshResult.message ?? refreshResult.reason ?? "refresh failed"}`, "warning"); remaining.push(flagged); continue; } @@ -3309,32 +3827,886 @@ while (attempted.size < Math.max(1, accountCount)) { resolved.primary.accountLabel = flagged.accountLabel; } restored.push(...resolved.variantsForPersistence); - console.log(`[${i + 1}/${flaggedStorage.accounts.length}] ${label}: RESTORED`); + emit(`${getStatusMarker(ui, "ok")} ${label} | restored`, "success"); + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error); + emit( + `${getStatusMarker(ui, "error")} ${label} | error: ${message.slice(0, 120)}`, + "danger", + ); + remaining.push({ + ...flagged, + lastError: message, + }); + } + } + + if (restored.length > 0) { + await persistAccountPool(restored, false); + invalidateAccountManagerCache(); + } + + await saveFlaggedAccounts({ + version: 1, + accounts: remaining, + }); + + const summaryLines: Array<{ + line: string; + tone?: "normal" | "muted" | "success" | "warning" | "danger" | "accent"; + }> = [{ line: `Results: ${restored.length} restored, ${remaining.length} still flagged`, tone: remaining.length > 0 ? "warning" : "success" }]; + if (screen && !screenOverride) { + await screen.finish(summaryLines); + screenFinished = true; + return; + } + console.log(""); + for (const line of summaryLines) { + console.log(line.line); + } + console.log(""); + } finally { + if (screen && !screenFinished && !screenOverride) { + screen.abort(); + } + } + }; + + const toggleCodexMultiAuthSyncSetting = async (): Promise => { + try { + const currentConfig = loadPluginConfig(); + const enabled = getSyncFromCodexMultiAuthEnabled(currentConfig); + await setSyncFromCodexMultiAuthEnabled(!enabled); + const nextLabel = !enabled ? "enabled" : "disabled"; + console.log(`\nSync from codex-multi-auth ${nextLabel}.\n`); + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error); + console.log(`\nFailed to update sync setting: ${message}\n`); + } + }; + + const createMaintenanceAccountsBackup = async ( + prefix: string, + ): Promise => { + const backupPath = createTimestampedBackupPath(prefix); + await backupRawAccountsFile(backupPath, true); + return backupPath; + }; + + const runCodexMultiAuthSync = async (): Promise => { + const currentConfig = loadPluginConfig(); + if (!getSyncFromCodexMultiAuthEnabled(currentConfig)) { + console.log("\nEnable sync from codex-multi-auth in Experimental settings first.\n"); + return; + } + + const PRUNE_BACKUP_READ_RETRY_DELAYS_MS = [100, 250, 500] as const; + + const createSyncPruneBackup = async (): Promise<{ + backupPath: string; + restore: () => Promise; + }> => { + const readPruneBackupFile = async (backupPath: string): Promise => { + const retryableCodes = new Set(["EBUSY", "EACCES", "EPERM"]); + for ( + let attempt = 0; + attempt <= PRUNE_BACKUP_READ_RETRY_DELAYS_MS.length; + attempt += 1 + ) { + try { + return await fsPromises.readFile(backupPath, "utf-8"); + } catch (error) { + const code = (error as NodeJS.ErrnoException).code; + if (!code || !retryableCodes.has(code) || attempt >= PRUNE_BACKUP_READ_RETRY_DELAYS_MS.length) { + throw error; + } + const delayMs = PRUNE_BACKUP_READ_RETRY_DELAYS_MS[attempt]; + if (delayMs !== undefined) { + await new Promise((resolve) => setTimeout(resolve, delayMs)); + } + } + } + throw new Error("readPruneBackupFile: unexpected retry exit"); + }; + const { accounts: loadedAccountsStorage, flagged: currentFlaggedStorage } = + await loadAccountAndFlaggedStorageSnapshot(); + const currentAccountsStorage = + loadedAccountsStorage ?? 
+ ({ + version: 3, + accounts: [], + activeIndex: 0, + activeIndexByFamily: {}, + } satisfies AccountStorageV3); + const backupPath = createTimestampedBackupPath("codex-sync-prune-backup"); + await fsPromises.mkdir(dirname(backupPath), { recursive: true }); + const backupPayload = createSyncPruneBackupPayload(currentAccountsStorage, currentFlaggedStorage); + const restoreAccountsSnapshot = structuredClone(currentAccountsStorage); + const restoreFlaggedSnapshot = structuredClone(currentFlaggedStorage); + // On Windows, mode bits are ignored and the backup relies on the parent directory ACLs. + await fsPromises.writeFile(backupPath, `${JSON.stringify(backupPayload, null, 2)}\n`, { + encoding: "utf-8", + mode: 0o600, + flag: "wx", + }); + return { + backupPath, + restore: async () => { + const backupRaw = await readPruneBackupFile(backupPath); + JSON.parse(backupRaw); + const normalizedAccounts = normalizeAccountStorage(restoreAccountsSnapshot); + if (!normalizedAccounts) { + throw new Error("Prune backup account snapshot failed validation."); + } + const flaggedSnapshot = restoreFlaggedSnapshot; + if ( + !flaggedSnapshot || + typeof flaggedSnapshot !== "object" || + (flaggedSnapshot as { version?: unknown }).version !== 1 || + !Array.isArray((flaggedSnapshot as { accounts?: unknown }).accounts) + ) { + throw new Error("Prune backup flagged snapshot failed validation."); + } + const emptyAccountsStorage = { + version: 3, + accounts: [], + activeIndex: 0, + activeIndexByFamily: {}, + } satisfies AccountStorageV3; + const restoredAccountsSnapshot = JSON.stringify(normalizedAccounts); + const liveAccountsBeforeRestore = await withAccountStorageTransaction( + async (current, persist) => { + const snapshot = current ?? emptyAccountsStorage; + try { + await persist(normalizedAccounts); + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error); + throw new Error(`Failed to restore account storage from prune backup: ${message}`); + } + return snapshot; + }, + ); + try { + await saveFlaggedAccounts( + flaggedSnapshot as { version: 1; accounts: FlaggedAccountMetadataV1[] }, + ); + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + try { + let rolledBack = false; + await withAccountStorageTransaction(async (current, persist) => { + const currentStorage = current ?? emptyAccountsStorage; + if (JSON.stringify(currentStorage) !== restoredAccountsSnapshot) { + return; + } + await persist(liveAccountsBeforeRestore); + rolledBack = true; + }); + if (!rolledBack) { + throw new Error("Account storage changed concurrently before rollback could be applied."); + } + } catch (rollbackError) { + const rollbackMessage = + rollbackError instanceof Error ? rollbackError.message : String(rollbackError); + throw new Error( + `Failed to restore flagged storage from prune backup: ${message}. Account-store rollback also failed: ${rollbackMessage}`, + ); + } + throw new Error( + `Failed to restore flagged storage from prune backup: ${message}. Account-store changes were rolled back.`, + ); + } + invalidateAccountManagerCache(); + }, + }; + }; + + const removeAccountsForSync = async ( + targets: SyncRemovalTarget[], + ): Promise => { + const targetKeySet = new Set( + targets + .filter((target) => typeof target.refreshToken === "string" && target.refreshToken.length > 0) + .map((target) => getSyncRemovalTargetKey(target)), + ); + let removedTargets: Array<{ + index: number; + account: AccountStorageV3["accounts"][number]; + }> = []; + await withAccountStorageTransaction(async (loadedStorage, persist) => { + const currentStorage = + loadedStorage ?? 
+ ({ + version: 3, + accounts: [], + activeIndex: 0, + activeIndexByFamily: {}, + } satisfies AccountStorageV3); + removedTargets = currentStorage.accounts + .map((account, index) => ({ index, account })) + .filter((entry) => + entry.account && + targetKeySet.has( + getSyncRemovalTargetKey({ + refreshToken: entry.account.refreshToken, + organizationId: entry.account.organizationId, + accountId: entry.account.accountId, + }), + ), + ); + if (removedTargets.length === 0) { + return; + } + const matchedKeySet = new Set( + removedTargets.map((entry) => + getSyncRemovalTargetKey({ + refreshToken: entry.account.refreshToken, + organizationId: entry.account.organizationId, + accountId: entry.account.accountId, + }), + ), + ); + if ( + removedTargets.length !== targetKeySet.size || + matchedKeySet.size !== targetKeySet.size || + [...targetKeySet].some((key) => !matchedKeySet.has(key)) + ) { + throw new Error("Selected accounts changed before removal. Re-run sync and confirm again."); + } + const activeAccountIdentity = { + refreshToken: + currentStorage.accounts[currentStorage.activeIndex]?.refreshToken ?? "", + organizationId: + currentStorage.accounts[currentStorage.activeIndex]?.organizationId, + accountId: currentStorage.accounts[currentStorage.activeIndex]?.accountId, + } satisfies SyncRemovalTarget; + const familyActiveIdentities = Object.fromEntries( + MODEL_FAMILIES.map((family) => { + const familyIndex = currentStorage.activeIndexByFamily?.[family] ?? currentStorage.activeIndex; + const familyAccount = currentStorage.accounts[familyIndex]; + return [ + family, + familyAccount + ? 
({ + refreshToken: familyAccount.refreshToken, + organizationId: familyAccount.organizationId, + accountId: familyAccount.accountId, + } satisfies SyncRemovalTarget) + : null, + ]; + }), + ) as Partial>; + currentStorage.accounts = currentStorage.accounts.filter( + (account) => + !targetKeySet.has( + getSyncRemovalTargetKey({ + refreshToken: account.refreshToken, + organizationId: account.organizationId, + accountId: account.accountId, + }), + ), + ); + const remappedActiveIndex = findAccountIndexByExactIdentity( + currentStorage.accounts, + activeAccountIdentity, + ); + currentStorage.activeIndex = + remappedActiveIndex >= 0 + ? remappedActiveIndex + : Math.min(currentStorage.activeIndex, Math.max(0, currentStorage.accounts.length - 1)); + currentStorage.activeIndexByFamily = currentStorage.activeIndexByFamily ?? {}; + for (const family of MODEL_FAMILIES) { + const remappedFamilyIndex = findAccountIndexByExactIdentity( + currentStorage.accounts, + familyActiveIdentities[family] ?? null, + ); + currentStorage.activeIndexByFamily[family] = + remappedFamilyIndex >= 0 ? 
remappedFamilyIndex : currentStorage.activeIndex; + } + clampActiveIndices(currentStorage); + await persist(currentStorage); + }); + if (removedTargets.length === 0) { + return; + } + const removedFlaggedKeys = new Set( + removedTargets.map((entry) => + getSyncRemovalTargetKey({ + refreshToken: entry.account.refreshToken, + organizationId: entry.account.organizationId, + accountId: entry.account.accountId, + }), + ), + ); + await withFlaggedAccountsTransaction(async (currentFlaggedStorage, persist) => { + await persist({ + version: 1, + accounts: currentFlaggedStorage.accounts.filter( + (flagged) => + !removedFlaggedKeys.has( + getSyncRemovalTargetKey({ + refreshToken: flagged.refreshToken, + organizationId: flagged.organizationId, + accountId: flagged.accountId, + }), + ), + ), + }); + }); + invalidateAccountManagerCache(); + const removedLabels = removedTargets + .map((entry) => { + const accountId = entry.account?.accountId?.trim(); + return accountId + ? `Account ${entry.index + 1} [${accountId.slice(-6)}]` + : `Account ${entry.index + 1}`; + }) + .join(", "); + console.log(`\nRemoved ${removedTargets.length} account(s): ${removedLabels}\n`); + }; + + const buildSyncRemovalPlan = async (indexes: number[]): Promise<{ + previewLines: string[]; + targets: SyncRemovalTarget[]; + }> => { + const currentStorage = + (await loadAccounts()) ?? + ({ + version: 3, + accounts: [], + activeIndex: 0, + activeIndexByFamily: {}, + } satisfies AccountStorageV3); + const candidates: Array<{ + previewLine: string; + target: SyncRemovalTarget; + }> = [...indexes] + .sort((left, right) => left - right) + .map((index) => { + const account = currentStorage.accounts[index]; + if (!account) { + throw new Error( + `Selected account ${index + 1} changed before confirmation. Re-run sync and confirm again.`, + ); + } + const label = account.email ?? account.accountLabel ?? `Account ${index + 1}`; + const currentSuffix = index === currentStorage.activeIndex ? 
" | current" : ""; + return { + previewLine: `${index + 1}. ${label}${currentSuffix}`, + target: { + refreshToken: account.refreshToken, + organizationId: account.organizationId, + accountId: account.accountId, + } satisfies SyncRemovalTarget, + }; + }); + return { + previewLines: candidates.map((candidate) => candidate.previewLine), + targets: candidates.map((candidate) => candidate.target), + }; + }; + + let pruneBackup: + | { + backupPath: string; + restore: () => Promise; + restoreFailureMessage?: string; + } + | null = null; + const restorePruneBackup = async (): Promise => { + const currentBackup = pruneBackup; + if (!currentBackup) return; + if (currentBackup.restoreFailureMessage) { + throw new Error( + `${currentBackup.restoreFailureMessage}. Backup remains at ${currentBackup.backupPath}.`, + ); + } + try { + await currentBackup.restore(); + pruneBackup = null; + } catch (restoreError) { + const message = + restoreError instanceof Error ? restoreError.message : String(restoreError); + currentBackup.restoreFailureMessage = message; + pruneBackup = currentBackup; + throw new Error(`${message}. Backup remains at ${currentBackup.backupPath}.`); + } + }; + const safeRestorePruneBackup = async (context: string): Promise => { + try { + await restorePruneBackup(); + } catch (restoreError) { + const message = + restoreError instanceof Error ? 
restoreError.message : String(restoreError); + console.log(`\nFailed to restore pruned accounts during ${context}: ${message}\n`); + } + }; + const syncPruneMaxAttempts = 5; + let syncPruneAttempts = 0; + while (syncPruneAttempts < syncPruneMaxAttempts) { + syncPruneAttempts += 1; + try { + const loadedSource = await loadCodexMultiAuthSourceStorage(process.cwd()); + const preview = await previewSyncFromCodexMultiAuth(process.cwd(), loadedSource); + console.log(""); + console.log(`codex-multi-auth source: ${preview.accountsPath}`); + console.log(`Scope: ${preview.scope}`); + console.log( + `Preview: +${preview.imported} new, ${preview.skipped} skipped, ${preview.total} total`, + ); + + if (preview.imported <= 0) { + if (pruneBackup) { + try { + await restorePruneBackup(); + } catch (restoreError) { + const message = + restoreError instanceof Error ? restoreError.message : String(restoreError); + logWarn( + `[${PLUGIN_NAME}] Failed to restore prune backup after zero-import preview: ${message}`, + ); + throw new Error( + `Failed to restore previously pruned accounts after zero-import preview: ${message}`, + ); + } + } + console.log("No new accounts to import.\n"); + return; + } + + const confirmed = await confirm( + `Import ${preview.imported} new account(s) from codex-multi-auth?`, + ); + if (!confirmed) { + await safeRestorePruneBackup("sync cancellation"); + console.log("\nSync cancelled.\n"); + return; + } + + const result = await syncFromCodexMultiAuth(process.cwd(), loadedSource); + pruneBackup = null; + invalidateAccountManagerCache(); + const backupLabel = + result.backupStatus === "created" + ? result.backupPath ?? "created" + : result.backupStatus === "skipped" + ? "skipped" + : result.backupError ?? 
"failed"; + + console.log(""); + console.log("Sync complete."); + console.log(`Source: ${result.accountsPath}`); + console.log(`Imported: ${result.imported}`); + console.log(`Skipped: ${result.skipped}`); + console.log(`Total: ${result.total}`); + console.log(`Auto-backup: ${backupLabel}`); + console.log(""); + return; } catch (error) { - const message = error instanceof Error ? error.message : String(error); + if (error instanceof CodexMultiAuthSyncCapacityError) { + const { details } = error; + console.log(""); + console.log("Sync blocked by account limit."); + console.log(`Source: ${details.accountsPath}`); + console.log(`Scope: ${details.scope}`); + console.log(`Current accounts: ${details.currentCount}`); + console.log(`Source accounts: ${details.sourceCount}`); + console.log(`Deduped total after merge: ${details.dedupedTotal}`); + console.log(`Overlap accounts skipped by dedupe: ${details.skippedOverlaps}`); + console.log(`Importable new accounts: ${details.importableNewAccounts}`); + console.log(`Maximum allowed: ${details.maxAccounts}`); + if (isCodexMultiAuthSourceTooLargeForCapacity(details)) { + await safeRestorePruneBackup("capacity handling"); + console.log( + `Source alone exceeds the configured maximum. Reduce the source set or raise CODEX_AUTH_SYNC_MAX_ACCOUNTS before retrying.`, + ); + console.log(""); + return; + } + console.log(`Remove at least ${details.needToRemove} account(s) first.`); + if (details.suggestedRemovals.length > 0) { + console.log("Suggested removals:"); + for (const suggestion of details.suggestedRemovals) { + const label = + suggestion.email ?? + suggestion.accountLabel ?? + `Account ${suggestion.index + 1}`; + const currentSuffix = suggestion.isCurrentAccount ? " | current" : ""; + console.log( + ` ${suggestion.index + 1}. 
${label}${currentSuffix} | score ${suggestion.score} | ${suggestion.reason}`, + ); + } + } + console.log(""); + const indexesToRemove = await promptCodexMultiAuthSyncPrune( + details.needToRemove, + details.suggestedRemovals, + ); + if (!indexesToRemove || indexesToRemove.length === 0) { + await safeRestorePruneBackup("sync cancellation"); + console.log("Sync cancelled.\n"); + return; + } + let removalPlan: { + previewLines: string[]; + targets: SyncRemovalTarget[]; + }; + try { + removalPlan = await buildSyncRemovalPlan(indexesToRemove); + } catch (planError) { + const message = + planError instanceof Error ? planError.message : String(planError); + await safeRestorePruneBackup("removal planning"); + console.log(`\nSync failed: ${message}\n`); + return; + } + console.log("Dry run removal:"); + for (const line of removalPlan.previewLines) { + console.log(` ${line}`); + } console.log( - `[${i + 1}/${flaggedStorage.accounts.length}] ${label}: ERROR (${message.slice(0, 120)})`, + "Accounts removed in this step cannot be recovered if the process is interrupted - ensure sync completes before closing.", ); - remaining.push({ - ...flagged, - lastError: message, - }); + console.log(""); + const confirmed = await confirm( + `Remove ${indexesToRemove.length} selected account(s) and retry sync? ` + + `Accounts cannot be recovered if the process is interrupted before sync completes.`, + ); + if (!confirmed) { + await safeRestorePruneBackup("sync cancellation"); + console.log("Sync cancelled.\n"); + return; + } + if (!pruneBackup) { + pruneBackup = await createSyncPruneBackup(); + } + await removeAccountsForSync(removalPlan.targets); + continue; + } + const message = error instanceof Error ? 
error.message : String(error); + await safeRestorePruneBackup("sync failure"); + console.log(`\nSync failed: ${message}\n`); + return; + } + } + console.log( + "\nSync hit max retry limit - raise CODEX_AUTH_SYNC_MAX_ACCOUNTS or remove accounts manually.\n", + ); + return; + }; + + const runCodexMultiAuthOverlapCleanup = async (): Promise => { + try { + const preview = await previewCodexMultiAuthSyncedOverlapCleanup(); + if (preview.removed <= 0 && preview.updated <= 0) { + console.log("\nNo synced overlaps found.\n"); + return; + } + console.log(""); + console.log("Cleanup preview."); + console.log(`Before: ${preview.before}`); + console.log(`After: ${preview.after}`); + console.log(`Would remove overlaps: ${preview.removed}`); + console.log(`Would update synced records: ${preview.updated}`); + console.log("A backup will be created before changes are applied."); + console.log(""); + const confirmed = await confirm( + `Create a backup and apply synced overlap cleanup?`, + ); + if (!confirmed) { + console.log("\nCleanup cancelled.\n"); + return; + } + const backupPath = await createMaintenanceAccountsBackup( + "codex-maintenance-overlap-backup", + ); + const result = await cleanupCodexMultiAuthSyncedOverlaps(); + invalidateAccountManagerCache(); + console.log(""); + console.log("Cleanup complete."); + console.log(`Before: ${result.before}`); + console.log(`After: ${result.after}`); + console.log(`Removed overlaps: ${result.removed}`); + console.log(`Updated synced records: ${result.updated}`); + console.log(`Backup: ${backupPath}`); + console.log(""); + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error); + console.log(`\nCleanup failed: ${message}\n`); + } + }; + + const runDuplicateEmailCleanup = async (): Promise => { + try { + const preview = await previewDuplicateEmailCleanup(); + if (preview.removed <= 0) { + console.log("\nNo legacy duplicate emails found.\n"); + return; + } + console.log(""); + console.log("Cleanup preview."); + console.log(`Before: ${preview.before}`); + console.log(`After: ${preview.after}`); + console.log(`Would remove legacy duplicates: ${preview.removed}`); + console.log("Only legacy accounts without organization or workspace IDs are eligible."); + console.log("A backup will be created before changes are applied."); + console.log(""); + const confirmed = await confirm( + `Create a backup and remove ${preview.removed} legacy duplicate-email account(s)?`, + ); + if (!confirmed) { + console.log("\nDuplicate email cleanup cancelled.\n"); + return; + } + const backupPath = await createMaintenanceAccountsBackup( + "codex-maintenance-duplicate-email-backup", + ); + const result = await cleanupDuplicateEmailAccounts(); + if (result.removed > 0) { + invalidateAccountManagerCache(); + console.log(""); + console.log("Duplicate email cleanup complete."); + console.log(`Before: ${result.before}`); + console.log(`After: ${result.after}`); + console.log(`Removed duplicates: ${result.removed}`); + console.log(`Backup: ${backupPath}`); + console.log(""); + return; } + + console.log("\nNo legacy duplicate emails found.\n"); + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + console.log(`\nDuplicate email cleanup failed: ${message}\n`); } + }; - if (restored.length > 0) { - await persistAccountPool(restored, false); + const pickBestAccountFromDashboard = async ( + screenOverride?: DashboardOperationScreen | null, + ): Promise => { + const ui = resolveUiRuntime(); + const screen = + screenOverride ?? 
+ createOperationScreen(ui, "Best Account", "Comparing accounts"); + let screenFinished = false; + try { + const storage = await loadAccounts(); + if (!storage || storage.accounts.length === 0) { + if (screen) { + screen.push("No accounts available.", "warning"); + if (!screenOverride) { + await screen.finish(); + } + screenFinished = true; + } else { + console.log("\nNo accounts available.\n"); + } + return; + } + + const now = Date.now(); + const managerForFix = await AccountManager.loadFromDisk(); + cachedAccountManager = managerForFix; + const explainability = managerForFix.getSelectionExplainability("codex", undefined, now); + const eligible = explainability + .filter((entry) => entry.eligible) + .sort((a, b) => { + if (b.healthScore !== a.healthScore) return b.healthScore - a.healthScore; + return b.tokensAvailable - a.tokensAvailable; + }); + const best = eligible[0]; + if (!best) { + if (screen) { + screen.push(`Compared ${explainability.length} account(s).`, "muted"); + screen.push("No eligible account available.", "warning"); + if (!screenOverride) { + await screen.finish(); + } + screenFinished = true; + } else { + console.log("\nNo eligible account available.\n"); + } + return; + } + + let selectedAccount: AccountStorageV3["accounts"][number] | undefined; + await withAccountStorageTransaction(async (loadedStorage, persist) => { + const workingStorage = + loadedStorage ?? + ({ + version: 3, + accounts: [], + activeIndex: 0, + activeIndexByFamily: {}, + } satisfies AccountStorageV3); + if (!workingStorage.accounts[best.index]) { + throw new Error(`Best account ${best.index + 1} changed before selection.`); + } + workingStorage.activeIndex = best.index; + workingStorage.activeIndexByFamily = workingStorage.activeIndexByFamily ?? 
{}; + for (const family of MODEL_FAMILIES) { + workingStorage.activeIndexByFamily[family] = best.index; + } + await persist(workingStorage); + selectedAccount = workingStorage.accounts[best.index]; + }); invalidateAccountManagerCache(); + const selectedLabel = formatCommandAccountLabel(selectedAccount, best.index); + + if (screen) { + screen.push(`Compared ${explainability.length} account(s); ${eligible.length} eligible.`, "muted"); + screen.push(`${getStatusMarker(ui, "ok")} ${selectedLabel}`, "success"); + screen.push( + `Availability ready | risk low | health ${best.healthScore} | tokens ${best.tokensAvailable}`, + "muted", + ); + if (best.reasons.length > 0) { + screen.push(`Why: ${best.reasons.slice(0, 3).join("; ")}`, "muted"); + } + if (!screenOverride) { + await screen.finish([{ line: "Best account selected.", tone: "success" }]); + } + screenFinished = true; + return; + } + + console.log(`\nSelected best account: ${selectedAccount?.email ?? `Account ${best.index + 1}`}\n`); + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error); + if (screen) { + screen.push(`Failed to pick best account: ${message}`, "danger"); + if (!screenOverride) { + await screen.finish(undefined, { failed: true }); + } + screenFinished = true; + return; + } + console.log(`\nFailed to pick best account: ${message}\n`); + } finally { + if (screen && !screenFinished && !screenOverride) { + screen.abort(); + } } + }; - await saveFlaggedAccounts({ - version: 1, - accounts: remaining, - }); + const runAutoRepairFromDashboard = async (): Promise => { + const ui = resolveUiRuntime(); + const screen = createOperationScreen( + ui, + "Auto-Fix", + "Checking and fixing common issues", + ); + let screenFinished = false; + const emit = ( + line: string, + tone: OperationTone = "normal", + ) => { + const safeLine = sanitizeScreenText(line); + if (screen) { + screen.push(safeLine, tone); + return; + } + console.log(safeLine); + }; + try { + const initialStorage = await loadAccounts(); + if (!initialStorage || initialStorage.accounts.length === 0) { + emit("No accounts available.", "warning"); + if (screen) { + await screen.finish(); + screenFinished = true; + } + return; + } + const appliedFixes: string[] = []; + const fixErrors: string[] = []; + const backupPath = await createMaintenanceAccountsBackup("codex-auto-repair-backup"); + emit(`Backup created: ${backupPath}`, "muted"); + const cleanupResult = await cleanupCodexMultiAuthSyncedOverlaps(); + if (cleanupResult.removed > 0) { + appliedFixes.push(`Removed ${cleanupResult.removed} synced overlap(s).`); + emit(`Removed ${cleanupResult.removed} synced overlap(s).`, "success"); + } + const refreshedStorage = await withAccountStorageTransaction( + async (loadedStorage, persist) => { + if (!loadedStorage || loadedStorage.accounts.length === 0) { + return null; + } + const workingStorage: AccountStorageV3 = { + ...loadedStorage, + accounts: loadedStorage.accounts.map((account) => ({ ...account })), + activeIndexByFamily: { 
...(loadedStorage.activeIndexByFamily ?? {}) }, + }; + + let changedByRefresh = false; + let refreshedCount = 0; + for (const account of workingStorage.accounts) { + try { + const refreshResult = await queuedRefresh(account.refreshToken); + if (refreshResult.type === "success") { + account.refreshToken = refreshResult.refresh; + account.accessToken = refreshResult.access; + account.expiresAt = refreshResult.expires; + changedByRefresh = true; + refreshedCount += 1; + } + } catch (error) { + fixErrors.push(error instanceof Error ? error.message : String(error)); + } + } - console.log(""); - console.log(`Results: ${restored.length} restored, ${remaining.length} still flagged`); - console.log(""); + if (changedByRefresh) { + await persist(workingStorage); + } + return { + changedByRefresh, + refreshedCount, + }; + }, + ); + if (!refreshedStorage) { + emit("No accounts available after cleanup.", "warning"); + if (screen) { + await screen.finish(); + screenFinished = true; + } + return; + } + + if (refreshedStorage.changedByRefresh) { + appliedFixes.push(`Refreshed ${refreshedStorage.refreshedCount} account token(s).`); + emit(`Refreshed ${refreshedStorage.refreshedCount} account token(s).`, "success"); + } + await verifyFlaggedAccounts(screen); + await pickBestAccountFromDashboard(screen); + emit(""); + emit("Auto-repair complete.", "success"); + for (const entry of appliedFixes) { + emit(`- ${entry}`, "muted"); + } + for (const entry of fixErrors) { + emit(`- warning: ${entry}`, "warning"); + } + if (screen) { + await screen.finish(); + screenFinished = true; + } else { + console.log(""); + } + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error); + if (screen) { + screen.push(`Auto-repair failed: ${message}`, "danger"); + await screen.finish(undefined, { failed: true }); + screenFinished = true; + } else { + console.log(`\nAuto-repair failed: ${message}\n`); + } + } finally { + if (screen && !screenFinished) { + screen.abort(); + } + } }; if (!explicitLoginMode) { @@ -3378,9 +4750,12 @@ while (attempted.size < Math.max(1, accountCount)) { accountLabel: account.accountLabel, email: account.email, index, + sourceIndex: index, + quickSwitchNumber: index + 1, addedAt: account.addedAt, lastUsed: account.lastUsed, status, + quotaSummary: formatRateLimitEntry(account, now) ?? undefined, isCurrentAccount: index === activeIndex, enabled: account.enabled !== false, }; @@ -3388,6 +4763,12 @@ while (attempted.size < Math.max(1, accountCount)) { const menuResult = await promptLoginMode(existingAccounts, { flaggedCount: flaggedStorage.accounts.length, + syncFromCodexMultiAuthEnabled: getSyncFromCodexMultiAuthEnabled(loadPluginConfig()), + statusMessage: () => { + const snapshot = runtimeMetrics.lastSelectionSnapshot; + if (!snapshot) return undefined; + return snapshot.model ? 
`Current lens: ${snapshot.family}:${snapshot.model}` : `Current lens: ${snapshot.family}`; + }, }); if (menuResult.mode === "cancel") { @@ -3414,6 +4795,33 @@ while (attempted.size < Math.max(1, accountCount)) { await verifyFlaggedAccounts(); continue; } + if (menuResult.mode === "forecast") { + await pickBestAccountFromDashboard(); + continue; + } + if (menuResult.mode === "fix") { + await runAutoRepairFromDashboard(); + continue; + } + if (menuResult.mode === "settings") { + continue; + } + if (menuResult.mode === "experimental-toggle-sync") { + await toggleCodexMultiAuthSyncSetting(); + continue; + } + if (menuResult.mode === "experimental-sync-now") { + await runCodexMultiAuthSync(); + continue; + } + if (menuResult.mode === "experimental-cleanup-overlaps") { + await runCodexMultiAuthOverlapCleanup(); + continue; + } + if (menuResult.mode === "maintenance-clean-duplicate-emails") { + await runDuplicateEmailCleanup(); + continue; + } if (menuResult.mode === "manage") { if (typeof menuResult.deleteAccountIndex === "number") { @@ -3452,6 +4860,28 @@ while (attempted.size < Math.max(1, accountCount)) { startFresh = false; break; } + if (typeof menuResult.switchAccountIndex === "number") { + const targetIndex = menuResult.switchAccountIndex; + let targetLabel: string | null = null; + await withAccountStorageTransaction(async (loadedStorage, persist) => { + const txStorage = loadedStorage; + if (!txStorage) return; + const target = txStorage.accounts[targetIndex]; + if (!target) return; + txStorage.activeIndex = targetIndex; + txStorage.activeIndexByFamily = txStorage.activeIndexByFamily ?? {}; + for (const family of MODEL_FAMILIES) { + txStorage.activeIndexByFamily[family] = targetIndex; + } + await persist(txStorage); + targetLabel = target.email ?? 
`Account ${targetIndex + 1}`; + }); + if (targetLabel) { + invalidateAccountManagerCache(); + console.log(`\nSet current account: ${targetLabel}.\n`); + } + continue; + } continue; } @@ -3481,7 +4911,9 @@ while (attempted.size < Math.max(1, accountCount)) { ? 1 : startFresh ? ACCOUNT_LIMITS.MAX_ACCOUNTS - : ACCOUNT_LIMITS.MAX_ACCOUNTS - existingCount; + : Number.isFinite(ACCOUNT_LIMITS.MAX_ACCOUNTS) + ? ACCOUNT_LIMITS.MAX_ACCOUNTS - existingCount + : Number.POSITIVE_INFINITY; if (availableSlots <= 0) { return { @@ -3600,7 +5032,10 @@ while (attempted.size < Math.max(1, accountCount)) { }); } - if (accounts.length >= ACCOUNT_LIMITS.MAX_ACCOUNTS) { + if ( + Number.isFinite(ACCOUNT_LIMITS.MAX_ACCOUNTS) && + accounts.length >= ACCOUNT_LIMITS.MAX_ACCOUNTS + ) { break; } diff --git a/lib/cli.ts b/lib/cli.ts index 1bd6656f..cda64221 100644 --- a/lib/cli.ts +++ b/lib/cli.ts @@ -1,18 +1,17 @@ import { createInterface } from "node:readline/promises"; import { stdin as input, stdout as output } from "node:process"; import type { AccountIdSource } from "./types.js"; +import { ANSI_CSI_REGEX, CONTROL_CHAR_REGEX } from "./ui/ansi.js"; import { showAuthMenu, showAccountDetails, + showSettingsMenu, + showSyncPruneMenu, isTTY, type AccountStatus, } from "./ui/auth-menu.js"; +import { UI_COPY } from "./ui/copy.js"; -/** - * Detect if running in OpenCode Desktop/TUI mode where readline prompts don't work. - * In TUI mode, stdin/stdout are controlled by the TUI renderer, so readline breaks. - * Exported for testing purposes. - */ export function isNonInteractiveMode(): boolean { if (process.env.FORCE_INTERACTIVE_MODE === "1") return false; if (!input.isTTY || !output.isTTY) return true; @@ -30,8 +29,8 @@ export async function promptAddAnotherAccount(currentCount: number): Promise 0 ? 
`${label} | ${details.join(" | ")}` : label; +} + +export async function promptCodexMultiAuthSyncPrune( + neededCount: number, + candidates: SyncPruneCandidate[], +): Promise { + if (isNonInteractiveMode()) { + return null; + } + + const suggested = candidates + .filter((candidate) => candidate.isCurrentAccount !== true) + .slice(0, neededCount) + .map((candidate) => candidate.index); + + if (isTTY()) { + return showSyncPruneMenu(neededCount, candidates); + } + + const rl = createInterface({ input, output }); + try { + console.log(""); + console.log(`Sync needs ${neededCount} free slot(s).`); + console.log("Suggested removals:"); + for (const candidate of candidates) { + console.log(` ${formatPruneCandidate(candidate)}`); + } + console.log(""); + console.log( + suggested.length >= neededCount + ? "Press Enter to remove the suggested accounts, or enter comma-separated numbers." + : "Enter comma-separated account numbers to remove, or Q to cancel.", + ); + + while (true) { + const answer = await rl.question(`Remove at least ${neededCount} account(s): `); + const normalized = answer.trim(); + if (!normalized) { + if (suggested.length >= neededCount) { + return suggested; + } + console.log("No default suggestion is available. 
Enter one or more account numbers."); + continue; + } + + if (normalized.toLowerCase() === "q" || normalized.toLowerCase() === "quit") { + return null; + } + + const parsed = normalized + .split(",") + .map((value) => Number.parseInt(value.trim(), 10)) + .filter((value) => Number.isFinite(value)) + .map((value) => value - 1); + const unique = Array.from(new Set(parsed)); + if (unique.length < neededCount) { + console.log(`Select at least ${neededCount} unique account number(s).`); + continue; + } + + const invalid = unique.filter((index) => !candidates.some((candidate) => candidate.index === index)); + if (invalid.length > 0) { + console.log("One or more selected account numbers are not valid for removal."); + continue; + } + + return unique; + } + } finally { + rl.close(); + } +} + export type LoginMode = | "add" + | "forecast" + | "fix" + | "settings" + | "experimental-toggle-sync" + | "experimental-sync-now" + | "experimental-cleanup-overlaps" + | "maintenance-clean-duplicate-emails" | "fresh" | "manage" | "check" @@ -53,15 +154,20 @@ export interface ExistingAccountInfo { accountLabel?: string; email?: string; index: number; + sourceIndex?: number; + quickSwitchNumber?: number; addedAt?: number; lastUsed?: number; status?: AccountStatus; + quotaSummary?: string; isCurrentAccount?: boolean; enabled?: boolean; } export interface LoginMenuOptions { flaggedCount?: number; + syncFromCodexMultiAuthEnabled?: boolean; + statusMessage?: string | (() => string | undefined); } export interface LoginMenuResult { @@ -69,14 +175,20 @@ export interface LoginMenuResult { deleteAccountIndex?: number; refreshAccountIndex?: number; toggleAccountIndex?: number; + switchAccountIndex?: number; deleteAll?: boolean; } function formatAccountLabel(account: ExistingAccountInfo, index: number): string { - const num = index + 1; - const label = account.accountLabel?.trim(); - const email = account.email?.trim(); - const accountId = account.accountId?.trim(); + const num = 
account.quickSwitchNumber ?? (index + 1); + const sanitizeFallbackLabel = (value: string | undefined): string | undefined => { + if (!value) return undefined; + const sanitized = value.replace(ANSI_CSI_REGEX, "").replace(CONTROL_CHAR_REGEX, "").trim(); + return sanitized.length > 0 ? sanitized : undefined; + }; + const label = sanitizeFallbackLabel(account.accountLabel); + const email = sanitizeFallbackLabel(account.email); + const accountId = sanitizeFallbackLabel(account.accountId); const accountIdDisplay = accountId && accountId.length > 14 ? `${accountId.slice(0, 8)}...${accountId.slice(-6)}` @@ -85,23 +197,70 @@ function formatAccountLabel(account: ExistingAccountInfo, index: number): string if (email) details.push(email); if (label) details.push(`workspace:${label}`); if (accountIdDisplay) details.push(`id:${accountIdDisplay}`); - if (details.length > 0) { - return `${num}. ${details.join(" | ")}`; + return details.length > 0 ? `${num}. ${details.join(" | ")}` : `${num}. Account`; +} + +function resolveAccountSourceIndex(account: ExistingAccountInfo): number { + const sourceIndex = + typeof account.sourceIndex === "number" && Number.isFinite(account.sourceIndex) + ? Math.max(0, Math.floor(account.sourceIndex)) + : undefined; + if (typeof sourceIndex === "number") return sourceIndex; + if (typeof account.index === "number" && Number.isFinite(account.index)) { + return Math.max(0, Math.floor(account.index)); } - return `${num}. 
Account`; + return -1; } -async function promptDeleteAllTypedConfirm(): Promise { - const rl = createInterface({ input, output }); +async function promptDeleteAllTypedConfirm( + rl?: ReturnType, +): Promise { + if (rl) { + const answer = await rl.question("Type DELETE to remove all saved accounts: "); + return answer.trim() === "DELETE"; + } + const localRl = createInterface({ input, output }); try { - const answer = await rl.question("Type DELETE to confirm removing all accounts: "); + const answer = await localRl.question("Type DELETE to remove all saved accounts: "); return answer.trim() === "DELETE"; } finally { - rl.close(); + localRl.close(); + } +} + +async function promptSettingsModeFallback( + rl: ReturnType, + syncFromCodexMultiAuthEnabled: boolean, +): Promise { + while (true) { + const syncState = syncFromCodexMultiAuthEnabled ? "enabled" : "disabled"; + const answer = await rl.question( + `(t) toggle sync [${syncState}], (i) sync now, (c) cleanup overlaps, (d) clean legacy duplicate emails, (b) back [t/i/c/d/b]: `, + ); + const normalized = answer.trim().toLowerCase(); + if (normalized === "t" || normalized === "toggle") { + return { mode: "experimental-toggle-sync" }; + } + if (normalized === "i" || normalized === "import" || normalized === "sync") { + return { mode: "experimental-sync-now" }; + } + if (normalized === "c" || normalized === "cleanup") { + return { mode: "experimental-cleanup-overlaps" }; + } + if (normalized === "d" || normalized === "dedupe" || normalized === "duplicates") { + return { mode: "maintenance-clean-duplicate-emails" }; + } + if (normalized === "b" || normalized === "back") { + return null; + } + console.log("Use one of: t, i, c, d, b."); } } -async function promptLoginModeFallback(existingAccounts: ExistingAccountInfo[]): Promise { +async function promptLoginModeFallback( + existingAccounts: ExistingAccountInfo[], + options: LoginMenuOptions = {}, +): Promise { const rl = createInterface({ input, output }); try { if 
(existingAccounts.length > 0) { @@ -113,15 +272,31 @@ async function promptLoginModeFallback(existingAccounts: ExistingAccountInfo[]): } while (true) { - const answer = await rl.question("(a)dd, (f)resh, (c)heck, (d)eep, (v)erify flagged, or (q)uit? [a/f/c/d/v/q]: "); + const answer = await rl.question(UI_COPY.fallback.selectModePrompt); const normalized = answer.trim().toLowerCase(); if (normalized === "a" || normalized === "add") return { mode: "add" }; - if (normalized === "f" || normalized === "fresh") return { mode: "fresh", deleteAll: true }; + if (normalized === "b" || normalized === "best" || normalized === "forecast") return { mode: "forecast" }; + if (normalized === "x" || normalized === "fix") return { mode: "fix" }; + if (normalized === "s" || normalized === "settings") { + const settingsResult = await promptSettingsModeFallback( + rl, + options.syncFromCodexMultiAuthEnabled === true, + ); + if (settingsResult) return settingsResult; + continue; + } + if (normalized === "f" || normalized === "fresh") { + if (!(await promptDeleteAllTypedConfirm(rl))) { + console.log("\nDelete all cancelled.\n"); + continue; + } + return { mode: "fresh", deleteAll: true }; + } if (normalized === "c" || normalized === "check") return { mode: "check" }; if (normalized === "d" || normalized === "deep") return { mode: "deep-check" }; - if (normalized === "v" || normalized === "verify") return { mode: "verify-flagged" }; + if (normalized === "g" || normalized === "verify" || normalized === "problem") return { mode: "verify-flagged" }; if (normalized === "q" || normalized === "quit") return { mode: "cancel" }; - console.log("Please enter one of: a, f, c, d, v, q."); + console.log(UI_COPY.fallback.invalidModePrompt); } } finally { rl.close(); @@ -137,20 +312,33 @@ export async function promptLoginMode( } if (!isTTY()) { - return promptLoginModeFallback(existingAccounts); + return promptLoginModeFallback(existingAccounts, options); } while (true) { const action = await 
showAuthMenu(existingAccounts, { flaggedCount: options.flaggedCount ?? 0, + statusMessage: options.statusMessage, }); switch (action.type) { case "add": return { mode: "add" }; + case "forecast": + return { mode: "forecast" }; + case "fix": + return { mode: "fix" }; + case "settings": { + const settingsAction = await showSettingsMenu(options.syncFromCodexMultiAuthEnabled === true); + if (settingsAction === "toggle-sync") return { mode: "experimental-toggle-sync" }; + if (settingsAction === "sync-now") return { mode: "experimental-sync-now" }; + if (settingsAction === "cleanup-overlaps") return { mode: "experimental-cleanup-overlaps" }; + if (settingsAction === "cleanup-duplicate-emails") return { mode: "maintenance-clean-duplicate-emails" }; + continue; + } case "fresh": if (!(await promptDeleteAllTypedConfirm())) { - console.log("\nDelete-all cancelled.\n"); + console.log("\nDelete all cancelled.\n"); continue; } return { mode: "fresh", deleteAll: true }; @@ -160,22 +348,45 @@ export async function promptLoginMode( return { mode: "deep-check" }; case "verify-flagged": return { mode: "verify-flagged" }; + case "set-current-account": { + const index = resolveAccountSourceIndex(action.account); + if (index >= 0) return { mode: "manage", switchAccountIndex: index }; + console.log("\nUnable to resolve the selected account. Refresh the menu and try again.\n"); + continue; + } case "select-account": { const accountAction = await showAccountDetails(action.account); if (accountAction === "delete") { - return { mode: "manage", deleteAccountIndex: action.account.index }; + const index = resolveAccountSourceIndex(action.account); + if (index >= 0) return { mode: "manage", deleteAccountIndex: index }; + console.log("\nUnable to resolve the selected account. 
Refresh the menu and try again.\n"); + continue; + } + if (accountAction === "set-current") { + const index = resolveAccountSourceIndex(action.account); + if (index >= 0) return { mode: "manage", switchAccountIndex: index }; + console.log("\nUnable to resolve the selected account. Refresh the menu and try again.\n"); + continue; } if (accountAction === "refresh") { - return { mode: "manage", refreshAccountIndex: action.account.index }; + const index = resolveAccountSourceIndex(action.account); + if (index >= 0) return { mode: "manage", refreshAccountIndex: index }; + console.log("\nUnable to resolve the selected account. Refresh the menu and try again.\n"); + continue; } if (accountAction === "toggle") { - return { mode: "manage", toggleAccountIndex: action.account.index }; + const index = resolveAccountSourceIndex(action.account); + if (index >= 0) return { mode: "manage", toggleAccountIndex: index }; + console.log("\nUnable to resolve the selected account. Refresh the menu and try again.\n"); + continue; } continue; } + case "search": + continue; case "delete-all": if (!(await promptDeleteAllTypedConfirm())) { - console.log("\nDelete-all cancelled.\n"); + console.log("\nDelete all cancelled.\n"); continue; } return { mode: "fresh", deleteAll: true }; diff --git a/lib/codex-multi-auth-sync.ts b/lib/codex-multi-auth-sync.ts new file mode 100644 index 00000000..76e1c070 --- /dev/null +++ b/lib/codex-multi-auth-sync.ts @@ -0,0 +1,1300 @@ +import { existsSync, readdirSync, promises as fs } from "node:fs"; +import { homedir } from "node:os"; +import { join, win32 } from "node:path"; +import { ACCOUNT_LIMITS } from "./constants.js"; +import { logWarn } from "./logger.js"; +import { + deduplicateAccounts, + deduplicateAccountsByEmail, + getStoragePath, + importAccounts, + loadAccounts, + normalizeAccountStorage, + previewImportAccountsWithExistingStorage, + withAccountStorageTransaction, + type AccountStorageV3, + type ImportAccountsResult, +} from "./storage.js"; 
+import { migrateV1ToV3, type AccountStorageV1 } from "./storage/migrations.js"; +import { findProjectRoot, getProjectStorageKeyCandidates } from "./storage/paths.js"; + +const EXTERNAL_ROOT_SUFFIX = "multi-auth"; +const EXTERNAL_ACCOUNT_FILE_NAMES = [ + "openai-codex-accounts.json", + "codex-accounts.json", +]; +const SYNC_ACCOUNT_TAG = "codex-multi-auth-sync"; +const SYNC_MAX_ACCOUNTS_OVERRIDE_ENV = "CODEX_AUTH_SYNC_MAX_ACCOUNTS"; +const NORMALIZED_IMPORT_TEMP_PREFIX = "oc-chatgpt-multi-auth-sync-"; +const STALE_NORMALIZED_IMPORT_MAX_AGE_MS = 10 * 60 * 1000; + +export interface CodexMultiAuthResolvedSource { + rootDir: string; + accountsPath: string; + scope: "project" | "global"; +} + +export interface LoadedCodexMultiAuthSourceStorage extends CodexMultiAuthResolvedSource { + storage: AccountStorageV3; +} + +export interface CodexMultiAuthSyncPreview extends CodexMultiAuthResolvedSource { + imported: number; + skipped: number; + total: number; +} + +export interface CodexMultiAuthSyncResult extends CodexMultiAuthSyncPreview { + backupStatus: ImportAccountsResult["backupStatus"]; + backupPath?: string; + backupError?: string; +} + +export interface CodexMultiAuthCleanupResult { + before: number; + after: number; + removed: number; + updated: number; +} + +export interface CodexMultiAuthSyncCapacityDetails extends CodexMultiAuthResolvedSource { + currentCount: number; + sourceCount: number; + sourceDedupedTotal: number; + dedupedTotal: number; + maxAccounts: number; + needToRemove: number; + importableNewAccounts: number; + skippedOverlaps: number; + suggestedRemovals: Array<{ + index: number; + email?: string; + accountLabel?: string; + isCurrentAccount: boolean; + score: number; + reason: string; + }>; +} + +function normalizeTrimmedIdentity(value: string | undefined): string | undefined { + const trimmed = value?.trim(); + return trimmed && trimmed.length > 0 ? 
trimmed : undefined; +} + +function normalizeSourceStorage(storage: AccountStorageV3): AccountStorageV3 { + const normalizedAccounts = storage.accounts.map((account) => { + const accountId = account.accountId?.trim(); + const organizationId = account.organizationId?.trim(); + const inferredOrganizationId = + !organizationId && + account.accountIdSource === "org" && + accountId && + accountId.startsWith("org-") + ? accountId + : organizationId; + + if (inferredOrganizationId && inferredOrganizationId !== organizationId) { + return { + ...account, + organizationId: inferredOrganizationId, + }; + } + return account; + }); + + return { + ...storage, + accounts: normalizedAccounts, + }; +} + +type NormalizedImportFileOptions = { + postSuccessCleanupFailureMode?: "throw" | "warn"; + onPostSuccessCleanupFailure?: (details: { tempDir: string; tempPath: string; message: string }) => void; +}; + +interface PreparedCodexMultiAuthPreviewStorage { + resolved: CodexMultiAuthResolvedSource & { storage: AccountStorageV3 }; + existing: AccountStorageV3; +} + +const TEMP_CLEANUP_RETRY_DELAYS_MS = [100, 250, 500] as const; +const STALE_TEMP_CLEANUP_RETRY_DELAY_MS = 150; + +function sleepAsync(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +async function removeNormalizedImportTempDir( + tempDir: string, + tempPath: string, + options: NormalizedImportFileOptions, +): Promise { + const retryableCodes = new Set(["EBUSY", "EAGAIN", "ENOTEMPTY", "EACCES", "EPERM"]); + let lastMessage = "unknown cleanup failure"; + for (let attempt = 0; attempt <= TEMP_CLEANUP_RETRY_DELAYS_MS.length; attempt += 1) { + try { + await fs.rm(tempDir, { recursive: true, force: true }); + return; + } catch (cleanupError) { + const code = (cleanupError as NodeJS.ErrnoException).code; + lastMessage = cleanupError instanceof Error ? 
cleanupError.message : String(cleanupError); + if ((!code || retryableCodes.has(code)) && attempt < TEMP_CLEANUP_RETRY_DELAYS_MS.length) { + const delayMs = TEMP_CLEANUP_RETRY_DELAYS_MS[attempt]; + if (delayMs !== undefined) { + await sleepAsync(delayMs); + } + continue; + } + break; + } + } + + logWarn(`Failed to remove temporary codex sync directory ${tempDir}: ${lastMessage}`); + options.onPostSuccessCleanupFailure?.({ tempDir, tempPath, message: lastMessage }); + if (options.postSuccessCleanupFailureMode !== "warn") { + throw new Error(`Failed to remove temporary codex sync directory ${tempDir}: ${lastMessage}`); + } +} + +function normalizeCleanupRateLimitResetTimes( + value: AccountStorageV3["accounts"][number]["rateLimitResetTimes"], +): Array<[string, number]> { + return Object.entries(value ?? {}) + .filter((entry): entry is [string, number] => typeof entry[1] === "number" && Number.isFinite(entry[1])) + .sort(([left], [right]) => left.localeCompare(right)); +} + +function normalizeCleanupTags(tags: string[] | undefined): string[] { + return [...(tags ?? 
[])].sort((left, right) => left.localeCompare(right)); +} + +function cleanupComparableAccount(account: AccountStorageV3["accounts"][number]): Record { + return { + refreshToken: account.refreshToken, + accessToken: account.accessToken, + expiresAt: account.expiresAt, + accountId: account.accountId, + organizationId: account.organizationId, + accountIdSource: account.accountIdSource, + accountLabel: account.accountLabel, + email: account.email, + enabled: account.enabled, + addedAt: account.addedAt, + lastUsed: account.lastUsed, + coolingDownUntil: account.coolingDownUntil, + cooldownReason: account.cooldownReason, + lastSwitchReason: account.lastSwitchReason, + accountNote: account.accountNote, + accountTags: normalizeCleanupTags(account.accountTags), + rateLimitResetTimes: normalizeCleanupRateLimitResetTimes(account.rateLimitResetTimes), + }; +} + +function accountsEqualForCleanup( + left: AccountStorageV3["accounts"][number], + right: AccountStorageV3["accounts"][number], +): boolean { + return JSON.stringify(cleanupComparableAccount(left)) === JSON.stringify(cleanupComparableAccount(right)); +} + +function storagesEqualForCleanup(left: AccountStorageV3, right: AccountStorageV3): boolean { + if (left.activeIndex !== right.activeIndex) return false; + + const leftFamilyIndices = (left.activeIndexByFamily ?? {}) as Record; + const rightFamilyIndices = (right.activeIndexByFamily ?? {}) as Record; + const familyKeys = new Set([...Object.keys(leftFamilyIndices), ...Object.keys(rightFamilyIndices)]); + + for (const family of familyKeys) { + if ((leftFamilyIndices[family] ?? left.activeIndex) !== (rightFamilyIndices[family] ?? right.activeIndex)) { + return false; + } + } + + if (left.accounts.length !== right.accounts.length) return false; + return left.accounts.every((account, index) => { + const candidate = right.accounts[index]; + return candidate ? 
accountsEqualForCleanup(account, candidate) : false; + }); +} + +function createCleanupRedactedStorage(storage: AccountStorageV3): AccountStorageV3 { + return { + ...storage, + accounts: storage.accounts.map((account) => ({ + ...account, + refreshToken: "__redacted__", + accessToken: undefined, + idToken: undefined, + })), + }; +} + +async function redactNormalizedImportTempFile(tempPath: string, storage: AccountStorageV3): Promise { + try { + const redactedStorage = createCleanupRedactedStorage(storage); + await fs.writeFile(tempPath, `${JSON.stringify(redactedStorage, null, 2)}\n`, { + encoding: "utf-8", + mode: 0o600, + flag: "w", + }); + } catch (error) { + logWarn( + `Failed to redact temporary codex sync file ${tempPath} before cleanup: ${ + error instanceof Error ? error.message : String(error) + }`, + ); + } +} + +async function withNormalizedImportFile( + storage: AccountStorageV3, + handler: (filePath: string) => Promise, + options: NormalizedImportFileOptions = {}, +): Promise { + const runWithTempDir = async (tempDir: string): Promise => { + await fs.chmod(tempDir, 0o700).catch(() => undefined); + const tempPath = join(tempDir, "accounts.json"); + try { + await fs.writeFile(tempPath, `${JSON.stringify(storage, null, 2)}\n`, { + encoding: "utf-8", + mode: 0o600, + flag: "wx", + }); + const result = await handler(tempPath); + await redactNormalizedImportTempFile(tempPath, storage); + await removeNormalizedImportTempDir(tempDir, tempPath, options); + return result; + } catch (error) { + await redactNormalizedImportTempFile(tempPath, storage); + try { + await removeNormalizedImportTempDir(tempDir, tempPath, { postSuccessCleanupFailureMode: "warn" }); + } catch (cleanupError) { + const message = cleanupError instanceof Error ? 
cleanupError.message : String(cleanupError); + logWarn(`Failed to remove temporary codex sync directory ${tempDir}: ${message}`); + } + throw error; + } + }; + + const secureTempRoot = join(getResolvedUserHomeDir(), ".opencode", "tmp"); + // On Windows the mode/chmod calls are ignored; the home-directory ACLs remain + // the actual isolation boundary for this temporary token material. + await fs.mkdir(secureTempRoot, { recursive: true, mode: 0o700 }); + await cleanupStaleNormalizedImportTempDirs(secureTempRoot); + const tempDir = await fs.mkdtemp(join(secureTempRoot, NORMALIZED_IMPORT_TEMP_PREFIX)); + return runWithTempDir(tempDir); +} + +async function cleanupStaleNormalizedImportTempDirs( + secureTempRoot: string, + now = Date.now(), +): Promise { + try { + const entries = await fs.readdir(secureTempRoot, { withFileTypes: true }); + for (const entry of entries) { + if (!entry.isDirectory() || !entry.name.startsWith(NORMALIZED_IMPORT_TEMP_PREFIX)) { + continue; + } + + const candidateDir = join(secureTempRoot, entry.name); + try { + const stats = await fs.stat(candidateDir); + if (now - stats.mtimeMs < STALE_NORMALIZED_IMPORT_MAX_AGE_MS) { + continue; + } + await fs.rm(candidateDir, { recursive: true, force: true }); + } catch (error) { + let code = (error as NodeJS.ErrnoException).code; + if (code === "ENOENT") { + continue; + } + let message = error instanceof Error ? error.message : String(error); + if (code === "EBUSY" || code === "EACCES" || code === "EPERM") { + await sleepAsync(STALE_TEMP_CLEANUP_RETRY_DELAY_MS); + try { + await fs.rm(candidateDir, { recursive: true, force: true }); + continue; + } catch (retryError) { + code = (retryError as NodeJS.ErrnoException).code; + if (code === "ENOENT") { + continue; + } + message = retryError instanceof Error ? 
retryError.message : String(retryError); + } + } + logWarn(`Failed to sweep stale codex sync temp directory ${candidateDir}: ${message}`); + } + } + } catch (error) { + const code = (error as NodeJS.ErrnoException).code; + if (code === "ENOENT") { + return; + } + const message = error instanceof Error ? error.message : String(error); + logWarn(`Failed to list codex sync temp root ${secureTempRoot}: ${message}`); + } +} + +function deduplicateAccountsForSync(storage: AccountStorageV3): AccountStorageV3 { + return { + ...storage, + accounts: deduplicateAccountsByEmail(deduplicateAccounts(storage.accounts)), + }; +} + +function selectNewestByTimestamp( + current: T, + candidate: T, +): T { + const currentLastUsed = current.lastUsed ?? 0; + const candidateLastUsed = candidate.lastUsed ?? 0; + if (candidateLastUsed > currentLastUsed) return candidate; + if (candidateLastUsed < currentLastUsed) return current; + const currentAddedAt = current.addedAt ?? 0; + const candidateAddedAt = candidate.addedAt ?? 0; + return candidateAddedAt >= currentAddedAt ? 
candidate : current; +} + +function deduplicateSourceAccountsByEmail( + accounts: AccountStorageV3["accounts"], +): AccountStorageV3["accounts"] { + const deduplicatedInput = deduplicateAccounts(accounts); + const deduplicated: AccountStorageV3["accounts"] = []; + const emailToIndex = new Map(); + + for (const account of deduplicatedInput) { + if (normalizeIdentity(account.organizationId) || normalizeIdentity(account.accountId)) { + deduplicated.push(account); + continue; + } + const normalizedEmail = normalizeIdentity(account.email); + if (!normalizedEmail) { + deduplicated.push(account); + continue; + } + + const existingIndex = emailToIndex.get(normalizedEmail); + if (existingIndex === undefined) { + emailToIndex.set(normalizedEmail, deduplicated.length); + deduplicated.push(account); + continue; + } + + const existing = deduplicated[existingIndex]; + if (!existing) continue; + const newest = selectNewestByTimestamp(existing, account); + const older = newest === existing ? account : existing; + deduplicated[existingIndex] = { + ...older, + ...newest, + email: newest.email ?? older.email, + accountLabel: newest.accountLabel ?? older.accountLabel, + accountId: newest.accountId ?? older.accountId, + organizationId: newest.organizationId ?? older.organizationId, + accountIdSource: newest.accountIdSource ?? older.accountIdSource, + refreshToken: newest.refreshToken ?? 
older.refreshToken, + }; + } + + return deduplicated; +} + +function buildExistingSyncIdentityState(existingAccounts: AccountStorageV3["accounts"]): { + organizationIds: Set; + accountIds: Set; + refreshTokens: Set; + emails: Set; +} { + const organizationIds = new Set(); + const accountIds = new Set(); + const refreshTokens = new Set(); + const emails = new Set(); + + for (const account of existingAccounts) { + const organizationId = normalizeIdentity(account.organizationId); + const accountId = normalizeIdentity(account.accountId); + const refreshToken = normalizeTrimmedIdentity(account.refreshToken); + const email = normalizeIdentity(account.email); + if (organizationId) organizationIds.add(organizationId); + if (accountId) accountIds.add(accountId); + if (refreshToken) refreshTokens.add(refreshToken); + if (email) emails.add(email); + } + + return { + organizationIds, + accountIds, + refreshTokens, + emails, + }; +} + +function filterSourceAccountsAgainstExistingEmails( + sourceStorage: AccountStorageV3, + existingAccounts: AccountStorageV3["accounts"], +): AccountStorageV3 { + const existingState = buildExistingSyncIdentityState(existingAccounts); + + return { + ...sourceStorage, + accounts: deduplicateSourceAccountsByEmail(sourceStorage.accounts).filter((account) => { + const normalizedEmail = normalizeIdentity(account.email); + if (normalizedEmail && existingState.emails.has(normalizedEmail)) { + return false; + } + const organizationId = normalizeIdentity(account.organizationId); + if (organizationId) { + return !existingState.organizationIds.has(organizationId); + } + const accountId = normalizeIdentity(account.accountId); + if (accountId) { + return !existingState.accountIds.has(accountId); + } + const refreshToken = normalizeTrimmedIdentity(account.refreshToken); + if (refreshToken && existingState.refreshTokens.has(refreshToken)) { + return false; + } + return true; + }), + }; +} + +function buildMergedDedupedAccounts( + currentAccounts: 
AccountStorageV3["accounts"], + sourceAccounts: AccountStorageV3["accounts"], +): AccountStorageV3["accounts"] { + return deduplicateAccountsForSync({ + version: 3, + accounts: [...currentAccounts, ...sourceAccounts], + activeIndex: 0, + activeIndexByFamily: {}, + }).accounts; +} + +function computeSyncCapacityDetails( + resolved: CodexMultiAuthResolvedSource, + sourceStorage: AccountStorageV3, + existing: AccountStorageV3, + maxAccounts: number, +): CodexMultiAuthSyncCapacityDetails | null { + const sourceDedupedTotal = buildMergedDedupedAccounts([], sourceStorage.accounts).length; + const mergedAccounts = buildMergedDedupedAccounts(existing.accounts, sourceStorage.accounts); + if (mergedAccounts.length <= maxAccounts) { + return null; + } + + const currentCount = existing.accounts.length; + const sourceCount = sourceStorage.accounts.length; + const dedupedTotal = mergedAccounts.length; + const importableNewAccounts = Math.max(0, dedupedTotal - currentCount); + const skippedOverlaps = Math.max(0, sourceCount - importableNewAccounts); + if (sourceDedupedTotal > maxAccounts) { + return { + rootDir: resolved.rootDir, + accountsPath: resolved.accountsPath, + scope: resolved.scope, + currentCount, + sourceCount, + sourceDedupedTotal, + dedupedTotal: sourceDedupedTotal, + maxAccounts, + needToRemove: sourceDedupedTotal - maxAccounts, + importableNewAccounts: 0, + skippedOverlaps: Math.max(0, sourceCount - sourceDedupedTotal), + suggestedRemovals: [], + }; + } + + const sourceIdentities = buildSourceIdentitySet(sourceStorage); + const suggestedRemovals = existing.accounts + .map((account, index) => { + const matchesSource = accountMatchesSource(account, sourceIdentities); + const isCurrentAccount = index === existing.activeIndex; + const hypotheticalAccounts = existing.accounts.filter((_, candidateIndex) => candidateIndex !== index); + const hypotheticalTotal = buildMergedDedupedAccounts(hypotheticalAccounts, sourceStorage.accounts).length; + const capacityRelief = 
Math.max(0, dedupedTotal - hypotheticalTotal); + return { + index, + email: account.email, + accountLabel: account.accountLabel, + isCurrentAccount, + enabled: account.enabled !== false, + matchesSource, + lastUsed: account.lastUsed ?? 0, + capacityRelief, + score: buildRemovalScore(account, { matchesSource, isCurrentAccount, capacityRelief }), + reason: buildRemovalExplanation(account, { matchesSource, capacityRelief }), + }; + }) + .sort((left, right) => { + if (left.score !== right.score) { + return right.score - left.score; + } + if (left.lastUsed !== right.lastUsed) { + return left.lastUsed - right.lastUsed; + } + return left.index - right.index; + }) + .slice(0, Math.max(5, dedupedTotal - maxAccounts)) + .map(({ index, email, accountLabel, isCurrentAccount, score, reason }) => ({ + index, + email, + accountLabel, + isCurrentAccount, + score, + reason, + })); + + return { + rootDir: resolved.rootDir, + accountsPath: resolved.accountsPath, + scope: resolved.scope, + currentCount, + sourceCount, + sourceDedupedTotal, + dedupedTotal, + maxAccounts, + needToRemove: dedupedTotal - maxAccounts, + importableNewAccounts, + skippedOverlaps, + suggestedRemovals, + }; +} + +function normalizeIdentity(value: string | undefined): string | undefined { + const trimmed = value?.trim(); + return trimmed && trimmed.length > 0 ? 
trimmed.toLowerCase() : undefined; +} + +function toCleanupIdentityKeys(account: { + organizationId?: string; + accountId?: string; + refreshToken: string; +}): string[] { + const keys: string[] = []; + const organizationId = normalizeIdentity(account.organizationId); + if (organizationId) keys.push(`org:${organizationId}`); + const accountId = normalizeIdentity(account.accountId); + if (accountId) keys.push(`account:${accountId}`); + const refreshToken = normalizeTrimmedIdentity(account.refreshToken); + if (refreshToken) keys.push(`refresh:${refreshToken}`); + return keys; +} + +function extractCleanupActiveKeys( + accounts: AccountStorageV3["accounts"], + activeIndex: number, +): string[] { + const candidate = accounts[activeIndex]; + if (!candidate) return []; + return toCleanupIdentityKeys({ + organizationId: candidate.organizationId, + accountId: candidate.accountId, + refreshToken: candidate.refreshToken, + }); +} + +function findCleanupAccountIndexByIdentityKeys( + accounts: AccountStorageV3["accounts"], + identityKeys: string[], +): number { + if (identityKeys.length === 0) return -1; + for (const identityKey of identityKeys) { + const index = accounts.findIndex((account) => + toCleanupIdentityKeys({ + organizationId: account.organizationId, + accountId: account.accountId, + refreshToken: account.refreshToken, + }).includes(identityKey), + ); + if (index >= 0) return index; + } + return -1; +} + +function buildSourceIdentitySet(storage: AccountStorageV3): Set { + const identities = new Set(); + for (const account of storage.accounts) { + const organizationId = normalizeIdentity(account.organizationId); + const accountId = normalizeIdentity(account.accountId); + const email = normalizeIdentity(account.email); + const refreshToken = normalizeIdentity(account.refreshToken); + if (organizationId) identities.add(`org:${organizationId}`); + if (accountId) identities.add(`account:${accountId}`); + if (email) identities.add(`email:${email}`); + if (refreshToken) 
identities.add(`refresh:${refreshToken}`); + } + return identities; +} + +function accountMatchesSource(account: AccountStorageV3["accounts"][number], sourceIdentities: Set): boolean { + const organizationId = normalizeIdentity(account.organizationId); + const accountId = normalizeIdentity(account.accountId); + const email = normalizeIdentity(account.email); + const refreshToken = normalizeIdentity(account.refreshToken); + return ( + (organizationId ? sourceIdentities.has(`org:${organizationId}`) : false) || + (accountId ? sourceIdentities.has(`account:${accountId}`) : false) || + (email ? sourceIdentities.has(`email:${email}`) : false) || + (refreshToken ? sourceIdentities.has(`refresh:${refreshToken}`) : false) + ); +} + +function buildRemovalScore( + account: AccountStorageV3["accounts"][number], + options: { matchesSource: boolean; isCurrentAccount: boolean; capacityRelief: number }, +): number { + let score = 0; + if (options.isCurrentAccount) { + score -= 1000; + } + score += options.capacityRelief * 1000; + if (account.enabled === false) { + score += 120; + } + if (!options.matchesSource) { + score += 80; + } + const lastUsed = account.lastUsed ?? 0; + if (lastUsed > 0) { + const ageDays = Math.max(0, Math.floor((Date.now() - lastUsed) / 86_400_000)); + score += Math.min(60, ageDays); + } else { + score += 40; + } + return score; +} + +function buildRemovalExplanation( + account: AccountStorageV3["accounts"][number], + options: { matchesSource: boolean; capacityRelief: number }, +): string { + const details: string[] = []; + if (options.capacityRelief > 0) { + details.push(`frees ${options.capacityRelief} sync slot${options.capacityRelief === 1 ? 
"" : "s"}`); + } + if (account.enabled === false) { + details.push("disabled"); + } + if (!options.matchesSource) { + details.push("not present in codex-multi-auth source"); + } + if (details.length === 0) { + details.push("least recently used"); + } + return details.join(", "); +} + +function firstNonEmpty(values: Array): string | null { + for (const value of values) { + const trimmed = (value ?? "").trim(); + if (trimmed.length > 0) { + return trimmed; + } + } + return null; +} + +function getResolvedUserHomeDir(): string { + if (process.platform === "win32") { + const homeDrive = (process.env.HOMEDRIVE ?? "").trim(); + const homePath = (process.env.HOMEPATH ?? "").trim(); + const drivePathHome = + homeDrive.length > 0 && homePath.length > 0 + ? win32.resolve(`${homeDrive}\\`, homePath) + : undefined; + return ( + firstNonEmpty([ + process.env.USERPROFILE, + process.env.HOME, + drivePathHome, + homedir(), + ]) ?? homedir() + ); + } + return firstNonEmpty([process.env.HOME, homedir()]) ?? homedir(); +} + +function deduplicatePaths(paths: string[]): string[] { + const seen = new Set(); + const result: string[] = []; + for (const candidate of paths) { + const trimmed = candidate.trim(); + if (trimmed.length === 0) continue; + const key = process.platform === "win32" ? 
trimmed.toLowerCase() : trimmed; + if (seen.has(key)) continue; + seen.add(key); + result.push(trimmed); + } + return result; +} + +function hasStorageSignals(dir: string): boolean { + for (const fileName of [...EXTERNAL_ACCOUNT_FILE_NAMES, "settings.json", "dashboard-settings.json", "config.json"]) { + if (existsSync(join(dir, fileName))) { + return true; + } + } + return existsSync(join(dir, "projects")); +} + +function hasProjectScopedAccountsStorage(dir: string): boolean { + const projectsDir = join(dir, "projects"); + try { + for (const entry of readdirSync(projectsDir, { withFileTypes: true })) { + if (!entry.isDirectory()) { + continue; + } + for (const fileName of EXTERNAL_ACCOUNT_FILE_NAMES) { + if (existsSync(join(projectsDir, entry.name, fileName))) { + return true; + } + } + } + } catch { + // best-effort probe; missing or unreadable project roots simply mean "no signal" + } + return false; +} + +function hasAccountsStorage(dir: string): boolean { + return ( + EXTERNAL_ACCOUNT_FILE_NAMES.some((fileName) => existsSync(join(dir, fileName))) || + hasProjectScopedAccountsStorage(dir) + ); +} + +function getCodexHomeDir(): string { + const fromEnv = (process.env.CODEX_HOME ?? "").trim(); + return fromEnv.length > 0 ? fromEnv : join(getResolvedUserHomeDir(), ".codex"); +} + +function getCodexMultiAuthRootCandidates(userHome: string): string[] { + const candidates = [ + join(userHome, "DevTools", "config", "codex", EXTERNAL_ROOT_SUFFIX), + join(userHome, ".codex", EXTERNAL_ROOT_SUFFIX), + ]; + const explicitCodexHome = (process.env.CODEX_HOME ?? 
"").trim(); + if (explicitCodexHome.length > 0) { + candidates.unshift(join(getCodexHomeDir(), EXTERNAL_ROOT_SUFFIX)); + } + return deduplicatePaths(candidates); +} + +function validateCodexMultiAuthRootDir(pathValue: string): string { + const trimmed = pathValue.trim(); + if (trimmed.length === 0) { + throw new Error("CODEX_MULTI_AUTH_DIR must not be empty"); + } + if (process.platform === "win32") { + const normalized = trimmed.replace(/\//g, "\\"); + const isExtendedDrivePath = /^\\\\[?.]\\[a-zA-Z]:\\/.test(normalized); + if (normalized.startsWith("\\\\") && !isExtendedDrivePath) { + throw new Error("CODEX_MULTI_AUTH_DIR must use a local absolute path, not a UNC network share"); + } + if (!/^[a-zA-Z]:\\/.test(normalized) && !isExtendedDrivePath) { + throw new Error("CODEX_MULTI_AUTH_DIR must be an absolute local path"); + } + return normalized; + } + if (!trimmed.startsWith("/")) { + throw new Error("CODEX_MULTI_AUTH_DIR must be an absolute path"); + } + return trimmed; +} + +function tagSyncedAccounts(storage: AccountStorageV3): AccountStorageV3 { + return { + ...storage, + accounts: storage.accounts.map((account) => { + const existingTags = Array.isArray(account.accountTags) ? account.accountTags : []; + return { + ...account, + accountTags: existingTags.includes(SYNC_ACCOUNT_TAG) + ? existingTags + : [...existingTags, SYNC_ACCOUNT_TAG], + }; + }), + }; +} + +export function getCodexMultiAuthSourceRootDir(): string { + const fromEnv = (process.env.CODEX_MULTI_AUTH_DIR ?? "").trim(); + if (fromEnv.length > 0) { + return validateCodexMultiAuthRootDir(fromEnv); + } + + const userHome = getResolvedUserHomeDir(); + const candidates = getCodexMultiAuthRootCandidates(userHome); + + for (const candidate of candidates) { + if (hasAccountsStorage(candidate)) { + return candidate; + } + } + + for (const candidate of candidates) { + if (hasStorageSignals(candidate)) { + return candidate; + } + } + + return candidates[0] ?? 
join(userHome, ".codex", EXTERNAL_ROOT_SUFFIX); +} + +function getProjectScopedAccountsPath(rootDir: string, projectPath: string): string | undefined { + const projectRoot = findProjectRoot(projectPath); + if (!projectRoot) { + return undefined; + } + + for (const candidateKey of getProjectStorageKeyCandidates(projectRoot)) { + for (const fileName of EXTERNAL_ACCOUNT_FILE_NAMES) { + const candidate = join(rootDir, "projects", candidateKey, fileName); + if (existsSync(candidate)) { + return candidate; + } + } + } + return undefined; +} + +function getGlobalAccountsPath(rootDir: string): string | undefined { + for (const fileName of EXTERNAL_ACCOUNT_FILE_NAMES) { + const candidate = join(rootDir, fileName); + if (existsSync(candidate)) { + return candidate; + } + } + return undefined; +} + +export function resolveCodexMultiAuthAccountsSource(projectPath = process.cwd()): CodexMultiAuthResolvedSource { + const fromEnv = (process.env.CODEX_MULTI_AUTH_DIR ?? "").trim(); + const userHome = getResolvedUserHomeDir(); + const candidates = + fromEnv.length > 0 + ? [validateCodexMultiAuthRootDir(fromEnv)] + : getCodexMultiAuthRootCandidates(userHome); + + for (const rootDir of candidates) { + const projectScopedPath = getProjectScopedAccountsPath(rootDir, projectPath); + if (projectScopedPath) { + return { + rootDir, + accountsPath: projectScopedPath, + scope: "project", + }; + } + + const globalPath = getGlobalAccountsPath(rootDir); + if (globalPath) { + return { + rootDir, + accountsPath: globalPath, + scope: "global", + }; + } + } + + const hintedRoot = candidates.find((candidate) => hasAccountsStorage(candidate) || hasStorageSignals(candidate)) ?? candidates[0]; + throw new Error(`No codex-multi-auth accounts file found under ${hintedRoot}`); +} + +function getSyncCapacityLimit(): number { + const override = (process.env[SYNC_MAX_ACCOUNTS_OVERRIDE_ENV] ?? 
"").trim(); + if (override.length === 0) { + return ACCOUNT_LIMITS.MAX_ACCOUNTS; + } + const parsed = Number(override); + if (Number.isFinite(parsed) && parsed > 0) { + return parsed; + } + const message = `${SYNC_MAX_ACCOUNTS_OVERRIDE_ENV} override value "${override}" is not a positive finite number; ignoring.`; + logWarn(message); + try { + process.stderr.write(`${message}\n`); + } catch { + // best-effort warning for non-interactive shells + } + return ACCOUNT_LIMITS.MAX_ACCOUNTS; +} + +export async function loadCodexMultiAuthSourceStorage( + projectPath = process.cwd(), +): Promise { + const resolved = resolveCodexMultiAuthAccountsSource(projectPath); + const raw = await fs.readFile(resolved.accountsPath, "utf-8"); + let parsed: unknown; + try { + parsed = JSON.parse(raw) as unknown; + } catch { + throw new Error(`Invalid JSON in codex-multi-auth accounts file: ${resolved.accountsPath}`); + } + + const storage = normalizeAccountStorage(parsed); + if (!storage) { + throw new Error(`Invalid codex-multi-auth account storage format: ${resolved.accountsPath}`); + } + + return { + ...resolved, + storage: normalizeSourceStorage(storage), + }; +} + +function createEmptyAccountStorage(): AccountStorageV3 { + return { + version: 3, + accounts: [], + activeIndex: 0, + activeIndexByFamily: {}, + }; +} + +async function prepareCodexMultiAuthPreviewStorage( + resolved: CodexMultiAuthResolvedSource & { storage: AccountStorageV3 }, +): Promise { + const current = await loadAccounts(); + const existing = current ?? createEmptyAccountStorage(); + const preparedStorage = filterSourceAccountsAgainstExistingEmails( + resolved.storage, + existing.accounts, + ); + const maxAccounts = getSyncCapacityLimit(); + // Infinity is the sentinel for the default unlimited-account mode. 
+ if (Number.isFinite(maxAccounts)) { + const details = computeSyncCapacityDetails(resolved, preparedStorage, existing, maxAccounts); + if (details) { + throw new CodexMultiAuthSyncCapacityError(details); + } + } + return { + resolved: { + ...resolved, + storage: preparedStorage, + }, + existing, + }; +} + +export async function previewSyncFromCodexMultiAuth( + projectPath = process.cwd(), + loadedSource?: LoadedCodexMultiAuthSourceStorage, +): Promise { + const source = loadedSource ?? (await loadCodexMultiAuthSourceStorage(projectPath)); + const { resolved, existing } = await prepareCodexMultiAuthPreviewStorage(source); + const preview = await withNormalizedImportFile( + resolved.storage, + (filePath) => previewImportAccountsWithExistingStorage(filePath, existing), + { postSuccessCleanupFailureMode: "warn" }, + ); + return { + rootDir: resolved.rootDir, + accountsPath: resolved.accountsPath, + scope: resolved.scope, + ...preview, + }; +} + +export async function syncFromCodexMultiAuth( + projectPath = process.cwd(), + loadedSource?: LoadedCodexMultiAuthSourceStorage, +): Promise { + const resolved = loadedSource ?? (await loadCodexMultiAuthSourceStorage(projectPath)); + const result: ImportAccountsResult = await withNormalizedImportFile( + tagSyncedAccounts(resolved.storage), + (filePath) => { + const maxAccounts = getSyncCapacityLimit(); + return importAccounts( + filePath, + { + preImportBackupPrefix: "codex-multi-auth-sync-backup", + backupMode: "required", + }, + (normalizedStorage, existing) => { + const filteredStorage = filterSourceAccountsAgainstExistingEmails( + normalizedStorage, + existing?.accounts ?? [], + ); + // Infinity is the sentinel for the default unlimited-account mode. + if (Number.isFinite(maxAccounts)) { + const details = computeSyncCapacityDetails( + resolved, + filteredStorage, + existing ?? 
+ ({ + version: 3, + accounts: [], + activeIndex: 0, + activeIndexByFamily: {}, + } satisfies AccountStorageV3), + maxAccounts, + ); + if (details) { + throw new CodexMultiAuthSyncCapacityError(details); + } + } + return filteredStorage; + }, + ); + }, + { postSuccessCleanupFailureMode: "warn" }, + ); + return { + rootDir: resolved.rootDir, + accountsPath: resolved.accountsPath, + scope: resolved.scope, + backupStatus: result.backupStatus, + backupPath: result.backupPath, + backupError: result.backupError, + imported: result.imported, + skipped: result.skipped, + total: result.total, + }; +} + +function buildCodexMultiAuthOverlapCleanupPlan(existing: AccountStorageV3): { + result: CodexMultiAuthCleanupResult; + nextStorage?: AccountStorageV3; +} { + const before = existing.accounts.length; + const syncedAccounts = existing.accounts.filter((account) => + Array.isArray(account.accountTags) && account.accountTags.includes(SYNC_ACCOUNT_TAG), + ); + if (syncedAccounts.length === 0) { + return { + result: { + before, + after: before, + removed: 0, + updated: 0, + }, + }; + } + const preservedAccounts = existing.accounts.filter( + (account) => !(Array.isArray(account.accountTags) && account.accountTags.includes(SYNC_ACCOUNT_TAG)), + ); + const normalizedSyncedStorage = normalizeAccountStorage( + normalizeSourceStorage({ + ...existing, + accounts: syncedAccounts, + }), + ); + if (!normalizedSyncedStorage) { + return { + result: { + before, + after: before, + removed: 0, + updated: 0, + }, + }; + } + const filteredSyncedAccounts = filterSourceAccountsAgainstExistingEmails( + normalizedSyncedStorage, + preservedAccounts, + ).accounts; + const deduplicatedSyncedAccounts = deduplicateAccounts(filteredSyncedAccounts); + const normalized = { + ...existing, + accounts: [...preservedAccounts, ...deduplicatedSyncedAccounts], + } satisfies AccountStorageV3; + const existingActiveKeys = extractCleanupActiveKeys(existing.accounts, existing.activeIndex); + const mappedActiveIndex = (() 
=> { + const byIdentity = findCleanupAccountIndexByIdentityKeys(normalized.accounts, existingActiveKeys); + return byIdentity >= 0 + ? byIdentity + : Math.min(existing.activeIndex, Math.max(0, normalized.accounts.length - 1)); + })(); + const activeIndexByFamily = Object.fromEntries( + Object.entries(existing.activeIndexByFamily ?? {}).map(([family, index]) => { + const identityKeys = extractCleanupActiveKeys(existing.accounts, index); + const mappedIndex = findCleanupAccountIndexByIdentityKeys(normalized.accounts, identityKeys); + return [family, mappedIndex >= 0 ? mappedIndex : mappedActiveIndex]; + }), + ) as AccountStorageV3["activeIndexByFamily"]; + normalized.activeIndex = mappedActiveIndex; + normalized.activeIndexByFamily = activeIndexByFamily; + + const after = normalized.accounts.length; + const removed = Math.max(0, before - after); + const originalAccountsByKey = new Map(); + for (const account of existing.accounts) { + const key = account.organizationId ?? account.accountId ?? account.refreshToken; + if (key) { + originalAccountsByKey.set(key, account); + } + } + const updated = normalized.accounts.reduce((count, account) => { + const key = account.organizationId ?? account.accountId ?? account.refreshToken; + if (!key) return count; + const original = originalAccountsByKey.get(key); + if (!original) return count; + return accountsEqualForCleanup(original, account) ? count : count + 1; + }, 0); + const changed = removed > 0 || after !== before || !storagesEqualForCleanup(normalized, existing); + + return { + result: { + before, + after, + removed, + updated, + }, + nextStorage: changed ? 
normalized : undefined, + }; +} + +function normalizeOverlapCleanupSourceStorage(data: unknown): AccountStorageV3 | null { + if ( + !data || + typeof data !== "object" || + !("version" in data) || + !((data as { version?: unknown }).version === 1 || (data as { version?: unknown }).version === 3) || + !("accounts" in data) || + !Array.isArray((data as { accounts?: unknown }).accounts) + ) { + return null; + } + + const baseRecord = + (data as { version?: unknown }).version === 1 + ? migrateV1ToV3(data as AccountStorageV1) + : (data as AccountStorageV3); + const originalToFilteredIndex = new Map(); + const accounts = baseRecord.accounts.flatMap((account, index) => { + if (typeof account.refreshToken !== "string" || account.refreshToken.trim().length === 0) { + return []; + } + originalToFilteredIndex.set(index, originalToFilteredIndex.size); + return [account]; + }); + const activeIndexValue = + typeof baseRecord.activeIndex === "number" && Number.isFinite(baseRecord.activeIndex) + ? baseRecord.activeIndex + : 0; + const remappedActiveIndex = originalToFilteredIndex.get(activeIndexValue); + const activeIndex = Math.max( + 0, + Math.min(accounts.length - 1, remappedActiveIndex ?? activeIndexValue), + ); + const rawActiveIndexByFamily = + baseRecord.activeIndexByFamily && typeof baseRecord.activeIndexByFamily === "object" + ? baseRecord.activeIndexByFamily + : {}; + const activeIndexByFamily = Object.fromEntries( + Object.entries(rawActiveIndexByFamily).flatMap(([family, value]) => { + if (typeof value !== "number" || !Number.isFinite(value)) { + return []; + } + const remappedValue = originalToFilteredIndex.get(value) ?? value; + return [[family, Math.max(0, Math.min(accounts.length - 1, remappedValue))]]; + }), + ) as AccountStorageV3["activeIndexByFamily"]; + + return { + version: 3, + accounts, + activeIndex: accounts.length === 0 ? 
0 : activeIndex, + activeIndexByFamily, + }; +} + +async function loadRawCodexMultiAuthOverlapCleanupStorage( + fallback: AccountStorageV3, +): Promise { + try { + const raw = await fs.readFile(getStoragePath(), "utf-8"); + const parsed = JSON.parse(raw) as unknown; + const normalized = normalizeOverlapCleanupSourceStorage(parsed); + if (normalized) { + return normalized; + } + throw new Error("Invalid raw storage snapshot for synced overlap cleanup."); + } catch (error) { + const code = (error as NodeJS.ErrnoException).code; + if (code === "ENOENT") { + return fallback; + } + if (code === "EBUSY" || code === "EACCES" || code === "EPERM") { + logWarn( + `Failed reading raw storage snapshot for synced overlap cleanup (${code}); using transaction snapshot fallback.`, + ); + return fallback; + } + const message = error instanceof Error ? error.message : String(error); + throw new Error(`Failed to read raw storage snapshot for synced overlap cleanup: ${message}`); + } +} + +function sourceExceedsCapacityWithoutLocalRelief(details: CodexMultiAuthSyncCapacityDetails): boolean { + return ( + details.sourceDedupedTotal > details.maxAccounts && + details.importableNewAccounts === 0 && + details.suggestedRemovals.length === 0 + ); +} + +export function isCodexMultiAuthSourceTooLargeForCapacity( + details: CodexMultiAuthSyncCapacityDetails, +): boolean { + return sourceExceedsCapacityWithoutLocalRelief(details); +} + +export function getCodexMultiAuthCapacityErrorMessage( + details: CodexMultiAuthSyncCapacityDetails, +): string { + if (sourceExceedsCapacityWithoutLocalRelief(details)) { + return ( + `Sync source alone exceeds the maximum of ${details.maxAccounts} accounts ` + + `(${details.sourceDedupedTotal} deduped source accounts). 
Reduce the source set or raise ${SYNC_MAX_ACCOUNTS_OVERRIDE_ENV}.` + ); + } + return ( + `Sync would exceed the maximum of ${details.maxAccounts} accounts ` + + `(current ${details.currentCount}, source ${details.sourceCount}, deduped total ${details.dedupedTotal}). ` + + `Remove at least ${details.needToRemove} account(s) before syncing.` + ); +} + +export class CodexMultiAuthSyncCapacityError extends Error { + readonly details: CodexMultiAuthSyncCapacityDetails; + + constructor(details: CodexMultiAuthSyncCapacityDetails) { + super(getCodexMultiAuthCapacityErrorMessage(details)); + this.name = "CodexMultiAuthSyncCapacityError"; + this.details = details; + } +} + +export async function previewCodexMultiAuthSyncedOverlapCleanup(): Promise { + return withAccountStorageTransaction(async (current) => { + const fallback = current ?? { + version: 3 as const, + accounts: [], + activeIndex: 0, + activeIndexByFamily: {}, + }; + const existing = await loadRawCodexMultiAuthOverlapCleanupStorage(fallback); + return buildCodexMultiAuthOverlapCleanupPlan(existing).result; + }); +} + +export async function cleanupCodexMultiAuthSyncedOverlaps(): Promise { + return withAccountStorageTransaction(async (current, persist) => { + const fallback = current ?? 
{ + version: 3 as const, + accounts: [], + activeIndex: 0, + activeIndexByFamily: {}, + }; + const existing = await loadRawCodexMultiAuthOverlapCleanupStorage(fallback); + const plan = buildCodexMultiAuthOverlapCleanupPlan(existing); + if (plan.nextStorage) { + await persist(plan.nextStorage); + } + return plan.result; + }); +} diff --git a/lib/config.ts b/lib/config.ts index af93ee73..b571e404 100644 --- a/lib/config.ts +++ b/lib/config.ts @@ -1,5 +1,5 @@ -import { readFileSync, existsSync } from "node:fs"; -import { join } from "node:path"; +import { readFileSync, existsSync, promises as fs } from "node:fs"; +import { basename, dirname, join } from "node:path"; import { homedir } from "node:os"; import type { PluginConfig } from "./types.js"; import { @@ -11,6 +11,8 @@ import { logWarn } from "./logger.js"; import { PluginConfigSchema, getValidationErrors } from "./schemas.js"; const CONFIG_PATH = join(homedir(), ".opencode", "openai-codex-auth-config.json"); +const CONFIG_LOCK_PATH = `${CONFIG_PATH}.lock`; +const STALE_CONFIG_LOCK_MAX_AGE_MS = 24 * 60 * 60 * 1000; const TUI_COLOR_PROFILES = new Set(["truecolor", "ansi16", "ansi256"]); const TUI_GLYPH_MODES = new Set(["ascii", "unicode", "auto"]); const REQUEST_TRANSFORM_MODES = new Set(["native", "legacy"]); @@ -19,6 +21,8 @@ const RETRY_PROFILES = new Set(["conservative", "balanced", "aggressive"]); export type UnsupportedCodexPolicy = "strict" | "fallback"; +type RawPluginConfig = Record; + /** * Default plugin configuration * CODEX_MODE is enabled by default for better Codex CLI parity @@ -69,9 +73,7 @@ export function loadPluginConfig(): PluginConfig { return DEFAULT_CONFIG; } - const fileContent = readFileSync(CONFIG_PATH, "utf-8"); - const normalizedFileContent = stripUtf8Bom(fileContent); - const userConfig = JSON.parse(normalizedFileContent) as unknown; + const userConfig = readRawPluginConfig(false) as unknown; const hasFallbackEnvOverride = process.env.CODEX_AUTH_FALLBACK_UNSUPPORTED_MODEL !== 
undefined || process.env.CODEX_AUTH_FALLBACK_GPT53_TO_GPT52 !== undefined; @@ -106,12 +108,276 @@ export function loadPluginConfig(): PluginConfig { } } +function readRawPluginConfig(recoverInvalid = false): RawPluginConfig { + if (!existsSync(CONFIG_PATH)) { + return {}; + } + + try { + const fileContent = readFileSync(CONFIG_PATH, "utf-8"); + const normalizedFileContent = stripUtf8Bom(fileContent); + const parsed = JSON.parse(normalizedFileContent) as unknown; + if (!isRecord(parsed)) { + throw new Error("Plugin config root must be a JSON object"); + } + return { ...parsed }; + } catch (error) { + if (recoverInvalid) { + logWarn(`Failed to read raw plugin config from ${CONFIG_PATH}: ${(error as Error).message}`); + return {}; + } + throw error; + } +} + +async function readRawPluginConfigAsync(recoverInvalid = false): Promise { + try { + const fileContent = await fs.readFile(CONFIG_PATH, "utf-8"); + const normalizedFileContent = stripUtf8Bom(fileContent); + const parsed = JSON.parse(normalizedFileContent) as unknown; + if (!isRecord(parsed)) { + throw new Error("Plugin config root must be a JSON object"); + } + return { ...parsed }; + } catch (error) { + const code = (error as NodeJS.ErrnoException).code; + if (code === "ENOENT") { + return {}; + } + if (recoverInvalid) { + logWarn(`Failed to read raw plugin config from ${CONFIG_PATH}: ${(error as Error).message}`); + return {}; + } + throw error; + } +} + +export async function savePluginConfigMutation( + mutate: (current: RawPluginConfig) => RawPluginConfig, + options: { recoverInvalidCurrent?: boolean } = {}, +): Promise { + await withPluginConfigLock(async () => { + const current = await readRawPluginConfigAsync(options.recoverInvalidCurrent === true); + const next = mutate({ ...current }); + + if (!isRecord(next)) { + throw new Error("Plugin config mutation must return a JSON object"); + } + + const tempPath = `${CONFIG_PATH}.${process.pid}.${Date.now()}.tmp`; + let tempFilePresent = false; + try { + await 
fs.writeFile(tempPath, `${JSON.stringify(next, null, 2)}\n`, { + encoding: "utf-8", + mode: 0o600, + }); + tempFilePresent = true; + try { + await fs.rename(tempPath, CONFIG_PATH); + tempFilePresent = false; + return; + } catch (error) { + const code = (error as NodeJS.ErrnoException).code; + if ( + process.platform === "win32" && + (code === "EEXIST" || code === "EPERM") && + existsSync(CONFIG_PATH) + ) { + const backupPath = `${CONFIG_PATH}.${process.pid}.${Date.now()}.bak`; + let backupMoved = false; + try { + await fs.rename(CONFIG_PATH, backupPath); + backupMoved = true; + await fs.rename(tempPath, CONFIG_PATH); + tempFilePresent = false; + try { + await fs.unlink(backupPath); + } catch { + // best effort backup cleanup + } + backupMoved = false; + return; + } catch (retryError) { + if (backupMoved) { + try { + if (!existsSync(CONFIG_PATH)) { + await fs.rename(backupPath, CONFIG_PATH); + } + } catch { + // best effort config restore + } + backupMoved = false; + } + throw retryError; + } finally { + if (backupMoved) { + try { + await fs.unlink(backupPath); + } catch { + // best effort backup cleanup + } + } + } + } + throw error; + } + } finally { + if (tempFilePresent) { + try { + await fs.unlink(tempPath); + } catch { + // best effort temp cleanup + } + } + } + }); +} + function stripUtf8Bom(content: string): string { return content.charCodeAt(0) === 0xfeff ? 
content.slice(1) : content; } function isRecord(value: unknown): value is Record { - return value !== null && typeof value === "object"; + return value !== null && typeof value === "object" && !Array.isArray(value); +} + +function sleepAsync(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +function isProcessAlive(pid: number): boolean { + try { + process.kill(pid, 0); + return true; + } catch (error) { + const code = (error as NodeJS.ErrnoException).code; + return code !== "ESRCH"; + } +} + +async function cleanupStalePluginConfigLockArtifacts(): Promise { + const lockDir = dirname(CONFIG_LOCK_PATH); + const staleLockPrefix = `${basename(CONFIG_LOCK_PATH)}.`; + try { + const entries = await fs.readdir(lockDir, { withFileTypes: true }); + for (const entry of entries) { + if (!entry.isFile() || !entry.name.startsWith(staleLockPrefix) || !entry.name.endsWith(".stale")) { + continue; + } + const stalePath = join(lockDir, entry.name); + try { + const stats = await fs.stat(stalePath); + if (Date.now() - stats.mtimeMs < STALE_CONFIG_LOCK_MAX_AGE_MS) { + continue; + } + await fs.unlink(stalePath); + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error); + logWarn(`Failed to remove stale plugin config lock artifact ${stalePath}: ${message}`); + } + } + } catch { + // best effort stale-lock cleanup only + } +} + +async function tryRecoverStalePluginConfigLock(rawLockContents: string): Promise { + const lockOwnerPid = Number.parseInt(rawLockContents.trim(), 10); + if ( + !Number.isFinite(lockOwnerPid) || + lockOwnerPid === process.pid || + isProcessAlive(lockOwnerPid) + ) { + return false; + } + + const staleLockPath = `${CONFIG_LOCK_PATH}.${lockOwnerPid}.${process.pid}.${Date.now()}.stale`; + try { + await fs.rename(CONFIG_LOCK_PATH, staleLockPath); + } catch { + return false; + } + + try { + const movedLockContents = await fs.readFile(staleLockPath, "utf-8"); + if (movedLockContents !== rawLockContents) { + try { + if (!existsSync(CONFIG_LOCK_PATH)) { + await fs.rename(staleLockPath, CONFIG_LOCK_PATH); + } + } catch { + // best effort restore when a live lock was moved unexpectedly + } + return false; + } + } catch { + try { + if (!existsSync(CONFIG_LOCK_PATH)) { + await fs.rename(staleLockPath, CONFIG_LOCK_PATH); + } + } catch { + // best effort restore when stale-lock verification fails + } + return false; + } + + if (existsSync(CONFIG_LOCK_PATH)) { + try { + await fs.unlink(staleLockPath); + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + logWarn(`Failed to remove stale plugin config lock artifact ${staleLockPath}: ${message}`); + } + return false; + } + + try { + await fs.unlink(staleLockPath); + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error); + logWarn(`Failed to remove stale plugin config lock artifact ${staleLockPath}: ${message}`); + } + return true; +} + +async function withPluginConfigLock(fn: () => T | Promise): Promise { + await fs.mkdir(dirname(CONFIG_PATH), { recursive: true }); + await cleanupStalePluginConfigLockArtifacts(); + const deadline = Date.now() + 5_000; + while (true) { + try { + await fs.writeFile(CONFIG_LOCK_PATH, `${process.pid}`, { encoding: "utf-8", flag: "wx" }); + break; + } catch (error) { + const code = (error as NodeJS.ErrnoException).code; + const retryableLockError = + code === "EEXIST" || (process.platform === "win32" && (code === "EPERM" || code === "EBUSY")); + if (!retryableLockError || Date.now() >= deadline) { + throw error; + } + if (existsSync(CONFIG_LOCK_PATH) && (code === "EEXIST" || (process.platform === "win32" && (code === "EPERM" || code === "EBUSY")))) { + try { + const rawLockContents = await fs.readFile(CONFIG_LOCK_PATH, "utf-8"); + if (await tryRecoverStalePluginConfigLock(rawLockContents)) { + continue; + } + } catch { + // best effort stale-lock recovery + } + } + await sleepAsync(25 + Math.floor(Math.random() * 25)); + } + } + + try { + return await fn(); + } finally { + try { + await fs.unlink(CONFIG_LOCK_PATH); + } catch { + // best effort cleanup + } + } } /** @@ -198,6 +464,24 @@ export function getCodexTuiV2(pluginConfig: PluginConfig): boolean { return resolveBooleanSetting("CODEX_TUI_V2", pluginConfig.codexTuiV2, true); } +export function getSyncFromCodexMultiAuthEnabled(pluginConfig: PluginConfig): boolean { + return pluginConfig.experimental?.syncFromCodexMultiAuth?.enabled === true; +} + +export async function setSyncFromCodexMultiAuthEnabled(enabled: boolean): Promise { + await savePluginConfigMutation((current) => { + const experimental = isRecord(current.experimental) ? { ...current.experimental } : {}; + const syncSettings = isRecord(experimental.syncFromCodexMultiAuth) + ? 
{ ...experimental.syncFromCodexMultiAuth } + : {}; + + syncSettings.enabled = enabled; + experimental.syncFromCodexMultiAuth = syncSettings; + current.experimental = experimental; + return current; + }); +} + export function getCodexTuiColorProfile( pluginConfig: PluginConfig, ): "truecolor" | "ansi16" | "ansi256" { diff --git a/lib/constants.ts b/lib/constants.ts index 69e7f5bf..a77aa89c 100644 --- a/lib/constants.ts +++ b/lib/constants.ts @@ -74,8 +74,8 @@ export const PLATFORM_OPENERS = { /** OAuth authorization labels */ export const AUTH_LABELS = { - OAUTH: "ChatGPT Plus/Pro MULTI (Codex Subscription)", - OAUTH_MANUAL: "ChatGPT Plus/Pro MULTI (Manual URL Paste)", + OAUTH: "ChatGPT Plus/Pro (Browser Login)", + OAUTH_MANUAL: "ChatGPT Plus/Pro (Manual Paste)", API_KEY: "Manually enter API Key MULTI", INSTRUCTIONS: "A browser window should open. If it doesn't, copy the URL and open it manually.", @@ -85,8 +85,8 @@ export const AUTH_LABELS = { /** Multi-account configuration */ export const ACCOUNT_LIMITS = { - /** Maximum number of OAuth accounts that can be registered */ - MAX_ACCOUNTS: 20, + /** Maximum number of OAuth accounts that can be registered. Infinity means unlimited by default. 
*/ + MAX_ACCOUNTS: Number.POSITIVE_INFINITY, /** Cooldown period (ms) after auth failure before retrying account */ AUTH_FAILURE_COOLDOWN_MS: 30_000, /** Number of consecutive auth failures before auto-removing account */ diff --git a/lib/schemas.ts b/lib/schemas.ts index 6028246d..9f93401c 100644 --- a/lib/schemas.ts +++ b/lib/schemas.ts @@ -13,6 +13,11 @@ import { MODEL_FAMILIES, type ModelFamily } from "./prompts/codex.js"; export const PluginConfigSchema = z.object({ codexMode: z.boolean().optional(), requestTransformMode: z.enum(["native", "legacy"]).optional(), + experimental: z.object({ + syncFromCodexMultiAuth: z.object({ + enabled: z.boolean().optional(), + }).optional(), + }).optional(), codexTuiV2: z.boolean().optional(), codexTuiColorProfile: z.enum(["truecolor", "ansi16", "ansi256"]).optional(), codexTuiGlyphMode: z.enum(["ascii", "unicode", "auto"]).optional(), diff --git a/lib/storage.ts b/lib/storage.ts index 151e2213..8abd3fb0 100644 --- a/lib/storage.ts +++ b/lib/storage.ts @@ -51,6 +51,11 @@ export interface ImportAccountsOptions { backupMode?: ImportBackupMode; } +type PrepareImportStorage = ( + normalized: AccountStorageV3, + existing: AccountStorageV3 | null, +) => AccountStorageV3; + export type ImportBackupStatus = "created" | "skipped" | "failed"; export interface ImportAccountsResult { @@ -62,6 +67,12 @@ export interface ImportAccountsResult { backupError?: string; } +export interface CleanupDuplicateEmailAccountsResult { + before: number; + after: number; + removed: number; +} + /** * Custom error class for storage operations with platform-aware hints. */ @@ -415,6 +426,211 @@ function mergeAccountRecords(target: T, source: T): T { }; } +function normalizeEmailIdentity(value: string | undefined): string | undefined { + const trimmed = value?.trim(); + return trimmed && trimmed.length > 0 ? 
trimmed.toLowerCase() : undefined; +} + +function deduplicateAccountsByEmailForMaintenance(accounts: T[]): T[] { + const working = [...accounts]; + const emailToIndex = new Map(); + const indicesToRemove = new Set(); + + for (let i = 0; i < working.length; i += 1) { + const account = working[i]; + if (!account) continue; + + const organizationId = account.organizationId?.trim(); + if (organizationId) continue; + + const accountId = account.accountId?.trim(); + if (accountId) continue; + + const email = normalizeEmailIdentity(account.email); + if (!email) continue; + + const existingIndex = emailToIndex.get(email); + if (existingIndex === undefined) { + emailToIndex.set(email, i); + continue; + } + + const newestIndex = pickNewestAccountIndex(working, existingIndex, i); + const obsoleteIndex = newestIndex === existingIndex ? i : existingIndex; + const newest = working[newestIndex]; + const older = working[obsoleteIndex]; + if (newest && older) { + working[newestIndex] = mergeAccountRecords(newest, older); + } + indicesToRemove.add(obsoleteIndex); + emailToIndex.set(email, newestIndex); + } + + const deduplicated: T[] = []; + for (let i = 0; i < working.length; i += 1) { + if (indicesToRemove.has(i)) continue; + const account = working[i]; + if (account) deduplicated.push(account); + } + return deduplicated; +} + +function buildDuplicateEmailCleanupPlan(existing: AccountStorageV3): { + result: CleanupDuplicateEmailAccountsResult; + nextStorage?: AccountStorageV3; +} { + const before = existing.accounts.length; + const existingActiveIndex = clampIndex(existing.activeIndex, existing.accounts.length); + const existingActiveKeys = extractActiveKeys(existing.accounts, existingActiveIndex); + const existingActiveEmail = extractActiveEmail(existing.accounts, existingActiveIndex); + const deduplicatedAccounts = deduplicateAccountsByEmailForMaintenance(existing.accounts); + const after = deduplicatedAccounts.length; + const removed = Math.max(0, before - after); + + if 
(removed === 0) { + return { + result: { + before, + after, + removed, + }, + }; + } + + const mappedActiveIndex = (() => { + if (deduplicatedAccounts.length === 0) return 0; + if (existingActiveKeys.length > 0) { + const byIdentity = findAccountIndexByIdentityKeys(deduplicatedAccounts, existingActiveKeys); + if (byIdentity >= 0) return byIdentity; + } + const byEmail = + existingActiveKeys.length === 0 + ? findComparableAccountIndexByNormalizedEmail(deduplicatedAccounts, existingActiveEmail) + : findAccountIndexByNormalizedEmail(deduplicatedAccounts, existingActiveEmail); + if (byEmail >= 0) return byEmail; + if (existingActiveKeys.length === 0) { + const fallbackByEmail = findAccountIndexByNormalizedEmail(deduplicatedAccounts, existingActiveEmail); + if (fallbackByEmail >= 0) return fallbackByEmail; + } + return clampIndex(existingActiveIndex, deduplicatedAccounts.length); + })(); + + const activeIndexByFamily: Partial> = {}; + const rawFamilyIndices = existing.activeIndexByFamily ?? {}; + + for (const family of MODEL_FAMILIES) { + const rawIndexValue = rawFamilyIndices[family]; + const rawIndex = + typeof rawIndexValue === "number" && Number.isFinite(rawIndexValue) + ? rawIndexValue + : existingActiveIndex; + const clampedRawIndex = clampIndex(rawIndex, existing.accounts.length); + const familyKeys = extractActiveKeys(existing.accounts, clampedRawIndex); + const familyEmail = extractActiveEmail(existing.accounts, clampedRawIndex); + + let mappedIndex = mappedActiveIndex; + if (familyKeys.length > 0) { + const byIdentity = findAccountIndexByIdentityKeys(deduplicatedAccounts, familyKeys); + if (byIdentity >= 0) { + mappedIndex = byIdentity; + activeIndexByFamily[family] = mappedIndex; + continue; + } + } + + const byEmail = + familyKeys.length === 0 + ? 
findComparableAccountIndexByNormalizedEmail(deduplicatedAccounts, familyEmail) + : findAccountIndexByNormalizedEmail(deduplicatedAccounts, familyEmail); + if (byEmail >= 0) { + mappedIndex = byEmail; + } else if (familyKeys.length === 0) { + const fallbackByEmail = findAccountIndexByNormalizedEmail(deduplicatedAccounts, familyEmail); + if (fallbackByEmail >= 0) { + mappedIndex = fallbackByEmail; + } + } + activeIndexByFamily[family] = mappedIndex; + } + + return { + result: { + before, + after, + removed, + }, + nextStorage: { + version: 3, + accounts: deduplicatedAccounts, + activeIndex: mappedActiveIndex, + activeIndexByFamily, + }, + }; +} + +function normalizeDuplicateCleanupSourceStorage(data: unknown): AccountStorageV3 | null { + if (!isRecord(data) || (data.version !== 1 && data.version !== 3) || !Array.isArray(data.accounts)) { + return null; + } + + const accounts = data.accounts + .filter( + (account): account is AccountStorageV3["accounts"][number] => + isRecord(account) && + typeof account.refreshToken === "string" && + account.refreshToken.trim().length > 0, + ) + .map((account) => ({ ...account })); + const activeIndexValue = + typeof data.activeIndex === "number" && Number.isFinite(data.activeIndex) + ? data.activeIndex + : 0; + const activeIndex = clampIndex(activeIndexValue, accounts.length); + const rawActiveIndexByFamily = isRecord(data.activeIndexByFamily) ? data.activeIndexByFamily : {}; + const activeIndexByFamily = Object.fromEntries( + MODEL_FAMILIES.map((family) => { + const rawValue = rawActiveIndexByFamily[family]; + const nextIndex = + typeof rawValue === "number" && Number.isFinite(rawValue) + ? 
clampIndex(rawValue, accounts.length) + : activeIndex; + return [family, nextIndex]; + }), + ) as AccountStorageV3["activeIndexByFamily"]; + + return { + version: 3, + accounts, + activeIndex, + activeIndexByFamily, + }; +} + +async function loadDuplicateCleanupSourceStorage(): Promise { + const fallback = await loadAccountsInternal(saveAccountsUnlocked); + try { + const rawContent = await fs.readFile(getStoragePath(), "utf-8"); + const rawData = JSON.parse(rawContent) as unknown; + const normalized = normalizeDuplicateCleanupSourceStorage(rawData); + if (normalized) { + return normalized; + } + throw new Error("Invalid raw storage snapshot for duplicate cleanup."); + } catch (error) { + const code = (error as NodeJS.ErrnoException).code; + if (code === "ENOENT" || code === "EBUSY" || code === "EACCES" || code === "EPERM") { + return fallback ?? { + version: 3, + accounts: [], + activeIndex: 0, + activeIndexByFamily: {}, + }; + } + const message = error instanceof Error ? error.message : String(error); + throw new Error(`Failed to read raw storage snapshot for duplicate cleanup: ${message}`); + } +} + /** * Removes duplicate accounts, keeping the most recently used entry for each unique key. * Deduplication identity hierarchy: organizationId -> accountId -> refreshToken. @@ -559,6 +775,15 @@ function extractActiveKeys(accounts: unknown[], activeIndex: number): string[] { }); } +function extractActiveEmail(accounts: unknown[], activeIndex: number): string | undefined { + const candidate = accounts[activeIndex]; + if (!isRecord(candidate)) return undefined; + + return typeof candidate.email === "string" + ? 
normalizeEmailIdentity(candidate.email) + : undefined; +} + function findAccountIndexByIdentityKeys( accounts: Pick[], identityKeys: string[], @@ -573,6 +798,25 @@ function findAccountIndexByIdentityKeys( return -1; } +function findAccountIndexByNormalizedEmail( + accounts: Pick[], + normalizedEmail: string | undefined, +): number { + if (!normalizedEmail) return -1; + return accounts.findIndex((account) => normalizeEmailIdentity(account.email) === normalizedEmail); +} + +function findComparableAccountIndexByNormalizedEmail( + accounts: AccountMetadataV3[], + normalizedEmail: string | undefined, +): number { + if (!normalizedEmail) return -1; + return accounts.findIndex((account) => { + if (normalizeEmailIdentity(account.email) !== normalizedEmail) return false; + return extractActiveKeys([account], 0).length === 0; + }); +} + /** * Normalizes and validates account storage data, migrating from v1 to v3 if needed. * Handles deduplication, index clamping, and per-family active index mapping. @@ -885,6 +1129,16 @@ export async function withAccountStorageTransaction( }); } +export async function loadAccountAndFlaggedStorageSnapshot(): Promise<{ + accounts: AccountStorageV3 | null; + flagged: FlaggedAccountStorageV1; +}> { + return withStorageLock(async () => ({ + accounts: await loadAccountsInternal(saveAccountsUnlocked), + flagged: await loadFlaggedAccountsUnlocked(saveFlaggedAccountsUnlocked), + })); +} + /** * Persists account storage to disk using atomic write (temp file + rename). * Creates the .opencode directory if it doesn't exist. 
@@ -916,6 +1170,24 @@ export async function clearAccounts(): Promise { }); } +export async function previewDuplicateEmailCleanup(): Promise { + return withStorageLock(async () => { + const existing = await loadDuplicateCleanupSourceStorage(); + return buildDuplicateEmailCleanupPlan(existing).result; + }); +} + +export async function cleanupDuplicateEmailAccounts(): Promise { + return withStorageLock(async () => { + const existing = await loadDuplicateCleanupSourceStorage(); + const plan = buildDuplicateEmailCleanupPlan(existing); + if (plan.nextStorage) { + await saveAccountsUnlocked(plan.nextStorage); + } + return plan.result; + }); +} + function normalizeFlaggedStorage(data: unknown): FlaggedAccountStorageV1 { if (!isRecord(data) || data.version !== 1 || !Array.isArray(data.accounts)) { return { version: 1, accounts: [] }; @@ -1009,7 +1281,9 @@ function normalizeFlaggedStorage(data: unknown): FlaggedAccountStorageV1 { }; } -export async function loadFlaggedAccounts(): Promise { +async function loadFlaggedAccountsUnlocked( + persistMigration: ((storage: FlaggedAccountStorageV1) => Promise) | null, +): Promise { const path = getFlaggedAccountsPath(); const empty: FlaggedAccountStorageV1 = { version: 1, accounts: [] }; @@ -1034,8 +1308,8 @@ export async function loadFlaggedAccounts(): Promise { const legacyContent = await fs.readFile(legacyPath, "utf-8"); const legacyData = JSON.parse(legacyContent) as unknown; const migrated = normalizeFlaggedStorage(legacyData); - if (migrated.accounts.length > 0) { - await saveFlaggedAccounts(migrated); + if (migrated.accounts.length > 0 && persistMigration) { + await persistMigration(migrated); } try { await fs.unlink(legacyPath); @@ -1058,26 +1332,46 @@ export async function loadFlaggedAccounts(): Promise { } } -export async function saveFlaggedAccounts(storage: FlaggedAccountStorageV1): Promise { - return withStorageLock(async () => { - const path = getFlaggedAccountsPath(); - const uniqueSuffix = 
`${Date.now()}.${Math.random().toString(36).slice(2, 8)}`; - const tempPath = `${path}.${uniqueSuffix}.tmp`; +async function saveFlaggedAccountsUnlocked(storage: FlaggedAccountStorageV1): Promise { + const path = getFlaggedAccountsPath(); + const uniqueSuffix = `${Date.now()}.${Math.random().toString(36).slice(2, 8)}`; + const tempPath = `${path}.${uniqueSuffix}.tmp`; + try { + await fs.mkdir(dirname(path), { recursive: true }); + const content = JSON.stringify(normalizeFlaggedStorage(storage), null, 2); + await fs.writeFile(tempPath, content, { encoding: "utf-8", mode: 0o600 }); + await renameWithWindowsRetry(tempPath, path); + } catch (error) { try { - await fs.mkdir(dirname(path), { recursive: true }); - const content = JSON.stringify(normalizeFlaggedStorage(storage), null, 2); - await fs.writeFile(tempPath, content, { encoding: "utf-8", mode: 0o600 }); - await renameWithWindowsRetry(tempPath, path); - } catch (error) { - try { - await fs.unlink(tempPath); - } catch { - // Ignore cleanup failures. - } - log.error("Failed to save flagged account storage", { path, error: String(error) }); - throw error; + await fs.unlink(tempPath); + } catch { + // Ignore cleanup failures. 
} + log.error("Failed to save flagged account storage", { path, error: String(error) }); + throw error; + } +} + +export async function loadFlaggedAccounts(): Promise { + return withStorageLock(async () => loadFlaggedAccountsUnlocked(saveFlaggedAccountsUnlocked)); +} + +export async function withFlaggedAccountsTransaction( + handler: ( + current: FlaggedAccountStorageV1, + persist: (storage: FlaggedAccountStorageV1) => Promise, + ) => Promise, +): Promise { + return withStorageLock(async () => { + const current = await loadFlaggedAccountsUnlocked(saveFlaggedAccountsUnlocked); + return handler(current, saveFlaggedAccountsUnlocked); + }); +} + +export async function saveFlaggedAccounts(storage: FlaggedAccountStorageV1): Promise { + return withStorageLock(async () => { + await saveFlaggedAccountsUnlocked(storage); }); } @@ -1155,27 +1449,43 @@ export async function previewImportAccounts( const { normalized } = await readAndNormalizeImportFile(filePath); return withAccountStorageTransaction((existing) => { - const existingAccounts = existing?.accounts ?? 
[]; - const merged = [...existingAccounts, ...normalized.accounts]; - - if (merged.length > ACCOUNT_LIMITS.MAX_ACCOUNTS) { - const deduped = deduplicateAccountsForStorage(merged); - if (deduped.length > ACCOUNT_LIMITS.MAX_ACCOUNTS) { - throw new Error( - `Import would exceed maximum of ${ACCOUNT_LIMITS.MAX_ACCOUNTS} accounts (would have ${deduped.length})`, - ); - } + return Promise.resolve(previewImportAccountsAgainstExistingNormalized(normalized, existing)); + }); +} + +export async function previewImportAccountsWithExistingStorage( + filePath: string, + existing: AccountStorageV3 | null | undefined, +): Promise<{ imported: number; total: number; skipped: number }> { + const { normalized } = await readAndNormalizeImportFile(filePath); + return previewImportAccountsAgainstExistingNormalized(normalized, existing); +} + +function previewImportAccountsAgainstExistingNormalized( + normalized: AccountStorageV3, + existing: AccountStorageV3 | null | undefined, +): { imported: number; total: number; skipped: number } { + const existingAccounts = existing?.accounts ?? 
[]; + const merged = [...existingAccounts, ...normalized.accounts]; + const hasFiniteAccountLimit = Number.isFinite(ACCOUNT_LIMITS.MAX_ACCOUNTS); + + if (hasFiniteAccountLimit && merged.length > ACCOUNT_LIMITS.MAX_ACCOUNTS) { + const deduped = deduplicateAccountsForStorage(merged); + if (deduped.length > ACCOUNT_LIMITS.MAX_ACCOUNTS) { + throw new Error( + `Import would exceed maximum of ${ACCOUNT_LIMITS.MAX_ACCOUNTS} accounts (would have ${deduped.length})`, + ); } + } - const deduplicatedAccounts = deduplicateAccountsForStorage(merged); - const imported = deduplicatedAccounts.length - existingAccounts.length; - const skipped = normalized.accounts.length - imported; - return Promise.resolve({ - imported, - total: deduplicatedAccounts.length, - skipped, - }); - }); + const deduplicatedAccounts = deduplicateAccountsForStorage(merged); + const imported = Math.max(0, deduplicatedAccounts.length - existingAccounts.length); + const skipped = normalized.accounts.length - imported; + return { + imported, + total: deduplicatedAccounts.length, + skipped, + }; } /** @@ -1203,6 +1513,27 @@ export async function exportAccounts(filePath: string, force = true): Promise { + await withStorageLock(async () => { + const resolvedPath = resolvePath(filePath); + + if (!force && existsSync(resolvedPath)) { + throw new Error(`File already exists: ${resolvedPath}`); + } + + await migrateLegacyProjectStorageIfNeeded(saveAccountsUnlocked); + const storagePath = getStoragePath(); + if (!existsSync(storagePath)) { + throw new Error("No accounts to back up"); + } + + await fs.mkdir(dirname(resolvedPath), { recursive: true }); + await fs.copyFile(storagePath, resolvedPath); + await fs.chmod(resolvedPath, 0o600).catch(() => undefined); + log.info("Backed up raw accounts storage", { path: resolvedPath, source: storagePath }); + }); +} + /** * Imports accounts from a JSON file, merging with existing accounts. 
* Deduplicates by identity key first (organizationId -> accountId -> refreshToken), @@ -1213,6 +1544,7 @@ export async function exportAccounts(filePath: string, force = true): Promise { const { resolvedPath, normalized } = await readAndNormalizeImportFile(filePath); const backupMode = options.backupMode ?? "none"; @@ -1227,6 +1559,11 @@ export async function importAccounts( backupError, } = await withAccountStorageTransaction(async (existing, persist) => { + const preparedNormalized = prepare ? prepare(normalized, existing) : normalized; + const skippedByPrepare = Math.max( + 0, + normalized.accounts.length - preparedNormalized.accounts.length, + ); const existingStorage: AccountStorageV3 = existing ?? ({ @@ -1262,9 +1599,10 @@ export async function importAccounts( } } - const merged = [...existingAccounts, ...normalized.accounts]; + const merged = [...existingAccounts, ...preparedNormalized.accounts]; + const hasFiniteAccountLimit = Number.isFinite(ACCOUNT_LIMITS.MAX_ACCOUNTS); - if (merged.length > ACCOUNT_LIMITS.MAX_ACCOUNTS) { + if (hasFiniteAccountLimit && merged.length > ACCOUNT_LIMITS.MAX_ACCOUNTS) { const deduped = deduplicateAccountsForStorage(merged); if (deduped.length > ACCOUNT_LIMITS.MAX_ACCOUNTS) { throw new Error( @@ -1309,8 +1647,8 @@ export async function importAccounts( await persist(newStorage); - const imported = deduplicatedAccounts.length - existingAccounts.length; - const skipped = normalized.accounts.length - imported; + const imported = Math.max(0, deduplicatedAccounts.length - existingAccounts.length); + const skipped = skippedByPrepare + Math.max(0, preparedNormalized.accounts.length - imported); return { imported, total: deduplicatedAccounts.length, diff --git a/lib/storage/paths.ts b/lib/storage/paths.ts index b3e9d5b9..dfc46db9 100644 --- a/lib/storage/paths.ts +++ b/lib/storage/paths.ts @@ -3,7 +3,7 @@ * Extracted from storage.ts to reduce module size. 
*/ -import { existsSync } from "node:fs"; +import { existsSync, readFileSync, statSync } from "node:fs"; import { createHash } from "node:crypto"; import { basename, dirname, isAbsolute, join, relative, resolve } from "node:path"; import { homedir, tmpdir } from "node:os"; @@ -34,16 +34,83 @@ function sanitizeProjectName(projectPath: string): string { return sanitized || "project"; } -export function getProjectStorageKey(projectPath: string): string { - const normalizedPath = normalizeProjectPath(projectPath); +function buildProjectStorageKey(projectPath: string, identityPath: string): string { const hash = createHash("sha256") - .update(normalizedPath) + .update(identityPath) .digest("hex") .slice(0, PROJECT_KEY_HASH_LENGTH); - const projectName = sanitizeProjectName(normalizedPath).slice(0, 40); + const projectName = sanitizeProjectName(projectPath).slice(0, 40); return `${projectName}-${hash}`; } +function getCanonicalProjectStorageIdentity(projectPath: string): { + identityPath: string; + projectNamePath: string; +} { + const resolvedProjectPath = resolve(projectPath); + const gitPath = join(resolvedProjectPath, ".git"); + if (!existsSync(gitPath)) { + return { + identityPath: normalizeProjectPath(resolvedProjectPath), + projectNamePath: resolvedProjectPath, + }; + } + + try { + if (statSync(gitPath).isDirectory()) { + return { + identityPath: normalizeProjectPath(gitPath), + projectNamePath: resolvedProjectPath, + }; + } + const gitMetadata = readFileSync(gitPath, "utf-8").trim(); + const gitDirMatch = /^gitdir:\s*(.+)$/im.exec(gitMetadata); + const gitDirValue = gitDirMatch?.[1]; + if (!gitDirValue) { + return { + identityPath: normalizeProjectPath(resolvedProjectPath), + projectNamePath: resolvedProjectPath, + }; + } + const gitDir = resolve(resolvedProjectPath, gitDirValue.trim()); + const gitDirParent = dirname(gitDir); + if (basename(gitDirParent).toLowerCase() === "worktrees") { + const commonGitDir = dirname(gitDirParent); + return { + identityPath: 
normalizeProjectPath(commonGitDir), + projectNamePath: dirname(commonGitDir), + }; + } + return { + identityPath: normalizeProjectPath(gitDir), + projectNamePath: resolvedProjectPath, + }; + } catch { + return { + identityPath: normalizeProjectPath(resolvedProjectPath), + projectNamePath: resolvedProjectPath, + }; + } +} + +export function getProjectStorageKeyCandidates(projectPath: string): string[] { + const normalizedProjectPath = normalizeProjectPath(projectPath); + const canonicalIdentity = getCanonicalProjectStorageIdentity(projectPath); + const candidates = [ + buildProjectStorageKey(normalizeProjectPath(canonicalIdentity.projectNamePath), canonicalIdentity.identityPath), + buildProjectStorageKey(normalizedProjectPath, normalizedProjectPath), + ]; + return Array.from(new Set(candidates)); +} + +export function getProjectStorageKey(projectPath: string): string { + const canonicalIdentity = getCanonicalProjectStorageIdentity(projectPath); + return buildProjectStorageKey( + normalizeProjectPath(canonicalIdentity.projectNamePath), + canonicalIdentity.identityPath, + ); +} + /** * Per-project storage is namespaced under ~/.opencode/projects * to avoid writing account files into user repositories. @@ -58,7 +125,6 @@ export function isProjectDirectory(dir: string): boolean { export function findProjectRoot(startDir: string): string | null { let current = startDir; - const root = dirname(current) === current ? current : null; while (current) { if (isProjectDirectory(current)) { @@ -72,7 +138,7 @@ export function findProjectRoot(startDir: string): string | null { current = parent; } - return root && isProjectDirectory(root) ? 
root : null; + return null; } function normalizePathForComparison(filePath: string): string { diff --git a/lib/sync-prune-backup.ts b/lib/sync-prune-backup.ts new file mode 100644 index 00000000..abff7c91 --- /dev/null +++ b/lib/sync-prune-backup.ts @@ -0,0 +1,38 @@ +import type { AccountStorageV3 } from "./storage.js"; + +type FlaggedSnapshot = { + version: 1; + accounts: TAccount[]; +}; + +function cloneWithoutTokens(account: TAccount): TAccount { + const clone = structuredClone(account) as TAccount & { + accessToken?: unknown; + refreshToken?: unknown; + }; + delete clone.accessToken; + delete clone.refreshToken; + return clone as TAccount; +} + +export function createSyncPruneBackupPayload( + currentAccountsStorage: AccountStorageV3, + currentFlaggedStorage: FlaggedSnapshot, +): { + version: 1; + accounts: AccountStorageV3; + flagged: FlaggedSnapshot; +} { + return { + version: 1, + accounts: { + ...currentAccountsStorage, + accounts: currentAccountsStorage.accounts.map((account) => cloneWithoutTokens(account)), + activeIndexByFamily: { ...(currentAccountsStorage.activeIndexByFamily ?? 
{}) }, + }, + flagged: { + ...currentFlaggedStorage, + accounts: currentFlaggedStorage.accounts.map((flagged) => cloneWithoutTokens(flagged)), + }, + }; +} diff --git a/lib/ui/ansi.ts b/lib/ui/ansi.ts index 4d804c3c..9ad0b98a 100644 --- a/lib/ui/ansi.ts +++ b/lib/ui/ansi.ts @@ -6,28 +6,66 @@ export const ANSI = { // Cursor control hide: "\x1b[?25l", show: "\x1b[?25h", + altScreenOn: "\x1b[?1049h", + altScreenOff: "\x1b[?1049l", up: (lines = 1) => `\x1b[${lines}A`, clearLine: "\x1b[2K", clearScreen: "\x1b[2J", moveTo: (row: number, col: number) => `\x1b[${row};${col}H`, // Styling + black: "\x1b[30m", + white: "\x1b[97m", cyan: "\x1b[36m", green: "\x1b[32m", red: "\x1b[31m", yellow: "\x1b[33m", + bgBlue: "\x1b[44m", + bgBrightBlue: "\x1b[104m", + bgGreen: "\x1b[42m", + bgYellow: "\x1b[43m", + bgRed: "\x1b[41m", + inverse: "\x1b[7m", dim: "\x1b[2m", bold: "\x1b[1m", reset: "\x1b[0m", } as const; -export type KeyAction = "up" | "down" | "enter" | "escape" | "escape-start" | null; +// biome-ignore lint/suspicious/noControlCharactersInRegex: matching ANSI escape codes +export const ANSI_CSI_REGEX = new RegExp("\\x1b\\[[0-?]*[ -/]*[@-~]", "g"); +export const CONTROL_CHAR_REGEX = new RegExp("[\\u0000-\\u001f\\u007f]", "g"); + +export type KeyAction = + | "up" + | "down" + | "home" + | "end" + | "enter" + | "escape" + | "escape-start" + | null; export function parseKey(data: Buffer): KeyAction { const input = data.toString(); if (input === "\x1b[A" || input === "\x1bOA") return "up"; if (input === "\x1b[B" || input === "\x1bOB") return "down"; + if ( + input === "\x1b[H" || + input === "\x1bOH" || + input === "\x1b[1~" || + input === "\x1b[7~" + ) { + return "home"; + } + if ( + input === "\x1b[F" || + input === "\x1bOF" || + input === "\x1b[4~" || + input === "\x1b[8~" + ) { + return "end"; + } if (input === "\r" || input === "\n") return "enter"; if (input === "\x03") return "escape"; if (input === "\x1b") return "escape-start"; diff --git a/lib/ui/auth-menu.ts 
b/lib/ui/auth-menu.ts index 12007a4e..40b222ae 100644 --- a/lib/ui/auth-menu.ts +++ b/lib/ui/auth-menu.ts @@ -1,8 +1,11 @@ -import { ANSI, isTTY } from "./ansi.js"; +import { createInterface } from "node:readline/promises"; +import { stdin as input, stdout as output } from "node:process"; +import { ANSI, ANSI_CSI_REGEX, CONTROL_CHAR_REGEX, isTTY } from "./ansi.js"; import { confirm } from "./confirm.js"; import { getUiRuntimeOptions } from "./runtime.js"; import { select, type MenuItem } from "./select.js"; import { paintUiText, formatUiBadge } from "./format.js"; +import { UI_COPY, formatCheckFlaggedLabel } from "./copy.js"; export type AccountStatus = | "active" @@ -16,31 +19,68 @@ export type AccountStatus = export interface AccountInfo { index: number; + sourceIndex?: number; + quickSwitchNumber?: number; accountId?: string; accountLabel?: string; email?: string; addedAt?: number; lastUsed?: number; status?: AccountStatus; + quotaSummary?: string; isCurrentAccount?: boolean; enabled?: boolean; } export interface AuthMenuOptions { flaggedCount?: number; + statusMessage?: string | (() => string | undefined); } export type AuthMenuAction = | { type: "add" } + | { type: "forecast" } + | { type: "fix" } + | { type: "settings" } | { type: "fresh" } | { type: "check" } | { type: "deep-check" } | { type: "verify-flagged" } | { type: "select-account"; account: AccountInfo } + | { type: "set-current-account"; account: AccountInfo } + | { type: "search" } | { type: "delete-all" } | { type: "cancel" }; -export type AccountAction = "back" | "delete" | "refresh" | "toggle" | "cancel"; +export type AccountAction = "back" | "delete" | "refresh" | "toggle" | "set-current" | "cancel"; +export type SettingsAction = + | "toggle-sync" + | "sync-now" + | "cleanup-duplicate-emails" + | "cleanup-overlaps" + | "back" + | "cancel"; + +type SettingsHubAction = "sync" | "maintenance" | "back" | "cancel"; + +export interface SyncPruneCandidate { + index: number; + email?: string; + 
accountLabel?: string; + isCurrentAccount?: boolean; + score?: number; + reason?: string; +} + +type SyncPruneAction = + | { type: "toggle"; candidate: SyncPruneCandidate } + | { type: "confirm" } + | { type: "cancel" }; + +function sanitizeTerminalText(value: string | undefined): string | undefined { + if (!value) return undefined; + return value.replace(ANSI_CSI_REGEX, "").replace(CONTROL_CHAR_REGEX, "").trim(); +} function formatRelativeTime(timestamp: number | undefined): string { if (!timestamp) return "never"; @@ -59,166 +99,414 @@ function formatDate(timestamp: number | undefined): string { function statusBadge(status: AccountStatus | undefined): string { const ui = getUiRuntimeOptions(); - if (ui.v2Enabled) { - switch (status) { - case "active": - return formatUiBadge(ui, "active", "success"); - case "ok": - return formatUiBadge(ui, "ok", "success"); - case "rate-limited": - return formatUiBadge(ui, "rate-limited", "warning"); - case "cooldown": - return formatUiBadge(ui, "cooldown", "warning"); - case "flagged": - return formatUiBadge(ui, "flagged", "danger"); - case "disabled": - return formatUiBadge(ui, "disabled", "danger"); - case "error": - return formatUiBadge(ui, "error", "danger"); + const withTone = ( + label: string, + tone: "accent" | "success" | "warning" | "danger" | "muted", + ): string => { + if (ui.v2Enabled) return formatUiBadge(ui, label, tone); + switch (tone) { + case "success": + return `${ANSI.green}[${label}]${ANSI.reset}`; + case "warning": + return `${ANSI.yellow}[${label}]${ANSI.reset}`; + case "danger": + return `${ANSI.red}[${label}]${ANSI.reset}`; + case "accent": + return `${ANSI.cyan}[${label}]${ANSI.reset}`; default: - return ""; + return `${ANSI.dim}[${label}]${ANSI.reset}`; } - } + }; switch (status) { case "active": - return `${ANSI.green}[active]${ANSI.reset}`; + return withTone("active", "success"); case "ok": - return `${ANSI.green}[ok]${ANSI.reset}`; + return withTone("ok", "success"); case "rate-limited": - return 
`${ANSI.yellow}[rate-limited]${ANSI.reset}`; + return withTone("rate-limited", "warning"); case "cooldown": - return `${ANSI.yellow}[cooldown]${ANSI.reset}`; + return withTone("cooldown", "warning"); case "flagged": - return `${ANSI.red}[flagged]${ANSI.reset}`; + return withTone("flagged", "danger"); case "disabled": - return `${ANSI.red}[disabled]${ANSI.reset}`; + return withTone("disabled", "danger"); case "error": - return `${ANSI.red}[error]${ANSI.reset}`; + return withTone("error", "danger"); default: - return ""; + return withTone("unknown", "muted"); } } function formatAccountIdSuffix(accountId: string | undefined): string | undefined { - const trimmed = accountId?.trim(); + const trimmed = sanitizeTerminalText(accountId); if (!trimmed) return undefined; - return trimmed.length > 14 - ? `${trimmed.slice(0, 8)}...${trimmed.slice(-6)}` - : trimmed; + return trimmed.length > 14 ? `${trimmed.slice(0, 8)}...${trimmed.slice(-6)}` : trimmed; } function accountTitle(account: AccountInfo): string { - const email = account.email?.trim(); - const label = account.accountLabel?.trim(); + const number = account.quickSwitchNumber ?? (account.index + 1); + const email = sanitizeTerminalText(account.email); + const label = sanitizeTerminalText(account.accountLabel); const accountIdSuffix = formatAccountIdSuffix(account.accountId); - const details: string[] = []; if (email) details.push(email); - if (label) details.push(`workspace:${label}`); + if (label) details.push(label.startsWith("workspace:") ? label : `workspace:${label}`); if (accountIdSuffix && (!label || !label.includes(accountIdSuffix))) { details.push(`id:${accountIdSuffix}`); } + return details.length > 0 ? `${number}. ${details.join(" | ")}` : `${number}. Account`; +} + +function accountSearchText(account: AccountInfo): string { + return [ + sanitizeTerminalText(account.email), + sanitizeTerminalText(account.accountLabel), + sanitizeTerminalText(account.accountId), + String(account.quickSwitchNumber ?? 
(account.index + 1)), + ] + .filter((value): value is string => typeof value === "string" && value.length > 0) + .join(" ") + .toLowerCase(); +} + +function accountRowColor(account: AccountInfo): MenuItem["color"] { + if (account.isCurrentAccount) return "green"; + switch (account.status) { + case "active": + case "ok": + return "green"; + case "rate-limited": + case "cooldown": + return "yellow"; + case "disabled": + case "error": + case "flagged": + return "red"; + default: + return undefined; + } +} + +function formatAccountHint(account: AccountInfo, ui = getUiRuntimeOptions()): string { + const parts: string[] = []; + parts.push(ui.v2Enabled ? paintUiText(ui, `used ${formatRelativeTime(account.lastUsed)}`, "muted") : `used ${formatRelativeTime(account.lastUsed)}`); + const quotaSummary = sanitizeTerminalText(account.quotaSummary); + if (quotaSummary) { + parts.push(ui.v2Enabled ? paintUiText(ui, quotaSummary, "muted") : quotaSummary); + } + return parts.join(ui.v2Enabled ? ` ${paintUiText(ui, "|", "muted")} ` : " | "); +} + +async function promptSearchQuery(current: string): Promise { + if (!input.isTTY || !output.isTTY) { + return current; + } + const rl = createInterface({ input, output }); + try { + const suffix = current ? ` (${current})` : ""; + let answer: string; + try { + answer = await rl.question(`Search${suffix} (blank clears): `); + } catch { + return current; + } + return answer.trim().toLowerCase(); + } finally { + rl.close(); + } +} - if (details.length === 0) { - return `${account.index + 1}. Account`; +function authMenuFocusKey(action: AuthMenuAction): string { + switch (action.type) { + case "select-account": + case "set-current-account": + return `account:${action.account.sourceIndex ?? action.account.index}`; + default: + return `action:${action.type}`; } - return `${account.index + 1}. 
${details.join(" | ")}`; } export async function showAuthMenu( accounts: AccountInfo[], options: AuthMenuOptions = {}, ): Promise { - const ui = getUiRuntimeOptions(); const flaggedCount = options.flaggedCount ?? 0; - const verifyLabel = - flaggedCount > 0 - ? `Verify flagged accounts (${flaggedCount})` - : "Verify flagged accounts"; - - const items: MenuItem[] = [ - { label: "Actions", value: { type: "cancel" }, kind: "heading" }, - { label: "Add account", value: { type: "add" }, color: "cyan" }, - { label: "Check quotas", value: { type: "check" }, color: "cyan" }, - { label: "Deep check accounts", value: { type: "deep-check" }, color: "cyan" }, - { label: verifyLabel, value: { type: "verify-flagged" }, color: "cyan" }, - { label: "Start fresh", value: { type: "fresh" }, color: "yellow" }, - { label: "", value: { type: "cancel" }, separator: true }, - { label: "Accounts", value: { type: "cancel" }, kind: "heading" }, - ...accounts.map((account) => { - const currentBadge = account.isCurrentAccount - ? (ui.v2Enabled ? ` ${formatUiBadge(ui, "current", "accent")}` : ` ${ANSI.cyan}[current]${ANSI.reset}`) - : ""; - const badge = statusBadge(account.status); - const disabledBadge = - account.enabled === false - ? (ui.v2Enabled ? ` ${formatUiBadge(ui, "disabled", "danger")}` : ` ${ANSI.red}[disabled]${ANSI.reset}`) - : ""; - const statusSuffix = badge ? ` ${badge}` : ""; - const label = `${accountTitle(account)}${currentBadge}${statusSuffix}${disabledBadge}`; - return { - label: ui.v2Enabled ? 
paintUiText(ui, label, "heading") : label, - hint: `used ${formatRelativeTime(account.lastUsed)}`, - value: { type: "select-account" as const, account }, - }; - }), - { label: "", value: { type: "cancel" }, separator: true }, - { label: "Danger zone", value: { type: "cancel" }, kind: "heading" }, - { label: "Delete all accounts", value: { type: "delete-all" }, color: "red" }, - ]; + const verifyLabel = formatCheckFlaggedLabel(flaggedCount); + const ui = getUiRuntimeOptions(); + let showDetailedHelp = false; + let searchQuery = ""; + let focusKey = "action:add"; while (true) { + const normalizedSearch = searchQuery.trim().toLowerCase(); + const visibleAccounts = normalizedSearch.length > 0 + ? accounts.filter((account) => accountSearchText(account).includes(normalizedSearch)) + : accounts; + const visibleByNumber = new Map(); + const duplicateQuickSwitchNumbers = new Set(); + for (const account of visibleAccounts) { + const quickSwitchNumber = account.quickSwitchNumber ?? (account.index + 1); + if (visibleByNumber.has(quickSwitchNumber)) { + duplicateQuickSwitchNumbers.add(quickSwitchNumber); + continue; + } + visibleByNumber.set(quickSwitchNumber, account); + } + + const items: MenuItem[] = [ + { label: UI_COPY.mainMenu.quickStart, value: { type: "cancel" }, kind: "heading" }, + { label: UI_COPY.mainMenu.addAccount, value: { type: "add" }, color: "green" }, + { label: UI_COPY.mainMenu.checkAccounts, value: { type: "check" }, color: "green" }, + { label: UI_COPY.mainMenu.bestAccount, value: { type: "forecast" }, color: "green" }, + { label: UI_COPY.mainMenu.fixIssues, value: { type: "fix" }, color: "green" }, + { label: "", value: { type: "cancel" }, separator: true }, + { label: UI_COPY.mainMenu.moreChecks, value: { type: "cancel" }, kind: "heading" }, + { label: UI_COPY.mainMenu.refreshChecks, value: { type: "deep-check" }, color: "green" }, + { label: verifyLabel, value: { type: "verify-flagged" }, color: flaggedCount > 0 ? 
"red" : "yellow" }, + { label: "", value: { type: "cancel" }, separator: true }, + { label: UI_COPY.mainMenu.settingsSection, value: { type: "cancel" }, kind: "heading" }, + { label: UI_COPY.mainMenu.settings, value: { type: "settings" }, color: "green" }, + { label: "", value: { type: "cancel" }, separator: true }, + { label: UI_COPY.mainMenu.accounts, value: { type: "cancel" }, kind: "heading" }, + ]; + + if (visibleAccounts.length === 0) { + items.push({ label: UI_COPY.mainMenu.noSearchMatches, value: { type: "cancel" }, disabled: true }); + } else { + items.push( + ...visibleAccounts.map((account) => { + const currentBadge = account.isCurrentAccount + ? (ui.v2Enabled ? ` ${formatUiBadge(ui, "current", "accent")}` : ` ${ANSI.cyan}[current]${ANSI.reset}`) + : ""; + const badge = statusBadge(account.status); + const title = ui.v2Enabled + ? paintUiText(ui, accountTitle(account), account.isCurrentAccount ? "accent" : "heading") + : accountTitle(account); + return { + label: `${title}${currentBadge} ${badge}`.trim(), + hint: formatAccountHint(account, ui), + selectedLabel: `${accountTitle(account)}${currentBadge} ${badge}`.trim(), + color: accountRowColor(account), + value: { type: "select-account" as const, account }, + }; + }), + ); + } + + items.push({ label: "", value: { type: "cancel" }, separator: true }); + items.push({ label: UI_COPY.mainMenu.dangerZone, value: { type: "cancel" }, kind: "heading" }); + items.push({ label: UI_COPY.mainMenu.removeAllAccounts, value: { type: "delete-all" }, color: "red" }); + + const buildSubtitle = (): string | undefined => { + const parts: string[] = []; + if (normalizedSearch.length > 0) { + parts.push(`${UI_COPY.mainMenu.searchSubtitlePrefix} ${normalizedSearch}`); + } + const statusText = typeof options.statusMessage === "function" ? options.statusMessage() : options.statusMessage; + if (typeof statusText === "string" && statusText.trim().length > 0) { + parts.push(statusText.trim()); + } + return parts.length > 0 ? 
parts.join(" | ") : undefined; + }; + + const initialCursor = items.findIndex((item) => { + if (item.separator || item.disabled || item.kind === "heading") return false; + return authMenuFocusKey(item.value) === focusKey; + }); + const result = await select(items, { - message: ui.v2Enabled ? "OpenAI accounts (Codex)" : "Codex accounts", - subtitle: "Select action or account", + message: UI_COPY.mainMenu.title, + subtitle: buildSubtitle(), + dynamicSubtitle: buildSubtitle, + help: showDetailedHelp ? UI_COPY.mainMenu.helpDetailed : UI_COPY.mainMenu.helpCompact, clearScreen: true, - variant: ui.v2Enabled ? "codex" : "legacy", + selectedEmphasis: "minimal", + focusStyle: "row-invert", + showHintsForUnselected: false, + refreshIntervalMs: 200, + initialCursor: initialCursor >= 0 ? initialCursor : undefined, theme: ui.theme, + onInput: (input, context) => { + const lower = input.toLowerCase(); + if (lower === "?") { + showDetailedHelp = !showDetailedHelp; + context.requestRerender(); + return undefined; + } + if (lower === "q") return { type: "cancel" as const }; + if (lower === "/") return { type: "search" as const }; + const parsed = Number.parseInt(input, 10); + if (Number.isFinite(parsed) && parsed >= 1 && parsed <= 9) { + if (duplicateQuickSwitchNumbers.has(parsed)) return undefined; + const direct = visibleByNumber.get(parsed); + if (direct) { + return { type: "set-current-account" as const, account: direct }; + } + } + return undefined; + }, + onCursorChange: ({ cursor }) => { + const selected = items[cursor]; + if (!selected || selected.separator || selected.disabled || selected.kind === "heading") return; + focusKey = authMenuFocusKey(selected.value); + }, }); if (!result) return { type: "cancel" }; + if (result.type === "search") { + searchQuery = await promptSearchQuery(searchQuery); + focusKey = "action:search"; + continue; + } if (result.type === "delete-all") { const confirmed = await confirm("Delete all accounts?"); if (!confirmed) continue; } + focusKey = 
authMenuFocusKey(result); return result; } } -export async function showAccountDetails(account: AccountInfo): Promise { +export async function showSettingsMenu( + syncFromCodexMultiAuthEnabled: boolean, +): Promise { const ui = getUiRuntimeOptions(); - const header = - `${accountTitle(account)} ${statusBadge(account.status)}` + - (account.enabled === false - ? (ui.v2Enabled - ? ` ${formatUiBadge(ui, "disabled", "danger")}` - : ` ${ANSI.red}[disabled]${ANSI.reset}`) - : ""); - const subtitle = `Added: ${formatDate(account.addedAt)} | Last used: ${formatRelativeTime(account.lastUsed)}`; + let focus: SettingsHubAction = "sync"; while (true) { - const action = await select( - [ - { label: "Back", value: "back" }, + const hubItems: MenuItem[] = [ + { label: UI_COPY.settings.sectionTitle, value: "cancel", kind: "heading" }, + { label: UI_COPY.settings.syncCategory, value: "sync", color: "green" }, + { label: UI_COPY.settings.maintenanceCategory, value: "maintenance", color: "green" }, + { label: "", value: "cancel", separator: true }, + { label: UI_COPY.settings.navigationHeading, value: "cancel", kind: "heading" }, + { label: UI_COPY.settings.back, value: "back", color: "red" }, + ]; + const initialCursor = hubItems.findIndex((item) => { + if (item.separator || item.disabled || item.kind === "heading") return false; + return item.value === focus; + }); + const action = await select(hubItems, { + message: UI_COPY.settings.title, + subtitle: UI_COPY.settings.subtitle, + help: UI_COPY.settings.help, + clearScreen: true, + selectedEmphasis: "minimal", + focusStyle: "row-invert", + theme: ui.theme, + initialCursor: initialCursor >= 0 ? initialCursor : undefined, + }); + + if (!action || action === "cancel" || action === "back") { + return action ?? "cancel"; + } + + if (action === "sync") { + const syncBadge = syncFromCodexMultiAuthEnabled + ? formatUiBadge(ui, "enabled", "success") + : formatUiBadge(ui, "disabled", "danger"); + const syncLabel = ui.v2Enabled + ? 
`${UI_COPY.settings.syncToggle} ${syncBadge}` + : `${UI_COPY.settings.syncToggle} ${syncFromCodexMultiAuthEnabled ? `${ANSI.green}[enabled]${ANSI.reset}` : `${ANSI.red}[disabled]${ANSI.reset}`}`; + const syncAction = await select( + [ + { label: UI_COPY.settings.syncHeading, value: "cancel", kind: "heading" }, + { label: syncLabel, value: "toggle-sync", color: syncFromCodexMultiAuthEnabled ? "green" : "yellow" }, + { label: UI_COPY.settings.syncNow, value: "sync-now", color: "cyan" }, + { label: "", value: "cancel", separator: true }, + { label: UI_COPY.settings.navigationHeading, value: "cancel", kind: "heading" }, + { label: UI_COPY.settings.back, value: "back" }, + ], { - label: account.enabled === false ? "Enable account" : "Disable account", - value: "toggle", - color: account.enabled === false ? "green" : "yellow", + message: UI_COPY.settings.title, + subtitle: UI_COPY.settings.syncCategory, + help: UI_COPY.settings.help, + clearScreen: true, + selectedEmphasis: "minimal", + focusStyle: "row-invert", + theme: ui.theme, }, - { label: "Refresh account", value: "refresh", color: "cyan" }, - { label: "Delete this account", value: "delete", color: "red" }, + ); + if (syncAction && syncAction !== "back" && syncAction !== "cancel") { + return syncAction; + } + focus = "sync"; + continue; + } + + const maintenanceAction = await select( + [ + { label: UI_COPY.settings.maintenanceHeading, value: "cancel", kind: "heading" }, + { label: UI_COPY.settings.cleanupDuplicateEmails, value: "cleanup-duplicate-emails", color: "yellow" }, + { label: UI_COPY.settings.cleanupOverlaps, value: "cleanup-overlaps", color: "yellow" }, + { label: "", value: "cancel", separator: true }, + { label: UI_COPY.settings.navigationHeading, value: "cancel", kind: "heading" }, + { label: UI_COPY.settings.back, value: "back" }, ], { - message: header, - subtitle, + message: UI_COPY.settings.title, + subtitle: UI_COPY.settings.maintenanceCategory, + help: UI_COPY.settings.help, clearScreen: true, - 
variant: ui.v2Enabled ? "codex" : "legacy", + selectedEmphasis: "minimal", + focusStyle: "row-invert", theme: ui.theme, }, ); + if (maintenanceAction && maintenanceAction !== "back" && maintenanceAction !== "cancel") { + return maintenanceAction; + } + focus = "maintenance"; + } +} + +export async function showAccountDetails(account: AccountInfo): Promise { + const ui = getUiRuntimeOptions(); + const header = `${accountTitle(account)} ${statusBadge(account.status)}`; + const subtitle = `Added: ${formatDate(account.addedAt)} | Used: ${formatRelativeTime(account.lastUsed)} | Status: ${account.status ?? "unknown"}`; + let focusAction: AccountAction = "back"; + + while (true) { + const items: MenuItem[] = [ + { label: UI_COPY.accountDetails.back, value: "back" }, + { + label: account.enabled === false ? UI_COPY.accountDetails.enable : UI_COPY.accountDetails.disable, + value: "toggle", + color: account.enabled === false ? "green" : "yellow", + }, + { label: UI_COPY.accountDetails.setCurrent, value: "set-current", color: "green" }, + { label: UI_COPY.accountDetails.refresh, value: "refresh", color: "green" }, + { label: UI_COPY.accountDetails.remove, value: "delete", color: "red" }, + ]; + const initialCursor = items.findIndex((item) => item.value === focusAction); + const action = await select(items, { + message: header, + subtitle, + help: UI_COPY.accountDetails.help, + clearScreen: true, + selectedEmphasis: "minimal", + focusStyle: "row-invert", + initialCursor: initialCursor >= 0 ? 
initialCursor : undefined, + theme: ui.theme, + onInput: (input) => { + const lower = input.toLowerCase(); + if (lower === "q") return "cancel"; + if (lower === "s") return "set-current"; + if (lower === "r") return "refresh"; + if (lower === "d") return "delete"; + if (lower === "e" || lower === "t" || lower === "x") return "toggle"; + return undefined; + }, + onCursorChange: ({ cursor }) => { + const selected = items[cursor]; + if (!selected || selected.separator || selected.disabled || selected.kind === "heading") return; + focusAction = selected.value; + }, + }); if (!action) return "cancel"; + focusAction = action; if (action === "delete") { const confirmed = await confirm(`Delete ${accountTitle(account)}?`); if (!confirmed) continue; @@ -231,5 +519,122 @@ export async function showAccountDetails(account: AccountInfo): Promise { + const ui = getUiRuntimeOptions(); + const selected = new Set(); + for (const candidate of candidates) { + if (candidate.isCurrentAccount !== true && selected.size < neededCount) { + selected.add(candidate.index); + } + } + let focusKey = candidates[0] ? `candidate:${candidates[0].index}` : "confirm"; + + while (true) { + const items: MenuItem[] = candidates.map((candidate) => { + const isSelected = selected.has(candidate.index); + const selectionBadge = isSelected + ? ui.v2Enabled + ? formatUiBadge(ui, UI_COPY.syncPrune.selected, "warning") + : `${ANSI.yellow}[${UI_COPY.syncPrune.selected}]${ANSI.reset}` + : ""; + return { + label: `${syncPruneTitle(candidate)} ${selectionBadge}`.trim(), + selectedLabel: `${syncPruneTitle(candidate)} ${selectionBadge}`.trim(), + hint: syncPruneHint(candidate), + color: isSelected ? "yellow" : candidate.isCurrentAccount ? "cyan" : "green", + value: { type: "toggle", candidate }, + }; + }); + + items.push({ label: "", value: { type: "cancel" }, separator: true }); + items.push({ + label: `${UI_COPY.syncPrune.confirm}${selected.size >= neededCount ? 
"" : ` (${selected.size}/${neededCount})`}`, + value: { type: "confirm" }, + color: selected.size >= neededCount ? "green" : "yellow", + }); + items.push({ label: UI_COPY.syncPrune.cancel, value: { type: "cancel" }, color: "red" }); + + const initialCursor = items.findIndex((item) => { + if (item.separator || item.disabled || item.kind === "heading") return false; + if (item.value.type === "toggle") return focusKey === `candidate:${item.value.candidate.index}`; + return focusKey === item.value.type; + }); + + const action = await select(items, { + message: UI_COPY.syncPrune.title, + subtitle: `${UI_COPY.syncPrune.subtitle(neededCount)} | Selected ${selected.size}`, + help: UI_COPY.syncPrune.help, + clearScreen: true, + selectedEmphasis: "minimal", + focusStyle: "row-invert", + initialCursor: initialCursor >= 0 ? initialCursor : undefined, + theme: ui.theme, + onInput: (input, context) => { + const lower = input.toLowerCase(); + if (lower === "q") return { type: "cancel" as const }; + if (lower === "c") return { type: "confirm" as const }; + if (input === " ") { + const current = items[context.cursor]; + if (current?.value.type === "toggle") { + return current.value; + } + return undefined; + } + return undefined; + }, + onCursorChange: ({ cursor }) => { + const current = items[cursor]; + if (!current || current.separator || current.disabled || current.kind === "heading") return; + if (current.value.type === "toggle") focusKey = `candidate:${current.value.candidate.index}`; + else focusKey = current.value.type; + }, + }); + + if (!action || action.type === "cancel") { + return null; + } + if (action.type === "toggle") { + if (selected.has(action.candidate.index)) selected.delete(action.candidate.index); + else selected.add(action.candidate.index); + focusKey = `candidate:${action.candidate.index}`; + continue; + } + if (action.type === "confirm") { + if (selected.size < neededCount) { + continue; + } + return Array.from(selected); + } + } +} + +export { isTTY }; 
diff --git a/lib/ui/copy.ts b/lib/ui/copy.ts new file mode 100644 index 00000000..6e9b7dac --- /dev/null +++ b/lib/ui/copy.ts @@ -0,0 +1,69 @@ +export const UI_COPY = { + mainMenu: { + title: "Accounts Dashboard", + searchSubtitlePrefix: "Search:", + quickStart: "Quick Actions", + addAccount: "Add New Account", + checkAccounts: "Run Health Check", + bestAccount: "Pick Best Account", + fixIssues: "Auto-Repair Issues", + settings: "Settings", + moreChecks: "Advanced Checks", + refreshChecks: "Refresh All Accounts", + checkFlagged: "Check Problem Accounts", + settingsSection: "Settings", + accounts: "Saved Accounts", + noSearchMatches: "No accounts match your search", + dangerZone: "Danger Zone", + removeAllAccounts: "Delete All Accounts", + helpCompact: "↑↓ Move | Enter Select | / Search | 1-9 Switch | Q Back", + helpDetailed: "Arrow keys move, Enter selects, / searches, 1-9 switches account, Q goes back", + }, + accountDetails: { + back: "Back", + enable: "Enable Account", + disable: "Disable Account", + setCurrent: "Set As Current", + refresh: "Re-Login", + remove: "Delete Account", + help: "↑↓ Move | Enter Select | S Use | R Sign In | D Delete | Q Back", + }, + settings: { + title: "Settings", + subtitle: "Organized settings categories for sync, maintenance, and future tools", + help: "↑↓ Move | Enter Select | Q Back", + sectionTitle: "Categories", + syncCategory: "Sync", + maintenanceCategory: "Maintenance", + syncHeading: "Sync", + maintenanceHeading: "Maintenance", + navigationHeading: "Navigation", + syncToggle: "Sync from codex-multi-auth", + syncNow: "Sync Now", + cleanupDuplicateEmails: "Clean Legacy Duplicate Emails", + cleanupOverlaps: "Cleanup Synced Overlaps", + back: "Back", + }, + syncPrune: { + title: "Prepare Sync", + subtitle: (neededCount: number) => `Select ${neededCount} account(s) to remove before syncing`, + help: "↑↓ Move | Enter Toggle | Space Toggle | C Continue | Q Cancel", + selected: "selected", + current: "current", + confirm: "Continue 
With Selected Accounts", + cancel: "Cancel", + }, + fallback: { + addAnotherTip: "Tip: Use private mode or sign out before adding another account.", + addAnotherQuestion: (count: number) => `Add another account? (${count} added) (y/n): `, + selectModePrompt: + "(a) add, (c) check, (b) best, fi(x), (s) settings, (d) deep, (g) problem, (f) fresh, (q) back [a/c/b/x/s/d/g/f/q]: ", + invalidModePrompt: "Use one of: a, c, b, x, s, d, g, f, q.", + }, +} as const; + +export function formatCheckFlaggedLabel(flaggedCount: number): string { + return flaggedCount > 0 + ? `${UI_COPY.mainMenu.checkFlagged} (${flaggedCount})` + : UI_COPY.mainMenu.checkFlagged; +} diff --git a/lib/ui/runtime.ts b/lib/ui/runtime.ts index abaf6270..9adef31d 100644 --- a/lib/ui/runtime.ts +++ b/lib/ui/runtime.ts @@ -2,6 +2,8 @@ import { createUiTheme, type UiColorProfile, type UiGlyphMode, + type UiPalette, + type UiAccent, type UiTheme, } from "./theme.js"; @@ -9,6 +11,8 @@ export interface UiRuntimeOptions { v2Enabled: boolean; colorProfile: UiColorProfile; glyphMode: UiGlyphMode; + palette: UiPalette; + accent: UiAccent; theme: UiTheme; } @@ -16,9 +20,13 @@ const DEFAULT_OPTIONS: UiRuntimeOptions = { v2Enabled: true, colorProfile: "truecolor", glyphMode: "ascii", + palette: "green", + accent: "green", theme: createUiTheme({ profile: "truecolor", glyphMode: "ascii", + palette: "green", + accent: "green", }), }; @@ -30,11 +38,15 @@ export function setUiRuntimeOptions( const v2Enabled = options.v2Enabled ?? runtimeOptions.v2Enabled; const colorProfile = options.colorProfile ?? runtimeOptions.colorProfile; const glyphMode = options.glyphMode ?? runtimeOptions.glyphMode; + const palette = options.palette ?? runtimeOptions.palette; + const accent = options.accent ?? 
runtimeOptions.accent; runtimeOptions = { v2Enabled, colorProfile, glyphMode, - theme: createUiTheme({ profile: colorProfile, glyphMode }), + palette, + accent, + theme: createUiTheme({ profile: colorProfile, glyphMode, palette, accent }), }; return runtimeOptions; } @@ -47,4 +59,3 @@ export function resetUiRuntimeOptions(): UiRuntimeOptions { runtimeOptions = { ...DEFAULT_OPTIONS }; return runtimeOptions; } - diff --git a/lib/ui/select.ts b/lib/ui/select.ts index 3ba4a3c1..0ac84d93 100644 --- a/lib/ui/select.ts +++ b/lib/ui/select.ts @@ -1,33 +1,132 @@ import { ANSI, isTTY, parseKey } from "./ansi.js"; import type { UiTheme } from "./theme.js"; +import { appendFileSync, mkdirSync } from "node:fs"; +import { join } from "node:path"; export interface MenuItem { label: string; + selectedLabel?: string; value: T; hint?: string; disabled?: boolean; + hideUnavailableSuffix?: boolean; separator?: boolean; kind?: "heading"; color?: "red" | "green" | "yellow" | "cyan"; } -export interface SelectOptions { +export interface SelectOptions { message: string; subtitle?: string; + dynamicSubtitle?: () => string | undefined; help?: string; clearScreen?: boolean; variant?: "legacy" | "codex"; theme?: UiTheme; + selectedEmphasis?: "chip" | "minimal"; + focusStyle?: "row-invert" | "chip"; + showHintsForUnselected?: boolean; + refreshIntervalMs?: number; + initialCursor?: number; + allowEscape?: boolean; + onCursorChange?: ( + context: { + cursor: number; + items: MenuItem[]; + requestRerender: () => void; + }, + ) => void; + onInput?: ( + input: string, + context: { + cursor: number; + items: MenuItem[]; + requestRerender: () => void; + }, + ) => T | null | undefined; } const ESCAPE_TIMEOUT_MS = 50; -const ANSI_REGEX = /\x1b\[[0-9;]*m/g; -const ANSI_LEADING_REGEX = /^\x1b\[[0-9;]*m/; +// biome-ignore lint/suspicious/noControlCharactersInRegex: matching ANSI escape codes +const ANSI_REGEX = new RegExp("\\x1b\\[[0-9;]*m", "g"); +// biome-ignore 
lint/suspicious/noControlCharactersInRegex: matching ANSI escape codes +const ANSI_LEADING_REGEX = new RegExp("^\\x1b\\[[0-9;]*m"); +const CSI_FINAL_KEYS = new Set(["A", "B", "C", "D", "H", "F"]); +const CSI_TILDE_PATTERN = /^\d+~$/; +const CONTROL_CHAR_REGEX = /[\u0000-\u0008\u000b\u000c\u000e-\u001f\u007f]/g; + +export interface PendingInputSequence { + value: string; + hasEscape: boolean; +} + +function writeTuiAudit(event: Record): void { + if (process.env.CODEX_TUI_AUDIT !== "1") return; + try { + const home = process.env.USERPROFILE ?? process.env.HOME; + if (!home) return; + const logDir = join(home, ".opencode", "logs"); + mkdirSync(logDir, { recursive: true, mode: 0o700 }); + const logPath = join(logDir, "codex-tui-audit.log"); + appendFileSync( + logPath, + `${JSON.stringify(sanitizeAuditValue("event", { ts: new Date().toISOString(), ...event }))}\n`, + { encoding: "utf8", mode: 0o600 }, + ); + } catch { + // best effort audit logging only + } +} + +const AUDIT_REDACTED_STRING_KEYS = new Set([ + "label", + "message", + "utf8", + "bytesHex", + "token", + "normalizedInput", + "pending", + "hint", + "subtitle", +]); + +const AUDIT_SECRET_LIKE_PATTERN = /\b(?:Bearer\s+)?[A-Za-z0-9._-]{24,}(?:\.[A-Za-z0-9._-]{8,})*\b/; + +export function sanitizeAuditValue(key: string, value: unknown): unknown { + if (typeof value === "string") { + if (AUDIT_REDACTED_STRING_KEYS.has(key)) { + return `[redacted:${value.length}]`; + } + if (value.includes("@")) { + return "[redacted-email]"; + } + if (AUDIT_SECRET_LIKE_PATTERN.test(value)) { + return "[redacted-token]"; + } + return value; + } + if (Array.isArray(value)) { + return value.map((entry) => sanitizeAuditValue(key, entry)); + } + if (value && typeof value === "object") { + return Object.fromEntries( + Object.entries(value as Record).map(([entryKey, entryValue]) => [ + entryKey, + sanitizeAuditValue(entryKey, entryValue), + ]), + ); + } + return value; +} function stripAnsi(input: string): string { return 
input.replace(ANSI_REGEX, ""); } +function sanitizeDisplayText(input: string): string { + return stripAnsi(input).replace(CONTROL_CHAR_REGEX, ""); +} + function truncateAnsi(input: string, maxVisibleChars: number): string { if (maxVisibleChars <= 0) return ""; const visible = stripAnsi(input); @@ -71,22 +170,143 @@ function colorCode(color: MenuItem["color"]): string { } } -function codexColorCode(theme: UiTheme, color: MenuItem["color"]): string { - switch (color) { - case "red": - return theme.colors.danger; - case "green": - return theme.colors.success; - case "yellow": - return theme.colors.warning; - case "cyan": - return theme.colors.accent; - default: - return theme.colors.heading; +function decodeHotkeyInput(data: Buffer): string | null { + const input = data.toString("utf8"); + const keypadMap: Record = { + "\x1bOp": "0", + "\x1bOq": "1", + "\x1bOr": "2", + "\x1bOs": "3", + "\x1bOt": "4", + "\x1bOu": "5", + "\x1bOv": "6", + "\x1bOw": "7", + "\x1bOx": "8", + "\x1bOy": "9", + "\x1bOk": "+", + "\x1bOm": "-", + "\x1bOj": "*", + "\x1bOo": "/", + "\x1bOn": ".", + }; + const mapped = keypadMap[input]; + if (mapped) return mapped; + + for (const ch of input) { + const code = ch.charCodeAt(0); + if (code >= 32 && code <= 126) return ch; + } + return null; +} + +function canCompleteCsi(chunk: string): boolean { + return CSI_FINAL_KEYS.has(chunk) || CSI_TILDE_PATTERN.test(chunk); +} + +export function coalesceTerminalInput( + rawInput: string, + pending: PendingInputSequence | null, +): { normalizedInput: string | null; pending: PendingInputSequence | null } { + let nextInput = rawInput; + let nextPending = pending; + + if (nextPending) { + const base = nextPending.value; + if (nextPending.hasEscape && base === "\x1b[" && canCompleteCsi(nextInput)) { + return { normalizedInput: `\x1b[${nextInput}`, pending: null }; + } + if (nextPending.hasEscape && /^\x1b\[[\d;]+$/.test(base) && canCompleteCsi(nextInput)) { + return { normalizedInput: `${base}${nextInput}`, pending: 
null }; + } + if ( + nextPending.hasEscape && + (base === "\x1b[" || /^\x1b\[[\d;]+$/.test(base)) && + /^[\d;]+$/.test(nextInput) + ) { + return { normalizedInput: null, pending: { value: `${base}${nextInput}`, hasEscape: true } }; + } + if (nextPending.hasEscape && base === "\x1bO" && CSI_FINAL_KEYS.has(nextInput)) { + return { normalizedInput: `\x1bO${nextInput}`, pending: null }; + } + if (base === "\x1b" && (nextInput === "[" || nextInput === "O")) { + return { normalizedInput: null, pending: { value: `\x1b${nextInput}`, hasEscape: true } }; + } + if (base === "\x1b" && ((nextInput.startsWith("[") && nextInput.length > 1) || (nextInput.startsWith("O") && nextInput.length > 1))) { + return { normalizedInput: `\x1b${nextInput}`, pending: null }; + } + nextInput = `${base}${nextInput}`; + nextPending = null; + } + + if (nextInput === "\x1b") { + return { normalizedInput: null, pending: { value: "\x1b", hasEscape: true } }; + } + if (nextInput === "\x1b[" || nextInput === "\x1bO") { + return { normalizedInput: null, pending: { value: nextInput, hasEscape: true } }; + } + if (nextInput === "[" || nextInput === "O") { + return { normalizedInput: nextInput, pending: null }; + } + + return { normalizedInput: nextInput, pending: nextPending }; +} + +export function tokenizeTerminalInput(rawInput: string): string[] { + const tokens: string[] = []; + let index = 0; + while (index < rawInput.length) { + const ch = rawInput.charAt(index); + if (ch !== "\x1b") { + tokens.push(ch); + index += 1; + continue; + } + + const next = rawInput[index + 1]; + const third = rawInput[index + 2]; + if (next === "[") { + let cursor = index + 2; + let consumed = false; + while (cursor < rawInput.length) { + const current = rawInput.charAt(cursor); + if (CSI_FINAL_KEYS.has(current)) { + tokens.push(rawInput.slice(index, cursor + 1)); + index = cursor + 1; + consumed = true; + break; + } + if (current === "~" && CSI_TILDE_PATTERN.test(rawInput.slice(index + 2, cursor + 1))) { + 
tokens.push(rawInput.slice(index, cursor + 1)); + index = cursor + 1; + consumed = true; + break; + } + if (!/[0-9;]/.test(current)) { + break; + } + cursor += 1; + } + if (consumed) { + continue; + } + } + if (next === "O" && third && CSI_FINAL_KEYS.has(third)) { + tokens.push(rawInput.slice(index, index + 3)); + index += 3; + continue; + } + if (next === "[" || next === "O") { + tokens.push(rawInput.slice(index, index + 2)); + index += 2; + continue; + } + tokens.push(ch); + index += 1; } + return tokens; } -export async function select(items: MenuItem[], options: SelectOptions): Promise { +export async function select(items: MenuItem[], options: SelectOptions): Promise { if (!isTTY()) { throw new Error("Interactive select requires a TTY terminal"); } @@ -106,121 +326,90 @@ export async function select(items: MenuItem[], options: SelectOptions): P const { stdin, stdout } = process; let cursor = items.findIndex(isSelectable); + if (typeof options.initialCursor === "number" && Number.isFinite(options.initialCursor)) { + const bounded = Math.max(0, Math.min(items.length - 1, Math.trunc(options.initialCursor))); + cursor = bounded; + } + if (cursor < 0 || !isSelectable(items[cursor] as MenuItem)) { + cursor = items.findIndex(isSelectable); + } if (cursor < 0) cursor = 0; let escapeTimeout: ReturnType | null = null; let cleanedUp = false; let renderedLines = 0; + let hasRendered = false; + let inputGuardUntil = 0; + const theme = options.theme; + let rerenderRequested = false; - const renderLegacy = () => { - const columns = stdout.columns ?? 80; - const rows = stdout.rows ?? 
24; - const previousRenderedLines = renderedLines; - - if (options.clearScreen) { - stdout.write(ANSI.clearScreen + ANSI.moveTo(1, 1)); - } else if (previousRenderedLines > 0) { - stdout.write(ANSI.up(previousRenderedLines)); - } - - let linesWritten = 0; - const writeLine = (line: string) => { - stdout.write(`${ANSI.clearLine}${line}\n`); - linesWritten += 1; - }; + const requestRerender = () => { + rerenderRequested = true; + }; - const subtitleLines = options.subtitle ? 3 : 0; - const fixedLines = 1 + subtitleLines + 2; - const maxVisibleItems = Math.max(1, Math.min(items.length, rows - fixedLines - 1)); + const notifyCursorChange = () => { + if (!options.onCursorChange) return; + rerenderRequested = false; + const current = items[cursor]; + writeTuiAudit({ + type: "focus", + message: options.message, + cursor, + label: current?.label, + }); + options.onCursorChange({ + cursor, + items, + requestRerender, + }); + }; - let windowStart = 0; - let windowEnd = items.length; - if (items.length > maxVisibleItems) { - windowStart = cursor - Math.floor(maxVisibleItems / 2); - windowStart = Math.max(0, Math.min(windowStart, items.length - maxVisibleItems)); - windowEnd = windowStart + maxVisibleItems; + const drainStdinBuffer = () => { + try { + let chunk: Buffer | string | null; + do { + chunk = stdin.read(); + } while (chunk !== null); + } catch { + // best effort } + }; - const visibleItems = items.slice(windowStart, windowEnd); - writeLine(`${ANSI.dim}+ ${ANSI.reset}${truncateAnsi(options.message, Math.max(1, columns - 4))}`); - - if (options.subtitle) { - writeLine("|"); - writeLine(`${ANSI.cyan}>${ANSI.reset} ${truncateAnsi(options.subtitle, Math.max(1, columns - 4))}`); - writeLine(""); + const codexColorCode = (color: MenuItem["color"]): string => { + if (!theme) { + return colorCode(color); } - - for (let i = 0; i < visibleItems.length; i += 1) { - const itemIndex = windowStart + i; - const item = visibleItems[i]; - if (!item) continue; - - if (item.separator) { 
- writeLine("|"); - continue; - } - - if (item.kind === "heading") { - const heading = truncateAnsi( - `${ANSI.dim}${ANSI.bold}${item.label}${ANSI.reset}`, - Math.max(1, columns - 6), - ); - writeLine(`${ANSI.cyan}|${ANSI.reset} ${heading}`); - continue; - } - - const selected = itemIndex === cursor; - let labelText: string; - if (item.disabled) { - labelText = `${ANSI.dim}${item.label} (unavailable)${ANSI.reset}`; - } else if (selected) { - const color = colorCode(item.color); - labelText = color ? `${color}${item.label}${ANSI.reset}` : item.label; - if (item.hint) { - labelText += ` ${ANSI.dim}${item.hint}${ANSI.reset}`; - } - } else { - const color = colorCode(item.color); - labelText = color - ? `${ANSI.dim}${color}${item.label}${ANSI.reset}` - : `${ANSI.dim}${item.label}${ANSI.reset}`; - if (item.hint) { - labelText += ` ${ANSI.dim}${item.hint}${ANSI.reset}`; - } - } - - labelText = truncateAnsi(labelText, Math.max(1, columns - 8)); - if (selected) { - writeLine(`${ANSI.cyan}|${ANSI.reset} ${ANSI.green}*${ANSI.reset} ${labelText}`); - } else { - writeLine(`${ANSI.cyan}|${ANSI.reset} ${ANSI.dim}o${ANSI.reset} ${labelText}`); - } + switch (color) { + case "red": + return theme.colors.danger; + case "green": + return theme.colors.success; + case "yellow": + return theme.colors.warning; + case "cyan": + return theme.colors.accent; + default: + return theme.colors.heading; } + }; - const windowHint = - items.length > visibleItems.length ? ` (${windowStart + 1}-${windowEnd}/${items.length})` : ""; - const helpText = options.help ?? 
`Up/Down select | Enter confirm | Esc back${windowHint}`; - writeLine( - `${ANSI.cyan}|${ANSI.reset} ${ANSI.dim}${truncateAnsi(helpText, Math.max(1, columns - 6))}${ANSI.reset}`, - ); - writeLine(`${ANSI.cyan}+${ANSI.reset}`); - - if (!options.clearScreen && previousRenderedLines > linesWritten) { - const extra = previousRenderedLines - linesWritten; - for (let i = 0; i < extra; i += 1) { - writeLine(""); - } + const selectedLabelStart = (): string => { + if (!theme) { + return `${ANSI.bgGreen}${ANSI.black}${ANSI.bold}`; } - - renderedLines = linesWritten; + return `${theme.colors.focusBg}${theme.colors.focusText}${ANSI.bold}`; }; - const renderCodex = (theme: UiTheme) => { + const render = () => { const columns = stdout.columns ?? 80; const rows = stdout.rows ?? 24; const previousRenderedLines = renderedLines; + const subtitleText = options.dynamicSubtitle ? options.dynamicSubtitle() : options.subtitle; + const focusStyle = options.focusStyle ?? "row-invert"; + let didFullClear = false; - if (options.clearScreen) { + if (options.clearScreen && !hasRendered) { stdout.write(ANSI.clearScreen + ANSI.moveTo(1, 1)); + didFullClear = true; } else if (previousRenderedLines > 0) { stdout.write(ANSI.up(previousRenderedLines)); } @@ -231,34 +420,87 @@ export async function select(items: MenuItem[], options: SelectOptions): P linesWritten += 1; }; - const subtitleLines = options.subtitle ? 2 : 0; + const itemRowCost = (item: MenuItem, selected: boolean): number => { + if (item.separator || item.kind === "heading") { + return 1; + } + let cost = 1; + if (item.hint) { + const hintLines = item.hint.split("\n").length; + if (selected) { + cost += Math.min(3, hintLines); + } else if (options.showHintsForUnselected ?? true) { + cost += Math.min(2, hintLines); + } + } + return cost; + }; + + const subtitleLines = subtitleText ? 
2 : 0; const fixedLines = 2 + subtitleLines + 2; - const maxVisibleItems = Math.max(1, Math.min(items.length, rows - fixedLines - 1)); + const availableItemRows = Math.max(1, rows - fixedLines); let windowStart = 0; let windowEnd = items.length; - if (items.length > maxVisibleItems) { - windowStart = cursor - Math.floor(maxVisibleItems / 2); - windowStart = Math.max(0, Math.min(windowStart, items.length - maxVisibleItems)); - windowEnd = windowStart + maxVisibleItems; + const totalRenderedRows = items.reduce( + (total, item, index) => total + itemRowCost(item, index === cursor), + 0, + ); + if (totalRenderedRows > availableItemRows) { + windowStart = cursor; + windowEnd = cursor + 1; + let usedRows = itemRowCost(items[cursor] as MenuItem, true); + let up = cursor - 1; + let down = cursor + 1; + + while (true) { + const upCost = + up >= 0 ? itemRowCost(items[up] as MenuItem, false) : Number.POSITIVE_INFINITY; + const downCost = + down < items.length + ? itemRowCost(items[down] as MenuItem, false) + : Number.POSITIVE_INFINITY; + const preferUp = upCost <= downCost; + + if (preferUp && up >= 0 && usedRows + upCost <= availableItemRows) { + usedRows += upCost; + windowStart = up; + up -= 1; + continue; + } + if (down < items.length && usedRows + downCost <= availableItemRows) { + usedRows += downCost; + windowEnd = down + 1; + down += 1; + continue; + } + if (up >= 0 && usedRows + upCost <= availableItemRows) { + usedRows += upCost; + windowStart = up; + up -= 1; + continue; + } + break; + } } const visibleItems = items.slice(windowStart, windowEnd); - const border = theme.colors.border; - const muted = theme.colors.muted; - const heading = theme.colors.heading; - const accent = theme.colors.accent; - const reset = theme.colors.reset; - const selectedGlyph = theme.glyphs.selected; - const unselectedGlyph = theme.glyphs.unselected; - - writeLine(`${border}+${reset} ${heading}${truncateAnsi(options.message, Math.max(1, columns - 4))}${reset}`); - if (options.subtitle) { 
- writeLine( - `${border}|${reset} ${muted}${truncateAnsi(options.subtitle, Math.max(1, columns - 4))}${reset}`, - ); + const border = theme?.colors.border ?? ANSI.dim; + const muted = theme?.colors.muted ?? ANSI.dim; + const heading = theme?.colors.heading ?? ANSI.reset; + const reset = theme?.colors.reset ?? ANSI.reset; + const selectedGlyph = theme?.glyphs.selected ?? ">"; + const unselectedGlyph = theme?.glyphs.unselected ?? "o"; + const selectedGlyphColor = theme?.colors.success ?? ANSI.green; + const selectedChip = selectedLabelStart(); + + const safeMessage = sanitizeDisplayText(options.message); + writeLine(`${border}+${reset} ${heading}${truncateAnsi(safeMessage, Math.max(1, columns - 4))}${reset}`); + if (subtitleText) { + const safeSubtitle = sanitizeDisplayText(subtitleText); + writeLine(` ${muted}${truncateAnsi(safeSubtitle, Math.max(1, columns - 2))}${reset}`); } - writeLine(`${border}|${reset}`); + writeLine(""); for (let i = 0; i < visibleItems.length; i += 1) { const itemIndex = windowStart + i; @@ -266,47 +508,75 @@ export async function select(items: MenuItem[], options: SelectOptions): P if (!item) continue; if (item.separator) { - writeLine(`${border}|${reset}`); + writeLine(""); continue; } if (item.kind === "heading") { - const headingText = truncateAnsi( - `${theme.colors.dim}${heading}${item.label}${reset}`, - Math.max(1, columns - 6), - ); - writeLine(`${border}|${reset} ${headingText}`); + const safeHeading = sanitizeDisplayText(item.label); + const headingText = truncateAnsi(`${muted}${safeHeading}${reset}`, Math.max(1, columns - 2)); + writeLine(` ${headingText}`); continue; } const selected = itemIndex === cursor; - const prefix = selected - ? 
`${accent}${selectedGlyph}${reset}` - : `${muted}${unselectedGlyph}${reset}`; - const itemColor = codexColorCode(theme, item.color); - let labelText: string; - if (item.disabled) { - labelText = `${muted}${item.label} (unavailable)${reset}`; - } else if (selected) { - labelText = `${itemColor}${item.label}${reset}`; + const safeLabel = sanitizeDisplayText(item.label); + const safeSelectedLabel = item.selectedLabel ? sanitizeDisplayText(item.selectedLabel) : safeLabel; + const safeHintLines = item.hint + ? item.hint.split("\n").map((line) => sanitizeDisplayText(line)).filter((line) => line.length > 0) + : []; + if (selected) { + const selectedText = item.selectedLabel + ? safeSelectedLabel + : item.disabled + ? item.hideUnavailableSuffix + ? safeLabel + : `${safeLabel} (unavailable)` + : safeLabel; + if (focusStyle === "row-invert") { + const rowText = `${selectedGlyph} ${selectedText}`; + const focusedRow = theme + ? `${theme.colors.focusBg}${theme.colors.focusText}${ANSI.bold}${truncateAnsi(rowText, Math.max(1, columns - 2))}${reset}` + : `${ANSI.inverse}${truncateAnsi(rowText, Math.max(1, columns - 2))}${ANSI.reset}`; + writeLine(` ${focusedRow}`); + } else { + const selectedLabel = `${selectedChip}${selectedText}${reset}`; + writeLine(` ${selectedGlyphColor}${selectedGlyph}${reset} ${truncateAnsi(selectedLabel, Math.max(1, columns - 4))}`); + } + if (safeHintLines.length > 0) { + const detailLines = safeHintLines.slice(0, 3); + for (const detailLine of detailLines) { + const detail = truncateAnsi(detailLine, Math.max(1, columns - 8)); + writeLine(` ${muted}${detail}${reset}`); + } + } } else { - labelText = `${muted}${item.label}${reset}`; - } - if (item.hint) { - labelText += ` ${muted}${item.hint}${reset}`; + const itemColor = codexColorCode(item.color); + const labelText = item.disabled + ? item.hideUnavailableSuffix + ? 
`${muted}${safeLabel}${reset}` + : `${muted}${safeLabel} (unavailable)${reset}` + : `${itemColor}${safeLabel}${reset}`; + writeLine(` ${muted}${unselectedGlyph}${reset} ${truncateAnsi(labelText, Math.max(1, columns - 4))}`); + if (safeHintLines.length > 0 && (options.showHintsForUnselected ?? true)) { + const detailLines = safeHintLines.slice(0, 2); + for (const detailLine of detailLines) { + const detail = truncateAnsi(`${muted}${detailLine}${reset}`, Math.max(1, columns - 8)); + writeLine(` ${detail}`); + } + } } - - labelText = truncateAnsi(labelText, Math.max(1, columns - 8)); - writeLine(`${border}|${reset} ${prefix} ${labelText}`); } - const windowHint = - items.length > visibleItems.length ? ` (${windowStart + 1}-${windowEnd}/${items.length})` : ""; - const helpText = options.help ?? `Up/Down select | Enter confirm | Esc back${windowHint}`; - writeLine(`${border}|${reset} ${muted}${truncateAnsi(helpText, Math.max(1, columns - 4))}${reset}`); + const windowHint = items.length > visibleItems.length ? ` (${windowStart + 1}-${windowEnd}/${items.length})` : ""; + const backLabel = options.allowEscape === false ? "" : "Q Back"; + const helpText = + options.help ?? + `↑↓ Move | Enter Select${backLabel ? 
` | ${backLabel}` : ""}${windowHint}`; + writeLine(` ${muted}${truncateAnsi(helpText, Math.max(1, columns - 2))}${reset}`); writeLine(`${border}+${reset}`); - if (!options.clearScreen && previousRenderedLines > linesWritten) { + if (!didFullClear && previousRenderedLines > linesWritten) { const extra = previousRenderedLines - linesWritten; for (let i = 0; i < extra; i += 1) { writeLine(""); @@ -314,18 +584,14 @@ export async function select(items: MenuItem[], options: SelectOptions): P } renderedLines = linesWritten; + hasRendered = true; }; - const render = () => { - if (options.variant === "codex" && options.theme) { - renderCodex(options.theme); - return; - } - renderLegacy(); - }; - - return new Promise((resolve) => { + return new Promise((resolve, reject) => { + const rejectPromise = reject; const wasRaw = stdin.isRaw ?? false; + let refreshTimer: ReturnType | null = null; + let pendingEscapeSequence: PendingInputSequence | null = null; const cleanup = () => { if (cleanedUp) return; @@ -340,6 +606,19 @@ export async function select(items: MenuItem[], options: SelectOptions): P stdin.removeListener("data", onKey); stdin.setRawMode(wasRaw); stdin.pause(); + if (refreshTimer) { + clearInterval(refreshTimer); + refreshTimer = null; + } + if (options.clearScreen) { + stdout.write(ANSI.clearScreen + ANSI.moveTo(1, 1)); + } else if (renderedLines > 0) { + stdout.write(ANSI.up(renderedLines)); + for (let i = 0; i < renderedLines; i += 1) { + stdout.write(`${ANSI.clearLine}\n`); + } + stdout.write(ANSI.up(renderedLines)); + } stdout.write(ANSI.show); } catch { // best effort cleanup @@ -350,10 +629,23 @@ export async function select(items: MenuItem[], options: SelectOptions): P }; const finish = (value: T | null) => { + writeTuiAudit({ + type: "finish", + message: options.message, + cursor, + label: items[cursor]?.label, + result: value === null ? 
"cancel" : "selected", + }); cleanup(); resolve(value); }; + const fail = (error: unknown): boolean => { + cleanup(); + rejectPromise(error); + return true; + }; + const onSignal = () => finish(null); const findNextSelectable = (from: number, direction: 1 | -1): number => { @@ -366,32 +658,182 @@ export async function select(items: MenuItem[], options: SelectOptions): P }; const onKey = (data: Buffer) => { - if (escapeTimeout) { - clearTimeout(escapeTimeout); - escapeTimeout = null; - } + const rawInput = data.toString("utf8"); + writeTuiAudit({ + type: "raw", + message: options.message, + bytesHex: Array.from(data.values()).map((value) => value.toString(16).padStart(2, "0")).join(" "), + utf8: rawInput, + }); + + const processToken = (token: string): boolean => { + writeTuiAudit({ + type: "token", + message: options.message, + cursor, + token, + }); + + if (escapeTimeout) { + clearTimeout(escapeTimeout); + escapeTimeout = null; + } + + const { normalizedInput, pending } = coalesceTerminalInput( + token, + pendingEscapeSequence, + ); + pendingEscapeSequence = pending; + writeTuiAudit({ + type: "coalesced", + message: options.message, + cursor, + token, + normalizedInput, + pending: pendingEscapeSequence?.value ?? null, + hasEscape: pendingEscapeSequence?.hasEscape ?? 
false, + }); + if (pendingEscapeSequence) { + if (pendingEscapeSequence.hasEscape && options.allowEscape !== false) { + const pendingValue = pendingEscapeSequence.value; + escapeTimeout = setTimeout(() => { + if (pendingEscapeSequence?.value === pendingValue) { + pendingEscapeSequence = null; + finish(null); + } + }, ESCAPE_TIMEOUT_MS); + } + return false; + } + if (normalizedInput === null) { + return false; + } + + const normalizedData = Buffer.from(normalizedInput, "utf8"); + + if (Date.now() < inputGuardUntil) { + const guardedAction = parseKey(normalizedData); + if (guardedAction === "enter" || guardedAction === "escape" || guardedAction === "escape-start") { + return false; + } + } - const action = parseKey(data); - switch (action) { + const action = parseKey(normalizedData); + switch (action) { case "up": + writeTuiAudit({ type: "key", message: options.message, action: "up", cursor }); cursor = findNextSelectable(cursor, -1); - render(); - return; + try { + notifyCursorChange(); + render(); + } catch (error) { + return fail(error); + } + return false; case "down": + writeTuiAudit({ type: "key", message: options.message, action: "down", cursor }); cursor = findNextSelectable(cursor, 1); - render(); - return; + try { + notifyCursorChange(); + render(); + } catch (error) { + return fail(error); + } + return false; + case "home": + writeTuiAudit({ type: "key", message: options.message, action: "home", cursor }); + cursor = items.findIndex(isSelectable); + try { + notifyCursorChange(); + render(); + } catch (error) { + return fail(error); + } + return false; + case "end": { + writeTuiAudit({ type: "key", message: options.message, action: "end", cursor }); + for (let i = items.length - 1; i >= 0; i -= 1) { + const item = items[i]; + if (item && isSelectable(item)) { + cursor = i; + break; + } + } + try { + notifyCursorChange(); + render(); + } catch (error) { + return fail(error); + } + return false; + } case "enter": + writeTuiAudit({ type: "key", message: 
options.message, action: "enter", cursor }); finish(items[cursor]?.value ?? null); - return; + return true; case "escape": - finish(null); - return; + writeTuiAudit({ type: "key", message: options.message, action: "escape", cursor }); + if (options.allowEscape !== false) { + finish(null); + } + return true; case "escape-start": - escapeTimeout = setTimeout(() => finish(null), ESCAPE_TIMEOUT_MS); - return; + writeTuiAudit({ type: "key", message: options.message, action: "escape-start", cursor }); + pendingEscapeSequence = { value: "\x1b", hasEscape: true }; + if (options.allowEscape !== false) { + escapeTimeout = setTimeout(() => { + if (pendingEscapeSequence?.value === "\x1b") { + pendingEscapeSequence = null; + finish(null); + } + }, ESCAPE_TIMEOUT_MS); + } + return false; default: + const hotkey = decodeHotkeyInput(normalizedData); + if (options.onInput && hotkey) { + writeTuiAudit({ + type: "input", + message: options.message, + cursor, + hotkey, + }); + rerenderRequested = false; + let result: T | null | undefined; + try { + result = options.onInput(hotkey, { + cursor, + items, + requestRerender, + }); + } catch (error) { + return fail(error); + } + if (result !== undefined) { + finish(result); + return true; + } + if (rerenderRequested) { + try { + render(); + } catch (error) { + return fail(error); + } + } + } + if ((hotkey === "q" || hotkey === "Q") && options.allowEscape !== false) { + writeTuiAudit({ type: "key", message: options.message, action: "q-back", cursor }); + finish(null); + return true; + } + return false; + } + }; + + for (const token of tokenizeTerminalInput(rawInput)) { + if (processToken(token)) { return; + } } }; @@ -407,9 +849,32 @@ export async function select(items: MenuItem[], options: SelectOptions): P } stdin.resume(); + drainStdinBuffer(); + inputGuardUntil = Date.now() + 120; stdout.write(ANSI.hide); - render(); + writeTuiAudit({ + type: "open", + message: options.message, + subtitle: options.subtitle, + itemCount: items.length, + 
}); + try { + notifyCursorChange(); + render(); + } catch (error) { + fail(error); + return; + } + if (options.dynamicSubtitle && (options.refreshIntervalMs ?? 0) > 0) { + const intervalMs = Math.max(80, Math.round(options.refreshIntervalMs ?? 0)); + refreshTimer = setInterval(() => { + try { + render(); + } catch (error) { + fail(error); + } + }, intervalMs); + } stdin.on("data", onKey); }); } - diff --git a/lib/ui/theme.ts b/lib/ui/theme.ts index 56ebecbd..8efc9f65 100644 --- a/lib/ui/theme.ts +++ b/lib/ui/theme.ts @@ -4,6 +4,8 @@ export type UiColorProfile = "ansi16" | "ansi256" | "truecolor"; export type UiGlyphMode = "ascii" | "unicode" | "auto"; +export type UiPalette = "green" | "blue"; +export type UiAccent = "green" | "cyan" | "blue" | "yellow"; export interface UiGlyphSet { selected: string; @@ -18,11 +20,14 @@ export interface UiThemeColors { dim: string; muted: string; heading: string; + primary: string; accent: string; success: string; warning: string; danger: string; border: string; + focusBg: string; + focusText: string; } export interface UiTheme { @@ -35,6 +40,8 @@ export interface UiTheme { const ansi16 = (code: number): string => `\x1b[${code}m`; const ansi256 = (code: number): string => `\x1b[38;5;${code}m`; const truecolor = (r: number, g: number, b: number): string => `\x1b[38;2;${r};${g};${b}m`; +const ansi256Bg = (code: number): string => `\x1b[48;5;${code}m`; +const truecolorBg = (r: number, g: number, b: number): string => `\x1b[48;2;${r};${g};${b}m`; function resolveGlyphMode(mode: UiGlyphMode): Exclude { if (mode !== "auto") return mode; @@ -64,31 +71,77 @@ function getGlyphs(mode: Exclude): UiGlyphSet { }; } -function getColors(profile: UiColorProfile): UiThemeColors { +function accentColorForProfile(profile: UiColorProfile, accent: UiAccent): string { + switch (profile) { + case "truecolor": + switch (accent) { + case "cyan": + return truecolor(34, 211, 238); + case "blue": + return truecolor(59, 130, 246); + case "yellow": + return 
truecolor(245, 158, 11); + default: + return truecolor(74, 222, 128); + } + case "ansi256": + switch (accent) { + case "cyan": + return ansi256(51); + case "blue": + return ansi256(75); + case "yellow": + return ansi256(214); + default: + return ansi256(83); + } + default: + switch (accent) { + case "cyan": + return ansi16(96); + case "blue": + return ansi16(94); + case "yellow": + return ansi16(93); + default: + return ansi16(92); + } + } +} + +function getColors(profile: UiColorProfile, palette: UiPalette, accent: UiAccent): UiThemeColors { + const accentColor = accentColorForProfile(profile, accent); + const isBluePalette = palette === "blue"; switch (profile) { case "truecolor": return { reset: "\x1b[0m", dim: "\x1b[2m", muted: truecolor(148, 163, 184), - heading: truecolor(226, 232, 240), - accent: truecolor(56, 189, 248), - success: truecolor(74, 222, 128), - warning: truecolor(251, 191, 36), - danger: truecolor(248, 113, 113), - border: truecolor(100, 116, 139), + heading: truecolor(240, 253, 244), + primary: isBluePalette ? truecolor(96, 165, 250) : truecolor(74, 222, 128), + accent: accentColor, + success: isBluePalette ? truecolor(96, 165, 250) : truecolor(74, 222, 128), + warning: truecolor(245, 158, 11), + danger: truecolor(239, 68, 68), + border: isBluePalette ? truecolor(59, 130, 246) : truecolor(34, 197, 94), + focusBg: isBluePalette ? truecolorBg(37, 99, 235) : truecolorBg(22, 101, 52), + focusText: truecolor(248, 250, 252), }; case "ansi256": return { reset: "\x1b[0m", dim: "\x1b[2m", - muted: ansi256(109), + muted: ansi256(102), heading: ansi256(255), - accent: ansi256(45), - success: ansi256(84), - warning: ansi256(220), - danger: ansi256(203), - border: ansi256(67), + primary: isBluePalette ? ansi256(75) : ansi256(83), + accent: accentColor, + success: isBluePalette ? ansi256(75) : ansi256(83), + warning: ansi256(214), + danger: ansi256(196), + border: isBluePalette ? ansi256(27) : ansi256(40), + focusBg: isBluePalette ? 
ansi256Bg(26) : ansi256Bg(28), + focusText: ansi256(231), }; default: return { @@ -96,11 +149,14 @@ function getColors(profile: UiColorProfile): UiThemeColors { dim: "\x1b[2m", muted: ansi16(37), heading: ansi16(97), - accent: ansi16(96), - success: ansi16(92), + primary: isBluePalette ? ansi16(94) : ansi16(92), + accent: accentColor, + success: isBluePalette ? ansi16(94) : ansi16(92), warning: ansi16(93), danger: ansi16(91), - border: ansi16(90), + border: isBluePalette ? ansi16(94) : ansi16(92), + focusBg: isBluePalette ? "\x1b[104m" : "\x1b[102m", + focusText: "\x1b[30m", }; } } @@ -108,15 +164,18 @@ function getColors(profile: UiColorProfile): UiThemeColors { export function createUiTheme(options?: { profile?: UiColorProfile; glyphMode?: UiGlyphMode; + palette?: UiPalette; + accent?: UiAccent; }): UiTheme { const profile = options?.profile ?? "truecolor"; const glyphMode = options?.glyphMode ?? "ascii"; + const palette = options?.palette ?? "green"; + const accent = options?.accent ?? 
"green"; const resolvedGlyphMode = resolveGlyphMode(glyphMode); return { profile, glyphMode, glyphs: getGlyphs(resolvedGlyphMode), - colors: getColors(profile), + colors: getColors(profile, palette, accent), }; } - diff --git a/package.json b/package.json index 2f7ce6fa..6e9ffa8f 100644 --- a/package.json +++ b/package.json @@ -40,6 +40,7 @@ "test": "vitest run", "test:watch": "vitest", "test:ui": "vitest --ui", + "capture:keys": "node scripts/capture-tui-input.js", "test:coverage": "vitest run --coverage", "coverage": "vitest run --coverage", "audit:prod": "npm audit --omit=dev --audit-level=high", diff --git a/scripts/capture-tui-input.js b/scripts/capture-tui-input.js new file mode 100644 index 00000000..c8414c02 --- /dev/null +++ b/scripts/capture-tui-input.js @@ -0,0 +1,190 @@ +import { appendFileSync, existsSync, mkdirSync } from "node:fs"; +import { dirname, join } from "node:path"; +import { homedir } from "node:os"; + +function resolveDefaultLogPath() { + const home = process.env.USERPROFILE ?? process.env.HOME ?? homedir(); + return join(home, ".opencode", "logs", "capture-tui-input.log"); +} + +function parseArgs(argv) { + const parsed = { + output: resolveDefaultLogPath(), + }; + + for (let i = 0; i < argv.length; i += 1) { + const arg = argv[i]; + if (arg === "--output" && argv[i + 1]) { + parsed.output = argv[i + 1]; + i += 1; + } + } + + return parsed; +} + +function printableHotkey(value) { + if (value.length === 1) { + const code = value.charCodeAt(0); + if (code >= 32 && code <= 126) return value; + } + return null; +} + +const { output } = parseArgs(process.argv.slice(2)); + +const selectModulePath = new URL("../dist/lib/ui/select.js", import.meta.url); +const ansiModulePath = new URL("../dist/lib/ui/ansi.js", import.meta.url); + +if (!existsSync(selectModulePath) || !existsSync(ansiModulePath)) { + console.error("dist/ build output is missing. 
Run `npm run build` first."); + process.exit(1); +} + +const { coalesceTerminalInput, tokenizeTerminalInput } = await import(selectModulePath); +const { parseKey } = await import(ansiModulePath); +const ESCAPE_TIMEOUT_MS = 50; + +const logEvent = (event) => { + appendFileSync(output, `${JSON.stringify(sanitizeAuditValue("event", { ts: new Date().toISOString(), ...event }))}\n`, { + encoding: "utf8", + mode: 0o600, + }); +}; + +function sanitizeAuditValue(key, value) { + if (typeof value === "string") { + if (["utf8", "bytesHex", "token", "normalizedInput", "pending", "hotkey"].includes(key)) { + return `[redacted:${value.length}]`; + } + if (value.includes("@")) { + return "[redacted-email]"; + } + return value; + } + if (Array.isArray(value)) { + return value.map((entry) => sanitizeAuditValue(key, entry)); + } + if (value && typeof value === "object") { + return Object.fromEntries( + Object.entries(value).map(([entryKey, entryValue]) => [ + entryKey, + sanitizeAuditValue(entryKey, entryValue), + ]), + ); + } + return value; +} + +if (!process.stdin.isTTY || !process.stdout.isTTY) { + console.error("capture-tui-input requires a TTY"); + process.exit(1); +} + +mkdirSync(dirname(output), { recursive: true }); + +console.log(`Logging raw terminal input to ${output}`); +console.log("Press keys to capture. Ctrl+C exits."); + +let pending = null; +let pendingEscapeTimer = null; +const stdin = process.stdin; +const stdout = process.stdout; +const wasRaw = stdin.isRaw ?? 
false; +let cleanedUp = false; + +const cleanup = () => { + if (cleanedUp) return; + cleanedUp = true; + if (pendingEscapeTimer) { + clearTimeout(pendingEscapeTimer); + pendingEscapeTimer = null; + } + try { + stdin.setRawMode(wasRaw); + stdin.pause(); + } catch { + // best effort cleanup + } +}; + +const exitCapture = (code = 0) => { + cleanup(); + stdout.write("\nCapture complete.\n"); + process.exit(code); +}; + +const handleFatal = (error) => { + cleanup(); + console.error(error); + process.exit(1); +}; + +stdin.setRawMode(true); +stdin.resume(); + +stdin.on("data", (data) => { + try { + const rawInput = data.toString("utf8"); + if (pendingEscapeTimer) { + clearTimeout(pendingEscapeTimer); + pendingEscapeTimer = null; + } + logEvent({ + type: "raw", + bytesHex: Array.from(data.values()).map((value) => value.toString(16).padStart(2, "0")).join(" "), + utf8: rawInput, + }); + + let shouldExit = false; + for (const token of tokenizeTerminalInput(rawInput)) { + const coalesced = coalesceTerminalInput(token, pending); + pending = coalesced.pending; + logEvent({ + type: "token", + token, + pending: pending?.value ?? null, + hasEscape: pending?.hasEscape ?? 
false, + normalizedInput: coalesced.normalizedInput, + }); + if (coalesced.normalizedInput === null) { + if (pending?.hasEscape && pending.value === "\u001b") { + pendingEscapeTimer = setTimeout(() => { + logEvent({ + type: "timeout", + reason: "escape-start", + }); + exitCapture(0); + }, ESCAPE_TIMEOUT_MS); + } + continue; + } + + const buffer = Buffer.from(coalesced.normalizedInput, "utf8"); + const action = parseKey(buffer); + const hotkey = printableHotkey(coalesced.normalizedInput); + logEvent({ + type: "parsed", + normalizedInput: coalesced.normalizedInput, + action, + hotkey, + }); + + if (action === "escape" || action === "escape-start" || coalesced.normalizedInput === "\u0003") { + shouldExit = true; + break; + } + } + + if (shouldExit) { + exitCapture(0); + } + } catch (error) { + handleFatal(error); + } +}); + +process.on("SIGINT", () => exitCapture(0)); +process.on("SIGTERM", () => exitCapture(0)); +process.on("uncaughtException", handleFatal); +process.on("unhandledRejection", handleFatal); diff --git a/test/auth-menu.test.ts b/test/auth-menu.test.ts index 5edc8f28..25f26cbf 100644 --- a/test/auth-menu.test.ts +++ b/test/auth-menu.test.ts @@ -1,5 +1,11 @@ import { beforeEach, describe, expect, it, vi } from "vitest"; -import { showAuthMenu, showAccountDetails, type AccountInfo } from "../lib/ui/auth-menu.js"; +import { + showAuthMenu, + showAccountDetails, + showSettingsMenu, + showSyncPruneMenu, + type AccountInfo, +} from "../lib/ui/auth-menu.js"; import { setUiRuntimeOptions, resetUiRuntimeOptions } from "../lib/ui/runtime.js"; import { select } from "../lib/ui/select.js"; import { confirm } from "../lib/ui/confirm.js"; @@ -46,7 +52,7 @@ describe("auth-menu", () => { const firstCall = vi.mocked(select).mock.calls[0]; expect(firstCall).toBeDefined(); - const items = firstCall?.[0] as Array<{ label: string; value?: { type?: string } }>; + const items = firstCall?.[0] as Array<{ label: string; kind?: string; value?: { type?: string } }>; const 
accountRows = items.filter((item) => item.value?.type === "select-account"); expect(accountRows).toHaveLength(2); expect(accountRows[0]?.label).toContain("shared@example.com"); @@ -72,4 +78,73 @@ describe("auth-menu", () => { expect.stringContaining("shared@example.com | workspace:Workspace A | id:org-aaaa...bb2222"), ); }); + + it("shows settings in the main auth menu", async () => { + vi.mocked(select).mockResolvedValueOnce({ type: "cancel" }); + + await showAuthMenu([]); + + const firstCall = vi.mocked(select).mock.calls[0]; + expect(firstCall).toBeDefined(); + const items = firstCall?.[0] as Array<{ label: string; value?: { type?: string } }>; + expect(items.some((item) => item.value?.type === "settings")).toBe(true); + expect(items.some((item) => item.label === "Settings" && item.kind === "heading")).toBe(true); + }); + + it("renders settings hub categories before sync actions", async () => { + vi.mocked(select) + .mockResolvedValueOnce("sync") + .mockResolvedValueOnce("cancel"); + + await showSettingsMenu(true); + + const firstCall = vi.mocked(select).mock.calls[0]; + expect(firstCall).toBeDefined(); + const hubItems = firstCall?.[0] as Array<{ label: string; value?: string; kind?: string }>; + expect(hubItems.some((item) => item.label === "Categories")).toBe(true); + expect(hubItems.some((item) => item.value === "sync")).toBe(true); + expect(hubItems.some((item) => item.value === "maintenance")).toBe(true); + + const secondCall = vi.mocked(select).mock.calls[1]; + expect(secondCall).toBeDefined(); + const items = secondCall?.[0] as Array<{ label: string; value?: string }>; + const toggleItem = items.find((item) => item.value === "toggle-sync"); + expect(toggleItem?.label).toContain("Sync from codex-multi-auth"); + expect(toggleItem?.label).toContain("[enabled]"); + expect(items.some((item) => item.label === "Sync")).toBe(true); + expect(items.some((item) => item.label === "Navigation")).toBe(true); + }); + + it("preselects suggested prune candidates and 
exposes confirm action", async () => { + vi.mocked(select).mockResolvedValueOnce({ type: "confirm" }); + + const result = await showSyncPruneMenu(1, [ + { index: 0, email: "current@example.com", isCurrentAccount: true, score: -1000, reason: "current" }, + { index: 2, email: "old@example.com", score: 180, reason: "disabled, not present in codex-multi-auth source" }, + ]); + + expect(result).toEqual([2]); + const firstCall = vi.mocked(select).mock.calls[0]; + expect(firstCall).toBeDefined(); + const items = firstCall?.[0] as Array<{ label: string; value?: { type?: string } }>; + expect(items.some((item) => item.label.includes("Continue With Selected Accounts"))).toBe(true); + expect(firstCall?.[0][0]?.hint ?? "").toContain("score"); + }); + + it("sanitizes quota summaries in account hints", async () => { + vi.mocked(select).mockResolvedValueOnce({ type: "cancel" }); + + await showAuthMenu([ + { + index: 0, + email: "safe@example.com", + quotaSummary: "5h \u001b[31m100%\u001b[0m", + }, + ]); + + const firstCall = vi.mocked(select).mock.calls[0]; + const items = firstCall?.[0] as Array<{ hint?: string; value?: { type?: string } }>; + const accountRow = items.find((item) => item.value?.type === "select-account"); + expect(accountRow?.hint ?? 
"").not.toContain("\u001b"); + }); }); diff --git a/test/cli.test.ts b/test/cli.test.ts index b51dfd7a..e7e937ee 100644 --- a/test/cli.test.ts +++ b/test/cli.test.ts @@ -124,8 +124,10 @@ describe("CLI Module", () => { expect(result).toEqual({ mode: "add" }); }); - it("returns 'fresh' for 'f' input", async () => { - mockRl.question.mockResolvedValueOnce("f"); + it("returns 'fresh' for 'f' input after typed confirmation", async () => { + mockRl.question + .mockResolvedValueOnce("f") + .mockResolvedValueOnce("DELETE"); const { promptLoginMode } = await import("../lib/cli.js"); const result = await promptLoginMode([{ index: 0 }]); @@ -133,8 +135,10 @@ describe("CLI Module", () => { expect(result).toEqual({ mode: "fresh", deleteAll: true }); }); - it("returns 'fresh' for 'fresh' input", async () => { - mockRl.question.mockResolvedValueOnce("fresh"); + it("returns 'fresh' for 'fresh' input after typed confirmation", async () => { + mockRl.question + .mockResolvedValueOnce("fresh") + .mockResolvedValueOnce("DELETE"); const { promptLoginMode } = await import("../lib/cli.js"); const result = await promptLoginMode([{ index: 0 }]); @@ -142,6 +146,31 @@ describe("CLI Module", () => { expect(result).toEqual({ mode: "fresh", deleteAll: true }); }); + it("cancels fallback delete-all when typed confirmation is missing", async () => { + mockRl.question + .mockResolvedValueOnce("fresh") + .mockResolvedValueOnce("nope") + .mockResolvedValueOnce("q"); + + const { promptLoginMode } = await import("../lib/cli.js"); + const result = await promptLoginMode([{ index: 0 }]); + + expect(result).toEqual({ mode: "cancel" }); + }); + + it("routes fallback settings input to experimental sync actions", async () => { + mockRl.question + .mockResolvedValueOnce("s") + .mockResolvedValueOnce("i"); + + const { promptLoginMode } = await import("../lib/cli.js"); + const result = await promptLoginMode([{ index: 0 }], { + syncFromCodexMultiAuthEnabled: true, + }); + + expect(result).toEqual({ mode: 
"experimental-sync-now" }); + }); + it("is case insensitive", async () => { mockRl.question.mockResolvedValueOnce("A"); @@ -151,10 +180,19 @@ describe("CLI Module", () => { expect(result).toEqual({ mode: "add" }); }); + it("accepts 'best' for the forecast action", async () => { + mockRl.question.mockResolvedValueOnce("best"); + + const { promptLoginMode } = await import("../lib/cli.js"); + const result = await promptLoginMode([{ index: 0 }]); + + expect(result).toEqual({ mode: "forecast" }); + }); + it("re-prompts on invalid input then accepts valid", async () => { mockRl.question .mockResolvedValueOnce("invalid") - .mockResolvedValueOnce("x") + .mockResolvedValueOnce("invalid-again") .mockResolvedValueOnce("a"); const { promptLoginMode } = await import("../lib/cli.js"); @@ -178,7 +216,7 @@ describe("CLI Module", () => { }); it("displays account with accountId suffix when no email", async () => { - mockRl.question.mockResolvedValueOnce("f"); + mockRl.question.mockResolvedValueOnce("a"); const consoleSpy = vi.spyOn(console, "log"); const { promptLoginMode } = await import("../lib/cli.js"); @@ -190,7 +228,7 @@ describe("CLI Module", () => { }); it("displays plain Account N when no email or accountId", async () => { - mockRl.question.mockResolvedValueOnce("f"); + mockRl.question.mockResolvedValueOnce("a"); const consoleSpy = vi.spyOn(console, "log"); const { promptLoginMode } = await import("../lib/cli.js"); @@ -222,6 +260,63 @@ describe("CLI Module", () => { }); }); + describe("promptCodexMultiAuthSyncPrune", () => { + it("uses suggested removals on empty input", async () => { + mockRl.question.mockResolvedValueOnce(""); + + const { promptCodexMultiAuthSyncPrune } = await import("../lib/cli.js"); + const result = await promptCodexMultiAuthSyncPrune(1, [ + { index: 2, email: "old@example.com", reason: "least recently used" }, + { index: 3, email: "disabled@example.com", reason: "disabled", isCurrentAccount: false }, + ]); + + expect(result).toEqual([2]); + }); + + 
it("parses comma-separated account numbers", async () => { + mockRl.question.mockResolvedValueOnce("2, 4"); + + const { promptCodexMultiAuthSyncPrune } = await import("../lib/cli.js"); + const result = await promptCodexMultiAuthSyncPrune(2, [ + { index: 1, email: "one@example.com", reason: "least recently used" }, + { index: 3, email: "two@example.com", reason: "disabled" }, + ]); + + expect(result).toEqual([1, 3]); + }); + + it("returns null when pruning is cancelled", async () => { + mockRl.question.mockResolvedValueOnce("q"); + + const { promptCodexMultiAuthSyncPrune } = await import("../lib/cli.js"); + const result = await promptCodexMultiAuthSyncPrune(1, [ + { index: 0, email: "one@example.com", reason: "least recently used" }, + ]); + + expect(result).toBeNull(); + }); + + it("uses the readline fallback when menus are unavailable but interaction is still allowed", async () => { + const { stdin, stdout } = await import("node:process"); + const origInputTTY = stdin.isTTY; + const origOutputTTY = stdout.isTTY; + Object.defineProperty(stdin, "isTTY", { value: false, writable: true, configurable: true }); + Object.defineProperty(stdout, "isTTY", { value: false, writable: true, configurable: true }); + mockRl.question.mockResolvedValueOnce("1"); + + try { + const { promptCodexMultiAuthSyncPrune } = await import("../lib/cli.js"); + const result = await promptCodexMultiAuthSyncPrune(1, [ + { index: 0, email: "one@example.com", reason: "least recently used" }, + ]); + expect(result).toEqual([0]); + } finally { + Object.defineProperty(stdin, "isTTY", { value: origInputTTY, writable: true, configurable: true }); + Object.defineProperty(stdout, "isTTY", { value: origOutputTTY, writable: true, configurable: true }); + } + }); + }); + describe("isNonInteractiveMode", () => { it("returns false when FORCE_INTERACTIVE_MODE is set", async () => { process.env.FORCE_INTERACTIVE_MODE = "1"; @@ -438,5 +533,85 @@ describe("CLI Module", () => { const result = await 
promptAccountSelection(candidates, { defaultIndex: 1 }); expect(result).toEqual(candidates[1]); }); + + it("promptCodexMultiAuthSyncPrune returns null in non-interactive mode", async () => { + const { promptCodexMultiAuthSyncPrune } = await import("../lib/cli.js"); + const result = await promptCodexMultiAuthSyncPrune(1, [ + { index: 0, email: "one@example.com", reason: "least recently used" }, + ]); + expect(result).toBeNull(); + }); + + it("promptLoginMode returns add immediately when TTY is unavailable without env overrides", async () => { + const originalEnv = { + OPENCODE_TUI: process.env.OPENCODE_TUI, + OPENCODE_DESKTOP: process.env.OPENCODE_DESKTOP, + TERM_PROGRAM: process.env.TERM_PROGRAM, + ELECTRON_RUN_AS_NODE: process.env.ELECTRON_RUN_AS_NODE, + }; + delete process.env.OPENCODE_TUI; + delete process.env.OPENCODE_DESKTOP; + delete process.env.TERM_PROGRAM; + delete process.env.ELECTRON_RUN_AS_NODE; + const { stdin, stdout } = await import("node:process"); + const origInputTTY = stdin.isTTY; + const origOutputTTY = stdout.isTTY; + Object.defineProperty(stdin, "isTTY", { value: false, writable: true, configurable: true }); + Object.defineProperty(stdout, "isTTY", { value: false, writable: true, configurable: true }); + + try { + const { promptLoginMode } = await import("../lib/cli.js"); + const result = await promptLoginMode([{ index: 0 }]); + expect(result).toEqual({ mode: "add" }); + expect(mockRl.question).not.toHaveBeenCalled(); + } finally { + Object.defineProperty(stdin, "isTTY", { value: origInputTTY, writable: true, configurable: true }); + Object.defineProperty(stdout, "isTTY", { value: origOutputTTY, writable: true, configurable: true }); + for (const [key, value] of Object.entries(originalEnv)) { + if (value === undefined) { + delete process.env[key]; + } else { + process.env[key] = value; + } + } + } + }); + + it("promptCodexMultiAuthSyncPrune returns null when TTY is unavailable without env overrides", async () => { + const originalEnv = { + 
OPENCODE_TUI: process.env.OPENCODE_TUI, + OPENCODE_DESKTOP: process.env.OPENCODE_DESKTOP, + TERM_PROGRAM: process.env.TERM_PROGRAM, + ELECTRON_RUN_AS_NODE: process.env.ELECTRON_RUN_AS_NODE, + }; + delete process.env.OPENCODE_TUI; + delete process.env.OPENCODE_DESKTOP; + delete process.env.TERM_PROGRAM; + delete process.env.ELECTRON_RUN_AS_NODE; + const { stdin, stdout } = await import("node:process"); + const origInputTTY = stdin.isTTY; + const origOutputTTY = stdout.isTTY; + Object.defineProperty(stdin, "isTTY", { value: false, writable: true, configurable: true }); + Object.defineProperty(stdout, "isTTY", { value: false, writable: true, configurable: true }); + + try { + const { promptCodexMultiAuthSyncPrune } = await import("../lib/cli.js"); + const result = await promptCodexMultiAuthSyncPrune(1, [ + { index: 0, email: "one@example.com", reason: "least recently used" }, + ]); + expect(result).toBeNull(); + expect(mockRl.question).not.toHaveBeenCalled(); + } finally { + Object.defineProperty(stdin, "isTTY", { value: origInputTTY, writable: true, configurable: true }); + Object.defineProperty(stdout, "isTTY", { value: origOutputTTY, writable: true, configurable: true }); + for (const [key, value] of Object.entries(originalEnv)) { + if (value === undefined) { + delete process.env[key]; + } else { + process.env[key] = value; + } + } + } + }); }); }); diff --git a/test/codex-multi-auth-sync.race.test.ts b/test/codex-multi-auth-sync.race.test.ts new file mode 100644 index 00000000..87174dec --- /dev/null +++ b/test/codex-multi-auth-sync.race.test.ts @@ -0,0 +1,117 @@ +import { afterEach, beforeEach, describe, expect, it } from "vitest"; +import { promises as fs } from "node:fs"; +import { join } from "node:path"; +import { tmpdir } from "node:os"; + +describe("codex-multi-auth sync race paths", () => { + let testDir: string; + let sourceRoot: string; + let storagePath: string; + const originalEnv = { + CODEX_MULTI_AUTH_DIR: process.env.CODEX_MULTI_AUTH_DIR, + }; + + 
beforeEach(async () => { + testDir = await fs.mkdtemp(join(tmpdir(), "codex-sync-race-")); + sourceRoot = join(testDir, "source"); + storagePath = join(testDir, "accounts.json"); + process.env.CODEX_MULTI_AUTH_DIR = sourceRoot; + await fs.mkdir(sourceRoot, { recursive: true }); + await fs.writeFile( + join(sourceRoot, "openai-codex-accounts.json"), + JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { + accountId: "org-source-1", + organizationId: "org-source-1", + accountIdSource: "org", + email: "source@example.com", + refreshToken: "rt-source-1", + addedAt: 1, + lastUsed: 1, + }, + ], + }), + "utf8", + ); + + const storageModule = await import("../lib/storage.js"); + storageModule.setStoragePathDirect(storagePath); + await storageModule.clearAccounts(); + }); + + afterEach(async () => { + const storageModule = await import("../lib/storage.js"); + storageModule.setStoragePathDirect(null); + if (originalEnv.CODEX_MULTI_AUTH_DIR === undefined) { + delete process.env.CODEX_MULTI_AUTH_DIR; + } else { + process.env.CODEX_MULTI_AUTH_DIR = originalEnv.CODEX_MULTI_AUTH_DIR; + } + await fs.rm(testDir, { recursive: true, force: true }); + }); + + it("keeps the final account store deduplicated under concurrent syncs", async () => { + const { syncFromCodexMultiAuth } = await import("../lib/codex-multi-auth-sync.js"); + const storageModule = await import("../lib/storage.js"); + + const results = await Promise.allSettled([ + syncFromCodexMultiAuth(testDir), + syncFromCodexMultiAuth(testDir), + ]); + + expect(results.every((result) => result.status === "fulfilled")).toBe(true); + + const loaded = await storageModule.loadAccounts(); + expect(loaded?.accounts).toHaveLength(1); + expect(loaded?.accounts[0]?.accountId).toBe("org-source-1"); + expect(new Set(loaded?.accounts.map((account) => account.refreshToken))).toEqual( + new Set(["rt-source-1"]), + ); + }); + + it("keeps synced-overlap cleanup stable under concurrent cleanup runs", 
async () => { + const { cleanupCodexMultiAuthSyncedOverlaps } = await import("../lib/codex-multi-auth-sync.js"); + const storageModule = await import("../lib/storage.js"); + + await storageModule.saveAccounts({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { + accountId: "org-local", + organizationId: "org-local", + accountIdSource: "org", + email: "shared@example.com", + refreshToken: "rt-local", + addedAt: 2, + lastUsed: 2, + }, + { + accountId: "org-sync", + organizationId: "org-sync", + accountIdSource: "org", + accountTags: ["codex-multi-auth-sync"], + email: "shared@example.com", + refreshToken: "rt-sync", + addedAt: 1, + lastUsed: 1, + }, + ], + }); + + const results = await Promise.allSettled([ + cleanupCodexMultiAuthSyncedOverlaps(), + cleanupCodexMultiAuthSyncedOverlaps(), + ]); + const loaded = await storageModule.loadAccounts(); + + expect(results.every((result) => result.status === "fulfilled")).toBe(true); + expect(loaded?.accounts).toHaveLength(1); + expect(loaded?.accounts[0]?.accountId).toBe("org-local"); + }); +}); diff --git a/test/codex-multi-auth-sync.test.ts b/test/codex-multi-auth-sync.test.ts new file mode 100644 index 00000000..bf3af40c --- /dev/null +++ b/test/codex-multi-auth-sync.test.ts @@ -0,0 +1,2282 @@ +import { beforeEach, afterEach, describe, expect, it, vi } from "vitest"; +import * as fs from "node:fs"; +import * as os from "node:os"; +import { join } from "node:path"; +import { findProjectRoot, getProjectStorageKey, getProjectStorageKeyCandidates } from "../lib/storage/paths.js"; +import type { AccountStorageV3 } from "../lib/storage.js"; + +vi.mock("../lib/logger.js", () => ({ + logWarn: vi.fn(), +})); + +vi.mock("node:fs", async () => { + const actual = await vi.importActual("node:fs"); + return { + ...actual, + existsSync: vi.fn(), + readdirSync: vi.fn(() => []), + readFileSync: vi.fn(), + statSync: vi.fn(), + }; +}); + +vi.mock("../lib/storage.js", () => ({ + deduplicateAccounts: vi.fn((accounts) 
=> accounts), + deduplicateAccountsByEmail: vi.fn((accounts) => accounts), + getStoragePath: vi.fn(() => "/tmp/opencode-accounts.json"), + loadAccounts: vi.fn(async () => ({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { + accountId: "org-example123", + accountIdSource: "org", + refreshToken: "sync-refresh", + addedAt: 1, + lastUsed: 1, + }, + { + accountId: "org-example123", + organizationId: "org-example123", + accountIdSource: "org", + refreshToken: "sync-refresh", + addedAt: 2, + lastUsed: 2, + }, + ], + })), + saveAccounts: vi.fn(async () => {}), + clearAccounts: vi.fn(async () => {}), + previewImportAccounts: vi.fn(async () => ({ imported: 2, skipped: 0, total: 4 })), + previewImportAccountsWithExistingStorage: vi.fn(async () => ({ imported: 2, skipped: 0, total: 4 })), + importAccounts: vi.fn(async () => ({ + imported: 2, + skipped: 0, + total: 4, + backupStatus: "created", + backupPath: "/tmp/codex-multi-auth-sync-backup.json", + })), + normalizeAccountStorage: vi.fn((value: unknown) => value), + withAccountStorageTransaction: vi.fn(async (handler) => + handler( + { + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { + accountId: "org-example123", + accountIdSource: "org", + refreshToken: "sync-refresh", + addedAt: 1, + lastUsed: 1, + }, + { + accountId: "org-example123", + organizationId: "org-example123", + accountIdSource: "org", + refreshToken: "sync-refresh", + addedAt: 2, + lastUsed: 2, + }, + ], + }, + vi.fn(async () => {}), + ), + ), +})); + +describe("codex-multi-auth sync", () => { + const mockExistsSync = vi.mocked(fs.existsSync); + const mockReaddirSync = vi.mocked(fs.readdirSync); + const mockReadFileSync = vi.mocked(fs.readFileSync); + const mockStatSync = vi.mocked(fs.statSync); + const originalReadFile = fs.promises.readFile.bind(fs.promises); + const mockReadFile = vi.spyOn(fs.promises, "readFile"); + const originalEnv = { + CODEX_MULTI_AUTH_DIR: process.env.CODEX_MULTI_AUTH_DIR, + 
CODEX_HOME: process.env.CODEX_HOME, + USERPROFILE: process.env.USERPROFILE, + HOME: process.env.HOME, + }; + const mockSourceStorageFile = (expectedPath: string, content: string) => { + mockReadFile.mockImplementation(async (filePath, options) => { + if (String(filePath) === expectedPath) { + return content; + } + return originalReadFile( + filePath as Parameters[0], + options as never, + ); + }); + }; + const defaultTransactionalStorage = (): AccountStorageV3 => ({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { + accountId: "org-example123", + accountIdSource: "org", + refreshToken: "sync-refresh", + addedAt: 1, + lastUsed: 1, + }, + { + accountId: "org-example123", + organizationId: "org-example123", + accountIdSource: "org", + refreshToken: "sync-refresh", + addedAt: 2, + lastUsed: 2, + }, + ], + }); + + beforeEach(async () => { + vi.resetModules(); + vi.clearAllMocks(); + mockExistsSync.mockReset(); + mockExistsSync.mockReturnValue(false); + mockReaddirSync.mockReset(); + mockReaddirSync.mockReturnValue([] as ReturnType); + mockReadFileSync.mockReset(); + mockReadFileSync.mockImplementation((candidate) => { + throw new Error(`unexpected read: ${String(candidate)}`); + }); + mockStatSync.mockReset(); + mockStatSync.mockImplementation(() => ({ + isDirectory: () => false, + }) as ReturnType); + mockReadFile.mockReset(); + mockReadFile.mockImplementation((path, options) => + originalReadFile(path as Parameters[0], options as never), + ); + const storageModule = await import("../lib/storage.js"); + vi.mocked(storageModule.previewImportAccounts).mockReset(); + vi.mocked(storageModule.previewImportAccounts).mockResolvedValue({ imported: 2, skipped: 0, total: 4 }); + vi.mocked(storageModule.previewImportAccountsWithExistingStorage).mockReset(); + vi.mocked(storageModule.previewImportAccountsWithExistingStorage).mockResolvedValue({ + imported: 2, + skipped: 0, + total: 4, + }); + vi.mocked(storageModule.importAccounts).mockReset(); + 
vi.mocked(storageModule.importAccounts).mockResolvedValue({ + imported: 2, + skipped: 0, + total: 4, + backupStatus: "created", + backupPath: "/tmp/codex-multi-auth-sync-backup.json", + }); + vi.mocked(storageModule.loadAccounts).mockReset(); + vi.mocked(storageModule.loadAccounts).mockResolvedValue(defaultTransactionalStorage()); + vi.mocked(storageModule.normalizeAccountStorage).mockReset(); + vi.mocked(storageModule.normalizeAccountStorage).mockImplementation((value: unknown) => value as never); + vi.mocked(storageModule.withAccountStorageTransaction).mockReset(); + vi.mocked(storageModule.withAccountStorageTransaction).mockImplementation(async (handler) => + handler(defaultTransactionalStorage(), vi.fn(async () => {})), + ); + delete process.env.CODEX_MULTI_AUTH_DIR; + delete process.env.CODEX_HOME; + }); + + afterEach(() => { + if (originalEnv.CODEX_MULTI_AUTH_DIR === undefined) delete process.env.CODEX_MULTI_AUTH_DIR; + else process.env.CODEX_MULTI_AUTH_DIR = originalEnv.CODEX_MULTI_AUTH_DIR; + if (originalEnv.CODEX_HOME === undefined) delete process.env.CODEX_HOME; + else process.env.CODEX_HOME = originalEnv.CODEX_HOME; + if (originalEnv.USERPROFILE === undefined) delete process.env.USERPROFILE; + else process.env.USERPROFILE = originalEnv.USERPROFILE; + if (originalEnv.HOME === undefined) delete process.env.HOME; + else process.env.HOME = originalEnv.HOME; + delete process.env.CODEX_AUTH_SYNC_MAX_ACCOUNTS; + }); + + it("prefers a project-scoped codex-multi-auth accounts file when present", async () => { + const rootDir = join(process.cwd(), ".tmp-codex-multi-auth"); + process.env.CODEX_MULTI_AUTH_DIR = rootDir; + const projectRoot = findProjectRoot(process.cwd()) ?? 
process.cwd(); + const projectKey = getProjectStorageKey(projectRoot); + const projectPath = join(rootDir, "projects", projectKey, "openai-codex-accounts.json"); + const globalPath = join(rootDir, "openai-codex-accounts.json"); + const repoPackageJson = join(process.cwd(), "package.json"); + + mockExistsSync.mockImplementation((candidate) => { + return ( + String(candidate) === projectPath || + String(candidate) === globalPath || + String(candidate) === repoPackageJson + ); + }); + + const { resolveCodexMultiAuthAccountsSource } = await import("../lib/codex-multi-auth-sync.js"); + const resolved = resolveCodexMultiAuthAccountsSource(process.cwd()); + + expect(resolved).toEqual({ + rootDir, + accountsPath: projectPath, + scope: "project", + }); + }); + + it("falls back to the global accounts file when no project-scoped file exists", async () => { + const rootDir = join(process.cwd(), ".tmp-codex-multi-auth"); + process.env.CODEX_MULTI_AUTH_DIR = rootDir; + const globalPath = join(rootDir, "openai-codex-accounts.json"); + mockExistsSync.mockImplementation((candidate) => String(candidate) === globalPath); + + const { resolveCodexMultiAuthAccountsSource } = await import("../lib/codex-multi-auth-sync.js"); + const resolved = resolveCodexMultiAuthAccountsSource(process.cwd()); + + expect(resolved).toEqual({ + rootDir, + accountsPath: globalPath, + scope: "global", + }); + }); + + it("probes the DevTools fallback root when no env override is set", async () => { + process.env.USERPROFILE = "C:\\Users\\tester"; + process.env.HOME = "C:\\Users\\tester"; + const devToolsGlobalPath = join( + "C:\\Users\\tester", + "DevTools", + "config", + "codex", + "multi-auth", + "openai-codex-accounts.json", + ); + mockExistsSync.mockImplementation((candidate) => String(candidate) === devToolsGlobalPath); + + const { getCodexMultiAuthSourceRootDir } = await import("../lib/codex-multi-auth-sync.js"); + expect(getCodexMultiAuthSourceRootDir()).toBe( + join("C:\\Users\\tester", "DevTools", 
"config", "codex", "multi-auth"), + ); + }); + + it("prefers the DevTools root over ~/.codex when CODEX_HOME is not set", async () => { + process.env.USERPROFILE = "C:\\Users\\tester"; + process.env.HOME = "C:\\Users\\tester"; + const devToolsGlobalPath = join( + "C:\\Users\\tester", + "DevTools", + "config", + "codex", + "multi-auth", + "openai-codex-accounts.json", + ); + const dotCodexGlobalPath = join( + "C:\\Users\\tester", + ".codex", + "multi-auth", + "openai-codex-accounts.json", + ); + mockExistsSync.mockImplementation((candidate) => { + const path = String(candidate); + return path === devToolsGlobalPath || path === dotCodexGlobalPath; + }); + + const { getCodexMultiAuthSourceRootDir } = await import("../lib/codex-multi-auth-sync.js"); + expect(getCodexMultiAuthSourceRootDir()).toBe( + join("C:\\Users\\tester", "DevTools", "config", "codex", "multi-auth"), + ); + }); + + it("skips WAL-only roots when a later candidate has a real accounts file", async () => { + process.env.USERPROFILE = "C:\\Users\\tester"; + process.env.HOME = "C:\\Users\\tester"; + process.env.CODEX_HOME = "C:\\Users\\tester\\.codex"; + const walOnlyPath = join( + "C:\\Users\\tester", + ".codex", + "multi-auth", + "openai-codex-accounts.json.wal", + ); + const laterRealJson = join( + "C:\\Users\\tester", + "DevTools", + "config", + "codex", + "multi-auth", + "openai-codex-accounts.json", + ); + mockExistsSync.mockImplementation((candidate) => { + const path = String(candidate); + return path === walOnlyPath || path === laterRealJson; + }); + + const { getCodexMultiAuthSourceRootDir } = await import("../lib/codex-multi-auth-sync.js"); + expect(getCodexMultiAuthSourceRootDir()).toBe( + join("C:\\Users\\tester", "DevTools", "config", "codex", "multi-auth"), + ); + }); + + it("delegates preview and apply to the existing importer", async () => { + const rootDir = join(process.cwd(), ".tmp-codex-multi-auth"); + process.env.CODEX_MULTI_AUTH_DIR = rootDir; + const globalPath = join(rootDir, 
"openai-codex-accounts.json"); + mockExistsSync.mockImplementation((candidate) => String(candidate) === globalPath); + mockSourceStorageFile(globalPath, + JSON.stringify({ + version: 3, + activeIndex: 0, + accounts: [{ refreshToken: "sync-refresh", addedAt: 1, lastUsed: 1 }], + }), + ); + + const storageModule = await import("../lib/storage.js"); + const { previewSyncFromCodexMultiAuth, syncFromCodexMultiAuth } = await import("../lib/codex-multi-auth-sync.js"); + + await expect(previewSyncFromCodexMultiAuth(process.cwd())).resolves.toMatchObject({ + accountsPath: globalPath, + imported: 2, + skipped: 0, + total: 4, + }); + await expect(syncFromCodexMultiAuth(process.cwd())).resolves.toMatchObject({ + accountsPath: globalPath, + imported: 2, + backupStatus: "created", + }); + + expect(vi.mocked(storageModule.previewImportAccountsWithExistingStorage)).toHaveBeenCalledWith( + expect.stringContaining("oc-chatgpt-multi-auth-sync-"), + expect.any(Object), + ); + expect(vi.mocked(storageModule.importAccounts)).toHaveBeenCalledWith( + expect.stringContaining("oc-chatgpt-multi-auth-sync-"), + { + preImportBackupPrefix: "codex-multi-auth-sync-backup", + backupMode: "required", + }, + expect.any(Function), + ); + }); + + it("rejects CODEX_MULTI_AUTH_DIR values that are not local absolute paths on Windows", async () => { + process.env.CODEX_MULTI_AUTH_DIR = "\\\\server\\share\\multi-auth"; + process.env.USERPROFILE = "C:\\Users\\tester"; + process.env.HOME = "C:\\Users\\tester"; + + const { getCodexMultiAuthSourceRootDir } = await import("../lib/codex-multi-auth-sync.js"); + expect(() => getCodexMultiAuthSourceRootDir()).toThrow(/local absolute path/i); + }); + + it("accepts extended-length local Windows paths for CODEX_MULTI_AUTH_DIR", async () => { + process.env.USERPROFILE = "C:\\Users\\tester"; + process.env.HOME = "C:\\Users\\tester"; + + const { getCodexMultiAuthSourceRootDir } = await import("../lib/codex-multi-auth-sync.js"); + + process.env.CODEX_MULTI_AUTH_DIR = 
"\\\\?\\C:\\Users\\tester\\multi-auth"; + expect(getCodexMultiAuthSourceRootDir()).toBe("\\\\?\\C:\\Users\\tester\\multi-auth"); + + process.env.CODEX_MULTI_AUTH_DIR = "\\\\.\\C:\\Users\\tester\\multi-auth"; + expect(getCodexMultiAuthSourceRootDir()).toBe("\\\\.\\C:\\Users\\tester\\multi-auth"); + }); + + it("rejects extended UNC Windows paths for CODEX_MULTI_AUTH_DIR", async () => { + process.env.CODEX_MULTI_AUTH_DIR = "\\\\?\\UNC\\server\\share\\multi-auth"; + process.env.USERPROFILE = "C:\\Users\\tester"; + process.env.HOME = "C:\\Users\\tester"; + + const { getCodexMultiAuthSourceRootDir } = await import("../lib/codex-multi-auth-sync.js"); + expect(() => getCodexMultiAuthSourceRootDir()).toThrow(/UNC network share/i); + }); + + it("keeps preview sync on the read-only path without the storage transaction lock", async () => { + const rootDir = join(process.cwd(), ".tmp-codex-multi-auth"); + process.env.CODEX_MULTI_AUTH_DIR = rootDir; + const globalPath = join(rootDir, "openai-codex-accounts.json"); + mockExistsSync.mockImplementation((candidate) => String(candidate) === globalPath); + mockSourceStorageFile( + globalPath, + JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [{ refreshToken: "sync-refresh", addedAt: 1, lastUsed: 1 }], + }), + ); + + const storageModule = await import("../lib/storage.js"); + vi.mocked(storageModule.withAccountStorageTransaction).mockImplementationOnce(async () => { + throw new Error("preview should not take write transaction lock"); + }); + + const { previewSyncFromCodexMultiAuth } = await import("../lib/codex-multi-auth-sync.js"); + await expect(previewSyncFromCodexMultiAuth(process.cwd())).resolves.toMatchObject({ + accountsPath: globalPath, + imported: 2, + }); + }); + + it("takes the same transaction-backed path for overlap cleanup preview as cleanup", async () => { + const storageModule = await import("../lib/storage.js"); + 
vi.mocked(storageModule.withAccountStorageTransaction).mockImplementationOnce(async (handler) => + handler( + { + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { + accountId: "org-sync", + organizationId: "org-sync", + accountIdSource: "org", + accountTags: ["codex-multi-auth-sync"], + refreshToken: "sync-token", + addedAt: 2, + lastUsed: 2, + }, + ], + }, + vi.fn(async () => {}), + ), + ); + vi.mocked(storageModule.loadAccounts).mockResolvedValue({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { + accountId: "org-sync", + organizationId: "org-sync", + accountIdSource: "org", + accountTags: ["codex-multi-auth-sync"], + refreshToken: "sync-token", + addedAt: 2, + lastUsed: 2, + }, + ], + }); + + const { previewCodexMultiAuthSyncedOverlapCleanup } = await import("../lib/codex-multi-auth-sync.js"); + await expect(previewCodexMultiAuthSyncedOverlapCleanup()).resolves.toEqual({ + before: 1, + after: 1, + removed: 0, + updated: 0, + }); + expect(storageModule.withAccountStorageTransaction).toHaveBeenCalledTimes(1); + expect(storageModule.loadAccounts).not.toHaveBeenCalled(); + }); + + it("uses a single account snapshot for preview capacity filtering and preview counts", async () => { + const rootDir = join(process.cwd(), ".tmp-codex-multi-auth"); + process.env.CODEX_MULTI_AUTH_DIR = rootDir; + const globalPath = join(rootDir, "openai-codex-accounts.json"); + mockExistsSync.mockImplementation((candidate) => String(candidate) === globalPath); + mockSourceStorageFile( + globalPath, + JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { email: "existing@example.com", refreshToken: "rt-source-1", addedAt: 1, lastUsed: 1 }, + { email: "new@example.com", refreshToken: "rt-source-2", addedAt: 2, lastUsed: 2 }, + ], + }), + ); + + const storageModule = await import("../lib/storage.js"); + const firstSnapshot = { + version: 3 as const, + activeIndex: 0, + activeIndexByFamily: {}, + 
accounts: [ + { + email: "existing@example.com", + refreshToken: "rt-existing", + addedAt: 10, + lastUsed: 10, + }, + ], + }; + const secondSnapshot = { + version: 3 as const, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [], + }; + vi.mocked(storageModule.loadAccounts) + .mockResolvedValueOnce(firstSnapshot) + .mockResolvedValueOnce(secondSnapshot); + vi.mocked(storageModule.previewImportAccountsWithExistingStorage).mockImplementationOnce(async (_filePath, existing) => { + expect(existing).toBe(firstSnapshot); + return { imported: 1, skipped: 0, total: 2 }; + }); + + const { previewSyncFromCodexMultiAuth } = await import("../lib/codex-multi-auth-sync.js"); + await expect(previewSyncFromCodexMultiAuth(process.cwd())).resolves.toMatchObject({ + accountsPath: globalPath, + imported: 1, + total: 2, + }); + expect(vi.mocked(storageModule.loadAccounts)).toHaveBeenCalledTimes(1); + }); + + it("reuses a previewed source snapshot during sync even if the source file changes later", async () => { + const rootDir = join(process.cwd(), ".tmp-codex-multi-auth"); + process.env.CODEX_MULTI_AUTH_DIR = rootDir; + const globalPath = join(rootDir, "openai-codex-accounts.json"); + mockExistsSync.mockImplementation((candidate) => String(candidate) === globalPath); + mockSourceStorageFile( + globalPath, + JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { accountId: "org-source-1", organizationId: "org-source-1", accountIdSource: "org", refreshToken: "rt-source-1", addedAt: 1, lastUsed: 1 }, + ], + }), + ); + + const storageModule = await import("../lib/storage.js"); + const syncModule = await import("../lib/codex-multi-auth-sync.js"); + const loadedSource = await syncModule.loadCodexMultiAuthSourceStorage(process.cwd()); + + mockSourceStorageFile( + globalPath, + JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { accountId: "org-source-1", organizationId: "org-source-1", accountIdSource: 
"org", refreshToken: "rt-source-1", addedAt: 1, lastUsed: 1 }, + { accountId: "org-source-2", organizationId: "org-source-2", accountIdSource: "org", refreshToken: "rt-source-2", addedAt: 2, lastUsed: 2 }, + ], + }), + ); + + vi.mocked(storageModule.previewImportAccountsWithExistingStorage).mockImplementationOnce(async (filePath) => { + const raw = await fs.promises.readFile(filePath, "utf8"); + const parsed = JSON.parse(raw) as { accounts: Array<{ refreshToken?: string }> }; + expect(parsed.accounts).toHaveLength(1); + expect(parsed.accounts[0]?.refreshToken).toBe("rt-source-1"); + return { imported: 1, skipped: 0, total: 1 }; + }); + vi.mocked(storageModule.importAccounts).mockImplementationOnce(async (filePath) => { + const raw = await fs.promises.readFile(filePath, "utf8"); + const parsed = JSON.parse(raw) as { accounts: Array<{ refreshToken?: string }> }; + expect(parsed.accounts).toHaveLength(1); + expect(parsed.accounts[0]?.refreshToken).toBe("rt-source-1"); + return { + imported: 1, + skipped: 0, + total: 1, + backupStatus: "created", + backupPath: "/tmp/codex-multi-auth-sync-backup.json", + }; + }); + + await expect(syncModule.previewSyncFromCodexMultiAuth(process.cwd(), loadedSource)).resolves.toMatchObject({ + accountsPath: globalPath, + imported: 1, + total: 1, + }); + await expect(syncModule.syncFromCodexMultiAuth(process.cwd(), loadedSource)).resolves.toMatchObject({ + accountsPath: globalPath, + imported: 1, + backupStatus: "created", + }); + }); + + it("uses the same locked raw storage snapshot for overlap preview as cleanup", async () => { + const storageModule = await import("../lib/storage.js"); + vi.mocked(storageModule.loadAccounts).mockResolvedValueOnce({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [], + }); + vi.mocked(storageModule.withAccountStorageTransaction).mockImplementationOnce(async (handler) => + handler( + { + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [], + }, + vi.fn(async () 
=> {}), + ), + ); + mockSourceStorageFile( + "/tmp/opencode-accounts.json", + JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { + accountId: "org-sync", + organizationId: "org-sync", + accountIdSource: "org", + accountTags: ["codex-multi-auth-sync"], + email: "sync@example.com", + refreshToken: "sync-token", + addedAt: 2, + lastUsed: 2, + }, + { + accountId: "org-sync", + accountIdSource: "org", + email: "sync@example.com", + refreshToken: "sync-token", + addedAt: 1, + lastUsed: 1, + }, + ], + }), + ); + + const { previewCodexMultiAuthSyncedOverlapCleanup } = await import("../lib/codex-multi-auth-sync.js"); + await expect(previewCodexMultiAuthSyncedOverlapCleanup()).resolves.toEqual({ + before: 2, + after: 1, + removed: 1, + updated: 0, + }); + expect(storageModule.withAccountStorageTransaction).toHaveBeenCalledTimes(1); + expect(storageModule.loadAccounts).not.toHaveBeenCalled(); + }); + + it("does not retry through a fallback temp directory when the handler throws", async () => { + const rootDir = join(process.cwd(), ".tmp-codex-multi-auth"); + process.env.CODEX_MULTI_AUTH_DIR = rootDir; + const globalPath = join(rootDir, "openai-codex-accounts.json"); + mockExistsSync.mockImplementation((candidate) => String(candidate) === globalPath); + mockSourceStorageFile(globalPath, + JSON.stringify({ + version: 3, + activeIndex: 0, + accounts: [{ refreshToken: "sync-refresh", addedAt: 1, lastUsed: 1 }], + }), + ); + + const storageModule = await import("../lib/storage.js"); + vi.mocked(storageModule.previewImportAccountsWithExistingStorage).mockRejectedValueOnce( + new Error("preview failed"), + ); + + const { previewSyncFromCodexMultiAuth } = await import("../lib/codex-multi-auth-sync.js"); + await expect(previewSyncFromCodexMultiAuth(process.cwd())).rejects.toThrow("preview failed"); + expect(vi.mocked(storageModule.previewImportAccountsWithExistingStorage)).toHaveBeenCalledTimes(1); + }); + + it("surfaces secure temp directory 
creation failures instead of falling back to system tmpdir", async () => { + const rootDir = join(process.cwd(), ".tmp-codex-multi-auth"); + process.env.CODEX_MULTI_AUTH_DIR = rootDir; + const globalPath = join(rootDir, "openai-codex-accounts.json"); + mockExistsSync.mockImplementation((candidate) => String(candidate) === globalPath); + mockSourceStorageFile(globalPath, + JSON.stringify({ + version: 3, + activeIndex: 0, + accounts: [{ refreshToken: "sync-refresh", addedAt: 1, lastUsed: 1 }], + }), + ); + + const mkdtempSpy = vi.spyOn(fs.promises, "mkdtemp").mockRejectedValue(new Error("mkdtemp failed")); + const storageModule = await import("../lib/storage.js"); + + try { + const { previewSyncFromCodexMultiAuth } = await import("../lib/codex-multi-auth-sync.js"); + await expect(previewSyncFromCodexMultiAuth(process.cwd())).rejects.toThrow("mkdtemp failed"); + expect(mkdtempSpy).toHaveBeenCalledTimes(1); + expect(vi.mocked(storageModule.previewImportAccountsWithExistingStorage)).not.toHaveBeenCalled(); + } finally { + mkdtempSpy.mockRestore(); + } + }); + + it("warns instead of failing when secure temp cleanup blocks preview cleanup", async () => { + const rootDir = join(process.cwd(), ".tmp-codex-multi-auth"); + process.env.CODEX_MULTI_AUTH_DIR = rootDir; + const globalPath = join(rootDir, "openai-codex-accounts.json"); + mockExistsSync.mockImplementation((candidate) => String(candidate) === globalPath); + mockSourceStorageFile(globalPath, + JSON.stringify({ + version: 3, + activeIndex: 0, + accounts: [{ refreshToken: "sync-refresh", addedAt: 1, lastUsed: 1 }], + }), + ); + + const rmSpy = vi.spyOn(fs.promises, "rm").mockRejectedValue(new Error("cleanup blocked")); + const loggerModule = await import("../lib/logger.js"); + + try { + const { previewSyncFromCodexMultiAuth } = await import("../lib/codex-multi-auth-sync.js"); + await expect(previewSyncFromCodexMultiAuth(process.cwd())).resolves.toMatchObject({ + accountsPath: globalPath, + imported: 2, + skipped: 0, + 
total: 4, + }); + expect(vi.mocked(loggerModule.logWarn)).toHaveBeenCalledWith( + expect.stringContaining("Failed to remove temporary codex sync directory"), + ); + } finally { + rmSpy.mockRestore(); + } + }); + + it.each(["EACCES", "EPERM"] as const)( + "retries Windows-style %s temp cleanup locks until they clear", + async (code) => { + const rootDir = join(process.cwd(), ".tmp-codex-multi-auth"); + process.env.CODEX_MULTI_AUTH_DIR = rootDir; + const globalPath = join(rootDir, "openai-codex-accounts.json"); + mockExistsSync.mockImplementation((candidate) => String(candidate) === globalPath); + mockSourceStorageFile( + globalPath, + JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [{ refreshToken: "sync-refresh", addedAt: 1, lastUsed: 1 }], + }), + ); + + const rmSpy = vi + .spyOn(fs.promises, "rm") + .mockRejectedValueOnce(Object.assign(new Error("permission denied"), { code })) + .mockRejectedValueOnce(Object.assign(new Error("permission denied"), { code })) + .mockResolvedValueOnce(undefined); + const loggerModule = await import("../lib/logger.js"); + + try { + const { previewSyncFromCodexMultiAuth } = await import("../lib/codex-multi-auth-sync.js"); + await expect(previewSyncFromCodexMultiAuth(process.cwd())).resolves.toMatchObject({ + accountsPath: globalPath, + imported: 2, + skipped: 0, + total: 4, + }); + expect(rmSpy).toHaveBeenCalledTimes(3); + expect(vi.mocked(loggerModule.logWarn)).not.toHaveBeenCalledWith( + expect.stringContaining("Failed to remove temporary codex sync directory"), + ); + } finally { + rmSpy.mockRestore(); + } + }, + ); + + it("fails fast on non-retryable temp cleanup errors", async () => { + const rootDir = join(process.cwd(), ".tmp-codex-multi-auth"); + process.env.CODEX_MULTI_AUTH_DIR = rootDir; + const globalPath = join(rootDir, "openai-codex-accounts.json"); + mockExistsSync.mockImplementation((candidate) => String(candidate) === globalPath); + mockSourceStorageFile( + globalPath, + 
JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [{ refreshToken: "sync-refresh", addedAt: 1, lastUsed: 1 }], + }), + ); + + const rmSpy = vi + .spyOn(fs.promises, "rm") + .mockRejectedValue(Object.assign(new Error("invalid temp dir"), { code: "EINVAL" })); + const loggerModule = await import("../lib/logger.js"); + + try { + const { previewSyncFromCodexMultiAuth } = await import("../lib/codex-multi-auth-sync.js"); + await expect(previewSyncFromCodexMultiAuth(process.cwd())).resolves.toMatchObject({ + accountsPath: globalPath, + imported: 2, + skipped: 0, + total: 4, + }); + expect(rmSpy.mock.calls.length).toBeGreaterThanOrEqual(1); + expect(vi.mocked(loggerModule.logWarn)).toHaveBeenCalledWith( + expect.stringContaining("Failed to remove temporary codex sync directory"), + ); + } finally { + rmSpy.mockRestore(); + } + }); + + it("retries Windows-style EBUSY temp cleanup until it succeeds", async () => { + const rootDir = join(process.cwd(), ".tmp-codex-multi-auth"); + process.env.CODEX_MULTI_AUTH_DIR = rootDir; + const globalPath = join(rootDir, "openai-codex-accounts.json"); + mockExistsSync.mockImplementation((candidate) => String(candidate) === globalPath); + mockSourceStorageFile( + globalPath, + JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [{ refreshToken: "sync-refresh", addedAt: 1, lastUsed: 1 }], + }), + ); + + const rmSpy = vi + .spyOn(fs.promises, "rm") + .mockRejectedValueOnce(Object.assign(new Error("busy"), { code: "EBUSY" })) + .mockRejectedValueOnce(Object.assign(new Error("busy"), { code: "EBUSY" })) + .mockResolvedValueOnce(undefined); + const loggerModule = await import("../lib/logger.js"); + + try { + const { previewSyncFromCodexMultiAuth } = await import("../lib/codex-multi-auth-sync.js"); + await expect(previewSyncFromCodexMultiAuth(process.cwd())).resolves.toMatchObject({ + accountsPath: globalPath, + imported: 2, + skipped: 0, + total: 4, + }); + 
expect(rmSpy).toHaveBeenCalledTimes(3); + expect(vi.mocked(loggerModule.logWarn)).not.toHaveBeenCalledWith( + expect.stringContaining("Failed to remove temporary codex sync directory"), + ); + } finally { + rmSpy.mockRestore(); + } + }); + + it.each(["EACCES", "EPERM", "EBUSY"] as const)( + "redacts temp tokens before warning when Windows-style %s cleanup exhausts retries", + async (code) => { + const rootDir = join(process.cwd(), ".tmp-codex-multi-auth"); + const fakeHome = await fs.promises.mkdtemp(join(os.tmpdir(), "codex-sync-home-")); + process.env.CODEX_MULTI_AUTH_DIR = rootDir; + process.env.HOME = fakeHome; + process.env.USERPROFILE = fakeHome; + const globalPath = join(rootDir, "openai-codex-accounts.json"); + const tempRoot = join(fakeHome, ".opencode", "tmp"); + mockExistsSync.mockImplementation((candidate) => String(candidate) === globalPath); + mockSourceStorageFile( + globalPath, + JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { + refreshToken: "sync-refresh-secret", + accessToken: "sync-access-secret", + idToken: "sync-id-secret", + addedAt: 1, + lastUsed: 1, + }, + ], + }), + ); + + const rmSpy = vi + .spyOn(fs.promises, "rm") + .mockRejectedValue(Object.assign(new Error("cleanup still locked"), { code })); + const loggerModule = await import("../lib/logger.js"); + + try { + const { previewSyncFromCodexMultiAuth } = await import("../lib/codex-multi-auth-sync.js"); + await expect(previewSyncFromCodexMultiAuth(process.cwd())).resolves.toMatchObject({ + accountsPath: globalPath, + imported: 2, + skipped: 0, + total: 4, + }); + expect(rmSpy).toHaveBeenCalledTimes(4); + expect(vi.mocked(loggerModule.logWarn)).toHaveBeenCalledWith( + expect.stringContaining("Failed to remove temporary codex sync directory"), + ); + + const tempEntries = await fs.promises.readdir(tempRoot, { withFileTypes: true }); + const syncDir = tempEntries.find( + (entry) => entry.isDirectory() && 
entry.name.startsWith("oc-chatgpt-multi-auth-sync-"), + ); + expect(syncDir).toBeDefined(); + const leakedTempPath = join(tempRoot, syncDir!.name, "accounts.json"); + const leakedContent = await fs.promises.readFile(leakedTempPath, "utf8"); + expect(leakedContent).not.toContain("sync-refresh-secret"); + expect(leakedContent).not.toContain("sync-access-secret"); + expect(leakedContent).not.toContain("sync-id-secret"); + expect(leakedContent).toContain("__redacted__"); + } finally { + rmSpy.mockRestore(); + await fs.promises.rm(fakeHome, { recursive: true, force: true }); + } + }, + ); + + it("finds the project-scoped codex-multi-auth source across same-repo worktrees", async () => { + const rootDir = join(process.cwd(), ".tmp-codex-multi-auth"); + process.env.CODEX_MULTI_AUTH_DIR = rootDir; + const mainWorktree = "C:\\Users\\neil\\DevTools\\oc-chatgpt-multi-auth"; + const branchWorktree = "C:\\Users\\neil\\DevTools\\oc-chatgpt-multi-auth-sync-worktree"; + const sharedGitFile = "gitdir: C:/Users/neil/DevTools/oc-chatgpt-multi-auth/.git/worktrees/feature-sync\n"; + const mainGitPath = join(mainWorktree, ".git"); + const branchGitPath = join(branchWorktree, ".git"); + let projectPath = ""; + + mockExistsSync.mockImplementation((candidate) => { + return ( + String(candidate) === projectPath || + String(candidate) === mainGitPath || + String(candidate) === branchGitPath + ); + }); + vi.mocked(fs.statSync).mockImplementation((candidate) => { + return { + isDirectory: () => String(candidate) === mainGitPath, + } as ReturnType<typeof fs.statSync>; + }); + vi.mocked(fs.readFileSync).mockImplementation((candidate) => { + if (String(candidate) === branchGitPath) { + return sharedGitFile; + } + throw new Error(`unexpected read: ${String(candidate)}`); + }); + const sharedProjectKey = getProjectStorageKeyCandidates(mainWorktree)[0]; + projectPath = join(rootDir, "projects", sharedProjectKey ??
"missing", "openai-codex-accounts.json"); + + const { resolveCodexMultiAuthAccountsSource } = await import("../lib/codex-multi-auth-sync.js"); + const resolved = resolveCodexMultiAuthAccountsSource(branchWorktree); + expect(resolved).toEqual({ + rootDir, + accountsPath: projectPath, + scope: "project", + }); + }); + + it("prefers a later root with project-scoped accounts over an earlier settings-only root", async () => { + process.env.USERPROFILE = "C:\\Users\\tester"; + process.env.HOME = "C:\\Users\\tester"; + const projectRoot = findProjectRoot(process.cwd()) ?? process.cwd(); + const candidateKey = getProjectStorageKeyCandidates(projectRoot)[0] ?? "missing"; + const firstRootSettings = join("C:\\Users\\tester", "DevTools", "config", "codex", "multi-auth", "settings.json"); + const secondProjectsDir = join("C:\\Users\\tester", ".codex", "multi-auth", "projects"); + const repoPackageJson = join(process.cwd(), "package.json"); + const secondProjectPath = join( + "C:\\Users\\tester", + ".codex", + "multi-auth", + "projects", + candidateKey, + "openai-codex-accounts.json", + ); + mockExistsSync.mockImplementation((candidate) => { + const pathValue = String(candidate); + return pathValue === firstRootSettings || pathValue === secondProjectPath || pathValue === repoPackageJson; + }); + mockReaddirSync.mockImplementation((candidate) => { + if (String(candidate) === secondProjectsDir) { + return [ + { + name: candidateKey, + isDirectory: () => true, + }, + ] as unknown as ReturnType<typeof fs.readdirSync>; + } + return []; + }); + + const { getCodexMultiAuthSourceRootDir, resolveCodexMultiAuthAccountsSource } = + await import("../lib/codex-multi-auth-sync.js"); + expect(getCodexMultiAuthSourceRootDir()).toBe(join("C:\\Users\\tester", ".codex", "multi-auth")); + const resolved = resolveCodexMultiAuthAccountsSource(process.cwd()); + expect(resolved).toEqual({ + rootDir: join("C:\\Users\\tester", ".codex", "multi-auth"), + accountsPath: secondProjectPath, + scope: "project", + }); + }); + + 
it("warns and returns preview results when secure temp cleanup leaves sync data on disk", async () => { + const rootDir = join(process.cwd(), ".tmp-codex-multi-auth"); + process.env.CODEX_MULTI_AUTH_DIR = rootDir; + const globalPath = join(rootDir, "openai-codex-accounts.json"); + mockExistsSync.mockImplementation((candidate) => String(candidate) === globalPath); + mockSourceStorageFile( + globalPath, + JSON.stringify({ + version: 3, + activeIndex: 0, + accounts: [{ refreshToken: "sync-refresh", addedAt: 1, lastUsed: 1 }], + }), + ); + + const rmSpy = vi.spyOn(fs.promises, "rm").mockRejectedValue(new Error("cleanup blocked")); + + try { + const { previewSyncFromCodexMultiAuth } = await import("../lib/codex-multi-auth-sync.js"); + await expect(previewSyncFromCodexMultiAuth(process.cwd())).resolves.toMatchObject({ + accountsPath: globalPath, + imported: 2, + skipped: 0, + total: 4, + }); + } finally { + rmSpy.mockRestore(); + } + }); + + it("sweeps stale sync temp directories before creating a new import temp dir", async () => { + const rootDir = join(process.cwd(), ".tmp-codex-multi-auth"); + const fakeHome = await fs.promises.mkdtemp(join(os.tmpdir(), "codex-sync-home-")); + process.env.CODEX_MULTI_AUTH_DIR = rootDir; + process.env.HOME = fakeHome; + process.env.USERPROFILE = fakeHome; + const globalPath = join(rootDir, "openai-codex-accounts.json"); + const tempRoot = join(fakeHome, ".opencode", "tmp"); + const staleDir = join(tempRoot, "oc-chatgpt-multi-auth-sync-stale-test"); + const staleFile = join(staleDir, "accounts.json"); + const recentDir = join(tempRoot, "oc-chatgpt-multi-auth-sync-recent-test"); + const recentFile = join(recentDir, "accounts.json"); + mockExistsSync.mockImplementation((candidate) => String(candidate) === globalPath); + mockSourceStorageFile( + globalPath, + JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [{ accountId: "org-source", organizationId: "org-source", refreshToken: "rt-source", addedAt: 
1, lastUsed: 1 }], + }), + ); + + try { + await fs.promises.mkdir(staleDir, { recursive: true }); + await fs.promises.writeFile(staleFile, "sensitive", "utf8"); + await fs.promises.mkdir(recentDir, { recursive: true }); + await fs.promises.writeFile(recentFile, "recent", "utf8"); + const oldTime = new Date(Date.now() - (15 * 60 * 1000)); + const recentTime = new Date(Date.now() - (2 * 60 * 1000)); + await fs.promises.utimes(staleDir, oldTime, oldTime); + await fs.promises.utimes(staleFile, oldTime, oldTime); + await fs.promises.utimes(recentDir, recentTime, recentTime); + await fs.promises.utimes(recentFile, recentTime, recentTime); + + const { previewSyncFromCodexMultiAuth } = await import("../lib/codex-multi-auth-sync.js"); + await expect(previewSyncFromCodexMultiAuth(process.cwd())).resolves.toMatchObject({ + rootDir, + accountsPath: globalPath, + scope: "global", + }); + + await expect(fs.promises.stat(staleDir)).rejects.toThrow(); + await expect(fs.promises.stat(recentDir)).resolves.toBeTruthy(); + } finally { + await fs.promises.rm(fakeHome, { recursive: true, force: true }); + } + }); + + it("retries stale temp sweep once on transient Windows lock errors", async () => { + const rootDir = join(process.cwd(), ".tmp-codex-multi-auth"); + const fakeHome = await fs.promises.mkdtemp(join(os.tmpdir(), "codex-sync-home-")); + process.env.CODEX_MULTI_AUTH_DIR = rootDir; + process.env.HOME = fakeHome; + process.env.USERPROFILE = fakeHome; + const globalPath = join(rootDir, "openai-codex-accounts.json"); + const tempRoot = join(fakeHome, ".opencode", "tmp"); + const staleDir = join(tempRoot, "oc-chatgpt-multi-auth-sync-stale-retry-test"); + const staleFile = join(staleDir, "accounts.json"); + mockExistsSync.mockImplementation((candidate) => String(candidate) === globalPath); + mockSourceStorageFile( + globalPath, + JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [{ accountId: "org-source", organizationId: "org-source", 
refreshToken: "rt-source", addedAt: 1, lastUsed: 1 }], + }), + ); + + const originalRm = fs.promises.rm.bind(fs.promises); + let staleSweepBlocked = false; + const rmSpy = vi.spyOn(fs.promises, "rm").mockImplementation(async (path, options) => { + if (!staleSweepBlocked && String(path) === staleDir) { + staleSweepBlocked = true; + throw Object.assign(new Error("busy"), { code: "EBUSY" }); + } + return originalRm(path, options as never); + }); + const loggerModule = await import("../lib/logger.js"); + + try { + await fs.promises.mkdir(staleDir, { recursive: true }); + await fs.promises.writeFile(staleFile, "sensitive", "utf8"); + const oldTime = new Date(Date.now() - (15 * 60 * 1000)); + await fs.promises.utimes(staleDir, oldTime, oldTime); + await fs.promises.utimes(staleFile, oldTime, oldTime); + + const { previewSyncFromCodexMultiAuth } = await import("../lib/codex-multi-auth-sync.js"); + await expect(previewSyncFromCodexMultiAuth(process.cwd())).resolves.toMatchObject({ + rootDir, + accountsPath: globalPath, + scope: "global", + }); + + expect(staleSweepBlocked).toBe(true); + expect(rmSpy.mock.calls.filter(([path]) => String(path) === staleDir)).toHaveLength(2); + expect(vi.mocked(loggerModule.logWarn)).not.toHaveBeenCalledWith( + expect.stringContaining("Failed to sweep stale codex sync temp directory"), + ); + await expect(fs.promises.stat(staleDir)).rejects.toThrow(); + } finally { + rmSpy.mockRestore(); + await fs.promises.rm(fakeHome, { recursive: true, force: true }); + } + }); + + it("skips source accounts whose emails already exist locally during sync", async () => { + const rootDir = join(process.cwd(), ".tmp-codex-multi-auth"); + process.env.CODEX_MULTI_AUTH_DIR = rootDir; + const globalPath = join(rootDir, "openai-codex-accounts.json"); + mockExistsSync.mockImplementation((candidate) => String(candidate) === globalPath); + mockSourceStorageFile(globalPath, + JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { 
+ accountId: "org-shared-a", + organizationId: "org-shared-a", + accountIdSource: "org", + email: "shared@example.com", + refreshToken: "rt-shared-a", + addedAt: 1, + lastUsed: 1, + }, + { + accountId: "org-shared-b", + organizationId: "org-shared-b", + accountIdSource: "org", + email: "shared@example.com", + refreshToken: "rt-shared-b", + addedAt: 2, + lastUsed: 2, + }, + { + accountId: "org-new", + organizationId: "org-new", + accountIdSource: "org", + email: "new@example.com", + refreshToken: "rt-new", + addedAt: 3, + lastUsed: 3, + }, + ], + }), + ); + + const storageModule = await import("../lib/storage.js"); + const currentStorage = { + version: 3 as const, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { + accountId: "org-existing", + organizationId: "org-existing", + accountIdSource: "org", + email: "shared@example.com", + refreshToken: "rt-existing", + addedAt: 10, + lastUsed: 10, + }, + ], + } satisfies AccountStorageV3; + vi.mocked(storageModule.loadAccounts).mockResolvedValue(currentStorage); + + vi.mocked(storageModule.previewImportAccountsWithExistingStorage).mockImplementationOnce(async (filePath) => { + const raw = await fs.promises.readFile(filePath, "utf8"); + const parsed = JSON.parse(raw) as { accounts: Array<{ email?: string }> }; + expect(parsed.accounts.map((account) => account.email)).toEqual([ + "new@example.com", + ]); + return { imported: 1, skipped: 0, total: 1 }; + }); + vi.mocked(storageModule.importAccounts).mockImplementationOnce(async (filePath, _options, prepare) => { + const raw = await fs.promises.readFile(filePath, "utf8"); + const parsed = JSON.parse(raw) as AccountStorageV3; + const prepared = prepare ? 
prepare(parsed, currentStorage) : parsed; + expect(prepared.accounts.map((account) => account.email)).toEqual([ + "new@example.com", + ]); + return { + imported: 1, + skipped: 0, + total: 1, + backupStatus: "created", + backupPath: "/tmp/filtered-sync-backup.json", + }; + }); + + const { previewSyncFromCodexMultiAuth, syncFromCodexMultiAuth } = await import("../lib/codex-multi-auth-sync.js"); + + await expect(previewSyncFromCodexMultiAuth(process.cwd())).resolves.toMatchObject({ + accountsPath: globalPath, + imported: 1, + total: 1, + skipped: 0, + }); + await expect(syncFromCodexMultiAuth(process.cwd())).resolves.toMatchObject({ + accountsPath: globalPath, + imported: 1, + total: 1, + skipped: 0, + }); + }); + + it("treats refresh tokens as case-sensitive identities during sync filtering", async () => { + const rootDir = join(process.cwd(), ".tmp-codex-multi-auth"); + process.env.CODEX_MULTI_AUTH_DIR = rootDir; + const globalPath = join(rootDir, "openai-codex-accounts.json"); + mockExistsSync.mockImplementation((candidate) => String(candidate) === globalPath); + mockSourceStorageFile( + globalPath, + JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { + refreshToken: "abc-token", + addedAt: 1, + lastUsed: 1, + }, + ], + }), + ); + + const storageModule = await import("../lib/storage.js"); + const currentStorage = { + version: 3 as const, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { + refreshToken: "ABC-token", + addedAt: 10, + lastUsed: 10, + }, + ], + } satisfies AccountStorageV3; + vi.mocked(storageModule.loadAccounts).mockResolvedValue(currentStorage); + vi.mocked(storageModule.previewImportAccountsWithExistingStorage).mockImplementationOnce(async (filePath) => { + const raw = await fs.promises.readFile(filePath, "utf8"); + const parsed = JSON.parse(raw) as { accounts: Array<{ refreshToken?: string }> }; + expect(parsed.accounts).toHaveLength(1); + 
expect(parsed.accounts[0]?.refreshToken).toBe("abc-token"); + return { imported: 1, skipped: 0, total: 2 }; + }); + + const { previewSyncFromCodexMultiAuth } = await import("../lib/codex-multi-auth-sync.js"); + await expect(previewSyncFromCodexMultiAuth(process.cwd())).resolves.toMatchObject({ + accountsPath: globalPath, + imported: 1, + total: 2, + skipped: 0, + }); + }); + + it("deduplicates email-less source accounts by identity before import", async () => { + const rootDir = join(process.cwd(), ".tmp-codex-multi-auth"); + process.env.CODEX_MULTI_AUTH_DIR = rootDir; + const globalPath = join(rootDir, "openai-codex-accounts.json"); + mockExistsSync.mockImplementation((candidate) => String(candidate) === globalPath); + mockSourceStorageFile(globalPath, + JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { + accountId: "org-shared", + organizationId: "org-shared", + accountIdSource: "org", + refreshToken: "rt-shared", + addedAt: 1, + lastUsed: 1, + }, + { + accountId: "org-shared", + organizationId: "org-shared", + accountIdSource: "org", + refreshToken: "rt-shared", + addedAt: 2, + lastUsed: 2, + }, + ], + }), + ); + + const storageModule = await import("../lib/storage.js"); + vi.mocked(storageModule.deduplicateAccounts).mockImplementationOnce((accounts) => [accounts[1]]); + vi.mocked(storageModule.previewImportAccountsWithExistingStorage).mockImplementationOnce(async (filePath) => { + const raw = await fs.promises.readFile(filePath, "utf8"); + const parsed = JSON.parse(raw) as { accounts: Array<{ refreshToken?: string }> }; + expect(parsed.accounts).toHaveLength(1); + expect(parsed.accounts[0]?.refreshToken).toBe("rt-shared"); + return { imported: 1, skipped: 0, total: 1 }; + }); + + const { previewSyncFromCodexMultiAuth } = await import("../lib/codex-multi-auth-sync.js"); + await expect(previewSyncFromCodexMultiAuth(process.cwd())).resolves.toMatchObject({ + accountsPath: globalPath, + imported: 1, + total: 1, + skipped: 
0, + }); + }); + + it("normalizes org-scoped source accounts to include organizationId before import", async () => { + const rootDir = join(process.cwd(), ".tmp-codex-multi-auth"); + process.env.CODEX_MULTI_AUTH_DIR = rootDir; + const globalPath = join(rootDir, "openai-codex-accounts.json"); + mockExistsSync.mockImplementation((candidate) => String(candidate) === globalPath); + mockSourceStorageFile(globalPath, + JSON.stringify({ + version: 3, + activeIndex: 0, + accounts: [ + { + accountId: "org-example123", + accountIdSource: "org", + refreshToken: "sync-refresh", + addedAt: 1, + lastUsed: 1, + }, + ], + }), + ); + + const { loadCodexMultiAuthSourceStorage } = await import("../lib/codex-multi-auth-sync.js"); + const resolved = await loadCodexMultiAuthSourceStorage(process.cwd()); + + expect(resolved.storage.accounts[0]?.organizationId).toBe("org-example123"); + }); + + it("throws for invalid JSON in the external accounts file", async () => { + const rootDir = join(process.cwd(), ".tmp-codex-multi-auth"); + process.env.CODEX_MULTI_AUTH_DIR = rootDir; + const globalPath = join(rootDir, "openai-codex-accounts.json"); + mockExistsSync.mockImplementation((candidate) => String(candidate) === globalPath); + mockSourceStorageFile(globalPath, "not valid json"); + + const { loadCodexMultiAuthSourceStorage } = await import("../lib/codex-multi-auth-sync.js"); + await expect(loadCodexMultiAuthSourceStorage(process.cwd())).rejects.toThrow(/Invalid JSON/); + }); + + it("enforces finite sync capacity override for prune-capable flows", async () => { + const rootDir = join(process.cwd(), ".tmp-codex-multi-auth"); + process.env.CODEX_MULTI_AUTH_DIR = rootDir; + process.env.CODEX_AUTH_SYNC_MAX_ACCOUNTS = "2"; + const globalPath = join(rootDir, "openai-codex-accounts.json"); + mockExistsSync.mockImplementation((candidate) => String(candidate) === globalPath); + mockSourceStorageFile(globalPath, + JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: 
[ + { + accountId: "org-new-1", + organizationId: "org-new-1", + accountIdSource: "org", + email: "new-1@example.com", + refreshToken: "rt-new-1", + addedAt: 1, + lastUsed: 1, + }, + { + accountId: "org-new-2", + organizationId: "org-new-2", + accountIdSource: "org", + email: "new-2@example.com", + refreshToken: "rt-new-2", + addedAt: 2, + lastUsed: 2, + }, + ], + }), + ); + + const storageModule = await import("../lib/storage.js"); + vi.mocked(storageModule.loadAccounts).mockResolvedValue({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { + accountId: "org-existing", + organizationId: "org-existing", + accountIdSource: "org", + email: "existing@example.com", + refreshToken: "rt-existing", + addedAt: 10, + lastUsed: 10, + }, + ], + }); + + const { previewSyncFromCodexMultiAuth, CodexMultiAuthSyncCapacityError } = await import("../lib/codex-multi-auth-sync.js"); + await expect(previewSyncFromCodexMultiAuth(process.cwd())).rejects.toBeInstanceOf( + CodexMultiAuthSyncCapacityError, + ); + }); + + it("enforces finite sync capacity override during apply", async () => { + const rootDir = join(process.cwd(), ".tmp-codex-multi-auth"); + process.env.CODEX_MULTI_AUTH_DIR = rootDir; + process.env.CODEX_AUTH_SYNC_MAX_ACCOUNTS = "2"; + const globalPath = join(rootDir, "openai-codex-accounts.json"); + mockExistsSync.mockImplementation((candidate) => String(candidate) === globalPath); + mockSourceStorageFile( + globalPath, + JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { + accountId: "org-new-1", + organizationId: "org-new-1", + accountIdSource: "org", + email: "new-1@example.com", + refreshToken: "rt-new-1", + addedAt: 1, + lastUsed: 1, + }, + { + accountId: "org-new-2", + organizationId: "org-new-2", + accountIdSource: "org", + email: "new-2@example.com", + refreshToken: "rt-new-2", + addedAt: 2, + lastUsed: 2, + }, + ], + }), + ); + + const storageModule = await import("../lib/storage.js"); + const 
currentStorage = { + version: 3 as const, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { + accountId: "org-existing", + organizationId: "org-existing", + accountIdSource: "org", + email: "existing@example.com", + refreshToken: "rt-existing", + addedAt: 10, + lastUsed: 10, + }, + ], + } satisfies AccountStorageV3; + vi.mocked(storageModule.importAccounts).mockImplementationOnce(async (filePath, _options, prepare) => { + const raw = await fs.promises.readFile(filePath, "utf8"); + const parsed = JSON.parse(raw) as AccountStorageV3; + if (prepare) { + prepare(parsed, currentStorage); + } + return { + imported: 2, + skipped: 0, + total: 4, + backupStatus: "created", + backupPath: "/tmp/codex-multi-auth-sync-backup.json", + }; + }); + + const { syncFromCodexMultiAuth, CodexMultiAuthSyncCapacityError } = await import("../lib/codex-multi-auth-sync.js"); + await expect(syncFromCodexMultiAuth(process.cwd())).rejects.toBeInstanceOf( + CodexMultiAuthSyncCapacityError, + ); + }); + + it("ignores a zero sync capacity override and warns instead of disabling sync", async () => { + const rootDir = join(process.cwd(), ".tmp-codex-multi-auth"); + process.env.CODEX_MULTI_AUTH_DIR = rootDir; + process.env.CODEX_AUTH_SYNC_MAX_ACCOUNTS = "0"; + const globalPath = join(rootDir, "openai-codex-accounts.json"); + mockExistsSync.mockImplementation((candidate) => String(candidate) === globalPath); + mockSourceStorageFile( + globalPath, + JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { + accountId: "org-new-1", + organizationId: "org-new-1", + accountIdSource: "org", + email: "new-1@example.com", + refreshToken: "rt-new-1", + addedAt: 1, + lastUsed: 1, + }, + ], + }), + ); + + const loggerModule = await import("../lib/logger.js"); + const { previewSyncFromCodexMultiAuth } = await import("../lib/codex-multi-auth-sync.js"); + + await expect(previewSyncFromCodexMultiAuth(process.cwd())).resolves.toMatchObject({ + accountsPath: 
globalPath, + }); + expect(vi.mocked(loggerModule.logWarn)).toHaveBeenCalledWith( + expect.stringContaining('CODEX_AUTH_SYNC_MAX_ACCOUNTS override value "0" is not a positive finite number; ignoring.'), + ); + }); + + it("reports when the source alone exceeds a finite sync capacity", async () => { + const rootDir = join(process.cwd(), ".tmp-codex-multi-auth"); + process.env.CODEX_MULTI_AUTH_DIR = rootDir; + process.env.CODEX_AUTH_SYNC_MAX_ACCOUNTS = "2"; + const globalPath = join(rootDir, "openai-codex-accounts.json"); + mockExistsSync.mockImplementation((candidate) => String(candidate) === globalPath); + mockSourceStorageFile( + globalPath, + JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { + accountId: "org-new-1", + organizationId: "org-new-1", + accountIdSource: "org", + email: "new-1@example.com", + refreshToken: "rt-new-1", + addedAt: 1, + lastUsed: 1, + }, + { + accountId: "org-new-2", + organizationId: "org-new-2", + accountIdSource: "org", + email: "new-2@example.com", + refreshToken: "rt-new-2", + addedAt: 2, + lastUsed: 2, + }, + { + accountId: "org-new-3", + organizationId: "org-new-3", + accountIdSource: "org", + email: "new-3@example.com", + refreshToken: "rt-new-3", + addedAt: 3, + lastUsed: 3, + }, + ], + }), + ); + + const storageModule = await import("../lib/storage.js"); + vi.mocked(storageModule.loadAccounts).mockResolvedValue({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [], + }); + + const { previewSyncFromCodexMultiAuth, CodexMultiAuthSyncCapacityError } = await import("../lib/codex-multi-auth-sync.js"); + let thrown: unknown; + try { + await previewSyncFromCodexMultiAuth(process.cwd()); + } catch (error) { + thrown = error; + } + expect(thrown).toBeInstanceOf(CodexMultiAuthSyncCapacityError); + expect(thrown).toMatchObject({ + name: "CodexMultiAuthSyncCapacityError", + details: expect.objectContaining({ + sourceDedupedTotal: 3, + importableNewAccounts: 0, + needToRemove: 
1, + suggestedRemovals: [], + }), + }); + }); + + it("cleans up tagged synced overlaps by normalizing org-scoped identities first", async () => { + const storageModule = await import("../lib/storage.js"); + vi.mocked(storageModule.withAccountStorageTransaction).mockImplementationOnce(async (handler) => + handler( + { + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { + accountId: "org-example123", + accountIdSource: "org", + accountTags: ["codex-multi-auth-sync"], + refreshToken: "sync-refresh", + addedAt: 1, + lastUsed: 1, + }, + { + accountId: "org-example123", + organizationId: "org-example123", + accountIdSource: "org", + accountTags: ["codex-multi-auth-sync"], + refreshToken: "sync-refresh", + addedAt: 2, + lastUsed: 2, + }, + ], + }, + vi.fn(async () => {}), + ), + ); + vi.mocked(storageModule.normalizeAccountStorage).mockImplementationOnce((value: unknown): AccountStorageV3 => { + const record = value as { + version: 3; + activeIndex: number; + activeIndexByFamily: Record; + accounts: AccountStorageV3["accounts"]; + }; + return { + ...record, + accounts: [record.accounts[1]], + }; + }); + const { cleanupCodexMultiAuthSyncedOverlaps } = await import("../lib/codex-multi-auth-sync.js"); + await expect(cleanupCodexMultiAuthSyncedOverlaps()).resolves.toEqual({ + before: 2, + after: 1, + removed: 1, + updated: 0, + }); + }); + + it("reads the raw storage file so duplicate tagged rows are removed from disk", async () => { + const storageModule = await import("../lib/storage.js"); + const persist = vi.fn(async (_next: AccountStorageV3) => {}); + vi.mocked(storageModule.withAccountStorageTransaction).mockImplementationOnce(async (handler) => + handler( + { + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { + accountId: "org-sync", + organizationId: "org-sync", + accountIdSource: "org", + accountTags: ["codex-multi-auth-sync"], + email: "sync@example.com", + refreshToken: "sync-token", + addedAt: 2, + lastUsed: 2, + 
}, + ], + }, + persist, + ), + ); + mockSourceStorageFile( + "/tmp/opencode-accounts.json", + JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { + accountId: "org-sync", + accountIdSource: "org", + accountTags: ["codex-multi-auth-sync"], + email: "sync@example.com", + refreshToken: "sync-token", + addedAt: 1, + lastUsed: 1, + }, + { + accountId: "org-sync", + organizationId: "org-sync", + accountIdSource: "org", + accountTags: ["codex-multi-auth-sync"], + email: "sync@example.com", + refreshToken: "sync-token", + addedAt: 2, + lastUsed: 2, + }, + ], + }), + ); + vi.mocked(storageModule.normalizeAccountStorage).mockImplementationOnce((value: unknown): AccountStorageV3 => { + const record = value as { + version: 3; + activeIndex: number; + activeIndexByFamily: Record; + accounts: AccountStorageV3["accounts"]; + }; + return { + ...record, + accounts: [record.accounts[1]], + }; + }); + + const { cleanupCodexMultiAuthSyncedOverlaps } = await import("../lib/codex-multi-auth-sync.js"); + await expect(cleanupCodexMultiAuthSyncedOverlaps()).resolves.toEqual({ + before: 2, + after: 1, + removed: 1, + updated: 0, + }); + const saved = persist.mock.calls[0]?.[0]; + if (!saved) { + throw new Error("Expected persisted overlap cleanup result"); + } + expect(saved.accounts).toHaveLength(1); + expect(saved.accounts[0]?.organizationId).toBe("org-sync"); + }); + + it("does not count synced overlap records as updated when only key order differs", async () => { + const storageModule = await import("../lib/storage.js"); + const persist = vi.fn(async () => {}); + vi.mocked(storageModule.withAccountStorageTransaction).mockImplementationOnce(async (handler) => + handler( + { + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { + refreshToken: "sync-token", + accountTags: ["codex-multi-auth-sync"], + organizationId: "org-sync", + accountId: "org-sync", + accountIdSource: "org", + addedAt: 2, + lastUsed: 2, + }, + ], + }, + 
persist, + ), + ); + mockSourceStorageFile( + "/tmp/opencode-accounts.json", + JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { + accountId: "org-sync", + organizationId: "org-sync", + accountIdSource: "org", + accountTags: ["codex-multi-auth-sync"], + refreshToken: "sync-token", + addedAt: 2, + lastUsed: 2, + }, + ], + }), + ); + + const { cleanupCodexMultiAuthSyncedOverlaps } = await import("../lib/codex-multi-auth-sync.js"); + await expect(cleanupCodexMultiAuthSyncedOverlaps()).resolves.toEqual({ + before: 1, + after: 1, + removed: 0, + updated: 0, + }); + expect(persist).not.toHaveBeenCalled(); + }); + + it("migrates v1 raw overlap snapshots without collapsing duplicate tagged rows before cleanup", async () => { + const storageModule = await import("../lib/storage.js"); + const persist = vi.fn(async (_next: AccountStorageV3) => {}); + vi.mocked(storageModule.withAccountStorageTransaction).mockImplementationOnce(async (handler) => + handler( + { + version: 3, + activeIndex: 0, + activeIndexByFamily: { codex: 0 }, + accounts: [ + { + accountId: "org-sync", + accountIdSource: "org", + accountTags: ["codex-multi-auth-sync"], + refreshToken: "sync-token", + addedAt: 1, + lastUsed: 1, + }, + ], + }, + persist, + ), + ); + mockSourceStorageFile( + "/tmp/opencode-accounts.json", + JSON.stringify({ + version: 1, + activeIndex: 1, + accounts: [ + { + accountId: "org-sync", + accountIdSource: "org", + accountTags: ["codex-multi-auth-sync"], + refreshToken: "sync-token", + addedAt: 1, + lastUsed: 1, + }, + { + accountId: "org-sync", + organizationId: "org-sync", + accountIdSource: "org", + accountTags: ["codex-multi-auth-sync"], + refreshToken: "sync-token", + addedAt: 2, + lastUsed: 2, + }, + ], + }), + ); + vi.mocked(storageModule.normalizeAccountStorage).mockImplementationOnce((value: unknown): AccountStorageV3 => { + const record = value as { + version: 3; + activeIndex: number; + activeIndexByFamily: Record; + accounts: 
AccountStorageV3["accounts"]; + }; + return { + ...record, + accounts: [record.accounts[1]], + }; + }); + + const { cleanupCodexMultiAuthSyncedOverlaps } = await import("../lib/codex-multi-auth-sync.js"); + await expect(cleanupCodexMultiAuthSyncedOverlaps()).resolves.toEqual({ + before: 2, + after: 1, + removed: 1, + updated: 0, + }); + const saved = persist.mock.calls[0]?.[0]; + if (!saved) { + throw new Error("Expected persisted overlap cleanup result"); + } + expect(saved.accounts).toHaveLength(1); + expect(saved.accounts[0]?.organizationId).toBe("org-sync"); + expect(saved.activeIndexByFamily?.codex).toBe(0); + }); + + it("falls back to in-memory overlap cleanup state on transient Windows lock errors", async () => { + const storageModule = await import("../lib/storage.js"); + const loggerModule = await import("../lib/logger.js"); + const persist = vi.fn(async (_next: AccountStorageV3) => {}); + vi.mocked(storageModule.deduplicateAccounts).mockImplementationOnce((accounts) => { + return accounts.length > 1 ? [accounts[1] ?? 
accounts[0]].filter(Boolean) : accounts; + }); + vi.mocked(storageModule.withAccountStorageTransaction).mockImplementationOnce(async (handler) => + handler( + { + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { + accountId: "org-sync", + accountIdSource: "org", + accountTags: ["codex-multi-auth-sync"], + refreshToken: "sync-token", + addedAt: 1, + lastUsed: 1, + }, + { + accountId: "org-sync", + organizationId: "org-sync", + accountIdSource: "org", + accountTags: ["codex-multi-auth-sync"], + refreshToken: "sync-token", + addedAt: 2, + lastUsed: 2, + }, + ], + }, + persist, + ), + ); + mockReadFile.mockRejectedValueOnce(Object.assign(new Error("busy"), { code: "EBUSY" })); + const storagePath = await import("../lib/storage.js"); + vi.mocked(storagePath.getStoragePath).mockReturnValueOnce("/tmp/opencode-accounts.json"); + + const { cleanupCodexMultiAuthSyncedOverlaps } = await import("../lib/codex-multi-auth-sync.js"); + await expect(cleanupCodexMultiAuthSyncedOverlaps()).resolves.toEqual({ + before: 2, + after: 1, + removed: 1, + updated: 0, + }); + const saved = persist.mock.calls[0]?.[0]; + if (!saved) { + throw new Error("Expected persisted overlap cleanup result"); + } + expect(saved.accounts).toHaveLength(1); + expect(saved.accounts[0]?.organizationId).toBe("org-sync"); + expect(vi.mocked(loggerModule.logWarn)).toHaveBeenCalledWith( + expect.stringContaining("raw storage snapshot for synced overlap cleanup (EBUSY)"), + ); + }); + + it("limits overlap cleanup to accounts tagged from codex-multi-auth sync", async () => { + const storageModule = await import("../lib/storage.js"); + vi.mocked(storageModule.withAccountStorageTransaction).mockImplementationOnce(async (handler) => + handler( + { + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { + refreshToken: "legacy-a", + email: "shared@example.com", + addedAt: 1, + lastUsed: 1, + }, + { + refreshToken: "legacy-b", + email: "shared@example.com", + addedAt: 2, + 
lastUsed: 2, + }, + { + accountId: "org-sync", + organizationId: "org-sync", + accountIdSource: "org", + accountTags: ["codex-multi-auth-sync"], + email: "sync@example.com", + refreshToken: "sync-token", + addedAt: 3, + lastUsed: 3, + }, + { + accountId: "org-sync", + organizationId: "org-sync", + accountIdSource: "org", + accountTags: ["codex-multi-auth-sync"], + email: "sync@example.com", + refreshToken: "sync-token", + addedAt: 4, + lastUsed: 4, + }, + ], + }, + vi.fn(async () => {}), + ), + ); + + const { cleanupCodexMultiAuthSyncedOverlaps } = await import("../lib/codex-multi-auth-sync.js"); + await expect(cleanupCodexMultiAuthSyncedOverlaps()).resolves.toEqual({ + before: 4, + after: 4, + removed: 0, + updated: 1, + }); + }); + + it("removes synced accounts that overlap preserved local accounts", async () => { + const storageModule = await import("../lib/storage.js"); + const persist = vi.fn(async (_next: AccountStorageV3) => {}); + vi.mocked(storageModule.withAccountStorageTransaction).mockImplementationOnce(async (handler) => + handler( + { + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { + accountId: "org-local", + organizationId: "org-local", + accountIdSource: "org", + email: "shared@example.com", + refreshToken: "rt-local", + addedAt: 5, + lastUsed: 5, + }, + { + accountId: "org-sync", + organizationId: "org-sync", + accountIdSource: "org", + accountTags: ["codex-multi-auth-sync"], + email: "shared@example.com", + refreshToken: "rt-sync", + addedAt: 4, + lastUsed: 4, + }, + ], + }, + persist, + ), + ); + + const { cleanupCodexMultiAuthSyncedOverlaps } = await import("../lib/codex-multi-auth-sync.js"); + await expect(cleanupCodexMultiAuthSyncedOverlaps()).resolves.toEqual({ + before: 2, + after: 1, + removed: 1, + updated: 0, + }); + const saved = persist.mock.calls[0]?.[0]; + if (!saved) { + throw new Error("Expected persisted overlap cleanup result"); + } + expect(saved.accounts).toHaveLength(1); + 
expect(saved.accounts[0]?.accountId).toBe("org-local"); + }); + + it("remaps active indices when synced overlap cleanup reorders accounts", async () => { + const storageModule = await import("../lib/storage.js"); + const persist = vi.fn(async (_next: AccountStorageV3) => {}); + vi.mocked(storageModule.withAccountStorageTransaction).mockImplementationOnce(async (handler) => + handler( + { + version: 3, + activeIndex: 0, + activeIndexByFamily: { codex: 0 }, + accounts: [ + { + accountId: "org-sync", + organizationId: "org-sync", + accountIdSource: "org", + accountTags: ["codex-multi-auth-sync"], + email: "sync@example.com", + refreshToken: "sync-token", + addedAt: 3, + lastUsed: 3, + }, + { + accountId: "org-local", + organizationId: "org-local", + accountIdSource: "org", + email: "local@example.com", + refreshToken: "local-token", + addedAt: 4, + lastUsed: 4, + }, + ], + }, + persist, + ), + ); + + const { cleanupCodexMultiAuthSyncedOverlaps } = await import("../lib/codex-multi-auth-sync.js"); + await cleanupCodexMultiAuthSyncedOverlaps(); + + const saved = persist.mock.calls[0]?.[0]; + if (!saved) { + throw new Error("Expected persisted overlap cleanup result"); + } + expect(saved.accounts.map((account) => account.accountId)).toEqual(["org-local", "org-sync"]); + expect(saved.activeIndex).toBe(1); + expect(saved.activeIndexByFamily?.codex).toBe(1); + }); + + it("does not block preview when account limit is unlimited", async () => { + const rootDir = join(process.cwd(), ".tmp-codex-multi-auth"); + process.env.CODEX_MULTI_AUTH_DIR = rootDir; + const globalPath = join(rootDir, "openai-codex-accounts.json"); + mockExistsSync.mockImplementation((candidate) => String(candidate) === globalPath); + mockSourceStorageFile(globalPath, + JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { + accountId: "org-new-1", + organizationId: "org-new-1", + accountIdSource: "org", + email: "new1@example.com", + refreshToken: "rt-new-1", + 
addedAt: 1, + lastUsed: 1, + }, + { + accountId: "org-new-2", + organizationId: "org-new-2", + accountIdSource: "org", + email: "new2@example.com", + refreshToken: "rt-new-2", + addedAt: 2, + lastUsed: 2, + }, + ], + }), + ); + + const storageModule = await import("../lib/storage.js"); + vi.mocked(storageModule.withAccountStorageTransaction).mockImplementationOnce(async (handler) => + handler( + { + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: Array.from({ length: 19 }, (_, index) => ({ + accountId: `org-existing-${index + 1}`, + organizationId: `org-existing-${index + 1}`, + accountIdSource: "org", + email: `existing${index + 1}@example.com`, + refreshToken: `rt-existing-${index + 1}`, + addedAt: index + 1, + lastUsed: index + 1, + })), + }, + vi.fn(async () => {}), + ), + ); + + const { previewSyncFromCodexMultiAuth } = await import("../lib/codex-multi-auth-sync.js"); + + await expect(previewSyncFromCodexMultiAuth(process.cwd())).resolves.toMatchObject({ + accountsPath: globalPath, + imported: 2, + total: 4, + skipped: 0, + }); + }); + + it("warns instead of failing when post-success temp cleanup cannot remove sync data", async () => { + const rootDir = join(process.cwd(), ".tmp-codex-multi-auth"); + process.env.CODEX_MULTI_AUTH_DIR = rootDir; + const globalPath = join(rootDir, "openai-codex-accounts.json"); + mockExistsSync.mockImplementation((candidate) => String(candidate) === globalPath); + mockSourceStorageFile( + globalPath, + JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [{ refreshToken: "sync-refresh", addedAt: 1, lastUsed: 1 }], + }), + ); + const rmSpy = vi.spyOn(fs.promises, "rm").mockRejectedValue(new Error("rm failed")); + const loggerModule = await import("../lib/logger.js"); + const storageModule = await import("../lib/storage.js"); + try { + const { syncFromCodexMultiAuth } = await import("../lib/codex-multi-auth-sync.js"); + + await 
expect(syncFromCodexMultiAuth(process.cwd())).resolves.toMatchObject({ + accountsPath: globalPath, + imported: 2, + backupStatus: "created", + }); + expect(vi.mocked(loggerModule.logWarn)).toHaveBeenCalledWith( + expect.stringContaining("Failed to remove temporary codex sync directory"), + ); + } finally { + rmSpy.mockRestore(); + } + }); + + it("does not block source-only imports above the old cap when limit is unlimited", async () => { + const rootDir = join(process.cwd(), ".tmp-codex-multi-auth"); + process.env.CODEX_MULTI_AUTH_DIR = rootDir; + const globalPath = join(rootDir, "openai-codex-accounts.json"); + mockExistsSync.mockImplementation((candidate) => String(candidate) === globalPath); + mockSourceStorageFile(globalPath, + JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: Array.from({ length: 21 }, (_, index) => ({ + accountId: `org-source-${index + 1}`, + organizationId: `org-source-${index + 1}`, + accountIdSource: "org", + email: `source${index + 1}@example.com`, + refreshToken: `rt-source-${index + 1}`, + addedAt: index + 1, + lastUsed: index + 1, + })), + }), + ); + + const storageModule = await import("../lib/storage.js"); + vi.mocked(storageModule.loadAccounts).mockResolvedValue({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { + accountId: "org-local", + organizationId: "org-local", + accountIdSource: "org", + email: "local@example.com", + refreshToken: "rt-local", + addedAt: 1, + lastUsed: 1, + }, + ], + }); + vi.mocked(storageModule.previewImportAccountsWithExistingStorage).mockImplementationOnce(async (filePath) => { + const raw = await fs.promises.readFile(filePath, "utf8"); + const parsed = JSON.parse(raw) as { accounts: Array<{ accountId?: string }> }; + expect(parsed.accounts).toHaveLength(21); + expect(parsed.accounts[0]?.accountId).toBe("org-source-1"); + expect(parsed.accounts[20]?.accountId).toBe("org-source-21"); + return { imported: 21, skipped: 0, total: 22 }; + }); + 
+ const { previewSyncFromCodexMultiAuth } = await import("../lib/codex-multi-auth-sync.js"); + + await expect(previewSyncFromCodexMultiAuth(process.cwd())).resolves.toMatchObject({ + accountsPath: globalPath, + imported: 21, + total: 22, + skipped: 0, + }); + }); + + it("does not produce capacity errors for large existing stores when unlimited", async () => { + const rootDir = join(process.cwd(), ".tmp-codex-multi-auth"); + process.env.CODEX_MULTI_AUTH_DIR = rootDir; + const globalPath = join(rootDir, "openai-codex-accounts.json"); + mockExistsSync.mockImplementation((candidate) => String(candidate) === globalPath); + mockSourceStorageFile(globalPath, + JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: Array.from({ length: 50 }, (_, index) => ({ + accountId: `org-source-${index + 1}`, + organizationId: `org-source-${index + 1}`, + accountIdSource: "org", + email: `source${index + 1}@example.com`, + refreshToken: `rt-source-${index + 1}`, + addedAt: index + 1, + lastUsed: index + 1, + })), + }), + ); + const storageModule = await import("../lib/storage.js"); + vi.mocked(storageModule.previewImportAccountsWithExistingStorage).mockImplementationOnce(async (filePath) => { + const raw = await fs.promises.readFile(filePath, "utf8"); + const parsed = JSON.parse(raw) as { accounts: Array<{ accountId?: string }> }; + expect(parsed.accounts).toHaveLength(50); + expect(parsed.accounts[0]?.accountId).toBe("org-source-1"); + expect(parsed.accounts[49]?.accountId).toBe("org-source-50"); + return { imported: 50, skipped: 0, total: 52 }; + }); + + const { previewSyncFromCodexMultiAuth } = await import("../lib/codex-multi-auth-sync.js"); + await expect(previewSyncFromCodexMultiAuth(process.cwd())).resolves.toMatchObject({ + accountsPath: globalPath, + imported: 50, + total: 52, + skipped: 0, + }); + }); +}); diff --git a/test/index.test.ts b/test/index.test.ts index daf55c6c..2f91546a 100644 --- a/test/index.test.ts +++ b/test/index.test.ts @@ 
-1,4 +1,18 @@ import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"; +import { promises as fs } from "node:fs"; +import { tmpdir } from "node:os"; +import { join } from "node:path"; + +const readlineMocks = vi.hoisted(() => { + const question = vi.fn(async () => undefined); + const close = vi.fn(); + const createInterface = vi.fn(() => ({ question, close })); + return { question, close, createInterface }; +}); + +vi.mock("node:readline/promises", () => ({ + createInterface: readlineMocks.createInterface, +})); vi.mock("@opencode-ai/plugin/tool", () => { const makeSchema = () => ({ @@ -77,6 +91,11 @@ vi.mock("../lib/auth/server.js", () => ({ vi.mock("../lib/cli.js", () => ({ promptLoginMode: vi.fn(async () => ({ mode: "add" })), promptAddAnotherAccount: vi.fn(async () => false), + promptCodexMultiAuthSyncPrune: vi.fn(async () => null), +})); + +vi.mock("../lib/ui/confirm.js", () => ({ + confirm: vi.fn(async () => true), })); vi.mock("../lib/config.js", () => ({ @@ -109,7 +128,9 @@ vi.mock("../lib/config.js", () => ({ getCodexTuiColorProfile: () => "ansi16", getCodexTuiGlyphMode: () => "ascii", getBeginnerSafeMode: () => false, - loadPluginConfig: () => ({}), + getSyncFromCodexMultiAuthEnabled: vi.fn(() => false), + loadPluginConfig: vi.fn(() => ({})), + setSyncFromCodexMultiAuthEnabled: vi.fn(), })); vi.mock("../lib/request/request-transformer.js", () => ({ @@ -169,6 +190,70 @@ vi.mock("../lib/recovery.js", () => ({ getRecoveryToastContent: () => ({ title: "Error", message: "Test" }), })); +vi.mock("../lib/codex-multi-auth-sync.js", () => ({ + CodexMultiAuthSyncCapacityError: class CodexMultiAuthSyncCapacityError extends Error { + details: Record; + + constructor(details: Record) { + super("capacity"); + this.name = "CodexMultiAuthSyncCapacityError"; + this.details = details; + } + }, + previewSyncFromCodexMultiAuth: vi.fn(async () => ({ + rootDir: "/tmp/codex-root", + accountsPath: "/tmp/codex-root/openai-codex-accounts.json", + scope: "global", 
+ imported: 2, + skipped: 0, + total: 4, + })), + previewCodexMultiAuthSyncedOverlapCleanup: vi.fn(async () => ({ + before: 0, + after: 0, + removed: 0, + updated: 0, + })), + loadCodexMultiAuthSourceStorage: vi.fn(async () => ({ + rootDir: "/tmp/codex-root", + accountsPath: "/tmp/codex-root/openai-codex-accounts.json", + scope: "global", + storage: { + version: 3 as const, + accounts: [], + activeIndex: 0, + activeIndexByFamily: {}, + }, + })), + syncFromCodexMultiAuth: vi.fn(async () => ({ + rootDir: "/tmp/codex-root", + accountsPath: "/tmp/codex-root/openai-codex-accounts.json", + scope: "global", + imported: 2, + skipped: 0, + total: 4, + backupStatus: "created", + backupPath: "/tmp/codex-sync-backup.json", + })), + cleanupCodexMultiAuthSyncedOverlaps: vi.fn(async () => ({ + before: 0, + after: 0, + removed: 0, + updated: 0, + })), + isCodexMultiAuthSourceTooLargeForCapacity: vi.fn( + (details: { sourceDedupedTotal?: number; maxAccounts?: number; importableNewAccounts?: number; suggestedRemovals?: unknown[] }) => + Boolean( + typeof details.sourceDedupedTotal === "number" && + typeof details.maxAccounts === "number" && + details.sourceDedupedTotal > details.maxAccounts && + details.importableNewAccounts === 0 && + Array.isArray(details.suggestedRemovals) && + details.suggestedRemovals.length === 0, + ), + ), +})); + vi.mock("../lib/request/rate-limit-backoff.js", () => ({ getRateLimitBackoff: () => ({ attempt: 1, delayMs: 1000 }), RATE_LIMIT_SHORT_RETRY_THRESHOLD_MS: 5000, @@ -215,6 +300,16 @@ const mockStorage = { activeIndexByFamily: {} as Record, }; +const mockFlaggedStorage = { + version: 1 as const, + accounts: [] as Array<{ + refreshToken: string; + organizationId?: string; + accountId?: string; + flaggedAt: number; + }>, +}; + const cloneAccount = (account: (typeof mockStorage.accounts)[number]) => structuredClone(account); const cloneMockStorage = () => ({ @@ -249,8 +344,49 @@ vi.mock("../lib/storage.js", () => ({ return await callback(loadedStorage, 
persist); }, ), + loadAccountAndFlaggedStorageSnapshot: vi.fn(async () => ({ + accounts: { + ...mockStorage, + accounts: mockStorage.accounts.map((account) => ({ ...account })), + activeIndexByFamily: { ...mockStorage.activeIndexByFamily }, + }, + flagged: { + version: 1, + accounts: mockFlaggedStorage.accounts.map((account) => ({ ...account })), + }, + })), + withFlaggedAccountsTransaction: vi.fn( + async ( + callback: ( + current: typeof mockFlaggedStorage, + persist: (nextStorage: typeof mockFlaggedStorage) => Promise, + ) => Promise, + ) => { + const loadedStorage = { + version: 1 as const, + accounts: mockFlaggedStorage.accounts.map((account) => ({ ...account })), + }; + const persist = async (nextStorage: typeof mockFlaggedStorage) => { + mockFlaggedStorage.accounts = nextStorage.accounts.map((account) => ({ ...account })); + const storageModule = await import("../lib/storage.js"); + await vi.mocked(storageModule.saveFlaggedAccounts)(nextStorage); + }; + await callback(loadedStorage, persist); + }, + ), + cleanupDuplicateEmailAccounts: vi.fn(async () => ({ + before: 0, + after: 0, + removed: 0, + })), + previewDuplicateEmailCleanup: vi.fn(async () => ({ + before: 0, + after: 0, + removed: 0, + })), clearAccounts: vi.fn(async () => {}), setStoragePath: vi.fn(), + backupRawAccountsFile: vi.fn(async () => {}), exportAccounts: vi.fn(async () => {}), importAccounts: vi.fn(async () => ({ imported: 2, @@ -261,9 +397,15 @@ vi.mock("../lib/storage.js", () => ({ })), previewImportAccounts: vi.fn(async () => ({ imported: 2, skipped: 1, total: 5 })), createTimestampedBackupPath: vi.fn((prefix?: string) => `/tmp/${prefix ?? 
"codex-backup"}-20260101-000000.json`), - loadFlaggedAccounts: vi.fn(async () => ({ version: 1, accounts: [] })), - saveFlaggedAccounts: vi.fn(async () => {}), + loadFlaggedAccounts: vi.fn(async () => ({ + version: 1, + accounts: mockFlaggedStorage.accounts.map((account) => ({ ...account })), + })), + saveFlaggedAccounts: vi.fn(async (storage: typeof mockFlaggedStorage) => { + mockFlaggedStorage.accounts = storage.accounts.map((account) => ({ ...account })); + }), clearFlaggedAccounts: vi.fn(async () => {}), + normalizeAccountStorage: vi.fn((value: unknown) => value), StorageError: class StorageError extends Error { hint: string; constructor(message: string, hint: string) { @@ -427,17 +569,67 @@ const createMockClient = () => ({ session: { prompt: vi.fn() }, }); +const withInteractiveTerminal = async (run: (context: { + writeSpy: ReturnType; + setRawMode: ReturnType; + readSpy: ReturnType; +}) => Promise) => { + const stdinIsTTY = Object.getOwnPropertyDescriptor(process.stdin, "isTTY"); + const stdinIsRaw = Object.getOwnPropertyDescriptor(process.stdin, "isRaw"); + const stdoutIsTTY = Object.getOwnPropertyDescriptor(process.stdout, "isTTY"); + const stdoutRows = Object.getOwnPropertyDescriptor(process.stdout, "rows"); + const originalSetRawMode = (process.stdin as NodeJS.ReadStream & { setRawMode?: (value: boolean) => void }).setRawMode; + const setRawMode = vi.fn(); + const writeSpy = vi.spyOn(process.stdout, "write").mockReturnValue(true); + const readSpy = vi.spyOn(process.stdin, "read").mockReturnValue(null); + Object.defineProperty(process.stdin, "isTTY", { value: true, configurable: true }); + Object.defineProperty(process.stdin, "isRaw", { value: false, configurable: true, writable: true }); + Object.defineProperty(process.stdout, "isTTY", { value: true, configurable: true }); + Object.defineProperty(process.stdout, "rows", { value: 24, configurable: true }); + (process.stdin as NodeJS.ReadStream & { setRawMode?: (value: boolean) => void }).setRawMode = 
setRawMode; + + try { + await run({ writeSpy, setRawMode, readSpy }); + } finally { + writeSpy.mockRestore(); + readSpy.mockRestore(); + if (stdinIsTTY) { + Object.defineProperty(process.stdin, "isTTY", stdinIsTTY); + } else { + delete (process.stdin as NodeJS.ReadStream & { isTTY?: boolean }).isTTY; + } + if (stdinIsRaw) { + Object.defineProperty(process.stdin, "isRaw", stdinIsRaw); + } else { + delete (process.stdin as NodeJS.ReadStream & { isRaw?: boolean }).isRaw; + } + if (stdoutIsTTY) { + Object.defineProperty(process.stdout, "isTTY", stdoutIsTTY); + } else { + delete (process.stdout as NodeJS.WriteStream & { isTTY?: boolean }).isTTY; + } + if (stdoutRows) { + Object.defineProperty(process.stdout, "rows", stdoutRows); + } else { + delete (process.stdout as NodeJS.WriteStream & { rows?: number }).rows; + } + (process.stdin as NodeJS.ReadStream & { setRawMode?: (value: boolean) => void }).setRawMode = originalSetRawMode; + } +}; + describe("OpenAIOAuthPlugin", () => { let plugin: PluginType; let mockClient: ReturnType; beforeEach(async () => { vi.clearAllMocks(); + readlineMocks.question.mockResolvedValue(undefined); mockClient = createMockClient(); mockStorage.accounts = []; mockStorage.activeIndex = 0; mockStorage.activeIndexByFamily = {}; + mockFlaggedStorage.accounts = []; const { OpenAIOAuthPlugin } = await import("../index.js"); plugin = await OpenAIOAuthPlugin({ client: mockClient } as never) as unknown as PluginType; @@ -482,8 +674,8 @@ describe("OpenAIOAuthPlugin", () => { it("has two auth methods", () => { expect(plugin.auth.methods).toHaveLength(2); - expect(plugin.auth.methods[0].label).toBe("ChatGPT Plus/Pro MULTI (Codex Subscription)"); - expect(plugin.auth.methods[1].label).toBe("ChatGPT Plus/Pro MULTI (Manual URL Paste)"); + expect(plugin.auth.methods[0].label).toBe("ChatGPT Plus/Pro (Browser Login)"); + expect(plugin.auth.methods[1].label).toBe("ChatGPT Plus/Pro (Manual Paste)"); }); it("rejects manual OAuth callbacks with mismatched state", 
async () => { @@ -3303,7 +3495,7 @@ describe("OpenAIOAuthPlugin persistAccountPool", () => { const authResult = await autoMethod.authorize(); expect(authResult.instructions).toBe("Authentication cancelled"); - expect(vi.mocked(refreshQueueModule.queuedRefresh)).toHaveBeenCalledTimes(1); + expect(vi.mocked(refreshQueueModule.queuedRefresh)).toHaveBeenCalledTimes(2); expect(mockStorage.accounts).toHaveLength(2); expect(new Set(mockStorage.accounts.map((account) => account.organizationId))).toEqual( new Set(["org-cache", "org-refresh"]), @@ -3313,6 +3505,1623 @@ describe("OpenAIOAuthPlugin persistAccountPool", () => { accounts: [], }); }); + + it("hydrates shared-email cached tokens only for the flagged account whose accountId matches the token", async () => { + const cliModule = await import("../lib/cli.js"); + const storageModule = await import("../lib/storage.js"); + const accountsModule = await import("../lib/accounts.js"); + const refreshQueueModule = await import("../lib/refresh-queue.js"); + + const flaggedAccounts = [ + { + refreshToken: "flagged-refresh-a", + organizationId: "org-shared-a", + accountId: "shared-a", + accountIdSource: "manual", + accountLabel: "Shared Workspace A", + email: "shared@example.com", + flaggedAt: Date.now() - 1000, + addedAt: Date.now() - 1000, + lastUsed: Date.now() - 1000, + }, + { + refreshToken: "flagged-refresh-b", + organizationId: "org-shared-b", + accountId: "shared-b", + accountIdSource: "manual", + accountLabel: "Shared Workspace B", + email: "shared@example.com", + flaggedAt: Date.now() - 500, + addedAt: Date.now() - 500, + lastUsed: Date.now() - 500, + }, + ]; + + vi.mocked(cliModule.promptLoginMode) + .mockResolvedValueOnce({ mode: "verify-flagged" }) + .mockResolvedValueOnce({ mode: "cancel" }); + + vi.mocked(storageModule.loadFlaggedAccounts) + .mockResolvedValueOnce({ + version: 1, + accounts: flaggedAccounts, + }) + .mockResolvedValueOnce({ + version: 1, + accounts: flaggedAccounts, + }) + .mockResolvedValueOnce({ 
+ version: 1, + accounts: [], + }); + + vi.mocked(accountsModule.lookupCodexCliTokensByEmail).mockImplementation(async (email) => { + if (email === "shared@example.com") { + return { + accessToken: "cached-access-b", + refreshToken: "cached-refresh-b", + expiresAt: Date.now() + 60_000, + }; + } + return null; + }); + vi.mocked(accountsModule.extractAccountId).mockImplementation((token) => { + if (token === "cached-access-b") return "shared-b"; + if (token === "live-access-a") return "shared-a"; + return "account-1"; + }); + vi.mocked(accountsModule.extractAccountEmail).mockImplementation((accessToken) => { + if (accessToken === "cached-access-b" || accessToken === "live-access-a") { + return "shared@example.com"; + } + return "user@example.com"; + }); + vi.mocked(accountsModule.getAccountIdCandidates).mockReturnValue([ + { + accountId: "shared-b", + source: "token", + label: "Shared Workspace B [id:shared-b]", + }, + ]); + vi.mocked(accountsModule.selectBestAccountCandidate).mockImplementation( + (candidates) => candidates[0] ?? 
null, + ); + vi.mocked(refreshQueueModule.queuedRefresh).mockResolvedValueOnce({ + type: "success", + access: "live-access-a", + refresh: "live-refresh-a", + expires: Date.now() + 60_000, + }); + + const mockClient = createMockClient(); + const { OpenAIOAuthPlugin } = await import("../index.js"); + const plugin = (await OpenAIOAuthPlugin({ + client: mockClient, + } as never)) as unknown as PluginType; + const autoMethod = plugin.auth.methods[0] as unknown as { + authorize: (inputs?: Record) => Promise<{ instructions: string }>; + }; + + const authResult = await autoMethod.authorize(); + expect(authResult.instructions).toBe("Authentication cancelled"); + expect(vi.mocked(refreshQueueModule.queuedRefresh)).toHaveBeenCalledTimes(1); + expect(mockStorage.accounts).toHaveLength(2); + expect( + mockStorage.accounts.find((account) => account.organizationId === "org-shared-a")?.refreshToken, + ).toBe("live-refresh-a"); + expect( + mockStorage.accounts.find((account) => account.organizationId === "org-shared-b")?.refreshToken, + ).toBe("cached-refresh-b"); + }); + + it("reloads storage after synced overlap cleanup before persisting auto-repair refreshes", async () => { + const cliModule = await import("../lib/cli.js"); + const storageModule = await import("../lib/storage.js"); + const syncModule = await import("../lib/codex-multi-auth-sync.js"); + + mockStorage.accounts = [ + { + accountId: "org-keep", + organizationId: "org-keep", + accountIdSource: "org", + email: "keep@example.com", + refreshToken: "refresh-keep", + }, + { + accountId: "org-overlap", + organizationId: "org-overlap", + accountIdSource: "org", + email: "overlap@example.com", + refreshToken: "refresh-overlap", + }, + ]; + mockStorage.activeIndex = 0; + mockStorage.activeIndexByFamily = {}; + + vi.mocked(cliModule.promptLoginMode) + .mockResolvedValueOnce({ mode: "fix" }) + .mockResolvedValueOnce({ mode: "cancel" }); + + vi.mocked(syncModule.cleanupCodexMultiAuthSyncedOverlaps).mockImplementationOnce(async 
() => { + mockStorage.accounts = [mockStorage.accounts[0]].filter(Boolean) as typeof mockStorage.accounts; + return { + before: 2, + after: 1, + removed: 1, + updated: 0, + }; + }); + + vi.mocked(storageModule.loadAccounts).mockImplementation(async () => ({ + version: mockStorage.version, + activeIndex: mockStorage.activeIndex, + activeIndexByFamily: { ...mockStorage.activeIndexByFamily }, + accounts: mockStorage.accounts.map((account) => ({ ...account })), + })); + vi.mocked(storageModule.saveAccounts).mockImplementation(async (nextStorage) => { + if (nextStorage.accounts.length === 2) { + throw new Error("stale pre-cleanup storage was persisted"); + } + mockStorage.version = nextStorage.version; + mockStorage.activeIndex = nextStorage.activeIndex; + mockStorage.activeIndexByFamily = { ...nextStorage.activeIndexByFamily }; + mockStorage.accounts = nextStorage.accounts.map((account) => ({ ...account })); + }); + + const mockClient = createMockClient(); + const { OpenAIOAuthPlugin } = await import("../index.js"); + const plugin = (await OpenAIOAuthPlugin({ client: mockClient } as never)) as unknown as PluginType; + const autoMethod = plugin.auth.methods[0] as unknown as { + authorize: (inputs?: Record) => Promise<{ instructions: string }>; + }; + + const authResult = await autoMethod.authorize(); + expect(authResult.instructions).toBe("Authentication cancelled"); + expect(vi.mocked(storageModule.createTimestampedBackupPath)).toHaveBeenCalledWith( + "codex-auto-repair-backup", + ); + expect(vi.mocked(storageModule.backupRawAccountsFile)).toHaveBeenCalledWith( + "/tmp/codex-auto-repair-backup-20260101-000000.json", + true, + ); + expect(mockStorage.accounts).toHaveLength(1); + expect(mockStorage.accounts[0]?.email).toBe("keep@example.com"); + }); + + it("supports add-account flow when max accounts is unlimited", async () => { + const mockClient = createMockClient(); + const { OpenAIOAuthPlugin } = await import("../index.js"); + const plugin = await OpenAIOAuthPlugin({ 
client: mockClient } as never) as unknown as PluginType; + const autoMethod = plugin.auth.methods[0] as unknown as { + authorize: (inputs?: Record) => Promise<{ method: string; instructions: string }>; + }; + + await expect( + autoMethod.authorize({ loginMode: "add", accountCount: "101" }), + ).resolves.toMatchObject({ + method: "auto", + }); + }); + + it("runs legacy duplicate email cleanup from maintenance settings with confirmation and backup", async () => { + const cliModule = await import("../lib/cli.js"); + const storageModule = await import("../lib/storage.js"); + const confirmModule = await import("../lib/ui/confirm.js"); + + mockStorage.accounts = [ + { + email: "shared@example.com", + refreshToken: "refresh-older", + lastUsed: 1, + }, + { + email: "shared@example.com", + refreshToken: "refresh-newer", + lastUsed: 2, + }, + ]; + mockStorage.activeIndex = 0; + mockStorage.activeIndexByFamily = {}; + + vi.mocked(cliModule.promptLoginMode) + .mockResolvedValueOnce({ mode: "maintenance-clean-duplicate-emails" }) + .mockResolvedValueOnce({ mode: "cancel" }); + vi.mocked(storageModule.previewDuplicateEmailCleanup).mockResolvedValueOnce({ + before: 2, + after: 1, + removed: 1, + }); + vi.mocked(confirmModule.confirm).mockResolvedValueOnce(true); + + vi.mocked(storageModule.cleanupDuplicateEmailAccounts).mockImplementationOnce(async () => { + mockStorage.accounts = [mockStorage.accounts[1]].filter(Boolean) as typeof mockStorage.accounts; + mockStorage.activeIndex = 0; + mockStorage.activeIndexByFamily = {}; + return { + before: 2, + after: 1, + removed: 1, + }; + }); + + const mockClient = createMockClient(); + const { OpenAIOAuthPlugin } = await import("../index.js"); + const plugin = (await OpenAIOAuthPlugin({ client: mockClient } as never)) as unknown as PluginType; + const autoMethod = plugin.auth.methods[0] as unknown as { + authorize: (inputs?: Record) => Promise<{ instructions: string }>; + }; + + const authResult = await autoMethod.authorize(); + 
expect(authResult.instructions).toBe("Authentication cancelled"); + expect(vi.mocked(storageModule.previewDuplicateEmailCleanup)).toHaveBeenCalledTimes(1); + expect(vi.mocked(confirmModule.confirm)).toHaveBeenCalledWith( + "Create a backup and remove 1 legacy duplicate-email account(s)?", + ); + expect(vi.mocked(storageModule.createTimestampedBackupPath)).toHaveBeenCalledWith( + "codex-maintenance-duplicate-email-backup", + ); + expect(vi.mocked(storageModule.backupRawAccountsFile)).toHaveBeenCalledWith( + "/tmp/codex-maintenance-duplicate-email-backup-20260101-000000.json", + true, + ); + expect(vi.mocked(storageModule.cleanupDuplicateEmailAccounts)).toHaveBeenCalledTimes(1); + expect(mockStorage.accounts).toHaveLength(1); + expect(mockStorage.accounts[0]?.refreshToken).toBe("refresh-newer"); + }); + + it("runs synced overlap cleanup from maintenance settings with confirmation and backup", async () => { + const cliModule = await import("../lib/cli.js"); + const storageModule = await import("../lib/storage.js"); + const syncModule = await import("../lib/codex-multi-auth-sync.js"); + const confirmModule = await import("../lib/ui/confirm.js"); + + mockStorage.accounts = [ + { + accountId: "org-local", + organizationId: "org-local", + accountIdSource: "org", + email: "local@example.com", + refreshToken: "refresh-local", + }, + { + accountId: "org-sync", + organizationId: "org-sync", + accountIdSource: "org", + email: "sync@example.com", + refreshToken: "refresh-sync", + }, + ]; + + vi.mocked(cliModule.promptLoginMode) + .mockResolvedValueOnce({ mode: "experimental-cleanup-overlaps" }) + .mockResolvedValueOnce({ mode: "cancel" }); + vi.mocked(syncModule.previewCodexMultiAuthSyncedOverlapCleanup).mockResolvedValueOnce({ + before: 2, + after: 1, + removed: 1, + updated: 0, + }); + vi.mocked(confirmModule.confirm).mockResolvedValueOnce(true); + vi.mocked(syncModule.cleanupCodexMultiAuthSyncedOverlaps).mockImplementationOnce(async () => { + mockStorage.accounts = 
[mockStorage.accounts[0]].filter(Boolean) as typeof mockStorage.accounts; + return { + before: 2, + after: 1, + removed: 1, + updated: 0, + }; + }); + + const mockClient = createMockClient(); + const { OpenAIOAuthPlugin } = await import("../index.js"); + const plugin = (await OpenAIOAuthPlugin({ client: mockClient } as never)) as unknown as PluginType; + const autoMethod = plugin.auth.methods[0] as unknown as { + authorize: (inputs?: Record) => Promise<{ instructions: string }>; + }; + + const authResult = await autoMethod.authorize(); + expect(authResult.instructions).toBe("Authentication cancelled"); + expect(vi.mocked(syncModule.previewCodexMultiAuthSyncedOverlapCleanup)).toHaveBeenCalledTimes(1); + expect(vi.mocked(confirmModule.confirm)).toHaveBeenCalledWith( + "Create a backup and apply synced overlap cleanup?", + ); + expect(vi.mocked(storageModule.createTimestampedBackupPath)).toHaveBeenCalledWith( + "codex-maintenance-overlap-backup", + ); + expect(vi.mocked(storageModule.backupRawAccountsFile)).toHaveBeenCalledWith( + "/tmp/codex-maintenance-overlap-backup-20260101-000000.json", + true, + ); + expect(vi.mocked(syncModule.cleanupCodexMultiAuthSyncedOverlaps)).toHaveBeenCalledTimes(1); + expect(mockStorage.accounts).toHaveLength(1); + expect(mockStorage.accounts[0]?.accountId).toBe("org-local"); + }); + + it("runs best-account selection from the dashboard forecast action", async () => { + const cliModule = await import("../lib/cli.js"); + const storageModule = await import("../lib/storage.js"); + + mockStorage.accounts = [ + { + accountId: "org-primary", + organizationId: "org-primary", + accountIdSource: "org", + email: "primary@example.com", + refreshToken: "refresh-primary", + }, + ]; + mockStorage.activeIndex = 0; + mockStorage.activeIndexByFamily = {}; + + vi.mocked(cliModule.promptLoginMode) + .mockResolvedValueOnce({ mode: "forecast" }) + .mockResolvedValueOnce({ mode: "cancel" }); + + const mockClient = createMockClient(); + const { 
OpenAIOAuthPlugin } = await import("../index.js"); + const plugin = (await OpenAIOAuthPlugin({ client: mockClient } as never)) as unknown as PluginType; + const autoMethod = plugin.auth.methods[0] as unknown as { + authorize: (inputs?: Record) => Promise<{ instructions: string }>; + }; + + const authResult = await autoMethod.authorize(); + expect(authResult.instructions).toBe("Authentication cancelled"); + expect(vi.mocked(storageModule.withAccountStorageTransaction)).toHaveBeenCalled(); + expect(mockStorage.activeIndex).toBe(0); + expect(mockStorage.activeIndexByFamily.codex).toBe(0); + }); + + it("renders and auto-closes the forecast operation screen in interactive terminals", async () => { + const cliModule = await import("../lib/cli.js"); + const storageModule = await import("../lib/storage.js"); + const { ANSI } = await import("../lib/ui/ansi.js"); + + mockStorage.accounts = [ + { + accountId: "org-primary", + organizationId: "org-primary", + accountIdSource: "org", + email: "primary@example.com", + refreshToken: "refresh-primary", + }, + ]; + mockStorage.activeIndex = 0; + mockStorage.activeIndexByFamily = {}; + + vi.mocked(cliModule.promptLoginMode) + .mockResolvedValueOnce({ mode: "forecast" }) + .mockResolvedValueOnce({ mode: "cancel" }); + await withInteractiveTerminal(async ({ writeSpy, setRawMode }) => { + vi.useFakeTimers(); + try { + const mockClient = createMockClient(); + const { OpenAIOAuthPlugin } = await import("../index.js"); + const plugin = (await OpenAIOAuthPlugin({ client: mockClient } as never)) as unknown as PluginType; + const autoMethod = plugin.auth.methods[0] as unknown as { + authorize: (inputs?: Record) => Promise<{ instructions: string }>; + }; + + const resultPromise = autoMethod.authorize(); + await vi.runAllTimersAsync(); + const authResult = await resultPromise; + expect(authResult.instructions).toBe("Authentication cancelled"); + expect(writeSpy).toHaveBeenCalledWith(expect.stringContaining(ANSI.altScreenOn)); + 
expect(writeSpy).toHaveBeenCalledWith(expect.stringContaining(ANSI.altScreenOff)); + expect(setRawMode).toHaveBeenCalledWith(true); + expect(setRawMode).toHaveBeenLastCalledWith(false); + expect(readlineMocks.createInterface).not.toHaveBeenCalled(); + } finally { + vi.useRealTimers(); + } + }); + }); + + it("closes the interactive operation screen after a failed forecast action", async () => { + const cliModule = await import("../lib/cli.js"); + const storageModule = await import("../lib/storage.js"); + const { ANSI } = await import("../lib/ui/ansi.js"); + + mockStorage.accounts = [ + { + accountId: "org-primary", + organizationId: "org-primary", + accountIdSource: "org", + email: "primary@example.com", + refreshToken: "refresh-primary", + }, + ]; + mockStorage.activeIndex = 0; + mockStorage.activeIndexByFamily = {}; + + vi.mocked(cliModule.promptLoginMode) + .mockResolvedValueOnce({ mode: "forecast" }) + .mockResolvedValueOnce({ mode: "cancel" }); + vi.mocked(storageModule.withAccountStorageTransaction).mockRejectedValueOnce(new Error("save failed")); + + await withInteractiveTerminal(async ({ writeSpy }) => { + const mockClient = createMockClient(); + const { OpenAIOAuthPlugin } = await import("../index.js"); + const plugin = (await OpenAIOAuthPlugin({ client: mockClient } as never)) as unknown as PluginType; + const autoMethod = plugin.auth.methods[0] as unknown as { + authorize: (inputs?: Record) => Promise<{ instructions: string }>; + }; + + const authResult = await autoMethod.authorize(); + expect(authResult.instructions).toBe("Authentication cancelled"); + expect(readlineMocks.createInterface).toHaveBeenCalled(); + expect(readlineMocks.question).toHaveBeenCalled(); + expect(readlineMocks.close).toHaveBeenCalled(); + expect(writeSpy).toHaveBeenCalledWith(expect.stringContaining(ANSI.altScreenOn)); + expect(writeSpy).toHaveBeenCalledWith(expect.stringContaining(ANSI.altScreenOff)); + }); + }); + + it("restores pruned accounts when sync does not commit after a 
prune retry", async () => { + const cliModule = await import("../lib/cli.js"); + const storageModule = await import("../lib/storage.js"); + const syncModule = await import("../lib/codex-multi-auth-sync.js"); + const configModule = await import("../lib/config.js"); + const confirmModule = await import("../lib/ui/confirm.js"); + + const tempDir = await fs.mkdtemp(join(tmpdir(), "oc-sync-prune-")); + const consoleSpy = vi.spyOn(console, "log").mockImplementation(() => undefined); + try { + mockStorage.accounts = [ + { + accountId: "org-keep", + organizationId: "org-keep", + accountIdSource: "org", + email: "keep@example.com", + refreshToken: "refresh-keep", + }, + { + accountId: "org-prune", + organizationId: "org-prune", + accountIdSource: "org", + email: "prune@example.com", + refreshToken: "refresh-prune", + }, + ]; + mockStorage.activeIndex = 0; + mockStorage.activeIndexByFamily = {}; + + vi.mocked(cliModule.promptLoginMode) + .mockResolvedValueOnce({ mode: "experimental-sync-now" }) + .mockResolvedValueOnce({ mode: "cancel" }); + vi.mocked(cliModule.promptCodexMultiAuthSyncPrune) + .mockResolvedValueOnce([1]); + vi.mocked(configModule.getSyncFromCodexMultiAuthEnabled).mockReturnValue(true); + vi.mocked(confirmModule.confirm).mockResolvedValueOnce(true); + + vi.mocked(storageModule.createTimestampedBackupPath).mockImplementation((prefix?: string) => + join(tempDir, `${prefix ?? 
"codex-backup"}.json`), + ); + vi.mocked(storageModule.exportAccounts).mockImplementation(async (filePath: string) => { + await fs.writeFile( + filePath, + JSON.stringify({ + version: mockStorage.version, + activeIndex: mockStorage.activeIndex, + activeIndexByFamily: { ...mockStorage.activeIndexByFamily }, + accounts: mockStorage.accounts.map((account) => ({ ...account })), + }), + "utf8", + ); + }); + vi.mocked(storageModule.saveAccounts).mockImplementation(async (nextStorage) => { + mockStorage.version = nextStorage.version; + mockStorage.activeIndex = nextStorage.activeIndex; + mockStorage.activeIndexByFamily = { ...nextStorage.activeIndexByFamily }; + mockStorage.accounts = nextStorage.accounts.map((account) => ({ ...account })); + }); + vi.mocked(storageModule.loadAccounts).mockImplementation(async () => ({ + ...mockStorage, + accounts: mockStorage.accounts.map((account) => ({ ...account })), + activeIndexByFamily: { ...mockStorage.activeIndexByFamily }, + })); + vi.mocked(storageModule.withAccountStorageTransaction).mockImplementation( + async (callback) => { + const loadedStorage = { + ...mockStorage, + accounts: mockStorage.accounts.map((account) => ({ ...account })), + activeIndexByFamily: { ...mockStorage.activeIndexByFamily }, + }; + const persist = async (nextStorage: typeof mockStorage) => { + mockStorage.version = nextStorage.version; + mockStorage.accounts = nextStorage.accounts.map((account) => ({ ...account })); + mockStorage.activeIndex = nextStorage.activeIndex; + mockStorage.activeIndexByFamily = { ...nextStorage.activeIndexByFamily }; + }; + await callback(loadedStorage, persist); + }, + ); + + const capacityError = new syncModule.CodexMultiAuthSyncCapacityError({ + rootDir: tempDir, + accountsPath: join(tempDir, "openai-codex-accounts.json"), + scope: "global", + currentCount: 2, + sourceCount: 2, + sourceDedupedTotal: 3, + dedupedTotal: 3, + maxAccounts: 2, + needToRemove: 1, + importableNewAccounts: 1, + skippedOverlaps: 1, + 
suggestedRemovals: [ + { + index: 1, + email: "prune@example.com", + accountLabel: "Workspace prune", + isCurrentAccount: false, + score: 180, + reason: "disabled", + }, + ], + }); + + vi.mocked(syncModule.previewSyncFromCodexMultiAuth) + .mockRejectedValueOnce(capacityError) + .mockResolvedValueOnce({ + rootDir: tempDir, + accountsPath: join(tempDir, "openai-codex-accounts.json"), + scope: "global", + imported: 0, + skipped: 1, + total: 1, + }); + + const mockClient = createMockClient(); + const { OpenAIOAuthPlugin } = await import("../index.js"); + const plugin = (await OpenAIOAuthPlugin({ client: mockClient } as never)) as unknown as PluginType; + const autoMethod = plugin.auth.methods[0] as unknown as { + authorize: (inputs?: Record) => Promise<{ instructions: string }>; + }; + + const authResult = await autoMethod.authorize(); + expect(authResult.instructions).toBe("Authentication cancelled"); + expect( + consoleSpy.mock.calls.some(([value]) => + String(value).includes("cannot be recovered if the process is interrupted"), + ), + ).toBe(true); + expect(vi.mocked(confirmModule.confirm)).toHaveBeenCalledWith( + expect.stringContaining("cannot be recovered if the process is interrupted"), + ); + expect(mockStorage.accounts).toHaveLength(2); + expect(mockStorage.accounts.map((account) => account.accountId)).toEqual([ + "org-keep", + "org-prune", + ]); + } finally { + consoleSpy.mockRestore(); + await fs.rm(tempDir, { recursive: true, force: true }); + } + }); + + it("retries prune-backup reads after a transient Windows lock", async () => { + const cliModule = await import("../lib/cli.js"); + const storageModule = await import("../lib/storage.js"); + const syncModule = await import("../lib/codex-multi-auth-sync.js"); + const configModule = await import("../lib/config.js"); + const confirmModule = await import("../lib/ui/confirm.js"); + + const tempDir = await fs.mkdtemp(join(tmpdir(), "oc-sync-prune-read-retry-")); + const originalReadFile = fs.readFile.bind(fs); + 
let backupReadAttempts = 0; + const readFileSpy = vi.spyOn(fs, "readFile").mockImplementation(async (path, options) => { + if (String(path).includes("codex-sync-prune-backup") && backupReadAttempts < 2) { + backupReadAttempts += 1; + throw Object.assign(new Error("busy"), { code: "EBUSY" }); + } + return originalReadFile(path, options as never); + }); + + try { + mockStorage.accounts = [ + { + accountId: "org-keep", + organizationId: "org-keep", + accountIdSource: "org", + email: "keep@example.com", + refreshToken: "refresh-keep", + }, + { + accountId: "org-prune", + organizationId: "org-prune", + accountIdSource: "org", + email: "prune@example.com", + refreshToken: "refresh-prune", + }, + ]; + mockStorage.activeIndex = 0; + mockStorage.activeIndexByFamily = {}; + + vi.mocked(cliModule.promptLoginMode) + .mockResolvedValueOnce({ mode: "experimental-sync-now" }) + .mockResolvedValueOnce({ mode: "cancel" }); + vi.mocked(cliModule.promptCodexMultiAuthSyncPrune).mockResolvedValueOnce([1]); + vi.mocked(configModule.getSyncFromCodexMultiAuthEnabled).mockReturnValue(true); + vi.mocked(confirmModule.confirm).mockResolvedValueOnce(true); + + vi.mocked(storageModule.createTimestampedBackupPath).mockImplementation((prefix?: string) => + join(tempDir, `${prefix ?? 
"codex-backup"}.json`), + ); + vi.mocked(storageModule.exportAccounts).mockImplementation(async (filePath: string) => { + await fs.writeFile( + filePath, + JSON.stringify({ + version: mockStorage.version, + activeIndex: mockStorage.activeIndex, + activeIndexByFamily: { ...mockStorage.activeIndexByFamily }, + accounts: mockStorage.accounts.map((account) => ({ ...account })), + }), + "utf8", + ); + }); + vi.mocked(syncModule.previewSyncFromCodexMultiAuth) + .mockRejectedValueOnce( + new syncModule.CodexMultiAuthSyncCapacityError({ + rootDir: tempDir, + accountsPath: join(tempDir, "openai-codex-accounts.json"), + scope: "global", + currentCount: 2, + sourceCount: 2, + sourceDedupedTotal: 3, + dedupedTotal: 3, + maxAccounts: 2, + needToRemove: 1, + importableNewAccounts: 1, + skippedOverlaps: 1, + suggestedRemovals: [ + { + index: 1, + email: "prune@example.com", + accountLabel: "Workspace prune", + isCurrentAccount: false, + score: 180, + reason: "disabled", + }, + ], + }), + ) + .mockResolvedValueOnce({ + rootDir: tempDir, + accountsPath: join(tempDir, "openai-codex-accounts.json"), + scope: "global", + imported: 0, + skipped: 1, + total: 1, + }); + + const mockClient = createMockClient(); + const { OpenAIOAuthPlugin } = await import("../index.js"); + const plugin = (await OpenAIOAuthPlugin({ client: mockClient } as never)) as unknown as PluginType; + const autoMethod = plugin.auth.methods[0] as unknown as { + authorize: (inputs?: Record) => Promise<{ instructions: string }>; + }; + + const authResult = await autoMethod.authorize(); + expect(authResult.instructions).toBe("Authentication cancelled"); + expect(backupReadAttempts).toBe(2); + expect(readFileSpy.mock.calls.filter(([path]) => String(path).includes("codex-sync-prune-backup"))).toHaveLength(3); + expect(mockStorage.accounts.map((account) => account.accountId)).toEqual(["org-keep", "org-prune"]); + } finally { + readFileSpy.mockRestore(); + await fs.rm(tempDir, { recursive: true, force: true }); + } + }); + + 
it("fails prune-backup restore after exhausting the Windows lock retry budget", async () => { + const cliModule = await import("../lib/cli.js"); + const storageModule = await import("../lib/storage.js"); + const syncModule = await import("../lib/codex-multi-auth-sync.js"); + const configModule = await import("../lib/config.js"); + const confirmModule = await import("../lib/ui/confirm.js"); + + const tempDir = await fs.mkdtemp(join(tmpdir(), "oc-sync-prune-read-fail-")); + const originalReadFile = fs.readFile.bind(fs); + let backupReadAttempts = 0; + const readFileSpy = vi.spyOn(fs, "readFile").mockImplementation(async (path, options) => { + if (String(path).includes("codex-sync-prune-backup")) { + backupReadAttempts += 1; + throw Object.assign(new Error("busy"), { code: "EBUSY" }); + } + return originalReadFile(path, options as never); + }); + const consoleSpy = vi.spyOn(console, "log").mockImplementation(() => undefined); + + try { + mockStorage.accounts = [ + { + accountId: "org-keep", + organizationId: "org-keep", + accountIdSource: "org", + email: "keep@example.com", + refreshToken: "refresh-keep", + }, + { + accountId: "org-prune", + organizationId: "org-prune", + accountIdSource: "org", + email: "prune@example.com", + refreshToken: "refresh-prune", + }, + ]; + mockStorage.activeIndex = 0; + mockStorage.activeIndexByFamily = {}; + + vi.mocked(cliModule.promptLoginMode) + .mockResolvedValueOnce({ mode: "experimental-sync-now" }) + .mockResolvedValueOnce({ mode: "cancel" }); + vi.mocked(cliModule.promptCodexMultiAuthSyncPrune).mockResolvedValueOnce([1]); + vi.mocked(configModule.getSyncFromCodexMultiAuthEnabled).mockReturnValue(true); + vi.mocked(confirmModule.confirm).mockResolvedValueOnce(true); + + vi.mocked(storageModule.createTimestampedBackupPath).mockImplementation((prefix?: string) => + join(tempDir, `${prefix ?? 
"codex-backup"}.json`), + ); + vi.mocked(storageModule.exportAccounts).mockImplementation(async (filePath: string) => { + await fs.writeFile( + filePath, + JSON.stringify({ + version: mockStorage.version, + activeIndex: mockStorage.activeIndex, + activeIndexByFamily: { ...mockStorage.activeIndexByFamily }, + accounts: mockStorage.accounts.map((account) => ({ ...account })), + }), + "utf8", + ); + }); + vi.mocked(syncModule.previewSyncFromCodexMultiAuth) + .mockRejectedValueOnce( + new syncModule.CodexMultiAuthSyncCapacityError({ + rootDir: tempDir, + accountsPath: join(tempDir, "openai-codex-accounts.json"), + scope: "global", + currentCount: 2, + sourceCount: 2, + sourceDedupedTotal: 3, + dedupedTotal: 3, + maxAccounts: 2, + needToRemove: 1, + importableNewAccounts: 1, + skippedOverlaps: 1, + suggestedRemovals: [ + { + index: 1, + email: "prune@example.com", + accountLabel: "Workspace prune", + isCurrentAccount: false, + score: 180, + reason: "disabled", + }, + ], + }), + ) + .mockResolvedValueOnce({ + rootDir: tempDir, + accountsPath: join(tempDir, "openai-codex-accounts.json"), + scope: "global", + imported: 0, + skipped: 1, + total: 1, + }); + + const mockClient = createMockClient(); + const { OpenAIOAuthPlugin } = await import("../index.js"); + const plugin = (await OpenAIOAuthPlugin({ client: mockClient } as never)) as unknown as PluginType; + const autoMethod = plugin.auth.methods[0] as unknown as { + authorize: (inputs?: Record) => Promise<{ instructions: string }>; + }; + + const authResult = await autoMethod.authorize(); + expect(authResult.instructions).toBe("Authentication cancelled"); + expect(backupReadAttempts).toBe(4); + expect(mockStorage.accounts.map((account) => account.accountId)).toEqual(["org-keep"]); + expect( + consoleSpy.mock.calls.some(([value]) => + String(value).includes("Failed to restore previously pruned accounts after zero-import preview"), + ), + ).toBe(true); + } finally { + consoleSpy.mockRestore(); + readFileSpy.mockRestore(); + 
await fs.rm(tempDir, { recursive: true, force: true }); + } + }); + + it("does not overwrite concurrent account changes when prune-backup rollback cannot safely apply", async () => { + const cliModule = await import("../lib/cli.js"); + const storageModule = await import("../lib/storage.js"); + const syncModule = await import("../lib/codex-multi-auth-sync.js"); + const configModule = await import("../lib/config.js"); + const confirmModule = await import("../lib/ui/confirm.js"); + + const tempDir = await fs.mkdtemp(join(tmpdir(), "oc-sync-prune-concurrent-")); + try { + mockStorage.accounts = [ + { + accountId: "org-keep", + organizationId: "org-keep", + accountIdSource: "org", + email: "keep@example.com", + refreshToken: "refresh-keep", + }, + { + accountId: "org-prune", + organizationId: "org-prune", + accountIdSource: "org", + email: "prune@example.com", + refreshToken: "refresh-prune", + }, + ]; + mockStorage.activeIndex = 0; + mockStorage.activeIndexByFamily = {}; + + vi.mocked(cliModule.promptLoginMode) + .mockResolvedValueOnce({ mode: "experimental-sync-now" }) + .mockResolvedValueOnce({ mode: "cancel" }); + vi.mocked(cliModule.promptCodexMultiAuthSyncPrune).mockResolvedValueOnce([1]); + vi.mocked(configModule.getSyncFromCodexMultiAuthEnabled).mockReturnValue(true); + vi.mocked(confirmModule.confirm).mockResolvedValueOnce(true); + + vi.mocked(storageModule.createTimestampedBackupPath).mockImplementation((prefix?: string) => + join(tempDir, `${prefix ?? 
"codex-backup"}.json`), + ); + vi.mocked(storageModule.exportAccounts).mockImplementation(async (filePath: string) => { + await fs.writeFile( + filePath, + JSON.stringify({ + version: mockStorage.version, + activeIndex: mockStorage.activeIndex, + activeIndexByFamily: { ...mockStorage.activeIndexByFamily }, + accounts: mockStorage.accounts.map((account) => ({ ...account })), + }), + "utf8", + ); + }); + vi.mocked(storageModule.loadAccounts).mockImplementation(async () => ({ + ...mockStorage, + accounts: mockStorage.accounts.map((account) => ({ ...account })), + activeIndexByFamily: { ...mockStorage.activeIndexByFamily }, + })); + vi.mocked(storageModule.withAccountStorageTransaction).mockImplementation( + async (callback) => { + const loadedStorage = { + ...mockStorage, + accounts: mockStorage.accounts.map((account) => ({ ...account })), + activeIndexByFamily: { ...mockStorage.activeIndexByFamily }, + }; + const persist = async (nextStorage: typeof mockStorage) => { + mockStorage.version = nextStorage.version; + mockStorage.accounts = nextStorage.accounts.map((account) => ({ ...account })); + mockStorage.activeIndex = nextStorage.activeIndex; + mockStorage.activeIndexByFamily = { ...nextStorage.activeIndexByFamily }; + }; + await callback(loadedStorage, persist); + }, + ); + vi.mocked(storageModule.saveFlaggedAccounts) + .mockResolvedValueOnce(undefined) + .mockImplementationOnce(async () => { + mockStorage.accounts = [ + { + accountId: "org-concurrent", + organizationId: "org-concurrent", + accountIdSource: "org", + email: "concurrent@example.com", + refreshToken: "refresh-concurrent", + }, + ]; + mockStorage.activeIndex = 0; + mockStorage.activeIndexByFamily = {}; + throw new Error("flagged write failed"); + }); + + const capacityError = new syncModule.CodexMultiAuthSyncCapacityError({ + rootDir: tempDir, + accountsPath: join(tempDir, "openai-codex-accounts.json"), + scope: "global", + currentCount: 2, + sourceCount: 2, + sourceDedupedTotal: 3, + dedupedTotal: 3, 
+ maxAccounts: 2, + needToRemove: 1, + importableNewAccounts: 1, + skippedOverlaps: 1, + suggestedRemovals: [ + { + index: 1, + email: "prune@example.com", + accountLabel: "Workspace prune", + isCurrentAccount: false, + score: 180, + reason: "disabled", + }, + ], + }); + + vi.mocked(syncModule.previewSyncFromCodexMultiAuth) + .mockRejectedValueOnce(capacityError) + .mockResolvedValueOnce({ + rootDir: tempDir, + accountsPath: join(tempDir, "openai-codex-accounts.json"), + scope: "global", + imported: 0, + skipped: 1, + total: 1, + }); + + const mockClient = createMockClient(); + const { OpenAIOAuthPlugin } = await import("../index.js"); + const plugin = (await OpenAIOAuthPlugin({ client: mockClient } as never)) as unknown as PluginType; + const autoMethod = plugin.auth.methods[0] as unknown as { + authorize: (inputs?: Record) => Promise<{ instructions: string }>; + }; + + const authResult = await autoMethod.authorize(); + expect(authResult.instructions).toBe("Authentication cancelled"); + expect(mockStorage.accounts.map((account) => account.accountId)).toEqual(["org-concurrent"]); + } finally { + await fs.rm(tempDir, { recursive: true, force: true }); + } + }); + + it("stops sync-prune retries after repeated capacity failures", async () => { + const cliModule = await import("../lib/cli.js"); + const storageModule = await import("../lib/storage.js"); + const syncModule = await import("../lib/codex-multi-auth-sync.js"); + const configModule = await import("../lib/config.js"); + const confirmModule = await import("../lib/ui/confirm.js"); + + const tempDir = await fs.mkdtemp(join(tmpdir(), "oc-sync-prune-max-")); + const consoleSpy = vi.spyOn(console, "log").mockImplementation(() => {}); + try { + mockStorage.accounts = [ + { + accountId: "org-keep", + organizationId: "org-keep", + accountIdSource: "org", + email: "keep@example.com", + refreshToken: "refresh-keep", + }, + { + accountId: "org-prune", + organizationId: "org-prune", + accountIdSource: "org", + email: 
"prune@example.com", + refreshToken: "refresh-prune", + }, + ]; + mockStorage.activeIndex = 0; + mockStorage.activeIndexByFamily = {}; + + vi.mocked(cliModule.promptLoginMode) + .mockResolvedValueOnce({ mode: "experimental-sync-now" }) + .mockResolvedValueOnce({ mode: "cancel" }); + for (let attempt = 0; attempt < 5; attempt += 1) { + vi.mocked(cliModule.promptCodexMultiAuthSyncPrune).mockResolvedValueOnce([1]); + vi.mocked(confirmModule.confirm).mockResolvedValueOnce(true); + } + vi.mocked(configModule.getSyncFromCodexMultiAuthEnabled).mockReturnValue(true); + + vi.mocked(storageModule.createTimestampedBackupPath).mockImplementation((prefix?: string) => + join(tempDir, `${prefix ?? "codex-backup"}.json`), + ); + vi.mocked(storageModule.exportAccounts).mockImplementation(async (filePath: string) => { + await fs.writeFile( + filePath, + JSON.stringify({ + version: mockStorage.version, + activeIndex: mockStorage.activeIndex, + activeIndexByFamily: { ...mockStorage.activeIndexByFamily }, + accounts: mockStorage.accounts.map((account) => ({ ...account })), + }), + "utf8", + ); + }); + for (let attempt = 0; attempt < 5; attempt += 1) { + vi.mocked(storageModule.withAccountStorageTransaction).mockImplementationOnce( + async (callback) => { + const loadedStorage = { + ...mockStorage, + accounts: mockStorage.accounts.map((account) => ({ ...account })), + activeIndexByFamily: { ...mockStorage.activeIndexByFamily }, + }; + const persist = async (_nextStorage: typeof mockStorage) => {}; + await callback(loadedStorage, persist); + }, + ); + } + + const capacityError = new syncModule.CodexMultiAuthSyncCapacityError({ + rootDir: tempDir, + accountsPath: join(tempDir, "openai-codex-accounts.json"), + scope: "global", + currentCount: 2, + sourceCount: 2, + sourceDedupedTotal: 3, + dedupedTotal: 3, + maxAccounts: 2, + needToRemove: 1, + importableNewAccounts: 1, + skippedOverlaps: 1, + suggestedRemovals: [ + { + index: 1, + email: "prune@example.com", + accountLabel: "Workspace 
prune", + isCurrentAccount: false, + score: 180, + reason: "disabled", + }, + ], + }); + + for (let attempt = 0; attempt < 5; attempt += 1) { + vi.mocked(syncModule.previewSyncFromCodexMultiAuth).mockRejectedValueOnce(capacityError); + } + + const mockClient = createMockClient(); + const { OpenAIOAuthPlugin } = await import("../index.js"); + const plugin = (await OpenAIOAuthPlugin({ client: mockClient } as never)) as unknown as PluginType; + const autoMethod = plugin.auth.methods[0] as unknown as { + authorize: (inputs?: Record) => Promise<{ instructions: string }>; + }; + + const authResult = await autoMethod.authorize(); + expect(authResult.instructions).toBe("Authentication cancelled"); + expect(vi.mocked(cliModule.promptCodexMultiAuthSyncPrune)).toHaveBeenCalledTimes(5); + expect(consoleSpy.mock.calls.some(([value]) => String(value).includes("Sync hit max retry limit"))).toBe(true); + } finally { + consoleSpy.mockRestore(); + await fs.rm(tempDir, { recursive: true, force: true }); + } + }); + + it("aborts sync prune when a selected account disappears before confirmation", async () => { + const cliModule = await import("../lib/cli.js"); + const storageModule = await import("../lib/storage.js"); + const syncModule = await import("../lib/codex-multi-auth-sync.js"); + const configModule = await import("../lib/config.js"); + const confirmModule = await import("../lib/ui/confirm.js"); + + const tempDir = await fs.mkdtemp(join(tmpdir(), "oc-sync-prune-missing-")); + try { + mockStorage.accounts = [ + { + accountId: "org-keep", + organizationId: "org-keep", + accountIdSource: "org", + email: "keep@example.com", + refreshToken: "refresh-keep", + }, + ]; + mockStorage.activeIndex = 0; + mockStorage.activeIndexByFamily = {}; + + vi.mocked(cliModule.promptLoginMode) + .mockResolvedValueOnce({ mode: "experimental-sync-now" }) + .mockResolvedValueOnce({ mode: "cancel" }); + vi.mocked(cliModule.promptCodexMultiAuthSyncPrune).mockResolvedValueOnce([1]); + 
vi.mocked(configModule.getSyncFromCodexMultiAuthEnabled).mockReturnValue(true); + vi.mocked(confirmModule.confirm).mockResolvedValueOnce(true); + + vi.mocked(storageModule.createTimestampedBackupPath).mockImplementation((prefix?: string) => + join(tempDir, `${prefix ?? "codex-backup"}.json`), + ); + vi.mocked(storageModule.exportAccounts).mockImplementation(async (filePath: string) => { + await fs.writeFile( + filePath, + JSON.stringify({ + version: mockStorage.version, + activeIndex: mockStorage.activeIndex, + activeIndexByFamily: { ...mockStorage.activeIndexByFamily }, + accounts: mockStorage.accounts.map((account) => ({ ...account })), + }), + "utf8", + ); + }); + + const capacityError = new syncModule.CodexMultiAuthSyncCapacityError({ + rootDir: tempDir, + accountsPath: join(tempDir, "openai-codex-accounts.json"), + scope: "global", + currentCount: 1, + sourceCount: 2, + sourceDedupedTotal: 3, + dedupedTotal: 3, + maxAccounts: 2, + needToRemove: 1, + importableNewAccounts: 2, + skippedOverlaps: 0, + suggestedRemovals: [ + { + index: 1, + email: "missing@example.com", + accountLabel: "Workspace missing", + isCurrentAccount: false, + score: 180, + reason: "disabled", + }, + ], + }); + + vi.mocked(syncModule.previewSyncFromCodexMultiAuth).mockRejectedValueOnce(capacityError); + + const mockClient = createMockClient(); + const { OpenAIOAuthPlugin } = await import("../index.js"); + const plugin = (await OpenAIOAuthPlugin({ client: mockClient } as never)) as unknown as PluginType; + const autoMethod = plugin.auth.methods[0] as unknown as { + authorize: (inputs?: Record) => Promise<{ instructions: string }>; + }; + + const authResult = await autoMethod.authorize(); + expect(authResult.instructions).toBe("Authentication cancelled"); + expect(mockStorage.accounts).toHaveLength(1); + expect(vi.mocked(syncModule.syncFromCodexMultiAuth)).not.toHaveBeenCalled(); + } finally { + await fs.rm(tempDir, { recursive: true, force: true }); + } + }); + + it("does not prompt for 
local pruning when the sync source alone exceeds the configured limit", async () => { + const cliModule = await import("../lib/cli.js"); + const syncModule = await import("../lib/codex-multi-auth-sync.js"); + const configModule = await import("../lib/config.js"); + + vi.mocked(cliModule.promptLoginMode) + .mockResolvedValueOnce({ mode: "experimental-sync-now" }); + vi.mocked(configModule.getSyncFromCodexMultiAuthEnabled).mockReturnValue(true); + + const capacityError = new syncModule.CodexMultiAuthSyncCapacityError({ + rootDir: "/tmp/codex-root", + accountsPath: "/tmp/codex-root/openai-codex-accounts.json", + scope: "global", + currentCount: 0, + sourceCount: 3, + sourceDedupedTotal: 3, + dedupedTotal: 3, + maxAccounts: 2, + needToRemove: 1, + importableNewAccounts: 0, + skippedOverlaps: 0, + suggestedRemovals: [], + }); + + vi.mocked(syncModule.previewSyncFromCodexMultiAuth).mockRejectedValueOnce(capacityError); + + const mockClient = createMockClient(); + const { OpenAIOAuthPlugin } = await import("../index.js"); + const plugin = (await OpenAIOAuthPlugin({ client: mockClient } as never)) as unknown as PluginType; + const autoMethod = plugin.auth.methods[0] as unknown as { + authorize: (inputs?: Record) => Promise<{ instructions: string }>; + }; + + await autoMethod.authorize(); + expect(vi.mocked(cliModule.promptCodexMultiAuthSyncPrune)).not.toHaveBeenCalled(); + expect(vi.mocked(syncModule.syncFromCodexMultiAuth)).not.toHaveBeenCalled(); + }); + + it("preserves active pointers when sync prune removes an earlier account", async () => { + const cliModule = await import("../lib/cli.js"); + const storageModule = await import("../lib/storage.js"); + const syncModule = await import("../lib/codex-multi-auth-sync.js"); + const configModule = await import("../lib/config.js"); + const confirmModule = await import("../lib/ui/confirm.js"); + + const tempDir = await fs.mkdtemp(join(tmpdir(), "oc-sync-prune-active-")); + try { + mockStorage.accounts = [ + { + accountId: 
"org-prune", + organizationId: "org-prune", + accountIdSource: "org", + email: "prune@example.com", + refreshToken: "refresh-prune", + }, + { + accountId: "org-current", + organizationId: "org-current", + accountIdSource: "org", + email: "current@example.com", + refreshToken: "refresh-current", + }, + ]; + mockStorage.activeIndex = 1; + mockStorage.activeIndexByFamily = { codex: 1 }; + + vi.mocked(cliModule.promptLoginMode) + .mockResolvedValueOnce({ mode: "experimental-sync-now" }) + .mockResolvedValueOnce({ mode: "cancel" }); + vi.mocked(cliModule.promptCodexMultiAuthSyncPrune).mockResolvedValueOnce([0]); + vi.mocked(configModule.getSyncFromCodexMultiAuthEnabled).mockReturnValue(true); + vi.mocked(confirmModule.confirm) + .mockResolvedValueOnce(true) + .mockResolvedValueOnce(false); + + vi.mocked(storageModule.createTimestampedBackupPath).mockImplementation((prefix?: string) => + join(tempDir, `${prefix ?? "codex-backup"}.json`), + ); + vi.mocked(storageModule.exportAccounts).mockImplementation(async (filePath: string) => { + await fs.writeFile( + filePath, + JSON.stringify({ + version: mockStorage.version, + activeIndex: mockStorage.activeIndex, + activeIndexByFamily: { ...mockStorage.activeIndexByFamily }, + accounts: mockStorage.accounts.map((account) => ({ ...account })), + }), + "utf8", + ); + }); + vi.mocked(storageModule.saveAccounts).mockImplementation(async (nextStorage) => { + mockStorage.version = nextStorage.version; + mockStorage.activeIndex = nextStorage.activeIndex; + mockStorage.activeIndexByFamily = { ...nextStorage.activeIndexByFamily }; + mockStorage.accounts = nextStorage.accounts.map((account) => ({ ...account })); + }); + + const capacityError = new syncModule.CodexMultiAuthSyncCapacityError({ + rootDir: tempDir, + accountsPath: join(tempDir, "openai-codex-accounts.json"), + scope: "global", + currentCount: 2, + sourceCount: 2, + sourceDedupedTotal: 3, + dedupedTotal: 3, + maxAccounts: 2, + needToRemove: 1, + importableNewAccounts: 1, + 
skippedOverlaps: 1, + suggestedRemovals: [ + { + index: 0, + email: "prune@example.com", + accountLabel: "Workspace prune", + isCurrentAccount: false, + score: 180, + reason: "disabled", + }, + ], + }); + + vi.mocked(syncModule.previewSyncFromCodexMultiAuth).mockRejectedValueOnce(capacityError); + + const mockClient = createMockClient(); + const { OpenAIOAuthPlugin } = await import("../index.js"); + const plugin = (await OpenAIOAuthPlugin({ client: mockClient } as never)) as unknown as PluginType; + const autoMethod = plugin.auth.methods[0] as unknown as { + authorize: (inputs?: Record) => Promise<{ instructions: string }>; + }; + + const authResult = await autoMethod.authorize(); + expect(authResult.instructions).toBe("Authentication cancelled"); + expect(mockStorage.accounts).toHaveLength(1); + expect(mockStorage.accounts[0]?.accountId).toBe("org-current"); + expect(mockStorage.activeIndex).toBe(0); + expect(mockStorage.activeIndexByFamily.codex).toBe(0); + } finally { + await fs.rm(tempDir, { recursive: true, force: true }); + } + }); + + it("removes only exact flagged identities during sync prune cleanup", async () => { + const cliModule = await import("../lib/cli.js"); + const storageModule = await import("../lib/storage.js"); + const syncModule = await import("../lib/codex-multi-auth-sync.js"); + const configModule = await import("../lib/config.js"); + const confirmModule = await import("../lib/ui/confirm.js"); + + const tempDir = await fs.mkdtemp(join(tmpdir(), "oc-sync-prune-flagged-")); + try { + mockStorage.accounts = [ + { + accountId: "org-prune", + organizationId: "org-prune", + accountIdSource: "org", + email: "prune@example.com", + refreshToken: "refresh-shared", + }, + { + accountId: "org-keep", + organizationId: "org-keep", + accountIdSource: "org", + email: "keep@example.com", + refreshToken: "refresh-keep", + }, + ]; + mockStorage.activeIndex = 1; + mockStorage.activeIndexByFamily = { codex: 1 }; + + vi.mocked(cliModule.promptLoginMode) + 
.mockResolvedValueOnce({ mode: "experimental-sync-now" }) + .mockResolvedValueOnce({ mode: "cancel" }); + vi.mocked(cliModule.promptCodexMultiAuthSyncPrune).mockResolvedValueOnce([0]); + vi.mocked(configModule.getSyncFromCodexMultiAuthEnabled).mockReturnValue(true); + vi.mocked(confirmModule.confirm).mockReset(); + vi.mocked(confirmModule.confirm).mockResolvedValue(true); + vi.mocked(storageModule.withFlaggedAccountsTransaction).mockReset(); + vi.mocked(storageModule.saveFlaggedAccounts).mockReset(); + vi.mocked(storageModule.saveFlaggedAccounts).mockResolvedValue(undefined); + mockFlaggedStorage.accounts = [ + { + refreshToken: "refresh-shared", + organizationId: "org-other", + accountId: "org-other", + flaggedAt: 1, + }, + { + refreshToken: "refresh-shared", + organizationId: "org-prune", + accountId: "org-prune", + flaggedAt: 2, + }, + { + refreshToken: "refresh-keep", + organizationId: "org-keep", + accountId: "org-keep", + flaggedAt: 3, + }, + { + refreshToken: "refresh-concurrent-flagged", + organizationId: "org-concurrent-flagged", + accountId: "org-concurrent-flagged", + flaggedAt: 4, + }, + ]; + vi.mocked(storageModule.withFlaggedAccountsTransaction).mockImplementation( + async (callback) => { + const persist = async (nextStorage) => { + mockFlaggedStorage.accounts = nextStorage.accounts.map((account) => ({ ...account })); + await vi.mocked(storageModule.saveFlaggedAccounts)(nextStorage); + }; + await callback( + { + version: 1, + accounts: mockFlaggedStorage.accounts.map((account) => ({ ...account })), + }, + persist, + ); + }, + ); + + vi.mocked(storageModule.createTimestampedBackupPath).mockImplementation((prefix?: string) => + join(tempDir, `${prefix ?? 
"codex-backup"}.json`), + ); + vi.mocked(storageModule.exportAccounts).mockImplementation(async (filePath: string) => { + await fs.writeFile( + filePath, + JSON.stringify({ + version: mockStorage.version, + activeIndex: mockStorage.activeIndex, + activeIndexByFamily: { ...mockStorage.activeIndexByFamily }, + accounts: mockStorage.accounts.map((account) => ({ ...account })), + }), + "utf8", + ); + }); + vi.mocked(storageModule.saveAccounts).mockImplementation(async (nextStorage) => { + mockStorage.version = nextStorage.version; + mockStorage.activeIndex = nextStorage.activeIndex; + mockStorage.activeIndexByFamily = { ...nextStorage.activeIndexByFamily }; + mockStorage.accounts = nextStorage.accounts.map((account) => ({ ...account })); + }); + vi.mocked(storageModule.loadAccounts).mockImplementation(async () => ({ + ...mockStorage, + accounts: mockStorage.accounts.map((account) => ({ ...account })), + activeIndexByFamily: { ...mockStorage.activeIndexByFamily }, + })); + vi.mocked(storageModule.withAccountStorageTransaction).mockImplementation( + async (callback) => { + const loadedStorage = { + ...mockStorage, + accounts: mockStorage.accounts.map((account) => ({ ...account })), + activeIndexByFamily: { ...mockStorage.activeIndexByFamily }, + }; + const persist = async (nextStorage: typeof mockStorage) => { + mockStorage.version = nextStorage.version; + mockStorage.accounts = nextStorage.accounts.map((account) => ({ ...account })); + mockStorage.activeIndex = nextStorage.activeIndex; + mockStorage.activeIndexByFamily = { ...nextStorage.activeIndexByFamily }; + }; + await callback(loadedStorage, persist); + }, + ); + + const capacityError = new syncModule.CodexMultiAuthSyncCapacityError({ + rootDir: tempDir, + accountsPath: join(tempDir, "openai-codex-accounts.json"), + scope: "global", + currentCount: 2, + sourceCount: 2, + sourceDedupedTotal: 3, + dedupedTotal: 3, + maxAccounts: 2, + needToRemove: 1, + importableNewAccounts: 1, + skippedOverlaps: 1, + 
suggestedRemovals: [ + { + index: 0, + email: "prune@example.com", + accountLabel: "Workspace prune", + isCurrentAccount: false, + score: 180, + reason: "disabled", + }, + ], + }); + + vi.mocked(syncModule.previewSyncFromCodexMultiAuth).mockRejectedValueOnce(capacityError); + + const mockClient = createMockClient(); + const { OpenAIOAuthPlugin } = await import("../index.js"); + const plugin = (await OpenAIOAuthPlugin({ client: mockClient } as never)) as unknown as PluginType; + const autoMethod = plugin.auth.methods[0] as unknown as { + authorize: (inputs?: Record) => Promise<{ instructions: string }>; + }; + + const authResult = await autoMethod.authorize(); + expect(authResult.instructions).toBe("Authentication cancelled"); + expect(vi.mocked(storageModule.saveFlaggedAccounts)).toHaveBeenCalledWith({ + version: 1, + accounts: [ + { + refreshToken: "refresh-shared", + organizationId: "org-other", + accountId: "org-other", + flaggedAt: 1, + }, + { + refreshToken: "refresh-keep", + organizationId: "org-keep", + accountId: "org-keep", + flaggedAt: 3, + }, + { + refreshToken: "refresh-concurrent-flagged", + organizationId: "org-concurrent-flagged", + accountId: "org-concurrent-flagged", + flaggedAt: 4, + }, + ], + }); + } finally { + await fs.rm(tempDir, { recursive: true, force: true }); + } + }); + + it("remaps sync-prune active pointers by exact identity when sibling accounts share fragments", async () => { + const cliModule = await import("../lib/cli.js"); + const storageModule = await import("../lib/storage.js"); + const syncModule = await import("../lib/codex-multi-auth-sync.js"); + const configModule = await import("../lib/config.js"); + const confirmModule = await import("../lib/ui/confirm.js"); + + const tempDir = await fs.mkdtemp(join(tmpdir(), "oc-sync-prune-exact-")); + try { + mockStorage.accounts = [ + { + accountId: "org-prune", + organizationId: "org-prune", + accountIdSource: "org", + email: "prune@example.com", + refreshToken: "refresh-prune", + }, + 
{ + accountId: "workspace-a", + organizationId: "org-shared", + accountIdSource: "org", + email: "shared-a@example.com", + refreshToken: "refresh-shared", + }, + { + accountId: "workspace-b", + organizationId: "org-shared", + accountIdSource: "org", + email: "shared-b@example.com", + refreshToken: "refresh-shared", + }, + ]; + mockStorage.activeIndex = 2; + mockStorage.activeIndexByFamily = { codex: 2 }; + + vi.mocked(cliModule.promptLoginMode) + .mockResolvedValueOnce({ mode: "experimental-sync-now" }) + .mockResolvedValueOnce({ mode: "cancel" }); + vi.mocked(cliModule.promptCodexMultiAuthSyncPrune).mockResolvedValueOnce([0]); + vi.mocked(configModule.getSyncFromCodexMultiAuthEnabled).mockReturnValue(true); + vi.mocked(confirmModule.confirm).mockReset(); + vi.mocked(confirmModule.confirm).mockResolvedValue(true); + + vi.mocked(storageModule.createTimestampedBackupPath).mockImplementation((prefix?: string) => + join(tempDir, `${prefix ?? "codex-backup"}.json`), + ); + vi.mocked(storageModule.exportAccounts).mockImplementation(async (filePath: string) => { + await fs.writeFile( + filePath, + JSON.stringify({ + version: mockStorage.version, + activeIndex: mockStorage.activeIndex, + activeIndexByFamily: { ...mockStorage.activeIndexByFamily }, + accounts: mockStorage.accounts.map((account) => ({ ...account })), + }), + "utf8", + ); + }); + vi.mocked(storageModule.saveAccounts).mockImplementation(async (nextStorage) => { + mockStorage.version = nextStorage.version; + mockStorage.activeIndex = nextStorage.activeIndex; + mockStorage.activeIndexByFamily = { ...nextStorage.activeIndexByFamily }; + mockStorage.accounts = nextStorage.accounts.map((account) => ({ ...account })); + }); + vi.mocked(storageModule.loadAccounts).mockImplementation(async () => ({ + ...mockStorage, + accounts: mockStorage.accounts.map((account) => ({ ...account })), + activeIndexByFamily: { ...mockStorage.activeIndexByFamily }, + })); + 
vi.mocked(storageModule.withAccountStorageTransaction).mockImplementation( + async (callback) => { + const loadedStorage = { + ...mockStorage, + accounts: mockStorage.accounts.map((account) => ({ ...account })), + activeIndexByFamily: { ...mockStorage.activeIndexByFamily }, + }; + const persist = async (nextStorage: typeof mockStorage) => { + mockStorage.version = nextStorage.version; + mockStorage.accounts = nextStorage.accounts.map((account) => ({ ...account })); + mockStorage.activeIndex = nextStorage.activeIndex; + mockStorage.activeIndexByFamily = { ...nextStorage.activeIndexByFamily }; + }; + await callback(loadedStorage, persist); + }, + ); + + const capacityError = new syncModule.CodexMultiAuthSyncCapacityError({ + rootDir: tempDir, + accountsPath: join(tempDir, "openai-codex-accounts.json"), + scope: "global", + currentCount: 3, + sourceCount: 2, + sourceDedupedTotal: 3, + dedupedTotal: 3, + maxAccounts: 2, + needToRemove: 1, + importableNewAccounts: 1, + skippedOverlaps: 1, + suggestedRemovals: [ + { + index: 0, + email: "prune@example.com", + accountLabel: "Workspace prune", + isCurrentAccount: false, + score: 180, + reason: "disabled", + }, + ], + }); + + vi.mocked(syncModule.previewSyncFromCodexMultiAuth).mockRejectedValueOnce(capacityError); + + const mockClient = createMockClient(); + const { OpenAIOAuthPlugin } = await import("../index.js"); + const plugin = (await OpenAIOAuthPlugin({ client: mockClient } as never)) as unknown as PluginType; + const autoMethod = plugin.auth.methods[0] as unknown as { + authorize: (inputs?: Record) => Promise<{ instructions: string }>; + }; + + const authResult = await autoMethod.authorize(); + expect(authResult.instructions).toBe("Authentication cancelled"); + expect(mockStorage.accounts.map((account) => account.accountId)).toEqual(["workspace-a", "workspace-b"]); + expect(mockStorage.activeIndex).toBe(1); + expect(mockStorage.activeIndexByFamily.codex).toBe(1); + } finally { + await fs.rm(tempDir, { recursive: 
true, force: true }); + } + }); }); describe("OpenAIOAuthPlugin showToast error handling", () => { diff --git a/test/paths.test.ts b/test/paths.test.ts index ca215ec8..b27c71d9 100644 --- a/test/paths.test.ts +++ b/test/paths.test.ts @@ -4,20 +4,26 @@ import path from "node:path"; vi.mock("node:fs", () => ({ existsSync: vi.fn(), + readFileSync: vi.fn(), + statSync: vi.fn(), })); -import { existsSync } from "node:fs"; +import { existsSync, readFileSync, statSync } from "node:fs"; import { getConfigDir, getProjectConfigDir, getProjectGlobalConfigDir, getProjectStorageKey, + getProjectStorageKeyCandidates, isProjectDirectory, findProjectRoot, resolvePath, } from "../lib/storage/paths.js"; const mockedExistsSync = vi.mocked(existsSync); +const mockedReadFileSync = vi.mocked(readFileSync); +const mockedStatSync = vi.mocked(statSync); +const originalPlatform = process.platform; describe("Storage Paths Module", () => { beforeEach(() => { @@ -25,6 +31,7 @@ describe("Storage Paths Module", () => { }); afterEach(() => { + Object.defineProperty(process, "platform", { value: originalPlatform }); vi.resetAllMocks(); }); @@ -57,6 +64,72 @@ describe("Storage Paths Module", () => { expect(first).toBe(second); expect(first).toMatch(/^myproject-[a-f0-9]{12}$/); }); + + it("preserves the legacy lowercase key prefix on Windows paths", () => { + Object.defineProperty(process, "platform", { value: "win32" }); + const projectPath = "C:\\Users\\Test\\MyProject"; + expect(getProjectStorageKey(projectPath)).toMatch(/^myproject-[a-f0-9]{12}$/); + }); + + it("uses the canonical git identity for same-repo worktrees", () => { + const mainWorktree = "C:\\Users\\neil\\DevTools\\oc-chatgpt-multi-auth"; + const branchWorktree = "C:\\Users\\neil\\DevTools\\oc-chatgpt-multi-auth-sync-worktree"; + const mainGitPath = `${mainWorktree}\\.git`.toLowerCase(); + const branchGitPath = `${branchWorktree}\\.git`.toLowerCase(); + const sharedGitFile = "gitdir: 
C:/Users/neil/DevTools/oc-chatgpt-multi-auth/.git/worktrees/feature-sync\n"; + mockedExistsSync.mockImplementation((candidate) => { + const normalized = String(candidate).replace(/\//g, "\\").toLowerCase(); + return normalized === mainGitPath || normalized === branchGitPath; + }); + mockedStatSync.mockImplementation((candidate) => { + const normalized = String(candidate).replace(/\//g, "\\").toLowerCase(); + return { + isDirectory: () => normalized === mainGitPath, + } as ReturnType; + }); + mockedReadFileSync.mockImplementation((candidate) => { + const normalized = String(candidate).replace(/\//g, "\\").toLowerCase(); + if (normalized === branchGitPath) { + return sharedGitFile; + } + throw new Error(`unexpected read: ${String(candidate)}`); + }); + + expect(getProjectStorageKey(mainWorktree)).toBe(getProjectStorageKey(branchWorktree)); + }); + }); + + describe("getProjectStorageKeyCandidates", () => { + it("returns a shared canonical key for same-repo worktrees before the legacy fallback", () => { + const mainWorktree = "C:\\Users\\neil\\DevTools\\oc-chatgpt-multi-auth"; + const branchWorktree = "C:\\Users\\neil\\DevTools\\oc-chatgpt-multi-auth-sync-worktree"; + const mainGitPath = `${mainWorktree}\\.git`.toLowerCase(); + const branchGitPath = `${branchWorktree}\\.git`.toLowerCase(); + const sharedGitFile = "gitdir: C:/Users/neil/DevTools/oc-chatgpt-multi-auth/.git/worktrees/feature-sync\n"; + mockedExistsSync.mockImplementation((candidate) => { + const normalized = String(candidate).replace(/\//g, "\\").toLowerCase(); + return normalized === mainGitPath || normalized === branchGitPath; + }); + mockedStatSync.mockImplementation((candidate) => { + const normalized = String(candidate).replace(/\//g, "\\").toLowerCase(); + return { + isDirectory: () => normalized === mainGitPath, + } as ReturnType; + }); + mockedReadFileSync.mockImplementation((candidate) => { + const normalized = String(candidate).replace(/\//g, "\\").toLowerCase(); + if (normalized === 
branchGitPath) { + return sharedGitFile; + } + throw new Error(`unexpected read: ${String(candidate)}`); + }); + + const mainCandidates = getProjectStorageKeyCandidates(mainWorktree); + const branchCandidates = getProjectStorageKeyCandidates(branchWorktree); + + expect(mainCandidates[0]).toBe(branchCandidates[0]); + expect(mainCandidates[1]).not.toBe(branchCandidates[1]); + }); }); describe("getProjectGlobalConfigDir", () => { @@ -128,6 +201,15 @@ describe("Storage Paths Module", () => { findProjectRoot("/a/b/c/d/e"); expect(mockedExistsSync.mock.calls.length).toBeGreaterThan(callCount); }); + + it("returns the filesystem root when it contains a project marker", () => { + const root = path.parse(process.cwd()).root; + mockedExistsSync.mockImplementation((p) => { + return typeof p === "string" && p === path.join(root, ".git"); + }); + const nestedPath = path.join(root, "workspace", "repo", "src"); + expect(findProjectRoot(nestedPath)).toBe(root); + }); }); describe("resolvePath", () => { diff --git a/test/plugin-config.race.test.ts b/test/plugin-config.race.test.ts new file mode 100644 index 00000000..b553f4ad --- /dev/null +++ b/test/plugin-config.race.test.ts @@ -0,0 +1,242 @@ +import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; +import * as fs from "node:fs"; +import * as os from "node:os"; +import * as path from "node:path"; +import * as logger from "../lib/logger.js"; + +vi.mock("node:fs", async () => { + const actual = await vi.importActual("node:fs"); + return { + ...actual, + existsSync: vi.fn(), + promises: { + ...actual.promises, + mkdir: vi.fn(), + readFile: vi.fn(), + rename: vi.fn(), + unlink: vi.fn(), + writeFile: vi.fn(), + }, + }; +}); + +vi.mock("../lib/logger.js", async () => { + const actual = await vi.importActual("../lib/logger.js"); + return { + ...actual, + logWarn: vi.fn(), + }; +}); + +describe("plugin config lock retry", () => { + const mockExistsSync = vi.mocked(fs.existsSync); + const mockMkdir = 
vi.mocked(fs.promises.mkdir); + const mockReadFile = vi.mocked(fs.promises.readFile); + const mockRename = vi.mocked(fs.promises.rename); + const mockUnlink = vi.mocked(fs.promises.unlink); + const mockWriteFile = vi.mocked(fs.promises.writeFile); + const originalPlatform = process.platform; + + beforeEach(() => { + vi.resetModules(); + vi.clearAllMocks(); + mockExistsSync.mockReturnValue(false); + mockReadFile.mockResolvedValue("{}"); + mockMkdir.mockResolvedValue(undefined); + mockRename.mockResolvedValue(undefined); + mockUnlink.mockResolvedValue(undefined); + mockWriteFile.mockResolvedValue(undefined); + }); + + afterEach(() => { + vi.restoreAllMocks(); + Object.defineProperty(process, "platform", { value: originalPlatform }); + }); + + it("retries transient EPERM when taking the lock on Windows", async () => { + Object.defineProperty(process, "platform", { value: "win32" }); + + let lockAttempts = 0; + mockWriteFile.mockImplementation(async (filePath) => { + const path = String(filePath); + if (path.endsWith(".lock")) { + lockAttempts += 1; + if (lockAttempts === 1) { + const error = new Error("lock busy") as NodeJS.ErrnoException; + error.code = "EPERM"; + throw error; + } + } + return undefined; + }); + + const { savePluginConfigMutation } = await import("../lib/config.js"); + + await expect( + savePluginConfigMutation((current) => ({ + ...current, + experimental: { syncFromCodexMultiAuth: { enabled: true } }, + })), + ).resolves.toBeUndefined(); + + expect(lockAttempts).toBeGreaterThanOrEqual(2); + expect(mockWriteFile).toHaveBeenCalled(); + expect(vi.mocked(logger.logWarn)).not.toHaveBeenCalled(); + }); + + it("does not steal a live lock that replaced a stale one before rename", async () => { + Object.defineProperty(process, "platform", { value: "win32" }); + const configPath = path.join(os.homedir(), ".opencode", "openai-codex-auth-config.json"); + const lockPath = `${configPath}.lock`; + let lockAttempts = 0; + let lockFilePresent = true; + const killSpy 
= vi.spyOn(process, "kill").mockImplementation((pid) => { + if (pid === 111) { + const error = new Error("process not found") as NodeJS.ErrnoException; + error.code = "ESRCH"; + throw error; + } + return true as never; + }); + + mockExistsSync.mockImplementation((filePath) => String(filePath) === lockPath && lockFilePresent); + mockReadFile.mockImplementation(async (filePath: fs.PathLike | number) => { + const path = String(filePath); + if (path === lockPath) { + return lockAttempts === 1 ? "111" : "{}"; + } + if (path.includes(".stale")) { + return "222"; + } + return "{}"; + }); + mockRename.mockImplementation(async (source, destination) => { + if (String(source) === lockPath) { + lockFilePresent = false; + } + if (String(destination) === lockPath) { + lockFilePresent = true; + } + return undefined; + }); + mockWriteFile.mockImplementation(async (filePath) => { + const path = String(filePath); + if (path === lockPath) { + lockAttempts += 1; + if (lockAttempts === 1) { + const error = new Error("exists") as NodeJS.ErrnoException; + error.code = "EEXIST"; + throw error; + } + } + return undefined; + }); + + const { savePluginConfigMutation } = await import("../lib/config.js"); + + try { + await expect( + savePluginConfigMutation((current) => ({ + ...current, + experimental: { syncFromCodexMultiAuth: { enabled: true } }, + })), + ).resolves.toBeUndefined(); + const lockRenameCalls = mockRename.mock.calls.filter( + ([source, destination]) => + String(source) === lockPath || String(destination) === lockPath, + ); + expect(lockRenameCalls).toHaveLength(2); + expect(String(lockRenameCalls[0]?.[0])).toBe(lockPath); + expect(String(lockRenameCalls[1]?.[1])).toBe(lockPath); + expect(killSpy).toHaveBeenCalledWith(111, 0); + } finally { + killSpy.mockRestore(); + } + }); + + it("recovers stale locks on Windows when the pid probe returns EPERM", async () => { + Object.defineProperty(process, "platform", { value: "win32" }); + const configPath = path.join(os.homedir(), 
".opencode", "openai-codex-auth-config.json"); + const lockPath = `${configPath}.lock`; + let lockAttempts = 0; + let lockFilePresent = true; + const killSpy = vi.spyOn(process, "kill").mockImplementation(() => { + const error = new Error("permission denied") as NodeJS.ErrnoException; + error.code = "EPERM"; + throw error; + }); + + mockExistsSync.mockImplementation((filePath) => String(filePath) === lockPath && lockFilePresent); + mockReadFile.mockImplementation(async (filePath: fs.PathLike | number) => { + const pathValue = String(filePath); + if (pathValue === lockPath) { + return "111"; + } + if (pathValue.includes(".stale")) { + return "111"; + } + return "{}"; + }); + mockRename.mockImplementation(async (source, destination) => { + if (String(source) === lockPath) { + lockFilePresent = false; + } + if (String(destination) === lockPath) { + lockFilePresent = true; + } + return undefined; + }); + mockWriteFile.mockImplementation(async (filePath) => { + const pathValue = String(filePath); + if (pathValue === lockPath) { + lockAttempts += 1; + if (lockAttempts === 1) { + const error = new Error("exists") as NodeJS.ErrnoException; + error.code = "EEXIST"; + throw error; + } + } + return undefined; + }); + + const { savePluginConfigMutation } = await import("../lib/config.js"); + + try { + await expect( + savePluginConfigMutation((current) => ({ + ...current, + experimental: { syncFromCodexMultiAuth: { enabled: true } }, + })), + ).resolves.toBeUndefined(); + expect(killSpy).toHaveBeenCalledWith(111, 0); + expect(mockRename).toHaveBeenCalled(); + } finally { + killSpy.mockRestore(); + } + }); + + it("fails cleanly when config rename stays EBUSY on Windows", async () => { + Object.defineProperty(process, "platform", { value: "win32" }); + const configPath = path.join(os.homedir(), ".opencode", "openai-codex-auth-config.json"); + + mockExistsSync.mockReturnValue(false); + mockRename.mockImplementation(async (source, destination) => { + if 
(String(source).includes(".tmp") && String(destination) === configPath) { + const error = new Error("busy") as NodeJS.ErrnoException; + error.code = "EBUSY"; + throw error; + } + return undefined; + }); + + const { savePluginConfigMutation } = await import("../lib/config.js"); + + await expect( + savePluginConfigMutation((current) => ({ + ...current, + experimental: { syncFromCodexMultiAuth: { enabled: true } }, + })), + ).rejects.toThrow("busy"); + expect(mockUnlink).toHaveBeenCalledWith(expect.stringContaining(".tmp")); + }); +}); diff --git a/test/plugin-config.test.ts b/test/plugin-config.test.ts index 1cf69951..fe433a4e 100644 --- a/test/plugin-config.test.ts +++ b/test/plugin-config.test.ts @@ -1,6 +1,7 @@ import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest'; import { loadPluginConfig, + savePluginConfigMutation, getCodexMode, getCodexTuiV2, getCodexTuiColorProfile, @@ -20,6 +21,8 @@ import { getRequestTransformMode, getFetchTimeoutMs, getStreamStallTimeoutMs, + getSyncFromCodexMultiAuthEnabled, + setSyncFromCodexMultiAuthEnabled, } from '../lib/config.js'; import type { PluginConfig } from '../lib/types.js'; import * as fs from 'node:fs'; @@ -34,6 +37,16 @@ vi.mock('node:fs', async () => { ...actual, existsSync: vi.fn(), readFileSync: vi.fn(), + promises: { + ...actual.promises, + mkdir: vi.fn(), + readFile: vi.fn(), + readdir: vi.fn(), + rename: vi.fn(), + stat: vi.fn(), + unlink: vi.fn(), + writeFile: vi.fn(), + }, }; }); @@ -49,6 +62,14 @@ vi.mock('../lib/logger.js', async () => { describe('Plugin Configuration', () => { const mockExistsSync = vi.mocked(fs.existsSync); const mockReadFileSync = vi.mocked(fs.readFileSync); + const mockMkdir = vi.mocked(fs.promises.mkdir); + const mockReadFile = vi.mocked(fs.promises.readFile); + const mockReaddir = vi.mocked(fs.promises.readdir); + const mockRename = vi.mocked(fs.promises.rename); + const mockStat = vi.mocked(fs.promises.stat); + const mockUnlink = vi.mocked(fs.promises.unlink); + const 
mockWriteFile = vi.mocked(fs.promises.writeFile); + const mockLogWarn = vi.mocked(logger.logWarn); const envKeys = [ 'CODEX_MODE', 'CODEX_TUI_V2', @@ -71,6 +92,15 @@ describe('Plugin Configuration', () => { originalEnv[key] = process.env[key]; } vi.clearAllMocks(); + mockExistsSync.mockReturnValue(false); + mockReadFileSync.mockReturnValue('{}'); + mockMkdir.mockResolvedValue(undefined); + mockReadFile.mockResolvedValue('{}'); + mockReaddir.mockResolvedValue([]); + mockRename.mockResolvedValue(undefined); + mockStat.mockResolvedValue({ mtimeMs: Date.now() } as fs.Stats); + mockUnlink.mockResolvedValue(undefined); + mockWriteFile.mockResolvedValue(undefined); }); afterEach(() => { @@ -171,13 +201,20 @@ describe('Plugin Configuration', () => { it('should merge user config with defaults', () => { mockExistsSync.mockReturnValue(true); - mockReadFileSync.mockReturnValue(JSON.stringify({})); + mockReadFileSync.mockReturnValue(JSON.stringify({ + experimental: { syncFromCodexMultiAuth: { enabled: true } }, + })); const config = loadPluginConfig(); expect(config).toEqual({ codexMode: true, requestTransformMode: 'native', + experimental: { + syncFromCodexMultiAuth: { + enabled: true, + }, + }, codexTuiV2: true, codexTuiColorProfile: 'truecolor', codexTuiGlyphMode: 'ascii', @@ -260,7 +297,7 @@ describe('Plugin Configuration', () => { fetchTimeoutMs: 60_000, streamStallTimeoutMs: 45_000, }); - expect(mockLogWarn).toHaveBeenCalled(); + expect(mockLogWarn).toHaveBeenCalled(); }); it('should handle file read errors gracefully', () => { @@ -739,5 +776,279 @@ describe('Plugin Configuration', () => { delete process.env.CODEX_AUTH_STREAM_STALL_TIMEOUT_MS; }); }); + + describe('experimental sync settings', () => { + it('defaults sync-from-codex-multi-auth to false', () => { + expect(getSyncFromCodexMultiAuthEnabled({})).toBe(false); + }); + + it('reads sync-from-codex-multi-auth from config', () => { + expect( + getSyncFromCodexMultiAuthEnabled({ + experimental: { + 
syncFromCodexMultiAuth: { + enabled: true, + }, + }, + }), + ).toBe(true); + }); + + it('persists sync-from-codex-multi-auth while preserving unrelated keys', async () => { + mockExistsSync.mockReturnValue(true); + mockReadFile.mockResolvedValue( + JSON.stringify({ + codexMode: false, + customKey: 'keep-me', + }), + ); + + await setSyncFromCodexMultiAuthEnabled(true); + + expect(mockMkdir).toHaveBeenCalledWith( + path.join(os.homedir(), '.opencode'), + { recursive: true }, + ); + expect(mockWriteFile).toHaveBeenCalledTimes(2); + // calls[0] is the lock file write, calls[1] is the temp config write + const [writtenPath, writtenContent] = mockWriteFile.mock.calls[1] ?? []; + expect(String(writtenPath)).toContain('.tmp'); + expect(mockRename).toHaveBeenCalled(); + expect(JSON.parse(String(writtenContent))).toEqual({ + codexMode: false, + customKey: 'keep-me', + experimental: { + syncFromCodexMultiAuth: { + enabled: true, + }, + }, + }); + expect(mockUnlink).not.toHaveBeenCalledWith( + path.join(os.homedir(), '.opencode', 'openai-codex-auth-config.json'), + ); + }); + + it('creates a new config file when enabling sync on a missing config', async () => { + mockExistsSync.mockReturnValue(false); + + await setSyncFromCodexMultiAuthEnabled(true); + + const [, writtenContent] = mockWriteFile.mock.calls[1] ?? 
[]; + expect(JSON.parse(String(writtenContent))).toEqual({ + experimental: { + syncFromCodexMultiAuth: { + enabled: true, + }, + }, + }); + }); + + it('throws when mutating an invalid existing config file to avoid clobbering it', async () => { + mockExistsSync.mockReturnValue(true); + mockReadFile.mockResolvedValue('invalid json'); + + await expect(savePluginConfigMutation((current) => current)).rejects.toThrow(); + expect(mockRename).not.toHaveBeenCalled(); + }); + + it('rejects array roots when reading raw plugin config', async () => { + mockExistsSync.mockReturnValue(true); + mockReadFile.mockResolvedValue('[]'); + + await expect(savePluginConfigMutation((current) => current)).rejects.toThrow( + 'Plugin config root must be a JSON object', + ); + }); + + it('throws when toggling sync setting on malformed config to preserve existing settings', async () => { + mockExistsSync.mockReturnValue(true); + mockReadFile.mockResolvedValue('invalid json'); + + await expect(setSyncFromCodexMultiAuthEnabled(true)).rejects.toThrow(); + expect(mockRename).not.toHaveBeenCalled(); + }); + + it('cleans up temp config files when the initial rename fails', async () => { + mockExistsSync.mockReturnValue(false); + mockRename.mockRejectedValueOnce(Object.assign(new Error('rename failed'), { code: 'EACCES' })); + + await expect(setSyncFromCodexMultiAuthEnabled(true)).rejects.toThrow('rename failed'); + expect(mockUnlink).toHaveBeenCalledWith(expect.stringContaining('.tmp')); + }); + + it('cleans up temp config files when the Windows fallback retry fails', async () => { + const originalPlatform = process.platform; + Object.defineProperty(process, 'platform', { value: 'win32' }); + mockExistsSync.mockImplementation((filePath) => + String(filePath).endsWith('openai-codex-auth-config.json'), + ); + let renameCalls = 0; + mockRename.mockImplementation(async (source, destination) => { + if (String(source).includes('.tmp') && String(destination).endsWith('openai-codex-auth-config.json')) { + 
renameCalls += 1; + if (renameCalls <= 2) { + throw Object.assign(new Error('rename failed'), { code: 'EPERM' }); + } + } + return undefined; + }); + + try { + await expect(setSyncFromCodexMultiAuthEnabled(true)).rejects.toThrow('rename failed'); + expect(mockUnlink).toHaveBeenCalledWith(expect.stringContaining('.tmp')); + } finally { + Object.defineProperty(process, 'platform', { value: originalPlatform }); + } + }); + + it('recovers stale config lock files before mutating config', async () => { + const configPath = path.join(os.homedir(), '.opencode', 'openai-codex-auth-config.json'); + const lockPath = `${configPath}.lock`; + const killSpy = vi.spyOn(process, 'kill').mockImplementation(() => { + const error = new Error('process not found') as NodeJS.ErrnoException; + error.code = 'ESRCH'; + throw error; + }); + mockExistsSync.mockReturnValue(true); + mockReadFile.mockImplementation(async (filePath: Parameters[0]) => { + if (String(filePath) === lockPath) { + return '424242'; + } + if (String(filePath).includes('.stale')) { + return '424242'; + } + return JSON.stringify({ codexMode: false }); + }); + mockWriteFile.mockImplementation(async (filePath) => { + if (String(filePath) === lockPath && mockWriteFile.mock.calls.length === 1) { + const error = new Error('exists') as NodeJS.ErrnoException; + error.code = 'EEXIST'; + throw error; + } + return undefined; + }); + + try { + await expect(setSyncFromCodexMultiAuthEnabled(true)).resolves.toBeUndefined(); + expect(mockUnlink).toHaveBeenCalledWith(expect.stringContaining('.stale')); + expect(killSpy).toHaveBeenCalledWith(424242, 0); + expect(mockRename).toHaveBeenCalled(); + } finally { + killSpy.mockRestore(); + } + }); + + it('sweeps old stale lock artifacts before acquiring the config lock', async () => { + const configPath = path.join(os.homedir(), '.opencode', 'openai-codex-auth-config.json'); + const stalePath = `${configPath}.lock.424242.777777.1700000000000.stale`; + mockReaddir.mockResolvedValue( + [ + { 
isFile: () => true, name: path.basename(stalePath) } as unknown as fs.Dirent, + ] as unknown as Awaited>, + ); + mockStat.mockResolvedValue({ + mtimeMs: Date.now() - (25 * 60 * 60 * 1000), + } as fs.Stats); + + await expect(setSyncFromCodexMultiAuthEnabled(true)).resolves.toBeUndefined(); + expect(mockUnlink).toHaveBeenCalledWith(stalePath); + }); + + it('warns when stale lock cleanup cannot remove a recovered stale file', async () => { + const configPath = path.join(os.homedir(), '.opencode', 'openai-codex-auth-config.json'); + const lockPath = `${configPath}.lock`; + const killSpy = vi.spyOn(process, 'kill').mockImplementation(() => { + const error = new Error('process not found') as NodeJS.ErrnoException; + error.code = 'ESRCH'; + throw error; + }); + mockExistsSync.mockReturnValue(true); + mockReadFile.mockImplementation(async (filePath: Parameters[0]) => { + if (String(filePath) === lockPath) { + return '424242'; + } + if (String(filePath).includes('.stale')) { + return '424242'; + } + return JSON.stringify({ codexMode: false }); + }); + mockWriteFile.mockImplementation(async (filePath) => { + if (String(filePath) === lockPath && mockWriteFile.mock.calls.length === 1) { + const error = new Error('exists') as NodeJS.ErrnoException; + error.code = 'EEXIST'; + throw error; + } + return undefined; + }); + mockUnlink.mockImplementation(async (filePath) => { + if (String(filePath).includes('.stale')) { + throw new Error('stale unlink blocked'); + } + return undefined; + }); + + try { + await expect(setSyncFromCodexMultiAuthEnabled(true)).resolves.toBeUndefined(); + expect(mockLogWarn).toHaveBeenCalledWith( + expect.stringContaining('Failed to remove stale plugin config lock artifact'), + ); + } finally { + killSpy.mockRestore(); + } + }); + + it('backs off when a live lock reappears during stale-lock recovery', async () => { + const configPath = path.join(os.homedir(), '.opencode', 'openai-codex-auth-config.json'); + const lockPath = `${configPath}.lock`; + const 
killSpy = vi.spyOn(process, 'kill').mockImplementation(() => { + const error = new Error('process not found') as NodeJS.ErrnoException; + error.code = 'ESRCH'; + throw error; + }); + let lockExistsChecks = 0; + mockExistsSync.mockImplementation((filePath) => { + const candidate = String(filePath); + if (candidate === configPath) { + return true; + } + if (candidate === lockPath) { + lockExistsChecks += 1; + return lockExistsChecks >= 1; + } + return false; + }); + mockReadFile.mockImplementation(async (filePath: Parameters[0]) => { + if (String(filePath) === lockPath || String(filePath).includes('.stale')) { + return '424242'; + } + return JSON.stringify({ codexMode: false }); + }); + let lockWriteAttempts = 0; + mockWriteFile.mockImplementation(async (filePath) => { + if (String(filePath) === lockPath) { + lockWriteAttempts += 1; + if (lockWriteAttempts === 1) { + const error = new Error('exists') as NodeJS.ErrnoException; + error.code = 'EEXIST'; + throw error; + } + } + return undefined; + }); + + try { + await expect(setSyncFromCodexMultiAuthEnabled(true)).resolves.toBeUndefined(); + expect(lockWriteAttempts).toBeGreaterThan(1); + expect( + mockRename.mock.calls.some( + ([source, destination]) => + String(source).includes('.stale') && String(destination) === lockPath, + ), + ).toBe(false); + } finally { + killSpy.mockRestore(); + } + }); + }); }); diff --git a/test/schemas.test.ts b/test/schemas.test.ts index 81283aff..561923ae 100644 --- a/test/schemas.test.ts +++ b/test/schemas.test.ts @@ -26,6 +26,11 @@ describe("PluginConfigSchema", () => { it("accepts valid full config", () => { const config = { codexMode: true, + experimental: { + syncFromCodexMultiAuth: { + enabled: true, + }, + }, fastSession: true, retryProfile: "balanced", retryBudgetOverrides: { diff --git a/test/storage.race.test.ts b/test/storage.race.test.ts new file mode 100644 index 00000000..4767ec73 --- /dev/null +++ b/test/storage.race.test.ts @@ -0,0 +1,136 @@ +import { afterEach, 
beforeEach, describe, expect, it, vi } from "vitest"; +import { promises as fs } from "node:fs"; +import { join } from "node:path"; +import { tmpdir } from "node:os"; + +describe("storage race paths", () => { + let testDir: string; + let exportPath: string; + + beforeEach(async () => { + testDir = await fs.mkdtemp(join(tmpdir(), "storage-race-")); + exportPath = join(testDir, "import.json"); + }); + + afterEach(async () => { + const storageModule = await import("../lib/storage.js"); + storageModule.setStoragePathDirect(null); + await fs.rm(testDir, { recursive: true, force: true }); + vi.restoreAllMocks(); + }); + + it("retries a transient EBUSY during import commit rename", async () => { + const storageModule = await import("../lib/storage.js"); + const originalRename = fs.rename.bind(fs); + let renameAttempts = 0; + + storageModule.setStoragePathDirect(join(testDir, "accounts.json")); + await fs.writeFile( + exportPath, + JSON.stringify({ + version: 3, + activeIndex: 0, + accounts: [{ accountId: "race-import", refreshToken: "race-refresh", addedAt: 1, lastUsed: 1 }], + }), + "utf8", + ); + + vi.spyOn(fs, "rename").mockImplementation(async (source, destination) => { + if (String(destination).endsWith("accounts.json")) { + renameAttempts += 1; + if (renameAttempts === 1) { + const error = new Error("busy") as NodeJS.ErrnoException; + error.code = "EBUSY"; + throw error; + } + } + return originalRename(source, destination); + }); + + const result = await storageModule.importAccounts(exportPath); + const loaded = await storageModule.loadAccounts(); + + expect(result.imported).toBe(1); + expect(renameAttempts).toBeGreaterThanOrEqual(2); + expect(loaded?.accounts).toHaveLength(1); + expect(loaded?.accounts[0]?.accountId).toBe("race-import"); + }); + + it("keeps duplicate-email cleanup stable under concurrent cleanup runs", async () => { + const storageModule = await import("../lib/storage.js"); + + storageModule.setStoragePathDirect(join(testDir, "accounts.json")); + 
await fs.writeFile( + join(testDir, "accounts.json"), + JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { email: "shared@example.com", refreshToken: "older", addedAt: 1, lastUsed: 1 }, + { email: "shared@example.com", refreshToken: "newer", addedAt: 2, lastUsed: 2 }, + { email: "unique@example.com", refreshToken: "unique", addedAt: 3, lastUsed: 3 }, + ], + }), + "utf8", + ); + + const results = await Promise.allSettled([ + storageModule.cleanupDuplicateEmailAccounts(), + storageModule.cleanupDuplicateEmailAccounts(), + ]); + const loaded = await storageModule.loadAccounts(); + + expect(results.every((result) => result.status === "fulfilled")).toBe(true); + expect(loaded?.accounts).toHaveLength(2); + expect(loaded?.accounts[0]?.refreshToken).toBe("newer"); + expect(loaded?.accounts[1]?.refreshToken).toBe("unique"); + }); + + it("serializes raw backups behind the storage lock during concurrent saves", async () => { + const storageModule = await import("../lib/storage.js"); + const originalRename = fs.rename.bind(fs); + const storagePath = join(testDir, "accounts.json"); + const backupPath = join(testDir, "backup.json"); + let releaseRename: (() => void) | null = null; + let backupFinished = false; + + storageModule.setStoragePathDirect(storagePath); + await storageModule.saveAccounts({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [{ accountId: "before", refreshToken: "before", addedAt: 1, lastUsed: 1 }], + }); + + vi.spyOn(fs, "rename").mockImplementation(async (source, destination) => { + if (String(destination) === storagePath && releaseRename === null) { + await new Promise((resolve) => { + releaseRename = resolve; + }); + } + return originalRename(source, destination); + }); + + const savePromise = storageModule.saveAccounts({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [{ accountId: "after", refreshToken: "after", addedAt: 2, lastUsed: 2 }], + }); + const 
backupPromise = storageModule.backupRawAccountsFile(backupPath).then(() => { + backupFinished = true; + }); + + await new Promise((resolve) => setTimeout(resolve, 50)); + expect(backupFinished).toBe(false); + releaseRename?.(); + + await Promise.all([savePromise, backupPromise]); + + const backup = JSON.parse(await fs.readFile(backupPath, "utf8")) as { + accounts: Array<{ accountId?: string }>; + }; + expect(backup.accounts[0]?.accountId).toBe("after"); + }); +}); diff --git a/test/storage.test.ts b/test/storage.test.ts index 94d20141..2121dcc3 100644 --- a/test/storage.test.ts +++ b/test/storage.test.ts @@ -21,6 +21,10 @@ import { previewImportAccounts, createTimestampedBackupPath, withAccountStorageTransaction, + withFlaggedAccountsTransaction, + loadAccountAndFlaggedStorageSnapshot, + previewDuplicateEmailCleanup, + cleanupDuplicateEmailAccounts, } from "../lib/storage.js"; // Mocking the behavior we're about to implement for TDD @@ -89,6 +93,196 @@ describe("storage", () => { expect(deduped[0]?.addedAt).toBe(now - 1500); expect(deduped[0]?.lastUsed).toBe(now); }); + + it("preserves org-scoped accounts that share an email during duplicate cleanup", async () => { + const testStoragePath = join( + tmpdir(), + `codex-clean-duplicate-emails-${Math.random().toString(36).slice(2)}.json`, + ); + setStoragePathDirect(testStoragePath); + + try { + await saveAccounts({ + version: 3, + activeIndex: 0, + activeIndexByFamily: { + codex: 0, + "gpt-5.1": 1, + }, + accounts: [ + { + accountId: "org-older", + organizationId: "org-older", + accountIdSource: "org", + email: "shared@example.com", + refreshToken: "rt-older", + addedAt: 1, + lastUsed: 1, + }, + { + accountId: "org-newer", + organizationId: "org-newer", + accountIdSource: "org", + email: "shared@example.com", + refreshToken: "rt-newer", + addedAt: 2, + lastUsed: 2, + }, + { + accountId: "org-unique", + organizationId: "org-unique", + accountIdSource: "org", + email: "unique@example.com", + refreshToken: "rt-unique", + 
addedAt: 3, + lastUsed: 3, + }, + ], + }); + + await expect(cleanupDuplicateEmailAccounts()).resolves.toEqual({ + before: 3, + after: 3, + removed: 0, + }); + + const loaded = await loadAccounts(); + expect(loaded?.accounts).toHaveLength(3); + expect(loaded?.accounts[0]).toMatchObject({ + accountId: "org-older", + organizationId: "org-older", + email: "shared@example.com", + refreshToken: "rt-older", + }); + expect(loaded?.accounts[1]?.accountId).toBe("org-newer"); + expect(loaded?.accounts[2]?.email).toBe("unique@example.com"); + expect(loaded?.activeIndex).toBe(0); + expect(loaded?.activeIndexByFamily?.codex).toBe(0); + expect(loaded?.activeIndexByFamily?.["gpt-5.1"]).toBe(1); + } finally { + setStoragePathDirect(null); + await fs.rm(testStoragePath, { force: true }); + } + }); + + it("cleans legacy duplicate emails and remaps active indices", async () => { + const testStoragePath = join( + tmpdir(), + `codex-clean-legacy-duplicate-emails-${Math.random().toString(36).slice(2)}.json`, + ); + setStoragePathDirect(testStoragePath); + + try { + await fs.writeFile( + testStoragePath, + JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: { + codex: 0, + "gpt-5.1": 1, + }, + accounts: [ + { + email: "shared@example.com", + refreshToken: "rt-older", + addedAt: 1, + lastUsed: 1, + }, + { + email: "shared@example.com", + refreshToken: "rt-newer", + addedAt: 2, + lastUsed: 2, + }, + { + email: "unique@example.com", + refreshToken: "rt-unique", + addedAt: 3, + lastUsed: 3, + }, + ], + }), + "utf8", + ); + + await expect(cleanupDuplicateEmailAccounts()).resolves.toEqual({ + before: 3, + after: 2, + removed: 1, + }); + + const loaded = await loadAccounts(); + expect(loaded?.accounts).toHaveLength(2); + expect(loaded?.accounts[0]).toMatchObject({ + email: "shared@example.com", + refreshToken: "rt-newer", + }); + expect(loaded?.accounts[1]?.email).toBe("unique@example.com"); + expect(loaded?.activeIndex).toBe(0); + 
expect(loaded?.activeIndexByFamily?.codex).toBe(0); + expect(loaded?.activeIndexByFamily?.["gpt-5.1"]).toBe(0); + } finally { + setStoragePathDirect(null); + await fs.rm(testStoragePath, { force: true }); + } + }); + + it.each(["EBUSY", "EACCES", "EPERM"] as const)( + "falls back to the current duplicate-cleanup snapshot when raw storage read fails with %s", + async (errorCode) => { + const testStoragePath = join( + tmpdir(), + `codex-clean-duplicate-email-fallback-${errorCode}-${Math.random().toString(36).slice(2)}.json`, + ); + setStoragePathDirect(testStoragePath); + + try { + await fs.writeFile( + testStoragePath, + JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { email: "shared@example.com", refreshToken: "older", addedAt: 1, lastUsed: 1 }, + { email: "shared@example.com", refreshToken: "newer", addedAt: 2, lastUsed: 2 }, + ], + }), + "utf8", + ); + + const originalReadFile = fs.readFile.bind(fs); + let readAttempts = 0; + const readSpy = vi.spyOn(fs, "readFile").mockImplementation(async (path, options) => { + readAttempts += 1; + if (String(path) === testStoragePath && readAttempts === 2) { + const error = new Error("locked") as NodeJS.ErrnoException; + error.code = errorCode; + throw error; + } + return originalReadFile(path, options as never); + }); + + try { + await expect(cleanupDuplicateEmailAccounts()).resolves.toEqual({ + before: 1, + after: 1, + removed: 0, + }); + } finally { + readSpy.mockRestore(); + } + + const loaded = await loadAccounts(); + expect(loaded?.accounts).toHaveLength(1); + expect(loaded?.accounts[0]?.refreshToken).toBe("newer"); + } finally { + setStoragePathDirect(null); + await fs.rm(testStoragePath, { force: true }); + } + }, + ); }); describe("import/export (TDD)", () => { @@ -591,11 +785,10 @@ describe("storage", () => { ); }); - it("should enforce MAX_ACCOUNTS during import", async () => { - // @ts-ignore - const { importAccounts } = await import("../lib/storage.js"); - - const 
manyAccounts = Array.from({ length: 21 }, (_, i) => ({ + it("allows importing more than the old account cap when unlimited", async () => { + const { importAccounts } = await import("../lib/storage.js"); + + const manyAccounts = Array.from({ length: 21 }, (_, i) => ({ accountId: `acct${i}`, refreshToken: `ref${i}`, addedAt: Date.now(), @@ -609,8 +802,40 @@ describe("storage", () => { }; await fs.writeFile(exportPath, JSON.stringify(toImport)); - // @ts-ignore - await expect(importAccounts(exportPath)).rejects.toThrow(/exceed maximum/); + await expect(importAccounts(exportPath)).resolves.toMatchObject({ + imported: 21, + total: 21, + skipped: 0, + }); + }); + + it("never reports a negative imported count when dedupe shrinks existing storage", async () => { + const { importAccounts } = await import("../lib/storage.js"); + + await saveAccounts({ + version: 3, + activeIndex: 0, + accounts: [ + { accountId: "existing-a", refreshToken: "shared-refresh", email: "shared@example.com", addedAt: 1, lastUsed: 1 }, + { accountId: "existing-b", refreshToken: "shared-refresh", email: "shared@example.com", addedAt: 2, lastUsed: 2 }, + ], + }); + + await fs.writeFile( + exportPath, + JSON.stringify({ + version: 3, + activeIndex: 0, + accounts: [ + { accountId: "existing-b", refreshToken: "shared-refresh", email: "shared@example.com", addedAt: 3, lastUsed: 3 }, + ], + }), + ); + + await expect(importAccounts(exportPath)).resolves.toMatchObject({ + imported: 0, + skipped: 1, + }); }); it("should fail export when no accounts exist", async () => { @@ -998,17 +1223,50 @@ describe("storage", () => { expect(result?.activeIndex).toBe(0); }); - it("clamps out-of-bounds activeIndex", () => { - const data = { + it("clamps out-of-bounds activeIndex", () => { + const data = { version: 3, activeIndex: 100, accounts: [{ refreshToken: "t1", accountId: "A" }, { refreshToken: "t2", accountId: "B" }], }; const result = normalizeAccountStorage(data); - expect(result?.activeIndex).toBe(1); - }); - - 
it("filters out accounts with empty refreshToken", () => { + expect(result?.activeIndex).toBe(1); + }); + + it("preview import never reports a negative imported count after deduplication", async () => { + const { previewImportAccountsWithExistingStorage } = await import("../lib/storage.js"); + const tempDir = await fs.mkdtemp(join(tmpdir(), "storage-preview-")); + const filePath = join(tempDir, "accounts.json"); + await fs.writeFile( + filePath, + JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { accountId: "org-a", organizationId: "org-a", accountIdSource: "org", refreshToken: "rt-a", addedAt: 1, lastUsed: 1 }, + ], + }), + "utf8", + ); + try { + const result = await previewImportAccountsWithExistingStorage(filePath, { + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { accountId: "org-a", organizationId: "org-a", accountIdSource: "org", refreshToken: "rt-a", addedAt: 2, lastUsed: 2 }, + { accountId: "org-a", organizationId: "org-a", accountIdSource: "org", refreshToken: "rt-a", addedAt: 3, lastUsed: 3 }, + ], + }); + expect(result.imported).toBe(0); + expect(result.skipped).toBeGreaterThanOrEqual(1); + } finally { + await fs.rm(tempDir, { recursive: true, force: true }); + } + }); + + it("filters out accounts with empty refreshToken", () => { const data = { version: 3, accounts: [ @@ -1511,6 +1769,70 @@ describe("storage", () => { expect(loaded.accounts).toHaveLength(1); expect(loaded.accounts[0]?.refreshToken).toBe("flagged-ebusy"); }); + + it("updates flagged storage atomically inside withFlaggedAccountsTransaction", async () => { + await saveFlaggedAccounts({ + version: 1, + accounts: [ + { + refreshToken: "flagged-keep", + accountId: "flagged-keep", + flaggedAt: 1, + addedAt: 1, + lastUsed: 1, + }, + { + refreshToken: "flagged-drop", + accountId: "flagged-drop", + flaggedAt: 2, + addedAt: 2, + lastUsed: 2, + }, + ], + }); + + await withFlaggedAccountsTransaction(async (current, persist) => { 
+ await persist({ + version: 1, + accounts: current.accounts.filter((account) => account.refreshToken !== "flagged-drop"), + }); + }); + + const loaded = await loadFlaggedAccounts(); + expect(loaded.accounts.map((account) => account.refreshToken)).toEqual(["flagged-keep"]); + }); + + it("reads accounts and flagged storage from one snapshot helper", async () => { + await saveAccounts({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { + refreshToken: "account-refresh", + accountId: "account-id", + addedAt: 1, + lastUsed: 1, + }, + ], + }); + await saveFlaggedAccounts({ + version: 1, + accounts: [ + { + refreshToken: "flagged-refresh", + accountId: "flagged-id", + flaggedAt: 1, + addedAt: 1, + lastUsed: 1, + }, + ], + }); + + const snapshot = await loadAccountAndFlaggedStorageSnapshot(); + expect(snapshot.accounts?.accounts.map((account) => account.refreshToken)).toEqual(["account-refresh"]); + expect(snapshot.flagged.accounts.map((account) => account.refreshToken)).toEqual(["flagged-refresh"]); + }); }); describe("setStoragePath", () => { @@ -1781,6 +2103,64 @@ describe("storage", () => { expect(existsSync(getStoragePath())).toBe(true); }); + it("migrates legacy project storage before duplicate-email cleanup on cold start", async () => { + const fakeHome = join(testWorkDir, "home-legacy-cleanup"); + const projectDir = join(testWorkDir, "project-legacy-cleanup"); + const projectGitDir = join(projectDir, ".git"); + const legacyProjectConfigDir = join(projectDir, ".opencode"); + const legacyStoragePath = join(legacyProjectConfigDir, "openai-codex-accounts.json"); + + await fs.mkdir(fakeHome, { recursive: true }); + await fs.mkdir(projectGitDir, { recursive: true }); + await fs.mkdir(legacyProjectConfigDir, { recursive: true }); + process.env.HOME = fakeHome; + process.env.USERPROFILE = fakeHome; + setStoragePath(projectDir); + + await fs.writeFile( + legacyStoragePath, + JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: 
{}, + accounts: [ + { + email: "shared@example.com", + refreshToken: "legacy-older", + addedAt: 1, + lastUsed: 1, + }, + { + email: "shared@example.com", + refreshToken: "legacy-newer", + addedAt: 2, + lastUsed: 2, + }, + ], + }), + "utf-8", + ); + + const result = await cleanupDuplicateEmailAccounts(); + expect(result).toEqual({ + before: 1, + after: 1, + removed: 0, + }); + expect(existsSync(legacyStoragePath)).toBe(false); + + const migrated = await loadAccounts(); + expect(migrated?.accounts).toHaveLength(1); + expect(migrated?.accounts[0]?.refreshToken).toBe("legacy-newer"); + + const preview = await previewDuplicateEmailCleanup(); + expect(preview).toEqual({ + before: 1, + after: 1, + removed: 0, + }); + }); + it("loads global storage as fallback when project-scoped storage is missing", async () => { const fakeHome = join(testWorkDir, "home-fallback"); const projectDir = join(testWorkDir, "project-fallback"); diff --git a/test/sync-prune-backup.test.ts b/test/sync-prune-backup.test.ts new file mode 100644 index 00000000..017b1835 --- /dev/null +++ b/test/sync-prune-backup.test.ts @@ -0,0 +1,88 @@ +import { describe, expect, it } from "vitest"; +import { createSyncPruneBackupPayload } from "../lib/sync-prune-backup.js"; +import type { AccountStorageV3 } from "../lib/storage.js"; + +describe("sync prune backup payload", () => { + it("omits live tokens from the prune backup payload", () => { + const storage: AccountStorageV3 = { + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { + accountId: "org-sync", + organizationId: "org-sync", + accountIdSource: "org", + refreshToken: "refresh-token", + accessToken: "access-token", + addedAt: 1, + lastUsed: 1, + }, + ], + }; + const payload = createSyncPruneBackupPayload(storage, { + version: 1, + accounts: [ + { + refreshToken: "refresh-token", + accessToken: "flagged-access-token", + }, + ], + }); + + expect(payload.accounts.accounts[0]).not.toHaveProperty("accessToken"); + 
expect(payload.accounts.accounts[0]).not.toHaveProperty("refreshToken"); + expect(payload.flagged.accounts[0]).not.toHaveProperty("accessToken"); + expect(payload.flagged.accounts[0]).not.toHaveProperty("refreshToken"); + }); + + it("deep-clones nested metadata so later mutations do not leak into the snapshot", () => { + const storage: AccountStorageV3 = { + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { + accountId: "org-sync", + organizationId: "org-sync", + accountIdSource: "org", + refreshToken: "refresh-token", + accessToken: "access-token", + accountTags: ["work"], + addedAt: 1, + lastUsed: 1, + lastSelectedModelByFamily: { + codex: "gpt-5.4", + }, + }, + ], + }; + const flagged = { + version: 1 as const, + accounts: [ + { + refreshToken: "refresh-token", + accessToken: "flagged-access-token", + metadata: { + source: "flagged", + }, + }, + ], + }; + + const payload = createSyncPruneBackupPayload(storage, flagged); + + storage.accounts[0]!.accountTags?.push("mutated"); + storage.accounts[0]!.lastSelectedModelByFamily = { codex: "gpt-5.5" }; + flagged.accounts[0]!.metadata.source = "mutated"; + + expect(payload.accounts.accounts[0]?.accountTags).toEqual(["work"]); + expect(payload.accounts.accounts[0]?.lastSelectedModelByFamily).toEqual({ codex: "gpt-5.4" }); + expect(payload.flagged.accounts[0]).toMatchObject({ + metadata: { + source: "flagged", + }, + }); + expect(payload.flagged.accounts[0]).not.toHaveProperty("refreshToken"); + }); +}); diff --git a/test/ui-select.test.ts b/test/ui-select.test.ts new file mode 100644 index 00000000..c4afe8fc --- /dev/null +++ b/test/ui-select.test.ts @@ -0,0 +1,77 @@ +import { describe, expect, it } from "vitest"; +import { coalesceTerminalInput, sanitizeAuditValue, tokenizeTerminalInput, type PendingInputSequence } from "../lib/ui/select.js"; + +describe("ui-select", () => { + it("reconstructs orphan bracket arrow chunks", () => { + const first = coalesceTerminalInput("[", null); + 
expect(first).toEqual({ + normalizedInput: "[", + pending: null, + }); + }); + + it("reconstructs escape-plus-bracket chunks", () => { + const first = coalesceTerminalInput("\u001b", null); + expect(first).toEqual({ + normalizedInput: null, + pending: { value: "\u001b", hasEscape: true }, + }); + + const second = coalesceTerminalInput("[", first.pending as PendingInputSequence); + expect(second).toEqual({ + normalizedInput: null, + pending: { value: "\u001b[", hasEscape: true }, + }); + + const third = coalesceTerminalInput("B", second.pending as PendingInputSequence); + expect(third).toEqual({ + normalizedInput: "\u001b[B", + pending: null, + }); + }); + + it("reconstructs compact orphan sequences", () => { + const result = coalesceTerminalInput("[B", null); + expect(result).toEqual({ + normalizedInput: "[B", + pending: null, + }); + }); + + it("keeps split CSI numeric tails pending until the final byte arrives", () => { + const first = coalesceTerminalInput("\u001b", null); + const second = coalesceTerminalInput("[", first.pending as PendingInputSequence); + const third = coalesceTerminalInput("1", second.pending as PendingInputSequence); + expect(third).toEqual({ + normalizedInput: null, + pending: { value: "\u001b[1", hasEscape: true }, + }); + + const fourth = coalesceTerminalInput("~", third.pending as PendingInputSequence); + expect(fourth).toEqual({ + normalizedInput: "\u001b[1~", + pending: null, + }); + }); + + it("tokenizes packed escape and control chunks", () => { + expect(tokenizeTerminalInput("\u001b[B\u0003")).toEqual(["\u001b[B", "\u0003"]); + }); + + it("tokenizes CSI tilde sequences without splitting numeric hotkeys", () => { + expect(tokenizeTerminalInput("\u001b[5~1")).toEqual(["\u001b[5~", "1"]); + }); + + it("tokenizes packed SS3 arrow sequences", () => { + expect(tokenizeTerminalInput("\u001bOA\u001bOB")).toEqual(["\u001bOA", "\u001bOB"]); + }); + + it("redacts subtitle and hint audit fields by key", () => { + 
expect(sanitizeAuditValue("subtitle", "sensitive subtitle")).toBe("[redacted:18]"); + expect(sanitizeAuditValue("hint", "sensitive hint")).toBe("[redacted:14]"); + }); + + it("redacts secret-like audit strings even when the key is not prelisted", () => { + expect(sanitizeAuditValue("custom", "sk-live-abcdefghijklmnopqrstuvwxyz0123456789")).toBe("[redacted-token]"); + }); +});