diff --git a/apps/web/app/docs/ai-sdk/page.tsx b/apps/web/app/docs/ai-sdk/page.tsx index 9dbd4ad..aee8eae 100644 --- a/apps/web/app/docs/ai-sdk/page.tsx +++ b/apps/web/app/docs/ai-sdk/page.tsx @@ -33,7 +33,7 @@ export async function POST(req: Request) { : ''; const result = streamText({ - model: 'anthropic/claude-haiku-4.5', + model: process.env.AI_GATEWAY_MODEL || 'anthropic/claude-haiku-4.5', system: systemPrompt + contextPrompt, prompt, }); @@ -46,6 +46,60 @@ export async function POST(req: Request) { }); }`} +

Choosing a Model

+

+ We recommend starting with smaller, faster models like Haiku for most UI + generation tasks. Use the{" "} + AI_GATEWAY_MODEL environment + variable to switch models without changing code. +

+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ModelBest ForCostSpeed
claude-haiku-4.5Most UI tasks, fast iterations$Fastest
claude-sonnet-4.5Complex logic, creative layouts$$Fast
claude-opus-4.1Complex reasoning (overkill for UI)$$$Slow
+
+ +

+ See the full list of available models in the{" "} + + Vercel AI Gateway documentation + + . +

+

Client-Side Hook

Use useUIStream on the client: diff --git a/apps/web/app/docs/quick-start/page.tsx b/apps/web/app/docs/quick-start/page.tsx index 9852384..298eeef 100644 --- a/apps/web/app/docs/quick-start/page.tsx +++ b/apps/web/app/docs/quick-start/page.tsx @@ -100,7 +100,7 @@ export async function POST(req: Request) { const systemPrompt = generateCatalogPrompt(catalog); const result = streamText({ - model: 'anthropic/claude-haiku-4.5', + model: process.env.AI_GATEWAY_MODEL || 'anthropic/claude-haiku-4.5', system: systemPrompt, prompt, });