Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
37 changes: 35 additions & 2 deletions .env.example
Original file line number Diff line number Diff line change
@@ -1,3 +1,6 @@
# AI Provider API Keys Configuration
# Copy this file to .env.local and add your actual API keys

# REQUIRED - Sandboxes for code execution
# Get yours at https://e2b.dev
E2B_API_KEY=your_e2b_api_key_here
Expand All @@ -6,7 +9,33 @@ E2B_API_KEY=your_e2b_api_key_here
# Get yours at https://firecrawl.dev
FIRECRAWL_API_KEY=your_firecrawl_api_key_here

# OPTIONAL - AI Providers (need at least one)
# ==============================================
# MISTRAL AI CONFIGURATION
# ==============================================
# Get your API key from: https://console.mistral.ai/
MISTRAL_API_KEY=your_mistral_api_key_here

# ==============================================
# OPENROUTER CONFIGURATION
# ==============================================
# Get your API key from: https://openrouter.ai/keys
OPENROUTER_API_KEY=your_openrouter_api_key_here

# ==============================================
# OPENAI COMPATIBLE API CONFIGURATION
# ==============================================
# Configure these for your self-hosted or custom OpenAI-compatible API
OPENAI_COMPATIBLE_API_KEY=your_custom_api_key_here
OPENAI_COMPATIBLE_BASE_URL=https://your-custom-api-endpoint.com/v1
# Optional: Custom headers for OpenAI Compatible API (JSON format)
# OPENAI_COMPATIBLE_HEADERS={"X-Custom-Header": "value", "Authorization-Extra": "token"}

# ==============================================
# EXISTING PROVIDER CONFIGURATIONS
# ==============================================
# Add your existing provider API keys here
# Get yours at https://console.anthropic.com
ANTHROPIC_API_KEY=your_anthropic_api_key_here

Expand All @@ -17,4 +46,8 @@ OPENAI_API_KEY=your_openai_api_key_here
GEMINI_API_KEY=your_gemini_api_key_here

# Get yours at https://console.groq.com
GROQ_API_KEY=your_groq_api_key_here
GROQ_API_KEY=your_groq_api_key_here

# Default fallback provider if primary fails
# Options: 'anthropic', 'openai', 'mistral', 'openrouter', 'openai-compatible', 'google', 'groq'
AI_FALLBACK_PROVIDER=openrouter
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -56,3 +56,4 @@ e2b-template-*
*.temp
repomix-output.txt
bun.lockb
*.lock
64 changes: 61 additions & 3 deletions app/api/analyze-edit-intent/route.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,9 +3,38 @@ import { createGroq } from '@ai-sdk/groq';
import { createAnthropic } from '@ai-sdk/anthropic';
import { createOpenAI } from '@ai-sdk/openai';
import { createGoogleGenerativeAI } from '@ai-sdk/google';
import { createMistral } from '@ai-sdk/mistral';
import { createOpenRouter } from '@openrouter/ai-sdk-provider';
import { createOpenAICompatible } from '@ai-sdk/openai-compatible';
import { generateObject } from 'ai';
import { z } from 'zod';
import type { FileManifest } from '@/types/file-manifest';
import { appConfig } from '@/config/app.config';

// Helper function to get fallback provider
function getFallbackProvider() {
const fallbackProvider = appConfig.ai.fallbackProvider;

switch (fallbackProvider) {
case 'anthropic':
return anthropic('claude-4-sonnet');
case 'openai':
return openai('gpt-5');
case 'mistral':
return mistral('mistral-large-latest');
case 'openrouter':
return openrouter('qwen/qwen3-coder');
case 'openai-compatible':
return openaiCompatible('v0-1.5-md');
case 'google':
return createGoogleGenerativeAI({
apiKey: process.env.GEMINI_API_KEY,
})('gemini-2.5-pro');
case 'groq':
default:
return groq('moonshotai/kimi-k2-instruct');
}
}

const groq = createGroq({
apiKey: process.env.GROQ_API_KEY,
Expand All @@ -21,6 +50,21 @@ const openai = createOpenAI({
baseURL: process.env.OPENAI_BASE_URL,
});

const mistral = createMistral({
apiKey: process.env.MISTRAL_API_KEY,
});

const openrouter = createOpenRouter({
apiKey: process.env.OPENROUTER_API_KEY,
});

const openaiCompatible = createOpenAICompatible({
name: 'openai-compatible',
apiKey: process.env.OPENAI_COMPATIBLE_API_KEY,
baseURL: process.env.OPENAI_COMPATIBLE_BASE_URL || 'https://api.openai.com/v1',
headers: process.env.OPENAI_COMPATIBLE_HEADERS ? JSON.parse(process.env.OPENAI_COMPATIBLE_HEADERS) : undefined,
});

// Schema for the AI's search plan - not file selection!
const searchPlanSchema = z.object({
editType: z.enum([
Expand Down Expand Up @@ -104,10 +148,24 @@ export async function POST(request: NextRequest) {
aiModel = openai(model.replace('openai/', ''));
}
} else if (model.startsWith('google/')) {
aiModel = createGoogleGenerativeAI(model.replace('google/', ''));
aiModel = createGoogleGenerativeAI({
apiKey: process.env.GEMINI_API_KEY,
})(model.replace('google/', ''));
} else if (model.startsWith('mistral/')) {
aiModel = mistral(model.replace('mistral/', ''));
} else if (model.startsWith('openrouter/')) {
aiModel = openrouter(model.replace('openrouter/', ''));
} else if (model.startsWith('openai-compatible/')) {
aiModel = openaiCompatible(model.replace('openai-compatible/', ''));
} else if (model.startsWith('moonshotai/')) {
// Handle Moonshot AI via OpenRouter
aiModel = openrouter(model);
} else if (model.startsWith('groq/')) {
aiModel = groq(model.replace('groq/', ''));
} else {
// Default to groq if model format is unclear
aiModel = groq(model);
// Use configured fallback provider instead of hardcoded groq
console.log('[analyze-edit-intent] Using fallback provider:', appConfig.ai.fallbackProvider);
aiModel = getFallbackProvider();
}

console.log('[analyze-edit-intent] Using AI model:', model);
Expand Down
107 changes: 96 additions & 11 deletions app/api/generate-ai-code-stream/route.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,9 @@ import { createGroq } from '@ai-sdk/groq';
import { createAnthropic } from '@ai-sdk/anthropic';
import { createOpenAI } from '@ai-sdk/openai';
import { createGoogleGenerativeAI } from '@ai-sdk/google';
import { createMistral } from '@ai-sdk/mistral';
import { createOpenRouter } from '@openrouter/ai-sdk-provider';
import { createOpenAICompatible } from '@ai-sdk/openai-compatible';
import { streamText } from 'ai';
import type { SandboxState } from '@/types/sandbox';
import { selectFilesForEdit, getFileContents, formatFilesForAI } from '@/lib/context-selector';
Expand All @@ -11,6 +14,31 @@ import { FileManifest } from '@/types/file-manifest';
import type { ConversationState, ConversationMessage, ConversationEdit } from '@/types/conversation';
import { appConfig } from '@/config/app.config';

// Helper function to get fallback provider
function getFallbackProvider() {
const fallbackProvider = appConfig.ai.fallbackProvider;

switch (fallbackProvider) {
case 'anthropic':
return anthropic('claude-4-sonnet');
case 'openai':
return openai('gpt-5');
case 'mistral':
return mistral('mistral-large-latest');
case 'openrouter':
return openrouter('qwen/qwen3-coder');
case 'openai-compatible':
return openaiCompatible('v0-1.5-md');
case 'google':
return createGoogleGenerativeAI({
apiKey: process.env.GEMINI_API_KEY,
})('gemini-2.5-pro');
case 'groq':
default:
return groq('moonshotai/kimi-k2-instruct');
}
}

const groq = createGroq({
apiKey: process.env.GROQ_API_KEY,
});
Expand All @@ -28,6 +56,21 @@ const openai = createOpenAI({
apiKey: process.env.OPENAI_API_KEY,
});

const mistral = createMistral({
apiKey: process.env.MISTRAL_API_KEY,
});

const openrouter = createOpenRouter({
apiKey: process.env.OPENROUTER_API_KEY,
});

const openaiCompatible = createOpenAICompatible({
name: 'openai-compatible',
apiKey: process.env.OPENAI_COMPATIBLE_API_KEY,
baseURL: process.env.OPENAI_COMPATIBLE_BASE_URL || 'https://api.openai.com/v1',
headers: process.env.OPENAI_COMPATIBLE_HEADERS ? JSON.parse(process.env.OPENAI_COMPATIBLE_HEADERS) : undefined,
});

// Helper function to analyze user preferences from conversation history
function analyzeUserPreferences(messages: ConversationMessage[]): {
commonPatterns: string[];
Expand Down Expand Up @@ -1155,14 +1198,45 @@ CRITICAL: When files are provided in the context:
const isAnthropic = model.startsWith('anthropic/');
const isGoogle = model.startsWith('google/');
const isOpenAI = model.startsWith('openai/gpt-5');
const modelProvider = isAnthropic ? anthropic : (isOpenAI ? openai : (isGoogle ? googleGenerativeAI : groq));
const actualModel = isAnthropic ? model.replace('anthropic/', '') :
(model === 'openai/gpt-5') ? 'gpt-5' :
(isGoogle ? model.replace('google/', '') : model);
const isMistral = model.startsWith('mistral/');
const isOpenRouter = model.startsWith('openrouter/') || model.startsWith('moonshotai/');
const isOpenAICompatible = model.startsWith('openai-compatible/');
const isGroq = model.startsWith('groq/') || model.includes('gpt-oss');

let modelProvider;
let actualModel;

if (isAnthropic) {
modelProvider = anthropic;
actualModel = model.replace('anthropic/', '');
} else if (isOpenAI) {
modelProvider = openai;
actualModel = 'gpt-5';
} else if (isGoogle) {
modelProvider = googleGenerativeAI;
actualModel = model.replace('google/', '');
} else if (isMistral) {
modelProvider = mistral;
actualModel = model.replace('mistral/', '');
} else if (isOpenRouter) {
modelProvider = openrouter;
actualModel = model.startsWith('openrouter/') ? model.replace('openrouter/', '') : model;
} else if (isOpenAICompatible) {
modelProvider = openaiCompatible;
actualModel = model.replace('openai-compatible/', '');
} else if (isGroq) {
modelProvider = groq;
actualModel = model;
} else {
// Use configured fallback provider
console.log('[generate-ai-code-stream] Using fallback provider:', appConfig.ai.fallbackProvider);
modelProvider = getFallbackProvider();
actualModel = model;
}

// Make streaming API call with appropriate provider
const streamOptions: any = {
model: modelProvider(actualModel),
model: typeof modelProvider === 'function' ? modelProvider(actualModel) : modelProvider,
messages: [
{
role: 'system',
Expand Down Expand Up @@ -1588,23 +1662,34 @@ Provide the complete file content without any truncation. Include all necessary
let completionClient;
if (model.includes('gpt') || model.includes('openai')) {
completionClient = openai;
} else if (model.includes('claude')) {
} else if (model.includes('claude') || model.startsWith('anthropic/')) {
completionClient = anthropic;
} else {
} else if (model.startsWith('mistral/')) {
completionClient = mistral;
} else if (model.startsWith('openrouter/') || model.startsWith('moonshotai/')) {
completionClient = openrouter;
} else if (model.startsWith('openai-compatible/')) {
completionClient = openaiCompatible;
} else if (model.startsWith('google/')) {
completionClient = googleGenerativeAI;
} else if (model.startsWith('groq/')) {
completionClient = groq;
} else {
// Use configured fallback provider
console.log('[generate-ai-code-stream] Using fallback provider for completion:', appConfig.ai.fallbackProvider);
completionClient = getFallbackProvider();
}

const completionResult = await streamText({
model: completionClient(modelMapping[model] || model),
model: typeof completionClient === 'function' ? completionClient(model.replace(/^[^/]+\//, '')) : completionClient,
messages: [
{
role: 'system',
content: 'You are completing a truncated file. Provide the complete, working file content.'
},
{ role: 'user', content: completionPrompt }
],
temperature: isGPT5 ? undefined : appConfig.ai.defaultTemperature,
maxTokens: appConfig.ai.truncationRecoveryMaxTokens
temperature: model.startsWith('openai/gpt-5') ? undefined : appConfig.ai.defaultTemperature
});

// Get the full text from the stream
Expand Down
17 changes: 14 additions & 3 deletions config/app.config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -34,21 +34,32 @@ export const appConfig = {
availableModels: [
'openai/gpt-5',
'moonshotai/kimi-k2-instruct',
'anthropic/claude-sonnet-4-20250514',
'google/gemini-2.5-pro'
'anthropic/claude-sonnet-4',
'google/gemini-2.5-pro',
'mistral/mistral-large-latest',
'openrouter/qwen/qwen3-coder',
'openai-compatible/v0-1.5-md',
'groq/moonshotai/kimi-k2-instruct',
],

// Model display names
modelDisplayNames: {
'openai/gpt-5': 'GPT-5',
'moonshotai/kimi-k2-instruct': 'Kimi K2 Instruct',
'anthropic/claude-sonnet-4-20250514': 'Sonnet 4',
'google/gemini-2.5-pro': 'Gemini 2.5 Pro'
'google/gemini-2.5-pro': 'Gemini 2.5 Pro',
'mistral/mistral-large-latest': 'Mistral Large',
'openrouter/qwen/qwen3-coder': 'Qwen 3 Coder',
'openai-compatible/v0-1.5-md': 'v0 1.5 Medium',
'groq/moonshotai/kimi-k2-instruct': 'Kimi K2 Instruct'
},

// Temperature settings for non-reasoning models
defaultTemperature: 0.7,

// Fallback provider configuration
fallbackProvider: process.env.AI_FALLBACK_PROVIDER || 'openrouter',

// Max tokens for code generation
maxTokens: 8000,

Expand Down
Loading