diff --git a/src/ai-brain.ts b/src/ai-brain.ts
index 67d8d97..c0bcb08 100644
--- a/src/ai-brain.ts
+++ b/src/ai-brain.ts
@@ -303,6 +303,7 @@ export class AIBrain {
     ollama: 'http://localhost:11434/v1',
     kimi: 'https://api.moonshot.cn/v1',
     openai: 'https://api.openai.com/v1',
+    gemini: 'https://generativelanguage.googleapis.com/v1beta/openai',
   };
 
   private async callLLM(systemPrompt: string): Promise {
diff --git a/src/index.ts b/src/index.ts
index e6ca3ea..883ae7c 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -103,6 +103,12 @@ program
       baseUrl: opts.baseUrl,
     });
 
+    // Read saved pipeline config for model name fallbacks
+    const { loadPipelineConfig } = await import('./doctor');
+    const savedPipeline = loadPipelineConfig();
+    const pipelineTextModel = savedPipeline?.layer2?.model || '';
+    const pipelineVisionModel = savedPipeline?.layer3?.model || '';
+
     const config: ClawdConfig = {
       ...DEFAULT_CONFIG,
       server: {
@@ -117,8 +123,8 @@ program
         textApiKey: resolvedApi.textApiKey,
         visionBaseUrl: resolvedApi.visionBaseUrl,
         visionApiKey: resolvedApi.visionApiKey,
-        model: opts.textModel || resolvedApi.textModel || opts.model || DEFAULT_CONFIG.ai.model,
-        visionModel: opts.visionModel || resolvedApi.visionModel || opts.model || DEFAULT_CONFIG.ai.visionModel,
+        model: opts.textModel || resolvedApi.textModel || opts.model || pipelineTextModel || DEFAULT_CONFIG.ai.model,
+        visionModel: opts.visionModel || resolvedApi.visionModel || opts.model || pipelineVisionModel || DEFAULT_CONFIG.ai.visionModel,
       },
       debug: opts.debug || false,
     };
diff --git a/src/openclaw-credentials.ts b/src/openclaw-credentials.ts
index ced0ecf..be49308 100644
--- a/src/openclaw-credentials.ts
+++ b/src/openclaw-credentials.ts
@@ -78,6 +78,7 @@ function inferProviderFromBaseUrl(baseUrl?: string): string | undefined {
   if (url.includes('groq')) return 'groq';
   if (url.includes('together')) return 'together';
   if (url.includes('deepseek')) return 'deepseek';
+  if (url.includes('generativelanguage') || url.includes('gemini')) return 'gemini';
   if (url.includes('nvidia') || url.includes('integrate.api')) return 'nvidia';
   if (url.includes('mistral')) return 'mistral';
   if (url.includes('fireworks')) return 'fireworks';
@@ -484,7 +485,7 @@ export function resolveApiConfig(opts?: {
   if (localApiKey || localBaseUrl || localTextModel || localVisionModel || opts?.provider) {
     return {
       apiKey: localApiKey,
-      provider: normalizeProvider(opts?.provider) || inferProviderFromBaseUrl(localBaseUrl),
+      provider: normalizeProvider(opts?.provider) || inferProviderFromBaseUrl(localBaseUrl) || localProvider,
       baseUrl: localBaseUrl,
       textModel: localTextModel,
       visionModel: localVisionModel,
@@ -498,7 +499,7 @@ export function resolveApiConfig(opts?: {
 
   return {
     apiKey: localApiKey,
-    provider: inferProviderFromBaseUrl(localBaseUrl),
+    provider: inferProviderFromBaseUrl(localBaseUrl) || localProvider,
     baseUrl: localBaseUrl,
     textModel: localTextModel,
     visionModel: localVisionModel,
diff --git a/src/providers.ts b/src/providers.ts
index c457d4a..b30358d 100644
--- a/src/providers.ts
+++ b/src/providers.ts
@@ -90,6 +90,15 @@ export const PROVIDERS: Record = {
     openaiCompat: true,
     computerUse: false,
   },
+  gemini: {
+    name: 'Google Gemini',
+    baseUrl: 'https://generativelanguage.googleapis.com/v1beta/openai',
+    authHeader: (key) => ({ 'Authorization': `Bearer ${key}` }),
+    textModel: 'gemini-2.0-flash',
+    visionModel: 'gemini-2.0-flash',
+    openaiCompat: true,
+    computerUse: false,
+  },
   generic: {
     name: 'OpenAI-Compatible',
     baseUrl: '', // set from config
@@ -244,6 +253,7 @@ const PROVIDER_ENV_VARS: Record = {
   groq: ['GROQ_API_KEY'],
   together: ['TOGETHER_API_KEY'],
   deepseek: ['DEEPSEEK_API_KEY'],
+  gemini: ['GEMINI_API_KEY'],
 };
 
 /**