diff --git a/README.md b/README.md index 6b976795..4a39c9af 100644 --- a/README.md +++ b/README.md @@ -58,7 +58,7 @@ Kite is a lightweight, modern Kubernetes dashboard that unifies real-time observ - Live pod logs with filtering and search - Web terminal for pods and nodes - Built-in kubectl console. -- AI assistant. +- AI assistant with multi-provider support (OpenAI, Anthropic, [MiniMax](https://www.minimax.io)). ### Security diff --git a/README_zh.md b/README_zh.md index 053f7c3e..5675e87e 100644 --- a/README_zh.md +++ b/README_zh.md @@ -58,7 +58,7 @@ Kite是一款轻量级、现代化的Kubernetes仪表板工具,它将实时可 - 支持过滤和搜索的实时 Pod 日志 - 面向 Pod 和 Node 的 Web 终端 - 内置 kubectl 控制台 -- AI 助手 +- AI 助手,支持多种 AI 供应商(OpenAI、Anthropic、[MiniMax](https://www.minimaxi.com)) ### 安全 diff --git a/pkg/ai/agent.go b/pkg/ai/agent.go index befa3f34..55753ea8 100644 --- a/pkg/ai/agent.go +++ b/pkg/ai/agent.go @@ -140,6 +140,12 @@ func NewAgent(cs *cluster.ClientSet, cfg *RuntimeConfig) (*Agent, error) { return nil, err } agent.anthropicClient = client + case model.GeneralAIProviderMiniMax: + client, err := NewMiniMaxClient(cfg) + if err != nil { + return nil, err + } + agent.openaiClient = client default: client, err := NewOpenAIClient(cfg) if err != nil { @@ -290,6 +296,7 @@ func buildContextualSystemPrompt(pageCtx *PageContext, runtimeCtx runtimePromptC } // ProcessChat runs the AI conversation loop and sends SSE events via the callback. +// MiniMax uses an OpenAI-compatible API, so it shares the OpenAI code path. 
func (a *Agent) ProcessChat(c *gin.Context, req *ChatRequest, sendEvent func(SSEEvent)) { switch a.provider { case model.GeneralAIProviderAnthropic: diff --git a/pkg/ai/config.go b/pkg/ai/config.go index ea5c20dc..facd0795 100644 --- a/pkg/ai/config.go +++ b/pkg/ai/config.go @@ -28,10 +28,14 @@ func defaultModelForProvider(provider string) string { return model.DefaultGeneralAIModelByProvider(provider) } +const defaultMiniMaxBaseURL = "https://api.minimax.io/v1" + func providerLabel(provider string) string { switch provider { case model.GeneralAIProviderAnthropic: return "Anthropic" + case model.GeneralAIProviderMiniMax: + return "MiniMax" default: return "OpenAI" } @@ -104,3 +108,28 @@ func NewAnthropicClient(cfg *RuntimeConfig) (anthropic.Client, error) { return anthropic.NewClient(opts...), nil } + +// NewMiniMaxClient creates an OpenAI-compatible client configured for the +// MiniMax API (https://api.minimax.io/v1). MiniMax models are fully +// compatible with the OpenAI chat completions format, so the standard +// openai-go SDK is reused with MiniMax's base URL and API key. 
+func NewMiniMaxClient(cfg *RuntimeConfig) (openai.Client, error) { + if cfg == nil || !cfg.Enabled { + return openai.Client{}, fmt.Errorf("AI is not enabled") + } + if normalizeProvider(cfg.Provider) != model.GeneralAIProviderMiniMax { + return openai.Client{}, fmt.Errorf("AI provider %s is not supported by MiniMax client", providerLabel(cfg.Provider)) + } + + opts := make([]openaioption.RequestOption, 0, 2) + if cfg.APIKey != "" { + opts = append(opts, openaioption.WithAPIKey(cfg.APIKey)) + } + baseURL := cfg.BaseURL + if baseURL == "" { + baseURL = defaultMiniMaxBaseURL + } + opts = append(opts, openaioption.WithBaseURL(baseURL)) + + return openai.NewClient(opts...), nil +} diff --git a/pkg/ai/config_test.go b/pkg/ai/config_test.go new file mode 100644 index 00000000..ed5bc7bf --- /dev/null +++ b/pkg/ai/config_test.go @@ -0,0 +1,183 @@ +package ai + +import ( + "testing" + + "github.com/zxh326/kite/pkg/model" +) + +func TestNormalizeProvider(t *testing.T) { + tests := []struct { + name string + input string + expected string + }{ + {name: "openai lowercase", input: "openai", expected: model.GeneralAIProviderOpenAI}, + {name: "openai uppercase", input: "OpenAI", expected: model.GeneralAIProviderOpenAI}, + {name: "anthropic lowercase", input: "anthropic", expected: model.GeneralAIProviderAnthropic}, + {name: "anthropic uppercase", input: "Anthropic", expected: model.GeneralAIProviderAnthropic}, + {name: "minimax lowercase", input: "minimax", expected: model.GeneralAIProviderMiniMax}, + {name: "minimax mixed case", input: "MiniMax", expected: model.GeneralAIProviderMiniMax}, + {name: "minimax uppercase", input: "MINIMAX", expected: model.GeneralAIProviderMiniMax}, + {name: "empty defaults to openai", input: "", expected: model.GeneralAIProviderOpenAI}, + {name: "unknown defaults to openai", input: "unknown", expected: model.GeneralAIProviderOpenAI}, + {name: "whitespace trimmed", input: " minimax ", expected: model.GeneralAIProviderMiniMax}, + } + + for _, tc := 
range tests { + t.Run(tc.name, func(t *testing.T) { + result := normalizeProvider(tc.input) + if result != tc.expected { + t.Fatalf("normalizeProvider(%q) = %q, want %q", tc.input, result, tc.expected) + } + }) + } +} + +func TestDefaultModelForProvider(t *testing.T) { + tests := []struct { + name string + provider string + expected string + }{ + {name: "openai default model", provider: model.GeneralAIProviderOpenAI, expected: model.DefaultGeneralAIModel}, + {name: "anthropic default model", provider: model.GeneralAIProviderAnthropic, expected: model.DefaultGeneralAnthropicModel}, + {name: "minimax default model", provider: model.GeneralAIProviderMiniMax, expected: model.DefaultGeneralMiniMaxModel}, + {name: "empty defaults to openai model", provider: "", expected: model.DefaultGeneralAIModel}, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + result := defaultModelForProvider(tc.provider) + if result != tc.expected { + t.Fatalf("defaultModelForProvider(%q) = %q, want %q", tc.provider, result, tc.expected) + } + }) + } +} + +func TestProviderLabel(t *testing.T) { + tests := []struct { + name string + provider string + expected string + }{ + {name: "openai label", provider: model.GeneralAIProviderOpenAI, expected: "OpenAI"}, + {name: "anthropic label", provider: model.GeneralAIProviderAnthropic, expected: "Anthropic"}, + {name: "minimax label", provider: model.GeneralAIProviderMiniMax, expected: "MiniMax"}, + {name: "unknown defaults to openai", provider: "unknown", expected: "OpenAI"}, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + result := providerLabel(tc.provider) + if result != tc.expected { + t.Fatalf("providerLabel(%q) = %q, want %q", tc.provider, result, tc.expected) + } + }) + } +} + +func TestIsGeneralAIProviderSupported(t *testing.T) { + tests := []struct { + name string + provider string + expected bool + }{ + {name: "openai supported", provider: "openai", expected: true}, + {name: "anthropic 
supported", provider: "anthropic", expected: true}, + {name: "minimax supported", provider: "minimax", expected: true}, + {name: "MiniMax mixed case supported", provider: "MiniMax", expected: true}, + {name: "unknown not supported", provider: "unknown", expected: false}, + {name: "empty not supported", provider: "", expected: false}, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + result := model.IsGeneralAIProviderSupported(tc.provider) + if result != tc.expected { + t.Fatalf("IsGeneralAIProviderSupported(%q) = %v, want %v", tc.provider, result, tc.expected) + } + }) + } +} + +func TestNewOpenAIClientRejectsWrongProvider(t *testing.T) { + cfg := &RuntimeConfig{ + Enabled: true, + Provider: model.GeneralAIProviderMiniMax, + APIKey: "test-key", + } + _, err := NewOpenAIClient(cfg) + if err == nil { + t.Fatal("expected error when creating OpenAI client with MiniMax provider") + } +} + +func TestNewMiniMaxClientRejectsWrongProvider(t *testing.T) { + cfg := &RuntimeConfig{ + Enabled: true, + Provider: model.GeneralAIProviderOpenAI, + APIKey: "test-key", + } + _, err := NewMiniMaxClient(cfg) + if err == nil { + t.Fatal("expected error when creating MiniMax client with OpenAI provider") + } +} + +func TestNewMiniMaxClientDisabled(t *testing.T) { + cfg := &RuntimeConfig{ + Enabled: false, + Provider: model.GeneralAIProviderMiniMax, + APIKey: "test-key", + } + _, err := NewMiniMaxClient(cfg) + if err == nil { + t.Fatal("expected error when AI is disabled") + } +} + +func TestNewMiniMaxClientNilConfig(t *testing.T) { + _, err := NewMiniMaxClient(nil) + if err == nil { + t.Fatal("expected error with nil config") + } +} + +func TestNewMiniMaxClientSuccess(t *testing.T) { + cfg := &RuntimeConfig{ + Enabled: true, + Provider: model.GeneralAIProviderMiniMax, + APIKey: "test-minimax-key", + } + _, err := NewMiniMaxClient(cfg) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } +} + +func TestNewMiniMaxClientCustomBaseURL(t *testing.T) { + cfg 
:= &RuntimeConfig{ + Enabled: true, + Provider: model.GeneralAIProviderMiniMax, + APIKey: "test-minimax-key", + BaseURL: "https://custom.minimax.example.com/v1", + } + _, err := NewMiniMaxClient(cfg) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } +} + +func TestNewAnthropicClientRejectsMiniMax(t *testing.T) { + cfg := &RuntimeConfig{ + Enabled: true, + Provider: model.GeneralAIProviderMiniMax, + APIKey: "test-key", + } + _, err := NewAnthropicClient(cfg) + if err == nil { + t.Fatal("expected error when creating Anthropic client with MiniMax provider") + } +} diff --git a/pkg/model/general_setting.go b/pkg/model/general_setting.go index 36ede60b..9680e581 100644 --- a/pkg/model/general_setting.go +++ b/pkg/model/general_setting.go @@ -10,11 +10,13 @@ import ( const DefaultGeneralAIModel = "gpt-4o-mini" const DefaultGeneralAnthropicModel = "claude-sonnet-4-5" +const DefaultGeneralMiniMaxModel = "MiniMax-M2.7" const DefaultGeneralKubectlImage = "zzde/kubectl:latest" const DefaultGeneralNodeTerminalImage = "busybox:latest" const GeneralAIProviderOpenAI = "openai" const GeneralAIProviderAnthropic = "anthropic" +const GeneralAIProviderMiniMax = "minimax" const DefaultGeneralAIProvider = GeneralAIProviderOpenAI func DefaultGeneralNodeTerminalImageValue() string { @@ -44,6 +46,8 @@ func NormalizeGeneralAIProvider(provider string) string { switch strings.ToLower(strings.TrimSpace(provider)) { case GeneralAIProviderAnthropic: return GeneralAIProviderAnthropic + case GeneralAIProviderMiniMax: + return GeneralAIProviderMiniMax default: return GeneralAIProviderOpenAI } @@ -51,13 +55,17 @@ func NormalizeGeneralAIProvider(provider string) string { func IsGeneralAIProviderSupported(provider string) bool { normalized := strings.ToLower(strings.TrimSpace(provider)) - return normalized == GeneralAIProviderOpenAI || normalized == GeneralAIProviderAnthropic + return normalized == GeneralAIProviderOpenAI || + normalized == GeneralAIProviderAnthropic || + normalized == 
GeneralAIProviderMiniMax } func DefaultGeneralAIModelByProvider(provider string) string { switch NormalizeGeneralAIProvider(provider) { case GeneralAIProviderAnthropic: return DefaultGeneralAnthropicModel + case GeneralAIProviderMiniMax: + return DefaultGeneralMiniMaxModel default: return DefaultGeneralAIModel } diff --git a/pkg/model/general_setting_test.go b/pkg/model/general_setting_test.go new file mode 100644 index 00000000..ef4f6419 --- /dev/null +++ b/pkg/model/general_setting_test.go @@ -0,0 +1,91 @@ +package model + +import "testing" + +func TestNormalizeGeneralAIProvider(t *testing.T) { + tests := []struct { + name string + input string + expected string + }{ + {name: "openai", input: "openai", expected: GeneralAIProviderOpenAI}, + {name: "OpenAI uppercase", input: "OpenAI", expected: GeneralAIProviderOpenAI}, + {name: "anthropic", input: "anthropic", expected: GeneralAIProviderAnthropic}, + {name: "Anthropic uppercase", input: "Anthropic", expected: GeneralAIProviderAnthropic}, + {name: "minimax", input: "minimax", expected: GeneralAIProviderMiniMax}, + {name: "MiniMax mixed case", input: "MiniMax", expected: GeneralAIProviderMiniMax}, + {name: "MINIMAX uppercase", input: "MINIMAX", expected: GeneralAIProviderMiniMax}, + {name: "empty defaults to openai", input: "", expected: GeneralAIProviderOpenAI}, + {name: "unknown defaults to openai", input: "gemini", expected: GeneralAIProviderOpenAI}, + {name: "whitespace trimmed", input: " minimax ", expected: GeneralAIProviderMiniMax}, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + result := NormalizeGeneralAIProvider(tc.input) + if result != tc.expected { + t.Fatalf("NormalizeGeneralAIProvider(%q) = %q, want %q", tc.input, result, tc.expected) + } + }) + } +} + +func TestIsGeneralAIProviderSupported(t *testing.T) { + tests := []struct { + name string + provider string + expected bool + }{ + {name: "openai supported", provider: "openai", expected: true}, + {name: "anthropic 
supported", provider: "anthropic", expected: true}, + {name: "minimax supported", provider: "minimax", expected: true}, + {name: "MiniMax mixed case", provider: "MiniMax", expected: true}, + {name: "MINIMAX uppercase", provider: "MINIMAX", expected: true}, + {name: "unknown not supported", provider: "unknown", expected: false}, + {name: "empty not supported", provider: "", expected: false}, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + result := IsGeneralAIProviderSupported(tc.provider) + if result != tc.expected { + t.Fatalf("IsGeneralAIProviderSupported(%q) = %v, want %v", tc.provider, result, tc.expected) + } + }) + } +} + +func TestDefaultGeneralAIModelByProvider(t *testing.T) { + tests := []struct { + name string + provider string + expected string + }{ + {name: "openai default", provider: GeneralAIProviderOpenAI, expected: DefaultGeneralAIModel}, + {name: "anthropic default", provider: GeneralAIProviderAnthropic, expected: DefaultGeneralAnthropicModel}, + {name: "minimax default", provider: GeneralAIProviderMiniMax, expected: DefaultGeneralMiniMaxModel}, + {name: "empty defaults to openai model", provider: "", expected: DefaultGeneralAIModel}, + {name: "unknown defaults to openai model", provider: "gemini", expected: DefaultGeneralAIModel}, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + result := DefaultGeneralAIModelByProvider(tc.provider) + if result != tc.expected { + t.Fatalf("DefaultGeneralAIModelByProvider(%q) = %q, want %q", tc.provider, result, tc.expected) + } + }) + } +} + +func TestMiniMaxProviderConstant(t *testing.T) { + if GeneralAIProviderMiniMax != "minimax" { + t.Fatalf("GeneralAIProviderMiniMax = %q, want %q", GeneralAIProviderMiniMax, "minimax") + } +} + +func TestMiniMaxDefaultModel(t *testing.T) { + if DefaultGeneralMiniMaxModel != "MiniMax-M2.7" { + t.Fatalf("DefaultGeneralMiniMaxModel = %q, want %q", DefaultGeneralMiniMaxModel, "MiniMax-M2.7") + } +} diff --git 
a/ui/src/components/settings/general-management.tsx b/ui/src/components/settings/general-management.tsx index 066de7c5..986b3c5d 100644 --- a/ui/src/components/settings/general-management.tsx +++ b/ui/src/components/settings/general-management.tsx @@ -26,6 +26,7 @@ import { Switch } from '@/components/ui/switch' const DEFAULT_MODEL = 'gpt-4o-mini' const DEFAULT_ANTHROPIC_MODEL = 'claude-sonnet-4-5' +const DEFAULT_MINIMAX_MODEL = 'MiniMax-M2.7' const DEFAULT_KUBECTL_IMAGE = 'zzde/kubectl:latest' const DEFAULT_NODE_TERMINAL_IMAGE = 'busybox:latest' @@ -88,7 +89,9 @@ export function GeneralManagement() { const defaultModel = formData.aiProvider === 'anthropic' ? DEFAULT_ANTHROPIC_MODEL - : DEFAULT_MODEL + : formData.aiProvider === 'minimax' + ? DEFAULT_MINIMAX_MODEL + : DEFAULT_MODEL if (formData.aiAgentEnabled && !formData.aiModel.trim()) { toast.error( @@ -198,14 +201,18 @@ export function GeneralManagement() { @@ -245,7 +258,9 @@ export function GeneralManagement() { placeholder={ formData.aiProvider === 'anthropic' ? DEFAULT_ANTHROPIC_MODEL - : DEFAULT_MODEL + : formData.aiProvider === 'minimax' + ? DEFAULT_MINIMAX_MODEL + : DEFAULT_MODEL } /> @@ -291,7 +306,9 @@ export function GeneralManagement() { placeholder={ formData.aiProvider === 'anthropic' ? 'https://api.anthropic.com' - : 'https://api.openai.com/v1' + : formData.aiProvider === 'minimax' + ? 
'https://api.minimax.io/v1' + : 'https://api.openai.com/v1' } /> diff --git a/ui/src/lib/api.ts b/ui/src/lib/api.ts index c83e596b..1e1ca7b7 100644 --- a/ui/src/lib/api.ts +++ b/ui/src/lib/api.ts @@ -1744,7 +1744,7 @@ export interface APIKeyCreateRequest { export interface GeneralSetting { aiAgentEnabled: boolean - aiProvider: 'openai' | 'anthropic' + aiProvider: 'openai' | 'anthropic' | 'minimax' aiModel: string aiApiKey: string aiApiKeyConfigured: boolean @@ -1759,7 +1759,7 @@ export interface GeneralSetting { export interface GeneralSettingUpdateRequest { aiAgentEnabled: boolean - aiProvider: 'openai' | 'anthropic' + aiProvider: 'openai' | 'anthropic' | 'minimax' aiModel: string aiApiKey?: string aiBaseUrl: string