Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ Kite is a lightweight, modern Kubernetes dashboard that unifies real-time observ
- Live pod logs with filtering and search
- Web terminal for pods and nodes
- Built-in kubectl console
- AI assistant.
- AI assistant with multi-provider support (OpenAI, Anthropic, [MiniMax](https://www.minimaxi.com)).

### Security

Expand Down
2 changes: 1 addition & 1 deletion README_zh.md
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ Kite是一款轻量级、现代化的Kubernetes仪表板工具,它将实时可
- 支持过滤和搜索的实时 Pod 日志
- 面向 Pod 和 Node 的 Web 终端
- 内置 kubectl 控制台
- AI 助手
- AI 助手,支持多种 AI 供应商(OpenAI、Anthropic、[MiniMax](https://www.minimaxi.com))

### 安全

Expand Down
7 changes: 7 additions & 0 deletions pkg/ai/agent.go
Original file line number Diff line number Diff line change
Expand Up @@ -140,6 +140,12 @@ func NewAgent(cs *cluster.ClientSet, cfg *RuntimeConfig) (*Agent, error) {
return nil, err
}
agent.anthropicClient = client
case model.GeneralAIProviderMiniMax:
client, err := NewMiniMaxClient(cfg)
if err != nil {
return nil, err
}
agent.openaiClient = client
default:
client, err := NewOpenAIClient(cfg)
if err != nil {
Expand Down Expand Up @@ -290,6 +296,7 @@ func buildContextualSystemPrompt(pageCtx *PageContext, runtimeCtx runtimePromptC
}

// ProcessChat runs the AI conversation loop and sends SSE events via the callback.
// MiniMax uses an OpenAI-compatible API, so it shares the OpenAI code path.
func (a *Agent) ProcessChat(c *gin.Context, req *ChatRequest, sendEvent func(SSEEvent)) {
switch a.provider {
case model.GeneralAIProviderAnthropic:
Expand Down
29 changes: 29 additions & 0 deletions pkg/ai/config.go
Original file line number Diff line number Diff line change
Expand Up @@ -28,10 +28,14 @@ func defaultModelForProvider(provider string) string {
return model.DefaultGeneralAIModelByProvider(provider)
}

// defaultMiniMaxBaseURL is the MiniMax OpenAI-compatible API endpoint used
// when the runtime config does not supply a BaseURL override.
const defaultMiniMaxBaseURL = "https://api.minimax.io/v1"

func providerLabel(provider string) string {
switch provider {
case model.GeneralAIProviderAnthropic:
return "Anthropic"
case model.GeneralAIProviderMiniMax:
return "MiniMax"
default:
return "OpenAI"
}
Expand Down Expand Up @@ -104,3 +108,28 @@ func NewAnthropicClient(cfg *RuntimeConfig) (anthropic.Client, error) {

return anthropic.NewClient(opts...), nil
}

// NewMiniMaxClient builds a client for the MiniMax API by reusing the
// openai-go SDK: MiniMax exposes an OpenAI-compatible chat completions
// endpoint, so only the base URL (https://api.minimax.io/v1 by default)
// and the API key differ from a plain OpenAI client.
func NewMiniMaxClient(cfg *RuntimeConfig) (openai.Client, error) {
	// The AI feature must be enabled before any client can be constructed.
	if cfg == nil || !cfg.Enabled {
		return openai.Client{}, fmt.Errorf("AI is not enabled")
	}
	// Refuse configs whose normalized provider is anything other than MiniMax.
	if normalizeProvider(cfg.Provider) != model.GeneralAIProviderMiniMax {
		return openai.Client{}, fmt.Errorf("AI provider %s is not supported by MiniMax client", providerLabel(cfg.Provider))
	}

	var options []openaioption.RequestOption
	if cfg.APIKey != "" {
		options = append(options, openaioption.WithAPIKey(cfg.APIKey))
	}
	endpoint := cfg.BaseURL
	if endpoint == "" {
		endpoint = defaultMiniMaxBaseURL
	}
	options = append(options, openaioption.WithBaseURL(endpoint))

	return openai.NewClient(options...), nil
}
183 changes: 183 additions & 0 deletions pkg/ai/config_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,183 @@
package ai

import (
"testing"

"github.com/zxh326/kite/pkg/model"
)

// TestNormalizeProvider verifies that provider strings are canonicalized
// regardless of case and surrounding whitespace, and that empty or unknown
// values fall back to the OpenAI provider.
func TestNormalizeProvider(t *testing.T) {
	cases := []struct {
		label string
		in    string
		want  string
	}{
		{"openai lowercase", "openai", model.GeneralAIProviderOpenAI},
		{"openai uppercase", "OpenAI", model.GeneralAIProviderOpenAI},
		{"anthropic lowercase", "anthropic", model.GeneralAIProviderAnthropic},
		{"anthropic uppercase", "Anthropic", model.GeneralAIProviderAnthropic},
		{"minimax lowercase", "minimax", model.GeneralAIProviderMiniMax},
		{"minimax mixed case", "MiniMax", model.GeneralAIProviderMiniMax},
		{"minimax uppercase", "MINIMAX", model.GeneralAIProviderMiniMax},
		{"empty defaults to openai", "", model.GeneralAIProviderOpenAI},
		{"unknown defaults to openai", "unknown", model.GeneralAIProviderOpenAI},
		{"whitespace trimmed", "  minimax  ", model.GeneralAIProviderMiniMax},
	}

	for _, c := range cases {
		t.Run(c.label, func(t *testing.T) {
			if got := normalizeProvider(c.in); got != c.want {
				t.Fatalf("normalizeProvider(%q) = %q, want %q", c.in, got, c.want)
			}
		})
	}
}

// TestDefaultModelForProvider confirms each provider maps to its own
// default model, with OpenAI's model used for empty input.
func TestDefaultModelForProvider(t *testing.T) {
	cases := []struct {
		label    string
		provider string
		want     string
	}{
		{"openai default model", model.GeneralAIProviderOpenAI, model.DefaultGeneralAIModel},
		{"anthropic default model", model.GeneralAIProviderAnthropic, model.DefaultGeneralAnthropicModel},
		{"minimax default model", model.GeneralAIProviderMiniMax, model.DefaultGeneralMiniMaxModel},
		{"empty defaults to openai model", "", model.DefaultGeneralAIModel},
	}

	for _, c := range cases {
		t.Run(c.label, func(t *testing.T) {
			if got := defaultModelForProvider(c.provider); got != c.want {
				t.Fatalf("defaultModelForProvider(%q) = %q, want %q", c.provider, got, c.want)
			}
		})
	}
}

// TestProviderLabel checks the human-readable label for each provider,
// including the OpenAI fallback for unrecognized names.
func TestProviderLabel(t *testing.T) {
	cases := []struct {
		label    string
		provider string
		want     string
	}{
		{"openai label", model.GeneralAIProviderOpenAI, "OpenAI"},
		{"anthropic label", model.GeneralAIProviderAnthropic, "Anthropic"},
		{"minimax label", model.GeneralAIProviderMiniMax, "MiniMax"},
		{"unknown defaults to openai", "unknown", "OpenAI"},
	}

	for _, c := range cases {
		t.Run(c.label, func(t *testing.T) {
			if got := providerLabel(c.provider); got != c.want {
				t.Fatalf("providerLabel(%q) = %q, want %q", c.provider, got, c.want)
			}
		})
	}
}

// TestIsGeneralAIProviderSupported verifies the supported-provider check,
// including case-insensitive matching and rejection of unknown/empty values.
func TestIsGeneralAIProviderSupported(t *testing.T) {
	cases := []struct {
		label    string
		provider string
		want     bool
	}{
		{"openai supported", "openai", true},
		{"anthropic supported", "anthropic", true},
		{"minimax supported", "minimax", true},
		{"MiniMax mixed case supported", "MiniMax", true},
		{"unknown not supported", "unknown", false},
		{"empty not supported", "", false},
	}

	for _, c := range cases {
		t.Run(c.label, func(t *testing.T) {
			if got := model.IsGeneralAIProviderSupported(c.provider); got != c.want {
				t.Fatalf("IsGeneralAIProviderSupported(%q) = %v, want %v", c.provider, got, c.want)
			}
		})
	}
}

// TestNewOpenAIClientRejectsWrongProvider ensures the OpenAI constructor
// refuses a config whose provider is MiniMax.
func TestNewOpenAIClientRejectsWrongProvider(t *testing.T) {
	cfg := &RuntimeConfig{Enabled: true, Provider: model.GeneralAIProviderMiniMax, APIKey: "test-key"}
	if _, err := NewOpenAIClient(cfg); err == nil {
		t.Fatal("expected error when creating OpenAI client with MiniMax provider")
	}
}

// TestNewMiniMaxClientRejectsWrongProvider ensures the MiniMax constructor
// refuses a config whose provider is OpenAI.
func TestNewMiniMaxClientRejectsWrongProvider(t *testing.T) {
	cfg := &RuntimeConfig{Enabled: true, Provider: model.GeneralAIProviderOpenAI, APIKey: "test-key"}
	if _, err := NewMiniMaxClient(cfg); err == nil {
		t.Fatal("expected error when creating MiniMax client with OpenAI provider")
	}
}

// TestNewMiniMaxClientDisabled ensures construction fails while the AI
// feature is switched off, even with an otherwise valid MiniMax config.
func TestNewMiniMaxClientDisabled(t *testing.T) {
	cfg := &RuntimeConfig{Enabled: false, Provider: model.GeneralAIProviderMiniMax, APIKey: "test-key"}
	if _, err := NewMiniMaxClient(cfg); err == nil {
		t.Fatal("expected error when AI is disabled")
	}
}

// TestNewMiniMaxClientNilConfig ensures a nil runtime config is rejected
// instead of causing a nil-pointer dereference.
func TestNewMiniMaxClientNilConfig(t *testing.T) {
	if _, err := NewMiniMaxClient(nil); err == nil {
		t.Fatal("expected error with nil config")
	}
}

// TestNewMiniMaxClientSuccess ensures a valid, enabled MiniMax config
// produces a client without error (no network call is made here).
func TestNewMiniMaxClientSuccess(t *testing.T) {
	cfg := &RuntimeConfig{Enabled: true, Provider: model.GeneralAIProviderMiniMax, APIKey: "test-minimax-key"}
	if _, err := NewMiniMaxClient(cfg); err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
}

// TestNewMiniMaxClientCustomBaseURL ensures an explicit BaseURL override
// is accepted in place of the built-in MiniMax endpoint.
func TestNewMiniMaxClientCustomBaseURL(t *testing.T) {
	cfg := &RuntimeConfig{
		Enabled:  true,
		Provider: model.GeneralAIProviderMiniMax,
		APIKey:   "test-minimax-key",
		BaseURL:  "https://custom.minimax.example.com/v1",
	}
	if _, err := NewMiniMaxClient(cfg); err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
}

// TestNewAnthropicClientRejectsMiniMax ensures the Anthropic constructor
// refuses a config whose provider is MiniMax.
func TestNewAnthropicClientRejectsMiniMax(t *testing.T) {
	cfg := &RuntimeConfig{Enabled: true, Provider: model.GeneralAIProviderMiniMax, APIKey: "test-key"}
	if _, err := NewAnthropicClient(cfg); err == nil {
		t.Fatal("expected error when creating Anthropic client with MiniMax provider")
	}
}
10 changes: 9 additions & 1 deletion pkg/model/general_setting.go
Original file line number Diff line number Diff line change
Expand Up @@ -10,11 +10,13 @@ import (

// Default model IDs used when the admin has not configured one explicitly
// (see DefaultGeneralAIModelByProvider).
const DefaultGeneralAIModel = "gpt-4o-mini"
const DefaultGeneralAnthropicModel = "claude-sonnet-4-5"
// NOTE(review): confirm "MiniMax-M2.7" is a model ID the MiniMax API
// actually serves — verify against MiniMax's published model list.
const DefaultGeneralMiniMaxModel = "MiniMax-M2.7"
// Default container images for the built-in kubectl console and the
// node terminal feature.
const DefaultGeneralKubectlImage = "zzde/kubectl:latest"
const DefaultGeneralNodeTerminalImage = "busybox:latest"

// Canonical (lower-case) provider identifiers; OpenAI is the fallback
// provider (see NormalizeGeneralAIProvider's default case).
const GeneralAIProviderOpenAI = "openai"
const GeneralAIProviderAnthropic = "anthropic"
const GeneralAIProviderMiniMax = "minimax"
const DefaultGeneralAIProvider = GeneralAIProviderOpenAI

func DefaultGeneralNodeTerminalImageValue() string {
Expand Down Expand Up @@ -44,20 +46,26 @@ func NormalizeGeneralAIProvider(provider string) string {
switch strings.ToLower(strings.TrimSpace(provider)) {
case GeneralAIProviderAnthropic:
return GeneralAIProviderAnthropic
case GeneralAIProviderMiniMax:
return GeneralAIProviderMiniMax
default:
return GeneralAIProviderOpenAI
}
}

// IsGeneralAIProviderSupported reports whether provider, after trimming
// whitespace and lower-casing, names one of the known AI providers.
func IsGeneralAIProviderSupported(provider string) bool {
	switch strings.ToLower(strings.TrimSpace(provider)) {
	case GeneralAIProviderOpenAI, GeneralAIProviderAnthropic, GeneralAIProviderMiniMax:
		return true
	default:
		return false
	}
}

func DefaultGeneralAIModelByProvider(provider string) string {
switch NormalizeGeneralAIProvider(provider) {
case GeneralAIProviderAnthropic:
return DefaultGeneralAnthropicModel
case GeneralAIProviderMiniMax:
return DefaultGeneralMiniMaxModel
default:
return DefaultGeneralAIModel
}
Expand Down
Loading