18 changes: 10 additions & 8 deletions dto/channel_settings.go
@@ -24,14 +24,16 @@ const (
)

type ChannelOtherSettings struct {
AzureResponsesVersion string `json:"azure_responses_version,omitempty"`
VertexKeyType VertexKeyType `json:"vertex_key_type,omitempty"` // "json" or "api_key"
OpenRouterEnterprise *bool `json:"openrouter_enterprise,omitempty"`
ClaudeBetaQuery bool `json:"claude_beta_query,omitempty"` // whether the Claude channel forces appending ?beta=true
AllowServiceTier bool `json:"allow_service_tier,omitempty"` // whether to pass service_tier through (filtered by default to avoid extra billing)
DisableStore bool `json:"disable_store,omitempty"` // whether to disable store pass-through (allowed by default; disabling it may break Codex)
AllowSafetyIdentifier bool `json:"allow_safety_identifier,omitempty"` // whether to pass safety_identifier through (filtered by default to protect user privacy)
AwsKeyType AwsKeyType `json:"aws_key_type,omitempty"`
AzureResponsesVersion string `json:"azure_responses_version,omitempty"`
VertexKeyType VertexKeyType `json:"vertex_key_type,omitempty"` // "json" or "api_key"
OpenRouterEnterprise *bool `json:"openrouter_enterprise,omitempty"`
ClaudeBetaQuery bool `json:"claude_beta_query,omitempty"` // whether the Claude channel forces appending ?beta=true
AllowServiceTier bool `json:"allow_service_tier,omitempty"` // whether to pass service_tier through (filtered by default to avoid extra billing)
AllowInferenceGeo bool `json:"allow_inference_geo,omitempty"` // whether to pass inference_geo through (Claude only; filtered by default for data-residency compliance)
DisableStore bool `json:"disable_store,omitempty"` // whether to disable store pass-through (allowed by default; disabling it may break Codex)
AllowSafetyIdentifier bool `json:"allow_safety_identifier,omitempty"` // whether to pass safety_identifier through (filtered by default to protect user privacy)
AllowIncludeObfuscation bool `json:"allow_include_obfuscation,omitempty"` // whether to pass stream_options.include_obfuscation through (filtered by default so stream obfuscation stays on)
AwsKeyType AwsKeyType `json:"aws_key_type,omitempty"`
}

func (s *ChannelOtherSettings) IsOpenRouterEnterprise() bool {
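The two new flags (allow_inference_geo, allow_include_obfuscation) follow the existing opt-in pattern: the relay strips a field from the request body unless the channel explicitly enables it. Below is a minimal, self-contained sketch of that gating over a raw JSON map; the helper name and structure are illustrative assumptions, not the actual relaycommon.RemoveDisabledFields implementation.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Simplified copy of the channel flags relevant to field filtering.
type ChannelOtherSettings struct {
	AllowServiceTier        bool
	AllowInferenceGeo       bool
	AllowSafetyIdentifier   bool
	AllowIncludeObfuscation bool
	DisableStore            bool
}

// stripDisabledFields removes opt-in fields from a raw JSON request body
// unless the channel settings explicitly allow them (hypothetical helper).
func stripDisabledFields(body []byte, s ChannelOtherSettings) ([]byte, error) {
	var m map[string]json.RawMessage
	if err := json.Unmarshal(body, &m); err != nil {
		return nil, err
	}
	if !s.AllowServiceTier {
		delete(m, "service_tier")
	}
	if !s.AllowInferenceGeo {
		delete(m, "inference_geo")
	}
	if !s.AllowSafetyIdentifier {
		delete(m, "safety_identifier")
	}
	if s.DisableStore {
		delete(m, "store")
	}
	// stream_options.include_obfuscation is nested, so it needs a second pass.
	if !s.AllowIncludeObfuscation {
		if raw, ok := m["stream_options"]; ok {
			var so map[string]json.RawMessage
			if err := json.Unmarshal(raw, &so); err == nil {
				delete(so, "include_obfuscation")
				if patched, err := json.Marshal(so); err == nil {
					m["stream_options"] = patched
				}
			}
		}
	}
	return json.Marshal(m)
}

func main() {
	in := []byte(`{"model":"claude-sonnet-4","inference_geo":"us","service_tier":"priority"}`)
	out, _ := stripDisabledFields(in, ChannelOtherSettings{})
	fmt.Println(string(out)) // both opt-in fields removed with default settings
}
```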
14 changes: 9 additions & 5 deletions dto/claude.go
@@ -190,10 +190,13 @@ type ClaudeToolChoice struct {
}

type ClaudeRequest struct {
Model string `json:"model"`
Prompt string `json:"prompt,omitempty"`
System any `json:"system,omitempty"`
Messages []ClaudeMessage `json:"messages,omitempty"`
Model string `json:"model"`
Prompt string `json:"prompt,omitempty"`
System any `json:"system,omitempty"`
Messages []ClaudeMessage `json:"messages,omitempty"`
// InferenceGeo controls Claude data residency region.
// This field is filtered by default and can be enabled via channel setting allow_inference_geo.
InferenceGeo string `json:"inference_geo,omitempty"`
MaxTokens uint `json:"max_tokens,omitempty"`
MaxTokensToSample uint `json:"max_tokens_to_sample,omitempty"`
StopSequences []string `json:"stop_sequences,omitempty"`
@@ -210,7 +213,8 @@ type ClaudeRequest struct {
Thinking *Thinking `json:"thinking,omitempty"`
McpServers json.RawMessage `json:"mcp_servers,omitempty"`
Metadata json.RawMessage `json:"metadata,omitempty"`
// Service tier field specifying the API service level. Passing it through may lead to higher-than-expected billing, so it should be filtered by default.
// ServiceTier specifies upstream service level and may affect billing.
// This field is filtered by default and can be enabled via channel setting allow_service_tier.
ServiceTier string `json:"service_tier,omitempty"`
}

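For illustration, a short sketch of how the new InferenceGeo field serializes with omitempty: the key only appears when a caller sets it, and whether it then reaches the upstream depends on the channel's allow_inference_geo setting. The model name and region value are assumed examples, not taken from this PR.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Reduced ClaudeRequest with only the fields needed for the demo.
type ClaudeRequest struct {
	Model        string `json:"model"`
	InferenceGeo string `json:"inference_geo,omitempty"`
	MaxTokens    uint   `json:"max_tokens,omitempty"`
}

func main() {
	// Without InferenceGeo the key is omitted entirely (omitempty).
	a, _ := json.Marshal(ClaudeRequest{Model: "claude-sonnet-4", MaxTokens: 1024})
	fmt.Println(string(a)) // {"model":"claude-sonnet-4","max_tokens":1024}

	// With InferenceGeo set, the key is emitted; the relay still filters it
	// out unless the channel enables allow_inference_geo.
	b, _ := json.Marshal(ClaudeRequest{Model: "claude-sonnet-4", InferenceGeo: "us", MaxTokens: 1024})
	fmt.Println(string(b))
}
```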
75 changes: 40 additions & 35 deletions dto/gemini.go
@@ -324,48 +324,50 @@ type GeminiChatTool struct {
}

type GeminiChatGenerationConfig struct {
Temperature *float64 `json:"temperature,omitempty"`
TopP float64 `json:"topP,omitempty"`
TopK float64 `json:"topK,omitempty"`
MaxOutputTokens uint `json:"maxOutputTokens,omitempty"`
CandidateCount int `json:"candidateCount,omitempty"`
StopSequences []string `json:"stopSequences,omitempty"`
ResponseMimeType string `json:"responseMimeType,omitempty"`
ResponseSchema any `json:"responseSchema,omitempty"`
ResponseJsonSchema json.RawMessage `json:"responseJsonSchema,omitempty"`
PresencePenalty *float32 `json:"presencePenalty,omitempty"`
FrequencyPenalty *float32 `json:"frequencyPenalty,omitempty"`
ResponseLogprobs bool `json:"responseLogprobs,omitempty"`
Logprobs *int32 `json:"logprobs,omitempty"`
MediaResolution MediaResolution `json:"mediaResolution,omitempty"`
Seed int64 `json:"seed,omitempty"`
ResponseModalities []string `json:"responseModalities,omitempty"`
ThinkingConfig *GeminiThinkingConfig `json:"thinkingConfig,omitempty"`
SpeechConfig json.RawMessage `json:"speechConfig,omitempty"` // RawMessage to allow flexible speech config
ImageConfig json.RawMessage `json:"imageConfig,omitempty"` // RawMessage to allow flexible image config
Temperature *float64 `json:"temperature,omitempty"`
TopP float64 `json:"topP,omitempty"`
TopK float64 `json:"topK,omitempty"`
MaxOutputTokens uint `json:"maxOutputTokens,omitempty"`
CandidateCount int `json:"candidateCount,omitempty"`
StopSequences []string `json:"stopSequences,omitempty"`
ResponseMimeType string `json:"responseMimeType,omitempty"`
ResponseSchema any `json:"responseSchema,omitempty"`
ResponseJsonSchema json.RawMessage `json:"responseJsonSchema,omitempty"`
PresencePenalty *float32 `json:"presencePenalty,omitempty"`
FrequencyPenalty *float32 `json:"frequencyPenalty,omitempty"`
ResponseLogprobs bool `json:"responseLogprobs,omitempty"`
Logprobs *int32 `json:"logprobs,omitempty"`
EnableEnhancedCivicAnswers *bool `json:"enableEnhancedCivicAnswers,omitempty"`
MediaResolution MediaResolution `json:"mediaResolution,omitempty"`
Seed int64 `json:"seed,omitempty"`
ResponseModalities []string `json:"responseModalities,omitempty"`
ThinkingConfig *GeminiThinkingConfig `json:"thinkingConfig,omitempty"`
SpeechConfig json.RawMessage `json:"speechConfig,omitempty"` // RawMessage to allow flexible speech config
ImageConfig json.RawMessage `json:"imageConfig,omitempty"` // RawMessage to allow flexible image config
}

// UnmarshalJSON allows GeminiChatGenerationConfig to accept both snake_case and camelCase fields.
func (c *GeminiChatGenerationConfig) UnmarshalJSON(data []byte) error {
type Alias GeminiChatGenerationConfig
var aux struct {
Alias
TopPSnake float64 `json:"top_p,omitempty"`
TopKSnake float64 `json:"top_k,omitempty"`
MaxOutputTokensSnake uint `json:"max_output_tokens,omitempty"`
CandidateCountSnake int `json:"candidate_count,omitempty"`
StopSequencesSnake []string `json:"stop_sequences,omitempty"`
ResponseMimeTypeSnake string `json:"response_mime_type,omitempty"`
ResponseSchemaSnake any `json:"response_schema,omitempty"`
ResponseJsonSchemaSnake json.RawMessage `json:"response_json_schema,omitempty"`
PresencePenaltySnake *float32 `json:"presence_penalty,omitempty"`
FrequencyPenaltySnake *float32 `json:"frequency_penalty,omitempty"`
ResponseLogprobsSnake bool `json:"response_logprobs,omitempty"`
MediaResolutionSnake MediaResolution `json:"media_resolution,omitempty"`
ResponseModalitiesSnake []string `json:"response_modalities,omitempty"`
ThinkingConfigSnake *GeminiThinkingConfig `json:"thinking_config,omitempty"`
SpeechConfigSnake json.RawMessage `json:"speech_config,omitempty"`
ImageConfigSnake json.RawMessage `json:"image_config,omitempty"`
TopPSnake float64 `json:"top_p,omitempty"`
TopKSnake float64 `json:"top_k,omitempty"`
MaxOutputTokensSnake uint `json:"max_output_tokens,omitempty"`
CandidateCountSnake int `json:"candidate_count,omitempty"`
StopSequencesSnake []string `json:"stop_sequences,omitempty"`
ResponseMimeTypeSnake string `json:"response_mime_type,omitempty"`
ResponseSchemaSnake any `json:"response_schema,omitempty"`
ResponseJsonSchemaSnake json.RawMessage `json:"response_json_schema,omitempty"`
PresencePenaltySnake *float32 `json:"presence_penalty,omitempty"`
FrequencyPenaltySnake *float32 `json:"frequency_penalty,omitempty"`
ResponseLogprobsSnake bool `json:"response_logprobs,omitempty"`
EnableEnhancedCivicAnswersSnake *bool `json:"enable_enhanced_civic_answers,omitempty"`
MediaResolutionSnake MediaResolution `json:"media_resolution,omitempty"`
ResponseModalitiesSnake []string `json:"response_modalities,omitempty"`
ThinkingConfigSnake *GeminiThinkingConfig `json:"thinking_config,omitempty"`
SpeechConfigSnake json.RawMessage `json:"speech_config,omitempty"`
ImageConfigSnake json.RawMessage `json:"image_config,omitempty"`
}

if err := common.Unmarshal(data, &aux); err != nil {
@@ -408,6 +410,9 @@ func (c *GeminiChatGenerationConfig) UnmarshalJSON(data []byte) error {
if aux.ResponseLogprobsSnake {
c.ResponseLogprobs = aux.ResponseLogprobsSnake
}
if aux.EnableEnhancedCivicAnswersSnake != nil {
c.EnableEnhancedCivicAnswers = aux.EnableEnhancedCivicAnswersSnake
}
if aux.MediaResolutionSnake != "" {
c.MediaResolution = aux.MediaResolutionSnake
}
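The new enableEnhancedCivicAnswers field is wired into the same snake_case/camelCase dual-parsing pattern as the other generation-config fields. A trimmed-down, self-contained sketch of that pattern, reduced to two fields and using encoding/json directly instead of the repo's common.Unmarshal:

```go
package main

import (
	"encoding/json"
	"fmt"
)

type GenConfig struct {
	TopP                       float64 `json:"topP,omitempty"`
	EnableEnhancedCivicAnswers *bool   `json:"enableEnhancedCivicAnswers,omitempty"`
}

func (c *GenConfig) UnmarshalJSON(data []byte) error {
	type Alias GenConfig // Alias drops the custom UnmarshalJSON to avoid recursion.
	var aux struct {
		Alias
		TopPSnake                       float64 `json:"top_p,omitempty"`
		EnableEnhancedCivicAnswersSnake *bool   `json:"enable_enhanced_civic_answers,omitempty"`
	}
	if err := json.Unmarshal(data, &aux); err != nil {
		return err
	}
	*c = GenConfig(aux.Alias)
	// Snake_case spellings override only when set to a non-zero / non-nil
	// value, mirroring the checks in the real method.
	if aux.TopPSnake != 0 {
		c.TopP = aux.TopPSnake
	}
	if aux.EnableEnhancedCivicAnswersSnake != nil {
		c.EnableEnhancedCivicAnswers = aux.EnableEnhancedCivicAnswersSnake
	}
	return nil
}

func main() {
	var a, b GenConfig
	_ = json.Unmarshal([]byte(`{"topP":0.9,"enableEnhancedCivicAnswers":true}`), &a)
	_ = json.Unmarshal([]byte(`{"top_p":0.9,"enable_enhanced_civic_answers":true}`), &b)
	fmt.Println(a.TopP, *a.EnableEnhancedCivicAnswers) // 0.9 true
	fmt.Println(b.TopP, *b.EnableEnhancedCivicAnswers) // 0.9 true
}
```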
63 changes: 41 additions & 22 deletions dto/openai_request.go
@@ -54,18 +54,22 @@ type GeneralOpenAIRequest struct {
ParallelTooCalls *bool `json:"parallel_tool_calls,omitempty"`
Tools []ToolCallRequest `json:"tools,omitempty"`
ToolChoice any `json:"tool_choice,omitempty"`
FunctionCall json.RawMessage `json:"function_call,omitempty"`
User string `json:"user,omitempty"`
LogProbs bool `json:"logprobs,omitempty"`
TopLogProbs int `json:"top_logprobs,omitempty"`
Dimensions int `json:"dimensions,omitempty"`
Modalities json.RawMessage `json:"modalities,omitempty"`
Audio json.RawMessage `json:"audio,omitempty"`
// ServiceTier specifies upstream service level and may affect billing.
// This field is filtered by default and can be enabled via channel setting allow_service_tier.
ServiceTier string `json:"service_tier,omitempty"`
LogProbs bool `json:"logprobs,omitempty"`
TopLogProbs int `json:"top_logprobs,omitempty"`
Dimensions int `json:"dimensions,omitempty"`
Modalities json.RawMessage `json:"modalities,omitempty"`
Audio json.RawMessage `json:"audio,omitempty"`
// Safety identifier that helps OpenAI detect application users who may be violating usage policies
// Note: this field sends user-identifying information to OpenAI; it is filtered by default to protect user privacy
// Note: this field sends user-identifying information to OpenAI; it is filtered by default and can be enabled via allow_safety_identifier
SafetyIdentifier string `json:"safety_identifier,omitempty"`
// Whether or not to store the output of this chat completion request for use in our model distillation or evals products.
// Whether to store this request's data for OpenAI to use in evaluating and improving its products
// Note: this field is filtered by default to protect user privacy, but filtering it may break Codex
// Note: this field is passed through by default and can be disabled via disable_store; disabling it may break Codex
Store json.RawMessage `json:"store,omitempty"`
// Used by OpenAI to cache responses for similar requests to optimize your cache hit rates. Replaces the user field
PromptCacheKey string `json:"prompt_cache_key,omitempty"`
@@ -261,6 +265,9 @@ type FunctionRequest struct {

type StreamOptions struct {
IncludeUsage bool `json:"include_usage,omitempty"`
// IncludeObfuscation applies only to the /v1/responses stream payload.
// This field is filtered by default and can be enabled via channel setting allow_include_obfuscation.
IncludeObfuscation bool `json:"include_obfuscation,omitempty"`
}

func (r *GeneralOpenAIRequest) GetMaxTokens() uint {
@@ -799,30 +806,42 @@ type WebSearchOptions struct {

// https://platform.openai.com/docs/api-reference/responses/create
type OpenAIResponsesRequest struct {
Model string `json:"model"`
Input json.RawMessage `json:"input,omitempty"`
Include json.RawMessage `json:"include,omitempty"`
Model string `json:"model"`
Input json.RawMessage `json:"input,omitempty"`
Include json.RawMessage `json:"include,omitempty"`
// Run inference in the background; the endpoints this depends on are not supported yet
// Background json.RawMessage `json:"background,omitempty"`
Conversation json.RawMessage `json:"conversation,omitempty"`
ContextManagement json.RawMessage `json:"context_management,omitempty"`
Instructions json.RawMessage `json:"instructions,omitempty"`
MaxOutputTokens uint `json:"max_output_tokens,omitempty"`
TopLogProbs *int `json:"top_logprobs,omitempty"`
Metadata json.RawMessage `json:"metadata,omitempty"`
ParallelToolCalls json.RawMessage `json:"parallel_tool_calls,omitempty"`
PreviousResponseID string `json:"previous_response_id,omitempty"`
Reasoning *Reasoning `json:"reasoning,omitempty"`
// 服务层级字段,用于指定 API 服务等级。允许透传可能导致实际计费高于预期,默认应过滤
ServiceTier string `json:"service_tier,omitempty"`
// ServiceTier specifies upstream service level and may affect billing.
// This field is filtered by default and can be enabled via channel setting allow_service_tier.
ServiceTier string `json:"service_tier,omitempty"`
// Store controls whether upstream may store request/response data.
// This field is allowed by default and can be disabled via channel setting disable_store.
Store json.RawMessage `json:"store,omitempty"`
PromptCacheKey json.RawMessage `json:"prompt_cache_key,omitempty"`
PromptCacheRetention json.RawMessage `json:"prompt_cache_retention,omitempty"`
Stream bool `json:"stream,omitempty"`
Temperature *float64 `json:"temperature,omitempty"`
Text json.RawMessage `json:"text,omitempty"`
ToolChoice json.RawMessage `json:"tool_choice,omitempty"`
Tools json.RawMessage `json:"tools,omitempty"` // only a few parameters need handling and MCP has too many uncertain ones, so a map is used
TopP *float64 `json:"top_p,omitempty"`
Truncation string `json:"truncation,omitempty"`
User string `json:"user,omitempty"`
MaxToolCalls uint `json:"max_tool_calls,omitempty"`
Prompt json.RawMessage `json:"prompt,omitempty"`
// SafetyIdentifier carries client identity for policy abuse detection.
// This field is filtered by default and can be enabled via channel setting allow_safety_identifier.
SafetyIdentifier string `json:"safety_identifier,omitempty"`
Stream bool `json:"stream,omitempty"`
StreamOptions *StreamOptions `json:"stream_options,omitempty"`
Temperature *float64 `json:"temperature,omitempty"`
Text json.RawMessage `json:"text,omitempty"`
ToolChoice json.RawMessage `json:"tool_choice,omitempty"`
Tools json.RawMessage `json:"tools,omitempty"` // only a few parameters need handling and MCP has too many uncertain ones, so a map is used
TopP *float64 `json:"top_p,omitempty"`
Truncation string `json:"truncation,omitempty"`
User string `json:"user,omitempty"`
MaxToolCalls uint `json:"max_tool_calls,omitempty"`
Prompt json.RawMessage `json:"prompt,omitempty"`
// qwen
EnableThinking json.RawMessage `json:"enable_thinking,omitempty"`
// perplexity
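As an assumed example, here is a /v1/responses payload touching the fields this PR adds to OpenAIResponsesRequest, with comments noting which ones are opt-in versus allowed by default. The struct mirrors only a subset of the real type, and the model name and identifier values are placeholders.

```go
package main

import (
	"encoding/json"
	"fmt"
)

type StreamOptions struct {
	IncludeUsage       bool `json:"include_usage,omitempty"`
	IncludeObfuscation bool `json:"include_obfuscation,omitempty"`
}

type ResponsesRequest struct {
	Model            string          `json:"model"`
	Input            json.RawMessage `json:"input,omitempty"`
	ServiceTier      string          `json:"service_tier,omitempty"`      // opt-in via allow_service_tier
	SafetyIdentifier string          `json:"safety_identifier,omitempty"` // opt-in via allow_safety_identifier
	Store            json.RawMessage `json:"store,omitempty"`             // allowed unless disable_store is set
	Stream           bool            `json:"stream,omitempty"`
	StreamOptions    *StreamOptions  `json:"stream_options,omitempty"` // include_obfuscation is opt-in via allow_include_obfuscation
}

func main() {
	req := ResponsesRequest{
		Model:            "gpt-4o", // placeholder model name
		Input:            json.RawMessage(`"hello"`),
		ServiceTier:      "priority",
		SafetyIdentifier: "user-123",
		Stream:           true,
		StreamOptions:    &StreamOptions{IncludeObfuscation: true},
	}
	out, _ := json.Marshal(req)
	fmt.Println(string(out)) // the relay later strips the opt-in fields unless enabled
}
```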
4 changes: 2 additions & 2 deletions relay/chat_completions_via_responses.go
@@ -76,7 +76,7 @@ func chatCompletionsViaResponses(c *gin.Context, info *relaycommon.RelayInfo, ad
return nil, types.NewError(err, types.ErrorCodeConvertRequestFailed, types.ErrOptionWithSkipRetry())
}

chatJSON, err = relaycommon.RemoveDisabledFields(chatJSON, info.ChannelOtherSettings)
chatJSON, err = relaycommon.RemoveDisabledFields(chatJSON, info.ChannelOtherSettings, info.ChannelSetting.PassThroughBodyEnabled)
if err != nil {
return nil, types.NewError(err, types.ErrorCodeConvertRequestFailed, types.ErrOptionWithSkipRetry())
}
@@ -120,7 +120,7 @@ func chatCompletionsViaResponses(c *gin.Context, info *relaycommon.RelayInfo, ad
return nil, types.NewError(err, types.ErrorCodeConvertRequestFailed, types.ErrOptionWithSkipRetry())
}

jsonData, err = relaycommon.RemoveDisabledFields(jsonData, info.ChannelOtherSettings)
jsonData, err = relaycommon.RemoveDisabledFields(jsonData, info.ChannelOtherSettings, info.ChannelSetting.PassThroughBodyEnabled)
if err != nil {
return nil, types.NewError(err, types.ErrorCodeConvertRequestFailed, types.ErrOptionWithSkipRetry())
}
2 changes: 1 addition & 1 deletion relay/claude_handler.go
@@ -146,7 +146,7 @@ func ClaudeHelper(c *gin.Context, info *relaycommon.RelayInfo) (newAPIError *typ
}

// remove disabled fields for Claude API
jsonData, err = relaycommon.RemoveDisabledFields(jsonData, info.ChannelOtherSettings)
jsonData, err = relaycommon.RemoveDisabledFields(jsonData, info.ChannelOtherSettings, info.ChannelSetting.PassThroughBodyEnabled)
if err != nil {
return types.NewError(err, types.ErrorCodeConvertRequestFailed, types.ErrOptionWithSkipRetry())
}
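Both relay call sites now pass info.ChannelSetting.PassThroughBodyEnabled as a third argument to relaycommon.RemoveDisabledFields. A hedged sketch of what that signature change likely implies, assuming the function short-circuits for pass-through channels; the body below is an illustration, not the actual relaycommon implementation, and the single filtered field stands in for the full set.

```go
package main

import (
	"encoding/json"
	"fmt"
)

type ChannelOtherSettings struct {
	AllowServiceTier bool
}

// RemoveDisabledFields sketch: the new third argument lets pass-through
// channels skip field filtering entirely; otherwise opt-in fields are
// stripped from the raw JSON body.
func RemoveDisabledFields(body []byte, settings *ChannelOtherSettings, passThroughBodyEnabled bool) ([]byte, error) {
	if passThroughBodyEnabled || settings == nil {
		return body, nil // forward the client body verbatim
	}
	var m map[string]json.RawMessage
	if err := json.Unmarshal(body, &m); err != nil {
		return nil, err
	}
	if !settings.AllowServiceTier {
		delete(m, "service_tier")
	}
	return json.Marshal(m)
}

func main() {
	body := []byte(`{"model":"gpt-4o","service_tier":"priority"}`)
	kept, _ := RemoveDisabledFields(body, &ChannelOtherSettings{}, true)
	filtered, _ := RemoveDisabledFields(body, &ChannelOtherSettings{}, false)
	fmt.Println(string(kept))     // unchanged: pass-through body enabled
	fmt.Println(string(filtered)) // service_tier removed
}
```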