Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 10 additions & 8 deletions models/models.go
Original file line number Diff line number Diff line change
Expand Up @@ -41,11 +41,12 @@ type ChatCompletionRequest struct {

// Message is a single chat message in a completion request or response.
// The diff rendering had fused the pre- and post-change field lists,
// producing duplicate fields; this is the post-change struct, which adds
// ReasoningContent for models that emit reasoning ("thinking") output.
type Message struct {
	Role             string      `json:"role" binding:"required"`
	Content          interface{} `json:"content"`                     // string or []ContentPart for multimodal content
	ReasoningContent string      `json:"reasoning_content,omitempty"` // model reasoning text, if any
	ToolCalls        []ToolCall  `json:"tool_calls,omitempty"`
	ToolCallID       string      `json:"tool_call_id,omitempty"`
	Name             string      `json:"name,omitempty"`
}

// ContentPart 消息内容部分(用于多模态内容)
Expand Down Expand Up @@ -127,9 +128,10 @@ type StreamChoice struct {

// StreamDelta is the incremental payload of a streaming chat chunk.
// The diff rendering had fused the pre- and post-change field lists,
// producing duplicate fields; this is the post-change struct, which adds
// ReasoningContent so reasoning ("thinking") tokens can be streamed.
type StreamDelta struct {
	Role             string          `json:"role,omitempty"`
	Content          string          `json:"content,omitempty"`
	ReasoningContent string          `json:"reasoning_content,omitempty"` // incremental reasoning text
	ToolCalls        []ToolCallDelta `json:"tool_calls,omitempty"`
}

// ToolCallDelta 流式工具调用增量
Expand Down
10 changes: 10 additions & 0 deletions utils/utils.go
Original file line number Diff line number Diff line change
Expand Up @@ -149,6 +149,10 @@ func StreamChatCompletion(c *gin.Context, chatGenerator <-chan interface{}, mode
if v.Text != "" {
writeChunk(models.StreamDelta{Content: v.Text}, nil)
}
case models.AssistantEventThinking:
if v.Thinking != "" {
writeChunk(models.StreamDelta{ReasoningContent: v.Thinking}, nil)
}
case models.AssistantEventToolCall:
if v.ToolCall != nil {
writeChunk(models.StreamDelta{
Expand Down Expand Up @@ -195,6 +199,7 @@ func StreamChatCompletion(c *gin.Context, chatGenerator <-chan interface{}, mode
// NonStreamChatCompletion 处理非流式聊天完成
func NonStreamChatCompletion(c *gin.Context, chatGenerator <-chan interface{}, modelName string) {
var fullContent strings.Builder
var fullThinking strings.Builder
var usage models.Usage
toolCalls := make([]models.ToolCall, 0, 2)
finishReason := "stop"
Expand All @@ -221,6 +226,9 @@ func NonStreamChatCompletion(c *gin.Context, chatGenerator <-chan interface{}, m
if fullContent.Len() > 0 || len(toolCalls) == 0 {
message.Content = fullContent.String()
}
if fullThinking.Len() > 0 {
message.ReasoningContent = fullThinking.String()
}
if len(toolCalls) > 0 {
message.ToolCalls = toolCalls
finishReason = "tool_calls"
Expand All @@ -241,6 +249,8 @@ func NonStreamChatCompletion(c *gin.Context, chatGenerator <-chan interface{}, m
switch v.Kind {
case models.AssistantEventText:
fullContent.WriteString(v.Text)
case models.AssistantEventThinking:
fullThinking.WriteString(v.Thinking)
case models.AssistantEventToolCall:
if v.ToolCall != nil {
toolCalls = append(toolCalls, *v.ToolCall)
Expand Down
Loading