From 307ed4a9fb1011eb556ee3ebceb8f8207cd611af Mon Sep 17 00:00:00 2001
From: Tyler Rockwood
Date: Tue, 3 Sep 2024 17:38:33 +0000
Subject: [PATCH] openai: use interpolated string instead of bloblang for
 prompt

This is for consistency between all the other AI processors.
---
 .../pages/processors/openai_chat_completion.adoc |  1 +
 internal/impl/openai/chat_processor.go           | 15 +++++++--------
 internal/impl/openai/chat_processor_test.go      |  3 +--
 3 files changed, 9 insertions(+), 10 deletions(-)

diff --git a/docs/modules/components/pages/processors/openai_chat_completion.adoc b/docs/modules/components/pages/processors/openai_chat_completion.adoc
index cd28e97e96..d0cbb8c1c3 100644
--- a/docs/modules/components/pages/processors/openai_chat_completion.adoc
+++ b/docs/modules/components/pages/processors/openai_chat_completion.adoc
@@ -162,6 +162,7 @@ model: gpt4-turbo
 === `prompt`
 
 The user prompt you want to generate a response for. By default, the processor submits the entire payload as a string.
+This field supports xref:configuration:interpolation.adoc#bloblang-queries[interpolation functions].
 
 *Type*: `string`
 
diff --git a/internal/impl/openai/chat_processor.go b/internal/impl/openai/chat_processor.go
index 7a6469d9a1..040f26311d 100644
--- a/internal/impl/openai/chat_processor.go
+++ b/internal/impl/openai/chat_processor.go
@@ -15,7 +15,6 @@ import (
     "slices"
     "time"
 
-    "github.com/redpanda-data/benthos/v4/public/bloblang"
     "github.com/redpanda-data/benthos/v4/public/service"
     "github.com/redpanda-data/connect/v4/internal/impl/confluent/sr"
     oai "github.com/sashabaranov/go-openai"
@@ -76,7 +75,7 @@ To learn more about chat completion, see the https://platform.openai.com/docs/gu
         )...,
         ).
         Fields(
-            service.NewBloblangField(ocpFieldUserPrompt).
+            service.NewInterpolatedStringField(ocpFieldUserPrompt).
                 Description("The user prompt you want to generate a response for. By default, the processor submits the entire payload as a string.").
                 Optional(),
             service.NewInterpolatedStringField(ocpFieldSystemPrompt).
@@ -163,9 +162,9 @@ func makeChatProcessor(conf *service.ParsedConfig, mgr *service.Resources) (serv
     if err != nil {
         return nil, err
     }
-    var up *bloblang.Executor
+    var up *service.InterpolatedString
     if conf.Contains(ocpFieldUserPrompt) {
-        up, err = conf.FieldBloblang(ocpFieldUserPrompt)
+        up, err = conf.FieldInterpolatedString(ocpFieldUserPrompt)
         if err != nil {
             return nil, err
         }
@@ -334,7 +333,7 @@ func newDynamicSchemaProvider(conf *service.ParsedConfig, mgr *service.Resources
 
 type chatProcessor struct {
     *baseProcessor
-    userPrompt   *bloblang.Executor
+    userPrompt   *service.InterpolatedString
     systemPrompt *service.InterpolatedString
     maxTokens    *int
     temperature  *float32
@@ -396,13 +395,13 @@ func (p *chatProcessor) Process(ctx context.Context, msg *service.Message) (serv
         })
     }
     if p.userPrompt != nil {
-        s, err := msg.BloblangQueryValue(p.userPrompt)
+        s, err := p.userPrompt.TryString(msg)
         if err != nil {
-            return nil, fmt.Errorf("%s execution error: %w", ocpFieldUserPrompt, err)
+            return nil, fmt.Errorf("%s interpolation error: %w", ocpFieldUserPrompt, err)
         }
         body.Messages = append(body.Messages, oai.ChatCompletionMessage{
             Role:    "user",
-            Content: bloblang.ValueToString(s),
+            Content: s,
         })
     } else {
         b, err := msg.AsBytes()
diff --git a/internal/impl/openai/chat_processor_test.go b/internal/impl/openai/chat_processor_test.go
index d8f8d7a1cc..1296b441ab 100644
--- a/internal/impl/openai/chat_processor_test.go
+++ b/internal/impl/openai/chat_processor_test.go
@@ -13,7 +13,6 @@ import (
     "testing"
 
     "github.com/go-faker/faker/v4"
-    "github.com/redpanda-data/benthos/v4/public/bloblang"
     "github.com/redpanda-data/benthos/v4/public/service"
     oai "github.com/sashabaranov/go-openai"
     "github.com/stretchr/testify/assert"
@@ -54,7 +53,7 @@ func TestChat(t *testing.T) {
 }
 
 func TestChatInterpolationError(t *testing.T) {
-    text, err := bloblang.GlobalEnvironment().Parse(`throw("kaboom!")`)
+    text, err := service.NewInterpolatedString(`${!throw("kaboom!")}`)
     assert.NoError(t, err)
     p := chatProcessor{
         baseProcessor: &baseProcessor{
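Note (not part of the patch): a minimal sketch of how the `prompt` field is now resolved, assuming the current benthos `public/service` API that the patch switches to (`NewInterpolatedString` / `TryString`); the expression and payload below are made up for illustration.

package main

import (
    "fmt"

    "github.com/redpanda-data/benthos/v4/public/service"
)

func main() {
    // The processor now parses the prompt via FieldInterpolatedString; this
    // mirrors that with the public constructor. Expression is illustrative only.
    prompt, err := service.NewInterpolatedString(`Summarize this: ${! content() }`)
    if err != nil {
        panic(err)
    }

    msg := service.NewMessage([]byte("raw payload text"))

    // TryString resolves the expression against the message; a failure here is
    // what the processor now surfaces as an "interpolation error".
    s, err := prompt.TryString(msg)
    if err != nil {
        panic(err)
    }
    fmt.Println(s) // Summarize this: raw payload text
}

The error path of TryString is what TestChatInterpolationError exercises with the `${!throw("kaboom!")}` expression in the updated test.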