diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 2456cb5d..022d8b57 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -37,7 +37,7 @@ jobs:
       - name: Start dependencies
         run: |
-          docker-compose \
+          docker compose \
            -f examples/docker-compose.yml \
            up -d
@@ -82,4 +82,4 @@ jobs:
         env:
           USE_ERROR_SERIALIZATION_V2: 1
         run: |
-          cd examples && bundle exec rspec
\ No newline at end of file
+          cd examples && bundle exec rspec
diff --git a/Gemfile b/Gemfile
index fa75df15..a98c51b0 100644
--- a/Gemfile
+++ b/Gemfile
@@ -1,3 +1,5 @@
 source 'https://rubygems.org'
 
+gem 'google-protobuf', '~> 3.19'
+
 gemspec
diff --git a/README.md b/README.md
index 04a1533b..a64c1159 100644
--- a/README.md
+++ b/README.md
@@ -90,7 +90,8 @@ Temporal::Worker.new(
   workflow_thread_pool_size: 10, # how many threads poll for workflows
   binary_checksum: nil, # identifies the version of workflow worker code
   activity_poll_retry_seconds: 0, # how many seconds to wait after unsuccessful poll for activities
-  workflow_poll_retry_seconds: 0 # how many seconds to wait after unsuccessful poll for workflows
+  workflow_poll_retry_seconds: 0, # how many seconds to wait after unsuccessful poll for workflows
+  activity_max_tasks_per_second: 0 # rate-limit for starting activity tasks (new activities + retries) on the task queue
 )
 ```
@@ -178,6 +179,47 @@ Temporal.configure do |config|
 end
 ```
 
+## Configuration
+
+This gem is optimised for the smoothest out-of-the-box experience, which is achieved using a global
+configuration:
+
+```ruby
+Temporal.configure do |config|
+  config.host = '127.0.0.1' # sets global host
+  ...
+end
+
+Temporal::Worker.new # uses global host
+Temporal.start_workflow(...) # uses global host
+```
+
+This will work just fine for simpler use cases; however, at some point you might need to set up
+multiple clients and workers within the same instance of your app (e.g. you have different Temporal
+hosts, or need to use different codecs/converters for different parts of your app). Should this be
+the case, we recommend using explicit local configurations for each client/worker:
+
+```ruby
+config_1 = Temporal::Configuration.new
+config_1.host = 'temporal-01'
+
+config_2 = Temporal::Configuration.new
+config_2.host = 'temporal-02'
+
+worker_1 = Temporal::Worker.new(config_1)
+worker_2 = Temporal::Worker.new(config_2)
+
+client_1 = Temporal::Client.new(config_1)
+client_1.start_workflow(...)
+
+client_2 = Temporal::Client.new(config_2)
+client_2.start_workflow(...)
+```
+
+*NOTE: Almost all the methods on the `Temporal` module are delegated to the default client that's
+initialized using the global configuration. The same methods can be used directly on your own client
+instances.*
+
 ## Workflows
 
 A workflow is defined using pure Ruby code, however it should contain only a high-level
@@ -400,6 +442,36 @@ arguments are identical to the `Temporal.start_workflow` API.
 set it to allow as many invocations as you need. You can also set it to `nil`, which will use a
 default value of 10 years.*
 
+## Middleware
+Middleware sits between the execution of your workflows/activities and the Temporal SDK, allowing you to insert custom code before or after the execution.
+
+### Activity Middleware Stack
+Middleware added to the activity middleware stack will be executed around each activity method. This is useful when you want to perform a certain task before and/or after each activity execution, such as logging, error handling, or measuring execution time.
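+
+For example, here is a minimal sketch of a timing middleware (the class name and log message are illustrative, not part of the SDK):
+
+```ruby
+class TimingMiddleware
+  def call(metadata)
+    started_at = Time.now
+    yield # runs the next middleware in the stack, or the activity itself; its return value is passed back up
+  ensure
+    elapsed_ms = ((Time.now - started_at) * 1000).round
+    puts "Activity finished in #{elapsed_ms}ms"
+  end
+end
+```
+
+Register it on a worker with `worker.add_activity_middleware(TimingMiddleware)`.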
+
+### Workflow Middleware Stack
+There are two types of workflow middleware in the Temporal Ruby SDK:
+
+*Workflow Middleware*: This middleware is executed around each entire workflow. It is similar to activity middleware, but for workflows.
+
+*Workflow Task Middleware*: This middleware is executed around each workflow task, of which there will be many for each workflow.
+
+### Example
+To add a middleware, define a class that responds to the `call` method. Within `call`, you should call `yield` to allow the next middleware in the stack (or the workflow/activity method itself if there is no more middleware) to execute, and return its result. Here's an example:
+
+```ruby
+class MyMiddleware
+  def call(metadata)
+    puts "Before execution"
+    result = yield
+    puts "After execution"
+    result # return the result so it is passed back up the stack
+  end
+end
+```
+
+You can add this middleware to the stack like so: `worker.add_activity_middleware(MyMiddleware)`.
+
+Please note that the order of middleware in the stack matters. The middleware that is added last will be the first one to execute. In the example above, MyMiddleware will execute before any other middleware in the stack.
 
 ## Breaking Changes
 
diff --git a/examples/Gemfile b/examples/Gemfile
index 9c543b77..c6fc0199 100644
--- a/examples/Gemfile
+++ b/examples/Gemfile
@@ -2,7 +2,8 @@ source 'https://rubygems.org'
 
 gem 'temporal-ruby', path: '../'
 
-gem 'dry-types', '>= 1.2.0'
-gem 'dry-struct', '~> 1.1.1'
+gem 'dry-types', '>= 1.7.2'
+gem 'dry-struct', '~> 1.6.0'
+gem 'google-protobuf', '~> 3.19'
 
 gem 'rspec', group: :test
diff --git a/examples/bin/update_replay_test_histories b/examples/bin/update_replay_test_histories
new file mode 100755
index 00000000..bc0f807a
--- /dev/null
+++ b/examples/bin/update_replay_test_histories
@@ -0,0 +1,51 @@
+#!/usr/bin/env ruby
+
+# This script regenerates the workflow history files used in the example replay tests
+# under examples/spec/replay/histories. It starts the necessary workflow, sends some
+# signals, awaits workflow completion, then collects the history into JSON and protobuf
+# binary file formats.
+#
+# To use this, start your Temporal server and bin/worker first. This script can then
+# be run without any arguments. It will overwrite existing history files in the tree.
+#
+# NOTE: By default, collected history files contain the host names of the machines
+# where the worker and this script are run because the default identity is pid@hostname.
+# If you'd like, you can override this by setting an identity in the configuration in
+# init.rb.
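+#
+# For example (assuming the gem's configuration exposes an `identity` setting; adjust to your setup):
+#
+#   Temporal.configure do |config|
+#     config.identity = "replay-history-generator"
+#   end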
+ +require_relative "../init" +require_relative "../workflows/signal_with_start_workflow" + +workflow_id = SecureRandom.uuid +run_id = Temporal.start_workflow( + SignalWithStartWorkflow, + "hit", + options: { + workflow_id: workflow_id, + timeouts: { + execution: 30 + }, + signal_name: "miss", + signal_input: 1 + } +) +Temporal.logger.info("Started workflow", {workflow_id: workflow_id, run_id: run_id}) +sleep(1) +Temporal.signal_workflow(SignalWithStartWorkflow, "miss", workflow_id, run_id, 2) +sleep(1) +Temporal.signal_workflow(SignalWithStartWorkflow, "hit", workflow_id, run_id, 3) +Temporal.await_workflow_result(SignalWithStartWorkflow, workflow_id: workflow_id, run_id: run_id) + +# Save in JSON, exactly like would be downloaded from Temporal UI +history_json = Temporal.get_workflow_history_json(workflow_id: workflow_id, run_id: run_id) +filename = File.expand_path("../spec/replay/histories/signal_with_start.json", File.dirname(__FILE__)) +File.open(filename, "w") do |f| + f.write(history_json) +end + +# Save in protobuf binary format +history_binary = Temporal.get_workflow_history_protobuf(workflow_id: workflow_id, run_id: run_id) +filename = File.expand_path("../spec/replay/histories/signal_with_start.protobin", File.dirname(__FILE__)) +File.open(filename, "wb") do |f| + f.write(history_binary) +end diff --git a/examples/docker-compose.yml b/examples/docker-compose.yml index 4bff724c..03d87744 100644 --- a/examples/docker-compose.yml +++ b/examples/docker-compose.yml @@ -2,7 +2,7 @@ version: '3.5' services: temporal: - image: temporalio/auto-setup:latest + image: temporalio/auto-setup:1.22.0 ports: - "7233:7233" environment: diff --git a/examples/init.rb b/examples/init.rb index ab4e1b3a..053c0e14 100644 --- a/examples/init.rb +++ b/examples/init.rb @@ -8,10 +8,13 @@ metrics_logger = Logger.new(STDOUT, progname: 'metrics') +DEFAULT_NAMESPACE = 'ruby-samples'.freeze +DEFAULT_TASK_QUEUE = 'general'.freeze + Temporal.configure do |config| config.host = ENV.fetch('TEMPORAL_HOST', 'localhost') config.port = ENV.fetch('TEMPORAL_PORT', 7233).to_i - config.namespace = ENV.fetch('TEMPORAL_NAMESPACE', 'ruby-samples') - config.task_queue = ENV.fetch('TEMPORAL_TASK_QUEUE', 'general') + config.namespace = ENV.fetch('TEMPORAL_NAMESPACE', DEFAULT_NAMESPACE) + config.task_queue = ENV.fetch('TEMPORAL_TASK_QUEUE', DEFAULT_TASK_QUEUE) config.metrics_adapter = Temporal::MetricsAdapters::Log.new(metrics_logger) end diff --git a/examples/spec/helpers.rb b/examples/spec/helpers.rb index f2e614e4..4d4c65a4 100644 --- a/examples/spec/helpers.rb +++ b/examples/spec/helpers.rb @@ -21,7 +21,7 @@ def wait_for_workflow_completion(workflow_id, run_id) def fetch_history(workflow_id, run_id, options = {}) connection = Temporal.send(:default_client).send(:connection) options = { - namespace: Temporal.configuration.namespace, + namespace: integration_spec_namespace, workflow_id: workflow_id, run_id: run_id, }.merge(options) @@ -30,6 +30,10 @@ def fetch_history(workflow_id, run_id, options = {}) end def integration_spec_namespace - ENV.fetch('TEMPORAL_NAMESPACE', 'ruby-samples') + ENV.fetch('TEMPORAL_NAMESPACE', DEFAULT_NAMESPACE) + end + + def integration_spec_task_queue + ENV.fetch('TEMPORAL_TASK_QUEUE', DEFAULT_TASK_QUEUE) end end diff --git a/examples/spec/integration/call_failing_activity_workflow_spec.rb b/examples/spec/integration/call_failing_activity_workflow_spec.rb index c39853a6..090dd312 100644 --- a/examples/spec/integration/call_failing_activity_workflow_spec.rb +++ 
b/examples/spec/integration/call_failing_activity_workflow_spec.rb @@ -1,11 +1,6 @@ require 'workflows/call_failing_activity_workflow' describe CallFailingActivityWorkflow, :integration do - - class TestDeserializer - include Temporal::Concerns::Payloads - end - it 'correctly re-raises an activity-thrown exception in the workflow' do workflow_id = SecureRandom.uuid expected_message = "a failure message" diff --git a/examples/spec/integration/converter_spec.rb b/examples/spec/integration/converter_spec.rb index 0a97f075..576ff55f 100644 --- a/examples/spec/integration/converter_spec.rb +++ b/examples/spec/integration/converter_spec.rb @@ -3,22 +3,24 @@ require 'grpc/errors' describe 'Converter', :integration do - around(:each) do |example| - task_queue = Temporal.configuration.task_queue + let(:codec) do + Temporal::Connection::Converter::Codec::Chain.new( + payload_codecs: [ + Temporal::CryptPayloadCodec.new + ] + ) + end + around(:each) do |example| Temporal.configure do |config| config.task_queue = 'crypt' - config.payload_codec = Temporal::Connection::Converter::Codec::Chain.new( - payload_codecs: [ - Temporal::CryptPayloadCodec.new - ] - ) + config.payload_codec = codec end example.run ensure Temporal.configure do |config| - config.task_queue = task_queue + config.task_queue = integration_spec_task_queue config.payload_codec = Temporal::Configuration::DEFAULT_PAYLOAD_CODEC end end @@ -67,8 +69,6 @@ completion_event = events[:EVENT_TYPE_WORKFLOW_EXECUTION_COMPLETED].first result = completion_event.workflow_execution_completed_event_attributes.result - payload_codec = Temporal.configuration.payload_codec - - expect(payload_codec.decodes(result).payloads.first.data).to eq('"Hello World, Tom"') + expect(codec.decodes(result).payloads.first.data).to eq('"Hello World, Tom"') end end diff --git a/examples/spec/integration/create_schedule_spec.rb b/examples/spec/integration/create_schedule_spec.rb new file mode 100644 index 00000000..a7ae3a40 --- /dev/null +++ b/examples/spec/integration/create_schedule_spec.rb @@ -0,0 +1,87 @@ +require "temporal/errors" +require "temporal/schedule/backfill" +require "temporal/schedule/calendar" +require "temporal/schedule/interval" +require "temporal/schedule/schedule" +require "temporal/schedule/schedule_spec" +require "temporal/schedule/schedule_policies" +require "temporal/schedule/schedule_state" +require "temporal/schedule/start_workflow_action" + +describe "Temporal.create_schedule", :integration do + let(:example_schedule) do + workflow_id = SecureRandom.uuid + Temporal::Schedule::Schedule.new( + spec: Temporal::Schedule::ScheduleSpec.new( + calendars: [Temporal::Schedule::Calendar.new(day_of_week: "*", hour: "18", minute: "30")], + intervals: [Temporal::Schedule::Interval.new(every: 6000, offset: 300)], + cron_expressions: ["@hourly"], + jitter: 30, + # Set an end time so that the test schedule doesn't run forever + end_time: Time.now + 600 + ), + action: Temporal::Schedule::StartWorkflowAction.new( + "HelloWorldWorkflow", + "Test", + options: { + workflow_id: workflow_id, + task_queue: integration_spec_task_queue + } + ), + policies: Temporal::Schedule::SchedulePolicies.new( + overlap_policy: :buffer_one + ), + state: Temporal::Schedule::ScheduleState.new( + notes: "Created by integration test" + ) + ) + end + + it "can create schedules" do + namespace = integration_spec_namespace + + schedule_id = SecureRandom.uuid + + create_response = Temporal.create_schedule( + namespace, + schedule_id, + example_schedule, + memo: {"schedule_memo" => "schedule 
memo value"}, + trigger_immediately: true, + backfill: Temporal::Schedule::Backfill.new(start_time: (Date.today - 90).to_time, end_time: Time.now) + ) + expect(create_response).to(be_an_instance_of(Temporalio::Api::WorkflowService::V1::CreateScheduleResponse)) + + describe_response = Temporal.describe_schedule(namespace, schedule_id) + + expect(describe_response.memo).to(eq({"schedule_memo" => "schedule memo value"})) + expect(describe_response.schedule.spec.jitter.seconds).to(eq(30)) + expect(describe_response.schedule.policies.overlap_policy).to(eq(:SCHEDULE_OVERLAP_POLICY_BUFFER_ONE)) + expect(describe_response.schedule.action.start_workflow.workflow_type.name).to(eq("HelloWorldWorkflow")) + expect(describe_response.schedule.state.notes).to(eq("Created by integration test")) + end + + it "can create schedules with a minimal set of fields" do + namespace = integration_spec_namespace + schedule_id = SecureRandom.uuid + + schedule = Temporal::Schedule::Schedule.new( + spec: Temporal::Schedule::ScheduleSpec.new( + cron_expressions: ["@hourly"], + # Set an end time so that the test schedule doesn't run forever + end_time: Time.now + 600 + ), + action: Temporal::Schedule::StartWorkflowAction.new( + "HelloWorldWorkflow", + "Test", + options: {task_queue: integration_spec_task_queue} + ) + ) + + Temporal.create_schedule(namespace, schedule_id, schedule) + + describe_response = Temporal.describe_schedule(namespace, schedule_id) + expect(describe_response.schedule.action.start_workflow.workflow_type.name).to(eq("HelloWorldWorkflow")) + expect(describe_response.schedule.policies.overlap_policy).to(eq(:SCHEDULE_OVERLAP_POLICY_SKIP)) + end +end diff --git a/examples/spec/integration/delete_schedule_spec.rb b/examples/spec/integration/delete_schedule_spec.rb new file mode 100644 index 00000000..c621710d --- /dev/null +++ b/examples/spec/integration/delete_schedule_spec.rb @@ -0,0 +1,50 @@ +require "temporal/errors" +require "temporal/schedule/schedule" +require "temporal/schedule/schedule_spec" +require "temporal/schedule/start_workflow_action" + +describe "Temporal.delete_schedule", :integration do + let(:example_schedule) do + Temporal::Schedule::Schedule.new( + spec: Temporal::Schedule::ScheduleSpec.new( + cron_expressions: ["@hourly"], + # Set an end time so that the test schedule doesn't run forever + end_time: Time.now + 600 + ), + action: Temporal::Schedule::StartWorkflowAction.new( + "HelloWorldWorkflow", + "Test", + options: { + task_queue: integration_spec_task_queue + } + ) + ) + end + + it "can delete schedules" do + namespace = integration_spec_namespace + + schedule_id = SecureRandom.uuid + + Temporal.create_schedule(namespace, schedule_id, example_schedule) + describe_response = Temporal.describe_schedule(namespace, schedule_id) + expect(describe_response.schedule.action.start_workflow.workflow_type.name).to(eq("HelloWorldWorkflow")) + + Temporal.delete_schedule(namespace, schedule_id) + + # Now that the schedule is delted it should raise a not found error + expect do + Temporal.describe_schedule(namespace, schedule_id) + end + .to(raise_error(Temporal::NotFoundFailure)) + end + + it "raises a NotFoundFailure if a schedule doesn't exist" do + namespace = integration_spec_namespace + + expect do + Temporal.delete_schedule(namespace, "some-invalid-schedule-id") + end + .to(raise_error(Temporal::NotFoundFailure)) + end +end diff --git a/examples/spec/integration/handling_structured_error_workflow_spec.rb b/examples/spec/integration/handling_structured_error_workflow_spec.rb index 
094fb139..91096453 100644 --- a/examples/spec/integration/handling_structured_error_workflow_spec.rb +++ b/examples/spec/integration/handling_structured_error_workflow_spec.rb @@ -5,8 +5,6 @@ # That worker runs a task queue, error_serialization_v2. This setup code will # route workflow requests to that task queue. around(:each) do |example| - task_queue = Temporal.configuration.task_queue - Temporal.configure do |config| config.task_queue = 'error_serialization_v2' end @@ -14,7 +12,7 @@ example.run ensure Temporal.configure do |config| - config.task_queue = task_queue + config.task_queue = integration_spec_task_queue end end diff --git a/examples/spec/integration/list_schedules_spec.rb b/examples/spec/integration/list_schedules_spec.rb new file mode 100644 index 00000000..abd6b862 --- /dev/null +++ b/examples/spec/integration/list_schedules_spec.rb @@ -0,0 +1,109 @@ +require "timeout" +require "temporal/errors" +require "temporal/schedule/backfill" +require "temporal/schedule/calendar" +require "temporal/schedule/interval" +require "temporal/schedule/schedule" +require "temporal/schedule/schedule_spec" +require "temporal/schedule/schedule_policies" +require "temporal/schedule/schedule_state" +require "temporal/schedule/start_workflow_action" + +describe "Temporal.list_schedules", :integration do + let(:example_schedule) do + workflow_id = SecureRandom.uuid + Temporal::Schedule::Schedule.new( + spec: Temporal::Schedule::ScheduleSpec.new( + cron_expressions: ["@hourly"], + # Set an end time so that the test schedule doesn't run forever + end_time: Time.now + 600 + ), + action: Temporal::Schedule::StartWorkflowAction.new( + "HelloWorldWorkflow", + "Test", + options: { + task_queue: integration_spec_task_queue + } + ) + ) + end + + def cleanup + namespace = integration_spec_namespace + loop do + resp = Temporal.list_schedules(namespace, maximum_page_size: 1000) + resp.schedules.each do |schedule| + begin + Temporal.delete_schedule(namespace, schedule.schedule_id) + rescue Temporal::NotFoundFailure + # This sometimes throws if a schedule has already been 'completed' (end time is reached) + end + end + break if resp.next_page_token == "" + end + end + + before do + cleanup + end + + + it "can list schedules with pagination" do + namespace = integration_spec_namespace + + 10.times do + schedule_id = SecureRandom.uuid + Temporal.create_schedule(namespace, schedule_id, example_schedule) + end + + # list_schedules is eventually consistent. 
Wait until at least 10 schedules are returned + Timeout.timeout(10) do + loop do + result = Temporal.list_schedules(namespace, maximum_page_size: 100) + + break if result && result.schedules.count >= 10 + + sleep(0.5) + end + end + + page_one = Temporal.list_schedules(namespace, maximum_page_size: 2) + expect(page_one.schedules.count).to(eq(2)) + page_two = Temporal.list_schedules(namespace, next_page_token: page_one.next_page_token, maximum_page_size: 8) + expect(page_two.schedules.count).to(eq(8)) + + # ensure that we got dfifereent schedules in each page + page_two_schedule_ids = page_two.schedules.map(&:schedule_id) + page_one.schedules.each do |schedule| + expect(page_two_schedule_ids).not_to(include(schedule.schedule_id)) + end + end + + it "roundtrip encodes/decodes memo with payload" do + namespace = integration_spec_namespace + schedule_id = "schedule_with_encoded_memo_payload-#{SecureRandom.uuid}}" + Temporal.create_schedule( + namespace, + schedule_id, + example_schedule, + memo: {"schedule_memo" => "schedule memo value"} + ) + + resp = nil + matching_schedule = nil + + # list_schedules is eventually consistent. Wait until our created schedule is returned + Timeout.timeout(10) do + loop do + resp = Temporal.list_schedules(namespace, maximum_page_size: 1000) + + matching_schedule = resp.schedules.find { |s| s.schedule_id == schedule_id } + break unless matching_schedule.nil? + + sleep(0.1) + end + end + + expect(matching_schedule.memo).to(eq({"schedule_memo" => "schedule memo value"})) + end +end diff --git a/examples/spec/integration/metadata_workflow_spec.rb b/examples/spec/integration/metadata_workflow_spec.rb index 508c3af8..2fd0b1e6 100644 --- a/examples/spec/integration/metadata_workflow_spec.rb +++ b/examples/spec/integration/metadata_workflow_spec.rb @@ -16,7 +16,7 @@ run_id: run_id, ) - expect(actual_result.task_queue).to eq(Temporal.configuration.task_queue) + expect(actual_result.task_queue).to eq(integration_spec_task_queue) end it 'workflow can retrieve its headers' do diff --git a/examples/spec/integration/pause_schedule_spec.rb b/examples/spec/integration/pause_schedule_spec.rb new file mode 100644 index 00000000..46e8b8ce --- /dev/null +++ b/examples/spec/integration/pause_schedule_spec.rb @@ -0,0 +1,44 @@ +require "temporal/schedule/schedule" +require "temporal/schedule/calendar" +require "temporal/schedule/schedule_spec" +require "temporal/schedule/schedule_policies" +require "temporal/schedule/schedule_state" +require "temporal/schedule/start_workflow_action" + +describe "Temporal.pause_schedule", :integration do + let(:example_schedule) do + Temporal::Schedule::Schedule.new( + spec: Temporal::Schedule::ScheduleSpec.new( + cron_expressions: ["@hourly"], + # Set an end time so that the test schedule doesn't run forever + end_time: Time.now + 600 + ), + action: Temporal::Schedule::StartWorkflowAction.new( + "HelloWorldWorkflow", + "Test", + options: { + task_queue: integration_spec_task_queue + } + ) + ) + end + + it "can pause and unpause a schedule" do + namespace = integration_spec_namespace + schedule_id = SecureRandom.uuid + + Temporal.create_schedule(namespace, schedule_id, example_schedule) + describe_response = Temporal.describe_schedule(namespace, schedule_id) + expect(describe_response.schedule.state.paused).to(eq(false)) + + Temporal.pause_schedule(namespace, schedule_id) + + describe_response = Temporal.describe_schedule(namespace, schedule_id) + expect(describe_response.schedule.state.paused).to(eq(true)) + + Temporal.unpause_schedule(namespace, 
schedule_id) + + describe_response = Temporal.describe_schedule(namespace, schedule_id) + expect(describe_response.schedule.state.paused).to(eq(false)) + end +end diff --git a/examples/spec/integration/reset_workflow_spec.rb b/examples/spec/integration/reset_workflow_spec.rb index 57f41d82..7305fae4 100644 --- a/examples/spec/integration/reset_workflow_spec.rb +++ b/examples/spec/integration/reset_workflow_spec.rb @@ -2,7 +2,7 @@ require 'workflows/query_workflow' require 'temporal/reset_reapply_type' -describe 'Temporal.reset_workflow' do +describe 'Temporal.reset_workflow', :integration do it 'can reset a closed workflow to the beginning' do workflow_id = SecureRandom.uuid original_run_id = Temporal.start_workflow( @@ -19,7 +19,7 @@ expect(original_result).to eq('Hello World, Test') new_run_id = Temporal.reset_workflow( - Temporal.configuration.namespace, + integration_spec_namespace, workflow_id, original_run_id, strategy: Temporal::ResetStrategy::FIRST_WORKFLOW_TASK @@ -36,7 +36,7 @@ def reset_hello_world_workflow_twice(workflow_id, original_run_id, request_id:) 2.times.map do new_run_id = Temporal.reset_workflow( - Temporal.configuration.namespace, + integration_spec_namespace, workflow_id, original_run_id, strategy: Temporal::ResetStrategy::FIRST_WORKFLOW_TASK, @@ -130,7 +130,7 @@ def start_query_workflow_and_signal_three_times workflow_id, original_run_id = start_query_workflow_and_signal_three_times.values_at(:workflow_id, :run_id) new_run_id = Temporal.reset_workflow( - Temporal.configuration.namespace, + integration_spec_namespace, workflow_id, original_run_id, strategy: Temporal::ResetStrategy::FIRST_WORKFLOW_TASK, @@ -147,7 +147,7 @@ def start_query_workflow_and_signal_three_times workflow_id, original_run_id = start_query_workflow_and_signal_three_times.values_at(:workflow_id, :run_id) new_run_id = Temporal.reset_workflow( - Temporal.configuration.namespace, + integration_spec_namespace, workflow_id, original_run_id, strategy: Temporal::ResetStrategy::FIRST_WORKFLOW_TASK, @@ -160,4 +160,4 @@ def start_query_workflow_and_signal_three_times Temporal.terminate_workflow(workflow_id, run_id: new_run_id) end end - \ No newline at end of file + diff --git a/examples/spec/integration/start_workflow_spec.rb b/examples/spec/integration/start_workflow_spec.rb index 99d0d7c4..8cf6a46c 100644 --- a/examples/spec/integration/start_workflow_spec.rb +++ b/examples/spec/integration/start_workflow_spec.rb @@ -1,7 +1,7 @@ require 'workflows/hello_world_workflow' require 'workflows/long_workflow' -describe 'Temporal.start_workflow' do +describe 'Temporal.start_workflow', :integration do let(:workflow_id) { SecureRandom.uuid } it 'starts a workflow using a class reference' do @@ -21,15 +21,15 @@ it 'starts a workflow using a string reference' do run_id = Temporal.start_workflow('HelloWorldWorkflow', 'Test', options: { workflow_id: workflow_id, - namespace: Temporal.configuration.namespace, - task_queue: Temporal.configuration.task_queue + namespace: integration_spec_namespace, + task_queue: integration_spec_task_queue }) result = Temporal.await_workflow_result( 'HelloWorldWorkflow', workflow_id: workflow_id, run_id: run_id, - namespace: Temporal.configuration.namespace + namespace: integration_spec_namespace ) expect(result).to eq('Hello World, Test') @@ -82,11 +82,11 @@ }) execution_1 = Temporal.fetch_workflow_execution_info( - Temporal.configuration.namespace, + integration_spec_namespace, workflow_id, run_id_1) execution_2 = Temporal.fetch_workflow_execution_info( - 
Temporal.configuration.namespace, + integration_spec_namespace, workflow_id, run_id_2) diff --git a/examples/spec/integration/trigger_schedule_spec.rb b/examples/spec/integration/trigger_schedule_spec.rb new file mode 100644 index 00000000..f90c8f0b --- /dev/null +++ b/examples/spec/integration/trigger_schedule_spec.rb @@ -0,0 +1,49 @@ +require "timeout" +require "temporal/schedule/schedule" +require "temporal/schedule/calendar" +require "temporal/schedule/schedule_spec" +require "temporal/schedule/schedule_policies" +require "temporal/schedule/schedule_state" +require "temporal/schedule/start_workflow_action" + +describe "Temporal.trigger_schedule", :integration do + let(:example_schedule) do + Temporal::Schedule::Schedule.new( + spec: Temporal::Schedule::ScheduleSpec.new( + # Set this to a date in the future to avoid triggering the schedule immediately + calendars: [Temporal::Schedule::Calendar.new(year: "2055", month: "12", day_of_month: "25")] + ), + action: Temporal::Schedule::StartWorkflowAction.new( + "HelloWorldWorkflow", + "Test", + options: { + task_queue: integration_spec_task_queue + } + ) + ) + end + + it "can trigger a schedule to run immediately" do + namespace = integration_spec_namespace + schedule_id = SecureRandom.uuid + + Temporal.create_schedule(namespace, schedule_id, example_schedule) + describe_response = Temporal.describe_schedule(namespace, schedule_id) + expect(describe_response.info.recent_actions.size).to(eq(0)) + + # Trigger the schedule and wait to see that it actually ran + Temporal.trigger_schedule(namespace, schedule_id, overlap_policy: :buffer_one) + + Timeout.timeout(10) do + loop do + describe_response = Temporal.describe_schedule(namespace, schedule_id) + + break if describe_response.info && describe_response.info.recent_actions.size >= 1 + + sleep(0.5) + end + end + + expect(describe_response.info.recent_actions.size).to(eq(1)) + end +end diff --git a/examples/spec/integration/update_schedule_spec.rb b/examples/spec/integration/update_schedule_spec.rb new file mode 100644 index 00000000..5623894d --- /dev/null +++ b/examples/spec/integration/update_schedule_spec.rb @@ -0,0 +1,103 @@ +require "temporal/errors" +require "temporal/schedule/schedule" +require "temporal/schedule/schedule_spec" +require "temporal/schedule/schedule_policies" +require "temporal/schedule/schedule_state" +require "temporal/schedule/start_workflow_action" + +describe "Temporal.update_schedule", :integration do + let(:example_schedule) do + Temporal::Schedule::Schedule.new( + spec: Temporal::Schedule::ScheduleSpec.new( + cron_expressions: ["@hourly"], + jitter: 30, + # Set an end time so that the test schedule doesn't run forever + end_time: Time.now + 600 + ), + action: Temporal::Schedule::StartWorkflowAction.new( + "HelloWorldWorkflow", + "Test", + options: { + task_queue: integration_spec_task_queue + } + ), + policies: Temporal::Schedule::SchedulePolicies.new( + overlap_policy: :buffer_one + ), + state: Temporal::Schedule::ScheduleState.new( + notes: "Created by integration test" + ) + ) + end + + let(:updated_schedule) do + Temporal::Schedule::Schedule.new( + spec: Temporal::Schedule::ScheduleSpec.new( + cron_expressions: ["@hourly"], + jitter: 500, + # Set an end time so that the test schedule doesn't run forever + end_time: Time.now + 600 + ), + action: Temporal::Schedule::StartWorkflowAction.new( + "HelloWorldWorkflow", + "UpdatedInput", + options: { + task_queue: integration_spec_task_queue + } + ), + policies: Temporal::Schedule::SchedulePolicies.new( + overlap_policy: 
:buffer_all + ), + state: Temporal::Schedule::ScheduleState.new( + notes: "Updated by integration test" + ) + ) + end + + it "can update schedules" do + namespace = integration_spec_namespace + schedule_id = SecureRandom.uuid + + Temporal.create_schedule(namespace, schedule_id, example_schedule) + + describe_response = Temporal.describe_schedule(namespace, schedule_id) + expect(describe_response.schedule.spec.jitter.seconds).to(eq(30)) + expect(describe_response.schedule.policies.overlap_policy).to(eq(:SCHEDULE_OVERLAP_POLICY_BUFFER_ONE)) + expect(describe_response.schedule.action.start_workflow.workflow_type.name).to(eq("HelloWorldWorkflow")) + expect(describe_response.schedule.state.notes).to(eq("Created by integration test")) + + Temporal.update_schedule(namespace, schedule_id, updated_schedule) + updated_describe = Temporal.describe_schedule(namespace, schedule_id) + expect(updated_describe.schedule.spec.jitter.seconds).to(eq(500)) + expect(updated_describe.schedule.policies.overlap_policy).to(eq(:SCHEDULE_OVERLAP_POLICY_BUFFER_ALL)) + expect(updated_describe.schedule.state.notes).to(eq("Updated by integration test")) + end + + it "does not update if conflict token doesnt match" do + namespace = integration_spec_namespace + schedule_id = SecureRandom.uuid + + initial_response = Temporal.create_schedule(namespace, schedule_id, example_schedule) + + # Update the schedule but pass the incorrect token + Temporal.update_schedule(namespace, schedule_id, updated_schedule, conflict_token: "invalid token") + + # The schedule should not have been updated (we don't get an error message from the server in this case) + describe_response = Temporal.describe_schedule(namespace, schedule_id) + expect(describe_response.schedule.spec.jitter.seconds).to(eq(30)) + + # If we pass the right conflict token the update should be applied + Temporal.update_schedule(namespace, schedule_id, updated_schedule, conflict_token: initial_response.conflict_token) + updated_describe = Temporal.describe_schedule(namespace, schedule_id) + expect(updated_describe.schedule.spec.jitter.seconds).to(eq(500)) + end + + it "raises a NotFoundFailure if a schedule doesn't exist" do + namespace = integration_spec_namespace + + expect do + Temporal.update_schedule(namespace, "some-invalid-schedule-id", updated_schedule) + end + .to(raise_error(Temporal::NotFoundFailure)) + end +end diff --git a/examples/spec/replay/histories/signal_with_start.binpb b/examples/spec/replay/histories/signal_with_start.binpb new file mode 100644 index 00000000..7d7bf89c Binary files /dev/null and b/examples/spec/replay/histories/signal_with_start.binpb differ diff --git a/examples/spec/replay/histories/signal_with_start.json b/examples/spec/replay/histories/signal_with_start.json new file mode 100644 index 00000000..fe301a04 --- /dev/null +++ b/examples/spec/replay/histories/signal_with_start.json @@ -0,0 +1,361 @@ +{ + "events": [ + { + "eventId": "1", + "eventTime": "2024-05-28T02:46:26.852786129Z", + "eventType": "EVENT_TYPE_WORKFLOW_EXECUTION_STARTED", + "taskId": "31457280", + "workflowExecutionStartedEventAttributes": { + "workflowType": { + "name": "SignalWithStartWorkflow" + }, + "taskQueue": { + "name": "general", + "kind": "TASK_QUEUE_KIND_NORMAL" + }, + "input": { + "payloads": [ + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": "ImhpdCI=" + } + ] + }, + "workflowExecutionTimeout": "30s", + "workflowRunTimeout": "30s", + "workflowTaskTimeout": "10s", + "originalExecutionRunId": "c6e8de96-4e18-409d-8e60-38d58f2f11b9", + 
"identity": "4514@DESKTOP-JRJDVRG\n", + "firstExecutionRunId": "c6e8de96-4e18-409d-8e60-38d58f2f11b9", + "attempt": 1, + "workflowExecutionExpirationTime": "2024-05-28T02:46:56.853Z", + "firstWorkflowTaskBackoff": "0s", + "memo": { + }, + "searchAttributes": { + }, + "header": { + } + } + }, + { + "eventId": "2", + "eventTime": "2024-05-28T02:46:26.852896774Z", + "eventType": "EVENT_TYPE_WORKFLOW_EXECUTION_SIGNALED", + "taskId": "31457281", + "workflowExecutionSignaledEventAttributes": { + "signalName": "miss", + "input": { + "payloads": [ + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": "MQ==" + } + ] + }, + "identity": "4514@DESKTOP-JRJDVRG\n", + "header": { + } + } + }, + { + "eventId": "3", + "eventTime": "2024-05-28T02:46:26.852900524Z", + "eventType": "EVENT_TYPE_WORKFLOW_TASK_SCHEDULED", + "taskId": "31457282", + "workflowTaskScheduledEventAttributes": { + "taskQueue": { + "name": "general", + "kind": "TASK_QUEUE_KIND_NORMAL" + }, + "startToCloseTimeout": "10s", + "attempt": 1 + } + }, + { + "eventId": "4", + "eventTime": "2024-05-28T02:46:26.873042948Z", + "eventType": "EVENT_TYPE_WORKFLOW_TASK_STARTED", + "taskId": "31457287", + "workflowTaskStartedEventAttributes": { + "scheduledEventId": "3", + "identity": "4417@DESKTOP-JRJDVRG\n", + "requestId": "0074c78e-013b-4845-86d5-f83f1f6feb61", + "historySizeBytes": "421" + } + }, + { + "eventId": "5", + "eventTime": "2024-05-28T02:46:26.896346434Z", + "eventType": "EVENT_TYPE_WORKFLOW_TASK_COMPLETED", + "taskId": "31457291", + "workflowTaskCompletedEventAttributes": { + "scheduledEventId": "3", + "startedEventId": "4", + "identity": "4417@DESKTOP-JRJDVRG\n", + "binaryChecksum": "07d96d88e3691440609a4f5de039969b14a4e6f8", + "sdkMetadata": { + "langUsedFlags": [ + 2 + ] + } + } + }, + { + "eventId": "6", + "eventTime": "2024-05-28T02:46:27.869664722Z", + "eventType": "EVENT_TYPE_WORKFLOW_EXECUTION_SIGNALED", + "taskId": "31457294", + "workflowExecutionSignaledEventAttributes": { + "signalName": "miss", + "input": { + "payloads": [ + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": "Mg==" + } + ] + }, + "identity": "4514@DESKTOP-JRJDVRG\n" + } + }, + { + "eventId": "7", + "eventTime": "2024-05-28T02:46:27.869669568Z", + "eventType": "EVENT_TYPE_WORKFLOW_TASK_SCHEDULED", + "taskId": "31457295", + "workflowTaskScheduledEventAttributes": { + "taskQueue": { + "name": "general", + "kind": "TASK_QUEUE_KIND_NORMAL" + }, + "startToCloseTimeout": "10s", + "attempt": 1 + } + }, + { + "eventId": "8", + "eventTime": "2024-05-28T02:46:27.881436143Z", + "eventType": "EVENT_TYPE_WORKFLOW_TASK_STARTED", + "taskId": "31457298", + "workflowTaskStartedEventAttributes": { + "scheduledEventId": "7", + "identity": "4417@DESKTOP-JRJDVRG\n", + "requestId": "b1c0b0cd-cdb1-4bfd-973c-fa43eef6dfb5", + "historySizeBytes": "749" + } + }, + { + "eventId": "9", + "eventTime": "2024-05-28T02:46:27.907949953Z", + "eventType": "EVENT_TYPE_WORKFLOW_TASK_COMPLETED", + "taskId": "31457302", + "workflowTaskCompletedEventAttributes": { + "scheduledEventId": "7", + "startedEventId": "8", + "identity": "4417@DESKTOP-JRJDVRG\n", + "binaryChecksum": "07d96d88e3691440609a4f5de039969b14a4e6f8" + } + }, + { + "eventId": "10", + "eventTime": "2024-05-28T02:46:28.883578435Z", + "eventType": "EVENT_TYPE_WORKFLOW_EXECUTION_SIGNALED", + "taskId": "31457304", + "workflowExecutionSignaledEventAttributes": { + "signalName": "hit", + "input": { + "payloads": [ + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": "Mw==" + } + ] + }, + 
"identity": "4514@DESKTOP-JRJDVRG\n" + } + }, + { + "eventId": "11", + "eventTime": "2024-05-28T02:46:28.883586706Z", + "eventType": "EVENT_TYPE_WORKFLOW_TASK_SCHEDULED", + "taskId": "31457305", + "workflowTaskScheduledEventAttributes": { + "taskQueue": { + "name": "general", + "kind": "TASK_QUEUE_KIND_NORMAL" + }, + "startToCloseTimeout": "10s", + "attempt": 1 + } + }, + { + "eventId": "12", + "eventTime": "2024-05-28T02:46:28.899268187Z", + "eventType": "EVENT_TYPE_WORKFLOW_TASK_STARTED", + "taskId": "31457308", + "workflowTaskStartedEventAttributes": { + "scheduledEventId": "11", + "identity": "4417@DESKTOP-JRJDVRG\n", + "requestId": "4840d372-5d7f-46f0-af41-85c9fcac752d", + "historySizeBytes": "1071" + } + }, + { + "eventId": "13", + "eventTime": "2024-05-28T02:46:28.925343005Z", + "eventType": "EVENT_TYPE_WORKFLOW_TASK_COMPLETED", + "taskId": "31457312", + "workflowTaskCompletedEventAttributes": { + "scheduledEventId": "11", + "startedEventId": "12", + "identity": "4417@DESKTOP-JRJDVRG\n", + "binaryChecksum": "07d96d88e3691440609a4f5de039969b14a4e6f8" + } + }, + { + "eventId": "14", + "eventTime": "2024-05-28T02:46:28.925386163Z", + "eventType": "EVENT_TYPE_ACTIVITY_TASK_SCHEDULED", + "taskId": "31457313", + "activityTaskScheduledEventAttributes": { + "activityId": "14", + "activityType": { + "name": "HelloWorldActivity" + }, + "taskQueue": { + "name": "general", + "kind": "TASK_QUEUE_KIND_NORMAL" + }, + "header": { + "fields": { + "test-header": { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": "InRlc3Qi" + } + } + }, + "input": { + "payloads": [ + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": "ImV4cGVjdGVkIHNpZ25hbCI=" + } + ] + }, + "scheduleToCloseTimeout": "30s", + "scheduleToStartTimeout": "30s", + "startToCloseTimeout": "30s", + "heartbeatTimeout": "0s", + "workflowTaskCompletedEventId": "13", + "retryPolicy": { + "initialInterval": "1s", + "backoffCoefficient": 2, + "maximumInterval": "100s" + } + } + }, + { + "eventId": "15", + "eventTime": "2024-05-28T02:46:28.944893259Z", + "eventType": "EVENT_TYPE_ACTIVITY_TASK_STARTED", + "taskId": "31457317", + "activityTaskStartedEventAttributes": { + "scheduledEventId": "14", + "identity": "4417@DESKTOP-JRJDVRG\n", + "requestId": "73f99ef3-e606-421a-ad79-a4e43e41ceba", + "attempt": 1 + } + }, + { + "eventId": "16", + "eventTime": "2024-05-28T02:46:29.008828231Z", + "eventType": "EVENT_TYPE_ACTIVITY_TASK_COMPLETED", + "taskId": "31457318", + "activityTaskCompletedEventAttributes": { + "result": { + "payloads": [ + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": "IkhlbGxvIFdvcmxkLCBleHBlY3RlZCBzaWduYWwi" + } + ] + }, + "scheduledEventId": "14", + "startedEventId": "15", + "identity": "4417@DESKTOP-JRJDVRG\n" + } + }, + { + "eventId": "17", + "eventTime": "2024-05-28T02:46:29.008834769Z", + "eventType": "EVENT_TYPE_WORKFLOW_TASK_SCHEDULED", + "taskId": "31457319", + "workflowTaskScheduledEventAttributes": { + "taskQueue": { + "name": "general", + "kind": "TASK_QUEUE_KIND_NORMAL" + }, + "startToCloseTimeout": "10s", + "attempt": 1 + } + }, + { + "eventId": "18", + "eventTime": "2024-05-28T02:46:29.022515754Z", + "eventType": "EVENT_TYPE_WORKFLOW_TASK_STARTED", + "taskId": "31457322", + "workflowTaskStartedEventAttributes": { + "scheduledEventId": "17", + "identity": "4417@DESKTOP-JRJDVRG\n", + "requestId": "a24ea1bd-8584-41ae-8cc3-0880b8a946d1", + "historySizeBytes": "1713" + } + }, + { + "eventId": "19", + "eventTime": "2024-05-28T02:46:29.043259634Z", + "eventType": 
"EVENT_TYPE_WORKFLOW_TASK_COMPLETED", + "taskId": "31457326", + "workflowTaskCompletedEventAttributes": { + "scheduledEventId": "17", + "startedEventId": "18", + "identity": "4417@DESKTOP-JRJDVRG\n", + "binaryChecksum": "07d96d88e3691440609a4f5de039969b14a4e6f8" + } + }, + { + "eventId": "20", + "eventTime": "2024-05-28T02:46:29.043294503Z", + "eventType": "EVENT_TYPE_WORKFLOW_EXECUTION_COMPLETED", + "taskId": "31457327", + "workflowExecutionCompletedEventAttributes": { + "result": { + "payloads": [ + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": "Mw==" + } + ] + }, + "workflowTaskCompletedEventId": "19" + } + } + ] +} \ No newline at end of file diff --git a/examples/spec/replay/signal_with_start_spec.rb b/examples/spec/replay/signal_with_start_spec.rb new file mode 100644 index 00000000..13c1cb0d --- /dev/null +++ b/examples/spec/replay/signal_with_start_spec.rb @@ -0,0 +1,21 @@ +require "workflows/signal_with_start_workflow" +require "temporal/testing/replay_tester" +require "temporal/workflow/history/serialization" + +describe "signal with start" do + let(:replay_tester) { Temporal::Testing::ReplayTester.new } + + it "two misses, one hit, replay, json" do + replay_tester.replay_history( + SignalWithStartWorkflow, + Temporal::Workflow::History::Serialization.from_json_file("spec/replay/histories/signal_with_start.json") + ) + end + + it "two misses, one hit, replay, binary" do + replay_tester.replay_history( + SignalWithStartWorkflow, + Temporal::Workflow::History::Serialization.from_protobuf_file("spec/replay/histories/signal_with_start.binpb") + ) + end +end diff --git a/examples/workflows/signal_with_start_workflow.rb b/examples/workflows/signal_with_start_workflow.rb index ab94a5d1..cfee2bed 100644 --- a/examples/workflows/signal_with_start_workflow.rb +++ b/examples/workflows/signal_with_start_workflow.rb @@ -1,21 +1,25 @@ -require 'activities/hello_world_activity' +require "activities/hello_world_activity" class SignalWithStartWorkflow < Temporal::Workflow def execute(expected_signal) - initial_value = 'no signal received' + initial_value = "no signal received" received = initial_value workflow.on_signal do |signal, input| if signal == expected_signal - HelloWorldActivity.execute!('expected signal') + workflow.logger.info("Accepting expected signal #{signal}: #{input}") + HelloWorldActivity.execute!("expected signal") received = input + else + workflow.logger.info("Ignoring unexpected signal #{signal}: #{input}") end end # Wait for the activity in signal callbacks to complete. The workflow will # not automatically wait for any blocking calls made in callbacks to complete # before returning. 
+ workflow.logger.info("Waiting for expected signal #{expected_signal}") workflow.wait_until { received != initial_value } received end diff --git a/lib/temporal.rb b/lib/temporal.rb index 617c63b3..b9f49d55 100644 --- a/lib/temporal.rb +++ b/lib/temporal.rb @@ -8,6 +8,7 @@ require 'temporal/metrics' require 'temporal/json' require 'temporal/errors' +require 'temporal/schedule' require 'temporal/workflow/errors' module Temporal @@ -34,11 +35,25 @@ module Temporal :add_custom_search_attributes, :list_custom_search_attributes, :remove_custom_search_attributes, - :connection + :connection, + :list_schedules, + :describe_schedule, + :create_schedule, + :delete_schedule, + :update_schedule, + :trigger_schedule, + :pause_schedule, + :unpause_schedule, + :get_workflow_history, + :get_workflow_history_json, + :get_workflow_history_protobuf class << self def configure(&block) yield config + # Reset the singleton client after configuration was altered to ensure + # it is initialized with the latest attributes + @default_client = nil end def configuration diff --git a/lib/temporal/activity.rb b/lib/temporal/activity.rb index a3a726af..d5524a0a 100644 --- a/lib/temporal/activity.rb +++ b/lib/temporal/activity.rb @@ -1,4 +1,5 @@ require 'temporal/activity/workflow_convenience_methods' +require 'temporal/callable' require 'temporal/concerns/executable' require 'temporal/errors' @@ -9,7 +10,9 @@ class Activity def self.execute_in_context(context, input) activity = new(context) - activity.execute(*input) + callable = Temporal::Callable.new(method: activity.method(:execute)) + + callable.call(input) end def initialize(context) diff --git a/lib/temporal/activity/poller.rb b/lib/temporal/activity/poller.rb index 40259f16..859fb688 100644 --- a/lib/temporal/activity/poller.rb +++ b/lib/temporal/activity/poller.rb @@ -11,7 +11,8 @@ class Activity class Poller DEFAULT_OPTIONS = { thread_pool_size: 20, - poll_retry_seconds: 0 + poll_retry_seconds: 0, + max_tasks_per_second: 0 # unlimited }.freeze def initialize(namespace, task_queue, activity_lookup, config, middleware = [], options = {}) @@ -91,7 +92,8 @@ def poll_loop end def poll_for_task - connection.poll_activity_task_queue(namespace: namespace, task_queue: task_queue) + connection.poll_activity_task_queue(namespace: namespace, task_queue: task_queue, + max_tasks_per_second: max_tasks_per_second) rescue ::GRPC::Cancelled # We're shutting down and we've already reported that in the logs nil @@ -108,13 +110,17 @@ def poll_for_task def process(task) middleware_chain = Middleware::Chain.new(middleware) - TaskProcessor.new(task, namespace, activity_lookup, middleware_chain, config, heartbeat_thread_pool).process + TaskProcessor.new(task, task_queue, namespace, activity_lookup, middleware_chain, config, heartbeat_thread_pool).process end def poll_retry_seconds @options[:poll_retry_seconds] end + def max_tasks_per_second + @options[:max_tasks_per_second] + end + def thread_pool @thread_pool ||= ThreadPool.new( options[:thread_pool_size], diff --git a/lib/temporal/activity/task_processor.rb b/lib/temporal/activity/task_processor.rb index 51ae5408..ef20780b 100644 --- a/lib/temporal/activity/task_processor.rb +++ b/lib/temporal/activity/task_processor.rb @@ -2,7 +2,6 @@ require 'temporal/error_handler' require 'temporal/errors' require 'temporal/activity/context' -require 'temporal/concerns/payloads' require 'temporal/connection/retryer' require 'temporal/connection' require 'temporal/metric_keys' @@ -10,12 +9,11 @@ module Temporal class Activity class TaskProcessor 
- include Concerns::Payloads - - def initialize(task, namespace, activity_lookup, middleware_chain, config, heartbeat_thread_pool) + def initialize(task, task_queue, namespace, activity_lookup, middleware_chain, config, heartbeat_thread_pool) @task = task + @task_queue = task_queue @namespace = namespace - @metadata = Metadata.generate_activity_metadata(task, namespace) + @metadata = Metadata.generate_activity_metadata(task, namespace, config.converter) @task_token = task.task_token @activity_name = task.activity_type.name @activity_class = activity_lookup.find(activity_name) @@ -28,7 +26,7 @@ def process start_time = Time.now Temporal.logger.debug("Processing Activity task", metadata.to_h) - Temporal.metrics.timing(Temporal::MetricKeys::ACTIVITY_TASK_QUEUE_TIME, queue_time_ms, activity: activity_name, namespace: namespace, workflow: metadata.workflow_name) + Temporal.metrics.timing(Temporal::MetricKeys::ACTIVITY_TASK_QUEUE_TIME, queue_time_ms, metric_tags) context = Activity::Context.new(connection, metadata, config, heartbeat_thread_pool) @@ -37,7 +35,7 @@ def process end result = middleware_chain.invoke(metadata) do - activity_class.execute_in_context(context, from_payloads(task.input)) + activity_class.execute_in_context(context, config.converter.from_payloads(task.input)) end # Do not complete asynchronous activities, these should be completed manually @@ -52,13 +50,22 @@ def process end time_diff_ms = ((Time.now - start_time) * 1000).round - Temporal.metrics.timing(Temporal::MetricKeys::ACTIVITY_TASK_LATENCY, time_diff_ms, activity: activity_name, namespace: namespace, workflow: metadata.workflow_name) + Temporal.metrics.timing(Temporal::MetricKeys::ACTIVITY_TASK_LATENCY, time_diff_ms, metric_tags) Temporal.logger.debug("Activity task processed", metadata.to_h.merge(execution_time: time_diff_ms)) end + def metric_tags + { + activity: activity_name, + namespace: namespace, + task_queue: task_queue, + workflow: metadata.workflow_name + } + end + private - attr_reader :task, :namespace, :task_token, :activity_name, :activity_class, + attr_reader :task, :task_queue, :namespace, :task_token, :activity_name, :activity_class, :middleware_chain, :metadata, :config, :heartbeat_thread_pool def connection diff --git a/lib/temporal/callable.rb b/lib/temporal/callable.rb new file mode 100644 index 00000000..3bec64fd --- /dev/null +++ b/lib/temporal/callable.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +module Temporal + class Callable + def initialize(method:) + @method = method + end + + def call(input) + if input.is_a?(Array) && input.last.instance_of?(Hash) + *args, kwargs = input + + @method.call(*args, **kwargs) + else + @method.call(*input) + end + end + end +end diff --git a/lib/temporal/client.rb b/lib/temporal/client.rb index 02473432..c2e83e1f 100644 --- a/lib/temporal/client.rb +++ b/lib/temporal/client.rb @@ -1,3 +1,4 @@ +require 'json' require 'temporal/execution_options' require 'temporal/connection' require 'temporal/activity' @@ -5,6 +6,7 @@ require 'temporal/workflow' require 'temporal/workflow/context_helpers' require 'temporal/workflow/history' +require 'temporal/workflow/history/serialization' require 'temporal/workflow/execution_info' require 'temporal/workflow/executions' require 'temporal/workflow/status' @@ -14,6 +16,7 @@ module Temporal class Client def initialize(config) @config = config + @converter = config.converter end # Start a workflow with an optional signal @@ -38,6 +41,7 @@ def initialize(config) # @option options [Hash] :timeouts check 
Temporal::Configuration::DEFAULT_TIMEOUTS # @option options [Hash] :headers # @option options [Hash] :search_attributes + # @option options [Integer] :start_delay determines the amount of seconds to wait before initiating a Workflow # # @return [String] workflow's run ID def start_workflow(workflow, *input, options: {}, **args) @@ -64,6 +68,7 @@ def start_workflow(workflow, *input, options: {}, **args) headers: config.header_propagator_chain.inject(execution_options.headers), memo: execution_options.memo, search_attributes: Workflow::Context::Helpers.process_search_attributes(execution_options.search_attributes), + start_delay: execution_options.start_delay ) else raise ArgumentError, 'If signal_input is provided, you must also provide signal_name' if signal_name.nil? @@ -82,7 +87,8 @@ def start_workflow(workflow, *input, options: {}, **args) memo: execution_options.memo, search_attributes: Workflow::Context::Helpers.process_search_attributes(execution_options.search_attributes), signal_name: signal_name, - signal_input: signal_input + signal_input: signal_input, + start_delay: execution_options.start_delay ) end @@ -249,7 +255,7 @@ def await_workflow_result(workflow, workflow_id:, run_id: nil, timeout: nil, nam case closed_event.type when 'WORKFLOW_EXECUTION_COMPLETED' payloads = closed_event.attributes.result - return ResultConverter.from_result_payloads(payloads) + return converter.from_result_payloads(payloads) when 'WORKFLOW_EXECUTION_TIMED_OUT' raise Temporal::WorkflowTimedOut when 'WORKFLOW_EXECUTION_TERMINATED' @@ -257,7 +263,7 @@ def await_workflow_result(workflow, workflow_id:, run_id: nil, timeout: nil, nam when 'WORKFLOW_EXECUTION_CANCELED' raise Temporal::WorkflowCanceled when 'WORKFLOW_EXECUTION_FAILED' - raise Temporal::Workflow::Errors.generate_error(closed_event.attributes.failure) + raise Temporal::Workflow::Errors.generate_error(closed_event.attributes.failure, converter) when 'WORKFLOW_EXECUTION_CONTINUED_AS_NEW' new_run_id = closed_event.attributes.new_execution_run_id # Throw to let the caller know they're not getting the result @@ -328,7 +334,7 @@ def reset_workflow(namespace, workflow_id, run_id, strategy: nil, workflow_task_ # for reference # @param details [String, Array, nil] optional details to be stored in history def terminate_workflow(workflow_id, namespace: nil, run_id: nil, reason: nil, details: nil) - namespace ||= Temporal.configuration.namespace + namespace ||= config.namespace connection.terminate_workflow_execution( namespace: namespace, @@ -353,7 +359,7 @@ def fetch_workflow_execution_info(namespace, workflow_id, run_id) run_id: run_id ) - Workflow::ExecutionInfo.generate_from(response.workflow_execution_info) + Workflow::ExecutionInfo.generate_from(response.workflow_execution_info, converter) end # Manually complete an activity @@ -397,36 +403,82 @@ def fail_activity(async_token, exception) # @param run_id [String] # # @return [Temporal::Workflow::History] workflow's execution history - def get_workflow_history(namespace:, workflow_id:, run_id:) + def get_workflow_history(namespace: nil, workflow_id:, run_id:) + next_page_token = nil + events = [] + loop do + response = + connection.get_workflow_execution_history( + namespace: namespace || config.default_execution_options.namespace, + workflow_id: workflow_id, + run_id: run_id, + next_page_token: next_page_token, + ) + events.concat(response.history.events.to_a) + next_page_token = response.next_page_token + + break if next_page_token.empty? 
+ end + + Workflow::History.new(events) + end + + # Fetch workflow's execution history as JSON. This output can be used for replay testing. + # + # @param namespace [String] + # @param workflow_id [String] + # @param run_id [String] optional + # @param pretty_print [Boolean] optional + # + # @return a JSON string representation of the history + def get_workflow_history_json(namespace: nil, workflow_id:, run_id: nil, pretty_print: true) history_response = connection.get_workflow_execution_history( - namespace: namespace, + namespace: namespace || config.default_execution_options.namespace, + workflow_id: workflow_id, + run_id: run_id + ) + Temporal::Workflow::History::Serialization.to_json(history_response.history) + end + + # Fetch workflow's execution history as protobuf binary. This output can be used for replay testing. + # + # @param namespace [String] + # @param workflow_id [String] + # @param run_id [String] optional + # + # @return a binary string representation of the history + def get_workflow_history_protobuf(namespace: nil, workflow_id:, run_id: nil) + history_response = connection.get_workflow_execution_history( + namespace: namespace || config.default_execution_options.namespace, workflow_id: workflow_id, run_id: run_id ) - Workflow::History.new(history_response.history.events) + # Protobuf for Ruby unfortunately does not support textproto. Plain binary provides + # a less debuggable, but compact option. + Temporal::Workflow::History::Serialization.to_protobuf(history_response.history) end def list_open_workflow_executions(namespace, from, to = Time.now, filter: {}, next_page_token: nil, max_page_size: nil) validate_filter(filter, :workflow, :workflow_id) - Temporal::Workflow::Executions.new(connection: connection, status: :open, request_options: { namespace: namespace, from: from, to: to, next_page_token: next_page_token, max_page_size: max_page_size}.merge(filter)) + Temporal::Workflow::Executions.new(converter, connection: connection, status: :open, request_options: { namespace: namespace, from: from, to: to, next_page_token: next_page_token, max_page_size: max_page_size}.merge(filter)) end def list_closed_workflow_executions(namespace, from, to = Time.now, filter: {}, next_page_token: nil, max_page_size: nil) validate_filter(filter, :status, :workflow, :workflow_id) - Temporal::Workflow::Executions.new(connection: connection, status: :closed, request_options: { namespace: namespace, from: from, to: to, next_page_token: next_page_token, max_page_size: max_page_size}.merge(filter)) + Temporal::Workflow::Executions.new(converter, connection: connection, status: :closed, request_options: { namespace: namespace, from: from, to: to, next_page_token: next_page_token, max_page_size: max_page_size}.merge(filter)) end def query_workflow_executions(namespace, query, filter: {}, next_page_token: nil, max_page_size: nil) validate_filter(filter, :status, :workflow, :workflow_id) - - Temporal::Workflow::Executions.new(connection: connection, status: :all, request_options: { namespace: namespace, query: query, next_page_token: next_page_token, max_page_size: max_page_size }.merge(filter)) + + Temporal::Workflow::Executions.new(converter, connection: connection, status: :all, request_options: { namespace: namespace, query: query, next_page_token: next_page_token, max_page_size: max_page_size }.merge(filter)) end # Count the number of workflows matching the provided query - # + # # @param namespace [String] # @param query [String] # @@ -454,18 +506,105 @@ def 
remove_custom_search_attributes(*attribute_names, namespace: nil) connection.remove_custom_search_attributes(attribute_names, namespace || config.default_execution_options.namespace) end - def connection - @connection ||= Temporal::Connection.generate(config.for_connection) + # List all schedules in a namespace + # + # @param namespace [String] namespace to list schedules in + # @param maximum_page_size [Integer] number of schedule results to return per page. + # @param next_page_token [String] an optional pagination token returned by a previous list_schedules call + def list_schedules(namespace, maximum_page_size:, next_page_token: '') + connection.list_schedules(namespace: namespace, maximum_page_size: maximum_page_size, next_page_token: next_page_token) end - class ResultConverter - extend Concerns::Payloads + # Describe a schedule in a namespace + # + # @param namespace [String] namespace the schedule belongs to + # @param schedule_id [String] schedule id + def describe_schedule(namespace, schedule_id) + connection.describe_schedule(namespace: namespace, schedule_id: schedule_id) + end + + # Create a new schedule + # + # + # @param namespace [String] namespace to create schedule in + # @param schedule_id [String] schedule id + # @param schedule [Temporal::Schedule::Schedule] schedule to create + # @param trigger_immediately [Boolean] If set, trigger one action to run immediately + # @param backfill [Temporal::Schedule::Backfill] If set, run through the backfill schedule and trigger actions. + # @param memo [Hash] optional key-value memo map to attach to the schedule + # @param search_attributes [Hash] optional key-value search attributes to attach to the schedule + def create_schedule( + namespace, + schedule_id, + schedule, + trigger_immediately: false, + backfill: nil, + memo: nil, + search_attributes: nil + ) + connection.create_schedule( + namespace: namespace, + schedule_id: schedule_id, + schedule: schedule, + trigger_immediately: trigger_immediately, + backfill: backfill, + memo: memo, + search_attributes: search_attributes + ) + end + + # Delete a schedule in a namespace + # + # @param namespace [String] namespace the schedule belongs to + # @param schedule_id [String] schedule id + def delete_schedule(namespace, schedule_id) + connection.delete_schedule(namespace: namespace, schedule_id: schedule_id) + end + + # Update a schedule in a namespace + # + # @param namespace [String] namespace the schedule belongs to + # @param schedule_id [String] schedule id + # @param schedule [Temporal::Schedule::Schedule] schedule to update. All fields in the schedule will be replaced completely by this updated schedule. + # @param conflict_token [String] a token that was returned by a previous describe_schedule call. If provided and does not match the current schedule's token, the update will fail.
+ def update_schedule(namespace, schedule_id, schedule, conflict_token: nil) + connection.update_schedule(namespace: namespace, schedule_id: schedule_id, schedule: schedule, conflict_token: conflict_token) + end + + # Trigger one action of a schedule to run immediately + # + # @param namespace [String] namespace + # @param schedule_id [String] schedule id + # @param overlap_policy [Symbol] Should be one of :skip, :buffer_one, :buffer_all, :cancel_other, :terminate_other, :allow_all + def trigger_schedule(namespace, schedule_id, overlap_policy: nil) + connection.trigger_schedule(namespace: namespace, schedule_id: schedule_id, overlap_policy: overlap_policy) + end + + # Pause a schedule so actions will not run + # + # @param namespace [String] namespace + # @param schedule_id [String] schedule id + # @param note [String] an optional note to explain why the schedule was paused + def pause_schedule(namespace, schedule_id, note: nil) + connection.pause_schedule(namespace: namespace, schedule_id: schedule_id, should_pause: true, note: note) + end + + # Unpause a schedule so actions will run + # + # @param namespace [String] namespace + # @param schedule_id [String] schedule id + # @param note [String] an optional note to explain why the schedule was unpaused + def unpause_schedule(namespace, schedule_id, note: nil) + connection.pause_schedule(namespace: namespace, schedule_id: schedule_id, should_pause: false, note: note) + end + + def connection + @connection ||= Temporal::Connection.generate(config.for_connection) end - private_constant :ResultConverter private - attr_reader :config + attr_reader :config, :converter def compute_run_timeout(execution_options) execution_options.timeouts[:run] || execution_options.timeouts[:execution] diff --git a/lib/temporal/concerns/payloads.rb b/lib/temporal/concerns/payloads.rb deleted file mode 100644 index 5c771e21..00000000 --- a/lib/temporal/concerns/payloads.rb +++ /dev/null @@ -1,86 +0,0 @@ -module Temporal - module Concerns - module Payloads - def from_payloads(payloads) - payloads = payload_codec.decodes(payloads) - payload_converter.from_payloads(payloads) - end - - def from_payload(payload) - payload = payload_codec.decode(payload) - payload_converter.from_payload(payload) - end - - def from_payload_map_without_codec(payload_map) - payload_map.map { |key, value| [key, payload_converter.from_payload(value)] }.to_h - end - - def from_result_payloads(payloads) - from_payloads(payloads)&.first - end - - def from_details_payloads(payloads) - from_payloads(payloads)&.first - end - - def from_signal_payloads(payloads) - from_payloads(payloads)&.first - end - - def from_query_payloads(payloads) - from_payloads(payloads)&.first - end - - def from_payload_map(payload_map) - payload_map.map { |key, value| [key, from_payload(value)] }.to_h - end - - def to_payloads(data) - payloads = payload_converter.to_payloads(data) - payload_codec.encodes(payloads) - end - - def to_payload(data) - payload = payload_converter.to_payload(data) - payload_codec.encode(payload) - end - - def to_payload_map_without_codec(data) - # skips the payload_codec step because search attributes don't use this pipeline - data.transform_values do |value| - payload_converter.to_payload(value) - end - end - - def to_result_payloads(data) - to_payloads([data]) - end - - def to_details_payloads(data) - to_payloads([data]) - end - - def to_signal_payloads(data) - to_payloads([data]) - end - - def to_query_payloads(data) - to_payloads([data]) - end - - def to_payload_map(data) - 
data.transform_values(&method(:to_payload)) - end - - private - - def payload_converter - Temporal.configuration.converter - end - - def payload_codec - Temporal.configuration.payload_codec - end - end - end -end diff --git a/lib/temporal/concerns/typed.rb b/lib/temporal/concerns/typed.rb index 2a05f144..0b8c6702 100644 --- a/lib/temporal/concerns/typed.rb +++ b/lib/temporal/concerns/typed.rb @@ -32,7 +32,7 @@ def input(klass = nil, &block) private def generate_struct - Class.new(Dry::Struct::Value) { transform_keys(&:to_sym) } + Class.new(Dry::Struct) { transform_keys(&:to_sym) } end end end diff --git a/lib/temporal/configuration.rb b/lib/temporal/configuration.rb index 9deb4226..0506b61f 100644 --- a/lib/temporal/configuration.rb +++ b/lib/temporal/configuration.rb @@ -1,4 +1,5 @@ require 'temporal/capabilities' +require 'temporal/converter_wrapper' require 'temporal/logger' require 'temporal/metrics_adapters/null' require 'temporal/middleware/header_propagator_chain' @@ -12,13 +13,13 @@ module Temporal class Configuration - Connection = Struct.new(:type, :host, :port, :credentials, :identity, keyword_init: true) + Connection = Struct.new(:type, :host, :port, :credentials, :identity, :converter, :connection_options, keyword_init: true) Execution = Struct.new(:namespace, :task_queue, :timeouts, :headers, :search_attributes, keyword_init: true) - attr_reader :timeouts, :error_handlers, :capabilities - attr_accessor :connection_type, :converter, :use_error_serialization_v2, :host, :port, :credentials, :identity, + attr_reader :timeouts, :error_handlers, :capabilities, :payload_codec + attr_accessor :connection_type, :use_error_serialization_v2, :host, :port, :credentials, :identity, :logger, :metrics_adapter, :namespace, :task_queue, :headers, :search_attributes, :header_propagators, - :payload_codec, :legacy_signals, :no_signals_in_first_task + :legacy_signals, :no_signals_in_first_task, :connection_options, :log_on_workflow_replay # See https://docs.temporal.io/blog/activity-timeouts/ for general docs. # We want an infinite execution timeout for cron schedules and other perpetual workflows. @@ -84,6 +85,9 @@ def initialize @search_attributes = {} @header_propagators = [] @capabilities = Capabilities.new(self) + @connection_options = {} + # Setting this to true can be useful when debugging workflow code or running replay tests + @log_on_workflow_replay = false # Signals previously were incorrectly replayed in order within a workflow task window, rather # than at the beginning. Correcting this changes the determinism of any workflow with signals. 
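As a hedged sketch of how the configuration additions above fit together: `connection_options` feeds the gRPC channel setup, `log_on_workflow_replay` enables logging during replay, and assigning a codec rebuilds the memoized converter wrapper. `MyEncryptionCodec` is a hypothetical application-supplied codec (anything implementing the encode/decode and encodes/decodes codec interface), not part of this diff.

```ruby
config = Temporal::Configuration.new
config.host = 'localhost'
config.port = 7233

# Extra gRPC channel options, consumed when the client stub is built
config.connection_options = {
  keepalive_time_ms: 30_000,
  retry_connection: true # installs the built-in retry policy for UNAVAILABLE responses
}

# Emit workflow logs during replay as well (useful for replay tests and debugging)
config.log_on_workflow_replay = true

# Assigning a converter or codec resets the memoized ConverterWrapper
config.payload_codec = MyEncryptionCodec.new # hypothetical codec
```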
@@ -120,7 +124,9 @@ def for_connection host: host, port: port, credentials: credentials, - identity: identity || default_identity + identity: identity || default_identity, + converter: converter, + connection_options: connection_options.merge(use_error_serialization_v2: use_error_serialization_v2) ).freeze end @@ -144,6 +150,20 @@ def header_propagator_chain Middleware::HeaderPropagatorChain.new(header_propagators) end + def converter + @converter_wrapper ||= ConverterWrapper.new(@converter, @payload_codec) + end + + def converter=(new_converter) + @converter = new_converter + @converter_wrapper = nil + end + + def payload_codec=(new_codec) + @payload_codec = new_codec + @converter_wrapper = nil + end + private def default_identity diff --git a/lib/temporal/connection.rb b/lib/temporal/connection.rb index b70bcbed..6ee1bcc7 100644 --- a/lib/temporal/connection.rb +++ b/lib/temporal/connection.rb @@ -12,8 +12,10 @@ def self.generate(configuration) port = configuration.port credentials = configuration.credentials identity = configuration.identity + converter = configuration.converter + options = configuration.connection_options - connection_class.new(host, port, identity, credentials) + connection_class.new(host, port, identity, credentials, converter, options) end end end diff --git a/lib/temporal/connection/grpc.rb b/lib/temporal/connection/grpc.rb index 3c206160..f35d2dc3 100644 --- a/lib/temporal/connection/grpc.rb +++ b/lib/temporal/connection/grpc.rb @@ -2,6 +2,7 @@ require 'time' require 'google/protobuf/well_known_types' require 'securerandom' +require 'json' require 'gen/temporal/api/filter/v1/message_pb' require 'gen/temporal/api/workflowservice/v1/service_services_pb' require 'gen/temporal/api/operatorservice/v1/service_services_pb' @@ -11,14 +12,13 @@ require 'temporal/connection/interceptors/client_name_version_interceptor' require 'temporal/connection/serializer' require 'temporal/connection/serializer/failure' +require 'temporal/connection/serializer/backfill' +require 'temporal/connection/serializer/schedule' require 'temporal/connection/serializer/workflow_id_reuse_policy' -require 'temporal/concerns/payloads' module Temporal module Connection class GRPC - include Concerns::Payloads - HISTORY_EVENT_FILTER = { all: Temporalio::Api::Enums::V1::HistoryEventFilterType::HISTORY_EVENT_FILTER_TYPE_ALL_EVENT, close: Temporalio::Api::Enums::V1::HistoryEventFilterType::HISTORY_EVENT_FILTER_TYPE_CLOSE_EVENT @@ -55,10 +55,11 @@ class GRPC CONNECTION_TIMEOUT_SECONDS = 60 - def initialize(host, port, identity, credentials, options = {}) + def initialize(host, port, identity, credentials, converter, options = {}) @url = "#{host}:#{port}" @identity = identity @credentials = credentials + @converter = converter @poll = true @poll_mutex = Mutex.new @poll_request = nil @@ -119,7 +120,8 @@ def start_workflow_execution( headers: nil, cron_schedule: nil, memo: nil, - search_attributes: nil + search_attributes: nil, + start_delay: nil ) request = Temporalio::Api::WorkflowService::V1::StartWorkflowExecutionRequest.new( identity: identity, @@ -128,24 +130,25 @@ def start_workflow_execution( name: workflow_name ), workflow_id: workflow_id, - workflow_id_reuse_policy: Temporal::Connection::Serializer::WorkflowIdReusePolicy.new(workflow_id_reuse_policy).to_proto, + workflow_id_reuse_policy: Temporal::Connection::Serializer::WorkflowIdReusePolicy.new(workflow_id_reuse_policy, converter).to_proto, task_queue: Temporalio::Api::TaskQueue::V1::TaskQueue.new( name: task_queue ), - input: to_payloads(input), 
+ input: converter.to_payloads(input), workflow_execution_timeout: execution_timeout, workflow_run_timeout: run_timeout, workflow_task_timeout: task_timeout, + workflow_start_delay: start_delay, request_id: SecureRandom.uuid, header: Temporalio::Api::Common::V1::Header.new( - fields: to_payload_map(headers || {}) + fields: converter.to_payload_map(headers || {}) ), cron_schedule: cron_schedule, memo: Temporalio::Api::Common::V1::Memo.new( - fields: to_payload_map(memo || {}) + fields: converter.to_payload_map(memo || {}) ), search_attributes: Temporalio::Api::Common::V1::SearchAttributes.new( - indexed_fields: to_payload_map_without_codec(search_attributes || {}) + indexed_fields: converter.to_payload_map_without_codec(search_attributes || {}) ) ) @@ -210,7 +213,7 @@ def poll_workflow_task_queue(namespace:, task_queue:, binary_checksum:) end def respond_query_task_completed(namespace:, task_token:, query_result:) - query_result_proto = Serializer.serialize(query_result) + query_result_proto = Serializer.serialize(query_result, converter) request = Temporalio::Api::WorkflowService::V1::RespondQueryTaskCompletedRequest.new( task_token: task_token, namespace: namespace, @@ -227,8 +230,8 @@ def respond_workflow_task_completed(namespace:, task_token:, commands:, binary_c namespace: namespace, identity: identity, task_token: task_token, - commands: Array(commands).map { |(_, command)| Serializer.serialize(command) }, - query_results: query_results.transform_values { |value| Serializer.serialize(value) }, + commands: Array(commands).map { |(_, command)| Serializer.serialize(command, converter) }, + query_results: query_results.transform_values { |value| Serializer.serialize(value, converter) }, binary_checksum: binary_checksum, sdk_metadata: if new_sdk_flags_used.any? Temporalio::Api::Sdk::V1::WorkflowTaskCompletedMetadata.new( @@ -247,13 +250,13 @@ def respond_workflow_task_failed(namespace:, task_token:, cause:, exception:, bi identity: identity, task_token: task_token, cause: cause, - failure: Serializer::Failure.new(exception).to_proto, + failure: Serializer::Failure.new(exception, converter).to_proto, binary_checksum: binary_checksum ) client.respond_workflow_task_failed(request) end - def poll_activity_task_queue(namespace:, task_queue:) + def poll_activity_task_queue(namespace:, task_queue:, max_tasks_per_second: 0) request = Temporalio::Api::WorkflowService::V1::PollActivityTaskQueueRequest.new( identity: identity, namespace: namespace, @@ -262,6 +265,12 @@ def poll_activity_task_queue(namespace:, task_queue:) ) ) + if max_tasks_per_second > 0 + request.task_queue_metadata = Temporalio::Api::TaskQueue::V1::TaskQueueMetadata.new( + max_tasks_per_second: Google::Protobuf::DoubleValue.new(value: max_tasks_per_second) + ) + end + poll_mutex.synchronize do return unless can_poll? 
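To illustrate the new `max_tasks_per_second` keyword, here is a hedged sketch of a direct call; in normal use the value is plumbed through by the worker's poller, and a value of zero leaves the request unchanged. `config` is assumed to be a `Temporal::Configuration` set up as above.

```ruby
# A positive value is translated into TaskQueueMetadata.max_tasks_per_second
# (a DoubleValue) on the poll request, asking the server to dispatch at most
# that many activity tasks per second for this task queue.
connection = Temporal::Connection.generate(config.for_connection)

task = connection.poll_activity_task_queue(
  namespace: 'my-namespace',
  task_queue: 'heavy-lifting',
  max_tasks_per_second: 5
)
```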
@@ -275,7 +284,7 @@ def record_activity_task_heartbeat(namespace:, task_token:, details: nil) request = Temporalio::Api::WorkflowService::V1::RecordActivityTaskHeartbeatRequest.new( namespace: namespace, task_token: task_token, - details: to_details_payloads(details), + details: converter.to_details_payloads(details), identity: identity ) client.record_activity_task_heartbeat(request) @@ -290,7 +299,7 @@ def respond_activity_task_completed(namespace:, task_token:, result:) namespace: namespace, identity: identity, task_token: task_token, - result: to_result_payloads(result) + result: converter.to_result_payloads(result) ) client.respond_activity_task_completed(request) end @@ -302,18 +311,18 @@ def respond_activity_task_completed_by_id(namespace:, activity_id:, workflow_id: workflow_id: workflow_id, run_id: run_id, activity_id: activity_id, - result: to_result_payloads(result) + result: converter.to_result_payloads(result) ) client.respond_activity_task_completed_by_id(request) end def respond_activity_task_failed(namespace:, task_token:, exception:) - serialize_whole_error = Temporal.configuration.use_error_serialization_v2 + serialize_whole_error = options.fetch(:use_error_serialization_v2) request = Temporalio::Api::WorkflowService::V1::RespondActivityTaskFailedRequest.new( namespace: namespace, identity: identity, task_token: task_token, - failure: Serializer::Failure.new(exception, serialize_whole_error: serialize_whole_error).to_proto + failure: Serializer::Failure.new(exception, converter, serialize_whole_error: serialize_whole_error).to_proto ) client.respond_activity_task_failed(request) end @@ -325,7 +334,7 @@ def respond_activity_task_failed_by_id(namespace:, activity_id:, workflow_id:, r workflow_id: workflow_id, run_id: run_id, activity_id: activity_id, - failure: Serializer::Failure.new(exception).to_proto + failure: Serializer::Failure.new(exception, converter).to_proto ) client.respond_activity_task_failed_by_id(request) end @@ -334,7 +343,7 @@ def respond_activity_task_canceled(namespace:, task_token:, details: nil) request = Temporalio::Api::WorkflowService::V1::RespondActivityTaskCanceledRequest.new( namespace: namespace, task_token: task_token, - details: to_details_payloads(details), + details: converter.to_details_payloads(details), identity: identity ) client.respond_activity_task_canceled(request) @@ -356,7 +365,7 @@ def signal_workflow_execution(namespace:, workflow_id:, run_id:, signal:, input: run_id: run_id ), signal_name: signal, - input: to_signal_payloads(input), + input: converter.to_signal_payloads(input), identity: identity ) client.signal_workflow_execution(request) @@ -372,12 +381,13 @@ def signal_with_start_workflow_execution( headers: nil, cron_schedule: nil, memo: nil, - search_attributes: nil + search_attributes: nil, + start_delay: nil ) proto_header_fields = if headers.nil? 
- to_payload_map({}) + converter.to_payload_map({}) elsif headers.instance_of?(Hash) - to_payload_map(headers) + converter.to_payload_map(headers) else # Preserve backward compatability for headers specified using proto objects warn '[DEPRECATION] Specify headers using a hash rather than protobuf objects' @@ -391,26 +401,27 @@ def signal_with_start_workflow_execution( name: workflow_name ), workflow_id: workflow_id, - workflow_id_reuse_policy: Temporal::Connection::Serializer::WorkflowIdReusePolicy.new(workflow_id_reuse_policy).to_proto, + workflow_id_reuse_policy: Temporal::Connection::Serializer::WorkflowIdReusePolicy.new(workflow_id_reuse_policy, converter).to_proto, task_queue: Temporalio::Api::TaskQueue::V1::TaskQueue.new( name: task_queue ), - input: to_payloads(input), + input: converter.to_payloads(input), workflow_execution_timeout: execution_timeout, workflow_run_timeout: run_timeout, workflow_task_timeout: task_timeout, + workflow_start_delay: start_delay, request_id: SecureRandom.uuid, header: Temporalio::Api::Common::V1::Header.new( fields: proto_header_fields ), cron_schedule: cron_schedule, signal_name: signal_name, - signal_input: to_signal_payloads(signal_input), + signal_input: converter.to_signal_payloads(signal_input), memo: Temporalio::Api::Common::V1::Memo.new( - fields: to_payload_map(memo || {}) + fields: converter.to_payload_map(memo || {}) ), search_attributes: Temporalio::Api::Common::V1::SearchAttributes.new( - indexed_fields: to_payload_map_without_codec(search_attributes || {}) + indexed_fields: converter.to_payload_map_without_codec(search_attributes || {}) ) ) @@ -454,7 +465,7 @@ def terminate_workflow_execution( run_id: run_id ), reason: reason, - details: to_details_payloads(details) + details: converter.to_details_payloads(details) ) client.terminate_workflow_execution(request) @@ -568,7 +579,7 @@ def query_workflow(namespace:, workflow_id:, run_id:, query:, args: nil, query_r ), query: Temporalio::Api::Query::V1::WorkflowQuery.new( query_type: query, - query_args: to_query_payloads(args) + query_args: converter.to_query_payloads(args) ) ) if query_reject_condition @@ -590,7 +601,7 @@ def query_workflow(namespace:, workflow_id:, run_id:, query:, args: nil, query_r elsif !response.query_result raise Temporal::QueryFailed, 'Invalid response from server' else - from_query_payloads(response.query_result) + converter.from_query_payloads(response.query_result) end end @@ -611,7 +622,7 @@ def describe_task_queue(namespace:, task_queue:) task_queue: Temporalio::Api::TaskQueue::V1::TaskQueue.new( name: task_queue ), - task_queue_type: Temporalio::Api::Enums::V1::TaskQueueType::Workflow, + task_queue_type: Temporalio::Api::Enums::V1::TaskQueueType::TASK_QUEUE_TYPE_WORKFLOW, include_task_queue_status: true ) client.describe_task_queue(request) @@ -628,16 +639,212 @@ def get_system_info client.get_system_info(Temporalio::Api::WorkflowService::V1::GetSystemInfoRequest.new) end + def list_schedules(namespace:, maximum_page_size:, next_page_token:) + request = Temporalio::Api::WorkflowService::V1::ListSchedulesRequest.new( + namespace: namespace, + maximum_page_size: maximum_page_size, + next_page_token: next_page_token + ) + resp = client.list_schedules(request) + + Temporal::Schedule::ListSchedulesResponse.new( + schedules: resp.schedules.map do |schedule| + Temporal::Schedule::ScheduleListEntry.new( + schedule_id: schedule.schedule_id, + memo: converter.from_payload_map(schedule.memo&.fields || {}), + search_attributes: 
converter.from_payload_map_without_codec(schedule.search_attributes&.indexed_fields || {}), + info: schedule.info + ) + end, + next_page_token: resp.next_page_token, + ) + end + + def describe_schedule(namespace:, schedule_id:) + request = Temporalio::Api::WorkflowService::V1::DescribeScheduleRequest.new( + namespace: namespace, + schedule_id: schedule_id + ) + + resp = nil + begin + resp = client.describe_schedule(request) + rescue ::GRPC::NotFound => e + raise Temporal::NotFoundFailure, e + end + + Temporal::Schedule::DescribeScheduleResponse.new( + schedule: resp.schedule, + info: resp.info, + memo: converter.from_payload_map(resp.memo&.fields || {}), + search_attributes: converter.from_payload_map_without_codec(resp.search_attributes&.indexed_fields || {}), + conflict_token: resp.conflict_token + ) + end + + def create_schedule( + namespace:, + schedule_id:, + schedule:, + trigger_immediately: nil, + backfill: nil, + memo: nil, + search_attributes: nil + ) + initial_patch = nil + if trigger_immediately || backfill + initial_patch = Temporalio::Api::Schedule::V1::SchedulePatch.new + if trigger_immediately + initial_patch.trigger_immediately = Temporalio::Api::Schedule::V1::TriggerImmediatelyRequest.new( + overlap_policy: Temporal::Connection::Serializer::ScheduleOverlapPolicy.new( + schedule.policies&.overlap_policy, + converter + ).to_proto + ) + end + + if backfill + initial_patch.backfill_request += [Temporal::Connection::Serializer::Backfill.new(backfill, converter).to_proto] + end + end + + request = Temporalio::Api::WorkflowService::V1::CreateScheduleRequest.new( + namespace: namespace, + schedule_id: schedule_id, + schedule: Temporal::Connection::Serializer::Schedule.new(schedule, converter).to_proto, + identity: identity, + request_id: SecureRandom.uuid, + memo: Temporalio::Api::Common::V1::Memo.new( + fields: converter.to_payload_map(memo || {}) + ), + search_attributes: Temporalio::Api::Common::V1::SearchAttributes.new( + indexed_fields: converter.to_payload_map_without_codec(search_attributes || {}) + ) + ) + client.create_schedule(request) + end + + def delete_schedule(namespace:, schedule_id:) + request = Temporalio::Api::WorkflowService::V1::DeleteScheduleRequest.new( + namespace: namespace, + schedule_id: schedule_id, + identity: identity + ) + + begin + client.delete_schedule(request) + rescue ::GRPC::NotFound => e + raise Temporal::NotFoundFailure, e + end + end + + def update_schedule(namespace:, schedule_id:, schedule:, conflict_token: nil) + request = Temporalio::Api::WorkflowService::V1::UpdateScheduleRequest.new( + namespace: namespace, + schedule_id: schedule_id, + schedule: Temporal::Connection::Serializer::Schedule.new(schedule, converter).to_proto, + conflict_token: conflict_token, + identity: identity, + request_id: SecureRandom.uuid + ) + + begin + client.update_schedule(request) + rescue ::GRPC::NotFound => e + raise Temporal::NotFoundFailure, e + end + end + + def trigger_schedule(namespace:, schedule_id:, overlap_policy: nil) + request = Temporalio::Api::WorkflowService::V1::PatchScheduleRequest.new( + namespace: namespace, + schedule_id: schedule_id, + patch: Temporalio::Api::Schedule::V1::SchedulePatch.new( + trigger_immediately: Temporalio::Api::Schedule::V1::TriggerImmediatelyRequest.new( + overlap_policy: Temporal::Connection::Serializer::ScheduleOverlapPolicy.new( + overlap_policy, + converter + ).to_proto + ), + ), + identity: identity, + request_id: SecureRandom.uuid + ) + + begin + client.patch_schedule(request) + rescue ::GRPC::NotFound => e + 
raise Temporal::NotFoundFailure, e + end + end + + def pause_schedule(namespace:, schedule_id:, should_pause:, note: nil) + patch = Temporalio::Api::Schedule::V1::SchedulePatch.new + if should_pause + patch.pause = note || 'Paused by temporal-ruby' + else + patch.unpause = note || 'Unpaused by temporal-ruby' + end + + request = Temporalio::Api::WorkflowService::V1::PatchScheduleRequest.new( + namespace: namespace, + schedule_id: schedule_id, + patch: patch, + identity: identity, + request_id: SecureRandom.uuid + ) + + begin + client.patch_schedule(request) + rescue ::GRPC::NotFound => e + raise Temporal::NotFoundFailure, e + end + end + private - attr_reader :url, :identity, :credentials, :options, :poll_mutex, :poll_request + attr_reader :url, :identity, :credentials, :converter, :options, :poll_mutex, :poll_request def client - @client ||= Temporalio::Api::WorkflowService::V1::WorkflowService::Stub.new( + return @client if @client + + channel_args = {} + + if options[:keepalive_time_ms] + channel_args["grpc.keepalive_time_ms"] = options[:keepalive_time_ms] + end + + if options[:retry_connection] || options[:retry_policy] + channel_args["grpc.enable_retries"] = 1 + + retry_policy = options[:retry_policy] || { + retryableStatusCodes: ["UNAVAILABLE"], + maxAttempts: 3, + initialBackoff: "0.1s", + backoffMultiplier: 2.0, + maxBackoff: "0.3s" + } + + channel_args["grpc.service_config"] = ::JSON.generate( + methodConfig: [ + { + name: [ + { + service: "temporal.api.workflowservice.v1.WorkflowService", + } + ], + retryPolicy: retry_policy + } + ] + ) + end + + @client = Temporalio::Api::WorkflowService::V1::WorkflowService::Stub.new( url, credentials, timeout: CONNECTION_TIMEOUT_SECONDS, - interceptors: [ClientNameVersionInterceptor.new] + interceptors: [ClientNameVersionInterceptor.new], + channel_args: channel_args ) end diff --git a/lib/temporal/connection/serializer.rb b/lib/temporal/connection/serializer.rb index 46070c66..b31c1005 100644 --- a/lib/temporal/connection/serializer.rb +++ b/lib/temporal/connection/serializer.rb @@ -33,9 +33,9 @@ module Serializer Workflow::QueryResult::Failure => Serializer::QueryFailure, }.freeze - def self.serialize(object) + def self.serialize(object, converter) serializer = SERIALIZERS_MAP[object.class] - serializer.new(object).to_proto + serializer.new(object, converter).to_proto end end end diff --git a/lib/temporal/connection/serializer/backfill.rb b/lib/temporal/connection/serializer/backfill.rb new file mode 100644 index 00000000..7abb40a5 --- /dev/null +++ b/lib/temporal/connection/serializer/backfill.rb @@ -0,0 +1,26 @@ +require "temporal/connection/serializer/base" +require "temporal/connection/serializer/schedule_overlap_policy" + +module Temporal + module Connection + module Serializer + class Backfill < Base + def to_proto + return unless object + + Temporalio::Api::Schedule::V1::BackfillRequest.new( + start_time: serialize_time(object.start_time), + end_time: serialize_time(object.end_time), + overlap_policy: Temporal::Connection::Serializer::ScheduleOverlapPolicy.new(object.overlap_policy, converter).to_proto + ) + end + + def serialize_time(input_time) + return unless input_time + + Google::Protobuf::Timestamp.new.from_time(input_time) + end + end + end + end +end diff --git a/lib/temporal/connection/serializer/base.rb b/lib/temporal/connection/serializer/base.rb index 9fcd49c5..79e8767a 100644 --- a/lib/temporal/connection/serializer/base.rb +++ b/lib/temporal/connection/serializer/base.rb @@ -6,8 +6,9 @@ module Temporal module Connection 
module Serializer class Base - def initialize(object) + def initialize(object, converter) @object = object + @converter = converter end def to_proto @@ -16,7 +17,7 @@ def to_proto private - attr_reader :object + attr_reader :object, :converter end end end diff --git a/lib/temporal/connection/serializer/complete_workflow.rb b/lib/temporal/connection/serializer/complete_workflow.rb index beb3b0ed..8eaa3ed4 100644 --- a/lib/temporal/connection/serializer/complete_workflow.rb +++ b/lib/temporal/connection/serializer/complete_workflow.rb @@ -1,18 +1,15 @@ require 'temporal/connection/serializer/base' -require 'temporal/concerns/payloads' module Temporal module Connection module Serializer class CompleteWorkflow < Base - include Concerns::Payloads - def to_proto Temporalio::Api::Command::V1::Command.new( command_type: Temporalio::Api::Enums::V1::CommandType::COMMAND_TYPE_COMPLETE_WORKFLOW_EXECUTION, complete_workflow_execution_command_attributes: Temporalio::Api::Command::V1::CompleteWorkflowExecutionCommandAttributes.new( - result: to_result_payloads(object.result) + result: converter.to_result_payloads(object.result) ) ) end diff --git a/lib/temporal/connection/serializer/continue_as_new.rb b/lib/temporal/connection/serializer/continue_as_new.rb index 6573c8ec..989ff2a9 100644 --- a/lib/temporal/connection/serializer/continue_as_new.rb +++ b/lib/temporal/connection/serializer/continue_as_new.rb @@ -1,13 +1,10 @@ require 'temporal/connection/serializer/base' require 'temporal/connection/serializer/retry_policy' -require 'temporal/concerns/payloads' module Temporal module Connection module Serializer class ContinueAsNew < Base - include Concerns::Payloads - def to_proto Temporalio::Api::Command::V1::Command.new( command_type: Temporalio::Api::Enums::V1::CommandType::COMMAND_TYPE_CONTINUE_AS_NEW_WORKFLOW_EXECUTION, @@ -15,10 +12,10 @@ def to_proto Temporalio::Api::Command::V1::ContinueAsNewWorkflowExecutionCommandAttributes.new( workflow_type: Temporalio::Api::Common::V1::WorkflowType.new(name: object.workflow_type), task_queue: Temporalio::Api::TaskQueue::V1::TaskQueue.new(name: object.task_queue), - input: to_payloads(object.input), + input: converter.to_payloads(object.input), workflow_run_timeout: object.timeouts[:run], workflow_task_timeout: object.timeouts[:task], - retry_policy: Temporal::Connection::Serializer::RetryPolicy.new(object.retry_policy).to_proto, + retry_policy: Temporal::Connection::Serializer::RetryPolicy.new(object.retry_policy, converter).to_proto, header: serialize_headers(object.headers), memo: serialize_memo(object.memo), search_attributes: serialize_search_attributes(object.search_attributes), @@ -31,19 +28,19 @@ def to_proto def serialize_headers(headers) return unless headers - Temporalio::Api::Common::V1::Header.new(fields: to_payload_map(headers)) + Temporalio::Api::Common::V1::Header.new(fields: converter.to_payload_map(headers)) end def serialize_memo(memo) return unless memo - Temporalio::Api::Common::V1::Memo.new(fields: to_payload_map(memo)) + Temporalio::Api::Common::V1::Memo.new(fields: converter.to_payload_map(memo)) end def serialize_search_attributes(search_attributes) return unless search_attributes - Temporalio::Api::Common::V1::SearchAttributes.new(indexed_fields: to_payload_map_without_codec(search_attributes)) + Temporalio::Api::Common::V1::SearchAttributes.new(indexed_fields: converter.to_payload_map_without_codec(search_attributes)) end end end diff --git a/lib/temporal/connection/serializer/fail_workflow.rb 
b/lib/temporal/connection/serializer/fail_workflow.rb index a6ef9ea0..2bedb688 100644 --- a/lib/temporal/connection/serializer/fail_workflow.rb +++ b/lib/temporal/connection/serializer/fail_workflow.rb @@ -10,7 +10,7 @@ def to_proto command_type: Temporalio::Api::Enums::V1::CommandType::COMMAND_TYPE_FAIL_WORKFLOW_EXECUTION, fail_workflow_execution_command_attributes: Temporalio::Api::Command::V1::FailWorkflowExecutionCommandAttributes.new( - failure: Failure.new(object.exception).to_proto + failure: Failure.new(object.exception, converter).to_proto ) ) end diff --git a/lib/temporal/connection/serializer/failure.rb b/lib/temporal/connection/serializer/failure.rb index ddfeb2e3..2d17e949 100644 --- a/lib/temporal/connection/serializer/failure.rb +++ b/lib/temporal/connection/serializer/failure.rb @@ -1,21 +1,18 @@ require 'temporal/connection/serializer/base' -require 'temporal/concerns/payloads' module Temporal module Connection module Serializer class Failure < Base - include Concerns::Payloads - - def initialize(error, serialize_whole_error: false, max_bytes: 200_000) + def initialize(error, converter, serialize_whole_error: false, max_bytes: 200_000) @serialize_whole_error = serialize_whole_error @max_bytes = max_bytes - super(error) + super(error, converter) end def to_proto if @serialize_whole_error - details = to_details_payloads(object) + details = converter.to_details_payloads(object) if details.payloads.first.data.size > @max_bytes Temporal.logger.error( "Could not serialize exception because it's too large, so we are using a fallback that may not "\ @@ -25,10 +22,10 @@ def to_proto ) # Fallback to a more conservative serialization if the payload is too big to avoid # sending a huge amount of data to temporal and putting it in the history. - details = to_details_payloads(object.message) + details = converter.to_details_payloads(object.message) end else - details = to_details_payloads(object.message) + details = converter.to_details_payloads(object.message) end Temporalio::Api::Failure::V1::Failure.new( message: object.message, diff --git a/lib/temporal/connection/serializer/query_answer.rb b/lib/temporal/connection/serializer/query_answer.rb index 746c50c0..0c98b010 100644 --- a/lib/temporal/connection/serializer/query_answer.rb +++ b/lib/temporal/connection/serializer/query_answer.rb @@ -1,16 +1,13 @@ require 'temporal/connection/serializer/base' -require 'temporal/concerns/payloads' module Temporal module Connection module Serializer class QueryAnswer < Base - include Concerns::Payloads - def to_proto Temporalio::Api::Query::V1::WorkflowQueryResult.new( result_type: Temporalio::Api::Enums::V1::QueryResultType::QUERY_RESULT_TYPE_ANSWERED, - answer: to_query_payloads(object.result) + answer: converter.to_query_payloads(object.result) ) end end diff --git a/lib/temporal/connection/serializer/record_marker.rb b/lib/temporal/connection/serializer/record_marker.rb index b29040f3..99fddb8c 100644 --- a/lib/temporal/connection/serializer/record_marker.rb +++ b/lib/temporal/connection/serializer/record_marker.rb @@ -1,12 +1,9 @@ require 'temporal/connection/serializer/base' -require 'temporal/concerns/payloads' module Temporal module Connection module Serializer class RecordMarker < Base - include Concerns::Payloads - def to_proto Temporalio::Api::Command::V1::Command.new( command_type: Temporalio::Api::Enums::V1::CommandType::COMMAND_TYPE_RECORD_MARKER, @@ -14,7 +11,7 @@ def to_proto Temporalio::Api::Command::V1::RecordMarkerCommandAttributes.new( marker_name: object.name, details: { - 
'data' => to_details_payloads(object.details) + 'data' => converter.to_details_payloads(object.details) } ) ) diff --git a/lib/temporal/connection/serializer/schedule.rb b/lib/temporal/connection/serializer/schedule.rb new file mode 100644 index 00000000..3e2fc264 --- /dev/null +++ b/lib/temporal/connection/serializer/schedule.rb @@ -0,0 +1,22 @@ +require "temporal/connection/serializer/base" +require "temporal/connection/serializer/schedule_spec" +require "temporal/connection/serializer/schedule_action" +require "temporal/connection/serializer/schedule_policies" +require "temporal/connection/serializer/schedule_state" + +module Temporal + module Connection + module Serializer + class Schedule < Base + def to_proto + Temporalio::Api::Schedule::V1::Schedule.new( + spec: Temporal::Connection::Serializer::ScheduleSpec.new(object.spec, converter).to_proto, + action: Temporal::Connection::Serializer::ScheduleAction.new(object.action, converter).to_proto, + policies: Temporal::Connection::Serializer::SchedulePolicies.new(object.policies, converter).to_proto, + state: Temporal::Connection::Serializer::ScheduleState.new(object.state, converter).to_proto + ) + end + end + end + end +end diff --git a/lib/temporal/connection/serializer/schedule_action.rb b/lib/temporal/connection/serializer/schedule_action.rb new file mode 100644 index 00000000..b79942be --- /dev/null +++ b/lib/temporal/connection/serializer/schedule_action.rb @@ -0,0 +1,40 @@ +require "temporal/connection/serializer/base" + +module Temporal + module Connection + module Serializer + class ScheduleAction < Base + def to_proto + unless object.is_a?(Temporal::Schedule::StartWorkflowAction) + raise ArgumentError, "Unknown action type #{object.class}" + end + + Temporalio::Api::Schedule::V1::ScheduleAction.new( + start_workflow: Temporalio::Api::Workflow::V1::NewWorkflowExecutionInfo.new( + workflow_id: object.workflow_id, + workflow_type: Temporalio::Api::Common::V1::WorkflowType.new( + name: object.name + ), + task_queue: Temporalio::Api::TaskQueue::V1::TaskQueue.new( + name: object.task_queue + ), + input: converter.to_payloads(object.input), + workflow_execution_timeout: object.execution_timeout, + workflow_run_timeout: object.run_timeout, + workflow_task_timeout: object.task_timeout, + header: Temporalio::Api::Common::V1::Header.new( + fields: converter.to_payload_map(object.headers || {}) + ), + memo: Temporalio::Api::Common::V1::Memo.new( + fields: converter.to_payload_map(object.memo || {}) + ), + search_attributes: Temporalio::Api::Common::V1::SearchAttributes.new( + indexed_fields: converter.to_payload_map_without_codec(object.search_attributes || {}) + ) + ) + ) + end + end + end + end +end diff --git a/lib/temporal/connection/serializer/schedule_activity.rb b/lib/temporal/connection/serializer/schedule_activity.rb index 10b26570..b3640639 100644 --- a/lib/temporal/connection/serializer/schedule_activity.rb +++ b/lib/temporal/connection/serializer/schedule_activity.rb @@ -1,13 +1,10 @@ require 'temporal/connection/serializer/base' require 'temporal/connection/serializer/retry_policy' -require 'temporal/concerns/payloads' module Temporal module Connection module Serializer class ScheduleActivity < Base - include Concerns::Payloads - def to_proto Temporalio::Api::Command::V1::Command.new( command_type: Temporalio::Api::Enums::V1::CommandType::COMMAND_TYPE_SCHEDULE_ACTIVITY_TASK, @@ -15,13 +12,13 @@ def to_proto Temporalio::Api::Command::V1::ScheduleActivityTaskCommandAttributes.new( activity_id: object.activity_id.to_s, 
activity_type: Temporalio::Api::Common::V1::ActivityType.new(name: object.activity_type), - input: to_payloads(object.input), + input: converter.to_payloads(object.input), task_queue: Temporalio::Api::TaskQueue::V1::TaskQueue.new(name: object.task_queue), schedule_to_close_timeout: object.timeouts[:schedule_to_close], schedule_to_start_timeout: object.timeouts[:schedule_to_start], start_to_close_timeout: object.timeouts[:start_to_close], heartbeat_timeout: object.timeouts[:heartbeat], - retry_policy: Temporal::Connection::Serializer::RetryPolicy.new(object.retry_policy).to_proto, + retry_policy: Temporal::Connection::Serializer::RetryPolicy.new(object.retry_policy, converter).to_proto, header: serialize_headers(object.headers) ) ) @@ -32,7 +29,7 @@ def to_proto def serialize_headers(headers) return unless headers - Temporalio::Api::Common::V1::Header.new(fields: to_payload_map(headers)) + Temporalio::Api::Common::V1::Header.new(fields: converter.to_payload_map(headers)) end end end diff --git a/lib/temporal/connection/serializer/schedule_overlap_policy.rb b/lib/temporal/connection/serializer/schedule_overlap_policy.rb new file mode 100644 index 00000000..a866c8ee --- /dev/null +++ b/lib/temporal/connection/serializer/schedule_overlap_policy.rb @@ -0,0 +1,26 @@ +require "temporal/connection/serializer/base" + +module Temporal + module Connection + module Serializer + class ScheduleOverlapPolicy < Base + SCHEDULE_OVERLAP_POLICY = { + skip: Temporalio::Api::Enums::V1::ScheduleOverlapPolicy::SCHEDULE_OVERLAP_POLICY_SKIP, + buffer_one: Temporalio::Api::Enums::V1::ScheduleOverlapPolicy::SCHEDULE_OVERLAP_POLICY_BUFFER_ONE, + buffer_all: Temporalio::Api::Enums::V1::ScheduleOverlapPolicy::SCHEDULE_OVERLAP_POLICY_BUFFER_ALL, + cancel_other: Temporalio::Api::Enums::V1::ScheduleOverlapPolicy::SCHEDULE_OVERLAP_POLICY_CANCEL_OTHER, + terminate_other: Temporalio::Api::Enums::V1::ScheduleOverlapPolicy::SCHEDULE_OVERLAP_POLICY_TERMINATE_OTHER, + allow_all: Temporalio::Api::Enums::V1::ScheduleOverlapPolicy::SCHEDULE_OVERLAP_POLICY_ALLOW_ALL + }.freeze + + def to_proto + return unless object + + SCHEDULE_OVERLAP_POLICY.fetch(object) do + raise ArgumentError, "Unknown schedule overlap policy specified: #{object}" + end + end + end + end + end +end diff --git a/lib/temporal/connection/serializer/schedule_policies.rb b/lib/temporal/connection/serializer/schedule_policies.rb new file mode 100644 index 00000000..42558899 --- /dev/null +++ b/lib/temporal/connection/serializer/schedule_policies.rb @@ -0,0 +1,20 @@ +require "temporal/connection/serializer/base" +require "temporal/connection/serializer/schedule_overlap_policy" + +module Temporal + module Connection + module Serializer + class SchedulePolicies < Base + def to_proto + return unless object + + Temporalio::Api::Schedule::V1::SchedulePolicies.new( + overlap_policy: Temporal::Connection::Serializer::ScheduleOverlapPolicy.new(object.overlap_policy, converter).to_proto, + catchup_window: object.catchup_window, + pause_on_failure: object.pause_on_failure + ) + end + end + end + end +end diff --git a/lib/temporal/connection/serializer/schedule_spec.rb b/lib/temporal/connection/serializer/schedule_spec.rb new file mode 100644 index 00000000..7fb07b48 --- /dev/null +++ b/lib/temporal/connection/serializer/schedule_spec.rb @@ -0,0 +1,45 @@ +require "temporal/connection/serializer/base" + +module Temporal + module Connection + module Serializer + class ScheduleSpec < Base + def to_proto + return unless object + + Temporalio::Api::Schedule::V1::ScheduleSpec.new( + 
cron_string: object.cron_expressions, + interval: object.intervals.map do |interval| + Temporalio::Api::Schedule::V1::IntervalSpec.new( + interval: interval.every, + phase: interval.offset + ) + end, + calendar: object.calendars.map do |calendar| + Temporalio::Api::Schedule::V1::CalendarSpec.new( + second: calendar.second, + minute: calendar.minute, + hour: calendar.hour, + day_of_month: calendar.day_of_month, + month: calendar.month, + year: calendar.year, + day_of_week: calendar.day_of_week, + comment: calendar.comment + ) + end, + jitter: object.jitter, + timezone_name: object.timezone_name, + start_time: serialize_time(object.start_time), + end_time: serialize_time(object.end_time) + ) + end + + def serialize_time(input_time) + return unless input_time + + Google::Protobuf::Timestamp.new.from_time(input_time) + end + end + end + end +end diff --git a/lib/temporal/connection/serializer/schedule_state.rb b/lib/temporal/connection/serializer/schedule_state.rb new file mode 100644 index 00000000..9e243de5 --- /dev/null +++ b/lib/temporal/connection/serializer/schedule_state.rb @@ -0,0 +1,20 @@ +require "temporal/connection/serializer/base" + +module Temporal + module Connection + module Serializer + class ScheduleState < Base + def to_proto + return unless object + + Temporalio::Api::Schedule::V1::ScheduleState.new( + notes: object.notes, + paused: object.paused, + limited_actions: object.limited_actions, + remaining_actions: object.remaining_actions + ) + end + end + end + end +end diff --git a/lib/temporal/connection/serializer/signal_external_workflow.rb b/lib/temporal/connection/serializer/signal_external_workflow.rb index 5cc640fd..ff229ddb 100644 --- a/lib/temporal/connection/serializer/signal_external_workflow.rb +++ b/lib/temporal/connection/serializer/signal_external_workflow.rb @@ -1,12 +1,9 @@ require 'temporal/connection/serializer/base' -require 'temporal/concerns/payloads' module Temporal module Connection module Serializer class SignalExternalWorkflow < Base - include Concerns::Payloads - def to_proto Temporalio::Api::Command::V1::Command.new( command_type: Temporalio::Api::Enums::V1::CommandType::COMMAND_TYPE_SIGNAL_EXTERNAL_WORKFLOW_EXECUTION, @@ -15,7 +12,7 @@ def to_proto namespace: object.namespace, execution: serialize_execution(object.execution), signal_name: object.signal_name, - input: to_signal_payloads(object.input), + input: converter.to_signal_payloads(object.input), control: "", # deprecated child_workflow_only: object.child_workflow_only ) diff --git a/lib/temporal/connection/serializer/start_child_workflow.rb b/lib/temporal/connection/serializer/start_child_workflow.rb index 90d08c79..dcb2fbf0 100644 --- a/lib/temporal/connection/serializer/start_child_workflow.rb +++ b/lib/temporal/connection/serializer/start_child_workflow.rb @@ -1,14 +1,11 @@ require 'temporal/connection/serializer/base' require 'temporal/connection/serializer/retry_policy' require 'temporal/connection/serializer/workflow_id_reuse_policy' -require 'temporal/concerns/payloads' module Temporal module Connection module Serializer class StartChildWorkflow < Base - include Concerns::Payloads - PARENT_CLOSE_POLICY = { terminate: Temporalio::Api::Enums::V1::ParentClosePolicy::PARENT_CLOSE_POLICY_TERMINATE, abandon: Temporalio::Api::Enums::V1::ParentClosePolicy::PARENT_CLOSE_POLICY_ABANDON, @@ -24,16 +21,16 @@ def to_proto workflow_id: object.workflow_id.to_s, workflow_type: Temporalio::Api::Common::V1::WorkflowType.new(name: object.workflow_type), task_queue: 
Temporalio::Api::TaskQueue::V1::TaskQueue.new(name: object.task_queue), - input: to_payloads(object.input), + input: converter.to_payloads(object.input), workflow_execution_timeout: object.timeouts[:execution], workflow_run_timeout: object.timeouts[:run], workflow_task_timeout: object.timeouts[:task], - retry_policy: Temporal::Connection::Serializer::RetryPolicy.new(object.retry_policy).to_proto, + retry_policy: Temporal::Connection::Serializer::RetryPolicy.new(object.retry_policy, converter).to_proto, parent_close_policy: serialize_parent_close_policy(object.parent_close_policy), header: serialize_headers(object.headers), cron_schedule: object.cron_schedule, memo: serialize_memo(object.memo), - workflow_id_reuse_policy: Temporal::Connection::Serializer::WorkflowIdReusePolicy.new(object.workflow_id_reuse_policy).to_proto, + workflow_id_reuse_policy: Temporal::Connection::Serializer::WorkflowIdReusePolicy.new(object.workflow_id_reuse_policy, converter).to_proto, search_attributes: serialize_search_attributes(object.search_attributes), ) ) @@ -44,13 +41,13 @@ def to_proto def serialize_headers(headers) return unless headers - Temporalio::Api::Common::V1::Header.new(fields: to_payload_map(headers)) + Temporalio::Api::Common::V1::Header.new(fields: converter.to_payload_map(headers)) end def serialize_memo(memo) return unless memo - Temporalio::Api::Common::V1::Memo.new(fields: to_payload_map(memo)) + Temporalio::Api::Common::V1::Memo.new(fields: converter.to_payload_map(memo)) end def serialize_parent_close_policy(parent_close_policy) @@ -66,7 +63,7 @@ def serialize_parent_close_policy(parent_close_policy) def serialize_search_attributes(search_attributes) return unless search_attributes - Temporalio::Api::Common::V1::SearchAttributes.new(indexed_fields: to_payload_map_without_codec(search_attributes)) + Temporalio::Api::Common::V1::SearchAttributes.new(indexed_fields: converter.to_payload_map_without_codec(search_attributes)) end end end diff --git a/lib/temporal/connection/serializer/upsert_search_attributes.rb b/lib/temporal/connection/serializer/upsert_search_attributes.rb index e8aa652c..b1b0395a 100644 --- a/lib/temporal/connection/serializer/upsert_search_attributes.rb +++ b/lib/temporal/connection/serializer/upsert_search_attributes.rb @@ -1,19 +1,16 @@ require 'temporal/connection/serializer/base' -require 'temporal/concerns/payloads' module Temporal module Connection module Serializer class UpsertSearchAttributes < Base - include Concerns::Payloads - def to_proto Temporalio::Api::Command::V1::Command.new( command_type: Temporalio::Api::Enums::V1::CommandType::COMMAND_TYPE_UPSERT_WORKFLOW_SEARCH_ATTRIBUTES, upsert_workflow_search_attributes_command_attributes: Temporalio::Api::Command::V1::UpsertWorkflowSearchAttributesCommandAttributes.new( search_attributes: Temporalio::Api::Common::V1::SearchAttributes.new( - indexed_fields: to_payload_map_without_codec(object.search_attributes || {}) + indexed_fields: converter.to_payload_map_without_codec(object.search_attributes || {}) ), ) ) diff --git a/lib/temporal/converter_wrapper.rb b/lib/temporal/converter_wrapper.rb new file mode 100644 index 00000000..a14e2abf --- /dev/null +++ b/lib/temporal/converter_wrapper.rb @@ -0,0 +1,87 @@ +# This class provides convenience methods for accessing the converter/codec. It is fully backwards +# compatible with Temporal::Connection::Converter::Base interface, however it adds new convenience +# methods specific to different conversion scenarios. 
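A hedged usage sketch of the wrapper defined below: it is normally obtained from the configuration rather than built by hand, and the exact decoded Ruby types depend on the configured converter and codec.

```ruby
wrapper = Temporal.configuration.converter # a ConverterWrapper after this change

# Arguments go through the converter first, then the codec
payloads = wrapper.to_payloads(['hello', 42])
wrapper.from_payloads(payloads)

# Results are wrapped in a single-element payload list
result = wrapper.to_result_payloads('done')
wrapper.from_result_payloads(result)

# Search attributes deliberately skip the codec
wrapper.to_payload_map_without_codec('CustomIntField' => 42)
```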
+ +module Temporal + class ConverterWrapper + def initialize(converter, codec) + @converter = converter + @codec = codec + end + + def from_payloads(payloads) + payloads = codec.decodes(payloads) + converter.from_payloads(payloads) + end + + def from_payload(payload) + payload = codec.decode(payload) + converter.from_payload(payload) + end + + def from_payload_map_without_codec(payload_map) + payload_map.map { |key, value| [key, converter.from_payload(value)] }.to_h + end + + def from_result_payloads(payloads) + from_payloads(payloads)&.first + end + + def from_details_payloads(payloads) + from_payloads(payloads)&.first + end + + def from_signal_payloads(payloads) + from_payloads(payloads)&.first + end + + def from_query_payloads(payloads) + from_payloads(payloads)&.first + end + + def from_payload_map(payload_map) + payload_map.map { |key, value| [key, from_payload(value)] }.to_h + end + + def to_payloads(data) + payloads = converter.to_payloads(data) + codec.encodes(payloads) + end + + def to_payload(data) + payload = converter.to_payload(data) + codec.encode(payload) + end + + def to_payload_map_without_codec(data) + # skips the codec step because search attributes don't use this pipeline + data.transform_values do |value| + converter.to_payload(value) + end + end + + def to_result_payloads(data) + to_payloads([data]) + end + + def to_details_payloads(data) + to_payloads([data]) + end + + def to_signal_payloads(data) + to_payloads([data]) + end + + def to_query_payloads(data) + to_payloads([data]) + end + + def to_payload_map(data) + data.transform_values(&method(:to_payload)) + end + + private + + attr_reader :converter, :codec + end +end diff --git a/lib/temporal/errors.rb b/lib/temporal/errors.rb index a13ada62..1c423a6c 100644 --- a/lib/temporal/errors.rb +++ b/lib/temporal/errors.rb @@ -26,7 +26,7 @@ class ChildWorkflowTerminatedError < Error; end # A superclass for activity exceptions raised explicitly # with the intent to propagate to a workflow - # With v2 serialization (set with Temporal.configuration set with use_error_serialization_v2=true) you can + # With v2 serialization (set with Temporal::Configuration#use_error_serialization_v2=true) you can # throw any exception from an activity and expect that it can be handled by the workflow. 
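For illustration, a hedged sketch of what that enables. The activity and workflow here are hypothetical, and carrying the extra attribute across assumes `use_error_serialization_v2` is enabled and the configured converter can serialize the whole exception object.

```ruby
class InsufficientFunds < Temporal::ActivityException
  attr_reader :missing_amount

  def initialize(message, missing_amount)
    super(message)
    @missing_amount = missing_amount
  end
end

class ChargeCardActivity < Temporal::Activity
  def execute(amount)
    raise InsufficientFunds.new('balance too low', amount) # pretend the charge failed
  end
end

class ChargeCardWorkflow < Temporal::Workflow
  def execute(amount)
    ChargeCardActivity.execute!(amount)
  rescue InsufficientFunds => e
    # with v2 serialization the original class and its attributes survive the round trip
    Temporal.logger.warn("charge failed, short by #{e.missing_amount}")
  end
end
```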
class ActivityException < ClientError; end diff --git a/lib/temporal/execution_options.rb b/lib/temporal/execution_options.rb index 65f0031c..d3319cb8 100644 --- a/lib/temporal/execution_options.rb +++ b/lib/temporal/execution_options.rb @@ -3,7 +3,8 @@ module Temporal class ExecutionOptions - attr_reader :name, :namespace, :task_queue, :retry_policy, :timeouts, :headers, :memo, :search_attributes + attr_reader :name, :namespace, :task_queue, :retry_policy, :timeouts, :headers, :memo, :search_attributes, + :start_delay def initialize(object, options, defaults = nil) # Options are treated as overrides and take precedence @@ -15,6 +16,7 @@ def initialize(object, options, defaults = nil) @headers = options[:headers] || {} @memo = options[:memo] || {} @search_attributes = options[:search_attributes] || {} + @start_delay = options[:start_delay] || 0 # For Temporal::Workflow and Temporal::Activity use defined values as the next option if has_executable_concern?(object) diff --git a/lib/temporal/metadata.rb b/lib/temporal/metadata.rb index 7be46b31..5439029f 100644 --- a/lib/temporal/metadata.rb +++ b/lib/temporal/metadata.rb @@ -2,15 +2,12 @@ require 'temporal/metadata/activity' require 'temporal/metadata/workflow' require 'temporal/metadata/workflow_task' -require 'temporal/concerns/payloads' module Temporal module Metadata class << self - include Concerns::Payloads - - def generate_activity_metadata(task, namespace) + def generate_activity_metadata(task, namespace, converter) Metadata::Activity.new( namespace: namespace, id: task.activity_id, @@ -20,8 +17,8 @@ def generate_activity_metadata(task, namespace) workflow_run_id: task.workflow_execution.run_id, workflow_id: task.workflow_execution.workflow_id, workflow_name: task.workflow_type.name, - headers: from_payload_map(task.header&.fields || {}), - heartbeat_details: from_details_payloads(task.heartbeat_details), + headers: converter.from_payload_map(task.header&.fields || {}), + heartbeat_details: converter.from_details_payloads(task.heartbeat_details), scheduled_at: task.scheduled_time.to_time, current_attempt_scheduled_at: task.current_attempt_scheduled_time.to_time, heartbeat_timeout: task.heartbeat_timeout.seconds @@ -44,7 +41,7 @@ def generate_workflow_task_metadata(task, namespace) # @param event [Temporal::Workflow::History::Event] Workflow started history event # @param task_metadata [Temporal::Metadata::WorkflowTask] workflow task metadata - def generate_workflow_metadata(event, task_metadata) + def generate_workflow_metadata(event, task_metadata, converter) Metadata::Workflow.new( name: event.attributes.workflow_type.name, id: task_metadata.workflow_id, @@ -54,9 +51,9 @@ def generate_workflow_metadata(event, task_metadata) attempt: event.attributes.attempt, namespace: task_metadata.namespace, task_queue: event.attributes.task_queue.name, - headers: from_payload_map(event.attributes.header&.fields || {}), + headers: converter.from_payload_map(event.attributes.header&.fields || {}), run_started_at: event.timestamp, - memo: from_payload_map(event.attributes.memo&.fields || {}), + memo: converter.from_payload_map(event.attributes.memo&.fields || {}), ) end end diff --git a/lib/temporal/schedule.rb b/lib/temporal/schedule.rb new file mode 100644 index 00000000..8fa84d2d --- /dev/null +++ b/lib/temporal/schedule.rb @@ -0,0 +1,16 @@ +require "temporal/schedule/backfill" +require "temporal/schedule/calendar" +require "temporal/schedule/describe_schedule_response" +require "temporal/schedule/interval" +require 
"temporal/schedule/list_schedules_response" +require "temporal/schedule/schedule" +require "temporal/schedule/schedule_list_entry" +require "temporal/schedule/schedule_policies" +require "temporal/schedule/schedule_spec" +require "temporal/schedule/schedule_state" +require "temporal/schedule/start_workflow_action" + +module Temporal + module Schedule + end +end diff --git a/lib/temporal/schedule/backfill.rb b/lib/temporal/schedule/backfill.rb new file mode 100644 index 00000000..b107d3f6 --- /dev/null +++ b/lib/temporal/schedule/backfill.rb @@ -0,0 +1,42 @@ +module Temporal + module Schedule + class Backfill + # Controls what happens when a workflow would be started + # by a schedule, and is already running. + # + # If provided, must be one of: + # - :skip (default): means don't start anything. When the workflow + # completes, the next scheduled event after that time will be considered. + # - :buffer_one: means start the workflow again soon as the + # current one completes, but only buffer one start in this way. If + # another start is supposed to happen when the workflow is running, + # and one is already buffered, then only the first one will be + # started after the running workflow finishes. + # - :buffer_all : means buffer up any number of starts to all happen + # sequentially, immediately after the running workflow completes. + # - :cancel_other: means that if there is another workflow running, cancel + # it, and start the new one after the old one completes cancellation. + # - :terminate_other: means that if there is another workflow running, + # terminate it and start the new one immediately. + # - :allow_all: means start any number of concurrent workflows. + # Note that with this policy, last completion result and last failure + # will not be available since workflows are not sequential. + attr_reader :overlap_policy + + # The time to start the backfill + attr_reader :start_time + + # The time to end the backfill + attr_reader :end_time + + # @param start_time [Time] The time to start the backfill + # @param end_time [Time] The time to end the backfill + # @param overlap_policy [Time] Should be one of :skip, :buffer_one, :buffer_all, :cancel_other, :terminate_other, :allow_all + def initialize(start_time: nil, end_time: nil, overlap_policy: nil) + @start_time = start_time + @end_time = end_time + @overlap_policy = overlap_policy + end + end + end +end diff --git a/lib/temporal/schedule/calendar.rb b/lib/temporal/schedule/calendar.rb new file mode 100644 index 00000000..26d24d49 --- /dev/null +++ b/lib/temporal/schedule/calendar.rb @@ -0,0 +1,48 @@ +module Temporal + module Schedule + + # Calendar describes an event specification relative to the calendar, + # similar to a traditional cron specification, but with labeled fields. Each + # field can be one of: + # *: matches always + # x: matches when the field equals x + # x/y : matches when the field equals x+n*y where n is an integer + # x-z: matches when the field is between x and z inclusive + # w,x,y,...: matches when the field is one of the listed values + # + # Each x, y, z, ... is either a decimal integer, or a month or day of week name + # or abbreviation (in the appropriate fields). + # + # A timestamp matches if all fields match. + # + # Note that fields have different default values, for convenience. + # + # Note that the special case that some cron implementations have for treating + # day_of_month and day_of_week as "or" instead of "and" when both are set is + # not implemented. 
+ # + # day_of_week can accept 0 or 7 as Sunday + class Calendar + attr_reader :second, :minute, :hour, :day_of_month, :month, :year, :day_of_week, :comment + + # @param second [String] Expression to match seconds. Default: 0 + # @param minute [String] Expression to match minutes. Default: 0 + # @param hour [String] Expression to match hours. Default: 0 + # @param day_of_month [String] Expression to match days of the month. Default: * + # @param month [String] Expression to match months. Default: * + # @param year [String] Expression to match years. Default: * + # @param day_of_week [String] Expression to match days of the week. Default: * + # @param comment [String] Free form comment describing the intent of this calendar. + def initialize(second: nil, minute: nil, hour: nil, day_of_month: nil, month: nil, year: nil, day_of_week: nil, comment: nil) + @second = second + @minute = minute + @hour = hour + @day_of_month = day_of_month + @month = month + @day_of_week = day_of_week + @year = year + @comment = comment + end + end + end +end diff --git a/lib/temporal/schedule/describe_schedule_response.rb b/lib/temporal/schedule/describe_schedule_response.rb new file mode 100644 index 00000000..d0d3c627 --- /dev/null +++ b/lib/temporal/schedule/describe_schedule_response.rb @@ -0,0 +1,11 @@ +module Temporal + module Schedule + class DescribeScheduleResponse < Struct.new(:schedule, :info, :memo, :search_attributes, :conflict_token, keyword_init: true) + # Override the constructor to make these objects immutable + def initialize(*args) + super(*args) + self.freeze + end + end + end +end diff --git a/lib/temporal/schedule/interval.rb b/lib/temporal/schedule/interval.rb new file mode 100644 index 00000000..0d3650c9 --- /dev/null +++ b/lib/temporal/schedule/interval.rb @@ -0,0 +1,24 @@ +module Temporal + module Schedule + # Interval matches times that can be expressed as: + # Epoch + (n * every) + offset + # where n is all integers ≥ 0. + + # For example, an `every` of 1 hour with `offset` of zero would match + # every hour, on the hour. The same `every` but an `offset` + # of 19 minutes would match every `xx:19:00`. An `every` of 28 days with + # `offset` zero would match `2022-02-17T00:00:00Z` (among other times). + # The same `every` with `offset` of 3 days, 5 hours, and 23 minutes + # would match `2022-02-20T05:23:00Z` instead. 
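To make the interval arithmetic above concrete, a small sketch that reproduces the two examples from the comment (both `every` and `offset` are given in seconds, matching the initializer that follows):

```ruby
require "temporal/schedule/interval"

# Every hour at 19 minutes past (xx:19:00): epoch + n * 3600 + 1140 seconds.
hourly_at_19 = Temporal::Schedule::Interval.new(every: 3600, offset: 19 * 60)

# Every 28 days, offset by 3 days, 5 hours and 23 minutes.
every_28_days = Temporal::Schedule::Interval.new(
  every: 28 * 24 * 3600,
  offset: (3 * 24 * 3600) + (5 * 3600) + (23 * 60)
)
```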
+ class Interval + attr_reader :every, :offset + + # @param every [Integer] the number of seconds between each interval + # @param offset [Integer] the number of seconds to provide as offset + def initialize(every:, offset: nil) + @every = every + @offset = offset + end + end + end +end diff --git a/lib/temporal/schedule/list_schedules_response.rb b/lib/temporal/schedule/list_schedules_response.rb new file mode 100644 index 00000000..acf90b74 --- /dev/null +++ b/lib/temporal/schedule/list_schedules_response.rb @@ -0,0 +1,11 @@ +module Temporal + module Schedule + class ListSchedulesResponse < Struct.new(:schedules, :next_page_token, keyword_init: true) + # Override the constructor to make these objects immutable + def initialize(*args) + super(*args) + self.freeze + end + end + end +end diff --git a/lib/temporal/schedule/schedule.rb b/lib/temporal/schedule/schedule.rb new file mode 100644 index 00000000..91fcf7d1 --- /dev/null +++ b/lib/temporal/schedule/schedule.rb @@ -0,0 +1,14 @@ +module Temporal + module Schedule + class Schedule + attr_reader :spec, :action, :policies, :state + + def initialize(spec:, action:, policies: nil, state: nil) + @spec = spec + @action = action + @policies = policies + @state = state + end + end + end +end diff --git a/lib/temporal/schedule/schedule_list_entry.rb b/lib/temporal/schedule/schedule_list_entry.rb new file mode 100644 index 00000000..338d966e --- /dev/null +++ b/lib/temporal/schedule/schedule_list_entry.rb @@ -0,0 +1,12 @@ +module Temporal + module Schedule + # ScheduleListEntry is returned by ListSchedules. + class ScheduleListEntry < Struct.new(:schedule_id, :memo, :search_attributes, :info, keyword_init: true) + # Override the constructor to make these objects immutable + def initialize(*args) + super(*args) + self.freeze + end + end + end +end diff --git a/lib/temporal/schedule/schedule_policies.rb b/lib/temporal/schedule/schedule_policies.rb new file mode 100644 index 00000000..f8aeea21 --- /dev/null +++ b/lib/temporal/schedule/schedule_policies.rb @@ -0,0 +1,48 @@ +module Temporal + module Schedule + class SchedulePolicies + # Controls what happens when a workflow would be started + # by a schedule while a previous run is still running. + # + # If provided, must be one of: + # - :skip (default): means don't start anything. When the workflow + # completes, the next scheduled event after that time will be considered. + # - :buffer_one: means start the workflow again as soon as the + # current one completes, but only buffer one start in this way. If + # another start is supposed to happen when the workflow is running, + # and one is already buffered, then only the first one will be + # started after the running workflow finishes. + # - :buffer_all: means buffer up any number of starts to all happen + # sequentially, immediately after the running workflow completes. + # - :cancel_other: means that if there is another workflow running, cancel + # it, and start the new one after the old one completes cancellation. + # - :terminate_other: means that if there is another workflow running, + # terminate it and start the new one immediately. + # - :allow_all: means start any number of concurrent workflows. + # Note that with this policy, last completion result and last failure + # will not be available since workflows are not sequential.
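A short sketch of configuring the overlap policy described above through SchedulePolicies (its initializer appears just below); the catch-up window and pause behaviour shown are illustrative values, not defaults:

```ruby
require "temporal/schedule/schedule_policies"

# Skip runs that would overlap, catch up actions missed during outages of up to
# 5 minutes, and pause the schedule once a run fails after exhausting its retries.
policies = Temporal::Schedule::SchedulePolicies.new(
  overlap_policy: :skip,
  catchup_window: 300,
  pause_on_failure: true
)
```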
+ attr_reader :overlap_policy + + # Policy for catchups: + # If the Temporal server misses an action due to one or more components + # being down, and comes back up, the action will be run if the scheduled + # time is within this window from the current time. + # This value defaults to 60 seconds, and can't be less than 10 seconds. + attr_reader :catchup_window + + # If true, and a workflow run fails or times out, turn on "paused". + # This applies after retry policies: the full chain of retries must fail to + # trigger a pause here. + attr_reader :pause_on_failure + + # @param overlap_policy [Symbol] Should be one of :skip, :buffer_one, :buffer_all, :cancel_other, :terminate_other, :allow_all + # @param catchup_window [Integer] The number of seconds to catchup if the Temporal server misses an action + # @param pause_on_failure [Boolean] Whether to pause the schedule if the action fails + def initialize(overlap_policy: nil, catchup_window: nil, pause_on_failure: nil) + @overlap_policy = overlap_policy + @catchup_window = catchup_window + @pause_on_failure = pause_on_failure + end + end + end +end diff --git a/lib/temporal/schedule/schedule_spec.rb b/lib/temporal/schedule/schedule_spec.rb new file mode 100644 index 00000000..1034d298 --- /dev/null +++ b/lib/temporal/schedule/schedule_spec.rb @@ -0,0 +1,93 @@ +module Temporal + module Schedule + # ScheduleSpec is a complete description of a set of absolute timestamps + # (possibly infinite) that an action should occur at. The meaning of a + # ScheduleSpec depends only on its contents and never changes, except that the + # definition of a time zone can change over time (most commonly, when daylight + # saving time policy changes for an area). To create a totally self-contained + # ScheduleSpec, use UTC or include timezone_data + + # For input, you can provide zero or more of: calendars, intervals or + # cron_expressions and all of them will be used (the schedule will take + # action at the union of all of their times, minus the ones that match + # exclude_structured_calendar). + class ScheduleSpec + # Calendar-based specifications of times. + # + # @return [Array] + attr_reader :calendars + + # Interval-based specifications of times. + # + # @return [Array] + attr_reader :intervals + + # [Cron expressions](https://crontab.guru/). This is provided for easy + # migration from legacy Cron Workflows. For new use cases, we recommend + # using calendars or intervals for readability and maintainability. + # + # + # The string can have 5, 6, or 7 fields, separated by spaces. + # + # - 5 fields: minute, hour, day_of_month, month, day_of_week + # - 6 fields: minute, hour, day_of_month, month, day_of_week, year + # - 7 fields: second, minute, hour, day_of_month, month, day_of_week, year + # + # Notes: + # + # - If year is not given, it defaults to *. + # - If second is not given, it defaults to 0. + # - Shorthands `@yearly`, `@monthly`, `@weekly`, `@daily`, and `@hourly` are also + # accepted instead of the 5-7 time fields. + # - `@every interval[/]` is accepted and gets compiled into an + # IntervalSpec instead. `` and `` should be a decimal integer + # with a unit suffix s, m, h, or d. + # - Optionally, the string can be preceded by `CRON_TZ=` or + # `TZ=`, which will get copied to {@link timezone}. + # (In which case the {@link timezone} field should be left empty.) + # - Optionally, "#" followed by a comment can appear at the end of the string. 
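Putting the schedule pieces together, a hedged sketch that assembles a full Schedule from the spec, action, policies and state classes introduced in this change. It assumes the rest of the gem is loaded; the workflow name, task queue and input are placeholders, and registering the schedule with the server (e.g. through a client call) is not shown here:

```ruby
require "temporal/schedule"

spec = Temporal::Schedule::ScheduleSpec.new(
  cron_expressions: ["0 12 * * MON-FRI"],                         # weekdays at noon...
  intervals: [Temporal::Schedule::Interval.new(every: 6 * 3600)], # ...plus every 6 hours
  jitter: 30,
  timezone_name: "UTC"
)

# "HelloWorldWorkflow" and the task queue are placeholder names.
action = Temporal::Schedule::StartWorkflowAction.new(
  "HelloWorldWorkflow",
  "some input",
  options: { task_queue: "demo-task-queue" }
)

schedule = Temporal::Schedule::Schedule.new(
  spec: spec,
  action: action,
  policies: Temporal::Schedule::SchedulePolicies.new(overlap_policy: :buffer_one),
  state: Temporal::Schedule::ScheduleState.new(notes: "demo schedule", paused: false)
)
```

Per the spec comment above, the schedule takes action at the union of the cron expression and the interval.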
+ # - Note that the special case that some cron implementations have for + # treating day_of_month and day_of_week as "or" instead of "and" when both + # are set is not implemented. + # + # @return [Array] + attr_reader :cron_expressions + + # If set, any timestamps before start_time will be skipped. + attr_reader :start_time + + # If set, any timestamps after end_time will be skipped. + attr_reader :end_time + + # If set, the schedule will be randomly offset by up to this many seconds. + attr_reader :jitter + + # Time zone to interpret all calendar-based specs in. + # + # If unset, defaults to UTC. We recommend using UTC for your application if + # at all possible, to avoid various surprising properties of time zones. + # + # Time zones may be provided by name, corresponding to names in the IANA + # time zone database (see https://www.iana.org/time-zones). The definition + # will be loaded by the Temporal server from the environment it runs in. + attr_reader :timezone_name + + # @param cron_expressions [Array] + # @param intervals [Array] + # @param calendars [Array] + # @param start_time [Time] If set, any timestamps before start_time will be skipped. + # @param end_time [Time] If set, any timestamps after end_time will be skipped. + # @param jitter [Integer] If set, the schedule will be randomly offset by up to this many seconds. + # @param timezone_name [String] If set, the schedule will be interpreted in this time zone. + def initialize(cron_expressions: nil, intervals: nil, calendars: nil, start_time: nil, end_time: nil, jitter: nil, timezone_name: nil) + @cron_expressions = cron_expressions || [] + @intervals = intervals || [] + @calendars = calendars || [] + @start_time = start_time + @end_time = end_time + @jitter = jitter + @timezone_name = timezone_name + end + end + end +end diff --git a/lib/temporal/schedule/schedule_state.rb b/lib/temporal/schedule/schedule_state.rb new file mode 100644 index 00000000..4debb82c --- /dev/null +++ b/lib/temporal/schedule/schedule_state.rb @@ -0,0 +1,18 @@ +module Temporal + module Schedule + class ScheduleState + attr_reader :notes, :paused, :limited_actions, :remaining_actions + + # @param notes [String] Human-readable notes about the schedule. + # @param paused [Boolean] If true, do not take any actions based on the schedule spec. + # @param limited_actions [Boolean] If true, decrement remaining_actions when an action is taken. + # @param remaining_actions [Integer] The number of actions remaining to be taken. + def initialize(notes: nil, paused: nil, limited_actions: nil, remaining_actions: nil) + @notes = notes + @paused = paused + @limited_actions = limited_actions + @remaining_actions = remaining_actions + end + end + end +end diff --git a/lib/temporal/schedule/start_workflow_action.rb b/lib/temporal/schedule/start_workflow_action.rb new file mode 100644 index 00000000..19348fcd --- /dev/null +++ b/lib/temporal/schedule/start_workflow_action.rb @@ -0,0 +1,58 @@ +require "forwardable" + +module Temporal + module Schedule + class StartWorkflowAction + extend Forwardable + + #target + def_delegators( + :@execution_options, + :name, + :task_queue, + :headers, + :memo + ) + + attr_reader :workflow_id, :input + + # @param workflow [Temporal::Workflow, String] workflow class or name. 
When a workflow class + # is passed, its config (namespace, task_queue, timeouts, etc) will be used + # @param input [any] arguments to be passed to workflow's #execute method + # @param args [Hash] keyword arguments to be passed to workflow's #execute method + # @param options [Hash, nil] optional overrides + # @option options [String] :workflow_id + # @option options [String] :name workflow name + # @option options [String] :namespace + # @option options [String] :task_queue + # @option options [Hash] :retry_policy check Temporal::RetryPolicy for available options + # @option options [Hash] :timeouts check Temporal::Configuration::DEFAULT_TIMEOUTS + # @option options [Hash] :headers + # @option options [Hash] :search_attributes + # + # @return [String] workflow's run ID + def initialize(workflow, *input, options: {}) + @workflow_id = options[:workflow_id] || SecureRandom.uuid + @input = input + + @execution_options = ExecutionOptions.new(workflow, options) + end + + def execution_timeout + @execution_options.timeouts[:execution] + end + + def run_timeout + @execution_options.timeouts[:run] || @execution_options.timeouts[:execution] + end + + def task_timeout + @execution_options.timeouts[:task] + end + + def search_attributes + Workflow::Context::Helpers.process_search_attributes(@execution_options.search_attributes) + end + end + end +end diff --git a/lib/temporal/testing/local_workflow_context.rb b/lib/temporal/testing/local_workflow_context.rb index 0f30fe0f..7d3321ae 100644 --- a/lib/temporal/testing/local_workflow_context.rb +++ b/lib/temporal/testing/local_workflow_context.rb @@ -180,6 +180,10 @@ def fail(exception) raise exception end + def continue_as_new(*input, **args) + raise NotImplementedError, 'not yet available for testing' + end + def wait_for_all(*futures) futures.each(&:wait) diff --git a/lib/temporal/testing/replay_tester.rb b/lib/temporal/testing/replay_tester.rb new file mode 100644 index 00000000..6a98c86e --- /dev/null +++ b/lib/temporal/testing/replay_tester.rb @@ -0,0 +1,73 @@ +require "gen/temporal/api/history/v1/message_pb" +require "json" +require "temporal/errors" +require "temporal/metadata/workflow_task" +require "temporal/middleware/chain" +require "temporal/workflow/executor" +require "temporal/workflow/stack_trace_tracker" + +module Temporal + module Testing + class ReplayError < StandardError + end + + class ReplayTester + def initialize(config: Temporal.configuration) + @config = config + end + + attr_reader :config + + # Runs a replay test by using the specific Temporal::Workflow::History object. Instances of these objects + # can be obtained using various from_ methods in Temporal::Workflow::History::Serialization. + # + # If the replay test succeeds, the method will return silently. If the replay tests fails, an error will be raised. + def replay_history(workflow_class, history) + # This code roughly resembles the workflow TaskProcessor but with history being fed in rather + # than being pulled via a workflow task, no query support, no metrics, and other + # simplifications. Fake metadata needs to be provided. + start_workflow_event = history.find_event_by_id(1) + if start_workflow_event.nil? 
|| start_workflow_event.type != "WORKFLOW_EXECUTION_STARTED" + raise ReplayError, "History does not start with workflow_execution_started event" + end + + metadata = Temporal::Metadata::WorkflowTask.new( + namespace: config.namespace, + id: 1, + task_token: "", + attempt: 1, + workflow_run_id: "run_id", + workflow_id: "workflow_id", + # Protobuf deserialization will ensure this tree is present + workflow_name: start_workflow_event.attributes.workflow_type.name + ) + + executor = Workflow::Executor.new( + workflow_class, + history, + metadata, + config, + true, + Middleware::Chain.new([]) + ) + + begin + executor.run + rescue StandardError + query = Struct.new(:query_type, :query_args).new( + Temporal::Workflow::StackTraceTracker::STACK_TRACE_QUERY_NAME, + nil + ) + query_result = executor.process_queries( + {"stack_trace" => query} + ) + replay_error = ReplayError.new("Workflow code failed to replay successfully against history") + # Override the stack trace to the point in the workflow code where the failure occurred, not the + # point in the StateManager where non-determinism is detected + replay_error.set_backtrace("Fiber backtraces: #{query_result["stack_trace"].result}") + raise replay_error + end + end + end + end +end diff --git a/lib/temporal/worker.rb b/lib/temporal/worker.rb index 5d84df6e..e9a3b2f3 100644 --- a/lib/temporal/worker.rb +++ b/lib/temporal/worker.rb @@ -9,7 +9,7 @@ module Temporal class Worker # activity_thread_pool_size: number of threads that the poller can use to run activities. # can be set to 1 if you want no parallelism in your activities, at the cost of throughput. - + # # binary_checksum: The binary checksum identifies the version of workflow worker code. It is set on each completed or failed workflow # task. It is present in API responses that return workflow execution info, and is shown in temporal-web and tctl. # It is traditionally a checksum of the application binary. However, Temporal server treats this as an opaque @@ -21,13 +21,25 @@ class Worker # from workers with these bad versions. # # See https://docs.temporal.io/docs/tctl/how-to-use-tctl/#recovery-from-bad-deployment----auto-reset-workflow + # + # activity_max_tasks_per_second: Optional. Sets a rate limit on the number of activity tasks that can be started per second + # + # This limits new activities being started and activity attempts being scheduled. It does NOT + # limit the number of concurrent activities being executed on this task queue. + # + # This is managed by the server and controls activities per second for the entire task queue + # across all the workers. Note that the value is a double, so it can be set to less than 1 + # if needed. For example, setting it to 0.1 means an activity will be started at most once + # every 10 seconds. This can be used to protect downstream services from flooding. A value + # of zero uses the default, which is unlimited.
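As a usage note for the option documented above, a minimal sketch of constructing a worker with the rate limit; registering workflows/activities and starting the worker are unchanged and omitted:

```ruby
# Assumes the gem is already loaded and configured as usual.
require "temporal/worker"

# At most one activity task is started every 10 seconds across the whole task
# queue; the limit is enforced by the server, not per worker process.
worker = Temporal::Worker.new(activity_max_tasks_per_second: 0.1)
```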
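The replay tester above and the history serialization helpers added later in this diff combine into a simple regression test. A sketch assuming a history file has already been collected (the workflow class and file path are placeholders):

```ruby
require "temporal/testing/replay_tester"
require "temporal/workflow/history/serialization"

history = Temporal::Workflow::History::Serialization.from_json_file(
  "spec/replay/histories/my_workflow.json"
)

# Returns silently on success; raises Temporal::Testing::ReplayError if the
# current MyWorkflow code no longer replays deterministically against the
# recorded history.
Temporal::Testing::ReplayTester.new.replay_history(MyWorkflow, history)
```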
def initialize( config = Temporal.configuration, activity_thread_pool_size: Temporal::Activity::Poller::DEFAULT_OPTIONS[:thread_pool_size], workflow_thread_pool_size: Temporal::Workflow::Poller::DEFAULT_OPTIONS[:thread_pool_size], binary_checksum: Temporal::Workflow::Poller::DEFAULT_OPTIONS[:binary_checksum], activity_poll_retry_seconds: Temporal::Activity::Poller::DEFAULT_OPTIONS[:poll_retry_seconds], - workflow_poll_retry_seconds: Temporal::Workflow::Poller::DEFAULT_OPTIONS[:poll_retry_seconds] + workflow_poll_retry_seconds: Temporal::Workflow::Poller::DEFAULT_OPTIONS[:poll_retry_seconds], + activity_max_tasks_per_second: Temporal::Activity::Poller::DEFAULT_OPTIONS[:max_tasks_per_second] ) @config = config @workflows = Hash.new { |hash, key| hash[key] = ExecutableLookup.new } @@ -39,7 +51,8 @@ def initialize( @shutting_down = false @activity_poller_options = { thread_pool_size: activity_thread_pool_size, - poll_retry_seconds: activity_poll_retry_seconds + poll_retry_seconds: activity_poll_retry_seconds, + max_tasks_per_second: activity_max_tasks_per_second } @workflow_poller_options = { thread_pool_size: workflow_thread_pool_size, diff --git a/lib/temporal/workflow.rb b/lib/temporal/workflow.rb index 3b5dcfe6..c135a19c 100644 --- a/lib/temporal/workflow.rb +++ b/lib/temporal/workflow.rb @@ -1,3 +1,4 @@ +require 'temporal/callable' require 'temporal/concerns/executable' require 'temporal/workflow/convenience_methods' require 'temporal/thread_local_context' @@ -13,7 +14,9 @@ def self.execute_in_context(context, input) Temporal::ThreadLocalContext.set(context) workflow = new(context) - result = workflow.execute(*input) + callable = Temporal::Callable.new(method: workflow.method(:execute)) + + result = callable.call(input) context.complete(result) unless context.completed? rescue StandardError, ScriptError => error diff --git a/lib/temporal/workflow/command_state_machine.rb b/lib/temporal/workflow/command_state_machine.rb index 74adcf16..69bb2528 100644 --- a/lib/temporal/workflow/command_state_machine.rb +++ b/lib/temporal/workflow/command_state_machine.rb @@ -48,6 +48,14 @@ def fail def time_out @state = TIMED_OUT_STATE end + + def closed? + @state == COMPLETED_STATE || + @state == CANCELED_STATE || + @state == FAILED_STATE || + @state == TIMED_OUT_STATE || + @state == TERMINATED_STATE + end end end end diff --git a/lib/temporal/workflow/context.rb b/lib/temporal/workflow/context.rb index 2d69bd2d..07b917a2 100644 --- a/lib/temporal/workflow/context.rb +++ b/lib/temporal/workflow/context.rb @@ -47,8 +47,10 @@ def completed? end def logger - @logger ||= ReplayAwareLogger.new(Temporal.logger) - @logger.replay = state_manager.replay? + @logger ||= ReplayAwareLogger.new( + @config.logger, + replaying: -> { state_manager.replay? 
&& !@config.log_on_workflow_replay } + ) @logger end diff --git a/lib/temporal/workflow/errors.rb b/lib/temporal/workflow/errors.rb index 42157376..f13f03bf 100644 --- a/lib/temporal/workflow/errors.rb +++ b/lib/temporal/workflow/errors.rb @@ -3,11 +3,9 @@ module Temporal class Workflow class Errors - extend Concerns::Payloads - # Convert a failure returned from the server to an Error to raise to the client # failure: Temporalio::Api::Failure::V1::Failure - def self.generate_error(failure, default_exception_class = StandardError) + def self.generate_error(failure, converter, default_exception_class = StandardError) case failure.failure_info when :application_failure_info @@ -25,10 +23,10 @@ def self.generate_error(failure, default_exception_class = StandardError) end begin details = failure.application_failure_info.details - exception_or_message = from_details_payloads(details) + exception_or_message = converter.from_details_payloads(details) # v1 serialization only supports StandardErrors with a single "message" argument. # v2 serialization supports complex errors using our converters to serialize them. - # enable v2 serialization in activities with Temporal.configuration.use_error_serialization_v2 + # enable v2 serialization in activities with Temporal::Configuration#use_error_serialization_v2 if exception_or_message.is_a?(Exception) exception = exception_or_message else @@ -39,7 +37,7 @@ def self.generate_error(failure, default_exception_class = StandardError) exception = default_exception_class.new(message) Temporal.logger.error( "Could not instantiate original error. Defaulting to StandardError. Make sure the worker running " \ - "your activities is setting Temporal.configuration.use_error_serialization_v2. If so, make sure the " \ + "your activities is configured with use_error_serialization_v2. If so, make sure the " \ "original error serialized by searching your logs for 'unserializable_error'. If not, you're using "\ "legacy serialization, and it's likely that "\ "your error's initializer takes something other than exactly one positional argument.", @@ -59,7 +57,7 @@ def self.generate_error(failure, default_exception_class = StandardError) TimeoutError.new("Timeout type: #{failure.timeout_failure_info.timeout_type.to_s}") when :canceled_failure_info # TODO: Distinguish between different entity cancellations - StandardError.new(from_payloads(failure.canceled_failure_info.details)) + StandardError.new(converter.from_payloads(failure.canceled_failure_info.details)) else StandardError.new(failure.message) end diff --git a/lib/temporal/workflow/execution_info.rb b/lib/temporal/workflow/execution_info.rb index e3f70021..77a27332 100644 --- a/lib/temporal/workflow/execution_info.rb +++ b/lib/temporal/workflow/execution_info.rb @@ -1,12 +1,9 @@ -require 'temporal/concerns/payloads' require 'temporal/workflow/status' module Temporal class Workflow class ExecutionInfo < Struct.new(:workflow, :workflow_id, :run_id, :start_time, :close_time, :status, :history_length, :memo, :search_attributes, keyword_init: true) - extend Concerns::Payloads - STATUSES = [ Temporal::Workflow::Status::RUNNING, Temporal::Workflow::Status::COMPLETED, @@ -17,8 +14,8 @@ class ExecutionInfo < Struct.new(:workflow, :workflow_id, :run_id, :start_time, Temporal::Workflow::Status::TIMED_OUT ] - def self.generate_from(response) - search_attributes = response.search_attributes.nil? ? 
{} : from_payload_map_without_codec(response.search_attributes.indexed_fields) + def self.generate_from(response, converter) + search_attributes = response.search_attributes.nil? ? {} : converter.from_payload_map_without_codec(response.search_attributes.indexed_fields) new( workflow: response.type.name, workflow_id: response.execution.workflow_id, @@ -27,7 +24,7 @@ def self.generate_from(response) close_time: response.close_time&.to_time, status: Temporal::Workflow::Status::API_STATUS_MAP.fetch(response.status), history_length: response.history_length, - memo: from_payload_map(response.memo.fields), + memo: converter.from_payload_map(response.memo.fields), search_attributes: search_attributes ).freeze end diff --git a/lib/temporal/workflow/executions.rb b/lib/temporal/workflow/executions.rb index 83079b1a..15fb9109 100644 --- a/lib/temporal/workflow/executions.rb +++ b/lib/temporal/workflow/executions.rb @@ -9,7 +9,8 @@ class Executions next_page_token: nil }.freeze - def initialize(connection:, status:, request_options:) + def initialize(converter, connection:, status:, request_options:) + @converter = converter @connection = connection @status = status @request_options = DEFAULT_REQUEST_OPTIONS.merge(request_options) @@ -20,7 +21,7 @@ def next_page_token end def next_page - self.class.new(connection: @connection, status: @status, request_options: @request_options.merge(next_page_token: next_page_token)) + self.class.new(@converter, connection: @connection, status: @status, request_options: @request_options.merge(next_page_token: next_page_token)) end def each @@ -42,7 +43,7 @@ def each ) paginated_executions = response.executions.map do |raw_execution| - execution = Temporal::Workflow::ExecutionInfo.generate_from(raw_execution) + execution = Temporal::Workflow::ExecutionInfo.generate_from(raw_execution, @converter) if block_given? 
yield execution end diff --git a/lib/temporal/workflow/executor.rb b/lib/temporal/workflow/executor.rb index 762ae250..f40fef3b 100644 --- a/lib/temporal/workflow/executor.rb +++ b/lib/temporal/workflow/executor.rb @@ -33,7 +33,7 @@ def initialize(workflow_class, history, task_metadata, config, track_stack_trace def run dispatcher.register_handler( - History::EventTarget.workflow, + History::EventTarget.start_workflow, 'started', &method(:execute_workflow) ) @@ -42,7 +42,7 @@ def run state_manager.apply(window) end - RunResult.new(commands: state_manager.commands, new_sdk_flags_used: state_manager.new_sdk_flags_used) + RunResult.new(commands: state_manager.final_commands, new_sdk_flags_used: state_manager.new_sdk_flags_used) end # Process queries using the pre-registered query handlers @@ -71,7 +71,7 @@ def process_query(query) end def execute_workflow(input, workflow_started_event) - metadata = Metadata.generate_workflow_metadata(workflow_started_event, task_metadata) + metadata = Metadata.generate_workflow_metadata(workflow_started_event, task_metadata, config.converter) context = Workflow::Context.new(state_manager, dispatcher, workflow_class, metadata, config, query_registry, track_stack_trace) diff --git a/lib/temporal/workflow/history.rb b/lib/temporal/workflow/history.rb index e11ea9b2..07bbe96d 100644 --- a/lib/temporal/workflow/history.rb +++ b/lib/temporal/workflow/history.rb @@ -51,6 +51,9 @@ def next_window CANCEL_TIMER_FAILED TIMER_CANCELED WORKFLOW_EXECUTION_CANCEL_REQUESTED + WORKFLOW_EXECUTION_COMPLETED + WORKFLOW_EXECUTION_CONTINUED_AS_NEW + WORKFLOW_EXECUTION_FAILED START_CHILD_WORKFLOW_EXECUTION_INITIATED SIGNAL_EXTERNAL_WORKFLOW_EXECUTION_INITIATED REQUEST_CANCEL_ACTIVITY_TASK_FAILED diff --git a/lib/temporal/workflow/history/event_target.rb b/lib/temporal/workflow/history/event_target.rb index d054947f..881a7823 100644 --- a/lib/temporal/workflow/history/event_target.rb +++ b/lib/temporal/workflow/history/event_target.rb @@ -14,8 +14,13 @@ class UnexpectedEventType < InternalError; end MARKER_TYPE = :marker EXTERNAL_WORKFLOW_TYPE = :external_workflow CANCEL_EXTERNAL_WORKFLOW_REQUEST_TYPE = :cancel_external_workflow_request - WORKFLOW_TYPE = :workflow CANCEL_WORKFLOW_REQUEST_TYPE = :cancel_workflow_request + WORKFLOW_TYPE = :workflow + COMPLETE_WORKFLOW_TYPE = :complete_workflow + CONTINUE_AS_NEW_WORKFLOW_TYPE = :continue_as_new_workflow + FAIL_WORKFLOW_TYPE = :fail_workflow + SIGNAL_WORKFLOW_TYPE = :signal_workflow + START_WORKFLOW_TYPE = :start_workflow UPSERT_SEARCH_ATTRIBUTES_REQUEST_TYPE = :upsert_search_attributes_request # NOTE: The order is important, first prefix match wins (will be a longer match) @@ -35,13 +40,21 @@ class UnexpectedEventType < InternalError; end 'REQUEST_CANCEL_EXTERNAL_WORKFLOW_EXECUTION' => CANCEL_EXTERNAL_WORKFLOW_REQUEST_TYPE, 'UPSERT_WORKFLOW_SEARCH_ATTRIBUTES' => UPSERT_SEARCH_ATTRIBUTES_REQUEST_TYPE, 'WORKFLOW_EXECUTION_CANCEL' => CANCEL_WORKFLOW_REQUEST_TYPE, + 'WORKFLOW_EXECUTION_COMPLETED' => COMPLETE_WORKFLOW_TYPE, + 'WORKFLOW_EXECUTION_CONTINUED_AS_NEW' => CONTINUE_AS_NEW_WORKFLOW_TYPE, + 'WORKFLOW_EXECUTION_FAILED' => FAIL_WORKFLOW_TYPE, + 'WORKFLOW_EXECUTION_SIGNALED' => SIGNAL_WORKFLOW_TYPE, + 'WORKFLOW_EXECUTION_STARTED' => START_WORKFLOW_TYPE, + # This is a fall-through type for various event types that workflow code cannot + # react to, either because they're externally triggered (workflow termination, + # timeout) or use an unsupported feature (workflow cancellation, updates). 
'WORKFLOW_EXECUTION' => WORKFLOW_TYPE, }.freeze attr_reader :id, :type - def self.workflow - @workflow ||= new(1, WORKFLOW_TYPE) + def self.start_workflow + @workflow ||= new(1, START_WORKFLOW_TYPE) end def self.from_event(event) diff --git a/lib/temporal/workflow/history/serialization.rb b/lib/temporal/workflow/history/serialization.rb new file mode 100644 index 00000000..2219dddd --- /dev/null +++ b/lib/temporal/workflow/history/serialization.rb @@ -0,0 +1,61 @@ +module Temporal + class Workflow + class History + # Functions for deserializing workflow histories from JSON and protobuf. These are useful + # in writing replay tests + # + # `from_` methods return Temporal::Workflow::History instances.` + # `to_` methods take Temporalio::Api::History::V1::History instances + # + # This asymmetry stems from our own internal history representation being a projection + # of the "full" history. + class Serialization + # Parse History from a JSON string + def self.from_json(json) + raw_history = Temporalio::Api::History::V1::History.decode_json(json, ignore_unknown_fields: true) + Workflow::History.new(raw_history.events) + end + + # Convert a raw history to JSON. This method is typically only used by methods on Workflow::Client + def self.to_json(raw_history, pretty_print: true) + json = raw_history.to_json + if pretty_print + # pretty print JSON to make it more debuggable + ::JSON.pretty_generate(::JSON.load(json)) + else + json + end + end + + def self.from_json_file(path) + self.from_json(File.read(path)) + end + + def self.to_json_file(raw_history, path, pretty_print: true) + json = self.to_json(raw_history, pretty_print: pretty_print) + File.write(path, json) + end + + def self.from_protobuf(protobuf) + raw_history = Temporalio::Api::History::V1::History.decode(protobuf) + Workflow::History.new(raw_history.events) + end + + def self.to_protobuf(raw_history) + raw_history.to_proto + end + + def self.from_protobuf_file(path) + self.from_protobuf(File.open(path, "rb", &:read)) + end + + def self.to_protobuf_file(raw_history, path) + protobuf = self.to_protobuf(raw_history) + File.open(path, "wb") do |f| + f.write(protobuf) + end + end + end + end + end +end diff --git a/lib/temporal/workflow/poller.rb b/lib/temporal/workflow/poller.rb index 89fed958..198f4502 100644 --- a/lib/temporal/workflow/poller.rb +++ b/lib/temporal/workflow/poller.rb @@ -113,8 +113,8 @@ def process(task) middleware_chain = Middleware::Chain.new(middleware) workflow_middleware_chain = Middleware::Chain.new(workflow_middleware) - TaskProcessor.new(task, namespace, workflow_lookup, middleware_chain, workflow_middleware_chain, config, - binary_checksum).process + TaskProcessor.new(task, task_queue, namespace, workflow_lookup, middleware_chain, workflow_middleware_chain, + config, binary_checksum).process end def thread_pool diff --git a/lib/temporal/workflow/replay_aware_logger.rb b/lib/temporal/workflow/replay_aware_logger.rb index a56494b4..65dafc59 100644 --- a/lib/temporal/workflow/replay_aware_logger.rb +++ b/lib/temporal/workflow/replay_aware_logger.rb @@ -3,11 +3,9 @@ class Workflow class ReplayAwareLogger SEVERITIES = %i[debug info warn error fatal unknown].freeze - attr_writer :replay - - def initialize(main_logger, replay = true) + def initialize(main_logger, replaying:) @main_logger = main_logger - @replay = replay + @replaying = replaying end SEVERITIES.each do |severity| @@ -29,7 +27,7 @@ def log(severity, message, data = {}) attr_reader :main_logger def replay? 
- @replay + @replaying.call end end end diff --git a/lib/temporal/workflow/state_manager.rb b/lib/temporal/workflow/state_manager.rb index e3809662..c90ed3de 100644 --- a/lib/temporal/workflow/state_manager.rb +++ b/lib/temporal/workflow/state_manager.rb @@ -4,7 +4,6 @@ require 'temporal/workflow/command_state_machine' require 'temporal/workflow/history/event_target' require 'temporal/workflow/history/size' -require 'temporal/concerns/payloads' require 'temporal/workflow/errors' require 'temporal/workflow/sdk_flags' require 'temporal/workflow/signal' @@ -12,15 +11,13 @@ module Temporal class Workflow class StateManager - include Concerns::Payloads - SIDE_EFFECT_MARKER = 'SIDE_EFFECT'.freeze RELEASE_MARKER = 'RELEASE'.freeze class UnsupportedEvent < Temporal::InternalError; end class UnsupportedMarkerType < Temporal::InternalError; end - attr_reader :commands, :local_time, :search_attributes, :new_sdk_flags_used, :sdk_flags, :first_task_signals + attr_reader :local_time, :search_attributes, :new_sdk_flags_used, :sdk_flags, :first_task_signals def initialize(dispatcher, config) @dispatcher = dispatcher @@ -34,6 +31,7 @@ def initialize(dispatcher, config) @replay = false @search_attributes = {} @config = config + @converter = config.converter # Current flags in use, built up from workflow task completed history entries @sdk_flags = Set.new @@ -87,6 +85,24 @@ def schedule(command) [event_target_from(command_id, command), cancelation_id] end + def final_commands + # Filter out any activity or timer cancellation commands if the underlying activity or + # timer has completed. This can occur when an activity or timer completes while a + # workflow task is being processed that would otherwise cancel this time or activity. + commands.filter do |command_pair| + case command_pair.last + when Command::CancelTimer + state_machine = command_tracker[command_pair.last.timer_id] + !state_machine.closed? + when Command::RequestActivityCancellation + state_machine = command_tracker[command_pair.last.activity_id] + !state_machine.closed? + else + true + end + end + end + def release?(release_name) track_release(release_name) unless releases.key?(release_name) @@ -149,7 +165,7 @@ def history_size private - attr_reader :dispatcher, :command_tracker, :marker_ids, :side_effects, :releases, :config + attr_reader :commands, :dispatcher, :command_tracker, :marker_ids, :side_effects, :releases, :config, :converter def use_signals_first(raw_events) # The presence of SAVE_FIRST_TASK_SIGNALS implies HANDLE_SIGNALS_FIRST @@ -232,22 +248,24 @@ def apply_event(event) case event.type when 'WORKFLOW_EXECUTION_STARTED' unless event.attributes.search_attributes.nil? 
- search_attributes.merge!(from_payload_map(event.attributes.search_attributes&.indexed_fields || {})) + search_attributes.merge!(converter.from_payload_map(event.attributes.search_attributes&.indexed_fields || {})) end state_machine.start dispatch( - History::EventTarget.workflow, + History::EventTarget.start_workflow, 'started', - from_payloads(event.attributes.input), + converter.from_payloads(event.attributes.input), event ) when 'WORKFLOW_EXECUTION_COMPLETED' - # todo + # should only be triggered in query execution and replay testing + discard_command(history_target) when 'WORKFLOW_EXECUTION_FAILED' - # todo + # should only be triggered in query execution and replay testing + discard_command(history_target) when 'WORKFLOW_EXECUTION_TIMED_OUT' # todo @@ -276,16 +294,16 @@ def apply_event(event) when 'ACTIVITY_TASK_COMPLETED' state_machine.complete - dispatch(history_target, 'completed', from_result_payloads(event.attributes.result)) + dispatch(history_target, 'completed', converter.from_result_payloads(event.attributes.result)) when 'ACTIVITY_TASK_FAILED' state_machine.fail dispatch(history_target, 'failed', - Temporal::Workflow::Errors.generate_error(event.attributes.failure, ActivityException)) + Temporal::Workflow::Errors.generate_error(event.attributes.failure, converter, ActivityException)) when 'ACTIVITY_TASK_TIMED_OUT' state_machine.time_out - dispatch(history_target, 'failed', Temporal::Workflow::Errors.generate_error(event.attributes.failure)) + dispatch(history_target, 'failed', Temporal::Workflow::Errors.generate_error(event.attributes.failure, converter)) when 'ACTIVITY_TASK_CANCEL_REQUESTED' state_machine.requested @@ -299,7 +317,7 @@ def apply_event(event) when 'ACTIVITY_TASK_CANCELED' state_machine.cancel dispatch(history_target, 'failed', - Temporal::ActivityCanceled.new(from_details_payloads(event.attributes.details))) + Temporal::ActivityCanceled.new(converter.from_details_payloads(event.attributes.details))) when 'TIMER_STARTED' state_machine.start @@ -336,19 +354,20 @@ def apply_event(event) when 'MARKER_RECORDED' state_machine.complete - handle_marker(event.id, event.attributes.marker_name, from_details_payloads(event.attributes.details['data'])) + handle_marker(event.id, event.attributes.marker_name, converter.from_details_payloads(event.attributes.details['data'])) when 'WORKFLOW_EXECUTION_SIGNALED' # relies on Signal#== for matching in Dispatcher signal_target = Signal.new(event.attributes.signal_name) dispatch(signal_target, 'signaled', event.attributes.signal_name, - from_signal_payloads(event.attributes.input)) + converter.from_signal_payloads(event.attributes.input)) when 'WORKFLOW_EXECUTION_TERMINATED' # todo when 'WORKFLOW_EXECUTION_CONTINUED_AS_NEW' - # todo + # should only be triggered in query execution and replay testing + discard_command(history_target) when 'START_CHILD_WORKFLOW_EXECUTION_INITIATED' state_machine.schedule @@ -367,15 +386,15 @@ def apply_event(event) when 'CHILD_WORKFLOW_EXECUTION_COMPLETED' state_machine.complete - dispatch(history_target, 'completed', from_result_payloads(event.attributes.result)) + dispatch(history_target, 'completed', converter.from_result_payloads(event.attributes.result)) when 'CHILD_WORKFLOW_EXECUTION_FAILED' state_machine.fail - dispatch(history_target, 'failed', Temporal::Workflow::Errors.generate_error(event.attributes.failure)) + dispatch(history_target, 'failed', Temporal::Workflow::Errors.generate_error(event.attributes.failure, converter)) when 'CHILD_WORKFLOW_EXECUTION_CANCELED' state_machine.cancel - 
dispatch(history_target, 'failed', Temporal::Workflow::Errors.generate_error(event.attributes.failure)) + dispatch(history_target, 'failed', Temporal::Workflow::Errors.generate_error(event.attributes.failure, converter)) when 'CHILD_WORKFLOW_EXECUTION_TIMED_OUT' state_machine.time_out @@ -406,7 +425,7 @@ def apply_event(event) dispatch(history_target, 'completed') when 'UPSERT_WORKFLOW_SEARCH_ATTRIBUTES' - search_attributes.merge!(from_payload_map(event.attributes.search_attributes&.indexed_fields || {})) + search_attributes.merge!(converter.from_payload_map(event.attributes.search_attributes&.indexed_fields || {})) # no need to track state; this is just a synchronous API call. discard_command(history_target) @@ -428,8 +447,12 @@ def event_target_from(command_id, command) History::EventTarget::TIMER_TYPE when Command::CancelTimer History::EventTarget::CANCEL_TIMER_REQUEST_TYPE - when Command::CompleteWorkflow, Command::FailWorkflow - History::EventTarget::WORKFLOW_TYPE + when Command::CompleteWorkflow + History::EventTarget::COMPLETE_WORKFLOW_TYPE + when Command::ContinueAsNew + History::EventTarget::CONTINUE_AS_NEW_WORKFLOW_TYPE + when Command::FailWorkflow + History::EventTarget::FAIL_WORKFLOW_TYPE when Command::StartChildWorkflow History::EventTarget::CHILD_WORKFLOW_TYPE when Command::UpsertSearchAttributes @@ -447,7 +470,7 @@ def dispatch(history_target, name, *attributes) NONDETERMINISM_ERROR_SUGGESTION = 'Likely, either you have made a version-unsafe change to your workflow or have non-deterministic '\ - 'behavior in your workflow. See https://docs.temporal.io/docs/java/versioning/#introduction-to-versioning.'.freeze + 'behavior in your workflow. See https://docs.temporal.io/docs/java/versioning/#introduction-to-versioning.'.freeze def discard_command(history_target) # Pop the first command from the list, it is expected to match @@ -462,7 +485,7 @@ def discard_command(history_target) return unless history_target != replay_target raise NonDeterministicWorkflowError, - "Unexpected command. The replaying code is issuing: #{replay_target}, "\ + "Unexpected command. The replaying code is issuing: #{replay_target}, "\ "but the history of previous executions recorded: #{history_target}. 
" + NONDETERMINISM_ERROR_SUGGESTION end diff --git a/lib/temporal/workflow/task_processor.rb b/lib/temporal/workflow/task_processor.rb index 9b79b454..b3620ad8 100644 --- a/lib/temporal/workflow/task_processor.rb +++ b/lib/temporal/workflow/task_processor.rb @@ -9,23 +9,22 @@ module Temporal class Workflow class TaskProcessor - Query = Struct.new(:query) do - include Concerns::Payloads - + Query = Struct.new(:query, :converter) do def query_type query.query_type end def query_args - from_query_payloads(query.query_args) + converter.from_query_payloads(query.query_args) end end MAX_FAILED_ATTEMPTS = 1 LEGACY_QUERY_KEY = :legacy_query - def initialize(task, namespace, workflow_lookup, middleware_chain, workflow_middleware_chain, config, binary_checksum) + def initialize(task, task_queue, namespace, workflow_lookup, middleware_chain, workflow_middleware_chain, config, binary_checksum) @task = task + @task_queue = task_queue @namespace = namespace @metadata = Metadata.generate_workflow_task_metadata(task, namespace) @task_token = task.task_token @@ -40,9 +39,8 @@ def initialize(task, namespace, workflow_lookup, middleware_chain, workflow_midd def process start_time = Time.now - Temporal.logger.debug('Processing Workflow task', metadata.to_h) - Temporal.metrics.timing(Temporal::MetricKeys::WORKFLOW_TASK_QUEUE_TIME, queue_time_ms, workflow: workflow_name, - namespace: namespace) + Temporal.logger.debug("Processing Workflow task", metadata.to_h) + Temporal.metrics.timing(Temporal::MetricKeys::WORKFLOW_TASK_QUEUE_TIME, queue_time_ms, metric_tags) raise Temporal::WorkflowNotRegistered, 'Workflow is not registered with this worker' unless workflow_class @@ -73,14 +71,21 @@ def process fail_task(e) ensure time_diff_ms = ((Time.now - start_time) * 1000).round - Temporal.metrics.timing(Temporal::MetricKeys::WORKFLOW_TASK_LATENCY, time_diff_ms, workflow: workflow_name, - namespace: namespace) + Temporal.metrics.timing(Temporal::MetricKeys::WORKFLOW_TASK_LATENCY, time_diff_ms, metric_tags) Temporal.logger.debug('Workflow task processed', metadata.to_h.merge(execution_time: time_diff_ms)) end + def metric_tags + { + workflow: workflow_name, + namespace: namespace, + task_queue: task_queue + } + end + private - attr_reader :task, :namespace, :task_token, :workflow_name, :workflow_class, + attr_reader :task, :task_queue, :namespace, :task_token, :workflow_name, :workflow_class, :middleware_chain, :workflow_middleware_chain, :metadata, :config, :binary_checksum def connection @@ -118,10 +123,10 @@ def legacy_query_task? def parse_queries # Support for deprecated query style if legacy_query_task? 
- { LEGACY_QUERY_KEY => Query.new(task.query) } + { LEGACY_QUERY_KEY => Query.new(task.query, config.converter) } else task.queries.each_with_object({}) do |(query_id, query), result| - result[query_id] = Query.new(query) + result[query_id] = Query.new(query, config.converter) end end end @@ -154,8 +159,7 @@ def complete_query(result) end def fail_task(error) - Temporal.metrics.increment(Temporal::MetricKeys::WORKFLOW_TASK_EXECUTION_FAILED, workflow: workflow_name, - namespace: namespace) + Temporal.metrics.increment(Temporal::MetricKeys::WORKFLOW_TASK_EXECUTION_FAILED, metric_tags) Temporal.logger.error('Workflow task failed', metadata.to_h.merge(error: error.inspect)) Temporal.logger.debug(error.backtrace.join("\n")) diff --git a/spec/config/temporal.rb b/spec/config/temporal.rb deleted file mode 100644 index 0d868ffe..00000000 --- a/spec/config/temporal.rb +++ /dev/null @@ -1,5 +0,0 @@ -RSpec.configure do |config| - config.before(:each) do - Temporal.configuration.error_handlers.clear - end -end \ No newline at end of file diff --git a/spec/config/test_converter.rb b/spec/config/test_converter.rb new file mode 100644 index 00000000..6cb9fce5 --- /dev/null +++ b/spec/config/test_converter.rb @@ -0,0 +1,8 @@ +require 'temporal/converter_wrapper' + +# This is a barebones default converter that can be used in tests +# where default conversion behaviour is expected +TEST_CONVERTER = Temporal::ConverterWrapper.new( + Temporal::Configuration::DEFAULT_CONVERTER, + Temporal::Configuration::DEFAULT_PAYLOAD_CODEC +).freeze diff --git a/spec/fabricators/grpc/activity_task_fabricator.rb b/spec/fabricators/grpc/activity_task_fabricator.rb index 6d2a531d..82e0886f 100644 --- a/spec/fabricators/grpc/activity_task_fabricator.rb +++ b/spec/fabricators/grpc/activity_task_fabricator.rb @@ -6,7 +6,7 @@ activity_id { SecureRandom.uuid } task_token { |attrs| attrs[:task_token] || SecureRandom.uuid } activity_type { Fabricate(:api_activity_type) } - input { Temporal.configuration.converter.to_payloads(nil) } + input { TEST_CONVERTER.to_payloads(nil) } workflow_type { Fabricate(:api_workflow_type) } workflow_execution { Fabricate(:api_workflow_execution) } current_attempt_scheduled_time { Google::Protobuf::Timestamp.new.tap { |t| t.from_time(Time.now) } } @@ -15,7 +15,7 @@ current_attempt_scheduled_time { Google::Protobuf::Timestamp.new.tap { |t| t.from_time(Time.now) } } header do |attrs| fields = (attrs[:headers] || {}).each_with_object({}) do |(field, value), h| - h[field] = Temporal.configuration.converter.to_payload(value) + h[field] = TEST_CONVERTER.to_payload(value) end Temporalio::Api::Common::V1::Header.new(fields: fields) end diff --git a/spec/fabricators/grpc/application_failure_fabricator.rb b/spec/fabricators/grpc/application_failure_fabricator.rb index 9d1396d8..95089cb7 100644 --- a/spec/fabricators/grpc/application_failure_fabricator.rb +++ b/spec/fabricators/grpc/application_failure_fabricator.rb @@ -1,7 +1,3 @@ -require 'temporal/concerns/payloads' -class TestDeserializer - include Temporal::Concerns::Payloads -end # Simulates Temporal::Connection::Serializer::Failure Fabricator(:api_application_failure, from: Temporalio::Api::Failure::V1::Failure) do transient :error_class, :backtrace @@ -10,7 +6,7 @@ class TestDeserializer application_failure_info do |attrs| Temporalio::Api::Failure::V1::ApplicationFailureInfo.new( type: attrs[:error_class], - details: TestDeserializer.new.to_details_payloads(attrs[:message]), + details: TEST_CONVERTER.to_details_payloads(attrs[:message]), ) end end diff 
--git a/spec/fabricators/grpc/history_event_fabricator.rb b/spec/fabricators/grpc/history_event_fabricator.rb index 4562d7ef..ad9a55e8 100644 --- a/spec/fabricators/grpc/history_event_fabricator.rb +++ b/spec/fabricators/grpc/history_event_fabricator.rb @@ -1,11 +1,4 @@ require 'securerandom' -require 'temporal/concerns/payloads' - -class TestSerializer - extend Temporal::Concerns::Payloads -end - -include Temporal::Concerns::Payloads Fabricator(:api_history_event, from: Temporalio::Api::History::V1::HistoryEvent) do event_id { 1 } @@ -17,9 +10,9 @@ class TestSerializer event_type { Temporalio::Api::Enums::V1::EventType::EVENT_TYPE_WORKFLOW_EXECUTION_STARTED } event_time { Time.now } workflow_execution_started_event_attributes do |attrs| - header_fields = to_payload_map(attrs[:headers] || {}) + header_fields = TEST_CONVERTER.to_payload_map(attrs[:headers] || {}) header = Temporalio::Api::Common::V1::Header.new(fields: header_fields) - indexed_fields = attrs[:search_attributes] ? to_payload_map(attrs[:search_attributes]) : nil + indexed_fields = attrs[:search_attributes] ? TEST_CONVERTER.to_payload_map(attrs[:search_attributes]) : nil Temporalio::Api::History::V1::WorkflowExecutionStartedEventAttributes.new( workflow_type: Fabricate(:api_workflow_type), @@ -142,7 +135,7 @@ class TestSerializer event_type { Temporalio::Api::Enums::V1::EventType::EVENT_TYPE_ACTIVITY_TASK_CANCELED } activity_task_canceled_event_attributes do |attrs| Temporalio::Api::History::V1::ActivityTaskCanceledEventAttributes.new( - details: TestSerializer.to_details_payloads('ACTIVITY_ID_NOT_STARTED'), + details: TEST_CONVERTER.to_details_payloads('ACTIVITY_ID_NOT_STARTED'), scheduled_event_id: attrs[:event_id] - 2, started_event_id: nil, identity: 'test-worker@test-host' @@ -197,7 +190,7 @@ class TestSerializer transient :search_attributes event_type { Temporalio::Api::Enums::V1::EventType::EVENT_TYPE_UPSERT_WORKFLOW_SEARCH_ATTRIBUTES } upsert_workflow_search_attributes_event_attributes do |attrs| - indexed_fields = attrs[:search_attributes] ? to_payload_map(attrs[:search_attributes]) : nil + indexed_fields = attrs[:search_attributes] ? 
TEST_CONVERTER.to_payload_map(attrs[:search_attributes]) : nil Temporalio::Api::History::V1::UpsertWorkflowSearchAttributesEventAttributes.new( workflow_task_completed_event_id: attrs[:event_id] - 1, search_attributes: Temporalio::Api::Common::V1::SearchAttributes.new( @@ -213,7 +206,7 @@ class TestSerializer Temporalio::Api::History::V1::MarkerRecordedEventAttributes.new( workflow_task_completed_event_id: attrs[:event_id] - 1, marker_name: 'SIDE_EFFECT', - details: to_payload_map({}) + details: TEST_CONVERTER.to_payload_map({}) ) end end diff --git a/spec/fabricators/grpc/memo_fabricator.rb b/spec/fabricators/grpc/memo_fabricator.rb index 38f764f2..cf499c8a 100644 --- a/spec/fabricators/grpc/memo_fabricator.rb +++ b/spec/fabricators/grpc/memo_fabricator.rb @@ -1,7 +1,7 @@ Fabricator(:memo, from: Temporalio::Api::Common::V1::Memo) do fields do Google::Protobuf::Map.new(:string, :message, Temporalio::Api::Common::V1::Payload).tap do |m| - m['foo'] = Temporal.configuration.converter.to_payload('bar') + m['foo'] = TEST_CONVERTER.to_payload('bar') end end end diff --git a/spec/fabricators/grpc/payload_fabricator.rb b/spec/fabricators/grpc/payload_fabricator.rb index badd8f36..9312da42 100644 --- a/spec/fabricators/grpc/payload_fabricator.rb +++ b/spec/fabricators/grpc/payload_fabricator.rb @@ -1,3 +1,23 @@ Fabricator(:api_payload, from: Temporalio::Api::Common::V1::Payload) do metadata { Google::Protobuf::Map.new(:string, :bytes) } end + +Fabricator(:api_payload_nil, from: :api_payload) do + metadata do + Google::Protobuf::Map.new(:string, :bytes).tap do |m| + m['encoding'] = Temporal::Connection::Converter::Payload::Nil::ENCODING + end + end +end + +Fabricator(:api_payload_bytes, from: :api_payload) do + transient :bytes + + metadata do + Google::Protobuf::Map.new(:string, :bytes).tap do |m| + m['encoding'] = Temporal::Connection::Converter::Payload::Bytes::ENCODING + end + end + + data { |attrs| attrs.fetch(:bytes, 'foobar') } +end diff --git a/spec/fabricators/grpc/payloads_fabricator.rb b/spec/fabricators/grpc/payloads_fabricator.rb new file mode 100644 index 00000000..a8f3aff0 --- /dev/null +++ b/spec/fabricators/grpc/payloads_fabricator.rb @@ -0,0 +1,9 @@ +Fabricator(:api_payloads, from: Temporalio::Api::Common::V1::Payloads) do + transient :payloads_array + + payloads do |attrs| + Google::Protobuf::RepeatedField.new(:message, Temporalio::Api::Common::V1::Payload).tap do |m| + m.concat(Array(attrs.fetch(:payloads_array, Fabricate(:api_payload)))) + end + end +end diff --git a/spec/fabricators/grpc/search_attributes_fabricator.rb b/spec/fabricators/grpc/search_attributes_fabricator.rb index 16a33675..1e98516e 100644 --- a/spec/fabricators/grpc/search_attributes_fabricator.rb +++ b/spec/fabricators/grpc/search_attributes_fabricator.rb @@ -1,7 +1,7 @@ Fabricator(:search_attributes, from: Temporalio::Api::Common::V1::SearchAttributes) do indexed_fields do Google::Protobuf::Map.new(:string, :message, Temporalio::Api::Common::V1::Payload).tap do |m| - m['foo'] = Temporal.configuration.converter.to_payload('bar') + m['foo'] = TEST_CONVERTER.to_payload('bar') end end end diff --git a/spec/fabricators/grpc/workflow_execution_started_event_attributes_fabricator.rb b/spec/fabricators/grpc/workflow_execution_started_event_attributes_fabricator.rb index 172bd7a5..0c1449fe 100644 --- a/spec/fabricators/grpc/workflow_execution_started_event_attributes_fabricator.rb +++ b/spec/fabricators/grpc/workflow_execution_started_event_attributes_fabricator.rb @@ -12,7 +12,7 @@ task_queue { 
Fabricate(:api_task_queue) } header do |attrs| fields = (attrs[:headers] || {}).each_with_object({}) do |(field, value), h| - h[field] = Temporal.configuration.converter.to_payload(value) + h[field] = TEST_CONVERTER.to_payload(value) end Temporalio::Api::Common::V1::Header.new(fields: fields) end diff --git a/spec/fabricators/grpc/workflow_query_fabricator.rb b/spec/fabricators/grpc/workflow_query_fabricator.rb index 024cdd59..f8831d49 100644 --- a/spec/fabricators/grpc/workflow_query_fabricator.rb +++ b/spec/fabricators/grpc/workflow_query_fabricator.rb @@ -1,4 +1,4 @@ Fabricator(:api_workflow_query, from: Temporalio::Api::Query::V1::WorkflowQuery) do query_type { 'state' } - query_args { Temporal.configuration.converter.to_payloads(['']) } + query_args { TEST_CONVERTER.to_payloads(['']) } end diff --git a/spec/unit/lib/temporal/activity/poller_spec.rb b/spec/unit/lib/temporal/activity/poller_spec.rb index 0476e950..3e5d24c7 100644 --- a/spec/unit/lib/temporal/activity/poller_spec.rb +++ b/spec/unit/lib/temporal/activity/poller_spec.rb @@ -108,7 +108,7 @@ def poll(task, times: 1) expect(Temporal::Activity::TaskProcessor) .to have_received(:new) - .with(task, namespace, lookup, middleware_chain, config, heartbeat_thread_pool) + .with(task, task_queue, namespace, lookup, middleware_chain, config, heartbeat_thread_pool) expect(task_processor).to have_received(:process) end @@ -143,7 +143,7 @@ def call(_); end expect(Temporal::Middleware::Chain).to have_received(:new).with(middleware) expect(Temporal::Activity::TaskProcessor) .to have_received(:new) - .with(task, namespace, lookup, middleware_chain, config, heartbeat_thread_pool) + .with(task, task_queue, namespace, lookup, middleware_chain, config, heartbeat_thread_pool) end end end @@ -199,6 +199,36 @@ def call(_); end end end + context 'when max_tasks_per_second is set' do + subject do + described_class.new( + namespace, + task_queue, + lookup, + config, + middleware, + { + max_tasks_per_second: 32 + } + ) + end + + it 'sends PollActivityTaskQueue requests with the configured task rate-limit' do + times = poll(nil, times: 2) + expect(times).to be >= 2 + + expect(connection).to have_received(:poll_activity_task_queue) + .with( + namespace: namespace, + task_queue: task_queue, + max_tasks_per_second: 32 + ) + .at_least(2) + .times + end + end + + context 'when connection is unable to poll and poll_retry_seconds is set' do subject do described_class.new( diff --git a/spec/unit/lib/temporal/activity/task_processor_spec.rb b/spec/unit/lib/temporal/activity/task_processor_spec.rb index 41ea952f..6999ba60 100644 --- a/spec/unit/lib/temporal/activity/task_processor_spec.rb +++ b/spec/unit/lib/temporal/activity/task_processor_spec.rb @@ -5,9 +5,10 @@ require 'temporal/scheduled_thread_pool' describe Temporal::Activity::TaskProcessor do - subject { described_class.new(task, namespace, lookup, middleware_chain, config, heartbeat_thread_pool) } + subject { described_class.new(task, task_queue, namespace, lookup, middleware_chain, config, heartbeat_thread_pool) } let(:namespace) { 'test-namespace' } + let(:task_queue) { 'test-queue' } let(:lookup) { instance_double('Temporal::ExecutableLookup', find: nil) } let(:task) do Fabricate( @@ -16,7 +17,7 @@ input: config.converter.to_payloads(input) ) end - let(:metadata) { Temporal::Metadata.generate_activity_metadata(task, namespace) } + let(:metadata) { Temporal::Metadata.generate_activity_metadata(task, namespace, config.converter) } let(:workflow_name) { task.workflow_type.name } let(:activity_name) { 
'TestActivity' } let(:connection) { instance_double('Temporal::Connection::GRPC') } @@ -39,7 +40,7 @@ .and_return(connection) allow(Temporal::Metadata) .to receive(:generate_activity_metadata) - .with(task, namespace) + .with(task, namespace, config.converter) .and_return(metadata) allow(Temporal::Activity::Context).to receive(:new).with(connection, metadata, config, heartbeat_thread_pool).and_return(context) @@ -149,9 +150,11 @@ .with( Temporal::MetricKeys::ACTIVITY_TASK_QUEUE_TIME, an_instance_of(Integer), - activity: activity_name, - namespace: namespace, - workflow: workflow_name + hash_including({ + activity: activity_name, + namespace: namespace, + workflow: workflow_name + }) ) end @@ -165,6 +168,7 @@ an_instance_of(Integer), activity: activity_name, namespace: namespace, + task_queue: task_queue, workflow: workflow_name ) end @@ -240,9 +244,11 @@ .with( Temporal::MetricKeys::ACTIVITY_TASK_QUEUE_TIME, an_instance_of(Integer), - activity: activity_name, - namespace: namespace, - workflow: workflow_name + hash_including({ + activity: activity_name, + namespace: namespace, + workflow: workflow_name + }) ) end @@ -256,6 +262,7 @@ an_instance_of(Integer), activity: activity_name, namespace: namespace, + task_queue: task_queue, workflow: workflow_name ) end diff --git a/spec/unit/lib/temporal/activity_spec.rb b/spec/unit/lib/temporal/activity_spec.rb index 47a0e8b0..f7dc5662 100644 --- a/spec/unit/lib/temporal/activity_spec.rb +++ b/spec/unit/lib/temporal/activity_spec.rb @@ -4,15 +4,28 @@ describe Temporal::Activity do it_behaves_like 'an executable' + class ArgsActivity < Temporal::Activity + def execute(a) + 'args result' + end + end + + class KwargsActivity < Temporal::Activity + def execute(a, b:, c:) + 'kwargs result' + end + end + subject { described_class.new(context) } let(:context) { instance_double('Temporal::Activity::Context') } describe '.execute_in_context' do + subject { ArgsActivity.new(context) } + let(:input) { ['test'] } before do allow(described_class).to receive(:new).and_return(subject) - allow(subject).to receive(:execute).and_return('result') end it 'passes the context' do @@ -22,13 +35,41 @@ end it 'calls #execute' do - described_class.execute_in_context(context, input) + expect(subject).to receive(:execute).with(*input) - expect(subject).to have_received(:execute).with(*input) + described_class.execute_in_context(context, input) end it 'returns #execute result' do - expect(described_class.execute_in_context(context, input)).to eq('result') + expect(described_class.execute_in_context(context, input)).to eq('args result') + end + + context 'when using keyword arguments' do + subject { KwargsActivity.new(context) } + + let(:input) { ['test', { b: 'b', c: 'c' }] } + + it 'passes the context' do + described_class.execute_in_context(context, input) + + expect(described_class).to have_received(:new).with(context) + end + + it 'calls #execute' do + expect(subject).to receive(:execute).with('test', b: 'b', c: 'c') + + described_class.execute_in_context(context, input) + end + + it 'does not raise an ArgumentError' do + expect { + described_class.execute_in_context(context, input) + }.not_to raise_error + end + + it 'returns #execute result' do + expect(described_class.execute_in_context(context, input)).to eq('kwargs result') + end end end diff --git a/spec/unit/lib/temporal/client_spec.rb b/spec/unit/lib/temporal/client_spec.rb index 1dd4995d..31dc4a78 100644 --- a/spec/unit/lib/temporal/client_spec.rb +++ b/spec/unit/lib/temporal/client_spec.rb @@ -52,20 +52,21 @@ def 
inject!(header) subject.start_workflow(TestStartWorkflow, 42) expect(connection) .to have_received(:start_workflow_execution) - .with( - namespace: 'default-test-namespace', - workflow_id: an_instance_of(String), - workflow_name: 'TestStartWorkflow', - task_queue: 'default-test-task-queue', - input: [42], - task_timeout: Temporal.configuration.timeouts[:task], - run_timeout: Temporal.configuration.timeouts[:run], - execution_timeout: Temporal.configuration.timeouts[:execution], - workflow_id_reuse_policy: nil, - headers: { 'test' => 'asdf' }, - memo: {}, - search_attributes: {}, - ) + .with( + namespace: 'default-test-namespace', + workflow_id: an_instance_of(String), + workflow_name: 'TestStartWorkflow', + task_queue: 'default-test-task-queue', + input: [42], + task_timeout: config.timeouts[:task], + run_timeout: config.timeouts[:run], + execution_timeout: config.timeouts[:execution], + workflow_id_reuse_policy: nil, + headers: { 'test' => 'asdf' }, + memo: {}, + search_attributes: {}, + start_delay: 0 + ) end end @@ -87,13 +88,14 @@ def inject!(header) workflow_name: 'TestStartWorkflow', task_queue: 'default-test-task-queue', input: [42], - task_timeout: Temporal.configuration.timeouts[:task], - run_timeout: Temporal.configuration.timeouts[:run], - execution_timeout: Temporal.configuration.timeouts[:execution], + task_timeout: config.timeouts[:task], + run_timeout: config.timeouts[:run], + execution_timeout: config.timeouts[:execution], workflow_id_reuse_policy: nil, headers: {}, memo: {}, search_attributes: {}, + start_delay: 0 ) end @@ -109,6 +111,7 @@ def inject!(header) workflow_id_reuse_policy: :reject, memo: { 'MemoKey1' => 'MemoValue1' }, search_attributes: { 'SearchAttribute1' => 256 }, + start_delay: 10 } ) @@ -120,13 +123,14 @@ def inject!(header) workflow_name: 'test-workflow', task_queue: 'test-task-queue', input: [42], - task_timeout: Temporal.configuration.timeouts[:task], - run_timeout: Temporal.configuration.timeouts[:run], - execution_timeout: Temporal.configuration.timeouts[:execution], + task_timeout: config.timeouts[:task], + run_timeout: config.timeouts[:run], + execution_timeout: config.timeouts[:execution], workflow_id_reuse_policy: :reject, headers: { 'Foo' => 'Bar' }, memo: { 'MemoKey1' => 'MemoValue1' }, search_attributes: { 'SearchAttribute1' => 256 }, + start_delay: 10 ) end @@ -147,13 +151,14 @@ def inject!(header) workflow_name: 'test-workflow', task_queue: 'default-test-task-queue', input: [42, { arg_1: 1, arg_2: 2 }], - task_timeout: Temporal.configuration.timeouts[:task], - run_timeout: Temporal.configuration.timeouts[:run], - execution_timeout: Temporal.configuration.timeouts[:execution], + task_timeout: config.timeouts[:task], + run_timeout: config.timeouts[:run], + execution_timeout: config.timeouts[:execution], workflow_id_reuse_policy: nil, headers: {}, memo: {}, search_attributes: {}, + start_delay: 0 ) end @@ -168,13 +173,14 @@ def inject!(header) workflow_name: 'TestStartWorkflow', task_queue: 'default-test-task-queue', input: [42], - task_timeout: Temporal.configuration.timeouts[:task], - run_timeout: Temporal.configuration.timeouts[:run], - execution_timeout: Temporal.configuration.timeouts[:execution], + task_timeout: config.timeouts[:task], + run_timeout: config.timeouts[:run], + execution_timeout: config.timeouts[:execution], workflow_id_reuse_policy: nil, headers: {}, memo: {}, search_attributes: {}, + start_delay: 0 ) end @@ -191,13 +197,14 @@ def inject!(header) workflow_name: 'TestStartWorkflow', task_queue: 'default-test-task-queue', 
input: [42], - task_timeout: Temporal.configuration.timeouts[:task], - run_timeout: Temporal.configuration.timeouts[:run], - execution_timeout: Temporal.configuration.timeouts[:execution], + task_timeout: config.timeouts[:task], + run_timeout: config.timeouts[:run], + execution_timeout: config.timeouts[:execution], workflow_id_reuse_policy: :allow, headers: {}, memo: {}, search_attributes: {}, + start_delay: 0 ) end end @@ -218,13 +225,14 @@ def inject!(header) workflow_name: 'test-workflow', task_queue: 'test-task-queue', input: [42], - task_timeout: Temporal.configuration.timeouts[:task], - run_timeout: Temporal.configuration.timeouts[:run], - execution_timeout: Temporal.configuration.timeouts[:execution], + task_timeout: config.timeouts[:task], + run_timeout: config.timeouts[:run], + execution_timeout: config.timeouts[:execution], workflow_id_reuse_policy: nil, headers: {}, memo: {}, search_attributes: {}, + start_delay: 0 ) end end @@ -246,15 +254,16 @@ def expect_signal_with_start(expected_arguments, expected_signal_argument) workflow_name: 'TestStartWorkflow', task_queue: 'default-test-task-queue', input: expected_arguments, - task_timeout: Temporal.configuration.timeouts[:task], - run_timeout: Temporal.configuration.timeouts[:run], - execution_timeout: Temporal.configuration.timeouts[:execution], + task_timeout: config.timeouts[:task], + run_timeout: config.timeouts[:run], + execution_timeout: config.timeouts[:execution], workflow_id_reuse_policy: nil, headers: {}, memo: {}, search_attributes: {}, signal_name: 'the question', signal_input: expected_signal_argument, + start_delay: 0 ) end @@ -300,7 +309,7 @@ def expect_signal_with_start(expected_arguments, expected_signal_argument) it 'raises when signal_input is given but signal_name is not' do expect do subject.start_workflow( - TestStartWorkflow, + TestStartWorkflow, [42, 54], [43, 55], options: { signal_input: 'what do you get if you multiply six by nine?', } @@ -328,9 +337,9 @@ def expect_signal_with_start(expected_arguments, expected_signal_argument) task_queue: 'default-test-task-queue', cron_schedule: '* * * * *', input: [42], - task_timeout: Temporal.configuration.timeouts[:task], - run_timeout: Temporal.configuration.timeouts[:run], - execution_timeout: Temporal.configuration.timeouts[:execution], + task_timeout: config.timeouts[:task], + run_timeout: config.timeouts[:run], + execution_timeout: config.timeouts[:execution], workflow_id_reuse_policy: nil, memo: {}, search_attributes: {}, @@ -361,7 +370,7 @@ def expect_signal_with_start(expected_arguments, expected_signal_argument) describe '#describe_namespace' do before { allow(connection).to receive(:describe_namespace).and_return(Temporalio::Api::WorkflowService::V1::DescribeNamespaceResponse.new) } - + it 'passes the namespace to the connection' do result = subject.describe_namespace('new-namespace') @@ -381,7 +390,7 @@ def expect_signal_with_start(expected_arguments, expected_signal_argument) .to have_received(:signal_workflow_execution) .with( namespace: 'default-test-namespace', - signal: 'signal', + signal: 'signal', workflow_id: 'workflow_id', run_id: 'run_id', input: nil, @@ -395,7 +404,7 @@ def expect_signal_with_start(expected_arguments, expected_signal_argument) .to have_received(:signal_workflow_execution) .with( namespace: 'default-test-namespace', - signal: 'signal', + signal: 'signal', workflow_id: 'workflow_id', run_id: 'run_id', input: 'input', @@ -409,7 +418,7 @@ def expect_signal_with_start(expected_arguments, expected_signal_argument) .to 
have_received(:signal_workflow_execution) .with( namespace: 'other-test-namespace', - signal: 'signal', + signal: 'signal', workflow_id: 'workflow_id', run_id: 'run_id', input: nil, @@ -449,7 +458,7 @@ class NamespacedWorkflow < Temporal::Workflow ) end - it 'can override the namespace' do + it 'can override the namespace' do completed_event = Fabricate(:workflow_completed_event, result: nil) response = Fabricate(:workflow_execution_history, events: [completed_event]) @@ -482,7 +491,7 @@ class NamespacedWorkflow < Temporal::Workflow it "completes and returns a #{type}" do payload = Temporalio::Api::Common::V1::Payloads.new( payloads: [ - Temporal.configuration.converter.to_payload(expected_result) + config.converter.to_payload(expected_result) ], ) completed_event = Fabricate(:workflow_completed_event, result: payload) @@ -534,7 +543,7 @@ class NamespacedWorkflow < Temporal::Workflow end.to raise_error(Temporal::WorkflowCanceled) end - it 'raises TimeoutError when the server times out' do + it 'raises TimeoutError when the server times out' do response = Fabricate(:workflow_execution_history, events: []) expect(connection) .to receive(:get_workflow_execution_history) @@ -759,7 +768,7 @@ class NamespacedWorkflow < Temporal::Workflow expect(connection) .to have_received(:terminate_workflow_execution) .with( - namespace: 'default-namespace', + namespace: 'default-test-namespace', workflow_id: 'my-workflow', reason: 'just stop it', details: nil, @@ -895,6 +904,32 @@ class NamespacedWorkflow < Temporal::Workflow end end + describe '#get_workflow_history' do + it 'gets full history with pagination' do + completed_event = Fabricate(:workflow_completed_event, result: nil) + response_1 = Fabricate(:workflow_execution_history, events: [completed_event], next_page_token: 'a') + response_2 = Fabricate(:workflow_execution_history, events: [completed_event], next_page_token: '') + + allow(connection) + .to receive(:get_workflow_execution_history) + .and_return(response_1, response_2) + + subject.get_workflow_history(namespace: namespace, workflow_id: workflow_id, run_id: run_id) + + expect(connection) + .to have_received(:get_workflow_execution_history) + .with(namespace: namespace, workflow_id: workflow_id, run_id: run_id, next_page_token: nil) + .ordered + + expect(connection) + .to have_received(:get_workflow_execution_history) + .with(namespace: namespace, workflow_id: workflow_id, run_id: run_id, next_page_token: 'a') + .ordered + + expect(connection).to have_received(:get_workflow_execution_history).exactly(2).times + end + end + describe '#list_open_workflow_executions' do let(:from) { Time.now - 600 } let(:now) { Time.now } @@ -977,7 +1012,7 @@ class NamespacedWorkflow < Temporal::Workflow end end - it 'returns the next page token and paginates correctly' do + it 'returns the next page token and paginates correctly' do executions1 = subject.list_open_workflow_executions(namespace, from, max_page_size: 10) executions1.map do |execution| expect(execution).to be_an_instance_of(Temporal::Workflow::ExecutionInfo) @@ -1009,7 +1044,7 @@ class NamespacedWorkflow < Temporal::Workflow .once end - it 'returns the next page and paginates correctly' do + it 'returns the next page and paginates correctly' do executions1 = subject.list_open_workflow_executions(namespace, from, max_page_size: 10) executions1.map do |execution| expect(execution).to be_an_instance_of(Temporal::Workflow::ExecutionInfo) diff --git a/spec/unit/lib/temporal/configuration_spec.rb b/spec/unit/lib/temporal/configuration_spec.rb index 
c1024e34..8ab2e282 100644
--- a/spec/unit/lib/temporal/configuration_spec.rb
+++ b/spec/unit/lib/temporal/configuration_spec.rb
@@ -62,4 +62,50 @@ def inject!(_); end
       expect(subject.for_connection).to have_attributes(identity: new_identity)
     end
   end
-end
\ No newline at end of file
+
+  describe '#converter' do
+    it 'wraps the provided converter and codec' do
+      converter_wrapper = subject.converter
+
+      expect(converter_wrapper).to be_a(Temporal::ConverterWrapper)
+      expect(converter_wrapper.send(:converter)).to eq(described_class::DEFAULT_CONVERTER)
+      expect(converter_wrapper.send(:codec)).to eq(described_class::DEFAULT_PAYLOAD_CODEC)
+    end
+  end
+
+  describe '#converter=' do
+    let(:converter) { instance_double(Temporal::Connection::Converter::Composite) }
+
+    it 'resets the wrapper when converter has changed' do
+      old_converter_wrapper = subject.converter
+
+      expect(old_converter_wrapper).to be_a(Temporal::ConverterWrapper)
+      expect(old_converter_wrapper.send(:converter)).to eq(described_class::DEFAULT_CONVERTER)
+
+      subject.converter = converter
+      new_converter_wrapper = subject.converter
+
+      expect(new_converter_wrapper).to be_a(Temporal::ConverterWrapper)
+      expect(new_converter_wrapper.send(:converter)).to eq(converter)
+      expect(new_converter_wrapper.send(:codec)).to eq(old_converter_wrapper.send(:codec))
+    end
+  end
+
+  describe '#payload_codec=' do
+    let(:codec) { Temporal::Connection::Converter::Codec::Base.new }
+
+    it 'resets the wrapper when codec has changed' do
+      old_converter_wrapper = subject.converter
+
+      expect(old_converter_wrapper).to be_a(Temporal::ConverterWrapper)
+      expect(old_converter_wrapper.send(:codec)).to eq(described_class::DEFAULT_PAYLOAD_CODEC)
+
+      subject.payload_codec = codec
+      new_converter_wrapper = subject.converter
+
+      expect(new_converter_wrapper).to be_a(Temporal::ConverterWrapper)
+      expect(new_converter_wrapper.send(:codec)).to eq(codec)
+      expect(new_converter_wrapper.send(:converter)).to eq(old_converter_wrapper.send(:converter))
+    end
+  end
+end
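The configuration spec above pins down the new behaviour: `Temporal::Configuration#converter` returns a `Temporal::ConverterWrapper` built from the configured converter and payload codec, and assigning `converter=` or `payload_codec=` rebuilds that wrapper while keeping the other half unchanged. A minimal sketch of how this looks from application code, assuming a hypothetical no-op codec; the base class and `encode`/`decode` methods are the ones exercised in these specs:

```ruby
# Sketch only: NoopCodec is hypothetical. A real codec would wrap, compress
# or encrypt the Payload proto it receives.
class NoopCodec < Temporal::Connection::Converter::Codec::Base
  def encode(payload)
    payload
  end

  def decode(payload)
    payload
  end
end

config = Temporal::Configuration.new
config.payload_codec = NoopCodec.new

config.converter
# => Temporal::ConverterWrapper pairing the default composite converter
#    with the newly assigned codec (the converter itself is unchanged)
```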
diff --git a/spec/unit/lib/temporal/connection/serializer/backfill_spec.rb b/spec/unit/lib/temporal/connection/serializer/backfill_spec.rb
new file mode 100644
index 00000000..b4505a57
--- /dev/null
+++ b/spec/unit/lib/temporal/connection/serializer/backfill_spec.rb
@@ -0,0 +1,38 @@
+require "temporal/connection/errors"
+require "temporal/schedule/backfill"
+require "temporal/connection/serializer/backfill"
+
+describe Temporal::Connection::Serializer::Backfill do
+  let(:converter) do
+    Temporal::ConverterWrapper.new(
+      Temporal::Configuration::DEFAULT_CONVERTER,
+      Temporal::Configuration::DEFAULT_PAYLOAD_CODEC
+    )
+  end
+  let(:example_backfill) do
+    Temporal::Schedule::Backfill.new(
+      start_time: Time.new(2000, 1, 1, 0, 0, 0),
+      end_time: Time.new(2031, 1, 1, 0, 0, 0),
+      overlap_policy: :buffer_all
+    )
+  end
+
+  describe "to_proto" do
+    it "raises an error if an invalid overlap_policy is specified" do
+      invalid = Temporal::Schedule::Backfill.new(overlap_policy: :foobar)
+      expect do
+        described_class.new(invalid, converter).to_proto
+      end
+        .to(raise_error(Temporal::Connection::ArgumentError, "Unknown schedule overlap policy specified: foobar"))
+    end
+
+    it "produces well-formed protobuf" do
+      result = described_class.new(example_backfill, converter).to_proto
+
+      expect(result).to(be_a(Temporalio::Api::Schedule::V1::BackfillRequest))
+      expect(result.overlap_policy).to(eq(:SCHEDULE_OVERLAP_POLICY_BUFFER_ALL))
+      expect(result.start_time.to_time).to(eq(example_backfill.start_time))
+      expect(result.end_time.to_time).to(eq(example_backfill.end_time))
+    end
+  end
+end
diff --git a/spec/unit/lib/temporal/connection/serializer/continue_as_new_spec.rb b/spec/unit/lib/temporal/connection/serializer/continue_as_new_spec.rb
index 046b066c..398231da 100644
--- a/spec/unit/lib/temporal/connection/serializer/continue_as_new_spec.rb
+++ b/spec/unit/lib/temporal/connection/serializer/continue_as_new_spec.rb
@@ -2,6 +2,13 @@
 require 'temporal/workflow/command'
 
 describe Temporal::Connection::Serializer::ContinueAsNew do
+  let(:converter) do
+    Temporal::ConverterWrapper.new(
+      Temporal::Configuration::DEFAULT_CONVERTER,
+      Temporal::Configuration::DEFAULT_PAYLOAD_CODEC
+    )
+  end
+
   describe 'to_proto' do
     it 'produces a protobuf' do
       timeouts = {
@@ -19,7 +26,7 @@
         search_attributes: {'foo-search-attribute': 'qux'},
       )
 
-      result = described_class.new(command).to_proto
+      result = described_class.new(command, converter).to_proto
 
       expect(result).to be_an_instance_of(Temporalio::Api::Command::V1::Command)
       expect(result.command_type).to eql(
diff --git a/spec/unit/lib/temporal/connection/serializer/failure_spec.rb b/spec/unit/lib/temporal/connection/serializer/failure_spec.rb
index 4242554e..2bde0337 100644
--- a/spec/unit/lib/temporal/connection/serializer/failure_spec.rb
+++ b/spec/unit/lib/temporal/connection/serializer/failure_spec.rb
@@ -1,14 +1,17 @@
 require 'temporal/connection/serializer/failure'
 require 'temporal/workflow/command'
 
-class TestDeserializer
-  include Temporal::Concerns::Payloads
-end
-
 describe Temporal::Connection::Serializer::Failure do
+  let(:converter) do
+    Temporal::ConverterWrapper.new(
+      Temporal::Configuration::DEFAULT_CONVERTER,
+      Temporal::Configuration::DEFAULT_PAYLOAD_CODEC
+    )
+  end
+
   describe 'to_proto' do
     it 'produces a protobuf' do
-      result = described_class.new(StandardError.new('test')).to_proto
+      result = described_class.new(StandardError.new('test'), converter).to_proto
 
       expect(result).to be_an_instance_of(Temporalio::Api::Failure::V1::Failure)
     end
@@ -31,10 +34,10 @@ def initialize(foo, bar, bad_class:)
     it 'Serializes round-trippable full errors when asked to' do
       # Make sure serializing various bits round-trips
       e = MyError.new(['seven', 'three'], "Bar", bad_class: NaughtyClass)
-      failure_proto = described_class.new(e, serialize_whole_error: true).to_proto
+      failure_proto = described_class.new(e, converter, serialize_whole_error: true).to_proto
       expect(failure_proto.application_failure_info.type).to eq("MyError")
-      deserialized_error = TestDeserializer.new.from_details_payloads(failure_proto.application_failure_info.details)
+      deserialized_error = converter.from_details_payloads(failure_proto.application_failure_info.details)
       expect(deserialized_error).to be_an_instance_of(MyError)
       expect(deserialized_error.message).to eq("Hello, Bar!")
       expect(deserialized_error.foo).to eq(['seven', 'three'])
@@ -53,23 +56,23 @@ def initialize(message)
     it 'deals with too-large serialization using the old path' do
       e = MyBigError.new('Uh oh!')
       # Normal serialization path
-      failure_proto = described_class.new(e, serialize_whole_error: true, max_bytes: 1000).to_proto
+      failure_proto = described_class.new(e, converter, serialize_whole_error: true, max_bytes: 1000).to_proto
       expect(failure_proto.application_failure_info.type).to eq('MyBigError')
-      deserialized_error = TestDeserializer.new.from_details_payloads(failure_proto.application_failure_info.details)
+      deserialized_error = converter.from_details_payloads(failure_proto.application_failure_info.details)
expect(deserialized_error).to be_an_instance_of(MyBigError) expect(deserialized_error.big_payload).to eq('123456789012345678901234567890123456789012345678901234567890') # Exercise legacy serialization mechanism - failure_proto = described_class.new(e, serialize_whole_error: false).to_proto + failure_proto = described_class.new(e, converter, serialize_whole_error: false).to_proto expect(failure_proto.application_failure_info.type).to eq('MyBigError') - old_style_deserialized_error = MyBigError.new(TestDeserializer.new.from_details_payloads(failure_proto.application_failure_info.details)) + old_style_deserialized_error = MyBigError.new(converter.from_details_payloads(failure_proto.application_failure_info.details)) expect(old_style_deserialized_error).to be_an_instance_of(MyBigError) expect(old_style_deserialized_error.message).to eq('Uh oh!') # If the payload size exceeds the max_bytes, we fallback to the old-style serialization. - failure_proto = described_class.new(e, serialize_whole_error: true, max_bytes: 50).to_proto + failure_proto = described_class.new(e, converter, serialize_whole_error: true, max_bytes: 50).to_proto expect(failure_proto.application_failure_info.type).to eq('MyBigError') - avoids_truncation_error = MyBigError.new(TestDeserializer.new.from_details_payloads(failure_proto.application_failure_info.details)) + avoids_truncation_error = MyBigError.new(converter.from_details_payloads(failure_proto.application_failure_info.details)) expect(avoids_truncation_error).to be_an_instance_of(MyBigError) expect(avoids_truncation_error.message).to eq('Uh oh!') @@ -82,7 +85,7 @@ def initialize(message) allow(Temporal.logger).to receive(:error) max_bytes = 50 - described_class.new(e, serialize_whole_error: true, max_bytes: max_bytes).to_proto + described_class.new(e, converter, serialize_whole_error: true, max_bytes: max_bytes).to_proto expect(Temporal.logger) .to have_received(:error) .with( @@ -99,7 +102,7 @@ def initialize; end it 'successfully processes an error with no constructor arguments' do e = MyArglessError.new - failure_proto = described_class.new(e, serialize_whole_error: true).to_proto + failure_proto = described_class.new(e, converter, serialize_whole_error: true).to_proto expect(failure_proto.application_failure_info.type).to eq('MyArglessError') end diff --git a/spec/unit/lib/temporal/connection/serializer/query_answer_spec.rb b/spec/unit/lib/temporal/connection/serializer/query_answer_spec.rb index 62028824..5e912206 100644 --- a/spec/unit/lib/temporal/connection/serializer/query_answer_spec.rb +++ b/spec/unit/lib/temporal/connection/serializer/query_answer_spec.rb @@ -1,23 +1,25 @@ require 'temporal/connection/serializer/query_failure' require 'temporal/workflow/query_result' -require 'temporal/concerns/payloads' describe Temporal::Connection::Serializer::QueryAnswer do - class TestDeserializer - extend Temporal::Concerns::Payloads + let(:converter) do + Temporal::ConverterWrapper.new( + Temporal::Configuration::DEFAULT_CONVERTER, + Temporal::Configuration::DEFAULT_PAYLOAD_CODEC + ) end describe 'to_proto' do let(:query_result) { Temporal::Workflow::QueryResult.answer(42) } it 'produces a protobuf' do - result = described_class.new(query_result).to_proto + result = described_class.new(query_result, converter).to_proto expect(result).to be_a(Temporalio::Api::Query::V1::WorkflowQueryResult) expect(result.result_type).to eq(Temporalio::Api::Enums::V1::QueryResultType.lookup( Temporalio::Api::Enums::V1::QueryResultType::QUERY_RESULT_TYPE_ANSWERED) ) - 
expect(result.answer).to eq(TestDeserializer.to_query_payloads(42)) + expect(result.answer).to eq(converter.to_query_payloads(42)) end end end diff --git a/spec/unit/lib/temporal/connection/serializer/query_failure_spec.rb b/spec/unit/lib/temporal/connection/serializer/query_failure_spec.rb index 0590c0c4..62926aea 100644 --- a/spec/unit/lib/temporal/connection/serializer/query_failure_spec.rb +++ b/spec/unit/lib/temporal/connection/serializer/query_failure_spec.rb @@ -2,12 +2,19 @@ require 'temporal/workflow/query_result' describe Temporal::Connection::Serializer::QueryFailure do + let(:converter) do + Temporal::ConverterWrapper.new( + Temporal::Configuration::DEFAULT_CONVERTER, + Temporal::Configuration::DEFAULT_PAYLOAD_CODEC + ) + end + describe 'to_proto' do let(:exception) { StandardError.new('Test query failure') } let(:query_result) { Temporal::Workflow::QueryResult.failure(exception) } it 'produces a protobuf' do - result = described_class.new(query_result).to_proto + result = described_class.new(query_result, converter).to_proto expect(result).to be_a(Temporalio::Api::Query::V1::WorkflowQueryResult) expect(result.result_type).to eq(Temporalio::Api::Enums::V1::QueryResultType.lookup( diff --git a/spec/unit/lib/temporal/connection/serializer/retry_policy_spec.rb b/spec/unit/lib/temporal/connection/serializer/retry_policy_spec.rb index 211f807f..5e27503f 100644 --- a/spec/unit/lib/temporal/connection/serializer/retry_policy_spec.rb +++ b/spec/unit/lib/temporal/connection/serializer/retry_policy_spec.rb @@ -2,6 +2,13 @@ require 'temporal/connection/serializer/retry_policy' describe Temporal::Connection::Serializer::RetryPolicy do + let(:converter) do + Temporal::ConverterWrapper.new( + Temporal::Configuration::DEFAULT_CONVERTER, + Temporal::Configuration::DEFAULT_PAYLOAD_CODEC + ) + end + describe 'to_proto' do let(:example_policy) do Temporal::RetryPolicy.new( @@ -14,7 +21,7 @@ end it 'converts to proto' do - proto = described_class.new(example_policy).to_proto + proto = described_class.new(example_policy, converter).to_proto expect(proto.initial_interval.seconds).to eq(1) expect(proto.backoff_coefficient).to eq(1.5) expect(proto.maximum_interval.seconds).to eq(5) diff --git a/spec/unit/lib/temporal/connection/serializer/schedule_action_spec.rb b/spec/unit/lib/temporal/connection/serializer/schedule_action_spec.rb new file mode 100644 index 00000000..93f9e87c --- /dev/null +++ b/spec/unit/lib/temporal/connection/serializer/schedule_action_spec.rb @@ -0,0 +1,56 @@ +require "temporal/connection/errors" +require "temporal/schedule/start_workflow_action" +require "temporal/connection/serializer/schedule_action" + +describe Temporal::Connection::Serializer::ScheduleAction do + let(:converter) do + Temporal::ConverterWrapper.new( + Temporal::Configuration::DEFAULT_CONVERTER, + Temporal::Configuration::DEFAULT_PAYLOAD_CODEC + ) + end + let(:timeouts) { {run: 100, task: 10} } + + let(:example_action) do + Temporal::Schedule::StartWorkflowAction.new( + "HelloWorldWorkflow", + "one", + "two", + options: { + workflow_id: "foobar", + task_queue: "my-task-queue", + timeouts: timeouts, + memo: {:"foo-memo" => "baz"}, + search_attributes: {:"foo-search-attribute" => "qux"}, + headers: {:"foo-header" => "bar"} + } + ) + end + + describe "to_proto" do + it "raises an error if an invalid action is specified" do + expect do + described_class.new(123, converter).to_proto + end + .to(raise_error(Temporal::Connection::ArgumentError)) do |e| + expect(e.message).to(eq("Unknown action type Integer")) + end + 
end + + it "produces well-formed protobuf" do + result = described_class.new(example_action, converter).to_proto + + expect(result).to(be_a(Temporalio::Api::Schedule::V1::ScheduleAction)) + + action = result.start_workflow + expect(action).to(be_a(Temporalio::Api::Workflow::V1::NewWorkflowExecutionInfo)) + expect(action.task_queue.name).to(eq("my-task-queue")) + expect(action.input.payloads.map(&:data)).to(eq(["\"one\"", "\"two\""])) + expect(action.header.fields["foo-header"].data).to(eq("\"bar\"")) + expect(action.memo.fields["foo-memo"].data).to(eq("\"baz\"")) + expect(action.search_attributes.indexed_fields["foo-search-attribute"].data).to(eq("\"qux\"")) + expect(action.workflow_run_timeout.seconds).to(eq(timeouts[:run])) + expect(action.workflow_task_timeout.seconds).to(eq(timeouts[:task])) + end + end +end diff --git a/spec/unit/lib/temporal/connection/serializer/schedule_policies_spec.rb b/spec/unit/lib/temporal/connection/serializer/schedule_policies_spec.rb new file mode 100644 index 00000000..2b51cee3 --- /dev/null +++ b/spec/unit/lib/temporal/connection/serializer/schedule_policies_spec.rb @@ -0,0 +1,37 @@ +require "temporal/schedule/schedule_policies" +require "temporal/connection/serializer/schedule_policies" + +describe Temporal::Connection::Serializer::SchedulePolicies do + let(:converter) do + Temporal::ConverterWrapper.new( + Temporal::Configuration::DEFAULT_CONVERTER, + Temporal::Configuration::DEFAULT_PAYLOAD_CODEC + ) + end + let(:example_policies) do + Temporal::Schedule::SchedulePolicies.new( + overlap_policy: :buffer_one, + catchup_window: 600, + pause_on_failure: true + ) + end + + describe "to_proto" do + it "produces well-formed protobuf" do + result = described_class.new(example_policies, converter).to_proto + + expect(result).to(be_a(Temporalio::Api::Schedule::V1::SchedulePolicies)) + expect(result.overlap_policy).to(eq(:SCHEDULE_OVERLAP_POLICY_BUFFER_ONE)) + expect(result.catchup_window.seconds).to(eq(600)) + expect(result.pause_on_failure).to(eq(true)) + end + + it "should raise if an unknown overlap policy is specified" do + invalid_policies = Temporal::Schedule::SchedulePolicies.new(overlap_policy: :foobar) + expect do + described_class.new(invalid_policies, converter).to_proto + end + .to(raise_error(Temporal::Connection::ArgumentError, "Unknown schedule overlap policy specified: foobar")) + end + end +end diff --git a/spec/unit/lib/temporal/connection/serializer/schedule_spec_spec.rb b/spec/unit/lib/temporal/connection/serializer/schedule_spec_spec.rb new file mode 100644 index 00000000..ee0cd0f8 --- /dev/null +++ b/spec/unit/lib/temporal/connection/serializer/schedule_spec_spec.rb @@ -0,0 +1,63 @@ +require "temporal/schedule/schedule_spec" +require "temporal/schedule/interval" +require "temporal/schedule/calendar" +require "temporal/connection/serializer/schedule_spec" + +describe Temporal::Connection::Serializer::ScheduleSpec do + let(:converter) do + Temporal::ConverterWrapper.new( + Temporal::Configuration::DEFAULT_CONVERTER, + Temporal::Configuration::DEFAULT_PAYLOAD_CODEC + ) + end + let(:example_spec) do + Temporal::Schedule::ScheduleSpec.new( + cron_expressions: ["@hourly"], + intervals: [ + Temporal::Schedule::Interval.new(every: 50, offset: 30), + Temporal::Schedule::Interval.new(every: 60) + ], + calendars: [ + Temporal::Schedule::Calendar.new( + hour: "7", + minute: "0,3,15", + day_of_week: "MONDAY", + month: "1-6", + comment: "some comment explaining intent" + ), + Temporal::Schedule::Calendar.new( + minute: "8", + hour: "*" + ) + ], + 
start_time: Time.new(2000, 1, 1, 0, 0, 0), + end_time: Time.new(2031, 1, 1, 0, 0, 0), + jitter: 500, + timezone_name: "America/New_York" + ) + end + + describe "to_proto" do + it "produces well-formed protobuf" do + result = described_class.new(example_spec, converter).to_proto + + expect(result).to(be_a(Temporalio::Api::Schedule::V1::ScheduleSpec)) + expect(result.cron_string).to(eq(["@hourly"])) + expect(result.interval[0].interval.seconds).to(eq(50)) + expect(result.interval[0].phase.seconds).to(eq(30)) + expect(result.interval[1].interval.seconds).to(eq(60)) + expect(result.interval[1].phase).to(be_nil) + expect(result.calendar[0].hour).to(eq("7")) + expect(result.calendar[0].minute).to(eq("0,3,15")) + expect(result.calendar[0].day_of_week).to(eq("MONDAY")) + expect(result.calendar[0].month).to(eq("1-6")) + expect(result.calendar[0].comment).to(eq("some comment explaining intent")) + expect(result.calendar[1].hour).to(eq("*")) + expect(result.calendar[1].minute).to(eq("8")) + expect(result.start_time.to_time).to(eq(example_spec.start_time)) + expect(result.end_time.to_time).to(eq(example_spec.end_time)) + expect(result.jitter.seconds).to(eq(500)) + expect(result.timezone_name).to(eq("America/New_York")) + end + end +end diff --git a/spec/unit/lib/temporal/connection/serializer/schedule_state_spec.rb b/spec/unit/lib/temporal/connection/serializer/schedule_state_spec.rb new file mode 100644 index 00000000..3fbe8051 --- /dev/null +++ b/spec/unit/lib/temporal/connection/serializer/schedule_state_spec.rb @@ -0,0 +1,31 @@ +require "temporal/schedule/schedule_state" +require "temporal/connection/serializer/schedule_state" + +describe Temporal::Connection::Serializer::ScheduleState do + let(:converter) do + Temporal::ConverterWrapper.new( + Temporal::Configuration::DEFAULT_CONVERTER, + Temporal::Configuration::DEFAULT_PAYLOAD_CODEC + ) + end + let(:example_state) do + Temporal::Schedule::ScheduleState.new( + notes: "some notes", + paused: true, + limited_actions: true, + remaining_actions: 500 + ) + end + + describe "to_proto" do + it "produces well-formed protobuf" do + result = described_class.new(example_state, converter).to_proto + + expect(result).to(be_a(Temporalio::Api::Schedule::V1::ScheduleState)) + expect(result.notes).to(eq("some notes")) + expect(result.paused).to(eq(true)) + expect(result.limited_actions).to(eq(true)) + expect(result.remaining_actions).to(eq(500)) + end + end +end diff --git a/spec/unit/lib/temporal/connection/serializer/start_child_workflow_spec.rb b/spec/unit/lib/temporal/connection/serializer/start_child_workflow_spec.rb index 2e72951c..ae26f88f 100644 --- a/spec/unit/lib/temporal/connection/serializer/start_child_workflow_spec.rb +++ b/spec/unit/lib/temporal/connection/serializer/start_child_workflow_spec.rb @@ -3,6 +3,12 @@ require 'temporal/connection/serializer/start_child_workflow' describe Temporal::Connection::Serializer::StartChildWorkflow do + let(:converter) do + Temporal::ConverterWrapper.new( + Temporal::Configuration::DEFAULT_CONVERTER, + Temporal::Configuration::DEFAULT_PAYLOAD_CODEC + ) + end let(:example_command) do Temporal::Workflow::Command::StartChildWorkflow.new( workflow_id: SecureRandom.uuid, @@ -24,7 +30,7 @@ command.parent_close_policy = :invalid expect do - described_class.new(command).to_proto + described_class.new(command, converter).to_proto end.to raise_error(Temporal::Connection::ArgumentError) do |e| expect(e.message).to eq("Unknown parent_close_policy '#{command.parent_close_policy}' specified") end @@ -40,7 +46,7 @@ command = 
example_command command.parent_close_policy = policy_name - result = described_class.new(command).to_proto + result = described_class.new(command, converter).to_proto attribs = result.start_child_workflow_execution_command_attributes expect(attribs.parent_close_policy).to eq(expected_parent_close_policy) end diff --git a/spec/unit/lib/temporal/connection/serializer/upsert_search_attributes_spec.rb b/spec/unit/lib/temporal/connection/serializer/upsert_search_attributes_spec.rb index bc94128f..5bdace1a 100644 --- a/spec/unit/lib/temporal/connection/serializer/upsert_search_attributes_spec.rb +++ b/spec/unit/lib/temporal/connection/serializer/upsert_search_attributes_spec.rb @@ -3,11 +3,14 @@ require 'temporal/connection/serializer/upsert_search_attributes' require 'temporal/workflow/command' -class TestDeserializer - extend Temporal::Concerns::Payloads -end - describe Temporal::Connection::Serializer::UpsertSearchAttributes do + let(:converter) do + Temporal::ConverterWrapper.new( + Temporal::Configuration::DEFAULT_CONVERTER, + Temporal::Configuration::DEFAULT_PAYLOAD_CODEC + ) + end + it 'produces a protobuf that round-trips' do expected_attributes = { 'CustomStringField' => 'moo', @@ -22,14 +25,14 @@ class TestDeserializer search_attributes: expected_attributes ) - result = described_class.new(command).to_proto + result = described_class.new(command, converter).to_proto expect(result).to be_an_instance_of(Temporalio::Api::Command::V1::Command) expect(result.command_type).to eql( :COMMAND_TYPE_UPSERT_WORKFLOW_SEARCH_ATTRIBUTES ) command_attributes = result.upsert_workflow_search_attributes_command_attributes expect(command_attributes).not_to be_nil - actual_attributes = TestDeserializer.from_payload_map_without_codec(command_attributes&.search_attributes&.indexed_fields) + actual_attributes = converter.from_payload_map_without_codec(command_attributes&.search_attributes&.indexed_fields) expect(actual_attributes).to eql(expected_attributes) end diff --git a/spec/unit/lib/temporal/connection/serializer/workflow_id_reuse_policy_spec.rb b/spec/unit/lib/temporal/connection/serializer/workflow_id_reuse_policy_spec.rb index ce139325..b1ee6cad 100644 --- a/spec/unit/lib/temporal/connection/serializer/workflow_id_reuse_policy_spec.rb +++ b/spec/unit/lib/temporal/connection/serializer/workflow_id_reuse_policy_spec.rb @@ -2,6 +2,13 @@ require 'temporal/connection/serializer/retry_policy' describe Temporal::Connection::Serializer::WorkflowIdReusePolicy do + let(:converter) do + Temporal::ConverterWrapper.new( + Temporal::Configuration::DEFAULT_CONVERTER, + Temporal::Configuration::DEFAULT_PAYLOAD_CODEC + ) + end + describe 'to_proto' do SYM_TO_PROTO = { allow_failed: Temporalio::Api::Enums::V1::WorkflowIdReusePolicy::WORKFLOW_ID_REUSE_POLICY_ALLOW_DUPLICATE_FAILED_ONLY, @@ -12,7 +19,7 @@ def self.test_valid_policy(policy_sym) it "serializes #{policy_sym}" do - proto_enum = described_class.new(policy_sym).to_proto + proto_enum = described_class.new(policy_sym, converter).to_proto expected = SYM_TO_PROTO[policy_sym] expect(proto_enum).to eq(expected) end @@ -25,7 +32,7 @@ def self.test_valid_policy(policy_sym) it "rejects invalid policies" do expect do - described_class.new(:not_a_valid_policy).to_proto + described_class.new(:not_a_valid_policy, converter).to_proto end.to raise_error(Temporal::Connection::ArgumentError, 'Unknown workflow_id_reuse_policy specified: not_a_valid_policy') end end diff --git a/spec/unit/lib/temporal/connection_spec.rb b/spec/unit/lib/temporal/connection_spec.rb index 
f334d6d8..a3e5642f 100644 --- a/spec/unit/lib/temporal/connection_spec.rb +++ b/spec/unit/lib/temporal/connection_spec.rb @@ -25,6 +25,7 @@ expect(subject).to be_kind_of(Temporal::Connection::GRPC) expect(subject.send(:identity)).not_to be_nil expect(subject.send(:credentials)).to eq(:this_channel_is_insecure) + expect(subject.send(:converter)).to eq(config.converter) end end @@ -35,6 +36,7 @@ expect(subject).to be_kind_of(Temporal::Connection::GRPC) expect(subject.send(:identity)).not_to be_nil expect(subject.send(:credentials)).to be_kind_of(GRPC::Core::ChannelCredentials) + expect(subject.send(:converter)).to eq(config.converter) end end @@ -45,6 +47,7 @@ expect(subject).to be_kind_of(Temporal::Connection::GRPC) expect(subject.send(:identity)).not_to be_nil expect(subject.send(:credentials)).to be_kind_of(GRPC::Core::CallCredentials) + expect(subject.send(:converter)).to eq(config.converter) end end @@ -61,6 +64,7 @@ expect(subject).to be_kind_of(Temporal::Connection::GRPC) expect(subject.send(:identity)).not_to be_nil expect(subject.send(:credentials)).to be_kind_of(GRPC::Core::ChannelCredentials) + expect(subject.send(:converter)).to eq(config.converter) end end end diff --git a/spec/unit/lib/temporal/converter_wrapper_spec.rb b/spec/unit/lib/temporal/converter_wrapper_spec.rb new file mode 100644 index 00000000..f5b06af4 --- /dev/null +++ b/spec/unit/lib/temporal/converter_wrapper_spec.rb @@ -0,0 +1,175 @@ +require 'temporal/converter_wrapper' +require 'temporal/connection/converter/payload/bytes' +require 'temporal/connection/converter/payload/nil' +require 'temporal/connection/converter/composite' + +describe Temporal::ConverterWrapper do + class TestCodec < Temporal::Connection::Converter::Codec::Base + def encode(payload) + return payload + end + + def decode(payload) + return payload + end + end + + subject { described_class.new(converter, codec) } + let(:converter) do + Temporal::Connection::Converter::Composite.new(payload_converters: [ + Temporal::Connection::Converter::Payload::Bytes.new, + Temporal::Connection::Converter::Payload::Nil.new + ]) + end + let(:codec) { Temporal::Connection::Converter::Codec::Chain.new(payload_codecs: [TestCodec.new]) } + let(:payloads) { Fabricate(:api_payloads, payloads_array: [payload_bytes, payload_nil]) } + let(:payload_bytes) { Fabricate(:api_payload_bytes, bytes: 'test-payload') } + let(:payload_nil) { Fabricate(:api_payload_nil) } + + before do + allow(codec).to receive(:encode).and_call_original + allow(codec).to receive(:encodes).and_call_original + allow(codec).to receive(:decode).and_call_original + allow(codec).to receive(:decodes).and_call_original + end + + describe '#from_payloads' do + it 'decodes and converts' do + expect(subject.from_payloads(payloads)).to eq(['test-payload', nil]) + expect(codec).to have_received(:decodes) + end + end + + describe '#from_payload' do + it 'decodes and converts' do + expect(subject.from_payload(payload_bytes)).to eq('test-payload') + expect(codec).to have_received(:decode) + end + end + + describe '#from_payload_map_without_codec' do + let(:payload_map) do + Google::Protobuf::Map.new(:string, :message, Temporalio::Api::Common::V1::Payload).tap do |m| + m['first'] = payload_bytes + m['second'] = payload_nil + end + end + + it 'converts' do + expect(subject.from_payload_map_without_codec(payload_map)) + .to eq('first' => 'test-payload', 'second' => nil) + expect(codec).not_to have_received(:decode) + end + end + + describe '#from_result_payloads' do + it 'decodes and converts' do + 
expect(subject.from_result_payloads(payloads)).to eq('test-payload') + expect(codec).to have_received(:decodes) + end + end + + describe '#from_details_payloads' do + it 'decodes and converts first payload' do + expect(subject.from_details_payloads(payloads)).to eq('test-payload') + expect(codec).to have_received(:decodes) + end + end + + describe '#from_signal_payloads' do + it 'decodes and converts first payload' do + expect(subject.from_signal_payloads(payloads)).to eq('test-payload') + expect(codec).to have_received(:decodes) + end + end + + describe '#from_query_payloads' do + it 'decodes and converts first payload' do + expect(subject.from_query_payloads(payloads)).to eq('test-payload') + expect(codec).to have_received(:decodes) + end + end + + describe '#from_payload_map' do + let(:payload_map) do + Google::Protobuf::Map.new(:string, :message, Temporalio::Api::Common::V1::Payload).tap do |m| + m['first'] = payload_bytes + m['second'] = payload_nil + end + end + + it 'decodes and converts first payload' do + expect(subject.from_payload_map(payload_map)) + .to eq('first' => 'test-payload', 'second' => nil) + expect(codec).to have_received(:decode).twice + end + end + + describe '#to_payloads' do + it 'converts and encodes' do + expect(subject.to_payloads(['test-payload'.b, nil])).to eq(payloads) + expect(codec).to have_received(:encodes) + end + end + + describe '#to_payload' do + it 'converts and encodes' do + expect(subject.to_payload('test-payload'.b)).to eq(payload_bytes) + expect(codec).to have_received(:encode) + end + end + + describe '#to_payload_map_without_codec' do + let(:payload_map) { { first: payload_bytes, second: payload_nil } } + + it 'converts' do + expect(subject.to_payload_map_without_codec(first: 'test-payload'.b, second: nil)).to eq(payload_map) + expect(codec).not_to have_received(:encode) + end + end + + describe '#to_result_payloads' do + let(:payloads) { Fabricate(:api_payloads, payloads_array: [payload_bytes]) } + + it 'converts and encodes' do + expect(subject.to_result_payloads('test-payload'.b)).to eq(payloads) + expect(codec).to have_received(:encodes) + end + end + + describe '#to_details_payloads' do + let(:payloads) { Fabricate(:api_payloads, payloads_array: [payload_bytes]) } + + it 'converts and encodes' do + expect(subject.to_details_payloads('test-payload'.b)).to eq(payloads) + expect(codec).to have_received(:encodes) + end + end + + describe '#to_signal_payloads' do + let(:payloads) { Fabricate(:api_payloads, payloads_array: [payload_bytes]) } + + it 'converts and encodes' do + expect(subject.to_signal_payloads('test-payload'.b)).to eq(payloads) + expect(codec).to have_received(:encodes) + end + end + + describe '#to_query_payloads' do + let(:payloads) { Fabricate(:api_payloads, payloads_array: [payload_bytes]) } + + it 'converts and encodes' do + expect(subject.to_query_payloads('test-payload'.b)).to eq(payloads) + expect(codec).to have_received(:encodes) + end + end + + describe '#to_payload_map' do + let(:payload_map) { { first: payload_bytes, second: payload_nil } } + + it 'converts and encodes' do + expect(subject.to_payload_map(first: 'test-payload'.b, second: nil)).to eq(payload_map) + expect(codec).to have_received(:encode).twice + end + end +end diff --git a/spec/unit/lib/temporal/execution_options_spec.rb b/spec/unit/lib/temporal/execution_options_spec.rb index 98fbe380..d0c9d017 100644 --- a/spec/unit/lib/temporal/execution_options_spec.rb +++ b/spec/unit/lib/temporal/execution_options_spec.rb @@ -99,10 +99,11 @@ class 
TestExecutionOptionsWorkflow < Temporal::Workflow task_queue: 'test-task-queue', retry_policy: { interval: 1, backoff: 2, max_attempts: 5 }, timeouts: { start_to_close: 10 }, - headers: { 'TestHeader' => 'Test' } + headers: { 'TestHeader' => 'Test' }, + start_delay: 10 } end - + it 'is initialized with full options' do expect(subject.name).to eq(options[:name]) expect(subject.namespace).to eq(options[:namespace]) @@ -113,12 +114,13 @@ class TestExecutionOptionsWorkflow < Temporal::Workflow expect(subject.retry_policy.max_attempts).to eq(options[:retry_policy][:max_attempts]) expect(subject.timeouts).to eq(options[:timeouts]) expect(subject.headers).to eq(options[:headers]) + expect(subject.start_delay).to eq(options[:start_delay]) end end - + context 'when retry policy options are invalid' do let(:options) { { retry_policy: { max_attempts: 10 } } } - + it 'raises' do expect { subject }.to raise_error( Temporal::RetryPolicy::InvalidRetryPolicy, diff --git a/spec/unit/lib/temporal/grpc_spec.rb b/spec/unit/lib/temporal/grpc_spec.rb index ee3c1fcb..5639a0e9 100644 --- a/spec/unit/lib/temporal/grpc_spec.rb +++ b/spec/unit/lib/temporal/grpc_spec.rb @@ -1,8 +1,15 @@ require 'temporal/connection/grpc' +require 'temporal/converter_wrapper' require 'temporal/workflow/query_result' describe Temporal::Connection::GRPC do let(:identity) { 'my-identity' } + let(:converter) do + Temporal::ConverterWrapper.new( + Temporal::Configuration::DEFAULT_CONVERTER, + Temporal::Configuration::DEFAULT_PAYLOAD_CODEC + ) + end let(:binary_checksum) { 'v1.0.0' } let(:grpc_stub) { double('grpc stub') } let(:grpc_operator_stub) { double('grpc stub') } @@ -10,12 +17,9 @@ let(:workflow_id) { SecureRandom.uuid } let(:run_id) { SecureRandom.uuid } let(:now) { Time.now} + let(:options) { {} } - subject { Temporal::Connection::GRPC.new(nil, nil, identity, :this_channel_is_insecure) } - - class TestDeserializer - extend Temporal::Concerns::Payloads - end + subject { Temporal::Connection::GRPC.new(nil, nil, identity, :this_channel_is_insecure, converter, options) } before do allow(subject).to receive(:client).and_return(grpc_stub) @@ -62,6 +66,7 @@ class TestDeserializer execution_timeout: 1, run_timeout: 2, task_timeout: 3, + start_delay: 10, memo: {}, search_attributes: { 'foo-int-attribute' => 256, @@ -86,6 +91,7 @@ class TestDeserializer expect(request.workflow_execution_timeout.seconds).to eq(1) expect(request.workflow_run_timeout.seconds).to eq(2) expect(request.workflow_task_timeout.seconds).to eq(3) + expect(request.workflow_start_delay.seconds).to eq(10) expect(request.workflow_id_reuse_policy).to eq(:WORKFLOW_ID_REUSE_POLICY_REJECT_DUPLICATE) expect(request.search_attributes.indexed_fields).to eq({ 'foo-int-attribute' => Temporalio::Api::Common::V1::Payload.new(data: '256', metadata: { 'encoding' => 'json/plain' }), @@ -134,6 +140,7 @@ class TestDeserializer execution_timeout: 1, run_timeout: 2, task_timeout: 3, + start_delay: 10, workflow_id_reuse_policy: :allow, signal_name: 'the question', signal_input: 'what do you get if you multiply six by nine?' 
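The hunks above cover the new `start_delay` option end to end: the client specs pass it through (defaulting to `0`), the execution options accept it, and the GRPC layer asserts it lands on the request as `workflow_start_delay` for both plain starts and signal-with-start. A rough usage sketch, with a hypothetical workflow class and values:

```ruby
# Hypothetical workflow and workflow_id; start_delay is in seconds and is
# serialized onto the request's workflow_start_delay field.
Temporal.start_workflow(
  HelloWorldWorkflow,
  'some input',
  options: {
    workflow_id: 'delayed-hello-world',
    start_delay: 10
  }
)
```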
@@ -149,6 +156,7 @@ class TestDeserializer expect(request.workflow_execution_timeout.seconds).to eq(1) expect(request.workflow_run_timeout.seconds).to eq(2) expect(request.workflow_task_timeout.seconds).to eq(3) + expect(request.workflow_start_delay.seconds).to eq(10) expect(request.signal_name).to eq('the question') expect(request.signal_input.payloads[0].data).to eq('"what do you get if you multiply six by nine?"') expect(request.workflow_id_reuse_policy).to eq(:WORKFLOW_ID_REUSE_POLICY_ALLOW_DUPLICATE) @@ -535,7 +543,7 @@ class TestDeserializer expect(request.completed_type).to eq(Temporalio::Api::Enums::V1::QueryResultType.lookup( Temporalio::Api::Enums::V1::QueryResultType::QUERY_RESULT_TYPE_ANSWERED) ) - expect(request.query_result).to eq(TestDeserializer.to_query_payloads(42)) + expect(request.query_result).to eq(converter.to_query_payloads(42)) expect(request.error_message).to eq('') end end @@ -606,7 +614,7 @@ class TestDeserializer expect(request.query_results['1'].result_type).to eq(Temporalio::Api::Enums::V1::QueryResultType.lookup( Temporalio::Api::Enums::V1::QueryResultType::QUERY_RESULT_TYPE_ANSWERED) ) - expect(request.query_results['1'].answer).to eq(TestDeserializer.to_query_payloads(42)) + expect(request.query_results['1'].answer).to eq(converter.to_query_payloads(42)) expect(request.query_results['2']).to be_a(Temporalio::Api::Query::V1::WorkflowQueryResult) expect(request.query_results['2'].result_type).to eq(Temporalio::Api::Enums::V1::QueryResultType.lookup( @@ -646,6 +654,49 @@ class TestDeserializer end end + describe '#poll_activity_task_queue' do + let(:task_queue) { 'test-task-queue' } + let(:temporal_response) do + Temporalio::Api::WorkflowService::V1::PollActivityTaskQueueResponse.new + end + let(:poll_request) do + instance_double( + "GRPC::ActiveCall::Operation", + execute: temporal_response + ) + end + + before do + allow(grpc_stub).to receive(:poll_activity_task_queue).with(anything, return_op: true).and_return(poll_request) + end + + it 'makes an API request' do + subject.poll_activity_task_queue(namespace: namespace, task_queue: task_queue) + + expect(grpc_stub).to have_received(:poll_activity_task_queue) do |request| + expect(request).to be_an_instance_of(Temporalio::Api::WorkflowService::V1::PollActivityTaskQueueRequest) + expect(request.namespace).to eq(namespace) + expect(request.task_queue.name).to eq(task_queue) + expect(request.identity).to eq(identity) + expect(request.task_queue_metadata).to be_nil + end + end + + it 'makes an API request with max_tasks_per_second in the metadata' do + subject.poll_activity_task_queue(namespace: namespace, task_queue: task_queue, max_tasks_per_second: 10) + + expect(grpc_stub).to have_received(:poll_activity_task_queue) do |request| + expect(request).to be_an_instance_of(Temporalio::Api::WorkflowService::V1::PollActivityTaskQueueRequest) + expect(request.namespace).to eq(namespace) + expect(request.task_queue.name).to eq(task_queue) + expect(request.identity).to eq(identity) + expect(request.task_queue_metadata).to_not be_nil + expect(request.task_queue_metadata.max_tasks_per_second).to_not be_nil + expect(request.task_queue_metadata.max_tasks_per_second.value).to eq(10) + end + end + end + describe '#add_custom_search_attributes' do it 'calls GRPC service with supplied arguments' do allow(grpc_operator_stub).to receive(:add_search_attributes) @@ -830,4 +881,100 @@ class TestDeserializer end end end + + describe "passing in options" do + before do + allow(subject).to receive(:client).and_call_original + end + + 
context "when keepalive_time_ms is passed" do + let(:options) { { keepalive_time_ms: 30_000 } } + + it "passes the option to the channel args" do + expect(Temporalio::Api::WorkflowService::V1::WorkflowService::Stub).to receive(:new).with( + ":", + :this_channel_is_insecure, + timeout: 60, + interceptors: [instance_of(Temporal::Connection::ClientNameVersionInterceptor)], + channel_args: { + "grpc.keepalive_time_ms" => 30_000 + } + ) + subject.send(:client) + end + end + + context "when passing retry_connection" do + let(:options) { { retry_connection: true } } + + it "passes the option to the channel args" do + expect(Temporalio::Api::WorkflowService::V1::WorkflowService::Stub).to receive(:new).with( + ":", + :this_channel_is_insecure, + timeout: 60, + interceptors: [instance_of(Temporal::Connection::ClientNameVersionInterceptor)], + channel_args: { + "grpc.enable_retries" => 1, + "grpc.service_config" => { + methodConfig: [ + { + name: [ + { + service: "temporal.api.workflowservice.v1.WorkflowService", + } + ], + retryPolicy: { + retryableStatusCodes: ["UNAVAILABLE"], + maxAttempts: 3, + initialBackoff: "0.1s", + backoffMultiplier: 2.0, + maxBackoff: "0.3s" + } + } + ] + }.to_json + } + ) + subject.send(:client) + end + end + + context "when passing a custom retry policy" do + let(:options) { { retry_policy: retry_policy } } + let(:retry_policy) do + { + retryableStatusCodes: ["UNAVAILABLE", "INTERNAL"], + maxAttempts: 1, + initialBackoff: "0.2s", + backoffMultiplier: 1.0, + maxBackoff: "0.5s" + } + end + + it "passes the policy to the channel args" do + expect(Temporalio::Api::WorkflowService::V1::WorkflowService::Stub).to receive(:new).with( + ":", + :this_channel_is_insecure, + timeout: 60, + interceptors: [instance_of(Temporal::Connection::ClientNameVersionInterceptor)], + channel_args: { + "grpc.enable_retries" => 1, + "grpc.service_config" => { + methodConfig: [ + { + name: [ + { + service: "temporal.api.workflowservice.v1.WorkflowService", + } + ], + retryPolicy: retry_policy + } + ] + }.to_json + } + ) + subject.send(:client) + end + end + end end diff --git a/spec/unit/lib/temporal/metadata_spec.rb b/spec/unit/lib/temporal/metadata_spec.rb index cd21fb76..b3f02955 100644 --- a/spec/unit/lib/temporal/metadata_spec.rb +++ b/spec/unit/lib/temporal/metadata_spec.rb @@ -1,8 +1,15 @@ require 'temporal/metadata' describe Temporal::Metadata do + let(:converter) do + Temporal::ConverterWrapper.new( + Temporal::Configuration::DEFAULT_CONVERTER, + Temporal::Configuration::DEFAULT_PAYLOAD_CODEC + ) + end + describe '.generate_activity_metadata' do - subject { described_class.generate_activity_metadata(data, namespace) } + subject { described_class.generate_activity_metadata(data, namespace, converter) } let(:data) { Fabricate(:api_activity_task) } let(:namespace) { 'test-namespace' } @@ -46,7 +53,7 @@ end context '.generate_workflow_metadata' do - subject { described_class.generate_workflow_metadata(event, task_metadata) } + subject { described_class.generate_workflow_metadata(event, task_metadata, converter) } let(:event) { Temporal::Workflow::History::Event.new(Fabricate(:api_workflow_execution_started_event)) } let(:task_metadata) { Fabricate(:workflow_task_metadata) } let(:namespace) { nil } diff --git a/spec/unit/lib/temporal/testing/local_workflow_context_spec.rb b/spec/unit/lib/temporal/testing/local_workflow_context_spec.rb index 66c68769..75600fb3 100644 --- a/spec/unit/lib/temporal/testing/local_workflow_context_spec.rb +++ 
b/spec/unit/lib/temporal/testing/local_workflow_context_spec.rb @@ -9,6 +9,7 @@ let(:run_id) { 'run_id_1' } let(:execution) { Temporal::Testing::WorkflowExecution.new } let(:task_queue) { 'my_test_queue' } + let(:config) { Temporal::Configuration.new } let(:workflow_context) do Temporal::Testing::LocalWorkflowContext.new( execution, @@ -27,13 +28,14 @@ headers: {}, run_started_at: Time.now, memo: {}, - ) + ), + config ) end let(:async_token) do # Generate the async token Temporal::Activity::AsyncToken.encode( - Temporal.configuration.namespace, + config.namespace, 1, # activity ID starts at 1 for each workflow workflow_id, run_id diff --git a/spec/unit/lib/temporal/testing/replay_histories/do_nothing.json b/spec/unit/lib/temporal/testing/replay_histories/do_nothing.json new file mode 100644 index 00000000..45a0ef20 --- /dev/null +++ b/spec/unit/lib/temporal/testing/replay_histories/do_nothing.json @@ -0,0 +1,103 @@ +{ + "events":[ + { + "eventId":"1", + "eventTime":"2024-05-27T18:53:53.483530640Z", + "eventType":"EVENT_TYPE_WORKFLOW_EXECUTION_STARTED", + "taskId":"27263213", + "workflowExecutionStartedEventAttributes":{ + "workflowType":{ + "name":"TestReplayWorkflow" + }, + "taskQueue":{ + "name":"general", + "kind":"TASK_QUEUE_KIND_NORMAL" + }, + "input":{ + "payloads":[ + { + "metadata":{ + "encoding":"anNvbi9wbGFpbg==" + }, + "data":"eyI6cmVzdWx0Ijoic3VjY2VzcyJ9Cg==" + } + ] + }, + "workflowExecutionTimeout":"30s", + "workflowRunTimeout":"30s", + "workflowTaskTimeout":"10s", + "originalExecutionRunId":"b3711f7b-2693-4c1b-ab67-24e73f80bdcf", + "identity":"123@test", + "firstExecutionRunId":"b3711f7b-2693-4c1b-ab67-24e73f80bdcf", + "attempt":1, + "workflowExecutionExpirationTime":"2024-05-27T18:54:23.483Z", + "firstWorkflowTaskBackoff":"0s", + "memo":{}, + "searchAttributes":{}, + "header":{} + } + }, + { + "eventId":"2", + "eventTime":"2024-05-27T18:53:53.483621296Z", + "eventType":"EVENT_TYPE_WORKFLOW_TASK_SCHEDULED", + "taskId":"27263215", + "workflowTaskScheduledEventAttributes":{ + "taskQueue":{ + "name":"general", + "kind":"TASK_QUEUE_KIND_NORMAL" + }, + "startToCloseTimeout":"10s", + "attempt":1 + } + }, + { + "eventId":"3", + "eventTime":"2024-05-27T18:53:53.504351823Z", + "eventType":"EVENT_TYPE_WORKFLOW_TASK_STARTED", + "taskId":"27263220", + "workflowTaskStartedEventAttributes":{ + "scheduledEventId":"2", + "identity":"123@test", + "requestId":"195003c8-4c89-486b-8ae8-85cb209dc8b9", + "historySizeBytes":"395" + } + }, + { + "eventId":"4", + "eventTime":"2024-05-27T18:53:53.620416193Z", + "eventType":"EVENT_TYPE_WORKFLOW_TASK_COMPLETED", + "taskId":"27263224", + "workflowTaskCompletedEventAttributes":{ + "scheduledEventId":"2", + "startedEventId":"3", + "identity":"123@test", + "binaryChecksum":"d1feac6b4ac2fb57a304ddf1419efd6e06088e41", + "sdkMetadata":{ + "langUsedFlags":[ + 2 + ] + } + } + }, + { + "eventId":"5", + "eventTime":"2024-05-27T18:53:55.790974964Z", + "eventType":"EVENT_TYPE_WORKFLOW_EXECUTION_COMPLETED", + "taskId":"27263260", + "workflowExecutionCompletedEventAttributes":{ + "result":{ + "payloads":[ + { + "metadata":{ + "encoding":"anNvbi9wbGFpbg==" + }, + "data":"ImRvbmUiCg==" + } + ] + }, + "workflowTaskCompletedEventId":"4" + } + } + ] +} \ No newline at end of file diff --git a/spec/unit/lib/temporal/testing/replay_tester_spec.rb b/spec/unit/lib/temporal/testing/replay_tester_spec.rb new file mode 100644 index 00000000..861a5813 --- /dev/null +++ b/spec/unit/lib/temporal/testing/replay_tester_spec.rb @@ -0,0 +1,142 @@ +require "base64" +require "json" 
+require "temporal/testing/replay_tester" +require "temporal/workflow" +require "temporal/workflow/history" + +describe Temporal::Testing::ReplayTester do + class TestReplayActivity < Temporal::Activity + def execute + raise "should never run" + end + end + + class TestReplayWorkflow < Temporal::Workflow + def execute(run_activity: false, run_sleep: false, result: "success") + TestReplayActivity.execute! if run_activity + + workflow.sleep(1) if run_sleep + + case result + when "success" + "done" + when "continue_as_new" + workflow.continue_as_new + nil + when "await" + # wait forever + workflow.wait_until { false } + when "fail" + raise "failed" + end + end + end + + let(:replay_tester) { Temporal::Testing::ReplayTester.new } + let(:do_nothing_json) do + File.read( + "spec/unit/lib/temporal/testing/replay_histories/do_nothing.json" + ) + end + + let(:do_nothing) do + Temporal::Workflow::History::Serialization.from_json(do_nothing_json) + end + + it "replay do nothing successful" do + replay_tester.replay_history( + TestReplayWorkflow, + do_nothing + ) + end + + def remove_first_history_event(history) + history.events.shift + history + end + + it "replay missing start workflow execution event" do + replay_tester.replay_history( + TestReplayWorkflow, + remove_first_history_event(do_nothing) + ) + raise "Expected error to raise" + rescue Temporal::Testing::ReplayError => e + expect(e.message).to(eq("History does not start with workflow_execution_started event")) + end + + def set_workflow_args_in_history(json_args) + obj = JSON.load(do_nothing_json) + obj["events"][0]["workflowExecutionStartedEventAttributes"]["input"]["payloads"][0]["data"] = Base64.strict_encode64( + json_args + ) + new_json = JSON.generate(obj) + Temporal::Workflow::History::Serialization.from_json(new_json) + end + + it "replay extra activity" do + # The linked history will cause an error because it will cause an activity run even though + # there isn't one in the history. + + replay_tester.replay_history( + TestReplayWorkflow, + set_workflow_args_in_history("{\":run_activity\":true}") + ) + raise "Expected error to raise" + rescue Temporal::Testing::ReplayError => e + expect(e.message).to(eq("Workflow code failed to replay successfully against history")) + # Ensure backtrace was overwritten + expect(e.backtrace.first).to(start_with("Fiber backtraces:")) + expect(e.cause).to(be_a(Temporal::NonDeterministicWorkflowError)) + expect(e.cause.message).to( + eq( + "Unexpected command. The replaying code is issuing: activity (5), but the history of previous executions " \ + "recorded: complete_workflow (5). Likely, either you have made a version-unsafe change to your workflow or " \ + "have non-deterministic behavior in your workflow. See https://docs.temporal.io/docs/java/versioning/#introduction-to-versioning." + ) + ) + end + + it "replay continues as new when history completed" do + # The linked history will cause an error because it will cause the workflow to continue + # as new on replay when in the history, it completed successfully. + + replay_tester.replay_history( + TestReplayWorkflow, + set_workflow_args_in_history("{\":result\":\"continue_as_new\"}") + ) + raise "Expected error to raise" + rescue Temporal::Testing::ReplayError => e + expect(e.message).to(eq("Workflow code failed to replay successfully against history")) + expect(e.cause).to(be_a(Temporal::NonDeterministicWorkflowError)) + expect(e.cause.message).to( + eq( + "Unexpected command. 
The replaying code is issuing: continue_as_new_workflow (5), but the history of " \ + "previous executions recorded: complete_workflow (5). Likely, either you have made a version-unsafe " \ + "change to your workflow or have non-deterministic behavior in your workflow. " \ + "See https://docs.temporal.io/docs/java/versioning/#introduction-to-versioning." + ) + ) + end + + it "raises when replay keeps the workflow running but the history completed" do + # These workflow args make the workflow code wait indefinitely on replay, + # while the history records the workflow completing successfully. + + replay_tester.replay_history( + TestReplayWorkflow, + set_workflow_args_in_history("{\":result\":\"await\"}") + ) + raise "Expected error to be raised" + rescue Temporal::Testing::ReplayError => e + expect(e.message).to(eq("Workflow code failed to replay successfully against history")) + expect(e.cause).to(be_a(Temporal::NonDeterministicWorkflowError)) + expect(e.cause.message).to( + eq( + "A command in the history of previous executions, complete_workflow (5), was not scheduled upon replay. " \ + "Likely, either you have made a version-unsafe change to your workflow or have non-deterministic behavior " \ + "in your workflow. See https://docs.temporal.io/docs/java/versioning/#introduction-to-versioning." + ) + ) + end +end diff --git a/spec/unit/lib/temporal/worker_spec.rb b/spec/unit/lib/temporal/worker_spec.rb index 685e07a0..2c379567 100644 --- a/spec/unit/lib/temporal/worker_spec.rb +++ b/spec/unit/lib/temporal/worker_spec.rb @@ -293,7 +293,8 @@ def start_and_stop(worker) config, [], thread_pool_size: 20, - poll_retry_seconds: 0 + poll_retry_seconds: 0, + max_tasks_per_second: 0 ) .and_return(activity_poller_1) @@ -306,7 +307,8 @@ def start_and_stop(worker) config, [], thread_pool_size: 20, - poll_retry_seconds: 0 + poll_retry_seconds: 0, + max_tasks_per_second: 0 ) .and_return(activity_poller_2) @@ -333,7 +335,7 @@ def start_and_stop(worker) an_instance_of(Temporal::ExecutableLookup), an_instance_of(Temporal::Configuration), [], - {thread_pool_size: 10, poll_retry_seconds: 0} + {thread_pool_size: 10, poll_retry_seconds: 0, max_tasks_per_second: 0} ) .and_return(activity_poller) @@ -342,7 +344,7 @@ def start_and_stop(worker) .to receive(:new) .and_return(workflow_poller) - worker = Temporal::Worker.new(activity_thread_pool_size: 10) + worker = Temporal::Worker.new(config, activity_thread_pool_size: 10) worker.register_workflow(TestWorkerWorkflow) worker.register_activity(TestWorkerActivity) @@ -387,7 +389,7 @@ def start_and_stop(worker) ) .and_return(workflow_poller) - worker = Temporal::Worker.new(binary_checksum: binary_checksum) + worker = Temporal::Worker.new(config, binary_checksum: binary_checksum) worker.register_workflow(TestWorkerWorkflow) worker.register_activity(TestWorkerActivity) @@ -406,11 +408,11 @@ def start_and_stop(worker) an_instance_of(Temporal::ExecutableLookup), an_instance_of(Temporal::Configuration), [], - {thread_pool_size: 20, poll_retry_seconds: 10} + {thread_pool_size: 20, poll_retry_seconds: 10, max_tasks_per_second: 0} ) .and_return(activity_poller) - worker = Temporal::Worker.new(activity_poll_retry_seconds: 10) + worker = Temporal::Worker.new(config, activity_poll_retry_seconds: 10) worker.register_activity(TestWorkerActivity) start_and_stop(worker) @@ -433,7 +435,7 @@ def start_and_stop(worker) ) .and_return(workflow_poller) - worker = Temporal::Worker.new(workflow_poll_retry_seconds: 10) + worker = Temporal::Worker.new(config, workflow_poll_retry_seconds: 10)
worker.register_workflow(TestWorkerWorkflow) start_and_stop(worker) @@ -441,6 +443,28 @@ def start_and_stop(worker) expect(workflow_poller).to have_received(:start) end + it 'can have an activity poller that registers a task rate limit' do + activity_poller = instance_double(Temporal::Activity::Poller, start: nil, stop_polling: nil, cancel_pending_requests: nil, wait: nil) + expect(Temporal::Activity::Poller) + .to receive(:new) + .with( + 'default-namespace', + 'default-task-queue', + an_instance_of(Temporal::ExecutableLookup), + an_instance_of(Temporal::Configuration), + [], + {thread_pool_size: 20, poll_retry_seconds: 0, max_tasks_per_second: 5} + ) + .and_return(activity_poller) + + worker = Temporal::Worker.new(config, activity_max_tasks_per_second: 5) + worker.register_activity(TestWorkerActivity) + + start_and_stop(worker) + + expect(activity_poller).to have_received(:start) + end + context 'when middleware is configured' do let(:entry_1) { instance_double(Temporal::Middleware::Entry) } let(:entry_2) { instance_double(Temporal::Middleware::Entry) } @@ -492,7 +516,8 @@ def start_and_stop(worker) config, [entry_2], thread_pool_size: 20, - poll_retry_seconds: 0 + poll_retry_seconds: 0, + max_tasks_per_second: 0 ) .and_return(activity_poller_1) diff --git a/spec/unit/lib/temporal/workflow/context_spec.rb b/spec/unit/lib/temporal/workflow/context_spec.rb index 6dddf3b2..05a61282 100644 --- a/spec/unit/lib/temporal/workflow/context_spec.rb +++ b/spec/unit/lib/temporal/workflow/context_spec.rb @@ -27,7 +27,7 @@ def execute end let(:metadata_hash) { Fabricate(:workflow_metadata).to_h } let(:metadata) { Temporal::Metadata::Workflow.new(**metadata_hash) } - let(:config) { Temporal.configuration } + let(:config) { Temporal::Configuration.new } let(:workflow_context) do Temporal::Workflow::Context.new( diff --git a/spec/unit/lib/temporal/workflow/errors_spec.rb b/spec/unit/lib/temporal/workflow/errors_spec.rb index 53d86b68..bce9d477 100644 --- a/spec/unit/lib/temporal/workflow/errors_spec.rb +++ b/spec/unit/lib/temporal/workflow/errors_spec.rb @@ -27,6 +27,13 @@ def initialize(foo, bar) end describe Temporal::Workflow::Errors do + let(:converter) do + Temporal::ConverterWrapper.new( + Temporal::Configuration::DEFAULT_CONVERTER, + Temporal::Configuration::DEFAULT_PAYLOAD_CODEC + ) + end + describe '.generate_error' do it "instantiates properly when the client has the error" do message = "An error message" @@ -38,7 +45,7 @@ def initialize(foo, bar) error_class: SomeError.to_s ) - e = Temporal::Workflow::Errors.generate_error(failure) + e = Temporal::Workflow::Errors.generate_error(failure, converter) expect(e).to be_a(SomeError) expect(e.message).to eq(message) expect(e.backtrace).to eq(stack_trace) @@ -47,9 +54,9 @@ def initialize(foo, bar) it 'correctly deserializes a complex error' do error = MyFancyError.new('foo', 'bar') - failure = Temporal::Connection::Serializer::Failure.new(error, serialize_whole_error: true).to_proto + failure = Temporal::Connection::Serializer::Failure.new(error, converter, serialize_whole_error: true).to_proto - e = Temporal::Workflow::Errors.generate_error(failure) + e = Temporal::Workflow::Errors.generate_error(failure, converter) expect(e).to be_a(MyFancyError) expect(e.foo).to eq('foo') expect(e.bar).to eq('bar') @@ -68,7 +75,7 @@ def initialize(foo, bar) error_class: 'NonexistentError', ) - e = Temporal::Workflow::Errors.generate_error(failure) + e = Temporal::Workflow::Errors.generate_error(failure, converter) expect(e).to be_a(StandardError) 
expect(e.message).to eq("NonexistentError: An error message") expect(e.backtrace).to eq(stack_trace) @@ -94,7 +101,7 @@ def initialize(foo, bar) error_class: ErrorWithTwoArgs.to_s, ) - e = Temporal::Workflow::Errors.generate_error(failure) + e = Temporal::Workflow::Errors.generate_error(failure, converter) expect(e).to be_a(StandardError) expect(e.message).to eq("ErrorWithTwoArgs: An error message") expect(e.backtrace).to eq(stack_trace) @@ -102,7 +109,7 @@ def initialize(foo, bar) .to have_received(:error) .with( "Could not instantiate original error. Defaulting to StandardError. "\ - "Make sure the worker running your activities is setting Temporal.configuration.use_error_serialization_v2. "\ + "Make sure the worker running your activities is configured with use_error_serialization_v2. "\ "If so, make sure the original error serialized by searching your logs for 'unserializable_error'. "\ "If not, you're using legacy serialization, and it's likely that "\ "your error's initializer takes something other than exactly one positional argument.", @@ -127,7 +134,7 @@ def initialize(foo, bar) error_class: ErrorThatRaisesInInitialize.to_s, ) - e = Temporal::Workflow::Errors.generate_error(failure) + e = Temporal::Workflow::Errors.generate_error(failure, converter) expect(e).to be_a(StandardError) expect(e.message).to eq("ErrorThatRaisesInInitialize: An error message") expect(e.backtrace).to eq(stack_trace) @@ -135,7 +142,7 @@ def initialize(foo, bar) .to have_received(:error) .with( "Could not instantiate original error. Defaulting to StandardError. "\ - "Make sure the worker running your activities is setting Temporal.configuration.use_error_serialization_v2. "\ + "Make sure the worker running your activities is configured with use_error_serialization_v2. "\ "If so, make sure the original error serialized by searching your logs for 'unserializable_error'. 
"\ "If not, you're using legacy serialization, and it's likely that "\ "your error's initializer takes something other than exactly one positional argument.", diff --git a/spec/unit/lib/temporal/workflow/execution_info_spec.rb b/spec/unit/lib/temporal/workflow/execution_info_spec.rb index ad3368f2..6bef7b2d 100644 --- a/spec/unit/lib/temporal/workflow/execution_info_spec.rb +++ b/spec/unit/lib/temporal/workflow/execution_info_spec.rb @@ -1,7 +1,13 @@ require 'temporal/workflow/execution_info' describe Temporal::Workflow::ExecutionInfo do - subject { described_class.generate_from(api_info) } + subject { described_class.generate_from(api_info, converter) } + let(:converter) do + Temporal::ConverterWrapper.new( + Temporal::Configuration::DEFAULT_CONVERTER, + Temporal::Configuration::DEFAULT_PAYLOAD_CODEC + ) + end let(:api_info) { Fabricate(:api_workflow_execution_info, workflow: 'TestWorkflow', workflow_id: '') } describe '.generate_for' do @@ -25,7 +31,7 @@ it 'deserializes if search_attributes is nil' do api_info.search_attributes = nil - result = described_class.generate_from(api_info) + result = described_class.generate_from(api_info, converter) expect(result.search_attributes).to eq({}) end end diff --git a/spec/unit/lib/temporal/workflow/executor_spec.rb b/spec/unit/lib/temporal/workflow/executor_spec.rb index ee567a8b..714dc72b 100644 --- a/spec/unit/lib/temporal/workflow/executor_spec.rb +++ b/spec/unit/lib/temporal/workflow/executor_spec.rb @@ -134,9 +134,9 @@ def execute let(:query_2_error) { StandardError.new('Test query failure') } let(:queries) do { - '1' => Temporal::Workflow::TaskProcessor::Query.new(Fabricate(:api_workflow_query, query_type: 'success')), - '2' => Temporal::Workflow::TaskProcessor::Query.new(Fabricate(:api_workflow_query, query_type: 'failure')), - '3' => Temporal::Workflow::TaskProcessor::Query.new(Fabricate(:api_workflow_query, query_type: 'unknown')) + '1' => Temporal::Workflow::TaskProcessor::Query.new(Fabricate(:api_workflow_query, query_type: 'success'), config.converter), + '2' => Temporal::Workflow::TaskProcessor::Query.new(Fabricate(:api_workflow_query, query_type: 'failure'), config.converter), + '3' => Temporal::Workflow::TaskProcessor::Query.new(Fabricate(:api_workflow_query, query_type: 'unknown'), config.converter) } end diff --git a/spec/unit/lib/temporal/workflow/poller_spec.rb b/spec/unit/lib/temporal/workflow/poller_spec.rb index e8d5692b..020e2e91 100644 --- a/spec/unit/lib/temporal/workflow/poller_spec.rb +++ b/spec/unit/lib/temporal/workflow/poller_spec.rb @@ -113,7 +113,7 @@ def poll(task, times: 1) expect(Temporal::Workflow::TaskProcessor) .to have_received(:new) - .with(task, namespace, lookup, empty_middleware_chain, empty_middleware_chain, config, binary_checksum) + .with(task, task_queue, namespace, lookup, empty_middleware_chain, empty_middleware_chain, config, binary_checksum) expect(task_processor).to have_received(:process) end @@ -151,7 +151,7 @@ def call(_); end expect(Temporal::Middleware::Chain).to have_received(:new).with(workflow_middleware) expect(Temporal::Workflow::TaskProcessor) .to have_received(:new) - .with(task, namespace, lookup, middleware_chain, workflow_middleware_chain, config, binary_checksum) + .with(task, task_queue, namespace, lookup, middleware_chain, workflow_middleware_chain, config, binary_checksum) end end end diff --git a/spec/unit/lib/temporal/workflow/state_manager_spec.rb b/spec/unit/lib/temporal/workflow/state_manager_spec.rb index 50aa74d3..8aa8f9aa 100644 --- 
a/spec/unit/lib/temporal/workflow/state_manager_spec.rb +++ b/spec/unit/lib/temporal/workflow/state_manager_spec.rb @@ -59,7 +59,7 @@ class MyWorkflow < Temporal::Workflow; end it 'dispatcher invoked for start' do expect(dispatcher).to receive(:dispatch).with( - Temporal::Workflow::History::EventTarget.workflow, 'started', instance_of(Array) + Temporal::Workflow::History::EventTarget.start_workflow, 'started', instance_of(Array) ).once state_manager.apply(history.next_window) end @@ -88,7 +88,7 @@ class MyWorkflow < Temporal::Workflow; end ] ).once.ordered expect(dispatcher).to receive(:dispatch).with( - Temporal::Workflow::History::EventTarget.workflow, 'started', instance_of(Array) + Temporal::Workflow::History::EventTarget.start_workflow, 'started', instance_of(Array) ).once.ordered state_manager.apply(history.next_window) @@ -119,7 +119,7 @@ class MyWorkflow < Temporal::Workflow; end allow(connection).to receive(:get_system_info).and_return(system_info) expect(dispatcher).to receive(:dispatch).with( - Temporal::Workflow::History::EventTarget.workflow, 'started', instance_of(Array) + Temporal::Workflow::History::EventTarget.start_workflow, 'started', instance_of(Array) ).once.ordered expect(dispatcher).to receive(:dispatch).with( Temporal::Workflow::Signal.new(signal_entry.workflow_execution_signaled_event_attributes.signal_name), @@ -140,7 +140,7 @@ class MyWorkflow < Temporal::Workflow; end allow(connection).to receive(:get_system_info).and_return(system_info) expect(dispatcher).to receive(:dispatch).with( - Temporal::Workflow::History::EventTarget.workflow, 'started', instance_of(Array) + Temporal::Workflow::History::EventTarget.start_workflow, 'started', instance_of(Array) ).once.ordered expect(dispatcher).to receive(:dispatch).with( Temporal::Workflow::Signal.new(signal_entry.workflow_execution_signaled_event_attributes.signal_name), @@ -173,7 +173,7 @@ class MyWorkflow < Temporal::Workflow; end ] ).once.ordered expect(dispatcher).to receive(:dispatch).with( - Temporal::Workflow::History::EventTarget.workflow, 'started', instance_of(Array) + Temporal::Workflow::History::EventTarget.start_workflow, 'started', instance_of(Array) ).once.ordered state_manager.apply(history.next_window) @@ -204,7 +204,7 @@ class MyWorkflow < Temporal::Workflow; end it 'marker handled first' do activity_target = nil - dispatcher.register_handler(Temporal::Workflow::History::EventTarget.workflow, 'started') do + dispatcher.register_handler(Temporal::Workflow::History::EventTarget.start_workflow, 'started') do activity_target, = state_manager.schedule( Temporal::Workflow::Command::ScheduleActivity.new( activity_id: activity_entry.event_id, @@ -229,7 +229,7 @@ class MyWorkflow < Temporal::Workflow; end state_manager.schedule( Temporal::Workflow::Command::RecordMarker.new( name: marker_entry.marker_recorded_event_attributes.marker_name, - details: to_payload_map({}) + details: TEST_CONVERTER.to_payload_map({}) ) ) @@ -249,7 +249,7 @@ def test_order(signal_first) activity_target = nil signaled = false - dispatcher.register_handler(Temporal::Workflow::History::EventTarget.workflow, 'started') do + dispatcher.register_handler(Temporal::Workflow::History::EventTarget.start_workflow, 'started') do activity_target, = state_manager.schedule( Temporal::Workflow::Command::ScheduleActivity.new( activity_id: activity_entry.event_id, @@ -469,6 +469,89 @@ def test_order_one_task(*expected_sdk_flags) end end + describe "#final_commands" do + let(:dispatcher) { Temporal::Workflow::Dispatcher.new } + let(:state_manager) do 
+ Temporal::Workflow::StateManager.new(dispatcher, config) + end + + let(:config) { Temporal::Configuration.new } + + it "preserves canceled activity or timer commands when not completed" do + schedule_activity_command = Temporal::Workflow::Command::ScheduleActivity.new + state_manager.schedule(schedule_activity_command) + + start_timer_command = Temporal::Workflow::Command::StartTimer.new + state_manager.schedule(start_timer_command) + + cancel_activity_command = Temporal::Workflow::Command::RequestActivityCancellation.new( + activity_id: schedule_activity_command.activity_id + ) + state_manager.schedule(cancel_activity_command) + + cancel_timer_command = Temporal::Workflow::Command::CancelTimer.new( + timer_id: start_timer_command.timer_id + ) + state_manager.schedule(cancel_timer_command) + + expect(state_manager.final_commands).to( + eq( + [ + [1, schedule_activity_command], + [2, start_timer_command], + [3, cancel_activity_command], + [4, cancel_timer_command] + ] + ) + ) + end + + it "drops the cancel activity command when the activity completed" do + schedule_activity_command = Temporal::Workflow::Command::ScheduleActivity.new + state_manager.schedule(schedule_activity_command) + + cancel_command = Temporal::Workflow::Command::RequestActivityCancellation.new( + activity_id: schedule_activity_command.activity_id + ) + state_manager.schedule(cancel_command) + + # Fake completing the activity + window = Temporal::Workflow::History::Window.new + # The fabricated completion event assumes the activity was scheduled two events earlier, so offset the event id by +2 + window.add( + Temporal::Workflow::History::Event.new( + Fabricate(:api_activity_task_completed_event, event_id: schedule_activity_command.activity_id + 2) + ) + ) + state_manager.apply(window) + + expect(state_manager.final_commands).to(eq([[1, schedule_activity_command]])) + end + + it "drops the cancel timer command when the timer fired" do + start_timer_command = Temporal::Workflow::Command::StartTimer.new + state_manager.schedule(start_timer_command) + + cancel_command = Temporal::Workflow::Command::CancelTimer.new( + timer_id: start_timer_command.timer_id + ) + state_manager.schedule(cancel_command) + + # Fake firing the timer + window = Temporal::Workflow::History::Window.new + # The fabricated timer fired event assumes the timer was started four events earlier, so offset the event id by +4 + window.add( + Temporal::Workflow::History::Event.new( + Fabricate(:api_timer_fired_event, event_id: start_timer_command.timer_id + 4) + ) + ) + state_manager.apply(window) + + expect(state_manager.final_commands).to(eq([[1, start_timer_command]])) + end + end + + + describe '#search_attributes' do + let(:initial_search_attributes) do + { diff --git a/spec/unit/lib/temporal/workflow/task_processor_spec.rb b/spec/unit/lib/temporal/workflow/task_processor_spec.rb index 33d5506f..6ad3c12c 100644 --- a/spec/unit/lib/temporal/workflow/task_processor_spec.rb +++ b/spec/unit/lib/temporal/workflow/task_processor_spec.rb @@ -5,10 +5,11 @@ describe Temporal::Workflow::TaskProcessor do subject do - described_class.new(task, namespace, lookup, middleware_chain, workflow_middleware_chain, config, binary_checksum) + described_class.new(task, task_queue, namespace, lookup, middleware_chain, workflow_middleware_chain, config, binary_checksum) end let(:namespace) { 'test-namespace' } + let(:task_queue) { 'test-queue' } let(:lookup) { instance_double('Temporal::ExecutableLookup', find: nil) } let(:query) { nil } let(:queries) { nil } @@ -73,8 +74,10 @@ .to have_received(:increment) .with(
Temporal::MetricKeys::WORKFLOW_TASK_EXECUTION_FAILED, - workflow: workflow_name, - namespace: namespace + hash_including({ + workflow: workflow_name, + namespace: namespace + }) ) end end @@ -203,8 +206,10 @@ .with( Temporal::MetricKeys::WORKFLOW_TASK_QUEUE_TIME, an_instance_of(Integer), - workflow: workflow_name, - namespace: namespace + hash_including({ + workflow: workflow_name, + namespace: namespace + }) ) end @@ -216,8 +221,10 @@ .with( Temporal::MetricKeys::WORKFLOW_TASK_LATENCY, an_instance_of(Integer), - workflow: workflow_name, - namespace: namespace + hash_including({ + workflow: workflow_name, + namespace: namespace + }) ) end end @@ -251,8 +258,10 @@ .to have_received(:increment) .with( Temporal::MetricKeys::WORKFLOW_TASK_EXECUTION_FAILED, - workflow: workflow_name, - namespace: namespace + hash_including({ + workflow: workflow_name, + namespace: namespace + }) ) end end @@ -312,8 +321,10 @@ .with( Temporal::MetricKeys::WORKFLOW_TASK_QUEUE_TIME, an_instance_of(Integer), - workflow: workflow_name, - namespace: namespace + hash_including({ + workflow: workflow_name, + namespace: namespace + }) ) end @@ -325,8 +336,10 @@ .with( Temporal::MetricKeys::WORKFLOW_TASK_LATENCY, an_instance_of(Integer), - workflow: workflow_name, - namespace: namespace + hash_including({ + workflow: workflow_name, + namespace: namespace + }) ) end end diff --git a/spec/unit/lib/temporal/workflow_spec.rb b/spec/unit/lib/temporal/workflow_spec.rb index b8f6af5f..fb8cc32a 100644 --- a/spec/unit/lib/temporal/workflow_spec.rb +++ b/spec/unit/lib/temporal/workflow_spec.rb @@ -1,6 +1,78 @@ require 'temporal/workflow' +require 'temporal/workflow/context' require 'shared_examples/an_executable' describe Temporal::Workflow do it_behaves_like 'an executable' + + class ArgsWorkflow < Temporal::Workflow + def execute(a) + 'args result' + end + end + + class KwargsWorkflow < Temporal::Workflow + def execute(a, b:, c:) + 'kwargs result' + end + end + + subject { described_class.new(ctx) } + let(:ctx) { instance_double('Temporal::Workflow::Context') } + + before do + allow(ctx).to receive(:completed?).and_return(true) + end + + describe '.execute_in_context' do + subject { ArgsWorkflow.new(ctx) } + + let(:input) { ['test'] } + + before do + allow(described_class).to receive(:new).and_return(subject) + end + + it 'passes the context' do + described_class.execute_in_context(ctx, input) + + expect(described_class).to have_received(:new).with(ctx) + end + + it 'calls #execute' do + expect(subject).to receive(:execute).with(*input) + + described_class.execute_in_context(ctx, input) + end + + context 'when using keyword arguments' do + subject { KwargsWorkflow.new(ctx) } + + let(:input) { ['test', { b: 'b', c: 'c' }] } + + it 'passes the context' do + described_class.execute_in_context(ctx, input) + + expect(described_class).to have_received(:new).with(ctx) + end + + it 'calls #execute' do + expect(subject).to receive(:execute).with('test', b: 'b', c: 'c') + + described_class.execute_in_context(ctx, input) + end + + it 'does not raise an ArgumentError' do + expect { + described_class.execute_in_context(ctx, input) + }.not_to raise_error + end + end + end + + describe '#execute' do + it 'is not implemented on a superclass' do + expect { subject.execute }.to raise_error(NotImplementedError) + end + end end diff --git a/spec/unit/lib/temporal_spec.rb b/spec/unit/lib/temporal_spec.rb index 47ccd73d..49e57664 100644 --- a/spec/unit/lib/temporal_spec.rb +++ b/spec/unit/lib/temporal_spec.rb @@ -67,19 +67,24 @@ it 'calls a block 
with the configuration' do expect do |block| described_class.configure(&block) - end.to yield_with_args(described_class.configuration) + end.to yield_with_args(described_class.send(:config)) end end describe '.configuration' do + before { allow(described_class).to receive(:warn) } + it 'returns Temporal::Configuration object' do expect(described_class.configuration).to be_an_instance_of(Temporal::Configuration) + expect(described_class) + .to have_received(:warn) + .with('[DEPRECATION] This method is now deprecated without a substitution') end end describe '.logger' do it 'returns preconfigured Temporal logger' do - expect(described_class.logger).to eq(described_class.configuration.logger) + expect(described_class.logger).to eq(described_class.send(:config).logger) end end diff --git a/temporal.gemspec b/temporal.gemspec index 8c51adef..3cc624ab 100644 --- a/temporal.gemspec +++ b/temporal.gemspec @@ -14,6 +14,7 @@ Gem::Specification.new do |spec| spec.require_paths = ['lib'] spec.files = Dir["{lib,rbi}/**/*.*"] + %w(temporal.gemspec Gemfile LICENSE README.md) + spec.add_dependency 'base64' spec.add_dependency 'grpc' spec.add_dependency 'oj'