@@ -650,9 +650,9 @@ def execute_complete(self, context: dict | None, event: dict):
650
650
651
651
class DatabricksRunNowOperator(BaseOperator):
652
652
"""
653
- Runs an existing Spark job run to Databricks using the api/2.1/jobs/run-now API endpoint.
653
+ Runs an existing job in Databricks using the api/2.1/jobs/run-now API endpoint.
654
654
655
- See: https://docs.databricks.com/dev-tools/api/latest/jobs.html#operation/JobsRunNow
655
+ See: https://docs.databricks.com/api/workspace/jobs_21/runnow
656
656
657
657
There are two ways to instantiate this operator.
658
658
@@ -675,23 +675,11 @@ class DatabricksRunNowOperator(BaseOperator):
675
675
676
676
job_id = 42
677
677
678
- dbt_commands = ["dbt deps", "dbt seed", "dbt run"]
678
+ job_parameters = {"dry-run": "true", "oldest-time-to-consider": "1457570074236"}
679
679
680
- notebook_params = {"dry-run": "true", "oldest-time-to-consider": "1457570074236"}
681
-
682
- python_params = ["douglas adams", "42"]
683
-
684
- jar_params = ["douglas adams", "42"]
685
-
686
- spark_submit_params = ["--class", "org.apache.spark.examples.SparkPi"]
687
-
688
- notebook_run = DatabricksRunNowOperator(
680
+ job_run = DatabricksRunNowOperator(
689
681
job_id=job_id,
690
- dbt_commands=dbt_commands,
691
- notebook_params=notebook_params,
692
- python_params=python_params,
693
- jar_params=jar_params,
694
- spark_submit_params=spark_submit_params,
682
+ job_parameters=job_parameters,
695
683
)
696
684
697
685
In the case where both the json parameter **AND** the named parameters
0 commit comments