Commit 74e90e4

Improve DatabricksRunNowOperator docs and document job parameter support

1 parent f4af596 commit 74e90e4

File tree

1 file changed: +5 -17 lines
  • providers/databricks/src/airflow/providers/databricks/operators

providers/databricks/src/airflow/providers/databricks/operators/databricks.py (+5 -17)
@@ -650,9 +650,9 @@ def execute_complete(self, context: dict | None, event: dict):
 
 class DatabricksRunNowOperator(BaseOperator):
     """
-    Runs an existing Spark job run to Databricks using the api/2.1/jobs/run-now API endpoint.
+    Runs an existing job in Databricks using the api/2.1/jobs/run-now API endpoint.
 
-    See: https://docs.databricks.com/dev-tools/api/latest/jobs.html#operation/JobsRunNow
+    See: https://docs.databricks.com/api/workspace/jobs_21/runnow
 
     There are two ways to instantiate this operator.
 
@@ -675,23 +675,11 @@ class DatabricksRunNowOperator(BaseOperator):
 
         job_id = 42
 
-        dbt_commands = ["dbt deps", "dbt seed", "dbt run"]
+        job_parameters = {"dry-run": "true", "oldest-time-to-consider": "1457570074236"}
 
-        notebook_params = {"dry-run": "true", "oldest-time-to-consider": "1457570074236"}
-
-        python_params = ["douglas adams", "42"]
-
-        jar_params = ["douglas adams", "42"]
-
-        spark_submit_params = ["--class", "org.apache.spark.examples.SparkPi"]
-
-        notebook_run = DatabricksRunNowOperator(
+        job_run = DatabricksRunNowOperator(
             job_id=job_id,
-            dbt_commands=dbt_commands,
-            notebook_params=notebook_params,
-            python_params=python_params,
-            jar_params=jar_params,
-            spark_submit_params=spark_submit_params,
+            job_parameters=job_parameters,
         )
 
     In the case where both the json parameter **AND** the named parameters
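For context, a minimal sketch of how the documented pattern could appear in a DAG. The dag_id, schedule, connection id, job_id, and parameter values are illustrative placeholders; job_parameters assumes the target Databricks job defines job-level parameters and that the installed provider version supports this argument (the support this commit documents).

# Illustrative sketch only: dag_id, connection id, job_id, and parameter values are placeholders.
import pendulum

from airflow import DAG
from airflow.providers.databricks.operators.databricks import DatabricksRunNowOperator

with DAG(
    dag_id="example_databricks_run_now",
    start_date=pendulum.datetime(2024, 1, 1, tz="UTC"),
    schedule=None,
    catchup=False,
):
    # Trigger an existing Databricks job by id; job_parameters are passed
    # through to the api/2.1/jobs/run-now request body.
    job_run = DatabricksRunNowOperator(
        task_id="run_existing_job",
        databricks_conn_id="databricks_default",
        job_id=42,
        job_parameters={"dry-run": "true", "oldest-time-to-consider": "1457570074236"},
    )

Unlike the task-type-specific arguments removed above (dbt_commands, notebook_params, python_params, jar_params, spark_submit_params), job_parameters targets jobs that define their parameters at the job level in the Jobs 2.1 API.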
