Allow for serverless tasks
Kyle Valade committed Sep 27, 2024
1 parent 3f100c4 commit 13033ed
Showing 1 changed file with 2 additions and 10 deletions.
12 changes: 2 additions & 10 deletions dbt/adapters/databricks/python_submissions.py
@@ -627,14 +627,6 @@ def check_credentials(self) -> None:
                 "workflow_job_config is required for the `workflow_job_config` submission method."
             )

-        job_cluster_config = self.parsed_model["config"].get("job_cluster_config", None)
-
-        if job_cluster_config is None and workflow_config.get("existing_cluster_id", None) is None:
-            raise ValueError(
-                """job_cluster_config or an existing_cluster_id is required for the "
-                `workflow_job_config` submission method."""
-            )
-
     def submit(self, compiled_code: str) -> None:
         workflow_spec = self.parsed_model["config"]["workflow_job_config"]
         cluster_spec = self.parsed_model["config"].get("job_cluster_config", None)
@@ -648,10 +640,10 @@ def submit(self, compiled_code: str) -> None:
     def _build_job_spec(self, workflow_spec, cluster_spec):
         workflow_spec["name"] = workflow_spec.get('name', self.default_job_name)

-        cluster_settings = {}
+        cluster_settings = {}  # Undefined cluster settings defaults to serverless in the Databricks API
         if cluster_spec is not None:
             cluster_settings["new_cluster"] = cluster_spec
-        else:
+        elif 'existing_cluster_id' in workflow_spec:
             cluster_settings['existing_cluster_id'] = workflow_spec['existing_cluster_id']

         notebook_task = {
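With the validation removed, a model that sets neither `job_cluster_config` nor `existing_cluster_id` now submits cleanly and the task runs on serverless compute. Below is a minimal standalone sketch of the new resolution order; the helper name `resolve_cluster_settings` and the sample spec values are illustrative, not part of the adapter:

```python
from typing import Optional

# Mirrors the post-change branch in _build_job_spec; illustrative only.
def resolve_cluster_settings(workflow_spec: dict, cluster_spec: Optional[dict]) -> dict:
    cluster_settings: dict = {}  # Left empty, the Databricks Jobs API falls back to serverless
    if cluster_spec is not None:
        cluster_settings["new_cluster"] = cluster_spec
    elif "existing_cluster_id" in workflow_spec:
        cluster_settings["existing_cluster_id"] = workflow_spec["existing_cluster_id"]
    return cluster_settings

# 1. job_cluster_config provided -> a new job cluster is created
print(resolve_cluster_settings({}, {"spark_version": "14.3.x-scala2.12", "num_workers": 2}))
# 2. existing_cluster_id provided -> the workflow reuses a running cluster
print(resolve_cluster_settings({"existing_cluster_id": "1234-567890-abcde123"}, None))
# 3. neither provided -> empty settings, i.e. a serverless task
#    (this case raised a ValueError before this commit)
print(resolve_cluster_settings({}, None))
```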

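For context, a dbt Python model can now opt into serverless execution by simply omitting both cluster settings. A hedged sketch follows, assuming the `workflow_job` submission method name from the adapter documentation; the file path, config keys, and values are illustrative:

```python
# models/my_serverless_model.py -- hypothetical dbt Python model
def model(dbt, session):
    dbt.config(
        submission_method="workflow_job",  # assumed method name, per adapter docs
        workflow_job_config={"name": "serverless_python_model"},
        # No job_cluster_config and no existing_cluster_id: after this commit
        # the task is submitted with empty cluster settings and the
        # Databricks API schedules it on serverless compute.
    )
    return session.createDataFrame([(1,)], ["id"])
```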