diff --git a/src/pymor/core/gather_inputs.py b/src/pymor/core/gather_inputs.py
index 6d419c99..1cc32fd3 100644
--- a/src/pymor/core/gather_inputs.py
+++ b/src/pymor/core/gather_inputs.py
@@ -277,6 +277,8 @@ def load_mfdataset(data, rule_spec):
     for f in all_files:
         logger.info(f" * {f}")
     mf_ds = xr.open_mfdataset(all_files, parallel=True, use_cftime=True, engine=engine)
+    if rule.get("time_dimname") in mf_ds.dims and "time" not in mf_ds.dims:
+        mf_ds = mf_ds.rename({rule.get("time_dimname"): "time"})
     return mf_ds
 
 
diff --git a/src/pymor/core/pipeline.py b/src/pymor/core/pipeline.py
index 38b08da7..93b858c2 100644
--- a/src/pymor/core/pipeline.py
+++ b/src/pymor/core/pipeline.py
@@ -130,7 +130,7 @@ def _run_prefect(self, data, rule_spec):
         logger.debug("Dynamically creating workflow with DaskTaskRunner...")
         cmor_name = rule_spec.get("cmor_name")
         rule_name = rule_spec.get("name", cmor_name)
-        if self._cluster is None:
+        if getattr(self, "_cluster", None) is None:
             logger.warning(
                 "No cluster assigned to this pipeline. Using local Dask cluster."
             )