This repository was archived by the owner on Jun 11, 2024. It is now read-only.

Commit 163d29c

[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
1 parent 384363b commit 163d29c
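
These fixes are the kind produced by an auto-formatting hook run by pre-commit.ci. The repository's actual hook configuration is not part of this commit, so the sketch below is only an illustration: the hook choice (black plus the standard pre-commit-hooks), the revisions, and the 100-character line length are assumptions inferred from the reformatted lines, not confirmed by the diff.

# Hypothetical .pre-commit-config.yaml; the repository's real config is not shown in this commit
repos:
  - repo: https://github.com/psf/black
    rev: 23.3.0  # assumed revision
    hooks:
      - id: black
        args: ["--line-length", "100"]  # assumed; the wrapped calls suggest a 100-char limit
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v4.4.0  # assumed revision
    hooks:
      - id: trailing-whitespace
      - id: end-of-file-fixer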

4 files changed: 48 additions & 25 deletions


nwp/icon/app.py

Lines changed: 29 additions & 11 deletions
@@ -33,13 +33,13 @@ def download_model_files(runs=None, parent_folder=None, model="global", delay=0)
         var_2d_list = GLOBAL_VAR2D_LIST
         invariant = GLOBAL_INVARIENT_LIST
         pressure_levels = GLOBAL_PRESSURE_LEVELS
-        f_steps = list(range(0, 79)) + list(range(81, 99, 3)) # 4 days
+        f_steps = list(range(0, 79)) + list(range(81, 99, 3))  # 4 days
     else:
         var_3d_list = EU_VAR3D_LIST
         var_2d_list = EU_VAR2D_LIST
         invariant = None
         pressure_levels = EU_PRESSURE_LEVELS
-        f_steps = list(range(0, 79)) + list(range(81, 123, 3)) # 5 days
+        f_steps = list(range(0, 79)) + list(range(81, 123, 3))  # 5 days
     for run in runs:
         run_folder = os.path.join(parent_folder, run)
         if not os.path.exists(run_folder):
@@ -61,7 +61,7 @@ def download_model_files(runs=None, parent_folder=None, model="global", delay=0)
                     run=run,
                     f_times=f_steps,
                     model=model,
-                    delay=delay
+                    delay=delay,
                 )
                 not_done = False
             except Exception as e:
@@ -70,26 +70,40 @@ def download_model_files(runs=None, parent_folder=None, model="global", delay=0)
 
 
 def process_model_files(
-    folder, var_3d_list=None, var_2d_list=None, invariant_list=None, model="global", run="00", delay=0
+    folder,
+    var_3d_list=None,
+    var_2d_list=None,
+    invariant_list=None,
+    model="global",
+    run="00",
+    delay=0,
 ):
     date_string, _ = get_run(run, delay=delay)
     if model == "global":
         var_base = "icon_global_icosahedral"
         var_3d_list = GLOBAL_VAR3D_LIST
         var_2d_list = GLOBAL_VAR2D_LIST
         lon_ds = xr.open_dataset(
-            list(glob(os.path.join(folder, run, f"{var_base}_time-invariant_{date_string}_CLON.grib2")))[0],
+            list(
+                glob(
+                    os.path.join(folder, run, f"{var_base}_time-invariant_{date_string}_CLON.grib2")
+                )
+            )[0],
             engine="cfgrib",
             backend_kwargs={"errors": "ignore"},
         )
         lat_ds = xr.open_dataset(
-            list(glob(os.path.join(folder, run, f"{var_base}_time-invariant_{date_string}_CLAT.grib2")))[0],
+            list(
+                glob(
+                    os.path.join(folder, run, f"{var_base}_time-invariant_{date_string}_CLAT.grib2")
+                )
+            )[0],
             engine="cfgrib",
             backend_kwargs={"errors": "ignore"},
         )
         lons = lon_ds.tlon.values
         lats = lat_ds.tlat.values
-        f_steps = list(range(0, 79)) + list(range(81, 99, 3)) # 4 days
+        f_steps = list(range(0, 79)) + list(range(81, 99, 3))  # 4 days
     else:
         var_base = "icon-eu_europe_regular-lat-lon"
         var_3d_list = EU_VAR3D_LIST
@@ -145,7 +159,9 @@ def process_model_files(
         print(var_2d)
         try:
             ds = xr.open_mfdataset(
-                os.path.join(folder, run, f"{var_base}_single-level_{date_string}_*_{var_2d.upper()}.grib2"),
+                os.path.join(
+                    folder, run, f"{var_base}_single-level_{date_string}_*_{var_2d.upper()}.grib2"
+                ),
                 engine="cfgrib",
                 combine="nested",
                 concat_dim="step",
@@ -207,9 +223,11 @@ def upload_to_hf(dataset_xr, folder, model="global", run="00", token=None):
                 f"{dataset_xr.time.dt.day.values}/"
                 f"{dataset_xr.time.dt.year.values}{str(dataset_xr.time.dt.month.values).zfill(2)}{str(dataset_xr.time.dt.day.values).zfill(2)}"
                 f"_{str(dataset_xr.time.dt.hour.values).zfill(2)}.zarr.zip",
-                repo_id="openclimatefix/dwd-icon-global"
-                if model == "global"
-                else "openclimatefix/dwd-icon-eu",
+                repo_id=(
+                    "openclimatefix/dwd-icon-global"
+                    if model == "global"
+                    else "openclimatefix/dwd-icon-eu"
+                ),
                 repo_type="dataset",
             )
             done = True

nwp/icon/utils.py

Lines changed: 5 additions & 4 deletions
@@ -1,4 +1,5 @@
 """Utilities for downloading the DWD ICON models"""
+
 import bz2
 import os
 from datetime import datetime, timedelta
@@ -125,7 +126,7 @@ def download_extract_url(url_and_folder):
     else:
         r = requests.get(url, stream=True)
         if r.status_code == requests.codes.ok:
-            #print(f"Downloading {url_and_folder[0]}")
+            # print(f"Downloading {url_and_folder[0]}")
             with r.raw as source, open(filename, "wb") as dest:
                 dest.write(bz2.decompress(source.read()))
             extracted_files = filename
@@ -154,9 +155,9 @@ def get_dset(
         invarient=invarient,
         f_times=f_times,
         model_url="icon/grib" if model == "global" else "icon-eu/grib",
-        var_url_base="icon_global_icosahedral"
-        if model == "global"
-        else "icon-eu_europe_regular-lat-lon",
+        var_url_base=(
+            "icon_global_icosahedral" if model == "global" else "icon-eu_europe_regular-lat-lon"
+        ),
        run=run,
        delay=delay,
    )

scripts/convert_icon_archive.py

Lines changed: 13 additions & 10 deletions
@@ -23,6 +23,7 @@
 from pathlib import Path
 import multiprocessing as mp
 
+
 def decompress(full_bzip_filename: Path, temp_pth: Path) -> str:
     """
     Decompresses .bz2 file and returns the non-compressed filename
@@ -38,7 +39,7 @@ def decompress(full_bzip_filename: Path, temp_pth: Path) -> str:
     base_nat_filename = os.path.splitext(base_bzip_filename)[0]
     full_nat_filename = os.path.join(temp_pth, base_nat_filename)
     if os.path.exists(full_nat_filename):
-        return full_nat_filename # Don't decompress a second time
+        return full_nat_filename  # Don't decompress a second time
     with open(full_nat_filename, "wb") as nat_file_handler:
         process = subprocess.run(
             ["pbzip2", "--decompress", "--keep", "--stdout", full_bzip_filename],
@@ -179,8 +180,8 @@ def upload_to_hf(dataset_xr, folder, model="eu", run="00", token=None):
     encoding = {var: {"compressor": Blosc2("zstd", clevel=9)} for var in dataset_xr.data_vars}
     encoding["time"] = {"units": "nanoseconds since 1970-01-01"}
     with zarr.ZipStore(
-            zarr_path,
-            mode="w",
+        zarr_path,
+        mode="w",
     ) as store:
         dataset_xr.chunk(chunking).to_zarr(store, encoding=encoding, compute=True)
     done = False
@@ -189,13 +190,15 @@ def upload_to_hf(dataset_xr, folder, model="eu", run="00", token=None):
            api.upload_file(
                path_or_fileobj=zarr_path,
                path_in_repo=f"data/{dataset_xr.time.dt.year.values}/"
-           f"{dataset_xr.time.dt.month.values}/"
-           f"{dataset_xr.time.dt.day.values}/"
-           f"{dataset_xr.time.dt.year.values}{str(dataset_xr.time.dt.month.values).zfill(2)}{str(dataset_xr.time.dt.day.values).zfill(2)}"
-           f"_{str(dataset_xr.time.dt.hour.values).zfill(2)}.zarr.zip",
-           repo_id="openclimatefix/dwd-icon-global"
-           if model == "global"
-           else "openclimatefix/dwd-icon-eu",
+               f"{dataset_xr.time.dt.month.values}/"
+               f"{dataset_xr.time.dt.day.values}/"
+               f"{dataset_xr.time.dt.year.values}{str(dataset_xr.time.dt.month.values).zfill(2)}{str(dataset_xr.time.dt.day.values).zfill(2)}"
+               f"_{str(dataset_xr.time.dt.hour.values).zfill(2)}.zarr.zip",
+               repo_id=(
+                   "openclimatefix/dwd-icon-global"
+                   if model == "global"
+                   else "openclimatefix/dwd-icon-eu"
+               ),
                repo_type="dataset",
            )
            done = True

setup.py

Lines changed: 1 addition & 0 deletions
@@ -1,4 +1,5 @@
 """ Usual setup file for package """
+
 # read the contents of your README file
 from pathlib import Path
 
