import sys
import warnings
from collections.abc import Iterable
from copy import copy
from datetime import date, datetime, timedelta
import cftime
import numpy as np
import xarray as xr
from scipy.spatial import KDTree
from tqdm import tqdm
from parcels._compat import MPI
from parcels.application_kernels.advection import AdvectionRK4
from parcels.field import Field, NestedField
from parcels.grid import CurvilinearGrid, GridType
from parcels.interaction.interactionkernel import InteractionKernel
from parcels.interaction.neighborsearch import (
BruteFlatNeighborSearch,
BruteSphericalNeighborSearch,
HashSphericalNeighborSearch,
KDTreeFlatNeighborSearch,
)
from parcels.kernel import Kernel
from parcels.particle import ScipyParticle, Variable
from parcels.particledata import ParticleData, ParticleDataIterator
from parcels.particlefile import ParticleFile
from parcels.tools._helpers import particleset_repr, timedelta_to_float
from parcels.tools.converters import _get_cftime_calendars, convert_to_flat_array
from parcels.tools.loggers import logger
from parcels.tools.statuscodes import StatusCode
from parcels.tools.warnings import ParticleSetWarning
__all__ = ["ParticleSet"]
def _convert_to_reltime(time):
"""Check to determine if the value of the time parameter needs to be converted to a relative value (relative to the time_origin)."""
if isinstance(time, np.datetime64) or (hasattr(time, "calendar") and time.calendar in _get_cftime_calendars()):
return True
return False
class ParticleSet:
"""Class for storing particle and executing kernel over them.
Please note that this currently only supports fixed size particle sets, meaning that the particle set only
holds the particles defined on construction. Individual particles can neither be added nor deleted individually,
and individual particles can only be deleted as a set procedurally (i.e. by 'particle.delete()'-call during
kernel execution).
Parameters
----------
fieldset :
:mod:`parcels.fieldset.FieldSet` object from which to sample velocity.
pclass : parcels.particle.ScipyParticle
Optional class that inherits from :mod:`parcels.particle.ScipyParticle` and defines custom particle Variables
lon :
List of initial longitude values for particles
lat :
List of initial latitude values for particles
depth :
Optional list of initial depth values for particles. Default is 0m
time :
Optional list of initial time values for particles. Default is fieldset.U.grid.time[0]
repeatdt : datetime.timedelta or float, optional
Optional interval on which to repeat the release of the ParticleSet. Either timedelta object, or float in seconds.
lonlatdepth_dtype :
Floating precision for lon, lat, depth particle coordinates.
It is either np.float32 or np.float64. Default is np.float32 if fieldset.U.interp_method is 'linear'
and np.float64 if the interpolation method is 'cgrid_velocity'
pid_orig :
Optional list of (offsets for) the particle IDs
partition_function :
Function to use for partitioning particles over processors. Default is to use kMeans
periodic_domain_zonal :
Zonal domain size, used to apply zonally periodic boundaries for particle-particle
interaction. If None, no zonally periodic boundaries are applied
Other Variables can be initialised using further arguments (e.g. v=... for a Variable named 'v')
"""
def __init__(
self,
fieldset,
pclass=ScipyParticle,
lon=None,
lat=None,
depth=None,
time=None,
repeatdt=None,
lonlatdepth_dtype=None,
pid_orig=None,
interaction_distance=None,
periodic_domain_zonal=None,
**kwargs,
):
self.particledata = None
self._repeat_starttime = None
self._repeatlon = None
self._repeatlat = None
self._repeatdepth = None
self._repeatpclass = None
self._repeatkwargs = None
self._kernel = None
self._interaction_kernel = None
self.fieldset = fieldset
self.fieldset._check_complete()
self.time_origin = fieldset.time_origin
self._pclass = pclass
# ==== first: create a new subclass of the pclass that includes the required variables ==== #
# ==== see dynamic-instantiation trick here: https://www.python-course.eu/python3_classes_and_type.php ==== #
class_name = pclass.__name__
array_class = None
if class_name not in dir():
def ArrayClass_init(self, *args, **kwargs):
fieldset = kwargs.get("fieldset", None)
ngrids = kwargs.get("ngrids", None)
if type(self).ngrids.initial < 0:
numgrids = ngrids
if numgrids is None and fieldset is not None:
numgrids = fieldset.gridset.size
assert numgrids is not None, "Neither fieldsets nor number of grids are specified - exiting."
type(self).ngrids.initial = numgrids
self.ngrids = type(self).ngrids.initial
if self.ngrids >= 0:
for index in ["xi", "yi", "zi", "ti"]:
if index != "ti":
setattr(self, index, np.zeros(self.ngrids, dtype=np.int32))
else:
setattr(self, index, -1 * np.ones(self.ngrids, dtype=np.int32))
super(type(self), self).__init__(*args, **kwargs)
array_class_vdict = {
"ngrids": Variable("ngrids", dtype=np.int32, to_write=False, initial=-1),
"xi": Variable("xi", dtype=np.int32, to_write=False),
"yi": Variable("yi", dtype=np.int32, to_write=False),
"zi": Variable("zi", dtype=np.int32, to_write=False),
"ti": Variable("ti", dtype=np.int32, to_write=False, initial=-1),
"__init__": ArrayClass_init,
}
array_class = type(class_name, (pclass,), array_class_vdict)
else:
array_class = locals()[class_name]
# ==== dynamic re-classing completed ==== #
_pclass = array_class
lon = np.empty(shape=0) if lon is None else convert_to_flat_array(lon)
lat = np.empty(shape=0) if lat is None else convert_to_flat_array(lat)
if isinstance(pid_orig, (type(None), bool)):
pid_orig = np.arange(lon.size)
if depth is None:
mindepth = self.fieldset.gridset.dimrange("depth")[0]
depth = np.ones(lon.size) * mindepth
else:
depth = convert_to_flat_array(depth)
assert lon.size == lat.size and lon.size == depth.size, "lon, lat, depth don't all have the same lengths"
time = convert_to_flat_array(time)
time = np.repeat(time, lon.size) if time.size == 1 else time
if time.size > 0 and type(time[0]) in [datetime, date]:
time = np.array([np.datetime64(t) for t in time])
if time.size > 0 and isinstance(time[0], np.timedelta64) and not self.time_origin:
raise NotImplementedError("If fieldset.time_origin is not a date, time of a particle must be a double")
time = np.array([self.time_origin.reltime(t) if _convert_to_reltime(t) else t for t in time])
assert lon.size == time.size, "time and positions (lon, lat, depth) do not have the same lengths."
if isinstance(fieldset.U, Field) and (not fieldset.U.allow_time_extrapolation):
_warn_particle_times_outside_fieldset_time_bounds(time, fieldset.U.grid.time_full)
if lonlatdepth_dtype is None:
lonlatdepth_dtype = self.lonlatdepth_dtype_from_field_interp_method(fieldset.U)
assert lonlatdepth_dtype in [
np.float32,
np.float64,
], "lon lat depth precision should be set to either np.float32 or np.float64"
for kwvar in kwargs:
if kwvar not in ["partition_function"]:
kwargs[kwvar] = convert_to_flat_array(kwargs[kwvar])
assert (
lon.size == kwargs[kwvar].size
), f"{kwvar} and positions (lon, lat, depth) don't have the same lengths."
self.repeatdt = timedelta_to_float(repeatdt) if repeatdt is not None else None
if self.repeatdt:
if self.repeatdt <= 0:
raise ValueError("Repeatdt should be > 0")
if time[0] and not np.allclose(time, time[0]):
raise ValueError("All Particle.time should be the same when repeatdt is not None")
self._repeatpclass = pclass
self._repeatkwargs = kwargs
self._repeatkwargs.pop("partition_function", None)
ngrids = fieldset.gridset.size
# Variables used for interaction kernels.
inter_dist_horiz = None
inter_dist_vert = None
# The _dirty_neighbor attribute keeps track of whether
# the neighbor search structure needs to be rebuilt.
# If indices change (for example when adding/deleting a particle),
# the neighbor search (NS) structure needs to be rebuilt and
# _dirty_neighbor should be set to True. Since the NS structure
# isn't immediately initialized, it is set to True here.
self._dirty_neighbor = True
self.particledata = ParticleData(
_pclass,
lon=lon,
lat=lat,
depth=depth,
time=time,
lonlatdepth_dtype=lonlatdepth_dtype,
pid_orig=pid_orig,
ngrid=ngrids,
**kwargs,
)
# Initialize neighbor search data structure (used for interaction).
if interaction_distance is not None:
meshes = [g.mesh for g in fieldset.gridset.grids]
# Assert all grids have the same mesh type
assert np.all(np.array(meshes) == meshes[0])
mesh_type = meshes[0]
if mesh_type == "spherical":
if len(self) < 1000:
interaction_class = BruteSphericalNeighborSearch
else:
interaction_class = HashSphericalNeighborSearch
elif mesh_type == "flat":
if len(self) < 1000:
interaction_class = BruteFlatNeighborSearch
else:
interaction_class = KDTreeFlatNeighborSearch
else:
assert False, "Interaction is only possible on 'flat' and 'spherical' meshes"
try:
if len(interaction_distance) == 2:
inter_dist_vert, inter_dist_horiz = interaction_distance
else:
inter_dist_vert = interaction_distance[0]
inter_dist_horiz = interaction_distance[0]
except TypeError:
inter_dist_vert = interaction_distance
inter_dist_horiz = interaction_distance
self._neighbor_tree = interaction_class(
inter_dist_vert=inter_dist_vert,
inter_dist_horiz=inter_dist_horiz,
periodic_domain_zonal=periodic_domain_zonal,
)
# End of neighbor search data structure initialization.
if self.repeatdt:
if len(time) > 0 and time[0] is None:
self._repeat_starttime = time[0]
else:
if self.particledata.data["time"][0] and not np.allclose(
self.particledata.data["time"], self.particledata.data["time"][0]
):
raise ValueError("All Particle.time should be the same when repeatdt is not None")
self._repeat_starttime = copy(self.particledata.data["time"][0])
self._repeatlon = copy(self.particledata.data["lon"])
self._repeatlat = copy(self.particledata.data["lat"])
self._repeatdepth = copy(self.particledata.data["depth"])
for kwvar in kwargs:
if kwvar not in ["partition_function"]:
self._repeatkwargs[kwvar] = copy(self.particledata.data[kwvar])
if self.repeatdt:
if MPI and self.particledata.pu_indicators is not None:
mpi_comm = MPI.COMM_WORLD
mpi_rank = mpi_comm.Get_rank()
self._repeatpid = pid_orig[self.particledata.pu_indicators == mpi_rank]
self._kernel = None
def __del__(self):
if self.particledata is not None and isinstance(self.particledata, ParticleData):
del self.particledata
self.particledata = None
def __iter__(self):
return iter(self.particledata)
def __getattr__(self, name):
"""
Access a single property of all particles.
Parameters
----------
name : str
Name of the property
"""
for v in self.particledata.ptype.variables:
if v.name == name:
return getattr(self.particledata, name)
if name in self.__dict__ and name[0] != "_":
return self.__dict__[name]
else:
return False
def __getitem__(self, index):
"""Get a single particle by index."""
return self.particledata.get_single_by_index(index)
@staticmethod
def lonlatdepth_dtype_from_field_interp_method(field):
if isinstance(field, NestedField):
for f in field:
if f.interp_method == "cgrid_velocity":
return np.float64
else:
if field.interp_method == "cgrid_velocity":
return np.float64
return np.float32
def cstruct(self):
cstruct = self.particledata.cstruct()
return cstruct
@property
def ctypes_struct(self):
return self.cstruct()
@property
def size(self):
# ==== to change at some point - len and size are different things ==== #
return len(self.particledata)
@property
def pclass(self):
return self._pclass
def __repr__(self):
return particleset_repr(self)
def __len__(self):
return len(self.particledata)
def __sizeof__(self):
return sys.getsizeof(self.particledata)
def add(self, particles):
"""Add particles to the ParticleSet. Note that this is an
incremental add, the particles will be added to the ParticleSet
on which this function is called.
Parameters
----------
particles :
Another ParticleSet containing particles to add to this one.
Returns
-------
type
The current ParticleSet
"""
if isinstance(particles, type(self)):
particles = particles.particledata
self.particledata += particles
# Adding particles invalidates the neighbor search structure.
self._dirty_neighbor = True
return self
def __iadd__(self, particles):
"""Add particles to the ParticleSet.
Note that this is an incremental add, the particles will be added to the ParticleSet
on which this function is called.
Parameters
----------
particles :
Another ParticleSet containing particles to add to this one.
Returns
-------
type
The current ParticleSet
"""
self.add(particles)
return self
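# Illustrative sketch: two ParticleSets built on the same fieldset can be merged,
# assuming `pset` and `pset2` are existing ParticleSet instances.
#
#     pset.add(pset2)    # or equivalently: pset += pset2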
def remove_indices(self, indices):
"""Method to remove particles from the ParticleSet, based on their `indices`."""
# Removing particles invalidates the neighbor search structure.
self._dirty_neighbor = True
if type(indices) in [int, np.int32, np.intp]:
self.particledata.remove_single_by_index(indices)
else:
self.particledata.remove_multi_by_indices(indices)
def remove_booleanvector(self, indices):
"""Method to remove particles from the ParticleSet, based on an array of booleans."""
# Removing particles invalidates the neighbor search structure.
self._dirty_neighbor = True
self.remove_indices(np.where(indices)[0])
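# Illustrative sketch: removing all particles east of 20E, assuming a ParticleSet
# `pset` whose particles expose a 'lon' property. remove_indices works analogously
# with an explicit list of indices, e.g. pset.remove_indices([0, 3, 7]).
#
#     pset.remove_booleanvector(pset.lon > 20.0)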
def _active_particles_mask(self, time, dt):
active_indices = (time - self.particledata.data["time"]) / dt >= 0
non_err_indices = np.isin(self.particledata.data["state"], [StatusCode.Success, StatusCode.Evaluate])
active_indices = np.logical_and(active_indices, non_err_indices)
self._active_particle_idx = np.where(active_indices)[0]
return active_indices
def _compute_neighbor_tree(self, time, dt):
active_mask = self._active_particles_mask(time, dt)
self._values = np.vstack(
(
self.particledata.data["depth"],
self.particledata.data["lat"],
self.particledata.data["lon"],
)
)
if self._dirty_neighbor:
self._neighbor_tree.rebuild(self._values, active_mask=active_mask)
self._dirty_neighbor = False
else:
self._neighbor_tree.update_values(self._values, new_active_mask=active_mask)
def _neighbors_by_index(self, particle_idx):
neighbor_idx, distances = self._neighbor_tree.find_neighbors_by_idx(particle_idx)
neighbor_idx = self._active_particle_idx[neighbor_idx]
mask = neighbor_idx != particle_idx
neighbor_idx = neighbor_idx[mask]
if "horiz_dist" in self.particledata._ptype.variables:
self.particledata.data["vert_dist"][neighbor_idx] = distances[0, mask]
self.particledata.data["horiz_dist"][neighbor_idx] = distances[1, mask]
return ParticleDataIterator(self.particledata, subset=neighbor_idx)
def _neighbors_by_coor(self, coor):
neighbor_idx = self._neighbor_tree.find_neighbors_by_coor(coor)
neighbor_ids = self.particledata.data["id"][neighbor_idx]
return neighbor_ids
# TODO: This method is only tested in tutorial notebook. Add unit test?
def populate_indices(self):
"""Pre-populate guesses of particle xi/yi indices using a kdtree.
This is only intended for curvilinear grids, where the initial index search
may be quite expensive.
"""
for i, grid in enumerate(self.fieldset.gridset.grids):
if not isinstance(grid, CurvilinearGrid):
continue
tree_data = np.stack((grid.lon.flat, grid.lat.flat), axis=-1)
IN = np.all(~np.isnan(tree_data), axis=1)
tree = KDTree(tree_data[IN, :])
# stack all the particle positions for a single query
pts = np.stack((self.particledata.data["lon"], self.particledata.data["lat"]), axis=-1)
# query datatype needs to match tree datatype
_, idx_nan = tree.query(pts.astype(tree_data.dtype))
idx = np.where(IN)[0][idx_nan]
yi, xi = np.unravel_index(idx, grid.lon.shape)
self.particledata.data["xi"][:, i] = xi
self.particledata.data["yi"][:, i] = yi
@classmethod
def from_list(
cls, fieldset, pclass, lon, lat, depth=None, time=None, repeatdt=None, lonlatdepth_dtype=None, **kwargs
):
"""Initialise the ParticleSet from lists of lon and lat.
Parameters
----------
fieldset :
:mod:`parcels.fieldset.FieldSet` object from which to sample velocity
pclass :
Particle class. May be a parcels.particle.ScipyParticle class as defined in parcels, or a subclass defining a custom particle.
lon :
List of initial longitude values for particles
lat :
List of initial latitude values for particles
depth :
Optional list of initial depth values for particles. Default is 0m
time :
Optional list of start time values for particles. Default is fieldset.U.time[0]
repeatdt :
Optional interval (in seconds) on which to repeat the release of the ParticleSet (Default value = None)
lonlatdepth_dtype :
Floating precision for lon, lat, depth particle coordinates.
It is either np.float32 or np.float64. Default is np.float32 if fieldset.U.interp_method is 'linear'
and np.float64 if the interpolation method is 'cgrid_velocity'
Other Variables can be initialised using further arguments (e.g. v=... for a Variable named 'v')
**kwargs :
Keyword arguments passed to the particleset constructor.
"""
return cls(
fieldset=fieldset,
pclass=pclass,
lon=lon,
lat=lat,
depth=depth,
time=time,
repeatdt=repeatdt,
lonlatdepth_dtype=lonlatdepth_dtype,
**kwargs,
)
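# Illustrative sketch of from_list, assuming an existing FieldSet `fieldset`:
#
#     pset = ParticleSet.from_list(fieldset=fieldset, pclass=ScipyParticle,
#                                  lon=[3.0, 3.1, 3.2], lat=[51.0, 51.1, 51.2],
#                                  time=[0, 0, 0])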
@classmethod
def from_line(
cls,
fieldset,
pclass,
start,
finish,
size,
depth=None,
time=None,
repeatdt=None,
lonlatdepth_dtype=None,
**kwargs,
):
"""Create a particleset in the shape of a line (according to a cartesian grid).
Initialise the ParticleSet from start/finish coordinates with equidistant spacing.
Note that this method uses simple numpy.linspace calls and does not take into account
great circles, so it may not be exact on a globe.
Parameters
----------
fieldset :
:mod:`parcels.fieldset.FieldSet` object from which to sample velocity
pclass :
Particle class. May be a parcels.particle.ScipyParticle as defined in parcels, or a subclass defining a custom particle.
start :
Start point (longitude, latitude) for initialisation of particles on a straight line.
finish :
End point (longitude, latitude) for initialisation of particles on a straight line.
size :
Initial size of particle set
depth :
Optional list of initial depth values for particles. Default is 0m
time :
Optional start time value for particles. Default is fieldset.U.time[0]
repeatdt :
Optional interval (in seconds) on which to repeat the release of the ParticleSet (Default value = None)
lonlatdepth_dtype :
Floating precision for lon, lat, depth particle coordinates.
It is either np.float32 or np.float64. Default is np.float32 if fieldset.U.interp_method is 'linear'
and np.float64 if the interpolation method is 'cgrid_velocity'
"""
lon = np.linspace(start[0], finish[0], size)
lat = np.linspace(start[1], finish[1], size)
if type(depth) in [int, float]:
depth = [depth] * size
return cls(
fieldset=fieldset,
pclass=pclass,
lon=lon,
lat=lat,
depth=depth,
time=time,
repeatdt=repeatdt,
lonlatdepth_dtype=lonlatdepth_dtype,
**kwargs,
)
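# Illustrative sketch of from_line: releasing 10 particles on a straight
# (linspace-based) line between (lon, lat) = (-5, 40) and (5, 45), assuming an
# existing FieldSet `fieldset`:
#
#     pset = ParticleSet.from_line(fieldset=fieldset, pclass=ScipyParticle,
#                                  start=(-5.0, 40.0), finish=(5.0, 45.0), size=10)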
@classmethod
def _monte_carlo_sample(cls, start_field, size, mode="monte_carlo"):
"""Converts a starting field into a monte-carlo sample of lons and lats.
Parameters
----------
start_field : parcels.field.Field
:mod:`parcels.field.Field` object for initialising particles stochastically (horizontally) according to the presented density field.
size :
mode :
(Default value = 'monte_carlo')
Returns
-------
list of float
A list of longitude values.
list of float
A list of latitude values.
"""
if mode == "monte_carlo":
data = start_field.data if isinstance(start_field.data, np.ndarray) else np.array(start_field.data)
if start_field.interp_method == "cgrid_tracer":
p_interior = np.squeeze(data[0, 1:, 1:])
else: # if A-grid
d = data
p_interior = (d[0, :-1, :-1] + d[0, 1:, :-1] + d[0, :-1, 1:] + d[0, 1:, 1:]) / 4.0
p_interior = np.where(d[0, :-1, :-1] == 0, 0, p_interior)
p_interior = np.where(d[0, 1:, :-1] == 0, 0, p_interior)
p_interior = np.where(d[0, 1:, 1:] == 0, 0, p_interior)
p_interior = np.where(d[0, :-1, 1:] == 0, 0, p_interior)
p = np.reshape(p_interior, (1, p_interior.size))
inds = np.random.choice(p_interior.size, size, replace=True, p=p[0] / np.sum(p))
xsi = np.random.uniform(size=len(inds))
eta = np.random.uniform(size=len(inds))
j, i = np.unravel_index(inds, p_interior.shape)
grid = start_field.grid
lon, lat = ([], [])
if grid._gtype in [GridType.RectilinearZGrid, GridType.RectilinearSGrid]:
lon = grid.lon[i] + xsi * (grid.lon[i + 1] - grid.lon[i])
lat = grid.lat[j] + eta * (grid.lat[j + 1] - grid.lat[j])
else:
lons = np.array([grid.lon[j, i], grid.lon[j, i + 1], grid.lon[j + 1, i + 1], grid.lon[j + 1, i]])
if grid.mesh == "spherical":
lons[1:] = np.where(lons[1:] - lons[0] > 180, lons[1:] - 360, lons[1:])
lons[1:] = np.where(-lons[1:] + lons[0] > 180, lons[1:] + 360, lons[1:])
lon = (
(1 - xsi) * (1 - eta) * lons[0]
+ xsi * (1 - eta) * lons[1]
+ xsi * eta * lons[2]
+ (1 - xsi) * eta * lons[3]
)
lat = (
(1 - xsi) * (1 - eta) * grid.lat[j, i]
+ xsi * (1 - eta) * grid.lat[j, i + 1]
+ xsi * eta * grid.lat[j + 1, i + 1]
+ (1 - xsi) * eta * grid.lat[j + 1, i]
)
return list(lat), list(lon)
else:
raise NotImplementedError(f'Mode {mode} not implemented. Please use the "monte_carlo" algorithm instead.')
@classmethod
def from_field(
cls,
fieldset,
pclass,
start_field,
size,
mode="monte_carlo",
depth=None,
time=None,
repeatdt=None,
lonlatdepth_dtype=None,
):
"""Initialise the ParticleSet randomly drawn according to distribution from a field.
Parameters
----------
fieldset : parcels.fieldset.FieldSet
:mod:`parcels.fieldset.FieldSet` object from which to sample velocity
pclass :
Particle class. May be a parcels.particle.ScipyParticle class as defined in parcels, or a subclass defining a custom particle.
start_field : parcels.field.Field
Field for initialising particles stochastically (horizontally) according to the presented density field.
size :
Initial size of particle set
mode :
Type of random sampling. Currently only 'monte_carlo' is implemented (Default value = 'monte_carlo')
depth :
Optional list of initial depth values for particles. Default is 0m
time :
Optional start time value for particles. Default is fieldset.U.time[0]
repeatdt :
Optional interval (in seconds) on which to repeat the release of the ParticleSet (Default value = None)
lonlatdepth_dtype :
Floating precision for lon, lat, depth particle coordinates.
It is either np.float32 or np.float64. Default is np.float32 if fieldset.U.interp_method is 'linear'
and np.float64 if the interpolation method is 'cgrid_velocity'
"""
lat, lon = cls._monte_carlo_sample(start_field, size, mode)
return cls(
fieldset=fieldset,
pclass=pclass,
lon=lon,
lat=lat,
depth=depth,
time=time,
lonlatdepth_dtype=lonlatdepth_dtype,
repeatdt=repeatdt,
)
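# Illustrative sketch of from_field: releasing 1000 particles horizontally
# distributed according to a density field. `fieldset.P` is a hypothetical
# tracer/density Field; any Field on the FieldSet can serve as start_field.
#
#     pset = ParticleSet.from_field(fieldset=fieldset, pclass=ScipyParticle,
#                                   start_field=fieldset.P, size=1000)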
@classmethod
def from_particlefile(
cls, fieldset, pclass, filename, restart=True, restarttime=None, repeatdt=None, lonlatdepth_dtype=None, **kwargs
):
"""Initialise the ParticleSet from a zarr ParticleFile.
This creates a new ParticleSet based on locations of all particles written
in a zarr ParticleFile at a certain time. Particle IDs are preserved if restart=True
Parameters
----------
fieldset : parcels.fieldset.FieldSet
:mod:`parcels.fieldset.FieldSet` object from which to sample velocity
pclass :
Particle class. May be a parcels.particle.ScipyParticle class as defined in parcels, or a subclass defining a custom particle.
filename : str
Name of the particlefile from which to read initial conditions
restart : bool
Signal if pset is used for a restart (default is True).
In that case, Particle IDs are preserved.
restarttime :
time at which the Particles will be restarted. Default is the last time written.
Alternatively, restarttime could be a time value (including np.datetime64) or
a callable function such as np.nanmin. The latter is useful when running with dt < 0.
repeatdt : datetime.timedelta or float, optional
Optional interval on which to repeat the release of the ParticleSet. Either timedelta object, or float in seconds.
lonlatdepth_dtype :
Floating precision for lon, lat, depth particle coordinates.
It is either np.float32 or np.float64. Default is np.float32 if fieldset.U.interp_method is 'linear'
and np.float64 if the interpolation method is 'cgrid_velocity'
**kwargs :
Keyword arguments passed to the particleset constructor.
"""
if repeatdt is not None:
warnings.warn(
f"Note that the `repeatdt` argument is not retained from {filename}, and that "
"setting a new repeatdt will start particles from the _new_ particle "
"locations.",
ParticleSetWarning,
stacklevel=2,
)
pfile = xr.open_zarr(str(filename))
pfile_vars = [v for v in pfile.data_vars]
vars = {}
to_write = {}
for v in pclass.getPType().variables:
if v.name in pfile_vars:
vars[v.name] = np.ma.filled(pfile.variables[v.name], np.nan)
elif (
v.name
not in [
"xi",
"yi",
"zi",
"ti",
"dt",
"depth",
"id",
"obs_written",
"state",
"lon_nextloop",
"lat_nextloop",
"depth_nextloop",
"time_nextloop",
]
and v.to_write
):
raise RuntimeError(f"Variable {v.name} is in pclass but not in the particlefile")
to_write[v.name] = v.to_write
vars["depth"] = np.ma.filled(pfile.variables["z"], np.nan)
vars["id"] = np.ma.filled(pfile.variables["trajectory"], np.nan)
for v in ["lon", "lat", "depth", "time"]:
to_write[v] = True
if isinstance(vars["time"][0, 0], np.timedelta64):
vars["time"] = np.array([t / np.timedelta64(1, "s") for t in vars["time"]])
if restarttime is None:
restarttime = np.nanmax(vars["time"])
elif callable(restarttime):
restarttime = restarttime(vars["time"])
else:
restarttime = restarttime
inds = np.where(vars["time"] == restarttime)
for v in vars:
if to_write[v] is True:
vars[v] = vars[v][inds]
elif to_write[v] == "once":
vars[v] = vars[v][inds[0]]
if v not in ["lon", "lat", "depth", "time", "id"]:
kwargs[v] = vars[v]
if restart:
pclass.setLastID(0) # reset to zero offset
else:
vars["id"] = None
return cls(
fieldset=fieldset,
pclass=pclass,
lon=vars["lon"],
lat=vars["lat"],
depth=vars["depth"],
time=vars["time"],
pid_orig=vars["id"],
lonlatdepth_dtype=lonlatdepth_dtype,
repeatdt=repeatdt,
**kwargs,
)
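# Illustrative sketch of from_particlefile: restarting from a previously written
# zarr ParticleFile (the path is hypothetical). With restart=True the particle IDs
# from the file are preserved.
#
#     pset = ParticleSet.from_particlefile(fieldset=fieldset, pclass=ScipyParticle,
#                                          filename="previous_run.zarr", restart=True)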
def Kernel(self, pyfunc):
"""Wrapper method to convert a `pyfunc` into a :class:`parcels.kernel.Kernel` object.
Conversion is based on `fieldset` and `ptype` of the ParticleSet.
Parameters
----------
pyfunc : function or list of functions
Python function to convert into kernel. If a list of functions is provided,
the functions will be converted to kernels and combined into a single kernel.
"""
if isinstance(pyfunc, list):
return Kernel.from_list(
self.fieldset,
self.particledata.ptype,
pyfunc,
)
return Kernel(
self.fieldset,
self.particledata.ptype,
pyfunc=pyfunc,
)
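# Illustrative sketch: combining the built-in AdvectionRK4 kernel with a hypothetical
# user-defined function `Age` into a single kernel via a list (handled by Kernel.from_list);
# kernels can also be concatenated with the + operator.
#
#     kernel = pset.Kernel([AdvectionRK4, Age])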
def InteractionKernel(self, pyfunc_inter):
if pyfunc_inter is None:
return None
return InteractionKernel(self.fieldset, self.particledata.ptype, pyfunc=pyfunc_inter)
def ParticleFile(self, *args, **kwargs):
"""Wrapper method to initialise a :class:`parcels.particlefile.ParticleFile` object from the ParticleSet."""
return ParticleFile(*args, particleset=self, **kwargs)
def data_indices(self, variable_name, compare_values, invert=False):
"""Get the indices of all particles where the value of `variable_name` equals (one of) `compare_values`.
Parameters
----------
variable_name : str
Name of the variable to check.
compare_values :
Value or list of values to compare to.
invert :
Whether to invert the selection. I.e., when True,
return all indices that do not equal (one of)
`compare_values`. (Default value = False)
Returns
-------
np.ndarray
Numpy array of indices that satisfy the test.
"""
compare_values = (
np.array([compare_values]) if type(compare_values) not in [list, dict, np.ndarray] else compare_values
)
return np.where(np.isin(self.particledata.data[variable_name], compare_values, invert=invert))[0]
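# Illustrative sketch: selecting the indices of all particles that are currently in
# an error state (i.e. neither Success nor Evaluate), mirroring _error_particles below.
#
#     idx = pset.data_indices("state", [StatusCode.Success, StatusCode.Evaluate], invert=True)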
@property
def _error_particles(self):
"""Get an iterator over all particles that are in an error state.
Returns
-------
iterator
ParticleDataIterator over error particles.
"""
error_indices = self.data_indices("state", [StatusCode.Success, StatusCode.Evaluate], invert=True)
return ParticleDataIterator(self.particledata, subset=error_indices)
@property
def _num_error_particles(self):
"""Get the number of particles that are in an error state.
Returns
-------
int
Number of error particles.
"""
return np.sum(np.isin(self.particledata.data["state"], [StatusCode.Success, StatusCode.Evaluate], invert=True))
def set_variable_write_status(self, var, write_status):
"""Method to set the write status of a Variable.
Parameters
----------
var :
Name of the variable (string)
write_status :
Write status of the variable (True, False or 'once')
"""
self.particledata.set_variable_write_status(var, write_status)
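# Illustrative sketch: write a hypothetical 'age' Variable only once per particle
# (valid write statuses are True, False or 'once').
#
#     pset.set_variable_write_status("age", "once")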
def execute(
self,
pyfunc=AdvectionRK4,
pyfunc_inter=None,
endtime=None,
runtime: float | timedelta | np.timedelta64 | None = None,
dt: float | timedelta | np.timedelta64 = 1.0,
output_file=None,
verbose_progress=True,
postIterationCallbacks=None,
callbackdt: float | timedelta | np.timedelta64 | None = None,
):
"""Execute a given kernel function over the particle set for multiple timesteps.
Optionally also provide sub-timestepping
for particle output.
Parameters
----------
pyfunc :
Kernel function to execute. This can be the name of a
defined Python function or a :class:`parcels.kernel.Kernel` object.
Kernels can be concatenated using the + operator (Default value = AdvectionRK4)
endtime :
End time for the timestepping loop.
It is either a datetime object or a positive double. (Default value = None)
runtime :
Length of the timestepping loop. Use instead of endtime.
It is either a timedelta object or a positive double. (Default value = None)
dt :
Timestep interval (in seconds) to be passed to the kernel.
It is either a timedelta object or a double.
Use a negative value for a backward-in-time simulation. (Default value = 1 second)
output_file :
:mod:`parcels.particlefile.ParticleFile` object for particle output (Default value = None)
verbose_progress : bool
Boolean for providing a progress bar for the kernel execution loop. (Default value = True)
postIterationCallbacks :
Optional, array of functions that are to be called after each iteration (post-process, non-Kernel) (Default value = None)
callbackdt :
Optional, used in conjunction with 'postIterationCallbacks': timestep interval at which to (at the latest) interrupt the running kernel and invoke the post-iteration callbacks from 'postIterationCallbacks' (Default value = None)
pyfunc_inter :
(Default value = None)
Notes
-----
``ParticleSet.execute()`` acts as the main entrypoint for simulations, and provides the simulation time-loop. This method encapsulates the logic controlling the switching between kernel execution, output file writing, reading in fields for new timesteps, adding new particles to the simulation domain, stopping the simulation, and executing custom functions (``postIterationCallbacks`` provided by the user).
"""
# check if particleset is empty. If so, return immediately
if len(self) == 0:
return
# check if pyfunc has changed since last generation. If so, regenerate
if self._kernel is None or (self._kernel.pyfunc is not pyfunc and self._kernel is not pyfunc):
# Generate and store Kernel
if isinstance(pyfunc, Kernel):
self._kernel = pyfunc
else:
self._kernel = self.Kernel(pyfunc)
if output_file:
output_file.metadata["parcels_kernels"] = self._kernel.name
# Set up the interaction kernel(s) if not set and given.
if self._interaction_kernel is None and pyfunc_inter is not None:
if isinstance(pyfunc_inter, InteractionKernel):
self._interaction_kernel = pyfunc_inter
else:
self._interaction_kernel = self.InteractionKernel(pyfunc_inter)
# Convert all time variables to seconds
if isinstance(endtime, timedelta):
raise TypeError("endtime must be either a datetime or a double")
if isinstance(endtime, datetime):
endtime = np.datetime64(endtime)
elif isinstance(endtime, cftime.datetime):
endtime = self.time_origin.reltime(endtime)
if isinstance(endtime, np.datetime64):
if self.time_origin.calendar is None:
raise NotImplementedError("If fieldset.time_origin is not a date, execution endtime must be a double")
endtime = self.time_origin.reltime(endtime)
if runtime is not None:
runtime = timedelta_to_float(runtime)
dt = timedelta_to_float(dt)
if abs(dt) <= 1e-6:
raise ValueError("Time step dt is too small")
if (dt * 1e6) % 1 != 0:
raise ValueError("Output interval should not have finer precision than 1e-6 s")
outputdt = timedelta_to_float(output_file.outputdt) if output_file else np.inf
if callbackdt is not None:
callbackdt = timedelta_to_float(callbackdt)
assert runtime is None or runtime >= 0, "runtime must be positive"
assert outputdt is None or outputdt >= 0, "outputdt must be positive"
if runtime is not None and endtime is not None:
raise RuntimeError("Only one of (endtime, runtime) can be specified")
mintime, maxtime = self.fieldset.gridset.dimrange("time_full")
default_release_time = mintime if dt >= 0 else maxtime
if np.any(np.isnan(self.particledata.data["time"])):
self.particledata.data["time"][np.isnan(self.particledata.data["time"])] = default_release_time
self.particledata.data["time_nextloop"][np.isnan(self.particledata.data["time_nextloop"])] = (
default_release_time
)
min_rt = np.min(self.particledata.data["time_nextloop"])
max_rt = np.max(self.particledata.data["time_nextloop"])