From 13d2e36b30e8fd180ab45b9995269995d306ca16 Mon Sep 17 00:00:00 2001 From: Vecko <36369090+VeckoTheGecko@users.noreply.github.com> Date: Mon, 23 Sep 2024 15:09:24 +0200 Subject: [PATCH 01/14] Update maintainer docs --- docs/community/maintainer.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/community/maintainer.md b/docs/community/maintainer.md index 5831f4e99..36b10bd80 100644 --- a/docs/community/maintainer.md +++ b/docs/community/maintainer.md @@ -20,7 +20,7 @@ ## Release checklist - Go to GitHub, draft new release. Enter name of version and "create new tag" if it doesn't already exist. Click "Generate Release Notes". Currate release notes as needed. Look at a previous version release to match the format (title, header, section organisation etc.) -- Go to [conda-forge/parcels-feedstock](https://github.com/conda-forge/parcels-feedstock), create new issue with `@conda-forge-admin, please update version`. This will prompt a build, otherwise there can be a delay in the build. +- Go to [conda-forge/parcels-feedstock](https://github.com/conda-forge/parcels-feedstock), create a new issue (select the "Bot Commands" issue from the menu) with title `@conda-forge-admin, please update version`. This will prompt a build, otherwise there can be a delay in the build. - Approve PR and merge on green - Update version in `CITATION.cff` file - Check "publish to PyPI" workflow succeeded From f174e369e8daef6ef521ceec61ce4729b78d8b84 Mon Sep 17 00:00:00 2001 From: Vecko <36369090+VeckoTheGecko@users.noreply.github.com> Date: Wed, 25 Sep 2024 15:44:52 +0200 Subject: [PATCH 02/14] update deprecation tests --- tests/test_deprecations.py | 247 ++++++++++++++++++++++++------------- 1 file changed, 158 insertions(+), 89 deletions(-) diff --git a/tests/test_deprecations.py b/tests/test_deprecations.py index 2704df25c..ddc1affa9 100644 --- a/tests/test_deprecations.py +++ b/tests/test_deprecations.py @@ -1,112 +1,181 @@ import inspect +from typing import Literal import pytest from parcels import Field, FieldSet from tests.utils import create_fieldset_unit_mesh -fieldset = create_fieldset_unit_mesh() -field = fieldset.U -private_field_attrs = [ - "_dataFiles", - "_loaded_time_indices", - "_creation_log", - "_data_chunks", - "_c_data_chunks", - "_chunk_set", +class Action: + """Utility class to help manage, document, and test deprecations.""" + + def __init__(self, class_: Literal["Field", "FieldSet"], name: str, type_: Literal["read_only", "make_private"]): + if name.startswith("_"): + raise ValueError("name should not start with an underscore") + + self.class_ = class_ + self._raw_name = name + self.type_ = type_ + + if type_ == "read_only" and self.is_method: + raise ValueError("read_only attributes should not be methods") + + @property + def public_name(self): + return self._raw_name.strip("()") + + @property + def private_name(self): + if self.type_ == "make_private": + return f"_{self.public_name}" + return None + + @property + def is_method(self): + if self._raw_name.endswith("()"): + return True + return False + + def __str__(self): + return f"{self.class_}.{self.public_name}" + + def __repr__(self): + return f"Action(class_={self.class_!r}, name={self._raw_name!r}, type_={self.type_!r})" + + +def test_testing_action_class(): + """Testing the Action class used for testing.""" + action = Action("MyClass", "my_attribute", "make_private") + assert not action.is_method + assert action.public_name == "my_attribute" + assert action.private_name == "_my_attribute" + assert action.class_ 
== "MyClass" + assert action.type_ == "make_private" + + action = Action("Field", "my_attribute", "read_only") + assert not action.is_method + assert action.public_name == "my_attribute" + assert action.private_name is None + assert not action.is_method + + action = Action("Field", "my_method()", "make_private") + assert action.is_method + assert action.public_name == "my_method" + assert action.private_name == "_my_method" + + with pytest.raises(ValueError): # Can't have underscore in name + Action("Field", "_my_attribute", "make_private") + + with pytest.raises(ValueError): # Can't have read-only method + Action("Field", "my_method()", "read_only") + + +# fmt: off +actions = [ + Action("Field", "dataFiles", "make_private" ), + Action("Field", "netcdf_engine", "read_only" ), + Action("Field", "loaded_time_indices", "make_private" ), + Action("Field", "creation_log", "make_private" ), + Action("Field", "data_chunks", "make_private" ), + Action("Field", "c_data_chunks", "make_private" ), + Action("Field", "chunk_set", "make_private" ), + Action("Field", "cell_edge_sizes", "read_only" ), + Action("Field", "get_dim_filenames()", "make_private" ), + Action("Field", "collect_timeslices()", "make_private" ), + Action("Field", "reshape()", "make_private" ), + Action("Field", "calc_cell_edge_sizes()", "make_private" ), + Action("Field", "search_indices_vertical_z()", "make_private" ), + Action("Field", "search_indices_vertical_s()", "make_private" ), + Action("Field", "reconnect_bnd_indices()", "make_private" ), + Action("Field", "search_indices_rectilinear()", "make_private" ), + Action("Field", "search_indices_curvilinear()", "make_private" ), + Action("Field", "search_indices()", "make_private" ), + Action("Field", "interpolator2D()", "make_private" ), + Action("Field", "interpolator3D()", "make_private" ), + Action("Field", "spatial_interpolation()", "make_private" ), + Action("Field", "time_index()", "make_private" ), + Action("Field", "ccode_eval()", "make_private" ), + Action("Field", "ccode_convert()", "make_private" ), + Action("Field", "get_block_id()", "make_private" ), + Action("Field", "get_block()", "make_private" ), + Action("Field", "chunk_setup()", "make_private" ), + Action("Field", "chunk_data()", "make_private" ), + Action("Field", "rescale_and_set_minmax()", "make_private" ), + Action("Field", "data_concatenate()", "make_private" ), + Action("FieldSet", "completed", "make_private" ), + Action("FieldSet", "particlefile", "read_only" ), + Action("FieldSet", "add_UVfield()", "make_private" ), + Action("FieldSet", "check_complete()", "make_private" ), + Action("FieldSet", "parse_wildcards()", "make_private" ), ] +# fmt: on +# Create test data dictionary +fieldset = create_fieldset_unit_mesh() +field = fieldset.U -class FieldPrivate: - attributes = [ - "_dataFiles", - "_loaded_time_indices", - "_creation_log", - "_data_chunks", - "_c_data_chunks", - "_chunk_set", - ] - methods = [ - "_get_dim_filenames", - "_collect_timeslices", - "_reshape", - "_calc_cell_edge_sizes", - "_search_indices_vertical_z", - "_search_indices_vertical_s", - "_reconnect_bnd_indices", - "_search_indices_rectilinear", - "_search_indices_curvilinear", - "_search_indices", - "_interpolator2D", - "_interpolator3D", - "_ccode_eval", - "_ccode_convert", - "_get_block_id", - "_get_block", - "_chunk_setup", - "_chunk_data", - "_rescale_and_set_minmax", - "_data_concatenate", - "_spatial_interpolation", - "_time_index", - ] - - -class FieldSetPrivate: - attributes = [ - "_completed", - ] - methods = [ - 
"_add_UVfield", - "_parse_wildcards", - "_check_complete", - ] - - -def assert_private_public_attribute_equiv(obj, private_attribute: str): - assert private_attribute.startswith("_") - attribute = private_attribute.lstrip("_") +test_data = { + "Field": { + "class": Field, + "object": field, + }, + "FieldSet": { + "class": FieldSet, + "object": fieldset, + }, +} + + +@pytest.mark.parametrize( + "private_attribute_action", + filter(lambda action: not action.is_method and action.type_ == "make_private", actions), + ids=str, +) +def test_private_attrib(private_attribute_action: Action): + """Checks that the public attribute is equivalent to the private attribute.""" + action = private_attribute_action + + obj = test_data[action.class_]["object"] with pytest.raises(DeprecationWarning): - assert hasattr(obj, attribute) - assert hasattr(obj, private_attribute) - assert getattr(obj, attribute) is getattr(obj, private_attribute) + assert hasattr(obj, action.public_name) + assert hasattr(obj, action.private_name) + assert getattr(obj, action.public_name) is getattr(obj, action.private_name) -def assert_public_method_calls_private(type_, private_method): +@pytest.mark.parametrize( + "private_method_action", + filter(lambda action: action.is_method and action.type_ == "make_private", actions), + ids=str, +) +def test_private_method(private_method_action: Action): """Looks at the source code to ensure that `public_method` calls `private_method`. Looks for the string `.{method_name}(` in the source code of `public_method`. """ - assert private_method.startswith("_") - public_method_str = private_method.lstrip("_") - private_method_str = private_method + action = private_method_action - public_method = getattr(type_, public_method_str) - private_method = getattr(type_, private_method_str) + class_ = test_data[action.class_]["class"] + + public_method = getattr(class_, action.public_name) + private_method = getattr(class_, action.private_name) assert callable(public_method) assert callable(private_method) - - assert f".{private_method_str}(" in inspect.getsource(public_method) - - -@pytest.mark.parametrize("private_attribute", FieldPrivate.attributes) -def test_private_attribute_field(private_attribute): - assert_private_public_attribute_equiv(field, private_attribute) - - -@pytest.mark.parametrize("private_attribute", FieldSetPrivate.attributes) -def test_private_attribute_fieldset(private_attribute): - assert_private_public_attribute_equiv(fieldset, private_attribute) - - -@pytest.mark.parametrize("private_method", FieldPrivate.methods) -def test_private_method_field(private_method): - assert_public_method_calls_private(Field, private_method) - - -@pytest.mark.parametrize("private_method", FieldSetPrivate.methods) -def test_private_method_fieldset(private_method): - assert_public_method_calls_private(FieldSet, private_method) + assert f".{action.private_name}(" in inspect.getsource(public_method) + + +@pytest.mark.parametrize( + "read_only_attribute_action", + filter(lambda action: not action.is_method and action.type_ == "read_only", actions), + ids=str, +) +def test_read_only_attr(read_only_attribute_action: Action): + """Tries to store a variable in the read-only attribute.""" + action = read_only_attribute_action + obj = test_data[action.class_]["object"] + + assert hasattr(obj, action.public_name) + with pytest.raises(AttributeError): + setattr(obj, action.public_name, None) From 27a0537272c4a4319fc3036bca21c327ab68e4d6 Mon Sep 17 00:00:00 2001 From: Vecko 
<36369090+VeckoTheGecko@users.noreply.github.com> Date: Fri, 27 Sep 2024 11:59:17 +0200 Subject: [PATCH 03/14] Privatise methods in particleset.py and grid.py, and attribteus in particleset.py --- parcels/fieldset.py | 4 +- parcels/grid.py | 23 ++- parcels/interaction/interactionkernel.py | 10 +- parcels/kernel.py | 6 +- parcels/particlefile.py | 2 +- parcels/particleset.py | 177 ++++++++++++++++------- tests/test_deprecations.py | 151 ++++++++++++------- tests/test_kernel_execution.py | 6 +- 8 files changed, 260 insertions(+), 119 deletions(-) diff --git a/parcels/fieldset.py b/parcels/fieldset.py index db99185b5..b7dc1bd86 100644 --- a/parcels/fieldset.py +++ b/parcels/fieldset.py @@ -307,7 +307,7 @@ def check_velocityfields(U, V, W): check_velocityfields(self.U, self.V, W) for g in self.gridset.grids: - g.check_zonal_periodic() + g._check_zonal_periodic() if len(g.time) == 1: continue assert isinstance( @@ -1459,7 +1459,7 @@ def computeTimeChunk(self, time=0.0, dt=1): if isinstance(f, (VectorField, NestedField)) or not f.grid.defer_load: continue if f.grid.update_status == "not_updated": - nextTime_loc = f.grid.computeTimeChunk(f, time, signdt) + nextTime_loc = f.grid._computeTimeChunk(f, time, signdt) if time == nextTime_loc and signdt != 0: raise TimeExtrapolationError(time, field=f, msg="In fset.computeTimeChunk") nextTime = min(nextTime, nextTime_loc) if signdt >= 0 else max(nextTime, nextTime_loc) diff --git a/parcels/grid.py b/parcels/grid.py index 955abd9f4..95a3320ae 100644 --- a/parcels/grid.py +++ b/parcels/grid.py @@ -7,6 +7,7 @@ import numpy.typing as npt from parcels._typing import Mesh, UpdateStatus, assert_valid_mesh +from parcels.tools._helpers import deprecated_made_private from parcels.tools.converters import TimeConverter from parcels.tools.warnings import FieldSetWarning @@ -203,7 +204,11 @@ def lon_particle_to_target(self, lon): return self.lon_remapping.particle_to_target(lon) return lon - def check_zonal_periodic(self): + @deprecated_made_private # TODO: Remove 6 months after v3.1.0 + def check_zonal_periodic(self, *args, **kwargs): + return self._check_zonal_periodic(*args, **kwargs) + + def _check_zonal_periodic(self): if self.zonal_periodic or self.mesh == "flat" or self.lon.size == 1: return dx = (self.lon[1:] - self.lon[:-1]) if len(self.lon.shape) == 1 else self.lon[0, 1:] - self.lon[0, :-1] @@ -211,7 +216,11 @@ def check_zonal_periodic(self): dx = np.where(dx > 180, dx - 360, dx) self.zonal_periodic = sum(dx) > 359.9 - def add_Sdepth_periodic_halo(self, zonal, meridional, halosize): + @deprecated_made_private # TODO: Remove 6 months after v3.1.0 + def add_Sdepth_periodic_halo(self, *args, **kwargs): + return self._add_Sdepth_periodic_halo(*args, **kwargs) + + def _add_Sdepth_periodic_halo(self, zonal, meridional, halosize): if zonal: if len(self.depth.shape) == 3: self.depth = np.concatenate( @@ -239,7 +248,11 @@ def add_Sdepth_periodic_halo(self, zonal, meridional, halosize): ) assert self.depth.shape[2] == self.ydim, "Third dim must be y." 
- def computeTimeChunk(self, f, time, signdt): + @deprecated_made_private # TODO: Remove 6 months after v3.1.0 + def computeTimeChunk(self, *args, **kwargs): + return self._computeTimeChunk(*args, **kwargs) + + def _computeTimeChunk(self, f, time, signdt): nextTime_loc = np.inf if signdt >= 0 else -np.inf periods = self.periods.value if isinstance(self.periods, c_int) else self.periods prev_time_indices = self.time @@ -406,7 +419,7 @@ def add_periodic_halo(self, zonal: bool, meridional: bool, halosize: int = 5): [np.nanmin(self.lon), np.nanmax(self.lon), np.nanmin(self.lat), np.nanmax(self.lat)], dtype=np.float32 ) if isinstance(self, RectilinearSGrid): - self.add_Sdepth_periodic_halo(zonal, meridional, halosize) + self._add_Sdepth_periodic_halo(zonal, meridional, halosize) class RectilinearZGrid(RectilinearGrid): @@ -607,7 +620,7 @@ def add_periodic_halo(self, zonal, meridional, halosize=5): self.ydim = self.lat.shape[0] self.meridional_halo = halosize if isinstance(self, CurvilinearSGrid): - self.add_Sdepth_periodic_halo(zonal, meridional, halosize) + self._add_Sdepth_periodic_halo(zonal, meridional, halosize) class CurvilinearZGrid(CurvilinearGrid): diff --git a/parcels/interaction/interactionkernel.py b/parcels/interaction/interactionkernel.py index 7df5e5bed..07c76e6a8 100644 --- a/parcels/interaction/interactionkernel.py +++ b/parcels/interaction/interactionkernel.py @@ -177,7 +177,7 @@ def execute_python(self, pset, endtime, dt): reset_particle_idx = [] for pyfunc in self._pyfunc: - pset.compute_neighbor_tree(endtime, dt) + pset._compute_neighbor_tree(endtime, dt) active_idx = pset._active_particle_idx mutator = defaultdict(lambda: []) @@ -192,7 +192,7 @@ def execute_python(self, pset, endtime, dt): p.dt = endtime - p.time reset_particle_idx.append(particle_idx) - neighbors = pset.neighbors_by_index(particle_idx) + neighbors = pset._neighbors_by_index(particle_idx) try: res = pyfunc(p, pset.fieldset, p.time, neighbors, mutator) except Exception as e: @@ -252,10 +252,10 @@ def execute(self, pset, endtime, dt, output_file=None): self.remove_deleted(pset) # Generalizable version! # Identify particles that threw errors - n_error = pset.num_error_particles + n_error = pset._num_error_particles while n_error > 0: - error_pset = pset.error_particles + error_pset = pset._error_particles # Check for StatusCodes for p in error_pset: if p.state == StatusCode.StopExecution: @@ -281,4 +281,4 @@ def execute(self, pset, endtime, dt, output_file=None): else: self.execute_python(pset, endtime, dt) - n_error = pset.num_error_particles + n_error = pset._num_error_particles diff --git a/parcels/kernel.py b/parcels/kernel.py index 442e6e4d7..08eb1d6fb 100644 --- a/parcels/kernel.py +++ b/parcels/kernel.py @@ -655,10 +655,10 @@ def execute(self, pset, endtime, dt): self.remove_deleted(pset) # Identify particles that threw errors - n_error = pset.num_error_particles + n_error = pset._num_error_particles while n_error > 0: - error_pset = pset.error_particles + error_pset = pset._error_particles # Check for StatusCodes for p in error_pset: if p.state == StatusCode.StopExecution: @@ -694,7 +694,7 @@ def execute(self, pset, endtime, dt): else: self.execute_python(pset, endtime, dt) - n_error = pset.num_error_particles + n_error = pset._num_error_particles def evaluate_particle(self, p, endtime): """Execute the kernel evaluation of for an individual particle. 
diff --git a/parcels/particlefile.py b/parcels/particlefile.py index 24bb178a8..04e9dda33 100644 --- a/parcels/particlefile.py +++ b/parcels/particlefile.py @@ -235,7 +235,7 @@ def write(self, pset, time, indices=None): if self.create_new_zarrfile: if self.chunks is None: self.chunks = (len(ids), 1) - if pset.repeatpclass is not None and self.chunks[0] < 1e4: + if pset._repeatpclass is not None and self.chunks[0] < 1e4: warnings.warn( f"ParticleFile chunks are set to {self.chunks}, but this may lead to " f"a significant slowdown in Parcels when many calls to repeatdt. " diff --git a/parcels/particleset.py b/parcels/particleset.py index a7dc0602d..fbba2c97f 100644 --- a/parcels/particleset.py +++ b/parcels/particleset.py @@ -26,6 +26,7 @@ from parcels.particle import JITParticle, Variable from parcels.particledata import ParticleData, ParticleDataIterator from parcels.particlefile import ParticleFile +from parcels.tools._helpers import deprecated_made_private from parcels.tools.converters import _get_cftime_calendars, convert_to_flat_array from parcels.tools.global_statics import get_package_dir from parcels.tools.loggers import logger @@ -98,14 +99,14 @@ def __init__( **kwargs, ): self.particledata = None - self.repeat_starttime = None - self.repeatlon = None - self.repeatlat = None - self.repeatdepth = None - self.repeatpclass = None - self.repeatkwargs = None - self.kernel = None - self.interaction_kernel = None + self._repeat_starttime = None + self._repeatlon = None + self._repeatlat = None + self._repeatdepth = None + self._repeatpclass = None + self._repeatkwargs = None + self._kernel = None + self._interaction_kernel = None self.fieldset = fieldset self.fieldset._check_complete() @@ -192,9 +193,9 @@ def ArrayClass_init(self, *args, **kwargs): raise "Repeatdt should be > 0" if time[0] and not np.allclose(time, time[0]): raise "All Particle.time should be the same when repeatdt is not None" - self.repeatpclass = pclass - self.repeatkwargs = kwargs - self.repeatkwargs.pop("partition_function", None) + self._repeatpclass = pclass + self._repeatkwargs = kwargs + self._repeatkwargs.pop("partition_function", None) ngrids = fieldset.gridset.size @@ -257,27 +258,72 @@ def ArrayClass_init(self, *args, **kwargs): if self.repeatdt: if len(time) > 0 and time[0] is None: - self.repeat_starttime = time[0] + self._repeat_starttime = time[0] else: if self.particledata.data["time"][0] and not np.allclose( self.particledata.data["time"], self.particledata.data["time"][0] ): raise ValueError("All Particle.time should be the same when repeatdt is not None") - self.repeat_starttime = copy(self.particledata.data["time"][0]) - self.repeatlon = copy(self.particledata.data["lon"]) - self.repeatlat = copy(self.particledata.data["lat"]) - self.repeatdepth = copy(self.particledata.data["depth"]) + self._repeat_starttime = copy(self.particledata.data["time"][0]) + self._repeatlon = copy(self.particledata.data["lon"]) + self._repeatlat = copy(self.particledata.data["lat"]) + self._repeatdepth = copy(self.particledata.data["depth"]) for kwvar in kwargs: if kwvar not in ["partition_function"]: - self.repeatkwargs[kwvar] = copy(self.particledata.data[kwvar]) + self._repeatkwargs[kwvar] = copy(self.particledata.data[kwvar]) if self.repeatdt: if MPI and self.particledata.pu_indicators is not None: mpi_comm = MPI.COMM_WORLD mpi_rank = mpi_comm.Get_rank() - self.repeatpid = pid_orig[self.particledata.pu_indicators == mpi_rank] + self._repeatpid = pid_orig[self.particledata.pu_indicators == mpi_rank] - self.kernel = None 
+ self._kernel = None + + @property + @deprecated_made_private # TODO: Remove 6 months after v3.1.0 + def repeat_starttime(self): + return self._repeat_starttime + + @property + @deprecated_made_private # TODO: Remove 6 months after v3.1.0 + def repeatlon(self): + return self._repeatlon + + @property + @deprecated_made_private # TODO: Remove 6 months after v3.1.0 + def repeatlat(self): + return self._repeatlat + + @property + @deprecated_made_private # TODO: Remove 6 months after v3.1.0 + def repeatdepth(self): + return self._repeatdepth + + @property + @deprecated_made_private # TODO: Remove 6 months after v3.1.0 + def repeatpclass(self): + return self._repeatpclass + + @property + @deprecated_made_private # TODO: Remove 6 months after v3.1.0 + def repeatkwargs(self): + return self._repeatkwargs + + @property + @deprecated_made_private # TODO: Remove 6 months after v3.1.0 + def kernel(self): + return self._kernel + + @property + @deprecated_made_private # TODO: Remove 6 months after v3.1.0 + def interaction_kernel(self): + return self._interaction_kernel + + @property + @deprecated_made_private # TODO: Remove 6 months after v3.1.0 + def repeatpid(self): + return self._repeatpid def __del__(self): if self.particledata is not None and isinstance(self.particledata, ParticleData): @@ -402,15 +448,23 @@ def remove_booleanvector(self, indices): self._dirty_neighbor = True self.remove_indices(np.where(indices)[0]) - def active_particles_mask(self, time, dt): + @deprecated_made_private # TODO: Remove 6 months after v3.1.0 + def active_particles_mask(self, *args, **kwargs): + return self._active_particles_mask(*args, **kwargs) + + def _active_particles_mask(self, time, dt): active_indices = (time - self.particledata.data["time"]) / dt >= 0 non_err_indices = np.isin(self.particledata.data["state"], [StatusCode.Success, StatusCode.Evaluate]) active_indices = np.logical_and(active_indices, non_err_indices) self._active_particle_idx = np.where(active_indices)[0] return active_indices - def compute_neighbor_tree(self, time, dt): - active_mask = self.active_particles_mask(time, dt) + @deprecated_made_private # TODO: Remove 6 months after v3.1.0 + def compute_neighbor_tree(self, *args, **kwargs): + return self._compute_neighbor_tree(*args, **kwargs) + + def _compute_neighbor_tree(self, time, dt): + active_mask = self._active_particles_mask(time, dt) self._values = np.vstack( ( @@ -425,7 +479,11 @@ def compute_neighbor_tree(self, time, dt): else: self._neighbor_tree.update_values(self._values, new_active_mask=active_mask) - def neighbors_by_index(self, particle_idx): + @deprecated_made_private # TODO: Remove 6 months after v3.1.0 + def neighbors_by_index(self, *args, **kwargs): + return self._neighbors_by_index(*args, **kwargs) + + def _neighbors_by_index(self, particle_idx): neighbor_idx, distances = self._neighbor_tree.find_neighbors_by_idx(particle_idx) neighbor_idx = self._active_particle_idx[neighbor_idx] mask = neighbor_idx != particle_idx @@ -435,7 +493,11 @@ def neighbors_by_index(self, particle_idx): self.particledata.data["horiz_dist"][neighbor_idx] = distances[1, mask] return ParticleDataIterator(self.particledata, subset=neighbor_idx) - def neighbors_by_coor(self, coor): + @deprecated_made_private # TODO: Remove 6 months after v3.1.0 + def neighbors_by_coor(self, *args, **kwargs): + return self._neighbors_by_coor(*args, **kwargs) + + def _neighbors_by_coor(self, coor): neighbor_idx = self._neighbor_tree.find_neighbors_by_coor(coor) neighbor_ids = self.particledata.data["id"][neighbor_idx] 
return neighbor_ids @@ -567,7 +629,12 @@ def from_line( ) @classmethod - def monte_carlo_sample(cls, start_field, size, mode="monte_carlo"): + @deprecated_made_private # TODO: Remove 6 months after v3.1.0 + def monte_carlo_sample(self, *args, **kwargs): + return self._monte_carlo_sample(*args, **kwargs) + + @classmethod + def _monte_carlo_sample(cls, start_field, size, mode="monte_carlo"): """Converts a starting field into a monte-carlo sample of lons and lats. Parameters @@ -666,7 +733,7 @@ def from_field( It is either np.float32 or np.float64. Default is np.float32 if fieldset.U.interp_method is 'linear' and np.float64 if the interpolation method is 'cgrid_velocity' """ - lon, lat = cls.monte_carlo_sample(start_field, size, mode) + lon, lat = cls._monte_carlo_sample(start_field, size, mode) return cls( fieldset=fieldset, @@ -862,7 +929,12 @@ def data_indices(self, variable_name, compare_values, invert=False): return np.where(np.isin(self.particledata.data[variable_name], compare_values, invert=invert))[0] @property + @deprecated_made_private # TODO: Remove 6 months after v3.1.0 def error_particles(self): + return self._error_particles + + @property + def _error_particles(self): """Get an iterator over all particles that are in an error state. Returns @@ -874,7 +946,12 @@ def error_particles(self): return ParticleDataIterator(self.particledata, subset=error_indices) @property + @deprecated_made_private # TODO: Remove 6 months after v3.1.0 def num_error_particles(self): + return self._num_error_particles + + @property + def _num_error_particles(self): """Get the number of particles that are in an error state. Returns @@ -952,29 +1029,29 @@ def execute( return # check if pyfunc has changed since last compile. If so, recompile - if self.kernel is None or (self.kernel.pyfunc is not pyfunc and self.kernel is not pyfunc): + if self._kernel is None or (self._kernel.pyfunc is not pyfunc and self._kernel is not pyfunc): # Generate and store Kernel if isinstance(pyfunc, Kernel): - self.kernel = pyfunc + self._kernel = pyfunc else: - self.kernel = self.Kernel(pyfunc, delete_cfiles=delete_cfiles) + self._kernel = self.Kernel(pyfunc, delete_cfiles=delete_cfiles) # Prepare JIT kernel execution if self.particledata.ptype.uses_jit: - self.kernel.remove_lib() + self._kernel.remove_lib() cppargs = ["-DDOUBLE_COORD_VARIABLES"] if self.particledata.lonlatdepth_dtype else None - self.kernel.compile( + self._kernel.compile( compiler=GNUCompiler(cppargs=cppargs, incdirs=[os.path.join(get_package_dir(), "include"), "."]) ) - self.kernel.load_lib() + self._kernel.load_lib() if output_file: - output_file.add_metadata("parcels_kernels", self.kernel.name) + output_file.add_metadata("parcels_kernels", self._kernel.name) # Set up the interaction kernel(s) if not set and given. 
- if self.interaction_kernel is None and pyfunc_inter is not None: + if self._interaction_kernel is None and pyfunc_inter is not None: if isinstance(pyfunc_inter, InteractionKernel): - self.interaction_kernel = pyfunc_inter + self._interaction_kernel = pyfunc_inter else: - self.interaction_kernel = self.InteractionKernel(pyfunc_inter, delete_cfiles=delete_cfiles) + self._interaction_kernel = self.InteractionKernel(pyfunc_inter, delete_cfiles=delete_cfiles) # Convert all time variables to seconds if isinstance(endtime, timedelta): @@ -1020,8 +1097,8 @@ def execute( # Derive starttime and endtime from arguments or fieldset defaults starttime = min_rt if dt >= 0 else max_rt - if self.repeatdt is not None and self.repeat_starttime is None: - self.repeat_starttime = starttime + if self.repeatdt is not None and self._repeat_starttime is None: + self._repeat_starttime = starttime if runtime is not None: endtime = starttime + runtime * np.sign(dt) elif endtime is None: @@ -1051,8 +1128,8 @@ def execute( # Set up variables for first iteration if self.repeatdt: - next_prelease = self.repeat_starttime + ( - abs(starttime - self.repeat_starttime) // self.repeatdt + 1 + next_prelease = self._repeat_starttime + ( + abs(starttime - self._repeat_starttime) // self.repeatdt + 1 ) * self.repeatdt * np.sign(dt) else: next_prelease = np.inf if dt > 0 else -np.inf @@ -1077,8 +1154,8 @@ def execute( next_time = max(next_prelease, next_input, next_output, next_callback, endtime) # If we don't perform interaction, only execute the normal kernel efficiently. - if self.interaction_kernel is None: - res = self.kernel.execute(self, endtime=next_time, dt=dt) + if self._interaction_kernel is None: + res = self._kernel.execute(self, endtime=next_time, dt=dt) if res == StatusCode.StopAllExecution: return StatusCode.StopAllExecution # Interaction: interleave the interaction and non-interaction kernel for each time step. 
@@ -1090,8 +1167,8 @@ def execute( cur_end_time = min(cur_time + dt, next_time) else: cur_end_time = max(cur_time + dt, next_time) - self.kernel.execute(self, endtime=cur_end_time, dt=dt) - self.interaction_kernel.execute(self, endtime=cur_end_time, dt=dt) + self._kernel.execute(self, endtime=cur_end_time, dt=dt) + self._interaction_kernel.execute(self, endtime=cur_end_time, dt=dt) cur_time += dt # End of interaction specific code time = next_time @@ -1127,14 +1204,14 @@ def execute( pset_new = self.__class__( fieldset=self.fieldset, time=time, - lon=self.repeatlon, - lat=self.repeatlat, - depth=self.repeatdepth, - pclass=self.repeatpclass, + lon=self._repeatlon, + lat=self._repeatlat, + depth=self._repeatdepth, + pclass=self._repeatpclass, lonlatdepth_dtype=self.particledata.lonlatdepth_dtype, partition_function=False, - pid_orig=self.repeatpid, - **self.repeatkwargs, + pid_orig=self._repeatpid, + **self._repeatkwargs, ) for p in pset_new: p.dt = dt diff --git a/tests/test_deprecations.py b/tests/test_deprecations.py index ddc1affa9..c8ccc6c05 100644 --- a/tests/test_deprecations.py +++ b/tests/test_deprecations.py @@ -1,9 +1,10 @@ import inspect from typing import Literal +import numpy as np import pytest -from parcels import Field, FieldSet +from parcels import Field, FieldSet, Grid, JITParticle, ParticleSet, RectilinearZGrid from tests.utils import create_fieldset_unit_mesh @@ -73,58 +74,108 @@ def test_testing_action_class(): # fmt: off actions = [ - Action("Field", "dataFiles", "make_private" ), - Action("Field", "netcdf_engine", "read_only" ), - Action("Field", "loaded_time_indices", "make_private" ), - Action("Field", "creation_log", "make_private" ), - Action("Field", "data_chunks", "make_private" ), - Action("Field", "c_data_chunks", "make_private" ), - Action("Field", "chunk_set", "make_private" ), - Action("Field", "cell_edge_sizes", "read_only" ), - Action("Field", "get_dim_filenames()", "make_private" ), - Action("Field", "collect_timeslices()", "make_private" ), - Action("Field", "reshape()", "make_private" ), - Action("Field", "calc_cell_edge_sizes()", "make_private" ), - Action("Field", "search_indices_vertical_z()", "make_private" ), - Action("Field", "search_indices_vertical_s()", "make_private" ), - Action("Field", "reconnect_bnd_indices()", "make_private" ), - Action("Field", "search_indices_rectilinear()", "make_private" ), - Action("Field", "search_indices_curvilinear()", "make_private" ), - Action("Field", "search_indices()", "make_private" ), - Action("Field", "interpolator2D()", "make_private" ), - Action("Field", "interpolator3D()", "make_private" ), - Action("Field", "spatial_interpolation()", "make_private" ), - Action("Field", "time_index()", "make_private" ), - Action("Field", "ccode_eval()", "make_private" ), - Action("Field", "ccode_convert()", "make_private" ), - Action("Field", "get_block_id()", "make_private" ), - Action("Field", "get_block()", "make_private" ), - Action("Field", "chunk_setup()", "make_private" ), - Action("Field", "chunk_data()", "make_private" ), - Action("Field", "rescale_and_set_minmax()", "make_private" ), - Action("Field", "data_concatenate()", "make_private" ), - Action("FieldSet", "completed", "make_private" ), - Action("FieldSet", "particlefile", "read_only" ), - Action("FieldSet", "add_UVfield()", "make_private" ), - Action("FieldSet", "check_complete()", "make_private" ), - Action("FieldSet", "parse_wildcards()", "make_private" ), + # 1709 + Action("Field", "dataFiles", "make_private" ), + Action("Field", "netcdf_engine", 
"read_only" ), + Action("Field", "loaded_time_indices", "make_private" ), + Action("Field", "creation_log", "make_private" ), + Action("Field", "data_chunks", "make_private" ), + Action("Field", "c_data_chunks", "make_private" ), + Action("Field", "chunk_set", "make_private" ), + Action("Field", "cell_edge_sizes", "read_only" ), + Action("Field", "get_dim_filenames()", "make_private" ), + Action("Field", "collect_timeslices()", "make_private" ), + Action("Field", "reshape()", "make_private" ), + Action("Field", "calc_cell_edge_sizes()", "make_private" ), + Action("Field", "search_indices_vertical_z()", "make_private" ), + Action("Field", "search_indices_vertical_s()", "make_private" ), + Action("Field", "reconnect_bnd_indices()", "make_private" ), + Action("Field", "search_indices_rectilinear()", "make_private" ), + Action("Field", "search_indices_curvilinear()", "make_private" ), + Action("Field", "search_indices()", "make_private" ), + Action("Field", "interpolator2D()", "make_private" ), + Action("Field", "interpolator3D()", "make_private" ), + Action("Field", "spatial_interpolation()", "make_private" ), + Action("Field", "time_index()", "make_private" ), + Action("Field", "ccode_eval()", "make_private" ), + Action("Field", "ccode_convert()", "make_private" ), + Action("Field", "get_block_id()", "make_private" ), + Action("Field", "get_block()", "make_private" ), + Action("Field", "chunk_setup()", "make_private" ), + Action("Field", "chunk_data()", "make_private" ), + Action("Field", "rescale_and_set_minmax()", "make_private" ), + Action("Field", "data_concatenate()", "make_private" ), + Action("FieldSet", "completed", "make_private" ), + Action("FieldSet", "particlefile", "read_only" ), + Action("FieldSet", "add_UVfield()", "make_private" ), + Action("FieldSet", "check_complete()", "make_private" ), + Action("FieldSet", "parse_wildcards()", "make_private" ), + + # 1713 + Action("ParticleSet", "active_particles_mask()", "make_private" ), + Action("ParticleSet", "compute_neighbor_tree()", "make_private" ), + Action("ParticleSet", "neighbors_by_index()", "make_private" ), + Action("ParticleSet", "neighbors_by_coor()", "make_private" ), + Action("ParticleSet", "monte_carlo_sample()", "make_private" ), + Action("Grid", "check_zonal_periodic()", "make_private" ), + Action("Grid", "add_Sdepth_periodic_halo()", "make_private" ), + Action("Grid", "computeTimeChunk()", "make_private" ), + Action("ParticleSet", "repeat_starttime", "make_private" ), + Action("ParticleSet", "repeatlon", "make_private" ), + Action("ParticleSet", "repeatlat", "make_private" ), + Action("ParticleSet", "repeatdepth", "make_private" ), + Action("ParticleSet", "repeatpclass", "make_private" ), + Action("ParticleSet", "repeatkwargs", "make_private" ), + Action("ParticleSet", "kernel", "make_private" ), + Action("ParticleSet", "interaction_kernel", "make_private" ), + Action("ParticleSet", "repeatpid", "make_private" ), + Action("ParticleSet", "error_particles", "make_private" ), + Action("ParticleSet", "num_error_particles", "make_private" ), + + ] # fmt: on -# Create test data dictionary -fieldset = create_fieldset_unit_mesh() -field = fieldset.U - -test_data = { - "Field": { - "class": Field, - "object": field, - }, - "FieldSet": { - "class": FieldSet, - "object": fieldset, - }, -} + +def create_test_data(): + """Creates and returns the test data dictionary.""" + fieldset = create_fieldset_unit_mesh() + field = fieldset.U + + npart = 100 + pset = ParticleSet( + fieldset, + lon=np.linspace(0, 1, npart, dtype=np.float32), 
+ lat=np.linspace(1, 0, npart, dtype=np.float32), + pclass=JITParticle, + ) + + lon_g0 = np.linspace(0, 1000, 11, dtype=np.float32) + lat_g0 = np.linspace(0, 1000, 11, dtype=np.float32) + time_g0 = np.linspace(0, 1000, 2, dtype=np.float64) + grid = RectilinearZGrid(lon_g0, lat_g0, time=time_g0) + + return { + "Field": { + "class": Field, + "object": field, + }, + "FieldSet": { + "class": FieldSet, + "object": fieldset, + }, + "ParticleSet": { + "class": ParticleSet, + "object": pset, + }, + "Grid": { + "class": Grid, + "object": grid, + }, + } + + +test_data = create_test_data() @pytest.mark.parametrize( diff --git a/tests/test_kernel_execution.py b/tests/test_kernel_execution.py index 684fb66e3..36cd4bc88 100644 --- a/tests/test_kernel_execution.py +++ b/tests/test_kernel_execution.py @@ -322,9 +322,9 @@ def MoveWest(particle, fieldset, time): def test_execution_keep_cfiles_and_nocompilation_warnings(fieldset_unit_mesh, delete_cfiles): pset = ParticleSet(fieldset_unit_mesh, pclass=JITParticle, lon=[0.0], lat=[0.0]) pset.execute(AdvectionRK4, delete_cfiles=delete_cfiles, endtime=1.0, dt=1.0) - cfile = pset.kernel.src_file - logfile = pset.kernel.log_file - del pset.kernel + cfile = pset._kernel.src_file + logfile = pset._kernel.log_file + del pset._kernel if delete_cfiles: assert not os.path.exists(cfile) else: From b6e86bed833af3912f6547ee5415a63c7814435a Mon Sep 17 00:00:00 2001 From: Vecko <36369090+VeckoTheGecko@users.noreply.github.com> Date: Fri, 27 Sep 2024 13:15:58 +0200 Subject: [PATCH 04/14] skip mpi if not installed --- tests/test_mpirun.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_mpirun.py b/tests/test_mpirun.py index 0188c56c1..690e719d8 100644 --- a/tests/test_mpirun.py +++ b/tests/test_mpirun.py @@ -1,15 +1,15 @@ import os -import sys from glob import glob import numpy as np import pytest import xarray as xr +from parcels._compat import MPI from tests.utils import PROJECT_ROOT -@pytest.mark.skipif(sys.platform.startswith("win"), reason="skipping windows as mpi4py not available for windows") +@pytest.mark.skipif(MPI is None, reason="MPI not installed") @pytest.mark.parametrize("repeatdt, maxage", [(200 * 86400, 600 * 86400), (100 * 86400, 100 * 86400)]) @pytest.mark.parametrize("nump", [8]) def test_mpi_run(tmpdir, repeatdt, maxage, nump): From 3e00b85293c68cc9369949283ee90124f7b151eb Mon Sep 17 00:00:00 2001 From: Vecko <36369090+VeckoTheGecko@users.noreply.github.com> Date: Fri, 27 Sep 2024 14:16:49 +0200 Subject: [PATCH 05/14] make grid attributes private --- docs/examples/example_dask_chunk_OCMs.py | 96 ++++++------ parcels/_typing.py | 4 +- parcels/application_kernels/advection.py | 2 +- parcels/field.py | 64 ++++---- parcels/fieldset.py | 32 ++-- parcels/grid.py | 177 ++++++++++++++++------- parcels/interaction/interactionkernel.py | 6 +- parcels/kernel.py | 20 ++- parcels/particleset.py | 2 +- tests/test_data/create_testfields.py | 4 +- tests/test_deprecations.py | 89 +++++++++++- 11 files changed, 326 insertions(+), 170 deletions(-) diff --git a/docs/examples/example_dask_chunk_OCMs.py b/docs/examples/example_dask_chunk_OCMs.py index 4c0e63adc..fd2b2d91a 100644 --- a/docs/examples/example_dask_chunk_OCMs.py +++ b/docs/examples/example_dask_chunk_OCMs.py @@ -134,32 +134,32 @@ def test_nemo_3D(mode, chunk_mode): compute_nemo_particle_advection(fieldset, mode) # Nemo sample file dimensions: depthu=75, y=201, x=151 if chunk_mode != "failsafe": - assert len(fieldset.U.grid.load_chunk) == len(fieldset.V.grid.load_chunk) - assert 
len(fieldset.U.grid.load_chunk) == len(fieldset.W.grid.load_chunk) + assert len(fieldset.U.grid._load_chunk) == len(fieldset.V.grid._load_chunk) + assert len(fieldset.U.grid._load_chunk) == len(fieldset.W.grid._load_chunk) if chunk_mode is False: - assert len(fieldset.U.grid.load_chunk) == 1 + assert len(fieldset.U.grid._load_chunk) == 1 elif chunk_mode == "auto": assert ( fieldset.gridset.size == 3 ) # because three different grids in 'auto' mode - assert len(fieldset.U.grid.load_chunk) != 1 + assert len(fieldset.U.grid._load_chunk) != 1 elif chunk_mode == "specific": - assert len(fieldset.U.grid.load_chunk) == ( + assert len(fieldset.U.grid._load_chunk) == ( 1 * int(math.ceil(75.0 / 75.0)) * int(math.ceil(201.0 / 16.0)) * int(math.ceil(151.0 / 16.0)) ) elif chunk_mode == "failsafe": # chunking time and depth but not lat and lon - assert len(fieldset.U.grid.load_chunk) != 1 - assert len(fieldset.U.grid.load_chunk) == ( + assert len(fieldset.U.grid._load_chunk) != 1 + assert len(fieldset.U.grid._load_chunk) == ( 1 * int(math.ceil(75.0 / 25.0)) * int(math.ceil(201.0 / 171.0)) * int(math.ceil(151.0 / 151.0)) ) - assert len(fieldset.V.grid.load_chunk) != 1 - assert len(fieldset.V.grid.load_chunk) == ( + assert len(fieldset.V.grid._load_chunk) != 1 + assert len(fieldset.V.grid._load_chunk) == ( 1 * int(math.ceil(75.0 / 75.0)) * int(math.ceil(201.0 / 171.0)) @@ -214,18 +214,18 @@ def test_globcurrent_2D(mode, chunk_mode): return # GlobCurrent sample file dimensions: time=UNLIMITED, lat=41, lon=81 if chunk_mode != "failsafe": # chunking time but not lat - assert len(fieldset.U.grid.load_chunk) == len(fieldset.V.grid.load_chunk) + assert len(fieldset.U.grid._load_chunk) == len(fieldset.V.grid._load_chunk) if chunk_mode is False: - assert len(fieldset.U.grid.load_chunk) == 1 + assert len(fieldset.U.grid._load_chunk) == 1 elif chunk_mode == "auto": - assert len(fieldset.U.grid.load_chunk) != 1 + assert len(fieldset.U.grid._load_chunk) != 1 elif chunk_mode == "specific": - assert len(fieldset.U.grid.load_chunk) == ( + assert len(fieldset.U.grid._load_chunk) == ( 1 * int(math.ceil(41.0 / 8.0)) * int(math.ceil(81.0 / 8.0)) ) elif chunk_mode == "failsafe": # chunking time but not lat - assert len(fieldset.U.grid.load_chunk) != 1 - assert len(fieldset.V.grid.load_chunk) != 1 + assert len(fieldset.U.grid._load_chunk) != 1 + assert len(fieldset.V.grid._load_chunk) != 1 assert abs(pset[0].lon - 23.8) < 1 assert abs(pset[0].lat - -35.3) < 1 @@ -267,33 +267,33 @@ def test_pop(mode, chunk_mode): pset = parcels.ParticleSet.from_list(fieldset, ptype[mode], lon=lonp, lat=latp) pset.execute(parcels.AdvectionRK4, runtime=timedelta(days=90), dt=timedelta(days=2)) # POP sample file dimensions: k=21, j=60, i=60 - assert len(fieldset.U.grid.load_chunk) == len(fieldset.V.grid.load_chunk) - assert len(fieldset.U.grid.load_chunk) == len(fieldset.W.grid.load_chunk) + assert len(fieldset.U.grid._load_chunk) == len(fieldset.V.grid._load_chunk) + assert len(fieldset.U.grid._load_chunk) == len(fieldset.W.grid._load_chunk) if chunk_mode is False: assert fieldset.gridset.size == 1 - assert len(fieldset.U.grid.load_chunk) == 1 - assert len(fieldset.V.grid.load_chunk) == 1 - assert len(fieldset.W.grid.load_chunk) == 1 + assert len(fieldset.U.grid._load_chunk) == 1 + assert len(fieldset.V.grid._load_chunk) == 1 + assert len(fieldset.W.grid._load_chunk) == 1 elif chunk_mode == "auto": assert ( fieldset.gridset.size == 3 ) # because three different grids in 'auto' mode - assert len(fieldset.U.grid.load_chunk) != 1 - assert 
len(fieldset.V.grid.load_chunk) != 1 - assert len(fieldset.W.grid.load_chunk) != 1 + assert len(fieldset.U.grid._load_chunk) != 1 + assert len(fieldset.V.grid._load_chunk) != 1 + assert len(fieldset.W.grid._load_chunk) != 1 elif chunk_mode == "specific": assert fieldset.gridset.size == 1 - assert len(fieldset.U.grid.load_chunk) == ( + assert len(fieldset.U.grid._load_chunk) == ( int(math.ceil(21.0 / 3.0)) * int(math.ceil(60.0 / 8.0)) * int(math.ceil(60.0 / 8.0)) ) elif chunk_mode == "failsafe": # here: done a typo in the netcdf dimname field assert fieldset.gridset.size == 1 - assert len(fieldset.U.grid.load_chunk) != 1 - assert len(fieldset.V.grid.load_chunk) != 1 - assert len(fieldset.W.grid.load_chunk) != 1 - assert len(fieldset.U.grid.load_chunk) == ( + assert len(fieldset.U.grid._load_chunk) != 1 + assert len(fieldset.V.grid._load_chunk) != 1 + assert len(fieldset.W.grid._load_chunk) != 1 + assert len(fieldset.U.grid._load_chunk) == ( int(math.ceil(21.0 / 3.0)) * int(math.ceil(60.0 / 8.0)) * int(math.ceil(60.0 / 8.0)) @@ -376,33 +376,33 @@ def test_swash(mode, chunk_mode): if chunk_mode not in [ "failsafe", ]: - assert len(fieldset.U.grid.load_chunk) == len( - fieldset.V.grid.load_chunk + assert len(fieldset.U.grid._load_chunk) == len( + fieldset.V.grid._load_chunk ), f"U {fieldset.U.grid.chunk_info} vs V {fieldset.V.grid.chunk_info}" if chunk_mode not in ["failsafe", "auto"]: - assert len(fieldset.U.grid.load_chunk) == len( - fieldset.W.grid.load_chunk + assert len(fieldset.U.grid._load_chunk) == len( + fieldset.W.grid._load_chunk ), f"U {fieldset.U.grid.chunk_info} vs W {fieldset.W.grid.chunk_info}" if chunk_mode is False: - assert len(fieldset.U.grid.load_chunk) == 1 + assert len(fieldset.U.grid._load_chunk) == 1 else: - assert len(fieldset.U.grid.load_chunk) != 1 - assert len(fieldset.V.grid.load_chunk) != 1 - assert len(fieldset.W.grid.load_chunk) != 1 + assert len(fieldset.U.grid._load_chunk) != 1 + assert len(fieldset.V.grid._load_chunk) != 1 + assert len(fieldset.W.grid._load_chunk) != 1 if chunk_mode == "specific": - assert len(fieldset.U.grid.load_chunk) == ( + assert len(fieldset.U.grid._load_chunk) == ( 1 * int(math.ceil(6.0 / 6.0)) * int(math.ceil(21.0 / 4.0)) * int(math.ceil(51.0 / 4.0)) ) - assert len(fieldset.V.grid.load_chunk) == ( + assert len(fieldset.V.grid._load_chunk) == ( 1 * int(math.ceil(6.0 / 6.0)) * int(math.ceil(21.0 / 4.0)) * int(math.ceil(51.0 / 4.0)) ) - assert len(fieldset.W.grid.load_chunk) == ( + assert len(fieldset.W.grid._load_chunk) == ( 1 * int(math.ceil(7.0 / 7.0)) * int(math.ceil(21.0 / 4.0)) @@ -452,11 +452,11 @@ def test_ofam_3D(mode, chunk_mode): parcels.AdvectionRK4, runtime=timedelta(days=10), dt=timedelta(minutes=5) ) # OFAM sample file dimensions: time=UNLIMITED, st_ocean=1, st_edges_ocean=52, lat=601, lon=2001 - assert len(fieldset.U.grid.load_chunk) == len(fieldset.V.grid.load_chunk) + assert len(fieldset.U.grid._load_chunk) == len(fieldset.V.grid._load_chunk) if chunk_mode is False: - assert len(fieldset.U.grid.load_chunk) == 1 + assert len(fieldset.U.grid._load_chunk) == 1 elif chunk_mode == "auto": - assert len(fieldset.U.grid.load_chunk) != 1 + assert len(fieldset.U.grid._load_chunk) != 1 elif chunk_mode == "specific": numblocks = [i for i in fieldset.U.grid.chunk_info[1:3]] dblocks = 1 @@ -470,7 +470,7 @@ def test_ofam_3D(mode, chunk_mode): ublocks += bsize matching_numblocks = ublocks == 2001 and vblocks == 601 and dblocks == 1 matching_fields = fieldset.U.grid.chunk_info == fieldset.V.grid.chunk_info - matching_uniformblocks = 
len(fieldset.U.grid.load_chunk) == ( + matching_uniformblocks = len(fieldset.U.grid._load_chunk) == ( 1 * int(math.ceil(1.0 / 60.0)) * int(math.ceil(601.0 / 50.0)) @@ -548,17 +548,17 @@ def test_mitgcm(mode, chunk_mode, using_add_field): ) # MITgcm sample file dimensions: time=10, XG=400, YG=200 if chunk_mode != "specific_different": - assert len(fieldset.U.grid.load_chunk) == len(fieldset.V.grid.load_chunk) + assert len(fieldset.U.grid._load_chunk) == len(fieldset.V.grid._load_chunk) if chunk_mode in [ False, ]: - assert len(fieldset.U.grid.load_chunk) == 1 + assert len(fieldset.U.grid._load_chunk) == 1 elif chunk_mode in [ "auto", ]: - assert len(fieldset.U.grid.load_chunk) != 1 + assert len(fieldset.U.grid._load_chunk) != 1 elif "specific" in chunk_mode: - assert len(fieldset.U.grid.load_chunk) == ( + assert len(fieldset.U.grid._load_chunk) == ( 1 * int(math.ceil(400.0 / 50.0)) * int(math.ceil(200.0 / 100.0)) ) if chunk_mode == "specific_same": @@ -593,7 +593,7 @@ def test_diff_entry_dimensions_chunks(mode): ) compute_nemo_particle_advection(fieldset, mode) # Nemo sample file dimensions: depthu=75, y=201, x=151 - assert len(fieldset.U.grid.load_chunk) == len(fieldset.V.grid.load_chunk) + assert len(fieldset.U.grid._load_chunk) == len(fieldset.V.grid._load_chunk) @pytest.mark.parametrize("mode", ["scipy", "jit"]) diff --git a/parcels/_typing.py b/parcels/_typing.py index 533170acc..92af7ba2f 100644 --- a/parcels/_typing.py +++ b/parcels/_typing.py @@ -38,8 +38,8 @@ class ParcelsAST(ast.AST): VectorType = Literal["3D", "2D"] | None # corresponds with `vector_type` ChunkMode = Literal["auto", "specific", "failsafe"] # corresponds with `chunk_mode` GridIndexingType = Literal["pop", "mom5", "mitgcm", "nemo"] # corresponds with `gridindexingtype` -UpdateStatus = Literal["not_updated", "first_updated", "updated"] # corresponds with `update_status` -TimePeriodic = float | datetime.timedelta | Literal[False] # corresponds with `update_status` +UpdateStatus = Literal["not_updated", "first_updated", "updated"] # corresponds with `_update_status` +TimePeriodic = float | datetime.timedelta | Literal[False] # corresponds with `time_periodic` NetcdfEngine = Literal["netcdf4", "xarray", "scipy"] diff --git a/parcels/application_kernels/advection.py b/parcels/application_kernels/advection.py index ff848bb02..e73b62e56 100644 --- a/parcels/application_kernels/advection.py +++ b/parcels/application_kernels/advection.py @@ -163,7 +163,7 @@ def AdvectionAnalytical(particle, fieldset, time): particle.zi[:] = zi grid = fieldset.U.grid - if grid.gtype < 2: + if grid._gtype < 2: px = np.array([grid.lon[xi], grid.lon[xi + 1], grid.lon[xi + 1], grid.lon[xi]]) py = np.array([grid.lat[yi], grid.lat[yi], grid.lat[yi + 1], grid.lat[yi + 1]]) else: diff --git a/parcels/field.py b/parcels/field.py index b3a63cc98..7f135659e 100644 --- a/parcels/field.py +++ b/parcels/field.py @@ -217,7 +217,7 @@ def __init__( self.interp_method = interp_method assert_valid_gridindexingtype(gridindexingtype) self._gridindexingtype = gridindexingtype - if self.interp_method in ["bgrid_velocity", "bgrid_w_velocity", "bgrid_tracer"] and self.grid.gtype in [ + if self.interp_method in ["bgrid_velocity", "bgrid_w_velocity", "bgrid_tracer"] and self.grid._gtype in [ GridType.RectilinearSGrid, GridType.CurvilinearSGrid, ]: @@ -296,7 +296,7 @@ def __init__( if self.grid.depth_field == "not_yet_set": assert ( - self.grid.z4d + self.grid._z4d ), "Providing the depth dimensions from another field data is only available for 4d S grids" # 
data_full_zdim is the vertical dimension of the complete field data, ignoring the indices. @@ -719,7 +719,7 @@ def from_netcdf( data = lib.concatenate(data_list, axis=0) else: grid.defer_load = True - grid.ti = -1 + grid._ti = -1 data = DeferredArray() data.compute_shape(grid.xdim, grid.ydim, grid.zdim, grid.tdim, len(grid.timeslices)) @@ -816,7 +816,7 @@ def _reshape(self, data, transpose=False): lib = np if isinstance(data, np.ndarray) else da if transpose: data = lib.transpose(data) - if self.grid.lat_flipped: + if self.grid._lat_flipped: data = lib.flip(data, axis=-2) if self.grid.xdim == 1 or self.grid.ydim == 1: @@ -913,7 +913,7 @@ def _calc_cell_edge_sizes(self): Currently only works for Rectilinear Grids """ if not self.grid.cell_edge_sizes: - if self.grid.gtype in (GridType.RectilinearZGrid, GridType.RectilinearSGrid): + if self.grid._gtype in (GridType.RectilinearZGrid, GridType.RectilinearSGrid): self.grid.cell_edge_sizes["x"] = np.zeros((self.grid.ydim, self.grid.xdim), dtype=np.float32) self.grid.cell_edge_sizes["y"] = np.zeros((self.grid.ydim, self.grid.xdim), dtype=np.float32) @@ -925,7 +925,7 @@ def _calc_cell_edge_sizes(self): self.grid.cell_edge_sizes["y"][y, x] = y_conv.to_source(dy, lon, lat, self.grid.depth[0]) else: raise ValueError( - f"Field.cell_edge_sizes() not implemented for {self.grid.gtype} grids. " + f"Field.cell_edge_sizes() not implemented for {self.grid._gtype} grids. " "You can provide Field.grid.cell_edge_sizes yourself by in, e.g., " "NEMO using the e1u fields etc from the mesh_mask.nc file." ) @@ -986,7 +986,7 @@ def _search_indices_vertical_s( eta = 1 if time < grid.time[ti]: ti -= 1 - if grid.z4d: + if grid._z4d: if ti == len(grid.time) - 1: depth_vector = ( (1 - xsi) * (1 - eta) * grid.depth[-1, :, yi, xi] @@ -1127,7 +1127,7 @@ def _search_indices_rectilinear(self, x: float, y: float, z: float, ti=-1, time= yi, eta = -1, 0 if grid.zdim > 1 and not search2D: - if grid.gtype == GridType.RectilinearZGrid: + if grid._gtype == GridType.RectilinearZGrid: # Never passes here, because in this case, we work with scipy try: (zi, zeta) = self._search_indices_vertical_z(z) @@ -1135,7 +1135,7 @@ def _search_indices_rectilinear(self, x: float, y: float, z: float, ti=-1, time= raise FieldOutOfBoundError(x, y, z, field=self) except FieldOutOfBoundSurfaceError: raise FieldOutOfBoundSurfaceError(x, y, z, field=self) - elif grid.gtype == GridType.RectilinearSGrid: + elif grid._gtype == GridType.RectilinearSGrid: (zi, zeta) = self._search_indices_vertical_s(x, y, z, xi, yi, xsi, eta, ti, time) else: zi, zeta = -1, 0 @@ -1224,12 +1224,12 @@ def _search_indices_curvilinear(self, x, y, z, ti=-1, time=-1, particle=None, se eta = min(1.0, eta) if grid.zdim > 1 and not search2D: - if grid.gtype == GridType.CurvilinearZGrid: + if grid._gtype == GridType.CurvilinearZGrid: try: (zi, zeta) = self._search_indices_vertical_z(z) except FieldOutOfBoundError: raise FieldOutOfBoundError(x, y, z, field=self) - elif grid.gtype == GridType.CurvilinearSGrid: + elif grid._gtype == GridType.CurvilinearSGrid: (zi, zeta) = self._search_indices_vertical_s(x, y, z, xi, yi, xsi, eta, ti, time) else: zi = -1 @@ -1250,7 +1250,7 @@ def search_indices(self, *args, **kwargs): return self._search_indices(*args, **kwargs) def _search_indices(self, x, y, z, ti=-1, time=-1, particle=None, search2D=False): - if self.grid.gtype in [GridType.RectilinearSGrid, GridType.RectilinearZGrid]: + if self.grid._gtype in [GridType.RectilinearSGrid, GridType.RectilinearZGrid]: return 
self._search_indices_rectilinear(x, y, z, ti, time, particle=particle, search2D=search2D) else: return self._search_indices_curvilinear(x, y, z, ti, time, particle=particle, search2D=search2D) @@ -1581,7 +1581,7 @@ def _chunk_setup(self): self._data_chunks = [None] * npartitions self._c_data_chunks = [None] * npartitions - self.grid.load_chunk = np.zeros(npartitions, dtype=c_int, order="C") + self.grid._load_chunk = np.zeros(npartitions, dtype=c_int, order="C") # self.grid.chunk_info format: number of dimensions (without tdim); number of chunks per dimensions; # chunksizes (the 0th dim sizes for all chunk of dim[0], then so on for next dims self.grid.chunk_info = [ @@ -1601,17 +1601,17 @@ def _chunk_data(self): self._chunk_setup() g = self.grid if isinstance(self.data, da.core.Array): - for block_id in range(len(self.grid.load_chunk)): + for block_id in range(len(self.grid._load_chunk)): if ( - g.load_chunk[block_id] == g.chunk_loading_requested - or g.load_chunk[block_id] in g.chunk_loaded + g._load_chunk[block_id] == g._chunk_loading_requested + or g._load_chunk[block_id] in g._chunk_loaded and self._data_chunks[block_id] is None ): block = self._get_block(block_id) self._data_chunks[block_id] = np.array( self.data.blocks[(slice(self.grid.tdim),) + block], order="C" ) - elif g.load_chunk[block_id] == g.chunk_not_loaded: + elif g._load_chunk[block_id] == g._chunk_not_loaded: if isinstance(self._data_chunks, list): self._data_chunks[block_id] = None else: @@ -1623,7 +1623,7 @@ def _chunk_data(self): else: self._data_chunks[0, :] = None self._c_data_chunks[0] = None - self.grid.load_chunk[0] = g.chunk_loaded_touched + self.grid._load_chunk[0] = g._chunk_loaded_touched self._data_chunks[0] = np.array(self.data, order="C") @property @@ -1647,12 +1647,12 @@ class CField(Structure): # Create and populate the c-struct object allow_time_extrapolation = 1 if self.allow_time_extrapolation else 0 time_periodic = 1 if self.time_periodic else 0 - for i in range(len(self.grid.load_chunk)): - if self.grid.load_chunk[i] == self.grid.chunk_loading_requested: + for i in range(len(self.grid._load_chunk)): + if self.grid._load_chunk[i] == self.grid._chunk_loading_requested: raise ValueError( - "data_chunks should have been loaded by now if requested. grid.load_chunk[bid] cannot be 1" + "data_chunks should have been loaded by now if requested. 
grid._load_chunk[bid] cannot be 1" ) - if self.grid.load_chunk[i] in self.grid.chunk_loaded: + if self.grid._load_chunk[i] in self.grid._chunk_loaded: if not self._data_chunks[i].flags["C_CONTIGUOUS"]: self._data_chunks[i] = np.array(self._data_chunks[i], order="C") self._c_data_chunks[i] = self._data_chunks[i].ctypes.data_as(POINTER(POINTER(c_float))) @@ -1738,7 +1738,7 @@ def write(self, filename, varname=None): vname_depth = f"depth{self.name.lower()}" # Create DataArray objects for file I/O - if self.grid.gtype == GridType.RectilinearZGrid: + if self.grid._gtype == GridType.RectilinearZGrid: nav_lon = xr.DataArray( self.grid.lon + np.zeros((self.grid.ydim, self.grid.xdim), dtype=np.float32), coords=[("y", self.grid.lat), ("x", self.grid.lon)], @@ -1747,7 +1747,7 @@ def write(self, filename, varname=None): self.grid.lat.reshape(self.grid.ydim, 1) + np.zeros(self.grid.xdim, dtype=np.float32), coords=[("y", self.grid.lat), ("x", self.grid.lon)], ) - elif self.grid.gtype == GridType.CurvilinearZGrid: + elif self.grid._gtype == GridType.CurvilinearZGrid: nav_lon = xr.DataArray(self.grid.lon, coords=[("y", range(self.grid.ydim)), ("x", range(self.grid.xdim))]) nav_lat = xr.DataArray(self.grid.lat, coords=[("y", range(self.grid.ydim)), ("x", range(self.grid.xdim))]) else: @@ -1806,15 +1806,15 @@ def computeTimeChunk(self, data, tindex): timestamp = self.timestamps if timestamp is not None: summedlen = np.cumsum([len(ls) for ls in self.timestamps]) - if g.ti + tindex >= summedlen[-1]: - ti = g.ti + tindex - summedlen[-1] + if g._ti + tindex >= summedlen[-1]: + ti = g._ti + tindex - summedlen[-1] else: - ti = g.ti + tindex + ti = g._ti + tindex timestamp = self.timestamps[np.where(ti < summedlen)[0][0]] rechunk_callback_fields = self._chunk_setup if isinstance(tindex, list) else None filebuffer = self._field_fb_class( - self._dataFiles[g.ti + tindex], + self._dataFiles[g._ti + tindex], self.dimensions, self.indices, netcdf_engine=self.netcdf_engine, @@ -1919,7 +1919,7 @@ def spatial_c_grid_interpolation2D(self, ti, z, y, x, time, particle=None, apply grid = self.U.grid (xsi, eta, zeta, xi, yi, zi) = self.U._search_indices(x, y, z, ti, time, particle=particle) - if grid.gtype in [GridType.RectilinearSGrid, GridType.RectilinearZGrid]: + if grid._gtype in [GridType.RectilinearSGrid, GridType.RectilinearZGrid]: px = np.array([grid.lon[xi], grid.lon[xi + 1], grid.lon[xi + 1], grid.lon[xi]]) py = np.array([grid.lat[yi], grid.lat[yi], grid.lat[yi + 1], grid.lat[yi + 1]]) else: @@ -1991,7 +1991,7 @@ def spatial_c_grid_interpolation3D_full(self, ti, z, y, x, time, particle=None): grid = self.U.grid (xsi, eta, zet, xi, yi, zi) = self.U._search_indices(x, y, z, ti, time, particle=particle) - if grid.gtype in [GridType.RectilinearSGrid, GridType.RectilinearZGrid]: + if grid._gtype in [GridType.RectilinearSGrid, GridType.RectilinearZGrid]: px = np.array([grid.lon[xi], grid.lon[xi + 1], grid.lon[xi + 1], grid.lon[xi]]) py = np.array([grid.lat[yi], grid.lat[yi], grid.lat[yi + 1], grid.lat[yi + 1]]) else: @@ -2008,7 +2008,7 @@ def spatial_c_grid_interpolation3D_full(self, ti, z, y, x, time, particle=None): px = np.concatenate((px, px)) py = np.concatenate((py, py)) - if grid.z4d: + if grid._z4d: pz = np.array( [ grid.depth[0, zi, yi, xi], @@ -2208,7 +2208,7 @@ def spatial_c_grid_interpolation3D(self, ti, z, y, x, time, particle=None, apply interpolating linearly V depending on the latitude coordinate. Curvilinear grids are treated properly, since the element is projected to a rectilinear parent element. 
""" - if self.U.grid.gtype in [GridType.RectilinearSGrid, GridType.CurvilinearSGrid]: + if self.U.grid._gtype in [GridType.RectilinearSGrid, GridType.CurvilinearSGrid]: (u, v, w) = self.spatial_c_grid_interpolation3D_full(ti, z, y, x, time, particle=particle) else: (u, v) = self.spatial_c_grid_interpolation2D(ti, z, y, x, time, particle=particle) diff --git a/parcels/fieldset.py b/parcels/fieldset.py index b7dc1bd86..5dca27ff4 100644 --- a/parcels/fieldset.py +++ b/parcels/fieldset.py @@ -1454,11 +1454,11 @@ def computeTimeChunk(self, time=0.0, dt=1): nextTime = np.inf if dt > 0 else -np.inf for g in self.gridset.grids: - g.update_status = "not_updated" + g._update_status = "not_updated" for f in self.get_fields(): if isinstance(f, (VectorField, NestedField)) or not f.grid.defer_load: continue - if f.grid.update_status == "not_updated": + if f.grid._update_status == "not_updated": nextTime_loc = f.grid._computeTimeChunk(f, time, signdt) if time == nextTime_loc and signdt != 0: raise TimeExtrapolationError(time, field=f, msg="In fset.computeTimeChunk") @@ -1468,7 +1468,7 @@ def computeTimeChunk(self, time=0.0, dt=1): if isinstance(f, (VectorField, NestedField)) or not f.grid.defer_load or f._dataFiles is None: continue g = f.grid - if g.update_status == "first_updated": # First load of data + if g._update_status == "first_updated": # First load of data if f.data is not None and not isinstance(f.data, DeferredArray): if not isinstance(f.data, list): f.data = None @@ -1498,13 +1498,13 @@ def computeTimeChunk(self, time=0.0, dt=1): f.data = f._reshape(data) if not f._chunk_set: f._chunk_setup() - if len(g.load_chunk) > g.chunk_not_loaded: - g.load_chunk = np.where( - g.load_chunk == g.chunk_loaded_touched, g.chunk_loading_requested, g.load_chunk + if len(g._load_chunk) > g._chunk_not_loaded: + g._load_chunk = np.where( + g._load_chunk == g._chunk_loaded_touched, g._chunk_loading_requested, g._load_chunk ) - g.load_chunk = np.where(g.load_chunk == g.chunk_deprecated, g.chunk_not_loaded, g.load_chunk) + g._load_chunk = np.where(g._load_chunk == g._chunk_deprecated, g._chunk_not_loaded, g._load_chunk) - elif g.update_status == "updated": + elif g._update_status == "updated": lib = np if isinstance(f.data, np.ndarray) else da if f.gridindexingtype == "pop" and g.zdim > 1: zd = g.zdim - 1 @@ -1552,12 +1552,14 @@ def computeTimeChunk(self, time=0.0, dt=1): f.data[1, :] = None f.data[1, :] = f.data[0, :] f.data[0, :] = data - g.load_chunk = np.where(g.load_chunk == g.chunk_loaded_touched, g.chunk_loading_requested, g.load_chunk) - g.load_chunk = np.where(g.load_chunk == g.chunk_deprecated, g.chunk_not_loaded, g.load_chunk) - if isinstance(f.data, da.core.Array) and len(g.load_chunk) > 0: + g._load_chunk = np.where( + g._load_chunk == g._chunk_loaded_touched, g._chunk_loading_requested, g._load_chunk + ) + g._load_chunk = np.where(g._load_chunk == g._chunk_deprecated, g._chunk_not_loaded, g._load_chunk) + if isinstance(f.data, da.core.Array) and len(g._load_chunk) > 0: if signdt >= 0: - for block_id in range(len(g.load_chunk)): - if g.load_chunk[block_id] == g.chunk_loaded_touched: + for block_id in range(len(g._load_chunk)): + if g._load_chunk[block_id] == g._chunk_loaded_touched: if f._data_chunks[block_id] is None: # file chunks were never loaded. 
# happens when field not called by kernel, but shares a grid with another field called by kernel @@ -1566,8 +1568,8 @@ def computeTimeChunk(self, time=0.0, dt=1): f._data_chunks[block_id][0] = None f._data_chunks[block_id][1] = np.array(f.data.blocks[(slice(2),) + block][1]) else: - for block_id in range(len(g.load_chunk)): - if g.load_chunk[block_id] == g.chunk_loaded_touched: + for block_id in range(len(g._load_chunk)): + if g._load_chunk[block_id] == g._chunk_loaded_touched: if f._data_chunks[block_id] is None: # file chunks were never loaded. # happens when field not called by kernel, but shares a grid with another field called by kernel diff --git a/parcels/grid.py b/parcels/grid.py index 95a3320ae..d4d7d90d9 100644 --- a/parcels/grid.py +++ b/parcels/grid.py @@ -50,12 +50,9 @@ def __init__( time_origin: TimeConverter | None, mesh: Mesh, ): - self.xi = None - self.yi = None - self.zi = None - self.ti = -1 + self._ti = -1 self.lon = lon - self.update_status: UpdateStatus | None = None + self._update_status: UpdateStatus | None = None if not self.lon.flags["C_CONTIGUOUS"]: self.lon = np.array(self.lon, order="C") self.lat = lat @@ -78,18 +75,18 @@ def __init__( assert isinstance(self.time_origin, TimeConverter), "time_origin needs to be a TimeConverter object" assert_valid_mesh(mesh) self.mesh = mesh - self.cstruct = None + self._cstruct = None self.cell_edge_sizes: dict[str, npt.NDArray] = {} self.zonal_periodic = False self.zonal_halo = 0 self.meridional_halo = 0 - self.lat_flipped = False + self._lat_flipped = False self.defer_load = False self.lonlat_minmax = np.array( [np.nanmin(lon), np.nanmax(lon), np.nanmin(lat), np.nanmax(lat)], dtype=np.float32 ) self.periods = 0 - self.load_chunk: npt.NDArray = np.array([]) + self._load_chunk: npt.NDArray = np.array([]) self.chunk_info = None self.chunksize = None self._add_last_periodic_data_timestep = False @@ -103,6 +100,46 @@ def __repr__(self): f"time_origin={self.time_origin!r}, mesh={self.mesh!r})" ) + @property + @deprecated_made_private # TODO: Remove 6 months after v3.1.0 + def ti(self): + return self._ti + + @property + @deprecated_made_private # TODO: Remove 6 months after v3.1.0 + def cstruct(self): + return self._cstruct + + @property + @deprecated_made_private # TODO: Remove 6 months after v3.1.0 + def lat_flipped(self): + return self._lat_flipped + + @property + @deprecated_made_private # TODO: Remove 6 months after v3.1.0 + def cgrid(self): + return self._cgrid + + @property + @deprecated_made_private # TODO: Remove 6 months after v3.1.0 + def gtype(self): + return self._gtype + + @property + @deprecated_made_private # TODO: Remove 6 months after v3.1.0 + def z4d(self): + return self._z4d + + @property + @deprecated_made_private # TODO: Remove 6 months after v3.1.0 + def update_status(self): + return self._update_status + + @property + @deprecated_made_private # TODO: Remove 6 months after v3.1.0 + def load_chunk(self): + return self._load_chunk + @staticmethod def create_grid( lon: npt.ArrayLike, @@ -133,12 +170,17 @@ def create_grid( @property def ctypes_struct(self): # This is unnecessary for the moment, but it could be useful when going will fully unstructured grids - self.cgrid = cast(pointer(self.child_ctypes_struct), c_void_p) - cstruct = CGrid(self.gtype, self.cgrid.value) + self._cgrid = cast(pointer(self._child_ctypes_struct), c_void_p) + cstruct = CGrid(self._gtype, self._cgrid.value) return cstruct @property + @deprecated_made_private # TODO: Remove 6 months after v3.1.0 def child_ctypes_struct(self): + return 
self._child_ctypes_struct + + @property + def _child_ctypes_struct(self): """Returns a ctypes struct object containing all relevant pointers and sizes for this grid. """ @@ -166,20 +208,20 @@ class CStructuredGrid(Structure): ] # Create and populate the c-struct object - if not self.cstruct: # Not to point to the same grid various times if grid in various fields + if not self._cstruct: # Not to point to the same grid various times if grid in various fields if not isinstance(self.periods, c_int): self.periods = c_int() self.periods.value = 0 - self.cstruct = CStructuredGrid( + self._cstruct = CStructuredGrid( self.xdim, self.ydim, self.zdim, self.tdim, - self.z4d, + self._z4d, int(self.mesh == "spherical"), int(self.zonal_periodic), (c_int * len(self.chunk_info))(*self.chunk_info), - self.load_chunk.ctypes.data_as(POINTER(c_int)), + self._load_chunk.ctypes.data_as(POINTER(c_int)), self.time_full[0], self.time_full[-1], pointer(self.periods), @@ -189,7 +231,7 @@ class CStructuredGrid(Structure): self.depth.ctypes.data_as(POINTER(c_float)), self.time.ctypes.data_as(POINTER(c_double)), ) - return self.cstruct + return self._cstruct def lon_grid_to_target(self): if self.lon_remapping: @@ -256,94 +298,119 @@ def _computeTimeChunk(self, f, time, signdt): nextTime_loc = np.inf if signdt >= 0 else -np.inf periods = self.periods.value if isinstance(self.periods, c_int) else self.periods prev_time_indices = self.time - if self.update_status == "not_updated": - if self.ti >= 0: + if self._update_status == "not_updated": + if self._ti >= 0: if ( time - periods * (self.time_full[-1] - self.time_full[0]) < self.time[0] or time - periods * (self.time_full[-1] - self.time_full[0]) > self.time[1] ): - self.ti = -1 # reset + self._ti = -1 # reset elif signdt >= 0 and ( time - periods * (self.time_full[-1] - self.time_full[0]) < self.time_full[0] or time - periods * (self.time_full[-1] - self.time_full[0]) >= self.time_full[-1] ): - self.ti = -1 # reset + self._ti = -1 # reset elif signdt < 0 and ( time - periods * (self.time_full[-1] - self.time_full[0]) <= self.time_full[0] or time - periods * (self.time_full[-1] - self.time_full[0]) > self.time_full[-1] ): - self.ti = -1 # reset + self._ti = -1 # reset elif ( signdt >= 0 and time - periods * (self.time_full[-1] - self.time_full[0]) >= self.time[1] - and self.ti < len(self.time_full) - 2 + and self._ti < len(self.time_full) - 2 ): - self.ti += 1 - self.time = self.time_full[self.ti : self.ti + 2] - self.update_status = "updated" + self._ti += 1 + self.time = self.time_full[self._ti : self._ti + 2] + self._update_status = "updated" elif ( signdt < 0 and time - periods * (self.time_full[-1] - self.time_full[0]) <= self.time[0] - and self.ti > 0 + and self._ti > 0 ): - self.ti -= 1 - self.time = self.time_full[self.ti : self.ti + 2] - self.update_status = "updated" - if self.ti == -1: + self._ti -= 1 + self.time = self.time_full[self._ti : self._ti + 2] + self._update_status = "updated" + if self._ti == -1: self.time = self.time_full - self.ti, _ = f._time_index(time) + self._ti, _ = f._time_index(time) periods = self.periods.value if isinstance(self.periods, c_int) else self.periods if ( signdt == -1 - and self.ti == 0 + and self._ti == 0 and (time - periods * (self.time_full[-1] - self.time_full[0])) == self.time[0] and f.time_periodic ): - self.ti = len(self.time) - 1 + self._ti = len(self.time) - 1 periods -= 1 - if signdt == -1 and self.ti > 0 and self.time_full[self.ti] == time: - self.ti -= 1 - if self.ti >= len(self.time_full) - 1: - self.ti = 
len(self.time_full) - 2 + if signdt == -1 and self._ti > 0 and self.time_full[self._ti] == time: + self._ti -= 1 + if self._ti >= len(self.time_full) - 1: + self._ti = len(self.time_full) - 2 - self.time = self.time_full[self.ti : self.ti + 2] + self.time = self.time_full[self._ti : self._ti + 2] self.tdim = 2 if prev_time_indices is None or len(prev_time_indices) != 2 or len(prev_time_indices) != len(self.time): - self.update_status = "first_updated" + self._update_status = "first_updated" elif functools.reduce( lambda i, j: i and j, map(lambda m, k: m == k, self.time, prev_time_indices), True ) and len(prev_time_indices) == len(self.time): - self.update_status = "not_updated" + self._update_status = "not_updated" elif functools.reduce( lambda i, j: i and j, map(lambda m, k: m == k, self.time[:1], prev_time_indices[:1]), True ) and len(prev_time_indices) == len(self.time): - self.update_status = "updated" + self._update_status = "updated" else: - self.update_status = "first_updated" - if signdt >= 0 and (self.ti < len(self.time_full) - 2 or not f.allow_time_extrapolation): + self._update_status = "first_updated" + if signdt >= 0 and (self._ti < len(self.time_full) - 2 or not f.allow_time_extrapolation): nextTime_loc = self.time[1] + periods * (self.time_full[-1] - self.time_full[0]) - elif signdt < 0 and (self.ti > 0 or not f.allow_time_extrapolation): + elif signdt < 0 and (self._ti > 0 or not f.allow_time_extrapolation): nextTime_loc = self.time[0] + periods * (self.time_full[-1] - self.time_full[0]) return nextTime_loc @property + @deprecated_made_private # TODO: Remove 6 months after v3.1.0 def chunk_not_loaded(self): + return self._chunk_not_loaded + + @property + def _chunk_not_loaded(self): return 0 @property + @deprecated_made_private # TODO: Remove 6 months after v3.1.0 def chunk_loading_requested(self): + return self._chunk_loading_requested + + @property + def _chunk_loading_requested(self): return 1 @property + @deprecated_made_private # TODO: Remove 6 months after v3.1.0 def chunk_loaded_touched(self): + return self._chunk_loaded_touched + + @property + def _chunk_loaded_touched(self): return 2 @property + @deprecated_made_private # TODO: Remove 6 months after v3.1.0 def chunk_deprecated(self): + return self._chunk_deprecated + + @property + def _chunk_deprecated(self): return 3 @property + @deprecated_made_private # TODO: Remove 6 months after v3.1.0 def chunk_loaded(self): + return self._chunk_loaded + + @property + def _chunk_loaded(self): return [2, 3] @@ -367,7 +434,7 @@ def __init__(self, lon, lat, time, time_origin, mesh: Mesh): self.tdim = self.time.size if self.ydim > 1 and self.lat[-1] < self.lat[0]: self.lat = np.flip(self.lat, axis=0) - self.lat_flipped = True + self._lat_flipped = True warnings.warn( "Flipping lat data from North-South to South-North. 
" "Note that this may lead to wrong sign for meridional velocity, so tread very carefully", @@ -452,12 +519,12 @@ def __init__(self, lon, lat, depth=None, time=None, time_origin=None, mesh: Mesh if isinstance(depth, np.ndarray): assert len(depth.shape) <= 1, "depth is not a vector" - self.gtype = GridType.RectilinearZGrid + self._gtype = GridType.RectilinearZGrid self.depth = np.zeros(1, dtype=np.float32) if depth is None else depth if not self.depth.flags["C_CONTIGUOUS"]: self.depth = np.array(self.depth, order="C") self.zdim = self.depth.size - self.z4d = -1 # only used in RectilinearSGrid + self._z4d = -1 # only used in RectilinearSGrid if not self.depth.dtype == np.float32: self.depth = self.depth.astype(np.float32) @@ -505,13 +572,13 @@ def __init__( super().__init__(lon, lat, time, time_origin, mesh) assert isinstance(depth, np.ndarray) and len(depth.shape) in [3, 4], "depth is not a 3D or 4D numpy array" - self.gtype = GridType.RectilinearSGrid + self._gtype = GridType.RectilinearSGrid self.depth = depth if not self.depth.flags["C_CONTIGUOUS"]: self.depth = np.array(self.depth, order="C") self.zdim = self.depth.shape[-3] - self.z4d = 1 if len(self.depth.shape) == 4 else 0 - if self.z4d: + self._z4d = 1 if len(self.depth.shape) == 4 else 0 + if self._z4d: # self.depth.shape[0] is 0 for S grids loaded from netcdf file assert ( self.tdim == self.depth.shape[0] or self.depth.shape[0] == 0 @@ -531,7 +598,7 @@ def __init__( ), "depth dimension has the wrong format. It should be [zdim, ydim, xdim]" if not self.depth.dtype == np.float32: self.depth = self.depth.astype(np.float32) - if self.lat_flipped: + if self._lat_flipped: self.depth = np.flip(self.depth, axis=-2) @@ -661,12 +728,12 @@ def __init__( if isinstance(depth, np.ndarray): assert len(depth.shape) == 1, "depth is not a vector" - self.gtype = GridType.CurvilinearZGrid + self._gtype = GridType.CurvilinearZGrid self.depth = np.zeros(1, dtype=np.float32) if depth is None else depth if not self.depth.flags["C_CONTIGUOUS"]: self.depth = np.array(self.depth, order="C") self.zdim = self.depth.size - self.z4d = -1 # only for SGrid + self._z4d = -1 # only for SGrid if not self.depth.dtype == np.float32: self.depth = self.depth.astype(np.float32) @@ -713,13 +780,13 @@ def __init__( super().__init__(lon, lat, time, time_origin, mesh) assert isinstance(depth, np.ndarray) and len(depth.shape) in [3, 4], "depth is not a 4D numpy array" - self.gtype = GridType.CurvilinearSGrid + self._gtype = GridType.CurvilinearSGrid self.depth = depth # should be a C-contiguous array of floats if not self.depth.flags["C_CONTIGUOUS"]: self.depth = np.array(self.depth, order="C") self.zdim = self.depth.shape[-3] - self.z4d = 1 if len(self.depth.shape) == 4 else 0 - if self.z4d: + self._z4d = 1 if len(self.depth.shape) == 4 else 0 + if self._z4d: # self.depth.shape[0] is 0 for S grids loaded from netcdf file assert ( self.tdim == self.depth.shape[0] or self.depth.shape[0] == 0 diff --git a/parcels/interaction/interactionkernel.py b/parcels/interaction/interactionkernel.py index 07c76e6a8..0e3979a2e 100644 --- a/parcels/interaction/interactionkernel.py +++ b/parcels/interaction/interactionkernel.py @@ -237,8 +237,10 @@ def execute(self, pset, endtime, dt, output_file=None): if pset.fieldset is not None: for g in pset.fieldset.gridset.grids: - if len(g.load_chunk) > g.chunk_not_loaded: # not the case if a field in not called in the kernel - g.load_chunk = np.where(g.load_chunk == g.chunk_loaded_touched, g.chunk_deprecated, g.load_chunk) + if len(g._load_chunk) > 
g._chunk_not_loaded: # not the case if a field in not called in the kernel + g._load_chunk = np.where( + g._load_chunk == g._chunk_loaded_touched, g._chunk_deprecated, g._load_chunk + ) # Execute the kernel over the particle set if self.ptype.uses_jit: diff --git a/parcels/kernel.py b/parcels/kernel.py index 08eb1d6fb..9a2dbef6a 100644 --- a/parcels/kernel.py +++ b/parcels/kernel.py @@ -367,7 +367,7 @@ def check_fieldsets_in_kernels(self, pyfunc): raise NotImplementedError("Analytical Advection only works in Scipy mode") if self._fieldset.U.interp_method != "cgrid_velocity": raise NotImplementedError("Analytical Advection only works with C-grids") - if self._fieldset.U.grid.gtype not in [GridType.CurvilinearZGrid, GridType.RectilinearZGrid]: + if self._fieldset.U.grid._gtype not in [GridType.CurvilinearZGrid, GridType.RectilinearZGrid]: raise NotImplementedError("Analytical Advection only works with Z-grids in the vertical") elif pyfunc is AdvectionRK45: if not hasattr(self.fieldset, "RK45_tol"): @@ -576,7 +576,7 @@ def load_fieldset_jit(self, pset): """Updates the loaded fields of pset's fieldset according to the chunk information within their grids.""" if pset.fieldset is not None: for g in pset.fieldset.gridset.grids: - g.cstruct = None # This force to point newly the grids from Python to C + g._cstruct = None # This force to point newly the grids from Python to C # Make a copy of the transposed array to enforce # C-contiguous memory layout for JIT mode. for f in pset.fieldset.get_fields(): @@ -592,10 +592,12 @@ def load_fieldset_jit(self, pset): f._c_data_chunks[block_id] = None for g in pset.fieldset.gridset.grids: - g.load_chunk = np.where(g.load_chunk == g.chunk_loading_requested, g.chunk_loaded_touched, g.load_chunk) - if len(g.load_chunk) > g.chunk_not_loaded: # not the case if a field in not called in the kernel - if not g.load_chunk.flags["C_CONTIGUOUS"]: - g.load_chunk = np.array(g.load_chunk, order="C") + g._load_chunk = np.where( + g._load_chunk == g._chunk_loading_requested, g._chunk_loaded_touched, g._load_chunk + ) + if len(g._load_chunk) > g._chunk_not_loaded: # not the case if a field in not called in the kernel + if not g._load_chunk.flags["C_CONTIGUOUS"]: + g._load_chunk = np.array(g._load_chunk, order="C") if not g.depth.flags.c_contiguous: g.depth = np.array(g.depth, order="C") if not g.lon.flags.c_contiguous: @@ -642,8 +644,10 @@ def execute(self, pset, endtime, dt): if pset.fieldset is not None: for g in pset.fieldset.gridset.grids: - if len(g.load_chunk) > g.chunk_not_loaded: # not the case if a field in not called in the kernel - g.load_chunk = np.where(g.load_chunk == g.chunk_loaded_touched, g.chunk_deprecated, g.load_chunk) + if len(g._load_chunk) > g._chunk_not_loaded: # not the case if a field in not called in the kernel + g._load_chunk = np.where( + g._load_chunk == g._chunk_loaded_touched, g._chunk_deprecated, g._load_chunk + ) # Execute the kernel over the particle set if self.ptype.uses_jit: diff --git a/parcels/particleset.py b/parcels/particleset.py index fbba2c97f..d7179cf37 100644 --- a/parcels/particleset.py +++ b/parcels/particleset.py @@ -671,7 +671,7 @@ def _monte_carlo_sample(cls, start_field, size, mode="monte_carlo"): j, i = np.unravel_index(inds, p_interior.shape) grid = start_field.grid lon, lat = ([], []) - if grid.gtype in [GridType.RectilinearZGrid, GridType.RectilinearSGrid]: + if grid._gtype in [GridType.RectilinearZGrid, GridType.RectilinearSGrid]: lon = grid.lon[i] + xsi * (grid.lon[i + 1] - grid.lon[i]) lat = grid.lat[j] + eta * 
(grid.lat[j + 1] - grid.lat[j]) else: diff --git a/tests/test_data/create_testfields.py b/tests/test_data/create_testfields.py index acf192158..52bd88d2d 100644 --- a/tests/test_data/create_testfields.py +++ b/tests/test_data/create_testfields.py @@ -89,7 +89,7 @@ def write_simple_2Dt(field, filename, varname=None): varname = field.name # Create DataArray objects for file I/O - if field.grid.gtype == GridType.RectilinearZGrid: + if field.grid._gtype == GridType.RectilinearZGrid: nav_lon = xr.DataArray( field.grid.lon + np.zeros((field.grid.ydim, field.grid.xdim), dtype=np.float32), coords=[("y", field.grid.lat), ("x", field.grid.lon)], @@ -98,7 +98,7 @@ def write_simple_2Dt(field, filename, varname=None): field.grid.lat.reshape(field.grid.ydim, 1) + np.zeros(field.grid.xdim, dtype=np.float32), coords=[("y", field.grid.lat), ("x", field.grid.lon)], ) - elif field.grid.gtype == GridType.CurvilinearZGrid: + elif field.grid._gtype == GridType.CurvilinearZGrid: nav_lon = xr.DataArray(field.grid.lon, coords=[("y", range(field.grid.ydim)), ("x", range(field.grid.xdim))]) nav_lat = xr.DataArray(field.grid.lat, coords=[("y", range(field.grid.ydim)), ("x", range(field.grid.xdim))]) else: diff --git a/tests/test_deprecations.py b/tests/test_deprecations.py index c8ccc6c05..c6457917b 100644 --- a/tests/test_deprecations.py +++ b/tests/test_deprecations.py @@ -4,20 +4,35 @@ import numpy as np import pytest -from parcels import Field, FieldSet, Grid, JITParticle, ParticleSet, RectilinearZGrid +from parcels import Field, FieldSet, JITParticle, ParticleSet +from parcels.grid import ( + CurvilinearSGrid, + CurvilinearZGrid, + Grid, + RectilinearGrid, + RectilinearSGrid, + RectilinearZGrid, +) from tests.utils import create_fieldset_unit_mesh class Action: """Utility class to help manage, document, and test deprecations.""" - def __init__(self, class_: Literal["Field", "FieldSet"], name: str, type_: Literal["read_only", "make_private"]): + def __init__( + self, + class_: Literal["Field", "FieldSet"], + name: str, + type_: Literal["read_only", "make_private", "remove"], + skip_reason: str = "", + ): if name.startswith("_"): raise ValueError("name should not start with an underscore") self.class_ = class_ self._raw_name = name self.type_ = type_ + self.skip_reason = skip_reason if type_ == "read_only" and self.is_method: raise ValueError("read_only attributes should not be methods") @@ -38,6 +53,10 @@ def is_method(self): return True return False + @property + def skip(self): + return bool(self.skip_reason) + def __str__(self): return f"{self.class_}.{self.public_name}" @@ -131,11 +150,38 @@ def test_testing_action_class(): Action("ParticleSet", "repeatpid", "make_private" ), Action("ParticleSet", "error_particles", "make_private" ), Action("ParticleSet", "num_error_particles", "make_private" ), - - + Action("Grid", "xi", "remove" ), + Action("Grid", "yi", "remove" ), + Action("Grid", "zi", "remove" ), + Action("Grid", "ti", "make_private" ), + Action("Grid", "cstruct", "make_private" ), + Action("Grid", "lat_flipped", "make_private" ), + Action("Grid", "load_chunk", "make_private" ), + Action("Grid", "cgrid", "make_private" ), + Action("Grid", "child_ctypes_struct", "make_private" ), + Action("Grid", "gtype", "make_private" ), + Action("Grid", "z4d", "make_private" ), + Action("Grid", "update_status", "make_private" ), + Action("Grid", "chunk_not_loaded", "make_private" ), + Action("Grid", "chunk_loading_requested", "make_private" ), + Action("Grid", "chunk_loaded_touched", "make_private" ), + Action("Grid", 
"chunk_deprecated", "make_private" ), + Action("Grid", "chunk_loaded", "make_private" ), + Action("RectilinearGrid", "lat_flipped", "make_private" ), + Action("RectilinearZGrid", "gtype", "make_private" ), + Action("RectilinearZGrid", "z4d", "make_private" ), + Action("RectilinearSGrid", "gtype", "make_private" ), + Action("RectilinearSGrid", "z4d", "make_private" ), + Action("RectilinearSGrid", "lat_flipped", "make_private" ), + Action("CurvilinearZGrid", "gtype", "make_private" ), + Action("CurvilinearZGrid", "z4d", "make_private" ), + Action("CurvilinearSGrid", "gtype", "make_private" ), + Action("CurvilinearSGrid", "z4d", "make_private" ), ] # fmt: on +actions = filter(lambda action: not action.skip, actions) + def create_test_data(): """Creates and returns the test data dictionary.""" @@ -172,6 +218,26 @@ def create_test_data(): "class": Grid, "object": grid, }, + "RectilinearGrid": { + "class": RectilinearGrid, + "object": grid, + }, + "RectilinearZGrid": { + "class": RectilinearZGrid, + "object": grid, + }, + "RectilinearSGrid": { + "class": RectilinearSGrid, + "object": grid, + }, + "CurvilinearZGrid": { + "class": CurvilinearZGrid, + "object": grid, + }, + "CurvilinearSGrid": { + "class": CurvilinearSGrid, + "object": grid, + }, } @@ -230,3 +296,18 @@ def test_read_only_attr(read_only_attribute_action: Action): assert hasattr(obj, action.public_name) with pytest.raises(AttributeError): setattr(obj, action.public_name, None) + + +@pytest.mark.parametrize( + "removed_attribute_action", + filter(lambda action: not action.is_method and action.type_ == "remove", actions), + ids=str, +) +def test_removed_attrib(removed_attribute_action: Action): + """Checks that attribute has been deleted.""" + action = removed_attribute_action + + obj = test_data[action.class_]["object"] + + with pytest.raises(AttributeError): + getattr(obj, action.public_name) From 1cf7d2bf4c57353321bdd218b356983e0e68abd7 Mon Sep 17 00:00:00 2001 From: Vecko <36369090+VeckoTheGecko@users.noreply.github.com> Date: Fri, 27 Sep 2024 15:48:37 +0200 Subject: [PATCH 06/14] update deprecation testing --- tests/test_deprecations.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/tests/test_deprecations.py b/tests/test_deprecations.py index c6457917b..33745723b 100644 --- a/tests/test_deprecations.py +++ b/tests/test_deprecations.py @@ -24,6 +24,7 @@ def __init__( class_: Literal["Field", "FieldSet"], name: str, type_: Literal["read_only", "make_private", "remove"], + *, skip_reason: str = "", ): if name.startswith("_"): @@ -90,6 +91,9 @@ def test_testing_action_class(): with pytest.raises(ValueError): # Can't have read-only method Action("Field", "my_method()", "read_only") + action = Action("Field", "my_method()", "make_private", skip_reason="Reason") + assert action.skip + # fmt: off actions = [ @@ -180,7 +184,7 @@ def test_testing_action_class(): ] # fmt: on -actions = filter(lambda action: not action.skip, actions) +actions = list(filter(lambda action: not action.skip, actions)) def create_test_data(): From d31517bfda83719e66fe0b49d07eb32420293f44 Mon Sep 17 00:00:00 2001 From: Vecko <36369090+VeckoTheGecko@users.noreply.github.com> Date: Mon, 30 Sep 2024 16:55:58 +0200 Subject: [PATCH 07/14] Grid attributes read only lon, lon, depth --- parcels/fieldset.py | 4 +- parcels/grid.py | 98 ++++++++++++++++++++++++++------------------- parcels/kernel.py | 6 +-- 3 files changed, 61 insertions(+), 47 deletions(-) diff --git a/parcels/fieldset.py b/parcels/fieldset.py index 5dca27ff4..b3167e334 100644 --- 
a/parcels/fieldset.py +++ b/parcels/fieldset.py @@ -339,7 +339,7 @@ def check_velocityfields(U, V, W): ) if not f.grid.defer_load: depth_data = f.grid.depth_field.data - f.grid.depth = depth_data if isinstance(depth_data, np.ndarray) else np.array(depth_data) + f.grid._depth = depth_data if isinstance(depth_data, np.ndarray) else np.array(depth_data) self._completed = True @classmethod @@ -1587,7 +1587,7 @@ def computeTimeChunk(self, time=0.0, dt=1): continue if f.grid.depth_field is not None: depth_data = f.grid.depth_field.data - f.grid.depth = depth_data if isinstance(depth_data, np.ndarray) else np.array(depth_data) + f.grid._depth = depth_data if isinstance(depth_data, np.ndarray) else np.array(depth_data) if abs(nextTime) == np.inf or np.isnan(nextTime): # Second happens when dt=0 return nextTime diff --git a/parcels/grid.py b/parcels/grid.py index d4d7d90d9..4bc08e9ef 100644 --- a/parcels/grid.py +++ b/parcels/grid.py @@ -51,25 +51,27 @@ def __init__( mesh: Mesh, ): self._ti = -1 - self.lon = lon self._update_status: UpdateStatus | None = None - if not self.lon.flags["C_CONTIGUOUS"]: - self.lon = np.array(self.lon, order="C") - self.lat = lat - if not self.lat.flags["C_CONTIGUOUS"]: - self.lat = np.array(self.lat, order="C") - self.time = np.zeros(1, dtype=np.float64) if time is None else time - if not self.time.flags["C_CONTIGUOUS"]: - self.time = np.array(self.time, order="C") - if not self.lon.dtype == np.float32: - self.lon = self.lon.astype(np.float32) - if not self.lat.dtype == np.float32: - self.lat = self.lat.astype(np.float32) - if not self.time.dtype == np.float64: + if not lon.flags["C_CONTIGUOUS"]: + lon = np.array(lon, order="C") + if not lat.flags["C_CONTIGUOUS"]: + lat = np.array(lat, order="C") + time = np.zeros(1, dtype=np.float64) if time is None else time + if not time.flags["C_CONTIGUOUS"]: + time = np.array(time, order="C") + if not lon.dtype == np.float32: + lon = lon.astype(np.float32) + if not lat.dtype == np.float32: + lat = lat.astype(np.float32) + if not time.dtype == np.float64: assert isinstance( - self.time[0], (np.integer, np.floating, float, int) + time[0], (np.integer, np.floating, float, int) ), "Time vector must be an array of int or floats" - self.time = self.time.astype(np.float64) + time = time.astype(np.float64) + + self._lon = lon + self._lat = lat + self.time = time self.time_full = self.time # needed for deferred_loaded Fields self.time_origin = TimeConverter() if time_origin is None else time_origin assert isinstance(self.time_origin, TimeConverter), "time_origin needs to be a TimeConverter object" @@ -100,6 +102,18 @@ def __repr__(self): f"time_origin={self.time_origin!r}, mesh={self.mesh!r})" ) + @property + def lon(self): + return self._lon + + @property + def lat(self): + return self._lat + + @property + def depth(self): + return self._depth + @property @deprecated_made_private # TODO: Remove 6 months after v3.1.0 def ti(self): @@ -235,11 +249,11 @@ class CStructuredGrid(Structure): def lon_grid_to_target(self): if self.lon_remapping: - self.lon = self.lon_remapping.to_target(self.lon) + self._lon = self.lon_remapping.to_target(self.lon) def lon_grid_to_source(self): if self.lon_remapping: - self.lon = self.lon_remapping.to_source(self.lon) + self._lon = self.lon_remapping.to_source(self.lon) def lon_particle_to_target(self, lon): if self.lon_remapping: @@ -265,26 +279,26 @@ def add_Sdepth_periodic_halo(self, *args, **kwargs): def _add_Sdepth_periodic_halo(self, zonal, meridional, halosize): if zonal: if len(self.depth.shape) == 3: - 
self.depth = np.concatenate( + self._depth = np.concatenate( (self.depth[:, :, -halosize:], self.depth, self.depth[:, :, 0:halosize]), axis=len(self.depth.shape) - 1, ) assert self.depth.shape[2] == self.xdim, "Third dim must be x." else: - self.depth = np.concatenate( + self._depth = np.concatenate( (self.depth[:, :, :, -halosize:], self.depth, self.depth[:, :, :, 0:halosize]), axis=len(self.depth.shape) - 1, ) assert self.depth.shape[3] == self.xdim, "Fourth dim must be x." if meridional: if len(self.depth.shape) == 3: - self.depth = np.concatenate( + self._depth = np.concatenate( (self.depth[:, -halosize:, :], self.depth, self.depth[:, 0:halosize, :]), axis=len(self.depth.shape) - 2, ) assert self.depth.shape[1] == self.ydim, "Second dim must be y." else: - self.depth = np.concatenate( + self._depth = np.concatenate( (self.depth[:, :, -halosize:, :], self.depth, self.depth[:, :, 0:halosize, :]), axis=len(self.depth.shape) - 2, ) @@ -433,7 +447,7 @@ def __init__(self, lon, lat, time, time_origin, mesh: Mesh): self.ydim = self.lat.size self.tdim = self.time.size if self.ydim > 1 and self.lat[-1] < self.lat[0]: - self.lat = np.flip(self.lat, axis=0) + self._lat = np.flip(self.lat, axis=0) self._lat_flipped = True warnings.warn( "Flipping lat data from North-South to South-North. " @@ -465,7 +479,7 @@ def add_periodic_halo(self, zonal: bool, meridional: bool, halosize: int = 5): FieldSetWarning, stacklevel=2, ) - self.lon = np.concatenate((self.lon[-halosize:] - lonshift, self.lon, self.lon[0:halosize] + lonshift)) + self._lon = np.concatenate((self.lon[-halosize:] - lonshift, self.lon, self.lon[0:halosize] + lonshift)) self.xdim = self.lon.size self.zonal_periodic = True self.zonal_halo = halosize @@ -479,7 +493,7 @@ def add_periodic_halo(self, zonal: bool, meridional: bool, halosize: int = 5): stacklevel=2, ) latshift = self.lat[-1] - 2 * self.lat[0] + self.lat[1] - self.lat = np.concatenate((self.lat[-halosize:] - latshift, self.lat, self.lat[0:halosize] + latshift)) + self._lat = np.concatenate((self.lat[-halosize:] - latshift, self.lat, self.lat[0:halosize] + latshift)) self.ydim = self.lat.size self.meridional_halo = halosize self.lonlat_minmax = np.array( @@ -520,13 +534,13 @@ def __init__(self, lon, lat, depth=None, time=None, time_origin=None, mesh: Mesh assert len(depth.shape) <= 1, "depth is not a vector" self._gtype = GridType.RectilinearZGrid - self.depth = np.zeros(1, dtype=np.float32) if depth is None else depth + self._depth = np.zeros(1, dtype=np.float32) if depth is None else depth if not self.depth.flags["C_CONTIGUOUS"]: - self.depth = np.array(self.depth, order="C") + self._depth = np.array(self.depth, order="C") self.zdim = self.depth.size self._z4d = -1 # only used in RectilinearSGrid if not self.depth.dtype == np.float32: - self.depth = self.depth.astype(np.float32) + self._depth = self.depth.astype(np.float32) class RectilinearSGrid(RectilinearGrid): @@ -573,9 +587,9 @@ def __init__( assert isinstance(depth, np.ndarray) and len(depth.shape) in [3, 4], "depth is not a 3D or 4D numpy array" self._gtype = GridType.RectilinearSGrid - self.depth = depth + self._depth = depth if not self.depth.flags["C_CONTIGUOUS"]: - self.depth = np.array(self.depth, order="C") + self._depth = np.array(self.depth, order="C") self.zdim = self.depth.shape[-3] self._z4d = 1 if len(self.depth.shape) == 4 else 0 if self._z4d: @@ -597,9 +611,9 @@ def __init__( self.ydim == self.depth.shape[-2] ), "depth dimension has the wrong format. 
It should be [zdim, ydim, xdim]" if not self.depth.dtype == np.float32: - self.depth = self.depth.astype(np.float32) + self._depth = self.depth.astype(np.float32) if self._lat_flipped: - self.depth = np.flip(self.depth, axis=-2) + self._depth = np.flip(self.depth, axis=-2) class CurvilinearGrid(Grid): @@ -647,7 +661,7 @@ def add_periodic_halo(self, zonal, meridional, halosize=5): FieldSetWarning, stacklevel=2, ) - self.lon = np.concatenate( + self._lon = np.concatenate( ( self.lon[:, -halosize:] - lonshift[:, np.newaxis], self.lon, @@ -655,7 +669,7 @@ def add_periodic_halo(self, zonal, meridional, halosize=5): ), axis=len(self.lon.shape) - 1, ) - self.lat = np.concatenate( + self._lat = np.concatenate( (self.lat[:, -halosize:], self.lat, self.lat[:, 0:halosize]), axis=len(self.lat.shape) - 1 ) self.xdim = self.lon.shape[1] @@ -672,7 +686,7 @@ def add_periodic_halo(self, zonal, meridional, halosize=5): stacklevel=2, ) latshift = self.lat[-1, :] - 2 * self.lat[0, :] + self.lat[1, :] - self.lat = np.concatenate( + self._lat = np.concatenate( ( self.lat[-halosize:, :] - latshift[np.newaxis, :], self.lat, @@ -680,7 +694,7 @@ def add_periodic_halo(self, zonal, meridional, halosize=5): ), axis=len(self.lat.shape) - 2, ) - self.lon = np.concatenate( + self._lon = np.concatenate( (self.lon[-halosize:, :], self.lon, self.lon[0:halosize, :]), axis=len(self.lon.shape) - 2 ) self.xdim = self.lon.shape[1] @@ -729,13 +743,13 @@ def __init__( assert len(depth.shape) == 1, "depth is not a vector" self._gtype = GridType.CurvilinearZGrid - self.depth = np.zeros(1, dtype=np.float32) if depth is None else depth + self._depth = np.zeros(1, dtype=np.float32) if depth is None else depth if not self.depth.flags["C_CONTIGUOUS"]: - self.depth = np.array(self.depth, order="C") + self._depth = np.array(self.depth, order="C") self.zdim = self.depth.size self._z4d = -1 # only for SGrid if not self.depth.dtype == np.float32: - self.depth = self.depth.astype(np.float32) + self._depth = self.depth.astype(np.float32) class CurvilinearSGrid(CurvilinearGrid): @@ -781,9 +795,9 @@ def __init__( assert isinstance(depth, np.ndarray) and len(depth.shape) in [3, 4], "depth is not a 4D numpy array" self._gtype = GridType.CurvilinearSGrid - self.depth = depth # should be a C-contiguous array of floats + self._depth = depth # should be a C-contiguous array of floats if not self.depth.flags["C_CONTIGUOUS"]: - self.depth = np.array(self.depth, order="C") + self._depth = np.array(self.depth, order="C") self.zdim = self.depth.shape[-3] self._z4d = 1 if len(self.depth.shape) == 4 else 0 if self._z4d: @@ -805,4 +819,4 @@ def __init__( self.ydim == self.depth.shape[-2] ), "depth dimension has the wrong format. 
It should be [zdim, ydim, xdim]" if not self.depth.dtype == np.float32: - self.depth = self.depth.astype(np.float32) + self._depth = self.depth.astype(np.float32) diff --git a/parcels/kernel.py b/parcels/kernel.py index 9a2dbef6a..7baf4d9ac 100644 --- a/parcels/kernel.py +++ b/parcels/kernel.py @@ -599,11 +599,11 @@ def load_fieldset_jit(self, pset): if not g._load_chunk.flags["C_CONTIGUOUS"]: g._load_chunk = np.array(g._load_chunk, order="C") if not g.depth.flags.c_contiguous: - g.depth = np.array(g.depth, order="C") + g._depth = np.array(g.depth, order="C") if not g.lon.flags.c_contiguous: - g.lon = np.array(g.lon, order="C") + g._lon = np.array(g.lon, order="C") if not g.lat.flags.c_contiguous: - g.lat = np.array(g.lat, order="C") + g._lat = np.array(g.lat, order="C") def execute_jit(self, pset, endtime, dt): """Invokes JIT engine to perform the core update loop.""" From ffecd26ef8b1e7613e4b5e841490b321131da2ea Mon Sep 17 00:00:00 2001 From: Vecko <36369090+VeckoTheGecko@users.noreply.github.com> Date: Mon, 30 Sep 2024 17:14:32 +0200 Subject: [PATCH 08/14] Grid attributes read only lonlat_minmax, meridional_halo, mesh, time_origin --- parcels/fieldset.py | 2 +- parcels/grid.py | 30 +++++++++++++++++++++++------- tests/test_fieldset.py | 4 ++-- 3 files changed, 26 insertions(+), 10 deletions(-) diff --git a/parcels/fieldset.py b/parcels/fieldset.py index b3167e334..adc9c1027 100644 --- a/parcels/fieldset.py +++ b/parcels/fieldset.py @@ -316,7 +316,7 @@ def check_velocityfields(U, V, W): g.time = g.time + self.time_origin.reltime(g.time_origin) if g.defer_load: g.time_full = g.time_full + self.time_origin.reltime(g.time_origin) - g.time_origin = self.time_origin + g._time_origin = self.time_origin self._add_UVfield() ccode_fieldnames = [] diff --git a/parcels/grid.py b/parcels/grid.py index 4bc08e9ef..134cc60e1 100644 --- a/parcels/grid.py +++ b/parcels/grid.py @@ -73,18 +73,18 @@ def __init__( self._lat = lat self.time = time self.time_full = self.time # needed for deferred_loaded Fields - self.time_origin = TimeConverter() if time_origin is None else time_origin + self._time_origin = TimeConverter() if time_origin is None else time_origin assert isinstance(self.time_origin, TimeConverter), "time_origin needs to be a TimeConverter object" assert_valid_mesh(mesh) - self.mesh = mesh + self._mesh = mesh self._cstruct = None self.cell_edge_sizes: dict[str, npt.NDArray] = {} self.zonal_periodic = False self.zonal_halo = 0 - self.meridional_halo = 0 + self._meridional_halo = 0 self._lat_flipped = False self.defer_load = False - self.lonlat_minmax = np.array( + self._lonlat_minmax = np.array( [np.nanmin(lon), np.nanmax(lon), np.nanmin(lat), np.nanmax(lat)], dtype=np.float32 ) self.periods = 0 @@ -114,6 +114,22 @@ def lat(self): def depth(self): return self._depth + @property + def mesh(self): + return self._mesh + + @property + def meridional_halo(self): + return self._meridional_halo + + @property + def lonlat_minmax(self): + return self._lonlat_minmax + + @property + def time_origin(self): + return self._time_origin + @property @deprecated_made_private # TODO: Remove 6 months after v3.1.0 def ti(self): @@ -495,8 +511,8 @@ def add_periodic_halo(self, zonal: bool, meridional: bool, halosize: int = 5): latshift = self.lat[-1] - 2 * self.lat[0] + self.lat[1] self._lat = np.concatenate((self.lat[-halosize:] - latshift, self.lat, self.lat[0:halosize] + latshift)) self.ydim = self.lat.size - self.meridional_halo = halosize - self.lonlat_minmax = np.array( + self._meridional_halo = halosize + 
self._lonlat_minmax = np.array( [np.nanmin(self.lon), np.nanmax(self.lon), np.nanmin(self.lat), np.nanmax(self.lat)], dtype=np.float32 ) if isinstance(self, RectilinearSGrid): @@ -699,7 +715,7 @@ def add_periodic_halo(self, zonal, meridional, halosize=5): ) self.xdim = self.lon.shape[1] self.ydim = self.lat.shape[0] - self.meridional_halo = halosize + self._meridional_halo = halosize if isinstance(self, CurvilinearSGrid): self._add_Sdepth_periodic_halo(zonal, meridional, halosize) diff --git a/tests/test_fieldset.py b/tests/test_fieldset.py index 5c7a6e073..a69da7f05 100644 --- a/tests/test_fieldset.py +++ b/tests/test_fieldset.py @@ -933,8 +933,8 @@ def test_fieldset_defer_loading_with_diff_time_origin(tmpdir, fail): data0, dims0 = generate_fieldset_data(10, 10, 1, 10) dims0["time"] = np.arange(0, 10, 1) * 3600 fieldset_out = FieldSet.from_data(data0, dims0) - fieldset_out.U.grid.time_origin = TimeConverter(np.datetime64("2018-04-20")) - fieldset_out.V.grid.time_origin = TimeConverter(np.datetime64("2018-04-20")) + fieldset_out.U.grid._time_origin = TimeConverter(np.datetime64("2018-04-20")) + fieldset_out.V.grid._time_origin = TimeConverter(np.datetime64("2018-04-20")) data1, dims1 = generate_fieldset_data(10, 10, 1, 10) if fail: dims1["time"] = np.arange(0, 10, 1) * 3600 From 95b4c2b0ca711397a1ab026106aa74de39b6f410 Mon Sep 17 00:00:00 2001 From: Vecko <36369090+VeckoTheGecko@users.noreply.github.com> Date: Mon, 30 Sep 2024 17:36:33 +0200 Subject: [PATCH 09/14] Grid attributes read only zonal_halo, zonal_periodic, defer_load, cell_edge_sizes --- parcels/field.py | 2 +- parcels/grid.py | 34 +++++++++++++++++++++++++--------- 2 files changed, 26 insertions(+), 10 deletions(-) diff --git a/parcels/field.py b/parcels/field.py index 7f135659e..df3f6b991 100644 --- a/parcels/field.py +++ b/parcels/field.py @@ -718,7 +718,7 @@ def from_netcdf( lib = np if isinstance(data_list[0], np.ndarray) else da data = lib.concatenate(data_list, axis=0) else: - grid.defer_load = True + grid._defer_load = True grid._ti = -1 data = DeferredArray() data.compute_shape(grid.xdim, grid.ydim, grid.zdim, grid.tdim, len(grid.timeslices)) diff --git a/parcels/grid.py b/parcels/grid.py index 134cc60e1..931d6f613 100644 --- a/parcels/grid.py +++ b/parcels/grid.py @@ -78,12 +78,12 @@ def __init__( assert_valid_mesh(mesh) self._mesh = mesh self._cstruct = None - self.cell_edge_sizes: dict[str, npt.NDArray] = {} - self.zonal_periodic = False - self.zonal_halo = 0 + self._cell_edge_sizes: dict[str, npt.NDArray] = {} + self._zonal_periodic = False + self._zonal_halo = 0 self._meridional_halo = 0 self._lat_flipped = False - self.defer_load = False + self._defer_load = False self._lonlat_minmax = np.array( [np.nanmin(lon), np.nanmax(lon), np.nanmin(lat), np.nanmax(lat)], dtype=np.float32 ) @@ -130,6 +130,22 @@ def lonlat_minmax(self): def time_origin(self): return self._time_origin + @property + def zonal_periodic(self): + return self._zonal_periodic + + @property + def zonal_halo(self): + return self._zonal_halo + + @property + def defer_load(self): + return self._defer_load + + @property + def cell_edge_sizes(self): + return self._cell_edge_sizes + @property @deprecated_made_private # TODO: Remove 6 months after v3.1.0 def ti(self): @@ -286,7 +302,7 @@ def _check_zonal_periodic(self): dx = (self.lon[1:] - self.lon[:-1]) if len(self.lon.shape) == 1 else self.lon[0, 1:] - self.lon[0, :-1] dx = np.where(dx < -180, dx + 360, dx) dx = np.where(dx > 180, dx - 360, dx) - self.zonal_periodic = sum(dx) > 359.9 + 
self._zonal_periodic = sum(dx) > 359.9 @deprecated_made_private # TODO: Remove 6 months after v3.1.0 def add_Sdepth_periodic_halo(self, *args, **kwargs): @@ -497,8 +513,8 @@ def add_periodic_halo(self, zonal: bool, meridional: bool, halosize: int = 5): ) self._lon = np.concatenate((self.lon[-halosize:] - lonshift, self.lon, self.lon[0:halosize] + lonshift)) self.xdim = self.lon.size - self.zonal_periodic = True - self.zonal_halo = halosize + self._zonal_periodic = True + self._zonal_halo = halosize if meridional: if not np.allclose(self.lat[1] - self.lat[0], self.lat[-1] - self.lat[-2]): warnings.warn( @@ -690,8 +706,8 @@ def add_periodic_halo(self, zonal, meridional, halosize=5): ) self.xdim = self.lon.shape[1] self.ydim = self.lat.shape[0] - self.zonal_periodic = True - self.zonal_halo = halosize + self._zonal_periodic = True + self._zonal_halo = halosize if meridional: if not np.allclose(self.lat[1, :] - self.lat[0, :], self.lat[-1, :] - self.lat[-2, :]): warnings.warn( From cf92019c6aad56f133a92e31e4c7738bafc46a83 Mon Sep 17 00:00:00 2001 From: Vecko <36369090+VeckoTheGecko@users.noreply.github.com> Date: Mon, 30 Sep 2024 17:37:39 +0200 Subject: [PATCH 10/14] Update pytest conf and mark flaky test --- pyproject.toml | 10 +++++++++- tests/test_fieldset.py | 1 + 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 0487fe167..55ded6ffb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -51,10 +51,18 @@ write_to = "parcels/_version_setup.py" local_scheme = "no-local-version" [tool.pytest.ini_options] +addopts = ["--strict-config", "--strict-markers"] +testpaths = ["parcels/tests", "docs/examples"] python_files = ["test_*.py", "example_*.py", "*tutorial*"] +minversion = "7" +markers = [ # can be skipped by doing `pytest -m "not slow"` etc. 
+ "flaky: flaky tests", + "slow: slow tests", +] + filterwarnings = [ "error:.*removed in a future release of Parcels.*:DeprecationWarning", # Have Parcels DeprecationWarnings fail CI (prevents deprecated items being used in internal code) - ] +] [tool.ruff] line-length = 120 diff --git a/tests/test_fieldset.py b/tests/test_fieldset.py index a69da7f05..1c43493c5 100644 --- a/tests/test_fieldset.py +++ b/tests/test_fieldset.py @@ -683,6 +683,7 @@ def UpdateU(particle, fieldset, time): assert np.allclose(fieldset.U.data, da["U"].values, atol=1.0) +@pytest.mark.flaky @pytest.mark.parametrize("mode", ["scipy", "jit"]) @pytest.mark.parametrize("time_periodic", [4 * 86400.0, False]) @pytest.mark.parametrize("dt", [-3600, 3600]) From b0eae3c4170d49c9815c0285b7330de74fa9d61d Mon Sep 17 00:00:00 2001 From: Vecko <36369090+VeckoTheGecko@users.noreply.github.com> Date: Mon, 30 Sep 2024 18:15:53 +0200 Subject: [PATCH 11/14] Grid attributes read only xdim, ydim --- parcels/grid.py | 47 +++++++++++++++++++++++++++++++++-------------- 1 file changed, 33 insertions(+), 14 deletions(-) diff --git a/parcels/grid.py b/parcels/grid.py index 931d6f613..0d8745a09 100644 --- a/parcels/grid.py +++ b/parcels/grid.py @@ -475,9 +475,8 @@ def __init__(self, lon, lat, time, time_origin, mesh: Mesh): assert len(time.shape) == 1, "time is not a vector" super().__init__(lon, lat, time, time_origin, mesh) - self.xdim = self.lon.size - self.ydim = self.lat.size self.tdim = self.time.size + if self.ydim > 1 and self.lat[-1] < self.lat[0]: self._lat = np.flip(self.lat, axis=0) self._lat_flipped = True @@ -488,6 +487,14 @@ def __init__(self, lon, lat, time, time_origin, mesh: Mesh): stacklevel=2, ) + @property + def xdim(self): + return self.lon.size + + @property + def ydim(self): + return self.lat.size + def add_periodic_halo(self, zonal: bool, meridional: bool, halosize: int = 5): """Add a 'halo' to the Grid, through extending the Grid (and lon/lat) similarly to the halo created for the Fields @@ -512,7 +519,6 @@ def add_periodic_halo(self, zonal: bool, meridional: bool, halosize: int = 5): stacklevel=2, ) self._lon = np.concatenate((self.lon[-halosize:] - lonshift, self.lon, self.lon[0:halosize] + lonshift)) - self.xdim = self.lon.size self._zonal_periodic = True self._zonal_halo = halosize if meridional: @@ -526,7 +532,6 @@ def add_periodic_halo(self, zonal: bool, meridional: bool, halosize: int = 5): ) latshift = self.lat[-1] - 2 * self.lat[0] + self.lat[1] self._lat = np.concatenate((self.lat[-halosize:] - latshift, self.lat, self.lat[0:halosize] + latshift)) - self.ydim = self.lat.size self._meridional_halo = halosize self._lonlat_minmax = np.array( [np.nanmin(self.lon), np.nanmax(self.lon), np.nanmin(self.lat), np.nanmax(self.lat)], dtype=np.float32 @@ -569,11 +574,14 @@ def __init__(self, lon, lat, depth=None, time=None, time_origin=None, mesh: Mesh self._depth = np.zeros(1, dtype=np.float32) if depth is None else depth if not self.depth.flags["C_CONTIGUOUS"]: self._depth = np.array(self.depth, order="C") - self.zdim = self.depth.size self._z4d = -1 # only used in RectilinearSGrid if not self.depth.dtype == np.float32: self._depth = self.depth.astype(np.float32) + @property + def zdim(self): + return self.depth.size + class RectilinearSGrid(RectilinearGrid): """Rectilinear S Grid. 
Same horizontal discretisation as a rectilinear z grid, @@ -622,7 +630,6 @@ def __init__( self._depth = depth if not self.depth.flags["C_CONTIGUOUS"]: self._depth = np.array(self.depth, order="C") - self.zdim = self.depth.shape[-3] self._z4d = 1 if len(self.depth.shape) == 4 else 0 if self._z4d: # self.depth.shape[0] is 0 for S grids loaded from netcdf file @@ -647,6 +654,10 @@ def __init__( if self._lat_flipped: self._depth = np.flip(self.depth, axis=-2) + @property + def zdim(self): + return self.depth.shape[-3] + class CurvilinearGrid(Grid): def __init__( @@ -666,10 +677,16 @@ def __init__( lon = lon.squeeze() lat = lat.squeeze() super().__init__(lon, lat, time, time_origin, mesh) - self.xdim = self.lon.shape[1] - self.ydim = self.lon.shape[0] self.tdim = self.time.size + @property + def xdim(self): + return self.lon.shape[1] + + @property + def ydim(self): + return self.lon.shape[0] + def add_periodic_halo(self, zonal, meridional, halosize=5): """Add a 'halo' to the Grid, through extending the Grid (and lon/lat) similarly to the halo created for the Fields @@ -704,8 +721,6 @@ def add_periodic_halo(self, zonal, meridional, halosize=5): self._lat = np.concatenate( (self.lat[:, -halosize:], self.lat, self.lat[:, 0:halosize]), axis=len(self.lat.shape) - 1 ) - self.xdim = self.lon.shape[1] - self.ydim = self.lat.shape[0] self._zonal_periodic = True self._zonal_halo = halosize if meridional: @@ -729,8 +744,6 @@ def add_periodic_halo(self, zonal, meridional, halosize=5): self._lon = np.concatenate( (self.lon[-halosize:, :], self.lon, self.lon[0:halosize, :]), axis=len(self.lon.shape) - 2 ) - self.xdim = self.lon.shape[1] - self.ydim = self.lat.shape[0] self._meridional_halo = halosize if isinstance(self, CurvilinearSGrid): self._add_Sdepth_periodic_halo(zonal, meridional, halosize) @@ -778,11 +791,14 @@ def __init__( self._depth = np.zeros(1, dtype=np.float32) if depth is None else depth if not self.depth.flags["C_CONTIGUOUS"]: self._depth = np.array(self.depth, order="C") - self.zdim = self.depth.size self._z4d = -1 # only for SGrid if not self.depth.dtype == np.float32: self._depth = self.depth.astype(np.float32) + @property + def zdim(self): + return self.depth.size + class CurvilinearSGrid(CurvilinearGrid): """Curvilinear S Grid. @@ -830,7 +846,6 @@ def __init__( self._depth = depth # should be a C-contiguous array of floats if not self.depth.flags["C_CONTIGUOUS"]: self._depth = np.array(self.depth, order="C") - self.zdim = self.depth.shape[-3] self._z4d = 1 if len(self.depth.shape) == 4 else 0 if self._z4d: # self.depth.shape[0] is 0 for S grids loaded from netcdf file @@ -852,3 +867,7 @@ def __init__( ), "depth dimension has the wrong format. 
It should be [zdim, ydim, xdim]" if not self.depth.dtype == np.float32: self._depth = self.depth.astype(np.float32) + + @property + def zdim(self): + return self.depth.shape[-3] From 9828b72aafd6b3273618280f826ac4605341f5fd Mon Sep 17 00:00:00 2001 From: Vecko <36369090+VeckoTheGecko@users.noreply.github.com> Date: Mon, 30 Sep 2024 18:39:38 +0200 Subject: [PATCH 12/14] Update deprecation list to match --- tests/test_deprecations.py | 129 +++++++++++++++++++++++++------------ 1 file changed, 87 insertions(+), 42 deletions(-) diff --git a/tests/test_deprecations.py b/tests/test_deprecations.py index 33745723b..96ae54277 100644 --- a/tests/test_deprecations.py +++ b/tests/test_deprecations.py @@ -6,6 +6,7 @@ from parcels import Field, FieldSet, JITParticle, ParticleSet from parcels.grid import ( + CurvilinearGrid, CurvilinearSGrid, CurvilinearZGrid, Grid, @@ -135,52 +136,92 @@ def test_testing_action_class(): Action("FieldSet", "parse_wildcards()", "make_private" ), # 1713 - Action("ParticleSet", "active_particles_mask()", "make_private" ), - Action("ParticleSet", "compute_neighbor_tree()", "make_private" ), - Action("ParticleSet", "neighbors_by_index()", "make_private" ), - Action("ParticleSet", "neighbors_by_coor()", "make_private" ), - Action("ParticleSet", "monte_carlo_sample()", "make_private" ), - Action("Grid", "check_zonal_periodic()", "make_private" ), - Action("Grid", "add_Sdepth_periodic_halo()", "make_private" ), - Action("Grid", "computeTimeChunk()", "make_private" ), - Action("ParticleSet", "repeat_starttime", "make_private" ), - Action("ParticleSet", "repeatlon", "make_private" ), - Action("ParticleSet", "repeatlat", "make_private" ), - Action("ParticleSet", "repeatdepth", "make_private" ), - Action("ParticleSet", "repeatpclass", "make_private" ), - Action("ParticleSet", "repeatkwargs", "make_private" ), - Action("ParticleSet", "kernel", "make_private" ), - Action("ParticleSet", "interaction_kernel", "make_private" ), - Action("ParticleSet", "repeatpid", "make_private" ), - Action("ParticleSet", "error_particles", "make_private" ), - Action("ParticleSet", "num_error_particles", "make_private" ), - Action("Grid", "xi", "remove" ), - Action("Grid", "yi", "remove" ), - Action("Grid", "zi", "remove" ), - Action("Grid", "ti", "make_private" ), - Action("Grid", "cstruct", "make_private" ), - Action("Grid", "lat_flipped", "make_private" ), - Action("Grid", "load_chunk", "make_private" ), - Action("Grid", "cgrid", "make_private" ), - Action("Grid", "child_ctypes_struct", "make_private" ), - Action("Grid", "gtype", "make_private" ), - Action("Grid", "z4d", "make_private" ), - Action("Grid", "update_status", "make_private" ), - Action("Grid", "chunk_not_loaded", "make_private" ), - Action("Grid", "chunk_loading_requested", "make_private" ), - Action("Grid", "chunk_loaded_touched", "make_private" ), - Action("Grid", "chunk_deprecated", "make_private" ), - Action("Grid", "chunk_loaded", "make_private" ), - Action("RectilinearGrid", "lat_flipped", "make_private" ), + Action("ParticleSet", "repeat_starttime", "make_private" ), + Action("ParticleSet", "repeatlon", "make_private" ), + Action("ParticleSet", "repeatlat", "make_private" ), + Action("ParticleSet", "repeatdepth", "make_private" ), + Action("ParticleSet", "repeatpclass", "make_private" ), + Action("ParticleSet", "repeatkwargs", "make_private" ), + Action("ParticleSet", "kernel", "make_private" ), + Action("ParticleSet", "interaction_kernel", "make_private" ), + Action("ParticleSet", "repeatpid", "make_private" ), + 
Action("ParticleSet", "active_particles_mask()", "make_private" ), + Action("ParticleSet", "compute_neighbor_tree()", "make_private" ), + Action("ParticleSet", "neighbors_by_index()", "make_private" ), + Action("ParticleSet", "neighbors_by_coor()", "make_private" ), + Action("ParticleSet", "monte_carlo_sample()", "make_private" ), + Action("ParticleSet", "error_particles", "make_private" ), + Action("ParticleSet", "num_error_particles", "make_private" ), + Action("Grid", "xi", "remove" ), + Action("Grid", "yi", "remove" ), + Action("Grid", "zi", "remove" ), + Action("Grid", "ti", "make_private" ), + Action("Grid", "lon", "read_only" ), + Action("Grid", "lat", "read_only" ), + Action("Grid", "time_origin", "read_only" ), + Action("Grid", "mesh", "read_only" ), + Action("Grid", "cstruct", "make_private" ), + Action("Grid", "cell_edge_sizes", "read_only" ), + Action("Grid", "zonal_periodic", "read_only" ), + Action("Grid", "zonal_halo", "read_only" ), + Action("Grid", "meridional_halo", "read_only" ), + Action("Grid", "lat_flipped", "make_private" ), + Action("Grid", "defer_load", "read_only" ), + Action("Grid", "lonlat_minmax", "read_only" ), + Action("Grid", "load_chunk", "make_private" ), + Action("Grid", "cgrid", "make_private" ), + Action("Grid", "child_ctypes_struct", "make_private" ), + Action("Grid", "gtype", "make_private" ), + Action("Grid", "xdim", "read_only" ), + Action("Grid", "ydim", "read_only" ), + Action("Grid", "zdim", "read_only" ), + Action("Grid", "z4d", "make_private" ), + Action("Grid", "depth", "read_only" ), + Action("Grid", "check_zonal_periodic()", "make_private" ), + Action("Grid", "add_Sdepth_periodic_halo()", "make_private" ), + Action("Grid", "computeTimeChunk()", "make_private" ), + Action("Grid", "update_status", "make_private" ), + Action("Grid", "chunk_not_loaded", "make_private" ), + Action("Grid", "chunk_loading_requested", "make_private" ), + Action("Grid", "chunk_loaded_touched", "make_private" ), + Action("Grid", "chunk_deprecated", "make_private" ), + Action("Grid", "chunk_loaded", "make_private" ), + Action("RectilinearGrid", "lon", "read_only" ), + Action("RectilinearGrid", "xdim", "read_only" ), + Action("RectilinearGrid", "lat", "read_only" ), + Action("RectilinearGrid", "ydim", "read_only" ), + Action("RectilinearGrid", "lat_flipped", "make_private" ), + Action("RectilinearGrid", "zonal_periodic", "read_only" ), + Action("RectilinearGrid", "zonal_halo", "read_only" ), + Action("RectilinearGrid", "meridional_halo", "read_only" ), Action("RectilinearZGrid", "gtype", "make_private" ), + Action("RectilinearZGrid", "depth", "read_only" ), + Action("RectilinearZGrid", "zdim", "read_only" ), Action("RectilinearZGrid", "z4d", "make_private" ), Action("RectilinearSGrid", "gtype", "make_private" ), + Action("RectilinearSGrid", "depth", "read_only" ), + Action("RectilinearSGrid", "zdim", "read_only" ), Action("RectilinearSGrid", "z4d", "make_private" ), + Action("RectilinearSGrid", "xdim", "read_only" ), + Action("RectilinearSGrid", "ydim", "read_only" ), Action("RectilinearSGrid", "lat_flipped", "make_private" ), + Action("CurvilinearGrid", "lon", "read_only" ), + Action("CurvilinearGrid", "xdim", "read_only" ), + Action("CurvilinearGrid", "ydim", "read_only" ), + Action("CurvilinearGrid", "lat", "read_only" ), + Action("CurvilinearGrid", "zonal_periodic", "read_only" ), + Action("CurvilinearGrid", "zonal_halo", "read_only" ), + Action("CurvilinearGrid", "meridional_halo", "read_only" ), Action("CurvilinearZGrid", "gtype", "make_private" ), + 
Action("CurvilinearZGrid", "depth", "read_only" ), + Action("CurvilinearZGrid", "zdim", "read_only" ), Action("CurvilinearZGrid", "z4d", "make_private" ), Action("CurvilinearSGrid", "gtype", "make_private" ), + Action("CurvilinearSGrid", "depth", "read_only" ), + Action("CurvilinearSGrid", "zdim", "read_only" ), Action("CurvilinearSGrid", "z4d", "make_private" ), + Action("CurvilinearSGrid", "xdim", "read_only" ), + Action("CurvilinearSGrid", "ydim", "read_only" ), ] # fmt: on @@ -224,23 +265,27 @@ def create_test_data(): }, "RectilinearGrid": { "class": RectilinearGrid, - "object": grid, + "object": grid, # TODO: Update object }, "RectilinearZGrid": { "class": RectilinearZGrid, - "object": grid, + "object": grid, # TODO: Update object }, "RectilinearSGrid": { "class": RectilinearSGrid, - "object": grid, + "object": grid, # TODO: Update object }, "CurvilinearZGrid": { "class": CurvilinearZGrid, - "object": grid, + "object": grid, # TODO: Update object + }, + "CurvilinearGrid": { + "class": CurvilinearGrid, + "object": grid, # TODO: Update object }, "CurvilinearSGrid": { "class": CurvilinearSGrid, - "object": grid, + "object": grid, # TODO: Update object }, } From 37cf142f4aff9145cdc9465f6457efb7376e9cb7 Mon Sep 17 00:00:00 2001 From: Vecko <36369090+VeckoTheGecko@users.noreply.github.com> Date: Mon, 30 Sep 2024 18:46:19 +0200 Subject: [PATCH 13/14] update todo comments --- tests/test_deprecations.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/test_deprecations.py b/tests/test_deprecations.py index 96ae54277..faaf456d7 100644 --- a/tests/test_deprecations.py +++ b/tests/test_deprecations.py @@ -265,27 +265,27 @@ def create_test_data(): }, "RectilinearGrid": { "class": RectilinearGrid, - "object": grid, # TODO: Update object + "object": grid, # not exactly right but good enough }, "RectilinearZGrid": { "class": RectilinearZGrid, - "object": grid, # TODO: Update object + "object": grid, # not exactly right but good enough }, "RectilinearSGrid": { "class": RectilinearSGrid, - "object": grid, # TODO: Update object + "object": grid, # not exactly right but good enough }, "CurvilinearZGrid": { "class": CurvilinearZGrid, - "object": grid, # TODO: Update object + "object": grid, # not exactly right but good enough }, "CurvilinearGrid": { "class": CurvilinearGrid, - "object": grid, # TODO: Update object + "object": grid, # not exactly right but good enough }, "CurvilinearSGrid": { "class": CurvilinearSGrid, - "object": grid, # TODO: Update object + "object": grid, # not exactly right but good enough }, } From e82472f0fc34b2f7e21df8f077bdc8f200a4d0f6 Mon Sep 17 00:00:00 2001 From: Vecko <36369090+VeckoTheGecko@users.noreply.github.com> Date: Wed, 2 Oct 2024 15:29:32 +0200 Subject: [PATCH 14/14] validate unique deprecations --- tests/test_deprecations.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/test_deprecations.py b/tests/test_deprecations.py index faaf456d7..b2007558b 100644 --- a/tests/test_deprecations.py +++ b/tests/test_deprecations.py @@ -224,6 +224,7 @@ def test_testing_action_class(): Action("CurvilinearSGrid", "ydim", "read_only" ), ] # fmt: on +assert len({str(a) for a in actions}) == len(actions) # Check that all actions are unique actions = list(filter(lambda action: not action.skip, actions))