Skip to content

Commit

Permalink
Merge pull request #1672 from andrew-s28/logging-updates
Browse files Browse the repository at this point in the history
Implement warnings in Parcels
  • Loading branch information
VeckoTheGecko authored Sep 2, 2024
2 parents 679d06d + 024aa35 commit 286558c
Show file tree
Hide file tree
Showing 20 changed files with 396 additions and 197 deletions.
11 changes: 6 additions & 5 deletions docs/examples/documentation_indexing.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,7 @@
"source": [
"from glob import glob\n",
"from os import path\n",
"import warnings\n",
"\n",
"import numpy as np\n",
"\n",
Expand Down Expand Up @@ -179,17 +180,17 @@
" \"W\": c_grid_dimensions,\n",
"}\n",
"\n",
"fieldsetC = parcels.FieldSet.from_nemo(\n",
" filenames, variables, dimensions, netcdf_decodewarning=False\n",
")"
"with warnings.catch_warnings():\n",
" warnings.simplefilter(\"ignore\", parcels.FileWarning)\n",
" fieldsetC = parcels.FieldSet.from_nemo(filenames, variables, dimensions)"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"Note by the way, that we used `netcdf_decodewarning=False` in the `FieldSet.from_nemo()` call above. This is to silence an expected warning because the time dimension in the `coordinates.nc` file can't be decoded by `xarray`.\n"
"Note by the way, that we used `warnings.catch_warnings()` with `warnings.simplefilter(\"ignore\", parcels.FileWarning)` to wrap the `FieldSet.from_nemo()` call above. This is to silence an expected warning because the time dimension in the `coordinates.nc` file can't be decoded by `xarray`.\n"
]
},
{
Expand Down Expand Up @@ -293,7 +294,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.6"
"version": "3.12.4"
}
},
"nbformat": 4,
Expand Down
7 changes: 4 additions & 3 deletions docs/examples/tutorial_nemo_3D.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -54,15 +54,16 @@
"source": [
"from datetime import timedelta\n",
"from glob import glob\n",
"import warnings\n",
"\n",
"import matplotlib.pyplot as plt\n",
"import xarray as xr\n",
"\n",
"import parcels\n",
"from parcels import logger\n",
"from parcels import FileWarning\n",
"\n",
"# Add a filter for the xarray decoding warning\n",
"logger.addFilter(parcels.XarrayDecodedFilter())\n",
"warnings.simplefilter(\"ignore\", FileWarning)\n",
"\n",
"example_dataset_folder = parcels.download_example_dataset(\n",
" \"NemoNorthSeaORCA025-N006_data\"\n",
Expand Down Expand Up @@ -234,7 +235,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.4"
"version": "3.12.5"
}
},
"nbformat": 4,
Expand Down
95 changes: 50 additions & 45 deletions docs/examples/tutorial_timestamps.ipynb

Large diffs are not rendered by default.

6 changes: 6 additions & 0 deletions docs/reference/misc.rst
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,12 @@ parcels.tools.loggers module
:members:
:undoc-members:

parcels.tools.warnings module
-----------------------------

.. automodule:: parcels.tools.warnings
:members:
:undoc-members:

parcels.tools.exampledata_utils module
--------------------------------------

Expand Down
21 changes: 15 additions & 6 deletions parcels/compilation/codegenerator.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
import collections
import math
import random
import warnings
from abc import ABC
from copy import copy

Expand All @@ -10,8 +11,8 @@
from parcels.field import Field, NestedField, VectorField
from parcels.grid import Grid
from parcels.particle import JITParticle
from parcels.tools.loggers import logger
from parcels.tools.statuscodes import StatusCode
from parcels.tools.warnings import KernelWarning


class IntrinsicNode(ast.AST):
Expand Down Expand Up @@ -178,9 +179,11 @@ def __init__(self, obj, attr):

class ParticleXiYiZiTiAttributeNode(IntrinsicNode):
def __init__(self, obj, attr):
logger.warning_once(
warnings.warn(
f"Be careful when sampling particle.{attr}, as this is updated in the kernel loop. "
"Best to place the sampling statement before advection."
"Best to place the sampling statement before advection.",
KernelWarning,
stacklevel=2,
)
self.obj = obj.ccode
self.attr = attr
Expand Down Expand Up @@ -309,8 +312,10 @@ def visit_Subscript(self, node):
def visit_AugAssign(self, node):
node.target = self.visit(node.target)
if isinstance(node.target, ParticleAttributeNode) and node.target.attr in ["lon", "lat", "depth", "time"]:
logger.warning_once(
"Don't change the location of a particle directly in a Kernel. Use particle_dlon, particle_dlat, etc."
warnings.warn(
"Don't change the location of a particle directly in a Kernel. Use particle_dlon, particle_dlat, etc.",
KernelWarning,
stacklevel=2,
)
node.op = self.visit(node.op)
node.value = self.visit(node.value)
Expand Down Expand Up @@ -439,7 +444,11 @@ def generate(self, py_ast, funcvars: list[str]):
for kvar in funcvars:
if kvar in used_vars + ["particle_dlon", "particle_dlat", "particle_ddepth"]:
if kvar not in ["particle", "fieldset", "time", "particle_dlon", "particle_dlat", "particle_ddepth"]:
logger.warning(kvar + " declared in multiple Kernels")
warnings.warn(
kvar + " declared in multiple Kernels",
KernelWarning,
stacklevel=2,
)
funcvars_copy.remove(kvar)
else:
used_vars.append(kvar)
Expand Down
73 changes: 39 additions & 34 deletions parcels/field.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import collections
import datetime
import math
import warnings
from ctypes import POINTER, Structure, c_float, c_int, pointer
from pathlib import Path
from typing import TYPE_CHECKING, Iterable, Type
Expand All @@ -18,14 +19,14 @@
UnitConverter,
unitconverters_map,
)
from parcels.tools.loggers import logger
from parcels.tools.statuscodes import (
AllParcelsErrorCodes,
FieldOutOfBoundError,
FieldOutOfBoundSurfaceError,
FieldSamplingError,
TimeExtrapolationError,
)
from parcels.tools.warnings import FieldSetWarning, _deprecated_param_netcdf_decodewarning

from .fieldfilebuffer import (
DaskFileBuffer,
Expand Down Expand Up @@ -163,6 +164,10 @@ def __init__(
to_write=False,
**kwargs,
):
if kwargs.get("netcdf_decodewarning") is not None:
_deprecated_param_netcdf_decodewarning()
kwargs.pop("netcdf_decodewarning")

if not isinstance(name, tuple):
self.name = name
self.filebuffername = name
Expand Down Expand Up @@ -211,8 +216,10 @@ def __init__(
GridType.RectilinearSGrid,
GridType.CurvilinearSGrid,
]:
logger.warning_once( # type: ignore
"General s-levels are not supported in B-grid. RectilinearSGrid and CurvilinearSGrid can still be used to deal with shaved cells, but the levels must be horizontal."
warnings.warn(
"General s-levels are not supported in B-grid. RectilinearSGrid and CurvilinearSGrid can still be used to deal with shaved cells, but the levels must be horizontal.",
FieldSetWarning,
stacklevel=2,
)

self.fieldset: "FieldSet" | None = None
Expand All @@ -223,9 +230,10 @@ def __init__(

self.time_periodic = time_periodic
if self.time_periodic is not False and self.allow_time_extrapolation:
logger.warning_once( # type: ignore
"allow_time_extrapolation and time_periodic cannot be used together.\n \
allow_time_extrapolation is set to False"
warnings.warn(
"allow_time_extrapolation and time_periodic cannot be used together. allow_time_extrapolation is set to False",
FieldSetWarning,
stacklevel=2,
)
self.allow_time_extrapolation = False
if self.time_periodic is True:
Expand Down Expand Up @@ -275,9 +283,8 @@ def __init__(
self.dataFiles = np.append(self.dataFiles, self.dataFiles[0])
self._field_fb_class = kwargs.pop("FieldFileBuffer", None)
self.netcdf_engine = kwargs.pop("netcdf_engine", "netcdf4")
self.netcdf_decodewarning = kwargs.pop("netcdf_decodewarning", True)
self.loaded_time_indices: Iterable[int] = []
self.creation_log: str = kwargs.pop("creation_log", "")
self.loaded_time_indices: Iterable[int] = [] # type: ignore
self.creation_log = kwargs.pop("creation_log", "")
self.chunksize = kwargs.pop("chunksize", None)
self.netcdf_chunkdims_name_map = kwargs.pop("chunkdims_name_map", None)
self.grid.depth_field = kwargs.pop("depth_field", None)
Expand Down Expand Up @@ -315,8 +322,10 @@ def get_dim_filenames(cls, filenames, dim):

@staticmethod
def collect_timeslices(
timestamps, data_filenames, _grid_fb_class, dimensions, indices, netcdf_engine, netcdf_decodewarning=True
timestamps, data_filenames, _grid_fb_class, dimensions, indices, netcdf_engine, netcdf_decodewarning=None
):
if netcdf_decodewarning is not None:
_deprecated_param_netcdf_decodewarning()
if timestamps is not None:
dataFiles = []
for findex in range(len(data_filenames)):
Expand All @@ -329,9 +338,7 @@ def collect_timeslices(
timeslices = []
dataFiles = []
for fname in data_filenames:
with _grid_fb_class(
fname, dimensions, indices, netcdf_engine=netcdf_engine, netcdf_decodewarning=netcdf_decodewarning
) as filebuffer:
with _grid_fb_class(fname, dimensions, indices, netcdf_engine=netcdf_engine) as filebuffer:
ftime = filebuffer.time
timeslices.append(ftime)
dataFiles.append([fname] * len(ftime))
Expand Down Expand Up @@ -408,7 +415,7 @@ def from_netcdf(
chunksize :
size of the chunks in dask loading
netcdf_decodewarning : bool
Whether to show a warning id there is a problem decoding the netcdf files.
(DEPRECATED - v3.1.0) Whether to show a warning if there is a problem decoding the netcdf files.
Default is True, but in some cases where these warnings are expected, it may be useful to silence them
by setting netcdf_decodewarning=False.
grid :
Expand All @@ -423,6 +430,10 @@ def from_netcdf(
* `Timestamps <../examples/tutorial_timestamps.ipynb>`__
"""
if kwargs.get("netcdf_decodewarning") is not None:
_deprecated_param_netcdf_decodewarning()
kwargs.pop("netcdf_decodewarning")

# Ensure the timestamps array is compatible with the user-provided datafiles.
if timestamps is not None:
if isinstance(filenames, list):
Expand Down Expand Up @@ -475,7 +486,6 @@ def from_netcdf(
depth_filename = depth_filename[0]

netcdf_engine = kwargs.pop("netcdf_engine", "netcdf4")
netcdf_decodewarning = kwargs.pop("netcdf_decodewarning", True)

indices = {} if indices is None else indices.copy()
for ind in indices:
Expand All @@ -498,9 +508,7 @@ def from_netcdf(

_grid_fb_class = NetcdfFileBuffer

with _grid_fb_class(
lonlat_filename, dimensions, indices, netcdf_engine, netcdf_decodewarning=netcdf_decodewarning
) as filebuffer:
with _grid_fb_class(lonlat_filename, dimensions, indices, netcdf_engine) as filebuffer:
lon, lat = filebuffer.lonlat
indices = filebuffer.indices
# Check if parcels_mesh has been explicitly set in file
Expand All @@ -514,7 +522,6 @@ def from_netcdf(
indices,
netcdf_engine,
interp_method=interp_method,
netcdf_decodewarning=netcdf_decodewarning,
) as filebuffer:
filebuffer.name = filebuffer.parse_name(variable[1])
if dimensions["depth"] == "not_yet_set":
Expand All @@ -537,7 +544,7 @@ def from_netcdf(
# Concatenate time variable to determine overall dimension
# across multiple files
time, time_origin, timeslices, dataFiles = cls.collect_timeslices(
timestamps, data_filenames, _grid_fb_class, dimensions, indices, netcdf_engine, netcdf_decodewarning
timestamps, data_filenames, _grid_fb_class, dimensions, indices, netcdf_engine
)
grid = Grid.create_grid(lon, lat, depth, time, time_origin=time_origin, mesh=mesh)
grid.timeslices = timeslices
Expand All @@ -546,15 +553,17 @@ def from_netcdf(
# ==== means: the field has a shared grid, but may have different data files, so we need to collect the
# ==== correct file time series again.
_, _, _, dataFiles = cls.collect_timeslices(
timestamps, data_filenames, _grid_fb_class, dimensions, indices, netcdf_engine, netcdf_decodewarning
timestamps, data_filenames, _grid_fb_class, dimensions, indices, netcdf_engine
)
kwargs["dataFiles"] = dataFiles

chunksize: bool | None = kwargs.get("chunksize", None)
grid.chunksize = chunksize

if "time" in indices:
logger.warning_once("time dimension in indices is not necessary anymore. It is then ignored.") # type: ignore
warnings.warn(
"time dimension in indices is not necessary anymore. It is then ignored.", FieldSetWarning, stacklevel=2
)

if "full_load" in kwargs: # for backward compatibility with Parcels < v2.0.0
deferred_load = not kwargs["full_load"]
Expand Down Expand Up @@ -587,7 +596,6 @@ def from_netcdf(
interp_method=interp_method,
data_full_zdim=data_full_zdim,
chunksize=chunksize,
netcdf_decodewarning=netcdf_decodewarning,
) as filebuffer:
# If Field.from_netcdf is called directly, it may not have a 'data' dimension
# In that case, assume that 'name' is the data dimension
Expand Down Expand Up @@ -632,7 +640,6 @@ def from_netcdf(
kwargs["indices"] = indices
kwargs["time_periodic"] = time_periodic
kwargs["netcdf_engine"] = netcdf_engine
kwargs["netcdf_decodewarning"] = netcdf_decodewarning

return cls(
variable,
Expand Down Expand Up @@ -820,16 +827,13 @@ def calc_cell_edge_sizes(self):
self.grid.cell_edge_sizes["y"][y, x] = y_conv.to_source(dy, lon, lat, self.grid.depth[0])
self.cell_edge_sizes = self.grid.cell_edge_sizes
else:
logger.error(
raise ValueError(
(
"Field.cell_edge_sizes() not implemented for ",
self.grid.gtype,
"grids.",
"You can provide Field.grid.cell_edge_sizes yourself",
"by in e.g. NEMO using the e1u fields etc from the mesh_mask.nc file",
f"Field.cell_edge_sizes() not implemented for {self.grid.gtype} grids. "
"You can provide Field.grid.cell_edge_sizes yourself by in, e.g., "
"NEMO using the e1u fields etc from the mesh_mask.nc file."
)
)
exit(-1)

def cell_areas(self):
"""Method to calculate cell sizes based on cell_edge_sizes.
Expand Down Expand Up @@ -1347,8 +1351,10 @@ def time_index(self, time):

def _check_velocitysampling(self):
if self.name in ["U", "V", "W"]:
logger.warning_once(
"Sampling of velocities should normally be done using fieldset.UV or fieldset.UVW object; tread carefully"
warnings.warn(
"Sampling of velocities should normally be done using fieldset.UV or fieldset.UVW object; tread carefully",
RuntimeWarning,
stacklevel=2,
)

def __getitem__(self, key):
Expand Down Expand Up @@ -1653,7 +1659,6 @@ def computeTimeChunk(self, data, tindex):
cast_data_dtype=self.cast_data_dtype,
rechunk_callback_fields=rechunk_callback_fields,
chunkdims_name_map=self.netcdf_chunkdims_name_map,
netcdf_decodewarning=self.netcdf_decodewarning,
)
filebuffer.__enter__()
time_data = filebuffer.time
Expand Down
Loading

0 comments on commit 286558c

Please sign in to comment.