
Implement warnings in Parcels #1672

Merged
17 commits, merged Sep 2, 2024
Changes from 6 commits

7 changes: 4 additions & 3 deletions docs/examples/tutorial_nemo_3D.ipynb
@@ -54,15 +54,16 @@
"source": [
"from datetime import timedelta\n",
"from glob import glob\n",
"import warnings\n",
"\n",
"import matplotlib.pyplot as plt\n",
"import xarray as xr\n",
"\n",
"import parcels\n",
"from parcels import logger\n",
"from parcels import FileWarning\n",
"\n",
"# Add a filter for the xarray decoding warning\n",
"logger.addFilter(parcels.XarrayDecodedFilter())\n",
"warnings.simplefilter(\"ignore\", FileWarning)\n",
"\n",
"example_dataset_folder = parcels.download_example_dataset(\n",
" \"NemoNorthSeaORCA025-N006_data\"\n",
@@ -234,7 +235,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.4"
"version": "3.12.5"
}
},
"nbformat": 4,
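The notebook change above shows the new user-facing pattern: instead of attaching a filter to the Parcels logger, decoding warnings are silenced through the standard library. A minimal sketch of the global form used in the notebook, plus a scoped variant (the `with` form is an editorial addition, not part of the notebook):

```python
import warnings

from parcels import FileWarning

# Globally ignore Parcels file-decoding warnings, as the notebook cell does:
warnings.simplefilter("ignore", FileWarning)

# Or scope the filter so it only applies while the dataset is opened:
with warnings.catch_warnings():
    warnings.simplefilter("ignore", FileWarning)
    ...  # open the FieldSet / run the tutorial code here
```
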
6 changes: 6 additions & 0 deletions docs/reference/misc.rst
@@ -29,6 +29,12 @@ parcels.tools.loggers module
:members:
:undoc-members:

parcels.tools.warnings module
-----------------------------

.. automodule:: parcels.tools.warnings
:members:
:undoc-members:

parcels.tools.exampledata_utils module
--------------------------------------

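The newly documented `parcels.tools.warnings` module supplies the categories used throughout this diff. A sketch of what it plausibly contains, inferred from the imports in this PR; the base classes and docstrings are assumptions, not copied from the source:

```python
# parcels/tools/warnings.py -- sketch inferred from this diff; details assumed


class FieldSetWarning(UserWarning):
    """Warning raised while building or configuring a FieldSet or Field."""


class FileWarning(UserWarning):
    """Warning about reading, decoding, or chunking input files."""


class KernelWarning(RuntimeWarning):
    """Warning raised during kernel code generation."""
```
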
21 changes: 15 additions & 6 deletions parcels/compilation/codegenerator.py
@@ -2,6 +2,7 @@
import collections
import math
import random
import warnings
from abc import ABC
from copy import copy

@@ -10,8 +11,8 @@
from parcels.field import Field, NestedField, VectorField
from parcels.grid import Grid
from parcels.particle import JITParticle
from parcels.tools.loggers import logger
from parcels.tools.statuscodes import StatusCode
from parcels.tools.warnings import KernelWarning


class IntrinsicNode(ast.AST):
@@ -178,9 +179,11 @@ def __init__(self, obj, attr):

class ParticleXiYiZiTiAttributeNode(IntrinsicNode):
def __init__(self, obj, attr):
logger.warning_once(
warnings.warn(
f"Be careful when sampling particle.{attr}, as this is updated in the kernel loop. "
"Best to place the sampling statement before advection."
"Best to place the sampling statement before advection.",
KernelWarning,
stacklevel=2,
)
self.obj = obj.ccode
self.attr = attr
@@ -309,8 +312,10 @@ def visit_Subscript(self, node):
def visit_AugAssign(self, node):
node.target = self.visit(node.target)
if isinstance(node.target, ParticleAttributeNode) and node.target.attr in ["lon", "lat", "depth", "time"]:
logger.warning_once(
"Don't change the location of a particle directly in a Kernel. Use particle_dlon, particle_dlat, etc."
warnings.warn(
"Don't change the location of a particle directly in a Kernel. Use particle_dlon, particle_dlat, etc.",
KernelWarning,
stacklevel=2,
)
node.op = self.visit(node.op)
node.value = self.visit(node.value)
@@ -439,7 +444,11 @@ def generate(self, py_ast, funcvars):
for kvar in funcvars:
if kvar in used_vars + ["particle_dlon", "particle_dlat", "particle_ddepth"]:
if kvar not in ["particle", "fieldset", "time", "particle_dlon", "particle_dlat", "particle_ddepth"]:
logger.warning(kvar + " declared in multiple Kernels")
warnings.warn(
kvar + " declared in multiple Kernels",
KernelWarning,
stacklevel=2,
)
funcvars_copy.remove(kvar)
else:
used_vars.append(kvar)
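Each `warnings.warn` call above passes an explicit category and `stacklevel=2`, so the warning is attributed to the user code that triggered code generation rather than to Parcels internals. A side benefit of real categories is that they can be promoted to exceptions while debugging a kernel; a short sketch using the import path shown in this file's diff:

```python
import warnings

from parcels.tools.warnings import KernelWarning

# Escalate kernel code-generation warnings to exceptions while debugging:
warnings.simplefilter("error", KernelWarning)
```
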
37 changes: 26 additions & 11 deletions parcels/field.py
@@ -1,6 +1,7 @@
import collections
import datetime
import math
import warnings
from ctypes import POINTER, Structure, c_float, c_int, pointer
from pathlib import Path

@@ -16,14 +17,14 @@
UnitConverter,
unitconverters_map,
)
from parcels.tools.loggers import logger
from parcels.tools.statuscodes import (
AllParcelsErrorCodes,
FieldOutOfBoundError,
FieldOutOfBoundSurfaceError,
FieldSamplingError,
TimeExtrapolationError,
)
from parcels.tools.warnings import FieldSetWarning

from .fieldfilebuffer import (
DaskFileBuffer,
@@ -203,8 +204,10 @@ def __init__(
GridType.RectilinearSGrid,
GridType.CurvilinearSGrid,
]:
logger.warning_once(
"General s-levels are not supported in B-grid. RectilinearSGrid and CurvilinearSGrid can still be used to deal with shaved cells, but the levels must be horizontal."
warnings.warn(
"General s-levels are not supported in B-grid. RectilinearSGrid and CurvilinearSGrid can still be used to deal with shaved cells, but the levels must be horizontal.",
FieldSetWarning,
stacklevel=2,
)

self.fieldset = None
@@ -215,9 +218,10 @@

self.time_periodic = time_periodic
if self.time_periodic is not False and self.allow_time_extrapolation:
logger.warning_once(
"allow_time_extrapolation and time_periodic cannot be used together.\n \
allow_time_extrapolation is set to False"
warnings.warn(
"allow_time_extrapolation and time_periodic cannot be used together. allow_time_extrapolation is set to False",
FieldSetWarning,
stacklevel=2,
)
self.allow_time_extrapolation = False
if self.time_periodic is True:
@@ -403,6 +407,7 @@ def from_netcdf(
Whether to show a warning if there is a problem decoding the netcdf files.
Default is True, but in some cases where these warnings are expected, it may be useful to silence them
by setting netcdf_decodewarning=False.
This argument is being deprecated in favor of warnings control through the Python warnings module.
grid :
(Default value = None)
**kwargs :
@@ -467,6 +472,13 @@ def from_netcdf(
depth_filename = depth_filename[0]

netcdf_engine = kwargs.pop("netcdf_engine", "netcdf4")
if kwargs.get("netcdf_decodewarning", None) is not None:
warnings.warn(
"The 'netcdf_decodewarning' argument is deprecated and will be removed in a future release. "
"Please use the Python warnings module to control warnings.",
DeprecationWarning,
stacklevel=2,
)
netcdf_decodewarning = kwargs.pop("netcdf_decodewarning", True)

indices = {} if indices is None else indices.copy()
@@ -546,7 +558,9 @@ def from_netcdf(
grid.chunksize = chunksize

if "time" in indices:
logger.warning_once("time dimension in indices is not necessary anymore. It is then ignored.")
warnings.warn(
"time dimension in indices is not necessary anymore. It is then ignored.", FieldSetWarning, stacklevel=2
)

if "full_load" in kwargs: # for backward compatibility with Parcels < v2.0.0
deferred_load = not kwargs["full_load"]
@@ -804,7 +818,7 @@ def calc_cell_edge_sizes(self):
self.grid.cell_edge_sizes["y"][y, x] = y_conv.to_source(dy, lon, lat, self.grid.depth[0])
self.cell_edge_sizes = self.grid.cell_edge_sizes
else:
logger.error(
raise ValueError(
(
"Field.cell_edge_sizes() not implemented for ",
self.grid.gtype,
@@ -813,7 +827,6 @@
"by in e.g. NEMO using the e1u fields etc from the mesh_mask.nc file",
)
)
exit(-1)

def cell_areas(self):
"""Method to calculate cell sizes based on cell_edge_sizes.
@@ -1329,8 +1342,10 @@ def time_index(self, time):

def _check_velocitysampling(self):
if self.name in ["U", "V", "W"]:
logger.warning_once(
"Sampling of velocities should normally be done using fieldset.UV or fieldset.UVW object; tread carefully"
warnings.warn(
"Sampling of velocities should normally be done using fieldset.UV or fieldset.UVW object; tread carefully",
RuntimeWarning,
stacklevel=2,
)

def __getitem__(self, key):
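The `from_netcdf` hunk above deprecates the `netcdf_decodewarning` keyword in favour of standard warnings control. A hypothetical before/after for callers; the placeholder file pattern and variable/dimension mappings are illustrative only, not copied from the Parcels docs:

```python
import warnings

import parcels
from parcels import FileWarning

# Hypothetical placeholder inputs; replace with your own files and mappings.
filenames = "ocean_data_*.nc"
variables = {"U": "uo", "V": "vo"}
dimensions = {"lon": "longitude", "lat": "latitude", "time": "time"}

# Old, now deprecated: silence decoding warnings via a keyword argument.
# fieldset = parcels.FieldSet.from_netcdf(
#     filenames, variables, dimensions, netcdf_decodewarning=False
# )

# New: silence only FileWarning, scoped to the call that can raise it.
with warnings.catch_warnings():
    warnings.simplefilter("ignore", FileWarning)
    fieldset = parcels.FieldSet.from_netcdf(filenames, variables, dimensions)
```
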
29 changes: 19 additions & 10 deletions parcels/fieldfilebuffer.py
@@ -1,5 +1,6 @@
import datetime
import math
import warnings

import dask.array as da
import numpy as np
@@ -10,8 +11,8 @@
from netCDF4 import Dataset as ncDataset

from parcels.tools.converters import convert_xarray_time_units
from parcels.tools.loggers import logger
from parcels.tools.statuscodes import DaskChunkingError
from parcels.tools.warnings import FileWarning


class _FileBuffer:
@@ -49,9 +50,11 @@ def __enter__(self):
self.dataset["decoded"] = True
except:
if self.netcdf_decodewarning:
logger.warning_once(
warnings.warn(
f"File {self.filename} could not be decoded properly by xarray (version {xr.__version__}). "
"It will be opened with no decoding. Filling values might be wrongly parsed."
"It will be opened with no decoding. Filling values might be wrongly parsed.",
FileWarning,
stacklevel=2,
)

self.dataset = xr.open_dataset(str(self.filename), decode_cf=False, engine=self.netcdf_engine)
@@ -328,8 +331,10 @@ def __enter__(self):
)
self.dataset["decoded"] = True
except:
logger.warning_once(
f"File {self.filename} could not be decoded properly by xarray (version {xr.__version__}). It will be opened with no decoding. Filling values might be wrongly parsed."
warnings.warn(
f"File {self.filename} could not be decoded properly by xarray (version {xr.__version__}). It will be opened with no decoding. Filling values might be wrongly parsed.",
FileWarning,
stacklevel=2,
)
if self.lock_file:
self.dataset = xr.open_dataset(
@@ -732,9 +737,11 @@ def _get_initial_chunk_dictionary(self):
if predefined_cap is not None:
chunk_cap = da_utils.parse_bytes(predefined_cap)
else:
logger.info_once(
"Unable to locate chunking hints from dask, thus estimating the max. chunk size heuristically."
"Please consider defining the 'chunk-size' for 'array' in your local dask configuration file (see https://docs.oceanparcels.org/en/latest/examples/documentation_MPI.html#Chunking-the-FieldSet-with-dask and https://docs.dask.org)."
warnings.warn(
"Unable to locate chunking hints from dask, thus estimating the max. chunk size heuristically. "
"Please consider defining the 'chunk-size' for 'array' in your local dask configuration file (see https://docs.oceanparcels.org/en/latest/examples/documentation_MPI.html#Chunking-the-FieldSet-with-dask and https://docs.dask.org).",
FileWarning,
stacklevel=2,
)
loni, lonname, lonvalue = self._is_dimension_in_dataset("lon")
lati, latname, latvalue = self._is_dimension_in_dataset("lat")
@@ -771,8 +778,10 @@ def _get_initial_chunk_dictionary(self):
if isinstance(self.chunksize, dict):
self.chunksize = init_chunk_dict
except:
logger.warning(
f"Chunking with init_chunk_dict = {init_chunk_dict} failed - Executing Dask chunking 'failsafe'..."
warnings.warn(
f"Chunking with init_chunk_dict = {init_chunk_dict} failed - Executing Dask chunking 'failsafe'...",
FileWarning,
stacklevel=2,
)
self.autochunkingfailed = True
if not self.autochunkingfailed:
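Note that the replaced `logger.warning_once` deduplicated messages itself. The stdlib machinery already deduplicates by default (the "default" action shows each unique warning once per call site), and the session-wide behaviour of `warning_once` can be approximated explicitly; a sketch:

```python
import warnings

from parcels.tools.warnings import FileWarning

# Show each matching FileWarning only once per session, roughly matching
# the old logger.warning_once behaviour:
warnings.filterwarnings("once", category=FileWarning)
```
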
14 changes: 11 additions & 3 deletions parcels/fieldset.py
@@ -1,6 +1,7 @@
import importlib.util
import os
import sys
import warnings
from copy import deepcopy
from glob import glob

@@ -13,6 +14,7 @@
from parcels.tools.converters import TimeConverter, convert_xarray_time_units
from parcels.tools.loggers import logger
from parcels.tools.statuscodes import TimeExtrapolationError
from parcels.tools.warnings import FieldSetWarning

try:
from mpi4py import MPI
@@ -435,7 +437,11 @@ def from_netcdf(
"""
# Ensure that times are not provided both in netcdf file and in 'timestamps'.
if timestamps is not None and "time" in dimensions:
logger.warning_once("Time already provided, defaulting to dimensions['time'] over timestamps.")
warnings.warn(
"Time already provided, defaulting to dimensions['time'] over timestamps.",
FieldSetWarning,
stacklevel=2,
)
timestamps = None

fields = {}
@@ -909,8 +915,10 @@ def from_pop(
if hasattr(fieldset, "W"):
if depth_units == "m":
fieldset.W.set_scaling_factor(-0.01) # cm/s to m/s and change the W direction
logger.warning_once(
"Parcels assumes depth in POP output to be in 'm'. Use depth_units='cm' if the output depth is in 'cm'."
warnings.warn(
"Parcels assumes depth in POP output to be in 'm'. Use depth_units='cm' if the output depth is in 'cm'.",
FieldSetWarning,
stacklevel=2,
)
elif depth_units == "cm":
fieldset.W.set_scaling_factor(-1.0) # change the W direction but keep W in cm/s because depth is in cm
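With proper categories in place, the new warnings are also straightforward to assert in tests. A hypothetical pytest sketch, keyed to the `from_netcdf` hunk above; the test body and match string are illustrative:

```python
import pytest

from parcels.tools.warnings import FieldSetWarning


def test_timestamps_ignored_when_time_dimension_present():
    # Hypothetical test; expects the warning emitted by FieldSet.from_netcdf
    # when both timestamps and dimensions["time"] are supplied.
    with pytest.warns(FieldSetWarning, match="Time already provided"):
        ...  # build a FieldSet passing both timestamps and dimensions["time"]
```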