Skip to content

Commit 46286ba

Browse files
Merge pull request #1666 from OceanParcels/v/small-changes
Misc: Remove pykdtree dependency, codecov config, SPEC 0 tooling and other changes
2 parents 595b7a9 + 5ac1c60 commit 46286ba

14 files changed

+291
-34
lines changed
File renamed without changes.
File renamed without changes.

.github/ci/min-core-deps.yml

+26
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,26 @@
1+
# Conda environment pinning the MINIMUM supported version of each core dependency.
# NOTE(review): the environment name was kept from the upstream xarray file this
# was adapted from — confirm whether it should be renamed for Parcels.
name: xarray-tests
channels:
  - conda-forge
  - nodefaults
dependencies:
  # MINIMUM VERSIONS POLICY: keep track of minimum versions
  # for core packages. Dev and conda release builds should use this as reference.
  # Run ci/min_deps_check.py to verify that this file respects the policy.
  - python=3.10
  - cftime=1.6
  - cgen=2020.1
  - dask=2022.8
  - matplotlib-base=3.5
  # netcdf follows a 1.major.minor[.patch] convention
  # (see https://github.com/Unidata/netcdf4-python/issues/1090)
  - netcdf4=1.6
  - numpy=1.23
  - platformdirs=2.5
  - psutil=5.9
  - pymbolic=2022.1
  - pytest=7.1
  - scipy=1.9
  - trajan=0.1
  - tqdm=4.64
  - xarray=2022.6
  - zarr=2.12

.github/ci/min_deps_check.py

+199
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,199 @@
1+
#!/usr/bin/env python
"""Fetch from conda database all available versions of dependencies and their
publication date. Compare it against requirements/min-core-deps.yml to verify the
policy on obsolete dependencies is being followed. Print a pretty report :)

Adapted from xarray:
https://github.com/pydata/xarray/blob/a04d857a03d1fb04317d636a7f23239cb9034491/ci/min_deps_check.py
"""

from __future__ import annotations

import itertools
import sys
from collections.abc import Iterator
from datetime import datetime

import conda.api  # type: ignore[import]
import yaml
from dateutil.relativedelta import relativedelta

# Conda channels queried for available package versions, in priority order.
CHANNELS = ["conda-forge", "defaults"]
# Packages exempt from the pinning policy.
# NOTE(review): declared as an empty dict but only ever used for membership
# tests — an (empty) set literal is probably intended; confirm before populating.
IGNORE_DEPS = {}

# Minimum-age policy per package, in months; python gets a longer window.
POLICY_MONTHS = {"python": 3 * 12}
POLICY_MONTHS_DEFAULT = 24
# Manual (major, minor) overrides of the policy-computed version, keyed by package name.
POLICY_OVERRIDE: dict[str, tuple[int, int]] = {}
# Accumulated error messages; a non-empty list makes main() exit with status 1.
errors = []
28+
29+
30+
def error(msg: str) -> None:
    """Record *msg* in the module-level ``errors`` list and echo it to stdout.

    Errors recorded here make :func:`main` exit with a non-zero status.
    """
    # ``append`` mutates the module-level list in place and never rebinds the
    # name, so the original ``global errors`` declaration was unnecessary.
    errors.append(msg)
    print("ERROR:", msg)
34+
35+
36+
def warning(msg: str) -> None:
    """Emit a non-fatal diagnostic on stdout (not collected in ``errors``)."""
    print(f"WARNING: {msg}")
38+
39+
40+
def parse_requirements(fname) -> Iterator[tuple[str, int, int, int | None]]:
    """Load requirements/min-all-deps.yml

    Yield (package name, major version, minor version, [patch version]).

    Rows that are badly pinned are reported via :func:`error` and skipped;
    non-numerical or wrongly shaped versions raise ``ValueError``.
    """
    # (Removed the unused ``global errors`` declaration: this function never
    # rebinds ``errors`` — error collection goes through ``error()``.)
    with open(fname) as fh:
        contents = yaml.safe_load(fh)
    for row in contents["dependencies"]:
        # A ``pip:`` sub-mapping lists pip-only deps; they are out of scope here.
        if isinstance(row, dict) and list(row) == ["pip"]:
            continue
        pkg, eq, version = row.partition("=")
        if pkg.rstrip("<>") in IGNORE_DEPS:
            continue
        # Only exact pins ("pkg=x.y") respect the policy; "<"/">" bounds do not.
        if pkg.endswith("<") or pkg.endswith(">") or eq != "=":
            error("package should be pinned with exact version: " + row)
            continue

        try:
            version_tup = tuple(int(x) for x in version.split("."))
        except ValueError as err:
            # Chain the original error so the traceback shows the offending token.
            raise ValueError("non-numerical version: " + row) from err

        if len(version_tup) == 2:
            yield (pkg, *version_tup, None)  # type: ignore[misc]
        elif len(version_tup) == 3:
            yield (pkg, *version_tup)  # type: ignore[misc]
        else:
            raise ValueError("expected major.minor or major.minor.patch: " + row)
70+
71+
72+
def query_conda(pkg: str) -> dict[tuple[int, int], datetime]:
    """Query the conda repository for a specific package

    Return map of {(major version, minor version): publication date}
    """

    def to_version_and_time(entry):
        # Reduce a repodata record to ((major, minor), upload timestamp).
        major_str, minor_str = entry.version.split(".")[:2]
        return (int(major_str), int(minor_str)), datetime.fromtimestamp(entry.timestamp)

    records = conda.api.SubdirData.query_all(pkg, channels=CHANNELS)
    # Records with a zero timestamp carry no usable publication date; drop them.
    # Sorting keeps equal (major, minor) keys adjacent for groupby below.
    keyed = sorted(to_version_and_time(record) for record in records if record.timestamp != 0)

    out = {}
    for version, group in itertools.groupby(keyed, key=lambda pair: pair[0]):
        upload_times = [when for _, when in group if when is not None]
        if upload_times:
            # The earliest upload is taken as the release date of that version.
            out[version] = min(upload_times)

    # Hardcoded fix to work around incorrect dates in conda
    if pkg == "python":
        out.update(
            {
                (2, 7): datetime(2010, 6, 3),
                (3, 5): datetime(2015, 9, 13),
                (3, 6): datetime(2016, 12, 23),
                (3, 7): datetime(2018, 6, 27),
                (3, 8): datetime(2019, 10, 14),
                (3, 9): datetime(2020, 10, 5),
                (3, 10): datetime(2021, 10, 4),
                (3, 11): datetime(2022, 10, 24),
            }
        )

    return out
111+
112+
113+
def process_pkg(pkg: str, req_major: int, req_minor: int, req_patch: int | None) -> tuple[str, str, str, str, str, str]:
    """Compare package version from requirements file to available versions in conda.
    Return row to build pandas dataframe:

    - package name
    - major.minor.[patch] version in requirements file
    - publication date of version in requirements file (YYYY-MM-DD)
    - major.minor version suggested by policy
    - publication date of version suggested by policy (YYYY-MM-DD)
    - status ("<", "=", "> (!)")
    """
    print(f"Analyzing {pkg}...")
    versions = query_conda(pkg)

    try:
        req_published = versions[req_major, req_minor]
    except KeyError:
        # The pinned version does not exist on the queried channels: flag it
        # and return a placeholder row so the report stays aligned.
        error("not found in conda: " + pkg)
        return pkg, fmt_version(req_major, req_minor, req_patch), "-", "-", "-", "(!)"

    policy_months = POLICY_MONTHS.get(pkg, POLICY_MONTHS_DEFAULT)
    # Cutoff date: versions published before this are old enough to require.
    policy_published = datetime.now() - relativedelta(months=policy_months)

    # Newest version old enough under the policy; falls back to the pinned
    # version when nothing is old enough.
    filtered_versions = [version for version, published in versions.items() if published < policy_published]
    policy_major, policy_minor = max(filtered_versions, default=(req_major, req_minor))

    # Manual overrides win over the computed policy version.
    # NOTE(review): an override pointing at a version absent from ``versions``
    # would raise KeyError on the lookup below — confirm that is acceptable.
    try:
        policy_major, policy_minor = POLICY_OVERRIDE[pkg]
    except KeyError:
        pass
    policy_published_actual = versions[policy_major, policy_minor]

    if (req_major, req_minor) < (policy_major, policy_minor):
        # Pinned version is older than the policy suggests: allowed.
        status = "<"
    elif (req_major, req_minor) > (policy_major, policy_minor):
        # Pinned version is newer than the policy allows: warn with its age.
        status = "> (!)"
        delta = relativedelta(datetime.now(), req_published).normalized()
        n_months = delta.years * 12 + delta.months
        warning(
            f"Package is too new: {pkg}={req_major}.{req_minor} was "
            f"published on {req_published:%Y-%m-%d} "
            f"which was {n_months} months ago (policy is {policy_months} months)"
        )
    else:
        status = "="

    if req_patch is not None:
        # The policy pins at major.minor granularity only.
        warning("patch version should not appear in requirements file: " + pkg)
        status += " (w)"

    return (
        pkg,
        fmt_version(req_major, req_minor, req_patch),
        req_published.strftime("%Y-%m-%d"),
        fmt_version(policy_major, policy_minor),
        policy_published_actual.strftime("%Y-%m-%d"),
        status,
    )
171+
172+
173+
def fmt_version(major: int, minor: int, patch: int | None = None) -> str:
    """Render a version as ``"major.minor"``, or ``"major.minor.patch"`` when a patch is given."""
    rendered = f"{major}.{minor}"
    if patch is not None:
        rendered += f".{patch}"
    return rendered
178+
179+
180+
def main() -> None:
    """CLI entry point: check the requirements file given as ``argv[1]`` and print a report."""
    requirements_file = sys.argv[1]
    rows = [
        process_pkg(pkg, major, minor, patch)
        for pkg, major, minor, patch in parse_requirements(requirements_file)
    ]

    print("\nPackage Required Policy Status")
    print("----------------- -------------------- -------------------- ------")
    row_template = "{:17} {:7} ({:10}) {:7} ({:10}) {}"
    for row in rows:
        print(row_template.format(*row))

    # Any collected errors make the CI job fail.
    if errors:
        print("\nErrors:")
        print("-------")
        for number, message in enumerate(errors, start=1):
            print(f"{number}. {message}")
        sys.exit(1)

.github/workflows/check-min-deps.yml

+29
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,29 @@
1+
# Scheduled workflow that verifies the pinned minimum core dependencies
# (.github/ci/min-core-deps.yml) still respect the minimum-version policy.
name: CI Additional
on:
  workflow_dispatch: # allows you to trigger manually
  schedule:
    - cron: "0 0 1 */3 *" # Run every 3 months

jobs:
  min-version-policy:
    name: Minimum Version Policy
    runs-on: "ubuntu-latest"
    defaults:
      run:
        # Login shell so the micromamba environment is activated for run steps.
        shell: bash -l {0}

    steps:
      - uses: actions/checkout@v4
      - name: Setup micromamba
        uses: mamba-org/setup-micromamba@v1
        with:
          environment-name: min-deps
          # Runtime deps of min_deps_check.py: pyyaml, conda (conda.api), python-dateutil.
          create-args: >-
            python=3.12
            pyyaml
            conda
            python-dateutil

      - name: Core deps minimum versions policy
        run: |
          python .github/ci/min_deps_check.py .github/ci/min-core-deps.yml

codecov.yml

+7-1
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,13 @@ coverage:
66
patch:
77
default:
88
informational: true
9-
comment: false # Only on PR when theres a change in coverage
9+
comment:
10+
layout: " diff, flags, files"
11+
behavior: default
12+
require_changes: true
13+
require_base: false
14+
require_head: true
15+
hide_project_coverage: true
1016

1117
# When modifying this file, please validate using
1218
# curl -X POST --data-binary @codecov.yml https://codecov.io/validate

docs/contributing.rst

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
Contributing to Parcels
2-
======================
2+
=======================
33

44
Why contribute?
55
---------------

docs/documentation/index.rst

+1-1
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ Documentation and Tutorials
44
Parcels has several documentation and tutorial Jupyter notebooks and scripts which go through various aspects of Parcels. Static versions of the notebooks are available below via the gallery in the site, with the interactive notebooks being available online at the following `Binder link <https://mybinder.org/v2/gh/OceanParcels/parcels/master?labpath=docs%2Fexamples%2Fparcels_tutorial.ipynb>`_. Following the gallery of notebooks is a list of scripts which provide additional examples to users. You can work with the example notebooks and scripts locally by downloading :download:`parcels_tutorials.zip </_downloads/parcels_tutorials.zip>` and running with your own Parcels installation.
55

66
.. warning::
7-
When browsing/downloading the tutorials, it's important that you are using the documentation corresponding to the version of Parcels that you have installed. You can find which parcels version you have installed by doing ``import parcels`` followed by ``print(parcels.__version__)``. If you don't want to use the latest version of Parcels, you can browse prior versions of the documentation by using the version switcher in the bottom right of this page.
7+
In v3.1.0 we updated kernels in the tutorials to use ``parcels.ParcelsRandom`` instead of ``from parcels import ParcelsRandom``. Due to our C-conversion code, using ``parcels.ParcelsRandom`` only works with v3.1.0+. When browsing/downloading the tutorials, it's important that you are using the documentation corresponding to the version of Parcels that you have installed. You can find which parcels version you have installed by doing ``import parcels`` followed by ``print(parcels.__version__)``. If you don't want to use the latest version of Parcels, you can browse prior versions of the documentation by using the version switcher in the bottom right of this page.
88

99
.. nbgallery::
1010
:caption: Overview

docs/examples/tutorial_nemo_curvilinear.ipynb

+1-1
Original file line numberDiff line numberDiff line change
@@ -226,7 +226,7 @@
226226
"source": [
227227
"## Speeding up `ParticleSet` initialisation by efficiently finding particle start-locations on the `Grid`\n",
228228
"\n",
229-
"On a Curvilinear grid, determining the location of each `Particle` on the grid is more complicated and therefore takes longer than on a Rectilinear grid. Since Parcels version 2.2.2, a function is available on the `ParticleSet` class, that speeds up the look-up. After creating the `ParticleSet`, but before running the `ParticleSet.execute()`, simply call the function `ParticleSet.populate_indices()`. Note that this only works if you have the [pykdtree](https://anaconda.org/conda-forge/pykdtree) package installed, which is only included in the Parcels dependencies in version >= 2.2.2\n"
229+
"On a Curvilinear grid, determining the location of each `Particle` on the grid is more complicated and therefore takes longer than on a Rectilinear grid. Since Parcels version 2.2.2, a function is available on the `ParticleSet` class, that speeds up the look-up. After creating the `ParticleSet`, but before running the `ParticleSet.execute()`, simply call the function `ParticleSet.populate_indices()`.\n"
230230
]
231231
},
232232
{

docs/examples/tutorial_output.ipynb

+5-3
Original file line numberDiff line numberDiff line change
@@ -190,9 +190,11 @@
190190
],
191191
"source": [
192192
"np.set_printoptions(linewidth=160)\n",
193-
"ns_per_hour = np.timedelta64(1, \"h\") # nanoseconds in an hour\n",
193+
"one_hour = np.timedelta64(1, \"h\") # Define timedelta object to help with conversion\n",
194194
"\n",
195-
"print(data_xarray[\"time\"].data.compute() / ns_per_hour) # time is stored in nanoseconds"
195+
"print(\n",
196+
" data_xarray[\"time\"].data.compute() / one_hour\n",
197+
") # timedelta / timedelta -> float number of hours"
196198
]
197199
},
198200
{
@@ -227,7 +229,7 @@
227229
" np.sqrt(np.square(np.diff(x)) + np.square(np.diff(y))), axis=1\n",
228230
") # d = (dx^2 + dy^2)^(1/2)\n",
229231
"\n",
230-
"real_time = data_xarray[\"time\"] / ns_per_hour # convert time to hours\n",
232+
"real_time = data_xarray[\"time\"] / one_hour # convert time to hours\n",
231233
"time_since_release = (\n",
232234
" real_time.values.transpose() - real_time.values[:, 0]\n",
233235
") # substract the initial time from each timeseries"

docs/index.rst

+1
Original file line numberDiff line numberDiff line change
@@ -19,5 +19,6 @@ If you need more help with Parcels, try the `Discussions page on GitHub <https:/
1919
Installation <installation>
2020
Tutorials & Documentation <documentation/index>
2121
API reference <reference>
22+
Release Notes <https://github.com/OceanParcels/Parcels/releases>
2223
Contributing <contributing>
2324
OceanParcels website <https://oceanparcels.org/>

environment.yml

-1
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,6 @@ dependencies:
2222
- dask>=2.0
2323
- nbval
2424
- scikit-learn
25-
- pykdtree
2625
- zarr>=2.11.0,!=2.18.0
2726

2827
# Formatting

0 commit comments

Comments
 (0)