Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Update ast, os, sys import mechanism to module imports #1635

Merged
merged 1 commit into from
Aug 1, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions parcels/fieldset.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
import importlib.util
import os
import sys
from copy import deepcopy
from glob import glob
from os import path

import dask.array as da
import numpy as np
Expand Down Expand Up @@ -304,7 +304,7 @@ def parse_wildcards(cls, paths, filenames, var):
notfound_paths = filenames[var] if isinstance(filenames, dict) and var in filenames else filenames
raise OSError(f"FieldSet files not found for variable {var}: {str(notfound_paths)}")
for fp in paths:
if not path.exists(fp):
if not os.path.exists(fp):
raise OSError(f"FieldSet file not found: {fp}")
return paths

Expand Down Expand Up @@ -1074,7 +1074,7 @@ def from_modulefile(cls, filename, modulename="create_fieldset", **kwargs):
modulename: name of the function in the python file that returns a FieldSet object. Default is "create_fieldset".
"""
# check if filename exists
if not path.exists(filename):
if not os.path.exists(filename):
raise IOError(f"FieldSet module file {filename} does not exist")

# Importing the source file directly (following https://docs.python.org/3/library/importlib.html#importing-a-source-file-directly)
Expand Down
4 changes: 2 additions & 2 deletions parcels/interaction/interactionkernel.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import inspect
import sys
from collections import defaultdict
from sys import version_info

import numpy as np

Expand Down Expand Up @@ -93,7 +93,7 @@ def check_kernel_signature_on_version(self):
numkernelargs = []
if self._pyfunc is not None and isinstance(self._pyfunc, list):
for func in self._pyfunc:
if version_info[0] < 3:
if sys.version_info[0] < 3:
numkernelargs.append(
len(inspect.getargspec(func).args)
)
Expand Down
40 changes: 20 additions & 20 deletions parcels/kernel.py
Original file line number Diff line number Diff line change
@@ -1,15 +1,15 @@
import ast
import functools
import hashlib
import inspect
import math # noqa
import os
import random # noqa
import re
import sys
import types
from ast import FunctionDef, parse
from copy import deepcopy
from ctypes import byref, c_double, c_int
from hashlib import md5
from os import path, remove
from sys import platform, version_info
from time import time as ostime

import _ctypes
Expand Down Expand Up @@ -117,7 +117,7 @@ def _cache_key(self):
field_keys = "-".join(
[f"{name}:{field.units.__class__.__name__}" for name, field in self.field_args.items()])
key = self.name + self.ptype._cache_key + field_keys + ('TIME:%f' % ostime())
return md5(key.encode('utf-8')).hexdigest()
return hashlib.md5(key.encode('utf-8')).hexdigest()

@staticmethod
def fix_indentation(string):
Expand Down Expand Up @@ -177,7 +177,7 @@ def __init__(self, fieldset, ptype, pyfunc=None, funcname=None, funccode=None, p
self.funcvars = None
self.funccode = funccode or inspect.getsource(pyfunc.__code__)
# Parse AST if it is not provided explicitly
self.py_ast = py_ast or parse(BaseKernel.fix_indentation(self.funccode)).body[0]
self.py_ast = py_ast or ast.parse(BaseKernel.fix_indentation(self.funccode)).body[0]
if pyfunc is None:
# Extract user context by inspecting the call stack
stack = inspect.stack()
Expand All @@ -193,7 +193,7 @@ def __init__(self, fieldset, ptype, pyfunc=None, funcname=None, funccode=None, p
finally:
del stack # Remove cyclic references
# Compile and generate Python function from AST
py_mod = parse("")
py_mod = ast.parse("")
py_mod.body = [self.py_ast]
exec(compile(py_mod, "<ast>", "exec"), user_ctx)
self._pyfunc = user_ctx[self.funcname]
Expand Down Expand Up @@ -223,7 +223,7 @@ def __init__(self, fieldset, ptype, pyfunc=None, funcname=None, funccode=None, p
self.field_args[sF_name] = getattr(f, sF_component)
self.const_args = kernelgen.const_args
loopgen = LoopGenerator(fieldset, ptype)
if path.isfile(self._c_include):
if os.path.isfile(self._c_include):
with open(self._c_include) as f:
c_include_str = f.read()
else:
Expand Down Expand Up @@ -274,7 +274,7 @@ def _cache_key(self):
field_keys = "-".join(
[f"{name}:{field.units.__class__.__name__}" for name, field in self.field_args.items()])
key = self.name + self.ptype._cache_key + field_keys + ('TIME:%f' % ostime())
return md5(key.encode('utf-8')).hexdigest()
return hashlib.md5(key.encode('utf-8')).hexdigest()

def add_scipy_positionupdate_kernels(self):
# Adding kernels that set and update the coordinate changes
Expand Down Expand Up @@ -339,7 +339,7 @@ def check_fieldsets_in_kernels(self, pyfunc):
def check_kernel_signature_on_version(self):
numkernelargs = 0
if self._pyfunc is not None:
if version_info[0] < 3:
if sys.version_info[0] < 3:
numkernelargs = len(inspect.getargspec(self._pyfunc).args)
else:
numkernelargs = len(inspect.getfullargspec(self._pyfunc).args)
Expand Down Expand Up @@ -392,11 +392,11 @@ def get_kernel_compile_files(self):
if type(basename) in (list, dict, tuple, ndarray):
src_file_or_files = ["", ] * len(basename)
for i, src_file in enumerate(basename):
src_file_or_files[i] = f"{path.join(dyn_dir, src_file)}.c"
src_file_or_files[i] = f"{os.path.join(dyn_dir, src_file)}.c"
else:
src_file_or_files = f"{path.join(dyn_dir, basename)}.c"
lib_file = f"{path.join(dyn_dir, lib_path)}.{'dll' if platform == 'win32' else 'so'}"
log_file = f"{path.join(dyn_dir, basename)}.log"
src_file_or_files = f"{os.path.join(dyn_dir, basename)}.c"
lib_file = f"{os.path.join(dyn_dir, lib_path)}.{'dll' if sys.platform == 'win32' else 'so'}"
log_file = f"{os.path.join(dyn_dir, basename)}.log"
return src_file_or_files, lib_file, log_file

def compile(self, compiler):
Expand Down Expand Up @@ -430,8 +430,8 @@ def merge(self, kernel, kclass):
funcname = self.funcname + kernel.funcname
func_ast = None
if self.py_ast is not None:
func_ast = FunctionDef(name=funcname, args=self.py_ast.args, body=self.py_ast.body + kernel.py_ast.body,
decorator_list=[], lineno=1, col_offset=0)
func_ast = ast.FunctionDef(name=funcname, args=self.py_ast.args, body=self.py_ast.body + kernel.py_ast.body,
decorator_list=[], lineno=1, col_offset=0)
delete_cfiles = self.delete_cfiles and kernel.delete_cfiles
return kclass(self.fieldset, self.ptype, pyfunc=None,
funcname=funcname, funccode=self.funccode + kernel.funccode,
Expand Down Expand Up @@ -483,10 +483,10 @@ def from_list(cls, fieldset, ptype, pyfunc_list, *args, **kwargs):
@staticmethod
def cleanup_remove_files(lib_file, all_files_array, delete_cfiles):
if lib_file is not None:
if path.isfile(lib_file): # and delete_cfiles
[remove(s) for s in [lib_file, ] if path is not None and path.exists(s)]
if os.path.isfile(lib_file): # and delete_cfiles
[os.remove(s) for s in [lib_file, ] if os.path is not None and os.path.exists(s)]
if delete_cfiles and len(all_files_array) > 0:
[remove(s) for s in all_files_array if path is not None and path.exists(s)]
[os.remove(s) for s in all_files_array if os.path is not None and os.path.exists(s)]

@staticmethod
def cleanup_unload_lib(lib):
Expand All @@ -495,7 +495,7 @@ def cleanup_unload_lib(lib):
# naming scheme which is required on Windows OS'es to deal with updates to a Parcels' kernel.
if lib is not None:
try:
_ctypes.FreeLibrary(lib._handle) if platform == 'win32' else _ctypes.dlclose(lib._handle)
_ctypes.FreeLibrary(lib._handle) if sys.platform == 'win32' else _ctypes.dlclose(lib._handle)
except:
pass

Expand Down
4 changes: 2 additions & 2 deletions parcels/particleset.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
import os
import sys
from abc import ABC
from copy import copy
from datetime import date, datetime
from datetime import timedelta as delta
from os import path

import cftime
import numpy as np
Expand Down Expand Up @@ -855,7 +855,7 @@ def execute(self, pyfunc=AdvectionRK4, pyfunc_inter=None, endtime=None, runtime=
if self.particledata.ptype.uses_jit:
self.kernel.remove_lib()
cppargs = ['-DDOUBLE_COORD_VARIABLES'] if self.particledata.lonlatdepth_dtype else None
self.kernel.compile(compiler=GNUCompiler(cppargs=cppargs, incdirs=[path.join(get_package_dir(), 'include'), "."]))
self.kernel.compile(compiler=GNUCompiler(cppargs=cppargs, incdirs=[os.path.join(get_package_dir(), 'include'), "."]))
self.kernel.load_lib()
if output_file:
output_file.add_metadata('parcels_kernels', self.kernel.name)
Expand Down
16 changes: 8 additions & 8 deletions parcels/rng.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import os
import sys
import uuid
from ctypes import c_float, c_int
from os import path, remove
from sys import platform

import _ctypes
import numpy.ctypeslib as npct
Expand Down Expand Up @@ -78,7 +78,7 @@ def __del__(self):
def unload_lib(self):
# Unload the currently loaded dynamic linked library to be secure
if self._lib is not None and self._loaded and _ctypes is not None:
_ctypes.FreeLibrary(self._lib._handle) if platform == 'win32' else _ctypes.dlclose(self._lib._handle)
_ctypes.FreeLibrary(self._lib._handle) if sys.platform == 'win32' else _ctypes.dlclose(self._lib._handle)
del self._lib
self._lib = None
self._loaded = False
Expand All @@ -90,22 +90,22 @@ def load_lib(self):
def remove_lib(self):
# If file already exists, pull new names. This is necessary on a Windows machine, because
# Python's ctype does not deal in any sort of manner well with dynamic linked libraries on this OS.
if self._lib is not None and self._loaded and _ctypes is not None and path.isfile(self.lib_file):
[remove(s) for s in [self.src_file, self.lib_file, self.log_file]]
if self._lib is not None and self._loaded and _ctypes is not None and os.path.isfile(self.lib_file):
[os.remove(s) for s in [self.src_file, self.lib_file, self.log_file]]

def compile(self, compiler=None):
if self.src_file is None or self.lib_file is None or self.log_file is None:
basename = 'parcels_random_%s' % uuid.uuid4()
lib_filename = "lib" + basename
basepath = path.join(get_cache_dir(), f"{basename}")
libpath = path.join(get_cache_dir(), f"{lib_filename}")
basepath = os.path.join(get_cache_dir(), f"{basename}")
libpath = os.path.join(get_cache_dir(), f"{lib_filename}")
self.src_file = f"{basepath}.c"
self.lib_file = f"{libpath}.so"
self.log_file = f"{basepath}.log"
ccompiler = compiler
if ccompiler is None:
cppargs = []
incdirs = [path.join(get_package_dir(), 'include'), ]
incdirs = [os.path.join(get_package_dir(), 'include'), ]
ccompiler = GNUCompiler(cppargs=cppargs, incdirs=incdirs)
if self._lib is None:
with open(self.src_file, 'w+') as f:
Expand Down
6 changes: 3 additions & 3 deletions tests/test_data/create_testfields.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
except:
asizeof = None

from os import path
import os

import xarray as xr

Expand Down Expand Up @@ -66,8 +66,8 @@ def generate_perlin_testfield():
print(f"Perlin V-field requires {V.size * V.itemsize} bytes of memory.")
fieldset = FieldSet.from_data(data, dimensions, mesh='spherical', transpose=False)
# fieldset.write("perlinfields") # can also be used, but then has a ghost depth dimension
write_simple_2Dt(fieldset.U, path.join(path.dirname(__file__), 'perlinfields'), varname='vozocrtx')
write_simple_2Dt(fieldset.V, path.join(path.dirname(__file__), 'perlinfields'), varname='vomecrty')
write_simple_2Dt(fieldset.U, os.path.join(os.path.dirname(__file__), 'perlinfields'), varname='vozocrtx')
write_simple_2Dt(fieldset.V, os.path.join(os.path.dirname(__file__), 'perlinfields'), varname='vomecrty')


def write_simple_2Dt(field, filename, varname=None):
Expand Down
16 changes: 8 additions & 8 deletions tests/test_data/fieldset_nemo.py
Original file line number Diff line number Diff line change
@@ -1,17 +1,17 @@
from os import path
import os

import parcels


def create_fieldset(indices=None):
data_path = path.join(path.dirname(__file__))
data_path = os.path.join(os.path.dirname(__file__))

filenames = {'U': {'lon': path.join(data_path, 'mask_nemo_cross_180lon.nc'),
'lat': path.join(data_path, 'mask_nemo_cross_180lon.nc'),
'data': path.join(data_path, 'Uu_eastward_nemo_cross_180lon.nc')},
'V': {'lon': path.join(data_path, 'mask_nemo_cross_180lon.nc'),
'lat': path.join(data_path, 'mask_nemo_cross_180lon.nc'),
'data': path.join(data_path, 'Vv_eastward_nemo_cross_180lon.nc')}}
filenames = {'U': {'lon': os.path.join(data_path, 'mask_nemo_cross_180lon.nc'),
'lat': os.path.join(data_path, 'mask_nemo_cross_180lon.nc'),
'data': os.path.join(data_path, 'Uu_eastward_nemo_cross_180lon.nc')},
'V': {'lon': os.path.join(data_path, 'mask_nemo_cross_180lon.nc'),
'lat': os.path.join(data_path, 'mask_nemo_cross_180lon.nc'),
'data': os.path.join(data_path, 'Vv_eastward_nemo_cross_180lon.nc')}}
variables = {'U': 'U', 'V': 'V'}
dimensions = {'lon': 'glamf', 'lat': 'gphif', 'time': 'time_counter'}
indices = indices or {}
Expand Down
16 changes: 8 additions & 8 deletions tests/test_data/fieldset_nemo_error.py
Original file line number Diff line number Diff line change
@@ -1,17 +1,17 @@
from os import path
import os

import parcels


def random_function_name():
data_path = path.join(path.dirname(__file__))
data_path = os.path.join(os.path.dirname(__file__))

filenames = {'U': {'lon': path.join(data_path, 'mask_nemo_cross_180lon.nc'),
'lat': path.join(data_path, 'mask_nemo_cross_180lon.nc'),
'data': path.join(data_path, 'Uu_eastward_nemo_cross_180lon.nc')},
'V': {'lon': path.join(data_path, 'mask_nemo_cross_180lon.nc'),
'lat': path.join(data_path, 'mask_nemo_cross_180lon.nc'),
'data': path.join(data_path, 'Vv_eastward_nemo_cross_180lon.nc')}}
filenames = {'U': {'lon': os.path.join(data_path, 'mask_nemo_cross_180lon.nc'),
'lat': os.path.join(data_path, 'mask_nemo_cross_180lon.nc'),
'data': os.path.join(data_path, 'Uu_eastward_nemo_cross_180lon.nc')},
'V': {'lon': os.path.join(data_path, 'mask_nemo_cross_180lon.nc'),
'lat': os.path.join(data_path, 'mask_nemo_cross_180lon.nc'),
'data': os.path.join(data_path, 'Vv_eastward_nemo_cross_180lon.nc')}}
variables = {'U': 'U', 'V': 'V'}
dimensions = {'lon': 'glamf', 'lat': 'gphif', 'time': 'time_counter'}
return parcels.FieldSet.from_nemo(filenames, variables, dimensions)
Expand Down
Loading
Loading