diff --git a/parcels/compilation/codecompiler.py b/parcels/compilation/codecompiler.py
index 406daf597..bcc508897 100644
--- a/parcels/compilation/codecompiler.py
+++ b/parcels/compilation/codecompiler.py
@@ -110,7 +110,7 @@ def __init__(self, cppargs=None, ldargs=None, incdirs=None, libdirs=None, libs=N
         self._ldargs += lflags
         self._ldargs += ldargs
         if len(Lflags) > 0:
-            self._ldargs += ["-Wl, -rpath=%s" % (":".join(libdirs))]
+            self._ldargs += [f"-Wl, -rpath={':'.join(libdirs)}"]
         self._ldargs += arch_flag
         self._incdirs = incdirs
         self._libdirs = libdirs
diff --git a/parcels/compilation/codegenerator.py b/parcels/compilation/codegenerator.py
index 2569dcc6c..acce0f7dc 100644
--- a/parcels/compilation/codegenerator.py
+++ b/parcels/compilation/codegenerator.py
@@ -32,7 +32,7 @@ def __getattr__(self, attr):
         elif isinstance(getattr(self.obj, attr), VectorField):
             return VectorFieldNode(getattr(self.obj, attr), ccode=f"{self.ccode}->{attr}")
         else:
-            return ConstNode(getattr(self.obj, attr), ccode="%s" % (attr))
+            return ConstNode(getattr(self.obj, attr), ccode=f"{attr}")
 
 
 class FieldNode(IntrinsicNode):
@@ -480,13 +480,13 @@ def visit_FunctionDef(self, node):
             c.Value("double", "time"),
         ]
         for field in self.field_args.values():
-            args += [c.Pointer(c.Value("CField", "%s" % field.ccode_name))]
+            args += [c.Pointer(c.Value("CField", f"{field.ccode_name}"))]
         for field in self.vector_field_args.values():
             for fcomponent in ["U", "V", "W"]:
                 try:
                     f = getattr(field, fcomponent)
                     if f.ccode_name not in self.field_args:
-                        args += [c.Pointer(c.Value("CField", "%s" % f.ccode_name))]
+                        args += [c.Pointer(c.Value("CField", f"{f.ccode_name}"))]
                         self.field_args[f.ccode_name] = f
                 except:
                     pass  # field.W does not always exist
@@ -519,9 +519,9 @@ def visit_Call(self, node):
         if isinstance(node.func, PrintNode):
             # Write our own Print parser because Python3-AST does not seem to have one
             if isinstance(node.args[0], ast.Str):
-                node.ccode = str(c.Statement('printf("%s\\n")' % (node.args[0].s)))
+                node.ccode = str(c.Statement(f'printf("{node.args[0].s}\\n")'))
             elif isinstance(node.args[0], ast.Name):
-                node.ccode = str(c.Statement('printf("%%f\\n", %s)' % (node.args[0].id)))
+                node.ccode = str(c.Statement(f'printf("%f\\n", {node.args[0].id})'))
             elif isinstance(node.args[0], ast.BinOp):
                 if hasattr(node.args[0].right, "ccode"):
                     args = node.args[0].right.ccode
@@ -536,12 +536,12 @@ def visit_Call(self, node):
                             args.append(a.id)
                 else:
                     args = []
-                s = 'printf("%s\\n"' % node.args[0].left.s
+                s = f'printf("{node.args[0].left.s}\\n"'
                 if isinstance(args, str):
-                    s = s + (", %s)" % args)
+                    s = s + f", {args})"
                 else:
                     for arg in args:
-                        s = s + (", %s" % arg)
+                        s = s + (f", {arg}")
                 s = s + ")"
                 node.ccode = str(c.Statement(s))
         else:
@@ -559,7 +559,7 @@ def visit_Call(self, node):
                 elif isinstance(a, ParticleNode):
                     continue
                 elif pointer_args:
-                    a.ccode = "&%s" % a.ccode
+                    a.ccode = f"&{a.ccode}"
             ccode_args = ", ".join([a.ccode for a in node.args[pointer_args:]])
             try:
                 if isinstance(node.func, str):
@@ -733,7 +733,7 @@ def visit_BoolOp(self, node):
         self.visit(node.op)
         for v in node.values:
             self.visit(v)
-        op_str = " %s " % node.op.ccode
+        op_str = f" {node.op.ccode} "
         node.ccode = op_str.join([v.ccode for v in node.values])
 
     def visit_Eq(self, node):
@@ -804,7 +804,7 @@ def visit_ConstNode(self, node):
 
     def visit_Return(self, node):
         self.visit(node.value)
-        node.ccode = c.Statement("return %s" % node.value.ccode)
+        node.ccode = c.Statement(f"return {node.value.ccode}")
 
     def visit_FieldEvalNode(self, node):
         self.visit(node.field)
@@ -900,16 +900,16 @@ def visit_Print(self, node):
         for n in node.values:
             self.visit(n)
         if hasattr(node.values[0], "s"):
-            node.ccode = c.Statement('printf("%s\\n")' % (n.ccode))
+            node.ccode = c.Statement(f'printf("{n.ccode}\\n")')
             return
         if hasattr(node.values[0], "s_print"):
             args = node.values[0].right.ccode
-            s = 'printf("%s\\n"' % node.values[0].left.ccode
+            s = f'printf("{node.values[0].left.ccode}\\n"'
             if isinstance(args, str):
-                s = s + (", %s)" % args)
+                s = s + f", {args})"
             else:
                 for arg in args:
-                    s = s + (", %s" % arg)
+                    s = s + (f", {arg}")
             s = s + ")"
             node.ccode = c.Statement(s)
             return
@@ -964,7 +964,7 @@ def generate(self, funcname, field_args, const_args, kernel_ast, c_include):
             c.Value("double", "dt"),
         ]
         for field, _ in field_args.items():
-            args += [c.Pointer(c.Value("CField", "%s" % field))]
+            args += [c.Pointer(c.Value("CField", f"{field}"))]
         for const, _ in const_args.items():
             args += [c.Value("double", const)]  # are we SURE those const's are double's ?
         fargs_str = ", ".join(["particles->time_nextloop[pnum]"] + list(field_args.keys()) + list(const_args.keys()))
diff --git a/parcels/fieldset.py b/parcels/fieldset.py
index fda0d4701..3f02cad20 100644
--- a/parcels/fieldset.py
+++ b/parcels/fieldset.py
@@ -1220,7 +1220,7 @@ def from_parcels(
         extra_fields.update({"U": uvar, "V": vvar})
         for vars in extra_fields:
             dimensions[vars] = deepcopy(default_dims)
-            dimensions[vars]["depth"] = "depth%s" % vars.lower()
+            dimensions[vars]["depth"] = f"depth{vars.lower()}"
         filenames = {v: str(f"{basename}{v}.nc") for v in extra_fields.keys()}
         return cls.from_netcdf(
             filenames,
diff --git a/parcels/kernel.py b/parcels/kernel.py
index b144abc72..37578af71 100644
--- a/parcels/kernel.py
+++ b/parcels/kernel.py
@@ -128,7 +128,7 @@ def _cache_key(self):
         field_keys = "-".join(
             [f"{name}:{field.units.__class__.__name__}" for name, field in self.field_args.items()]
         )
-        key = self.name + self.ptype._cache_key + field_keys + ("TIME:%f" % ostime())
+        key = self.name + self.ptype._cache_key + field_keys + (f"TIME:{ostime():f}")
         return hashlib.md5(key.encode("utf-8")).hexdigest()
 
     def remove_deleted(self, pset):
@@ -304,7 +304,7 @@ def _cache_key(self):
         field_keys = "-".join(
             [f"{name}:{field.units.__class__.__name__}" for name, field in self.field_args.items()]
         )
-        key = self.name + self.ptype._cache_key + field_keys + ("TIME:%f" % ostime())
+        key = self.name + self.ptype._cache_key + field_keys + (f"TIME:{ostime():f}")
         return hashlib.md5(key.encode("utf-8")).hexdigest()
 
     def add_scipy_positionupdate_kernels(self):
@@ -435,7 +435,7 @@ def get_kernel_compile_files(self):
             self._cache_key
         )  # only required here because loading is done by Kernel class instead of Compiler class
         dyn_dir = get_cache_dir()
-        basename = "%s_0" % cache_name
+        basename = f"{cache_name}_0"
         lib_path = "lib" + basename
         src_file_or_files = None
         if type(basename) in (list, dict, tuple, ndarray):
diff --git a/parcels/particle.py b/parcels/particle.py
index cd524275b..dbb0c7f60 100644
--- a/parcels/particle.py
+++ b/parcels/particle.py
@@ -39,13 +39,13 @@ def __get__(self, instance, cls):
         if issubclass(cls, JITParticle):
             return instance._cptr.__getitem__(self.name)
         else:
-            return getattr(instance, "_%s" % self.name, self.initial)
+            return getattr(instance, f"_{self.name}", self.initial)
 
     def __set__(self, instance, value):
         if isinstance(instance, JITParticle):
             instance._cptr.__setitem__(self.name, value)
         else:
-            setattr(instance, "_%s" % self.name, value)
+            setattr(instance, f"_{self.name}", value)
 
     def __repr__(self):
         return f"PVar<{self.name}|{self.dtype}>"
diff --git a/parcels/particlefile.py b/parcels/particlefile.py
index 6e047130b..0aa61e349 100644
--- a/parcels/particlefile.py
+++ b/parcels/particlefile.py
@@ -121,7 +121,7 @@ def __init__(self, name, particleset, outputdt=np.inf, chunks=None, create_new_z
                 f"The ParticleFile name contains .zarr extension, but zarr files will be written per processor in MPI mode at {self.fname}"
             )
         else:
-            self.fname = name if extension in [".zarr"] else "%s.zarr" % name
+            self.fname = name if extension in [".zarr"] else f"{name}.zarr"
 
     def _create_variables_attribute_dict(self):
         """Creates the dictionary with variable attributes.
@@ -209,7 +209,7 @@ def write(self, pset, time, indices=None):
         time = time.total_seconds() if isinstance(time, timedelta) else time
 
         if pset.particledata._ncount == 0:
-            logger.warning("ParticleSet is empty on writing as array at time %g" % time)
+            logger.warning(f"ParticleSet is empty on writing as array at time {time:g}")
             return
 
         if indices is None:
diff --git a/parcels/rng.py b/parcels/rng.py
index 602e5c874..bac7fbce8 100644
--- a/parcels/rng.py
+++ b/parcels/rng.py
@@ -95,7 +95,7 @@ def remove_lib(self):
 
     def compile(self, compiler=None):
         if self.src_file is None or self.lib_file is None or self.log_file is None:
-            basename = "parcels_random_%s" % uuid.uuid4()
+            basename = f"parcels_random_{uuid.uuid4()}"
             lib_filename = "lib" + basename
             basepath = os.path.join(get_cache_dir(), f"{basename}")
             libpath = os.path.join(get_cache_dir(), f"{lib_filename}")
diff --git a/parcels/tools/converters.py b/parcels/tools/converters.py
index c8ccf8003..bef13d14e 100644
--- a/parcels/tools/converters.py
+++ b/parcels/tools/converters.py
@@ -135,7 +135,7 @@ def fulltime(self, time):
             raise RuntimeError(f"Calendar {self.calendar} not implemented in TimeConverter")
 
     def __repr__(self):
-        return "%s" % self.time_origin
+        return f"{self.time_origin}"
 
     def __eq__(self, other):
         other = other.time_origin if isinstance(other, TimeConverter) else other
@@ -217,10 +217,10 @@ def to_source(self, value, x, y, z):
         return value * 1000.0 * 1.852 * 60.0 * cos(y * pi / 180)
 
     def ccode_to_target(self, x, y, z):
-        return "(1.0 / (1000. * 1.852 * 60. * cos(%s * M_PI / 180)))" % y
+        return f"(1.0 / (1000. * 1.852 * 60. * cos({y} * M_PI / 180)))"
 
     def ccode_to_source(self, x, y, z):
-        return "(1000. * 1.852 * 60. * cos(%s * M_PI / 180))" % y
+        return f"(1000. * 1.852 * 60. * cos({y} * M_PI / 180))"
 
 
 class GeographicSquare(UnitConverter):
@@ -257,10 +257,10 @@ def to_source(self, value, x, y, z):
         return value * pow(1000.0 * 1.852 * 60.0 * cos(y * pi / 180), 2)
 
     def ccode_to_target(self, x, y, z):
-        return "pow(1.0 / (1000. * 1.852 * 60. * cos(%s * M_PI / 180)), 2)" % y
+        return f"pow(1.0 / (1000. * 1.852 * 60. * cos({y} * M_PI / 180)), 2)"
 
     def ccode_to_source(self, x, y, z):
-        return "pow((1000. * 1.852 * 60. * cos(%s * M_PI / 180)), 2)" % y
+        return f"pow((1000. * 1.852 * 60. * cos({y} * M_PI / 180)), 2)"
 
 
 unitconverters_map = {
diff --git a/parcels/tools/global_statics.py b/parcels/tools/global_statics.py
index 0e97bac0d..381efc363 100644
--- a/parcels/tools/global_statics.py
+++ b/parcels/tools/global_statics.py
@@ -34,6 +34,6 @@ def get_package_dir():
 
 
 def get_cache_dir():
-    directory = os.path.join(gettempdir(), "parcels-%s" % getuid())
+    directory = os.path.join(gettempdir(), f"parcels-{getuid()}")
     Path(directory).mkdir(exist_ok=True)
     return directory
diff --git a/parcels/tools/timer.py b/parcels/tools/timer.py
index 02daf75ab..4a93c03dd 100644
--- a/parcels/tools/timer.py
+++ b/parcels/tools/timer.py
@@ -50,7 +50,7 @@ def print_tree_sequential(self, step=0, root_time=0, parent_time=0):
         print(" " * (step + 1), end="")
         if step > 0:
             print("(%3d%%) " % round(time / parent_time * 100), end="")
-        t_str = "%1.3e s" % time if root_time < 300 else datetime.timedelta(seconds=time)
+        t_str = f"{time:1.3e} s" if root_time < 300 else datetime.timedelta(seconds=time)
         print(f"Timer {(self._name).ljust(20 - 2*step + 7*(step == 0))}: {t_str}")
         for child in self._children:
             child.print_tree_sequential(step + 1, root_time, time)
diff --git a/pyproject.toml b/pyproject.toml
index d42401fbe..64f1927a9 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -66,7 +66,7 @@ select = [
     "F", # pyflakes
     "I", # isort
     "B", # Bugbear
-    # "UP", # pyupgrade
+    "UP", # pyupgrade
     "LOG", # logging
     "ICN", # import conventions
     "G", # logging-format
diff --git a/tests/test_kernel_language.py b/tests/test_kernel_language.py
index 19ca2289a..ec3a20da0 100644
--- a/tests/test_kernel_language.py
+++ b/tests/test_kernel_language.py
@@ -277,7 +277,7 @@ def kernel(particle, fieldset, time):
 
     def kernel2(particle, fieldset, time):
         tmp = 3
-        print("%f" % (tmp))
+        print(f"{tmp:f}")
 
     pset.execute(kernel2, endtime=2.0, dt=1.0, verbose_progress=False)
     out, err = capfd.readouterr()