Update format strings to f-strings
VeckoTheGecko committed Aug 27, 2024
1 parent 997e3f8 commit 85ffd62
Showing 11 changed files with 34 additions and 34 deletions.
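Every hunk below applies the same mechanical change: printf-style "%" formatting is swapped for an equivalent f-string, leaving the resulting text identical. A minimal sketch of the equivalences the commit relies on, using a hypothetical value rather than anything from the repository:

    value = 42  # hypothetical value for illustration
    assert "%s" % value == f"{value}"    # plain substitution
    assert "%f" % value == f"{value:f}"  # printf conversion specifiers map onto format specs
    assert "%g" % value == f"{value:g}"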
2 changes: 1 addition & 1 deletion parcels/compilation/codecompiler.py
@@ -110,7 +110,7 @@ def __init__(self, cppargs=None, ldargs=None, incdirs=None, libdirs=None, libs=N
self._ldargs += lflags
self._ldargs += ldargs
if len(Lflags) > 0:
-self._ldargs += ["-Wl, -rpath=%s" % (":".join(libdirs))]
+self._ldargs += [f"-Wl, -rpath={':'.join(libdirs)}"]
self._ldargs += arch_flag
self._incdirs = incdirs
self._libdirs = libdirs
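A detail worth noting in the codecompiler.py hunk: the replacement field uses single quotes for ':'.join(...) inside a double-quoted f-string. A sketch of why that quote choice matters, assuming hypothetical directories and a Python version before 3.12 (where reusing the outer quote character inside a replacement field is a syntax error):

    libdirs = ["/usr/local/lib", "/opt/lib"]    # hypothetical directories for illustration
    # f"-Wl, -rpath={":".join(libdirs)}"        # SyntaxError before Python 3.12: quote reuse
    rpath = f"-Wl, -rpath={':'.join(libdirs)}"  # inner quotes differ from the outer ones
    assert rpath == "-Wl, -rpath=%s" % (":".join(libdirs))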
32 changes: 16 additions & 16 deletions parcels/compilation/codegenerator.py
@@ -32,7 +32,7 @@ def __getattr__(self, attr):
elif isinstance(getattr(self.obj, attr), VectorField):
return VectorFieldNode(getattr(self.obj, attr), ccode=f"{self.ccode}->{attr}")
else:
-return ConstNode(getattr(self.obj, attr), ccode="%s" % (attr))
+return ConstNode(getattr(self.obj, attr), ccode=f"{attr}")


class FieldNode(IntrinsicNode):
@@ -480,13 +480,13 @@ def visit_FunctionDef(self, node):
c.Value("double", "time"),
]
for field in self.field_args.values():
-args += [c.Pointer(c.Value("CField", "%s" % field.ccode_name))]
+args += [c.Pointer(c.Value("CField", f"{field.ccode_name}"))]
for field in self.vector_field_args.values():
for fcomponent in ["U", "V", "W"]:
try:
f = getattr(field, fcomponent)
if f.ccode_name not in self.field_args:
-args += [c.Pointer(c.Value("CField", "%s" % f.ccode_name))]
+args += [c.Pointer(c.Value("CField", f"{f.ccode_name}"))]
self.field_args[f.ccode_name] = f
except:
pass # field.W does not always exist
@@ -519,9 +519,9 @@ def visit_Call(self, node):
if isinstance(node.func, PrintNode):
# Write our own Print parser because Python3-AST does not seem to have one
if isinstance(node.args[0], ast.Str):
-node.ccode = str(c.Statement('printf("%s\\n")' % (node.args[0].s)))
+node.ccode = str(c.Statement(f'printf("{node.args[0].s}\\n")'))
elif isinstance(node.args[0], ast.Name):
-node.ccode = str(c.Statement('printf("%%f\\n", %s)' % (node.args[0].id)))
+node.ccode = str(c.Statement(f'printf("%f\\n", {node.args[0].id})'))
elif isinstance(node.args[0], ast.BinOp):
if hasattr(node.args[0].right, "ccode"):
args = node.args[0].right.ccode
@@ -536,12 +536,12 @@ def visit_Call(self, node):
args.append(a.id)
else:
args = []
-s = 'printf("%s\\n"' % node.args[0].left.s
+s = f'printf("{node.args[0].left.s}\\n"'
if isinstance(args, str):
-s = s + (", %s)" % args)
+s = s + f", {args})"
else:
for arg in args:
-s = s + (", %s" % arg)
+s = s + (f", {arg}")
s = s + ")"
node.ccode = str(c.Statement(s))
else:
@@ -559,7 +559,7 @@ def visit_Call(self, node):
elif isinstance(a, ParticleNode):
continue
elif pointer_args:
-a.ccode = "&%s" % a.ccode
+a.ccode = f"&{a.ccode}"
ccode_args = ", ".join([a.ccode for a in node.args[pointer_args:]])
try:
if isinstance(node.func, str):
@@ -733,7 +733,7 @@ def visit_BoolOp(self, node):
self.visit(node.op)
for v in node.values:
self.visit(v)
-op_str = " %s " % node.op.ccode
+op_str = f" {node.op.ccode} "
node.ccode = op_str.join([v.ccode for v in node.values])

def visit_Eq(self, node):
@@ -804,7 +804,7 @@ def visit_ConstNode(self, node):

def visit_Return(self, node):
self.visit(node.value)
-node.ccode = c.Statement("return %s" % node.value.ccode)
+node.ccode = c.Statement(f"return {node.value.ccode}")

def visit_FieldEvalNode(self, node):
self.visit(node.field)
@@ -900,16 +900,16 @@ def visit_Print(self, node):
for n in node.values:
self.visit(n)
if hasattr(node.values[0], "s"):
-node.ccode = c.Statement('printf("%s\\n")' % (n.ccode))
+node.ccode = c.Statement(f'printf("{n.ccode}\\n")')
return
if hasattr(node.values[0], "s_print"):
args = node.values[0].right.ccode
-s = 'printf("%s\\n"' % node.values[0].left.ccode
+s = f'printf("{node.values[0].left.ccode}\\n"'
if isinstance(args, str):
-s = s + (", %s)" % args)
+s = s + f", {args})"
else:
for arg in args:
-s = s + (", %s" % arg)
+s = s + (f", {arg}")
s = s + ")"
node.ccode = c.Statement(s)
return
@@ -964,7 +964,7 @@ def generate(self, funcname, field_args, const_args, kernel_ast, c_include):
c.Value("double", "dt"),
]
for field, _ in field_args.items():
-args += [c.Pointer(c.Value("CField", "%s" % field))]
+args += [c.Pointer(c.Value("CField", f"{field}"))]
for const, _ in const_args.items():
args += [c.Value("double", const)] # are we SURE those const's are double's ?
fargs_str = ", ".join(["particles->time_nextloop[pnum]"] + list(field_args.keys()) + list(const_args.keys()))
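One subtlety in the printf-generating hunks of codegenerator.py: with "%" formatting a literal percent sign has to be doubled ("%%f"), whereas an f-string only treats braces specially, so the C format specifier "%f" can be written directly. A sketch with a hypothetical identifier in place of node.args[0].id:

    name = "particle_dt"                 # hypothetical C identifier for illustration
    old = 'printf("%%f\\n", %s)' % name  # '%%' escapes the literal percent sign
    new = f'printf("%f\\n", {name})'     # f-strings leave '%' alone; only braces are special
    assert old == new == 'printf("%f\\n", particle_dt)'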
2 changes: 1 addition & 1 deletion parcels/fieldset.py
@@ -1220,7 +1220,7 @@ def from_parcels(
extra_fields.update({"U": uvar, "V": vvar})
for vars in extra_fields:
dimensions[vars] = deepcopy(default_dims)
-dimensions[vars]["depth"] = "depth%s" % vars.lower()
+dimensions[vars]["depth"] = f"depth{vars.lower()}"
filenames = {v: str(f"{basename}{v}.nc") for v in extra_fields.keys()}
return cls.from_netcdf(
filenames,
6 changes: 3 additions & 3 deletions parcels/kernel.py
@@ -128,7 +128,7 @@ def _cache_key(self):
field_keys = "-".join(
[f"{name}:{field.units.__class__.__name__}" for name, field in self.field_args.items()]
)
-key = self.name + self.ptype._cache_key + field_keys + ("TIME:%f" % ostime())
+key = self.name + self.ptype._cache_key + field_keys + (f"TIME:{ostime():f}")
return hashlib.md5(key.encode("utf-8")).hexdigest()

def remove_deleted(self, pset):
@@ -304,7 +304,7 @@ def _cache_key(self):
field_keys = "-".join(
[f"{name}:{field.units.__class__.__name__}" for name, field in self.field_args.items()]
)
-key = self.name + self.ptype._cache_key + field_keys + ("TIME:%f" % ostime())
+key = self.name + self.ptype._cache_key + field_keys + (f"TIME:{ostime():f}")
return hashlib.md5(key.encode("utf-8")).hexdigest()

def add_scipy_positionupdate_kernels(self):
@@ -435,7 +435,7 @@ def get_kernel_compile_files(self):
self._cache_key
) # only required here because loading is done by Kernel class instead of Compiler class
dyn_dir = get_cache_dir()
-basename = "%s_0" % cache_name
+basename = f"{cache_name}_0"
lib_path = "lib" + basename
src_file_or_files = None
if type(basename) in (list, dict, tuple, ndarray):
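In the kernel.py cache-key hunks, the "%f" conversion becomes an explicit ":f" format spec, so the key text is unchanged. A sketch with a hypothetical timestamp standing in for ostime():

    t = 1724760000.123456                  # hypothetical timestamp for illustration
    assert "TIME:%f" % t == f"TIME:{t:f}"  # both render six decimal places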
4 changes: 2 additions & 2 deletions parcels/particle.py
@@ -39,13 +39,13 @@ def __get__(self, instance, cls):
if issubclass(cls, JITParticle):
return instance._cptr.__getitem__(self.name)
else:
-return getattr(instance, "_%s" % self.name, self.initial)
+return getattr(instance, f"_{self.name}", self.initial)

def __set__(self, instance, value):
if isinstance(instance, JITParticle):
instance._cptr.__setitem__(self.name, value)
else:
-setattr(instance, "_%s" % self.name, value)
+setattr(instance, f"_{self.name}", value)

def __repr__(self):
return f"PVar<{self.name}|{self.dtype}>"
4 changes: 2 additions & 2 deletions parcels/particlefile.py
@@ -121,7 +121,7 @@ def __init__(self, name, particleset, outputdt=np.inf, chunks=None, create_new_z
f"The ParticleFile name contains .zarr extension, but zarr files will be written per processor in MPI mode at {self.fname}"
)
else:
-self.fname = name if extension in [".zarr"] else "%s.zarr" % name
+self.fname = name if extension in [".zarr"] else f"{name}.zarr"

def _create_variables_attribute_dict(self):
"""Creates the dictionary with variable attributes.
@@ -209,7 +209,7 @@ def write(self, pset, time, indices=None):
time = time.total_seconds() if isinstance(time, timedelta) else time

if pset.particledata._ncount == 0:
-logger.warning("ParticleSet is empty on writing as array at time %g" % time)
+logger.warning(f"ParticleSet is empty on writing as array at time {time:g}")
return

if indices is None:
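The particlefile.py warning keeps its compact rendering because "%g" maps onto the ":g" format spec. A sketch with a hypothetical model time:

    time = 3600.0  # hypothetical model time in seconds, for illustration
    assert ("ParticleSet is empty on writing as array at time %g" % time
            == f"ParticleSet is empty on writing as array at time {time:g}")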
2 changes: 1 addition & 1 deletion parcels/rng.py
@@ -95,7 +95,7 @@ def remove_lib(self):

def compile(self, compiler=None):
if self.src_file is None or self.lib_file is None or self.log_file is None:
-basename = "parcels_random_%s" % uuid.uuid4()
+basename = f"parcels_random_{uuid.uuid4()}"
lib_filename = "lib" + basename
basepath = os.path.join(get_cache_dir(), f"{basename}")
libpath = os.path.join(get_cache_dir(), f"{lib_filename}")
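In rng.py the UUID object is interpolated directly; "%s" calls str() on it, and an f-string replacement field without a format spec does the same, so the generated library name is identical. A quick sketch:

    import uuid

    u = uuid.uuid4()
    assert "parcels_random_%s" % u == f"parcels_random_{u}"  # both fall back to str(u)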
10 changes: 5 additions & 5 deletions parcels/tools/converters.py
@@ -135,7 +135,7 @@ def fulltime(self, time):
raise RuntimeError(f"Calendar {self.calendar} not implemented in TimeConverter")

def __repr__(self):
-return "%s" % self.time_origin
+return f"{self.time_origin}"

def __eq__(self, other):
other = other.time_origin if isinstance(other, TimeConverter) else other
@@ -217,10 +217,10 @@ def to_source(self, value, x, y, z):
return value * 1000.0 * 1.852 * 60.0 * cos(y * pi / 180)

def ccode_to_target(self, x, y, z):
-return "(1.0 / (1000. * 1.852 * 60. * cos(%s * M_PI / 180)))" % y
+return f"(1.0 / (1000. * 1.852 * 60. * cos({y} * M_PI / 180)))"

def ccode_to_source(self, x, y, z):
-return "(1000. * 1.852 * 60. * cos(%s * M_PI / 180))" % y
+return f"(1000. * 1.852 * 60. * cos({y} * M_PI / 180))"


class GeographicSquare(UnitConverter):
@@ -257,10 +257,10 @@ def to_source(self, value, x, y, z):
return value * pow(1000.0 * 1.852 * 60.0 * cos(y * pi / 180), 2)

def ccode_to_target(self, x, y, z):
-return "pow(1.0 / (1000. * 1.852 * 60. * cos(%s * M_PI / 180)), 2)" % y
+return f"pow(1.0 / (1000. * 1.852 * 60. * cos({y} * M_PI / 180)), 2)"

def ccode_to_source(self, x, y, z):
-return "pow((1000. * 1.852 * 60. * cos(%s * M_PI / 180)), 2)" % y
+return f"pow((1000. * 1.852 * 60. * cos({y} * M_PI / 180)), 2)"


unitconverters_map = {
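The converters.py methods build C source fragments, and interpolating the y expression produces the same string either way. A sketch with a hypothetical C expression for y:

    y = "particles->lat[pnum]"  # hypothetical C expression for illustration
    assert (f"(1000. * 1.852 * 60. * cos({y} * M_PI / 180))"
            == "(1000. * 1.852 * 60. * cos(%s * M_PI / 180))" % y)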
2 changes: 1 addition & 1 deletion parcels/tools/global_statics.py
@@ -34,6 +34,6 @@ def get_package_dir():


def get_cache_dir():
-directory = os.path.join(gettempdir(), "parcels-%s" % getuid())
+directory = os.path.join(gettempdir(), f"parcels-{getuid()}")
Path(directory).mkdir(exist_ok=True)
return directory
2 changes: 1 addition & 1 deletion parcels/tools/timer.py
@@ -50,7 +50,7 @@ def print_tree_sequential(self, step=0, root_time=0, parent_time=0):
print(" " * (step + 1), end="")
if step > 0:
print("(%3d%%) " % round(time / parent_time * 100), end="")
-t_str = "%1.3e s" % time if root_time < 300 else datetime.timedelta(seconds=time)
+t_str = f"{time:1.3e} s" if root_time < 300 else datetime.timedelta(seconds=time)
print(f"Timer {(self._name).ljust(20 - 2*step + 7*(step == 0))}: {t_str}")
for child in self._children:
child.print_tree_sequential(step + 1, root_time, time)
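The timer.py hunk maps "%1.3e" onto the ":1.3e" format spec; the unchanged context line above it still uses "%3d%%" and is left in %-style by this commit. A sketch with a hypothetical elapsed time:

    time = 12.3456  # hypothetical elapsed seconds for illustration
    assert "%1.3e s" % time == f"{time:1.3e} s" == "1.235e+01 s"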
2 changes: 1 addition & 1 deletion tests/test_kernel_language.py
@@ -277,7 +277,7 @@ def kernel(particle, fieldset, time):

def kernel2(particle, fieldset, time):
tmp = 3
-print("%f" % (tmp))
+print(f"{tmp:f}")

pset.execute(kernel2, endtime=2.0, dt=1.0, verbose_progress=False)
out, err = capfd.readouterr()
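The test change is safe because "%f" on an int and ":f" in an f-string print the same six-decimal form, so the captured stdout comparison is unaffected:

    tmp = 3
    assert "%f" % tmp == f"{tmp:f}" == "3.000000"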
