Skip to content

Commit ebadee6

Browse files
convert-repo authored and neutrinoceros committed
black pass
1 parent 7edfcee commit ebadee6

File tree

586 files changed

+43102
-28287
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

586 files changed

+43102
-28287
lines changed

benchmarks/benchmarks/benchmarks.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@
33
import numpy as np
44
from yt import YTArray, YTQuantity
55

6+
67
def time_quantity_init_scalar1():
78
3.0 * YTQuantity(1, "m/s")
89

benchmarks/benchmarks/data_objects.py

Lines changed: 24 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -1,15 +1,17 @@
11
import numpy as np
22
import yt
33

4-
if yt.__version__.startswith('2'):
4+
if yt.__version__.startswith("2"):
55
from yt.mods import load, ColorTransferFunction
66
# else:
77
# from yt.visualization.volume_rendering.old_camera import Camera
88

9+
910
class Suite:
1011
dsname = "HiresIsolatedGalaxy/DD0044/DD0044"
12+
1113
def setup(self):
12-
if yt.__version__.startswith('3'):
14+
if yt.__version__.startswith("3"):
1315
self.ds = yt.load(self.dsname)
1416
self.ad = self.ds.all_data()
1517
self.field_name = "density"
@@ -19,46 +21,56 @@ def setup(self):
1921
self.field_name = "Density"
2022
# Warmup hdd
2123
self.ad[self.field_name]
22-
if yt.__version__.startswith('3'):
23-
mi, ma = self.ad.quantities['Extrema'](self.field_name)
24-
self.tf = yt.ColorTransferFunction((np.log10(mi)+1, np.log10(ma)))
24+
if yt.__version__.startswith("3"):
25+
mi, ma = self.ad.quantities["Extrema"](self.field_name)
26+
self.tf = yt.ColorTransferFunction((np.log10(mi) + 1, np.log10(ma)))
2527
else:
26-
mi, ma = self.ad.quantities['Extrema'](self.field_name)[0]
27-
self.tf = ColorTransferFunction((np.log10(mi)+1, np.log10(ma)))
28+
mi, ma = self.ad.quantities["Extrema"](self.field_name)[0]
29+
self.tf = ColorTransferFunction((np.log10(mi) + 1, np.log10(ma)))
2830
self.tf.add_layers(5, w=0.02, colormap="spectral")
2931
self.c = [0.5, 0.5, 0.5]
3032
self.L = [0.5, 0.2, 0.7]
3133
self.W = 1.0
3234
self.Npixels = 512
3335

34-
if yt.__version__.startswith('3'):
36+
if yt.__version__.startswith("3"):
37+
3538
def time_load_all_data(self):
3639
dd = self.ds.all_data()
3740
dd[self.field_name]
41+
3842
else:
43+
3944
def time_load_all_data(self):
4045
self.ds.h.all_data()
4146
dd[self.field_name]
4247

4348
def time_extrema_quantities(self):
44-
self.ad.quantities['Extrema'](self.field_name)
49+
self.ad.quantities["Extrema"](self.field_name)
50+
51+
if yt.__version__.startswith("3"):
4552

46-
if yt.__version__.startswith('3'):
4753
def time_alldata_projection(self):
4854
self.ds.proj(self.field_name, 0)
55+
4956
else:
57+
5058
def time_alldata_projection(self):
51-
self.ds.h.proj(0, self.field_name)
59+
self.ds.h.proj(0, self.field_name)
60+
61+
if yt.__version__.startswith("3"):
5262

53-
if yt.__version__.startswith('3'):
5463
def time_slice(self):
5564
slc = self.ds.slice(0, 0.5)
5665
slc[self.field_name]
66+
5767
else:
68+
5869
def time_slice(self):
5970
slc = self.ds.h.slice(0, 0.5, self.field_name)
6071
slc[self.field_name]
6172

73+
6274
# if yt.__version__.startswith('3'):
6375
# def command(self):
6476
# cam = Camera(self.c, self.L, self.W, self.Npixels, self.tf, ds=self.ds)

benchmarks/benchmarks/large_tipsy.py

Lines changed: 28 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -2,18 +2,24 @@
22
import yt
33
from yt.utilities.answer_testing.framework import data_dir_load
44

5+
56
class PKDGravTipsySuite:
67
dsname = "halo1e11_run1.00400/halo1e11_run1.00400"
78
timeout = 360.0
9+
810
def setup(self):
9-
cosmology_parameters = dict(current_redshift = 0.0,
10-
omega_lambda = 0.728,
11-
omega_matter = 0.272,
12-
hubble_constant = 0.702)
13-
kwargs = dict(field_dtypes = {"Coordinates": "d"},
14-
cosmology_parameters = cosmology_parameters,
15-
unit_base = {'length': (1.0/60.0, "Mpccm/h")},
16-
n_ref = 64)
11+
cosmology_parameters = dict(
12+
current_redshift=0.0,
13+
omega_lambda=0.728,
14+
omega_matter=0.272,
15+
hubble_constant=0.702,
16+
)
17+
kwargs = dict(
18+
field_dtypes={"Coordinates": "d"},
19+
cosmology_parameters=cosmology_parameters,
20+
unit_base={"length": (1.0 / 60.0, "Mpccm/h")},
21+
n_ref=64,
22+
)
1723
self.ds = data_dir_load(self.dsname, yt.TipsyDataset, (), kwargs)
1824

1925
def time_all_particles(self):
@@ -36,17 +42,23 @@ def time_particle_quantities(self):
3642
dd = self.ds.all_data()
3743
dd.quantities.extrema("particle_mass")
3844
dd.quantities.extrema("particle_velocity_magnitude")
39-
dd.quantities.extrema(["particle_velocity_%s" % ax for ax in 'xyz'])
45+
dd.quantities.extrema(["particle_velocity_%s" % ax for ax in "xyz"])
46+
4047

4148
class GasolineTipsySuite(PKDGravTipsySuite):
4249
dsname = "agora_1e11.00400/agora_1e11.00400"
4350
timeout = 360.0
51+
4452
def setup(self):
45-
cosmology_parameters = dict(current_redshift = 0.0,
46-
omega_lambda = 0.728,
47-
omega_matter = 0.272,
48-
hubble_constant = 0.702)
49-
kwargs = dict(cosmology_parameters = cosmology_parameters,
50-
unit_base = {'length': (1.0/60.0, "Mpccm/h")},
51-
n_ref = 64)
53+
cosmology_parameters = dict(
54+
current_redshift=0.0,
55+
omega_lambda=0.728,
56+
omega_matter=0.272,
57+
hubble_constant=0.702,
58+
)
59+
kwargs = dict(
60+
cosmology_parameters=cosmology_parameters,
61+
unit_base={"length": (1.0 / 60.0, "Mpccm/h")},
62+
n_ref=64,
63+
)
5264
self.ds = data_dir_load(self.dsname, yt.TipsyDataset, (), kwargs)

benchmarks/benchmarks/small_enzo.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,10 @@
11
import numpy as np
22
import yt
33

4+
45
class SmallEnzoSuite:
56
dsname = "IsolatedGalaxy/galaxy0030/galaxy0030"
7+
68
def setup(self):
79
self.ds = yt.load(self.dsname)
810

@@ -38,7 +40,7 @@ def time_particle_quantities(self):
3840
dd = self.ds.all_data()
3941
dd.quantities.extrema("particle_mass")
4042
dd.quantities.extrema("particle_velocity_magnitude")
41-
dd.quantities.extrema(["particle_velocity_%s" % ax for ax in 'xyz'])
43+
dd.quantities.extrema(["particle_velocity_%s" % ax for ax in "xyz"])
4244

4345
def time_gas_quantities(self):
4446
dd = self.ds.all_data()

benchmarks/benchmarks/small_flash.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,10 @@
11
import numpy as np
22
import yt
33

4+
45
class SmallFlashSuite:
56
dsname = "GasSloshingLowRes/sloshing_low_res_hdf5_plt_cnt_0690"
7+
68
def setup(self):
79
self.ds = yt.load(self.dsname)
810

@@ -21,8 +23,7 @@ def time_project_weight(self):
2123
proj = self.ds.proj("density", 0, "density")
2224

2325
def time_ghostzones(self):
24-
dd = self.ds.sphere(self.ds.domain_center,
25-
self.ds.domain_width[0] * 0.25)
26+
dd = self.ds.sphere(self.ds.domain_center, self.ds.domain_width[0] * 0.25)
2627
dd["velocity_divergence"]
2728

2829
def time_gas_quantities(self):

benchmarks/benchmarks/small_gadget.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
import numpy as np
22
import yt
33

4+
45
class SmallGadgetSuite:
56
dsname = "snapshot_033/snap_033.0.hdf5"
67
timeout = 360.0
@@ -32,4 +33,4 @@ def time_particle_quantities(self):
3233
dd = self.ds.all_data()
3334
dd.quantities.extrema("particle_mass")
3435
dd.quantities.extrema("particle_velocity_magnitude")
35-
dd.quantities.extrema(["particle_velocity_%s" % ax for ax in 'xyz'])
36+
dd.quantities.extrema(["particle_velocity_%s" % ax for ax in "xyz"])

benchmarks/benchmarks/small_ramses.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,10 @@
11
import numpy as np
22
import yt
33

4+
45
class SmallRAMSESSuite:
56
dsname = "output_00080/info_00080.txt"
7+
68
def setup(self):
79
self.ds = yt.load(self.dsname)
810

@@ -34,7 +36,7 @@ def time_particle_quantities(self):
3436
dd = self.ds.all_data()
3537
dd.quantities.extrema("particle_mass")
3638
dd.quantities.extrema("particle_velocity_magnitude")
37-
dd.quantities.extrema(["particle_velocity_%s" % ax for ax in 'xyz'])
39+
dd.quantities.extrema(["particle_velocity_%s" % ax for ax in "xyz"])
3840

3941
def time_gas_quantities(self):
4042
dd = self.ds.all_data()

conftest.py

Lines changed: 38 additions & 35 deletions
Original file line numberDiff line numberDiff line change
@@ -22,32 +22,25 @@
2222
answer_files = {}
2323

2424
# List of answer files
25-
answer_file_list = 'tests/tests.yaml'
26-
answer_dir = os.path.join(ytcfg.get('yt', 'test_data_dir'), 'answers')
25+
answer_file_list = "tests/tests.yaml"
26+
answer_dir = os.path.join(ytcfg.get("yt", "test_data_dir"), "answers")
2727

2828

2929
def pytest_addoption(parser):
3030
"""
3131
Lets options be passed to test functions.
3232
"""
3333
parser.addoption(
34-
"--with-answer-testing",
35-
action="store_true",
36-
default=False,
34+
"--with-answer-testing", action="store_true", default=False,
3735
)
3836
parser.addoption(
39-
"--answer-store",
40-
action="store_true",
41-
default=False,
37+
"--answer-store", action="store_true", default=False,
4238
)
4339
parser.addoption(
44-
"--answer-big-data",
45-
action="store_true",
46-
default=False,
40+
"--answer-big-data", action="store_true", default=False,
4741
)
4842
parser.addoption(
49-
"--save-answer-arrays",
50-
action="store_true",
43+
"--save-answer-arrays", action="store_true",
5144
)
5245

5346

@@ -62,12 +55,13 @@ def pytest_configure(config):
6255
os.mkdir(answer_dir)
6356
# Read the list of answer test classes and their associated answer
6457
# file
65-
with open(answer_file_list, 'r') as f:
58+
with open(answer_file_list, "r") as f:
6659
answer_files = yaml.safe_load(f)
6760
# Register custom marks for answer tests and big data
68-
config.addinivalue_line('markers', 'answer_test: Run the answer tests.')
69-
config.addinivalue_line('markers', 'big_data: Run answer tests that require'
70-
' large data files.')
61+
config.addinivalue_line("markers", "answer_test: Run the answer tests.")
62+
config.addinivalue_line(
63+
"markers", "big_data: Run answer tests that require" " large data files."
64+
)
7165

7266

7367
def pytest_collection_modifyitems(config, items):
@@ -81,21 +75,23 @@ def pytest_collection_modifyitems(config, items):
8175
for item in items:
8276
# If it's an answer test and the appropriate CL option hasn't
8377
# been set, skip it
84-
if "answer_test" in item.keywords and not config.getoption("--with-answer-testing"):
78+
if "answer_test" in item.keywords and not config.getoption(
79+
"--with-answer-testing"
80+
):
8581
item.add_marker(skip_answer)
8682
# If it's an answer test that requires big data and the CL
8783
# option hasn't been set, skip it
8884
if "big_data" in item.keywords and not config.getoption("--answer-big-data"):
8985
item.add_marker(skip_big)
9086

9187

92-
@pytest.fixture(scope='function')
88+
@pytest.fixture(scope="function")
9389
def temp_dir():
9490
r"""
9591
Creates a temporary directory needed by certain tests.
9692
"""
9793
curdir = os.getcwd()
98-
if int(os.environ.get('GENERATE_YTDATA', 0)):
94+
if int(os.environ.get("GENERATE_YTDATA", 0)):
9995
tmpdir = os.getcwd()
10096
else:
10197
tmpdir = tempfile.mkdtemp()
@@ -106,7 +102,7 @@ def temp_dir():
106102
shutil.rmtree(tmpdir)
107103

108104

109-
@pytest.fixture(scope='class')
105+
@pytest.fixture(scope="class")
110106
def answer_file(request):
111107
r"""
112108
Assigns the name of the appropriate answer file as an attribute of
@@ -145,12 +141,11 @@ def answer_file(request):
145141
answer_file = answer_files[request.cls.__name__]
146142
# Make sure we're not overwriting an existing answer set
147143
if os.path.isfile(os.path.join(answer_dir, answer_file)):
148-
if request.config.getoption('--answer-store'):
149-
raise FileExistsError("Error, attempting to overwrite "
144+
if request.config.getoption("--answer-store"):
145+
raise FileExistsError(
146+
"Error, attempting to overwrite "
150147
"answer file {}. Either specify a new version or "
151-
"set the `--force-override-answers` option".format(
152-
answer_file
153-
)
148+
"set the `--force-override-answers` option".format(answer_file)
154149
)
155150
else:
156151
assert False
@@ -167,13 +162,13 @@ def _param_list(request):
167162
# co_varnames is all of the variable names local to the function
168163
# starting with self, then the passed args, then the vars defined
169164
# in the function body. This excludes fixture names
170-
args = func.__code__.co_varnames[1:func.__code__.co_argcount]
165+
args = func.__code__.co_varnames[1 : func.__code__.co_argcount]
171166
# funcargs includes the names and values of all arguments, including
172167
# fixtures, so we use args to weed out the fixtures. Need to have
173168
# special treatment of the data files loaded in fixtures for the
174169
# frontends
175170
for key, val in request.node.funcargs.items():
176-
if key in args and not key.startswith('ds_'):
171+
if key in args and not key.startswith("ds_"):
177172
test_params[key] = val
178173
# Convert python-specific data objects (such as tuples) to a more
179174
# io-friendly format (in order to not have python-specific anchors
@@ -182,7 +177,7 @@ def _param_list(request):
182177
return test_params
183178

184179

185-
@pytest.fixture(scope='function')
180+
@pytest.fixture(scope="function")
186181
def hashing(request):
187182
r"""
188183
Handles initialization, generation, and saving of answer test
@@ -244,12 +239,20 @@ def hashing(request):
244239
# Add the other test parameters
245240
hashes.update(params)
246241
# Add the function name as the "master" key to the hashes dict
247-
hashes = {request.node.name : hashes}
242+
hashes = {request.node.name: hashes}
248243
# Either save or compare
249-
utils._handle_hashes(answer_dir, request.cls.answer_file, hashes,
250-
request.config.getoption('--answer-store'))
251-
if request.config.getoption('--save-answer-arrays'):
244+
utils._handle_hashes(
245+
answer_dir,
246+
request.cls.answer_file,
247+
hashes,
248+
request.config.getoption("--answer-store"),
249+
)
250+
if request.config.getoption("--save-answer-arrays"):
252251
# answer_file has .yaml appended to it, but here we're saving
253252
# the arrays as .npy files, so we remove the .yaml extension
254-
utils._save_arrays(answer_dir, request.cls.answer_file.split('.')[0],
255-
request.cls.hashes, request.config.getoption('--answer-store'))
253+
utils._save_arrays(
254+
answer_dir,
255+
request.cls.answer_file.split(".")[0],
256+
request.cls.hashes,
257+
request.config.getoption("--answer-store"),
258+
)

0 commit comments

Comments (0)