STY: apply autofixes for RUF031
neutrinoceros authored and cphyc committed Dec 3, 2024
1 parent 3f9765b commit 6672c17
Showing 10 changed files with 57 additions and 66 deletions.
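For context, RUF031 flags redundant parentheses around tuples used as subscripts: in Python, obj[a, b] and obj[(a, b)] pass the same tuple key, so the autofix is purely stylistic and does not change behavior. The snippet below is an illustrative sketch, not part of the commit; the dict is a stand-in for a yt data object.

    data = {("gas", "density"): 1.0}
    # Both subscripts look up the same tuple key; RUF031 prefers the form
    # without the extra parentheses around the tuple.
    assert data[("gas", "density")] == data["gas", "density"]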
4 changes: 2 additions & 2 deletions yt/data_objects/tests/test_derived_quantities.py
@@ -68,7 +68,7 @@ def test_average():
("gas", "density"), ("gas", "cell_mass")
)
a_mean = (ad["gas", "density"] * ad["gas", "cell_mass"]).sum() / ad[
("gas", "cell_mass")
"gas", "cell_mass"
].sum()
assert_rel_equal(my_mean, a_mean, 12)

@@ -87,7 +87,7 @@ def test_standard_deviation():
("gas", "density"), ("gas", "cell_mass")
)
a_mean = (ad["gas", "density"] * ad["gas", "cell_mass"]).sum() / ad[
("gas", "cell_mass")
"gas", "cell_mass"
].sum()
assert_rel_equal(my_mean, a_mean, 12)
a_std = np.sqrt(
4 changes: 2 additions & 2 deletions yt/data_objects/tests/test_particle_trajectories_pytest.py
@@ -122,7 +122,7 @@ def dummy(pfilter, data):
@pytest.mark.parametrize("ptype", [None, "io"])
def test_default_field_tuple(particle_trajectories_test_dataset, ptype):
ds = particle_trajectories_test_dataset[0]
ids = ds.all_data()[("all", "particle_index")]
ids = ds.all_data()["all", "particle_index"]
trajs = particle_trajectories_test_dataset.particle_trajectories(
ids, ptype=ptype, suppress_logging=True
)
@@ -138,7 +138,7 @@ def test_default_field_tuple(particle_trajectories_test_dataset, ptype):
@pytest.mark.parametrize("ptype", [None, "io"])
def test_time_and_index(particle_trajectories_test_dataset, ptype):
ds = particle_trajectories_test_dataset[0]
ids = ds.all_data()[("all", "particle_index")]
ids = ds.all_data()["all", "particle_index"]
trajs = particle_trajectories_test_dataset.particle_trajectories(
ids, ptype=ptype, suppress_logging=True
)
10 changes: 5 additions & 5 deletions yt/data_objects/tests/test_rays.py
@@ -110,9 +110,9 @@ def test_ray_particle2():
# restricts you to 4 -- 5 digits precision
assert_equal(ray0["t"].shape, (1,))
assert_rel_equal(ray0["t"], np.array([0.5]), 5)
assert_rel_equal(ray0[("gas", "position")].v, np.array([[0.5, 0.5, 0.5]]), 5)
assert_rel_equal(ray0["gas", "position"].v, np.array([[0.5, 0.5, 0.5]]), 5)
dl0 = integrate_kernel(kernelfunc, b0, hsml0)
dl0 *= ray0[("gas", "mass")].v / ray0[("gas", "density")].v
dl0 *= ray0["gas", "mass"].v / ray0["gas", "density"].v
assert_rel_equal(ray0[("dts")].v, dl0 / len0, 4)

## Ray in the middle of the box:
@@ -133,10 +133,10 @@ def test_ray_particle2():
assert_equal(ray1["t"].shape, (2,))
assert_rel_equal(ray1["t"], np.array([0.25, 0.75]), 5)
assert_rel_equal(
ray1[("gas", "position")].v, np.array([[1.5, 0.5, 1.5], [1.5, 0.5, 2.5]]), 5
ray1["gas", "position"].v, np.array([[1.5, 0.5, 1.5], [1.5, 0.5, 2.5]]), 5
)
dl1 = integrate_kernel(kernelfunc, b1, hsml1)
dl1 *= ray1[("gas", "mass")].v / ray1[("gas", "density")].v
dl1 *= ray1["gas", "mass"].v / ray1["gas", "density"].v
assert_rel_equal(ray1[("dts")].v, dl1 / len1, 4)

## Ray missing all particles:
@@ -150,4 +150,4 @@ def test_ray_particle2():
ray2.field_data["dts"] = ray2.ds.arr(ray2._generate_container_field_sph("dts"))
assert_equal(ray2["t"].shape, (0,))
assert_equal(ray2["dts"].shape, (0,))
assert_equal(ray2[("gas", "position")].v.shape, (0, 3))
assert_equal(ray2["gas", "position"].v.shape, (0, 3))
42 changes: 21 additions & 21 deletions yt/frontends/amrex/tests/test_outputs.py
@@ -97,17 +97,17 @@ def test_nyx_particle_io():

grid = ds.index.grids[0]
npart_grid_0 = 7908 # read directly from the header
assert_equal(grid[("all", "particle_position_x")].size, npart_grid_0)
assert_equal(grid["all", "particle_position_x"].size, npart_grid_0)
assert_equal(grid["DM", "particle_position_y"].size, npart_grid_0)
assert_equal(grid["all", "particle_position_z"].size, npart_grid_0)

ad = ds.all_data()
npart = 32768 # read directly from the header
assert_equal(ad[("all", "particle_velocity_x")].size, npart)
assert_equal(ad["all", "particle_velocity_x"].size, npart)
assert_equal(ad["DM", "particle_velocity_y"].size, npart)
assert_equal(ad["all", "particle_velocity_z"].size, npart)

assert np.all(ad[("all", "particle_mass")] == ad[("all", "particle_mass")][0])
assert np.all(ad["all", "particle_mass"] == ad["all", "particle_mass"][0])

left_edge = ds.arr([0.0, 0.0, 0.0], "code_length")
right_edge = ds.arr([4.0, 4.0, 4.0], "code_length")
@@ -117,22 +117,22 @@ def test_nyx_particle_io():

assert np.all(
np.logical_and(
reg[("all", "particle_position_x")] <= right_edge[0],
reg[("all", "particle_position_x")] >= left_edge[0],
reg["all", "particle_position_x"] <= right_edge[0],
reg["all", "particle_position_x"] >= left_edge[0],
)
)

assert np.all(
np.logical_and(
reg[("all", "particle_position_y")] <= right_edge[1],
reg[("all", "particle_position_y")] >= left_edge[1],
reg["all", "particle_position_y"] <= right_edge[1],
reg["all", "particle_position_y"] >= left_edge[1],
)
)

assert np.all(
np.logical_and(
reg[("all", "particle_position_z")] <= right_edge[2],
reg[("all", "particle_position_z")] >= left_edge[2],
reg["all", "particle_position_z"] <= right_edge[2],
reg["all", "particle_position_z"] >= left_edge[2],
)
)

@@ -155,13 +155,13 @@ def test_castro_particle_io():

grid = ds.index.grids[2]
npart_grid_2 = 49 # read directly from the header
assert_equal(grid[("all", "particle_position_x")].size, npart_grid_2)
assert_equal(grid["all", "particle_position_x"].size, npart_grid_2)
assert_equal(grid["Tracer", "particle_position_y"].size, npart_grid_2)
assert_equal(grid["all", "particle_position_y"].size, npart_grid_2)

ad = ds.all_data()
npart = 49 # read directly from the header
assert_equal(ad[("all", "particle_velocity_x")].size, npart)
assert_equal(ad["all", "particle_velocity_x"].size, npart)
assert_equal(ad["Tracer", "particle_velocity_y"].size, npart)
assert_equal(ad["all", "particle_velocity_y"].size, npart)

@@ -173,15 +173,15 @@ def test_castro_particle_io():

assert np.all(
np.logical_and(
reg[("all", "particle_position_x")] <= right_edge[0],
reg[("all", "particle_position_x")] >= left_edge[0],
reg["all", "particle_position_x"] <= right_edge[0],
reg["all", "particle_position_x"] >= left_edge[0],
)
)

assert np.all(
np.logical_and(
reg[("all", "particle_position_y")] <= right_edge[1],
reg[("all", "particle_position_y")] >= left_edge[1],
reg["all", "particle_position_y"] <= right_edge[1],
reg["all", "particle_position_y"] >= left_edge[1],
)
)

@@ -265,22 +265,22 @@ def test_warpx_particle_io():

assert np.all(
np.logical_and(
reg[("all", "particle_position_x")] <= right_edge[0],
reg[("all", "particle_position_x")] >= left_edge[0],
reg["all", "particle_position_x"] <= right_edge[0],
reg["all", "particle_position_x"] >= left_edge[0],
)
)

assert np.all(
np.logical_and(
reg[("all", "particle_position_y")] <= right_edge[1],
reg[("all", "particle_position_y")] >= left_edge[1],
reg["all", "particle_position_y"] <= right_edge[1],
reg["all", "particle_position_y"] >= left_edge[1],
)
)

assert np.all(
np.logical_and(
reg[("all", "particle_position_z")] <= right_edge[2],
reg[("all", "particle_position_z")] >= left_edge[2],
reg["all", "particle_position_z"] <= right_edge[2],
reg["all", "particle_position_z"] >= left_edge[2],
)
)

4 changes: 2 additions & 2 deletions yt/frontends/parthenon/tests/test_outputs.py
@@ -50,8 +50,8 @@ def field_func(name):
# reading data of two fields and compare against each other (data is squared in output)
ad = ds.all_data()
assert_allclose(
ad[("parthenon", "one_minus_advected")] ** 2.0,
ad[("parthenon", "one_minus_advected_sq")],
ad["parthenon", "one_minus_advected"] ** 2.0,
ad["parthenon", "one_minus_advected_sq"],
)

# check if the peak is in the domain center (and at the highest refinement level)
2 changes: 1 addition & 1 deletion yt/frontends/stream/io.py
@@ -161,7 +161,7 @@ def _yield_coordinates(self, data_file, needed_ptype=None):
pos = np.column_stack(
[
self.fields[data_file.filename][
(ptype, f"particle_position_{ax}")
ptype, f"particle_position_{ax}"
]
for ax in "xyz"
]
2 changes: 1 addition & 1 deletion yt/frontends/stream/tests/test_stream_stretched.py
@@ -87,7 +87,7 @@ def test_cell_width_type(data_cell_widths_N16):
cell_widths=cell_widths,
)

- _ = ds.slice(0, ds.domain_center[0])[("stream", "density")]
+ _ = ds.slice(0, ds.domain_center[0])["stream", "density"]


def test_cell_width_dimensionality(data_cell_widths_N16):
2 changes: 1 addition & 1 deletion yt/geometry/coordinates/geographic_coordinates.py
@@ -160,7 +160,7 @@ def _dlatitude_to_dtheta(field, data):

def _longitude_to_phi(field, data):
# longitude runs from -180 to 180
lonvals = data[("index", "longitude")]
lonvals = data["index", "longitude"]
neglons = lonvals < 0.0
if np.any(neglons):
lonvals[neglons] = lonvals[neglons] + 360.0
51 changes: 21 additions & 30 deletions yt/geometry/coordinates/tests/test_sph_pixelization_pytestonly.py
@@ -113,7 +113,7 @@ def makemasses(i, j, k):
center=center,
data_source=source,
)
img = prj.frb.data[("gas", "density")]
img = prj.frb.data["gas", "density"]
if weighted:
expected_out = np.zeros(
(
@@ -240,7 +240,7 @@ def makemasses(i, j, k):
buff_size=(outgridsize,) * 2,
center=(_center, "cm"),
)
img = slc.frb.data[("gas", "density")]
img = slc.frb.data["gas", "density"]

# center is same in non-projection coords
if axis == 0:
@@ -272,9 +272,9 @@ def makemasses(i, j, k):
ad = ds.all_data()
sphcoords = np.array(
[
(ad[("gas", "x")]).to("cm"),
(ad[("gas", "y")]).to("cm"),
(ad[("gas", "z")]).to("cm"),
(ad["gas", "x"]).to("cm"),
(ad["gas", "y"]).to("cm"),
(ad["gas", "z"]).to("cm"),
]
).T
# print("sphcoords:")
@@ -289,15 +289,12 @@ def makemasses(i, j, k):
)
# print("dists <= 1:")
# print(dists <= 1)
sml = (ad[("gas", "smoothing_length")]).to("cm")
sml = (ad["gas", "smoothing_length"]).to("cm")
normkern = cubicspline_python(dists / sml.v[np.newaxis, :])
- sphcontr = normkern / sml[np.newaxis, :] ** 3 * ad[("gas", "mass")]
+ sphcontr = normkern / sml[np.newaxis, :] ** 3 * ad["gas", "mass"]
contsum = np.sum(sphcontr, axis=1)
sphweights = (
- normkern
- / sml[np.newaxis, :] ** 3
- * ad[("gas", "mass")]
- / ad[("gas", "density")]
+ normkern / sml[np.newaxis, :] ** 3 * ad["gas", "mass"] / ad["gas", "density"]
)
weights = np.sum(sphweights, axis=1)
nzeromask = np.logical_not(weights == 0)
@@ -406,7 +403,7 @@ def makemasses(i, j, k):
center=(_center, "cm"),
north_vector=e2dir,
)
img = slc.frb.data[("gas", "density")]
img = slc.frb.data["gas", "density"]

# center is same in x/y (e3dir/e2dir)
gridcenx = (
@@ -434,9 +431,9 @@ def makemasses(i, j, k):
ad = ds.all_data()
sphcoords = np.array(
[
(ad[("gas", "x")]).to("cm"),
(ad[("gas", "y")]).to("cm"),
(ad[("gas", "z")]).to("cm"),
(ad["gas", "x"]).to("cm"),
(ad["gas", "y"]).to("cm"),
(ad["gas", "z"]).to("cm"),
]
).T
dists = distancematrix(
Expand All @@ -445,15 +442,12 @@ def makemasses(i, j, k):
periodic=(periodic,) * 3,
periods=np.array([3.0, 3.0, 3.0]),
)
sml = (ad[("gas", "smoothing_length")]).to("cm")
sml = (ad["gas", "smoothing_length"]).to("cm")
normkern = cubicspline_python(dists / sml.v[np.newaxis, :])
- sphcontr = normkern / sml[np.newaxis, :] ** 3 * ad[("gas", "mass")]
+ sphcontr = normkern / sml[np.newaxis, :] ** 3 * ad["gas", "mass"]
contsum = np.sum(sphcontr, axis=1)
sphweights = (
- normkern
- / sml[np.newaxis, :] ** 3
- * ad[("gas", "mass")]
- / ad[("gas", "density")]
+ normkern / sml[np.newaxis, :] ** 3 * ad["gas", "mass"] / ad["gas", "density"]
)
weights = np.sum(sphweights, axis=1)
nzeromask = np.logical_not(weights == 0)
@@ -509,9 +503,9 @@ def test_sph_grid(
ad = ds.all_data()
sphcoords = np.array(
[
(ad[("gas", "x")]).to("cm"),
(ad[("gas", "y")]).to("cm"),
(ad[("gas", "z")]).to("cm"),
(ad["gas", "x"]).to("cm"),
(ad["gas", "y"]).to("cm"),
(ad["gas", "z"]).to("cm"),
]
).T
gridx, gridy, gridz = np.meshgrid(xcens, ycens, zcens, indexing="ij")
@@ -522,15 +516,12 @@ def test_sph_grid(
gridcoords = np.array([gridx, gridy, gridz]).T
periods = bbox[:, 1] - bbox[:, 0]
dists = distancematrix(gridcoords, sphcoords, periodic=periodic, periods=periods)
sml = (ad[("gas", "smoothing_length")]).to("cm")
sml = (ad["gas", "smoothing_length"]).to("cm")
normkern = cubicspline_python(dists / sml.v[np.newaxis, :])
- sphcontr = normkern / sml[np.newaxis, :] ** 3 * ad[("gas", "mass")]
+ sphcontr = normkern / sml[np.newaxis, :] ** 3 * ad["gas", "mass"]
contsum = np.sum(sphcontr, axis=1)
sphweights = (
- normkern
- / sml[np.newaxis, :] ** 3
- * ad[("gas", "mass")]
- / ad[("gas", "density")]
+ normkern / sml[np.newaxis, :] ** 3 * ad["gas", "mass"] / ad["gas", "density"]
)
weights = np.sum(sphweights, axis=1)
nzeromask = np.logical_not(weights == 0)
@@ -125,7 +125,7 @@ def makemasses(i, j, k):
north_vector=northvector,
depth=depth,
)
img = prj.frb.data[("gas", "density")]
img = prj.frb.data["gas", "density"]
if weighted:
# periodic shifts will modify the (relative) dl values a bit
expected_out = np.zeros(
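The changes above are consistent with an automated fix pass over the repository. Assuming the standard ruff CLI, a fix of this kind could be reproduced with something along the lines of:

    ruff check --select RUF031 --fix .

This is an assumed invocation, not recorded in the commit; depending on the ruff version in use at the time, the rule may also have required the --preview flag.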
