diff --git a/.github/workflows/wheels.yaml b/.github/workflows/wheels.yaml
index edd3baa636c..714c8ba1803 100644
--- a/.github/workflows/wheels.yaml
+++ b/.github/workflows/wheels.yaml
@@ -33,7 +33,7 @@ jobs:
         uses: actions/checkout@v4

       - name: Build wheels for CPython
-        uses: pypa/cibuildwheel@v2.21.3
+        uses: pypa/cibuildwheel@v2.22.0
         with:
           output-dir: dist

@@ -124,7 +124,7 @@ jobs:
           merge-multiple: true

       - name: Publish to PyPI
-        uses: pypa/gh-action-pypi-publish@v1.11.0
+        uses: pypa/gh-action-pypi-publish@v1.12.2
         with:
           user: __token__
           password: ${{ secrets.pypi_token }}
diff --git a/tests/ci_install.sh b/tests/ci_install.sh
index 57046585182..cb1daa54601 100644
--- a/tests/ci_install.sh
+++ b/tests/ci_install.sh
@@ -17,6 +17,11 @@ if [[ ${dependencies} == "full" || ${dependencies} == "cartopy" ]]; then
         osx|macOS)
             sudo mkdir -p /usr/local/man
             sudo chown -R "${USER}:admin" /usr/local/man
+            # uninstalling pkg-config to workaround a bug in macOS image
+            # https://github.com/Homebrew/homebrew-core/pull/198691#issuecomment-2495500991
+            # this can be cleaned-up once the following patch is released:
+            # https://github.com/actions/runner-images/pull/11015
+            HOMEBREW_NO_AUTO_UPDATE=1 brew uninstall pkg-config@0.29.2 || true
             HOMEBREW_NO_AUTO_UPDATE=1 brew install hdf5 open-mpi netcdf ccache macfuse
             ;;
     esac
diff --git a/yt/data_objects/tests/test_derived_quantities.py b/yt/data_objects/tests/test_derived_quantities.py
index 35ac7283d42..a47680b1389 100644
--- a/yt/data_objects/tests/test_derived_quantities.py
+++ b/yt/data_objects/tests/test_derived_quantities.py
@@ -68,7 +68,7 @@ def test_average():
             ("gas", "density"), ("gas", "cell_mass")
         )
         a_mean = (ad["gas", "density"] * ad["gas", "cell_mass"]).sum() / ad[
-            ("gas", "cell_mass")
+            "gas", "cell_mass"
         ].sum()
         assert_rel_equal(my_mean, a_mean, 12)

@@ -87,7 +87,7 @@ def test_standard_deviation():
             ("gas", "density"), ("gas", "cell_mass")
         )
         a_mean = (ad["gas", "density"] * ad["gas", "cell_mass"]).sum() / ad[
-            ("gas", "cell_mass")
+            "gas", "cell_mass"
         ].sum()
         assert_rel_equal(my_mean, a_mean, 12)
         a_std = np.sqrt(
diff --git a/yt/data_objects/tests/test_particle_trajectories_pytest.py b/yt/data_objects/tests/test_particle_trajectories_pytest.py
index dbeda48fefe..9fa30049820 100644
--- a/yt/data_objects/tests/test_particle_trajectories_pytest.py
+++ b/yt/data_objects/tests/test_particle_trajectories_pytest.py
@@ -122,7 +122,7 @@ def dummy(pfilter, data):
 @pytest.mark.parametrize("ptype", [None, "io"])
 def test_default_field_tuple(particle_trajectories_test_dataset, ptype):
     ds = particle_trajectories_test_dataset[0]
-    ids = ds.all_data()[("all", "particle_index")]
+    ids = ds.all_data()["all", "particle_index"]
     trajs = particle_trajectories_test_dataset.particle_trajectories(
         ids, ptype=ptype, suppress_logging=True
     )
@@ -138,7 +138,7 @@ def test_default_field_tuple(particle_trajectories_test_dataset, ptype):
 @pytest.mark.parametrize("ptype", [None, "io"])
 def test_time_and_index(particle_trajectories_test_dataset, ptype):
     ds = particle_trajectories_test_dataset[0]
-    ids = ds.all_data()[("all", "particle_index")]
+    ids = ds.all_data()["all", "particle_index"]
     trajs = particle_trajectories_test_dataset.particle_trajectories(
         ids, ptype=ptype, suppress_logging=True
     )
diff --git a/yt/data_objects/tests/test_rays.py b/yt/data_objects/tests/test_rays.py
index 3dcc2e97c83..7bafebca39d 100644
--- a/yt/data_objects/tests/test_rays.py
+++ b/yt/data_objects/tests/test_rays.py
@@ -110,9 +110,9 @@ def test_ray_particle2():
     # restricts you to 4 -- 5 digits precision
assert_equal(ray0["t"].shape, (1,)) assert_rel_equal(ray0["t"], np.array([0.5]), 5) - assert_rel_equal(ray0[("gas", "position")].v, np.array([[0.5, 0.5, 0.5]]), 5) + assert_rel_equal(ray0["gas", "position"].v, np.array([[0.5, 0.5, 0.5]]), 5) dl0 = integrate_kernel(kernelfunc, b0, hsml0) - dl0 *= ray0[("gas", "mass")].v / ray0[("gas", "density")].v + dl0 *= ray0["gas", "mass"].v / ray0["gas", "density"].v assert_rel_equal(ray0[("dts")].v, dl0 / len0, 4) ## Ray in the middle of the box: @@ -133,10 +133,10 @@ def test_ray_particle2(): assert_equal(ray1["t"].shape, (2,)) assert_rel_equal(ray1["t"], np.array([0.25, 0.75]), 5) assert_rel_equal( - ray1[("gas", "position")].v, np.array([[1.5, 0.5, 1.5], [1.5, 0.5, 2.5]]), 5 + ray1["gas", "position"].v, np.array([[1.5, 0.5, 1.5], [1.5, 0.5, 2.5]]), 5 ) dl1 = integrate_kernel(kernelfunc, b1, hsml1) - dl1 *= ray1[("gas", "mass")].v / ray1[("gas", "density")].v + dl1 *= ray1["gas", "mass"].v / ray1["gas", "density"].v assert_rel_equal(ray1[("dts")].v, dl1 / len1, 4) ## Ray missing all particles: @@ -150,4 +150,4 @@ def test_ray_particle2(): ray2.field_data["dts"] = ray2.ds.arr(ray2._generate_container_field_sph("dts")) assert_equal(ray2["t"].shape, (0,)) assert_equal(ray2["dts"].shape, (0,)) - assert_equal(ray2[("gas", "position")].v.shape, (0, 3)) + assert_equal(ray2["gas", "position"].v.shape, (0, 3)) diff --git a/yt/frontends/amrex/tests/test_outputs.py b/yt/frontends/amrex/tests/test_outputs.py index bfde30d0fb6..14e1031243b 100644 --- a/yt/frontends/amrex/tests/test_outputs.py +++ b/yt/frontends/amrex/tests/test_outputs.py @@ -97,17 +97,17 @@ def test_nyx_particle_io(): grid = ds.index.grids[0] npart_grid_0 = 7908 # read directly from the header - assert_equal(grid[("all", "particle_position_x")].size, npart_grid_0) + assert_equal(grid["all", "particle_position_x"].size, npart_grid_0) assert_equal(grid["DM", "particle_position_y"].size, npart_grid_0) assert_equal(grid["all", "particle_position_z"].size, npart_grid_0) ad = ds.all_data() npart = 32768 # read directly from the header - assert_equal(ad[("all", "particle_velocity_x")].size, npart) + assert_equal(ad["all", "particle_velocity_x"].size, npart) assert_equal(ad["DM", "particle_velocity_y"].size, npart) assert_equal(ad["all", "particle_velocity_z"].size, npart) - assert np.all(ad[("all", "particle_mass")] == ad[("all", "particle_mass")][0]) + assert np.all(ad["all", "particle_mass"] == ad["all", "particle_mass"][0]) left_edge = ds.arr([0.0, 0.0, 0.0], "code_length") right_edge = ds.arr([4.0, 4.0, 4.0], "code_length") @@ -117,22 +117,22 @@ def test_nyx_particle_io(): assert np.all( np.logical_and( - reg[("all", "particle_position_x")] <= right_edge[0], - reg[("all", "particle_position_x")] >= left_edge[0], + reg["all", "particle_position_x"] <= right_edge[0], + reg["all", "particle_position_x"] >= left_edge[0], ) ) assert np.all( np.logical_and( - reg[("all", "particle_position_y")] <= right_edge[1], - reg[("all", "particle_position_y")] >= left_edge[1], + reg["all", "particle_position_y"] <= right_edge[1], + reg["all", "particle_position_y"] >= left_edge[1], ) ) assert np.all( np.logical_and( - reg[("all", "particle_position_z")] <= right_edge[2], - reg[("all", "particle_position_z")] >= left_edge[2], + reg["all", "particle_position_z"] <= right_edge[2], + reg["all", "particle_position_z"] >= left_edge[2], ) ) @@ -155,13 +155,13 @@ def test_castro_particle_io(): grid = ds.index.grids[2] npart_grid_2 = 49 # read directly from the header - assert_equal(grid[("all", 
"particle_position_x")].size, npart_grid_2) + assert_equal(grid["all", "particle_position_x"].size, npart_grid_2) assert_equal(grid["Tracer", "particle_position_y"].size, npart_grid_2) assert_equal(grid["all", "particle_position_y"].size, npart_grid_2) ad = ds.all_data() npart = 49 # read directly from the header - assert_equal(ad[("all", "particle_velocity_x")].size, npart) + assert_equal(ad["all", "particle_velocity_x"].size, npart) assert_equal(ad["Tracer", "particle_velocity_y"].size, npart) assert_equal(ad["all", "particle_velocity_y"].size, npart) @@ -173,15 +173,15 @@ def test_castro_particle_io(): assert np.all( np.logical_and( - reg[("all", "particle_position_x")] <= right_edge[0], - reg[("all", "particle_position_x")] >= left_edge[0], + reg["all", "particle_position_x"] <= right_edge[0], + reg["all", "particle_position_x"] >= left_edge[0], ) ) assert np.all( np.logical_and( - reg[("all", "particle_position_y")] <= right_edge[1], - reg[("all", "particle_position_y")] >= left_edge[1], + reg["all", "particle_position_y"] <= right_edge[1], + reg["all", "particle_position_y"] >= left_edge[1], ) ) @@ -265,22 +265,22 @@ def test_warpx_particle_io(): assert np.all( np.logical_and( - reg[("all", "particle_position_x")] <= right_edge[0], - reg[("all", "particle_position_x")] >= left_edge[0], + reg["all", "particle_position_x"] <= right_edge[0], + reg["all", "particle_position_x"] >= left_edge[0], ) ) assert np.all( np.logical_and( - reg[("all", "particle_position_y")] <= right_edge[1], - reg[("all", "particle_position_y")] >= left_edge[1], + reg["all", "particle_position_y"] <= right_edge[1], + reg["all", "particle_position_y"] >= left_edge[1], ) ) assert np.all( np.logical_and( - reg[("all", "particle_position_z")] <= right_edge[2], - reg[("all", "particle_position_z")] >= left_edge[2], + reg["all", "particle_position_z"] <= right_edge[2], + reg["all", "particle_position_z"] >= left_edge[2], ) ) diff --git a/yt/frontends/parthenon/tests/test_outputs.py b/yt/frontends/parthenon/tests/test_outputs.py index 2737b2cce46..6c5e0ee7db2 100644 --- a/yt/frontends/parthenon/tests/test_outputs.py +++ b/yt/frontends/parthenon/tests/test_outputs.py @@ -50,8 +50,8 @@ def field_func(name): # reading data of two fields and compare against each other (data is squared in output) ad = ds.all_data() assert_allclose( - ad[("parthenon", "one_minus_advected")] ** 2.0, - ad[("parthenon", "one_minus_advected_sq")], + ad["parthenon", "one_minus_advected"] ** 2.0, + ad["parthenon", "one_minus_advected_sq"], ) # check if the peak is in the domain center (and at the highest refinement level) diff --git a/yt/frontends/stream/io.py b/yt/frontends/stream/io.py index 3d422992e53..d57da3b576c 100644 --- a/yt/frontends/stream/io.py +++ b/yt/frontends/stream/io.py @@ -161,7 +161,7 @@ def _yield_coordinates(self, data_file, needed_ptype=None): pos = np.column_stack( [ self.fields[data_file.filename][ - (ptype, f"particle_position_{ax}") + ptype, f"particle_position_{ax}" ] for ax in "xyz" ] diff --git a/yt/frontends/stream/tests/test_stream_stretched.py b/yt/frontends/stream/tests/test_stream_stretched.py index 2b4b3dded40..b93f0bc199d 100644 --- a/yt/frontends/stream/tests/test_stream_stretched.py +++ b/yt/frontends/stream/tests/test_stream_stretched.py @@ -87,7 +87,7 @@ def test_cell_width_type(data_cell_widths_N16): cell_widths=cell_widths, ) - _ = ds.slice(0, ds.domain_center[0])[("stream", "density")] + _ = ds.slice(0, ds.domain_center[0])["stream", "density"] def test_cell_width_dimensionality(data_cell_widths_N16): 
diff --git a/yt/geometry/coordinates/geographic_coordinates.py b/yt/geometry/coordinates/geographic_coordinates.py
index 50cb4841f6f..036a28ac757 100644
--- a/yt/geometry/coordinates/geographic_coordinates.py
+++ b/yt/geometry/coordinates/geographic_coordinates.py
@@ -160,7 +160,7 @@ def _dlatitude_to_dtheta(field, data):

         def _longitude_to_phi(field, data):
             # longitude runs from -180 to 180
-            lonvals = data[("index", "longitude")]
+            lonvals = data["index", "longitude"]
             neglons = lonvals < 0.0
             if np.any(neglons):
                 lonvals[neglons] = lonvals[neglons] + 360.0
diff --git a/yt/geometry/coordinates/tests/test_sph_pixelization_pytestonly.py b/yt/geometry/coordinates/tests/test_sph_pixelization_pytestonly.py
index 29dcf63f0dd..ef12dac8242 100644
--- a/yt/geometry/coordinates/tests/test_sph_pixelization_pytestonly.py
+++ b/yt/geometry/coordinates/tests/test_sph_pixelization_pytestonly.py
@@ -113,7 +113,7 @@ def makemasses(i, j, k):
         center=center,
         data_source=source,
     )
-    img = prj.frb.data[("gas", "density")]
+    img = prj.frb.data["gas", "density"]
     if weighted:
         expected_out = np.zeros(
             (
@@ -240,7 +240,7 @@ def makemasses(i, j, k):
         buff_size=(outgridsize,) * 2,
         center=(_center, "cm"),
     )
-    img = slc.frb.data[("gas", "density")]
+    img = slc.frb.data["gas", "density"]

     # center is same in non-projection coords
     if axis == 0:
@@ -272,9 +272,9 @@ def makemasses(i, j, k):
     ad = ds.all_data()
     sphcoords = np.array(
         [
-            (ad[("gas", "x")]).to("cm"),
-            (ad[("gas", "y")]).to("cm"),
-            (ad[("gas", "z")]).to("cm"),
+            (ad["gas", "x"]).to("cm"),
+            (ad["gas", "y"]).to("cm"),
+            (ad["gas", "z"]).to("cm"),
         ]
     ).T
     # print("sphcoords:")
@@ -289,15 +289,12 @@ def makemasses(i, j, k):
     )
     # print("dists <= 1:")
     # print(dists <= 1)
-    sml = (ad[("gas", "smoothing_length")]).to("cm")
+    sml = (ad["gas", "smoothing_length"]).to("cm")
     normkern = cubicspline_python(dists / sml.v[np.newaxis, :])
-    sphcontr = normkern / sml[np.newaxis, :] ** 3 * ad[("gas", "mass")]
+    sphcontr = normkern / sml[np.newaxis, :] ** 3 * ad["gas", "mass"]
     contsum = np.sum(sphcontr, axis=1)
     sphweights = (
-        normkern
-        / sml[np.newaxis, :] ** 3
-        * ad[("gas", "mass")]
-        / ad[("gas", "density")]
+        normkern / sml[np.newaxis, :] ** 3 * ad["gas", "mass"] / ad["gas", "density"]
     )
     weights = np.sum(sphweights, axis=1)
     nzeromask = np.logical_not(weights == 0)
@@ -406,7 +403,7 @@ def makemasses(i, j, k):
         center=(_center, "cm"),
         north_vector=e2dir,
     )
-    img = slc.frb.data[("gas", "density")]
+    img = slc.frb.data["gas", "density"]

     # center is same in x/y (e3dir/e2dir)
     gridcenx = (
@@ -434,9 +431,9 @@ def makemasses(i, j, k):
     ad = ds.all_data()
     sphcoords = np.array(
         [
-            (ad[("gas", "x")]).to("cm"),
-            (ad[("gas", "y")]).to("cm"),
-            (ad[("gas", "z")]).to("cm"),
+            (ad["gas", "x"]).to("cm"),
+            (ad["gas", "y"]).to("cm"),
+            (ad["gas", "z"]).to("cm"),
         ]
     ).T
     dists = distancematrix(
@@ -445,15 +442,12 @@ def makemasses(i, j, k):
         periodic=(periodic,) * 3,
         periods=np.array([3.0, 3.0, 3.0]),
     )
-    sml = (ad[("gas", "smoothing_length")]).to("cm")
+    sml = (ad["gas", "smoothing_length"]).to("cm")
     normkern = cubicspline_python(dists / sml.v[np.newaxis, :])
-    sphcontr = normkern / sml[np.newaxis, :] ** 3 * ad[("gas", "mass")]
+    sphcontr = normkern / sml[np.newaxis, :] ** 3 * ad["gas", "mass"]
     contsum = np.sum(sphcontr, axis=1)
     sphweights = (
-        normkern
-        / sml[np.newaxis, :] ** 3
-        * ad[("gas", "mass")]
-        / ad[("gas", "density")]
+        normkern / sml[np.newaxis, :] ** 3 * ad["gas", "mass"] / ad["gas", "density"]
     )
     weights = np.sum(sphweights, axis=1)
     nzeromask = np.logical_not(weights == 0)
@@ -509,9 +503,9 @@ def test_sph_grid(
     ad = ds.all_data()
     sphcoords = np.array(
         [
-            (ad[("gas", "x")]).to("cm"),
-            (ad[("gas", "y")]).to("cm"),
-            (ad[("gas", "z")]).to("cm"),
+            (ad["gas", "x"]).to("cm"),
+            (ad["gas", "y"]).to("cm"),
+            (ad["gas", "z"]).to("cm"),
         ]
     ).T
     gridx, gridy, gridz = np.meshgrid(xcens, ycens, zcens, indexing="ij")
@@ -522,15 +516,12 @@ def test_sph_grid(
     gridcoords = np.array([gridx, gridy, gridz]).T
     periods = bbox[:, 1] - bbox[:, 0]
     dists = distancematrix(gridcoords, sphcoords, periodic=periodic, periods=periods)
-    sml = (ad[("gas", "smoothing_length")]).to("cm")
+    sml = (ad["gas", "smoothing_length"]).to("cm")
     normkern = cubicspline_python(dists / sml.v[np.newaxis, :])
-    sphcontr = normkern / sml[np.newaxis, :] ** 3 * ad[("gas", "mass")]
+    sphcontr = normkern / sml[np.newaxis, :] ** 3 * ad["gas", "mass"]
     contsum = np.sum(sphcontr, axis=1)
     sphweights = (
-        normkern
-        / sml[np.newaxis, :] ** 3
-        * ad[("gas", "mass")]
-        / ad[("gas", "density")]
+        normkern / sml[np.newaxis, :] ** 3 * ad["gas", "mass"] / ad["gas", "density"]
     )
     weights = np.sum(sphweights, axis=1)
     nzeromask = np.logical_not(weights == 0)
diff --git a/yt/visualization/plot_window.py b/yt/visualization/plot_window.py
index 78f96f14a28..5ce9bd790ea 100644
--- a/yt/visualization/plot_window.py
+++ b/yt/visualization/plot_window.py
@@ -26,6 +26,7 @@
     validate_moment,
 )
 from yt.geometry.api import Geometry
+from yt.geometry.oct_geometry_handler import OctreeIndex
 from yt.units.unit_object import Unit  # type: ignore
 from yt.units.unit_registry import UnitParseError  # type: ignore
 from yt.units.yt_array import YTArray, YTQuantity
@@ -2494,7 +2495,12 @@ def __init__(
         is_sph_field = finfo.is_sph_field

         particle_datasets = (ParticleDataset, StreamParticlesDataset)
-        if isinstance(data_source.ds, particle_datasets) and is_sph_field:
+
+        dom_width = data_source.ds.domain_width
+        cubic_domain = dom_width.max() == dom_width.min()
+
+        if (isinstance(data_source.ds, particle_datasets) and is_sph_field) or (
+            isinstance(data_source.ds.index, OctreeIndex) and cubic_domain
+        ):
             center_use = parse_center_array(center, ds=data_source.ds, axis=None)
         else:
             center_use = center_rot
diff --git a/yt/visualization/tests/test_offaxisprojection.py b/yt/visualization/tests/test_offaxisprojection.py
index b9839745d7a..46dccf860ae 100644
--- a/yt/visualization/tests/test_offaxisprojection.py
+++ b/yt/visualization/tests/test_offaxisprojection.py
@@ -16,6 +16,7 @@
 from yt.visualization.api import (
     OffAxisProjectionPlot,
     OffAxisSlicePlot,
+    ProjectionPlot,
 )
 from yt.visualization.image_writer import write_projection
 from yt.visualization.volume_rendering.api import off_axis_projection
@@ -210,6 +211,35 @@ def test_field_cut_off_axis_octree():
     assert_equal(np.nanmin(p4rho[p4rho > 0.0]) >= 0.5, True)


+def test_off_axis_octree():
+    ds = fake_octree_ds()
+    p1 = ProjectionPlot(
+        ds,
+        "x",
+        ("gas", "density"),
+        center=[0.6] * 3,
+        width=0.8,
+        weight_field=("gas", "density"),
+    )
+    p2 = OffAxisProjectionPlot(
+        ds,
+        [1, 0, 0],
+        ("gas", "density"),
+        center=[0.6] * 3,
+        width=0.8,
+        weight_field=("gas", "density"),
+    )
+
+    # Note: due to our implementation, the off-axis projection will have a
+    # slightly blurred cell edges so we can't do an exact comparison
+    v1, v2 = p1.frb["gas", "density"], p2.frb["gas", "density"]
+    diff = (v1 - v2) / (v1 + v2) * 2
+
+    # Make sure the difference is zero-centered with a small standard deviation
+    assert np.mean(diff).max() < 1e-3  # 0.1%: very little bias
+    assert np.std(diff) < 0.05  # <2% error on average
+
+
 def test_offaxis_moment():
     ds = fake_random_ds(64)
diff --git a/yt/visualization/tests/test_offaxisprojection_pytestonly.py b/yt/visualization/tests/test_offaxisprojection_pytestonly.py
index ab3928a5dc0..40212b9992c 100644
--- a/yt/visualization/tests/test_offaxisprojection_pytestonly.py
+++ b/yt/visualization/tests/test_offaxisprojection_pytestonly.py
@@ -125,7 +125,7 @@ def makemasses(i, j, k):
         north_vector=northvector,
         depth=depth,
     )
-    img = prj.frb.data[("gas", "density")]
+    img = prj.frb.data["gas", "density"]
     if weighted:
         # periodic shifts will modify the (relative) dl values a bit
         expected_out = np.zeros(
diff --git a/yt/visualization/volume_rendering/off_axis_projection.py b/yt/visualization/volume_rendering/off_axis_projection.py
index 3c2359a3f80..38c5677ef55 100644
--- a/yt/visualization/volume_rendering/off_axis_projection.py
+++ b/yt/visualization/volume_rendering/off_axis_projection.py
@@ -444,21 +444,23 @@ def temp_weightfield(field, data):
         # We need the width of the plot window in projected coordinates,
         # i.e. we ignore the z-component
         wmax = width[:2].max()
-
-        # Normalize the positions & dx so that they are in the range [-0.5, 0.5]
-        xyz = np.stack(
-            [
-                ((data_source["index", k] - center[i]) / wmax).to("1").d
-                for i, k in enumerate("xyz")
-            ],
-            axis=-1,
+        xyz = data_source.ds.arr(
+            np.zeros((len(data_source[vol.field]), 3)), "code_length"
         )
         for idim, periodic in enumerate(data_source.ds.periodicity):
+            axis = data_source.ds.coordinates.axis_order[idim]
+            # Recenter positions w.r.t. center of the plot window
+            xyz[..., idim] = data_source["index", axis] - center[idim]
             if not periodic:
                 continue
-            # Wrap into [-0.5, +0.5]
-            xyz[..., idim] = (xyz[..., idim] + 0.5) % 1 - 0.5
+            # If we have periodic boundaries, we need to wrap the corresponding
+            # coordinates into [-w/2, +w/2]
+            w = data_source.ds.domain_width[idim]
+            xyz[..., idim] = (xyz[..., idim] + w / 2) % w - w / 2
+
+        # Rescale to [-0.5, +0.5]
+        xyz = (xyz / wmax).to("1").d

         dx = (data_source["index", "dx"] / wmax).to("1").d
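The final hunk above replaces the one-shot normalization into [-0.5, 0.5] with a recenter-then-wrap step in code units: each periodic axis is wrapped into [-w/2, +w/2] around the plot-window center before everything is rescaled by wmax. A minimal NumPy sketch of that wrapping, using plain floats instead of yt's unit arrays (the function name and example values are illustrative, not taken from the patch):

import numpy as np


def recenter_periodic(positions, center, w):
    # Shift positions so that the chosen center maps to 0, then wrap into
    # [-w/2, +w/2) for a periodic axis of width w, mirroring the hunk above.
    d = positions - center
    return (d + w / 2) % w - w / 2


# Domain of width 1.0 viewed around x=0.9: the particle at x=0.05 sits just
# across the periodic boundary and wraps to a small positive offset.
pos = np.array([0.05, 0.5, 0.85])
print(recenter_periodic(pos, center=0.9, w=1.0))  # [ 0.15 -0.4  -0.05]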