Script 'mail_helper' called by obssrc
Hello community,

here is the log from the commit of package python-yt for openSUSE:Factory 
checked in at 2025-08-28 17:19:11
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-yt (Old)
 and      /work/SRC/openSUSE:Factory/.python-yt.new.1977 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "python-yt"

Thu Aug 28 17:19:11 2025 rev:16 rq:1301682 version:4.4.1

Changes:
--------
--- /work/SRC/openSUSE:Factory/python-yt/python-yt.changes      2025-08-02 
00:41:10.557132810 +0200
+++ /work/SRC/openSUSE:Factory/.python-yt.new.1977/python-yt.changes    
2025-08-28 17:19:44.069895965 +0200
@@ -1,0 +2,24 @@
+Thu Aug 28 05:38:50 UTC 2025 - Steve Kowalik <steven.kowa...@suse.com>
+
+- Update to 4.4.1:
+  * Fix off-axis rendering when center is not [0.5, 0.5, 0.5], fix
+    periodicity
+  * Ensure positions are dimensionless in plot callbacks
+  * Correctly normalize off-axis projections for octree datasets
+  * Calculate default depth for off axis projections
+  * Fix compatibility with matplotlib 3.10 for images that have both a
+    custom background color and a non-transparent 'bad' color
+  * Removing code that removes field type in set_title()
+  * Properly reference weight field in profile code for particle-based
+    datasets
+  * Fix swap_axes bug with celledgescallback
+  * Fix nans in off-center frb slices from ds.r
+  * Make backend_specs case insensitive
+  * Update testing.integrate_kernel type hints
+  * Better behavior when cholla doesn't write "mu" attribute
+  * removing code that removes field type in set_title()
+  * Adapt ci and test suite to breaking changes in pytest 8.4.0
+  * fix an upcoming incompatibility with pytest 8.5 in our test framework
+  * Filter out an indirect deprecation warning from pillow via matplotlib
+
+-------------------------------------------------------------------

Old:
----
  yt-4.4.0.tar.gz

New:
----
  yt-4.4.1.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-yt.spec ++++++
--- /var/tmp/diff_new_pack.dnAZeK/_old  2025-08-28 17:19:44.945932680 +0200
+++ /var/tmp/diff_new_pack.dnAZeK/_new  2025-08-28 17:19:44.949932848 +0200
@@ -1,7 +1,7 @@
 #
 # spec file for package python-yt
 #
-# Copyright (c) 2025 SUSE LLC
+# Copyright (c) 2025 SUSE LLC and contributors
 #
 # All modifications and additions to the file contributed by third parties
 # remain the property of their copyright owners, unless otherwise agreed
@@ -30,7 +30,7 @@
 
 %{?sle15_python_module_pythons}
 Name:           python-yt%{psuffix}
-Version:        4.4.0
+Version:        4.4.1
 Release:        0
 Summary:        An analysis and visualization toolkit for volumetric data
 License:        BSD-3-Clause

++++++ yt-4.4.0.tar.gz -> yt-4.4.1.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/yt-4.4.0/PKG-INFO new/yt-4.4.1/PKG-INFO
--- old/yt-4.4.0/PKG-INFO       2024-11-11 14:17:01.435154400 +0100
+++ new/yt-4.4.1/PKG-INFO       2025-07-08 20:30:30.533897600 +0200
@@ -1,6 +1,6 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.4
 Name: yt
-Version: 4.4.0
+Version: 4.4.1
 Summary: An analysis and visualization toolkit for volumetric data
 Author-email: The yt project <yt-...@python.org>
 License: BSD 3-Clause
@@ -27,7 +27,7 @@
 Classifier: Topic :: Scientific/Engineering :: Astronomy
 Classifier: Topic :: Scientific/Engineering :: Physics
 Classifier: Topic :: Scientific/Engineering :: Visualization
-Requires-Python: >=3.10.3
+Requires-Python: >=3.10.4
 Description-Content-Type: text/markdown
 License-File: COPYING.txt
 Requires-Dist: cmyt>=1.1.2
@@ -190,6 +190,8 @@
 Requires-Dist: pytest-mpl>=0.16.1; extra == "test"
 Requires-Dist: sympy!=1.10,!=1.9; extra == "test"
 Requires-Dist: imageio!=2.35.0; extra == "test"
+Requires-Dist: contourpy; extra == "test"
+Dynamic: license-file
 
 # The yt Project
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/yt-4.4.0/conftest.py new/yt-4.4.1/conftest.py
--- old/yt-4.4.0/conftest.py    2024-11-11 14:16:34.000000000 +0100
+++ new/yt-4.4.1/conftest.py    2025-07-08 20:30:02.000000000 +0200
@@ -22,7 +22,7 @@
 
 MPL_VERSION = Version(version("matplotlib"))
 NUMPY_VERSION = Version(version("numpy"))
-PILLOW_VERSION = Version(version("pillow"))
+PILLOW_VERSION = Version(version("Pillow"))
 
 # setuptools does not ship with the standard lib starting in Python 3.12, so 
we need to
 # be resilient if it's not available at runtime
@@ -160,6 +160,13 @@
                 ":DeprecationWarning",
             )
 
+    if PILLOW_VERSION >= Version("11.3.0") and MPL_VERSION <= 
Version("3.10.3"):
+        # patched upstream: https://github.com/matplotlib/matplotlib/pull/30221
+        config.addinivalue_line(
+            "filterwarnings",
+            r"ignore:'mode' parameter is deprecated:DeprecationWarning",
+        )
+
     if PANDAS_VERSION is not None and PANDAS_VERSION >= Version("2.2.0"):
         config.addinivalue_line(
             "filterwarnings",
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/yt-4.4.0/doc/source/conf.py 
new/yt-4.4.1/doc/source/conf.py
--- old/yt-4.4.0/doc/source/conf.py     2024-11-11 14:16:34.000000000 +0100
+++ new/yt-4.4.1/doc/source/conf.py     2025-07-08 20:30:02.000000000 +0200
@@ -70,7 +70,7 @@
 # The short X.Y version.
 version = "4.4"
 # The full version, including alpha/beta/rc tags.
-release = "4.4.0"
+release = "4.4.1"
 
 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/yt-4.4.0/pyproject.toml new/yt-4.4.1/pyproject.toml
--- old/yt-4.4.0/pyproject.toml 2024-11-11 14:16:34.000000000 +0100
+++ new/yt-4.4.1/pyproject.toml 2025-07-08 20:30:02.000000000 +0200
@@ -10,7 +10,7 @@
 
 [project]
 name = "yt"
-version = "4.4.0"
+version = "4.4.1"
 description = "An analysis and visualization toolkit for volumetric data"
 authors = [
     { name = "The yt project", email = "yt-...@python.org" },
@@ -38,7 +38,7 @@
 keywords = [
     "astronomy astrophysics visualization amr adaptivemeshrefinement",
 ]
-requires-python = ">=3.10.3"
+requires-python = ">=3.10.4"
 
 # keep in sync with minimal_requirements.txt
 dependencies = [
@@ -200,6 +200,7 @@
     "pytest-mpl>=0.16.1",
     "sympy!=1.10,!=1.9", # see https://github.com/sympy/sympy/issues/22241
     "imageio!=2.35.0", # see https://github.com/yt-project/yt/issues/4966
+    "contourpy",
 ]
 
 [project.scripts]
@@ -381,7 +382,6 @@
     --ignore-glob='/*/yt/visualization/tests/test_particle_plot.py'
     --ignore-glob='/*/yt/visualization/tests/test_plot_modifications.py'
     --ignore-glob='/*/yt/visualization/tests/test_plotwindow.py'
-    --ignore-glob='/*/yt/visualization/tests/test_profile_plots.py'
     --ignore-glob='/*/yt/visualization/tests/test_raw_field_slices.py'
     
--ignore-glob='/*/yt/visualization/volume_rendering/tests/test_mesh_render.py'
     
--ignore-glob='/*/yt/visualization/volume_rendering/tests/test_vr_orientation.py'
@@ -462,12 +462,11 @@
 exclude = "(test_*|lodgeit)"
 
 [tool.cibuildwheel]
-build = "cp310-* cp311-* cp312-* cp313-*"
 build-verbosity = 1
 test-skip = "*-musllinux*"
 test-extras = "test"
 test-command = [
-    "pytest -c {project}/pyproject.toml --rootdir . --color=yes --pyargs yt 
-ra",
+    "python -m pytest -c {project}/pyproject.toml --rootdir . --color=yes 
--pyargs yt -ra",
 ]
 
 [tool.cibuildwheel.linux]
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/yt-4.4.0/yt/_maintenance/numpy2_compat.py 
new/yt-4.4.1/yt/_maintenance/numpy2_compat.py
--- old/yt-4.4.0/yt/_maintenance/numpy2_compat.py       2024-11-11 
14:16:34.000000000 +0100
+++ new/yt-4.4.1/yt/_maintenance/numpy2_compat.py       2025-07-08 
20:30:02.000000000 +0200
@@ -6,4 +6,4 @@
     # np.trapz is deprecated in numpy 2.0 in favor of np.trapezoid
     trapezoid = np.trapezoid
 else:
-    trapezoid = np.trapz  # type: ignore [attr-defined] # noqa: NPY201
+    trapezoid = np.trapz  # type: ignore[assignment] # noqa: NPY201
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/yt-4.4.0/yt/_version.py new/yt-4.4.1/yt/_version.py
--- old/yt-4.4.0/yt/_version.py 2024-11-11 14:16:34.000000000 +0100
+++ new/yt-4.4.1/yt/_version.py 2025-07-08 20:30:02.000000000 +0200
@@ -7,7 +7,7 @@
     "version_info",
 ]
 
-__version__ = "4.4.0"  # keep in sync with pyproject.toml
+__version__ = "4.4.1"  # keep in sync with pyproject.toml
 
 
 class VersionTuple(NamedTuple):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/yt-4.4.0/yt/data_objects/profiles.py 
new/yt-4.4.1/yt/data_objects/profiles.py
--- old/yt-4.4.0/yt/data_objects/profiles.py    2024-11-11 14:16:34.000000000 
+0100
+++ new/yt-4.4.1/yt/data_objects/profiles.py    2025-07-08 20:30:02.000000000 
+0200
@@ -1314,7 +1314,7 @@
                 for f in bin_fields + fields
             ]
             if wf is not None:
-                is_local.append(wf.sampling_type == "local")
+                is_local.append(data_source.ds.field_info[wf].sampling_type == 
"local")
             is_local_or_pfield = [
                 pf or lf for (pf, lf) in zip(is_pfield, is_local, strict=True)
             ]
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/yt-4.4.0/yt/data_objects/region_expression.py 
new/yt-4.4.1/yt/data_objects/region_expression.py
--- old/yt-4.4.0/yt/data_objects/region_expression.py   2024-11-11 
14:16:34.000000000 +0100
+++ new/yt-4.4.1/yt/data_objects/region_expression.py   2025-07-08 
20:30:02.000000000 +0200
@@ -138,7 +138,11 @@
             height = source.right_edge[yax] - source.left_edge[yax]
             # Make a resolution tuple with
             resolution = (int(new_slice[xax].step.imag), 
int(new_slice[yax].step.imag))
-            sl = sl.to_frb(width=width, resolution=resolution, height=height)
+            # Use the center of the slice, not the entire domain
+            center = source.center
+            sl = sl.to_frb(
+                width=width, resolution=resolution, height=height, 
center=center
+            )
         return sl
 
     def _slice_to_edges(self, ax, val):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/yt-4.4.0/yt/data_objects/tests/test_dataset_access.py 
new/yt-4.4.1/yt/data_objects/tests/test_dataset_access.py
--- old/yt-4.4.0/yt/data_objects/tests/test_dataset_access.py   2024-11-11 
14:16:34.000000000 +0100
+++ new/yt-4.4.1/yt/data_objects/tests/test_dataset_access.py   2025-07-08 
20:30:02.000000000 +0200
@@ -112,6 +112,19 @@
     frb4 = ds.r[0.5, 0.25:0.75:1024j, 0.25:0.75:512j]
     assert_equal(frb3["gas", "density"], frb4["gas", "density"])
 
+    # Test off-center slice
+    offset_box = ds.box([0.0, 0.0, 0.4], [1.0, 0.5, 0.9])
+
+    sl5 = ds.r[0.5, 0:0.5, 0.4:0.9]
+    sl6 = ds.slice("x", 0.5, data_source=offset_box)
+    assert_equal(sl5["gas", "density"], sl6["gas", "density"])
+
+    frb5 = sl5.to_frb(
+        width=0.5, height=0.5, resolution=(1024, 512), center=(0.5, 0.25, 0.65)
+    )
+    frb6 = ds.r[0.5, 0.0:0.5:1024j, 0.4:0.9:512j]
+    assert_equal(frb5["gas", "density"], frb6["gas", "density"])
+
 
 def test_point_from_r():
     ds = fake_amr_ds(fields=["density"], units=["g/cm**3"])
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/yt-4.4.0/yt/data_objects/tests/test_profiles.py 
new/yt-4.4.1/yt/data_objects/tests/test_profiles.py
--- old/yt-4.4.0/yt/data_objects/tests/test_profiles.py 2024-11-11 
14:16:34.000000000 +0100
+++ new/yt-4.4.1/yt/data_objects/tests/test_profiles.py 2025-07-08 
20:30:02.000000000 +0200
@@ -534,6 +534,12 @@
         [("gas", "kinetic_energy_density")],
         weight_field=None,
     )
+    yt.create_profile(
+        ds.all_data(),
+        [("gas", "density"), ("gas", "temperature")],
+        [("gas", "kinetic_energy_density")],
+        weight_field=("gas", "density"),
+    )
 
 
 def test_profile_override_limits():
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/yt-4.4.0/yt/fields/tests/test_fields.py 
new/yt-4.4.1/yt/fields/tests/test_fields.py
--- old/yt-4.4.0/yt/fields/tests/test_fields.py 2024-11-11 14:16:34.000000000 
+0100
+++ new/yt-4.4.1/yt/fields/tests/test_fields.py 2025-07-08 20:30:02.000000000 
+0200
@@ -1,3 +1,5 @@
+from dataclasses import dataclass
+
 import numpy as np
 from numpy.testing import (
     assert_almost_equal,
@@ -8,6 +10,7 @@
 )
 
 from yt import load
+from yt.data_objects.static_output import Dataset
 from yt.frontends.stream.fields import StreamFieldInfo
 from yt.testing import (
     assert_allclose_units,
@@ -59,61 +62,15 @@
     return field[1]
 
 
-class TestFieldAccess:
-    description = None
-
-    def __init__(self, field_name, ds, nprocs):
-        # Note this should be a field name
-        self.field_name = field_name
-        self.description = f"Accessing_{field_name}_{nprocs}"
-        self.nprocs = nprocs
-        self.ds = ds
-
-    def __call__(self):
-        field = self.ds._get_field_info(self.field_name)
-        skip_grids = False
-        needs_spatial = False
-        for v in field.validators:
-            if getattr(v, "ghost_zones", 0) > 0:
-                skip_grids = True
-            if hasattr(v, "ghost_zones"):
-                needs_spatial = True
-
-        ds = self.ds
-
-        # This gives unequal sized grids as well as subgrids
-        dd1 = ds.all_data()
-        dd2 = ds.all_data()
-        sp = get_params(ds)
-        dd1.field_parameters.update(sp)
-        dd2.field_parameters.update(sp)
-        with np.errstate(all="ignore"):
-            v1 = dd1[self.field_name]
-            # No more conversion checking
-            assert_equal(v1, dd1[self.field_name])
-            if not needs_spatial:
-                with field.unit_registry(dd2):
-                    res = field._function(field, dd2)
-                    res = dd2.apply_units(res, field.units)
-                assert_array_almost_equal_nulp(v1, res, 4)
-            if not skip_grids:
-                for g in ds.index.grids:
-                    g.field_parameters.update(sp)
-                    v1 = g[self.field_name]
-                    g.clear_data()
-                    g.field_parameters.update(sp)
-                    r1 = field._function(field, g)
-                    if field.sampling_type == "particle":
-                        assert_equal(v1.shape[0], g.NumberOfParticles)
-                    else:
-                        assert_array_equal(r1.shape, v1.shape)
-                        for ax in "xyz":
-                            assert_array_equal(g["index", ax].shape, v1.shape)
-                    with field.unit_registry(g):
-                        res = field._function(field, g)
-                        assert_array_equal(v1.shape, res.shape)
-                        res = g.apply_units(res, field.units)
-                    assert_array_almost_equal_nulp(v1, res, 4)
+@dataclass(slots=True, frozen=True)
+class FieldAccessTestCase:
+    field_name: str
+    ds: Dataset
+    nprocs: int
+
+    @property
+    def description(self) -> str:
+        return f"Accessing_{self.field_name}_{self.nprocs}"
 
 
 def get_base_ds(nprocs):
@@ -188,7 +145,53 @@
 
         for nprocs in [1, 4, 8]:
             test_all_fields.__name__ = f"{field}_{nprocs}"
-            yield TestFieldAccess(field, datasets[nprocs], nprocs)
+
+            tc = FieldAccessTestCase(field, datasets[nprocs], nprocs)
+
+            field = tc.ds._get_field_info(tc.field_name)
+            skip_grids = False
+            needs_spatial = False
+            for v in field.validators:
+                if getattr(v, "ghost_zones", 0) > 0:
+                    skip_grids = True
+                if hasattr(v, "ghost_zones"):
+                    needs_spatial = True
+
+            ds = tc.ds
+
+            # This gives unequal sized grids as well as subgrids
+            dd1 = ds.all_data()
+            dd2 = ds.all_data()
+            sp = get_params(ds)
+            dd1.field_parameters.update(sp)
+            dd2.field_parameters.update(sp)
+            with np.errstate(all="ignore"):
+                v1 = dd1[tc.field_name]
+                # No more conversion checking
+                assert_equal(v1, dd1[tc.field_name])
+                if not needs_spatial:
+                    with field.unit_registry(dd2):
+                        res = field._function(field, dd2)
+                        res = dd2.apply_units(res, field.units)
+                    assert_array_almost_equal_nulp(v1, res, 4)
+                if not skip_grids:
+                    for g in ds.index.grids:
+                        g.field_parameters.update(sp)
+                        v1 = g[tc.field_name]
+                        g.clear_data()
+                        g.field_parameters.update(sp)
+                        r1 = field._function(field, g)
+                        if field.sampling_type == "particle":
+                            assert_equal(v1.shape[0], g.NumberOfParticles)
+                        else:
+                            assert_array_equal(r1.shape, v1.shape)
+                            for ax in "xyz":
+                                assert_array_equal(g["index", ax].shape, 
v1.shape)
+                        with field.unit_registry(g):
+                            res = field._function(field, g)
+                            assert_array_equal(v1.shape, res.shape)
+                            res = g.apply_units(res, field.units)
+                        assert_array_almost_equal_nulp(v1, res, 4)
 
 
 def test_add_deposited_particle_field():
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/yt-4.4.0/yt/frontends/amrex/data_structures.py 
new/yt-4.4.1/yt/frontends/amrex/data_structures.py
--- old/yt-4.4.0/yt/frontends/amrex/data_structures.py  2024-11-11 
14:16:34.000000000 +0100
+++ new/yt-4.4.1/yt/frontends/amrex/data_structures.py  2025-07-08 
20:30:02.000000000 +0200
@@ -370,23 +370,19 @@
                 dx[i].append(DRE[2] - DLE[2])
         self.level_dds = np.array(dx, dtype="float64")
         next(header_file)
-        if self.ds.geometry == "cartesian":
-            default_ybounds = (0.0, 1.0)
-            default_zbounds = (0.0, 1.0)
-        elif self.ds.geometry == "cylindrical":
-            self.level_dds[:, 2] = 2 * np.pi
-            default_ybounds = (0.0, 1.0)
-            default_zbounds = (0.0, 2 * np.pi)
-        elif self.ds.geometry == "spherical":
-            # BoxLib only supports 1D spherical, so ensure
-            # the other dimensions have the right extent.
-            self.level_dds[:, 1] = np.pi
-            self.level_dds[:, 2] = 2 * np.pi
-            default_ybounds = (0.0, np.pi)
-            default_zbounds = (0.0, 2 * np.pi)
-        else:
-            header_file.close()
-            raise RuntimeError("Unknown BoxLib coordinate system.")
+        match self.ds.geometry:
+            case Geometry.CARTESIAN:
+                default_ybounds = (0.0, 1.0)
+                default_zbounds = (0.0, 1.0)
+            case Geometry.CYLINDRICAL:
+                default_ybounds = (0.0, 1.0)
+                default_zbounds = (0.0, 2 * np.pi)
+            case Geometry.SPHERICAL:
+                default_ybounds = (0.0, np.pi)
+                default_zbounds = (0.0, 2 * np.pi)
+            case _:
+                header_file.close()
+                raise RuntimeError("Unknown BoxLib coordinate system.")
         if int(next(header_file)) != 0:
             header_file.close()
             raise RuntimeError("INTERNAL ERROR! This should be a zero.")
@@ -904,9 +900,15 @@
         else:
             self.geometry = Geometry(geom_str)
 
-        if self.geometry == "cylindrical":
+        if self.geometry is Geometry.CYLINDRICAL:
+            dre = self.domain_right_edge.copy()
+            dre[2] = 2.0 * np.pi
+            self.domain_right_edge = dre
+        if self.geometry is Geometry.SPHERICAL and self.dimensionality < 3:
             dre = self.domain_right_edge.copy()
             dre[2] = 2.0 * np.pi
+            if self.dimensionality < 2:
+                dre[1] = np.pi
             self.domain_right_edge = dre
 
         header_file.close()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/yt-4.4.0/yt/frontends/amrex/tests/test_outputs.py 
new/yt-4.4.1/yt/frontends/amrex/tests/test_outputs.py
--- old/yt-4.4.0/yt/frontends/amrex/tests/test_outputs.py       2024-11-11 
14:16:34.000000000 +0100
+++ new/yt-4.4.1/yt/frontends/amrex/tests/test_outputs.py       2025-07-08 
20:30:02.000000000 +0200
@@ -443,3 +443,60 @@
     # Check an int parameter
     assert ds.parameters["s0_interp_type"] == 3
     assert type(ds.parameters["s0_interp_type"]) is int  # noqa: E721
+
+
+# test loading non-Cartesian coordinate systems in different dimensionalities
+
+
+def check_coordsys_data(ds):
+    # check that level_dds is consistent with domain_width
+    assert_allclose(
+        ds.index.level_dds[0] * ds.domain_dimensions,
+        ds.domain_width.to_value("code_length"),
+        rtol=1e-12,
+        atol=0.0,
+    )
+
+    # check that we get the expected number of data points when selecting the
+    # entire domain
+    expected_size = sum(np.count_nonzero(g.child_mask) for g in ds.index.grids)
+    ad = ds.all_data()
+    assert ad["boxlib", "Temp"].size == expected_size
+
+
+cyl_1d = "castro_sedov_1d_cyl_plt00150"
+cyl_2d = "castro_sedov_2d_sph_in_cyl_plt00130"
+sph_1d = "sedov_1d_sph_plt00120"
+sph_2d = "xrb_spherical_smallplt00010"
+
+
+@requires_file(cyl_1d)
+def test_coordsys_1d_cylindrical():
+    ds = data_dir_load(cyl_1d)
+    assert ds.geometry == "cylindrical"
+    assert ds.dimensionality == 1
+    check_coordsys_data(ds)
+
+
+@requires_file(cyl_2d)
+def test_coordsys_2d_cylindrical():
+    ds = data_dir_load(cyl_2d)
+    assert ds.geometry == "cylindrical"
+    assert ds.dimensionality == 2
+    check_coordsys_data(ds)
+
+
+@requires_file(sph_1d)
+def test_coordsys_1d_spherical():
+    ds = data_dir_load(sph_1d)
+    assert ds.geometry == "spherical"
+    assert ds.dimensionality == 1
+    check_coordsys_data(ds)
+
+
+@requires_file(sph_2d)
+def test_coordsys_2d_spherical():
+    ds = data_dir_load(sph_2d)
+    assert ds.geometry == "spherical"
+    assert ds.dimensionality == 2
+    check_coordsys_data(ds)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/yt-4.4.0/yt/frontends/cholla/data_structures.py 
new/yt-4.4.1/yt/frontends/cholla/data_structures.py
--- old/yt-4.4.0/yt/frontends/cholla/data_structures.py 2024-11-11 
14:16:34.000000000 +0100
+++ new/yt-4.4.1/yt/frontends/cholla/data_structures.py 2025-07-08 
20:30:02.000000000 +0200
@@ -8,6 +8,7 @@
 from yt.funcs import setdefaultattr
 from yt.geometry.api import Geometry
 from yt.geometry.grid_geometry_handler import GridIndex
+from yt.utilities.logger import ytLogger as mylog
 from yt.utilities.on_demand_imports import _h5py as h5py
 
 from .fields import ChollaFieldInfo
@@ -111,7 +112,19 @@
             self.current_time = attrs["t"][:]
             self._periodicity = tuple(attrs.get("periodicity", (False, False, 
False)))
             self.gamma = attrs.get("gamma", 5.0 / 3.0)
-            self.mu = attrs.get("mu", 1.0)
+            if (self.default_species_fields is not None) and "mu" in attrs:
+                raise ValueError(
+                    'default_species_fields must be None when "mu" is an hdf5 
attribute'
+                )
+            elif "mu" in attrs:
+                self.mu = attrs["mu"]
+            elif self.default_species_fields is None:
+                # other yt-machinery can't handle ds.mu == None, so we simply
+                # avoid defining the mu attribute if we don't know its value
+                mylog.info(
+                    'add the "mu" hdf5 attribute OR use the 
default_species_fields kwarg '
+                    "to compute temperature"
+                )
             self.refine_by = 1
 
             # If header specifies code units, default to those (in CGS)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/yt-4.4.0/yt/frontends/cholla/fields.py 
new/yt-4.4.1/yt/frontends/cholla/fields.py
--- old/yt-4.4.0/yt/frontends/cholla/fields.py  2024-11-11 14:16:34.000000000 
+0100
+++ new/yt-4.4.1/yt/frontends/cholla/fields.py  2025-07-08 20:30:02.000000000 
+0200
@@ -87,21 +87,23 @@
         )
 
         # Add temperature field
-        def _temperature(field, data):
-            return (
-                data.ds.mu
-                * data["gas", "pressure"]
-                / data["gas", "density"]
-                * mh
-                / kboltz
-            )
+        if hasattr(self.ds, "mu"):
 
-        self.add_field(
-            ("gas", "temperature"),
-            sampling_type="cell",
-            function=_temperature,
-            units=unit_system["temperature"],
-        )
+            def _temperature(field, data):
+                return (
+                    data.ds.mu
+                    * data["gas", "pressure"]
+                    / data["gas", "density"]
+                    * mh
+                    / kboltz
+                )
+
+            self.add_field(
+                ("gas", "temperature"),
+                sampling_type="cell",
+                function=_temperature,
+                units=unit_system["temperature"],
+            )
 
         # Add color field if present (scalar0 / density)
         if ("cholla", "scalar0") in self.field_list:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/yt-4.4.0/yt/frontends/parthenon/data_structures.py 
new/yt-4.4.1/yt/frontends/parthenon/data_structures.py
--- old/yt-4.4.0/yt/frontends/parthenon/data_structures.py      2024-11-11 
14:16:34.000000000 +0100
+++ new/yt-4.4.1/yt/frontends/parthenon/data_structures.py      2025-07-08 
20:30:02.000000000 +0200
@@ -129,6 +129,7 @@
 
 
 class ParthenonDataset(Dataset):
+    _load_requirements = ["h5py"]
     _field_info_class = ParthenonFieldInfo
     _dataset_type = "parthenon"
     _index_class = ParthenonHierarchy
@@ -312,6 +313,8 @@
 
     @classmethod
     def _is_valid(cls, filename: str, *args, **kwargs) -> bool:
+        if cls._missing_load_requirements():
+            return False
         return filename.endswith((".phdf", ".rhdf"))
 
     @property
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/yt-4.4.0/yt/frontends/ramses/data_structures.py 
new/yt-4.4.1/yt/frontends/ramses/data_structures.py
--- old/yt-4.4.0/yt/frontends/ramses/data_structures.py 2024-11-11 
14:16:34.000000000 +0100
+++ new/yt-4.4.1/yt/frontends/ramses/data_structures.py 2025-07-08 
20:30:02.000000000 +0200
@@ -29,7 +29,7 @@
     ramses_header,
 )
 from .field_handlers import get_field_handlers
-from .fields import _X, RAMSESFieldInfo
+from .fields import RAMSESFieldInfo
 from .hilbert import get_intersecting_cpus
 from .io_utils import fill_hydro, read_amr
 from .particle_handlers import get_particle_handlers
@@ -1003,11 +1003,6 @@
         magnetic_unit = np.sqrt(4 * np.pi * mass_unit / (time_unit**2 * 
length_unit))
         pressure_unit = density_unit * (length_unit / time_unit) ** 2
 
-        # TODO:
-        # Generalize the temperature field to account for ionization
-        # For now assume an atomic ideal gas with cosmic abundances (x_H = 
0.76)
-        mean_molecular_weight_factor = _X**-1
-
         setdefaultattr(self, "density_unit", self.quan(density_unit, 
"g/cm**3"))
         setdefaultattr(self, "magnetic_unit", self.quan(magnetic_unit, 
"gauss"))
         setdefaultattr(self, "pressure_unit", self.quan(pressure_unit, 
"dyne/cm**2"))
@@ -1016,9 +1011,7 @@
         setdefaultattr(
             self, "velocity_unit", self.quan(length_unit, "cm") / 
self.time_unit
         )
-        temperature_unit = (
-            self.velocity_unit**2 * mp * mean_molecular_weight_factor / kb
-        )
+        temperature_unit = self.velocity_unit**2 * mp / kb
         setdefaultattr(self, "temperature_unit", 
temperature_unit.in_units("K"))
 
         # Only the length unit get scales by a factor of boxlen
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/yt-4.4.0/yt/frontends/ramses/field_handlers.py 
new/yt-4.4.1/yt/frontends/ramses/field_handlers.py
--- old/yt-4.4.0/yt/frontends/ramses/field_handlers.py  2024-11-11 
14:16:34.000000000 +0100
+++ new/yt-4.4.1/yt/frontends/ramses/field_handlers.py  2025-07-08 
20:30:02.000000000 +0200
@@ -42,6 +42,7 @@
         self.ds = ds = domain.ds
         self.domain = domain
         self.domain_id = domain.domain_id
+
         basename = os.path.abspath(ds.root_folder)
         iout = 
int(os.path.basename(ds.parameter_filename).split(".")[0].split("_")[1])
 
@@ -166,6 +167,9 @@
             register_field_handler(cls)
 
         cls._unique_registry = {}
+        cls.parameters = {}
+        cls.rt_parameters = {}
+        cls._detected_field_list = {}
         return cls
 
     def __init__(self, domain):
@@ -231,6 +235,10 @@
 
         return self._level_count
 
+    @property
+    def field_list(self):
+        return self._detected_field_list[self.ds.unique_identifier]
+
     @cached_property
     def offset(self):
         """
@@ -242,7 +250,7 @@
         It should be generic enough for most of the cases, but if the
         *structure* of your fluid file is non-canonical, change this.
         """
-        nvars = len(self.field_list)
+        nvars = len(self._detected_field_list[self.ds.unique_identifier])
         with FortranFile(self.fname) as fd:
             # Skip headers
             nskip = len(self.attrs)
@@ -265,7 +273,7 @@
                 fd,
                 min_level,
                 self.domain.domain_id,
-                self.parameters["nvar"],
+                self.parameters[self.ds.unique_identifier]["nvar"],
                 self.domain.amr_header,
                 Nskip=nvars * 8,
             )
@@ -315,7 +323,7 @@
         attrs = cls.attrs
         with FortranFile(fname) as fd:
             hvals = fd.read_attrs(attrs)
-        cls.parameters = hvals
+        cls.parameters[ds.unique_identifier] = hvals
 
         # Store some metadata
         ds.gamma = hvals["gamma"]
@@ -446,7 +454,9 @@
             count_extra += 1
         if count_extra > 0:
             mylog.debug("Detected %s extra fluid fields.", count_extra)
-        cls.field_list = [(cls.ftype, e) for e in fields]
+        cls._detected_field_list[ds.unique_identifier] = [
+            (cls.ftype, e) for e in fields
+        ]
 
         cls.set_detected_fields(ds, fields)
 
@@ -477,9 +487,9 @@
         basedir = os.path.split(ds.parameter_filename)[0]
         fname = os.path.join(basedir, cls.fname.format(iout=iout, icpu=1))
         with FortranFile(fname) as fd:
-            cls.parameters = fd.read_attrs(cls.attrs)
+            cls.parameters[ds.unique_identifier] = fd.read_attrs(cls.attrs)
 
-        nvar = cls.parameters["nvar"]
+        nvar = cls.parameters[ds.unique_identifier]["nvar"]
         ndim = ds.dimensionality
 
         fields = cls.load_fields_from_yt_config()
@@ -498,7 +508,9 @@
                 for i in range(nvar - ndetected):
                     fields.append(f"var{i}")
 
-        cls.field_list = [(cls.ftype, e) for e in fields]
+        cls._detected_field_list[ds.unique_identifier] = [
+            (cls.ftype, e) for e in fields
+        ]
 
         cls.set_detected_fields(ds, fields)
 
@@ -573,7 +585,7 @@
             # Touchy part, we have to read the photon group properties
             mylog.debug("Not reading photon group properties")
 
-            cls.rt_parameters = rheader
+            cls.rt_parameters[ds.unique_identifier] = rheader
 
         ngroups = rheader["nGroups"]
 
@@ -582,7 +594,7 @@
         fname = os.path.join(basedir, cls.fname.format(iout=iout, icpu=1))
         fname_desc = os.path.join(basedir, cls.file_descriptor)
         with FortranFile(fname) as fd:
-            cls.parameters = fd.read_attrs(cls.attrs)
+            cls.parameters[ds.unique_identifier] = fd.read_attrs(cls.attrs)
 
         ok = False
 
@@ -616,16 +628,18 @@
             for ng in range(ngroups):
                 fields.extend([t % (ng + 1) for t in tmp])
 
-        cls.field_list = [(cls.ftype, e) for e in fields]
+        cls._detected_field_list[ds.unique_identifier] = [
+            (cls.ftype, e) for e in fields
+        ]
 
         cls.set_detected_fields(ds, fields)
         return fields
 
     @classmethod
     def get_rt_parameters(cls, ds):
-        if cls.rt_parameters:
-            return cls.rt_parameters
+        if cls.rt_parameters[ds.unique_identifier]:
+            return cls.rt_parameters[ds.unique_identifier]
 
         # Call detect fields to get the rt_parameters
         cls.detect_fields(ds)
-        return cls.rt_parameters
+        return cls.rt_parameters[ds.unique_identifier]
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/yt-4.4.0/yt/frontends/ramses/tests/test_outputs.py 
new/yt-4.4.1/yt/frontends/ramses/tests/test_outputs.py
--- old/yt-4.4.0/yt/frontends/ramses/tests/test_outputs.py      2024-11-11 
14:16:34.000000000 +0100
+++ new/yt-4.4.1/yt/frontends/ramses/tests/test_outputs.py      2025-07-08 
20:30:02.000000000 +0200
@@ -390,8 +390,8 @@
     fields_1 = set(DETECTED_FIELDS[ds1.unique_identifier]["ramses"])
 
     # Check the right number of variables has been loaded
-    assert P1["nvar"] == 10
-    assert len(fields_1) == P1["nvar"]
+    assert P1[ds1.unique_identifier]["nvar"] == 10
+    assert len(fields_1) == P1[ds1.unique_identifier]["nvar"]
 
     # Now load another dataset
     ds2 = yt.load(output_00080)
@@ -400,8 +400,8 @@
     fields_2 = set(DETECTED_FIELDS[ds2.unique_identifier]["ramses"])
 
     # Check the right number of variables has been loaded
-    assert P2["nvar"] == 6
-    assert len(fields_2) == P2["nvar"]
+    assert P2[ds2.unique_identifier]["nvar"] == 6
+    assert len(fields_2) == P2[ds2.unique_identifier]["nvar"]
 
 
 @requires_file(ramses_new_format)
@@ -794,3 +794,21 @@
 
     # Also make sure the difference is large for some cells
     assert (np.abs(diff) > 0.1).any()
+
+
+@requires_file(output_00080)
+@requires_file(ramses_mhd_128)
+def test_order_does_not_matter():
+    for order in (1, 2):
+        ds0 = yt.load(output_00080)
+        ds1 = yt.load(ramses_mhd_128)
+
+        # This should not raise any exception
+        if order == 1:
+            _sp1 = ds1.all_data()
+            sp0 = ds0.all_data()
+        else:
+            sp0 = ds0.all_data()
+            _sp1 = ds1.all_data()
+
+        sp0["gas", "velocity_x"].max().to("km/s")
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/yt-4.4.0/yt/frontends/stream/tests/test_stream_stretched.py 
new/yt-4.4.1/yt/frontends/stream/tests/test_stream_stretched.py
--- old/yt-4.4.0/yt/frontends/stream/tests/test_stream_stretched.py     
2024-11-11 14:16:34.000000000 +0100
+++ new/yt-4.4.1/yt/frontends/stream/tests/test_stream_stretched.py     
2025-07-08 20:30:02.000000000 +0200
@@ -60,7 +60,10 @@
 def data_cell_widths_N16():
     np.random.seed(0x4D3D3D3)
     N = 16
-    data = {"density": np.random.random((N, N, N))}
+    data = {
+        "density": np.random.random((N, N, N)),
+        "temperature": np.random.random((N, N, N)),
+    }
 
     cell_widths = []
     for _ in range(3):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/yt-4.4.0/yt/loaders.py new/yt-4.4.1/yt/loaders.py
--- old/yt-4.4.0/yt/loaders.py  2024-11-11 14:16:34.000000000 +0100
+++ new/yt-4.4.1/yt/loaders.py  2025-07-08 20:30:02.000000000 +0200
@@ -369,9 +369,9 @@
                 bbox,
                 cell_widths=cell_widths,
             )
-            cell_widths = grid_cell_widths
             grid_dimensions = np.array(list(shapes), dtype="int32")
             temp[key] = [data[key][slice] for slice in slices]
+        cell_widths = grid_cell_widths
 
         for gid in range(nprocs):
             new_data[gid] = {}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/yt-4.4.0/yt/sample_data_registry.json 
new/yt-4.4.1/yt/sample_data_registry.json
--- old/yt-4.4.0/yt/sample_data_registry.json   2024-11-11 14:16:34.000000000 
+0100
+++ new/yt-4.4.1/yt/sample_data_registry.json   2025-07-08 20:30:02.000000000 
+0200
@@ -646,6 +646,12 @@
     "load_name": null,
     "url": 
"https://yt-project.org/data/castro_sedov_2d_cyl_in_cart_plt00150.tar.gz";
   },
+  "castro_sedov_2d_sph_in_cyl_plt00130.tar.gz": {
+    "hash": "ef4d081a2a2f8e10afe132768725c573631b82021e91f07782f1c1fbe043e2b5",
+    "load_kwargs": {},
+    "load_name": null,
+    "url": 
"https://yt-project.org/data/castro_sedov_2d_sph_in_cyl_plt00130.tar.gz";
+  },
   "castro_sod_x_plt00036.tar.gz": {
     "hash": "3f0a586b41e7b54fa2b3cddd50f9384feb2efe1fe1a815e7348965ae7bf88f78",
     "load_kwargs": {},
@@ -946,6 +952,12 @@
     "load_name": "DD0045/DD0045.0.h5",
     "url": "https://yt-project.org/data/tiny_fof_halos.tar.gz";
   },
+  "xrb_spherical_smallplt00010.tar.gz": {
+    "hash": "27ede5ed03f7c89b2afac03a368beb56d5f25f0c7c95b81f14f071a54a795783",
+    "load_kwargs": {},
+    "load_name": null,
+    "url": "https://yt-project.org/data/xrb_spherical_smallplt00010.tar.gz";
+  },
   "ytdata_test.tar.gz": {
     "hash": "cafb2b06ab3190ba17909585b58a4724e25f27ac72f11d6dff1a482146eb8958",
     "load_kwargs": {},
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/yt-4.4.0/yt/testing.py new/yt-4.4.1/yt/testing.py
--- old/yt-4.4.0/yt/testing.py  2024-11-11 14:16:34.000000000 +0100
+++ new/yt-4.4.1/yt/testing.py  2025-07-08 20:30:02.000000000 +0200
@@ -10,11 +10,12 @@
 from functools import wraps
 from importlib.util import find_spec
 from shutil import which
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, TypeVar
 from unittest import SkipTest
 
 import matplotlib
 import numpy as np
+import numpy.typing as npt
 from more_itertools import always_iterable
 from numpy.random import RandomState
 from unyt.exceptions import UnitOperationError
@@ -91,8 +92,8 @@
 
 # tested: volume integral is 1.
 def cubicspline_python(
-    x: float | np.ndarray,
-) -> np.ndarray:
+    x: float | npt.NDArray[np.floating],
+) -> npt.NDArray[np.floating]:
     """
     cubic spline SPH kernel function for testing against more
     effiecient cython methods
@@ -118,8 +119,12 @@
 
 
 def integrate_kernel(
-    kernelfunc: Callable[[float], float], b: float, hsml: float
-) -> float:
+    kernelfunc: Callable[
+        [float | npt.NDArray[np.floating]], float | npt.NDArray[np.floating]
+    ],
+    b: float | npt.NDArray[np.floating],
+    hsml: float | npt.NDArray[np.floating],
+) -> npt.NDArray[np.floating]:
     """
     integrates a kernel function over a line passing entirely
     through it
@@ -147,18 +152,21 @@
     dx = np.diff(xe, axis=0)
     spv = kernelfunc(np.sqrt(xc**2 + x**2))
     integral = np.sum(spv * dx, axis=0)
-    return pre * integral
+    return np.atleast_1d(pre * integral)
 
 
 _zeroperiods = np.array([0.0, 0.0, 0.0])
 
 
+_FloatingT = TypeVar("_FloatingT", bound=np.floating)
+
+
 def distancematrix(
-    pos3_i0: np.ndarray,
-    pos3_i1: np.ndarray,
+    pos3_i0: npt.NDArray[_FloatingT],
+    pos3_i1: npt.NDArray[_FloatingT],
     periodic: tuple[bool, bool, bool] = (True,) * 3,
-    periods: np.ndarray = _zeroperiods,
-) -> np.ndarray:
+    periods: npt.NDArray[_FloatingT] = _zeroperiods,
+) -> npt.NDArray[_FloatingT]:
     """
     Calculates the distances between two arrays of points.
 
@@ -1237,7 +1245,15 @@
     def dec(func):
         @wraps(func)
         def wrapper(*args, **kwargs):
-            raise SkipTest(reason)
+            if os.getenv("PYTEST_VERSION") is not None:
+                # this is the recommended way to detect a pytest session
+                # 
https://docs.pytest.org/en/stable/reference/reference.html#envvar-PYTEST_VERSION
+                import pytest
+
+                pytest.skip(reason)
+            else:
+                # running from nose, or unittest
+                raise SkipTest(reason)
 
         return wrapper
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/yt-4.4.0/yt/visualization/base_plot_types.py 
new/yt-4.4.1/yt/visualization/base_plot_types.py
--- old/yt-4.4.0/yt/visualization/base_plot_types.py    2024-11-11 
14:16:34.000000000 +0100
+++ new/yt-4.4.1/yt/visualization/base_plot_types.py    2025-07-08 
20:30:02.000000000 +0200
@@ -45,23 +45,23 @@
 
 
 BACKEND_SPECS = {
-    "GTK": ["backend_gtk", "FigureCanvasGTK", "FigureManagerGTK"],
-    "GTKAgg": ["backend_gtkagg", "FigureCanvasGTKAgg", None],
-    "GTKCairo": ["backend_gtkcairo", "FigureCanvasGTKCairo", None],
-    "MacOSX": ["backend_macosx", "FigureCanvasMac", "FigureManagerMac"],
-    "Qt5Agg": ["backend_qt5agg", "FigureCanvasQTAgg", None],
-    "QtAgg": ["backend_qtagg", "FigureCanvasQTAgg", None],
-    "TkAgg": ["backend_tkagg", "FigureCanvasTkAgg", None],
-    "WX": ["backend_wx", "FigureCanvasWx", None],
-    "WXAgg": ["backend_wxagg", "FigureCanvasWxAgg", None],
-    "GTK3Cairo": [
+    "gtk": ["backend_gtk", "FigureCanvasGTK", "FigureManagerGTK"],
+    "gtkagg": ["backend_gtkagg", "FigureCanvasGTKAgg", None],
+    "gtkcairo": ["backend_gtkcairo", "FigureCanvasGTKCairo", None],
+    "macosx": ["backend_macosx", "FigureCanvasMac", "FigureManagerMac"],
+    "qt5agg": ["backend_qt5agg", "FigureCanvasQTAgg", None],
+    "qtagg": ["backend_qtagg", "FigureCanvasQTAgg", None],
+    "tkagg": ["backend_tkagg", "FigureCanvasTkAgg", None],
+    "wx": ["backend_wx", "FigureCanvasWx", None],
+    "wxagg": ["backend_wxagg", "FigureCanvasWxAgg", None],
+    "gtk3cairo": [
         "backend_gtk3cairo",
         "FigureCanvasGTK3Cairo",
         "FigureManagerGTK3Cairo",
     ],
-    "GTK3Agg": ["backend_gtk3agg", "FigureCanvasGTK3Agg", 
"FigureManagerGTK3Agg"],
-    "WebAgg": ["backend_webagg", "FigureCanvasWebAgg", None],
-    "nbAgg": ["backend_nbagg", "FigureCanvasNbAgg", "FigureManagerNbAgg"],
+    "gtk3agg": ["backend_gtk3agg", "FigureCanvasGTK3Agg", 
"FigureManagerGTK3Agg"],
+    "webagg": ["backend_webagg", "FigureCanvasWebAgg", None],
+    "nbagg": ["backend_nbagg", "FigureCanvasNbAgg", "FigureManagerNbAgg"],
     "agg": ["backend_agg", "FigureCanvasAgg", None],
 }
 
@@ -151,7 +151,7 @@
 
     def _get_canvas_classes(self):
         if self.interactivity:
-            key = str(matplotlib.get_backend())
+            key = str(matplotlib.get_backend()).lower()
         else:
             key = "agg"
 
@@ -326,6 +326,7 @@
             aspect=aspect,
             cmap=self.colorbar_handler.cmap,
             interpolation="nearest",
+            interpolation_stage="data",
             transform=transform,
             alpha=alpha,
         )
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/yt-4.4.0/yt/visualization/fixed_resolution.py 
new/yt-4.4.1/yt/visualization/fixed_resolution.py
--- old/yt-4.4.0/yt/visualization/fixed_resolution.py   2024-11-11 
14:16:34.000000000 +0100
+++ new/yt-4.4.1/yt/visualization/fixed_resolution.py   2025-07-08 
20:30:02.000000000 +0200
@@ -639,7 +639,6 @@
                 self.bounds[5] - self.bounds[4],
             )
         )
-        depth = dd.depth[0] if dd.depth is not None else None
         buff = off_axis_projection(
             dd.dd,
             dd.center,
@@ -652,7 +651,7 @@
             no_ghost=dd.no_ghost,
             interpolated=dd.interpolated,
             north_vector=dd.north_vector,
-            depth=depth,
+            depth=dd.depth,
             method=dd.method,
         )
         if self.data_source.moment == 2:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/yt-4.4.0/yt/visualization/plot_modifications.py 
new/yt-4.4.1/yt/visualization/plot_modifications.py
--- old/yt-4.4.0/yt/visualization/plot_modifications.py 2024-11-11 
14:16:34.000000000 +0100
+++ new/yt-4.4.1/yt/visualization/plot_modifications.py 2025-07-08 
20:30:02.000000000 +0200
@@ -995,8 +995,8 @@
 
         if plot._type_name in ["CuttingPlane", "Projection", "Slice"]:
             if plot._type_name == "CuttingPlane":
-                x = data["px"] * dx
-                y = data["py"] * dy
+                x = (data["px"] * dx).to("1")
+                y = (data["py"] * dy).to("1")
                 z = data[self.field]
             elif plot._type_name in ["Projection", "Slice"]:
                 # Makes a copy of the position fields "px" and "py" and adds 
the
@@ -1024,8 +1024,10 @@
                 wI = AllX & AllY
 
                 # This converts XShifted and YShifted into plot coordinates
-                x = ((XShifted[wI] - x0) * dx).ndarray_view() + xx0
-                y = ((YShifted[wI] - y0) * dy).ndarray_view() + yy0
+                # Note: we force conversion into "1" to prevent issues in case
+                # one of the length has some dimensionless factor (Mpc/h)
+                x = ((XShifted[wI] - x0) * dx).to("1").ndarray_view() + xx0
+                y = ((YShifted[wI] - y0) * dy).to("1").ndarray_view() + yy0
                 z = data[self.field][wI]
 
             # Both the input and output from the triangulator are in plot
@@ -1130,8 +1132,8 @@
 
         x0, x1, y0, y1 = self._physical_bounds(plot)
         xx0, xx1, yy0, yy1 = self._plot_bounds(plot)
-        (dx, dy) = self._pixel_scale(plot)
-        (ypix, xpix) = plot.raw_image_shape
+        dx, dy = self._pixel_scale(plot)
+        ypix, xpix = plot.raw_image_shape
         ax = plot.data.axis
         px_index = plot.data.ds.coordinates.x_axis[ax]
         py_index = plot.data.ds.coordinates.y_axis[ax]
@@ -1165,10 +1167,17 @@
         for px_off, py_off in zip(pxs.ravel(), pys.ravel(), strict=True):
             pxo = px_off * DW[px_index]
             pyo = py_off * DW[py_index]
-            left_edge_x = np.array((GLE[:, px_index] + pxo - x0) * dx) + xx0
-            left_edge_y = np.array((GLE[:, py_index] + pyo - y0) * dy) + yy0
-            right_edge_x = np.array((GRE[:, px_index] + pxo - x0) * dx) + xx0
-            right_edge_y = np.array((GRE[:, py_index] + pyo - y0) * dy) + yy0
+            # Note: [dx] = 1/length, [GLE] = length
+            # we force conversion into "1" to prevent issues if e.g. GLE is in 
Mpc/h
+            # where dx * GLE would have units 1/h rather than being truly 
dimensionless
+            left_edge_x = np.array((((GLE[:, px_index] + pxo - x0) * dx) + 
xx0).to("1"))
+            left_edge_y = np.array((((GLE[:, py_index] + pyo - y0) * dy) + 
yy0).to("1"))
+            right_edge_x = np.array(
+                (((GRE[:, px_index] + pxo - x0) * dx) + xx0).to("1")
+            )
+            right_edge_y = np.array(
+                (((GRE[:, py_index] + pyo - y0) * dy) + yy0).to("1")
+            )
             xwidth = xpix * (right_edge_x - left_edge_x) / (xx1 - xx0)
             ywidth = ypix * (right_edge_y - left_edge_y) / (yy1 - yy0)
             visible = np.logical_and(
@@ -2071,12 +2080,20 @@
                 units = "code_length"
             self.radius = self.radius.to(units)
 
+        if not hasattr(self.radius, "units"):
+            self.radius = plot.data.ds.quan(self.radius, "code_length")
+
+        if not hasattr(self.center, "units"):
+            self.center = plot.data.ds.arr(self.center, "code_length")
+
         # This assures the radius has the appropriate size in
         # the different coordinate systems, since one cannot simply
         # apply a different transform for a length in the same way
         # you can for a coordinate.
         if self.coord_system == "data" or self.coord_system == "plot":
-            scaled_radius = self.radius * self._pixel_scale(plot)[0]
+            # Note: we force conversion into "1" to prevent issues in case
+            # one of the length has some dimensionless factor (Mpc/h)
+            scaled_radius = (self.radius * self._pixel_scale(plot)[0]).to("1")
         else:
             scaled_radius = self.radius / (plot.xlim[1] - plot.xlim[0])
 
@@ -3390,6 +3407,12 @@
         extent = self._plot_bounds(plot)
         if plot._swap_axes:
             im_buffer = im_buffer.transpose((1, 0, 2))
+            # note: when using imshow, the extent keyword argument has to be 
the
+            # swapped extents, so the extent is swapped here (rather than
+            # calling self._set_plot_limits).
+            # https://github.com/yt-project/yt/issues/5094
+            extent = _swap_axes_extents(extent)
+
         plot._axes.imshow(
             im_buffer,
             origin="lower",
@@ -3397,4 +3420,3 @@
             extent=extent,
             alpha=self.alpha,
         )
-        self._set_plot_limits(plot, extent)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/yt-4.4.0/yt/visualization/plot_window.py 
new/yt-4.4.1/yt/visualization/plot_window.py
--- old/yt-4.4.0/yt/visualization/plot_window.py        2024-11-11 
14:16:34.000000000 +0100
+++ new/yt-4.4.1/yt/visualization/plot_window.py        2025-07-08 
20:30:02.000000000 +0200
@@ -26,6 +26,7 @@
     validate_moment,
 )
 from yt.geometry.api import Geometry
+from yt.geometry.oct_geometry_handler import OctreeIndex
 from yt.units.unit_object import Unit  # type: ignore
 from yt.units.unit_registry import UnitParseError  # type: ignore
 from yt.units.yt_array import YTArray, YTQuantity
@@ -82,7 +83,11 @@
 
 
 def get_oblique_window_parameters(
-    normal, center, width, ds, depth=None, get3bounds=False
+    normal,
+    center,
+    width,
+    ds,
+    depth=None,
 ):
     center, display_center = ds.coordinates.sanitize_center(center, axis=None)
     width = ds.coordinates.sanitize_width(normal, width, depth)
@@ -100,15 +105,7 @@
 
     w = tuple(el.in_units("code_length") for el in width)
     bounds = tuple(((2 * (i % 2)) - 1) * w[i // 2] / 2 for i in range(len(w) * 
2))
-    if get3bounds and depth is None:
-        # off-axis projection, depth not specified
-        # -> set 'large enough' depth using half the box diagonal + margin
-        d2 = ds.domain_width[0].in_units("code_length") ** 2
-        d2 += ds.domain_width[1].in_units("code_length") ** 2
-        d2 += ds.domain_width[2].in_units("code_length") ** 2
-        diag = np.sqrt(d2)
-        bounds = bounds + (-0.51 * diag, 0.51 * diag)
-    return (bounds, center)
+    return bounds, center
 
 
 def get_axes_unit(width, ds):
@@ -2387,7 +2384,8 @@
     depth : A tuple or a float
         A tuple containing the depth to project through and the string
         key of the unit: (width, 'unit'). If set to a float, code units
-        are assumed
+        are assumed. In not set, then a depth equal to the diagonal of
+        the domain width plus a small margin will be used.
     weight_field : string
         The name of the weighting field.  Set to None for no weight.
     max_level: int
@@ -2464,6 +2462,13 @@
                 "currently supported geometries:"
                 f" {self._supported_geometries!r}"
             )
+
+        if depth is None:
+            # off-axis projection, depth not specified
+            # -> set 'large enough' depth using half the box diagonal + margin
+            depth = np.linalg.norm(ds.domain_width.in_units("code_length")) * 
1.0001
+        depth = ds.coordinates.sanitize_depth(depth)[0]
+
         # center_rot normalizes the center to (0,0),
         # units match bounds
         # for SPH data, we want to input the original center
@@ -2477,7 +2482,6 @@
             width,
             ds,
             depth=depth,
-            get3bounds=True,
         )
         # will probably fail if you try to project an SPH and non-SPH
         # field in a single call
@@ -2493,7 +2497,12 @@
         is_sph_field = finfo.is_sph_field
         particle_datasets = (ParticleDataset, StreamParticlesDataset)
 
-        if isinstance(data_source.ds, particle_datasets) and is_sph_field:
+        dom_width = data_source.ds.domain_width
+        cubic_domain = dom_width.max() == dom_width.min()
+
+        if (isinstance(data_source.ds, particle_datasets) and is_sph_field) or 
(
+            isinstance(data_source.ds.index, OctreeIndex) and cubic_domain
+        ):
             center_use = parse_center_array(center, ds=data_source.ds, 
axis=None)
         else:
             center_use = center_rot
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/yt-4.4.0/yt/visualization/profile_plotter.py 
new/yt-4.4.1/yt/visualization/profile_plotter.py
--- old/yt-4.4.0/yt/visualization/profile_plotter.py    2024-11-11 
14:16:34.000000000 +0100
+++ new/yt-4.4.1/yt/visualization/profile_plotter.py    2025-07-08 
20:30:02.000000000 +0200
@@ -1319,8 +1319,6 @@
 
         """
         for f in self._profile.field_data:
-            if isinstance(f, tuple):
-                f = f[1]
             self.plot_title[self.data_source._determine_fields(f)[0]] = title
         return self
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/yt-4.4.0/yt/visualization/tests/test_image_comp_2D_plots.py 
new/yt-4.4.1/yt/visualization/tests/test_image_comp_2D_plots.py
--- old/yt-4.4.0/yt/visualization/tests/test_image_comp_2D_plots.py     
2024-11-11 14:16:34.000000000 +0100
+++ new/yt-4.4.1/yt/visualization/tests/test_image_comp_2D_plots.py     
2025-07-08 20:30:02.000000000 +0200
@@ -41,6 +41,37 @@
     npt.assert_allclose(im0, im1)
 
 
+def test_annotation_parse_h():
+    ds = fake_random_ds(16)
+
+    # Make sure `h` (reduced Hubble constant) is not equal to 1
+    ds.unit_registry.modify("h", 0.7)
+
+    rad = ds.quan(0.1, "cm/h")
+    center = ds.arr([0.5] * 3, "code_length")
+
+    # Twice the same slice plot
+    p1 = SlicePlot(ds, "x", "density")
+    p2 = SlicePlot(ds, "x", "density")
+
+    # But the *same* center is given in different units
+    p1.annotate_sphere(center.to("cm"), rad, circle_args={"color": "black"})
+    p2.annotate_sphere(center.to("cm/h"), rad, circle_args={"color": "black"})
+
+    # Render annotations, and extract matplotlib image
+    # as an RGB array
+    p1.render()
+    p1.plots["gas", "density"].figure.canvas.draw()
+    img1 = p1.plots["gas", "density"].figure.canvas.renderer.buffer_rgba()
+
+    p2.render()
+    p2.plots["gas", "density"].figure.canvas.draw()
+    img2 = p2.plots["gas", "density"].figure.canvas.renderer.buffer_rgba()
+
+    # This should be the same image
+    npt.assert_allclose(img1, img2)
+
+
 @pytest.mark.mpl_image_compare
 def test_inf_and_finite_values_set_zlim():
     # see https://github.com/yt-project/yt/issues/3901
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/yt-4.4.0/yt/visualization/tests/test_offaxisprojection.py 
new/yt-4.4.1/yt/visualization/tests/test_offaxisprojection.py
--- old/yt-4.4.0/yt/visualization/tests/test_offaxisprojection.py       
2024-11-11 14:16:34.000000000 +0100
+++ new/yt-4.4.1/yt/visualization/tests/test_offaxisprojection.py       
2025-07-08 20:30:02.000000000 +0200
@@ -16,6 +16,7 @@
 from yt.visualization.api import (
     OffAxisProjectionPlot,
     OffAxisSlicePlot,
+    ProjectionPlot,
 )
 from yt.visualization.image_writer import write_projection
 from yt.visualization.volume_rendering.api import off_axis_projection
@@ -210,6 +211,43 @@
     assert_equal(np.nanmin(p4rho[p4rho > 0.0]) >= 0.5, True)
 
 
+def test_off_axis_octree():
+    np.random.seed(12345)
+    ds = fake_octree_ds()
+    center = [0.4, 0.4, 0.4]
+
+    for weight in [("gas", "cell_mass"), None, ("index", "dx")]:
+        p1 = ProjectionPlot(
+            ds,
+            "x",
+            ("gas", "density"),
+            center=center,
+            width=0.8,
+            weight_field=weight,
+        )
+        p2 = OffAxisProjectionPlot(
+            ds,
+            [1, 0, 0],
+            ("gas", "density"),
+            center=center,
+            width=0.8,
+            weight_field=weight,
+        )
+
+        # Note: due to our implementation, the off-axis projection will have a
+        # slightly blurred cell edges so we can't do an exact comparison
+        v1, v2 = p1.frb["gas", "density"], p2.frb["gas", "density"]
+        diff = (v1 - v2) / (v1 + v2) * 2
+
+        # Make sure the difference has a small bias
+        assert np.mean(diff).max() < 1e-3  # 0.1%
+
+        # Compute 10-90% percentile
+        q10, q90 = np.percentile(diff, q=(10, 90))
+        assert q10 > -0.02  # 2%: little up/down deviations
+        assert q90 < +0.02  # 2%: little up/down deviations
+
+
 def test_offaxis_moment():
     ds = fake_random_ds(64)
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/yt-4.4.0/yt/visualization/tests/test_offaxisprojection_pytestonly.py 
new/yt-4.4.1/yt/visualization/tests/test_offaxisprojection_pytestonly.py
--- old/yt-4.4.0/yt/visualization/tests/test_offaxisprojection_pytestonly.py    
2024-11-11 14:16:34.000000000 +0100
+++ new/yt-4.4.1/yt/visualization/tests/test_offaxisprojection_pytestonly.py    
2025-07-08 20:30:02.000000000 +0200
@@ -1,14 +1,18 @@
 import numpy as np
 import pytest
 import unyt
+from numpy.testing import assert_allclose
 
 from yt.testing import (
     assert_rel_equal,
     cubicspline_python,
+    fake_amr_ds,
     fake_sph_flexible_grid_ds,
+    fake_sph_grid_ds,
     integrate_kernel,
+    requires_module_pytest,
 )
-from yt.visualization.api import ProjectionPlot
+from yt.visualization.api import OffAxisProjectionPlot, ProjectionPlot
 
 
 @pytest.mark.parametrize("weighted", [True, False])
@@ -165,3 +169,72 @@
     # print("expected:\n", expected_out)
     # print("recovered:\n", img.v)
     assert_rel_equal(expected_out, img.v, 4)
+
+
+_diag_dist = np.sqrt(3.0)  # diagonal distance of a cube with length 1.
+# each case is depth, center, expected integrated distance
+_cases_to_test = [
+    (_diag_dist / 3.0, "domain_left_edge", _diag_dist / 3.0 / 2.0),
+    (_diag_dist * 2.0, "domain_left_edge", _diag_dist),
+    (_diag_dist * 4.0, "domain_left_edge", _diag_dist),
+    (None, "domain_center", _diag_dist),
+]
+
+
+@pytest.mark.parametrize("depth,proj_center,expected", _cases_to_test)
+def test_offaxisprojection_depth(depth, proj_center, expected):
+    # this checks that the depth keyword argument works as expected.
+    # in all cases, it integrates the (index, ones) field for a normal
+    # pointing to the right edge corner of the domain.
+    #
+    # For the tests where the projection is centered on the left edge,
+    # the integrate distance will scale as depth / 2.0. When centered
+    # on the origin, it will scale with depth. The integrated distance
+    # should max out at the diagonal distance of the cube (when the depth
+    # exceeds the cube diagonal distance).
+    #
+    # Also note that the accuracy will depend on the buffer dimensions:
+    # using the default (800,800) results in accuracy of about 1 percent
+
+    ds = fake_amr_ds()
+
+    n = [1.0, 1.0, 1.0]
+    c = getattr(ds, proj_center)
+    field = ("index", "ones")
+
+    p = ProjectionPlot(ds, n, field, depth=depth, weight_field=None, center=c)
+
+    maxval = p.frb[field].max().d
+    assert_allclose(expected, maxval, atol=1e-2)
+
+
+_sph_test_cases = [
+    ([1.0, 1.0, 0.7], 27),
+    ([1.0, 1.0, 1], 19),
+    ([0.0, 0.0, 1], 9),
+]
+
+
+@requires_module_pytest("contourpy")
+@pytest.mark.parametrize("normal_vec, n_particles", _sph_test_cases)
+def test_offaxisprojection_sph_defaultdepth(normal_vec, n_particles):
+    # checks that particles are picked up as expected for a range of
+    # depths and normal vectors. Certain viewing angles will result
+    # in overlapping particles (since fake_sph_grid_ds aligns particles
+    # on a grid): n_particles is the expected number of circles in the
+    # resulting image for the given normal vector. Circle counts are
+    # calculated here using a contour generator.
+    from contourpy import contour_generator
+
+    ds = fake_sph_grid_ds()
+    c = ds.domain_center
+    diag_dist = np.linalg.norm(ds.domain_width)
+    field = ("gas", "mass")
+    p = OffAxisProjectionPlot(
+        ds, normal_vec, field, weight_field=None, center=c, width=diag_dist
+    )
+    p.render()
+
+    # get the number of circles in the plot
+    cg = contour_generator(z=p.frb[("gas", "mass")].d)
+    assert n_particles == len(cg.lines(1.0))
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/yt-4.4.0/yt/visualization/tests/test_profile_plots.py 
new/yt-4.4.1/yt/visualization/tests/test_profile_plots.py
--- old/yt-4.4.0/yt/visualization/tests/test_profile_plots.py   2024-11-11 
14:16:34.000000000 +0100
+++ new/yt-4.4.1/yt/visualization/tests/test_profile_plots.py   2025-07-08 
20:30:02.000000000 +0200
@@ -3,10 +3,11 @@
 import tempfile
 import unittest
 
+import numpy as np
 import pytest
 
 import yt
-from yt.testing import assert_allclose_units, fake_random_ds
+from yt.testing import assert_allclose_units, fake_random_ds, 
fake_random_sph_ds
 from yt.visualization.api import PhasePlot
 
 
@@ -66,6 +67,49 @@
         return p.plots["gas", "mass"].figure
 
 
+class TestPhasePlotParticleAPI:
+    @classmethod
+    def setup_class(cls):
+        bbox = np.array([[-1.0, 3.0], [1.0, 5.2], [-1.0, 3.0]])
+        cls.ds = fake_random_sph_ds(50, bbox)
+
+    def get_plot(self):
+        return PhasePlot(
+            self.ds, ("gas", "density"), ("gas", "density"), ("gas", "mass")
+        )
+
+    @pytest.mark.parametrize("kwargs", [{}, {"color": "b"}])
+    def test_phaseplot_annotate_text(self, kwargs):
+        p = self.get_plot()
+        p.annotate_text(1e-4, 1e-2, "Test text annotation", **kwargs)
+        p.render()
+
+    def test_phaseplot_set_title(self):
+        p = self.get_plot()
+        p.annotate_title("Test Title")
+        p.render()
+
+    def test_phaseplot_set_log(self):
+        p = self.get_plot()
+        p.set_log(("gas", "mass"), False)
+        p.render()
+
+    def test_phaseplot_set_unit(self):
+        p = self.get_plot()
+        p.set_unit(("gas", "mass"), "Msun")
+        p.render()
+
+    def test_phaseplot_set_xlim(self):
+        p = self.get_plot()
+        p.set_xlim(1e-3, 1e0)
+        p.render()
+
+    def test_phaseplot_set_ylim(self):
+        p = self.get_plot()
+        p.set_ylim(1e-2, 1e0)
+        p.render()
+
+
 def test_set_units():
     fields = ("density", "temperature")
     units = (
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/yt-4.4.0/yt/visualization/volume_rendering/off_axis_projection.py 
new/yt-4.4.1/yt/visualization/volume_rendering/off_axis_projection.py
--- old/yt-4.4.0/yt/visualization/volume_rendering/off_axis_projection.py       
2024-11-11 14:16:34.000000000 +0100
+++ new/yt-4.4.1/yt/visualization/volume_rendering/off_axis_projection.py       
2025-07-08 20:30:02.000000000 +0200
@@ -443,22 +443,26 @@
         data_source.get_data(fields)
         # We need the width of the plot window in projected coordinates,
         # i.e. we ignore the z-component
-        wmax = width[:2].max()
-
-        # Normalize the positions & dx so that they are in the range [-0.5, 
0.5]
-        xyz = np.stack(
-            [
-                ((data_source["index", k] - center[i]) / wmax).to("1").d
-                for i, k in enumerate("xyz")
-            ],
-            axis=-1,
+        wmax = width[:2].max().to("code_length")
+        xyz = data_source.ds.arr(
+            np.zeros((len(data_source[vol.field]), 3)), "code_length"
         )
 
         for idim, periodic in enumerate(data_source.ds.periodicity):
+            axis = data_source.ds.coordinates.axis_order[idim]
+            # Recenter positions w.r.t. center of the plot window
+            xyz[..., idim] = (data_source["index", axis] - center[idim]).to(
+                "code_length"
+            )
             if not periodic:
                 continue
-            # Wrap into [-0.5, +0.5]
-            xyz[..., idim] = (xyz[..., idim] + 0.5) % 1 - 0.5
+            # If we have periodic boundaries, we need to wrap the corresponding
+            # coordinates into [-w/2, +w/2]
+            w = data_source.ds.domain_width[idim].to("code_length")
+            xyz[..., idim] = (xyz[..., idim] + w / 2) % w - w / 2
+
+        # Rescale to [-0.5, +0.5]
+        xyz = (xyz / wmax).to("1").d
 
         dx = (data_source["index", "dx"] / wmax).to("1").d
 
@@ -481,6 +485,10 @@
             Nx=resolution[0],
             Ny=resolution[1],
         )
+        # Note: since dx was divided by wmax, we need to rescale by it
+        projected_weighted_qty *= wmax.d / np.sqrt(3)
+        projected_weight *= wmax.d / np.sqrt(3)
+
         image = ImageArray(
             data_source.ds.arr(
                 np.stack([projected_weighted_qty, projected_weight], axis=-1),
@@ -490,6 +498,7 @@
             registry=data_source.ds.unit_registry,
             info={"imtype": "rendering"},
         )
+
     else:
         for grid, mask in data_source.blocks:
             data = []
@@ -512,12 +521,14 @@
             vol.sampler(pg, num_threads=num_threads)
 
         image = vol.finalize_image(camera, vol.sampler.aimage)
+
         image = ImageArray(
             image, funits, registry=data_source.ds.unit_registry, 
info=image.info
         )
 
-        if weight is not None:
-            data_source.ds.field_info.pop(("index", "temp_weightfield"))
+    # Remove the temporary weight field
+    if weight is not None:
+        data_source.ds.field_info.pop(("index", "temp_weightfield"))
 
     if method == "integrate":
         if weight is None:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/yt-4.4.0/yt.egg-info/PKG-INFO 
new/yt-4.4.1/yt.egg-info/PKG-INFO
--- old/yt-4.4.0/yt.egg-info/PKG-INFO   2024-11-11 14:17:00.000000000 +0100
+++ new/yt-4.4.1/yt.egg-info/PKG-INFO   2025-07-08 20:30:30.000000000 +0200
@@ -1,6 +1,6 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.4
 Name: yt
-Version: 4.4.0
+Version: 4.4.1
 Summary: An analysis and visualization toolkit for volumetric data
 Author-email: The yt project <yt-...@python.org>
 License: BSD 3-Clause
@@ -27,7 +27,7 @@
 Classifier: Topic :: Scientific/Engineering :: Astronomy
 Classifier: Topic :: Scientific/Engineering :: Physics
 Classifier: Topic :: Scientific/Engineering :: Visualization
-Requires-Python: >=3.10.3
+Requires-Python: >=3.10.4
 Description-Content-Type: text/markdown
 License-File: COPYING.txt
 Requires-Dist: cmyt>=1.1.2
@@ -190,6 +190,8 @@
 Requires-Dist: pytest-mpl>=0.16.1; extra == "test"
 Requires-Dist: sympy!=1.10,!=1.9; extra == "test"
 Requires-Dist: imageio!=2.35.0; extra == "test"
+Requires-Dist: contourpy; extra == "test"
+Dynamic: license-file
 
 # The yt Project
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/yt-4.4.0/yt.egg-info/requires.txt 
new/yt-4.4.1/yt.egg-info/requires.txt
--- old/yt-4.4.0/yt.egg-info/requires.txt       2024-11-11 14:17:00.000000000 
+0100
+++ new/yt-4.4.1/yt.egg-info/requires.txt       2025-07-08 20:30:30.000000000 
+0200
@@ -212,6 +212,7 @@
 pytest-mpl>=0.16.1
 sympy!=1.10,!=1.9
 imageio!=2.35.0
+contourpy
 
 [tipsy]
 

Reply via email to