diff --git a/examples/NDSL/01_gt4py_basics.ipynb b/examples/NDSL/01_gt4py_basics.ipynb index 98b43922..15d29139 100644 --- a/examples/NDSL/01_gt4py_basics.ipynb +++ b/examples/NDSL/01_gt4py_basics.ipynb @@ -66,14 +66,14 @@ "shape = (nx, ny, nz)\n", "\n", "qty_out = Quantity(\n", - " data=np.zeros([nx, ny, nz]), dims=[\"I\", \"J\", \"K\"], units=\"m\", gt4py_backend=backend\n", + " data=np.zeros([nx, ny, nz]), dims=[\"I\", \"J\", \"K\"], units=\"m\", backend=backend\n", ")\n", "\n", "arr = np.indices(shape, dtype=float).sum(\n", " axis=0\n", ") # Value of each entry is sum of the I and J index at each point\n", "\n", - "qty_in = Quantity(data=arr, dims=[\"I\", \"J\", \"K\"], units=\"m\", gt4py_backend=backend)" + "qty_in = Quantity(data=arr, dims=[\"I\", \"J\", \"K\"], units=\"m\", backend=backend)" ] }, { @@ -199,7 +199,7 @@ "outputs": [], "source": [ "qty_out = Quantity(\n", - " data=np.zeros([nx, ny, nz]), dims=[\"I\", \"J\", \"K\"], units=\"m\", gt4py_backend=backend\n", + " data=np.zeros([nx, ny, nz]), dims=[\"I\", \"J\", \"K\"], units=\"m\", backend=backend\n", ")\n", "\n", "print(\"Plotting values of qty_in at K = 0\")\n", @@ -212,7 +212,7 @@ "qty_out.plot_k_level(0)\n", "\n", "qty_out = Quantity(\n", - " data=np.zeros([nx, ny, nz]), dims=[\"I\", \"J\", \"K\"], units=\"m\", gt4py_backend=backend\n", + " data=np.zeros([nx, ny, nz]), dims=[\"I\", \"J\", \"K\"], units=\"m\", backend=backend\n", ")\n", "\n", "print(\"Resetting qty_out to zero...\")\n", @@ -224,7 +224,7 @@ "qty_out.plot_k_level(0)\n", "\n", "qty_out = Quantity(\n", - " data=np.zeros([nx, ny, nz]), dims=[\"I\", \"J\", \"K\"], units=\"m\", gt4py_backend=backend\n", + " data=np.zeros([nx, ny, nz]), dims=[\"I\", \"J\", \"K\"], units=\"m\", backend=backend\n", ")\n", "\n", "print(\"Resetting qty_out to zero...\")\n", @@ -238,7 +238,7 @@ "qty_out.plot_k_level(1)\n", "\n", "qty_out = Quantity(\n", - " data=np.zeros([nx, ny, nz]), dims=[\"I\", \"J\", \"K\"], units=\"m\", gt4py_backend=backend\n", + " data=np.zeros([nx, ny, nz]), dims=[\"I\", \"J\", \"K\"], units=\"m\", backend=backend\n", ")\n", "print(\"Resetting qty_out to zero...\")\n", "print(\"Plotting values of qty_in at K = 0\")\n", @@ -251,7 +251,7 @@ "qty_out.plot_k_level(0)\n", "\n", "qty_out = Quantity(\n", - " data=np.zeros([nx, ny, nz]), dims=[\"I\", \"J\", \"K\"], units=\"m\", gt4py_backend=backend\n", + " data=np.zeros([nx, ny, nz]), dims=[\"I\", \"J\", \"K\"], units=\"m\", backend=backend\n", ")\n", "print(\"Resetting qty_out to zero...\")\n", "print(\"Plotting values of qty_out at K = 0\")\n", @@ -294,13 +294,13 @@ "shape = (nx + 2 * nhalo, ny + 2 * nhalo, nz)\n", "\n", "qty_out = Quantity(\n", - " data=np.zeros(shape), dims=[\"I\", \"J\", \"K\"], units=\"m\", gt4py_backend=backend\n", + " data=np.zeros(shape), dims=[\"I\", \"J\", \"K\"], units=\"m\", backend=backend\n", ")\n", "\n", "arr = np.indices(shape, dtype=float).sum(\n", " axis=0\n", ") # Value of each entry is sum of the I and J index at each point\n", - "qty_in = Quantity(data=arr, dims=[\"I\", \"J\", \"K\"], units=\"m\", gt4py_backend=backend)\n", + "qty_in = Quantity(data=arr, dims=[\"I\", \"J\", \"K\"], units=\"m\", backend=backend)\n", "\n", "print(\"Plotting values of qty_in at K = 0\")\n", "qty_in.plot_k_level(0)\n", @@ -344,7 +344,7 @@ "\n", "print(\"Resetting qty_out to zeros\")\n", "qty_out = Quantity(\n", - " data=np.zeros(shape), dims=[\"I\", \"J\", \"K\"], units=\"m\", gt4py_backend=backend\n", + " data=np.zeros(shape), dims=[\"I\", \"J\", \"K\"], units=\"m\", backend=backend\n", ")\n", 
"\n", "print(\"Executing 'copy_downward' with origin=(1, 1, 0), domain=(nx, ny, nz-1)\")\n", @@ -401,13 +401,13 @@ "shape = (nx + 2 * nhalo, ny + 2 * nhalo, nz)\n", "\n", "qty_out = Quantity(\n", - " data=np.zeros(shape), dims=[\"I\", \"J\", \"K\"], units=\"m\", gt4py_backend=backend\n", + " data=np.zeros(shape), dims=[\"I\", \"J\", \"K\"], units=\"m\", backend=backend\n", ")\n", "\n", "arr = np.indices(shape, dtype=float).sum(\n", " axis=0\n", ") # Value of each entry is sum of the I and J index at each point\n", - "qty_in = Quantity(data=arr, dims=[\"I\", \"J\", \"K\"], units=\"m\", gt4py_backend=backend)\n", + "qty_in = Quantity(data=arr, dims=[\"I\", \"J\", \"K\"], units=\"m\", backend=backend)\n", "\n", "\n", "@stencil(backend=backend)\n", @@ -444,13 +444,13 @@ "outputs": [], "source": [ "qty_out = Quantity(\n", - " data=np.zeros(shape), dims=[\"I\", \"J\", \"K\"], units=\"m\", gt4py_backend=backend\n", + " data=np.zeros(shape), dims=[\"I\", \"J\", \"K\"], units=\"m\", backend=backend\n", ")\n", "\n", "arr = np.indices(shape, dtype=float).sum(\n", " axis=0\n", ") # Value of each entry is sum of the I and J index at each point\n", - "qty_in = Quantity(data=arr, dims=[\"I\", \"J\", \"K\"], units=\"m\", gt4py_backend=backend)\n", + "qty_in = Quantity(data=arr, dims=[\"I\", \"J\", \"K\"], units=\"m\", backend=backend)\n", "\n", "print(\"Plotting values of qty_in at K = 0\")\n", "qty_in.plot_k_level(0)\n", @@ -525,13 +525,13 @@ "shape = (nx + 2 * nhalo, ny + 2 * nhalo, nz)\n", "\n", "qty_out = Quantity(\n", - " data=np.zeros(shape), dims=[\"I\", \"J\", \"K\"], units=\"m\", gt4py_backend=backend\n", + " data=np.zeros(shape), dims=[\"I\", \"J\", \"K\"], units=\"m\", backend=backend\n", ")\n", "\n", "arr = np.indices(shape, dtype=float).sum(\n", " axis=0\n", ") # Value of each entry is sum of the I and J index at each point\n", - "qty_in = Quantity(data=arr, dims=[\"I\", \"J\", \"K\"], units=\"m\", gt4py_backend=backend)\n", + "qty_in = Quantity(data=arr, dims=[\"I\", \"J\", \"K\"], units=\"m\", backend=backend)\n", "\n", "print(\"Plotting values of qty_in at K = 0\")\n", "qty_in.plot_k_level(0)\n", @@ -546,7 +546,7 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3 (ipykernel)", + "display_name": ".venv", "language": "python", "name": "python3" }, diff --git a/examples/NDSL/02_NDSL_basics.ipynb b/examples/NDSL/02_NDSL_basics.ipynb index b9d53dca..de59b985 100644 --- a/examples/NDSL/02_NDSL_basics.ipynb +++ b/examples/NDSL/02_NDSL_basics.ipynb @@ -133,7 +133,7 @@ "shape = (nx + 2 * nhalo, ny + 2 * nhalo, nz)\n", "\n", "qty_out = Quantity(\n", - " data=np.zeros(shape), dims=[\"I\", \"J\", \"K\"], units=\"m\", gt4py_backend=backend\n", + " data=np.zeros(shape), dims=[\"I\", \"J\", \"K\"], units=\"m\", backend=backend\n", ")\n", "\n", "\n", @@ -141,7 +141,7 @@ " axis=0\n", ") # Value of each entry is sum of the I and J index at each point\n", "\n", - "qty_in = Quantity(data=arr, dims=[\"I\", \"J\", \"K\"], units=\"m\", gt4py_backend=backend)\n", + "qty_in = Quantity(data=arr, dims=[\"I\", \"J\", \"K\"], units=\"m\", backend=backend)\n", "\n", "print(\"Plotting qty_in at K = 0\")\n", "qty_in.plot_k_level(0)\n", @@ -224,7 +224,7 @@ "copy_field_offset = CopyFieldOffset(stencil_factory)\n", "\n", "qty_out = Quantity(\n", - " data=np.zeros(shape), dims=[\"I\", \"J\", \"K\"], units=\"m\", gt4py_backend=backend\n", + " data=np.zeros(shape), dims=[\"I\", \"J\", \"K\"], units=\"m\", backend=backend\n", ")\n", "\n", "print(\"Initialize qty_out to zeros\")" diff --git a/ndsl/quantity/local.py 
b/ndsl/quantity/local.py index 9311f1d8..79f360c6 100644 --- a/ndsl/quantity/local.py +++ b/ndsl/quantity/local.py @@ -1,4 +1,3 @@ -import warnings from collections.abc import Sequence from typing import Any @@ -23,28 +22,11 @@ def __init__( dims: Sequence[str], units: str, *, - backend: str | None = None, + backend: str, origin: Sequence[int] | None = None, extent: Sequence[int] | None = None, - gt4py_backend: str | None = None, allow_mismatch_float_precision: bool = False, ): - if gt4py_backend is not None: - warnings.warn( - "gt4py_backend is deprecated. Use `backend` instead.", - DeprecationWarning, - stacklevel=2, - ) - if backend is None: - backend = gt4py_backend - - if backend is None: - warnings.warn( - "`backend` will be a required argument starting with the next version of NDSL.", - DeprecationWarning, - stacklevel=2, - ) - # Initialize memory to obviously wrong value - Local should _not_ be expected # to be zero'ed. data[:] = 123456789 diff --git a/ndsl/quantity/metadata.py b/ndsl/quantity/metadata.py index 45a14445..d53a08e9 100644 --- a/ndsl/quantity/metadata.py +++ b/ndsl/quantity/metadata.py @@ -16,40 +16,39 @@ @dataclasses.dataclass class QuantityMetadata: origin: tuple[int, ...] - "the start of the computational domain" + "The start of the computational domain." extent: tuple[int, ...] - "the shape of the computational domain" + "The shape of the computational domain." n_halo: int - "Number of halo-points used in the horizontal" + "Number of halo-points used in the horizontal." dims: tuple[str, ...] - "names of each dimension" + "Names of each dimension." units: str - "units of the quantity" + "Units of the quantity." data_type: type - "ndarray-like type used to store the data" + "ndarray-like type used to store the data." dtype: type - "dtype of the data in the ndarray-like object" - gt4py_backend: str | None = None - "Deprecated. Use backend instead." - backend: str | None = None + "dtype of the data in the ndarray-like object." + backend: str "GT4Py backend name. Used for performance optimal data allocation." 
@property def dim_lengths(self) -> dict[str, int]: - """mapping of dimension names to their lengths""" + """Mapping of dimension names to their lengths.""" return dict(zip(self.dims, self.extent)) @property def np(self) -> NumpyModule: - """numpy-like module used to interact with the data""" + """numpy-like module used to interact with the data.""" if issubclass(self.data_type, cupy.ndarray): return cupy - elif issubclass(self.data_type, np.ndarray): + + if issubclass(self.data_type, np.ndarray): return np - else: - raise TypeError( - f"quantity underlying data is of unexpected type {self.data_type}" - ) + + raise TypeError( + f"Quantity underlying data is of unexpected type {self.data_type}" + ) def duplicate_metadata(self, metadata_copy: QuantityMetadata) -> None: metadata_copy.origin = self.origin @@ -58,7 +57,6 @@ def duplicate_metadata(self, metadata_copy: QuantityMetadata) -> None: metadata_copy.units = self.units metadata_copy.data_type = self.data_type metadata_copy.dtype = self.dtype - metadata_copy.gt4py_backend = self.gt4py_backend metadata_copy.backend = self.backend diff --git a/ndsl/quantity/quantity.py b/ndsl/quantity/quantity.py index 7bc1df05..e31ac123 100644 --- a/ndsl/quantity/quantity.py +++ b/ndsl/quantity/quantity.py @@ -33,10 +33,9 @@ def __init__( dims: Sequence[str], units: str, *, - backend: str | None = None, + backend: str, origin: Sequence[int] | None = None, extent: Sequence[int] | None = None, - gt4py_backend: str | None = None, allow_mismatch_float_precision: bool = False, number_of_halo_points: int = 0, ): @@ -52,7 +51,6 @@ def __init__( computational domain. Defaults to None. extent: number of points along each axis within the computational domain. Defaults to None. - gt4py_backend: deprecated, use `backend` instead. allow_mismatch_float_precision: allow for precision that is not the simulation-wide default configuration. Defaults to False. number_of_halo_points: Number of halo points used. Defaults to 0. @@ -61,21 +59,6 @@ def __init__( ValueError: Data-type mismatch between configuration and input-data TypeError: Typing of the data that does not fit """ - if gt4py_backend is not None: - warnings.warn( - "gt4py_backend is deprecated. Use `backend` instead.", - DeprecationWarning, - stacklevel=2, - ) - if backend is None: - backend = gt4py_backend - - if backend is None: - warnings.warn( - "`backend` will be a required argument starting with the next version of NDSL.", - DeprecationWarning, - stacklevel=2, - ) if ( not allow_mismatch_float_precision @@ -96,15 +79,6 @@ def __init__( else: extent = tuple(extent) - if isinstance(data, (int, float, list)): - # If converting basic data, use a numpy ndarray. - warnings.warn( - "Usage of basic data in Quantities is deprecated. Please use it with a numpy or cuppy ndarray instead.", - DeprecationWarning, - stacklevel=2, - ) - data = np.asarray(data) - if not isinstance(data, (np.ndarray, cupy.ndarray)): raise TypeError( f"Only supports numpy.ndarray and cupy.ndarray, got {type(data)}" @@ -112,51 +86,47 @@ def __init__( _validate_quantity_property_lengths(data.shape, dims, origin, extent) - if backend is not None: - gt4py_backend_cls = gt_backend.from_name(backend) - is_optimal_layout = gt4py_backend_cls.storage_info["is_optimal_layout"] - device = gt4py_backend_cls.storage_info["device"] - - dimensions: tuple[str | int, ...] 
= tuple( - [ - ( - axis # type: ignore # mypy can't parse this list construction of hell - if any(dim in axis_dims for axis_dims in constants.SPATIAL_DIMS) - else str(data.shape[index]) - ) - for index, (dim, axis) in enumerate( - zip(dims, ("I", "J", "K", *([None] * (len(dims) - 3)))) - ) - ] - ) + gt4py_backend_cls = gt_backend.from_name(backend) + is_optimal_layout = gt4py_backend_cls.storage_info["is_optimal_layout"] + device = gt4py_backend_cls.storage_info["device"] - if isinstance(data, np.ndarray): - is_correct_device = device == "cpu" - elif isinstance(data, cupy.ndarray): - is_correct_device = device == "gpu" - else: - raise ValueError( - f"Unknown device target for quantity allocation {type(data)}" - ) - - if is_optimal_layout(data, dimensions) and is_correct_device: - self._data = data - else: - warnings.warn( - f"Suboptimal data layout found. Copying data to optimally align for backend '{backend}'.", - UserWarning, - stacklevel=2, + dimensions: tuple[str | int, ...] = tuple( + [ + ( + axis # type: ignore # mypy can't parse this list construction of hell + if any(dim in axis_dims for axis_dims in constants.SPATIAL_DIMS) + else str(data.shape[index]) ) - self._data = gt_storage.from_array( - data, - data.dtype, - backend=backend, - aligned_index=origin, - dimensions=dimensions, + for index, (dim, axis) in enumerate( + zip(dims, ("I", "J", "K", *([None] * (len(dims) - 3)))) ) + ] + ) + + if isinstance(data, np.ndarray): + is_correct_device = device == "cpu" + elif isinstance(data, cupy.ndarray): + is_correct_device = device == "gpu" else: - # We have no info about the gt4py backend, so just assign it. + raise ValueError( + f"Unknown device target for quantity allocation {type(data)}" + ) + + if is_optimal_layout(data, dimensions) and is_correct_device: self._data = data + else: + warnings.warn( + f"Suboptimal data layout found. Copying data to optimally align for backend '{backend}'.", + UserWarning, + stacklevel=2, + ) + self._data = gt_storage.from_array( + data, + data.dtype, + backend=backend, + aligned_index=origin, + dimensions=dimensions, + ) self._metadata = QuantityMetadata( origin=_ensure_int_tuple(origin, "origin"), @@ -167,7 +137,6 @@ def __init__( data_type=type(self._data), dtype=data.dtype, backend=backend, - gt4py_backend=backend, ) self._attrs = {} # type: ignore[var-annotated] self._compute_domain_view = BoundedArrayView( @@ -181,7 +150,6 @@ def from_data_array( *, origin: Sequence[int] | None = None, extent: Sequence[int] | None = None, - gt4py_backend: str | None = None, number_of_halo_points: int = 0, backend: str | None = None, allow_mismatch_float_precision: bool = False, @@ -193,7 +161,6 @@ def from_data_array( data_array origin: first point in data within the computational domain extent: number of points along each axis within the computational domain - gt4py_backend: deprecated, use `backend` instead. allow_mismatch_float_precision: allow for precision that is not the simulation-wide default configuration. Defaults to False. number_of_halo_points: Number of halo points used. Defaults to 0. @@ -204,15 +171,6 @@ def from_data_array( if "units" not in data_array.attrs: data_array.attrs.update({"units": "unknown"}) - if gt4py_backend is not None: - warnings.warn( - "gt4py_backend is deprecated. 
Use `backend` instead.", - DeprecationWarning, - stacklevel=2, - ) - if backend is None: - backend = gt4py_backend - return cls( data_array.values, cast(tuple[str], data_array.dims), @@ -291,16 +249,7 @@ def units(self) -> str: return self.metadata.units @property - def gt4py_backend(self) -> str | None: - warnings.warn( - "gt4py_backend is deprecated. Use `backend` instead.", - DeprecationWarning, - stacklevel=2, - ) - return self.metadata.gt4py_backend - - @property - def backend(self) -> str | None: + def backend(self) -> str: return self.metadata.backend @property diff --git a/ndsl/stencils/testing/parallel_translate.py b/ndsl/stencils/testing/parallel_translate.py index 16dd6009..43e88b32 100644 --- a/ndsl/stencils/testing/parallel_translate.py +++ b/ndsl/stencils/testing/parallel_translate.py @@ -74,12 +74,14 @@ def state_from_inputs(self, inputs: dict) -> Any: input_data = state[name] if len(properties["dims"]) > 0: dims = properties["dims"] + backend = self._base.stencil_factory.backend state[properties["name"]] = Quantity( input_data, dims, properties["units"], origin=self.grid.sizer.get_origin(dims), extent=self.grid.sizer.get_extent(dims), + backend=backend, ) else: state[properties["name"]] = input_data diff --git a/ndsl/stencils/testing/test_translate.py b/ndsl/stencils/testing/test_translate.py index 8ba789cb..b0b36931 100644 --- a/ndsl/stencils/testing/test_translate.py +++ b/ndsl/stencils/testing/test_translate.py @@ -12,8 +12,6 @@ from ndsl.dsl import gt4py_utils as gt_utils from ndsl.dsl.dace.dace_config import DaceConfig from ndsl.dsl.stencil import CompilationConfig, StencilConfig -from ndsl.quantity import Quantity -from ndsl.restart._legacy_restart import RESTART_PROPERTIES from ndsl.stencils.testing.savepoint import DataLoader, SavepointCase, dataset_to_dict from ndsl.testing.comparison import BaseMetric, LegacyMetric, MultiModalFloatMetric from ndsl.testing.perturbation import perturb @@ -311,23 +309,6 @@ def test_sequential_savepoint( pytest.fail("No tests passed") -def state_from_savepoint(serializer, savepoint, name_to_std_name): - properties = RESTART_PROPERTIES - origin = gt_utils.origin - state = {} - for name, _std_name in name_to_std_name.items(): - array = serializer.read(name, savepoint) - extent = tuple(np.asarray(array.shape) - 2 * np.asarray(origin)) - state["air_temperature"] = Quantity( - array, - dims=reversed(properties["air_temperature"]["dims"]), - units=properties["air_temperature"]["units"], - origin=origin, - extent=extent, - ) - return state - - def get_communicator(comm, layout): partitioner = CubedSpherePartitioner(TilePartitioner(layout)) communicator = CubedSphereCommunicator(comm, partitioner) diff --git a/tests/conftest.py b/tests/conftest.py index 0aeb2f84..12e2b88c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -20,10 +20,11 @@ def backend(request): def gt4py_backend(backend): if backend in ("numpy"): return "numpy" - elif backend in ("cupy"): + + if backend in ("cupy"): return "gt:gpu" - else: - return None + + return None @pytest.fixture diff --git a/tests/quantity/test_local.py b/tests/quantity/test_local.py index 26efd8f5..e5f53be3 100644 --- a/tests/quantity/test_local.py +++ b/tests/quantity/test_local.py @@ -31,41 +31,6 @@ def test_dace_data_descriptor_is_transient() -> None: assert array.transient -def test_gt4py_backend_is_deprecated() -> None: - nx = 5 - shape = (nx,) - backend = "debug" - with pytest.deprecated_call(match="gt4py_backend is deprecated"): - local = Local( - data=np.empty(shape), - origin=(0,), - 
extent=(nx,), - dims=("dim_X",), - units="n/a", - gt4py_backend=backend, - ) - - # make sure we assign backend - assert local.backend == backend - - # make sure we are backwards compatible (for now) - with pytest.deprecated_call(match="gt4py_backend is deprecated"): - assert local.gt4py_backend == backend - - -def test_backend_will_be_required() -> None: - nx = 5 - shape = (nx,) - with pytest.deprecated_call(match="`backend` will be a required argument"): - local = Local( - data=np.empty(shape), - origin=(0,), - extent=(nx,), - dims=("dim_X",), - units="n/a", - ) - - @dataclasses.dataclass class GoodLocals(LocalState): my_local: Local = dataclasses.field( diff --git a/tests/quantity/test_quantity.py b/tests/quantity/test_quantity.py index ef94b45e..e2c00ddd 100644 --- a/tests/quantity/test_quantity.py +++ b/tests/quantity/test_quantity.py @@ -1,6 +1,5 @@ import numpy as np import pytest -import xarray as xr from ndsl import Quantity from ndsl.quantity.bounds import _shift_slice @@ -318,82 +317,3 @@ def test_data_setter(): # Expected fail: new array is not even an array with pytest.raises(TypeError, match="Quantity.data buffer swap failed.*"): quantity.data = "meh" - - -def test_constructor_with_gt4py_backend_is_deprecated() -> None: - nx = 5 - shape = (nx,) - backend = "debug" - with pytest.deprecated_call(match="gt4py_backend is deprecated"): - quantity = Quantity( - data=np.empty(shape), - origin=(0,), - extent=(nx,), - dims=("dim_X",), - units="n/a", - gt4py_backend=backend, - ) - - # make sure we assign backend - assert quantity.backend == backend - - # make sure we are backwards compatible (on the QuantityMetadata) - with pytest.deprecated_call(match="gt4py_backend is deprecated"): - assert quantity.gt4py_backend == backend - - -def test_from_data_array_with_gt4py_backend_is_deprecated() -> None: - nx = 5 - shape = (nx,) - backend = "debug" - with pytest.deprecated_call(match="gt4py_backend is deprecated"): - np_data = np.empty(shape) - data_array = xr.DataArray(data=np_data, attrs={"units": "n/a"}) - quantity = Quantity.from_data_array( - data_array, - origin=(0,), - extent=(nx,), - number_of_halo_points=0, - gt4py_backend=backend, - ) - - # make sure we assign backend - assert quantity.backend == backend - - # make sure we don't assign gt4py_backend anymore (on the QuantityMetadata) - with pytest.deprecated_call(match="gt4py_backend is deprecated"): - assert quantity.gt4py_backend == backend - - -def test_assign_basic_data_is_deprecated() -> None: - nx = 5 - backend = "debug" - with pytest.deprecated_call( - match="Usage of basic data in Quantities is deprecated" - ): - quantity = Quantity( - data=[0, 1, 2, 3, 4], - origin=(0,), - extent=(nx,), - dims=("dim_X",), - units="n/a", - backend=backend, - allow_mismatch_float_precision=True, - ) - - # make sure we can still use it (for now) - for i in range(5): - assert quantity.data[i] == i - - -def test_constructor_backend_will_be_required() -> None: - nx = 5 - shape = (nx,) - with pytest.deprecated_call(match="`backend` will be a required argument"): - local = Quantity( - data=np.empty(shape), - origin=(0,), - extent=(nx,), - dims=("dim_X",), - units="n/a", - ) diff --git a/tests/test_halo_data_transformer.py b/tests/test_halo_data_transformer.py index 2d34567a..91e10d78 100644 --- a/tests/test_halo_data_transformer.py +++ b/tests/test_halo_data_transformer.py @@ -166,9 +166,7 @@ def quantity(dims, units, origin, extent, shape, dtype, gt4py_backend): outside of it.""" sz = _shape_length(shape) data = np.arange(0, sz, 
dtype=dtype).reshape(shape) - if "gtc" not in gt4py_backend: - # should also test code if gt4py_backend is unset - gt4py_backend = None + return Quantity( data, dims=dims,
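
A minimal migration sketch for downstream callers, separate from the patch itself: after this change `Quantity` (and `Local`) take a required keyword-only `backend` argument, the `gt4py_backend` keyword and property are removed, and `data` must already be a numpy or cupy ndarray. The backend name "numpy", the shape, dims, and units below are illustrative values taken from the example notebooks, not part of the patch.

    # Minimal sketch of the updated call pattern, assuming the "numpy" GT4Py
    # backend and a small (nx, ny, nz) float field as in the example notebooks.
    import numpy as np

    from ndsl import Quantity

    nx, ny, nz = 5, 5, 3
    backend = "numpy"

    qty = Quantity(
        data=np.zeros((nx, ny, nz)),   # must be a numpy.ndarray or cupy.ndarray
        dims=["I", "J", "K"],
        units="m",
        backend=backend,               # previously gt4py_backend=backend
    )

    # Passing gt4py_backend=... now raises TypeError (unexpected keyword argument),
    # and plain Python scalars/lists are no longer converted for `data`.

If the supplied array is on the wrong device or not optimally laid out for the chosen backend, the constructor emits a UserWarning and copies the data into optimally aligned storage, as in the restructured layout check in `ndsl/quantity/quantity.py` above.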