diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 324d049e..c5c6e397 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -26,37 +26,48 @@ jobs:
           # test oldest supported version of main dependencies on python 3.8
           - os: ubuntu
             os_version: latest
-            PYTHON_VERSION: '3.8'
+            PYTHON_VERSION: '3.9'
             # Set pillow and scikit-image version to be compatible with imageio and scipy
-            # matplotlib needs 3.5 to support markers in hyperspy 2.0 (requires `collection.set_offset_transform`)
-            DEPENDENCIES: matplotlib==3.5 numpy==1.20.0 tifffile==2022.7.28 dask[array]==2021.5.1 distributed==2021.5.1 numba==0.52 imageio==2.16 pillow==8.3.2 scikit-image==0.18.0 python-box==6.0.0
+            # align matplotlib dependency with hyperspy
+            DEPENDENCIES: matplotlib==3.6 numpy==1.20.0 tifffile==2022.7.28 dask[array]==2021.5.1 distributed==2021.5.1 numba==0.53 imageio==2.16 pillow==8.3.2 scikit-image==0.18.0 python-box==6.0.0
             LABEL: '-oldest'
           # test minimum requirement
           - os: ubuntu
             os_version: latest
-            PYTHON_VERSION: '3.9'
+            PYTHON_VERSION: '3.10'
             LABEL: '-minimum'
           - os: ubuntu
             os_version: latest
             PYTHON_VERSION: '3.12'
-            LABEL: '-minimum-without-hyperspy'
+            LABEL: '-hyperspy-dev'
           - os: ubuntu
             os_version: latest
-            PYTHON_VERSION: '3.11'
-            LABEL: '-hyperspy-dev'
+            PYTHON_VERSION: '3.10'
+            LABEL: '-without-hyperspy'
           - os: ubuntu
             os_version: latest
             PYTHON_VERSION: '3.9'
-            LABEL: '-without-hyperspy'
           - os: ubuntu
             os_version: latest
-            PYTHON_VERSION: '3.8'
+            PYTHON_VERSION: '3.12'
           - os: ubuntu
             os_version: latest
-            PYTHON_VERSION: '3.11'
+            PYTHON_VERSION: '3.13'
+            # can remove minimum dependencies when numba supports 3.13
+            LABEL: '-minimum'
+          - os: macos
+            os_version: latest
+            PYTHON_VERSION: '3.13'
+            # can remove minimum dependencies when numba supports 3.13
+            LABEL: '-minimum'
+          - os: windows
+            os_version: latest
+            PYTHON_VERSION: '3.13'
+            # can remove minimum dependencies when numba supports 3.13
+            LABEL: '-minimum'
           - os: macos
             os_version: '13'
-            PYTHON_VERSION: '3.11'
+            PYTHON_VERSION: '3.12'

     steps:
       - uses: actions/checkout@v4
@@ -79,6 +90,7 @@ jobs:
        name: Install Python
        with:
          python-version: ${{ matrix.PYTHON_VERSION }}
+         cache: 'pip'

      - name: Get the number of CPUs
        id: cpus
@@ -91,7 +103,7 @@ jobs:
          with open(output_file, "a", encoding="utf-8") as output_stream:
              output_stream.write(f"count={num_cpus}\n")
        shell: python
-      
+
      - name: Set Environment Variable
        shell: bash
        # Set PIP_SELECTOR environment variable according to matrix.LABEL
@@ -108,8 +120,14 @@ jobs:
          python --version
          pip --version

+      - name: Install traits dev for python 3.13
+        if: ${{ matrix.PYTHON_VERSION == '3.13' }}
+        run: |
+          # traits release with python 3.13 pending
+          pip install git+https://github.com/enthought/traits.git
+
      - name: Install hyperspy and exspy
-        if: ${{ ! contains(matrix.LABEL, 'without-hyperspy') }}
+        if: ${{ ! contains(matrix.LABEL, 'without-hyperspy') && matrix.PYTHON_VERSION != '3.13'}}
        run: |
          pip install hyperspy exspy
@@ -119,14 +137,20 @@ jobs:
          pip install git+https://github.com/hyperspy/hyperspy.git
          pip install git+https://github.com/hyperspy/exspy.git

-      - name: Install pint and python-mrcz dev
+      - name: Install hyperspy (python 3.13) and exspy (dev)
+        if: ${{ matrix.PYTHON_VERSION == '3.13' }}
+        run: |
+          # speed up installing scikit-image using pre-release with python 3.13 wheels
+          pip install scikit-image --pre
+          pip install git+https://github.com/ericpre/hyperspy.git@python313
+          pip install git+https://github.com/hyperspy/exspy.git
+
+      - name: Install python-mrcz dev
        # for numpy 2.0 support for python >= 3.9
        # https://github.com/em-MRCZ/python-mrcz/pull/15
-        # https://github.com/hgrecco/pint/issues/1974
-        if: ${{ ! contains(matrix.LABEL, 'oldest') && matrix.PYTHON_VERSION != '3.8' }}
+        if: ${{ ! contains(matrix.LABEL, 'oldest') && ! contains(matrix.LABEL, 'minimum') }}
        run: |
          pip install git+https://github.com/ericpre/python-mrcz.git@numpy2.0_and_deprecation_fixes
-          pip install git+https://github.com/hgrecco/pint

      - name: Install
        shell: bash
@@ -147,7 +171,7 @@ jobs:
      - name: Install numpy 2.0
        if: ${{ ! contains(matrix.LABEL, 'oldest') && matrix.PYTHON_VERSION != '3.8' }}
        run: |
-          pip install numpy==2
+          pip install numpy>=2

      - name: Pip list
        run: |
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index 4745baa4..28e3b40f 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -23,6 +23,10 @@ resources:

 strategy:
   matrix:
+    Linux_Python312:
+      vmImage: 'ubuntu-latest'
+      PYTHON_VERSION: '3.12'
+      MINIFORGE_PATH: $(Agent.BuildDirectory)/miniforge3
     Linux_Python310:
       vmImage: 'ubuntu-latest'
       PYTHON_VERSION: '3.10'
@@ -31,25 +35,21 @@ strategy:
       vmImage: 'ubuntu-latest'
       PYTHON_VERSION: '3.9'
       MINIFORGE_PATH: $(Agent.BuildDirectory)/miniforge3
-    Linux_Python38:
-      vmImage: 'ubuntu-latest'
-      PYTHON_VERSION: '3.8'
-      MINIFORGE_PATH: $(Agent.BuildDirectory)/miniforge3
-    MacOS_Python38:
+    MacOS_Python39:
       vmImage: 'macOS-latest'
-      PYTHON_VERSION: '3.8'
+      PYTHON_VERSION: '3.9'
       MINIFORGE_PATH: $(Agent.BuildDirectory)/miniforge3
-    MacOS_Python310:
+    MacOS_Python312:
       vmImage: 'macOS-latest'
-      PYTHON_VERSION: '3.10'
+      PYTHON_VERSION: '3.12'
       MINIFORGE_PATH: $(Agent.BuildDirectory)/miniforge3
-    Windows_Python38:
+    Windows_Python39:
       vmImage: 'windows-latest'
-      PYTHON_VERSION: '3.8'
+      PYTHON_VERSION: '3.9'
       MINIFORGE_PATH: $(Agent.BuildDirectory)\miniforge3
-    Windows_Python310:
+    Windows_Python312:
       vmImage: 'windows-latest'
-      PYTHON_VERSION: '3.10'
+      PYTHON_VERSION: '3.12'
       MINIFORGE_PATH: $(Agent.BuildDirectory)\miniforge3

 pool:
diff --git a/pyproject.toml b/pyproject.toml
index 90407421..2025b778 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ build-backend = "setuptools.build_meta"
 [project]
 name = "rosettasciio"
 description = "Reading and writing scientific file formats"
-requires-python = ">=3.8"
+requires-python = ">=3.9"
 readme = "README.md"
 classifiers = [
     "Development Status :: 4 - Beta",
@@ -13,11 +13,11 @@ classifiers = [
     "Intended Audience :: Science/Research",
     "License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
     "Operating System :: OS Independent",
-    "Programming Language :: Python :: 3.8",
     "Programming Language :: Python :: 3.9",
     "Programming Language :: Python :: 3.10",
     "Programming Language :: Python :: 3.11",
     "Programming Language :: Python :: 3.12",
+    "Programming Language :: Python :: 3.13",
     "Topic :: Scientific/Engineering",
     "Topic :: Scientific/Engineering :: Physics",
     "Topic :: Software Development :: Libraries",
@@ -94,8 +94,8 @@ eds-stream = ["sparse"]
 hdf5 = ["h5py>=2.3"]
 image = ["imageio>=2.16"]
 mrcz = ["blosc>=1.5", "mrcz>=0.3.6"]
-scalebar_export = ["matplotlib-scalebar", "matplotlib>=3.5"]
-speed = ["numba>=0.52"]
+scalebar_export = ["matplotlib-scalebar", "matplotlib>=3.6"]
+speed = ["numba>=0.53"]
 tiff = ["tifffile>=2022.7.28", "imagecodecs"]
 usid = ["pyUSID>=0.0.11"]
 zspy = ["zarr", "msgpack"]
diff --git a/rsciio/bruker/_api.py b/rsciio/bruker/_api.py
index 44333f18..e961c7e8 100644
--- a/rsciio/bruker/_api.py
+++ b/rsciio/bruker/_api.py
@@ -490,9 +490,9 @@ def __init__(self, spectrum):
                 self.detector_metadata["DetLayers"][i.tag] = dict(i.attrib)

         # map stuff from esma xml branch:
-        if esma_header:
+        if esma_header is not None:
             self.esma_metadata = x2d.dictionarize(esma_header)
-        if xrf_header:
+        if xrf_header is not None:
             xrf_header_dict = x2d.dictionarize(xrf_header)
             self.esma_metadata = {
                 "PrimaryEnergy": xrf_header_dict["Voltage"],
diff --git a/rsciio/digitalsurf/_api.py b/rsciio/digitalsurf/_api.py
index cdc78e71..e3ee27f6 100644
--- a/rsciio/digitalsurf/_api.py
+++ b/rsciio/digitalsurf/_api.py
@@ -714,7 +714,9 @@ def _split_signal_dict(self):
             self._split_surfaceserie()
         elif (n_nav, n_sig) == (2, 0):
             warnings.warn(
-                f"Signal dimension {n_sig} and navigation dimension {n_nav} exported as surface type. Consider transposing signal object before exporting if this is intentional."
+                f"Signal dimension {n_sig} and navigation dimension {n_nav} exported "
+                "as surface type. Consider transposing signal object before exporting "
+                "if this is intentional."
             )
             if self._is_binary():
                 self._split_binary_img()
diff --git a/rsciio/edax/_api.py b/rsciio/edax/_api.py
index 7fa3b472..b1256fb6 100644
--- a/rsciio/edax/_api.py
+++ b/rsciio/edax/_api.py
@@ -591,7 +591,7 @@ def get_ipr_dtype_list(endianess="<", version=333):
     dtype_list = [
         ("version", end + "u2"),
         ("imageType", end + "u2"),
-        ("label", end + "a8"),
+        ("label", end + "S8"),
         ("sMin", end + "u2"),
         ("sMax", end + "u2"),
         ("color", end + "u2"),
@@ -618,7 +618,7 @@
         ("mppX", end + "f4"),
         ("mppY", end + "f4"),
         ("nTextLines", end + "u2"),
-        ("charText", end + "4a32"),
+        ("charText", end + "4S32"),
         ("reserved3", end + "4f4"),
         ("nOverlayElements", end + "u2"),
         ("overlayColors", end + "16u2"),
diff --git a/rsciio/emd/_emd_ncem.py b/rsciio/emd/_emd_ncem.py
index dea1b175..1b5153b9 100644
--- a/rsciio/emd/_emd_ncem.py
+++ b/rsciio/emd/_emd_ncem.py
@@ -203,8 +203,10 @@ def _read_dataset(dataset):
         if h5py.check_string_dtype(dataset.dtype) and hasattr(dataset, "asstr"):
             # h5py 3.0 and newer
             # https://docs.h5py.org/en/3.0.0/strings.html
-            dataset = dataset.asstr()[:]
-        return dataset, chunks
+            data = dataset.asstr()[:]
+        else:
+            data = dataset[:]
+        return data, chunks

     def _read_emd_version(self, group):
         """Return the group version if the group is an EMD group, otherwise
@@ -225,38 +227,38 @@ def _read_data_from_groups(
         axes = []
         transpose_required = True if dataset_name != "datacube" else False

-        array_list = [self.file.get(f"{key}/{dataset_name}") for key in group_path]
+        dataset_list = [self.file.get(f"{key}/{dataset_name}") for key in group_path]

-        if None in array_list:
+        if None in dataset_list:
             raise IOError("Dataset can't be found.")

-        if len(array_list) > 1:
+        if len(dataset_list) > 1:
             # Squeeze the data only when
             if self.lazy:
-                data_list = [da.from_array(*self._read_dataset(d)) for d in array_list]
+                data_list = [
+                    da.from_array(*self._read_dataset(d)) for d in dataset_list
+                ]
                 if transpose_required:
                     data_list = [da.transpose(d) for d in data_list]
                 data = da.stack(data_list)
                 data = da.squeeze(data)
             else:
-                data_list = [
-                    np.asanyarray(self._read_dataset(d)[0]) for d in array_list
-                ]
+                data_list = [self._read_dataset(d)[0] for d in dataset_list]
                 if transpose_required:
                     data_list = [np.transpose(d) for d in data_list]
                 data = np.stack(data_list).squeeze()
         else:
-            d = array_list[0]
+            d = dataset_list[0]
             if self.lazy:
                 data = da.from_array(*self._read_dataset(d))
             else:
-                data = np.asanyarray(self._read_dataset(d)[0])
+                data = self._read_dataset(d)[0]
             if transpose_required:
                 data = data.transpose()

         shape = data.shape

-        if len(array_list) > 1:
+        if len(dataset_list) > 1:
             offset, scale, units = 0, 1, None
             if self._is_prismatic_file and "depth" in stack_key:
                 simu_om = original_metadata.get("simulation_parameters", {})
@@ -274,7 +276,7 @@
                         simu_om.get("tile", 0)[2] * simu_om.get("cellDimension", 0)[0]
                     )
                     if not math.isclose(
-                        total_thickness, len(array_list) * scale, rel_tol=1e-4
+                        total_thickness, len(dataset_list) * scale, rel_tol=1e-4
                     ):
                         _logger.warning(
                             "Depth axis is non-uniform and its offset "
@@ -289,7 +291,7 @@
                 "name": stack_key if stack_key is not None else None,
                 "offset": offset,
                 "scale": scale,
-                "size": len(array_list),
+                "size": len(dataset_list),
                 "units": units,
                 "navigate": True,
             }
diff --git a/rsciio/tests/generate_dm_testing_files.py b/rsciio/tests/generate_dm_testing_files.py
index 32a8c75c..3e2f658a 100644
--- a/rsciio/tests/generate_dm_testing_files.py
+++ b/rsciio/tests/generate_dm_testing_files.py
@@ -136,9 +136,10 @@ def generate_4D_files(f, data_types, dmversion):


 if __name__ == "__main__":
-    with open("generate_dm3_test_files.s", "w") as f1, open(
-        "generate_dm4_test_files.s", "w"
-    ) as f2:
+    with (
+        open("generate_dm3_test_files.s", "w") as f1,
+        open("generate_dm4_test_files.s", "w") as f2,
+    ):
         for f in (f1, f2):
             f.write("image im\n")
             f.write("string filename, path\n")
diff --git a/rsciio/tests/test_blockfile.py b/rsciio/tests/test_blockfile.py
index a9195f1f..40529fa7 100644
--- a/rsciio/tests/test_blockfile.py
+++ b/rsciio/tests/test_blockfile.py
@@ -45,7 +45,7 @@

 @pytest.fixture()
 def fake_signal():
-    fake_data = np.arange(300).reshape(3, 4, 5, 5)
+    fake_data = np.arange(300, dtype=np.uint8).reshape(3, 4, 5, 5)
     fake_signal = hs.signals.Signal2D(fake_data)
     fake_signal.axes_manager[0].scale_as_quantity = "1 mm"
     fake_signal.axes_manager[1].scale_as_quantity = "1 mm"
@@ -323,12 +323,12 @@ def test_different_x_y_scale_units(save_path):
 def test_inconvertible_units(save_path, fake_signal):
     fake_signal.axes_manager[2].units = "1/A"
     fake_signal.axes_manager[3].units = "1/A"
-    fake_signal.change_dtype(np.uint8)
     with pytest.warns(UserWarning):
         fake_signal.save(save_path, overwrite=True)


 def test_overflow(save_path, fake_signal):
+    fake_signal.change_dtype(np.uint16)
     with pytest.warns(UserWarning):
         fake_signal.save(save_path, overwrite=True)
     sig_reload = hs.load(save_path)
@@ -397,8 +397,7 @@ def test_vbfs(save_path, fake_signal, navigator):
         save_path, intensity_scaling=None, navigator=navigator, overwrite=True
     )
     sig_reload = hs.load(save_path)
-    compare = (fake_signal.data % 256).astype(np.uint8)
-    np.testing.assert_allclose(sig_reload.data, compare)
+    np.testing.assert_allclose(sig_reload.data, fake_signal.data)


 def test_invalid_vbf(save_path, fake_signal):
@@ -418,8 +417,10 @@ def test_default_header():

 def test_non_square(save_path):
     signal = hs.signals.Signal2D((255 * np.random.rand(10, 3, 5, 6)).astype(np.uint8))
-    with pytest.raises(ValueError):
-        signal.save(save_path, overwrite=True)
+    with pytest.warns(UserWarning):
+        # warning about expect cm units
+        with pytest.raises(ValueError):
+            signal.save(save_path, overwrite=True)


 def test_load_lazy():
@@ -459,6 +460,8 @@ def test_load_inplace():

 def test_write_fresh(save_path):
     signal = hs.signals.Signal2D((255 * np.random.rand(10, 3, 5, 5)).astype(np.uint8))
+    signal.axes_manager["sig"].set(units="cm")
+    signal.axes_manager["nav"].set(units="nm")
     signal.save(save_path, overwrite=True)
     sig_reload = hs.load(save_path)
     np.testing.assert_equal(signal.data, sig_reload.data)
@@ -481,14 +484,18 @@ def test_write_fresh(save_path):


 def test_write_data_line(save_path):
     signal = hs.signals.Signal2D((255 * np.random.rand(3, 5, 5)).astype(np.uint8))
-    signal.save(save_path, overwrite=True)
+    with pytest.warns(UserWarning):
+        # expected units warning
+        signal.save(save_path, overwrite=True)
     sig_reload = hs.load(save_path)
     np.testing.assert_equal(signal.data, sig_reload.data)


 def test_write_data_single(save_path):
     signal = hs.signals.Signal2D((255 * np.random.rand(5, 5)).astype(np.uint8))
-    signal.save(save_path, overwrite=True)
+    with pytest.warns(UserWarning):
+        # expected units warning
+        signal.save(save_path, overwrite=True)
     sig_reload = hs.load(save_path)
     np.testing.assert_equal(signal.data, sig_reload.data)
@@ -496,8 +503,10 @@ def test_write_data_single(save_path):
 def test_write_data_am_mismatch(save_path):
     signal = hs.signals.Signal2D((255 * np.random.rand(10, 3, 5, 5)).astype(np.uint8))
     signal.axes_manager.navigation_axes[1].size = 4
-    with pytest.raises(ValueError):
-        signal.save(save_path, overwrite=True)
+    with pytest.warns(UserWarning):
+        # expected units warning
+        with pytest.raises(ValueError):
+            signal.save(save_path, overwrite=True)


 def test_unrecognized_header_warning(save_path, fake_signal):
@@ -513,8 +522,10 @@
 def test_write_cutoff(save_path):
     signal = hs.signals.Signal2D((255 * np.random.rand(10, 3, 5, 5)).astype(np.uint8))
     signal.axes_manager.navigation_axes[0].size = 20
-    # Test that it raises a warning
+    signal.axes_manager["sig"].set(units="cm")
+    signal.axes_manager["nav"].set(units="nm")
     signal.save(save_path, overwrite=True)
+    # Test that it raises a warning
     with warnings.catch_warnings(record=True) as w:
         warnings.simplefilter("always")
         sig_reload = hs.load(save_path)
@@ -538,6 +549,8 @@ def test_crop_notes(save_path):
     signal = hs.signals.Signal2D((255 * np.random.rand(2, 3, 2, 2)).astype(np.uint8))
     signal.original_metadata.add_node("blockfile_header.Note")
     signal.original_metadata.blockfile_header.Note = note
-    signal.save(save_path, overwrite=True)
+    with pytest.warns(UserWarning):
+        # expected units warning
+        signal.save(save_path, overwrite=True)
     sig_reload = hs.load(save_path)
     assert sig_reload.original_metadata.blockfile_header.Note == note[:note_len]
diff --git a/rsciio/tests/test_digitalsurf.py b/rsciio/tests/test_digitalsurf.py
index d50d7e13..5829ff8c 100644
--- a/rsciio/tests/test_digitalsurf.py
+++ b/rsciio/tests/test_digitalsurf.py
@@ -22,6 +22,7 @@
 import pytest

 from rsciio.digitalsurf._api import DigitalSurfHandler, MountainsMapFileError
+from rsciio.utils.tools import dummy_context_manager

 hs = pytest.importorskip("hyperspy.api", reason="hyperspy not installed")

@@ -676,7 +677,12 @@ def test_norm_int_data(dtype, special, fullscale):
     Zscale = 0.0  # to avoid CodeQL error: pot. non-initialized var
     Zoffset = -np.inf  # to avoid CodeQL error: pot. non-initialized var

-    pointsize, Zmin, Zmax, Zscale, Zoffset, data_int = dh._norm_data(dat, special)
+    if dtype in [np.uint8, np.uint16]:
+        cm = pytest.warns(UserWarning)
+    else:
+        cm = dummy_context_manager()
+    with cm:
+        pointsize, Zmin, Zmax, Zscale, Zoffset, data_int = dh._norm_data(dat, special)

     off = minint + 1 if special and fullscale else dat.min()
     maxval = maxint - 1 if special and fullscale else dat.max()
@@ -738,7 +744,12 @@ def test_writegeneric_validtypes(tmp_path, dtype, compressed):
     generated from numpy array"""
     gen = hs.signals.Signal1D(np.arange(24, dtype=dtype)) + 25
     fgen = tmp_path.joinpath("test.pro")
-    gen.save(fgen, compressed=compressed, overwrite=True)
+    if dtype in [np.uint8, np.uint16]:
+        cm = pytest.warns(UserWarning)
+    else:
+        cm = dummy_context_manager()
+    with cm:
+        gen.save(fgen, compressed=compressed, overwrite=True)
     gen2 = hs.load(fgen)

     assert np.allclose(gen2.data, gen.data)
@@ -788,7 +799,8 @@ def test_writegeneric_transposedsurface(

     fgen = tmp_path.joinpath("test.sur")

-    gen.save(fgen, overwrite=True)
+    with pytest.warns():
+        gen.save(fgen, overwrite=True)

     gen2 = hs.load(fgen)

diff --git a/rsciio/tests/test_emd_ncem.py b/rsciio/tests/test_emd_ncem.py
index d649e774..846d10c3 100644
--- a/rsciio/tests/test_emd_ncem.py
+++ b/rsciio/tests/test_emd_ncem.py
@@ -39,7 +39,6 @@
 data_signal = np.arange(27).reshape((3, 3, 3)).T
 data_image = np.arange(9).reshape((3, 3)).T
 data_spectrum = np.arange(3).T
-data_save = np.arange(24).reshape((2, 3, 4))
 sig_metadata = {"a": 1, "b": 2}
 user = {
     "name": "John Doe",
@@ -200,7 +199,7 @@ def test_load_file(tmp_path):

 @pytest.mark.parametrize("lazy", (True, False))
 def test_save_and_read(lazy, tmp_path):
-    signal_ref = hs.signals.BaseSignal(data_save)
+    signal_ref = hs.signals.BaseSignal(np.arange(24).reshape((2, 3, 4)))
     signal_ref.metadata.General.title = test_title
     signal_ref.axes_manager[0].name = "x"
     signal_ref.axes_manager[1].name = "y"
@@ -223,7 +222,7 @@ def test_save_and_read(lazy, tmp_path):
     signal = hs.load(tmp_path / "example_temp.emd", lazy=lazy)
     if lazy:
         signal.compute(close_file=True)
-    om = signal.original_metadata
+    om = signal.original_metadata.as_dictionary()
     np.testing.assert_equal(signal.data, signal_ref.data)
     np.testing.assert_equal(signal.axes_manager[0].name, "x")
     np.testing.assert_equal(signal.axes_manager[1].name, "y")
@@ -238,10 +237,10 @@ def test_save_and_read(lazy, tmp_path):
     np.testing.assert_equal(signal.axes_manager[1].units, "µm")
     np.testing.assert_equal(signal.axes_manager[2].units, "mm")
     np.testing.assert_equal(signal.metadata.General.title, test_title)
-    np.testing.assert_equal(om.user.as_dictionary(), user)
-    np.testing.assert_equal(om.microscope.as_dictionary(), microscope)
-    np.testing.assert_equal(om.sample.as_dictionary(), sample)
-    np.testing.assert_equal(om.comments.as_dictionary(), comments)
+    np.testing.assert_equal(om["user"], user)
+    np.testing.assert_equal(om["microscope"], microscope)
+    np.testing.assert_equal(om["sample"], sample)
+    np.testing.assert_equal(om["comments"], comments)

     assert isinstance(signal, hs.signals.BaseSignal)

diff --git a/rsciio/tests/test_hspy.py b/rsciio/tests/test_hspy.py
index 22131b25..b05e32aa 100644
--- a/rsciio/tests/test_hspy.py
+++ b/rsciio/tests/test_hspy.py
@@ -39,6 +39,7 @@
     UniformDataAxis,
 )
 from hyperspy.decorators import lazifyTestClass  # noqa: E402
+from hyperspy.exceptions import VisibleDeprecationWarning  # noqa: E402
 from hyperspy.misc.test_utils import sanitize_dict as san_dict  # noqa: E402

 from rsciio._hierarchical import get_signal_chunks  # noqa: E402
@@ -419,7 +420,9 @@ def test_none_metadata():


 def test_rgba16():
     print(TEST_DATA_PATH)
-    s = hs.load(TEST_DATA_PATH / "test_rgba16.hdf5", reader="HSPY")
+    with pytest.warns(VisibleDeprecationWarning):
+        # The binned attribute has been moved from metadata.Signal
+        s = hs.load(TEST_DATA_PATH / "test_rgba16.hdf5", reader="HSPY")
     data = np.load(TEST_NPZ_DATA_PATH / "test_rgba16.npz")["a"]
     assert (s.data == data).all()
@@ -784,7 +787,9 @@ def test_load_missing_y2_value(self):
         # the point marker only needs the x1 and y1 value to work
         # so this should load
         fname = TEST_DATA_PATH / "test_marker_point_y2_data_deleted.hdf5"
-        s = hs.load(fname, reader="HSPY")
+        with pytest.warns(VisibleDeprecationWarning):
+            # The binned attribute has been moved from metadata.Signal
+            s = hs.load(fname, reader="HSPY")
         assert len(s.metadata.Markers) == 5

     def test_save_variable_length_markers(self, tmp_path):
@@ -893,14 +898,12 @@ def test_saving_ragged_array_single_string(tmp_path, file):
 @zspy_marker
 @pytest.mark.parametrize("lazy", [True, False])
 def test_save_load_model(tmp_path, file, lazy):
-    from hyperspy._components.gaussian import Gaussian
-
     filename = tmp_path / file
     s = hs.signals.Signal1D(np.ones((10, 10, 10, 10)))
     if lazy:
         s = s.as_lazy()
     m = s.create_model()
-    m.append(Gaussian())
+    m.append(hs.model.components1D.Gaussian())
     m.store("test")
     s.save(filename)
     signal2 = hs.load(filename)
diff --git a/rsciio/tests/test_tia.py b/rsciio/tests/test_tia.py
index 19980257..17c1568c 100644
--- a/rsciio/tests/test_tia.py
+++ b/rsciio/tests/test_tia.py
@@ -16,6 +16,7 @@
 # You should have received a copy of the GNU General Public License
 # along with RosettaSciIO. If not, see .

+import sys
 from pathlib import Path

 import numpy as np
@@ -35,9 +36,11 @@
 def prepare_non_zero_float():
     import tarfile

+    kwargs = {"filter": "data"} if sys.version_info.minor >= 12 else {}
+
     tgz_fname = TEST_DATA_PATH_OLD / "non_float_meta_value_zeroed.tar.gz"
     with tarfile.open(tgz_fname, "r:gz") as tar:
-        tar.extractall(path=TEST_DATA_PATH_OLD)
+        tar.extractall(path=TEST_DATA_PATH_OLD, **kwargs)

     yield

diff --git a/rsciio/tests/test_tvips.py b/rsciio/tests/test_tvips.py
index e759e44a..50ed03b6 100644
--- a/rsciio/tests/test_tvips.py
+++ b/rsciio/tests/test_tvips.py
@@ -445,12 +445,17 @@ def test_file_writer(
     filepath = tmp_path / "test_tvips_save_000.tvips"
     scan_shape = signal.axes_manager.navigation_shape

-    file_writer(
-        filepath,
-        signal._to_dictionary(),
-        max_file_size=max_file_size,
-        frame_header_extra_bytes=fheb,
-    )
+    if max_file_size is not None and max_file_size < 500:
+        cm = pytest.warns(UserWarning)
+    else:
+        cm = dummy_context_manager()
+    with cm:
+        file_writer(
+            filepath,
+            signal._to_dictionary(),
+            max_file_size=max_file_size,
+            frame_header_extra_bytes=fheb,
+        )
     if max_file_size is None:
         assert len(list(tmp_path.iterdir())) == 1
     else:
diff --git a/upcoming_changes/339.maintenance.rst b/upcoming_changes/339.maintenance.rst
new file mode 100644
index 00000000..9bb766d9
--- /dev/null
+++ b/upcoming_changes/339.maintenance.rst
@@ -0,0 +1 @@
+Add explicit support for python 3.13.
\ No newline at end of file