From 425a0a02a9c110232f9d6243be7b97c6b279af1e Mon Sep 17 00:00:00 2001
From: Thomas VINCENT
Date: Thu, 8 Feb 2024 13:31:56 +0100
Subject: [PATCH 1/9] Remove fallback for FilterRefBase missing in h5py<3

---
 src/hdf5plugin/_filters.py | 56 ++++++++------------------------
 1 file changed, 11 insertions(+), 45 deletions(-)

diff --git a/src/hdf5plugin/_filters.py b/src/hdf5plugin/_filters.py
index f2de87a5..1e776a59 100644
--- a/src/hdf5plugin/_filters.py
+++ b/src/hdf5plugin/_filters.py
@@ -27,7 +27,6 @@
 import logging
 import math
 import struct
-from collections.abc import Mapping
 
 import h5py
 from ._config import build_config
@@ -71,40 +70,7 @@
 """SPERR filter ID"""
 
 
-try:
-    _FilterRefClass = h5py.filters.FilterRefBase
-except AttributeError:
-    class _FilterRefClass(Mapping):
-        """Base class for referring to an HDF5 and describing its options
-
-        Your subclass must define filter_id, and may define a filter_options tuple.
-        """
-        filter_id = None
-        filter_options = ()
-
-        # Mapping interface supports using instances as **kwargs for compatibility
-        # with older versions of h5py
-        @property
-        def _kwargs(self):
-            return {
-                'compression': self.filter_id,
-                'compression_opts': self.filter_options
-            }
-
-        def __hash__(self):
-            return hash((self.filter_id, self.filter_options))
-
-        def __len__(self):
-            return len(self._kwargs)
-
-        def __iter__(self):
-            return iter(self._kwargs)
-
-        def __getitem__(self, item):
-            return self._kwargs[item]
-
-
-class Bitshuffle(_FilterRefClass):
+class Bitshuffle(h5py.filters.FilterRefBase):
     """``h5py.Group.create_dataset``'s compression arguments for using bitshuffle filter.
 
     It can be passed as keyword arguments:
@@ -167,7 +133,7 @@ def __init__(self, nelems=0, cname=None, clevel=3, lz4=None):
         self.filter_options = (nelems, self.__COMPRESSIONS[cname])
 
 
-class Blosc(_FilterRefClass):
+class Blosc(h5py.filters.FilterRefBase):
     """``h5py.Group.create_dataset``'s compression arguments for using blosc filter.
 
     It can be passed as keyword arguments:
@@ -223,7 +189,7 @@ def __init__(self, cname='lz4', clevel=5, shuffle=SHUFFLE):
         self.filter_options = (0, 0, 0, 0, clevel, shuffle, compression)
 
 
-class Blosc2(_FilterRefClass):
+class Blosc2(h5py.filters.FilterRefBase):
    """``h5py.Group.create_dataset``'s compression arguments for using blosc2 filter.
 
     It can be passed as keyword arguments:
@@ -285,7 +251,7 @@ def __init__(self, cname='blosclz', clevel=5, filters=SHUFFLE):
         self.filter_options = (0, 0, 0, 0, clevel, filters, compression)
 
 
-class BZip2(_FilterRefClass):
+class BZip2(h5py.filters.FilterRefBase):
     """``h5py.Group.create_dataset``'s compression arguments for using BZip2 filter.
 
     It can be passed as keyword arguments:
@@ -310,7 +276,7 @@ def __init__(self, blocksize=9) -> None:
         self.filter_options = (blocksize,)
 
 
-class FciDecomp(_FilterRefClass):
+class FciDecomp(h5py.filters.FilterRefBase):
     """``h5py.Group.create_dataset``'s compression arguments for using FciDecomp filter.
 
     It can be passed as keyword arguments:
@@ -335,7 +301,7 @@ def __init__(self, *args, **kwargs):
                 "You may need to reinstall hdf5plugin with a recent version of pip, or rebuild it with a newer compiler.")
 
 
-class LZ4(_FilterRefClass):
+class LZ4(h5py.filters.FilterRefBase):
     """``h5py.Group.create_dataset``'s compression arguments for using lz4 filter.
 
     It can be passed as keyword arguments:
@@ -361,7 +327,7 @@ def __init__(self, nbytes=0):
         self.filter_options = (nbytes,)
 
 
-class Zfp(_FilterRefClass):
+class Zfp(h5py.filters.FilterRefBase):
     """``h5py.Group.create_dataset``'s compression arguments for using ZFP filter.
 
     It can be passed as keyword arguments:
@@ -487,7 +453,7 @@ def __init__(self,
         logger.info(f"filter options = {self.filter_options}")
 
 
-class Sperr(_FilterRefClass):
+class Sperr(h5py.filters.FilterRefBase):
     """``h5py.Group.create_dataset``'s compression arguments for using SPERR filter.
 
     It can be passed as keyword arguments:
@@ -599,7 +565,7 @@ def __pack_options(cls, mode: int, quality: float, swap: bool) -> tuple[int]:
         return (ret,)
 
 
-class SZ(_FilterRefClass):
+class SZ(h5py.filters.FilterRefBase):
     """``h5py.Group.create_dataset``'s compression arguments for using SZ filter.
 
     It can be passed as keyword arguments:
@@ -686,7 +652,7 @@ def __pack_float64(error: float) -> tuple:
        return high, low
 
 
-class SZ3(_FilterRefClass):
+class SZ3(h5py.filters.FilterRefBase):
     """``h5py.Group.create_dataset``'s compression arguments for using SZ3 filter.
 
     It can be passed as keyword arguments:
@@ -754,7 +720,7 @@ def __pack_float64(error: float) -> tuple:
        return high, low
 
 
-class Zstd(_FilterRefClass):
+class Zstd(h5py.filters.FilterRefBase):
     """``h5py.Group.create_dataset``'s compression arguments for using FciDecomp filter.
 
     It can be passed as keyword arguments:

From a82e6a7e5693e4d8398c3da3674201708aa777ed Mon Sep 17 00:00:00 2001
From: Thomas VINCENT
Date: Thu, 8 Feb 2024 13:33:17 +0100
Subject: [PATCH 2/9] Require h5py>=3.0.0

---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index 09a809ed..c0b84d43 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -28,7 +28,7 @@ classifiers = [
     "Programming Language :: Python :: 3",
     "Topic :: Software Development :: Libraries :: Python Modules",
 ]
-dependencies = ["h5py"]
+dependencies = ["h5py>=3.0.0"]
 dynamic = ["version"]
 
 [project.urls]

From c810ce9d514fb24c485458c351ecb462ad799295 Mon Sep 17 00:00:00 2001
From: Thomas VINCENT
Date: Thu, 8 Feb 2024 13:37:13 +0100
Subject: [PATCH 3/9] update docstring to use compression= instead of **

---
 src/hdf5plugin/_filters.py | 58 +++++++++++++------------------
 1 file changed, 19 insertions(+), 39 deletions(-)

diff --git a/src/hdf5plugin/_filters.py b/src/hdf5plugin/_filters.py
index 1e776a59..170f34cb 100644
--- a/src/hdf5plugin/_filters.py
+++ b/src/hdf5plugin/_filters.py
@@ -73,15 +73,13 @@
 class Bitshuffle(h5py.filters.FilterRefBase):
     """``h5py.Group.create_dataset``'s compression arguments for using bitshuffle filter.
 
-    It can be passed as keyword arguments:
-
     .. code-block:: python
 
         f = h5py.File('test.h5', 'w')
         f.create_dataset(
             'bitshuffle_with_lz4',
             data=numpy.arange(100),
-            **hdf5plugin.Bitshuffle(nelems=0, lz4=True))
+            compression=hdf5plugin.Bitshuffle(nelems=0, lz4=True))
         f.close()
 
     :param int nelems:
@@ -136,15 +134,13 @@ def __init__(self, nelems=0, cname=None, clevel=3, lz4=None):
 class Blosc(h5py.filters.FilterRefBase):
     """``h5py.Group.create_dataset``'s compression arguments for using blosc filter.
 
-    It can be passed as keyword arguments:
-
     .. code-block:: python
 
         f = h5py.File('test.h5', 'w')
         f.create_dataset(
             'blosc_byte_shuffle_blosclz',
             data=numpy.arange(100),
-            **hdf5plugin.Blosc(cname='blosclz', clevel=9, shuffle=hdf5plugin.Blosc.SHUFFLE))
+            compression=hdf5plugin.Blosc(cname='blosclz', clevel=9, shuffle=hdf5plugin.Blosc.SHUFFLE))
         f.close()
 
     :param str cname:
@@ -192,15 +188,13 @@ def __init__(self, cname='lz4', clevel=5, shuffle=SHUFFLE):
 class Blosc2(h5py.filters.FilterRefBase):
     """``h5py.Group.create_dataset``'s compression arguments for using blosc2 filter.
 
-    It can be passed as keyword arguments:
-
     .. code-block:: python
 
         f = h5py.File('test.h5', 'w')
         f.create_dataset(
             'blosc2_byte_shuffle_blosclz',
             data=numpy.arange(100),
-            **hdf5plugin.Blosc2(cname='blosclz', clevel=9, filters=hdf5plugin.Blosc2.SHUFFLE))
+            compression=hdf5plugin.Blosc2(cname='blosclz', clevel=9, filters=hdf5plugin.Blosc2.SHUFFLE))
         f.close()
 
     :param str cname:
@@ -254,15 +248,13 @@ def __init__(self, cname='blosclz', clevel=5, filters=SHUFFLE):
 class BZip2(h5py.filters.FilterRefBase):
     """``h5py.Group.create_dataset``'s compression arguments for using BZip2 filter.
 
-    It can be passed as keyword arguments:
-
     .. code-block:: python
 
         f = h5py.File('test.h5', 'w')
         f.create_dataset(
             'bzip2',
             data=numpy.arange(100),
-            **hdf5plugin.BZip2(blocksize=5))
+            compression=hdf5plugin.BZip2(blocksize=5))
         f.close()
 
     :param int blocksize: Size of the blocks as a multiple of 100k
@@ -279,15 +271,13 @@ def __init__(self, blocksize=9) -> None:
 class FciDecomp(h5py.filters.FilterRefBase):
     """``h5py.Group.create_dataset``'s compression arguments for using FciDecomp filter.
 
-    It can be passed as keyword arguments:
-
     .. code-block:: python
 
         f = h5py.File('test.h5', 'w')
         f.create_dataset(
             'fcidecomp',
             data=numpy.arange(100),
-            **hdf5plugin.FciDecomp())
+            compression=hdf5plugin.FciDecomp())
         f.close()
     """
     filter_name = "fcidecomp"
@@ -304,13 +294,11 @@ def __init__(self, *args, **kwargs):
 class LZ4(h5py.filters.FilterRefBase):
     """``h5py.Group.create_dataset``'s compression arguments for using lz4 filter.
 
-    It can be passed as keyword arguments:
-
     .. code-block:: python
 
         f = h5py.File('test.h5', 'w')
         f.create_dataset('lz4', data=numpy.arange(100),
-            **hdf5plugin.LZ4(nbytes=0))
+            compression=hdf5plugin.LZ4(nbytes=0))
         f.close()
 
     :param int nbytes:
@@ -330,15 +318,13 @@ def __init__(self, nbytes=0):
 class Zfp(h5py.filters.FilterRefBase):
     """``h5py.Group.create_dataset``'s compression arguments for using ZFP filter.
 
-    It can be passed as keyword arguments:
-
     .. code-block:: python
 
         f = h5py.File('test.h5', 'w')
         f.create_dataset(
             'zfp',
             data=numpy.random.random(100),
-            **hdf5plugin.Zfp())
+            compression=hdf5plugin.Zfp())
         f.close()
 
     This filter provides different modes:
@@ -351,7 +337,7 @@ class Zfp(h5py.filters.FilterRefBase):
           f.create_dataset(
              'zfp_fixed_rate',
              data=numpy.random.random(100),
-             **hdf5plugin.Zfp(rate=10.0))
+             compression=hdf5plugin.Zfp(rate=10.0))
 
     - **Fixed-precision** mode: To use, set the ``precision`` argument.
       For details, see `zfp fixed-precision mode `_.
@@ -361,7 +347,7 @@ class Zfp(h5py.filters.FilterRefBase):
          f.create_dataset(
              'zfp_fixed_precision',
              data=numpy.random.random(100),
-             **hdf5plugin.Zfp(precision=10))
+             compression=hdf5plugin.Zfp(precision=10))
 
     - **Fixed-accuracy** mode: To use, set the ``accuracy`` argument
      For details, see `zfp fixed-accuracy mode `_.
@@ -371,7 +357,7 @@ class Zfp(h5py.filters.FilterRefBase):
          f.create_dataset(
              'zfp_fixed_accuracy',
              data=numpy.random.random(100),
-             **hdf5plugin.Zfp(accuracy=0.001))
+             compression=hdf5plugin.Zfp(accuracy=0.001))
 
     - **Reversible** (i.e., lossless) mode: To use, set the ``reversible`` argument to True
       For details, see `zfp reversible mode `_.
@@ -381,7 +367,7 @@ class Zfp(h5py.filters.FilterRefBase):
          f.create_dataset(
              'zfp_reversible',
              data=numpy.random.random(100),
-             **hdf5plugin.Zfp(reversible=True))
+             compression=hdf5plugin.Zfp(reversible=True))
 
     - **Expert** mode: To use, set the ``minbits``, ``maxbits``, ``maxprec`` and ``minexp`` arguments.
       For details, see `zfp expert mode `_.
@@ -391,7 +377,7 @@ class Zfp(h5py.filters.FilterRefBase):
          f.create_dataset(
              'zfp_expert',
              data=numpy.random.random(100),
-             **hdf5plugin.Zfp(minbits=1, maxbits=16657, maxprec=64, minexp=-1074))
+             compression=hdf5plugin.Zfp(minbits=1, maxbits=16657, maxprec=64, minexp=-1074))
 
     :param float rate:
         Use fixed-rate mode and set the number of compressed bits per value.
@@ -568,15 +554,13 @@ def __pack_options(cls, mode: int, quality: float, swap: bool) -> tuple[int]:
 class SZ(h5py.filters.FilterRefBase):
     """``h5py.Group.create_dataset``'s compression arguments for using SZ filter.
 
-    It can be passed as keyword arguments:
-
     .. code-block:: python
 
         f = h5py.File('test.h5', 'w')
         f.create_dataset(
             'sz',
             data=numpy.random.random(100),
-            **hdf5plugin.SZ())
+            compression=hdf5plugin.SZ())
         f.close()
 
     This filter provides different modes:
@@ -589,7 +573,7 @@ class SZ(h5py.filters.FilterRefBase):
          f.create_dataset(
              'sz_absolute',
              data=numpy.random.random(100),
-             **hdf5plugin.SZ(absolute=0.1))
+             compression=hdf5plugin.SZ(absolute=0.1))
 
     - **Relative** mode: To use, set the ``relative`` argument.
       It ensures that the resulting values will be within the provided relative tolerance.
@@ -599,7 +583,7 @@ class SZ(h5py.filters.FilterRefBase):
          f.create_dataset(
              'sz_relative',
              data=numpy.random.random(100),
-             **hdf5plugin.SZ(relative=0.01))
+             compression=hdf5plugin.SZ(relative=0.01))
 
     - **Point-wise relative** mode: To use, set the ``pointwise_relative`` argument.
       It ensures that each grid point of the resulting values will be within the provided relative tolerance.
@@ -609,7 +593,7 @@ class SZ(h5py.filters.FilterRefBase):
          f.create_dataset(
              'sz_pointwise_relative',
              data=numpy.random.random(100),
-             **hdf5plugin.SZ(pointwise_relative=0.01))
+             compression=hdf5plugin.SZ(pointwise_relative=0.01))
 
     For more details about the compressor `SZ `_.
     """
@@ -655,16 +639,14 @@ def __pack_float64(error: float) -> tuple:
 class SZ3(h5py.filters.FilterRefBase):
     """``h5py.Group.create_dataset``'s compression arguments for using SZ3 filter.
 
-    It can be passed as keyword arguments:
-
     - **Absolute** mode: To use, set the ``absolute`` argument.
       It ensures that the resulting values will be within the provided absolute tolerance.
 
       .. code-block:: python
 
          f.create_dataset(
              'sz3_absolute',
              data=numpy.random.random(100),
-             **hdf5plugin.SZ3(absolute=0.1))
+             compression=hdf5plugin.SZ3(absolute=0.1))
 
     For more details about the compressor, see `SZ3 `_.
@@ -723,25 +705,23 @@ def __pack_float64(error: float) -> tuple:
 class Zstd(h5py.filters.FilterRefBase):
     """``h5py.Group.create_dataset``'s compression arguments for using FciDecomp filter.
 
-    It can be passed as keyword arguments:
-
     .. code-block:: python
 
         f = h5py.File('test.h5', 'w')
         f.create_dataset(
             'zstd',
             data=numpy.arange(100),
-            **hdf5plugin.Zstd())
+            compression=hdf5plugin.Zstd())
         f.close()
 
     :param int clevel: Compression level from 1 (lowest compression) to 22 (maximum compression).
 
     .. code-block:: python
 
        f.create_dataset(
            'zstd',
            data=numpy.arange(100),
-           **hdf5plugin.Zstd(clevel=22))
+           compression=hdf5plugin.Zstd(clevel=22))
        f.close()
     """
     filter_name = "zstd"

From 1f371aa9fc40a519601178a3c2618b06d9a6d5cb Mon Sep 17 00:00:00 2001
From: Thomas VINCENT
Date: Thu, 8 Feb 2024 13:41:17 +0100
Subject: [PATCH 4/9] update documentation to use compression= instead of **

---
 doc/usage.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/doc/usage.rst b/doc/usage.rst
index 11d9dba9..7c757326 100644
--- a/doc/usage.rst
+++ b/doc/usage.rst
@@ -42,7 +42,7 @@ Sample code:
 
     # Compression
     f = h5py.File('test.h5', 'w')
-    f.create_dataset('data', data=numpy.arange(100), **hdf5plugin.LZ4())
+    f.create_dataset('data', data=numpy.arange(100), compression=hdf5plugin.LZ4())
    f.close()
 
     # Decompression

From 598af4ad42bc0dab9516d24fe9ef2f9f96246a4f Mon Sep 17 00:00:00 2001
From: Thomas VINCENT
Date: Thu, 8 Feb 2024 13:41:58 +0100
Subject: [PATCH 5/9] use compression= in tests

---
 src/hdf5plugin/test.py | 29 +++++++++++++++--------------
 test/test.py           |  6 +++---
 2 files changed, 18 insertions(+), 17 deletions(-)

diff --git a/src/hdf5plugin/test.py b/src/hdf5plugin/test.py
index c4e2178f..a33e9f04 100644
--- a/src/hdf5plugin/test.py
+++ b/src/hdf5plugin/test.py
@@ -82,22 +82,23 @@ def _test(self,
         data = numpy.ones((self._data_natoms,), dtype=dtype).reshape(self._data_shape)
         filename = os.path.join(self.tempdir, "test_" + filter_name + ".h5")
 
-        args = {"blosc": hdf5plugin.Blosc,
-                "blosc2": hdf5plugin.Blosc2,
-                "bshuf": hdf5plugin.Bitshuffle,
-                "bzip2": hdf5plugin.BZip2,
-                "lz4": hdf5plugin.LZ4,
-                "fcidecomp": hdf5plugin.FciDecomp,
-                "sperr": hdf5plugin.Sperr,
-                "sz": hdf5plugin.SZ,
-                "sz3": hdf5plugin.SZ3,
-                "zfp": hdf5plugin.Zfp,
-                "zstd": hdf5plugin.Zstd,
-                }[filter_name](**options)
+        compression_class = {
+            "blosc": hdf5plugin.Blosc,
+            "blosc2": hdf5plugin.Blosc2,
+            "bshuf": hdf5plugin.Bitshuffle,
+            "bzip2": hdf5plugin.BZip2,
+            "lz4": hdf5plugin.LZ4,
+            "fcidecomp": hdf5plugin.FciDecomp,
+            "sperr": hdf5plugin.Sperr,
+            "sz": hdf5plugin.SZ,
+            "sz3": hdf5plugin.SZ3,
+            "zfp": hdf5plugin.Zfp,
+            "zstd": hdf5plugin.Zstd,
+        }[filter_name]
 
         # Write
         f = h5py.File(filename, "w")
-        f.create_dataset("data", data=data, chunks=data.shape, **args)
+        f.create_dataset("data", data=data, chunks=data.shape, compression=compression_class(**options))
         f.close()
 
         # Read
@@ -149,7 +150,7 @@ def testDepreactedBitshuffle(self):
     def _get_bitshuffle_version(self):
         filename = os.path.join(self.tempdir, "get_bitshuffle_version.h5")
         with h5py.File(filename, "w", driver="core", backing_store=False) as h5f:
-            h5f.create_dataset("data", numpy.arange(10), **hdf5plugin.Bitshuffle())
+            h5f.create_dataset("data", numpy.arange(10), compression=hdf5plugin.Bitshuffle())
             plist = h5f["data"].id.get_create_plist()
             assert plist.get_nfilters() == 1
             filter_ = plist.get_filter(0)
diff --git a/test/test.py b/test/test.py
index 94e37ada..53a398c1 100644
--- a/test/test.py
+++ b/test/test.py
@@ -235,7 +235,7 @@ def testSZ3(self):
             output_file = os.path.join(self.tempdir, compressed_name + ".h5")
             with h5py.File(output_file, "w", driver="core", backing_store=False) as h5o:
                 h5o.create_dataset("data", data=original, dtype=original.dtype, chunks=original.shape,
-                                   **hdf5plugin.SZ3(absolute=value))
+                                   compression=hdf5plugin.SZ3(absolute=value))
                 output_data = h5o["/data"][()]
                 self.assertFalse(numpy.all(original == output_data), "Values should not be identical")
 
@@ -260,7 +260,7 @@ def testSZ3(self):
             output_file = os.path.join(self.tempdir, compressed_name + ".h5")
             with h5py.File(output_file, "w", driver="core", backing_store=False) as h5o:
                 h5o.create_dataset("data", data=original, dtype=original.dtype, chunks=original.shape,
-                                   **hdf5plugin.SZ3(relative=value))
+                                   compression=hdf5plugin.SZ3(relative=value))
                 output_data = h5o["/data"][()]
                 self.assertFalse(numpy.all(original == output_data), "Values should not be identical")
 
@@ -295,7 +295,7 @@ def testSZ3(self):
             output_file = os.path.join(self.tempdir, compressed_name + ".h5")
             with h5py.File(output_file, "w", driver="core", backing_store=False) as h5o:
                 h5o.create_dataset("data", data=original, dtype=original.dtype, chunks=original.shape,
-                                   **hdf5plugin.SZ3(norm2=value))
+                                   compression=hdf5plugin.SZ3(norm2=value))
                 output_data = h5o["/data"][()]
                 self.assertFalse(numpy.all(original == output_data), "Values should not be identical")

From c5e726d8dab05d73247ae01e9283e5ff44a58576 Mon Sep 17 00:00:00 2001
From: Thomas VINCENT
Date: Thu, 8 Feb 2024 13:48:49 +0100
Subject: [PATCH 6/9] remove compatibility code for h5py<3

---
 src/hdf5plugin/_utils.py |  4 ----
 src/hdf5plugin/test.py   | 20 ++++++++------------
 2 files changed, 8 insertions(+), 16 deletions(-)

diff --git a/src/hdf5plugin/_utils.py b/src/hdf5plugin/_utils.py
index 55b9af7a..07318ede 100644
--- a/src/hdf5plugin/_utils.py
+++ b/src/hdf5plugin/_utils.py
@@ -99,10 +99,6 @@ def register_filter(name):
     # Unregister existing filter
     filter_id = FILTERS[name]
     is_avail = is_filter_available(name)
-    if h5py.version.version_tuple < (2, 10) and is_avail in (True, None):
-        logger.error(
-            "h5py.h5z.unregister_filter is not available in this version of h5py.")
-        return False
     if is_avail is True:
         if not h5py.h5z.unregister_filter(filter_id):
             logger.error(f"Failed to unregister filter {name} ({filter_id})")
diff --git a/src/hdf5plugin/test.py b/src/hdf5plugin/test.py
index a33e9f04..5dde2f6c 100644
--- a/src/hdf5plugin/test.py
+++ b/src/hdf5plugin/test.py
@@ -107,16 +107,15 @@ def _test(self,
         plist = f['data'].id.get_create_plist()
         filters = [plist.get_filter(i) for i in range(plist.get_nfilters())]
 
-        if h5py.version.version_tuple >= (2, 10):  # Need read_direct_chunk
-            # Read chunk raw (compressed) data
-            chunk = f['data'].id.read_direct_chunk((0,) * data.ndim)[1]
+        # Read chunk raw (compressed) data
+        chunk = f['data'].id.read_direct_chunk((0,) * data.ndim)[1]
 
-            if compressed is True:  # Check if chunk is actually compressed
-                self.assertLess(len(chunk), data.nbytes)
-            elif compressed is False:
-                self.assertEqual(len(chunk), data.nbytes)
-            else:
-                assert compressed == 'nocheck'
+        if compressed is True:  # Check if chunk is actually compressed
+            self.assertLess(len(chunk), data.nbytes)
+        elif compressed is False:
+            self.assertEqual(len(chunk), data.nbytes)
+        else:
+            assert compressed == 'nocheck'
 
         if lossless:
             self.assertTrue(numpy.array_equal(saved, data))
@@ -400,7 +399,6 @@ def _simple_test(self, filter_name):
         else:
             self._test(filter_name)
 
-    @unittest.skipIf(h5py.version.version_tuple < (2, 10), "h5py<2.10: unregister_filer not available")
     @unittest.skipUnless(BUILD_CONFIG.embedded_filters, "No embedded filters")
     def test_register_single_filter_by_name(self):
         """Re-register embedded filters one at a time given their name"""
@@ -410,7 +408,6 @@ def test_register_single_filter_by_name(self):
             self.assertTrue(status)
             self._simple_test(filter_name)
 
-    @unittest.skipIf(h5py.version.version_tuple < (2, 10), "h5py<2.10: unregister_filer not available")
     @unittest.skipUnless(BUILD_CONFIG.embedded_filters, "No embedded filters")
     def test_register_single_filter_by_id(self):
         """Re-register embedded filters one at a time given their ID"""
@@ -421,7 +418,6 @@ def test_register_single_filter_by_id(self):
             self.assertTrue(status)
             self._simple_test(filter_name)
 
-    @unittest.skipIf(h5py.version.version_tuple < (2, 10), "h5py<2.10: unregister_filer not available")
    @unittest.skipUnless(BUILD_CONFIG.embedded_filters, "No embedded filters")
     def test_register_all_filters(self):
         """Re-register embedded filters all at once"""

From df6e155b8266433dcb26b1891a410e797ffa883e Mon Sep 17 00:00:00 2001
From: Thomas VINCENT
Date: Thu, 8 Feb 2024 14:04:01 +0100
Subject: [PATCH 7/9] update ci to use h5py>=3

---
 .github/workflows/ci.yml | 18 +++++++++---------
 appveyor.yml             |  2 +-
 2 files changed, 10 insertions(+), 10 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 86bdcc01..b59a8c5e 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -24,32 +24,32 @@ jobs:
       fail-fast: false
       matrix:
         include:
-          - name-suffix: "wheel-h5py_2.8.0"
+          - name-suffix: "wheel-h5py_3.0.0"
            os: ubuntu-latest
            python-version: '3.7'
-            OLDEST_DEPENDENCIES: 'h5py==2.8.0'
+            OLDEST_DEPENDENCIES: 'h5py==3.0.0'
 
-          - name-suffix: "sdist-h5py_2.10.0"
+          - name-suffix: "sdist-h5py_3.6.0"
             os: ubuntu-latest
-            python-version: '3.8'
-            OLDEST_DEPENDENCIES: 'h5py==2.10.0 numpy==1.23.5'
+            python-version: '3.10'
+            OLDEST_DEPENDENCIES: 'h5py==3.6.0'
 
           - name-suffix: "wheel-h5py_3.10.0"
             os: ubuntu-latest
             python-version: '3.12'
             OLDEST_DEPENDENCIES: 'h5py==3.10.0 "numpy<2"'
 
-          - name-suffix: "wheel-h5py_2.10.0"
+          - name-suffix: "wheel-h5py_3.0.0"
             os: macos-13
             python-version: '3.7.16'
-            OLDEST_DEPENDENCIES: 'h5py==2.10.0 "numpy<2"'
+            OLDEST_DEPENDENCIES: 'h5py==3.0.0 "numpy<2"'
             env:
               MACOSX_DEPLOYMENT_TARGET: 10.15
 
-          - name-suffix: "wheel-h5py_3.8.0"
+          - name-suffix: "wheel-h5py_3.10.0"
             os: macos-13
             python-version: '3.10.8'
-            OLDEST_DEPENDENCIES: 'h5py==3.8.0 "numpy<2"'
+            OLDEST_DEPENDENCIES: 'h5py==3.10.0 "numpy<2"'
 
     steps:
       - uses: actions/checkout@v4
diff --git a/appveyor.yml b/appveyor.yml
index 8d5c0521..45b633dc 100644
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -22,7 +22,7 @@ environment:
     - BUILD_PY_PATH: "C:\\Python37-x64;C:\\Python37-x64\\Scripts"
      TEST_PY_PATH: "C:\\Miniconda37-x64;C:\\Miniconda37-x64\\Library\\mingw-w64\\bin;C:\\Miniconda37-x64\\Library\\usr\\bin;C:\\Miniconda37-x64\\Library\\bin;C:\\Miniconda37-x64\\Scripts;C:\\Miniconda37-x64\\bin;C:\\Miniconda37-x64\\condabin"
       INSTALL_CMD: "conda install -y"
-      OLDEST_DEPENDENCIES: "h5py==2.8.0 numpy<2"
+      OLDEST_DEPENDENCIES: "h5py==3.0.0 numpy<2"
 
 install:

From 187b07fd5c19cd3bd403e5e8da468b8cfcc72f08 Mon Sep 17 00:00:00 2001
From: Thomas VINCENT
Date: Mon, 15 Apr 2024 15:27:28 +0200
Subject: [PATCH 8/9] Add numpy<2 constraint when missing

---
 .github/workflows/ci.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index b59a8c5e..0a78a426 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -27,12 +27,12 @@ jobs:
           - name-suffix: "wheel-h5py_3.0.0"
             os: ubuntu-latest
             python-version: '3.7'
-            OLDEST_DEPENDENCIES: 'h5py==3.0.0'
+            OLDEST_DEPENDENCIES: 'h5py==3.0.0 "numpy<2"'
 
           - name-suffix: "sdist-h5py_3.6.0"
             os: ubuntu-latest
             python-version: '3.10'
-            OLDEST_DEPENDENCIES: 'h5py==3.6.0'
+            OLDEST_DEPENDENCIES: 'h5py==3.6.0 "numpy<2"'
 
           - name-suffix: "wheel-h5py_3.10.0"
             os: ubuntu-latest

From 3d35dae8ea68281489f9481a3e8a1a929627c32c Mon Sep 17 00:00:00 2001
From: Thomas VINCENT
Date: Wed, 3 Jul 2024 13:32:21 +0200
Subject: [PATCH 9/9] fix CI build wheel macos arm64

---
 .github/workflows/ci.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 0a78a426..94440229 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -71,8 +71,8 @@ jobs:
       python test/test.py
 
   build_wheels_macos:
-    name: Build ARM64 wheels on macos-11
-    runs-on: macos-11
+    name: Build ARM64 wheels on macos-latest
+    runs-on: macos-latest
     steps:
       - uses: actions/checkout@v4
       - uses: pypa/cibuildwheel@v2.16.5
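
Taken together, patches 1 to 5 standardize on the h5py>=3 ``h5py.filters.FilterRefBase`` API: the filter object is passed through ``create_dataset``'s ``compression=`` argument instead of being unpacked as ``**kwargs``. A minimal sketch of the resulting usage, assuming hdf5plugin is installed; the file name, dataset name, and data are illustrative:

.. code-block:: python

    import h5py
    import hdf5plugin
    import numpy

    # Write: pass the filter description through compression= (h5py>=3)
    # instead of unpacking it as **kwargs.
    with h5py.File("example.h5", "w") as f:
        f.create_dataset(
            "data",
            data=numpy.arange(100),
            compression=hdf5plugin.Zstd(clevel=9),
        )

    # Read: importing hdf5plugin registers the filter plugins with the
    # HDF5 library, so the dataset decompresses transparently.
    with h5py.File("example.h5", "r") as f:
        assert numpy.array_equal(f["data"][()], numpy.arange(100))

Passing ``compression=`` works because ``FilterRefBase`` instances expose ``filter_id`` and ``filter_options``, which h5py>=3 understands natively; the ``Mapping`` interface that the removed fallback also implemented remains useful only for legacy ``**``-style unpacking.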
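For context on patch 1: the deleted ``_FilterRefClass`` fallback replicated ``h5py.filters.FilterRefBase``, which, as its docstring stated, only requires a subclass to define ``filter_id`` and optionally a ``filter_options`` tuple. A hedged sketch of such a subclass follows; ``Lz4Ref`` is a hypothetical name used purely for illustration (hdf5plugin's own ``LZ4`` class is the real implementation), and 32004 is the HDF5-registered LZ4 filter ID that hdf5plugin exposes as ``LZ4_ID``:

.. code-block:: python

    import h5py

    class Lz4Ref(h5py.filters.FilterRefBase):
        """Hypothetical reference to the LZ4 filter (HDF5 filter ID 32004)."""
        filter_id = 32004  # same value as hdf5plugin.LZ4_ID

        def __init__(self, nbytes=0):
            # Mirrors hdf5plugin.LZ4: a single option, the block size in bytes.
            self.filter_options = (nbytes,)

An instance can then be passed directly as ``compression=Lz4Ref()``, matching the pattern these patches adopt throughout the docstrings and tests.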