diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md index 501269b..268473a 100644 --- a/.github/CONTRIBUTING.md +++ b/.github/CONTRIBUTING.md @@ -45,7 +45,7 @@ py -m venv .venv py -m install -v -e .[dev] ``` -# Post setup +# Pre-commit You should prepare pre-commit, which will help you by checking that commits pass required checks: @@ -87,15 +87,3 @@ You can see a preview with: ```bash nox -s docs -- --serve ``` - -# Pre-commit - -This project uses pre-commit for all style checking. While you can run it with -nox, this is such an important tool that it deserves to be installed on its own. -Install pre-commit and run: - -```bash -pre-commit run -a -``` - -to check all files. diff --git a/.github/workflows/cd.yml b/.github/workflows/cd.yml index 6d07484..1d64b59 100644 --- a/.github/workflows/cd.yml +++ b/.github/workflows/cd.yml @@ -1,7 +1,11 @@ -name: wheels +name: CD on: workflow_dispatch: + pull_request: + push: + branches: + - main release: types: - published @@ -11,63 +15,42 @@ concurrency: cancel-in-progress: true env: + # Many color libraries just need this to be set to any value, but at least + # one distinguishes color depth, where "3" -> "256-bit color". 
FORCE_COLOR: 3 jobs: - make_sdist: - name: Make SDist + dist: + name: Distribution build runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - - name: Build SDist - run: pipx run build --sdist - - - uses: actions/upload-artifact@v4 - with: - name: cibw-sdist - path: dist/*.tar.gz - - build_wheels: - name: Wheel on ${{ matrix.os }} - runs-on: ${{ matrix.os }} - strategy: - fail-fast: false - matrix: - os: [ubuntu-latest, windows-latest, macos-latest] steps: - uses: actions/checkout@v4 with: fetch-depth: 0 - - uses: pypa/cibuildwheel@v2.23 - - - name: Upload wheels - uses: actions/upload-artifact@v4 - with: - name: cibw-wheels-${{ matrix.os }}-${{ strategy.job-index }} - path: wheelhouse/*.whl + - uses: hynek/build-and-inspect-python-package@v2 - upload_all: - needs: [build_wheels, make_sdist] + publish: + needs: [dist] + name: Publish to PyPI environment: pypi permissions: id-token: write + attestations: write + contents: read runs-on: ubuntu-latest if: github.event_name == 'release' && github.event.action == 'published' steps: - uses: actions/download-artifact@v4 with: - pattern: cibw-* + name: Packages path: dist - merge-multiple: true - - uses: pypa/gh-action-pypi-publish@release/v1 + - name: Generate artifact attestation for sdist and wheel + uses: actions/attest-build-provenance@v2.1.0 with: - # Remember to tell (test-)pypi about this repo before publishing - # Remove this line to publish to PyPI - repository-url: https://test.pypi.org/legacy/ + subject-path: "dist/*" + + - uses: pypa/gh-action-pypi-publish@release/v1 diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ff8e155..e67c0f3 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -12,6 +12,8 @@ concurrency: cancel-in-progress: true env: + # Many color libraries just need this to be set to any value, but at least + # one distinguishes color depth, where "3" -> "256-bit color". 
FORCE_COLOR: 3 jobs: @@ -28,10 +30,8 @@ jobs: - uses: pre-commit/action@v3.0.1 with: extra_args: --hook-stage manual --all-files - # - name: Run PyLint - # run: | - # echo "::add-matcher::$GITHUB_WORKSPACE/.github/matchers/pylint.json" - # pipx run nox -s pylint + - name: Run PyLint + run: pipx run nox -s pylint -- --output-format=github checks: name: Check Python ${{ matrix.python-version }} on ${{ matrix.runs-on }} @@ -40,11 +40,11 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.10", "3.12"] - runs-on: [ubuntu-latest, macos-latest, windows-latest] + python-version: ["3.10", "3.13"] + runs-on: [ubuntu-latest] #, windows-latest, macos-14] # include: - # - python-version: pypy-3.10 + # - python-version: "pypy-3.10" # runs-on: ubuntu-latest steps: diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 063c1b7..269761a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -4,13 +4,13 @@ ci: repos: - repo: https://github.com/adamchainz/blacken-docs - rev: "1.16.0" + rev: "1.19.1" hooks: - id: blacken-docs additional_dependencies: [black==24.*] - repo: https://github.com/pre-commit/pre-commit-hooks - rev: "v4.5.0" + rev: "v5.0.0" hooks: - id: check-added-large-files - id: check-case-conflict @@ -33,21 +33,21 @@ repos: - id: rst-inline-touching-normal - repo: https://github.com/pre-commit/mirrors-prettier - rev: "v3.1.0" + rev: "v4.0.0-alpha.8" hooks: - id: prettier types_or: [yaml, markdown, html, css, scss, javascript, json] args: [--prose-wrap=always] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: "v0.3.0" + rev: "v0.11.8" hooks: - id: ruff args: ["--fix", "--show-fixes"] - id: ruff-format - repo: https://github.com/pre-commit/mirrors-clang-format - rev: "v17.0.6" + rev: "v20.1.3" hooks: - id: clang-format types_or: [c++, c, cuda] @@ -62,12 +62,12 @@ repos: # - pytest - repo: https://github.com/codespell-project/codespell - rev: "v2.2.6" + rev: "v2.4.1" hooks: - id: codespell - repo: 
https://github.com/shellcheck-py/shellcheck-py - rev: "v0.9.0.6" + rev: "v0.10.0.1" hooks: - id: shellcheck @@ -85,13 +85,13 @@ repos: - id: cmake-format - repo: https://github.com/abravalheri/validate-pyproject - rev: "v0.16" + rev: "v0.24.1" hooks: - id: validate-pyproject additional_dependencies: ["validate-pyproject-schema-store[all]"] - repo: https://github.com/python-jsonschema/check-jsonschema - rev: "0.28.0" + rev: "0.33.0" hooks: - id: check-dependabot - id: check-github-workflows diff --git a/pyproject.toml b/pyproject.toml index 97e855c..d5fc11c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,7 +1,3 @@ -[build-system] -build-backend = "setuptools.build_meta" -requires = ["setuptools>=42", "setuptools-scm>=7"] - [project] name = "xarray-pschdf5" authors = [ @@ -10,26 +6,25 @@ authors = [ description = "XArray reader for PSC HDF5 data" readme = "README.md" license.file = "LICENSE" -requires-python = ">=3.8" +requires-python = ">=3.10" classifiers = [ "Development Status :: 1 - Planning", "Intended Audience :: Science/Research", "Intended Audience :: Developers", - "License :: OSI Approved :: BSD License", + "License :: OSI Approved :: Apache Software License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3 :: Only", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "Topic :: Scientific/Engineering", "Typing :: Typed", ] dynamic = ["version"] -dependencies = ["pugixml >= 0.5.0", "h5py", "xarray"] +dependencies = ["pugixml >= 0.5.0", "h5py", "xarray", "typing-extensions"] [project.optional-dependencies] test = ["pytest >=6", "pytest-cov >=3", "typing-extensions"] @@ -51,8 +46,12 @@ Changelog = "https://github.com/psc-code/xarray-pschdf5/releases" 
[project.entry-points."xarray.backends"] pschdf5 = "xarray_pschdf5.pschdf5_backend:PscHdf5Entrypoint" +[build-system] +build-backend = "setuptools.build_meta" +requires = ["setuptools>=42", "setuptools-scm>=7"] + [tool.scikit-build] -minimum-version = "0.4" +minimum-version = "build-system.requires" build-dir = "build/{wheel_tag}" metadata.version.provider = "scikit_build_core.metadata.setuptools_scm" sdist.include = ["src/xarray_pschdf5/_version.py"] @@ -62,12 +61,6 @@ sdist.include = ["src/xarray_pschdf5/_version.py"] write_to = "src/xarray_pschdf5/_version.py" -[tool.cibuildwheel] -test-command = "pytest {project}/tests" -test-extras = ["test"] -test-skip = ["*universal2:arm64"] - - [tool.pytest.ini_options] minversion = "6.0" addopts = ["-ra", "--showlocals", "--strict-markers", "--strict-config"] @@ -76,14 +69,13 @@ filterwarnings = ["error"] log_cli_level = "INFO" testpaths = ["tests"] - [tool.coverage] run.source = ["xarray_pschdf5"] report.exclude_also = ['\.\.\.', 'if typing.TYPE_CHECKING:'] [tool.mypy] files = ["src", "tests"] -python_version = "3.8" +python_version = "3.10" warn_unused_configs = true strict = true enable_error_code = ["ignore-without-code", "redundant-expr", "truthy-bool"] @@ -128,8 +120,8 @@ ignore = [ "PLR09", # Too many <...> "PLR2004", # Magic value used in comparison "ISC001", # Conflicts with formatter - "C408", # like my dict() calls - "RET504", + "UP031", # Use format specifiers instead of percent format + "G002", # Logging statement uses `%` ] isort.required-imports = ["from __future__ import annotations"] # Uncomment if using a _compat.typing backport @@ -141,7 +133,7 @@ isort.required-imports = ["from __future__ import annotations"] [tool.pylint] -py-version = "3.8" +py-version = "3.10" ignore-paths = [".*/_version.py"] extension-pkg-allow-list = ["xarray_pschdf5._core"] reports.output-format = "colorized" diff --git a/.github/release.yml b/.github/release.yml new file mode 100644 index 0000000..9d1e098 --- /dev/null +++ b/.github/release.yml @@ -0,0
+1,5 @@ +changelog: + exclude: + authors: + - dependabot + - pre-commit-ci diff --git a/src/xarray_pschdf5/pschdf5_backend.py b/src/xarray_pschdf5/pschdf5_backend.py index 773a946..2840c8a 100644 --- a/src/xarray_pschdf5/pschdf5_backend.py +++ b/src/xarray_pschdf5/pschdf5_backend.py @@ -13,25 +13,16 @@ from pugixml import pugi from typing_extensions import override from xarray.backends import BackendEntrypoint +from xarray.backends.common import AbstractDataStore +from xarray.core.datatree import DataTree +from xarray.core.types import ReadBuffer class PscHdf5Entrypoint(BackendEntrypoint): - @override - def open_dataset( - self, - filename_or_obj, - *, - mask_and_scale: bool = True, - decode_times: bool = True, - concat_characters: bool = True, - decode_coords: bool = True, - drop_variables: str | Iterable[str] | None = None, - use_cftime: bool | None = None, - decode_timedelta: bool | None = None, - # other backend specific keyword arguments - # `chunks` and `cache` DO NOT go here, they are handled by xarray - ): - return pschdf5_open_dataset(filename_or_obj, drop_variables=drop_variables) + """XArray backend entrypoint for PSC HDF5 data""" + + description = "XArray reader for PSC HDF5 data" + # url = "https://link_to/your_backend/documentation" open_dataset_parameters: ClassVar[Any] = ["filename_or_obj", "drop_variables"] @@ -43,9 +34,30 @@ def guess_can_open(self, filename_or_obj) -> bool: return filename_or_obj.suffix == ".xdmf" - description = "XArray reader for PSC HDF5 data" + @override + def open_dataset( + self, + filename_or_obj, + *, + mask_and_scale: bool = True, # pylint: disable=unused-argument + decode_times: bool = True, # pylint: disable=unused-argument + concat_characters: bool = True, # pylint: disable=unused-argument + decode_coords: bool = True, # pylint: disable=unused-argument + drop_variables: str | Iterable[str] | None = None, + use_cftime: bool | None = None, # pylint: disable=unused-argument + decode_timedelta: bool | None = None, # 
pylint: disable=unused-argument + # other backend specific keyword arguments + # `chunks` and `cache` DO NOT go here, they are handled by xarray + ): + return pschdf5_open_dataset(filename_or_obj, drop_variables=drop_variables) - url = "https://link_to/your_backend/documentation" # FIXME + @override + def open_datatree( + self, + filename_or_obj: str | os.PathLike[Any] | ReadBuffer[Any] | AbstractDataStore, + **kwargs: Any, + ) -> DataTree: + raise NotImplementedError() @dataclasses.dataclass @@ -92,7 +104,7 @@ def pschdf5_open_dataset(filename_or_obj, *, drop_variables=None): _, var_info = next(iter(var_infos.items())) coords = _make_coords(grid, var_info.times) - vars = {} + vars = {} # pylint: disable=redefined-builtin for name, info in var_infos.items(): da = xr.DataArray(data=np.empty(info.shape), dims=info.dims) for it, path in enumerate(info.paths): @@ -106,7 +118,7 @@ def pschdf5_open_dataset(filename_or_obj, *, drop_variables=None): ds = xr.Dataset(vars, coords=coords, attrs=attrs) # ds.set_close(my_close_method) - return ds + return ds # noqa: RET504 def _make_dims(fld): @@ -154,7 +166,7 @@ def _parse_dimensions_attr(node): def _parse_geometry_origin_dxdydz(geometry): - geo = dict() + geo = {} for child in geometry.children(): if child.attribute("Name").value() == "Origin": geo["origin"] = np.asarray( @@ -169,20 +181,18 @@ def _parse_geometry_origin_dxdydz(geometry): def _parse_geometry_xyz(geometry): - geo = dict() data_item = geometry.child("DataItem") assert data_item.attribute("Format").value() == "XML" dims = _parse_dimensions_attr(data_item) data = np.loadtxt(data_item.text().as_string().splitlines()) - geo = {"data_item": data.reshape(dims)} - return geo + return {"data_item": data.reshape(dims)} def _parse_temporal_collection(filename, grid_collection): temporal = [] for node in grid_collection.children(): href = node.attribute("href").value() - doc = pugi.XMLDocument() + doc = pugi.XMLDocument() # pylint: disable=c-extension-no-member result = 
doc.load_file(filename.parent / href) if not result: msg = f"parse error: status={result.status} description={result.description()}" @@ -240,10 +250,11 @@ def _parse_spatial_collection(grid_collection): def read_xdmf(filename): - doc = pugi.XMLDocument() + doc = pugi.XMLDocument() # pylint: disable=c-extension-no-member result = doc.load_file(filename) if not result: - raise f"parse error: status={result.status} description={result.description()}" + msg = f"parse error: status={result.status} description={result.description()}" + raise RuntimeError(msg) grid_collection = doc.child("Xdmf").child("Domain").child("Grid") assert grid_collection.attribute("GridType").value() == "Collection"