Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .appveyor.yml
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ build: off

environment:
# not ready for --doctest-modules yet, some are invalid
TEST_SCRIPT: 'hatch test -i py=3.9 --cover --durations 10'
TEST_SCRIPT: 'hatch test -i py=3.10 --cover --durations 10'
# unless indicated otherwise, we test datalad_next
DTS: datalad_next
# place coverage files to a known location regardless of where a test run
Expand Down
13 changes: 4 additions & 9 deletions datalad_next/annexremotes/archivist.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,12 +35,7 @@

from pathlib import Path
from shutil import copyfileobj
from typing import (
Dict,
Generator,
List,
Tuple,
)
from collections.abc import Generator

from datalad_next.archive_operations import ArchiveOperations

Expand Down Expand Up @@ -379,13 +374,13 @@ class _ArchiveHandlers:
# TODO make archive access caching behavior configurable from the outside
def __init__(self, repo):
# mapping of archive keys to an info dict
self._db: Dict[AnnexKey, _ArchiveInfo] = {}
self._db: dict[AnnexKey, _ArchiveInfo] = {}
# for running git-annex queries against the repo
self._repo = repo

def from_locators(
self, locs: List[ArchivistLocator]
) -> Generator[Tuple[ArchiveOperations, Iterable[ArchivistLocator]],
self, locs: list[ArchivistLocator],
) -> Generator[tuple[ArchiveOperations, Iterable[ArchivistLocator]],
None, None]:
"""Produce archive handlers for the given locators

Expand Down
2 changes: 0 additions & 2 deletions datalad_next/commands/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,6 @@
"""
from __future__ import annotations

from typing import Dict

from datalad.interface.base import (
Interface,
build_doc,
Expand Down
1 change: 0 additions & 1 deletion datalad_next/commands/credentials.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,6 @@

import json
import logging
from typing import Dict

from datalad import (
cfg as dlcfg,
Expand Down
12 changes: 4 additions & 8 deletions datalad_next/config/utils.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,10 @@
from __future__ import annotations

from os import environ
from typing import (
Dict,
Mapping,
Tuple,
)
from collections.abc import Mapping


def get_gitconfig_items_from_env() -> Mapping[str, str | Tuple[str, ...]]:
def get_gitconfig_items_from_env() -> Mapping[str, str | tuple[str, ...]]:
"""Parse git-config ENV (``GIT_CONFIG_COUNT|KEY|VALUE``) and return as dict

This implementation does not use ``git-config`` directly, but aims to
Expand All @@ -29,7 +25,7 @@ def get_gitconfig_items_from_env() -> Mapping[str, str | Tuple[str, ...]]:
times, the respective values are aggregated and reported as a tuple
for that specific key.
"""
items: Dict[str, str | Tuple[str, ...]] = {}
items: dict[str, str | tuple[str, ...]] = {}
for k, v in ((_get_gitconfig_var_from_env(i, 'key'),
_get_gitconfig_var_from_env(i, 'value'))
for i in range(_get_gitconfig_itemcount())):
Expand Down Expand Up @@ -64,7 +60,7 @@ def _get_gitconfig_var_from_env(nid: int, kind: str) -> str:
return var


def set_gitconfig_items_in_env(items: Mapping[str, str | Tuple[str, ...]]):
def set_gitconfig_items_in_env(items: Mapping[str, str | tuple[str, ...]]):
"""Set git-config ENV (``GIT_CONFIG_COUNT|KEY|VALUE``) from a mapping

Any existing declaration of configuration items in the environment is
Expand Down
20 changes: 8 additions & 12 deletions datalad_next/constraints/exceptions.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,11 +6,7 @@
from dataclasses import dataclass
from textwrap import indent
from types import MappingProxyType
from typing import (
Any,
Dict,
Tuple,
)
from typing import Any

# needed for imports in other pieced of the ``constraints`` module
from datalad_next.exceptions import NoDatasetFound
Expand Down Expand Up @@ -38,7 +34,7 @@ def __init__(self,
constraint,
value: Any,
msg: str,
ctx: Dict[str, Any] | None = None):
ctx: dict[str, Any] | None = None):
"""
Parameters
----------
Expand Down Expand Up @@ -100,7 +96,7 @@ def constraint(self):
return self.args[1]

@property
def caused_by(self) -> Tuple[Exception] | None:
def caused_by(self) -> tuple[Exception] | None:
"""Returns a tuple of any underlying exceptions that caused a violation
"""
cb = self.context.get('__caused_by__', None)
Expand Down Expand Up @@ -149,7 +145,7 @@ class ConstraintErrors(ConstraintError):
nature of the context identifiers (except for being hashable). See
``CommandParametrizationError`` for a specialization.
"""
def __init__(self, exceptions: Dict[Any, ConstraintError]):
def __init__(self, exceptions: dict[Any, ConstraintError]):
super().__init__(
# this is the main payload, the base class expects a Constraint
# but only stores it
Expand Down Expand Up @@ -184,7 +180,7 @@ class ParameterContextErrors(Mapping):
# went wrong (in general, for a specific parameter, etc...)
def __init__(
self,
errors: Dict[ParameterConstraintContext, ConstraintError],
errors: dict[ParameterConstraintContext, ConstraintError],
):
self._errors = errors

Expand Down Expand Up @@ -249,7 +245,7 @@ class ParameterConstraintContext:

EnsureRange(min=3)(params['p1'] + params['p2'])
"""
parameters: Tuple[str]
parameters: tuple[str, ...]
description: str | None = None

def __str__(self):
Expand Down Expand Up @@ -297,8 +293,8 @@ class ParametrizationErrors(ConstraintErrors):
"""
def __init__(
self,
exceptions: Dict[str, ConstraintError] |
Dict[ParameterConstraintContext, ConstraintError]):
exceptions: dict[str, ConstraintError] |
dict[ParameterConstraintContext, ConstraintError]):
super().__init__(
{k if isinstance(k, ParameterConstraintContext)
else ParameterConstraintContext((k,)):
Expand Down
17 changes: 8 additions & 9 deletions datalad_next/constraints/parameter.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,12 +2,11 @@

from __future__ import annotations

from collections.abc import Container
from itertools import chain
from typing import (
from collections.abc import (
Callable,
Dict,
Container,
)
from itertools import chain

from .base import Constraint
from .basic import (
Expand Down Expand Up @@ -92,12 +91,12 @@ class EnsureCommandParameterization(Constraint):
"""
def __init__(
self,
param_constraints: Dict[str, Constraint],
param_constraints: dict[str, Constraint],
*,
validate_defaults: Container[str] | None = None,
joint_constraints:
Dict[ParameterConstraintContext, Callable] | None = None,
tailor_for_dataset: Dict[str, str] | None = None,
dict[ParameterConstraintContext, Callable] | None = None,
tailor_for_dataset: dict[str, str] | None = None,
):
"""
Parameters
Expand Down Expand Up @@ -134,7 +133,7 @@ def __init__(
self._validate_defaults = validate_defaults or set()
self._tailor_for_dataset = tailor_for_dataset or {}

def joint_validation(self, params: Dict, on_error: str) -> Dict:
def joint_validation(self, params: dict, on_error: str) -> dict:
"""Higher-order validation considering multiple parameters at a time

This method is called with all, individually validated, command
Expand Down Expand Up @@ -245,7 +244,7 @@ def __call__(
at_default=None,
required=None,
on_error='raise-early',
) -> Dict:
) -> dict:
"""
Parameters
----------
Expand Down
5 changes: 1 addition & 4 deletions datalad_next/iter_collections/annexworktree.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,9 +13,6 @@
PurePath,
)
from typing import (
Dict,
Type,
Union,
Any,
Generator,
)
Expand Down Expand Up @@ -345,7 +342,7 @@ def _get_worktree_item(


def _join_annex_info(
processed_data: Union[Type[StoreOnly], Dict[str, str]],
processed_data: type[StoreOnly] | dict[str, str],
stored_data: GitWorktreeItem,
) -> dict:
"""Internal helper to join results from pipeline stages
Expand Down
21 changes: 9 additions & 12 deletions datalad_next/iter_collections/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,11 +12,8 @@
import stat
from typing import (
TYPE_CHECKING,
Dict,
Union,
Any,
IO,
List,
)

from datalad_next.consts import COPY_BUFSIZE
Expand Down Expand Up @@ -102,12 +99,12 @@ def from_path(
path: Path,
*,
link_target: bool = True,
) -> Union[
DirectoryItem,
AnnexWorktreeFileSystemItem,
FileSystemItem,
GitWorktreeFileSystemItem,
]:
) -> (
DirectoryItem
| AnnexWorktreeFileSystemItem
| FileSystemItem
| GitWorktreeFileSystemItem
):
"""Populate item properties from a single `stat` and `readlink` call

The given ``path`` must exist. The ``link_target`` flag indicates
Expand Down Expand Up @@ -142,10 +139,10 @@ def from_path(


def compute_multihash_from_fp(
fp: Union[BufferedReader, ExFileObject, ZipExtFile],
hash: List[str],
fp: BufferedReader | ExFileObject | ZipExtFile,
hash: list[str],
bufsize: int = COPY_BUFSIZE,
) -> Dict[str, str]:
) -> dict[str, str]:
"""Compute multiple hashes from a file-like
"""
mhash = MultiHash(hash)
Expand Down
10 changes: 4 additions & 6 deletions datalad_next/patches/customremotes_main.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,12 +14,10 @@
This patch also adds code that allows to patch a class that is already loaded
"""

from __future__ import annotations

from contextlib import closing
import logging
from typing import (
Dict,
Type,
)

from . import apply_patch
from datalad_next.annexremotes import SpecialRemote
Expand Down Expand Up @@ -63,7 +61,7 @@ class AnnexProgressLogHandler(logging.Handler):
def __init__(self, annexremote: SpecialRemote):
super().__init__()
self.annexremote = annexremote
self._ptrackers: Dict[str, int] = {}
self._ptrackers: dict[str, int] = {}

def emit(self, record: logging.LogRecord):
"""Process a log record
Expand Down Expand Up @@ -101,7 +99,7 @@ def emit(self, record: logging.LogRecord):
self.annexremote.send_progress(prg)


def patched_underscore_main(args: list, cls: Type[SpecialRemote]):
def patched_underscore_main(args: list, cls: type[SpecialRemote]):
"""Full replacement for datalad.customremotes.main._main()

Its only purpose is to create a running instance of a SpecialRemote.
Expand Down
25 changes: 11 additions & 14 deletions datalad_next/patches/push_to_export_remote.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,11 +9,8 @@

import logging
from typing import (
Dict,
Generator,
Iterable,
Optional,
Union,
)

import datalad.core.distributed.push as mod_push
Expand All @@ -37,12 +34,12 @@
lgr = logging.getLogger('datalad.core.distributed.push')


def _is_export_remote(remote_info: Optional[Dict]) -> bool:
def _is_export_remote(remote_info: dict | None) -> bool:
"""Check if remote_info is valid and has exporttree set to "yes"

Parameters
----------
remote_info: Optional[Dict]
remote_info: dict | None
Optional dictionary that contains git annex special remote info.

Returns
Expand All @@ -57,8 +54,8 @@ def _is_export_remote(remote_info: Optional[Dict]) -> bool:


def _get_credentials(ds: Dataset,
remote_info: Dict
) -> Optional[Dict]:
remote_info: dict,
) -> dict | None:

# Check for credentials
params = {
Expand Down Expand Up @@ -125,8 +122,8 @@ def get_export_records(repo: AnnexRepo) -> Generator:


def _get_export_log_entry(repo: AnnexRepo,
target_uuid: str
) -> Optional[Dict]:
target_uuid: str,
) -> dict | None:
target_entries = [
entry
for entry in repo.get_export_records()
Expand All @@ -138,7 +135,7 @@ def _get_export_log_entry(repo: AnnexRepo,


def _is_valid_treeish(repo: AnnexRepo,
export_entry: Dict,
export_entry: dict,
) -> bool:

# Due to issue https://github.com/datalad/datalad-next/issues/39
Expand All @@ -156,10 +153,10 @@ def _transfer_data(repo: AnnexRepo,
target: str,
content: Iterable,
data: str,
force: Optional[str],
jobs: Optional[Union[str, int]],
res_kwargs: Dict,
got_path_arg: bool
force: str | None,
jobs: str | int | None,
res_kwargs: dict,
got_path_arg: bool,
) -> Generator:

target_uuid, remote_info = ([
Expand Down
Loading
Loading