diff --git a/datalad_gooey/constraints.py b/datalad_gooey/constraints.py
index bdd76cb..4cffe55 100644
--- a/datalad_gooey/constraints.py
+++ b/datalad_gooey/constraints.py
@@ -2,265 +2,33 @@
     Path,
     PurePath,
 )
-import re
-from typing import Dict
 
 from datalad import cfg as dlcfg
 # this is an import target for all constraints used within gooey
-from datalad.support.constraints import (
-    AltConstraints,
+from datalad_next.constraints.base import (
     Constraint,
-    EnsureStr as _CoreEnsureStr,
-    EnsureChoice,
-    EnsureNone,
+    AltConstraints,
+)
+from datalad_next.constraints import (
     EnsureBool,
+    EnsureStr,
+    EnsureChoice,
+    EnsureIterableOf,
     EnsureInt,
+    EnsureListOf,
+    EnsureNone,
+    EnsurePath,
     EnsureRange,
+    NoConstraint,
 )
+from datalad_next.constraints.parameter_legacy import EnsureParameterConstraint
+from datalad_next.constraints.compound import ConstraintWithPassthrough
 from datalad.distribution.dataset import EnsureDataset as CoreEnsureDataset
 from datalad.distribution.dataset import (
     Dataset,
-    require_dataset,
 )
 
 
-# extension for Constraint from datalad-core
-def for_dataset(self, dataset: Dataset) -> Constraint:
-    """Return a constraint-variant for a specific dataset context
-
-    The default implementation returns the unmodified, identical
-    constraint. However, subclasses can implement different behaviors.
-    """
-    return self
-
-
-# patch it in
-Constraint.for_dataset = for_dataset
-
-
-class NoConstraint(Constraint):
-    """A contraint that represents no constraints"""
-    def short_description(self):
-        return ''
-
-    def __call__(self, value):
-        return value
-
-
-# extends the implementation in -core with regex matching
-class EnsureStr(_CoreEnsureStr):
-    """Ensure an input is a string of some min. length and matching a pattern
-
-    Pattern matching is optional and minimum length is zero (empty string is
-    OK).
-
-    No type conversion is performed.
-    """
-    def __init__(self, min_len: int = 0, match: str = None):
-        """
-        Parameters
-        ----------
-        min_len: int, optional
-          Minimal length for a string.
-        match:
-          Regular expression used to match any input value against.
-          Values not matching the expression will cause a
-          `ValueError` to be raised.
-        """
-        super().__init__(min_len=min_len)
-        self._match = match
-        if match is not None:
-            self._match = re.compile(match)
-
-    def __call__(self, value) -> str:
-        value = super().__call__(value)
-        if self._match:
-            if not self._match.match(value):
-                raise ValueError(
-                    f'{value} does not match {self._match.pattern}')
-        return value
-
-    def long_description(self):
-        return 'must be a string{}'.format(
-            f' and match {self._match.pattern}' if self._match else '',
-        )
-
-    def short_description(self):
-        return 'str{}'.format(
-            f'({self._match.pattern})' if self._match else '',
-        )
-
-
-class EnsureMapping(Constraint):
-    """Ensure a mapping of a key to a value of a specific nature"""
-
-    def __init__(self,
-                 key: Constraint,
-                 value: Constraint,
-                 delimiter: str = ':'):
-        """
-        Parameters
-        ----------
-        key:
-          Key constraint instance.
-        value:
-          Value constraint instance.
-        delimiter:
-          Delimiter to use for splitting a key from a value for a `str`
-          input.
-        """
-        super().__init__()
-        self._key_constraint = key
-        self._value_constraint = value
-        self._delimiter = delimiter
-
-    def short_description(self):
-        return 'mapping of {} -> {}'.format(
-            self._key_constraint.short_description(),
-            self._value_constraint.short_description(),
-        )
-
-    def __call__(self, value) -> Dict:
-        # determine key and value from various kinds of input
-        if isinstance(value, str):
-            # will raise if it cannot split into two
-            key, val = value.split(sep=self._delimiter, maxsplit=1)
-        elif isinstance(value, dict):
-            if not len(value):
-                raise ValueError('dict does not contain a key')
-            elif len(value) > 1:
-                raise ValueError(f'{value} contains more than one key')
-            key, val = value.copy().popitem()
-        elif isinstance(value, (list, tuple)):
-            if not len(value) == 2:
-                raise ValueError('key/value sequence does not have length 2')
-            key, val = value
-
-        key = self._key_constraint(key)
-        val = self._value_constraint(val)
-        return {key: val}
-
-    def for_dataset(self, dataset: Dataset):
-        # tailor both constraints to the dataset and reuse delimiter
-        return EnsureMapping(
-            key=self._key_constraint.for_dataset(dataset),
-            value=self._value_constraint.for_dataset(dataset),
-            delimiter=self._delimiter,
-        )
-
-
-class EnsureIterableOf(Constraint):
-    """Ensure that an input is a list of a particular data type
-    """
-    def __init__(self,
-                 iter_type: type,
-                 item_constraint: callable,
-                 min_len: int or None = None,
-                 max_len: int or None = None):
-        """
-        Parameters
-        ----------
-        iter_type:
-          Target type of iterable. Common types are `list`, or `tuple`,
-          but also generator type iterables are possible. Type constructor
-          must take an iterable with items as the only required positional
-          argument.
-        item_constraint:
-          Each incoming item will be mapped through this callable
-          before being passed to the iterable type constructor.
-        min_len:
-          If not None, the iterable will be verified to have this minimum
-          number of items. The iterable type must implement `__len__()`
-          for this check to be supported.
-        max_len:
-          If not None, the iterable will be verified to have this maximum
-          number of items. The iterable type must implement `__len__()`
-          for this check to be supported.
-        """
-        if min_len is not None and max_len is not None and min_len > max_len:
-            raise ValueError(
-                'Given minimum length exceeds given maximum length')
-        self._iter_type = iter_type
-        self._item_constraint = item_constraint
-        self._min_len = min_len
-        self._max_len = max_len
-        super().__init__()
-
-    @property
-    def item_constraint(self):
-        return self._item_constraint
-
-    def __call__(self, value):
-        iter = self._iter_type(
-            self._item_constraint(i) for i in value
-        )
-        if self._min_len is not None or self._max_len is not None:
-            # only do this if necessary, generators will not support
-            # __len__, for example
-            iter_len = len(iter)
-            if self._min_len is not None and iter_len < self._min_len:
-                raise ValueError(
-                    f'Length-{iter_len} iterable is shorter than '
-                    f'required minmum length {self._min_len}')
-            if self._max_len is not None and iter_len > self._max_len:
-                raise ValueError(
-                    f'Length-{iter_len} iterable is longer than '
-                    f'required maximum length {self._max_len}')
-        return iter
-
-    def short_description(self):
-        return f'{self._iter_type}({self._item_constraint})'
-
-
-class EnsureListOf(EnsureIterableOf):
-    def __init__(self,
-                 item_constraint: callable,
-                 min_len: int or None = None,
-                 max_len: int or None = None):
-        """
-        Parameters
-        ----------
-        item_constraint:
-          Each incoming item will be mapped through this callable
-          before being passed to the list constructor.
-        min_len:
-          If not None, the list will be verified to have this minimum
-          number of items.
-        max_len:
-          If not None, the list will be verified to have this maximum
-          number of items.
-        """
-        super().__init__(list, item_constraint,
-                         min_len=min_len, max_len=max_len)
-
-    def short_description(self):
-        return f'list({self._item_constraint})'
-
-
-class EnsureTupleOf(EnsureIterableOf):
-    def __init__(self,
-                 item_constraint: callable,
-                 min_len: int or None = None,
-                 max_len: int or None = None):
-        """
-        Parameters
-        ----------
-        item_constraint:
-          Each incoming item will be mapped through this callable
-          before being passed to the tuple constructor.
-        min_len:
-          If not None, the tuple will be verified to have this minimum
-          number of items.
-        max_len:
-          If not None, the tuple will be verified to have this maximum
-          number of items.
-        """
-        super().__init__(tuple, item_constraint,
-                         min_len=min_len, max_len=max_len)
-
-    def short_description(self):
-        return f'tuple({self._item_constraint})'
-
-
 class EnsureStrOrNoneWithEmptyIsNone(EnsureStr):
     def __call__(self, value):
         if value is None:
@@ -271,151 +39,6 @@ def __call__(self, value):
         return v if v else None
 
 
-class EnsurePath(Constraint):
-    """Ensures an input is convertible to a (platform) path and returns a `Path`
-
-    Optionally, the path can be tested for existence and whether it is absolute
-    or relative.
-    """
-    def __init__(self,
-                 path_type: type = Path,
-                 is_format: str or None = None,
-                 lexists: bool or None = None,
-                 is_mode: callable = None):
-        """
-        Parameters
-        ----------
-        path_type:
-          Specific pathlib type to convert the input to. The default is `Path`,
-          i.e. the platform's path type. Not all pathlib Path types can be
-          instantiated on all platforms, and not all checks are possible with
-          all path types.
-        is_format: {'absolute', 'relative'} or None
-          If not None, the path is tested whether it matches being relative or
-          absolute.
-        lexists:
-          If not None, the path is tested to confirmed exists or not. A symlink
-          need not point to an existing path to fullfil the "exists" condition.
-        is_mode:
-          If set, this callable will receive the path's `.lstat().st_mode`,
-          and an exception is raised, if the return value does not evaluate
-          to `True`. Typical callables for this feature are provided by the
-          `stat` module, e.g. `S_ISDIR()`
-        """
-        super().__init__()
-        self._path_type = path_type
-        self._is_format = is_format
-        self._lexists = lexists
-        self._is_mode = is_mode
-
-    def __call__(self, value):
-        path = self._path_type(value)
-        mode = None
-        if self._lexists is not None or self._is_mode is not None:
-            try:
-                mode = path.lstat().st_mode
-            except FileNotFoundError:
-                # this is fine, handled below
-                pass
-        if self._lexists is not None:
-            if self._lexists and mode is None:
-                raise ValueError(f'{path} does not exist')
-            elif not self._lexists and mode is not None:
-                raise ValueError(f'{path} does (already) exist')
-        if self._is_format is not None:
-            is_abs = path.is_absolute()
-            if self._is_format == 'absolute' and not is_abs:
-                raise ValueError(f'{path} is not an absolute path')
-            elif self._is_format == 'relative' and is_abs:
-                raise ValueError(f'{path} is not a relative path')
-        if self._is_mode is not None:
-            if not self._is_mode(mode):
-                raise ValueError(f'{path} does not match desired mode')
-        return path
-
-    def short_description(self):
-        return '{}{}path'.format(
-            'existing '
-            if self._lexists
-            else 'non-existing '
-            if self._lexists else '',
-            'absolute '
-            if self._is_format == 'absolute'
-            else 'relative'
-            if self._is_format == 'relative'
-            else '',
-        )
-
-
-class EnsureGitRefName(Constraint):
-    """Ensures that a reference name is well formed
-
-    Validation is peformed by calling `git check-ref-format`.
-    """
-    def __init__(self,
-                 allow_onelevel: bool = True,
-                 normalize: bool = True,
-                 refspec_pattern: bool = False):
-        """
-        Parameters
-        ----------
-        allow_onelevel:
-          Flag whether one-level refnames are accepted, e.g. just 'main'
-          instead of 'refs/heads/main'.
-        normalize:
-          Flag whether a normalized refname is validated and return.
-          This includes removing any leading slash (/) characters and
-          collapsing runs of adjacent slashes between name components
-          into a single slash.
-        refspec_pattern:
-          Flag whether to interpret a value as a reference name pattern
-          for a refspec (allowed to contain a single '*').
-        """
-        super().__init__()
-        self._allow_onelevel = allow_onelevel
-        self._normalize = normalize
-        self._refspec_pattern = refspec_pattern
-
-    def __call__(self, value: str) -> str:
-        if not value:
-            # simple, do here
-            raise ValueError('refname must not be empty')
-
-        from datalad.runner import GitRunner, CommandError, StdOutCapture
-        runner = GitRunner()
-        cmd = ['git', 'check-ref-format']
-        cmd.append('--allow-onelevel'
-                   if self._allow_onelevel
-                   else '--no-allow-onelevel')
-        if self._refspec_pattern:
-            cmd.append('--refspec-pattern')
-        if self._normalize:
-            cmd.append('--normalize')
-
-        cmd.append(value)
-
-        try:
-            out = runner.run(cmd, protocol=StdOutCapture)
-        except CommandError as e:
-            raise ValueError(f'{value} is not a valid refname') from e
-
-        if self._normalize:
-            return out['stdout'].strip()
-        else:
-            return value
-
-    def long_description(self):
-        return 'must be a string{}'.format(
-            f' and match {self._match.pattern}' if self._match else '',
-        )
-
-    def short_description(self):
-        return '{}Git refname{}'.format(
-            '(single-level) ' if self._allow_onelevel else '',
-            ' or refspec pattern' if self._refspec_pattern else '',
-        )
-
-
 class EnsureDataset(CoreEnsureDataset):
     # for now, this is just as pointless as the implementation in core
     # plus allowing for Path objects
diff --git a/datalad_gooey/param_form_utils.py b/datalad_gooey/param_form_utils.py
index f0ac842..cfc2ba6 100644
--- a/datalad_gooey/param_form_utils.py
+++ b/datalad_gooey/param_form_utils.py
@@ -32,21 +32,20 @@
 from .utils import _NoValue
 from .constraints import (
     AltConstraints,
+    ConstraintWithPassthrough,
     EnsureBool,
     EnsureExistingDirectory,
     EnsureDatasetSiblingName,
     EnsureNone,
     EnsureIterableOf,
-    EnsureListOf,
     EnsureDataset,
-    CoreEnsureDataset,
     EnsureConfigProcedureName,
     EnsurePath,
     EnsureInt,
     EnsureRange,
     EnsureCredentialName,
     EnsureStr,
-    NoConstraint,
+    EnsureParameterConstraint,
 )
 
 __all__ = ['populate_form_w_params']
@@ -169,82 +168,20 @@
         default: Any,
         param_spec: Parameter,
         cmd_api_spec: Dict):
-    action = param_spec.cmd_kwargs.get('action')
-    # definitive per-item constraint, consider override from API
-    # otherwise fall back on Parameter.constraints
-    constraint = cmd_api_spec['parameter_constraints'][pname] \
-        if pname in cmd_api_spec.get('parameter_constraints', []) \
-        else override_constraint_by_param_name.get(
-            pname,
-            param_spec.constraints)
-
-    if not constraint:
-        if action in ('store_true', 'store_false'):
-            constraint = EnsureBool()
-        elif param_spec.cmd_kwargs.get('choices'):
-            constraint = EnsureChoice(*param_spec.cmd_kwargs.get('choices'))
-        else:
-            # always have one for simplicity
-            constraint = NoConstraint()
-
-    # we must addtionally consider the following nargs spec for
-    # a complete constraint specification
-    # (int, '*', '+'), plus action=
-    # - 'store_const' TODO
-    # - 'store_true' and 'store_false' TODO
-    # - 'append'
-    # - 'append_const' TODO
-    # - 'count' TODO
-    # - 'extend' TODO
-
-    # get the definitive argparse "nargs" value
-    nargs = None
-    if pname in cmd_api_spec.get('parameter_nargs', []):
-        # take as gospel
-        nargs = cmd_api_spec['parameter_nargs'][pname]
-    else:
-        # fall back on Parameter attribute
-        nargs = param_spec.cmd_kwargs.get('nargs', None)
-        try:
-            nargs = int(nargs)
-        except (ValueError, TypeError):
-            pass
-
-    # TODO reconsider using `list`, with no length-check it could
-    # be a generator
-    if isinstance(nargs, int):
-        # sequence of a particular length
-        constraint = EnsureIterableOf(
-            list, constraint, min_len=nargs, max_len=nargs)
-    elif nargs == '*':
-        # datalad expects things often/always to also work for a single item
-        constraint = EnsureIterableOf(list, constraint) | constraint
-    elif nargs == '+':
-        # sequence of at least 1 item, always a sequence,
-        # but again datalad expects things often/always to also work for
-        # a single item
-        constraint = EnsureIterableOf(
-            list, constraint, min_len=1) | constraint
-    # handling of `default` and `const` would be here
-    #elif nargs == '?'
-
-    if action == 'append':
-        # wrap into a(nother) sequence
-        # (think: list of 2-tuples, etc.
-        constraint = EnsureIterableOf(list, constraint)
-
-    # lastly try to validate the default, if that fails
-    # wrap into alternative
-    try:
-        constraint(default)
-    except Exception:
-        # should be this TODO
-        #constraint = constraint | EnsureValue(default)
-        # for now
-        if default is None:
-            constraint = constraint | EnsureNone()
-
-    return constraint
+    return EnsureParameterConstraint.from_parameter(
+        param_spec,
+        default,
+        # definitive per-item constraint, consider override from API
+        # otherwise fall back on Parameter.constraints
+        item_constraint=cmd_api_spec['parameter_constraints'][pname]
+        if pname in cmd_api_spec.get('parameter_constraints', [])
+        else override_constraint_by_param_name.get(pname),
+        nargs=cmd_api_spec.get('parameter_nargs', {}).get(pname),
+    ).parameter_constraint
+    # TODO at some point, return the full EnsureParameterConstraint
+    # and also validate the pname with it. this would need the validation
+    # in Parameter.set() to also consider (and pass) the name, or to
+    # access the .parameter_constraint property specifically
 
 
 def _get_parameter(
@@ -276,6 +213,12 @@ def _get_parameter(
         # if we have no idea, use a simple line edit
        type_widget = pw.StrParameter
 
+
+    if isinstance(constraint, ConstraintWithPassthrough):
+        # to make proper constraint identification below work, get the
+        # internal constraint:
+        constraint = constraint.constraint
+
     ### now some parameters where we can derive semantics from their name
     if isinstance(constraint, EnsureDataset) \
             or isinstance(constraint, EnsureExistingDirectory):
diff --git a/datalad_gooey/tests/test_constraints.py b/datalad_gooey/tests/test_constraints.py
index b721e02..e69de29 100644
--- a/datalad_gooey/tests/test_constraints.py
+++ b/datalad_gooey/tests/test_constraints.py
@@ -1,169 +0,0 @@
-import pytest
-
-import pathlib
-
-from ..constraints import (
-    EnsureBool,
-    EnsureInt,
-    EnsureMapping,
-    EnsureStr,
-    EnsureGitRefName,
-    EnsurePath,
-    EnsureIterableOf,
-    EnsureListOf,
-    EnsureTupleOf,
-)
-
-
-def test_EnsurePath(tmp_path):
-    target = pathlib.Path(tmp_path)
-
-    assert EnsurePath()(tmp_path) == target
-    assert EnsurePath(lexists=True)(tmp_path) == target
-    with pytest.raises(ValueError):
-        EnsurePath(lexists=False)(tmp_path)
-    with pytest.raises(ValueError):
-        EnsurePath(lexists=True)(tmp_path / 'nothere')
-    assert EnsurePath(is_format='absolute')(tmp_path) == target
-    with pytest.raises(ValueError):
-        EnsurePath(is_format='relative')(tmp_path)
-    with pytest.raises(ValueError):
-        EnsurePath(is_format='absolute')(tmp_path.name)
-    from stat import S_ISDIR, S_ISREG
-    assert EnsurePath(is_mode=S_ISDIR)(tmp_path) == target
-    with pytest.raises(ValueError):
-        EnsurePath(is_mode=S_ISREG)(tmp_path)
-    # give particular path type
-    assert EnsurePath(path_type=pathlib.PurePath
-                      )(tmp_path) == pathlib.PurePath(tmp_path)
-    # not everything is possible, this is known and OK
-    with pytest.raises(AttributeError):
-        EnsurePath(
-            path_type=pathlib.PurePath,
-            is_mode=S_ISREG,
-        )(tmp_path)
-
-
-def test_EnsureGitRefName():
-    assert EnsureGitRefName().short_description() == '(single-level) Git refname'
-    # standard branch name must work
-    assert EnsureGitRefName()('main') == 'main'
-    # normalize is on by default
-    assert EnsureGitRefName()('/main') == 'main'
-    # be able to turn off onelevel
-    with pytest.raises(ValueError):
-        EnsureGitRefName(allow_onelevel=False)('main')
-    assert EnsureGitRefName(allow_onelevel=False)(
-        'refs/heads/main') == 'refs/heads/main'
-    # refspec pattern off by default
-    with pytest.raises(ValueError):
-        EnsureGitRefName()('refs/heads/*')
-    assert EnsureGitRefName(refspec_pattern=True)(
-        'refs/heads/*') == 'refs/heads/*'
-
-
-def test_EnsureStr_match():
-    # alphanum plus _ and ., non-empty
-    pattern = '[a-zA-Z0-9-.]+'
-    constraint = EnsureStr(match=pattern)
-
-    # reports the pattern in the description
-    for m in (constraint.short_description, constraint.long_description):
-        assert pattern in m()
-
-    # must work
-    assert constraint('a0F-2.') == 'a0F-2.'
-
-    for v in ('', '123_abc'):
-        with pytest.raises(ValueError):
-            assert constraint('')
-
-
-# imported from ancient test code in datalad-core,
-# main test is test_EnsureIterableOf
-def test_EnsureTupleOf():
-    c = EnsureTupleOf(str)
-    assert c(['a', 'b']) == ('a', 'b')
-    assert c(['a1', 'b2']) == ('a1', 'b2')
-    assert c.short_description() == "tuple(<class 'str'>)"
-
-
-# imported from ancient test code in datalad-core,
-# main test is test_EnsureIterableOf
-def test_EnsureListOf():
-    c = EnsureListOf(str)
-    assert c(['a', 'b']) == ['a', 'b']
-    assert c(['a1', 'b2']) == ['a1', 'b2']
-    assert c.short_description() == "list(<class 'str'>)"
-
-
-def test_EnsureIterableOf():
-    assert EnsureIterableOf(
-        list, int).short_description() == "<class 'list'>(<class 'int'>)"
-    # testing aspects that are not covered by test_EnsureListOf
-    tgt = [True, False, True]
-    assert EnsureIterableOf(list, bool)((1, 0, 1)) == tgt
-    assert EnsureIterableOf(list, bool, min_len=3, max_len=3)((1, 0, 1)) == tgt
-    with pytest.raises(ValueError):
-        # too many items
-        EnsureIterableOf(list, bool, max_len=2)((1, 0, 1))
-    with pytest.raises(ValueError):
-        # too few items
-        EnsureIterableOf(list, bool, min_len=4)((1, 0, 1))
-    with pytest.raises(ValueError):
-        # invalid specification min>max
-        EnsureIterableOf(list, bool, min_len=1, max_len=0)
-    with pytest.raises(TypeError):
-        # item_constraint fails
-        EnsureIterableOf(list, dict)([5.6, 3.2])
-    with pytest.raises(ValueError):
-        # item_constraint fails
-        EnsureIterableOf(list, EnsureBool())([5.6, 3.2])
-
-    seq = [3.3, 1, 2.6]
-
-    def _mygen():
-        for i in seq:
-            yield i
-
-    def _myiter(iter):
-        for i in iter:
-            yield i
-
-    # feeding a generator into EnsureIterableOf and getting one out
-    assert list(EnsureIterableOf(_myiter, int)(_mygen())) == [3, 1, 2]
-
-
-def test_EnsureMapping():
-    true_key = 5
-    true_value = False
-
-    constraint = EnsureMapping(EnsureInt(), EnsureBool(), delimiter='::')
-
-    assert 'mapping of int -> bool' in constraint.short_description()
-
-    # must all work
-    for v in ('5::no',
-              [5, 'false'],
-              ('5', False),
-              {'5': 'False'},
-              ):
-        d = constraint(v)
-        assert isinstance(d, dict)
-        assert len(d) == 1
-        k, v = d.popitem()
-        assert k == true_key
-        assert v == true_value
-
-    # must all fail
-    for v in ('5',
-              [],
-              tuple(),
-              {},
-              # additional value
-              [5, False, False],
-              {'5': 'False', '6': True}):
-        with pytest.raises(ValueError):
-            d = constraint(v)
-
-    # TODO test for_dataset() once we have a simple EnsurePathInDataset