diff --git a/datalad_next/commands/ls_file_collection.py b/datalad_next/commands/ls_file_collection.py
index 57462c49f..de5626a0a 100644
--- a/datalad_next/commands/ls_file_collection.py
+++ b/datalad_next/commands/ls_file_collection.py
@@ -59,6 +59,9 @@
     GitWorktreeFileSystemItem,
     iter_gitworktree,
 )
+from datalad_next.iter_collections.annexworktree import (
+    iter_annexworktree,
+)
 
 lgr = getLogger('datalad.local.ls_file_collection')
 
@@ -72,6 +75,7 @@
     'directory',
     'tarfile',
     'gitworktree',
+    'annexworktree',
 )
 
 
@@ -110,7 +114,7 @@ def get_collection_iter(self, **kwargs):
         hash = kwargs['hash']
         iter_fx = None
         iter_kwargs = None
-        if type in ('directory', 'tarfile', 'gitworktree'):
+        if type in ('directory', 'tarfile', 'gitworktree', 'annexworktree'):
             if not isinstance(collection, Path):
                 self.raise_for(
                     kwargs,
@@ -131,6 +135,9 @@ def get_collection_iter(self, **kwargs):
         elif type == 'gitworktree':
             iter_fx = iter_gitworktree
             item2res = gitworktreeitem_to_dict
+        elif type == 'annexworktree':
+            iter_fx = iter_annexworktree
+            item2res = annexworktreeitem_to_dict
         else:
             raise RuntimeError(
                 'unhandled collection-type: this is a defect, please report.')
@@ -205,6 +212,20 @@ def gitworktreeitem_to_dict(item, hash) -> Dict:
     return d
 
 
+def annexworktreeitem_to_dict(item, hash) -> Dict:
+    d = gitworktreeitem_to_dict(item, hash)
+    if item.annexkey:
+        d['annexkey'] = item.annexkey
+
+    if item.annexsize:
+        d['annexsize'] = item.annexsize
+
+    if item.annexobjpath:
+        d['annexobjpath'] = item.annexobjpath
+
+    return d
+
+
 @build_doc
 class LsFileCollection(ValidatedInterface):
     """Report information on files in a collection
diff --git a/datalad_next/commands/tests/test_ls_file_collection.py b/datalad_next/commands/tests/test_ls_file_collection.py
index fdb3eb0ae..beb428c58 100644
--- a/datalad_next/commands/tests/test_ls_file_collection.py
+++ b/datalad_next/commands/tests/test_ls_file_collection.py
@@ -165,3 +165,19 @@ def test_ls_renderer():
         Path(__file__).parent,
         result_renderer='tailored',
     )
+
+
+def test_ls_annexworktree_empty_dataset(existing_dataset):
+
+    (existing_dataset.pathobj / 'sample.bin').write_bytes(b'\x00' * 1024)
+    existing_dataset.save(message='add sample file')
+
+    res = ls_file_collection('annexworktree', existing_dataset.pathobj)
+    assert len(res) == 4
+    annexed_files = [annex_info for annex_info in res if 'annexkey' in annex_info]
+    assert len(annexed_files) == 1
+    assert {
+        'annexkey',
+        'annexsize',
+        'annexobjpath'
+    }.issubset(set(annexed_files[0].keys()))
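
For reviewers, a minimal usage sketch of the new 'annexworktree' collection type, mirroring the invocation in the added test. It assumes datalad-next is installed so that ls_file_collection is reachable through the regular DataLad Python API, and that ./my-dataset is an existing annex-enabled dataset with at least one annexed file; both names are illustrative, not part of this change.

    # sketch only: the datalad.api exposure and 'my-dataset' path are assumptions
    from pathlib import Path
    from datalad.api import ls_file_collection

    # the default return type is a list of result records, one per worktree item
    for item in ls_file_collection('annexworktree', Path('my-dataset')):
        # annexed files carry the extra properties added by annexworktreeitem_to_dict()
        if 'annexkey' in item:
            print(item['annexkey'], item['annexsize'], item['annexobjpath'])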