From 77ccc6a3608b4656d157cf729799636b676d201b Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 20 Dec 2023 15:51:04 +0100 Subject: [PATCH 001/164] set gitpod.yml docker image to latest --- .gitpod.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitpod.yml b/.gitpod.yml index 3c8b6b530..899f58e55 100644 --- a/.gitpod.yml +++ b/.gitpod.yml @@ -1,4 +1,4 @@ -image: nfcore/gitpod:dev +image: nfcore/gitpod:latest tasks: - name: install current state of nf-core/tools and setup pre-commit command: | From 7d24e7510d3e7e74e86dc9fdedc44ceec20b329c Mon Sep 17 00:00:00 2001 From: ctuni Date: Mon, 18 Mar 2024 12:18:59 +0100 Subject: [PATCH 002/164] added patch command for subworkflows --- nf_core/subworkflows/patch.py | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 nf_core/subworkflows/patch.py diff --git a/nf_core/subworkflows/patch.py b/nf_core/subworkflows/patch.py new file mode 100644 index 000000000..3c8b3d5e4 --- /dev/null +++ b/nf_core/subworkflows/patch.py @@ -0,0 +1,10 @@ +import logging + +from nf_core.components.patch import ComponentPatch + +log = logging.getLogger(__name__) + + +class SubworkflowPatch(ComponentPatch): + def __init__(self, pipeline_dir, remote_url=None, branch=None, no_pull=False, installed_by=False): + super().__init__(pipeline_dir, "subworkflows", remote_url, branch, no_pull, installed_by) From 86e3e2f32dc235879340730fbb0f9bc17b6a4c72 Mon Sep 17 00:00:00 2001 From: ctuni Date: Mon, 18 Mar 2024 12:29:37 +0100 Subject: [PATCH 003/164] forgot to import patch in init --- nf_core/subworkflows/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/nf_core/subworkflows/__init__.py b/nf_core/subworkflows/__init__.py index 88e8a0938..8e3c85a27 100644 --- a/nf_core/subworkflows/__init__.py +++ b/nf_core/subworkflows/__init__.py @@ -3,5 +3,6 @@ from .install import SubworkflowInstall from .lint import SubworkflowLint from .list import SubworkflowList +from .patch import SubworkflowPatch from .remove import 
SubworkflowRemove from .update import SubworkflowUpdate From 26b39c2f336db7d31ccfa421c34f1a2137dd6640 Mon Sep 17 00:00:00 2001 From: ctuni Date: Mon, 18 Mar 2024 12:58:13 +0100 Subject: [PATCH 004/164] added files for the tests --- nf_core/__main__.py | 35 +++++++++++++++++++++++++++++++++++ tests/test_subworkflows.py | 9 +++++++++ 2 files changed, 44 insertions(+) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index d6f6077be..c372e8897 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -1684,6 +1684,41 @@ def subworkflows_install(ctx, subworkflow, dir, prompt, force, sha): log.error(e) sys.exit(1) +# nf-core subworkflows patch +@subworkflows.command("patch") +@click.pass_context +@click.argument("tool", type=str, required=False, metavar=" or ") +@click.option( + "-d", + "--dir", + type=click.Path(exists=True), + default=".", + help=r"Pipeline directory. [dim]\[default: current working directory][/]", +) +@click.option("-r", "--remove", is_flag=True, default=False) +def subworkflows_patch(ctx, tool, dir, remove): + """ + Create a patch file for minor changes in a subworkflow + + Checks if a subworkflow has been modified locally and creates a patch file + describing how the module has changed from the remote version + """ + from nf_core.subworkflows import SubworkflowPatch + + try: + subworkflow_patch = SubworkflowPatch( + dir, + ctx.obj["modules_repo_url"], + ctx.obj["modules_repo_branch"], + ctx.obj["modules_repo_no_pull"], + ) + if remove: + subworkflow_patch.remove(tool) + else: + subworkflow_patch.patch(tool) + except (UserWarning, LookupError) as e: + log.error(e) + sys.exit(1) # nf-core subworkflows remove @subworkflows.command("remove") diff --git a/tests/test_subworkflows.py b/tests/test_subworkflows.py index 6163faa7a..5a781cd87 100644 --- a/tests/test_subworkflows.py +++ b/tests/test_subworkflows.py @@ -138,6 +138,15 @@ def tearDown(self): test_subworkflows_list_remote, test_subworkflows_list_remote_gitlab, ) + from .subworkflows.patch 
import( # type: ignore[misc] + test_create_patch_change, + test_create_patch_no_change, + test_create_patch_try_apply_failed, + test_create_patch_try_apply_successful, + test_create_patch_update_fail, + test_create_patch_update_success, + test_remove_patch, + ) from .subworkflows.remove import ( # type: ignore[misc] test_subworkflows_remove_included_subworkflow, test_subworkflows_remove_one_of_two_subworkflow, From 015a61be7ae0d0a59c98615a251bd982c14e777a Mon Sep 17 00:00:00 2001 From: ctuni Date: Mon, 18 Mar 2024 13:37:46 +0100 Subject: [PATCH 005/164] created draft for test file --- tests/subworkflows/patch.py | 77 +++++++++++++++++++++++++++++++++++++ 1 file changed, 77 insertions(+) create mode 100644 tests/subworkflows/patch.py diff --git a/tests/subworkflows/patch.py b/tests/subworkflows/patch.py new file mode 100644 index 000000000..c2d7cd97f --- /dev/null +++ b/tests/subworkflows/patch.py @@ -0,0 +1,77 @@ +import os + +import pytest + +from nf_core.modules.modules_json import ModulesJson +from nf_core.subworkflows.install import SubworkflowInstall + +from ..utils import ( + GITLAB_BRANCH_TEST_BRANCH, + GITLAB_REPO, + GITLAB_SUBWORKFLOWS_BRANCH, + GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH, + GITLAB_URL, + with_temporary_folder, +) + +""" +Test the 'nf-core subworkflows patch' command +""" + +def setup_patch(self, pipeline_dir, modify_subworkflow): + # Install the subworkflow bam_sort_stats_samtools + subworkflow_path = os.path.join(self.subworkflow_install.dir, "subworkflows", "nf-core", "bam_sort_stats_samtools") + sub_subworkflow_path = os.path.join(self.subworkflow_install.dir, "subworkflows", "nf-core", "bam_stats_samtools") + samtools_index_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index") + samtools_sort_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "sort") + samtools_stats_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "stats") + 
samtools_idxstats_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "idxstats") + samtools_flagstat_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "flagstat") + + + if modify_subworkflow: + # Modify the subworkflow + subworkflow_path = Path(pipeline_dir, "subworkflows", "nf-core", "bam_sort_stats_samtools") + modify_subworkflow(subworkflow_path / "main.nf") + + +def modify_subworkflow(path): + """Modify a file to test patch creation""" + with open(path) as fh: + lines = fh.readlines() + # We want a patch file that looks something like: + # - ch_fasta // channel: [ val(meta), path(fasta) ] + for line_index in range(len(lines)): + if lines[line_index] == " ch_fasta // channel: [ val(meta), path(fasta) ]\n": + to_pop = line_index + lines.pop(to_pop) + with open(path, "w") as fh: + fh.writelines(lines) + +def test_create_patch_change(self): + """Test creating a patch when there is a change to the module""" + +def test_create_patch_no_change(self): + """Test creating a patch when there is no change to the subworkflow""" + # Try creating a patch file + # Check that no patch file has been added to the directory + +def test_create_patch_try_apply_failed(self): + """Test creating a patch file and applying it to a new version of the the files""" + +def test_create_patch_try_apply_successful(self): + """Test creating a patch file and applying it to a new version of the the files""" + +def test_create_patch_update_fail(self): + """Test creating a patch file and updating a subworkflow when there is a diff conflict""" + +def test_create_patch_update_success (self): + """ + Test creating a patch file and the updating the subworkflow + + Should have the same effect as 'test_create_patch_try_apply_successful' + but uses higher level api + """ + +def test_remove_patch(self): + """Test creating a patch when there is no change to the subworkflow""" From 563e2326fb063603508b67de6205998004fac9e5 Mon Sep 17 00:00:00 
2001 From: ctuni Date: Mon, 18 Mar 2024 13:41:28 +0100 Subject: [PATCH 006/164] ruff format --- nf_core/__main__.py | 2 ++ tests/subworkflows/patch.py | 11 +++++++++-- tests/test_subworkflows.py | 2 +- 3 files changed, 12 insertions(+), 3 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index c372e8897..3468f74e5 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -1684,6 +1684,7 @@ def subworkflows_install(ctx, subworkflow, dir, prompt, force, sha): log.error(e) sys.exit(1) + # nf-core subworkflows patch @subworkflows.command("patch") @click.pass_context @@ -1720,6 +1721,7 @@ def subworkflows_patch(ctx, tool, dir, remove): log.error(e) sys.exit(1) + # nf-core subworkflows remove @subworkflows.command("remove") @click.pass_context diff --git a/tests/subworkflows/patch.py b/tests/subworkflows/patch.py index c2d7cd97f..b340d27fe 100644 --- a/tests/subworkflows/patch.py +++ b/tests/subworkflows/patch.py @@ -18,6 +18,7 @@ Test the 'nf-core subworkflows patch' command """ + def setup_patch(self, pipeline_dir, modify_subworkflow): # Install the subworkflow bam_sort_stats_samtools subworkflow_path = os.path.join(self.subworkflow_install.dir, "subworkflows", "nf-core", "bam_sort_stats_samtools") @@ -28,7 +29,6 @@ def setup_patch(self, pipeline_dir, modify_subworkflow): samtools_idxstats_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "idxstats") samtools_flagstat_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "flagstat") - if modify_subworkflow: # Modify the subworkflow subworkflow_path = Path(pipeline_dir, "subworkflows", "nf-core", "bam_sort_stats_samtools") @@ -48,24 +48,30 @@ def modify_subworkflow(path): with open(path, "w") as fh: fh.writelines(lines) + def test_create_patch_change(self): """Test creating a patch when there is a change to the module""" + def test_create_patch_no_change(self): """Test creating a patch when there is no change to the subworkflow""" # 
Try creating a patch file # Check that no patch file has been added to the directory + def test_create_patch_try_apply_failed(self): """Test creating a patch file and applying it to a new version of the the files""" + def test_create_patch_try_apply_successful(self): """Test creating a patch file and applying it to a new version of the the files""" + def test_create_patch_update_fail(self): """Test creating a patch file and updating a subworkflow when there is a diff conflict""" -def test_create_patch_update_success (self): + +def test_create_patch_update_success(self): """ Test creating a patch file and the updating the subworkflow @@ -73,5 +79,6 @@ def test_create_patch_update_success (self): but uses higher level api """ + def test_remove_patch(self): """Test creating a patch when there is no change to the subworkflow""" diff --git a/tests/test_subworkflows.py b/tests/test_subworkflows.py index 5a781cd87..6a58473e0 100644 --- a/tests/test_subworkflows.py +++ b/tests/test_subworkflows.py @@ -138,7 +138,7 @@ def tearDown(self): test_subworkflows_list_remote, test_subworkflows_list_remote_gitlab, ) - from .subworkflows.patch import( # type: ignore[misc] + from .subworkflows.patch import ( # type: ignore[misc] test_create_patch_change, test_create_patch_no_change, test_create_patch_try_apply_failed, From 31505ab1ae67254a81537950a00bf12b846049ad Mon Sep 17 00:00:00 2001 From: ctuni Date: Mon, 18 Mar 2024 14:16:11 +0100 Subject: [PATCH 007/164] cleaning up --- tests/subworkflows/patch.py | 21 +-------------------- 1 file changed, 1 insertion(+), 20 deletions(-) diff --git a/tests/subworkflows/patch.py b/tests/subworkflows/patch.py index b340d27fe..c34711ba0 100644 --- a/tests/subworkflows/patch.py +++ b/tests/subworkflows/patch.py @@ -1,18 +1,5 @@ import os - -import pytest - -from nf_core.modules.modules_json import ModulesJson -from nf_core.subworkflows.install import SubworkflowInstall - -from ..utils import ( - GITLAB_BRANCH_TEST_BRANCH, - GITLAB_REPO, - 
GITLAB_SUBWORKFLOWS_BRANCH, - GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH, - GITLAB_URL, - with_temporary_folder, -) +from pathlib import Path """ Test the 'nf-core subworkflows patch' command @@ -22,12 +9,6 @@ def setup_patch(self, pipeline_dir, modify_subworkflow): # Install the subworkflow bam_sort_stats_samtools subworkflow_path = os.path.join(self.subworkflow_install.dir, "subworkflows", "nf-core", "bam_sort_stats_samtools") - sub_subworkflow_path = os.path.join(self.subworkflow_install.dir, "subworkflows", "nf-core", "bam_stats_samtools") - samtools_index_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index") - samtools_sort_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "sort") - samtools_stats_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "stats") - samtools_idxstats_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "idxstats") - samtools_flagstat_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "flagstat") if modify_subworkflow: # Modify the subworkflow From 4d0dc8532d6a5975531a9b9a47f06105762eeddc Mon Sep 17 00:00:00 2001 From: ctuni Date: Mon, 18 Mar 2024 14:59:23 +0100 Subject: [PATCH 008/164] added tests --- tests/subworkflows/patch.py | 149 ++++++++++++++++++++++++++++++++++-- 1 file changed, 142 insertions(+), 7 deletions(-) diff --git a/tests/subworkflows/patch.py b/tests/subworkflows/patch.py index c34711ba0..cbb248e7e 100644 --- a/tests/subworkflows/patch.py +++ b/tests/subworkflows/patch.py @@ -1,5 +1,28 @@ import os +import tempfile from pathlib import Path +from unittest import mock + + +import pytest + +from nf_core.modules.modules_json import ModulesJson +from nf_core.subworkflows.install import SubworkflowInstall +import nf_core.subworkflows +import nf_core.components.components_command + + +from ..utils import ( + GITLAB_BRANCH_TEST_BRANCH, + GITLAB_REPO, + 
GITLAB_SUBWORKFLOWS_BRANCH, + GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH, + GITLAB_URL, + with_temporary_folder, +) + +# TODO: #Change this for the correct SUCCEED_SHA +SUCCEED_SHA = "????" """ Test the 'nf-core subworkflows patch' command @@ -30,29 +53,123 @@ def modify_subworkflow(path): fh.writelines(lines) -def test_create_patch_change(self): +def test_create_patch_no_change(self): """Test creating a patch when there is a change to the module""" + setup_patch(self.pipeline_dir, False) + # Try creating a patch file + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_BRANCH_TEST_BRANCH) + with pytest.raises(UserWarning): + patch_obj.patch("bam_sort_stats_samtools") -def test_create_patch_no_change(self): + subworkflow_path = Path(self.pipeline_dir, "subworkflows", "nf-core", "bam_sort_stats_samtools") + + # Check that no patch file has been added to the directory + assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml"} + +def test_create_patch_change(self): """Test creating a patch when there is no change to the subworkflow""" + setup_patch(self.pipeline_dir, True) + # Try creating a patch file - # Check that no patch file has been added to the directory + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_BRANCH_TEST_BRANCH) + patch_obj.patch("bam_sort_stats_samtools") + subworkflow_path = Path(self.pipeline_dir, "subworkflows", "nf-core", "bam_sort_stats_samtools") -def test_create_patch_try_apply_failed(self): - """Test creating a patch file and applying it to a new version of the the files""" + patch_fn = f"{'-'.join("bam_sort_stats_samtools".split('/'))}.diff" + # Check that a patch file with the correct name has been created + assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", patch_fn} + + # Check that the correct lines are in the patch file + with open(subworkflow_path / patch_fn) as fh: + patch_lines = fh.readlines() + subworkflow_relpath = 
subworkflow_path.relative_to(self.pipeline_dir) + assert f"--- {subworkflow_relpath / 'main.nf'}\n" in patch_lines, subworkflow_relpath / "main.nf" + assert f"+++ {subworkflow_relpath / 'main.nf'}\n" in patch_lines + assert "- ch_fasta // channel: [ val(meta), path(fasta) ]" in patch_lines def test_create_patch_try_apply_successful(self): """Test creating a patch file and applying it to a new version of the the files""" + setup_patch(self.pipeline_dir, True) + subworkflow_relpath = Path("subworkflows", "nf-core", "bam_sort_stats_samtools") + subworkflow_path = Path(self.pipeline_dir, subworkflow_relpath) + # Try creating a patch file + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_BRANCH_TEST_BRANCH) + patch_obj.patch("bam_sort_stats_samtools") + + patch_fn = f"{'-'.join("bam_sort_stats_samtools".split('/'))}.diff" + # Check that a patch file with the correct name has been created + assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", patch_fn} + + update_obj = nf_core.subworkflows.SubworkflowUpdate( + self.pipeline_dir, sha=SUCCEED_SHA, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH + ) + + # Install the new files + install_dir = Path(tempfile.mkdtemp()) + update_obj.install_component_files("bam_sort_stats_samtools", SUCCEED_SHA, update_obj.modules_repo, install_dir) + + # Try applying the patch + subworkflow_install_dir = install_dir / "bam_sort_stats_samtools" + patch_relpath = subworkflow_relpath / patch_fn + assert update_obj.try_apply_patch("bam_sort_stats_samtools", "nf-core", patch_relpath, subworkflow_path, subworkflow_install_dir) is True + + # Move the files from the temporary directory + update_obj.move_files_from_tmp_dir("bam_sort_stats_samtools", install_dir, "nf-core", SUCCEED_SHA) + + # Check that a patch file with the correct name has been created + assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", patch_fn} + + # Check that the correct lines are in the patch file + 
with open(subworkflow_path / patch_fn) as fh: + patch_lines = fh.readlines() + subworkflow_relpath = subworkflow_path.relative_to(self.pipeline_dir) + assert f"--- {subworkflow_relpath / 'main.nf'}\n" in patch_lines, subworkflow_relpath / "main.nf" + assert f"+++ {subworkflow_relpath / 'main.nf'}\n" in patch_lines + assert "- ch_fasta // channel: [ val(meta), path(fasta) ]" in patch_lines + + # Check that 'main.nf' is updated correctly + with open(subworkflow_path / "main.nf") as fh: + main_nf_lines = fh.readlines() + # These lines should have been removed by the patch + assert " ch_fasta // channel: [ val(meta), path(fasta) ]\n" not in main_nf_lines -def test_create_patch_update_fail(self): - """Test creating a patch file and updating a subworkflow when there is a diff conflict""" +def test_create_patch_try_apply_failed(self): + """Test creating a patch file and applying it to a new version of the the files""" + setup_patch(self.pipeline_dir, True) + subworkflow_relpath = Path("subworkflows", "nf-core", "bam_sort_stats_samtools") + subworkflow_path = Path(self.pipeline_dir, subworkflow_relpath) + + # Try creating a patch file + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_BRANCH_TEST_BRANCH) + patch_obj.patch("bam_sort_stats_samtools") + + patch_fn = f"{'-'.join("bam_sort_stats_samtools".split('/'))}.diff" + # Check that a patch file with the correct name has been created + assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", patch_fn} + + update_obj = nf_core.subworkflows.SubworkflowUpdate( + self.pipeline_dir, sha=SUCCEED_SHA, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH + ) + # Install the new files + install_dir = Path(tempfile.mkdtemp()) + update_obj.install_component_files("bam_sort_stats_samtools", SUCCEED_SHA, update_obj.modules_repo, install_dir) + # Try applying the patch + subworkflow_install_dir = install_dir / "bam_sort_stats_samtools" + patch_relpath = subworkflow_relpath / patch_fn 
+ assert update_obj.try_apply_patch("bam_sort_stats_samtools", "nf-core", patch_relpath, subworkflow_path, subworkflow_install_dir) is False + +# TODO: create those two missing tests def test_create_patch_update_success(self): + """Test creating a patch file and updating a subworkflow when there is a diff conflict""" + + +def test_create_patch_update_fail(self): """ Test creating a patch file and the updating the subworkflow @@ -63,3 +180,21 @@ def test_create_patch_update_success(self): def test_remove_patch(self): """Test creating a patch when there is no change to the subworkflow""" + setup_patch(self.pipeline_dir, True) + + # Try creating a patch file + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_BRANCH_TEST_BRANCH) + patch_obj.patch("bam_sort_stats_samtools") + + subworkflow_path = Path(self.pipeline_dir, "subworkflows", "nf-core", "bam_sort_stats_samtools") + + patch_fn = f"{'-'.join("bam_sort_stats_samtools".split('/'))}.diff" + # Check that a patch file with the correct name has been created + assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", patch_fn} + + with mock.patch.object(nf_core.create.questionary, "confirm") as mock_questionary: + mock_questionary.unsafe_ask.return_value = True + patch_obj.remove("bam_sort_stats_samtools") + # Check that the diff file has been removed + assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml"} + From 3030a76c5adfc867a876a34c0be405b3935c5f61 Mon Sep 17 00:00:00 2001 From: ctuni Date: Mon, 18 Mar 2024 15:01:22 +0100 Subject: [PATCH 009/164] ruff --- tests/subworkflows/patch.py | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/tests/subworkflows/patch.py b/tests/subworkflows/patch.py index cbb248e7e..44c07d7d6 100644 --- a/tests/subworkflows/patch.py +++ b/tests/subworkflows/patch.py @@ -3,22 +3,14 @@ from pathlib import Path from unittest import mock - import pytest -from nf_core.modules.modules_json import ModulesJson 
-from nf_core.subworkflows.install import SubworkflowInstall -import nf_core.subworkflows import nf_core.components.components_command - +import nf_core.subworkflows from ..utils import ( GITLAB_BRANCH_TEST_BRANCH, - GITLAB_REPO, - GITLAB_SUBWORKFLOWS_BRANCH, - GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH, GITLAB_URL, - with_temporary_folder, ) # TODO: #Change this for the correct SUCCEED_SHA From 6b885ff43b3a031fd9f8da3af58b4eb72f5a6d3e Mon Sep 17 00:00:00 2001 From: ctuni Date: Mon, 18 Mar 2024 15:02:58 +0100 Subject: [PATCH 010/164] ruff format --- tests/subworkflows/patch.py | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/tests/subworkflows/patch.py b/tests/subworkflows/patch.py index 44c07d7d6..6e26d4844 100644 --- a/tests/subworkflows/patch.py +++ b/tests/subworkflows/patch.py @@ -59,6 +59,7 @@ def test_create_patch_no_change(self): # Check that no patch file has been added to the directory assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml"} + def test_create_patch_change(self): """Test creating a patch when there is no change to the subworkflow""" setup_patch(self.pipeline_dir, True) @@ -107,7 +108,12 @@ def test_create_patch_try_apply_successful(self): # Try applying the patch subworkflow_install_dir = install_dir / "bam_sort_stats_samtools" patch_relpath = subworkflow_relpath / patch_fn - assert update_obj.try_apply_patch("bam_sort_stats_samtools", "nf-core", patch_relpath, subworkflow_path, subworkflow_install_dir) is True + assert ( + update_obj.try_apply_patch( + "bam_sort_stats_samtools", "nf-core", patch_relpath, subworkflow_path, subworkflow_install_dir + ) + is True + ) # Move the files from the temporary directory update_obj.move_files_from_tmp_dir("bam_sort_stats_samtools", install_dir, "nf-core", SUCCEED_SHA) @@ -129,6 +135,7 @@ def test_create_patch_try_apply_successful(self): # These lines should have been removed by the patch assert " ch_fasta // channel: [ val(meta), path(fasta) ]\n" not in 
main_nf_lines + def test_create_patch_try_apply_failed(self): """Test creating a patch file and applying it to a new version of the the files""" setup_patch(self.pipeline_dir, True) @@ -154,7 +161,13 @@ def test_create_patch_try_apply_failed(self): # Try applying the patch subworkflow_install_dir = install_dir / "bam_sort_stats_samtools" patch_relpath = subworkflow_relpath / patch_fn - assert update_obj.try_apply_patch("bam_sort_stats_samtools", "nf-core", patch_relpath, subworkflow_path, subworkflow_install_dir) is False + assert ( + update_obj.try_apply_patch( + "bam_sort_stats_samtools", "nf-core", patch_relpath, subworkflow_path, subworkflow_install_dir + ) + is False + ) + # TODO: create those two missing tests def test_create_patch_update_success(self): @@ -189,4 +202,3 @@ def test_remove_patch(self): patch_obj.remove("bam_sort_stats_samtools") # Check that the diff file has been removed assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml"} - From dfcfb5b5111aa1d4d64ebcbd75a989d6b36ba0db Mon Sep 17 00:00:00 2001 From: ctuni Date: Mon, 18 Mar 2024 15:07:51 +0100 Subject: [PATCH 011/164] removed split --- tests/subworkflows/patch.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/subworkflows/patch.py b/tests/subworkflows/patch.py index 6e26d4844..ebec1171f 100644 --- a/tests/subworkflows/patch.py +++ b/tests/subworkflows/patch.py @@ -70,7 +70,7 @@ def test_create_patch_change(self): subworkflow_path = Path(self.pipeline_dir, "subworkflows", "nf-core", "bam_sort_stats_samtools") - patch_fn = f"{'-'.join("bam_sort_stats_samtools".split('/'))}.diff" + patch_fn = f"{'-'.join("bam_sort_stats_samtools")}.diff" # Check that a patch file with the correct name has been created assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", patch_fn} @@ -93,7 +93,7 @@ def test_create_patch_try_apply_successful(self): patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, 
GITLAB_BRANCH_TEST_BRANCH) patch_obj.patch("bam_sort_stats_samtools") - patch_fn = f"{'-'.join("bam_sort_stats_samtools".split('/'))}.diff" + patch_fn = f"{'-'.join("bam_sort_stats_samtools")}.diff" # Check that a patch file with the correct name has been created assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", patch_fn} @@ -146,7 +146,7 @@ def test_create_patch_try_apply_failed(self): patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_BRANCH_TEST_BRANCH) patch_obj.patch("bam_sort_stats_samtools") - patch_fn = f"{'-'.join("bam_sort_stats_samtools".split('/'))}.diff" + patch_fn = f"{'-'.join("bam_sort_stats_samtools")}.diff" # Check that a patch file with the correct name has been created assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", patch_fn} @@ -193,7 +193,7 @@ def test_remove_patch(self): subworkflow_path = Path(self.pipeline_dir, "subworkflows", "nf-core", "bam_sort_stats_samtools") - patch_fn = f"{'-'.join("bam_sort_stats_samtools".split('/'))}.diff" + patch_fn = f"{'-'.join("bam_sort_stats_samtools")}.diff" # Check that a patch file with the correct name has been created assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", patch_fn} From 299193020c8c1e37b72633036d9e9a1b375cb899 Mon Sep 17 00:00:00 2001 From: ctuni Date: Mon, 18 Mar 2024 15:09:59 +0100 Subject: [PATCH 012/164] mypy --- tests/subworkflows/patch.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/subworkflows/patch.py b/tests/subworkflows/patch.py index ebec1171f..440c633e7 100644 --- a/tests/subworkflows/patch.py +++ b/tests/subworkflows/patch.py @@ -70,7 +70,7 @@ def test_create_patch_change(self): subworkflow_path = Path(self.pipeline_dir, "subworkflows", "nf-core", "bam_sort_stats_samtools") - patch_fn = f"{'-'.join("bam_sort_stats_samtools")}.diff" + patch_fn = f"{'-'.join('bam_sort_stats_samtools')}.diff" # Check that a patch file with the correct name has been created 
assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", patch_fn} @@ -93,7 +93,7 @@ def test_create_patch_try_apply_successful(self): patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_BRANCH_TEST_BRANCH) patch_obj.patch("bam_sort_stats_samtools") - patch_fn = f"{'-'.join("bam_sort_stats_samtools")}.diff" + patch_fn = f"{'-'.join('bam_sort_stats_samtools')}.diff" # Check that a patch file with the correct name has been created assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", patch_fn} @@ -146,7 +146,7 @@ def test_create_patch_try_apply_failed(self): patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_BRANCH_TEST_BRANCH) patch_obj.patch("bam_sort_stats_samtools") - patch_fn = f"{'-'.join("bam_sort_stats_samtools")}.diff" + patch_fn = f"{'-'.join('bam_sort_stats_samtools')}.diff" # Check that a patch file with the correct name has been created assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", patch_fn} @@ -193,7 +193,7 @@ def test_remove_patch(self): subworkflow_path = Path(self.pipeline_dir, "subworkflows", "nf-core", "bam_sort_stats_samtools") - patch_fn = f"{'-'.join("bam_sort_stats_samtools")}.diff" + patch_fn = f"{'-'.join('bam_sort_stats_samtools')}.diff" # Check that a patch file with the correct name has been created assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", patch_fn} From e909d3c19330f014c59dc3101d06e0fad24e70c4 Mon Sep 17 00:00:00 2001 From: ctuni Date: Mon, 18 Mar 2024 15:22:22 +0100 Subject: [PATCH 013/164] setup_patch --- tests/subworkflows/patch.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/tests/subworkflows/patch.py b/tests/subworkflows/patch.py index 440c633e7..06bb5bcad 100644 --- a/tests/subworkflows/patch.py +++ b/tests/subworkflows/patch.py @@ -15,15 +15,22 @@ # TODO: #Change this for the correct SUCCEED_SHA SUCCEED_SHA = "????" 
+ORG_SHA = "002623ccc88a3b0cb302c7d8f13792a95354d9f2" + """ Test the 'nf-core subworkflows patch' command """ -def setup_patch(self, pipeline_dir, modify_subworkflow): +def setup_patch(pipeline_dir, modify_subworkflow): # Install the subworkflow bam_sort_stats_samtools - subworkflow_path = os.path.join(self.subworkflow_install.dir, "subworkflows", "nf-core", "bam_sort_stats_samtools") + install_obj = nf_core.subworkflows.SubworkflowInstall( + pipeline_dir, prompt=False, force=False, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH, sha=ORG_SHA + ) + + # Install the module + install_obj.install("bam_sort_stats_samtools") if modify_subworkflow: # Modify the subworkflow From fd5b0d14193c16cf1fc716db851dd38df16277e2 Mon Sep 17 00:00:00 2001 From: ctuni Date: Mon, 18 Mar 2024 15:33:05 +0100 Subject: [PATCH 014/164] called function correctly --- tests/subworkflows/patch.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/subworkflows/patch.py b/tests/subworkflows/patch.py index 06bb5bcad..19afc757f 100644 --- a/tests/subworkflows/patch.py +++ b/tests/subworkflows/patch.py @@ -35,10 +35,10 @@ def setup_patch(pipeline_dir, modify_subworkflow): if modify_subworkflow: # Modify the subworkflow subworkflow_path = Path(pipeline_dir, "subworkflows", "nf-core", "bam_sort_stats_samtools") - modify_subworkflow(subworkflow_path / "main.nf") + modify_main_nf(subworkflow_path / "main.nf") -def modify_subworkflow(path): +def modify_main_nf(path): """Modify a file to test patch creation""" with open(path) as fh: lines = fh.readlines() From cdd9cfb67ac2e173440bcdc3eb112ddc41c795b7 Mon Sep 17 00:00:00 2001 From: ctuni Date: Mon, 18 Mar 2024 16:09:20 +0100 Subject: [PATCH 015/164] wraping up for the day --- tests/subworkflows/patch.py | 37 +++++++++++++++++++------------------ 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/tests/subworkflows/patch.py b/tests/subworkflows/patch.py index 19afc757f..66065784c 100644 --- 
a/tests/subworkflows/patch.py +++ b/tests/subworkflows/patch.py @@ -9,8 +9,9 @@ import nf_core.subworkflows from ..utils import ( - GITLAB_BRANCH_TEST_BRANCH, + GITLAB_SUBWORKFLOWS_BRANCH, GITLAB_URL, + GITLAB_REPO ) # TODO: #Change this for the correct SUCCEED_SHA @@ -26,7 +27,7 @@ def setup_patch(pipeline_dir, modify_subworkflow): # Install the subworkflow bam_sort_stats_samtools install_obj = nf_core.subworkflows.SubworkflowInstall( - pipeline_dir, prompt=False, force=False, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH, sha=ORG_SHA + pipeline_dir, prompt=False, force=False, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH, sha=ORG_SHA ) # Install the module @@ -34,7 +35,7 @@ def setup_patch(pipeline_dir, modify_subworkflow): if modify_subworkflow: # Modify the subworkflow - subworkflow_path = Path(pipeline_dir, "subworkflows", "nf-core", "bam_sort_stats_samtools") + subworkflow_path = Path(pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") modify_main_nf(subworkflow_path / "main.nf") @@ -57,11 +58,11 @@ def test_create_patch_no_change(self): setup_patch(self.pipeline_dir, False) # Try creating a patch file - patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_BRANCH_TEST_BRANCH) + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) with pytest.raises(UserWarning): patch_obj.patch("bam_sort_stats_samtools") - subworkflow_path = Path(self.pipeline_dir, "subworkflows", "nf-core", "bam_sort_stats_samtools") + subworkflow_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") # Check that no patch file has been added to the directory assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml"} @@ -72,10 +73,10 @@ def test_create_patch_change(self): setup_patch(self.pipeline_dir, True) # Try creating a patch file - patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, 
GITLAB_BRANCH_TEST_BRANCH) + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) patch_obj.patch("bam_sort_stats_samtools") - subworkflow_path = Path(self.pipeline_dir, "subworkflows", "nf-core", "bam_sort_stats_samtools") + subworkflow_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") patch_fn = f"{'-'.join('bam_sort_stats_samtools')}.diff" # Check that a patch file with the correct name has been created @@ -93,11 +94,11 @@ def test_create_patch_change(self): def test_create_patch_try_apply_successful(self): """Test creating a patch file and applying it to a new version of the the files""" setup_patch(self.pipeline_dir, True) - subworkflow_relpath = Path("subworkflows", "nf-core", "bam_sort_stats_samtools") + subworkflow_relpath = Path("subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") subworkflow_path = Path(self.pipeline_dir, subworkflow_relpath) # Try creating a patch file - patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_BRANCH_TEST_BRANCH) + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) patch_obj.patch("bam_sort_stats_samtools") patch_fn = f"{'-'.join('bam_sort_stats_samtools')}.diff" @@ -105,7 +106,7 @@ def test_create_patch_try_apply_successful(self): assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", patch_fn} update_obj = nf_core.subworkflows.SubworkflowUpdate( - self.pipeline_dir, sha=SUCCEED_SHA, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH + self.pipeline_dir, sha=SUCCEED_SHA, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH ) # Install the new files @@ -117,13 +118,13 @@ def test_create_patch_try_apply_successful(self): patch_relpath = subworkflow_relpath / patch_fn assert ( update_obj.try_apply_patch( - "bam_sort_stats_samtools", "nf-core", patch_relpath, subworkflow_path, subworkflow_install_dir + 
"bam_sort_stats_samtools", GITLAB_REPO, patch_relpath, subworkflow_path, subworkflow_install_dir ) is True ) # Move the files from the temporary directory - update_obj.move_files_from_tmp_dir("bam_sort_stats_samtools", install_dir, "nf-core", SUCCEED_SHA) + update_obj.move_files_from_tmp_dir("bam_sort_stats_samtools", install_dir, GITLAB_REPO, SUCCEED_SHA) # Check that a patch file with the correct name has been created assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", patch_fn} @@ -146,11 +147,11 @@ def test_create_patch_try_apply_successful(self): def test_create_patch_try_apply_failed(self): """Test creating a patch file and applying it to a new version of the the files""" setup_patch(self.pipeline_dir, True) - subworkflow_relpath = Path("subworkflows", "nf-core", "bam_sort_stats_samtools") + subworkflow_relpath = Path("subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") subworkflow_path = Path(self.pipeline_dir, subworkflow_relpath) # Try creating a patch file - patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_BRANCH_TEST_BRANCH) + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) patch_obj.patch("bam_sort_stats_samtools") patch_fn = f"{'-'.join('bam_sort_stats_samtools')}.diff" @@ -158,7 +159,7 @@ def test_create_patch_try_apply_failed(self): assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", patch_fn} update_obj = nf_core.subworkflows.SubworkflowUpdate( - self.pipeline_dir, sha=SUCCEED_SHA, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH + self.pipeline_dir, sha=SUCCEED_SHA, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH ) # Install the new files @@ -170,7 +171,7 @@ def test_create_patch_try_apply_failed(self): patch_relpath = subworkflow_relpath / patch_fn assert ( update_obj.try_apply_patch( - "bam_sort_stats_samtools", "nf-core", patch_relpath, subworkflow_path, subworkflow_install_dir + 
"bam_sort_stats_samtools", GITLAB_REPO, patch_relpath, subworkflow_path, subworkflow_install_dir ) is False ) @@ -195,10 +196,10 @@ def test_remove_patch(self): setup_patch(self.pipeline_dir, True) # Try creating a patch file - patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_BRANCH_TEST_BRANCH) + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) patch_obj.patch("bam_sort_stats_samtools") - subworkflow_path = Path(self.pipeline_dir, "subworkflows", "nf-core", "bam_sort_stats_samtools") + subworkflow_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") patch_fn = f"{'-'.join('bam_sort_stats_samtools')}.diff" # Check that a patch file with the correct name has been created From 6fc6da2514c0aa73a6c971f639e52bd4d44590cf Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 27 Aug 2024 07:30:15 +0200 Subject: [PATCH 016/164] change pipeline template and tooling to allow `main` as default branch --- .github/PULL_REQUEST_TEMPLATE.md | 2 +- .github/RELEASE_CHECKLIST.md | 6 ++-- .github/workflows/branch.yml | 16 ++++----- .github/workflows/pytest.yml | 2 +- README.md | 8 ++--- nf_core/__main__.py | 5 +-- nf_core/components/create.py | 4 +-- .../pipeline-template/.github/CONTRIBUTING.md | 6 ++-- .../.github/PULL_REQUEST_TEMPLATE.md | 4 +-- .../.github/workflows/awsfulltest.yml | 3 +- .../.github/workflows/branch.yml | 18 +++++----- .../.github/workflows/download_pipeline.yml | 4 ++- nf_core/pipeline-template/README.md | 2 +- .../assets/schema_input.json | 2 +- nf_core/pipelines/create/create.py | 34 ++++++++++++------- nf_core/pipelines/download.py | 2 +- nf_core/pipelines/lint/actions_awsfulltest.py | 2 +- nf_core/pipelines/lint/version_consistency.py | 2 +- nf_core/pipelines/schema.py | 2 +- nf_core/synced_repo.py | 3 +- tests/pipelines/lint/test_actions_awstest.py | 2 +- tests/pipelines/test_lint.py | 2 +- tests/pipelines/test_sync.py | 8 ++--- 
23 files changed, 78 insertions(+), 61 deletions(-) diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 71411be1b..9dbd7a1f6 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -6,7 +6,7 @@ These are the most common things requested on pull requests (PRs). Remember that PRs should be made against the dev branch, unless you're preparing a release. -Learn more about contributing: https://github.com/nf-core/tools/tree/master/.github/CONTRIBUTING.md +Learn more about contributing: https://github.com/nf-core/tools/tree/main/.github/CONTRIBUTING.md --> ## PR checklist diff --git a/.github/RELEASE_CHECKLIST.md b/.github/RELEASE_CHECKLIST.md index 9a1905c7a..f96df0f76 100644 --- a/.github/RELEASE_CHECKLIST.md +++ b/.github/RELEASE_CHECKLIST.md @@ -6,11 +6,11 @@ 4. Check that modules/subworkflows in template are up to date with the latest releases 5. Create a PR to `dev` to bump the version in `CHANGELOG.md` and `setup.py` and change the gitpod container to `nfcore/gitpod:latest`. 6. Make sure all CI tests are passing! -7. Create a PR from `dev` to `master` -8. Make sure all CI tests are passing again (additional tests are run on PRs to `master`) +7. Create a PR from `dev` to `main` +8. Make sure all CI tests are passing again (additional tests are run on PRs to `main`) 9. Request review (2 approvals required) 10. Run `rich-codex` to regenerate docs screengrabs (actions `workflow_dispatch` button) -11. Merge the PR into `master` +11. Merge the PR into `main` 12. Wait for CI tests on the commit to passed 13. (Optional but a good idea) Run a manual sync on `nf-core/testpipeline` and check that CI is passing on the resulting PR. 14. Create a new release copying the `CHANGELOG` for that release into the description section. 
diff --git a/.github/workflows/branch.yml b/.github/workflows/branch.yml index bbac1cc6f..a116a3622 100644 --- a/.github/workflows/branch.yml +++ b/.github/workflows/branch.yml @@ -1,15 +1,15 @@ name: nf-core branch protection -# This workflow is triggered on PRs to master branch on the repository -# It fails when someone tries to make a PR against the nf-core `master` branch instead of `dev` +# This workflow is triggered on PRs to main/master branch on the repository +# It fails when someone tries to make a PR against the nf-core `main/master` branch instead of `dev` on: pull_request_target: - branches: [master] + branches: [main, master] jobs: test: runs-on: ubuntu-latest steps: - # PRs to the nf-core repo master branch are only ok if coming from the nf-core repo `dev` or any `patch` branches + # PRs to the nf-core repo main/master branch are only ok if coming from the nf-core repo `dev` or any `patch` branches - name: Check PRs if: github.repository == 'nf-core/tools' run: | @@ -21,7 +21,7 @@ jobs: uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2 with: message: | - ## This PR is against the `master` branch :x: + ## This PR is against the `main/master` branch :x: * Do not close this PR * Click _Edit_ and change the `base` to `dev` @@ -31,9 +31,9 @@ jobs: Hi @${{ github.event.pull_request.user.login }}, - It looks like this pull-request is has been made against the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) `master` branch. - The `master` branch on nf-core repositories should always contain code from the latest release. - Because of this, PRs to `master` are only allowed if they come from the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) `dev` branch. 
+ It looks like this pull-request is has been made against the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) `main` branch. + The `main/master` branch on nf-core repositories should always contain code from the latest release. + Because of this, PRs to `main/master` are only allowed if they come from the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) `dev` branch. You do not need to close this PR, you can change the target branch to `dev` by clicking the _"Edit"_ button at the top of this page. Note that even after this, the test will continue to show as failing until you push a new commit. diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index dc8803188..3a3d04949 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -49,7 +49,7 @@ jobs: steps: - name: Check conditions id: conditions - run: echo "run-tests=${{ github.ref == 'refs/heads/master' || (matrix.runner == 'ubuntu-20.04' && matrix.python-version == '3.8') }}" >> "$GITHUB_OUTPUT" + run: echo "run-tests=${{ github.ref == 'refs/heads/master' || github.ref == 'refs/heads/main' || (matrix.runner == 'ubuntu-20.04' && matrix.python-version == '3.8') }}" >> "$GITHUB_OUTPUT" outputs: python-version: ${{ matrix.python-version }} diff --git a/README.md b/README.md index 58fb708a0..710efed05 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,12 @@

- - nf-core/tools + + nf-core/tools

-[![Python tests](https://github.com/nf-core/tools/workflows/Python%20tests/badge.svg?branch=master&event=push)](https://github.com/nf-core/tools/actions?query=workflow%3A%22Python+tests%22+branch%3Amaster) -[![codecov](https://codecov.io/gh/nf-core/tools/branch/master/graph/badge.svg)](https://codecov.io/gh/nf-core/tools) +[![Python tests](https://github.com/nf-core/tools/workflows/Python%20tests/badge.svg?branch=main&event=push)](https://github.com/nf-core/tools/actions?query=workflow%3A%22Python+tests%22+branch%3Amain) +[![codecov](https://codecov.io/gh/nf-core/tools/branch/main/graph/badge.svg)](https://codecov.io/gh/nf-core/tools) [![code style: prettier](https://img.shields.io/badge/code%20style-prettier-ff69b4.svg)](https://github.com/prettier/prettier) [![code style: Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v1.json)](https://github.com/charliermarsh/ruff) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 0efea13ec..212278aa2 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -4,6 +4,7 @@ import logging import os import sys +from pathlib import Path import rich import rich.console @@ -286,7 +287,7 @@ def command_pipelines_create(ctx, name, description, author, version, force, out @click.option( "--release", is_flag=True, - default=os.path.basename(os.path.dirname(os.environ.get("GITHUB_REF", "").strip(" '\""))) == "master" + default=Path(os.environ.get("GITHUB_REF", "").strip(" '\"")).parent.name in ["master", "main"] and os.environ.get("GITHUB_REPOSITORY", "").startswith("nf-core/") and not os.environ.get("GITHUB_REPOSITORY", "") == "nf-core/tools", help="Execute additional checks for release-ready workflows.", @@ -2139,7 +2140,7 @@ def command_download( @click.option( "--release", is_flag=True, - default=os.path.basename(os.path.dirname(os.environ.get("GITHUB_REF", "").strip(" '\""))) == "master" + default=Path(os.environ.get("GITHUB_REF", "").strip(" 
'\"")).parent.name in ["master", "main"] and os.environ.get("GITHUB_REPOSITORY", "").startswith("nf-core/") and not os.environ.get("GITHUB_REPOSITORY", "") == "nf-core/tools", help="Execute additional checks for release-ready workflows.", diff --git a/nf_core/components/create.py b/nf_core/components/create.py index c71b12841..6b3b9dad2 100644 --- a/nf_core/components/create.py +++ b/nf_core/components/create.py @@ -244,7 +244,7 @@ def _get_module_structure_components(self): if self.process_label is None: log.info( "Provide an appropriate resource label for the process, taken from the " - "[link=https://github.com/nf-core/tools/blob/master/nf_core/pipeline-template/conf/base.config#L29]nf-core pipeline template[/link].\n" + "[link=https://github.com/nf-core/tools/blob/main/nf_core/pipeline-template/conf/base.config#L29]nf-core pipeline template[/link].\n" "For example: {}".format(", ".join(process_label_defaults)) ) while self.process_label is None: @@ -260,7 +260,7 @@ def _get_module_structure_components(self): "Where applicable all sample-specific information e.g. 'id', 'single_end', 'read_group' " "MUST be provided as an input via a Groovy Map called 'meta'. " "This information may [italic]not[/] be required in some instances, for example " - "[link=https://github.com/nf-core/modules/blob/master/modules/nf-core/bwa/index/main.nf]indexing reference genome files[/link]." + "[link=https://github.com/nf-core/modules/blob/main/modules/nf-core/bwa/index/main.nf]indexing reference genome files[/link]." 
) while self.has_meta is None: self.has_meta = rich.prompt.Confirm.ask( diff --git a/nf_core/pipeline-template/.github/CONTRIBUTING.md b/nf_core/pipeline-template/.github/CONTRIBUTING.md index 5a58501bb..d0efeb92c 100644 --- a/nf_core/pipeline-template/.github/CONTRIBUTING.md +++ b/nf_core/pipeline-template/.github/CONTRIBUTING.md @@ -59,9 +59,9 @@ These tests are run both with the latest available version of `Nextflow` and als :warning: Only in the unlikely and regretful event of a release happening with a bug. -- On your own fork, make a new branch `patch` based on `upstream/master`. +- On your own fork, make a new branch `patch` based on `upstream/main` or `upstream/master`. - Fix the bug, and bump version (X.Y.Z+1). -- A PR should be made on `master` from patch to directly this particular bug. +- A PR should be made on `main`/`master` from patch to directly this particular bug. {% if is_nfcore -%} @@ -100,7 +100,7 @@ Once there, use `nf-core pipelines schema build` to add to `nextflow_schema.json ### Default processes resource requirements -Sensible defaults for process resource requirements (CPUs / memory / time) for a process should be defined in `conf/base.config`. These should generally be specified generic with `withLabel:` selectors so they can be shared across multiple processes/steps of the pipeline. A nf-core standard set of labels that should be followed where possible can be seen in the [nf-core pipeline template](https://github.com/nf-core/tools/blob/master/nf_core/pipeline-template/conf/base.config), which has the default process as a single core-process, and then different levels of multi-core configurations for increasingly large memory requirements defined with standardised labels. +Sensible defaults for process resource requirements (CPUs / memory / time) for a process should be defined in `conf/base.config`. 
These should generally be specified generic with `withLabel:` selectors so they can be shared across multiple processes/steps of the pipeline. A nf-core standard set of labels that should be followed where possible can be seen in the [nf-core pipeline template](https://github.com/nf-core/tools/blob/main/nf_core/pipeline-template/conf/base.config), which has the default process as a single core-process, and then different levels of multi-core configurations for increasingly large memory requirements defined with standardised labels. The process resources can be passed on to the tool dynamically within the process with the `${task.cpus}` and `${task.memory}` variables in the `script:` block. diff --git a/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md b/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md index dee23ccab..88d5c7efa 100644 --- a/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md +++ b/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md @@ -8,14 +8,14 @@ These are the most common things requested on pull requests (PRs). Remember that PRs should be made against the dev branch, unless you're preparing a pipeline release. -Learn more about contributing: [CONTRIBUTING.md](https://github.com/{{ name }}/tree/master/.github/CONTRIBUTING.md) +Learn more about contributing: [CONTRIBUTING.md](https://github.com/{{ name }}/tree/{{ default_branch }}/.github/CONTRIBUTING.md) --> ## PR checklist - [ ] This comment contains a description of changes (with reason). - [ ] If you've fixed a bug or added code that should be tested, add tests! 
-- [ ] If you've added a new tool - have you followed the pipeline conventions in the [contribution docs](https://github.com/{{ name }}/tree/master/.github/CONTRIBUTING.md) +- [ ] If you've added a new tool - have you followed the pipeline conventions in the [contribution docs](https://github.com/{{ name }}/tree/{{ default_branch }}/.github/CONTRIBUTING.md) {%- if is_nfcore %} - [ ] If necessary, also make a PR on the {{ name }} _branch_ on the [nf-core/test-datasets](https://github.com/nf-core/test-datasets) repository. {%- endif %} diff --git a/nf_core/pipeline-template/.github/workflows/awsfulltest.yml b/nf_core/pipeline-template/.github/workflows/awsfulltest.yml index dc0450be4..922e535e2 100644 --- a/nf_core/pipeline-template/.github/workflows/awsfulltest.yml +++ b/nf_core/pipeline-template/.github/workflows/awsfulltest.yml @@ -1,11 +1,12 @@ name: nf-core AWS full size tests -# This workflow is triggered on PRs opened against the master branch. +# This workflow is triggered on PRs opened against the main/master branch. # It can be additionally triggered manually with GitHub actions workflow dispatch button. 
# It runs the -profile 'test_full' on AWS batch on: pull_request: branches: + - main - master workflow_dispatch: pull_request_review: diff --git a/nf_core/pipeline-template/.github/workflows/branch.yml b/nf_core/pipeline-template/.github/workflows/branch.yml index df1a627b1..e0ae1aa8a 100644 --- a/nf_core/pipeline-template/.github/workflows/branch.yml +++ b/nf_core/pipeline-template/.github/workflows/branch.yml @@ -1,15 +1,17 @@ name: nf-core branch protection -# This workflow is triggered on PRs to master branch on the repository -# It fails when someone tries to make a PR against the nf-core `master` branch instead of `dev` +# This workflow is triggered on PRs to main/master branch on the repository +# It fails when someone tries to make a PR against the nf-core `main/master` branch instead of `dev` on: pull_request_target: - branches: [master] + branches: + - main + - master jobs: test: runs-on: ubuntu-latest steps: - # PRs to the nf-core repo master branch are only ok if coming from the nf-core repo `dev` or any `patch` branches + # PRs to the nf-core repo main/master branch are only ok if coming from the nf-core repo `dev` or any `patch` branches - name: Check PRs if: github.repository == '{{ name }}' run: | @@ -22,7 +24,7 @@ jobs: uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2 with: message: | - ## This PR is against the `master` branch :x: + ## This PR is against the `main/master` branch :x: * Do not close this PR * Click _Edit_ and change the `base` to `dev` @@ -32,9 +34,9 @@ jobs: Hi @${{ github.event.pull_request.user.login }}, - It looks like this pull-request is has been made against the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) `master` branch. - The `master` branch on nf-core repositories should always contain code from the latest release. 
- Because of this, PRs to `master` are only allowed if they come from the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) `dev` branch. + It looks like this pull-request is has been made against the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) `main/master` branch. + The `main/master` branch on nf-core repositories should always contain code from the latest release. + Because of this, PRs to `main/master` are only allowed if they come from the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) `dev` branch. You do not need to close this PR, you can change the target branch to `dev` by clicking the _"Edit"_ button at the top of this page. Note that even after this, the test will continue to show as failing until you push a new commit. diff --git a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml index 99a42d86d..93765d030 100644 --- a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml +++ b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml @@ -2,7 +2,7 @@ name: Test successful pipeline download with 'nf-core pipelines download' # Run the workflow when: # - dispatched manually -# - when a PR is opened or reopened to master branch +# - when a PR is opened or reopened to main/master branch # - the head branch of the pull request is updated, i.e. if fixes for a release are pushed last minute to dev. 
on: workflow_dispatch: @@ -17,9 +17,11 @@ on: - edited - synchronize branches: + - main - master pull_request_target: branches: + - main - master env: diff --git a/nf_core/pipeline-template/README.md b/nf_core/pipeline-template/README.md index 7718d2e5f..8074da2f3 100644 --- a/nf_core/pipeline-template/README.md +++ b/nf_core/pipeline-template/README.md @@ -124,7 +124,7 @@ An extensive list of references for the tools used by the pipeline can be found You can cite the `nf-core` publication as follows: {% else -%} -This pipeline uses code and infrastructure developed and maintained by the [nf-core](https://nf-co.re) community, reused here under the [MIT license](https://github.com/nf-core/tools/blob/master/LICENSE). +This pipeline uses code and infrastructure developed and maintained by the [nf-core](https://nf-co.re) community, reused here under the [MIT license](https://github.com/nf-core/tools/blob/{{ default_branch }}/LICENSE). {% endif -%} diff --git a/nf_core/pipeline-template/assets/schema_input.json b/nf_core/pipeline-template/assets/schema_input.json index e76b95fa9..6271f572f 100644 --- a/nf_core/pipeline-template/assets/schema_input.json +++ b/nf_core/pipeline-template/assets/schema_input.json @@ -1,6 +1,6 @@ { "$schema": "http://json-schema.org/draft-07/schema", - "$id": "https://raw.githubusercontent.com/{{ name }}/master/assets/schema_input.json", + "$id": "https://raw.githubusercontent.com/{{ name }}/{{ default_branch }}/assets/schema_input.json", "title": "{{ name }} pipeline - params.input schema", "description": "Schema for the file provided with params.input", "type": "array", diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 05b04a542..71265a7e2 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -86,6 +86,10 @@ def __init__( if self.config.outdir is None: self.config.outdir = str(Path.cwd()) + + # Get the default branch name from the Git configuration + 
self.get_default_branch() + self.jinja_params, self.skip_areas = self.obtain_jinja_params_dict( self.config.skip_features or [], str(self.config.outdir) ) @@ -230,6 +234,7 @@ def obtain_jinja_params_dict( jinja_params["name_docker"] = jinja_params["name"].replace(jinja_params["org"], jinja_params["prefix_nodash"]) jinja_params["logo_light"] = f"{jinja_params['name_noslash']}_logo_light.png" jinja_params["logo_dark"] = f"{jinja_params['name_noslash']}_logo_dark.png" + jinja_params["default_branch"] = self.default_branch if config_yml is not None: if ( hasattr(config_yml, "lint") @@ -251,6 +256,7 @@ def obtain_jinja_params_dict( def init_pipeline(self): """Creates the nf-core pipeline.""" + # Make the new pipeline self.render_template() @@ -417,20 +423,17 @@ def make_pipeline_logo(self): force=bool(self.force), ) - def git_init_pipeline(self) -> None: - """Initialises the new pipeline as a Git repository and submits first commit. - - Raises: - UserWarning: if Git default branch is set to 'dev' or 'TEMPLATE'. 
- """ - default_branch: Optional[str] = self.default_branch + def get_default_branch(self) -> None: + """Gets the default branch name from the Git configuration.""" try: - default_branch = default_branch or str(git.config.GitConfigParser().get_value("init", "defaultBranch")) + self.default_branch = ( + str(git.config.GitConfigParser().get_value("init", "defaultBranch")) or "main" + ) # default to main except configparser.Error: log.debug("Could not read init.defaultBranch") - if default_branch in ["dev", "TEMPLATE"]: + if self.default_branch in ["dev", "TEMPLATE"]: raise UserWarning( - f"Your Git defaultBranch '{default_branch}' is incompatible with nf-core.\n" + f"Your Git defaultBranch '{self.default_branch}' is incompatible with nf-core.\n" "'dev' and 'TEMPLATE' can not be used as default branch name.\n" "Set the default branch name with " "[white on grey23] git config --global init.defaultBranch [/]\n" @@ -438,12 +441,19 @@ def git_init_pipeline(self) -> None: "Pipeline git repository will not be initialised." ) + def git_init_pipeline(self) -> None: + """Initialises the new pipeline as a Git repository and submits first commit. + + Raises: + UserWarning: if Git default branch is set to 'dev' or 'TEMPLATE'. 
+ """ + log.info("Initialising local pipeline git repository") repo = git.Repo.init(self.outdir) repo.git.add(A=True) repo.index.commit(f"initial template build from nf-core/tools, version {nf_core.__version__}") - if default_branch: - repo.active_branch.rename(default_branch) + if self.default_branch: + repo.active_branch.rename(self.default_branch) try: repo.git.branch("TEMPLATE") repo.git.branch("dev") diff --git a/nf_core/pipelines/download.py b/nf_core/pipelines/download.py index 97453b127..d153188ff 100644 --- a/nf_core/pipelines/download.py +++ b/nf_core/pipelines/download.py @@ -1672,7 +1672,7 @@ def tidy_tags_and_branches(self): for tag in tags_to_remove: self.repo.delete_tag(tag) - # switch to a revision that should be kept, because deleting heads fails, if they are checked out (e.g. "master") + # switch to a revision that should be kept, because deleting heads fails, if they are checked out (e.g. "main") self.checkout(self.revision[0]) # delete unwanted heads/branches from repository diff --git a/nf_core/pipelines/lint/actions_awsfulltest.py b/nf_core/pipelines/lint/actions_awsfulltest.py index 7ea167f6c..2fa7e9082 100644 --- a/nf_core/pipelines/lint/actions_awsfulltest.py +++ b/nf_core/pipelines/lint/actions_awsfulltest.py @@ -42,7 +42,7 @@ def actions_awsfulltest(self) -> Dict[str, List[str]]: # Check that the action is only turned on for published releases try: - if wf[True]["pull_request"]["branches"] != ["master"]: + if wf[True]["pull_request"]["branches"] != ["master", "main"]: raise AssertionError() if wf[True]["pull_request_review"]["types"] != ["submitted"]: raise AssertionError() diff --git a/nf_core/pipelines/lint/version_consistency.py b/nf_core/pipelines/lint/version_consistency.py index 5fe24ed72..2f9cead83 100644 --- a/nf_core/pipelines/lint/version_consistency.py +++ b/nf_core/pipelines/lint/version_consistency.py @@ -5,7 +5,7 @@ def version_consistency(self): """Pipeline and container version number consistency. .. 
note:: This test only runs when the ``--release`` flag is set for ``nf-core pipelines lint``, - or ``$GITHUB_REF`` is equal to ``master``. + or ``$GITHUB_REF`` is equal to ``main``. This lint fetches the pipeline version number from three possible locations: diff --git a/nf_core/pipelines/schema.py b/nf_core/pipelines/schema.py index 7f562bff3..8c9437c7f 100644 --- a/nf_core/pipelines/schema.py +++ b/nf_core/pipelines/schema.py @@ -427,7 +427,7 @@ def validate_schema_title_description(self, schema=None): if "title" not in self.schema: raise AssertionError("Schema missing top-level `title` attribute") # Validate that id, title and description match the pipeline manifest - id_attr = "https://raw.githubusercontent.com/{}/master/nextflow_schema.json".format( + id_attr = "https://raw.githubusercontent.com/{}/main/nextflow_schema.json".format( self.pipeline_manifest["name"].strip("\"'") ) if self.schema["$id"] != id_attr: diff --git a/nf_core/synced_repo.py b/nf_core/synced_repo.py index e2a76ccae..bccd26571 100644 --- a/nf_core/synced_repo.py +++ b/nf_core/synced_repo.py @@ -10,6 +10,7 @@ from git.exc import GitCommandError from nf_core.components.components_utils import ( + NF_CORE_MODULES_DEFAULT_BRANCH, NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE, ) @@ -186,7 +187,7 @@ def setup_branch(self, branch): if branch is None: # Don't bother fetching default branch if we're using nf-core if self.remote_url == NF_CORE_MODULES_REMOTE: - self.branch = "master" + self.branch = NF_CORE_MODULES_DEFAULT_BRANCH else: self.branch = self.get_default_branch() else: diff --git a/tests/pipelines/lint/test_actions_awstest.py b/tests/pipelines/lint/test_actions_awstest.py index 51b55cb86..01dc9f616 100644 --- a/tests/pipelines/lint/test_actions_awstest.py +++ b/tests/pipelines/lint/test_actions_awstest.py @@ -24,7 +24,7 @@ def test_actions_awstest_fail(self): new_pipeline = self._make_pipeline_copy() with open(Path(new_pipeline, ".github", "workflows", "awstest.yml")) as fh: awstest_yml = 
yaml.safe_load(fh) - awstest_yml[True]["push"] = ["master"] + awstest_yml[True]["push"] = ["main"] with open(Path(new_pipeline, ".github", "workflows", "awstest.yml"), "w") as fh: yaml.dump(awstest_yml, fh) diff --git a/tests/pipelines/test_lint.py b/tests/pipelines/test_lint.py index 9ca29d249..543cc5ae0 100644 --- a/tests/pipelines/test_lint.py +++ b/tests/pipelines/test_lint.py @@ -25,7 +25,7 @@ def setUp(self) -> None: ########################## class TestPipelinesLint(TestLint): def test_run_linting_function(self): - """Run the master run_linting() function in lint.py + """Run the run_linting() function in lint.py We don't really check any of this code as it's just a series of function calls and we're testing each of those individually. This is mostly to check for syntax errors.""" diff --git a/tests/pipelines/test_sync.py b/tests/pipelines/test_sync.py index ffbe75510..66b6b9623 100644 --- a/tests/pipelines/test_sync.py +++ b/tests/pipelines/test_sync.py @@ -43,14 +43,14 @@ def mocked_requests_get(url) -> MockResponse: { "state": "closed", "head": {"ref": "nf-core-template-merge-2"}, - "base": {"ref": "master"}, + "base": {"ref": "main"}, "html_url": "pr_url", } ] + [ { "state": "open", "head": {"ref": f"nf-core-template-merge-{branch_no}"}, - "base": {"ref": "master"}, + "base": {"ref": "main"}, "html_url": "pr_url", } for branch_no in range(3, 7) @@ -343,7 +343,7 @@ def test_close_open_pr(self, mock_patch, mock_post) -> None: pr: Dict[str, Union[str, Dict[str, str]]] = { "state": "open", "head": {"ref": "nf-core-template-merge-3"}, - "base": {"ref": "master"}, + "base": {"ref": "main"}, "html_url": "pr_html_url", "url": "url_to_update_pr", "comments_url": "pr_comments_url", @@ -366,7 +366,7 @@ def test_close_open_pr_fail(self, mock_patch, mock_post): pr = { "state": "open", "head": {"ref": "nf-core-template-merge-3"}, - "base": {"ref": "master"}, + "base": {"ref": "main"}, "html_url": "pr_html_url", "url": "bad_url_to_update_pr", "comments_url": 
"pr_comments_url", From 889e59e15f2d77377ebfd7912ee5205bd853a403 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 27 Aug 2024 08:41:53 +0200 Subject: [PATCH 017/164] handle missing self.default_branch --- nf_core/pipelines/create/create.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 71265a7e2..2a227329f 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -431,7 +431,7 @@ def get_default_branch(self) -> None: ) # default to main except configparser.Error: log.debug("Could not read init.defaultBranch") - if self.default_branch in ["dev", "TEMPLATE"]: + if self.default_branch is not None and self.default_branch in ["dev", "TEMPLATE"]: raise UserWarning( f"Your Git defaultBranch '{self.default_branch}' is incompatible with nf-core.\n" "'dev' and 'TEMPLATE' can not be used as default branch name.\n" From 5b3dabd588f84f2e2dc111aa639e07a3ebfd3c87 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 27 Aug 2024 08:47:44 +0200 Subject: [PATCH 018/164] fix order of initialization --- nf_core/pipelines/create/create.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 2a227329f..8fbb04908 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -84,6 +84,11 @@ def __init__( # Read features yaml file self.template_features_yml = load_features_yaml() + # Set fields used by the class methods + self.no_git = no_git + self.default_branch = default_branch + self.is_interactive = is_interactive + if self.config.outdir is None: self.config.outdir = str(Path.cwd()) @@ -108,11 +113,6 @@ def __init__( # Set convenience variables self.name = self.config.name - - # Set fields used by the class methods - self.no_git = no_git - self.default_branch = default_branch - self.is_interactive = is_interactive self.force = 
self.config.force if self.config.outdir == ".": From da915195f6450f6870eec2b2dbccf1dbd7f2206d Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 27 Aug 2024 09:00:05 +0200 Subject: [PATCH 019/164] fix schema check --- nf_core/pipeline-template/nextflow_schema.json | 2 +- nf_core/pipelines/schema.py | 12 ++++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/nf_core/pipeline-template/nextflow_schema.json b/nf_core/pipeline-template/nextflow_schema.json index 4a376330b..819dd7149 100644 --- a/nf_core/pipeline-template/nextflow_schema.json +++ b/nf_core/pipeline-template/nextflow_schema.json @@ -1,6 +1,6 @@ { "$schema": "http://json-schema.org/draft-07/schema", - "$id": "https://raw.githubusercontent.com/{{ name }}/master/nextflow_schema.json", + "$id": "https://raw.githubusercontent.com/{{ name }}/{{ default_branch }}/nextflow_schema.json", "title": "{{ name }} pipeline parameters", "description": "{{ description }}", "type": "object", diff --git a/nf_core/pipelines/schema.py b/nf_core/pipelines/schema.py index 8c9437c7f..1a75029f3 100644 --- a/nf_core/pipelines/schema.py +++ b/nf_core/pipelines/schema.py @@ -402,7 +402,7 @@ def validate_schema(self, schema=None): def validate_schema_title_description(self, schema=None): """ Extra validation command for linting. - Checks that the schema "$id", "title" and "description" attributes match the piipeline config. + Checks that the schema "$id", "title" and "description" attributes match the pipeline config. 
""" if schema is None: schema = self.schema @@ -427,11 +427,11 @@ def validate_schema_title_description(self, schema=None): if "title" not in self.schema: raise AssertionError("Schema missing top-level `title` attribute") # Validate that id, title and description match the pipeline manifest - id_attr = "https://raw.githubusercontent.com/{}/main/nextflow_schema.json".format( - self.pipeline_manifest["name"].strip("\"'") - ) - if self.schema["$id"] != id_attr: - raise AssertionError(f"Schema `$id` should be `{id_attr}`\n Found `{self.schema['$id']}`") + id_attr = f"https://raw.githubusercontent.com/{self.pipeline_manifest["name"].strip("\"'")}/main/nextflow_schema.json" + if self.schema["$id"] not in [id_attr, id_attr.replace("/main/", "/master/")]: + raise AssertionError( + f"Schema `$id` should be `{id_attr}` or {id_attr.replace("/main/", "/master/")}. \n Found `{self.schema['$id']}`" + ) title_attr = "{} pipeline parameters".format(self.pipeline_manifest["name"].strip("\"'")) if self.schema["title"] != title_attr: From ed0fb1b4bb0b7c21ea61d6ed84197b692b02905a Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 27 Aug 2024 09:08:51 +0200 Subject: [PATCH 020/164] set default value for default branch --- nf_core/pipelines/create/create.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 8fbb04908..c102441c3 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -56,7 +56,7 @@ def __init__( template_config: Optional[Union[CreateConfig, str, Path]] = None, organisation: str = "nf-core", from_config_file: bool = False, - default_branch: Optional[str] = None, + default_branch: str = "main", is_interactive: bool = False, ) -> None: if isinstance(template_config, CreateConfig): @@ -431,7 +431,7 @@ def get_default_branch(self) -> None: ) # default to main except configparser.Error: log.debug("Could not read init.defaultBranch") - if 
self.default_branch is not None and self.default_branch in ["dev", "TEMPLATE"]: + if self.default_branch in ["dev", "TEMPLATE"]: raise UserWarning( f"Your Git defaultBranch '{self.default_branch}' is incompatible with nf-core.\n" "'dev' and 'TEMPLATE' can not be used as default branch name.\n" From 9d2a3abf8af09b172b5065e40d7618f0dbfbecca Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 27 Aug 2024 09:12:38 +0200 Subject: [PATCH 021/164] fix order in lint --- nf_core/pipelines/lint/actions_awsfulltest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipelines/lint/actions_awsfulltest.py b/nf_core/pipelines/lint/actions_awsfulltest.py index 2fa7e9082..080ae3583 100644 --- a/nf_core/pipelines/lint/actions_awsfulltest.py +++ b/nf_core/pipelines/lint/actions_awsfulltest.py @@ -42,7 +42,7 @@ def actions_awsfulltest(self) -> Dict[str, List[str]]: # Check that the action is only turned on for published releases try: - if wf[True]["pull_request"]["branches"] != ["master", "main"]: + if wf[True]["pull_request"]["branches"] != ["main", "master"]: raise AssertionError() if wf[True]["pull_request_review"]["types"] != ["submitted"]: raise AssertionError() From 20545509ff899d82683baca5a7395a84febb9c73 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 27 Aug 2024 10:07:47 +0200 Subject: [PATCH 022/164] fix quotes --- nf_core/pipelines/schema.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipelines/schema.py b/nf_core/pipelines/schema.py index 1a75029f3..520fbab94 100644 --- a/nf_core/pipelines/schema.py +++ b/nf_core/pipelines/schema.py @@ -427,7 +427,7 @@ def validate_schema_title_description(self, schema=None): if "title" not in self.schema: raise AssertionError("Schema missing top-level `title` attribute") # Validate that id, title and description match the pipeline manifest - id_attr = f"https://raw.githubusercontent.com/{self.pipeline_manifest["name"].strip("\"'")}/main/nextflow_schema.json" + id_attr = 
f"https://raw.githubusercontent.com/{self.pipeline_manifest['name'].strip('\"\'')}/main/nextflow_schema.json" if self.schema["$id"] not in [id_attr, id_attr.replace("/main/", "/master/")]: raise AssertionError( f"Schema `$id` should be `{id_attr}` or {id_attr.replace("/main/", "/master/")}. \n Found `{self.schema['$id']}`" From 19ec18810aafff259a85bb848520e9671c919672 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 27 Aug 2024 10:30:07 +0200 Subject: [PATCH 023/164] revert to format --- nf_core/pipelines/schema.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nf_core/pipelines/schema.py b/nf_core/pipelines/schema.py index 520fbab94..b0feab091 100644 --- a/nf_core/pipelines/schema.py +++ b/nf_core/pipelines/schema.py @@ -427,7 +427,8 @@ def validate_schema_title_description(self, schema=None): if "title" not in self.schema: raise AssertionError("Schema missing top-level `title` attribute") # Validate that id, title and description match the pipeline manifest - id_attr = f"https://raw.githubusercontent.com/{self.pipeline_manifest['name'].strip('\"\'')}/main/nextflow_schema.json" + pipeline_name = self.pipeline_manifest["name"].strip("\"'") + id_attr = f"https://raw.githubusercontent.com/{pipeline_name}/main/nextflow_schema.json" if self.schema["$id"] not in [id_attr, id_attr.replace("/main/", "/master/")]: raise AssertionError( f"Schema `$id` should be `{id_attr}` or {id_attr.replace("/main/", "/master/")}. 
\n Found `{self.schema['$id']}`" From fa2750a6c3fbc2ce3ccaf1f97e862fbbb301940e Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 27 Aug 2024 15:18:27 +0200 Subject: [PATCH 024/164] use format instead of f-string --- nf_core/pipelines/schema.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/nf_core/pipelines/schema.py b/nf_core/pipelines/schema.py index b0feab091..b27a2d918 100644 --- a/nf_core/pipelines/schema.py +++ b/nf_core/pipelines/schema.py @@ -427,8 +427,9 @@ def validate_schema_title_description(self, schema=None): if "title" not in self.schema: raise AssertionError("Schema missing top-level `title` attribute") # Validate that id, title and description match the pipeline manifest - pipeline_name = self.pipeline_manifest["name"].strip("\"'") - id_attr = f"https://raw.githubusercontent.com/{pipeline_name}/main/nextflow_schema.json" + id_attr = "https://raw.githubusercontent.com/{}/main/nextflow_schema.json".format( + self.pipeline_manifest["name"].strip("\"'") + ) if self.schema["$id"] not in [id_attr, id_attr.replace("/main/", "/master/")]: raise AssertionError( f"Schema `$id` should be `{id_attr}` or {id_attr.replace("/main/", "/master/")}. 
\n Found `{self.schema['$id']}`" From 5bc36e41ad319d4f869693294831dd45c18f77ff Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 17 Oct 2024 13:54:29 +0200 Subject: [PATCH 025/164] allow mixed list and dict in lint config --- nf_core/utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/utils.py b/nf_core/utils.py index 87dd307e7..ff8da1eea 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1088,7 +1088,7 @@ def get(self, item: str, default: Any = None) -> Any: return getattr(self, item, default) -LintConfigType = Optional[Dict[str, Union[List[str], List[Dict[str, List[str]]], bool]]] +LintConfigType = Optional[Dict[str, Union[List[str], List[Union[List[str], Dict[str, List[str]]]], bool]]] class NFCoreYamlConfig(BaseModel): @@ -1153,7 +1153,7 @@ def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Optional[Path] except ValidationError as e: error_message = f"Config file '{config_fn}' is invalid" for error in e.errors(): - error_message += f"\n{error['loc'][0]}: {error['msg']}" + error_message += f"\n{error['loc'][0]}: {error['msg']}\ninput: {error['input']}" raise AssertionError(error_message) wf_config = fetch_wf_config(Path(directory)) From 81bdb3b3587a0fe52339abc31720eaffe5898fcc Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 17 Oct 2024 14:11:18 +0200 Subject: [PATCH 026/164] nested too deeply --- nf_core/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/utils.py b/nf_core/utils.py index ff8da1eea..4b6e2ddc7 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1088,7 +1088,7 @@ def get(self, item: str, default: Any = None) -> Any: return getattr(self, item, default) -LintConfigType = Optional[Dict[str, Union[List[str], List[Union[List[str], Dict[str, List[str]]]], bool]]] +LintConfigType = Optional[Dict[str, Union[List[str], List[Union[str, Dict[str, List[str]]]], bool]]] class NFCoreYamlConfig(BaseModel): From 3d8d4b7de9cb1356511d61c346f02f43ffad5936 Mon Sep 17 
00:00:00 2001 From: nf-core-bot Date: Thu, 17 Oct 2024 12:14:43 +0000 Subject: [PATCH 027/164] [automated] Update CHANGELOG.md --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 364a079a7..7a4fd3583 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,6 +12,8 @@ ### Linting +- allow mixed str and dict in lint config ([#3228](https://github.com/nf-core/tools/pull/3228)) + ### Modules ### Subworkflows From 2b4029b699471e73114a4bd4c9da930c8259d55d Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 21 Oct 2024 13:51:55 +0200 Subject: [PATCH 028/164] handle new nf-core.yml structure --- nf_core/pipelines/lint/files_exist.py | 2 ++ nf_core/pipelines/lint/files_unchanged.py | 2 ++ nf_core/pipelines/lint/template_strings.py | 4 +++- 3 files changed, 7 insertions(+), 1 deletion(-) diff --git a/nf_core/pipelines/lint/files_exist.py b/nf_core/pipelines/lint/files_exist.py index 9dd307d8b..62af34845 100644 --- a/nf_core/pipelines/lint/files_exist.py +++ b/nf_core/pipelines/lint/files_exist.py @@ -200,6 +200,8 @@ def files_exist(self) -> Dict[str, List[str]]: # Remove files that should be ignored according to the linting config ignore_files = self.lint_config.get("files_exist", []) if self.lint_config is not None else [] + if ignore_files is None: + ignore_files = [] def pf(file_path: Union[str, Path]) -> Path: return Path(self.wf_path, file_path) diff --git a/nf_core/pipelines/lint/files_unchanged.py b/nf_core/pipelines/lint/files_unchanged.py index 300b3674b..2a0f8ffd3 100644 --- a/nf_core/pipelines/lint/files_unchanged.py +++ b/nf_core/pipelines/lint/files_unchanged.py @@ -144,6 +144,8 @@ def _tf(file_path: Union[str, Path]) -> Path: return Path(test_pipeline_dir, file_path) ignore_files = self.lint_config.get("files_unchanged", []) if self.lint_config is not None else [] + if ignore_files is None: + ignore_files = [] # Files that must be completely unchanged from template for files in files_exact: diff --git 
a/nf_core/pipelines/lint/template_strings.py b/nf_core/pipelines/lint/template_strings.py index 11c5e8251..0bf2ccbec 100644 --- a/nf_core/pipelines/lint/template_strings.py +++ b/nf_core/pipelines/lint/template_strings.py @@ -39,8 +39,10 @@ def template_strings(self): ignored = [] # Files that should be ignored according to the linting config ignore_files = self.lint_config.get("template_strings", []) if self.lint_config is not None else [] - files = self.list_files() + if ignore_files is None: + ignore_files = [] + files = self.list_files() # Loop through files, searching for string num_matches = 0 for fn in files: From 3a55f3682dde79066b3148e51388d80249a19de0 Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 21 Oct 2024 13:52:38 +0200 Subject: [PATCH 029/164] update documentation for `multiqc_config` linting --- nf_core/pipelines/lint/multiqc_config.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/nf_core/pipelines/lint/multiqc_config.py b/nf_core/pipelines/lint/multiqc_config.py index 2b0fc7902..fec5b518e 100644 --- a/nf_core/pipelines/lint/multiqc_config.py +++ b/nf_core/pipelines/lint/multiqc_config.py @@ -31,6 +31,15 @@ def multiqc_config(self) -> Dict[str, List[str]]: lint: multiqc_config: False + To disable this test only for specific sections, you can specify a list of section names. + For example: + + .. 
code-block:: yaml + lint: + multiqc_config: + - report_section_order + - report_comment + """ passed: List[str] = [] From fcc442aae3019facd0a2a5b84397d08cbe503c0e Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 21 Oct 2024 13:53:17 +0200 Subject: [PATCH 030/164] parse yaml correctly --- nf_core/pipelines/lint/nfcore_yml.py | 31 +++++++------- nf_core/utils.py | 62 +++++++++++++++++++++++++++- 2 files changed, 77 insertions(+), 16 deletions(-) diff --git a/nf_core/pipelines/lint/nfcore_yml.py b/nf_core/pipelines/lint/nfcore_yml.py index e0d5fb200..3395696d1 100644 --- a/nf_core/pipelines/lint/nfcore_yml.py +++ b/nf_core/pipelines/lint/nfcore_yml.py @@ -1,7 +1,8 @@ -import re from pathlib import Path from typing import Dict, List +from ruamel.yaml import YAML + from nf_core import __version__ REPOSITORY_TYPES = ["pipeline", "modules"] @@ -26,21 +27,23 @@ def nfcore_yml(self) -> Dict[str, List[str]]: failed: List[str] = [] ignored: List[str] = [] + yaml = YAML() + # Remove field that should be ignored according to the linting config ignore_configs = self.lint_config.get(".nf-core", []) if self.lint_config is not None else [] - try: - with open(Path(self.wf_path, ".nf-core.yml")) as fh: - content = fh.read() - except FileNotFoundError: - with open(Path(self.wf_path, ".nf-core.yaml")) as fh: - content = fh.read() + for ext in (".yml", ".yaml"): + try: + nf_core_yml = yaml.load(Path(self.wf_path) / f".nf-core{ext}") + break + except FileNotFoundError: + continue + else: + raise FileNotFoundError("No `.nf-core.yml` file found.") if "repository_type" not in ignore_configs: # Check that the repository type is set in the .nf-core.yml - repo_type_re = r"repository_type: (.+)" - match = re.search(repo_type_re, content) - if match: - repo_type = match.group(1) + if "repository_type" in nf_core_yml: + repo_type = nf_core_yml["repository_type"] if repo_type not in REPOSITORY_TYPES: failed.append( f"Repository type in `.nf-core.yml` is not valid. 
" @@ -55,10 +58,8 @@ def nfcore_yml(self) -> Dict[str, List[str]]: if "nf_core_version" not in ignore_configs: # Check that the nf-core version is set in the .nf-core.yml - nf_core_version_re = r"nf_core_version: (.+)" - match = re.search(nf_core_version_re, content) - if match: - nf_core_version = match.group(1).strip('"') + if "nf_core_version" in nf_core_yml: + nf_core_version = nf_core_yml["nf_core_version"] if nf_core_version != __version__ and "dev" not in nf_core_version: warned.append( f"nf-core version in `.nf-core.yml` is not set to the latest version. " diff --git a/nf_core/utils.py b/nf_core/utils.py index 4b6e2ddc7..5cce2494c 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1091,6 +1091,63 @@ def get(self, item: str, default: Any = None) -> Any: LintConfigType = Optional[Dict[str, Union[List[str], List[Union[str, Dict[str, List[str]]]], bool]]] +class NFCoreYamlLintConfig(BaseModel): + """ + schema for linting config in `.nf-core.yml` should cover: + + .. code-block:: yaml + files_unchanged: + - .github/workflows/branch.yml + modules_config: False + modules_config: + - fastqc + # merge_markers: False + merge_markers: + - docs/my_pdf.pdf + nextflow_config: False + nextflow_config: + - manifest.name + - config_defaults: + - params.annotation_db + - params.multiqc_comment_headers + - params.custom_table_headers + # multiqc_config: False + multiqc_config: + - report_section_order + - report_comment + files_exist: + - .github/CONTRIBUTING.md + - CITATIONS.md + template_strings: False + template_strings: + - docs/my_pdf.pdf + """ + + files_unchanged: Optional[List[str]] = None + """ List of files that should not be changed """ + modules_config: Optional[Union[bool, List[str]]] = None + """ List of modules that should not be changed """ + merge_markers: Optional[Union[bool, List[str]]] = None + """ List of files that should not contain merge markers """ + nextflow_config: Optional[Union[bool, List[Union[str, Dict[str, List[str]]]]]] = None + """ List 
of Nextflow config files that should not be changed """ + multiqc_config: Optional[List[str]] = None + """ List of MultiQC config options that be changed """ + files_exist: Optional[List[str]] = None + """ List of files that can not exist """ + template_strings: Optional[Union[bool, List[str]]] = None + """ List of files that can contain template strings """ + + def __getitem__(self, item: str) -> Any: + return getattr(self, item) + + def get(self, item: str, default: Any = None) -> Any: + return getattr(self, item, default) + + def __setitem__(self, item: str, value: Any) -> None: + setattr(self, item, value) + + class NFCoreYamlConfig(BaseModel): """.nf-core.yml configuration file schema""" @@ -1100,7 +1157,7 @@ class NFCoreYamlConfig(BaseModel): """ Version of nf-core/tools used to create/update the pipeline""" org_path: Optional[str] = None """ Path to the organisation's modules repository (used for modules repo_type only) """ - lint: Optional[LintConfigType] = None + lint: Optional[NFCoreYamlLintConfig] = None """ Pipeline linting configuration, see https://nf-co.re/docs/nf-core-tools/pipelines/lint#linting-config for examples and documentation """ template: Optional[NFCoreTemplateConfig] = None """ Pipeline template configuration """ @@ -1115,6 +1172,9 @@ def __getitem__(self, item: str) -> Any: def get(self, item: str, default: Any = None) -> Any: return getattr(self, item, default) + def __setitem__(self, item: str, value: Any) -> None: + setattr(self, item, value) + def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Optional[Path], Optional[NFCoreYamlConfig]]: """ From 482f9f9e3f7b4ad1864af20a03078335f77ab12a Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 21 Oct 2024 14:12:28 +0200 Subject: [PATCH 031/164] found a better way to handle the ignore file being None --- nf_core/pipelines/lint/files_exist.py | 2 -- nf_core/pipelines/lint/files_unchanged.py | 2 -- nf_core/pipelines/lint/template_strings.py | 2 -- nf_core/utils.py | 8 +++++--- 4 
files changed, 5 insertions(+), 9 deletions(-) diff --git a/nf_core/pipelines/lint/files_exist.py b/nf_core/pipelines/lint/files_exist.py index 62af34845..9dd307d8b 100644 --- a/nf_core/pipelines/lint/files_exist.py +++ b/nf_core/pipelines/lint/files_exist.py @@ -200,8 +200,6 @@ def files_exist(self) -> Dict[str, List[str]]: # Remove files that should be ignored according to the linting config ignore_files = self.lint_config.get("files_exist", []) if self.lint_config is not None else [] - if ignore_files is None: - ignore_files = [] def pf(file_path: Union[str, Path]) -> Path: return Path(self.wf_path, file_path) diff --git a/nf_core/pipelines/lint/files_unchanged.py b/nf_core/pipelines/lint/files_unchanged.py index 2a0f8ffd3..300b3674b 100644 --- a/nf_core/pipelines/lint/files_unchanged.py +++ b/nf_core/pipelines/lint/files_unchanged.py @@ -144,8 +144,6 @@ def _tf(file_path: Union[str, Path]) -> Path: return Path(test_pipeline_dir, file_path) ignore_files = self.lint_config.get("files_unchanged", []) if self.lint_config is not None else [] - if ignore_files is None: - ignore_files = [] # Files that must be completely unchanged from template for files in files_exact: diff --git a/nf_core/pipelines/lint/template_strings.py b/nf_core/pipelines/lint/template_strings.py index 0bf2ccbec..0cb669e55 100644 --- a/nf_core/pipelines/lint/template_strings.py +++ b/nf_core/pipelines/lint/template_strings.py @@ -39,8 +39,6 @@ def template_strings(self): ignored = [] # Files that should be ignored according to the linting config ignore_files = self.lint_config.get("template_strings", []) if self.lint_config is not None else [] - if ignore_files is None: - ignore_files = [] files = self.list_files() # Loop through files, searching for string diff --git a/nf_core/utils.py b/nf_core/utils.py index 5cce2494c..283e2e5c7 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1123,7 +1123,7 @@ class NFCoreYamlLintConfig(BaseModel): - docs/my_pdf.pdf """ - files_unchanged: 
Optional[List[str]] = None + files_unchanged: List[str] = [] """ List of files that should not be changed """ modules_config: Optional[Union[bool, List[str]]] = None """ List of modules that should not be changed """ @@ -1131,12 +1131,14 @@ class NFCoreYamlLintConfig(BaseModel): """ List of files that should not contain merge markers """ nextflow_config: Optional[Union[bool, List[Union[str, Dict[str, List[str]]]]]] = None """ List of Nextflow config files that should not be changed """ - multiqc_config: Optional[List[str]] = None + multiqc_config: List[str] = [] """ List of MultiQC config options that be changed """ - files_exist: Optional[List[str]] = None + files_exist: List[str] = [] """ List of files that can not exist """ template_strings: Optional[Union[bool, List[str]]] = None """ List of files that can contain template strings """ + nfcore_components: Optional[bool] = None + """ Include all required files to use nf-core modules and subworkflows """ def __getitem__(self, item: str) -> Any: return getattr(self, item) From 663a9329bed700a484f7b86d1b501ebce9df7b9c Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 21 Oct 2024 14:15:37 +0200 Subject: [PATCH 032/164] handle new lint config structure --- nf_core/pipelines/lint/__init__.py | 12 +++++------- nf_core/utils.py | 1 + 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/nf_core/pipelines/lint/__init__.py b/nf_core/pipelines/lint/__init__.py index 8cc7c37cb..82361565f 100644 --- a/nf_core/pipelines/lint/__init__.py +++ b/nf_core/pipelines/lint/__init__.py @@ -27,8 +27,8 @@ from nf_core import __version__ from nf_core.components.lint import ComponentLint from nf_core.pipelines.lint_utils import console +from nf_core.utils import NFCoreYamlConfig, NFCoreYamlLintConfig, strip_ansi_codes from nf_core.utils import plural_s as _s -from nf_core.utils import strip_ansi_codes from .actions_awsfulltest import actions_awsfulltest from .actions_awstest import actions_awstest @@ -112,7 +112,7 @@ def __init__( 
# Initialise the parent object super().__init__(wf_path) - self.lint_config = {} + self.lint_config: Optional[NFCoreYamlLintConfig] = None self.release_mode = release_mode self.fail_ignored = fail_ignored self.fail_warned = fail_warned @@ -173,12 +173,11 @@ def _load_lint_config(self) -> bool: Add parsed config to the `self.lint_config` class attribute. """ _, tools_config = nf_core.utils.load_tools_config(self.wf_path) - self.lint_config = getattr(tools_config, "lint", {}) or {} + self.lint_config = getattr(tools_config, "lint", None) or None is_correct = True - # Check if we have any keys that don't match lint test names if self.lint_config is not None: - for k in self.lint_config: + for k, v in self.lint_config: if k != "nfcore_components" and k not in self.lint_tests: # nfcore_components is an exception to allow custom pipelines without nf-core components log.warning(f"Found unrecognised test name '{k}' in pipeline lint config") @@ -594,7 +593,7 @@ def run_linting( lint_obj._load_lint_config() lint_obj.load_pipeline_config() - if "nfcore_components" in lint_obj.lint_config and not lint_obj.lint_config["nfcore_components"]: + if lint_obj.lint_config and lint_obj.lint_config["nfcore_components"] is False: module_lint_obj = None subworkflow_lint_obj = None else: @@ -679,5 +678,4 @@ def run_linting( if len(lint_obj.failed) > 0: if release_mode: log.info("Reminder: Lint tests were run in --release mode.") - return lint_obj, module_lint_obj, subworkflow_lint_obj diff --git a/nf_core/utils.py b/nf_core/utils.py index 283e2e5c7..c3eb91987 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1121,6 +1121,7 @@ class NFCoreYamlLintConfig(BaseModel): template_strings: False template_strings: - docs/my_pdf.pdf + nfcore_components: False """ files_unchanged: List[str] = [] From 53ae873e615478516e30c999c05338b8a5244823 Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 21 Oct 2024 17:05:41 +0200 Subject: [PATCH 033/164] add tests with different valid yaml structures --- 
nf_core/utils.py | 8 +- tests/pipelines/lint/test_files_exist.py | 82 ++++++++++--- tests/pipelines/lint/test_nextflow_config.py | 20 ++-- tests/pipelines/lint/test_nfcore_yml.py | 112 ++++++++++++++---- tests/pipelines/lint/test_template_strings.py | 28 ++++- 5 files changed, 196 insertions(+), 54 deletions(-) diff --git a/nf_core/utils.py b/nf_core/utils.py index c3eb91987..03112dd1d 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1126,17 +1126,17 @@ class NFCoreYamlLintConfig(BaseModel): files_unchanged: List[str] = [] """ List of files that should not be changed """ - modules_config: Optional[Union[bool, List[str]]] = None + modules_config: Optional[Union[bool, List[str]]] = [] """ List of modules that should not be changed """ - merge_markers: Optional[Union[bool, List[str]]] = None + merge_markers: Optional[Union[bool, List[str]]] = [] """ List of files that should not contain merge markers """ - nextflow_config: Optional[Union[bool, List[Union[str, Dict[str, List[str]]]]]] = None + nextflow_config: Optional[Union[bool, List[Union[str, Dict[str, List[str]]]]]] = [] """ List of Nextflow config files that should not be changed """ multiqc_config: List[str] = [] """ List of MultiQC config options that be changed """ files_exist: List[str] = [] """ List of files that can not exist """ - template_strings: Optional[Union[bool, List[str]]] = None + template_strings: Optional[Union[bool, List[str]]] = [] """ List of files that can contain template strings """ nfcore_components: Optional[bool] = None """ Include all required files to use nf-core modules and subworkflows """ diff --git a/tests/pipelines/lint/test_files_exist.py b/tests/pipelines/lint/test_files_exist.py index 97dd346cd..eb1ba9a17 100644 --- a/tests/pipelines/lint/test_files_exist.py +++ b/tests/pipelines/lint/test_files_exist.py @@ -1,5 +1,7 @@ from pathlib import Path +from ruamel.yaml import YAML + import nf_core.pipelines.lint from ..test_lint import TestLint @@ -9,17 +11,17 @@ class 
TestLintFilesExist(TestLint): def setUp(self) -> None: super().setUp() self.new_pipeline = self._make_pipeline_copy() + self.lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) def test_files_exist_missing_config(self): """Lint test: critical files missing FAIL""" Path(self.new_pipeline, "CHANGELOG.md").unlink() - lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) - lint_obj._load() - lint_obj.nf_config["manifest.name"] = "nf-core/testpipeline" + assert self.lint_obj._load() + self.lint_obj.nf_config["manifest.name"] = "nf-core/testpipeline" - results = lint_obj.files_exist() + results = self.lint_obj.files_exist() assert "File not found: `CHANGELOG.md`" in results["failed"] def test_files_exist_missing_main(self): @@ -27,10 +29,9 @@ def test_files_exist_missing_main(self): Path(self.new_pipeline, "main.nf").unlink() - lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) - lint_obj._load() + assert self.lint_obj._load() - results = lint_obj.files_exist() + results = self.lint_obj.files_exist() assert "File not found: `main.nf`" in results["warned"] def test_files_exist_deprecated_file(self): @@ -39,19 +40,17 @@ def test_files_exist_deprecated_file(self): nf = Path(self.new_pipeline, "parameters.settings.json") nf.touch() - lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) - lint_obj._load() + assert self.lint_obj._load() - results = lint_obj.files_exist() + results = self.lint_obj.files_exist() assert results["failed"] == ["File must be removed: `parameters.settings.json`"] def test_files_exist_pass(self): """Lint check should pass if all files are there""" - lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) - lint_obj._load() + assert self.lint_obj._load() - results = lint_obj.files_exist() + results = self.lint_obj.files_exist() assert results["failed"] == [] def test_files_exist_pass_conditional_nfschema(self): @@ -62,9 +61,58 @@ def test_files_exist_pass_conditional_nfschema(self): with 
open(Path(self.new_pipeline, "nextflow.config"), "w") as f: f.write(config) - lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) - lint_obj._load() - lint_obj.nf_config["manifest.schema"] = "nf-core" - results = lint_obj.files_exist() + assert self.lint_obj._load() + self.lint_obj.nf_config["manifest.schema"] = "nf-core" + results = self.lint_obj.files_exist() assert results["failed"] == [] assert results["ignored"] == [] + + def test_files_exists_pass_nf_core_yml_config(self): + """Check if linting passes with a valid nf-core.yml config""" + valid_yaml = """ + files_exist: + - .github/CONTRIBUTING.md + - CITATIONS.md + """ + yaml = YAML() + nf_core_yml_path = Path(self.new_pipeline, ".nf-core.yml") + nf_core_yml = yaml.load(nf_core_yml_path) + + nf_core_yml["lint"] = yaml.load(valid_yaml) + yaml.dump(nf_core_yml, nf_core_yml_path) + + self.lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + assert self.lint_obj._load() + + results = self.lint_obj.files_exist() + assert results["failed"] == [] + assert "File is ignored: `.github/CONTRIBUTING.md`" in results["ignored"] + assert "File is ignored: `CITATIONS.md`" in results["ignored"] + + def test_files_exists_fail_nf_core_yml_config(self): + """Check if linting fails with a valid nf-core.yml config""" + valid_yaml = """ + files_exist: + - CITATIONS.md + """ + + # remove CITATIONS.md + Path(self.new_pipeline, "CITATIONS.md").unlink() + assert self.lint_obj._load() + # test first if linting fails correctly + results = self.lint_obj.files_exist() + assert "File not found: `CITATIONS.md`" in results["failed"] + + yaml = YAML() + nf_core_yml_path = Path(self.new_pipeline, ".nf-core.yml") + nf_core_yml = yaml.load(nf_core_yml_path) + + nf_core_yml["lint"] = yaml.load(valid_yaml) + yaml.dump(nf_core_yml, nf_core_yml_path) + + self.lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + assert self.lint_obj._load() + + results = self.lint_obj.files_exist() + assert results["failed"] 
== [] + assert "File is ignored: `CITATIONS.md`" in results["ignored"] diff --git a/tests/pipelines/lint/test_nextflow_config.py b/tests/pipelines/lint/test_nextflow_config.py index a655fb8ac..f8c3c1f31 100644 --- a/tests/pipelines/lint/test_nextflow_config.py +++ b/tests/pipelines/lint/test_nextflow_config.py @@ -6,7 +6,6 @@ import nf_core.pipelines.create.create import nf_core.pipelines.lint -from nf_core.utils import NFCoreYamlConfig from ..test_lint import TestLint @@ -125,23 +124,30 @@ def test_allow_params_reference_in_main_nf(self): def test_default_values_ignored(self): """Test ignoring linting of default values.""" + valid_yaml = """ + nextflow_config: + - manifest.name + - config_defaults: + - params.custom_config_version + """ # Add custom_config_version to the ignore list nf_core_yml_path = Path(self.new_pipeline) / ".nf-core.yml" - nf_core_yml = NFCoreYamlConfig( - repository_type="pipeline", - lint={"nextflow_config": [{"config_defaults": ["params.custom_config_version"]}]}, - ) + + with open(nf_core_yml_path) as f: + nf_core_yml = yaml.safe_load(f) + nf_core_yml["lint"] = yaml.safe_load(valid_yaml) with open(nf_core_yml_path, "w") as f: - yaml.dump(nf_core_yml.model_dump(), f) + yaml.dump(nf_core_yml, f) lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) lint_obj.load_pipeline_config() lint_obj._load_lint_config() result = lint_obj.nextflow_config() assert len(result["failed"]) == 0 - assert len(result["ignored"]) == 1 + assert len(result["ignored"]) == 2 assert "Config default value correct: params.custom_config_version" not in str(result["passed"]) assert "Config default ignored: params.custom_config_version" in str(result["ignored"]) + assert "Config variable ignored: `manifest.name`" in str(result["ignored"]) def test_default_values_float(self): """Test comparing two float values.""" diff --git a/tests/pipelines/lint/test_nfcore_yml.py b/tests/pipelines/lint/test_nfcore_yml.py index 955c00da8..780e21241 100644 --- 
a/tests/pipelines/lint/test_nfcore_yml.py +++ b/tests/pipelines/lint/test_nfcore_yml.py @@ -1,8 +1,9 @@ -import re from pathlib import Path -import nf_core.pipelines.create +from ruamel.yaml import YAML + import nf_core.pipelines.lint +from nf_core.utils import NFCoreYamlConfig from ..test_lint import TestLint @@ -11,11 +12,14 @@ class TestLintNfCoreYml(TestLint): def setUp(self) -> None: super().setUp() self.new_pipeline = self._make_pipeline_copy() - self.nf_core_yml = Path(self.new_pipeline) / ".nf-core.yml" + self.nf_core_yml_path = Path(self.new_pipeline) / ".nf-core.yml" + self.yaml = YAML() + self.nf_core_yml: NFCoreYamlConfig = self.yaml.load(self.nf_core_yml_path) + self.lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) def test_nfcore_yml_pass(self): """Lint test: nfcore_yml - PASS""" - self.lint_obj._load() + assert self.lint_obj._load() results = self.lint_obj.nfcore_yml() assert "Repository type in `.nf-core.yml` is valid" in str(results["passed"]) @@ -27,14 +31,10 @@ def test_nfcore_yml_pass(self): def test_nfcore_yml_fail_repo_type(self): """Lint test: nfcore_yml - FAIL - repository type not set""" - with open(self.nf_core_yml) as fh: - content = fh.read() - new_content = content.replace("repository_type: pipeline", "repository_type: foo") - with open(self.nf_core_yml, "w") as fh: - fh.write(new_content) - lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) - lint_obj._load() - results = lint_obj.nfcore_yml() + self.nf_core_yml["repository_type"] = "foo" + self.yaml.dump(self.nf_core_yml, self.nf_core_yml_path) + assert self.lint_obj._load() + results = self.lint_obj.nfcore_yml() assert "Repository type in `.nf-core.yml` is not valid." 
in str(results["failed"]) assert len(results.get("warned", [])) == 0 assert len(results.get("passed", [])) >= 0 @@ -43,15 +43,87 @@ def test_nfcore_yml_fail_repo_type(self): def test_nfcore_yml_fail_nfcore_version(self): """Lint test: nfcore_yml - FAIL - nf-core version not set""" - with open(self.nf_core_yml) as fh: - content = fh.read() - new_content = re.sub(r"nf_core_version:.+", "nf_core_version: foo", content) - with open(self.nf_core_yml, "w") as fh: - fh.write(new_content) - lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) - lint_obj._load() - results = lint_obj.nfcore_yml() + self.nf_core_yml["nf_core_version"] = "foo" + self.yaml.dump(self.nf_core_yml, self.nf_core_yml_path) + assert self.lint_obj._load() + results = self.lint_obj.nfcore_yml() assert "nf-core version in `.nf-core.yml` is not set to the latest version." in str(results["warned"]) assert len(results.get("failed", [])) == 0 assert len(results.get("passed", [])) >= 0 assert len(results.get("ignored", [])) == 0 + + def test_nfcore_yml_nested_lint_config(self) -> None: + """Lint test: nfcore_yml with nested lint config - PASS""" + valid_yaml = """ + lint: + files_unchanged: + - .github/workflows/branch.yml + # modules_config: False + modules_config: + - fastqc + # merge_markers: False + merge_markers: + - docs/my_pdf.pdf + # nextflow_config: False + nextflow_config: + - manifest.name + - config_defaults: + - params.annotation_db + - params.multiqc_comment_headers + - params.custom_table_headers + multiqc_config: + - report_section_order + - report_comment + files_exist: + - .github/CONTRIBUTING.md + - CITATIONS.md + # template_strings: False + template_strings: + - docs/my_pdf.pdf + """ + self.nf_core_yml["lint"] = self.yaml.load(valid_yaml) + self.yaml.dump(self.nf_core_yml, self.nf_core_yml_path) + + assert self.lint_obj._load() + results = self.lint_obj.nfcore_yml() + assert len(results.get("failed", [])) == 0 + assert len(results.get("warned", [])) == 0 + assert 
len(results.get("ignored", [])) == 0 + + def test_nfcore_yml_nested_lint_config_bool(self) -> None: + """Lint test: nfcore_yml with nested lint config - PASS""" + valid_yaml = """ + lint: + files_unchanged: + - .github/workflows/branch.yml + modules_config: False + # modules_config: + # - fastqc + merge_markers: False + # merge_markers: + # - docs/my_pdf.pdf + # nextflow_config: False + nextflow_config: + - manifest.name + - config_defaults: + - params.annotation_db + - params.multiqc_comment_headers + - params.custom_table_headers + multiqc_config: + - report_section_order + - report_comment + files_exist: + - .github/CONTRIBUTING.md + - CITATIONS.md + template_strings: False + # template_strings: + # - docs/my_pdf.pdf + """ + self.nf_core_yml["lint"] = self.yaml.load(valid_yaml) + self.yaml.dump(self.nf_core_yml, self.nf_core_yml_path) + + assert self.lint_obj._load() + results = self.lint_obj.nfcore_yml() + assert len(results.get("failed", [])) == 0 + assert len(results.get("warned", [])) == 0 + assert len(results.get("ignored", [])) == 0 diff --git a/tests/pipelines/lint/test_template_strings.py b/tests/pipelines/lint/test_template_strings.py index 406ba63e0..37b760480 100644 --- a/tests/pipelines/lint/test_template_strings.py +++ b/tests/pipelines/lint/test_template_strings.py @@ -1,6 +1,8 @@ import subprocess from pathlib import Path +import yaml + import nf_core.pipelines.create import nf_core.pipelines.lint @@ -11,6 +13,9 @@ class TestLintTemplateStrings(TestLint): def setUp(self) -> None: super().setUp() self.new_pipeline = self._make_pipeline_copy() + self.nf_core_yml_path = Path(self.new_pipeline) / ".nf-core.yml" + with open(self.nf_core_yml_path) as f: + self.nf_core_yml = yaml.safe_load(f) def test_template_strings(self): """Tests finding a template string in a file fails linting.""" @@ -28,9 +33,12 @@ def test_template_strings(self): def test_template_strings_ignored(self): """Tests ignoring template_strings""" # Ignore template_strings test - 
nf_core_yml = Path(self.new_pipeline) / ".nf-core.yml" - with open(nf_core_yml, "w") as f: - f.write("repository_type: pipeline\nlint:\n template_strings: False") + valid_yaml = """ + template_strings: false + """ + self.nf_core_yml["lint"] = yaml.safe_load(valid_yaml) + with open(self.nf_core_yml_path, "w") as f: + yaml.safe_dump(self.nf_core_yml, f) lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) lint_obj._load() lint_obj._lint_pipeline() @@ -43,13 +51,21 @@ def test_template_strings_ignore_file(self): txt_file = Path(self.new_pipeline) / "docs" / "test.txt" with open(txt_file, "w") as f: f.write("my {{ template_string }}") + subprocess.check_output(["git", "add", "docs"], cwd=self.new_pipeline) + # Ignore template_strings test - nf_core_yml = Path(self.new_pipeline) / ".nf-core.yml" - with open(nf_core_yml, "w") as f: - f.write("repository_type: pipeline\nlint:\n template_strings:\n - docs/test.txt") + valid_yaml = """ + template_strings: + - docs/test.txt + """ + self.nf_core_yml["lint"] = yaml.safe_load(valid_yaml) + with open(self.nf_core_yml_path, "w") as f: + yaml.safe_dump(self.nf_core_yml, f) + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) lint_obj._load() result = lint_obj.template_strings() + assert len(result["failed"]) == 0 assert len(result["ignored"]) == 1 From 57f7ca8680e5da788f04cb81cded4de4cbd0ad42 Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 21 Oct 2024 17:33:31 +0200 Subject: [PATCH 034/164] remove last traces of LintConfigType --- nf_core/components/lint/__init__.py | 4 ++-- nf_core/pipelines/create/create.py | 6 +++--- nf_core/utils.py | 3 --- 3 files changed, 5 insertions(+), 8 deletions(-) diff --git a/nf_core/components/lint/__init__.py b/nf_core/components/lint/__init__.py index fcc3b414d..69740135a 100644 --- a/nf_core/components/lint/__init__.py +++ b/nf_core/components/lint/__init__.py @@ -22,7 +22,7 @@ from nf_core.components.nfcore_component import NFCoreComponent from 
nf_core.modules.modules_json import ModulesJson from nf_core.pipelines.lint_utils import console -from nf_core.utils import LintConfigType +from nf_core.utils import NFCoreYamlLintConfig from nf_core.utils import plural_s as _s log = logging.getLogger(__name__) @@ -80,7 +80,7 @@ def __init__( self.failed: List[LintResult] = [] self.all_local_components: List[NFCoreComponent] = [] - self.lint_config: Optional[LintConfigType] = None + self.lint_config: Optional[NFCoreYamlLintConfig] = None self.modules_json: Optional[ModulesJson] = None if self.component_type == "modules": diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 8ab547c1c..98b2b704b 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -8,7 +8,7 @@ import re import shutil from pathlib import Path -from typing import Dict, List, Optional, Tuple, Union, cast +from typing import Dict, List, Optional, Tuple, Union import git import git.config @@ -21,7 +21,7 @@ from nf_core.pipelines.create.utils import CreateConfig, features_yml_path, load_features_yaml from nf_core.pipelines.create_logo import create_logo from nf_core.pipelines.lint_utils import run_prettier_on_file -from nf_core.utils import LintConfigType, NFCoreTemplateConfig +from nf_core.utils import NFCoreTemplateConfig, NFCoreYamlLintConfig log = logging.getLogger(__name__) @@ -395,7 +395,7 @@ def fix_linting(self): # Add the lint content to the preexisting nf-core config config_fn, nf_core_yml = nf_core.utils.load_tools_config(self.outdir) if config_fn is not None and nf_core_yml is not None: - nf_core_yml.lint = cast(LintConfigType, lint_config) + nf_core_yml.lint = NFCoreYamlLintConfig(**lint_config) with open(self.outdir / config_fn, "w") as fh: yaml.dump(nf_core_yml.model_dump(), fh, default_flow_style=False, sort_keys=False) diff --git a/nf_core/utils.py b/nf_core/utils.py index 03112dd1d..1b0d491e2 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1088,9 +1088,6 
@@ def get(self, item: str, default: Any = None) -> Any: return getattr(self, item, default) -LintConfigType = Optional[Dict[str, Union[List[str], List[Union[str, Dict[str, List[str]]]], bool]]] - - class NFCoreYamlLintConfig(BaseModel): """ schema for linting config in `.nf-core.yml` should cover: From e743185cf8d388bb18032aa9ebac5aca363d0da9 Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 21 Oct 2024 17:48:58 +0200 Subject: [PATCH 035/164] fix incorrect type --- nf_core/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/utils.py b/nf_core/utils.py index 1b0d491e2..ac886755f 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1129,7 +1129,7 @@ class NFCoreYamlLintConfig(BaseModel): """ List of files that should not contain merge markers """ nextflow_config: Optional[Union[bool, List[Union[str, Dict[str, List[str]]]]]] = [] """ List of Nextflow config files that should not be changed """ - multiqc_config: List[str] = [] + multiqc_config: Union[bool, List[str]] = [] """ List of MultiQC config options that be changed """ files_exist: List[str] = [] """ List of files that can not exist """ From 58869a18facf3571c84b6f51a4c4a877f25c251d Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 21 Oct 2024 17:49:25 +0200 Subject: [PATCH 036/164] more type fixes --- nf_core/utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/utils.py b/nf_core/utils.py index ac886755f..4c4d9f73d 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1121,7 +1121,7 @@ class NFCoreYamlLintConfig(BaseModel): nfcore_components: False """ - files_unchanged: List[str] = [] + files_unchanged: Union[bool, List[str]] = [] """ List of files that should not be changed """ modules_config: Optional[Union[bool, List[str]]] = [] """ List of modules that should not be changed """ @@ -1131,7 +1131,7 @@ class NFCoreYamlLintConfig(BaseModel): """ List of Nextflow config files that should not be changed """ multiqc_config: Union[bool, 
List[str]] = [] """ List of MultiQC config options that be changed """ - files_exist: List[str] = [] + files_exist: Union[bool, List[str]] = [] """ List of files that can not exist """ template_strings: Optional[Union[bool, List[str]]] = [] """ List of files that can contain template strings """ From afbd51b8c30b785cd49797434540b1fe2279ac1d Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 22 Oct 2024 13:33:29 +0200 Subject: [PATCH 037/164] add all lint tests to config --- nf_core/utils.py | 38 +++++++++++++++++++++++++++++++++++++- 1 file changed, 37 insertions(+), 1 deletion(-) diff --git a/nf_core/utils.py b/nf_core/utils.py index 4c4d9f73d..f7472ec94 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1136,7 +1136,43 @@ class NFCoreYamlLintConfig(BaseModel): template_strings: Optional[Union[bool, List[str]]] = [] """ List of files that can contain template strings """ nfcore_components: Optional[bool] = None - """ Include all required files to use nf-core modules and subworkflows """ + """ Lint all required files to use nf-core modules and subworkflows """ + actions_ci: Optional[bool] = None + """ Lint all required files to use GitHub Actions CI """ + actions_awstest: Optional[bool] = None + """ Lint all required files to run tests on AWS """ + actions_awsfulltest: Optional[bool] = None + """ Lint all required files to run full tests on AWS """ + readme: Optional[bool] = None + """ Lint the README.md file """ + pipeline_todos: Optional[bool] = None + """ Lint for TODOs statements""" + plugin_includes: Optional[bool] = None + """ Lint for nextflow plugin """ + pipeline_name_conventions: Optional[bool] = None + """ Lint for pipeline name conventions """ + schema_lint: Optional[bool] = None + """ Lint nextflow_schema.json file""" + schema_params: Optional[bool] = None + """ Lint schema for all params """ + system_exit: Optional[bool] = None + """ Lint for System.exit calls in groovy/nextflow code """ + schema_description: Optional[bool] = None + """ Check 
that every parameter in the schema has a description. """ + actions_schema_validation: Optional[bool] = None + """ Lint GitHub Action workflow files with schema""" + modules_json: Optional[bool] = None + """ Lint modules.json file """ + modules_structure: Optional[bool] = None + """ Lint modules structure """ + base_config: Optional[bool] = None + """ Lint base.config file """ + nfcore_yml: Optional[bool] = None + """ Lint nf-core.yml """ + version_consistency: Optional[bool] = None + """ Lint for version consistency """ + included_configs: Optional[bool] = None + """ Lint for included configs """ def __getitem__(self, item: str) -> Any: return getattr(self, item) From 9bf91f51fe36ecbc9baa62122016f2fbda32d788 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 22 Oct 2024 14:14:07 +0200 Subject: [PATCH 038/164] switch all defaults to None and drop them on dump --- nf_core/pipelines/create/create.py | 10 +++++----- nf_core/pipelines/lint/readme.py | 15 +++++++++++++++ nf_core/pipelines/sync.py | 6 +++--- nf_core/utils.py | 20 +++++++++++--------- 4 files changed, 34 insertions(+), 17 deletions(-) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 98b2b704b..776fc8943 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -67,7 +67,7 @@ def __init__( _, config_yml = nf_core.utils.load_tools_config(outdir if outdir else Path().cwd()) # Obtain a CreateConfig object from `.nf-core.yml` config file if config_yml is not None and getattr(config_yml, "template", None) is not None: - self.config = CreateConfig(**config_yml["template"].model_dump()) + self.config = CreateConfig(**config_yml["template"].model_dump(exclude_none=True)) else: raise UserWarning("The template configuration was not provided in '.nf-core.yml'.") # Update the output directory @@ -205,7 +205,7 @@ def obtain_jinja_params_dict( config_yml = None # Set the parameters for the jinja template - jinja_params = self.config.model_dump() 
+ jinja_params = self.config.model_dump(exclude_none=True) # Add template areas to jinja params and create list of areas with paths to skip skip_areas = [] @@ -363,8 +363,8 @@ def render_template(self) -> None: config_fn, config_yml = nf_core.utils.load_tools_config(self.outdir) if config_fn is not None and config_yml is not None: with open(str(config_fn), "w") as fh: - config_yml.template = NFCoreTemplateConfig(**self.config.model_dump()) - yaml.safe_dump(config_yml.model_dump(), fh) + config_yml.template = NFCoreTemplateConfig(**self.config.model_dump(exclude_none=True)) + yaml.safe_dump(config_yml.model_dump(exclude_none=True), fh) log.debug(f"Dumping pipeline template yml to pipeline config file '{config_fn.name}'") # Run prettier on files @@ -397,7 +397,7 @@ def fix_linting(self): if config_fn is not None and nf_core_yml is not None: nf_core_yml.lint = NFCoreYamlLintConfig(**lint_config) with open(self.outdir / config_fn, "w") as fh: - yaml.dump(nf_core_yml.model_dump(), fh, default_flow_style=False, sort_keys=False) + yaml.dump(nf_core_yml.model_dump(exclude_none=True), fh, default_flow_style=False, sort_keys=False) def make_pipeline_logo(self): """Fetch a logo for the new pipeline from the nf-core website""" diff --git a/nf_core/pipelines/lint/readme.py b/nf_core/pipelines/lint/readme.py index bdfad5200..5a10fbfce 100644 --- a/nf_core/pipelines/lint/readme.py +++ b/nf_core/pipelines/lint/readme.py @@ -23,6 +23,21 @@ def readme(self): * If pipeline is released but still contains a 'zenodo.XXXXXXX' tag, the test fails + To disable this test, add the following to the pipeline's ``.nf-core.yml`` file: + + .. code-block:: yaml + lint: + readme: False + + To disable subsets of these tests, add the following to the pipeline's ``.nf-core.yml`` file: + + .. 
code-block:: yaml + + lint: + readme: + nextflow_badge + zenodo_release + """ passed = [] warned = [] diff --git a/nf_core/pipelines/sync.py b/nf_core/pipelines/sync.py index 12b29f15e..896adda94 100644 --- a/nf_core/pipelines/sync.py +++ b/nf_core/pipelines/sync.py @@ -105,7 +105,7 @@ def __init__( with open(template_yaml_path) as f: self.config_yml.template = yaml.safe_load(f) with open(self.config_yml_path, "w") as fh: - yaml.safe_dump(self.config_yml.model_dump(), fh) + yaml.safe_dump(self.config_yml.model_dump(exclude_none=True), fh) log.info(f"Saved pipeline creation settings to '{self.config_yml_path}'") raise SystemExit( f"Please commit your changes and delete the {template_yaml_path} file. Then run the sync command again." @@ -271,7 +271,7 @@ def make_template_pipeline(self): self.config_yml.template.force = True with open(self.config_yml_path, "w") as config_path: - yaml.safe_dump(self.config_yml.model_dump(), config_path) + yaml.safe_dump(self.config_yml.model_dump(exclude_none=True), config_path) try: pipeline_create_obj = nf_core.pipelines.create.create.PipelineCreate( @@ -291,7 +291,7 @@ def make_template_pipeline(self): self.config_yml.template.outdir = "." # Update nf-core version self.config_yml.nf_core_version = nf_core.__version__ - dump_yaml_with_prettier(self.config_yml_path, self.config_yml.model_dump()) + dump_yaml_with_prettier(self.config_yml_path, self.config_yml.model_dump(exclude_none=True)) except Exception as err: # Reset to where you were to prevent git getting messed up. 
diff --git a/nf_core/utils.py b/nf_core/utils.py index f7472ec94..b31863435 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1121,20 +1121,22 @@ class NFCoreYamlLintConfig(BaseModel): nfcore_components: False """ - files_unchanged: Union[bool, List[str]] = [] + files_unchanged: Optional[Union[bool, List[str]]] = None """ List of files that should not be changed """ - modules_config: Optional[Union[bool, List[str]]] = [] + modules_config: Optional[Optional[Union[bool, List[str]]]] = None """ List of modules that should not be changed """ - merge_markers: Optional[Union[bool, List[str]]] = [] + merge_markers: Optional[Optional[Union[bool, List[str]]]] = None """ List of files that should not contain merge markers """ - nextflow_config: Optional[Union[bool, List[Union[str, Dict[str, List[str]]]]]] = [] + nextflow_config: Optional[Optional[Union[bool, List[Union[str, Dict[str, List[str]]]]]]] = None """ List of Nextflow config files that should not be changed """ - multiqc_config: Union[bool, List[str]] = [] + multiqc_config: Optional[Union[bool, List[str]]] = None """ List of MultiQC config options that be changed """ - files_exist: Union[bool, List[str]] = [] + files_exist: Optional[Union[bool, List[str]]] = None """ List of files that can not exist """ - template_strings: Optional[Union[bool, List[str]]] = [] + template_strings: Optional[Optional[Union[bool, List[str]]]] = None """ List of files that can contain template strings """ + readme: Optional[Union[bool, List[str]]] = None + """ Lint the README.md file """ nfcore_components: Optional[bool] = None """ Lint all required files to use nf-core modules and subworkflows """ actions_ci: Optional[bool] = None @@ -1143,8 +1145,6 @@ class NFCoreYamlLintConfig(BaseModel): """ Lint all required files to run tests on AWS """ actions_awsfulltest: Optional[bool] = None """ Lint all required files to run full tests on AWS """ - readme: Optional[bool] = None - """ Lint the README.md file """ pipeline_todos: 
Optional[bool] = None """ Lint for TODOs statements""" plugin_includes: Optional[bool] = None @@ -1178,6 +1178,8 @@ def __getitem__(self, item: str) -> Any: return getattr(self, item) def get(self, item: str, default: Any = None) -> Any: + if getattr(self, item, default) is None: + return default return getattr(self, item, default) def __setitem__(self, item: str, value: Any) -> None: From 61bb733943e5c1216574366da0e6dd89e83dc2c9 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 22 Oct 2024 15:46:09 +0200 Subject: [PATCH 039/164] drop None values when checking for test names --- nf_core/pipelines/lint/__init__.py | 2 +- tests/pipelines/lint/test_files_exist.py | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/nf_core/pipelines/lint/__init__.py b/nf_core/pipelines/lint/__init__.py index 82361565f..f24374384 100644 --- a/nf_core/pipelines/lint/__init__.py +++ b/nf_core/pipelines/lint/__init__.py @@ -178,7 +178,7 @@ def _load_lint_config(self) -> bool: # Check if we have any keys that don't match lint test names if self.lint_config is not None: for k, v in self.lint_config: - if k != "nfcore_components" and k not in self.lint_tests: + if v is not None and k != "nfcore_components" and k not in self.lint_tests: # nfcore_components is an exception to allow custom pipelines without nf-core components log.warning(f"Found unrecognised test name '{k}' in pipeline lint config") is_correct = False diff --git a/tests/pipelines/lint/test_files_exist.py b/tests/pipelines/lint/test_files_exist.py index eb1ba9a17..ebc529247 100644 --- a/tests/pipelines/lint/test_files_exist.py +++ b/tests/pipelines/lint/test_files_exist.py @@ -37,8 +37,7 @@ def test_files_exist_missing_main(self): def test_files_exist_deprecated_file(self): """Check whether deprecated file issues warning""" - nf = Path(self.new_pipeline, "parameters.settings.json") - nf.touch() + Path(self.new_pipeline, "parameters.settings.json").touch() assert self.lint_obj._load() From 
e2ac2b57dc152f46a364dbdac9dce405064c1320 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 22 Oct 2024 16:33:19 +0200 Subject: [PATCH 040/164] fix test_lint tests --- tests/pipelines/test_lint.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/tests/pipelines/test_lint.py b/tests/pipelines/test_lint.py index 9ca29d249..ca7353d50 100644 --- a/tests/pipelines/test_lint.py +++ b/tests/pipelines/test_lint.py @@ -48,7 +48,8 @@ def test_init_pipeline_lint(self): def test_load_lint_config_not_found(self): """Try to load a linting config file that doesn't exist""" assert self.lint_obj._load_lint_config() - assert self.lint_obj.lint_config == {} + assert self.lint_obj.lint_config is not None + assert self.lint_obj.lint_config.model_dump(exclude_none=True) == {} def test_load_lint_config_ignore_all_tests(self): """Try to load a linting config file that ignores all tests""" @@ -64,7 +65,8 @@ def test_load_lint_config_ignore_all_tests(self): # Load the new lint config file and check lint_obj._load_lint_config() - assert sorted(list(lint_obj.lint_config.keys())) == sorted(lint_obj.lint_tests) + assert lint_obj.lint_config is not None + assert sorted(list(lint_obj.lint_config.model_dump(exclude_none=True))) == sorted(lint_obj.lint_tests) # Try running linting and make sure that all tests are ignored lint_obj._lint_pipeline() From 78e82640a0206dcda873f543181b804a59b95a5c Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 23 Oct 2024 15:30:57 +0200 Subject: [PATCH 041/164] move test in correct directory --- tests/{ => pipelines}/test_rocrate.py | 31 ++++++--------------------- 1 file changed, 7 insertions(+), 24 deletions(-) rename tests/{ => pipelines}/test_rocrate.py (73%) diff --git a/tests/test_rocrate.py b/tests/pipelines/test_rocrate.py similarity index 73% rename from tests/test_rocrate.py rename to tests/pipelines/test_rocrate.py index 6defd5d5e..54561af42 100644 --- a/tests/test_rocrate.py +++ b/tests/pipelines/test_rocrate.py @@ -1,8 +1,6 @@ 
"""Test the nf-core pipelines rocrate command""" import shutil -import tempfile -import unittest from pathlib import Path import rocrate.rocrate @@ -13,33 +11,18 @@ import nf_core.pipelines.rocrate import nf_core.utils +from ..test_pipelines import TestPipelines -class TestROCrate(unittest.TestCase): - """Class for lint tests""" - - def setUp(self): - """Function that runs at start of tests for common resources - Use nf_core.create() to make a pipeline that we can use for testing - """ - - self.tmp_dir = Path(tempfile.mkdtemp()) - self.test_pipeline_dir = Path(self.tmp_dir, "nf-core-testpipeline") - self.create_obj = nf_core.pipelines.create.create.PipelineCreate( - name="testpipeline", - description="This is a test pipeline", - author="Test McTestFace", - outdir=str(self.test_pipeline_dir), - version="1.0.0", - no_git=False, - force=True, - ) - self.create_obj.init_pipeline() +class TestROCrate(TestPipelines): + """Class for lint tests""" + def setUp(self) -> None: + super().setUp() # add fake metro map - Path(self.test_pipeline_dir, "docs", "images", "nf-core-testpipeline_metro_map.png").touch() + Path(self.pipeline_dir, "docs", "images", "nf-core-testpipeline_metro_map.png").touch() # commit the changes - repo = Repo(self.test_pipeline_dir) + repo = Repo(self.pipeline_dir) repo.git.add(A=True) repo.index.commit("Initial commit") From b74319de5c2251200b4e0a6f244f2e7f7fa453a7 Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 23 Oct 2024 16:49:59 +0200 Subject: [PATCH 042/164] fix tests --- nf_core/pipelines/rocrate.py | 35 +++++++++------------------------ tests/pipelines/test_rocrate.py | 8 ++++---- 2 files changed, 13 insertions(+), 30 deletions(-) diff --git a/nf_core/pipelines/rocrate.py b/nf_core/pipelines/rocrate.py index de00189a2..04f91480c 100644 --- a/nf_core/pipelines/rocrate.py +++ b/nf_core/pipelines/rocrate.py @@ -7,7 +7,7 @@ import sys from datetime import datetime from pathlib import Path -from typing import Dict, List, Optional, Set, Union +from 
typing import Optional, Set, Union import requests import rocrate.rocrate @@ -90,7 +90,7 @@ def __init__(self, pipeline_dir: Path, version="") -> None: def create_rocrate( self, outdir: Path, json_path: Union[None, Path] = None, zip_path: Union[None, Path] = None - ) -> None: + ) -> bool: """ Create an RO Crate for a pipeline @@ -107,8 +107,6 @@ def create_rocrate( log.error(e) sys.exit(1) - # Change to the pipeline directory, because the RO Crate doesn't handle relative paths well - # Check that the checkout pipeline version is the same as the requested version if self.version != "": if self.version != self.pipeline_obj.nf_config.get("manifest.version"): @@ -132,11 +130,12 @@ def create_rocrate( # Save just the JSON metadata file if json_path is not None: - if json_path.name != "ro-crate-metadata.json": - json_path = json_path / "ro-crate-metadata.json" + if json_path.name == "ro-crate-metadata.json": + json_path = json_path.parent log.info(f"Saving metadata file to '{json_path}'") self.crate.metadata.write(json_path) + return True # Save the whole crate zip file if zip_path is not None: @@ -144,6 +143,10 @@ def create_rocrate( zip_path = zip_path / "ro-crate.crate.zip" log.info(f"Saving zip file '{zip_path}") self.crate.write_zip(zip_path) + return True + if json_path is None and zip_path is None: + log.error("Please provide a path to save the ro-crate file or the zip file.") + return False def make_workflow_rocrate(self) -> None: """ @@ -224,26 +227,6 @@ def set_main_entity(self, main_entity_filename: str): "url", f"https://nf-co.re/{self.crate.name.replace('nf-core/','')}/{url}/", compact=True ) self.crate.mainEntity.append_to("version", self.version, compact=True) - if self.pipeline_obj.schema_obj is not None: - log.debug("input value") - - schema_input = self.pipeline_obj.schema_obj.schema["definitions"]["input_output_options"]["properties"][ - "input" - ] - input_value: Dict[str, Union[str, List[str], bool]] = { - "@id": "#input", - "@type": 
["FormalParameter"], - "default": schema_input.get("default", ""), - "encodingFormat": schema_input.get("mimetype", ""), - "valueRequired": "input" - in self.pipeline_obj.schema_obj.schema["definitions"]["input_output_options"]["required"], - "dct:conformsTo": "https://bioschemas.org/types/FormalParameter/1.0-RELEASE", - } - self.crate.add_jsonld(input_value) - self.crate.mainEntity.append_to( - "input", - {"@id": "#input"}, - ) # get keywords from nf-core website remote_workflows = requests.get("https://nf-co.re/pipelines.json").json()["remote_workflows"] diff --git a/tests/pipelines/test_rocrate.py b/tests/pipelines/test_rocrate.py index 54561af42..2e14878da 100644 --- a/tests/pipelines/test_rocrate.py +++ b/tests/pipelines/test_rocrate.py @@ -36,14 +36,14 @@ def test_rocrate_creation(self): """Run the nf-core rocrate command""" # Run the command - self.rocrate_obj = nf_core.pipelines.rocrate.ROCrate(self.test_pipeline_dir) - self.rocrate_obj.create_rocrate(self.test_pipeline_dir, metadata_path=Path(self.test_pipeline_dir)) + self.rocrate_obj = nf_core.pipelines.rocrate.ROCrate(self.pipeline_dir) + assert self.rocrate_obj.create_rocrate(self.pipeline_dir, self.pipeline_dir) # Check that the crate was created - self.assertTrue(Path(self.test_pipeline_dir, "ro-crate-metadata.json").exists()) + self.assertTrue(Path(self.pipeline_dir, "ro-crate-metadata.json").exists()) # Check that the entries in the crate are correct - crate = rocrate.rocrate.ROCrate(self.test_pipeline_dir) + crate = rocrate.rocrate.ROCrate(self.pipeline_dir) entities = crate.get_entities() # Check if the correct entities are set: From 406bdf8bcba91bbd81f72fbab0c47786770193ba Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 24 Oct 2024 09:08:35 +0200 Subject: [PATCH 043/164] run tests without commiting ro-crate --- .github/workflows/create-test-lint-wf-template.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/create-test-lint-wf-template.yml 
b/.github/workflows/create-test-lint-wf-template.yml index 5871919ca..609236096 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -112,9 +112,9 @@ jobs: run: | cd create-test-lint-wf nf-core --log-file log.txt pipelines create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" --template-yaml template_skip_${{ matrix.TEMPLATE }}.yml + # fake ro-crate touch my-prefix-testpipeline/ro-crate-metadata.json - git commit -am "add ro-crate" - name: run the pipeline run: | From 94bddc22745dd94ce9d7500e035942b448e83e6b Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 24 Oct 2024 09:16:11 +0200 Subject: [PATCH 044/164] add ro-crate creation to pipelines create command --- .github/workflows/create-test-lint-wf-template.yml | 3 --- nf_core/pipelines/create/create.py | 4 ++++ 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index 609236096..f6ae34c90 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -113,9 +113,6 @@ jobs: cd create-test-lint-wf nf-core --log-file log.txt pipelines create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" --template-yaml template_skip_${{ matrix.TEMPLATE }}.yml - # fake ro-crate - touch my-prefix-testpipeline/ro-crate-metadata.json - - name: run the pipeline run: | cd create-test-lint-wf diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 8ab547c1c..13f059c7b 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -21,6 +21,7 @@ from nf_core.pipelines.create.utils import CreateConfig, features_yml_path, load_features_yaml from nf_core.pipelines.create_logo import create_logo from nf_core.pipelines.lint_utils import run_prettier_on_file +from nf_core.rocrate import 
ROCrate from nf_core.utils import LintConfigType, NFCoreTemplateConfig log = logging.getLogger(__name__) @@ -255,6 +256,9 @@ def init_pipeline(self): """Creates the nf-core pipeline.""" # Make the new pipeline self.render_template() + # Create the RO-Crate metadata file + rocrate_obj = ROCrate(self.outdir) + rocrate_obj.create_rocrate(self.outdir, json_path=self.outdir / "ro-crate-metadata.json") # Init the git repository and make the first commit if not self.no_git: From 5c4a5e613b381e2896e1c3266df85683c7ee823d Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 24 Oct 2024 09:20:25 +0200 Subject: [PATCH 045/164] fix command import --- nf_core/pipelines/create/create.py | 2 +- nf_core/pipelines/rocrate.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 13f059c7b..c9be4e7be 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -21,7 +21,7 @@ from nf_core.pipelines.create.utils import CreateConfig, features_yml_path, load_features_yaml from nf_core.pipelines.create_logo import create_logo from nf_core.pipelines.lint_utils import run_prettier_on_file -from nf_core.rocrate import ROCrate +from nf_core.pipelines.rocrate import ROCrate from nf_core.utils import LintConfigType, NFCoreTemplateConfig log = logging.getLogger(__name__) diff --git a/nf_core/pipelines/rocrate.py b/nf_core/pipelines/rocrate.py index 04f91480c..1fe0e4cca 100644 --- a/nf_core/pipelines/rocrate.py +++ b/nf_core/pipelines/rocrate.py @@ -177,14 +177,14 @@ def make_workflow_rocrate(self) -> None: ) # add readme as description - readme = Path("README.md") + readme = self.pipeline_dir / "README.md" try: self.crate.description = readme.read_text() except FileNotFoundError: log.error(f"Could not find README.md in {self.pipeline_dir}") # get license from LICENSE file - license_file = Path("LICENSE") + license_file = self.pipeline_dir / "LICENSE" try: license = 
license_file.read_text() if license.startswith("MIT"): From a679a14c8e0eab470939d0fe0b3148057739830b Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 24 Oct 2024 09:55:31 +0200 Subject: [PATCH 046/164] add rocrate to skip features --- nf_core/pipelines/create/create.py | 8 +++++--- nf_core/pipelines/create/template_features.yml | 10 ++++++++++ nf_core/pipelines/rocrate.py | 9 +++++++-- 3 files changed, 22 insertions(+), 5 deletions(-) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index c9be4e7be..4f6fa1238 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -256,9 +256,6 @@ def init_pipeline(self): """Creates the nf-core pipeline.""" # Make the new pipeline self.render_template() - # Create the RO-Crate metadata file - rocrate_obj = ROCrate(self.outdir) - rocrate_obj.create_rocrate(self.outdir, json_path=self.outdir / "ro-crate-metadata.json") # Init the git repository and make the first commit if not self.no_git: @@ -360,6 +357,11 @@ def render_template(self) -> None: # Make a logo and save it, if it is a nf-core pipeline self.make_pipeline_logo() + if self.config.skip_features is None or "ro-crate" not in self.config.skip_features: + # Create the RO-Crate metadata file + rocrate_obj = ROCrate(self.outdir) + rocrate_obj.create_rocrate(self.outdir, json_path=self.outdir / "ro-crate-metadata.json") + # Update the .nf-core.yml with linting configurations self.fix_linting() diff --git a/nf_core/pipelines/create/template_features.yml b/nf_core/pipelines/create/template_features.yml index 0a3180286..cf1867bff 100644 --- a/nf_core/pipelines/create/template_features.yml +++ b/nf_core/pipelines/create/template_features.yml @@ -432,3 +432,13 @@ seqera_platform: You can extend this file adding any other desired configuration. 
nfcore_pipelines: False custom_pipelines: True +rocrate: + skippable_paths: + - "ro-crate-metadata.json" + short_description: "Add RO-Crate metadata" + description: "Add a RO-Crate metadata file to describe the pipeline" + help_text: | + RO-Crate is a metadata specification to describe research data and software. + This will add a `ro-crate-metadata.json` file to describe the pipeline. + nfcore_pipelines: False + custom_pipelines: True diff --git a/nf_core/pipelines/rocrate.py b/nf_core/pipelines/rocrate.py index 1fe0e4cca..388d681eb 100644 --- a/nf_core/pipelines/rocrate.py +++ b/nf_core/pipelines/rocrate.py @@ -284,7 +284,9 @@ def add_main_authors(self, wf_file: rocrate.model.entity.Entity) -> None: # look at git contributors for author names try: git_contributors: Set[str] = set() - assert self.pipeline_obj.repo is not None # mypy + if self.pipeline_obj.repo is None: + log.info("No git repository found. No git contributors will be added as authors.") + return commits_touching_path = list(self.pipeline_obj.repo.iter_commits(paths="main.nf")) for commit in commits_touching_path: @@ -324,7 +326,10 @@ def add_main_authors(self, wf_file: rocrate.model.entity.Entity) -> None: for author in named_contributors: log.debug(f"Adding author: {author}") - assert self.pipeline_obj.repo is not None # mypy + + if self.pipeline_obj.repo is None: + log.info("No git repository found. 
No git contributors will be added as authors.") + return # get email from git log email = self.pipeline_obj.repo.git.log(f"--author={author}", "--pretty=format:%ae", "-1") orcid = get_orcid(author) From d79ba1009b7659734481c0b6e6600056e4596f74 Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 24 Oct 2024 10:11:28 +0200 Subject: [PATCH 047/164] remove schema loading, because it is not needed anymore --- nf_core/pipelines/rocrate.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/nf_core/pipelines/rocrate.py b/nf_core/pipelines/rocrate.py index 388d681eb..d4e605662 100644 --- a/nf_core/pipelines/rocrate.py +++ b/nf_core/pipelines/rocrate.py @@ -17,7 +17,6 @@ from rocrate.model.person import Person from rocrate.rocrate import ROCrate as BaseROCrate -from nf_core.pipelines.schema import PipelineSchema from nf_core.utils import Pipeline log = logging.getLogger(__name__) @@ -81,10 +80,6 @@ def __init__(self, pipeline_dir: Path, version="") -> None: self.crate: rocrate.rocrate.ROCrate self.pipeline_obj = Pipeline(self.pipeline_dir) self.pipeline_obj._load() - self.pipeline_obj.schema_obj = PipelineSchema() - # Assume we're in a pipeline dir root if schema path not set - self.pipeline_obj.schema_obj.get_schema_path(self.pipeline_dir) - self.pipeline_obj.schema_obj.load_schema() setup_requests_cachedir() From dfb9283c238b1e83b00c5c35d69f2896c615577d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Matthias=20H=C3=B6rtenhuber?= Date: Thu, 24 Oct 2024 09:06:26 +0000 Subject: [PATCH 048/164] update snapshots --- .../test_customisation_help.svg | 256 +++++++++--------- .../test_create_app/test_type_custom.svg | 254 ++++++++--------- 2 files changed, 255 insertions(+), 255 deletions(-) diff --git a/tests/pipelines/__snapshots__/test_create_app/test_customisation_help.svg b/tests/pipelines/__snapshots__/test_create_app/test_customisation_help.svg index 07ab592d2..450f1d303 100644 --- a/tests/pipelines/__snapshots__/test_create_app/test_customisation_help.svg +++ 
b/tests/pipelines/__snapshots__/test_create_app/test_customisation_help.svg @@ -19,257 +19,257 @@ font-weight: 700; } - .terminal-3477423502-matrix { + .terminal-333203530-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3477423502-title { + .terminal-333203530-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3477423502-r1 { fill: #c5c8c6 } -.terminal-3477423502-r2 { fill: #e3e3e3 } -.terminal-3477423502-r3 { fill: #989898 } -.terminal-3477423502-r4 { fill: #e1e1e1 } -.terminal-3477423502-r5 { fill: #4ebf71;font-weight: bold } -.terminal-3477423502-r6 { fill: #1e1e1e } -.terminal-3477423502-r7 { fill: #e2e2e2 } -.terminal-3477423502-r8 { fill: #507bb3 } -.terminal-3477423502-r9 { fill: #808080 } -.terminal-3477423502-r10 { fill: #dde6ed;font-weight: bold } -.terminal-3477423502-r11 { fill: #001541 } -.terminal-3477423502-r12 { fill: #0178d4 } -.terminal-3477423502-r13 { fill: #454a50 } -.terminal-3477423502-r14 { fill: #e2e3e3;font-weight: bold } -.terminal-3477423502-r15 { fill: #000000 } -.terminal-3477423502-r16 { fill: #e4e4e4 } -.terminal-3477423502-r17 { fill: #14191f } -.terminal-3477423502-r18 { fill: #7ae998 } -.terminal-3477423502-r19 { fill: #0a180e;font-weight: bold } -.terminal-3477423502-r20 { fill: #008139 } -.terminal-3477423502-r21 { fill: #fea62b;font-weight: bold } -.terminal-3477423502-r22 { fill: #a7a9ab } -.terminal-3477423502-r23 { fill: #e2e3e3 } + .terminal-333203530-r1 { fill: #c5c8c6 } +.terminal-333203530-r2 { fill: #e3e3e3 } +.terminal-333203530-r3 { fill: #989898 } +.terminal-333203530-r4 { fill: #e1e1e1 } +.terminal-333203530-r5 { fill: #4ebf71;font-weight: bold } +.terminal-333203530-r6 { fill: #1e1e1e } +.terminal-333203530-r7 { fill: #e2e2e2 } +.terminal-333203530-r8 { fill: #507bb3 } +.terminal-333203530-r9 { fill: #808080 } +.terminal-333203530-r10 { fill: #dde6ed;font-weight: bold } +.terminal-333203530-r11 { fill: 
#001541 } +.terminal-333203530-r12 { fill: #14191f } +.terminal-333203530-r13 { fill: #0178d4 } +.terminal-333203530-r14 { fill: #454a50 } +.terminal-333203530-r15 { fill: #e2e3e3;font-weight: bold } +.terminal-333203530-r16 { fill: #000000 } +.terminal-333203530-r17 { fill: #e4e4e4 } +.terminal-333203530-r18 { fill: #7ae998 } +.terminal-333203530-r19 { fill: #0a180e;font-weight: bold } +.terminal-333203530-r20 { fill: #008139 } +.terminal-333203530-r21 { fill: #fea62b;font-weight: bold } +.terminal-333203530-r22 { fill: #a7a9ab } +.terminal-333203530-r23 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core pipelines create + nf-core pipelines create - + - - ⭘nf-core pipelines create â€” Create a new pipeline with the nf-core pipeline templa
 - - -Template features - - -▊▔▔▔▔▔▔▔▔▎ -▊▎        Toggle all features -▊▁▁▁▁▁▁▁▁▎ -▊▔▔▔▔▔▔▔▔▎▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -▊▎        Use a GitHub Create a GitHub  Show help  -▊▁▁▁▁▁▁▁▁▎        repository.repository for the â–â–â–â–â–â–â–â–â–â–â–â–â–â–â–â– -pipeline. - -▊▔▔▔▔▔▔▔▔▎▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -▊▎        Add Github CI testsThe pipeline will  Show help  -▊▁▁▁▁▁▁▁▁▎include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ -actions for Continuous -Integration (CI)  -testing - -▊▔▔▔▔▔▔▔▔▎▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -▊▎        Use reference genomesThe pipeline will be  Hide help  -▊▁▁▁▁▁▁▁▁▎configured to use a â–â–â–â–â–â–â–â–â–â–â–â–â–â–â–â– -copy of the most  -common reference  -genome files from  -iGenomes - - -Nf-core pipelines are configured to use a copy of the most common reference  -genome files. - -By selecting this option, your pipeline will include a configuration file  -specifying the paths to these files. - -The required code to use these files will also be included in the template.  -When the pipeline user provides an appropriate genome key, the pipeline will -automatically download the required reference files. -▅▅ -For more information about reference genomes in nf-core pipelines, see the  - - -▊▔▔▔▔▔▔▔▔▎▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -▊▎        Add Github badgesThe README.md file of  Show help  -▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Back  Continue  -▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - d Toggle dark mode  q Quit  a Toggle all  + + ⭘nf-core pipelines create â€” Create a new pipeline with the nf-core pipeline templa
 + + +Template features + + +▊▔▔▔▔▔▔▔▔▎ +▊▎        Toggle all features +▊▁▁▁▁▁▁▁▁▎ +▊▔▔▔▔▔▔▔▔▎▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +▊▎        Use a GitHub Create a GitHub  Show help  +▊▁▁▁▁▁▁▁▁▎        repository.repository for the â–â–â–â–â–â–â–â–â–â–â–â–â–â–â–â– +pipeline. + +▊▔▔▔▔▔▔▔▔▎▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +▊▎        Add Github CI testsThe pipeline will  Show help  +▊▁▁▁▁▁▁▁▁▎include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +actions for Continuous▃▃ +Integration (CI)  +testing + +▊▔▔▔▔▔▔▔▔▎▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +▊▎        Use reference genomesThe pipeline will be  Hide help  +▊▁▁▁▁▁▁▁▁▎configured to use a â–â–â–â–â–â–â–â–â–â–â–â–â–â–â–â– +copy of the most  +common reference  +genome files from  +iGenomes + + +Nf-core pipelines are configured to use a copy of the most common reference  +genome files. + +By selecting this option, your pipeline will include a configuration file  +specifying the paths to these files. + +The required code to use these files will also be included in the template.  +When the pipeline user provides an appropriate genome key, the pipeline will +automatically download the required reference files. 
+▅▅ +For more information about reference genomes in nf-core pipelines, see the  + + +▊▔▔▔▔▔▔▔▔▎▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +▊▎        Add Github badgesThe README.md file of  Show help  +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Back  Continue  +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + d Toggle dark mode  q Quit  a Toggle all  diff --git a/tests/pipelines/__snapshots__/test_create_app/test_type_custom.svg b/tests/pipelines/__snapshots__/test_create_app/test_type_custom.svg index cc34c9253..6e178ba84 100644 --- a/tests/pipelines/__snapshots__/test_create_app/test_type_custom.svg +++ b/tests/pipelines/__snapshots__/test_create_app/test_type_custom.svg @@ -19,256 +19,256 @@ font-weight: 700; } - .terminal-829252251-matrix { + .terminal-3425198753-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-829252251-title { + .terminal-3425198753-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-829252251-r1 { fill: #c5c8c6 } -.terminal-829252251-r2 { fill: #e3e3e3 } -.terminal-829252251-r3 { fill: #989898 } -.terminal-829252251-r4 { fill: #e1e1e1 } -.terminal-829252251-r5 { fill: #4ebf71;font-weight: bold } -.terminal-829252251-r6 { fill: #1e1e1e } -.terminal-829252251-r7 { fill: #0178d4 } -.terminal-829252251-r8 { fill: #e2e2e2 } -.terminal-829252251-r9 { fill: #507bb3 } -.terminal-829252251-r10 { fill: #808080 } -.terminal-829252251-r11 { fill: #dde6ed;font-weight: bold } -.terminal-829252251-r12 { fill: #001541 } -.terminal-829252251-r13 { fill: #14191f } -.terminal-829252251-r14 { fill: #454a50 } -.terminal-829252251-r15 { fill: #7ae998 } -.terminal-829252251-r16 { fill: #e2e3e3;font-weight: bold } -.terminal-829252251-r17 { fill: #0a180e;font-weight: bold } -.terminal-829252251-r18 { fill: #000000 } -.terminal-829252251-r19 { fill: #008139 } -.terminal-829252251-r20 { fill: #fea62b;font-weight: bold } -.terminal-829252251-r21 { fill: #a7a9ab } -.terminal-829252251-r22 { fill: #e2e3e3 } 
+ .terminal-3425198753-r1 { fill: #c5c8c6 } +.terminal-3425198753-r2 { fill: #e3e3e3 } +.terminal-3425198753-r3 { fill: #989898 } +.terminal-3425198753-r4 { fill: #e1e1e1 } +.terminal-3425198753-r5 { fill: #4ebf71;font-weight: bold } +.terminal-3425198753-r6 { fill: #1e1e1e } +.terminal-3425198753-r7 { fill: #0178d4 } +.terminal-3425198753-r8 { fill: #e2e2e2 } +.terminal-3425198753-r9 { fill: #507bb3 } +.terminal-3425198753-r10 { fill: #808080 } +.terminal-3425198753-r11 { fill: #dde6ed;font-weight: bold } +.terminal-3425198753-r12 { fill: #001541 } +.terminal-3425198753-r13 { fill: #14191f } +.terminal-3425198753-r14 { fill: #454a50 } +.terminal-3425198753-r15 { fill: #7ae998 } +.terminal-3425198753-r16 { fill: #e2e3e3;font-weight: bold } +.terminal-3425198753-r17 { fill: #0a180e;font-weight: bold } +.terminal-3425198753-r18 { fill: #000000 } +.terminal-3425198753-r19 { fill: #008139 } +.terminal-3425198753-r20 { fill: #fea62b;font-weight: bold } +.terminal-3425198753-r21 { fill: #a7a9ab } +.terminal-3425198753-r22 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core pipelines create + nf-core pipelines create - + - - ⭘nf-core pipelines create â€” Create a new pipeline with the nf-core pipeline templa
 - - -Template features - - -▊▔▔▔▔▔▔▔▔▎ -▊▎        Toggle all features -▊▁▁▁▁▁▁▁▁▎ -▊▔▔▔▔▔▔▔▔▎▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -▊▎        Use a GitHub Create a GitHub  Show help  -▊▁▁▁▁▁▁▁▁▎        repository.repository for the â–â–â–â–â–â–â–â–â–â–â–â–â–â–â–â– -pipeline. - -▊▔▔▔▔▔▔▔▔▎▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -▊▎        Add Github CI testsThe pipeline will  Show help  -▊▁▁▁▁▁▁▁▁▎include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ -actions for Continuous -Integration (CI) â–â– -testing - -▊▔▔▔▔▔▔▔▔▎▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -▊▎        Use reference genomesThe pipeline will be  Show help  -▊▁▁▁▁▁▁▁▁▎configured to use a â–â–â–â–â–â–â–â–â–â–â–â–â–â–â–â– -copy of the most  -common reference  -genome files from  -iGenomes - -▊▔▔▔▔▔▔▔▔▎▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -▊▎        Add Github badgesThe README.md file of  Show help  -▊▁▁▁▁▁▁▁▁▎the pipeline will â–â–â–â–â–â–â–â–â–â–â–â–â–â–â–â– -include GitHub badges - -▊▔▔▔▔▔▔▔▔▎▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -▊▎        Add configuration The pipeline will  Show help  -▊▁▁▁▁▁▁▁▁▎        filesinclude configuration â–â–â–â–â–â–â–â–â–â–â–â–â–â–â–â– -profiles containing  -custom parameters  -required to run  -nf-core pipelines at  -different institutions - -▊▔▔▔▔▔▔▔▔▎▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -▊▎        Use code lintersThe pipeline will  Show help  -▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Back  Continue  -▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - d Toggle dark mode  q Quit  a Toggle all  + + ⭘nf-core pipelines create â€” Create a new pipeline with the nf-core pipeline templa
 + + +Template features + + +▊▔▔▔▔▔▔▔▔▎ +▊▎        Toggle all features +▊▁▁▁▁▁▁▁▁▎ +▊▔▔▔▔▔▔▔▔▎▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +▊▎        Use a GitHub Create a GitHub  Show help  +▊▁▁▁▁▁▁▁▁▎        repository.repository for the â–â–â–â–â–â–â–â–â–â–â–â–â–â–â–â– +pipeline. + +▊▔▔▔▔▔▔▔▔▎▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +▊▎        Add Github CI testsThe pipeline will  Show help  +▊▁▁▁▁▁▁▁▁▎include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +actions for Continuous +Integration (CI) â–„â–„ +testing + +▊▔▔▔▔▔▔▔▔▎▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +▊▎        Use reference genomesThe pipeline will be  Show help  +▊▁▁▁▁▁▁▁▁▎configured to use a â–â–â–â–â–â–â–â–â–â–â–â–â–â–â–â– +copy of the most  +common reference  +genome files from  +iGenomes + +▊▔▔▔▔▔▔▔▔▎▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +▊▎        Add Github badgesThe README.md file of  Show help  +▊▁▁▁▁▁▁▁▁▎the pipeline will â–â–â–â–â–â–â–â–â–â–â–â–â–â–â–â– +include GitHub badges + +▊▔▔▔▔▔▔▔▔▎▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +▊▎        Add configuration The pipeline will  Show help  +▊▁▁▁▁▁▁▁▁▎        filesinclude configuration â–â–â–â–â–â–â–â–â–â–â–â–â–â–â–â– +profiles containing  +custom parameters  +required to run  +nf-core pipelines at  +different institutions + +▊▔▔▔▔▔▔▔▔▎▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +▊▎        Use code lintersThe pipeline will  Show help  +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Back  Continue  +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + d Toggle dark mode  q Quit  a Toggle all  From 2696940ee2a86a79c6aa6d210a2a0a0e129766fd Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 24 Oct 2024 12:22:48 +0200 Subject: [PATCH 049/164] try to fix coverage report generation --- .github/workflows/pytest.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index a29a6970e..7d8d3ea6f 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -132,7 +132,7 @@ jobs: - name: Test with pytest run: | - python3 -m pytest tests/${{matrix.test}} --color=yes --cov --durations=0 
&& exit_code=0|| exit_code=$? + python3 -m pytest tests/${{matrix.test}} --color=yes --cov --cov-config=.coveragerc --durations=0 && exit_code=0|| exit_code=$? # don't fail if no tests were collected, e.g. for test_licence.py if [ "${exit_code}" -eq 5 ]; then echo "No tests were collected" From 7cd3dea18fed9af950c3a2249ab54797134e58e8 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 15 Nov 2024 16:42:14 +0100 Subject: [PATCH 050/164] update and fix swf patch tests --- nf_core/components/patch.py | 14 +- nf_core/components/remove.py | 2 +- nf_core/components/update.py | 11 +- nf_core/modules/lint/main_nf.py | 1 + nf_core/modules/lint/meta_yml.py | 2 + nf_core/modules/lint/module_changes.py | 1 + nf_core/modules/lint/module_patch.py | 1 + nf_core/modules/modules_differ.py | 20 ++- nf_core/modules/modules_json.py | 50 +++--- tests/modules/test_modules_json.py | 15 +- tests/subworkflows/patch.py | 212 ------------------------- tests/subworkflows/test_patch.py | 204 ++++++++++++++++++++++++ 12 files changed, 281 insertions(+), 252 deletions(-) delete mode 100644 tests/subworkflows/patch.py create mode 100644 tests/subworkflows/test_patch.py diff --git a/nf_core/components/patch.py b/nf_core/components/patch.py index 41fccd8be..77717877f 100644 --- a/nf_core/components/patch.py +++ b/nf_core/components/patch.py @@ -65,7 +65,9 @@ def patch(self, component=None): component_fullname = str(Path(self.component_type, self.modules_repo.repo_path, component)) # Verify that the component has an entry in the modules.json file - if not self.modules_json.module_present(component, self.modules_repo.remote_url, component_dir): + if not self.modules_json.component_present( + component, self.modules_repo.remote_url, component_dir, self.component_type + ): raise UserWarning( f"The '{component_fullname}' {self.component_type[:-1]} does not have an entry in the 'modules.json' file. 
Cannot compute patch" ) @@ -127,7 +129,9 @@ def patch(self, component=None): raise UserWarning(f"{self.component_type[:-1]} '{component_fullname}' is unchanged. No patch to compute") # Write changes to modules.json - self.modules_json.add_patch_entry(component, self.modules_repo.remote_url, component_dir, patch_relpath) + self.modules_json.add_patch_entry( + self.component_type, component, self.modules_repo.remote_url, component_dir, patch_relpath + ) log.debug(f"Wrote patch path for {self.component_type[:-1]} {component} to modules.json") # Show the changes made to the module @@ -166,7 +170,9 @@ def remove(self, component): component_fullname = str(Path(self.component_type, component_dir, component)) # Verify that the component has an entry in the modules.json file - if not self.modules_json.module_present(component, self.modules_repo.remote_url, component_dir): + if not self.modules_json.component_present( + component, self.modules_repo.remote_url, component_dir, self.component_type + ): raise UserWarning( f"The '{component_fullname}' {self.component_type[:-1]} does not have an entry in the 'modules.json' file. 
Cannot compute patch" ) @@ -202,7 +208,7 @@ def remove(self, component): # Try to apply the patch in reverse and move resulting files to module dir temp_component_dir = self.modules_json.try_apply_patch_reverse( - component, self.modules_repo.repo_path, patch_relpath, component_path + self.component_type, component, self.modules_repo.repo_path, patch_relpath, component_path ) try: for file in Path(temp_component_dir).glob("*"): diff --git a/nf_core/components/remove.py b/nf_core/components/remove.py index c2c584391..37208629c 100644 --- a/nf_core/components/remove.py +++ b/nf_core/components/remove.py @@ -68,7 +68,7 @@ def remove(self, component, removed_by=None, removed_components=None, force=Fals if not component_dir.exists(): log.error(f"Installation directory '{component_dir}' does not exist.") - if modules_json.module_present(component, self.modules_repo.remote_url, repo_path): + if modules_json.component_present(component, self.modules_repo.remote_url, repo_path, self.component_type): log.error(f"Found entry for '{component}' in 'modules.json'. Removing...") modules_json.remove_entry(self.component_type, component, self.modules_repo.remote_url, repo_path) return False diff --git a/nf_core/components/update.py b/nf_core/components/update.py index bf176fb6d..7edb0ffd0 100644 --- a/nf_core/components/update.py +++ b/nf_core/components/update.py @@ -810,7 +810,9 @@ def try_apply_patch( shutil.copytree(component_install_dir, temp_component_dir) try: - new_files = ModulesDiffer.try_apply_patch(component, repo_path, patch_path, temp_component_dir) + new_files = ModulesDiffer.try_apply_patch( + self.component_type, component, repo_path, patch_path, temp_component_dir + ) except LookupError: # Patch failed. 
Save the patch file by moving to the install dir shutil.move(patch_path, Path(component_install_dir, patch_path.relative_to(component_dir))) @@ -848,7 +850,12 @@ def try_apply_patch( # Add the patch file to the modules.json file self.modules_json.add_patch_entry( - component, self.modules_repo.remote_url, repo_path, patch_relpath, write_file=write_file + self.component_type, + component, + self.modules_repo.remote_url, + repo_path, + patch_relpath, + write_file=write_file, ) return True diff --git a/nf_core/modules/lint/main_nf.py b/nf_core/modules/lint/main_nf.py index df5a48d5b..2b7878ca0 100644 --- a/nf_core/modules/lint/main_nf.py +++ b/nf_core/modules/lint/main_nf.py @@ -51,6 +51,7 @@ def main_nf( lines: List[str] = [] if module.is_patched: lines = ModulesDiffer.try_apply_patch( + module.component_type, module.component_name, module_lint_object.modules_repo.repo_path, module.patch_path, diff --git a/nf_core/modules/lint/meta_yml.py b/nf_core/modules/lint/meta_yml.py index 4ad728d10..59f0f0125 100644 --- a/nf_core/modules/lint/meta_yml.py +++ b/nf_core/modules/lint/meta_yml.py @@ -47,6 +47,7 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None meta_yaml = read_meta_yml(module_lint_object, module) if module.is_patched and module_lint_object.modules_repo.repo_path is not None: lines = ModulesDiffer.try_apply_patch( + module.component_type, module.component_name, module_lint_object.modules_repo.repo_path, module.patch_path, @@ -208,6 +209,7 @@ def read_meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> # Check if we have a patch file, get original file in that case if module.is_patched: lines = ModulesDiffer.try_apply_patch( + module.component_type, module.component_name, module_lint_object.modules_repo.repo_path, module.patch_path, diff --git a/nf_core/modules/lint/module_changes.py b/nf_core/modules/lint/module_changes.py index eb76f4b88..708a2bad6 100644 --- a/nf_core/modules/lint/module_changes.py +++ 
b/nf_core/modules/lint/module_changes.py @@ -31,6 +31,7 @@ def module_changes(module_lint_object, module): shutil.copytree(module.component_dir, tempdir) try: new_lines = ModulesDiffer.try_apply_patch( + module.component_type, module.component_name, module.org, module.patch_path, diff --git a/nf_core/modules/lint/module_patch.py b/nf_core/modules/lint/module_patch.py index 29bf78a66..19c6e76fe 100644 --- a/nf_core/modules/lint/module_patch.py +++ b/nf_core/modules/lint/module_patch.py @@ -162,6 +162,7 @@ def patch_reversible(module_lint_object, module, patch_path): """ try: ModulesDiffer.try_apply_patch( + module.component_type, module.component_name, module_lint_object.modules_repo.repo_path, patch_path, diff --git a/nf_core/modules/modules_differ.py b/nf_core/modules/modules_differ.py index 6b0781bb8..c151fcce7 100644 --- a/nf_core/modules/modules_differ.py +++ b/nf_core/modules/modules_differ.py @@ -466,16 +466,22 @@ def try_apply_single_patch(file_lines, patch, reverse=False): @staticmethod def try_apply_patch( - module: str, repo_path: Union[str, Path], patch_path: Union[str, Path], module_dir: Path, reverse: bool = False + component_type: str, + component: str, + repo_path: Union[str, Path], + patch_path: Union[str, Path], + component_dir: Path, + reverse: bool = False, ) -> Dict[str, List[str]]: """ - Try applying a full patch file to a module + Try applying a full patch file to a module or subworkflow Args: - module (str): Name of the module + component_type (str): The type of component (modules or subworkflows) + component (str): Name of the module or subworkflow repo_path (str): Name of the repository where the module resides patch_path (str): The absolute path to the patch file to be applied - module_dir (Path): The directory containing the module + component_dir (Path): The directory containing the component reverse (bool): Apply the patch in reverse Returns: @@ -485,13 +491,13 @@ def try_apply_patch( Raises: LookupError: If the patch application fails 
in a file """ - module_relpath = Path("modules", repo_path, module) + component_relpath = Path(component_type, repo_path, component) patches = ModulesDiffer.per_file_patch(patch_path) new_files = {} for file, patch in patches.items(): log.debug(f"Applying patch to {file}") - fn = Path(file).relative_to(module_relpath) - file_path = module_dir / fn + fn = Path(file).relative_to(component_relpath) + file_path = component_dir / fn try: with open(file_path) as fh: file_lines = fh.readlines() diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 05c64b6de..5628c7574 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -308,7 +308,9 @@ def determine_branches_and_shas( # If the module/subworkflow is patched patch_file = component_path / f"{component}.diff" if patch_file.is_file(): - temp_module_dir = self.try_apply_patch_reverse(component, install_dir, patch_file, component_path) + temp_module_dir = self.try_apply_patch_reverse( + component_type, component, install_dir, patch_file, component_path + ) correct_commit_sha = self.find_correct_commit_sha( component_type, component, temp_module_dir, modules_repo ) @@ -805,7 +807,7 @@ def remove_entry(self, component_type, name, repo_url, install_dir, removed_by=N return False - def add_patch_entry(self, module_name, repo_url, install_dir, patch_filename, write_file=True): + def add_patch_entry(self, component_type, component_name, repo_url, install_dir, patch_filename, write_file=True): """ Adds (or replaces) the patch entry for a module """ @@ -815,9 +817,11 @@ def add_patch_entry(self, module_name, repo_url, install_dir, patch_filename, wr if repo_url not in self.modules_json["repos"]: raise LookupError(f"Repo '{repo_url}' not present in 'modules.json'") - if module_name not in self.modules_json["repos"][repo_url]["modules"][install_dir]: - raise LookupError(f"Module '{install_dir}/{module_name}' not present in 'modules.json'") - 
self.modules_json["repos"][repo_url]["modules"][install_dir][module_name]["patch"] = str(patch_filename) + if component_name not in self.modules_json["repos"][repo_url][component_type][install_dir]: + raise LookupError( + f"{component_type[:-1].title()} '{install_dir}/{component_name}' not present in 'modules.json'" + ) + self.modules_json["repos"][repo_url][component_type][install_dir][component_name]["patch"] = str(patch_filename) if write_file: self.dump() @@ -858,41 +862,46 @@ def get_patch_fn(self, module_name, repo_url, install_dir): ) return Path(path) if path is not None else None - def try_apply_patch_reverse(self, module, repo_name, patch_relpath, module_dir): + def try_apply_patch_reverse(self, component_type, component, repo_name, patch_relpath, component_dir): """ - Try reverse applying a patch file to the modified module files + Try reverse applying a patch file to the modified module or subworkflow files Args: - module (str): The name of the module - repo_name (str): The name of the repository where the module resides + component_type (str): The type of component [modules, subworkflows] + component (str): The name of the module or subworkflow + repo_name (str): The name of the repository where the component resides patch_relpath (Path | str): The path to patch file in the pipeline - module_dir (Path | str): The module directory in the pipeline + component_dir (Path | str): The component directory in the pipeline Returns: - (Path | str): The path of the folder where the module patched files are + (Path | str): The path of the folder where the component patched files are Raises: LookupError: If patch was not applied """ - module_fullname = str(Path(repo_name, module)) + component_fullname = str(Path(repo_name, component)) patch_path = Path(self.directory / patch_relpath) try: - new_files = ModulesDiffer.try_apply_patch(module, repo_name, patch_path, module_dir, reverse=True) + new_files = ModulesDiffer.try_apply_patch( + component_type, component, 
repo_name, patch_path, component_dir, reverse=True + ) except LookupError as e: - raise LookupError(f"Failed to apply patch in reverse for module '{module_fullname}' due to: {e}") + raise LookupError( + f"Failed to apply patch in reverse for {component_type[:-1]} '{component_fullname}' due to: {e}" + ) # Write the patched files to a temporary directory log.debug("Writing patched files to tmpdir") temp_dir = Path(tempfile.mkdtemp()) - temp_module_dir = temp_dir / module - temp_module_dir.mkdir(parents=True, exist_ok=True) + temp_component_dir = temp_dir / component + temp_component_dir.mkdir(parents=True, exist_ok=True) for file, new_content in new_files.items(): - fn = temp_module_dir / file + fn = temp_component_dir / file with open(fn, "w") as fh: fh.writelines(new_content) - return temp_module_dir + return temp_component_dir def repo_present(self, repo_name): """ @@ -908,20 +917,21 @@ def repo_present(self, repo_name): return repo_name in self.modules_json.get("repos", {}) - def module_present(self, module_name, repo_url, install_dir): + def component_present(self, module_name, repo_url, install_dir, component_type): """ Checks if a module is present in the modules.json file Args: module_name (str): Name of the module repo_url (str): URL of the repository install_dir (str): Name of the directory where modules are installed + component_type (str): Type of component [modules, subworkflows] Returns: (bool): Whether the module is present in the 'modules.json' file """ if self.modules_json is None: self.load() assert self.modules_json is not None # mypy - return module_name in self.modules_json.get("repos", {}).get(repo_url, {}).get("modules", {}).get( + return module_name in self.modules_json.get("repos", {}).get(repo_url, {}).get(component_type, {}).get( install_dir, {} ) diff --git a/tests/modules/test_modules_json.py b/tests/modules/test_modules_json.py index 0368c146c..325a8073b 100644 --- a/tests/modules/test_modules_json.py +++ 
b/tests/modules/test_modules_json.py @@ -175,14 +175,17 @@ def test_mod_json_repo_present(self): assert mod_json_obj.repo_present(NF_CORE_MODULES_REMOTE) is True assert mod_json_obj.repo_present("INVALID_REPO") is False - def test_mod_json_module_present(self): - """Tests the module_present function""" + def test_mod_json_component_present(self): + """Tests the component_present function""" mod_json_obj = ModulesJson(self.pipeline_dir) - assert mod_json_obj.module_present("fastqc", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME) is True - assert mod_json_obj.module_present("INVALID_MODULE", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME) is False - assert mod_json_obj.module_present("fastqc", "INVALID_REPO", "INVALID_DIR") is False - assert mod_json_obj.module_present("INVALID_MODULE", "INVALID_REPO", "INVALID_DIR") is False + assert mod_json_obj.component_present("fastqc", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME, "modules") is True + assert ( + mod_json_obj.component_present("INVALID_MODULE", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME, "modules") + is False + ) + assert mod_json_obj.component_present("fastqc", "INVALID_REPO", "INVALID_DIR", "modules") is False + assert mod_json_obj.component_present("INVALID_MODULE", "INVALID_REPO", "INVALID_DIR", "modules") is False def test_mod_json_get_module_version(self): """Test the get_module_version function""" diff --git a/tests/subworkflows/patch.py b/tests/subworkflows/patch.py deleted file mode 100644 index 66065784c..000000000 --- a/tests/subworkflows/patch.py +++ /dev/null @@ -1,212 +0,0 @@ -import os -import tempfile -from pathlib import Path -from unittest import mock - -import pytest - -import nf_core.components.components_command -import nf_core.subworkflows - -from ..utils import ( - GITLAB_SUBWORKFLOWS_BRANCH, - GITLAB_URL, - GITLAB_REPO -) - -# TODO: #Change this for the correct SUCCEED_SHA -SUCCEED_SHA = "????" 
-ORG_SHA = "002623ccc88a3b0cb302c7d8f13792a95354d9f2" - - -""" -Test the 'nf-core subworkflows patch' command -""" - - -def setup_patch(pipeline_dir, modify_subworkflow): - # Install the subworkflow bam_sort_stats_samtools - install_obj = nf_core.subworkflows.SubworkflowInstall( - pipeline_dir, prompt=False, force=False, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH, sha=ORG_SHA - ) - - # Install the module - install_obj.install("bam_sort_stats_samtools") - - if modify_subworkflow: - # Modify the subworkflow - subworkflow_path = Path(pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") - modify_main_nf(subworkflow_path / "main.nf") - - -def modify_main_nf(path): - """Modify a file to test patch creation""" - with open(path) as fh: - lines = fh.readlines() - # We want a patch file that looks something like: - # - ch_fasta // channel: [ val(meta), path(fasta) ] - for line_index in range(len(lines)): - if lines[line_index] == " ch_fasta // channel: [ val(meta), path(fasta) ]\n": - to_pop = line_index - lines.pop(to_pop) - with open(path, "w") as fh: - fh.writelines(lines) - - -def test_create_patch_no_change(self): - """Test creating a patch when there is a change to the module""" - setup_patch(self.pipeline_dir, False) - - # Try creating a patch file - patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) - with pytest.raises(UserWarning): - patch_obj.patch("bam_sort_stats_samtools") - - subworkflow_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") - - # Check that no patch file has been added to the directory - assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml"} - - -def test_create_patch_change(self): - """Test creating a patch when there is no change to the subworkflow""" - setup_patch(self.pipeline_dir, True) - - # Try creating a patch file - patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, 
GITLAB_SUBWORKFLOWS_BRANCH) - patch_obj.patch("bam_sort_stats_samtools") - - subworkflow_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") - - patch_fn = f"{'-'.join('bam_sort_stats_samtools')}.diff" - # Check that a patch file with the correct name has been created - assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", patch_fn} - - # Check that the correct lines are in the patch file - with open(subworkflow_path / patch_fn) as fh: - patch_lines = fh.readlines() - subworkflow_relpath = subworkflow_path.relative_to(self.pipeline_dir) - assert f"--- {subworkflow_relpath / 'main.nf'}\n" in patch_lines, subworkflow_relpath / "main.nf" - assert f"+++ {subworkflow_relpath / 'main.nf'}\n" in patch_lines - assert "- ch_fasta // channel: [ val(meta), path(fasta) ]" in patch_lines - - -def test_create_patch_try_apply_successful(self): - """Test creating a patch file and applying it to a new version of the the files""" - setup_patch(self.pipeline_dir, True) - subworkflow_relpath = Path("subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") - subworkflow_path = Path(self.pipeline_dir, subworkflow_relpath) - - # Try creating a patch file - patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) - patch_obj.patch("bam_sort_stats_samtools") - - patch_fn = f"{'-'.join('bam_sort_stats_samtools')}.diff" - # Check that a patch file with the correct name has been created - assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", patch_fn} - - update_obj = nf_core.subworkflows.SubworkflowUpdate( - self.pipeline_dir, sha=SUCCEED_SHA, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH - ) - - # Install the new files - install_dir = Path(tempfile.mkdtemp()) - update_obj.install_component_files("bam_sort_stats_samtools", SUCCEED_SHA, update_obj.modules_repo, install_dir) - - # Try applying the patch - subworkflow_install_dir = install_dir / "bam_sort_stats_samtools" - 
patch_relpath = subworkflow_relpath / patch_fn - assert ( - update_obj.try_apply_patch( - "bam_sort_stats_samtools", GITLAB_REPO, patch_relpath, subworkflow_path, subworkflow_install_dir - ) - is True - ) - - # Move the files from the temporary directory - update_obj.move_files_from_tmp_dir("bam_sort_stats_samtools", install_dir, GITLAB_REPO, SUCCEED_SHA) - - # Check that a patch file with the correct name has been created - assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", patch_fn} - - # Check that the correct lines are in the patch file - with open(subworkflow_path / patch_fn) as fh: - patch_lines = fh.readlines() - subworkflow_relpath = subworkflow_path.relative_to(self.pipeline_dir) - assert f"--- {subworkflow_relpath / 'main.nf'}\n" in patch_lines, subworkflow_relpath / "main.nf" - assert f"+++ {subworkflow_relpath / 'main.nf'}\n" in patch_lines - assert "- ch_fasta // channel: [ val(meta), path(fasta) ]" in patch_lines - - # Check that 'main.nf' is updated correctly - with open(subworkflow_path / "main.nf") as fh: - main_nf_lines = fh.readlines() - # These lines should have been removed by the patch - assert " ch_fasta // channel: [ val(meta), path(fasta) ]\n" not in main_nf_lines - - -def test_create_patch_try_apply_failed(self): - """Test creating a patch file and applying it to a new version of the the files""" - setup_patch(self.pipeline_dir, True) - subworkflow_relpath = Path("subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") - subworkflow_path = Path(self.pipeline_dir, subworkflow_relpath) - - # Try creating a patch file - patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) - patch_obj.patch("bam_sort_stats_samtools") - - patch_fn = f"{'-'.join('bam_sort_stats_samtools')}.diff" - # Check that a patch file with the correct name has been created - assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", patch_fn} - - update_obj = 
nf_core.subworkflows.SubworkflowUpdate( - self.pipeline_dir, sha=SUCCEED_SHA, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH - ) - - # Install the new files - install_dir = Path(tempfile.mkdtemp()) - update_obj.install_component_files("bam_sort_stats_samtools", SUCCEED_SHA, update_obj.modules_repo, install_dir) - - # Try applying the patch - subworkflow_install_dir = install_dir / "bam_sort_stats_samtools" - patch_relpath = subworkflow_relpath / patch_fn - assert ( - update_obj.try_apply_patch( - "bam_sort_stats_samtools", GITLAB_REPO, patch_relpath, subworkflow_path, subworkflow_install_dir - ) - is False - ) - - -# TODO: create those two missing tests -def test_create_patch_update_success(self): - """Test creating a patch file and updating a subworkflow when there is a diff conflict""" - - -def test_create_patch_update_fail(self): - """ - Test creating a patch file and the updating the subworkflow - - Should have the same effect as 'test_create_patch_try_apply_successful' - but uses higher level api - """ - - -def test_remove_patch(self): - """Test creating a patch when there is no change to the subworkflow""" - setup_patch(self.pipeline_dir, True) - - # Try creating a patch file - patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) - patch_obj.patch("bam_sort_stats_samtools") - - subworkflow_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") - - patch_fn = f"{'-'.join('bam_sort_stats_samtools')}.diff" - # Check that a patch file with the correct name has been created - assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", patch_fn} - - with mock.patch.object(nf_core.create.questionary, "confirm") as mock_questionary: - mock_questionary.unsafe_ask.return_value = True - patch_obj.remove("bam_sort_stats_samtools") - # Check that the diff file has been removed - assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml"} diff --git 
a/tests/subworkflows/test_patch.py b/tests/subworkflows/test_patch.py new file mode 100644 index 000000000..ba452083f --- /dev/null +++ b/tests/subworkflows/test_patch.py @@ -0,0 +1,204 @@ +import os +import tempfile +from pathlib import Path +from unittest import mock + +import pytest + +import nf_core.components.components_command +import nf_core.components.patch +import nf_core.subworkflows + +from ..test_subworkflows import TestSubworkflows +from ..utils import GITLAB_REPO, GITLAB_SUBWORKFLOWS_BRANCH, GITLAB_URL + +OLD_SHA = "dbb12457e32d3da8eea7dc4ae096201fff4747c5" +SUCCEED_SHA = "0a33e6a0d730ad22a0ec9f7f9a7540af6e943221" +FAIL_SHA = "b6e5e8739de9a1a0c4f85267144e43dbaf8f1461" + + +class TestSubworkflowsPatch(TestSubworkflows): + """ + Test the 'nf-core subworkflows patch' command + """ + + def modify_main_nf(self, path): + """Modify a file to test patch creation""" + with open(path) as fh: + lines = fh.readlines() + # We want a patch file that looks something like: + # - ch_fasta // channel: [ fasta ] + for line_index in range(len(lines)): + if lines[line_index] == " ch_fasta // channel: [ fasta ]\n": + to_pop = line_index + lines.pop(to_pop) + with open(path, "w") as fh: + fh.writelines(lines) + + def setup_patch(self, pipeline_dir, modify_subworkflow): + # Install the subworkflow bam_sort_stats_samtools + install_obj = nf_core.subworkflows.SubworkflowInstall( + pipeline_dir, + prompt=False, + force=False, + remote_url=GITLAB_URL, + branch=GITLAB_SUBWORKFLOWS_BRANCH, + sha=OLD_SHA, + ) + + # Install the module + install_obj.install("bam_sort_stats_samtools") + + if modify_subworkflow: + # Modify the subworkflow + subworkflow_path = Path(pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") + self.modify_main_nf(subworkflow_path / "main.nf") + + def test_create_patch_no_change(self): + """Test creating a patch when there is a change to the module""" + self.setup_patch(self.pipeline_dir, False) + + # Try creating a patch file + patch_obj = 
nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) + with pytest.raises(UserWarning): + patch_obj.patch("bam_sort_stats_samtools") + + subworkflow_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") + + # Check that no patch file has been added to the directory + assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml"} + + def test_create_patch_change(self): + """Test creating a patch when there is no change to the subworkflow""" + self.setup_patch(self.pipeline_dir, True) + + # Try creating a patch file + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) + patch_obj.patch("bam_sort_stats_samtools") + + subworkflow_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") + + # Check that a patch file with the correct name has been created + assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", "bam_sort_stats_samtools.diff"} + + # Check that the correct lines are in the patch file + with open(subworkflow_path / "bam_sort_stats_samtools.diff") as fh: + patch_lines = fh.readlines() + print(patch_lines) + subworkflow_relpath = subworkflow_path.relative_to(self.pipeline_dir) + assert f"--- {subworkflow_relpath / 'main.nf'}\n" in patch_lines, subworkflow_relpath / "main.nf" + assert f"+++ {subworkflow_relpath / 'main.nf'}\n" in patch_lines + assert "- ch_fasta // channel: [ fasta ]\n" in patch_lines + + def test_create_patch_try_apply_successful(self): + """Test creating a patch file and applying it to a new version of the the files""" + self.setup_patch(self.pipeline_dir, True) + subworkflow_relpath = Path("subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") + subworkflow_path = Path(self.pipeline_dir, subworkflow_relpath) + + # Try creating a patch file + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) + 
patch_obj.patch("bam_sort_stats_samtools") + + # Check that a patch file with the correct name has been created + assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", "bam_sort_stats_samtools.diff"} + + update_obj = nf_core.subworkflows.SubworkflowUpdate( + self.pipeline_dir, sha=OLD_SHA, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH + ) + + # Install the new files + install_dir = Path(tempfile.mkdtemp()) + update_obj.install_component_files("bam_sort_stats_samtools", OLD_SHA, update_obj.modules_repo, install_dir) + + # Try applying the patch + subworkflow_install_dir = install_dir / "bam_sort_stats_samtools" + patch_relpath = subworkflow_relpath / "bam_sort_stats_samtools.diff" + assert ( + update_obj.try_apply_patch( + "bam_sort_stats_samtools", GITLAB_REPO, patch_relpath, subworkflow_path, subworkflow_install_dir + ) + is True + ) + + # Move the files from the temporary directory + update_obj.move_files_from_tmp_dir("bam_sort_stats_samtools", install_dir, GITLAB_REPO, OLD_SHA) + + # Check that a patch file with the correct name has been created + assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", "bam_sort_stats_samtools.diff"} + + # Check that the correct lines are in the patch file + with open(subworkflow_path / "bam_sort_stats_samtools.diff") as fh: + patch_lines = fh.readlines() + subworkflow_relpath = subworkflow_path.relative_to(self.pipeline_dir) + assert f"--- {subworkflow_relpath / 'main.nf'}\n" in patch_lines, subworkflow_relpath / "main.nf" + assert f"+++ {subworkflow_relpath / 'main.nf'}\n" in patch_lines + assert "- ch_fasta // channel: [ fasta ]\n" in patch_lines + + # Check that 'main.nf' is updated correctly + with open(subworkflow_path / "main.nf") as fh: + main_nf_lines = fh.readlines() + # These lines should have been removed by the patch + assert "- ch_fasta // channel: [ fasta ]\n" not in main_nf_lines + + def test_create_patch_try_apply_failed(self): + """Test creating a patch file and applying it 
to a new version of the the files""" + self.setup_patch(self.pipeline_dir, True) + subworkflow_relpath = Path("subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") + subworkflow_path = Path(self.pipeline_dir, subworkflow_relpath) + + # Try creating a patch file + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) + patch_obj.patch("bam_sort_stats_samtools") + + # Check that a patch file with the correct name has been created + assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", "bam_sort_stats_samtools.diff"} + + update_obj = nf_core.subworkflows.SubworkflowUpdate( + self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH + ) + + # Install the new files + install_dir = Path(tempfile.mkdtemp()) + update_obj.install_component_files("bam_sort_stats_samtools", FAIL_SHA, update_obj.modules_repo, install_dir) + + # Try applying the patch + subworkflow_install_dir = install_dir / "bam_sort_stats_samtools" + patch_relpath = subworkflow_relpath / "bam_sort_stats_samtools.diff" + assert ( + update_obj.try_apply_patch( + "bam_sort_stats_samtools", GITLAB_REPO, patch_relpath, subworkflow_path, subworkflow_install_dir + ) + is False + ) + + # TODO: create those two missing tests + def test_create_patch_update_success(self): + """Test creating a patch file and updating a subworkflow when there is a diff conflict""" + + def test_create_patch_update_fail(self): + """ + Test creating a patch file and the updating the subworkflow + + Should have the same effect as 'test_create_patch_try_apply_successful' + but uses higher level api + """ + + def test_remove_patch(self): + """Test creating a patch when there is no change to the subworkflow""" + self.setup_patch(self.pipeline_dir, True) + + # Try creating a patch file + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) + patch_obj.patch("bam_sort_stats_samtools") + + 
subworkflow_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") + + # Check that a patch file with the correct name has been created + assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", "bam_sort_stats_samtools.diff"} + + with mock.patch.object(nf_core.components.patch.questionary, "confirm") as mock_questionary: + mock_questionary.unsafe_ask.return_value = True + patch_obj.remove("bam_sort_stats_samtools") + # Check that the diff file has been removed + assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml"} From dd477ab82f7a162e1515bc0f43a72af7147d3618 Mon Sep 17 00:00:00 2001 From: Edmund Miller Date: Wed, 20 Nov 2024 22:50:15 -0600 Subject: [PATCH 051/164] build: Setup VS Code tests --- .vscode/settings.json | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 .vscode/settings.json diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 000000000..c4d5be0e3 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,7 @@ +{ + "python.testing.pytestEnabled": true, + "python.testing.unittestEnabled": false, + "python.testing.nosetestsEnabled": false, + "python.testing.pytestArgs": ["tests", "-v", "--tb=short"], + "python.testing.autoTestDiscoverOnSaveEnabled": true +} From 3456346eea3f6104cf0de7e38ccc8543a44d3c3f Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Thu, 21 Nov 2024 17:29:49 +0000 Subject: [PATCH 052/164] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6a1643505..d4a96ca74 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -44,6 +44,7 @@ - Update pre-commit hook astral-sh/ruff-pre-commit to v0.7.4 ([#3282](https://github.com/nf-core/tools/pull/3282)) - Update codecov/codecov-action action to v5 ([#3283](https://github.com/nf-core/tools/pull/3283)) - Update python:3.12-slim Docker digest to 2a6386a ([#3284](https://github.com/nf-core/tools/pull/3284)) +- build: Setup VS Code 
tests ([#3292](https://github.com/nf-core/tools/pull/3292)) ## [v3.0.2 - Titanium Tapir Patch](https://github.com/nf-core/tools/releases/tag/3.0.2) - [2024-10-11] From 25940f7e90fd7f94fa9eae596e7b7d0a9dc54a3c Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 22 Nov 2024 16:05:43 +0100 Subject: [PATCH 053/164] apply patch reverse when linting a patched subworkflow --- .../subworkflows/lint/subworkflow_changes.py | 27 ++++++++++++++++++- 1 file changed, 26 insertions(+), 1 deletion(-) diff --git a/nf_core/subworkflows/lint/subworkflow_changes.py b/nf_core/subworkflows/lint/subworkflow_changes.py index a9c9616a2..e6af89212 100644 --- a/nf_core/subworkflows/lint/subworkflow_changes.py +++ b/nf_core/subworkflows/lint/subworkflow_changes.py @@ -2,9 +2,12 @@ Check whether the content of a subworkflow has changed compared to the original repository """ +import shutil +import tempfile from pathlib import Path import nf_core.modules.modules_repo +from nf_core.modules.modules_differ import ModulesDiffer def subworkflow_changes(subworkflow_lint_object, subworkflow): @@ -20,7 +23,29 @@ def subworkflow_changes(subworkflow_lint_object, subworkflow): Only runs when linting a pipeline, not the modules repository """ - tempdir = subworkflow.component_dir + if subworkflow.is_patched: + # If the subworkflow is patched, we need to apply + # the patch in reverse before comparing with the remote + tempdir_parent = Path(tempfile.mkdtemp()) + tempdir = tempdir_parent / "tmp_subworkflow_dir" + shutil.copytree(subworkflow.component_dir, tempdir) + try: + new_lines = ModulesDiffer.try_apply_patch( + subworkflow.component_type, + subworkflow.component_name, + subworkflow.org, + subworkflow.patch_path, + tempdir, + reverse=True, + ) + for file, lines in new_lines.items(): + with open(tempdir / file, "w") as fh: + fh.writelines(lines) + except LookupError: + # This error is already reported by subworkflow_patch, so just return + return + else: + tempdir = subworkflow.component_dir 
subworkflow.branch = subworkflow_lint_object.modules_json.get_component_branch( "subworkflows", subworkflow.component_name, subworkflow.repo_url, subworkflow.org ) From b2cfd0125ba3e0658a17d301291fc952ca1f8afc Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 22 Nov 2024 16:31:47 +0100 Subject: [PATCH 054/164] update get_patch_fn to work with subworkflows --- nf_core/components/update.py | 11 +++++++++-- nf_core/modules/modules_json.py | 16 ++++++++-------- tests/modules/test_patch.py | 22 +++++++++++----------- 3 files changed, 28 insertions(+), 21 deletions(-) diff --git a/nf_core/components/update.py b/nf_core/components/update.py index 7edb0ffd0..76c6b2b07 100644 --- a/nf_core/components/update.py +++ b/nf_core/components/update.py @@ -449,7 +449,9 @@ def get_single_component_info(self, component): self.modules_repo.setup_branch(current_branch) # If there is a patch file, get its filename - patch_fn = self.modules_json.get_patch_fn(component, self.modules_repo.remote_url, install_dir) + patch_fn = self.modules_json.get_patch_fn( + self.component_type, component, self.modules_repo.remote_url, install_dir + ) return (self.modules_repo, component, sha, patch_fn) @@ -695,7 +697,12 @@ def get_all_components_info(self, branch=None): # Add patch filenames to the components that have them components_info = [ - (repo, comp, sha, self.modules_json.get_patch_fn(comp, repo.remote_url, repo.repo_path)) + ( + repo, + comp, + sha, + self.modules_json.get_patch_fn(self.component_type, comp, repo.remote_url, repo.repo_path), + ) for repo, comp, sha in components_info ] diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 5628c7574..64aab54bf 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -837,17 +837,17 @@ def remove_patch_entry(self, module_name, repo_url, install_dir, write_file=True if write_file: self.dump() - def get_patch_fn(self, module_name, repo_url, install_dir): + def get_patch_fn(self, 
component_type, component_name, repo_url, install_dir): """ - Get the patch filename of a module + Get the patch filename of a component Args: - module_name (str): The name of the module - repo_url (str): The URL of the repository containing the module - install_dir (str): The name of the directory where modules are installed + component_name (str): The name of the component + repo_url (str): The URL of the repository containing the component + install_dir (str): The name of the directory where components are installed Returns: - (str): The patch filename for the module, None if not present + (str): The patch filename for the component, None if not present """ if self.modules_json is None: self.load() @@ -855,9 +855,9 @@ def get_patch_fn(self, module_name, repo_url, install_dir): path = ( self.modules_json["repos"] .get(repo_url, {}) - .get("modules") + .get(component_type) .get(install_dir) - .get(module_name, {}) + .get(component_name, {}) .get("patch") ) return Path(path) if path is not None else None diff --git a/tests/modules/test_patch.py b/tests/modules/test_patch.py index 2f60cd4a2..27c1e342e 100644 --- a/tests/modules/test_patch.py +++ b/tests/modules/test_patch.py @@ -80,7 +80,7 @@ def test_create_patch_no_change(self): # Check the 'modules.json' contains no patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) is None + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, REPO_URL, REPO_NAME) is None def test_create_patch_change(self): """Test creating a patch when there is a change to the module""" @@ -98,7 +98,7 @@ def test_create_patch_change(self): # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + assert 
modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) @@ -131,7 +131,7 @@ def test_create_patch_try_apply_successful(self): # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) @@ -157,7 +157,7 @@ def test_create_patch_try_apply_successful(self): # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) @@ -199,7 +199,7 @@ def test_create_patch_try_apply_failed(self): # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) @@ -238,7 +238,7 @@ def test_create_patch_update_success(self): # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, GITLAB_URL, REPO_NAME) == Path( + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, GITLAB_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) @@ -258,9 +258,9 @@ def test_create_patch_update_success(self): # Check the 'modules.json' contains a patch 
file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, GITLAB_URL, REPO_NAME) == Path( + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, GITLAB_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn - ), modules_json_obj.get_patch_fn(BISMARK_ALIGN, GITLAB_URL, REPO_NAME) + ), modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, GITLAB_URL, REPO_NAME) # Check that the correct lines are in the patch file with open(module_path / patch_fn) as fh: @@ -299,7 +299,7 @@ def test_create_patch_update_fail(self): # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) @@ -353,7 +353,7 @@ def test_remove_patch(self): # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) @@ -365,4 +365,4 @@ def test_remove_patch(self): # Check that the 'modules.json' entry has been removed modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) is None + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, REPO_URL, REPO_NAME) is None From 6ec2dcd763865dcfd3c3672e244b7e3a401ac037 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 22 Nov 2024 16:49:39 +0100 Subject: [PATCH 055/164] move modules_differ.py to components_differ.py --- 
.../components_differ.py} | 112 +++++++++--------- nf_core/components/patch.py | 6 +- nf_core/components/update.py | 12 +- nf_core/modules/lint/main_nf.py | 4 +- nf_core/modules/lint/meta_yml.py | 6 +- nf_core/modules/lint/module_changes.py | 4 +- nf_core/modules/lint/module_patch.py | 16 +-- nf_core/modules/modules_json.py | 4 +- .../subworkflows/lint/subworkflow_changes.py | 4 +- 9 files changed, 84 insertions(+), 84 deletions(-) rename nf_core/{modules/modules_differ.py => components/components_differ.py} (83%) diff --git a/nf_core/modules/modules_differ.py b/nf_core/components/components_differ.py similarity index 83% rename from nf_core/modules/modules_differ.py rename to nf_core/components/components_differ.py index c151fcce7..db51c1910 100644 --- a/nf_core/modules/modules_differ.py +++ b/nf_core/components/components_differ.py @@ -16,10 +16,10 @@ log = logging.getLogger(__name__) -class ModulesDiffer: +class ComponentsDiffer: """ Static class that provides functionality for computing diffs between - different instances of a module + different instances of a module or subworkflow """ class DiffEnum(enum.Enum): @@ -34,15 +34,15 @@ class DiffEnum(enum.Enum): REMOVED = enum.auto() @staticmethod - def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_dir=None): + def get_component_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_dir=None): """ - Compute the diff between the current module version + Compute the diff between the current component version and the new version. Args: - from_dir (strOrPath): The folder containing the old module files - to_dir (strOrPath): The folder containing the new module files - path_in_diff (strOrPath): The directory displayed containing the module + from_dir (strOrPath): The folder containing the old component files + to_dir (strOrPath): The folder containing the new component files + path_in_diff (strOrPath): The directory displayed containing the component file in the diff. 
Added so that temporary dirs are not shown for_git (bool): indicates whether the diff file is to be @@ -52,7 +52,7 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d dsp_to_dir (str | Path): The to directory to display in the diff Returns: - dict[str, (ModulesDiffer.DiffEnum, str)]: A dictionary containing + dict[str, (ComponentsDiffer.DiffEnum, str)]: A dictionary containing the diff type and the diff string (empty if no diff) """ if for_git: @@ -72,7 +72,7 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d ) files = list(files) - # Loop through all the module files and compute their diffs if needed + # Loop through all the component files and compute their diffs if needed for file in files: temp_path = Path(to_dir, file) curr_path = Path(from_dir, file) @@ -84,7 +84,7 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d if new_lines == old_lines: # The files are identical - diffs[file] = (ModulesDiffer.DiffEnum.UNCHANGED, ()) + diffs[file] = (ComponentsDiffer.DiffEnum.UNCHANGED, ()) else: # Compute the diff diff = difflib.unified_diff( @@ -93,7 +93,7 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d fromfile=str(Path(dsp_from_dir, file)), tofile=str(Path(dsp_to_dir, file)), ) - diffs[file] = (ModulesDiffer.DiffEnum.CHANGED, diff) + diffs[file] = (ComponentsDiffer.DiffEnum.CHANGED, diff) elif temp_path.exists(): with open(temp_path) as fh: @@ -106,7 +106,7 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d fromfile=str(Path("/dev", "null")), tofile=str(Path(dsp_to_dir, file)), ) - diffs[file] = (ModulesDiffer.DiffEnum.CREATED, diff) + diffs[file] = (ComponentsDiffer.DiffEnum.CREATED, diff) elif curr_path.exists(): # The file was removed @@ -119,14 +119,14 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d fromfile=str(Path(dsp_from_dir, file)), tofile=str(Path("/dev", 
"null")), ) - diffs[file] = (ModulesDiffer.DiffEnum.REMOVED, diff) + diffs[file] = (ComponentsDiffer.DiffEnum.REMOVED, diff) return diffs @staticmethod def write_diff_file( diff_path, - module, + component, repo_path, from_dir, to_dir, @@ -139,20 +139,19 @@ def write_diff_file( limit_output=False, ): """ - Writes the diffs of a module to the diff file. + Writes the diffs of a component to the diff file. Args: diff_path (str | Path): The path to the file that should be appended - module (str): The module name - repo_path (str): The name of the repo where the module resides - from_dir (str | Path): The directory containing the old module files - to_dir (str | Path): The directory containing the new module files - diffs (dict[str, (ModulesDiffer.DiffEnum, str)]): A dictionary containing + component (str): The component name + repo_path (str): The name of the repo where the component resides + from_dir (str | Path): The directory containing the old component files + to_dir (str | Path): The directory containing the new component files + diffs (dict[str, (ComponentsDiffer.DiffEnum, str)]): A dictionary containing the type of change and the diff (if any) - module_dir (str | Path): The path to the current installation of the module - current_version (str): The installed version of the module - new_version (str): The version of the module the diff is computed against + current_version (str): The installed version of the component + new_version (str): The version of the component the diff is computed against for_git (bool): indicates whether the diff file is to be compatible with `git apply`. 
If true it adds a/ and b/ prefixes to the file paths @@ -165,36 +164,36 @@ def write_diff_file( if dsp_to_dir is None: dsp_to_dir = to_dir - diffs = ModulesDiffer.get_module_diffs(from_dir, to_dir, for_git, dsp_from_dir, dsp_to_dir) - if all(diff_status == ModulesDiffer.DiffEnum.UNCHANGED for _, (diff_status, _) in diffs.items()): - raise UserWarning("Module is unchanged") - log.debug(f"Writing diff of '{module}' to '{diff_path}'") + diffs = ComponentsDiffer.get_component_diffs(from_dir, to_dir, for_git, dsp_from_dir, dsp_to_dir) + if all(diff_status == ComponentsDiffer.DiffEnum.UNCHANGED for _, (diff_status, _) in diffs.items()): + raise UserWarning("Component is unchanged") + log.debug(f"Writing diff of '{component}' to '{diff_path}'") with open(diff_path, file_action) as fh: if current_version is not None and new_version is not None: fh.write( - f"Changes in module '{Path(repo_path, module)}' between" + f"Changes in component '{Path(repo_path, component)}' between" f" ({current_version}) and" f" ({new_version})\n" ) else: - fh.write(f"Changes in module '{Path(repo_path, module)}'\n") + fh.write(f"Changes in component '{Path(repo_path, component)}'\n") for file, (diff_status, diff) in diffs.items(): - if diff_status == ModulesDiffer.DiffEnum.UNCHANGED: + if diff_status == ComponentsDiffer.DiffEnum.UNCHANGED: # The files are identical fh.write(f"'{Path(dsp_from_dir, file)}' is unchanged\n") - elif diff_status == ModulesDiffer.DiffEnum.CREATED: + elif diff_status == ComponentsDiffer.DiffEnum.CREATED: # The file was created between the commits fh.write(f"'{Path(dsp_from_dir, file)}' was created\n") - elif diff_status == ModulesDiffer.DiffEnum.REMOVED: + elif diff_status == ComponentsDiffer.DiffEnum.REMOVED: # The file was removed between the commits fh.write(f"'{Path(dsp_from_dir, file)}' was removed\n") elif limit_output and not file.suffix == ".nf": # Skip printing the diff for files other than main.nf - fh.write(f"Changes in '{Path(module, file)}' but not 
shown\n") + fh.write(f"Changes in '{Path(component, file)}' but not shown\n") else: # The file has changed write the diff lines to the file - fh.write(f"Changes in '{Path(module, file)}':\n") + fh.write(f"Changes in '{Path(component, file)}':\n") for line in diff: fh.write(line) fh.write("\n") @@ -237,7 +236,7 @@ def append_modules_json_diff(diff_path, old_modules_json, new_modules_json, modu @staticmethod def print_diff( - module, + component, repo_path, from_dir, to_dir, @@ -248,16 +247,15 @@ def print_diff( limit_output=False, ): """ - Prints the diffs between two module versions to the terminal + Prints the diffs between two component versions to the terminal Args: - module (str): The module name - repo_path (str): The name of the repo where the module resides - from_dir (str | Path): The directory containing the old module files - to_dir (str | Path): The directory containing the new module files - module_dir (str): The path to the current installation of the module - current_version (str): The installed version of the module - new_version (str): The version of the module the diff is computed against + component (str): The component name + repo_path (str): The name of the repo where the component resides + from_dir (str | Path): The directory containing the old component files + to_dir (str | Path): The directory containing the new component files + current_version (str): The installed version of the component + new_version (str): The version of the component the diff is computed against dsp_from_dir (str | Path): The 'from' directory displayed in the diff dsp_to_dir (str | Path): The 'to' directory displayed in the diff limit_output (bool): If true, don't print the diff for files other than main.nf @@ -267,41 +265,43 @@ def print_diff( if dsp_to_dir is None: dsp_to_dir = to_dir - diffs = ModulesDiffer.get_module_diffs( + diffs = ComponentsDiffer.get_component_diffs( from_dir, to_dir, for_git=False, dsp_from_dir=dsp_from_dir, dsp_to_dir=dsp_to_dir ) console = 
Console(force_terminal=nf_core.utils.rich_force_colors()) if current_version is not None and new_version is not None: log.info( - f"Changes in module '{Path(repo_path, module)}' between" f" ({current_version}) and" f" ({new_version})" + f"Changes in component '{Path(repo_path, component)}' between" + f" ({current_version}) and" + f" ({new_version})" ) else: - log.info(f"Changes in module '{Path(repo_path, module)}'") + log.info(f"Changes in component '{Path(repo_path, component)}'") panel_group: list[RenderableType] = [] for file, (diff_status, diff) in diffs.items(): - if diff_status == ModulesDiffer.DiffEnum.UNCHANGED: + if diff_status == ComponentsDiffer.DiffEnum.UNCHANGED: # The files are identical log.info(f"'{Path(dsp_from_dir, file)}' is unchanged") - elif diff_status == ModulesDiffer.DiffEnum.CREATED: + elif diff_status == ComponentsDiffer.DiffEnum.CREATED: # The file was created between the commits log.info(f"'{Path(dsp_from_dir, file)}' was created") - elif diff_status == ModulesDiffer.DiffEnum.REMOVED: + elif diff_status == ComponentsDiffer.DiffEnum.REMOVED: # The file was removed between the commits log.info(f"'{Path(dsp_from_dir, file)}' was removed") elif limit_output and not file.suffix == ".nf": # Skip printing the diff for files other than main.nf - log.info(f"Changes in '{Path(module, file)}' but not shown") + log.info(f"Changes in '{Path(component, file)}' but not shown") else: # The file has changed - log.info(f"Changes in '{Path(module, file)}':") + log.info(f"Changes in '{Path(component, file)}':") # Pretty print the diff using the pygments diff lexer syntax = Syntax("".join(diff), "diff", theme="ansi_dark", line_numbers=True) panel_group.append(Panel(syntax, title=str(file), title_align="left", padding=0)) console.print( Panel( Group(*panel_group), - title=f"[white]{str(module)}[/white]", + title=f"[white]{str(component)}[/white]", title_align="left", padding=0, border_style="blue", @@ -422,7 +422,7 @@ def try_apply_single_patch(file_lines, 
patch, reverse=False): LookupError: If it fails to find the old lines from the patch in the file. """ - org_lines, patch_lines = ModulesDiffer.get_new_and_old_lines(patch) + org_lines, patch_lines = ComponentsDiffer.get_new_and_old_lines(patch) if reverse: patch_lines, org_lines = org_lines, patch_lines @@ -479,7 +479,7 @@ def try_apply_patch( Args: component_type (str): The type of component (modules or subworkflows) component (str): Name of the module or subworkflow - repo_path (str): Name of the repository where the module resides + repo_path (str): Name of the repository where the component resides patch_path (str): The absolute path to the patch file to be applied component_dir (Path): The directory containing the component reverse (bool): Apply the patch in reverse @@ -492,7 +492,7 @@ def try_apply_patch( LookupError: If the patch application fails in a file """ component_relpath = Path(component_type, repo_path, component) - patches = ModulesDiffer.per_file_patch(patch_path) + patches = ComponentsDiffer.per_file_patch(patch_path) new_files = {} for file, patch in patches.items(): log.debug(f"Applying patch to {file}") @@ -504,6 +504,6 @@ def try_apply_patch( except FileNotFoundError: # The file was added with the patch file_lines = [""] - patched_new_lines = ModulesDiffer.try_apply_single_patch(file_lines, patch, reverse=reverse) + patched_new_lines = ComponentsDiffer.try_apply_single_patch(file_lines, patch, reverse=reverse) new_files[str(fn)] = patched_new_lines return new_files diff --git a/nf_core/components/patch.py b/nf_core/components/patch.py index 77717877f..59ec7a381 100644 --- a/nf_core/components/patch.py +++ b/nf_core/components/patch.py @@ -8,7 +8,7 @@ import nf_core.utils from nf_core.components.components_command import ComponentCommand -from nf_core.modules.modules_differ import ModulesDiffer +from nf_core.components.components_differ import ComponentsDiffer from nf_core.modules.modules_json import ModulesJson log = 
logging.getLogger(__name__) @@ -114,7 +114,7 @@ def patch(self, component=None): # Write the patch to a temporary location (otherwise it is printed to the screen later) patch_temp_path = tempfile.mktemp() try: - ModulesDiffer.write_diff_file( + ComponentsDiffer.write_diff_file( patch_temp_path, component, self.modules_repo.repo_path, @@ -135,7 +135,7 @@ def patch(self, component=None): log.debug(f"Wrote patch path for {self.component_type[:-1]} {component} to modules.json") # Show the changes made to the module - ModulesDiffer.print_diff( + ComponentsDiffer.print_diff( component, self.modules_repo.repo_path, component_install_dir, diff --git a/nf_core/components/update.py b/nf_core/components/update.py index 76c6b2b07..901a7f02f 100644 --- a/nf_core/components/update.py +++ b/nf_core/components/update.py @@ -9,13 +9,13 @@ import nf_core.modules.modules_utils import nf_core.utils from nf_core.components.components_command import ComponentCommand +from nf_core.components.components_differ import ComponentsDiffer from nf_core.components.components_utils import ( get_components_to_install, prompt_component_version_sha, ) from nf_core.components.install import ComponentInstall from nf_core.components.remove import ComponentRemove -from nf_core.modules.modules_differ import ModulesDiffer from nf_core.modules.modules_json import ModulesJson from nf_core.modules.modules_repo import ModulesRepo from nf_core.utils import plural_es, plural_s, plural_y @@ -223,7 +223,7 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr f"Writing diff file for {self.component_type[:-1]} '{component_fullname}' to '{self.save_diff_fn}'" ) try: - ModulesDiffer.write_diff_file( + ComponentsDiffer.write_diff_file( self.save_diff_fn, component, modules_repo.repo_path, @@ -265,7 +265,7 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr self.manage_changes_in_linked_components(component, modules_to_update, subworkflows_to_update) elif 
self.show_diff: - ModulesDiffer.print_diff( + ComponentsDiffer.print_diff( component, modules_repo.repo_path, component_dir, @@ -313,7 +313,7 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr if self.save_diff_fn: # Write the modules.json diff to the file - ModulesDiffer.append_modules_json_diff( + ComponentsDiffer.append_modules_json_diff( self.save_diff_fn, old_modules_json, self.modules_json.get_modules_json(), @@ -817,7 +817,7 @@ def try_apply_patch( shutil.copytree(component_install_dir, temp_component_dir) try: - new_files = ModulesDiffer.try_apply_patch( + new_files = ComponentsDiffer.try_apply_patch( self.component_type, component, repo_path, patch_path, temp_component_dir ) except LookupError: @@ -837,7 +837,7 @@ def try_apply_patch( # Create the new patch file log.debug("Regenerating patch file") - ModulesDiffer.write_diff_file( + ComponentsDiffer.write_diff_file( Path(temp_component_dir, patch_path.relative_to(component_dir)), component, repo_path, diff --git a/nf_core/modules/lint/main_nf.py b/nf_core/modules/lint/main_nf.py index 2b7878ca0..848e17130 100644 --- a/nf_core/modules/lint/main_nf.py +++ b/nf_core/modules/lint/main_nf.py @@ -15,8 +15,8 @@ import nf_core import nf_core.modules.modules_utils +from nf_core.components.components_differ import ComponentsDiffer from nf_core.components.nfcore_component import NFCoreComponent -from nf_core.modules.modules_differ import ModulesDiffer log = logging.getLogger(__name__) @@ -50,7 +50,7 @@ def main_nf( # otherwise read the lines directly from the module lines: List[str] = [] if module.is_patched: - lines = ModulesDiffer.try_apply_patch( + lines = ComponentsDiffer.try_apply_patch( module.component_type, module.component_name, module_lint_object.modules_repo.repo_path, diff --git a/nf_core/modules/lint/meta_yml.py b/nf_core/modules/lint/meta_yml.py index 59f0f0125..d0268a40c 100644 --- a/nf_core/modules/lint/meta_yml.py +++ b/nf_core/modules/lint/meta_yml.py @@ -6,9 +6,9 @@ 
import ruamel.yaml from jsonschema import exceptions, validators +from nf_core.components.components_differ import ComponentsDiffer from nf_core.components.lint import ComponentLint, LintExceptionError from nf_core.components.nfcore_component import NFCoreComponent -from nf_core.modules.modules_differ import ModulesDiffer log = logging.getLogger(__name__) @@ -46,7 +46,7 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None # Check if we have a patch file, get original file in that case meta_yaml = read_meta_yml(module_lint_object, module) if module.is_patched and module_lint_object.modules_repo.repo_path is not None: - lines = ModulesDiffer.try_apply_patch( + lines = ComponentsDiffer.try_apply_patch( module.component_type, module.component_name, module_lint_object.modules_repo.repo_path, @@ -208,7 +208,7 @@ def read_meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> yaml.preserve_quotes = True # Check if we have a patch file, get original file in that case if module.is_patched: - lines = ModulesDiffer.try_apply_patch( + lines = ComponentsDiffer.try_apply_patch( module.component_type, module.component_name, module_lint_object.modules_repo.repo_path, diff --git a/nf_core/modules/lint/module_changes.py b/nf_core/modules/lint/module_changes.py index 708a2bad6..121de00c0 100644 --- a/nf_core/modules/lint/module_changes.py +++ b/nf_core/modules/lint/module_changes.py @@ -7,7 +7,7 @@ from pathlib import Path import nf_core.modules.modules_repo -from nf_core.modules.modules_differ import ModulesDiffer +from nf_core.components.components_differ import ComponentsDiffer def module_changes(module_lint_object, module): @@ -30,7 +30,7 @@ def module_changes(module_lint_object, module): tempdir = tempdir_parent / "tmp_module_dir" shutil.copytree(module.component_dir, tempdir) try: - new_lines = ModulesDiffer.try_apply_patch( + new_lines = ComponentsDiffer.try_apply_patch( module.component_type, module.component_name, module.org, 
diff --git a/nf_core/modules/lint/module_patch.py b/nf_core/modules/lint/module_patch.py index 19c6e76fe..6347c5c55 100644 --- a/nf_core/modules/lint/module_patch.py +++ b/nf_core/modules/lint/module_patch.py @@ -1,7 +1,7 @@ from pathlib import Path +from ...components.components_differ import ComponentsDiffer from ...components.nfcore_component import NFCoreComponent -from ..modules_differ import ModulesDiffer def module_patch(module_lint_obj, module: NFCoreComponent): @@ -66,11 +66,11 @@ def check_patch_valid(module, patch_path): continue topath = Path(line.split(" ")[1].strip("\n")) if frompath == Path("/dev/null"): - paths_in_patch.append((frompath, ModulesDiffer.DiffEnum.CREATED)) + paths_in_patch.append((frompath, ComponentsDiffer.DiffEnum.CREATED)) elif topath == Path("/dev/null"): - paths_in_patch.append((frompath, ModulesDiffer.DiffEnum.REMOVED)) + paths_in_patch.append((frompath, ComponentsDiffer.DiffEnum.REMOVED)) elif frompath == topath: - paths_in_patch.append((frompath, ModulesDiffer.DiffEnum.CHANGED)) + paths_in_patch.append((frompath, ComponentsDiffer.DiffEnum.CHANGED)) else: module.failed.append( ( @@ -105,7 +105,7 @@ def check_patch_valid(module, patch_path): # Warn about any created or removed files passed = True for path, diff_status in paths_in_patch: - if diff_status == ModulesDiffer.DiffEnum.CHANGED: + if diff_status == ComponentsDiffer.DiffEnum.CHANGED: if not Path(module.base_dir, path).exists(): module.failed.append( ( @@ -116,7 +116,7 @@ def check_patch_valid(module, patch_path): ) passed = False continue - elif diff_status == ModulesDiffer.DiffEnum.CREATED: + elif diff_status == ComponentsDiffer.DiffEnum.CREATED: if not Path(module.base_dir, path).exists(): module.failed.append( ( @@ -130,7 +130,7 @@ def check_patch_valid(module, patch_path): module.warned.append( ("patch", f"Patch file performs file creation of {path}. 
This is discouraged."), patch_path ) - elif diff_status == ModulesDiffer.DiffEnum.REMOVED: + elif diff_status == ComponentsDiffer.DiffEnum.REMOVED: if Path(module.base_dir, path).exists(): module.failed.append( ( @@ -161,7 +161,7 @@ def patch_reversible(module_lint_object, module, patch_path): (bool): False if any test failed, True otherwise """ try: - ModulesDiffer.try_apply_patch( + ComponentsDiffer.try_apply_patch( module.component_type, module.component_name, module_lint_object.modules_repo.repo_path, diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 64aab54bf..9ae735b1c 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -19,7 +19,7 @@ from nf_core.modules.modules_repo import ModulesRepo from nf_core.pipelines.lint_utils import dump_json_with_prettier -from .modules_differ import ModulesDiffer +from ..components.components_differ import ComponentsDiffer log = logging.getLogger(__name__) @@ -883,7 +883,7 @@ def try_apply_patch_reverse(self, component_type, component, repo_name, patch_re patch_path = Path(self.directory / patch_relpath) try: - new_files = ModulesDiffer.try_apply_patch( + new_files = ComponentsDiffer.try_apply_patch( component_type, component, repo_name, patch_path, component_dir, reverse=True ) except LookupError as e: diff --git a/nf_core/subworkflows/lint/subworkflow_changes.py b/nf_core/subworkflows/lint/subworkflow_changes.py index e6af89212..cf0fd7211 100644 --- a/nf_core/subworkflows/lint/subworkflow_changes.py +++ b/nf_core/subworkflows/lint/subworkflow_changes.py @@ -7,7 +7,7 @@ from pathlib import Path import nf_core.modules.modules_repo -from nf_core.modules.modules_differ import ModulesDiffer +from nf_core.components.components_differ import ComponentsDiffer def subworkflow_changes(subworkflow_lint_object, subworkflow): @@ -30,7 +30,7 @@ def subworkflow_changes(subworkflow_lint_object, subworkflow): tempdir = tempdir_parent / "tmp_subworkflow_dir" 
shutil.copytree(subworkflow.component_dir, tempdir) try: - new_lines = ModulesDiffer.try_apply_patch( + new_lines = ComponentsDiffer.try_apply_patch( subworkflow.component_type, subworkflow.component_name, subworkflow.org, From 27582f94a1b9b8e2fe18b118e7138c367f46ea8b Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 25 Nov 2024 11:06:07 +0100 Subject: [PATCH 056/164] add subworkflows patch missing tests --- tests/subworkflows/test_patch.py | 111 +++++++++++++++++++++++++++++-- 1 file changed, 107 insertions(+), 4 deletions(-) diff --git a/tests/subworkflows/test_patch.py b/tests/subworkflows/test_patch.py index ba452083f..388c5adde 100644 --- a/tests/subworkflows/test_patch.py +++ b/tests/subworkflows/test_patch.py @@ -172,17 +172,120 @@ def test_create_patch_try_apply_failed(self): is False ) - # TODO: create those two missing tests def test_create_patch_update_success(self): - """Test creating a patch file and updating a subworkflow when there is a diff conflict""" - - def test_create_patch_update_fail(self): """ Test creating a patch file and the updating the subworkflow Should have the same effect as 'test_create_patch_try_apply_successful' but uses higher level api """ + self.setup_patch(self.pipeline_dir, True) + swf_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") + + # Try creating a patch file + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) + patch_obj.patch("bam_sort_stats_samtools") + + patch_fn = "bam_sort_stats_samtools.diff" + # Check that a patch file with the correct name has been created + assert set(os.listdir(swf_path)) == {"main.nf", "meta.yml", patch_fn} + + # Check the 'modules.json' contains a patch file for the subworkflow + modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) + assert modules_json_obj.get_patch_fn( + "subworkflows", "bam_sort_stats_samtools", GITLAB_URL, GITLAB_REPO + ) == Path("subworkflows", 
GITLAB_REPO, "bam_sort_stats_samtools", patch_fn) + + # Update the subworkflow + update_obj = nf_core.subworkflows.update.SubworkflowUpdate( + self.pipeline_dir, + sha=OLD_SHA, + show_diff=False, + update_deps=True, + remote_url=GITLAB_URL, + branch=GITLAB_SUBWORKFLOWS_BRANCH, + ) + assert update_obj.update("bam_sort_stats_samtools") + + # Check that a patch file with the correct name has been created + assert set(os.listdir(swf_path)) == {"main.nf", "meta.yml", patch_fn} + + # Check the 'modules.json' contains a patch file for the subworkflow + modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) + assert modules_json_obj.get_patch_fn( + "subworkflows", "bam_sort_stats_samtools", GITLAB_URL, GITLAB_REPO + ) == Path("subworkflows", GITLAB_REPO, "bam_sort_stats_samtools", patch_fn), modules_json_obj.get_patch_fn( + "subworkflows", "bam_sort_stats_samtools", GITLAB_URL, GITLAB_REPO + ) + + # Check that the correct lines are in the patch file + with open(swf_path / patch_fn) as fh: + patch_lines = fh.readlines() + swf_relpath = swf_path.relative_to(self.pipeline_dir) + assert f"--- {swf_relpath / 'main.nf'}\n" in patch_lines + assert f"+++ {swf_relpath / 'main.nf'}\n" in patch_lines + assert "- ch_fasta // channel: [ fasta ]\n" in patch_lines + + # Check that 'main.nf' is updated correctly + with open(swf_path / "main.nf") as fh: + main_nf_lines = fh.readlines() + # this line should have been removed by the patch + assert " ch_fasta // channel: [ fasta ]\n" not in main_nf_lines + + def test_create_patch_update_fail(self): + """ + Test creating a patch file and updating a subworkflow when there is a diff conflict + """ + self.setup_patch(self.pipeline_dir, True) + swf_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") + + # Try creating a patch file + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) + patch_obj.patch("bam_sort_stats_samtools") + + 
patch_fn = "bam_sort_stats_samtools.diff" + # Check that a patch file with the correct name has been created + assert set(os.listdir(swf_path)) == {"main.nf", "meta.yml", patch_fn} + + # Check the 'modules.json' contains a patch file for the subworkflow + modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) + assert modules_json_obj.get_patch_fn( + "subworkflows", "bam_sort_stats_samtools", GITLAB_URL, GITLAB_REPO + ) == Path("subworkflows", GITLAB_REPO, "bam_sort_stats_samtools", patch_fn) + + # Save the file contents for downstream comparison + with open(swf_path / patch_fn) as fh: + patch_contents = fh.read() + + update_obj = nf_core.subworkflows.update.SubworkflowUpdate( + self.pipeline_dir, + sha=FAIL_SHA, + show_diff=False, + update_deps=True, + remote_url=GITLAB_URL, + branch=GITLAB_SUBWORKFLOWS_BRANCH, + ) + update_obj.update("bam_sort_stats_samtools") + + # Check that the installed files have not been affected by the attempted patch + temp_dir = Path(tempfile.mkdtemp()) + nf_core.components.components_command.ComponentCommand( + "subworkflows", self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH + ).install_component_files("bam_sort_stats_samtools", FAIL_SHA, update_obj.modules_repo, temp_dir) + + temp_module_dir = temp_dir / "bam_sort_stats_samtools" + for file in os.listdir(temp_module_dir): + assert file in os.listdir(swf_path) + with open(swf_path / file) as fh: + installed = fh.read() + with open(temp_module_dir / file) as fh: + shouldbe = fh.read() + assert installed == shouldbe + + # Check that the patch file is unaffected + with open(swf_path / patch_fn) as fh: + new_patch_contents = fh.read() + assert patch_contents == new_patch_contents def test_remove_patch(self): """Test creating a patch when there is no change to the subworkflow""" From 4f93d5759db5b23383a2bf95665de4c8172c9e03 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 25 Nov 2024 11:56:51 +0100 Subject: [PATCH 057/164] fix subworkflows update test --- 
tests/subworkflows/test_update.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/subworkflows/test_update.py b/tests/subworkflows/test_update.py index 153038cd1..9f5d1939f 100644 --- a/tests/subworkflows/test_update.py +++ b/tests/subworkflows/test_update.py @@ -98,7 +98,7 @@ def test_install_at_hash_and_update_and_save_diff_to_file(self): with open(patch_path) as fh: line = fh.readline() assert line.startswith( - "Changes in module 'nf-core/fastq_align_bowtie2' between (f3c078809a2513f1c95de14f6633fe1f03572fdb) and" + "Changes in component 'nf-core/fastq_align_bowtie2' between (f3c078809a2513f1c95de14f6633fe1f03572fdb) and" ) def test_install_at_hash_and_update_and_save_diff_limit_output(self): From 5ad45701e7d89736efef768ce712eddc5215f32e Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 25 Nov 2024 15:24:04 +0100 Subject: [PATCH 058/164] update changelog --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6a1643505..fed991f85 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -25,6 +25,8 @@ ### Subworkflows +- Add `nf-core subworkflows patch` command ([#2861](https://github.com/nf-core/tools/pull/2861)) + ### General - Include .nf-core.yml in `nf-core pipelines bump-version` ([#3220](https://github.com/nf-core/tools/pull/3220)) From 37ca244d7a0ebaa48b24b28d02c900114e780a01 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 25 Nov 2024 15:24:17 +0100 Subject: [PATCH 059/164] add help text for --remove flag --- nf_core/__main__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 6780ad80c..81d088e13 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -1024,7 +1024,7 @@ def command_modules_update( default=".", help=r"Pipeline directory. 
[dim]\[default: current working directory][/]", ) -@click.option("-r", "--remove", is_flag=True, default=False) +@click.option("-r", "--remove", is_flag=True, default=False, help="Remove an existent patch file and regenerate it.") def command_modules_patch(ctx, tool, directory, remove): """ Create a patch file for minor changes in a module @@ -1578,7 +1578,7 @@ def command_subworkflows_install(ctx, subworkflow, directory, prompt, force, sha default=".", help=r"Pipeline directory. [dim]\[default: current working directory][/]", ) -@click.option("-r", "--remove", is_flag=True, default=False) +@click.option("-r", "--remove", is_flag=True, default=False, help="Remove an existent patch file and regenerate it.") def subworkflows_patch(ctx, tool, dir, remove): """ Create a patch file for minor changes in a subworkflow From 805ba91df58633eaf68df64677e714b4b7ca04f0 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 25 Nov 2024 15:24:33 +0100 Subject: [PATCH 060/164] apply code review suggestions to patch tests --- tests/modules/test_patch.py | 20 ++++++++++---------- tests/subworkflows/test_patch.py | 20 ++++++++++---------- 2 files changed, 20 insertions(+), 20 deletions(-) diff --git a/tests/modules/test_patch.py b/tests/modules/test_patch.py index 27c1e342e..1c23871cc 100644 --- a/tests/modules/test_patch.py +++ b/tests/modules/test_patch.py @@ -76,7 +76,7 @@ def test_create_patch_no_change(self): module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) # Check that no patch file has been added to the directory - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml"} + assert "bismark-align.diff" in set(os.listdir(module_path)) # Check the 'modules.json' contains no patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -94,7 +94,7 @@ def test_create_patch_change(self): patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" # Check that a patch file with the correct name 
has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml", patch_fn} + assert patch_fn in set(os.listdir(module_path)) # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -127,7 +127,7 @@ def test_create_patch_try_apply_successful(self): patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml", patch_fn} + assert patch_fn in set(os.listdir(module_path)) # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -153,7 +153,7 @@ def test_create_patch_try_apply_successful(self): update_obj.move_files_from_tmp_dir(BISMARK_ALIGN, install_dir, REPO_NAME, SUCCEED_SHA) # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml", patch_fn} + assert patch_fn in set(os.listdir(module_path)) # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -195,7 +195,7 @@ def test_create_patch_try_apply_failed(self): patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml", patch_fn} + assert patch_fn in set(os.listdir(module_path)) # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -234,7 +234,7 @@ def test_create_patch_update_success(self): patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == 
{"main.nf", "meta.yml", "environment.yml", patch_fn} + assert patch_fn in set(os.listdir(module_path)) # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -254,7 +254,7 @@ def test_create_patch_update_success(self): assert update_obj.update(BISMARK_ALIGN) # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml", patch_fn} + assert patch_fn in set(os.listdir(module_path)) # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -295,7 +295,7 @@ def test_create_patch_update_fail(self): patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml", patch_fn} + assert patch_fn in set(os.listdir(module_path)) # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -349,7 +349,7 @@ def test_remove_patch(self): # Check that a patch file with the correct name has been created patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml", patch_fn} + assert patch_fn in set(os.listdir(module_path)) # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -361,7 +361,7 @@ def test_remove_patch(self): mock_questionary.unsafe_ask.return_value = True patch_obj.remove(BISMARK_ALIGN) # Check that the diff file has been removed - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml"} + assert patch_fn not in set(os.listdir(module_path)) # Check that the 'modules.json' entry has been removed 
modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) diff --git a/tests/subworkflows/test_patch.py b/tests/subworkflows/test_patch.py index 388c5adde..3df575c3d 100644 --- a/tests/subworkflows/test_patch.py +++ b/tests/subworkflows/test_patch.py @@ -66,7 +66,7 @@ def test_create_patch_no_change(self): subworkflow_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") # Check that no patch file has been added to the directory - assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml"} + assert "bam_sort_stats_samtools.diff" not in set(os.listdir(subworkflow_path)) def test_create_patch_change(self): """Test creating a patch when there is no change to the subworkflow""" @@ -79,7 +79,7 @@ def test_create_patch_change(self): subworkflow_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") # Check that a patch file with the correct name has been created - assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", "bam_sort_stats_samtools.diff"} + assert "bam_sort_stats_samtools.diff" in set(os.listdir(subworkflow_path)) # Check that the correct lines are in the patch file with open(subworkflow_path / "bam_sort_stats_samtools.diff") as fh: @@ -101,7 +101,7 @@ def test_create_patch_try_apply_successful(self): patch_obj.patch("bam_sort_stats_samtools") # Check that a patch file with the correct name has been created - assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", "bam_sort_stats_samtools.diff"} + assert "bam_sort_stats_samtools.diff" in set(os.listdir(subworkflow_path)) update_obj = nf_core.subworkflows.SubworkflowUpdate( self.pipeline_dir, sha=OLD_SHA, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH @@ -125,7 +125,7 @@ def test_create_patch_try_apply_successful(self): update_obj.move_files_from_tmp_dir("bam_sort_stats_samtools", install_dir, GITLAB_REPO, OLD_SHA) # Check that a patch file with the correct name has been created - 
assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", "bam_sort_stats_samtools.diff"} + assert "bam_sort_stats_samtools.diff" in set(os.listdir(subworkflow_path)) # Check that the correct lines are in the patch file with open(subworkflow_path / "bam_sort_stats_samtools.diff") as fh: @@ -152,7 +152,7 @@ def test_create_patch_try_apply_failed(self): patch_obj.patch("bam_sort_stats_samtools") # Check that a patch file with the correct name has been created - assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", "bam_sort_stats_samtools.diff"} + assert "bam_sort_stats_samtools.diff" in set(os.listdir(subworkflow_path)) update_obj = nf_core.subworkflows.SubworkflowUpdate( self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH @@ -188,7 +188,7 @@ def test_create_patch_update_success(self): patch_fn = "bam_sort_stats_samtools.diff" # Check that a patch file with the correct name has been created - assert set(os.listdir(swf_path)) == {"main.nf", "meta.yml", patch_fn} + assert patch_fn in set(os.listdir(swf_path)) # Check the 'modules.json' contains a patch file for the subworkflow modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -208,7 +208,7 @@ def test_create_patch_update_success(self): assert update_obj.update("bam_sort_stats_samtools") # Check that a patch file with the correct name has been created - assert set(os.listdir(swf_path)) == {"main.nf", "meta.yml", patch_fn} + assert patch_fn in set(os.listdir(swf_path)) # Check the 'modules.json' contains a patch file for the subworkflow modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -245,7 +245,7 @@ def test_create_patch_update_fail(self): patch_fn = "bam_sort_stats_samtools.diff" # Check that a patch file with the correct name has been created - assert set(os.listdir(swf_path)) == {"main.nf", "meta.yml", patch_fn} + assert patch_fn in set(os.listdir(swf_path)) # Check the 'modules.json' contains a patch 
file for the subworkflow modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -298,10 +298,10 @@ def test_remove_patch(self): subworkflow_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") # Check that a patch file with the correct name has been created - assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", "bam_sort_stats_samtools.diff"} + assert "bam_sort_stats_samtools.diff" in set(os.listdir(subworkflow_path)) with mock.patch.object(nf_core.components.patch.questionary, "confirm") as mock_questionary: mock_questionary.unsafe_ask.return_value = True patch_obj.remove("bam_sort_stats_samtools") # Check that the diff file has been removed - assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml"} + assert "bam_sort_stats_samtools.diff" not in set(os.listdir(subworkflow_path)) From 84cec26f4f8b350949f02d6a010b340a59d5cca0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Mon, 25 Nov 2024 15:28:28 +0100 Subject: [PATCH 061/164] Update tests/modules/test_patch.py --- tests/modules/test_patch.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/modules/test_patch.py b/tests/modules/test_patch.py index 1c23871cc..df24ce819 100644 --- a/tests/modules/test_patch.py +++ b/tests/modules/test_patch.py @@ -76,7 +76,7 @@ def test_create_patch_no_change(self): module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) # Check that no patch file has been added to the directory - assert "bismark-align.diff" in set(os.listdir(module_path)) + assert "bismark-align.diff" not in set(os.listdir(module_path)) # Check the 'modules.json' contains no patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) From 8b2bec4933fe41645a3fe9a5bd7db32d030a5237 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 25 Nov 2024 15:39:55 +0100 Subject: [PATCH 062/164] apply suggestions by @mashehu --- 
tests/modules/test_patch.py | 20 ++++++++++---------- tests/subworkflows/test_patch.py | 20 ++++++++++---------- 2 files changed, 20 insertions(+), 20 deletions(-) diff --git a/tests/modules/test_patch.py b/tests/modules/test_patch.py index df24ce819..f60827861 100644 --- a/tests/modules/test_patch.py +++ b/tests/modules/test_patch.py @@ -76,7 +76,7 @@ def test_create_patch_no_change(self): module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) # Check that no patch file has been added to the directory - assert "bismark-align.diff" not in set(os.listdir(module_path)) + assert not (module_path / "bismark-align.diff").exists() # Check the 'modules.json' contains no patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -94,7 +94,7 @@ def test_create_patch_change(self): patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" # Check that a patch file with the correct name has been created - assert patch_fn in set(os.listdir(module_path)) + assert (module_path / patch_fn).exists() # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -127,7 +127,7 @@ def test_create_patch_try_apply_successful(self): patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" # Check that a patch file with the correct name has been created - assert patch_fn in set(os.listdir(module_path)) + assert (module_path / patch_fn).exists() # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -153,7 +153,7 @@ def test_create_patch_try_apply_successful(self): update_obj.move_files_from_tmp_dir(BISMARK_ALIGN, install_dir, REPO_NAME, SUCCEED_SHA) # Check that a patch file with the correct name has been created - assert patch_fn in set(os.listdir(module_path)) + assert (module_path / patch_fn).exists() # Check the 'modules.json' contains a patch file 
for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -195,7 +195,7 @@ def test_create_patch_try_apply_failed(self): patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" # Check that a patch file with the correct name has been created - assert patch_fn in set(os.listdir(module_path)) + assert (module_path / patch_fn).exists() # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -234,7 +234,7 @@ def test_create_patch_update_success(self): patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" # Check that a patch file with the correct name has been created - assert patch_fn in set(os.listdir(module_path)) + assert (module_path / patch_fn).exists() # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -254,7 +254,7 @@ def test_create_patch_update_success(self): assert update_obj.update(BISMARK_ALIGN) # Check that a patch file with the correct name has been created - assert patch_fn in set(os.listdir(module_path)) + assert (module_path / patch_fn).exists() # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -295,7 +295,7 @@ def test_create_patch_update_fail(self): patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" # Check that a patch file with the correct name has been created - assert patch_fn in set(os.listdir(module_path)) + assert (module_path / patch_fn).exists() # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -349,7 +349,7 @@ def test_remove_patch(self): # Check that a patch file with the correct name has been created patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" - assert patch_fn in set(os.listdir(module_path)) + assert 
(module_path / patch_fn).exists() # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -361,7 +361,7 @@ def test_remove_patch(self): mock_questionary.unsafe_ask.return_value = True patch_obj.remove(BISMARK_ALIGN) # Check that the diff file has been removed - assert patch_fn not in set(os.listdir(module_path)) + assert not (module_path / patch_fn).exists() # Check that the 'modules.json' entry has been removed modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) diff --git a/tests/subworkflows/test_patch.py b/tests/subworkflows/test_patch.py index 3df575c3d..5bb6a6798 100644 --- a/tests/subworkflows/test_patch.py +++ b/tests/subworkflows/test_patch.py @@ -66,7 +66,7 @@ def test_create_patch_no_change(self): subworkflow_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") # Check that no patch file has been added to the directory - assert "bam_sort_stats_samtools.diff" not in set(os.listdir(subworkflow_path)) + assert not (subworkflow_path / "bam_sort_stats_samtools.diff").exists() def test_create_patch_change(self): """Test creating a patch when there is no change to the subworkflow""" @@ -79,7 +79,7 @@ def test_create_patch_change(self): subworkflow_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") # Check that a patch file with the correct name has been created - assert "bam_sort_stats_samtools.diff" in set(os.listdir(subworkflow_path)) + assert (subworkflow_path / "bam_sort_stats_samtools.diff").exists() # Check that the correct lines are in the patch file with open(subworkflow_path / "bam_sort_stats_samtools.diff") as fh: @@ -101,7 +101,7 @@ def test_create_patch_try_apply_successful(self): patch_obj.patch("bam_sort_stats_samtools") # Check that a patch file with the correct name has been created - assert "bam_sort_stats_samtools.diff" in set(os.listdir(subworkflow_path)) + 
assert (subworkflow_path / "bam_sort_stats_samtools.diff").exists() update_obj = nf_core.subworkflows.SubworkflowUpdate( self.pipeline_dir, sha=OLD_SHA, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH @@ -125,7 +125,7 @@ def test_create_patch_try_apply_successful(self): update_obj.move_files_from_tmp_dir("bam_sort_stats_samtools", install_dir, GITLAB_REPO, OLD_SHA) # Check that a patch file with the correct name has been created - assert "bam_sort_stats_samtools.diff" in set(os.listdir(subworkflow_path)) + assert (subworkflow_path / "bam_sort_stats_samtools.diff").exists() # Check that the correct lines are in the patch file with open(subworkflow_path / "bam_sort_stats_samtools.diff") as fh: @@ -152,7 +152,7 @@ def test_create_patch_try_apply_failed(self): patch_obj.patch("bam_sort_stats_samtools") # Check that a patch file with the correct name has been created - assert "bam_sort_stats_samtools.diff" in set(os.listdir(subworkflow_path)) + assert (subworkflow_path / "bam_sort_stats_samtools.diff").exists() update_obj = nf_core.subworkflows.SubworkflowUpdate( self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH @@ -188,7 +188,7 @@ def test_create_patch_update_success(self): patch_fn = "bam_sort_stats_samtools.diff" # Check that a patch file with the correct name has been created - assert patch_fn in set(os.listdir(swf_path)) + assert (swf_path / patch_fn).exists() # Check the 'modules.json' contains a patch file for the subworkflow modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -208,7 +208,7 @@ def test_create_patch_update_success(self): assert update_obj.update("bam_sort_stats_samtools") # Check that a patch file with the correct name has been created - assert patch_fn in set(os.listdir(swf_path)) + assert (swf_path / patch_fn).exists() # Check the 'modules.json' contains a patch file for the subworkflow modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -245,7 +245,7 
@@ def test_create_patch_update_fail(self): patch_fn = "bam_sort_stats_samtools.diff" # Check that a patch file with the correct name has been created - assert patch_fn in set(os.listdir(swf_path)) + assert (swf_path / patch_fn).exists() # Check the 'modules.json' contains a patch file for the subworkflow modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -298,10 +298,10 @@ def test_remove_patch(self): subworkflow_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") # Check that a patch file with the correct name has been created - assert "bam_sort_stats_samtools.diff" in set(os.listdir(subworkflow_path)) + assert (subworkflow_path / "bam_sort_stats_samtools.diff").exists() with mock.patch.object(nf_core.components.patch.questionary, "confirm") as mock_questionary: mock_questionary.unsafe_ask.return_value = True patch_obj.remove("bam_sort_stats_samtools") # Check that the diff file has been removed - assert "bam_sort_stats_samtools.diff" not in set(os.listdir(subworkflow_path)) + assert not (subworkflow_path / "bam_sort_stats_samtools.diff").exists() From 9e9d930ebbb16f717e8d3e51849cb9e7f41c0488 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 25 Nov 2024 16:18:45 +0100 Subject: [PATCH 063/164] remove def from nextflow.config and add trace_report_suffix param --- nf_core/pipeline-template/nextflow.config | 10 +++++----- nf_core/pipeline-template/nextflow_schema.json | 8 +++++++- 2 files changed, 12 insertions(+), 6 deletions(-) diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index 6970d0518..596c0e0b0 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -52,6 +52,7 @@ params { version = false {%- if test_config %} pipelines_testdata_base_path = 'https://raw.githubusercontent.com/nf-core/test-datasets/'{% endif %} + trace_report_suffix = new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss') {%- if 
nf_core_configs -%} // Config options @@ -249,22 +250,21 @@ set -C # No clobber - prevent output redirection from overwriting files. // Disable process selector warnings by default. Use debug profile to enable warnings. nextflow.enable.configProcessNamesValidation = false -def trace_timestamp = new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss') timeline { enabled = true - file = "${params.outdir}/pipeline_info/execution_timeline_${trace_timestamp}.html" + file = "${params.outdir}/pipeline_info/execution_timeline_${params.trace_report_suffix}.html" } report { enabled = true - file = "${params.outdir}/pipeline_info/execution_report_${trace_timestamp}.html" + file = "${params.outdir}/pipeline_info/execution_report_${params.trace_report_suffix}.html" } trace { enabled = true - file = "${params.outdir}/pipeline_info/execution_trace_${trace_timestamp}.txt" + file = "${params.outdir}/pipeline_info/execution_trace_${params.trace_report_suffix}.txt" } dag { enabled = true - file = "${params.outdir}/pipeline_info/pipeline_dag_${trace_timestamp}.html" + file = "${params.outdir}/pipeline_info/pipeline_dag_${params.trace_report_suffix}.html" } manifest { diff --git a/nf_core/pipeline-template/nextflow_schema.json b/nf_core/pipeline-template/nextflow_schema.json index 4136a0b49..389f9d104 100644 --- a/nf_core/pipeline-template/nextflow_schema.json +++ b/nf_core/pipeline-template/nextflow_schema.json @@ -229,7 +229,13 @@ "description": "Base URL or local path to location of pipeline test dataset files", "default": "https://raw.githubusercontent.com/nf-core/test-datasets/", "hidden": true - }{% endif %} + }{% endif %}, + "trace_report_suffix": { + "type": "string", + "fa_icon": "far calendar", + "description": "Suffix to add to the trace report filename. 
Default is the date and time in the format yyyy-MM-dd_HH-mm-ss.", + "hidden": true + } } } }, From 225699c07eb043b327e11301c9d19719d8282b04 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Mon, 25 Nov 2024 15:20:34 +0000 Subject: [PATCH 064/164] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index fed991f85..213306cf1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,7 @@ - fix workflow_dispatch trigger and parse more review comments in awsfulltest ([#3235](https://github.com/nf-core/tools/pull/3235)) - Add resource limits to Gitpod profile([#3255](https://github.com/nf-core/tools/pull/3255)) - Fix a typo ([#3268](https://github.com/nf-core/tools/pull/3268)) +- Remove `def` from `nextflow.config` and add `trace_report_suffix` param ([#3296](https://github.com/nf-core/tools/pull/3296)) ### Download From 805620b5dee7185dd9e5e4f3a409208fdf66c03a Mon Sep 17 00:00:00 2001 From: Edmund Miller Date: Mon, 25 Nov 2024 11:09:22 -0600 Subject: [PATCH 065/164] fix(#3297): Update warning message for pytest-workflow --- nf_core/subworkflows/lint/subworkflow_tests.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/subworkflows/lint/subworkflow_tests.py b/nf_core/subworkflows/lint/subworkflow_tests.py index 7ca825f04..8e9e62430 100644 --- a/nf_core/subworkflows/lint/subworkflow_tests.py +++ b/nf_core/subworkflows/lint/subworkflow_tests.py @@ -50,7 +50,7 @@ def subworkflow_tests(_, subworkflow: NFCoreComponent): subworkflow.warned.append( ( "test_dir_exists", - "nf-test directory is missing", + "Migrate pytest-workflow to nf-test", subworkflow.nftest_testdir, ) ) From 35eb958d39fe52b938e535c100afcb6bb19d1549 Mon Sep 17 00:00:00 2001 From: Edmund Miller Date: Tue, 26 Nov 2024 02:17:48 +0000 Subject: [PATCH 066/164] Add Python terminal env settings --- .vscode/settings.json | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.vscode/settings.json 
b/.vscode/settings.json index c4d5be0e3..5ffdff086 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -3,5 +3,7 @@ "python.testing.unittestEnabled": false, "python.testing.nosetestsEnabled": false, "python.testing.pytestArgs": ["tests", "-v", "--tb=short"], - "python.testing.autoTestDiscoverOnSaveEnabled": true + "python.testing.autoTestDiscoverOnSaveEnabled": true, + "python.terminal.activateEnvInCurrentTerminal": true, + "python.terminal.shellIntegration.enabled": true } From 9f5a95c5866b2db9ab70cafb8ad6a11b77df0930 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Tue, 26 Nov 2024 07:31:11 +0000 Subject: [PATCH 067/164] Update pre-commit hook astral-sh/ruff-pre-commit to v0.8.0 --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 28ce84bef..a7c7d38ce 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.7.4 + rev: v0.8.0 hooks: - id: ruff # linter args: [--fix, --exit-non-zero-on-fix] # sort imports and fix From 5965621076746c79d491e23d272d702cd0982d84 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Tue, 26 Nov 2024 07:32:01 +0000 Subject: [PATCH 068/164] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index fed991f85..47981fe05 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -46,6 +46,7 @@ - Update pre-commit hook astral-sh/ruff-pre-commit to v0.7.4 ([#3282](https://github.com/nf-core/tools/pull/3282)) - Update codecov/codecov-action action to v5 ([#3283](https://github.com/nf-core/tools/pull/3283)) - Update python:3.12-slim Docker digest to 2a6386a ([#3284](https://github.com/nf-core/tools/pull/3284)) +- Update pre-commit hook astral-sh/ruff-pre-commit to v0.8.0 ([#3299](https://github.com/nf-core/tools/pull/3299)) ## 
[v3.0.2 - Titanium Tapir Patch](https://github.com/nf-core/tools/releases/tag/3.0.2) - [2024-10-11] From f903ce1808b788805ba802809f288b49616913d2 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 26 Nov 2024 09:59:12 +0100 Subject: [PATCH 069/164] add validation.monochromeLogs to config --- nf_core/pipeline-template/nextflow.config | 1 + 1 file changed, 1 insertion(+) diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index 6970d0518..7e4e7b3e7 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -286,6 +286,7 @@ plugins { validation { defaultIgnoreParams = ["genomes"] + monochromeLogs = params.monochrome_logs help { enabled = true command = "nextflow run {{ name }} -profile --input samplesheet.csv --outdir " From d56c87cbebececdc306ad6acd6dff790d3b47280 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 26 Nov 2024 10:41:35 +0100 Subject: [PATCH 070/164] ignore trace_report_suffix default check --- nf_core/pipelines/schema.py | 1 + 1 file changed, 1 insertion(+) diff --git a/nf_core/pipelines/schema.py b/nf_core/pipelines/schema.py index 96ee3ffbb..2a8a9a192 100644 --- a/nf_core/pipelines/schema.py +++ b/nf_core/pipelines/schema.py @@ -96,6 +96,7 @@ def _update_validation_plugin_from_config(self) -> None: conf.get("validation.help.shortParameter", "help"), conf.get("validation.help.fullParameter", "helpFull"), conf.get("validation.help.showHiddenParameter", "showHidden"), + "trace_report_suffix", # report suffix should be ignored by default as it is a Java Date object ] # Help parameter should be ignored by default ignored_params_config_str = conf.get("validation.defaultIgnoreParams", "") ignored_params_config = [ From af1a1be117dd04258bc184f75cefa7c4ca48dcba Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Matthias=20H=C3=B6rtenhuber?= Date: Wed, 27 Nov 2024 15:42:15 +0000 Subject: [PATCH 071/164] update snapshots --- .../test_customisation_help.svg | 256 
+++++++++--------- .../test_create_app/test_type_custom.svg | 254 ++++++++--------- 2 files changed, 255 insertions(+), 255 deletions(-) diff --git a/tests/pipelines/__snapshots__/test_create_app/test_customisation_help.svg b/tests/pipelines/__snapshots__/test_create_app/test_customisation_help.svg index 450f1d303..c34bd8523 100644 --- a/tests/pipelines/__snapshots__/test_create_app/test_customisation_help.svg +++ b/tests/pipelines/__snapshots__/test_create_app/test_customisation_help.svg @@ -19,257 +19,257 @@ font-weight: 700; } - .terminal-333203530-matrix { + .terminal-4061415502-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-333203530-title { + .terminal-4061415502-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-333203530-r1 { fill: #c5c8c6 } -.terminal-333203530-r2 { fill: #e3e3e3 } -.terminal-333203530-r3 { fill: #989898 } -.terminal-333203530-r4 { fill: #e1e1e1 } -.terminal-333203530-r5 { fill: #4ebf71;font-weight: bold } -.terminal-333203530-r6 { fill: #1e1e1e } -.terminal-333203530-r7 { fill: #e2e2e2 } -.terminal-333203530-r8 { fill: #507bb3 } -.terminal-333203530-r9 { fill: #808080 } -.terminal-333203530-r10 { fill: #dde6ed;font-weight: bold } -.terminal-333203530-r11 { fill: #001541 } -.terminal-333203530-r12 { fill: #14191f } -.terminal-333203530-r13 { fill: #0178d4 } -.terminal-333203530-r14 { fill: #454a50 } -.terminal-333203530-r15 { fill: #e2e3e3;font-weight: bold } -.terminal-333203530-r16 { fill: #000000 } -.terminal-333203530-r17 { fill: #e4e4e4 } -.terminal-333203530-r18 { fill: #7ae998 } -.terminal-333203530-r19 { fill: #0a180e;font-weight: bold } -.terminal-333203530-r20 { fill: #008139 } -.terminal-333203530-r21 { fill: #fea62b;font-weight: bold } -.terminal-333203530-r22 { fill: #a7a9ab } -.terminal-333203530-r23 { fill: #e2e3e3 } + .terminal-4061415502-r1 { fill: #c5c8c6 } +.terminal-4061415502-r2 { fill: #e3e3e3 } 
+.terminal-4061415502-r3 { fill: #989898 } +.terminal-4061415502-r4 { fill: #e1e1e1 } +.terminal-4061415502-r5 { fill: #4ebf71;font-weight: bold } +.terminal-4061415502-r6 { fill: #1e1e1e } +.terminal-4061415502-r7 { fill: #e2e2e2 } +.terminal-4061415502-r8 { fill: #507bb3 } +.terminal-4061415502-r9 { fill: #808080 } +.terminal-4061415502-r10 { fill: #dde6ed;font-weight: bold } +.terminal-4061415502-r11 { fill: #001541 } +.terminal-4061415502-r12 { fill: #14191f } +.terminal-4061415502-r13 { fill: #0178d4 } +.terminal-4061415502-r14 { fill: #454a50 } +.terminal-4061415502-r15 { fill: #e2e3e3;font-weight: bold } +.terminal-4061415502-r16 { fill: #000000 } +.terminal-4061415502-r17 { fill: #e4e4e4 } +.terminal-4061415502-r18 { fill: #7ae998 } +.terminal-4061415502-r19 { fill: #0a180e;font-weight: bold } +.terminal-4061415502-r20 { fill: #008139 } +.terminal-4061415502-r21 { fill: #fea62b;font-weight: bold } +.terminal-4061415502-r22 { fill: #a7a9ab } +.terminal-4061415502-r23 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core pipelines create + nf-core pipelines create - + - - ⭘nf-core pipelines create â€” Create a new pipeline with the nf-core pipeline templa
 - - -Template features - - -▊▔▔▔▔▔▔▔▔▎ -▊▎        Toggle all features -▊▁▁▁▁▁▁▁▁▎ -▊▔▔▔▔▔▔▔▔▎▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -▊▎        Use a GitHub Create a GitHub  Show help  -▊▁▁▁▁▁▁▁▁▎        repository.repository for the â–â–â–â–â–â–â–â–â–â–â–â–â–â–â–â– -pipeline. - -▊▔▔▔▔▔▔▔▔▎▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -▊▎        Add Github CI testsThe pipeline will  Show help  -▊▁▁▁▁▁▁▁▁▎include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ -actions for Continuous▃▃ -Integration (CI)  -testing - -▊▔▔▔▔▔▔▔▔▎▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -▊▎        Use reference genomesThe pipeline will be  Hide help  -▊▁▁▁▁▁▁▁▁▎configured to use a â–â–â–â–â–â–â–â–â–â–â–â–â–â–â–â– -copy of the most  -common reference  -genome files from  -iGenomes - - -Nf-core pipelines are configured to use a copy of the most common reference  -genome files. - -By selecting this option, your pipeline will include a configuration file  -specifying the paths to these files. - -The required code to use these files will also be included in the template.  -When the pipeline user provides an appropriate genome key, the pipeline will -automatically download the required reference files. -▅▅ -For more information about reference genomes in nf-core pipelines, see the  - - -▊▔▔▔▔▔▔▔▔▎▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -▊▎        Add Github badgesThe README.md file of  Show help  -▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Back  Continue  -▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - d Toggle dark mode  q Quit  a Toggle all  + + ⭘nf-core pipelines create â€” Create a new pipeline with the nf-core pipeline templa
 + + +Template features + + +▊▔▔▔▔▔▔▔▔▎ +▊▎        Toggle all features +▊▁▁▁▁▁▁▁▁▎ +▊▔▔▔▔▔▔▔▔▎▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +▊▎        Use a GitHub Create a GitHub  Show help  +▊▁▁▁▁▁▁▁▁▎        repository.repository for the â–â–â–â–â–â–â–â–â–â–â–â–â–â–â–â– +pipeline. + +▊▔▔▔▔▔▔▔▔▎▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +▊▎        Add Github CI testsThe pipeline will  Show help  +▊▁▁▁▁▁▁▁▁▎include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +actions for Continuous▅▅ +Integration (CI)  +testing + +▊▔▔▔▔▔▔▔▔▎▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +▊▎        Use reference genomesThe pipeline will be  Hide help  +▊▁▁▁▁▁▁▁▁▎configured to use a â–â–â–â–â–â–â–â–â–â–â–â–â–â–â–â– +copy of the most  +common reference  +genome files from  +iGenomes + + +Nf-core pipelines are configured to use a copy of the most common reference  +genome files. + +By selecting this option, your pipeline will include a configuration file  +specifying the paths to these files. + +The required code to use these files will also be included in the template.  +When the pipeline user provides an appropriate genome key, the pipeline will +automatically download the required reference files. 
+▅▅ +For more information about reference genomes in nf-core pipelines, see the  + + +▊▔▔▔▔▔▔▔▔▎▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +▊▎        Add Github badgesThe README.md file of  Show help  +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Back  Continue  +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + d Toggle dark mode  q Quit  a Toggle all  diff --git a/tests/pipelines/__snapshots__/test_create_app/test_type_custom.svg b/tests/pipelines/__snapshots__/test_create_app/test_type_custom.svg index 6e178ba84..b8dea0560 100644 --- a/tests/pipelines/__snapshots__/test_create_app/test_type_custom.svg +++ b/tests/pipelines/__snapshots__/test_create_app/test_type_custom.svg @@ -19,256 +19,256 @@ font-weight: 700; } - .terminal-3425198753-matrix { + .terminal-1727160999-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3425198753-title { + .terminal-1727160999-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3425198753-r1 { fill: #c5c8c6 } -.terminal-3425198753-r2 { fill: #e3e3e3 } -.terminal-3425198753-r3 { fill: #989898 } -.terminal-3425198753-r4 { fill: #e1e1e1 } -.terminal-3425198753-r5 { fill: #4ebf71;font-weight: bold } -.terminal-3425198753-r6 { fill: #1e1e1e } -.terminal-3425198753-r7 { fill: #0178d4 } -.terminal-3425198753-r8 { fill: #e2e2e2 } -.terminal-3425198753-r9 { fill: #507bb3 } -.terminal-3425198753-r10 { fill: #808080 } -.terminal-3425198753-r11 { fill: #dde6ed;font-weight: bold } -.terminal-3425198753-r12 { fill: #001541 } -.terminal-3425198753-r13 { fill: #14191f } -.terminal-3425198753-r14 { fill: #454a50 } -.terminal-3425198753-r15 { fill: #7ae998 } -.terminal-3425198753-r16 { fill: #e2e3e3;font-weight: bold } -.terminal-3425198753-r17 { fill: #0a180e;font-weight: bold } -.terminal-3425198753-r18 { fill: #000000 } -.terminal-3425198753-r19 { fill: #008139 } -.terminal-3425198753-r20 { fill: #fea62b;font-weight: bold } -.terminal-3425198753-r21 { fill: #a7a9ab } 
-.terminal-3425198753-r22 { fill: #e2e3e3 } + .terminal-1727160999-r1 { fill: #c5c8c6 } +.terminal-1727160999-r2 { fill: #e3e3e3 } +.terminal-1727160999-r3 { fill: #989898 } +.terminal-1727160999-r4 { fill: #e1e1e1 } +.terminal-1727160999-r5 { fill: #4ebf71;font-weight: bold } +.terminal-1727160999-r6 { fill: #1e1e1e } +.terminal-1727160999-r7 { fill: #0178d4 } +.terminal-1727160999-r8 { fill: #e2e2e2 } +.terminal-1727160999-r9 { fill: #507bb3 } +.terminal-1727160999-r10 { fill: #808080 } +.terminal-1727160999-r11 { fill: #dde6ed;font-weight: bold } +.terminal-1727160999-r12 { fill: #001541 } +.terminal-1727160999-r13 { fill: #14191f } +.terminal-1727160999-r14 { fill: #454a50 } +.terminal-1727160999-r15 { fill: #7ae998 } +.terminal-1727160999-r16 { fill: #e2e3e3;font-weight: bold } +.terminal-1727160999-r17 { fill: #0a180e;font-weight: bold } +.terminal-1727160999-r18 { fill: #000000 } +.terminal-1727160999-r19 { fill: #008139 } +.terminal-1727160999-r20 { fill: #fea62b;font-weight: bold } +.terminal-1727160999-r21 { fill: #a7a9ab } +.terminal-1727160999-r22 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core pipelines create + nf-core pipelines create - + - - ⭘nf-core pipelines create â€” Create a new pipeline with the nf-core pipeline templa
 - - -Template features - - -▊▔▔▔▔▔▔▔▔▎ -▊▎        Toggle all features -▊▁▁▁▁▁▁▁▁▎ -▊▔▔▔▔▔▔▔▔▎▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -▊▎        Use a GitHub Create a GitHub  Show help  -▊▁▁▁▁▁▁▁▁▎        repository.repository for the â–â–â–â–â–â–â–â–â–â–â–â–â–â–â–â– -pipeline. - -▊▔▔▔▔▔▔▔▔▎▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -▊▎        Add Github CI testsThe pipeline will  Show help  -▊▁▁▁▁▁▁▁▁▎include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ -actions for Continuous -Integration (CI) â–„â–„ -testing - -▊▔▔▔▔▔▔▔▔▎▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -▊▎        Use reference genomesThe pipeline will be  Show help  -▊▁▁▁▁▁▁▁▁▎configured to use a â–â–â–â–â–â–â–â–â–â–â–â–â–â–â–â– -copy of the most  -common reference  -genome files from  -iGenomes - -▊▔▔▔▔▔▔▔▔▎▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -▊▎        Add Github badgesThe README.md file of  Show help  -▊▁▁▁▁▁▁▁▁▎the pipeline will â–â–â–â–â–â–â–â–â–â–â–â–â–â–â–â– -include GitHub badges - -▊▔▔▔▔▔▔▔▔▎▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -▊▎        Add configuration The pipeline will  Show help  -▊▁▁▁▁▁▁▁▁▎        filesinclude configuration â–â–â–â–â–â–â–â–â–â–â–â–â–â–â–â– -profiles containing  -custom parameters  -required to run  -nf-core pipelines at  -different institutions - -▊▔▔▔▔▔▔▔▔▎▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -▊▎        Use code lintersThe pipeline will  Show help  -▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Back  Continue  -▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - d Toggle dark mode  q Quit  a Toggle all  + + ⭘nf-core pipelines create â€” Create a new pipeline with the nf-core pipeline templa
 + + +Template features + + +▊▔▔▔▔▔▔▔▔▎ +▊▎        Toggle all features +▊▁▁▁▁▁▁▁▁▎ +▊▔▔▔▔▔▔▔▔▎▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +▊▎        Use a GitHub Create a GitHub  Show help  +▊▁▁▁▁▁▁▁▁▎        repository.repository for the â–â–â–â–â–â–â–â–â–â–â–â–â–â–â–â– +pipeline. + +▊▔▔▔▔▔▔▔▔▎▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +▊▎        Add Github CI testsThe pipeline will  Show help  +▊▁▁▁▁▁▁▁▁▎include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +actions for Continuous +Integration (CI) â–‡â–‡ +testing + +▊▔▔▔▔▔▔▔▔▎▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +▊▎        Use reference genomesThe pipeline will be  Show help  +▊▁▁▁▁▁▁▁▁▎configured to use a â–â–â–â–â–â–â–â–â–â–â–â–â–â–â–â– +copy of the most  +common reference  +genome files from  +iGenomes + +▊▔▔▔▔▔▔▔▔▎▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +▊▎        Add Github badgesThe README.md file of  Show help  +▊▁▁▁▁▁▁▁▁▎the pipeline will â–â–â–â–â–â–â–â–â–â–â–â–â–â–â–â– +include GitHub badges + +▊▔▔▔▔▔▔▔▔▎▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +▊▎        Add configuration The pipeline will  Show help  +▊▁▁▁▁▁▁▁▁▎        filesinclude configuration â–â–â–â–â–â–â–â–â–â–â–â–â–â–â–â– +profiles containing  +custom parameters  +required to run  +nf-core pipelines at  +different institutions + +▊▔▔▔▔▔▔▔▔▎▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +▊▎        Use code lintersThe pipeline will  Show help  +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Back  Continue  +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + d Toggle dark mode  q Quit  a Toggle all  From d48b3004b04ba001af022d47485e1f11bc5429c7 Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 27 Nov 2024 18:42:54 +0100 Subject: [PATCH 072/164] add more tests --- nf_core/commands_pipelines.py | 2 +- nf_core/pipelines/create/create.py | 2 +- nf_core/pipelines/rocrate.py | 24 +----------- tests/pipelines/test_rocrate.py | 63 +++++++++++++++++++++++++++++- 4 files changed, 65 insertions(+), 26 deletions(-) diff --git a/nf_core/commands_pipelines.py b/nf_core/commands_pipelines.py index 9699dc53a..3b28f4979 100644 --- a/nf_core/commands_pipelines.py +++ 
b/nf_core/commands_pipelines.py @@ -299,7 +299,7 @@ def pipelines_rocrate( zip_path = Path(zip_path) try: rocrate_obj = ROCrate(pipeline_dir, pipeline_version) - rocrate_obj.create_rocrate(pipeline_dir, json_path=json_path, zip_path=zip_path) + rocrate_obj.create_rocrate(json_path=json_path, zip_path=zip_path) except (UserWarning, LookupError, FileNotFoundError) as e: log.error(e) sys.exit(1) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 4f6fa1238..dba0a40ca 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -360,7 +360,7 @@ def render_template(self) -> None: if self.config.skip_features is None or "ro-crate" not in self.config.skip_features: # Create the RO-Crate metadata file rocrate_obj = ROCrate(self.outdir) - rocrate_obj.create_rocrate(self.outdir, json_path=self.outdir / "ro-crate-metadata.json") + rocrate_obj.create_rocrate(json_path=self.outdir / "ro-crate-metadata.json") # Update the .nf-core.yml with linting configurations self.fix_linting() diff --git a/nf_core/pipelines/rocrate.py b/nf_core/pipelines/rocrate.py index d4e605662..d5a51eddf 100644 --- a/nf_core/pipelines/rocrate.py +++ b/nf_core/pipelines/rocrate.py @@ -83,9 +83,7 @@ def __init__(self, pipeline_dir: Path, version="") -> None: setup_requests_cachedir() - def create_rocrate( - self, outdir: Path, json_path: Union[None, Path] = None, zip_path: Union[None, Path] = None - ) -> bool: + def create_rocrate(self, json_path: Union[None, Path] = None, zip_path: Union[None, Path] = None) -> bool: """ Create an RO Crate for a pipeline @@ -95,12 +93,6 @@ def create_rocrate( zip_path (Path): Path to the zip file """ - # Set input paths - try: - self.set_crate_paths(outdir) - except OSError as e: - log.error(e) - sys.exit(1) # Check that the checkout pipeline version is the same as the requested version if self.version != "": @@ -337,20 +329,6 @@ def add_main_authors(self, wf_file: rocrate.model.entity.Entity) -> None: 
if author in authors: wf_file.append_to("maintainer", author_entitity) - def set_crate_paths(self, path: Path) -> None: - """Given a pipeline name, directory, or path, set wf_crate_filename""" - - if path.is_dir(): - self.pipeline_dir = path - # wf_crate_filename = path / "ro-crate-metadata.json" - elif path.is_file(): - self.pipeline_dir = path.parent - # wf_crate_filename = path - - # Check that the schema file exists - if self.pipeline_dir is None: - raise OSError(f"Could not find pipeline '{path}'") - def get_orcid(name: str) -> Optional[str]: """ diff --git a/tests/pipelines/test_rocrate.py b/tests/pipelines/test_rocrate.py index 2e14878da..01a77ecd7 100644 --- a/tests/pipelines/test_rocrate.py +++ b/tests/pipelines/test_rocrate.py @@ -1,8 +1,10 @@ """Test the nf-core pipelines rocrate command""" import shutil +import tempfile from pathlib import Path +import git import rocrate.rocrate from git import Repo @@ -25,6 +27,7 @@ def setUp(self) -> None: repo = Repo(self.pipeline_dir) repo.git.add(A=True) repo.index.commit("Initial commit") + self.rocrate_obj = nf_core.pipelines.rocrate.ROCrate(self.pipeline_dir) def tearDown(self): """Clean up temporary files and folders""" @@ -36,7 +39,7 @@ def test_rocrate_creation(self): """Run the nf-core rocrate command""" # Run the command - self.rocrate_obj = nf_core.pipelines.rocrate.ROCrate(self.pipeline_dir) + self.rocrate_obj assert self.rocrate_obj.create_rocrate(self.pipeline_dir, self.pipeline_dir) # Check that the crate was created @@ -64,3 +67,61 @@ def test_rocrate_creation(self): # check that it is set as author of the main entity if crate.mainEntity is not None: self.assertEqual(crate.mainEntity["author"][0].id, entity_json["@id"]) + + def test_rocrate_creation_wrong_pipeline_dir(self): + """Run the nf-core rocrate command with a wrong pipeline directory""" + # Run the command + + # Check that it raises a UserWarning + with self.assertRaises(UserWarning): + nf_core.pipelines.rocrate.ROCrate(self.pipeline_dir / 
"bad_dir") + + # assert that the crate was not created + self.assertFalse(Path(self.pipeline_dir / "bad_dir", "ro-crate-metadata.json").exists()) + + def test_rocrate_creation_with_wrong_version(self): + """Run the nf-core rocrate command with a pipeline version""" + # Run the command + + self.rocrate_obj = nf_core.pipelines.rocrate.ROCrate(self.pipeline_dir, version="1.0.0") + + # Check that the crate was created + with self.assertRaises(SystemExit): + assert self.rocrate_obj.create_rocrate(self.pipeline_dir, self.pipeline_dir) + + def test_rocrate_creation_without_git(self): + """Run the nf-core rocrate command with a pipeline version""" + + self.rocrate_obj = nf_core.pipelines.rocrate.ROCrate(self.pipeline_dir, version="1.0.0") + # remove git repo + shutil.rmtree(self.pipeline_dir / ".git") + # Check that the crate was created + with self.assertRaises(SystemExit): + assert self.rocrate_obj.create_rocrate(self.pipeline_dir, self.pipeline_dir) + + def test_rocrate_creation_to_zip(self): + """Run the nf-core rocrate command with a zip output""" + assert self.rocrate_obj.create_rocrate(self.pipeline_dir, zip_path=self.pipeline_dir) + # Check that the crate was created + self.assertTrue(Path(self.pipeline_dir, "ro-crate.crate.zip").exists()) + + def test_rocrate_creation_for_fetchngs(self): + """Run the nf-core rocrate command with nf-core/fetchngs""" + tmp_dir = Path(tempfile.mkdtemp()) + # git clone nf-core/fetchngs + git.Repo.clone_from("https://github.com/nf-core/fetchngs", tmp_dir / "fetchngs") + # Run the command + self.rocrate_obj = nf_core.pipelines.rocrate.ROCrate(tmp_dir / "fetchngs", version="1.12.0") + assert self.rocrate_obj.create_rocrate(tmp_dir / "fetchngs", self.pipeline_dir) + + # Check that Sateesh Peri is mentioned in creator field + + crate = rocrate.rocrate.ROCrate(self.pipeline_dir) + entities = crate.get_entities() + for entity in entities: + entity_json = entity.as_jsonld() + if entity_json["@id"] == "#main.nf": + assert 
"https://orcid.org/0000-0002-9879-9070" in entity_json["creator"] + + # Clean up + shutil.rmtree(tmp_dir) From 65d74d58210f6f509207967ee5e888a3c14597ab Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 27 Nov 2024 23:15:32 +0100 Subject: [PATCH 073/164] enable zip output --- nf_core/pipelines/rocrate.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/nf_core/pipelines/rocrate.py b/nf_core/pipelines/rocrate.py index d5a51eddf..915f203f0 100644 --- a/nf_core/pipelines/rocrate.py +++ b/nf_core/pipelines/rocrate.py @@ -122,7 +122,6 @@ def create_rocrate(self, json_path: Union[None, Path] = None, zip_path: Union[No log.info(f"Saving metadata file to '{json_path}'") self.crate.metadata.write(json_path) - return True # Save the whole crate zip file if zip_path is not None: @@ -130,11 +129,13 @@ def create_rocrate(self, json_path: Union[None, Path] = None, zip_path: Union[No zip_path = zip_path / "ro-crate.crate.zip" log.info(f"Saving zip file '{zip_path}") self.crate.write_zip(zip_path) - return True + if json_path is None and zip_path is None: log.error("Please provide a path to save the ro-crate file or the zip file.") return False + return True + def make_workflow_rocrate(self) -> None: """ Create an RO Crate for a pipeline From 0d337393f455c8b5cc8256cf7e66aac477988e11 Mon Sep 17 00:00:00 2001 From: Louis LE NEZET <58640615+LouisLeNezet@users.noreply.github.com> Date: Thu, 28 Nov 2024 10:08:28 +0100 Subject: [PATCH 074/164] Move modules config import after base.config --- nf_core/pipeline-template/nextflow.config | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index 6970d0518..a9aa9c553 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -76,6 +76,11 @@ params { includeConfig 'conf/base.config' {%- else %} +{% if modules -%} +// Load modules.config for DSL2 module specific options 
+includeConfig 'conf/modules.config' +{%- endif %} + process { // TODO nf-core: Check the defaults for all processes cpus = { 1 * task.attempt } @@ -316,8 +321,3 @@ validation { }{% endif %} } {%- endif %} - -{% if modules -%} -// Load modules.config for DSL2 module specific options -includeConfig 'conf/modules.config' -{%- endif %} From 43287c654f41e8e7dcae44be6f21215174ae61b2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Thu, 28 Nov 2024 14:32:30 +0100 Subject: [PATCH 075/164] Update nf_core/pipelines/create/template_features.yml --- nf_core/pipelines/create/template_features.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/nf_core/pipelines/create/template_features.yml b/nf_core/pipelines/create/template_features.yml index e2293567e..9841879e8 100644 --- a/nf_core/pipelines/create/template_features.yml +++ b/nf_core/pipelines/create/template_features.yml @@ -442,6 +442,9 @@ rocrate: This will add a `ro-crate-metadata.json` file to describe the pipeline. nfcore_pipelines: False custom_pipelines: True + linting: + files_warn: + - "ro-crate-metadata.json" vscode: skippable_paths: - ".vscode" From 2eec420b1db2f568a2607350f400c7cb5cf66a3d Mon Sep 17 00:00:00 2001 From: Louis Le Nezet Date: Thu, 28 Nov 2024 15:46:29 +0100 Subject: [PATCH 076/164] Update documentation --- .../pipeline-template/.github/CONTRIBUTING.md | 10 +++---- nf_core/pipeline-template/docs/usage.md | 29 +++++++++---------- 2 files changed, 18 insertions(+), 21 deletions(-) diff --git a/nf_core/pipeline-template/.github/CONTRIBUTING.md b/nf_core/pipeline-template/.github/CONTRIBUTING.md index 0200ea26c..3e6b96008 100644 --- a/nf_core/pipeline-template/.github/CONTRIBUTING.md +++ b/nf_core/pipeline-template/.github/CONTRIBUTING.md @@ -1,4 +1,4 @@ -# {{ name }}: Contributing Guidelines +# `{{ name }}`: Contributing Guidelines Hi there! Many thanks for taking an interest in improving {{ name }}. 
@@ -66,7 +66,7 @@ These tests are run both with the latest available version of `Nextflow` and als - On your own fork, make a new branch `patch` based on `upstream/master`. - Fix the bug, and bump version (X.Y.Z+1). -- A PR should be made on `master` from patch to directly this particular bug. +- A PR should be made on `master` from patch to directly adress this particular bug. {% if is_nfcore -%} @@ -78,13 +78,13 @@ For further information/help, please consult the [{{ name }} documentation](http ## Pipeline contribution conventions -To make the {{ name }} code and processing logic more understandable for new contributors and to ensure quality, we semi-standardise the way the code and other contributions are written. +To make the `{{ name }}` code and processing logic more understandable for new contributors and to ensure quality, we semi-standardise the way the code and other contributions are written. ### Adding a new step If you wish to contribute a new step, please use the following coding standards: -1. Define the corresponding input channel into your new process from the expected previous process channel +1. Define the corresponding input channel into your new process from the expected previous process channel. 2. Write the process block (see below). 3. Define the output channel if needed (see below). 4. Add any new parameters to `nextflow.config` with a default (see below). @@ -99,7 +99,7 @@ If you wish to contribute a new step, please use the following coding standards: ### Default values -Parameters should be initialised / defined with default values in `nextflow.config` under the `params` scope. +Parameters should be initialised / defined with default values within the `params` scope in `nextflow.config`. Once there, use `nf-core pipelines schema build` to add to `nextflow_schema.json`. 
diff --git a/nf_core/pipeline-template/docs/usage.md b/nf_core/pipeline-template/docs/usage.md index 67fda7865..f1d0f2bbb 100644 --- a/nf_core/pipeline-template/docs/usage.md +++ b/nf_core/pipeline-template/docs/usage.md @@ -79,9 +79,8 @@ If you wish to repeatedly use the same parameters for multiple runs, rather than Pipeline settings can be provided in a `yaml` or `json` file via `-params-file `. -:::warning -Do not use `-c ` to specify parameters as this will result in errors. Custom config files specified with `-c` must only be used for [tuning process resource specifications](https://nf-co.re/docs/usage/configuration#tuning-workflow-resources), other infrastructural tweaks (such as output directories), or module arguments (args). -::: +> [!WARNING] +> Do not use `-c ` to specify parameters as this will result in errors. Custom config files specified with `-c` must only be used for [tuning process resource specifications](https://nf-co.re/docs/usage/configuration#tuning-workflow-resources), other infrastructural tweaks (such as output directories), or module arguments (args). The above pipeline run specified with a params file in yaml format: @@ -110,7 +109,7 @@ nextflow pull {{ name }} ### Reproducibility -It is a good idea to specify a pipeline version when running the pipeline on your data. This ensures that a specific version of the pipeline code and software are used when you run your pipeline. If you keep using the same tag, you'll be running the same version of the pipeline, even if there have been changes to the code since. +It is a good idea to specify the pipeline version when running the pipeline on your data. This ensures that a specific version of the pipeline code and software are used when you run your pipeline. If you keep using the same tag, you'll be running the same version of the pipeline, even if there have been changes to the code since. 
First, go to the [{{ name }} releases page](https://github.com/{{ name }}/releases) and find the latest pipeline version - numeric only (eg. `1.3.1`). Then specify this when running the pipeline with `-r` (one hyphen) - eg. `-r 1.3.1`. Of course, you can switch to another version by changing the number after the `-r` flag. @@ -118,15 +117,13 @@ This version number will be logged in reports when you run the pipeline, so that To further assist in reproducibility, you can use share and reuse [parameter files](#running-the-pipeline) to repeat pipeline runs with the same settings without having to write out a command with every single parameter. -:::tip -If you wish to share such profile (such as upload as supplementary material for academic publications), make sure to NOT include cluster specific paths to files, nor institutional specific profiles. -::: +> [!TIP] +> If you wish to share such profile (such as upload as supplementary material for academic publications), make sure to NOT include cluster specific paths to files, nor institutional specific profiles. ## Core Nextflow arguments -:::note -These options are part of Nextflow and use a _single_ hyphen (pipeline parameters use a double-hyphen). -::: +> [!NOTE] +> These options are part of Nextflow and use a _single_ hyphen (pipeline parameters use a double-hyphen) ### `-profile` @@ -134,13 +131,12 @@ Use this parameter to choose a configuration profile. Profiles can give configur Several generic profiles are bundled with the pipeline which instruct the pipeline to use software packaged using different methods (Docker, Singularity, Podman, Shifter, Charliecloud, Apptainer, Conda) - see below. -:::info -We highly recommend the use of Docker or Singularity containers for full pipeline reproducibility, however when this is not possible, Conda is also supported. 
-::: +> [!IMPORTANT] +> We highly recommend the use of Docker or Singularity containers for full pipeline reproducibility, however when this is not possible, Conda is also supported. {%- if nf_core_configs %} -The pipeline also dynamically loads configurations from [https://github.com/nf-core/configs](https://github.com/nf-core/configs) when it runs, making multiple config profiles for various institutional clusters available at run time. For more information and to see if your system is available in these configs please see the [nf-core/configs documentation](https://github.com/nf-core/configs#documentation). +The pipeline also dynamically loads configurations from [https://github.com/nf-core/configs](https://github.com/nf-core/configs) when it runs, making multiple config profiles for various institutional clusters available at run time. For more information and to check if your system is suported, please see the [nf-core/configs documentation](https://github.com/nf-core/configs#documentation). {% else %} {% endif %} Note that multiple profiles can be loaded, for example: `-profile test,docker` - the order of arguments is important! @@ -185,13 +181,14 @@ Specify the path to a specific config file (this is a core Nextflow command). Se ### Resource requests -Whilst the default requirements set within the pipeline will hopefully work for most people and with most input data, you may find that you want to customise the compute resources that the pipeline requests. Each step in the pipeline has a default set of requirements for number of CPUs, memory and time. For most of the steps in the pipeline, if the job exits with any of the error codes specified [here](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/conf/base.config#L18) it will automatically be resubmitted with higher requests (2 x original, then 3 x original). If it still fails after the third attempt then the pipeline execution is stopped. 
+Whilst the default requirements set within the pipeline will hopefully work for most people and with most input data, you may find that you want to customise the compute resources that the pipeline requests. Each step in the pipeline has a default set of requirements for number of CPUs, memory and time. For most of the pipeline steps, if the job exits with any of the error codes specified [here](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/conf/base.config#L18) it will automatically be resubmitted with higher resources request (2 x original, then 3 x original). If it still fails after the third attempt then the pipeline execution is stopped. To change the resource requests, please see the [max resources](https://nf-co.re/docs/usage/configuration#max-resources) and [tuning workflow resources](https://nf-co.re/docs/usage/configuration#tuning-workflow-resources) section of the nf-core website. ### Custom Containers -In some cases you may wish to change which container or conda environment a step of the pipeline uses for a particular tool. By default nf-core pipelines use containers and software from the [biocontainers](https://biocontainers.pro/) or [bioconda](https://bioconda.github.io/) projects. However in some cases the pipeline specified version maybe out of date. +In some cases, you may wish to change the container or conda environment used by a pipeline steps for a particular tool. By default, nf-core pipelines use containers and software from the [biocontainers](https://biocontainers.pro/) or [bioconda](https://bioconda.github.io/) projects. However, in some cases the pipeline specified version maybe out of date. + To use a different container from the default container or conda environment specified in a pipeline, please see the [updating tool versions](https://nf-co.re/docs/usage/configuration#updating-tool-versions) section of the nf-core website. 
From b12459a20b1742b8246961490e000149d7a4cfd4 Mon Sep 17 00:00:00 2001 From: Louis Le Nezet Date: Thu, 28 Nov 2024 16:21:08 +0100 Subject: [PATCH 077/164] Update CHANGELOG --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 36ea94084..8cde2a4d8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,6 +11,7 @@ - Add resource limits to Gitpod profile([#3255](https://github.com/nf-core/tools/pull/3255)) - Fix a typo ([#3268](https://github.com/nf-core/tools/pull/3268)) - Remove `def` from `nextflow.config` and add `trace_report_suffix` param ([#3296](https://github.com/nf-core/tools/pull/3296)) +- Fix some typo and improve writing in `usage.md` and `CONTRIBUTING.md` ([#3302](https://github.com/nf-core/tools/pull/3302)) ### Download From c8f22cfe7f7cd97718741949474304bb99cb8e6f Mon Sep 17 00:00:00 2001 From: Louis Le Nezet Date: Thu, 28 Nov 2024 16:24:26 +0100 Subject: [PATCH 078/164] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 36ea94084..f418dc0af 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,6 +11,7 @@ - Add resource limits to Gitpod profile([#3255](https://github.com/nf-core/tools/pull/3255)) - Fix a typo ([#3268](https://github.com/nf-core/tools/pull/3268)) - Remove `def` from `nextflow.config` and add `trace_report_suffix` param ([#3296](https://github.com/nf-core/tools/pull/3296)) +- Move `includeConfig 'conf/modules.config'` next to `includeConfig 'conf/base.config'` to not overwrite tests profiles configurations ([#3301](https://github.com/nf-core/tools/pull/3301)) ### Download From 07984beab52051ac1bd7d1728a49e368aee480cd Mon Sep 17 00:00:00 2001 From: LouisLeNezet Date: Thu, 28 Nov 2024 18:42:13 +0100 Subject: [PATCH 079/164] Fix linting --- nf_core/pipeline-template/docs/usage.md | 1 - 1 file changed, 1 deletion(-) diff --git a/nf_core/pipeline-template/docs/usage.md b/nf_core/pipeline-template/docs/usage.md index 
f1d0f2bbb..16e6220aa 100644 --- a/nf_core/pipeline-template/docs/usage.md +++ b/nf_core/pipeline-template/docs/usage.md @@ -189,7 +189,6 @@ To change the resource requests, please see the [max resources](https://nf-co.re In some cases, you may wish to change the container or conda environment used by a pipeline steps for a particular tool. By default, nf-core pipelines use containers and software from the [biocontainers](https://biocontainers.pro/) or [bioconda](https://bioconda.github.io/) projects. However, in some cases the pipeline specified version maybe out of date. - To use a different container from the default container or conda environment specified in a pipeline, please see the [updating tool versions](https://nf-co.re/docs/usage/configuration#updating-tool-versions) section of the nf-core website. ### Custom Tool Arguments From 82d6797edc306ad39ab6488d450f66b7ba92182b Mon Sep 17 00:00:00 2001 From: LouisLeNezet Date: Thu, 28 Nov 2024 18:45:27 +0100 Subject: [PATCH 080/164] Update CHANGELOG --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 06f584d6e..d778b1a8d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,7 +12,7 @@ - Fix a typo ([#3268](https://github.com/nf-core/tools/pull/3268)) - Remove `def` from `nextflow.config` and add `trace_report_suffix` param ([#3296](https://github.com/nf-core/tools/pull/3296)) - Move `includeConfig 'conf/modules.config'` next to `includeConfig 'conf/base.config'` to not overwrite tests profiles configurations ([#3301](https://github.com/nf-core/tools/pull/3301)) -- Fix some typo and improve writing in `usage.md` and `CONTRIBUTING.md` ([#3302](https://github.com/nf-core/tools/pull/3302)) +- Fix some typos and improve writing in `usage.md` and `CONTRIBUTING.md` ([#3302](https://github.com/nf-core/tools/pull/3302)) ### Download From ac3fbc6497417d449463016159e0fc3e94f23781 Mon Sep 17 00:00:00 2001 From: Matthias Zepper Date: Tue, 19 Nov 2024 17:40:07 
+0100 Subject: [PATCH 081/164] Download: Need to deduplicate Seqera Container matches as well, otherwise a race condition emerges. --- nf_core/pipelines/download.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/pipelines/download.py b/nf_core/pipelines/download.py index 9a329aeaf..24a6d4ab2 100644 --- a/nf_core/pipelines/download.py +++ b/nf_core/pipelines/download.py @@ -1016,8 +1016,8 @@ def prioritize_direct_download(self, container_list: List[str]) -> List[str]: log.debug(f"{c} matches and will be saved as {k}") d[k] = c - # combine deduplicated others and Seqera containers - return sorted(list(d.values()) + seqera_containers) + # combine deduplicated others and deduplicated Seqera containers + return sorted(list(d.values()) + list(set(seqera_containers))) def gather_registries(self, workflow_directory: str) -> None: """Fetch the registries from the pipeline config and CLI arguments and store them in a set. From a5b0e866030f4b1a5c5316adcb4c9484d3be364d Mon Sep 17 00:00:00 2001 From: Matthias Zepper Date: Tue, 19 Nov 2024 19:55:28 +0100 Subject: [PATCH 082/164] Add new function to handle Seqera Container Oras URIs. --- nf_core/pipelines/download.py | 49 ++++++++++++++++++++++++++++++++--- 1 file changed, 45 insertions(+), 4 deletions(-) diff --git a/nf_core/pipelines/download.py b/nf_core/pipelines/download.py index 24a6d4ab2..68e5e3a98 100644 --- a/nf_core/pipelines/download.py +++ b/nf_core/pipelines/download.py @@ -1000,14 +1000,18 @@ def prioritize_direct_download(self, container_list: List[str]) -> List[str]: 'https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/63/6397750e9730a3fbcc5b4c43f14bd141c64c723fd7dad80e47921a68a7c3cd21/data' 'https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/c2/c262fc09eca59edb5a724080eeceb00fb06396f510aefb229c2d2c6897e63975/data' + Lastly, we want to remove at least a few Docker URIs for those modules, that have an oras:// download link. 
""" d: Dict[str, str] = {} - seqera_containers: List[str] = [] + seqera_containers_http: List[str] = [] + seqera_containers_oras: List[str] = [] all_others: List[str] = [] for c in container_list: if bool(re.search(r"/data$", c)): - seqera_containers.append(c) + seqera_containers_http.append(c) + elif bool(re.search(r"^oras$", c)): + seqera_containers_oras.append(c) else: all_others.append(c) @@ -1016,8 +1020,45 @@ def prioritize_direct_download(self, container_list: List[str]) -> List[str]: log.debug(f"{c} matches and will be saved as {k}") d[k] = c - # combine deduplicated others and deduplicated Seqera containers - return sorted(list(d.values()) + list(set(seqera_containers))) + combined_with_oras = self.reconcile_seqera_container_uris(seqera_containers_oras, list(d.values())) + + # combine deduplicated others (Seqera containers oras, http others and Docker URI others) and Seqera containers http + return sorted(list(set(combined_with_oras + seqera_containers_http))) + + @staticmethod + def reconcile_seqera_container_uris(prioritzed_container_list: List[str], other_list: List[str]) -> List[str]: + """ + Helper function that takes a list of Seqera container URIs, + extracts the software string and builds a regex from them to filter out + similar containers from the second container list. + + prioritzed_container_list = [ + ... "oras://community.wave.seqera.io/library/multiqc:1.25.1--f0e743d16869c0bf", + ... "oras://community.wave.seqera.io/library/multiqc_pip_multiqc-plugins:e1f4877f1515d03c" + ... 
] + + will be cleaned to + + ['library/multiqc:1.25.1', 'library/multiqc_pip_multiqc-plugins'] + + Subsequently, build a regex from those and filter out matching duplicates in other_list: + """ + + # trim the URIs to the stem that contains the tool string, assign with Walrus operator to account for non-matching patterns + trimmed_priority_list = [ + match.group() + for c in set(prioritzed_container_list) + if (match := re.search(r"library/.*?:[\d.]+", c) if "--" in c else re.search(r"library/[^\s:]+", c)) + ] + + # build regex + prioritized_containers = re.compile("|".join(f"{re.escape(c)}" for c in trimmed_priority_list)) + + # filter out matches in other list + filtered_containers = [c for c in other_list if not re.search(prioritized_containers, c)] + + # combine priorized and regular container lists + return sorted(list(set(prioritzed_container_list + filtered_containers))) def gather_registries(self, workflow_directory: str) -> None: """Fetch the registries from the pipeline config and CLI arguments and store them in a set. From 838286bbf92e481c2eecb3bec49b419867c61bd8 Mon Sep 17 00:00:00 2001 From: Matthias Zepper Date: Tue, 19 Nov 2024 20:05:42 +0100 Subject: [PATCH 083/164] Ensure, that oras:// containers are correctly handled. --- nf_core/pipelines/download.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nf_core/pipelines/download.py b/nf_core/pipelines/download.py index 68e5e3a98..171078738 100644 --- a/nf_core/pipelines/download.py +++ b/nf_core/pipelines/download.py @@ -1460,9 +1460,10 @@ def singularity_pull_image( # Sometimes, container still contain an explicit library specification, which # resulted in attempted pulls e.g. from docker://quay.io/quay.io/qiime2/core:2022.11 # Thus, if an explicit registry is specified, the provided -l value is ignored. + # Additionally, check if the container to be pulled is native Singularity: oras:// protocol. 
container_parts = container.split("/") if len(container_parts) > 2: - address = f"docker://{container}" + address = container if container.startswith("oras://") else f"docker://{container}" absolute_URI = True else: address = f"docker://{library}/{container.replace('docker://', '')}" From c2f9056c1c1237ca460b60539a5e6048e132af61 Mon Sep 17 00:00:00 2001 From: Matthias Zepper Date: Tue, 19 Nov 2024 20:28:54 +0100 Subject: [PATCH 084/164] Download: Add test data for oras:// modules. --- .../modules/mock_seqera_container_oras.nf | 11 +++++++++++ .../modules/mock_seqera_container_oras_mulled.nf | 11 +++++++++++ 2 files changed, 22 insertions(+) create mode 100644 tests/data/mock_module_containers/modules/mock_seqera_container_oras.nf create mode 100644 tests/data/mock_module_containers/modules/mock_seqera_container_oras_mulled.nf diff --git a/tests/data/mock_module_containers/modules/mock_seqera_container_oras.nf b/tests/data/mock_module_containers/modules/mock_seqera_container_oras.nf new file mode 100644 index 000000000..8278ac791 --- /dev/null +++ b/tests/data/mock_module_containers/modules/mock_seqera_container_oras.nf @@ -0,0 +1,11 @@ +process UMI_TRANSFER { + label 'process_single' + + conda "${moduleDir}/environment.yml" + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'oras://community.wave.seqera.io/library/umi-transfer:1.0.0--e5b0c1a65b8173b6' : + 'community.wave.seqera.io/library/umi-transfer:1.0.0--d30e8812ea280fa1' }" + + // truncated + +} diff --git a/tests/data/mock_module_containers/modules/mock_seqera_container_oras_mulled.nf b/tests/data/mock_module_containers/modules/mock_seqera_container_oras_mulled.nf new file mode 100644 index 000000000..234ca04a4 --- /dev/null +++ b/tests/data/mock_module_containers/modules/mock_seqera_container_oras_mulled.nf @@ -0,0 +1,11 @@ +process UMI_TRANSFER_MULLED { + label 'process_single' + + conda "${moduleDir}/environment.yml" + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'oras://community.wave.seqera.io/library/umi-transfer_umicollapse:796a995ff53da9e3' : + 'community.wave.seqera.io/library/umi-transfer_umicollapse:3298d4f1b49e33bd' }" + + // truncated + +} From 50896ead8e3862db4314caf820f383d59c922cf5 Mon Sep 17 00:00:00 2001 From: Matthias Zepper Date: Thu, 21 Nov 2024 19:39:25 +0100 Subject: [PATCH 085/164] Test the new container elimination routine. 
--- ...ainer.nf => mock_seqera_container_http.nf} | 0 tests/pipelines/test_download.py | 24 ++++++++++++++++++- 2 files changed, 23 insertions(+), 1 deletion(-) rename tests/data/mock_module_containers/modules/{mock_seqera_container.nf => mock_seqera_container_http.nf} (100%) diff --git a/tests/data/mock_module_containers/modules/mock_seqera_container.nf b/tests/data/mock_module_containers/modules/mock_seqera_container_http.nf similarity index 100% rename from tests/data/mock_module_containers/modules/mock_seqera_container.nf rename to tests/data/mock_module_containers/modules/mock_seqera_container_http.nf diff --git a/tests/pipelines/test_download.py b/tests/pipelines/test_download.py index 86b07ef7f..265936106 100644 --- a/tests/pipelines/test_download.py +++ b/tests/pipelines/test_download.py @@ -257,7 +257,20 @@ def test_find_container_images_modules(self, tmp_path, mock_fetch_wf_config): not in download_obj.containers ) - # mock_seqera_container.nf + # mock_seqera_container_oras.nf + assert "oras://community.wave.seqera.io/library/umi-transfer:1.0.0--e5b0c1a65b8173b6" in download_obj.containers + assert "community.wave.seqera.io/library/umi-transfer:1.0.0--d30e8812ea280fa1" not in download_obj.containers + + # mock_seqera_container_oras_mulled.nf + assert ( + "oras://community.wave.seqera.io/library/umi-transfer_umicollapse:796a995ff53da9e3" + in download_obj.containers + ) + assert ( + "community.wave.seqera.io/library/umi-transfer_umicollapse:3298d4f1b49e33bd" not in download_obj.containers + ) + + # mock_seqera_container_http.nf assert ( "https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/c2/c262fc09eca59edb5a724080eeceb00fb06396f510aefb229c2d2c6897e63975/data" in download_obj.containers @@ -356,6 +369,15 @@ def test_singularity_pull_image_singularity_installed(self, tmp_dir, mock_rich_p "docker.io/bschiffthaler/sed", f"{tmp_dir}/sed.sif", None, "docker.io", mock_rich_progress ) + # Test successful pull with absolute oras:// URI + 
download_obj.singularity_pull_image( + "oras://ghcr.io/scilifelab/umi-transfer:latest", + f"{tmp_dir}/umi-transfer-oras.sif", + None, + "docker.io", + mock_rich_progress, + ) + # try to pull from non-existing registry (Name change hello-world_new.sif is needed, otherwise ImageExistsError is raised before attempting to pull.) with pytest.raises(ContainerError.RegistryNotFoundError): download_obj.singularity_pull_image( From 7ef1cfb459d40da0c63056f0ea0c7fea6b4f9b7a Mon Sep 17 00:00:00 2001 From: Matthias Zepper Date: Thu, 21 Nov 2024 20:06:56 +0100 Subject: [PATCH 086/164] Download: Update the tests. --- nf_core/pipelines/download.py | 30 ++++++++++--------- tests/pipelines/test_download.py | 51 +++++++++++++++++++++++++++++++- 2 files changed, 66 insertions(+), 15 deletions(-) diff --git a/nf_core/pipelines/download.py b/nf_core/pipelines/download.py index 171078738..b45395a93 100644 --- a/nf_core/pipelines/download.py +++ b/nf_core/pipelines/download.py @@ -1026,7 +1026,7 @@ def prioritize_direct_download(self, container_list: List[str]) -> List[str]: return sorted(list(set(combined_with_oras + seqera_containers_http))) @staticmethod - def reconcile_seqera_container_uris(prioritzed_container_list: List[str], other_list: List[str]) -> List[str]: + def reconcile_seqera_container_uris(prioritized_container_list: List[str], other_list: List[str]) -> List[str]: """ Helper function that takes a list of Seqera container URIs, extracts the software string and builds a regex from them to filter out @@ -1043,22 +1043,24 @@ def reconcile_seqera_container_uris(prioritzed_container_list: List[str], other_ Subsequently, build a regex from those and filter out matching duplicates in other_list: """ + if not prioritized_container_list: + return other_list + else: + # trim the URIs to the stem that contains the tool string, assign with Walrus operator to account for non-matching patterns + trimmed_priority_list = [ + match.group() + for c in set(prioritized_container_list) + if 
(match := re.search(r"library/.*?:[\d.]+", c) if "--" in c else re.search(r"library/[^\s:]+", c)) + ] - # trim the URIs to the stem that contains the tool string, assign with Walrus operator to account for non-matching patterns - trimmed_priority_list = [ - match.group() - for c in set(prioritzed_container_list) - if (match := re.search(r"library/.*?:[\d.]+", c) if "--" in c else re.search(r"library/[^\s:]+", c)) - ] - - # build regex - prioritized_containers = re.compile("|".join(f"{re.escape(c)}" for c in trimmed_priority_list)) + # build regex + prioritized_containers = re.compile("|".join(f"{re.escape(c)}" for c in trimmed_priority_list)) - # filter out matches in other list - filtered_containers = [c for c in other_list if not re.search(prioritized_containers, c)] + # filter out matches in other list + filtered_containers = [c for c in other_list if not re.search(prioritized_containers, c)] - # combine priorized and regular container lists - return sorted(list(set(prioritzed_container_list + filtered_containers))) + # combine prioritized and regular container lists + return sorted(list(set(prioritized_container_list + filtered_containers))) def gather_registries(self, workflow_directory: str) -> None: """Fetch the registries from the pipeline config and CLI arguments and store them in a set. 
diff --git a/tests/pipelines/test_download.py b/tests/pipelines/test_download.py index 265936106..8c68aa565 100644 --- a/tests/pipelines/test_download.py +++ b/tests/pipelines/test_download.py @@ -307,6 +307,7 @@ def test_prioritize_direct_download(self, tmp_path): "https://depot.galaxyproject.org/singularity/sortmerna:4.2.0--h9ee0642_1", "https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/63/6397750e9730a3fbcc5b4c43f14bd141c64c723fd7dad80e47921a68a7c3cd21/data", "https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/c2/c262fc09eca59edb5a724080eeceb00fb06396f510aefb229c2d2c6897e63975/data", + "https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/c2/c262fc09eca59edb5a724080eeceb00fb06396f510aefb229c2d2c6897e63975/data", ] result = download_obj.prioritize_direct_download(test_container) @@ -329,7 +330,7 @@ def test_prioritize_direct_download(self, tmp_path): assert "https://depot.galaxyproject.org/singularity/sortmerna:4.3.7--hdbdd923_0" in result assert "https://depot.galaxyproject.org/singularity/sortmerna:4.2.0--h9ee0642_1" in result - # Verify that Seqera containers are not deduplicated + # Verify that Seqera containers are not deduplicated... assert ( "https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/63/6397750e9730a3fbcc5b4c43f14bd141c64c723fd7dad80e47921a68a7c3cd21/data" in result @@ -338,6 +339,54 @@ def test_prioritize_direct_download(self, tmp_path): "https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/c2/c262fc09eca59edb5a724080eeceb00fb06396f510aefb229c2d2c6897e63975/data" in result ) + # ...but identical ones are. 
+ assert ( + result.count( + "https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/c2/c262fc09eca59edb5a724080eeceb00fb06396f510aefb229c2d2c6897e63975/data" + ) + == 1 + ) + + # + # Test for 'reconcile_seqera_container_uris' + # + @with_temporary_folder + def test_reconcile_seqera_container_uris(self, tmp_path): + download_obj = DownloadWorkflow(pipeline="dummy", outdir=tmp_path) + + prioritized_container = [ + "oras://community.wave.seqera.io/library/umi-transfer:1.0.0--e5b0c1a65b8173b6", + "oras://community.wave.seqera.io/library/sylph:0.6.1--b97274cdc1caa649", + ] + + test_container = [ + "https://depot.galaxyproject.org/singularity/ubuntu:22.04", + "nf-core/ubuntu:22.04", + "nf-core/ubuntu:22.04", + "nf-core/ubuntu:22.04", + "community.wave.seqera.io/library/umi-transfer:1.5.0--73c1a6b65e5b0b81", + "community.wave.seqera.io/library/sylph:0.6.1--a21713a57a65a373", + "biocontainers/sylph:0.6.1--b97274cdc1caa649", + ] + + result = download_obj.reconcile_seqera_container_uris(prioritized_container, test_container) + + # Verify that unrelated images are retained + assert "https://depot.galaxyproject.org/singularity/ubuntu:22.04" in result + assert "nf-core/ubuntu:22.04" in result + + # Verify that the priority works for regular Seqera container (Native Singularity over Docker, but only for Seqera registry) + assert "oras://community.wave.seqera.io/library/sylph:0.6.1--b97274cdc1caa649" in result + assert "community.wave.seqera.io/library/sylph:0.6.1--a21713a57a65a373" not in result + assert "biocontainers/sylph:0.6.1--b97274cdc1caa649" in result + + # Verify that version strings are respected: Version 1.0.0 does not replace version 1.5.0 + assert "oras://community.wave.seqera.io/library/umi-transfer:1.0.0--e5b0c1a65b8173b6" in result + assert "community.wave.seqera.io/library/umi-transfer:1.5.0--73c1a6b65e5b0b81" in result + + # assert that the deduplication works + assert test_container.count("nf-core/ubuntu:22.04") == 3 + assert 
result.count("nf-core/ubuntu:22.04") == 1 # # Tests for 'singularity_pull_image' From 335c48de10c45f77fd73b5ca325496cee5fccedc Mon Sep 17 00:00:00 2001 From: Matthias Zepper Date: Fri, 22 Nov 2024 13:03:03 +0100 Subject: [PATCH 087/164] Add dedicated ORAS image format error. --- nf_core/pipelines/download.py | 14 ++++++++++++++ tests/pipelines/test_download.py | 16 +++++++++++++++- 2 files changed, 29 insertions(+), 1 deletion(-) diff --git a/nf_core/pipelines/download.py b/nf_core/pipelines/download.py index b45395a93..e30815bb5 100644 --- a/nf_core/pipelines/download.py +++ b/nf_core/pipelines/download.py @@ -1887,6 +1887,9 @@ def __init__( elif re.search(r"manifest\sunknown", line): self.error_type = self.InvalidTagError(self) break + elif re.search(r"ORAS\sSIF\simage\sshould\shave\sa\ssingle\slayer", line): + self.error_type = self.NoSingularityContainerError(self) + break elif re.search(r"Image\sfile\salready\sexists", line): self.error_type = self.ImageExistsError(self) break @@ -1951,6 +1954,17 @@ def __init__(self, error_log): self.helpmessage = f'Saving image of "{self.error_log.container}" failed, because "{self.error_log.out_path}" exists.\nPlease troubleshoot the command \n"{" ".join(self.error_log.singularity_command)}" manually.\n' super().__init__(self.message) + class NoSingularityContainerError(RuntimeError): + """The container image is no native Singularity Image Format.""" + + def __init__(self, error_log): + self.error_log = error_log + self.message = ( + f'[bold red]"{self.error_log.container}" is no valid Singularity Image Format container.[/]\n' + ) + self.helpmessage = f"Pulling \"{self.error_log.container}\" failed, because it appears invalid. 
To convert from Docker's OCI format, prefix the URI with 'docker://' instead of 'oras://'.\n" + super().__init__(self.message) + class OtherError(RuntimeError): """Undefined error with the container""" diff --git a/tests/pipelines/test_download.py b/tests/pipelines/test_download.py index 8c68aa565..01be5a5d4 100644 --- a/tests/pipelines/test_download.py +++ b/tests/pipelines/test_download.py @@ -369,6 +369,10 @@ def test_reconcile_seqera_container_uris(self, tmp_path): "biocontainers/sylph:0.6.1--b97274cdc1caa649", ] + # test that the test_container list is returned as it is, if no prioritized_containers are specified + result_empty = download_obj.reconcile_seqera_container_uris([], test_container) + assert result_empty == test_container + result = download_obj.reconcile_seqera_container_uris(prioritized_container, test_container) # Verify that unrelated images are retained @@ -420,13 +424,23 @@ def test_reconcile_seqera_container_uris(self, tmp_path): # Test successful pull with absolute oras:// URI download_obj.singularity_pull_image( - "oras://ghcr.io/scilifelab/umi-transfer:latest", + "oras://community.wave.seqera.io/library/umi-transfer:1.0.0--e5b0c1a65b8173b6", f"{tmp_dir}/umi-transfer-oras.sif", None, "docker.io", mock_rich_progress, ) + # try pulling Docker container image with oras:// + with pytest.raises(ContainerError.NoSingularityContainerError): + download_obj.singularity_pull_image( + "oras://ghcr.io/matthiaszepper/umi-transfer:dev", + f"{tmp_dir}/umi-transfer-oras.sif", + None, + "docker.io", + mock_rich_progress, + ) + # try to pull from non-existing registry (Name change hello-world_new.sif is needed, otherwise ImageExistsError is raised before attempting to pull.) 
with pytest.raises(ContainerError.RegistryNotFoundError): download_obj.singularity_pull_image( From 0d0fe6b85db3c90840c65d920dc60d4404700835 Mon Sep 17 00:00:00 2001 From: Matthias Zepper Date: Fri, 22 Nov 2024 14:38:45 +0100 Subject: [PATCH 088/164] Include oras:// regex in download to recognize the paths. --- nf_core/pipelines/download.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/nf_core/pipelines/download.py b/nf_core/pipelines/download.py index e30815bb5..4fe38dd28 100644 --- a/nf_core/pipelines/download.py +++ b/nf_core/pipelines/download.py @@ -839,11 +839,12 @@ def rectify_raw_container_matches(self, raw_findings): url_regex = ( r"https?:\/\/(www\.)?[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9()@:%_\+.~#?&//=]*)" ) + oras_regex = r"oras:\/\/[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9()@:%_\+.~#?&//=]*)" # Thanks Stack Overflow for the regex: https://stackoverflow.com/a/39672069/713980 docker_regex = r"^(?:(?=[^:\/]{1,253})(?!-)[a-zA-Z0-9-]{1,63}(? List[str]: for c in container_list: if bool(re.search(r"/data$", c)): seqera_containers_http.append(c) - elif bool(re.search(r"^oras$", c)): + elif bool(re.search(r"^oras://", c)): seqera_containers_oras.append(c) else: all_others.append(c) From 66ffaf1804a90acfc6a2373611a42cd4061645bb Mon Sep 17 00:00:00 2001 From: Matthias Zepper Date: Fri, 22 Nov 2024 16:09:18 +0100 Subject: [PATCH 089/164] Changelog. --- CHANGELOG.md | 1 + tests/pipelines/test_download.py | 6 +++--- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f418dc0af..29c6db0cd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,6 +16,7 @@ ### Download - First steps towards fixing [#3179](https://github.com/nf-core/tools/issues/3179): Modify `prioritize_direct_download()` to retain Seqera Singularity https:// Container URIs and hardcode Seqera Containers into `gather_registries()` ([#3244](https://github.com/nf-core/tools/pull/3244)). 
+- Further steps towards fixing [#3179](https://github.com/nf-core/tools/issues/3179): Enable limited support for `oras://` container paths (_only absolute URIs, no flexible registries like with Docker_) and prevent unnecessary image downloads for Seqera Container modules with `reconcile_seqera_container_uris()` ([#3293](https://github.com/nf-core/tools/pull/3293)). ### Linting diff --git a/tests/pipelines/test_download.py b/tests/pipelines/test_download.py index 01be5a5d4..d1e2c41a6 100644 --- a/tests/pipelines/test_download.py +++ b/tests/pipelines/test_download.py @@ -435,7 +435,7 @@ def test_singularity_pull_image_singularity_installed(self, tmp_dir, mock_rich_p with pytest.raises(ContainerError.NoSingularityContainerError): download_obj.singularity_pull_image( "oras://ghcr.io/matthiaszepper/umi-transfer:dev", - f"{tmp_dir}/umi-transfer-oras.sif", + f"{tmp_dir}/umi-transfer-oras_impostor.sif", None, "docker.io", mock_rich_progress, @@ -445,7 +445,7 @@ def test_singularity_pull_image_singularity_installed(self, tmp_dir, mock_rich_p with pytest.raises(ContainerError.RegistryNotFoundError): download_obj.singularity_pull_image( "hello-world", - f"{tmp_dir}/hello-world_new.sif", + f"{tmp_dir}/break_the_registry_test.sif", None, "register-this-domain-to-break-the-test.io", mock_rich_progress, @@ -481,7 +481,7 @@ def test_singularity_pull_image_singularity_installed(self, tmp_dir, mock_rich_p with pytest.raises(ContainerError.InvalidTagError): download_obj.singularity_pull_image( "ewels/multiqc:go-rewrite", - f"{tmp_dir}/umi-transfer.sif", + f"{tmp_dir}/multiqc-go.sif", None, "ghcr.io", mock_rich_progress, From 8c285304c72e69bf9ce0a93b4b8be7f62b51354b Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Sat, 30 Nov 2024 07:24:39 +0000 Subject: [PATCH 090/164] Update dawidd6/action-download-artifact action to v7 (#3306) * Update dawidd6/action-download-artifact action to v7 * [automated] Update CHANGELOG.md --------- 
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> Co-authored-by: nf-core-bot --- CHANGELOG.md | 1 + nf_core/pipeline-template/.github/workflows/linting_comment.yml | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f418dc0af..244dcb8ad 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,6 +16,7 @@ ### Download - First steps towards fixing [#3179](https://github.com/nf-core/tools/issues/3179): Modify `prioritize_direct_download()` to retain Seqera Singularity https:// Container URIs and hardcode Seqera Containers into `gather_registries()` ([#3244](https://github.com/nf-core/tools/pull/3244)). +- Update dawidd6/action-download-artifact action to v7 ([#3306](https://github.com/nf-core/tools/pull/3306)) ### Linting diff --git a/nf_core/pipeline-template/.github/workflows/linting_comment.yml b/nf_core/pipeline-template/.github/workflows/linting_comment.yml index 908dcea15..63b20bb31 100644 --- a/nf_core/pipeline-template/.github/workflows/linting_comment.yml +++ b/nf_core/pipeline-template/.github/workflows/linting_comment.yml @@ -11,7 +11,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Download lint results - uses: dawidd6/action-download-artifact@bf251b5aa9c2f7eeb574a96ee720e24f801b7c11 # v6 + uses: dawidd6/action-download-artifact@80620a5d27ce0ae443b965134db88467fc607b43 # v7 with: workflow: linting.yml workflow_conclusion: completed From d7f1df2617406afc9b5bf035621d6e31285d2835 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Sat, 30 Nov 2024 11:22:59 +0000 Subject: [PATCH 091/164] Update pre-commit hook astral-sh/ruff-pre-commit to v0.8.1 --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a7c7d38ce..1494f5818 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: 
https://github.com/astral-sh/ruff-pre-commit - rev: v0.8.0 + rev: v0.8.1 hooks: - id: ruff # linter args: [--fix, --exit-non-zero-on-fix] # sort imports and fix From 456aa055b48af339a04ce7170d7aad3d5732b940 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Sun, 1 Dec 2024 02:08:49 +0000 Subject: [PATCH 092/164] Update dependency textual-dev to v1.7.0 --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index aab9b1e5d..48069d59a 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -6,7 +6,7 @@ responses ruff Sphinx sphinx-rtd-theme -textual-dev==1.6.1 +textual-dev==1.7.0 types-PyYAML types-requests types-jsonschema From c5ffcf87e6c742fb224e79b116c8d318b448244e Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Sun, 1 Dec 2024 02:09:38 +0000 Subject: [PATCH 093/164] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 244dcb8ad..414ce3d1a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -50,6 +50,7 @@ - Update codecov/codecov-action action to v5 ([#3283](https://github.com/nf-core/tools/pull/3283)) - Update python:3.12-slim Docker digest to 2a6386a ([#3284](https://github.com/nf-core/tools/pull/3284)) - Update pre-commit hook astral-sh/ruff-pre-commit to v0.8.0 ([#3299](https://github.com/nf-core/tools/pull/3299)) +- Update dependency textual-dev to v1.7.0 ([#3308](https://github.com/nf-core/tools/pull/3308)) ## [v3.0.2 - Titanium Tapir Patch](https://github.com/nf-core/tools/releases/tag/3.0.2) - [2024-10-11] From 800fd8da95d41888ec5bacb71c58b5d5705fa2ff Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 2 Dec 2024 06:44:55 +0100 Subject: [PATCH 094/164] Update gitpod/workspace-base Docker digest to 12853f7 (#3309) * Update gitpod/workspace-base Docker digest to 12853f7 * [automated] Update 
CHANGELOG.md --------- Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> Co-authored-by: nf-core-bot --- CHANGELOG.md | 1 + nf_core/gitpod/gitpod.Dockerfile | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 244dcb8ad..37c73d778 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -50,6 +50,7 @@ - Update codecov/codecov-action action to v5 ([#3283](https://github.com/nf-core/tools/pull/3283)) - Update python:3.12-slim Docker digest to 2a6386a ([#3284](https://github.com/nf-core/tools/pull/3284)) - Update pre-commit hook astral-sh/ruff-pre-commit to v0.8.0 ([#3299](https://github.com/nf-core/tools/pull/3299)) +- Update gitpod/workspace-base Docker digest to 12853f7 ([#3309](https://github.com/nf-core/tools/pull/3309)) ## [v3.0.2 - Titanium Tapir Patch](https://github.com/nf-core/tools/releases/tag/3.0.2) - [2024-10-11] diff --git a/nf_core/gitpod/gitpod.Dockerfile b/nf_core/gitpod/gitpod.Dockerfile index 78a528c19..a0002ed42 100644 --- a/nf_core/gitpod/gitpod.Dockerfile +++ b/nf_core/gitpod/gitpod.Dockerfile @@ -2,7 +2,7 @@ # docker build -t gitpod:test -f nf_core/gitpod/gitpod.Dockerfile . # See https://docs.renovatebot.com/docker/#digest-pinning for why a digest is used. 
-FROM gitpod/workspace-base@sha256:2cc134fe5bd7d8fdbe44cab294925d4bc6d2d178d94624f4c376584a22d1f7b6 +FROM gitpod/workspace-base@sha256:12853f7c901eb2b677a549cb112c85f9679d18feb30093bcc63aa252540ecad9 USER root From acb91031a8081148ab6afd70b31135c8740264fd Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 2 Dec 2024 10:51:27 +0100 Subject: [PATCH 095/164] update modules and subworkflows --- nf_core/pipeline-template/modules.json | 8 +-- .../modules/nf-core/fastqc/main.nf | 2 +- .../modules/nf-core/fastqc/meta.yml | 1 + .../nf-core/utils_nextflow_pipeline/main.nf | 2 + .../tests/main.workflow.nf.test | 10 ++-- .../nf-core/utils_nfcore_pipeline/main.nf | 51 +++--------------- .../tests/main.function.nf.test | 52 ------------------- .../tests/main.function.nf.test.snap | 30 ----------- .../utils_nfschema_plugin/tests/main.nf.test | 4 +- 9 files changed, 22 insertions(+), 138 deletions(-) diff --git a/nf_core/pipeline-template/modules.json b/nf_core/pipeline-template/modules.json index f714eb1d9..90c5728d9 100644 --- a/nf_core/pipeline-template/modules.json +++ b/nf_core/pipeline-template/modules.json @@ -8,7 +8,7 @@ {%- if fastqc %} "fastqc": { "branch": "master", - "git_sha": "666652151335353eef2fcd58880bcef5bc2928e1", + "git_sha": "dc94b6ee04a05ddb9f7ae050712ff30a13149164", "installed_by": ["modules"] }{% endif %}{%- if multiqc %}{% if fastqc %},{% endif %} "multiqc": { @@ -23,17 +23,17 @@ "nf-core": { "utils_nextflow_pipeline": { "branch": "master", - "git_sha": "3aa0aec1d52d492fe241919f0c6100ebf0074082", + "git_sha": "c2b22d85f30a706a3073387f30380704fcae013b", "installed_by": ["subworkflows"] }, "utils_nfcore_pipeline": { "branch": "master", - "git_sha": "1b6b9a3338d011367137808b49b923515080e3ba", + "git_sha": "9a1e8bb6a5d205cf7807dcefca872a3314b2f3e6", "installed_by": ["subworkflows"] }{% if nf_schema %}, "utils_nfschema_plugin": { "branch": "master", - "git_sha": "bbd5a41f4535a8defafe6080e00ea74c45f4f96c", + "git_sha": 
"2fd2cd6d0e7b273747f32e465fdc6bcc3ae0814e", "installed_by": ["subworkflows"] }{% endif %} } diff --git a/nf_core/pipeline-template/modules/nf-core/fastqc/main.nf b/nf_core/pipeline-template/modules/nf-core/fastqc/main.nf index d8989f481..752c3a10c 100644 --- a/nf_core/pipeline-template/modules/nf-core/fastqc/main.nf +++ b/nf_core/pipeline-template/modules/nf-core/fastqc/main.nf @@ -24,7 +24,7 @@ process FASTQC { // Make list of old name and new name pairs to use for renaming in the bash while loop def old_new_pairs = reads instanceof Path || reads.size() == 1 ? [[ reads, "${prefix}.${reads.extension}" ]] : reads.withIndex().collect { entry, index -> [ entry, "${prefix}_${index + 1}.${entry.extension}" ] } def rename_to = old_new_pairs*.join(' ').join(' ') - def renamed_files = old_new_pairs.collect{ old_name, new_name -> new_name }.join(' ') + def renamed_files = old_new_pairs.collect{ _old_name, new_name -> new_name }.join(' ') // The total amount of allocated RAM by FastQC is equal to the number of threads defined (--threads) time the amount of RAM defined (--memory) // https://github.com/s-andrews/FastQC/blob/1faeea0412093224d7f6a07f777fad60a5650795/fastqc#L211-L222 diff --git a/nf_core/pipeline-template/modules/nf-core/fastqc/meta.yml b/nf_core/pipeline-template/modules/nf-core/fastqc/meta.yml index 4827da7af..2b2e62b8a 100644 --- a/nf_core/pipeline-template/modules/nf-core/fastqc/meta.yml +++ b/nf_core/pipeline-template/modules/nf-core/fastqc/meta.yml @@ -11,6 +11,7 @@ tools: FastQC gives general quality metrics about your reads. It provides information about the quality score distribution across your reads, the per base sequence content (%A/C/G/T). + You get information about adapter contamination and other overrepresented sequences. 
homepage: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/ diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf index 0fcbf7b3f..d6e593e85 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf @@ -92,10 +92,12 @@ def checkCondaChannels() { channels = config.channels } catch (NullPointerException e) { + log.debug(e) log.warn("Could not verify conda channel configuration.") return null } catch (IOException e) { + log.debug(e) log.warn("Could not verify conda channel configuration.") return null } diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test index ca964ce8e..02dbf094c 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test @@ -52,10 +52,12 @@ nextflow_workflow { } then { - assertAll( - { assert workflow.success }, - { assert workflow.stdout.contains("nextflow_workflow v9.9.9") } - ) + expect { + with(workflow) { + assert success + assert "nextflow_workflow v9.9.9" in stdout + } + } } } diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf index 5cb7bafef..228dbff89 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf @@ -56,21 +56,6 @@ def checkProfileProvided(nextflow_cli_args) { } } -// -// Citation string for pipeline -// -def workflowCitation() { - def 
temp_doi_ref = "" - def manifest_doi = workflow.manifest.doi.tokenize(",") - // Handling multiple DOIs - // Removing `https://doi.org/` to handle pipelines using DOIs vs DOI resolvers - // Removing ` ` since the manifest.doi is a string and not a proper list - manifest_doi.each { doi_ref -> - temp_doi_ref += " https://doi.org/${doi_ref.replace('https://doi.org/', '').replace(' ', '')}\n" - } - return "If you use ${workflow.manifest.name} for your analysis please cite:\n\n" + "* The pipeline\n" + temp_doi_ref + "\n" + "* The nf-core framework\n" + " https://doi.org/10.1038/s41587-020-0439-x\n\n" + "* Software dependencies\n" + " https://github.com/${workflow.manifest.name}/blob/master/CITATIONS.md" -} - // // Generate workflow version string // @@ -150,33 +135,6 @@ def paramsSummaryMultiqc(summary_params) { return yaml_file_text } -// -// nf-core logo -// -def nfCoreLogo(monochrome_logs=true) { - def colors = logColours(monochrome_logs) as Map - String.format( - """\n - ${dashedLine(monochrome_logs)} - ${colors.green},--.${colors.black}/${colors.green},-.${colors.reset} - ${colors.blue} ___ __ __ __ ___ ${colors.green}/,-._.--~\'${colors.reset} - ${colors.blue} |\\ | |__ __ / ` / \\ |__) |__ ${colors.yellow}} {${colors.reset} - ${colors.blue} | \\| | \\__, \\__/ | \\ |___ ${colors.green}\\`-._,-`-,${colors.reset} - ${colors.green}`._,._,\'${colors.reset} - ${colors.purple} ${workflow.manifest.name} ${getWorkflowVersion()}${colors.reset} - ${dashedLine(monochrome_logs)} - """.stripIndent() - ) -} - -// -// Return dashed line -// -def dashedLine(monochrome_logs=true) { - def colors = logColours(monochrome_logs) as Map - return "-${colors.dim}----------------------------------------------------${colors.reset}-" -} - // // ANSII colours used for terminal logging // @@ -261,7 +219,8 @@ def attachMultiqcReport(multiqc_report) { } } } - catch (Exception all) { + catch (Exception msg) { + log.debug(msg) if (multiqc_report) { log.warn("[${workflow.manifest.name}] Could not 
attach MultiQC report to summary email") } @@ -340,7 +299,7 @@ def completionEmail(summary_params, email, email_on_fail, plaintext_email, outdi def email_html = html_template.toString() // Render the sendmail template - def max_multiqc_email_size = (params.containsKey('max_multiqc_email_size') ? params.max_multiqc_email_size : 0) as nextflow.util.MemoryUnit + def max_multiqc_email_size = (params.containsKey('max_multiqc_email_size') ? params.max_multiqc_email_size : 0) as MemoryUnit def smail_fields = [email: email_address, subject: subject, email_txt: email_txt, email_html: email_html, projectDir: "${workflow.projectDir}", mqcFile: mqc_report, mqcMaxSize: max_multiqc_email_size.toBytes()] def sf = new File("${workflow.projectDir}/assets/sendmail_template.txt") def sendmail_template = engine.createTemplate(sf).make(smail_fields) @@ -358,7 +317,9 @@ new org.codehaus.groovy.GroovyException('Send plaintext e-mail, not HTML') ['sendmail', '-t'].execute() << sendmail_html log.info("-${colors.purple}[${workflow.manifest.name}]${colors.green} Sent summary e-mail to ${email_address} (sendmail)-") } - catch (Exception all) { + catch (Exception msg) { + log.debug(msg) + log.debug("Trying with mail instead of sendmail") // Catch failures and try with plaintext def mail_cmd = ['mail', '-s', subject, '--content-type=text/html', email_address] mail_cmd.execute() << email_html diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test index 1dc317f8f..e43d208b1 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test @@ -41,58 +41,6 @@ nextflow_function { } } - test("Test Function workflowCitation") { - - function "workflowCitation" - - then { - assertAll( - { assert 
function.success }, - { assert snapshot(function.result).match() } - ) - } - } - - test("Test Function nfCoreLogo") { - - function "nfCoreLogo" - - when { - function { - """ - input[0] = false - """ - } - } - - then { - assertAll( - { assert function.success }, - { assert snapshot(function.result).match() } - ) - } - } - - test("Test Function dashedLine") { - - function "dashedLine" - - when { - function { - """ - input[0] = false - """ - } - } - - then { - assertAll( - { assert function.success }, - { assert snapshot(function.result).match() } - ) - } - } - test("Test Function without logColours") { function "logColours" diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap index 1037232c9..02c670141 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap @@ -17,26 +17,6 @@ }, "timestamp": "2024-02-28T12:02:59.729647" }, - "Test Function nfCoreLogo": { - "content": [ - "\n\n-\u001b[2m----------------------------------------------------\u001b[0m-\n \u001b[0;32m,--.\u001b[0;30m/\u001b[0;32m,-.\u001b[0m\n\u001b[0;34m ___ __ __ __ ___ \u001b[0;32m/,-._.--~'\u001b[0m\n\u001b[0;34m |\\ | |__ __ / ` / \\ |__) |__ \u001b[0;33m} {\u001b[0m\n\u001b[0;34m | \\| | \\__, \\__/ | \\ |___ \u001b[0;32m\\`-._,-`-,\u001b[0m\n \u001b[0;32m`._,._,'\u001b[0m\n\u001b[0;35m nextflow_workflow v9.9.9\u001b[0m\n-\u001b[2m----------------------------------------------------\u001b[0m-\n" - ], - "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" - }, - "timestamp": "2024-02-28T12:03:10.562934" - }, - "Test Function workflowCitation": { - "content": [ - "If you use nextflow_workflow for your analysis please cite:\n\n* The pipeline\n 
https://doi.org/10.5281/zenodo.5070524\n\n* The nf-core framework\n https://doi.org/10.1038/s41587-020-0439-x\n\n* Software dependencies\n https://github.com/nextflow_workflow/blob/master/CITATIONS.md" - ], - "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" - }, - "timestamp": "2024-02-28T12:03:07.019761" - }, "Test Function without logColours": { "content": [ { @@ -95,16 +75,6 @@ }, "timestamp": "2024-02-28T12:03:17.969323" }, - "Test Function dashedLine": { - "content": [ - "-\u001b[2m----------------------------------------------------\u001b[0m-" - ], - "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" - }, - "timestamp": "2024-02-28T12:03:14.366181" - }, "Test Function with logColours": { "content": [ { diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test index 842dc432a..8fb301648 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test @@ -42,7 +42,7 @@ nextflow_workflow { params { test_data = '' - outdir = 1 + outdir = null } workflow { @@ -94,7 +94,7 @@ nextflow_workflow { params { test_data = '' - outdir = 1 + outdir = null } workflow { From 8800b71da74c82f7953f127f97ded756537ad413 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Mon, 2 Dec 2024 09:55:15 +0000 Subject: [PATCH 096/164] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index fed991f85..16d3025e0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,7 @@ - fix workflow_dispatch trigger and parse more review comments in awsfulltest ([#3235](https://github.com/nf-core/tools/pull/3235)) - Add resource limits to Gitpod profile([#3255](https://github.com/nf-core/tools/pull/3255)) - Fix a typo 
([#3268](https://github.com/nf-core/tools/pull/3268)) +- Use params.monochrome_logs in the template and update nf-core components ([#3310](https://github.com/nf-core/tools/pull/3310)) ### Download From 22ff44b17ab5de2a9f78fca8b8dfc056f2bad14f Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 2 Dec 2024 11:14:47 +0100 Subject: [PATCH 097/164] also add monochrome_logs if nf-schema is used --- nf_core/pipeline-template/nextflow.config | 2 +- nf_core/pipeline-template/nextflow_schema.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index abd186c60..bbd1ad7fc 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -41,7 +41,7 @@ params { email_on_fail = null plaintext_email = false {%- endif %} - {%- if modules %} + {%- if modules or nf_schema %} monochrome_logs = false{% endif %} {%- if slackreport or adaptivecard %} hook_url = null{% endif %} diff --git a/nf_core/pipeline-template/nextflow_schema.json b/nf_core/pipeline-template/nextflow_schema.json index 389f9d104..3e59a8ba5 100644 --- a/nf_core/pipeline-template/nextflow_schema.json +++ b/nf_core/pipeline-template/nextflow_schema.json @@ -182,7 +182,7 @@ "fa_icon": "fas fa-file-upload", "hidden": true },{% endif %} - {%- if modules %} + {%- if modules or nf_schema %} "monochrome_logs": { "type": "boolean", "description": "Do not use coloured log outputs.", From b91bd77b9ffa248ce0d0bec89c1acabbc961c360 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 2 Dec 2024 12:12:25 +0100 Subject: [PATCH 098/164] add manifest.contributors to nextflow.config --- nf_core/pipeline-template/nextflow.config | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index bbd1ad7fc..475fdf678 100644 --- a/nf_core/pipeline-template/nextflow.config +++ 
b/nf_core/pipeline-template/nextflow.config @@ -274,7 +274,16 @@ dag { manifest { name = '{{ name }}' - author = """{{ author }}""" + author = """{{ author }}""" // The author field is deprecated from Nextflow version 24.10.0, use contributors instead + contributors = [ + // TODO nf-core: Update the field with the details of the contributors to your pipeline. New with Nextflow version 24.10.0 + [name: """{{ author }}""" + affiliation: "" + email: "" + github: "" + contribution: [] // List of contribution types ('author', 'maintainer' or 'contributor') + orcid: ""] + ] homePage = 'https://github.com/{{ name }}' description = """{{ description }}""" mainScript = 'main.nf' From da021fecd08245122175219f875dca415f694e5b Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Mon, 2 Dec 2024 11:14:38 +0000 Subject: [PATCH 099/164] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3703d41ce..ac291ea73 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,6 +13,7 @@ - Remove `def` from `nextflow.config` and add `trace_report_suffix` param ([#3296](https://github.com/nf-core/tools/pull/3296)) - Move `includeConfig 'conf/modules.config'` next to `includeConfig 'conf/base.config'` to not overwrite tests profiles configurations ([#3301](https://github.com/nf-core/tools/pull/3301)) - Use `params.monochrome_logs` in the template and update nf-core components ([#3310](https://github.com/nf-core/tools/pull/3310)) +- Add `manifest.contributors` to `nextflow.config` ([#3311](https://github.com/nf-core/tools/pull/3311)) ### Download From 9c8d50fc7034e7eca8dee04cba3ae168760833aa Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 2 Dec 2024 12:25:48 +0100 Subject: [PATCH 100/164] fix contributors map --- nf_core/pipeline-template/nextflow.config | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config 
index 475fdf678..cc5a71ffe 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -277,12 +277,14 @@ manifest { author = """{{ author }}""" // The author field is deprecated from Nextflow version 24.10.0, use contributors instead contributors = [ // TODO nf-core: Update the field with the details of the contributors to your pipeline. New with Nextflow version 24.10.0 - [name: """{{ author }}""" - affiliation: "" - email: "" - github: "" - contribution: [] // List of contribution types ('author', 'maintainer' or 'contributor') - orcid: ""] + [ + name: '{{ author }}', + affiliation: '', + email: '', + github: '', + contribution: [], // List of contribution types ('author', 'maintainer' or 'contributor') + orcid: '' + ] ] homePage = 'https://github.com/{{ name }}' description = """{{ description }}""" From df3d25d944eb79b986b531174833eb18638cce88 Mon Sep 17 00:00:00 2001 From: Louis LE NEZET <58640615+LouisLeNezet@users.noreply.github.com> Date: Mon, 2 Dec 2024 13:01:42 +0100 Subject: [PATCH 101/164] Update nf_core/pipeline-template/.github/CONTRIBUTING.md Co-authored-by: Phil Ewels --- nf_core/pipeline-template/.github/CONTRIBUTING.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipeline-template/.github/CONTRIBUTING.md b/nf_core/pipeline-template/.github/CONTRIBUTING.md index 3e6b96008..37970c09e 100644 --- a/nf_core/pipeline-template/.github/CONTRIBUTING.md +++ b/nf_core/pipeline-template/.github/CONTRIBUTING.md @@ -66,7 +66,7 @@ These tests are run both with the latest available version of `Nextflow` and als - On your own fork, make a new branch `patch` based on `upstream/master`. - Fix the bug, and bump version (X.Y.Z+1). -- A PR should be made on `master` from patch to directly adress this particular bug. +- Open a pull-request from `patch` to `master` with the changes. 
{% if is_nfcore -%} From d72667c65e1ffdb6ee9274d2229bb091f20821ad Mon Sep 17 00:00:00 2001 From: Matthias Zepper <6963520+MatthiasZepper@users.noreply.github.com> Date: Mon, 2 Dec 2024 14:56:00 +0100 Subject: [PATCH 102/164] Typo in error message. MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Matthias Hörtenhuber --- nf_core/pipelines/download.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipelines/download.py b/nf_core/pipelines/download.py index 4fe38dd28..d37dce86d 100644 --- a/nf_core/pipelines/download.py +++ b/nf_core/pipelines/download.py @@ -1963,7 +1963,7 @@ def __init__(self, error_log): self.message = ( f'[bold red]"{self.error_log.container}" is no valid Singularity Image Format container.[/]\n' ) - self.helpmessage = f"Pulling \"{self.error_log.container}\" failed, because it appears invalid. To convert form Docker's OCI format, prefix the URI with 'docker://' instead of 'oras://'.\n" + self.helpmessage = f"Pulling \"{self.error_log.container}\" failed, because it appears invalid. 
To convert from Docker's OCI format, prefix the URI with 'docker://' instead of 'oras://'.\n" super().__init__(self.message) class OtherError(RuntimeError): From 86b926b5bfcaf556daf5fadd4f033e7c6faba52d Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 2 Dec 2024 15:51:42 +0100 Subject: [PATCH 103/164] test also the main sync function itsel --- tests/pipelines/test_sync.py | 52 ++++++++++++++++++++++++++++++++++++ 1 file changed, 52 insertions(+) diff --git a/tests/pipelines/test_sync.py b/tests/pipelines/test_sync.py index ffbe75510..5bd4e55aa 100644 --- a/tests/pipelines/test_sync.py +++ b/tests/pipelines/test_sync.py @@ -56,6 +56,8 @@ def mocked_requests_get(url) -> MockResponse: for branch_no in range(3, 7) ] return MockResponse(response_data, 200, url) + if url == "https://nf-co.re/pipelines.json": + return MockResponse({"remote_workflows": [{"name": "testpipeline", "topics": ["test", "pipeline"]}]}, 200, url) return MockResponse([{"html_url": url}], 404, url) @@ -398,3 +400,53 @@ def test_reset_target_dir_fake_branch(self): with pytest.raises(nf_core.pipelines.sync.SyncExceptionError) as exc_info: psync.reset_target_dir() assert exc_info.value.args[0].startswith("Could not reset to original branch `fake_branch`") + + def test_sync_success(self): + """Test successful pipeline sync with PR creation""" + # Set up GitHub auth token for PR creation + os.environ["GITHUB_AUTH_TOKEN"] = "dummy_token" + + with mock.patch("requests.get", side_effect=mocked_requests_get), mock.patch( + "requests.post", side_effect=mocked_requests_post + ) as mock_post: + psync = nf_core.pipelines.sync.PipelineSync( + self.pipeline_dir, make_pr=True, gh_username="no_existing_pr", gh_repo="response" + ) + + # Run sync + psync.sync() + + # Verify that changes were made and PR was created + self.assertTrue(psync.made_changes) + mock_post.assert_called_once() + self.assertEqual(mock_post.call_args[0][0], "https://api.github.com/repos/no_existing_pr/response/pulls") + + def 
test_sync_no_changes(self): + """Test pipeline sync when no changes are needed""" + with mock.patch("requests.get", side_effect=mocked_requests_get), mock.patch( + "requests.post", side_effect=mocked_requests_post + ) as mock_post: + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) + + # Mock that no changes were made + psync.made_changes = False + + # Run sync + psync.sync() + + # Verify no PR was created + mock_post.assert_not_called() + + def test_sync_no_github_token(self): + """Test sync fails appropriately when GitHub token is missing""" + # Ensure GitHub token is not set + if "GITHUB_AUTH_TOKEN" in os.environ: + del os.environ["GITHUB_AUTH_TOKEN"] + + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir, make_pr=True) + psync.made_changes = True # Force changes to trigger PR attempt + + # Run sync and check for appropriate error + with self.assertRaises(nf_core.pipelines.sync.PullRequestExceptionError) as exc_info: + psync.sync() + self.assertIn("GITHUB_AUTH_TOKEN not set!", str(exc_info.exception)) From ee86c151a8ff9b7e0d398184badd65621074d088 Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 2 Dec 2024 15:54:07 +0100 Subject: [PATCH 104/164] combine json parsing code --- nf_core/pipelines/sync.py | 34 +++++++++++++++++++++------------- 1 file changed, 21 insertions(+), 13 deletions(-) diff --git a/nf_core/pipelines/sync.py b/nf_core/pipelines/sync.py index 896adda94..6f617295e 100644 --- a/nf_core/pipelines/sync.py +++ b/nf_core/pipelines/sync.py @@ -6,7 +6,7 @@ import re import shutil from pathlib import Path -from typing import Dict, Optional, Union +from typing import Any, Dict, Optional, Union import git import questionary @@ -416,12 +416,8 @@ def close_open_template_merge_prs(self): list_prs_url = f"https://api.github.com/repos/{self.gh_repo}/pulls" with self.gh_api.cache_disabled(): list_prs_request = self.gh_api.get(list_prs_url) - try: - list_prs_json = json.loads(list_prs_request.content) - list_prs_pp = 
json.dumps(list_prs_json, indent=4) - except Exception: - list_prs_json = list_prs_request.content - list_prs_pp = list_prs_request.content + + list_prs_json, list_prs_pp = self._parse_json_response(list_prs_request) log.debug(f"GitHub API listing existing PRs:\n{list_prs_url}\n{list_prs_pp}") if list_prs_request.status_code != 200: @@ -462,12 +458,8 @@ def close_open_pr(self, pr) -> bool: # Update the PR status to be closed with self.gh_api.cache_disabled(): pr_request = self.gh_api.patch(url=pr["url"], data=json.dumps({"state": "closed"})) - try: - pr_request_json = json.loads(pr_request.content) - pr_request_pp = json.dumps(pr_request_json, indent=4) - except Exception: - pr_request_json = pr_request.content - pr_request_pp = pr_request.content + + pr_request_json, pr_request_pp = self._parse_json_response(pr_request) # PR update worked if pr_request.status_code == 200: @@ -481,6 +473,22 @@ def close_open_pr(self, pr) -> bool: log.warning(f"Could not close PR ('{pr_request.status_code}'):\n{pr['url']}\n{pr_request_pp}") return False + @staticmethod + def _parse_json_response(response) -> tuple[Any, str]: + """Helper method to parse JSON response and create pretty-printed string. + + Args: + response: requests.Response object + + Returns: + Tuple of (parsed_json, pretty_printed_str) + """ + try: + json_data = json.loads(response.content) + return json_data, json.dumps(json_data, indent=4) + except Exception: + return response.content, str(response.content) + def reset_target_dir(self): """ Reset the target pipeline directory. Check out the original branch. 
From 538893ca9fdd74de62077c3f3d0b71ccc17562c7 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 2 Dec 2024 16:40:24 +0100 Subject: [PATCH 105/164] loop over list of authors to supply contributors --- nf_core/pipeline-template/nextflow.config | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index cc5a71ffe..21174bbdc 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -277,14 +277,16 @@ manifest { author = """{{ author }}""" // The author field is deprecated from Nextflow version 24.10.0, use contributors instead contributors = [ // TODO nf-core: Update the field with the details of the contributors to your pipeline. New with Nextflow version 24.10.0 + {%- for author_name in author.split(",") %} [ - name: '{{ author }}', + name: '{{ author_name }}', affiliation: '', email: '', github: '', contribution: [], // List of contribution types ('author', 'maintainer' or 'contributor') orcid: '' - ] + ], + {%- endfor %} ] homePage = 'https://github.com/{{ name }}' description = """{{ description }}""" From 10e691bd4dd0c49b07e094d172905ee48e1b7a79 Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 2 Dec 2024 17:45:30 +0100 Subject: [PATCH 106/164] remove broken test --- nf_core/pipelines/sync.py | 2 +- tests/pipelines/test_sync.py | 20 -------------------- 2 files changed, 1 insertion(+), 21 deletions(-) diff --git a/nf_core/pipelines/sync.py b/nf_core/pipelines/sync.py index 6f617295e..8ea561bd3 100644 --- a/nf_core/pipelines/sync.py +++ b/nf_core/pipelines/sync.py @@ -120,7 +120,7 @@ def __init__( requests.auth.HTTPBasicAuth(self.gh_username, os.environ["GITHUB_AUTH_TOKEN"]) ) - def sync(self): + def sync(self) -> None: """Find workflow attributes, create a new template pipeline on TEMPLATE""" # Clear requests_cache so that we don't get stale API responses diff --git a/tests/pipelines/test_sync.py 
b/tests/pipelines/test_sync.py index 5bd4e55aa..8bf8a3c4e 100644 --- a/tests/pipelines/test_sync.py +++ b/tests/pipelines/test_sync.py @@ -401,26 +401,6 @@ def test_reset_target_dir_fake_branch(self): psync.reset_target_dir() assert exc_info.value.args[0].startswith("Could not reset to original branch `fake_branch`") - def test_sync_success(self): - """Test successful pipeline sync with PR creation""" - # Set up GitHub auth token for PR creation - os.environ["GITHUB_AUTH_TOKEN"] = "dummy_token" - - with mock.patch("requests.get", side_effect=mocked_requests_get), mock.patch( - "requests.post", side_effect=mocked_requests_post - ) as mock_post: - psync = nf_core.pipelines.sync.PipelineSync( - self.pipeline_dir, make_pr=True, gh_username="no_existing_pr", gh_repo="response" - ) - - # Run sync - psync.sync() - - # Verify that changes were made and PR was created - self.assertTrue(psync.made_changes) - mock_post.assert_called_once() - self.assertEqual(mock_post.call_args[0][0], "https://api.github.com/repos/no_existing_pr/response/pulls") - def test_sync_no_changes(self): """Test pipeline sync when no changes are needed""" with mock.patch("requests.get", side_effect=mocked_requests_get), mock.patch( From 11f7f426ce937c53f17ff210a952413d8ad7b408 Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 2 Dec 2024 21:16:57 +0100 Subject: [PATCH 107/164] fix type error --- nf_core/pipelines/sync.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/pipelines/sync.py b/nf_core/pipelines/sync.py index 8ea561bd3..781b4f5f0 100644 --- a/nf_core/pipelines/sync.py +++ b/nf_core/pipelines/sync.py @@ -6,7 +6,7 @@ import re import shutil from pathlib import Path -from typing import Any, Dict, Optional, Union +from typing import Any, Dict, Optional, Tuple, Union import git import questionary @@ -474,7 +474,7 @@ def close_open_pr(self, pr) -> bool: return False @staticmethod - def _parse_json_response(response) -> tuple[Any, str]: + def 
_parse_json_response(response) -> Tuple[Any, str]: """Helper method to parse JSON response and create pretty-printed string. Args: From abb0fa2f4bee2123559583f27c2855c52588d3be Mon Sep 17 00:00:00 2001 From: Robert Syme Date: Fri, 29 Nov 2024 14:34:37 -0500 Subject: [PATCH 108/164] Remove toList() channel operation from inside onComplete block This PR resolves an important bug in the nf-core template, whereby all workflows will hang if the --email parameter is supplied. The onComplete block will hang if there are any (queue) channel operations inside the block. All values in the onComplete block must be resolved to single values or value channels _before_ the onComplete block starts. The async channels are not available inside onComplete, so calling the toList() operation will hang forever as the async queue channel will never be completed. --- CHANGELOG.md | 1 + .../local/utils_nfcore_pipeline_pipeline/main.nf | 6 +++++- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f418dc0af..81cb62ede 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -49,6 +49,7 @@ - Update codecov/codecov-action action to v5 ([#3283](https://github.com/nf-core/tools/pull/3283)) - Update python:3.12-slim Docker digest to 2a6386a ([#3284](https://github.com/nf-core/tools/pull/3284)) - Update pre-commit hook astral-sh/ruff-pre-commit to v0.8.0 ([#3299](https://github.com/nf-core/tools/pull/3299)) +- Remove toList() channel operation from inside onComplete block ([#3304](https://github.com/nf-core/tools/pull/3304)) ## [v3.0.2 - Titanium Tapir Patch](https://github.com/nf-core/tools/releases/tag/3.0.2) - [2024-10-11] diff --git a/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf index be5776b83..06692f1dc 100644 --- a/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf +++ 
b/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf @@ -140,6 +140,10 @@ workflow PIPELINE_COMPLETION { summary_params = [:] {%- endif %} + {%- if multiqc %} + def multiqc_reports = multiqc_report.toList() + {%- endif %} + // // Completion email and summary // @@ -153,7 +157,7 @@ workflow PIPELINE_COMPLETION { plaintext_email, outdir, monochrome_logs, - {% if multiqc %}multiqc_report.toList(){% else %}[]{% endif %} + {% if multiqc %}multiqc_reports.getVal(),{% else %}[]{% endif %} ) } {%- endif %} From eed0598c7baf5f20e87254cd8a95bc29b0836fc8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Matthias=20H=C3=B6rtenhuber?= Date: Tue, 3 Dec 2024 12:02:50 +0100 Subject: [PATCH 109/164] Apply suggestions from code review MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: JĂșlia Mir Pedrol --- nf_core/pipelines/lint/__init__.py | 2 +- nf_core/pipelines/lint/readme.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/nf_core/pipelines/lint/__init__.py b/nf_core/pipelines/lint/__init__.py index f24374384..154e38aea 100644 --- a/nf_core/pipelines/lint/__init__.py +++ b/nf_core/pipelines/lint/__init__.py @@ -593,7 +593,7 @@ def run_linting( lint_obj._load_lint_config() lint_obj.load_pipeline_config() - if lint_obj.lint_config and lint_obj.lint_config["nfcore_components"] is False: + if lint_obj.lint_config and not lint_obj.lint_config["nfcore_components"]: module_lint_obj = None subworkflow_lint_obj = None else: diff --git a/nf_core/pipelines/lint/readme.py b/nf_core/pipelines/lint/readme.py index 5a10fbfce..75b05f16e 100644 --- a/nf_core/pipelines/lint/readme.py +++ b/nf_core/pipelines/lint/readme.py @@ -35,8 +35,8 @@ def readme(self): lint: readme: - nextflow_badge - zenodo_release + - nextflow_badge + - zenodo_release """ passed = [] From bd9608d688f722d17398d8a7fe1d5dfeaeed02be Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Matthias=20H=C3=B6rtenhuber?= Date: Tue, 3 Dec 2024 
13:38:33 +0100 Subject: [PATCH 110/164] Update nf_core/components/create.py --- nf_core/components/create.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/components/create.py b/nf_core/components/create.py index 6b3b9dad2..4be165d3d 100644 --- a/nf_core/components/create.py +++ b/nf_core/components/create.py @@ -260,7 +260,7 @@ def _get_module_structure_components(self): "Where applicable all sample-specific information e.g. 'id', 'single_end', 'read_group' " "MUST be provided as an input via a Groovy Map called 'meta'. " "This information may [italic]not[/] be required in some instances, for example " - "[link=https://github.com/nf-core/modules/blob/main/modules/nf-core/bwa/index/main.nf]indexing reference genome files[/link]." + "[link=https://github.com/nf-core/modules/blob/master/modules/nf-core/bwa/index/main.nf]indexing reference genome files[/link]." ) while self.has_meta is None: self.has_meta = rich.prompt.Confirm.ask( From e9ed94ec0f8ece41b406e2f15cd1d7089bc54621 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 3 Dec 2024 14:08:04 +0100 Subject: [PATCH 111/164] set defaultBranch in nextflow config to allow `main` --- nf_core/pipeline-template/nextflow.config | 1 + 1 file changed, 1 insertion(+) diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index 21174bbdc..000d7cd66 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -291,6 +291,7 @@ manifest { homePage = 'https://github.com/{{ name }}' description = """{{ description }}""" mainScript = 'main.nf' + defaultBranch = '{{ default_branch }}' nextflowVersion = '!>=24.04.2' version = '{{ version }}' doi = '' From 00b8ae0da46a7562be763c9efa6b82c696a362ba Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 3 Dec 2024 14:18:06 +0100 Subject: [PATCH 112/164] pin java version 21 in CI --- .github/actions/create-lint-wf/action.yml | 1 - .github/workflows/create-lint-wf.yml | 4 +--- 
.github/workflows/create-test-lint-wf-template.yml | 2 ++ .github/workflows/create-test-wf.yml | 4 +--- nf_core/pipeline-template/.github/workflows/ci.yml | 1 + .../pipeline-template/.github/workflows/download_pipeline.yml | 1 + 6 files changed, 6 insertions(+), 7 deletions(-) diff --git a/.github/actions/create-lint-wf/action.yml b/.github/actions/create-lint-wf/action.yml index 3ef076051..3ffd960d2 100644 --- a/.github/actions/create-lint-wf/action.yml +++ b/.github/actions/create-lint-wf/action.yml @@ -15,7 +15,6 @@ runs: cd create-lint-wf export NXF_WORK=$(pwd) - # Set up Nextflow - name: Install Nextflow uses: nf-core/setup-nextflow@v2 with: diff --git a/.github/workflows/create-lint-wf.yml b/.github/workflows/create-lint-wf.yml index 37ab71bc3..78932871e 100644 --- a/.github/workflows/create-lint-wf.yml +++ b/.github/workflows/create-lint-wf.yml @@ -27,14 +27,12 @@ concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} cancel-in-progress: true -env: - NXF_ANSI_LOG: false - jobs: MakeTestWorkflow: runs-on: ${{ github.event.inputs.runners || github.run_number > 1 && 'ubuntu-latest' || 'self-hosted' }} env: NXF_ANSI_LOG: false + JAVA_HOME: $JAVA_HOME_21_X64 strategy: matrix: NXF_VER: diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index fffa9ffe7..b0d4c13a4 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -28,6 +28,7 @@ concurrency: env: NXF_ANSI_LOG: false + JAVA_HOME: $JAVA_HOME_21_X64 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} jobs: @@ -51,6 +52,7 @@ jobs: needs: prepare-matrix env: NXF_ANSI_LOG: false + JAVA_HOME: $JAVA_HOME_21_X64 strategy: matrix: TEMPLATE: ${{ fromJson(needs.prepare-matrix.outputs.all_features) }} diff --git a/.github/workflows/create-test-wf.yml b/.github/workflows/create-test-wf.yml index 93581b915..f69449dde 100644 --- 
a/.github/workflows/create-test-wf.yml +++ b/.github/workflows/create-test-wf.yml @@ -27,15 +27,13 @@ concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} cancel-in-progress: true -env: - NXF_ANSI_LOG: false - jobs: RunTestWorkflow: # use the runner given by the input if it is dispatched manually, run on github if it is a rerun or on self-hosted by default runs-on: ${{ github.event.inputs.runners || github.run_number > 1 && 'ubuntu-latest' || 'self-hosted' }} env: NXF_ANSI_LOG: false + JAVA_HOME: $JAVA_HOME_21_X64 strategy: matrix: NXF_VER: diff --git a/nf_core/pipeline-template/.github/workflows/ci.yml b/nf_core/pipeline-template/.github/workflows/ci.yml index 9db393d9f..4f005a1bb 100644 --- a/nf_core/pipeline-template/.github/workflows/ci.yml +++ b/nf_core/pipeline-template/.github/workflows/ci.yml @@ -11,6 +11,7 @@ on: env: NXF_ANSI_LOG: false + JAVA_HOME: $JAVA_HOME_21_X64 NXF_SINGULARITY_CACHEDIR: ${{ github.workspace }}/.singularity NXF_SINGULARITY_LIBRARYDIR: ${{ github.workspace }}/.singularity diff --git a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml index 05397358c..95ebad9fe 100644 --- a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml +++ b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml @@ -26,6 +26,7 @@ on: env: NXF_ANSI_LOG: false + JAVA_HOME: $JAVA_HOME_21_X64 jobs: download: From 4b2338ff161a1ddea799c8ab5270e1cefe0f7da0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Tue, 3 Dec 2024 13:32:17 +0000 Subject: [PATCH 113/164] update modules and subworkflows --- nf_core/pipeline-template/modules.json | 8 +- .../modules/nf-core/fastqc/main.nf | 2 +- .../modules/nf-core/fastqc/meta.yml | 1 + .../nf-core/utils_nextflow_pipeline/main.nf | 2 + .../tests/main.workflow.nf.test | 10 ++- .../nf-core/utils_nfcore_pipeline/main.nf | 89 +++++-------------- 
.../tests/main.function.nf.test | 52 ----------- .../tests/main.function.nf.test.snap | 30 ------- .../utils_nfschema_plugin/tests/main.nf.test | 4 +- 9 files changed, 40 insertions(+), 158 deletions(-) diff --git a/nf_core/pipeline-template/modules.json b/nf_core/pipeline-template/modules.json index f714eb1d9..397c0cdb0 100644 --- a/nf_core/pipeline-template/modules.json +++ b/nf_core/pipeline-template/modules.json @@ -8,7 +8,7 @@ {%- if fastqc %} "fastqc": { "branch": "master", - "git_sha": "666652151335353eef2fcd58880bcef5bc2928e1", + "git_sha": "dc94b6ee04a05ddb9f7ae050712ff30a13149164", "installed_by": ["modules"] }{% endif %}{%- if multiqc %}{% if fastqc %},{% endif %} "multiqc": { @@ -23,17 +23,17 @@ "nf-core": { "utils_nextflow_pipeline": { "branch": "master", - "git_sha": "3aa0aec1d52d492fe241919f0c6100ebf0074082", + "git_sha": "c2b22d85f30a706a3073387f30380704fcae013b", "installed_by": ["subworkflows"] }, "utils_nfcore_pipeline": { "branch": "master", - "git_sha": "1b6b9a3338d011367137808b49b923515080e3ba", + "git_sha": "85400682a2abac63b09c863c138e91e5df7236b5", "installed_by": ["subworkflows"] }{% if nf_schema %}, "utils_nfschema_plugin": { "branch": "master", - "git_sha": "bbd5a41f4535a8defafe6080e00ea74c45f4f96c", + "git_sha": "2fd2cd6d0e7b273747f32e465fdc6bcc3ae0814e", "installed_by": ["subworkflows"] }{% endif %} } diff --git a/nf_core/pipeline-template/modules/nf-core/fastqc/main.nf b/nf_core/pipeline-template/modules/nf-core/fastqc/main.nf index d8989f481..752c3a10c 100644 --- a/nf_core/pipeline-template/modules/nf-core/fastqc/main.nf +++ b/nf_core/pipeline-template/modules/nf-core/fastqc/main.nf @@ -24,7 +24,7 @@ process FASTQC { // Make list of old name and new name pairs to use for renaming in the bash while loop def old_new_pairs = reads instanceof Path || reads.size() == 1 ? 
[[ reads, "${prefix}.${reads.extension}" ]] : reads.withIndex().collect { entry, index -> [ entry, "${prefix}_${index + 1}.${entry.extension}" ] } def rename_to = old_new_pairs*.join(' ').join(' ') - def renamed_files = old_new_pairs.collect{ old_name, new_name -> new_name }.join(' ') + def renamed_files = old_new_pairs.collect{ _old_name, new_name -> new_name }.join(' ') // The total amount of allocated RAM by FastQC is equal to the number of threads defined (--threads) time the amount of RAM defined (--memory) // https://github.com/s-andrews/FastQC/blob/1faeea0412093224d7f6a07f777fad60a5650795/fastqc#L211-L222 diff --git a/nf_core/pipeline-template/modules/nf-core/fastqc/meta.yml b/nf_core/pipeline-template/modules/nf-core/fastqc/meta.yml index 4827da7af..2b2e62b8a 100644 --- a/nf_core/pipeline-template/modules/nf-core/fastqc/meta.yml +++ b/nf_core/pipeline-template/modules/nf-core/fastqc/meta.yml @@ -11,6 +11,7 @@ tools: FastQC gives general quality metrics about your reads. It provides information about the quality score distribution across your reads, the per base sequence content (%A/C/G/T). + You get information about adapter contamination and other overrepresented sequences. 
homepage: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/ diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf index 0fcbf7b3f..d6e593e85 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf @@ -92,10 +92,12 @@ def checkCondaChannels() { channels = config.channels } catch (NullPointerException e) { + log.debug(e) log.warn("Could not verify conda channel configuration.") return null } catch (IOException e) { + log.debug(e) log.warn("Could not verify conda channel configuration.") return null } diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test index ca964ce8e..02dbf094c 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test @@ -52,10 +52,12 @@ nextflow_workflow { } then { - assertAll( - { assert workflow.success }, - { assert workflow.stdout.contains("nextflow_workflow v9.9.9") } - ) + expect { + with(workflow) { + assert success + assert "nextflow_workflow v9.9.9" in stdout + } + } } } diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf index 5cb7bafef..9e874fbf0 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf @@ -56,21 +56,6 @@ def checkProfileProvided(nextflow_cli_args) { } } -// -// Citation string for pipeline -// -def workflowCitation() { - def 
temp_doi_ref = "" - def manifest_doi = workflow.manifest.doi.tokenize(",") - // Handling multiple DOIs - // Removing `https://doi.org/` to handle pipelines using DOIs vs DOI resolvers - // Removing ` ` since the manifest.doi is a string and not a proper list - manifest_doi.each { doi_ref -> - temp_doi_ref += " https://doi.org/${doi_ref.replace('https://doi.org/', '').replace(' ', '')}\n" - } - return "If you use ${workflow.manifest.name} for your analysis please cite:\n\n" + "* The pipeline\n" + temp_doi_ref + "\n" + "* The nf-core framework\n" + " https://doi.org/10.1038/s41587-020-0439-x\n\n" + "* Software dependencies\n" + " https://github.com/${workflow.manifest.name}/blob/master/CITATIONS.md" -} - // // Generate workflow version string // @@ -150,33 +135,6 @@ def paramsSummaryMultiqc(summary_params) { return yaml_file_text } -// -// nf-core logo -// -def nfCoreLogo(monochrome_logs=true) { - def colors = logColours(monochrome_logs) as Map - String.format( - """\n - ${dashedLine(monochrome_logs)} - ${colors.green},--.${colors.black}/${colors.green},-.${colors.reset} - ${colors.blue} ___ __ __ __ ___ ${colors.green}/,-._.--~\'${colors.reset} - ${colors.blue} |\\ | |__ __ / ` / \\ |__) |__ ${colors.yellow}} {${colors.reset} - ${colors.blue} | \\| | \\__, \\__/ | \\ |___ ${colors.green}\\`-._,-`-,${colors.reset} - ${colors.green}`._,._,\'${colors.reset} - ${colors.purple} ${workflow.manifest.name} ${getWorkflowVersion()}${colors.reset} - ${dashedLine(monochrome_logs)} - """.stripIndent() - ) -} - -// -// Return dashed line -// -def dashedLine(monochrome_logs=true) { - def colors = logColours(monochrome_logs) as Map - return "-${colors.dim}----------------------------------------------------${colors.reset}-" -} - // // ANSII colours used for terminal logging // @@ -245,28 +203,26 @@ def logColours(monochrome_logs=true) { return colorcodes } -// -// Attach the multiqc report to email -// -def attachMultiqcReport(multiqc_report) { - def mqc_report = null - try { - if 
(workflow.success) { - mqc_report = multiqc_report.getVal() - if (mqc_report.getClass() == ArrayList && mqc_report.size() >= 1) { - if (mqc_report.size() > 1) { +// Return a single report from an object that may be a Path or List +// +def getSingleReport(multiqc_reports) { + switch (multiqc_reports) { + case Path: + return multiqc_reports + case List: + switch (multiqc_reports.size()) { + case 0: + log.warn("[${workflow.manifest.name}] No reports found from process 'MULTIQC'") + return null + case 1: + return multiqc_reports.first() + default: log.warn("[${workflow.manifest.name}] Found multiple reports from process 'MULTIQC', will use only one") - } - mqc_report = mqc_report[0] + return multiqc_reports.first() } - } + default: + return null } - catch (Exception all) { - if (multiqc_report) { - log.warn("[${workflow.manifest.name}] Could not attach MultiQC report to summary email") - } - } - return mqc_report } // @@ -320,7 +276,7 @@ def completionEmail(summary_params, email, email_on_fail, plaintext_email, outdi email_fields['summary'] = summary << misc_fields // On success try attach the multiqc report - def mqc_report = attachMultiqcReport(multiqc_report) + def mqc_report = getSingleReport(multiqc_report) // Check if we are only sending emails on failure def email_address = email @@ -340,7 +296,7 @@ def completionEmail(summary_params, email, email_on_fail, plaintext_email, outdi def email_html = html_template.toString() // Render the sendmail template - def max_multiqc_email_size = (params.containsKey('max_multiqc_email_size') ? params.max_multiqc_email_size : 0) as nextflow.util.MemoryUnit + def max_multiqc_email_size = (params.containsKey('max_multiqc_email_size') ? 
params.max_multiqc_email_size : 0) as MemoryUnit def smail_fields = [email: email_address, subject: subject, email_txt: email_txt, email_html: email_html, projectDir: "${workflow.projectDir}", mqcFile: mqc_report, mqcMaxSize: max_multiqc_email_size.toBytes()] def sf = new File("${workflow.projectDir}/assets/sendmail_template.txt") def sendmail_template = engine.createTemplate(sf).make(smail_fields) @@ -351,14 +307,17 @@ def completionEmail(summary_params, email, email_on_fail, plaintext_email, outdi if (email_address) { try { if (plaintext_email) { -new org.codehaus.groovy.GroovyException('Send plaintext e-mail, not HTML') } + new org.codehaus.groovy.GroovyException('Send plaintext e-mail, not HTML') + } // Try to send HTML e-mail using sendmail def sendmail_tf = new File(workflow.launchDir.toString(), ".sendmail_tmp.html") sendmail_tf.withWriter { w -> w << sendmail_html } ['sendmail', '-t'].execute() << sendmail_html log.info("-${colors.purple}[${workflow.manifest.name}]${colors.green} Sent summary e-mail to ${email_address} (sendmail)-") } - catch (Exception all) { + catch (Exception msg) { + log.debug(msg) + log.debug("Trying with mail instead of sendmail") // Catch failures and try with plaintext def mail_cmd = ['mail', '-s', subject, '--content-type=text/html', email_address] mail_cmd.execute() << email_html diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test index 1dc317f8f..e43d208b1 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test @@ -41,58 +41,6 @@ nextflow_function { } } - test("Test Function workflowCitation") { - - function "workflowCitation" - - then { - assertAll( - { assert function.success }, - { assert 
snapshot(function.result).match() } - ) - } - } - - test("Test Function nfCoreLogo") { - - function "nfCoreLogo" - - when { - function { - """ - input[0] = false - """ - } - } - - then { - assertAll( - { assert function.success }, - { assert snapshot(function.result).match() } - ) - } - } - - test("Test Function dashedLine") { - - function "dashedLine" - - when { - function { - """ - input[0] = false - """ - } - } - - then { - assertAll( - { assert function.success }, - { assert snapshot(function.result).match() } - ) - } - } - test("Test Function without logColours") { function "logColours" diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap index 1037232c9..02c670141 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap @@ -17,26 +17,6 @@ }, "timestamp": "2024-02-28T12:02:59.729647" }, - "Test Function nfCoreLogo": { - "content": [ - "\n\n-\u001b[2m----------------------------------------------------\u001b[0m-\n \u001b[0;32m,--.\u001b[0;30m/\u001b[0;32m,-.\u001b[0m\n\u001b[0;34m ___ __ __ __ ___ \u001b[0;32m/,-._.--~'\u001b[0m\n\u001b[0;34m |\\ | |__ __ / ` / \\ |__) |__ \u001b[0;33m} {\u001b[0m\n\u001b[0;34m | \\| | \\__, \\__/ | \\ |___ \u001b[0;32m\\`-._,-`-,\u001b[0m\n \u001b[0;32m`._,._,'\u001b[0m\n\u001b[0;35m nextflow_workflow v9.9.9\u001b[0m\n-\u001b[2m----------------------------------------------------\u001b[0m-\n" - ], - "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" - }, - "timestamp": "2024-02-28T12:03:10.562934" - }, - "Test Function workflowCitation": { - "content": [ - "If you use nextflow_workflow for your analysis please cite:\n\n* The pipeline\n https://doi.org/10.5281/zenodo.5070524\n\n* The nf-core 
framework\n https://doi.org/10.1038/s41587-020-0439-x\n\n* Software dependencies\n https://github.com/nextflow_workflow/blob/master/CITATIONS.md" - ], - "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" - }, - "timestamp": "2024-02-28T12:03:07.019761" - }, "Test Function without logColours": { "content": [ { @@ -95,16 +75,6 @@ }, "timestamp": "2024-02-28T12:03:17.969323" }, - "Test Function dashedLine": { - "content": [ - "-\u001b[2m----------------------------------------------------\u001b[0m-" - ], - "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" - }, - "timestamp": "2024-02-28T12:03:14.366181" - }, "Test Function with logColours": { "content": [ { diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test index 842dc432a..8fb301648 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test @@ -42,7 +42,7 @@ nextflow_workflow { params { test_data = '' - outdir = 1 + outdir = null } workflow { @@ -94,7 +94,7 @@ nextflow_workflow { params { test_data = '' - outdir = 1 + outdir = null } workflow { From a604cb5595e9f93059d58ba1e6a9d613430803b6 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 3 Dec 2024 14:37:17 +0100 Subject: [PATCH 114/164] set the variable manually --- .github/workflows/create-lint-wf.yml | 2 +- .github/workflows/create-test-lint-wf-template.yml | 9 +++++++-- .github/workflows/create-test-wf.yml | 8 +++++++- .github/workflows/pytest.yml | 6 ++++++ nf_core/pipeline-template/.github/workflows/ci.yml | 7 ++++++- .../.github/workflows/download_pipeline.yml | 7 ++++++- nf_core/pipeline-template/.github/workflows/linting.yml | 6 ++++++ 7 files changed, 39 insertions(+), 6 deletions(-) diff --git a/.github/workflows/create-lint-wf.yml b/.github/workflows/create-lint-wf.yml 
index 78932871e..fa6c38ef0 100644 --- a/.github/workflows/create-lint-wf.yml +++ b/.github/workflows/create-lint-wf.yml @@ -32,7 +32,7 @@ jobs: runs-on: ${{ github.event.inputs.runners || github.run_number > 1 && 'ubuntu-latest' || 'self-hosted' }} env: NXF_ANSI_LOG: false - JAVA_HOME: $JAVA_HOME_21_X64 + strategy: matrix: NXF_VER: diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index b0d4c13a4..b4579178b 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -28,7 +28,6 @@ concurrency: env: NXF_ANSI_LOG: false - JAVA_HOME: $JAVA_HOME_21_X64 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} jobs: @@ -52,7 +51,7 @@ jobs: needs: prepare-matrix env: NXF_ANSI_LOG: false - JAVA_HOME: $JAVA_HOME_21_X64 + strategy: matrix: TEMPLATE: ${{ fromJson(needs.prepare-matrix.outputs.all_features) }} @@ -92,6 +91,12 @@ jobs: python -m pip install --upgrade pip pip install . + # Set up JAVA_HOME for Java 21 + - name: Set Java 21 + run: | + echo "JAVA_HOME=$JAVA_HOME_21_X64" >> $GITHUB_ENV + echo "PATH=$JAVA_HOME_21_X64/bin:$PATH" >> $GITHUB_ENV + - name: Install Nextflow uses: nf-core/setup-nextflow@v2 with: diff --git a/.github/workflows/create-test-wf.yml b/.github/workflows/create-test-wf.yml index f69449dde..4ad4d98e0 100644 --- a/.github/workflows/create-test-wf.yml +++ b/.github/workflows/create-test-wf.yml @@ -33,7 +33,7 @@ jobs: runs-on: ${{ github.event.inputs.runners || github.run_number > 1 && 'ubuntu-latest' || 'self-hosted' }} env: NXF_ANSI_LOG: false - JAVA_HOME: $JAVA_HOME_21_X64 + strategy: matrix: NXF_VER: @@ -59,6 +59,12 @@ jobs: python -m pip install --upgrade pip pip install . 
+ # Set up JAVA_HOME for Java 21 + - name: Set Java 21 + run: | + echo "JAVA_HOME=$JAVA_HOME_21_X64" >> $GITHUB_ENV + echo "PATH=$JAVA_HOME_21_X64/bin:$PATH" >> $GITHUB_ENV + - name: Install Nextflow uses: nf-core/setup-nextflow@v2 with: diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 76d5d710c..45d08d30b 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -120,6 +120,12 @@ jobs: id: date run: echo "date=$(date +'%Y-%m')" >> $GITHUB_ENV + # Set up JAVA_HOME for Java 21 + - name: Set Java 21 + run: | + echo "JAVA_HOME=$JAVA_HOME_21_X64" >> $GITHUB_ENV + echo "PATH=$JAVA_HOME_21_X64/bin:$PATH" >> $GITHUB_ENV + - name: Install Nextflow uses: nf-core/setup-nextflow@v2 diff --git a/nf_core/pipeline-template/.github/workflows/ci.yml b/nf_core/pipeline-template/.github/workflows/ci.yml index 4f005a1bb..5279d8b6d 100644 --- a/nf_core/pipeline-template/.github/workflows/ci.yml +++ b/nf_core/pipeline-template/.github/workflows/ci.yml @@ -11,7 +11,6 @@ on: env: NXF_ANSI_LOG: false - JAVA_HOME: $JAVA_HOME_21_X64 NXF_SINGULARITY_CACHEDIR: ${{ github.workspace }}/.singularity NXF_SINGULARITY_LIBRARYDIR: ${{ github.workspace }}/.singularity @@ -48,6 +47,12 @@ jobs: - name: Check out pipeline code uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 + # Set up JAVA_HOME for Java 21 + - name: Set Java 21 + run: | + echo "JAVA_HOME=$JAVA_HOME_21_X64" >> $GITHUB_ENV + echo "PATH=$JAVA_HOME_21_X64/bin:$PATH" >> $GITHUB_ENV + - name: Set up Nextflow uses: nf-core/setup-nextflow@v2 with: diff --git a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml index 95ebad9fe..4a483f6ed 100644 --- a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml +++ b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml @@ -26,12 +26,17 @@ on: env: NXF_ANSI_LOG: false - JAVA_HOME: $JAVA_HOME_21_X64 jobs: download: runs-on: 
ubuntu-latest steps: + # Set up JAVA_HOME for Java 21 + - name: Set Java 21 + run: | + echo "JAVA_HOME=$JAVA_HOME_21_X64" >> $GITHUB_ENV + echo "PATH=$JAVA_HOME_21_X64/bin:$PATH" >> $GITHUB_ENV + - name: Install Nextflow uses: nf-core/setup-nextflow@v2 diff --git a/nf_core/pipeline-template/.github/workflows/linting.yml b/nf_core/pipeline-template/.github/workflows/linting.yml index cfdbcc12a..ede8a3435 100644 --- a/nf_core/pipeline-template/.github/workflows/linting.yml +++ b/nf_core/pipeline-template/.github/workflows/linting.yml @@ -33,6 +33,12 @@ jobs: - name: Check out pipeline code uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 + # Set up JAVA_HOME for Java 21 + - name: Set Java 21 + run: | + echo "JAVA_HOME=$JAVA_HOME_21_X64" >> $GITHUB_ENV + echo "PATH=$JAVA_HOME_21_X64/bin:$PATH" >> $GITHUB_ENV + - name: Install Nextflow uses: nf-core/setup-nextflow@v2 From 8bc1a307d1ef53a9311f0ce5e44064217f2e1006 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 3 Dec 2024 14:43:11 +0100 Subject: [PATCH 115/164] nope, setup-java it is --- .github/workflows/create-test-lint-wf-template.yml | 9 ++++----- .github/workflows/create-test-wf.yml | 9 ++++----- .github/workflows/pytest.yml | 9 ++++----- nf_core/pipeline-template/.github/workflows/ci.yml | 9 ++++----- .../.github/workflows/download_pipeline.yml | 9 ++++----- nf_core/pipeline-template/.github/workflows/linting.yml | 9 ++++----- 6 files changed, 24 insertions(+), 30 deletions(-) diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index b4579178b..78d174434 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -91,11 +91,10 @@ jobs: python -m pip install --upgrade pip pip install . 
- # Set up JAVA_HOME for Java 21 - - name: Set Java 21 - run: | - echo "JAVA_HOME=$JAVA_HOME_21_X64" >> $GITHUB_ENV - echo "PATH=$JAVA_HOME_21_X64/bin:$PATH" >> $GITHUB_ENV + - uses: actions/setup-java@v4 + with: + java-version: "21" + distribution: "temurin" - name: Install Nextflow uses: nf-core/setup-nextflow@v2 diff --git a/.github/workflows/create-test-wf.yml b/.github/workflows/create-test-wf.yml index 4ad4d98e0..55b64f9cf 100644 --- a/.github/workflows/create-test-wf.yml +++ b/.github/workflows/create-test-wf.yml @@ -59,11 +59,10 @@ jobs: python -m pip install --upgrade pip pip install . - # Set up JAVA_HOME for Java 21 - - name: Set Java 21 - run: | - echo "JAVA_HOME=$JAVA_HOME_21_X64" >> $GITHUB_ENV - echo "PATH=$JAVA_HOME_21_X64/bin:$PATH" >> $GITHUB_ENV + - uses: actions/setup-java@v4 + with: + java-version: "21" + distribution: "temurin" - name: Install Nextflow uses: nf-core/setup-nextflow@v2 diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 45d08d30b..dcf5f0b0c 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -120,11 +120,10 @@ jobs: id: date run: echo "date=$(date +'%Y-%m')" >> $GITHUB_ENV - # Set up JAVA_HOME for Java 21 - - name: Set Java 21 - run: | - echo "JAVA_HOME=$JAVA_HOME_21_X64" >> $GITHUB_ENV - echo "PATH=$JAVA_HOME_21_X64/bin:$PATH" >> $GITHUB_ENV + - uses: actions/setup-java@v4 + with: + java-version: "21" + distribution: "temurin" - name: Install Nextflow uses: nf-core/setup-nextflow@v2 diff --git a/nf_core/pipeline-template/.github/workflows/ci.yml b/nf_core/pipeline-template/.github/workflows/ci.yml index 5279d8b6d..d022850ec 100644 --- a/nf_core/pipeline-template/.github/workflows/ci.yml +++ b/nf_core/pipeline-template/.github/workflows/ci.yml @@ -47,11 +47,10 @@ jobs: - name: Check out pipeline code uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - # Set up JAVA_HOME for Java 21 - - name: Set Java 21 - run: | - echo "JAVA_HOME=$JAVA_HOME_21_X64" >> 
$GITHUB_ENV - echo "PATH=$JAVA_HOME_21_X64/bin:$PATH" >> $GITHUB_ENV + - uses: actions/setup-java@v4 + with: + java-version: "21" + distribution: "temurin" - name: Set up Nextflow uses: nf-core/setup-nextflow@v2 diff --git a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml index 4a483f6ed..5d483bd0a 100644 --- a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml +++ b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml @@ -31,11 +31,10 @@ jobs: download: runs-on: ubuntu-latest steps: - # Set up JAVA_HOME for Java 21 - - name: Set Java 21 - run: | - echo "JAVA_HOME=$JAVA_HOME_21_X64" >> $GITHUB_ENV - echo "PATH=$JAVA_HOME_21_X64/bin:$PATH" >> $GITHUB_ENV + - uses: actions/setup-java@v4 + with: + java-version: "21" + distribution: "temurin" - name: Install Nextflow uses: nf-core/setup-nextflow@v2 diff --git a/nf_core/pipeline-template/.github/workflows/linting.yml b/nf_core/pipeline-template/.github/workflows/linting.yml index ede8a3435..a6150781c 100644 --- a/nf_core/pipeline-template/.github/workflows/linting.yml +++ b/nf_core/pipeline-template/.github/workflows/linting.yml @@ -33,11 +33,10 @@ jobs: - name: Check out pipeline code uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - # Set up JAVA_HOME for Java 21 - - name: Set Java 21 - run: | - echo "JAVA_HOME=$JAVA_HOME_21_X64" >> $GITHUB_ENV - echo "PATH=$JAVA_HOME_21_X64/bin:$PATH" >> $GITHUB_ENV + - uses: actions/setup-java@v4 + with: + java-version: "21" + distribution: "temurin" - name: Install Nextflow uses: nf-core/setup-nextflow@v2 From 1857ebf51658a0100d70fa324303b6e4fa3adb12 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 3 Dec 2024 14:46:55 +0100 Subject: [PATCH 116/164] add missing setup-java action --- .github/actions/create-lint-wf/action.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/actions/create-lint-wf/action.yml 
b/.github/actions/create-lint-wf/action.yml index 3ffd960d2..01b0fda42 100644 --- a/.github/actions/create-lint-wf/action.yml +++ b/.github/actions/create-lint-wf/action.yml @@ -15,6 +15,11 @@ runs: cd create-lint-wf export NXF_WORK=$(pwd) + - uses: actions/setup-java@v4 + with: + java-version: "21" + distribution: "temurin" + - name: Install Nextflow uses: nf-core/setup-nextflow@v2 with: From 14480d280739292c69c32bc2dd0f1f0127abe136 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 3 Dec 2024 15:26:31 +0100 Subject: [PATCH 117/164] use java v17 --- .github/actions/create-lint-wf/action.yml | 2 +- .github/workflows/create-test-lint-wf-template.yml | 2 +- .github/workflows/create-test-wf.yml | 2 +- .github/workflows/pytest.yml | 2 +- nf_core/pipeline-template/.github/workflows/ci.yml | 2 +- .../pipeline-template/.github/workflows/download_pipeline.yml | 2 +- nf_core/pipeline-template/.github/workflows/linting.yml | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/actions/create-lint-wf/action.yml b/.github/actions/create-lint-wf/action.yml index 01b0fda42..257378479 100644 --- a/.github/actions/create-lint-wf/action.yml +++ b/.github/actions/create-lint-wf/action.yml @@ -17,7 +17,7 @@ runs: - uses: actions/setup-java@v4 with: - java-version: "21" + java-version: "17" distribution: "temurin" - name: Install Nextflow diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index 78d174434..f5e40e837 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -93,7 +93,7 @@ jobs: - uses: actions/setup-java@v4 with: - java-version: "21" + java-version: "17" distribution: "temurin" - name: Install Nextflow diff --git a/.github/workflows/create-test-wf.yml b/.github/workflows/create-test-wf.yml index 55b64f9cf..b12b503b2 100644 --- a/.github/workflows/create-test-wf.yml +++ b/.github/workflows/create-test-wf.yml @@ -61,7 
+61,7 @@ jobs: - uses: actions/setup-java@v4 with: - java-version: "21" + java-version: "17" distribution: "temurin" - name: Install Nextflow diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index dcf5f0b0c..5bb526396 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -122,7 +122,7 @@ jobs: - uses: actions/setup-java@v4 with: - java-version: "21" + java-version: "17" distribution: "temurin" - name: Install Nextflow diff --git a/nf_core/pipeline-template/.github/workflows/ci.yml b/nf_core/pipeline-template/.github/workflows/ci.yml index d022850ec..3bc9db4d7 100644 --- a/nf_core/pipeline-template/.github/workflows/ci.yml +++ b/nf_core/pipeline-template/.github/workflows/ci.yml @@ -49,7 +49,7 @@ jobs: - uses: actions/setup-java@v4 with: - java-version: "21" + java-version: "17" distribution: "temurin" - name: Set up Nextflow diff --git a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml index 5d483bd0a..326ac0936 100644 --- a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml +++ b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml @@ -33,7 +33,7 @@ jobs: steps: - uses: actions/setup-java@v4 with: - java-version: "21" + java-version: "17" distribution: "temurin" - name: Install Nextflow diff --git a/nf_core/pipeline-template/.github/workflows/linting.yml b/nf_core/pipeline-template/.github/workflows/linting.yml index a6150781c..f9ef12335 100644 --- a/nf_core/pipeline-template/.github/workflows/linting.yml +++ b/nf_core/pipeline-template/.github/workflows/linting.yml @@ -35,7 +35,7 @@ jobs: - uses: actions/setup-java@v4 with: - java-version: "21" + java-version: "17" distribution: "temurin" - name: Install Nextflow From 9c43f15ea589c17374b593dabcf6dea621c07e64 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 3 Dec 2024 15:36:20 +0100 Subject: [PATCH 118/164] avoid confusion by separating the string --- 
nf_core/pipeline-template/.github/workflows/branch.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/nf_core/pipeline-template/.github/workflows/branch.yml b/nf_core/pipeline-template/.github/workflows/branch.yml index e0ae1aa8a..482f53f3e 100644 --- a/nf_core/pipeline-template/.github/workflows/branch.yml +++ b/nf_core/pipeline-template/.github/workflows/branch.yml @@ -1,6 +1,6 @@ name: nf-core branch protection # This workflow is triggered on PRs to main/master branch on the repository -# It fails when someone tries to make a PR against the nf-core `main/master` branch instead of `dev` +# It fails when someone tries to make a PR against the nf-core `main`/`master` branch instead of `dev` on: pull_request_target: branches: @@ -24,7 +24,7 @@ jobs: uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2 with: message: | - ## This PR is against the `main/master` branch :x: + ## This PR is against the `main`/`master` branch :x: * Do not close this PR * Click _Edit_ and change the `base` to `dev` @@ -34,9 +34,9 @@ jobs: Hi @${{ github.event.pull_request.user.login }}, - It looks like this pull-request is has been made against the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) `main/master` branch. - The `main/master` branch on nf-core repositories should always contain code from the latest release. - Because of this, PRs to `main/master` are only allowed if they come from the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) `dev` branch. + It looks like this pull-request is has been made against the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) `main`/`master` branch. + The `main`/`master` branch on nf-core repositories should always contain code from the latest release. 
+ Because of this, PRs to `main`/`master` are only allowed if they come from the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) `dev` branch. You do not need to close this PR, you can change the target branch to `dev` by clicking the _"Edit"_ button at the top of this page. Note that even after this, the test will continue to show as failing until you push a new commit. From 2e4a8ede4b1f7af1efc003cd0ce8524b002e8612 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 3 Dec 2024 16:50:53 +0100 Subject: [PATCH 119/164] use contect to figure out if it master or main Co-authored-by: @mirpredrol --- nf_core/pipeline-template/.github/workflows/branch.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipeline-template/.github/workflows/branch.yml b/nf_core/pipeline-template/.github/workflows/branch.yml index 482f53f3e..5c1798d3b 100644 --- a/nf_core/pipeline-template/.github/workflows/branch.yml +++ b/nf_core/pipeline-template/.github/workflows/branch.yml @@ -24,7 +24,7 @@ jobs: uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2 with: message: | - ## This PR is against the `main`/`master` branch :x: + ## This PR is against the `${{github.event.pull_request.base.ref}}` branch :x: * Do not close this PR * Click _Edit_ and change the `base` to `dev` From 229a2329dfefc5981897006c546a00d61da587da Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 3 Dec 2024 18:14:33 +0100 Subject: [PATCH 120/164] disambiguate in more places --- nf_core/pipeline-template/.github/workflows/branch.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/nf_core/pipeline-template/.github/workflows/branch.yml b/nf_core/pipeline-template/.github/workflows/branch.yml index 5c1798d3b..26df52f09 100644 --- a/nf_core/pipeline-template/.github/workflows/branch.yml +++ b/nf_core/pipeline-template/.github/workflows/branch.yml @@ -1,5 +1,5 @@ name: nf-core 
branch protection -# This workflow is triggered on PRs to main/master branch on the repository +# This workflow is triggered on PRs to `main`/`master` branch on the repository # It fails when someone tries to make a PR against the nf-core `main`/`master` branch instead of `dev` on: pull_request_target: @@ -24,7 +24,7 @@ jobs: uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2 with: message: | - ## This PR is against the `${{github.event.pull_request.base.ref}}` branch :x: + ## This PR is against the `{% raw %}${{github.event.pull_request.base.ref}}{% endraw %}` branch :x: * Do not close this PR * Click _Edit_ and change the `base` to `dev` @@ -34,9 +34,9 @@ jobs: Hi @${{ github.event.pull_request.user.login }}, - It looks like this pull-request is has been made against the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) `main`/`master` branch. - The `main`/`master` branch on nf-core repositories should always contain code from the latest release. - Because of this, PRs to `main`/`master` are only allowed if they come from the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) `dev` branch. + It looks like this pull-request is has been made against the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) {% raw %}${{github.event.pull_request.base.ref}}{% endraw %} branch. + The {% raw %}${{github.event.pull_request.base.ref}}{% endraw %} branch on nf-core repositories should always contain code from the latest release. + Because of this, PRs to {% raw %}${{github.event.pull_request.base.ref}}{% endraw %} are only allowed if they come from the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) `dev` branch. 
You do not need to close this PR, you can change the target branch to `dev` by clicking the _"Edit"_ button at the top of this page. Note that even after this, the test will continue to show as failing until you push a new commit. From dc9c3b1763d3f41ae42a4c97a405ba9bfad61878 Mon Sep 17 00:00:00 2001 From: lmReef Date: Wed, 4 Dec 2024 09:43:11 +1300 Subject: [PATCH 121/164] fix: linting error in meta_yml where module.process_name is always "" --- nf_core/components/nfcore_component.py | 12 +++++++++++- nf_core/modules/lint/main_nf.py | 1 - 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/nf_core/components/nfcore_component.py b/nf_core/components/nfcore_component.py index 37e43a536..090f55a4b 100644 --- a/nf_core/components/nfcore_component.py +++ b/nf_core/components/nfcore_component.py @@ -62,7 +62,6 @@ def __init__( # Initialize the important files self.main_nf: Path = Path(self.component_dir, "main.nf") self.meta_yml: Optional[Path] = Path(self.component_dir, "meta.yml") - self.process_name = "" self.environment_yml: Optional[Path] = Path(self.component_dir, "environment.yml") component_list = self.component_name.split("/") @@ -96,6 +95,9 @@ def __init__( self.test_yml = None self.test_main_nf = None + # Set process_name after self.main_nf is defined + self.process_name = self._get_process_name() + def __repr__(self) -> str: return f"" @@ -169,6 +171,13 @@ def _get_included_components_in_chained_tests(self, main_nf_test: Union[Path, st included_components.append(component) return included_components + def _get_process_name(self): + with open(self.main_nf) as fh: + for line in fh: + if re.search(r"^\s*process\s*\w*\s*{", line): + return re.search(r"^\s*process\s*(\w*)\s*{.*", line).group(1) or "" + return "" + def get_inputs_from_main_nf(self) -> None: """Collect all inputs from the main.nf file.""" inputs: Any = [] # Can be 'list[list[dict[str, dict[str, str]]]]' or 'list[str]' @@ -263,3 +272,4 @@ def get_outputs_from_main_nf(self): pass 
log.debug(f"Found {len(outputs)} outputs in {self.main_nf}") self.outputs = outputs + diff --git a/nf_core/modules/lint/main_nf.py b/nf_core/modules/lint/main_nf.py index 848e17130..9a1790aeb 100644 --- a/nf_core/modules/lint/main_nf.py +++ b/nf_core/modules/lint/main_nf.py @@ -256,7 +256,6 @@ def check_process_section(self, lines, registry, fix_version, progress_bar): bioconda_packages = [] # Process name should be all capital letters - self.process_name = lines[0].split()[1] if all(x.upper() for x in self.process_name): self.passed.append(("process_capitals", "Process name is in capital letters", self.main_nf)) else: From b04351db539fc3962b816595ee7ab9eae3a83bd3 Mon Sep 17 00:00:00 2001 From: lmReef Date: Wed, 4 Dec 2024 09:54:53 +1300 Subject: [PATCH 122/164] fix: python linting warn --- nf_core/components/nfcore_component.py | 1 - 1 file changed, 1 deletion(-) diff --git a/nf_core/components/nfcore_component.py b/nf_core/components/nfcore_component.py index 090f55a4b..eda95be99 100644 --- a/nf_core/components/nfcore_component.py +++ b/nf_core/components/nfcore_component.py @@ -272,4 +272,3 @@ def get_outputs_from_main_nf(self): pass log.debug(f"Found {len(outputs)} outputs in {self.main_nf}") self.outputs = outputs - From d3d9c72d7a20b617e417a8e0ba22ad9b29944d07 Mon Sep 17 00:00:00 2001 From: lmReef Date: Wed, 4 Dec 2024 10:29:10 +1300 Subject: [PATCH 123/164] update changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index da5f72c35..29e405903 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -25,6 +25,7 @@ ### Linting - allow mixed `str` and `dict` entries in lint config ([#3228](https://github.com/nf-core/tools/pull/3228)) +- fix meta_yml linting test failing due to module.process_name always being "" (#3317) ### Modules From 4a8e97631b6164b92f039de74e462cd9b9dd94ee Mon Sep 17 00:00:00 2001 From: lmReef Date: Wed, 4 Dec 2024 10:41:19 +1300 Subject: [PATCH 124/164] update changelog entry with link --- 
CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 29e405903..7799ef1d4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -25,7 +25,7 @@ ### Linting - allow mixed `str` and `dict` entries in lint config ([#3228](https://github.com/nf-core/tools/pull/3228)) -- fix meta_yml linting test failing due to module.process_name always being "" (#3317) +- fix meta_yml linting test failing due to module.process_name always being "" ([#3317](https://github.com/nf-core/tools/pull/3317)) ### Modules From b94435b92891b2cee0a78c47890b1d7349d30376 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Wed, 4 Dec 2024 09:48:13 +0000 Subject: [PATCH 125/164] remove too many raw/endraw in branch.yml --- nf_core/pipeline-template/.github/workflows/branch.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/nf_core/pipeline-template/.github/workflows/branch.yml b/nf_core/pipeline-template/.github/workflows/branch.yml index 26df52f09..110b4a5f5 100644 --- a/nf_core/pipeline-template/.github/workflows/branch.yml +++ b/nf_core/pipeline-template/.github/workflows/branch.yml @@ -24,7 +24,7 @@ jobs: uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2 with: message: | - ## This PR is against the `{% raw %}${{github.event.pull_request.base.ref}}{% endraw %}` branch :x: + ## This PR is against the `${{github.event.pull_request.base.ref}}` branch :x: * Do not close this PR * Click _Edit_ and change the `base` to `dev` @@ -34,9 +34,9 @@ jobs: Hi @${{ github.event.pull_request.user.login }}, - It looks like this pull-request is has been made against the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) {% raw %}${{github.event.pull_request.base.ref}}{% endraw %} branch. 
- The {% raw %}${{github.event.pull_request.base.ref}}{% endraw %} branch on nf-core repositories should always contain code from the latest release. - Because of this, PRs to {% raw %}${{github.event.pull_request.base.ref}}{% endraw %} are only allowed if they come from the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) `dev` branch. + It looks like this pull-request is has been made against the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) ${{github.event.pull_request.base.ref}} branch. + The ${{github.event.pull_request.base.ref}} branch on nf-core repositories should always contain code from the latest release. + Because of this, PRs to ${{github.event.pull_request.base.ref}} are only allowed if they come from the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) `dev` branch. You do not need to close this PR, you can change the target branch to `dev` by clicking the _"Edit"_ button at the top of this page. Note that even after this, the test will continue to show as failing until you push a new commit. 
From f22d7d9e03f33c078967e38bc7174e6a5de6547c Mon Sep 17 00:00:00 2001 From: Rob Syme Date: Wed, 4 Dec 2024 08:09:57 -0500 Subject: [PATCH 126/164] Revert changes to nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf --- .../nf-core/utils_nfcore_pipeline/main.nf | 40 ++++++++++--------- 1 file changed, 21 insertions(+), 19 deletions(-) diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf index 9e874fbf0..228dbff89 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf @@ -203,26 +203,29 @@ def logColours(monochrome_logs=true) { return colorcodes } -// Return a single report from an object that may be a Path or List // -def getSingleReport(multiqc_reports) { - switch (multiqc_reports) { - case Path: - return multiqc_reports - case List: - switch (multiqc_reports.size()) { - case 0: - log.warn("[${workflow.manifest.name}] No reports found from process 'MULTIQC'") - return null - case 1: - return multiqc_reports.first() - default: +// Attach the multiqc report to email +// +def attachMultiqcReport(multiqc_report) { + def mqc_report = null + try { + if (workflow.success) { + mqc_report = multiqc_report.getVal() + if (mqc_report.getClass() == ArrayList && mqc_report.size() >= 1) { + if (mqc_report.size() > 1) { log.warn("[${workflow.manifest.name}] Found multiple reports from process 'MULTIQC', will use only one") - return multiqc_reports.first() + } + mqc_report = mqc_report[0] } - default: - return null + } } + catch (Exception msg) { + log.debug(msg) + if (multiqc_report) { + log.warn("[${workflow.manifest.name}] Could not attach MultiQC report to summary email") + } + } + return mqc_report } // @@ -276,7 +279,7 @@ def completionEmail(summary_params, email, email_on_fail, plaintext_email, outdi 
email_fields['summary'] = summary << misc_fields // On success try attach the multiqc report - def mqc_report = getSingleReport(multiqc_report) + def mqc_report = attachMultiqcReport(multiqc_report) // Check if we are only sending emails on failure def email_address = email @@ -307,8 +310,7 @@ def completionEmail(summary_params, email, email_on_fail, plaintext_email, outdi if (email_address) { try { if (plaintext_email) { - new org.codehaus.groovy.GroovyException('Send plaintext e-mail, not HTML') - } +new org.codehaus.groovy.GroovyException('Send plaintext e-mail, not HTML') } // Try to send HTML e-mail using sendmail def sendmail_tf = new File(workflow.launchDir.toString(), ".sendmail_tmp.html") sendmail_tf.withWriter { w -> w << sendmail_html } From c3fd9bb084c86e1204e9d4cbeebef413444b1834 Mon Sep 17 00:00:00 2001 From: lmReef Date: Thu, 5 Dec 2024 08:59:07 +1300 Subject: [PATCH 127/164] fix: add explicit str type to self.process_name --- nf_core/components/nfcore_component.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/nf_core/components/nfcore_component.py b/nf_core/components/nfcore_component.py index eda95be99..81c0ba98e 100644 --- a/nf_core/components/nfcore_component.py +++ b/nf_core/components/nfcore_component.py @@ -95,8 +95,7 @@ def __init__( self.test_yml = None self.test_main_nf = None - # Set process_name after self.main_nf is defined - self.process_name = self._get_process_name() + self.process_name: str = self._get_process_name() def __repr__(self) -> str: return f"" From 57a92a0eff27c3570ae2cfcf73a765a66ed3c541 Mon Sep 17 00:00:00 2001 From: lmReef Date: Thu, 5 Dec 2024 09:34:08 +1300 Subject: [PATCH 128/164] fix: module section linter regex --- nf_core/modules/lint/main_nf.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/nf_core/modules/lint/main_nf.py b/nf_core/modules/lint/main_nf.py index 848e17130..599d50cd2 100644 --- a/nf_core/modules/lint/main_nf.py +++ 
b/nf_core/modules/lint/main_nf.py @@ -96,19 +96,19 @@ def main_nf( for line in iter_lines: if re.search(r"^\s*process\s*\w*\s*{", line) and state == "module": state = "process" - if re.search(r"input\s*:", line) and state in ["process"]: + if re.search(r"^\s*input\s*:", line) and state in ["process"]: state = "input" continue - if re.search(r"output\s*:", line) and state in ["input", "process"]: + if re.search(r"^\s*output\s*:", line) and state in ["input", "process"]: state = "output" continue - if re.search(r"when\s*:", line) and state in ["input", "output", "process"]: + if re.search(r"^\s*when\s*:", line) and state in ["input", "output", "process"]: state = "when" continue - if re.search(r"script\s*:", line) and state in ["input", "output", "when", "process"]: + if re.search(r"^\s*script\s*:", line) and state in ["input", "output", "when", "process"]: state = "script" continue - if re.search(r"shell\s*:", line) and state in ["input", "output", "when", "process"]: + if re.search(r"^\s*shell\s*:", line) and state in ["input", "output", "when", "process"]: state = "shell" continue From 355dff2c8e8fa28b628459bab65efeadf3793dfc Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Wed, 4 Dec 2024 21:10:36 +0000 Subject: [PATCH 129/164] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index da5f72c35..91c70f443 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -29,6 +29,7 @@ ### Modules - add a panel around diff previews when updating ([#3246](https://github.com/nf-core/tools/pull/3246)) +- Fix module section linter regex ([#3321](https://github.com/nf-core/tools/pull/3321)) ### Subworkflows From 9996df9e061a7512f6cce1d34fc0e8f7a937df95 Mon Sep 17 00:00:00 2001 From: lmReef Date: Thu, 5 Dec 2024 10:11:58 +1300 Subject: [PATCH 130/164] update changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 91c70f443..b75b86574 100644 --- a/CHANGELOG.md 
+++ b/CHANGELOG.md @@ -25,6 +25,7 @@ ### Linting - allow mixed `str` and `dict` entries in lint config ([#3228](https://github.com/nf-core/tools/pull/3228)) +- fix module section regex matching wrong things ([#3321](https://github.com/nf-core/tools/pull/3321)) ### Modules From c22c571cff747b1fb47ae11d3dc1fa76e14a9a2f Mon Sep 17 00:00:00 2001 From: lmReef Date: Thu, 5 Dec 2024 10:14:01 +1300 Subject: [PATCH 131/164] Revert "[automated] Update CHANGELOG.md" This reverts commit 355dff2c8e8fa28b628459bab65efeadf3793dfc. --- CHANGELOG.md | 1 - 1 file changed, 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b75b86574..67417f940 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -30,7 +30,6 @@ ### Modules - add a panel around diff previews when updating ([#3246](https://github.com/nf-core/tools/pull/3246)) -- Fix module section linter regex ([#3321](https://github.com/nf-core/tools/pull/3321)) ### Subworkflows From ce866c62c141ce1e0126dad9942f879e8bea3f89 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Thu, 5 Dec 2024 11:55:09 +0100 Subject: [PATCH 132/164] Remove not needed setup-java step --- .github/actions/create-lint-wf/action.yml | 5 ----- .github/workflows/create-test-lint-wf-template.yml | 5 ----- .github/workflows/create-test-wf.yml | 5 ----- .github/workflows/pytest.yml | 5 ----- nf_core/pipeline-template/.github/workflows/ci.yml | 5 ----- .../.github/workflows/download_pipeline.yml | 5 ----- nf_core/pipeline-template/.github/workflows/linting.yml | 5 ----- 7 files changed, 35 deletions(-) diff --git a/.github/actions/create-lint-wf/action.yml b/.github/actions/create-lint-wf/action.yml index 257378479..3ffd960d2 100644 --- a/.github/actions/create-lint-wf/action.yml +++ b/.github/actions/create-lint-wf/action.yml @@ -15,11 +15,6 @@ runs: cd create-lint-wf export NXF_WORK=$(pwd) - - uses: actions/setup-java@v4 - with: - java-version: "17" - distribution: "temurin" - - name: Install Nextflow uses: nf-core/setup-nextflow@v2 
with: diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index f5e40e837..cabd4b9ab 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -91,11 +91,6 @@ jobs: python -m pip install --upgrade pip pip install . - - uses: actions/setup-java@v4 - with: - java-version: "17" - distribution: "temurin" - - name: Install Nextflow uses: nf-core/setup-nextflow@v2 with: diff --git a/.github/workflows/create-test-wf.yml b/.github/workflows/create-test-wf.yml index b12b503b2..53f84b72c 100644 --- a/.github/workflows/create-test-wf.yml +++ b/.github/workflows/create-test-wf.yml @@ -59,11 +59,6 @@ jobs: python -m pip install --upgrade pip pip install . - - uses: actions/setup-java@v4 - with: - java-version: "17" - distribution: "temurin" - - name: Install Nextflow uses: nf-core/setup-nextflow@v2 with: diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 5bb526396..76d5d710c 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -120,11 +120,6 @@ jobs: id: date run: echo "date=$(date +'%Y-%m')" >> $GITHUB_ENV - - uses: actions/setup-java@v4 - with: - java-version: "17" - distribution: "temurin" - - name: Install Nextflow uses: nf-core/setup-nextflow@v2 diff --git a/nf_core/pipeline-template/.github/workflows/ci.yml b/nf_core/pipeline-template/.github/workflows/ci.yml index 3bc9db4d7..9db393d9f 100644 --- a/nf_core/pipeline-template/.github/workflows/ci.yml +++ b/nf_core/pipeline-template/.github/workflows/ci.yml @@ -47,11 +47,6 @@ jobs: - name: Check out pipeline code uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - - uses: actions/setup-java@v4 - with: - java-version: "17" - distribution: "temurin" - - name: Set up Nextflow uses: nf-core/setup-nextflow@v2 with: diff --git a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml 
b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml index 326ac0936..05397358c 100644 --- a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml +++ b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml @@ -31,11 +31,6 @@ jobs: download: runs-on: ubuntu-latest steps: - - uses: actions/setup-java@v4 - with: - java-version: "17" - distribution: "temurin" - - name: Install Nextflow uses: nf-core/setup-nextflow@v2 diff --git a/nf_core/pipeline-template/.github/workflows/linting.yml b/nf_core/pipeline-template/.github/workflows/linting.yml index f9ef12335..cfdbcc12a 100644 --- a/nf_core/pipeline-template/.github/workflows/linting.yml +++ b/nf_core/pipeline-template/.github/workflows/linting.yml @@ -33,11 +33,6 @@ jobs: - name: Check out pipeline code uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - - uses: actions/setup-java@v4 - with: - java-version: "17" - distribution: "temurin" - - name: Install Nextflow uses: nf-core/setup-nextflow@v2 From a9d97e4f296551fd0c7b9e071a0990007b3b3770 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 5 Dec 2024 12:16:57 +0000 Subject: [PATCH 133/164] Update python:3.12-slim Docker digest to 2b00791 (#3319) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index dc9948ea4..f2141145b 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.12-slim@sha256:2a6386ad2db20e7f55073f69a98d6da2cf9f168e05e7487d2670baeb9b7601c5 +FROM python:3.12-slim@sha256:2b0079146a74e23bf4ae8f6a28e1b484c6292f6fb904cbb51825b4a19812fcd8 LABEL authors="phil.ewels@seqera.io,erik.danielsson@scilifelab.se" \ description="Docker image containing requirements for nf-core/tools" From c23af55faa65b9f4bc6735917bdcbb77acdc500e Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Thu, 5 Dec 2024 15:19:54 +0000 
Subject: [PATCH 134/164] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index fed991f85..3272db6c7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -26,6 +26,7 @@ ### Subworkflows - Add `nf-core subworkflows patch` command ([#2861](https://github.com/nf-core/tools/pull/2861)) +- Improve subworkflow nf-test migration warning ([#3298](https://github.com/nf-core/tools/pull/3298)) ### General From 6ddfe4d35b4a159dd5f18a12c77c6761ec66068f Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 5 Dec 2024 16:30:44 +0100 Subject: [PATCH 135/164] fix headers --- docs/api/_src/api/pipelines/bump_version.md | 2 +- docs/api/_src/api/pipelines/create.md | 2 +- docs/api/_src/api/pipelines/download.md | 2 +- docs/api/_src/api/pipelines/launch.md | 2 +- docs/api/_src/api/pipelines/lint.md | 2 +- docs/api/_src/api/pipelines/list.md | 2 +- docs/api/_src/api/pipelines/params-file.md | 2 +- docs/api/_src/api/pipelines/schema.md | 2 +- docs/api/_src/api/pipelines/sync.md | 2 +- docs/api/_src/api/pipelines/utils.md | 2 +- 10 files changed, 10 insertions(+), 10 deletions(-) diff --git a/docs/api/_src/api/pipelines/bump_version.md b/docs/api/_src/api/pipelines/bump_version.md index cd7dc280f..76db67837 100644 --- a/docs/api/_src/api/pipelines/bump_version.md +++ b/docs/api/_src/api/pipelines/bump_version.md @@ -1,4 +1,4 @@ -# nf_core.bump_version +# nf_core.pipelines.bump_version ```{eval-rst} .. automodule:: nf_core.pipelines.bump_version diff --git a/docs/api/_src/api/pipelines/create.md b/docs/api/_src/api/pipelines/create.md index 576335e95..5019a5f3c 100644 --- a/docs/api/_src/api/pipelines/create.md +++ b/docs/api/_src/api/pipelines/create.md @@ -1,4 +1,4 @@ -# nf_core.create +# nf_core.pipelines.create ```{eval-rst} .. 
automodule:: nf_core.pipelines.create diff --git a/docs/api/_src/api/pipelines/download.md b/docs/api/_src/api/pipelines/download.md index 540fb92c4..afb31ddea 100644 --- a/docs/api/_src/api/pipelines/download.md +++ b/docs/api/_src/api/pipelines/download.md @@ -1,4 +1,4 @@ -# nf_core.download +# nf_core.pipelines.download ```{eval-rst} .. automodule:: nf_core.pipelines.download diff --git a/docs/api/_src/api/pipelines/launch.md b/docs/api/_src/api/pipelines/launch.md index 0f7fc03f6..0d0260cae 100644 --- a/docs/api/_src/api/pipelines/launch.md +++ b/docs/api/_src/api/pipelines/launch.md @@ -1,4 +1,4 @@ -# nf_core.launch +# nf_core.pipelines.launch ```{eval-rst} .. automodule:: nf_core.pipelines.launch diff --git a/docs/api/_src/api/pipelines/lint.md b/docs/api/_src/api/pipelines/lint.md index aa62c404b..91b37c26f 100644 --- a/docs/api/_src/api/pipelines/lint.md +++ b/docs/api/_src/api/pipelines/lint.md @@ -1,4 +1,4 @@ -# nf_core.lint +# nf_core.pipelines.lint :::{seealso} See the [Lint Tests](/docs/nf-core-tools/api_reference/dev/pipeline_lint_tests) docs for information about specific linting functions. diff --git a/docs/api/_src/api/pipelines/list.md b/docs/api/_src/api/pipelines/list.md index 7df756454..5f404b91c 100644 --- a/docs/api/_src/api/pipelines/list.md +++ b/docs/api/_src/api/pipelines/list.md @@ -1,4 +1,4 @@ -# nf_core.list +# nf_core.pipelines.list ```{eval-rst} .. automodule:: nf_core.pipelines.list diff --git a/docs/api/_src/api/pipelines/params-file.md b/docs/api/_src/api/pipelines/params-file.md index 06f27cc59..37e91f458 100644 --- a/docs/api/_src/api/pipelines/params-file.md +++ b/docs/api/_src/api/pipelines/params-file.md @@ -1,4 +1,4 @@ -# nf_core.params_file +# nf_core.pipelines.params_file ```{eval-rst} .. 
automodule:: nf_core.pipelines.params_file diff --git a/docs/api/_src/api/pipelines/schema.md b/docs/api/_src/api/pipelines/schema.md index c885d9ed2..4ca1aab48 100644 --- a/docs/api/_src/api/pipelines/schema.md +++ b/docs/api/_src/api/pipelines/schema.md @@ -1,4 +1,4 @@ -# nf_core.schema +# nf_core.pipelines.schema ```{eval-rst} .. automodule:: nf_core.pipelines.schema diff --git a/docs/api/_src/api/pipelines/sync.md b/docs/api/_src/api/pipelines/sync.md index da1f468fe..f78733bb7 100644 --- a/docs/api/_src/api/pipelines/sync.md +++ b/docs/api/_src/api/pipelines/sync.md @@ -1,4 +1,4 @@ -# nf_core.sync +# nf_core.pipelines.sync ```{eval-rst} .. automodule:: nf_core.pipelines.sync diff --git a/docs/api/_src/api/pipelines/utils.md b/docs/api/_src/api/pipelines/utils.md index 86b8c3f36..36c2ecca4 100644 --- a/docs/api/_src/api/pipelines/utils.md +++ b/docs/api/_src/api/pipelines/utils.md @@ -1,4 +1,4 @@ -# nf_core.utils +# nf_core.pipelines.utils ```{eval-rst} .. automodule:: nf_core.pipelines.utils From f54e1234e9451afbafecb26f124261ba8bc3977b Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Thu, 5 Dec 2024 16:04:21 +0000 Subject: [PATCH 136/164] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 61d8dbf51..ffc43d225 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -35,6 +35,7 @@ - Update GitHub Actions ([#3237](https://github.com/nf-core/tools/pull/3237)) - add `--dir/-d` option to schema commands ([#3247](https://github.com/nf-core/tools/pull/3247)) - Update pre-commit hook astral-sh/ruff-pre-commit to v0.7.1 ([#3250](https://github.com/nf-core/tools/pull/3250)) +- fix headers in api docs ([#3323](https://github.com/nf-core/tools/pull/3323)) ## [v3.0.2 - Titanium Tapir Patch](https://github.com/nf-core/tools/releases/tag/3.0.2) - [2024-10-11] From 2c5932e66e2cdbf85728fb0ec03c695e5aebdb8a Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> 
Date: Thu, 5 Dec 2024 16:49:54 +0000 Subject: [PATCH 137/164] Update pre-commit hook astral-sh/ruff-pre-commit to v0.8.2 --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 1494f5818..68a6fa3ed 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.8.1 + rev: v0.8.2 hooks: - id: ruff # linter args: [--fix, --exit-non-zero-on-fix] # sort imports and fix From c03f21288a0ff05dc42f2b5dddf135531564fbed Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Thu, 5 Dec 2024 16:52:26 +0000 Subject: [PATCH 138/164] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9c011dfa4..a4d8b4487 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -57,6 +57,7 @@ - Update pre-commit hook astral-sh/ruff-pre-commit to v0.8.0 ([#3299](https://github.com/nf-core/tools/pull/3299)) - Update gitpod/workspace-base Docker digest to 12853f7 ([#3309](https://github.com/nf-core/tools/pull/3309)) - Run pre-commit when testing linting the template pipeline ([#3280](https://github.com/nf-core/tools/pull/3280)) +- Update pre-commit hook astral-sh/ruff-pre-commit to v0.8.2 ([#3325](https://github.com/nf-core/tools/pull/3325)) ## [v3.0.2 - Titanium Tapir Patch](https://github.com/nf-core/tools/releases/tag/3.0.2) - [2024-10-11] From da16adb69b352d9a2e5120137a5c2ac0be88264a Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Thu, 5 Dec 2024 23:56:27 +0100 Subject: [PATCH 139/164] Make prompt less nf-core specific --- nf_core/components/components_utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index 67e05e0ce..23bf08bbd 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -43,10 +43,10 @@ 
def get_repo_info(directory: Path, use_prompt: Optional[bool] = True) -> Tuple[P if not repo_type and use_prompt: log.warning("'repository_type' not defined in %s", config_fn.name) repo_type = questionary.select( - "Is this repository an nf-core pipeline or a fork of nf-core/modules?", + "Is this repository a pipeline or a modules repository?", choices=[ {"name": "Pipeline", "value": "pipeline"}, - {"name": "nf-core/modules", "value": "modules"}, + {"name": "Modules repository", "value": "modules"}, ], style=nf_core.utils.nfcore_question_style, ).unsafe_ask() From ba447fbf7027312d7562ac533b9ca26ff68574fe Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Thu, 5 Dec 2024 22:57:35 +0000 Subject: [PATCH 140/164] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index a4d8b4487..4fef8ce40 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -58,6 +58,7 @@ - Update gitpod/workspace-base Docker digest to 12853f7 ([#3309](https://github.com/nf-core/tools/pull/3309)) - Run pre-commit when testing linting the template pipeline ([#3280](https://github.com/nf-core/tools/pull/3280)) - Update pre-commit hook astral-sh/ruff-pre-commit to v0.8.2 ([#3325](https://github.com/nf-core/tools/pull/3325)) +- Make prompt less nf-core specific ([#3326](https://github.com/nf-core/tools/pull/3326)) ## [v3.0.2 - Titanium Tapir Patch](https://github.com/nf-core/tools/releases/tag/3.0.2) - [2024-10-11] From 6a59a9749450bcf796aab68506302145e5f93efd Mon Sep 17 00:00:00 2001 From: Mahesh Binzer-Panchal Date: Fri, 6 Dec 2024 13:50:30 +0000 Subject: [PATCH 141/164] Update gitpod vscode extensions to use nf-core extension pack --- .gitpod.yml | 9 +-------- nf_core/pipeline-template/.gitpod.yml | 11 ++--------- 2 files changed, 3 insertions(+), 17 deletions(-) diff --git a/.gitpod.yml b/.gitpod.yml index efe193f35..d5948695b 100644 --- a/.gitpod.yml +++ b/.gitpod.yml @@ -9,11 +9,4 @@ tasks: vscode: extensions: - - 
esbenp.prettier-vscode # Markdown/CommonMark linting and style checking for Visual Studio Code - - EditorConfig.EditorConfig # override user/workspace settings with settings found in .editorconfig files - - Gruntfuggly.todo-tree # Display TODO and FIXME in a tree view in the activity bar - - mechatroner.rainbow-csv # Highlight columns in csv files in different colors - - nextflow.nextflow # Nextflow syntax highlighting - - oderwat.indent-rainbow # Highlight indentation level - - streetsidesoftware.code-spell-checker # Spelling checker for source code - - charliermarsh.ruff # Code linter Ruff + - nf-core.nf-core-extensionpack # https://github.com/nf-core/vscode-extensionpack diff --git a/nf_core/pipeline-template/.gitpod.yml b/nf_core/pipeline-template/.gitpod.yml index 5907fb59c..b8165fbc0 100644 --- a/nf_core/pipeline-template/.gitpod.yml +++ b/nf_core/pipeline-template/.gitpod.yml @@ -6,13 +6,6 @@ tasks: nextflow self-update vscode: - extensions: # based on nf-core.nf-core-extensionpack + extensions: #{%- if code_linters -%} - - esbenp.prettier-vscode # Markdown/CommonMark linting and style checking for Visual Studio Code - - EditorConfig.EditorConfig # override user/workspace settings with settings found in .editorconfig files{% endif %} - - Gruntfuggly.todo-tree # Display TODO and FIXME in a tree view in the activity bar - - mechatroner.rainbow-csv # Highlight columns in csv files in different colors - - nextflow.nextflow # Nextflow syntax highlighting - - oderwat.indent-rainbow # Highlight indentation level - - streetsidesoftware.code-spell-checker # Spelling checker for source code - - charliermarsh.ruff # Code linter Ruff + - nf-core.nf-core-extensionpack # https://github.com/nf-core/vscode-extensionpack From 0ba1c7007490f10a0f985a5d70fef474b58f0cac Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Fri, 6 Dec 2024 13:55:42 +0000 Subject: [PATCH 142/164] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git 
a/CHANGELOG.md b/CHANGELOG.md index a4d8b4487..91d9960a8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -58,6 +58,7 @@ - Update gitpod/workspace-base Docker digest to 12853f7 ([#3309](https://github.com/nf-core/tools/pull/3309)) - Run pre-commit when testing linting the template pipeline ([#3280](https://github.com/nf-core/tools/pull/3280)) - Update pre-commit hook astral-sh/ruff-pre-commit to v0.8.2 ([#3325](https://github.com/nf-core/tools/pull/3325)) +- Update gitpod vscode extensions to use nf-core extension pack ([#3327](https://github.com/nf-core/tools/pull/3327)) ## [v3.0.2 - Titanium Tapir Patch](https://github.com/nf-core/tools/releases/tag/3.0.2) - [2024-10-11] From 7a03d907f6cdf8d75344edbbf2a141854346da11 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Mon, 9 Dec 2024 09:47:45 +0000 Subject: [PATCH 143/164] [automated] Fix code linting --- CHANGELOG.md | 1 - 1 file changed, 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5876c653b..20f48bb1c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -64,7 +64,6 @@ - Make CLI prompt less nf-core specific ([#3326](https://github.com/nf-core/tools/pull/3326)) - Update gitpod vscode extensions to use nf-core extension pack ([#3327](https://github.com/nf-core/tools/pull/3327)) - ## [v3.0.2 - Titanium Tapir Patch](https://github.com/nf-core/tools/releases/tag/3.0.2) - [2024-10-11] ### Template From 69f00bc5003830677c42bceab34a60aec56e7365 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 9 Dec 2024 12:40:42 +0100 Subject: [PATCH 144/164] update utils_nfcore_pipeline swf --- nf_core/pipeline-template/modules.json | 2 +- .../nf-core/utils_nfcore_pipeline/main.nf | 42 ++++++++---------- .../tests/main.function.nf.test | 44 +++++++++++++++++++ 3 files changed, 64 insertions(+), 24 deletions(-) diff --git a/nf_core/pipeline-template/modules.json b/nf_core/pipeline-template/modules.json index 397c0cdb0..7d2761d29 100644 --- a/nf_core/pipeline-template/modules.json +++ b/nf_core/pipeline-template/modules.json 
@@ -28,7 +28,7 @@ }, "utils_nfcore_pipeline": { "branch": "master", - "git_sha": "85400682a2abac63b09c863c138e91e5df7236b5", + "git_sha": "51ae5406a030d4da1e49e4dab49756844fdd6c7a", "installed_by": ["subworkflows"] }{% if nf_schema %}, "utils_nfschema_plugin": { diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf index 228dbff89..bfd258760 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf @@ -203,29 +203,24 @@ def logColours(monochrome_logs=true) { return colorcodes } +// Return a single report from an object that may be a Path or List // -// Attach the multiqc report to email -// -def attachMultiqcReport(multiqc_report) { - def mqc_report = null - try { - if (workflow.success) { - mqc_report = multiqc_report.getVal() - if (mqc_report.getClass() == ArrayList && mqc_report.size() >= 1) { - if (mqc_report.size() > 1) { - log.warn("[${workflow.manifest.name}] Found multiple reports from process 'MULTIQC', will use only one") - } - mqc_report = mqc_report[0] - } +def getSingleReport(multiqc_reports) { + if (multiqc_reports instanceof Path) { + return multiqc_reports + } else if (multiqc_reports instanceof List) { + if (multiqc_reports.size() == 0) { + log.warn("[${workflow.manifest.name}] No reports found from process 'MULTIQC'") + return null + } else if (multiqc_reports.size() == 1) { + return multiqc_reports.first() + } else { + log.warn("[${workflow.manifest.name}] Found multiple reports from process 'MULTIQC', will use only one") + return multiqc_reports.first() } + } else { + return null } - catch (Exception msg) { - log.debug(msg) - if (multiqc_report) { - log.warn("[${workflow.manifest.name}] Could not attach MultiQC report to summary email") - } - } - return mqc_report } // @@ -279,7 +274,7 @@ def 
completionEmail(summary_params, email, email_on_fail, plaintext_email, outdi email_fields['summary'] = summary << misc_fields // On success try attach the multiqc report - def mqc_report = attachMultiqcReport(multiqc_report) + def mqc_report = getSingleReport(multiqc_report) // Check if we are only sending emails on failure def email_address = email @@ -310,7 +305,8 @@ def completionEmail(summary_params, email, email_on_fail, plaintext_email, outdi if (email_address) { try { if (plaintext_email) { -new org.codehaus.groovy.GroovyException('Send plaintext e-mail, not HTML') } + new org.codehaus.groovy.GroovyException('Send plaintext e-mail, not HTML') + } // Try to send HTML e-mail using sendmail def sendmail_tf = new File(workflow.launchDir.toString(), ".sendmail_tmp.html") sendmail_tf.withWriter { w -> w << sendmail_html } @@ -318,7 +314,7 @@ new org.codehaus.groovy.GroovyException('Send plaintext e-mail, not HTML') log.info("-${colors.purple}[${workflow.manifest.name}]${colors.green} Sent summary e-mail to ${email_address} (sendmail)-") } catch (Exception msg) { - log.debug(msg) + log.debug(msg.toString()) log.debug("Trying with mail instead of sendmail") // Catch failures and try with plaintext def mail_cmd = ['mail', '-s', subject, '--content-type=text/html', email_address] diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test index e43d208b1..f117040cb 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test @@ -79,4 +79,48 @@ nextflow_function { ) } } + + test("Test Function getSingleReport with a single file") { + function "getSingleReport" + + when { + function { + """ + input[0] = file(params.modules_testdata_base_path + '/generic/tsv/test.tsv', 
checkIfExists: true) + """ + } + } + + then { + assertAll( + { assert function.success }, + { assert function.result.contains("test.tsv") } + ) + } + } + + test("Test Function getSingleReport with multiple files") { + function "getSingleReport" + + when { + function { + """ + input[0] = [ + file(params.modules_testdata_base_path + '/generic/tsv/test.tsv', checkIfExists: true), + file(params.modules_testdata_base_path + '/generic/tsv/network.tsv', checkIfExists: true), + file(params.modules_testdata_base_path + '/generic/tsv/expression.tsv', checkIfExists: true) + ] + """ + } + } + + then { + assertAll( + { assert function.success }, + { assert function.result.contains("test.tsv") }, + { assert !function.result.contains("network.tsv") }, + { assert !function.result.contains("expression.tsv") } + ) + } + } } From f2090961118d94c5be201b9e6bbbf03899ba920f Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 9 Dec 2024 12:43:50 +0100 Subject: [PATCH 145/164] fix jinja template missing endif --- nf_core/pipeline-template/.gitpod.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipeline-template/.gitpod.yml b/nf_core/pipeline-template/.gitpod.yml index b8165fbc0..67d24ff0f 100644 --- a/nf_core/pipeline-template/.gitpod.yml +++ b/nf_core/pipeline-template/.gitpod.yml @@ -8,4 +8,4 @@ tasks: vscode: extensions: #{%- if code_linters -%} - - nf-core.nf-core-extensionpack # https://github.com/nf-core/vscode-extensionpack + - nf-core.nf-core-extensionpack # https://github.com/nf-core/vscode-extensionpack{% endif %} From ee12866a96d2b4038be1765034f0d7511cb25be0 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Mon, 9 Dec 2024 11:45:14 +0000 Subject: [PATCH 146/164] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 703012f68..fd39884e4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -15,6 +15,7 @@ - Use `params.monochrome_logs` in the template and update nf-core components 
([#3310](https://github.com/nf-core/tools/pull/3310)) - Fix some typos and improve writing in `usage.md` and `CONTRIBUTING.md` ([#3302](https://github.com/nf-core/tools/pull/3302)) - Add `manifest.contributors` to `nextflow.config` ([#3311](https://github.com/nf-core/tools/pull/3311)) +- Update template components ([#3328](https://github.com/nf-core/tools/pull/3328)) ### Download From ae1066c79dccda7068b957360234feecdfe0d5c4 Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 9 Dec 2024 13:10:57 +0100 Subject: [PATCH 147/164] bump to 3.1.0 --- .gitpod.yml | 2 +- CHANGELOG.md | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.gitpod.yml b/.gitpod.yml index d5948695b..db31d01be 100644 --- a/.gitpod.yml +++ b/.gitpod.yml @@ -1,4 +1,4 @@ -image: nfcore/gitpod:dev +image: nfcore/gitpod:latest tasks: - name: install current state of nf-core/tools and setup pre-commit command: | diff --git a/CHANGELOG.md b/CHANGELOG.md index 682a67652..0eacaed17 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,6 @@ # nf-core/tools: Changelog -## v3.0.3dev +## [v3.1.0 - Brass Boxfish](https://github.com/nf-core/tools/releases/tag/3.1.0) - [2024-12-09] ### Template diff --git a/setup.py b/setup.py index 11b302249..6b68973a6 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import find_packages, setup -version = "3.0.3dev" +version = "3.1.0" with open("README.md") as f: readme = f.read() From afdb012820ffbb07c037d36da0233e30502624a5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Mon, 9 Dec 2024 12:29:16 +0000 Subject: [PATCH 148/164] add highlights and version updates sections to changelog --- CHANGELOG.md | 32 +++++++++++++++++--------------- 1 file changed, 17 insertions(+), 15 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0eacaed17..6369cedd2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,12 +2,18 @@ ## [v3.1.0 - Brass Boxfish](https://github.com/nf-core/tools/releases/tag/3.1.0) - 
[2024-12-09] +**Highlights** + +- We added the new `contributors` field to the pipeline template `manifest`. +- The `nf-core pipelines download` command supports ORAS container URIs. +- New command `nf-core subworkflows patch`. + ### Template - Keep pipeline name in version.yml file ([#3223](https://github.com/nf-core/tools/pull/3223)) - Fix Manifest DOI text ([#3224](https://github.com/nf-core/tools/pull/3224)) - Do not assume pipeline name is url ([#3225](https://github.com/nf-core/tools/pull/3225)) -- fix workflow_dispatch trigger and parse more review comments in awsfulltest ([#3235](https://github.com/nf-core/tools/pull/3235)) +- fix `workflow_dispatch` trigger and parse more review comments in awsfulltest ([#3235](https://github.com/nf-core/tools/pull/3235)) - Add resource limits to Gitpod profile([#3255](https://github.com/nf-core/tools/pull/3255)) - Fix a typo ([#3268](https://github.com/nf-core/tools/pull/3268)) - Remove `def` from `nextflow.config` and add `trace_report_suffix` param ([#3296](https://github.com/nf-core/tools/pull/3296)) @@ -18,14 +24,14 @@ ### Download -- First steps towards fixing [#3179](https://github.com/nf-core/tools/issues/3179): Modify `prioritize_direct_download()` to retain Seqera Singularity https:// Container URIs and hardcode Seqera Containers into `gather_registries()` ([#3244](https://github.com/nf-core/tools/pull/3244)). +- First steps towards fixing [#3179](https://github.com/nf-core/tools/issues/3179): Modify `prioritize_direct_download()` to retain Seqera Singularity `https://` Container URIs and hardcode Seqera Containers into `gather_registries()` ([#3244](https://github.com/nf-core/tools/pull/3244)). 
- Further steps towards fixing [#3179](https://github.com/nf-core/tools/issues/3179): Enable limited support for `oras://` container paths (_only absolute URIs, no flexible registries like with Docker_) and prevent unnecessary image downloads for Seqera Container modules with `reconcile_seqera_container_uris()` ([#3293](https://github.com/nf-core/tools/pull/3293)). - Update dawidd6/action-download-artifact action to v7 ([#3306](https://github.com/nf-core/tools/pull/3306)) ### Linting - allow mixed `str` and `dict` entries in lint config ([#3228](https://github.com/nf-core/tools/pull/3228)) -- fix meta_yml linting test failing due to module.process_name always being "" ([#3317](https://github.com/nf-core/tools/pull/3317)) +- fix `meta_yml` linting test failing due to `module.process_name` always being `""` ([#3317](https://github.com/nf-core/tools/pull/3317)) - fix module section regex matching wrong things ([#3321](https://github.com/nf-core/tools/pull/3321)) ### Modules @@ -39,32 +45,28 @@ ### General -- Include .nf-core.yml in `nf-core pipelines bump-version` ([#3220](https://github.com/nf-core/tools/pull/3220)) +- Include `.nf-core.yml` in `nf-core pipelines bump-version` ([#3220](https://github.com/nf-core/tools/pull/3220)) - create: add shortcut to toggle all switches ([#3226](https://github.com/nf-core/tools/pull/3226)) - Remove unrelated values when saving `.nf-core` file ([#3227](https://github.com/nf-core/tools/pull/3227)) -- chore(deps): update pre-commit hook pre-commit/mirrors-mypy to v1.12.0 ([#3230](https://github.com/nf-core/tools/pull/3230)) -- chore(deps): update pre-commit hook astral-sh/ruff-pre-commit to v0.7.0 ([#3229](https://github.com/nf-core/tools/pull/3229)) -- Update python:3.12-slim Docker digest to 032c526 ([#3232](https://github.com/nf-core/tools/pull/3232)) - use correct `--profile` options for `nf-core subworkflows test` ([#3233](https://github.com/nf-core/tools/pull/3233)) - Update GitHub Actions 
([#3237](https://github.com/nf-core/tools/pull/3237)) - add `--dir/-d` option to schema commands ([#3247](https://github.com/nf-core/tools/pull/3247)) -- Update pre-commit hook astral-sh/ruff-pre-commit to v0.7.1 ([#3250](https://github.com/nf-core/tools/pull/3250)) - fix headers in api docs ([#3323](https://github.com/nf-core/tools/pull/3323)) - handle new schema structure in `nf-core pipelines create-params-file` ([#3276](https://github.com/nf-core/tools/pull/3276)) - Update Gitpod image to use Miniforge instead of Miniconda([#3274](https://github.com/nf-core/tools/pull/3274)) -- Update pre-commit hook astral-sh/ruff-pre-commit to v0.7.3 ([#3275](https://github.com/nf-core/tools/pull/3275)) - Add hint to solve git errors with a synced repo ([#3279](https://github.com/nf-core/tools/pull/3279)) -- Update pre-commit hook astral-sh/ruff-pre-commit to v0.7.4 ([#3282](https://github.com/nf-core/tools/pull/3282)) -- Update codecov/codecov-action action to v5 ([#3283](https://github.com/nf-core/tools/pull/3283)) -- Update python:3.12-slim Docker digest to 2a6386a ([#3284](https://github.com/nf-core/tools/pull/3284)) -- Update pre-commit hook astral-sh/ruff-pre-commit to v0.8.0 ([#3299](https://github.com/nf-core/tools/pull/3299)) -- Update gitpod/workspace-base Docker digest to 12853f7 ([#3309](https://github.com/nf-core/tools/pull/3309)) - Run pre-commit when testing linting the template pipeline ([#3280](https://github.com/nf-core/tools/pull/3280)) -- Update pre-commit hook astral-sh/ruff-pre-commit to v0.8.2 ([#3325](https://github.com/nf-core/tools/pull/3325)) - Make CLI prompt less nf-core specific ([#3326](https://github.com/nf-core/tools/pull/3326)) - Update gitpod vscode extensions to use nf-core extension pack ([#3327](https://github.com/nf-core/tools/pull/3327)) - Remove toList() channel operation from inside onComplete block ([#3304](https://github.com/nf-core/tools/pull/3304)) - build: Setup VS Code tests ([#3292](https://github.com/nf-core/tools/pull/3292)) 
+ +### Version updates + +- chore(deps): update pre-commit hook pre-commit/mirrors-mypy to v1.12.0 ([#3230](https://github.com/nf-core/tools/pull/3230)) +- Update codecov/codecov-action action to v5 ([#3283](https://github.com/nf-core/tools/pull/3283)) +- Update gitpod/workspace-base Docker digest to 12853f7 ([#3309](https://github.com/nf-core/tools/pull/3309)) +- Update pre-commit hook astral-sh/ruff-pre-commit to v0.8.2 ([#3325](https://github.com/nf-core/tools/pull/3325)) - Update dependency textual-dev to v1.7.0 ([#3308](https://github.com/nf-core/tools/pull/3308)) ## [v3.0.2 - Titanium Tapir Patch](https://github.com/nf-core/tools/releases/tag/3.0.2) - [2024-10-11] From 76ac145edee3cc5ee8e60c3959dba41e296d66ab Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 9 Dec 2024 15:23:20 +0100 Subject: [PATCH 149/164] remove mention of GRCh37 from template if igenomes is skipped closes #3322 --- nf_core/pipeline-template/docs/usage.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/nf_core/pipeline-template/docs/usage.md b/nf_core/pipeline-template/docs/usage.md index 16e6220aa..85f119e04 100644 --- a/nf_core/pipeline-template/docs/usage.md +++ b/nf_core/pipeline-template/docs/usage.md @@ -61,7 +61,7 @@ An [example samplesheet](../assets/samplesheet.csv) has been provided with the p The typical command for running the pipeline is as follows: ```bash -nextflow run {{ name }} --input ./samplesheet.csv --outdir ./results --genome GRCh37 -profile docker +nextflow run {{ name }} --input ./samplesheet.csv --outdir ./results {% if igenomes %}--genome GRCh37{% endif %} -profile docker ``` This will launch the pipeline with the `docker` configuration profile. See below for more information about profiles. 
@@ -93,7 +93,9 @@ with: ```yaml title="params.yaml" input: './samplesheet.csv' outdir: './results/' +{% if igenomes -%} genome: 'GRCh37' +{% endif -%} <...> ``` From e073622e8bdfb1879eb38fc981cf931152322aac Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 9 Dec 2024 15:23:56 +0100 Subject: [PATCH 150/164] add missing closing tag --- nf_core/pipeline-template/.gitpod.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/nf_core/pipeline-template/.gitpod.yml b/nf_core/pipeline-template/.gitpod.yml index b8165fbc0..1a0c9f2a8 100644 --- a/nf_core/pipeline-template/.gitpod.yml +++ b/nf_core/pipeline-template/.gitpod.yml @@ -9,3 +9,4 @@ vscode: extensions: #{%- if code_linters -%} - nf-core.nf-core-extensionpack # https://github.com/nf-core/vscode-extensionpack + #{%- endif -%} From 1da1fc600271fccdccc41b4c1cb8f5724a85c148 Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 9 Dec 2024 15:27:29 +0100 Subject: [PATCH 151/164] update changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0eacaed17..10f847ed2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -75,6 +75,7 @@ - Parallelize pipeline GHA tests over docker/conda/singularity ([#3214](https://github.com/nf-core/tools/pull/3214)) - Fix `template_version_comment.yml` github action ([#3212](https://github.com/nf-core/tools/pull/3212)) - Fix pre-commit linting on pipeline template ([#3218](https://github.com/nf-core/tools/pull/3218)) +- Template: Remove mention of GRCh37 if igenomes is skipped ([#3330](https://github.com/nf-core/tools/pull/3330)) ### Linting From a59e929bd677e4437ba59fcf8ac3d921b0570fd2 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 9 Dec 2024 15:44:24 +0100 Subject: [PATCH 152/164] revert updating textual-dev to 1.7.0 --- CHANGELOG.md | 1 - requirements-dev.txt | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 42945ef2c..982d48c56 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -68,7 +68,6 @@ - 
Update codecov/codecov-action action to v5 ([#3283](https://github.com/nf-core/tools/pull/3283)) - Update gitpod/workspace-base Docker digest to 12853f7 ([#3309](https://github.com/nf-core/tools/pull/3309)) - Update pre-commit hook astral-sh/ruff-pre-commit to v0.8.2 ([#3325](https://github.com/nf-core/tools/pull/3325)) -- Update dependency textual-dev to v1.7.0 ([#3308](https://github.com/nf-core/tools/pull/3308)) ## [v3.0.2 - Titanium Tapir Patch](https://github.com/nf-core/tools/releases/tag/3.0.2) - [2024-10-11] diff --git a/requirements-dev.txt b/requirements-dev.txt index 48069d59a..04c6372d7 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -6,7 +6,7 @@ responses ruff Sphinx sphinx-rtd-theme -textual-dev==1.7.0 +textual-dev==1.5.1 types-PyYAML types-requests types-jsonschema From 038549034dadeed8835719b893429cd8b29a80a3 Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 9 Dec 2024 16:41:31 +0100 Subject: [PATCH 153/164] don't break gitpod.yml with template string --- nf_core/pipeline-template/.gitpod.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/nf_core/pipeline-template/.gitpod.yml b/nf_core/pipeline-template/.gitpod.yml index 67d24ff0f..947315c0c 100644 --- a/nf_core/pipeline-template/.gitpod.yml +++ b/nf_core/pipeline-template/.gitpod.yml @@ -4,8 +4,8 @@ tasks: command: | pre-commit install --install-hooks nextflow self-update - +{% if code_linters %} vscode: extensions: - #{%- if code_linters -%} - - nf-core.nf-core-extensionpack # https://github.com/nf-core/vscode-extensionpack{% endif %} + - nf-core.nf-core-extensionpack # https://github.com/nf-core/vscode-extensionpack +{%- endif -%} From 337143daba96d73a78ff2c324c2ff38381f60e4f Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 9 Dec 2024 16:43:57 +0100 Subject: [PATCH 154/164] update changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2ea872655..313be748b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md 
@@ -62,6 +62,7 @@ - Update gitpod vscode extensions to use nf-core extension pack ([#3327](https://github.com/nf-core/tools/pull/3327)) - Remove toList() channel operation from inside onComplete block ([#3304](https://github.com/nf-core/tools/pull/3304)) - build: Setup VS Code tests ([#3292](https://github.com/nf-core/tools/pull/3292)) +- Don't break gitpod.yml with template string ([#3332](https://github.com/nf-core/tools/pull/3332)) ### Version updates From 15880be40d065d4d89dd2c2c65513713a029289b Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 9 Dec 2024 16:54:17 +0100 Subject: [PATCH 155/164] make prettier happy --- nf_core/pipeline-template/.gitpod.yml | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/nf_core/pipeline-template/.gitpod.yml b/nf_core/pipeline-template/.gitpod.yml index 947315c0c..f2088365d 100644 --- a/nf_core/pipeline-template/.gitpod.yml +++ b/nf_core/pipeline-template/.gitpod.yml @@ -3,9 +3,8 @@ tasks: - name: Update Nextflow and setup pre-commit command: | pre-commit install --install-hooks - nextflow self-update -{% if code_linters %} + nextflow self-update {%- if code_linters %} + vscode: extensions: - - nf-core.nf-core-extensionpack # https://github.com/nf-core/vscode-extensionpack -{%- endif -%} + - nf-core.nf-core-extensionpack {%- endif -%} From 5b1cf24f7fc9548928221f914253570c7238b63d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Matthias=20H=C3=B6rtenhuber?= Date: Mon, 9 Dec 2024 17:09:32 +0100 Subject: [PATCH 156/164] Update nf_core/pipeline-template/.gitpod.yml MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: JĂșlia Mir Pedrol --- nf_core/pipeline-template/.gitpod.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipeline-template/.gitpod.yml b/nf_core/pipeline-template/.gitpod.yml index f2088365d..d48e12272 100644 --- a/nf_core/pipeline-template/.gitpod.yml +++ b/nf_core/pipeline-template/.gitpod.yml @@ -3,7 +3,7 @@ tasks: - 
name: Update Nextflow and setup pre-commit command: | pre-commit install --install-hooks - nextflow self-update {%- if code_linters %} + nextflow self-update {% if code_linters %} vscode: extensions: From 045aaec42944e2ef7dfd2d1d6e2fcd1a6775d320 Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 9 Dec 2024 17:11:04 +0100 Subject: [PATCH 157/164] fix template string --- nf_core/pipeline-template/.gitpod.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipeline-template/.gitpod.yml b/nf_core/pipeline-template/.gitpod.yml index d48e12272..c02b93834 100644 --- a/nf_core/pipeline-template/.gitpod.yml +++ b/nf_core/pipeline-template/.gitpod.yml @@ -7,4 +7,4 @@ tasks: vscode: extensions: - - nf-core.nf-core-extensionpack {%- endif -%} + - nf-core.nf-core-extensionpack # https://github.com/nf-core/vscode-extensionpack{% endif %} From 3c4fa811d521fe4090b2aca78154471d707ee58d Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 9 Dec 2024 18:12:08 +0100 Subject: [PATCH 158/164] rocrate: remove duplicated entries for name and version --- nf_core/pipelines/rocrate.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/nf_core/pipelines/rocrate.py b/nf_core/pipelines/rocrate.py index 915f203f0..21df0513a 100644 --- a/nf_core/pipelines/rocrate.py +++ b/nf_core/pipelines/rocrate.py @@ -216,6 +216,9 @@ def set_main_entity(self, main_entity_filename: str): ) self.crate.mainEntity.append_to("version", self.version, compact=True) + # remove duplicate entries for version + self.crate.mainEntity["version"] = list(set(self.crate.mainEntity["version"])) + # get keywords from nf-core website remote_workflows = requests.get("https://nf-co.re/pipelines.json").json()["remote_workflows"] # go through all remote workflows and find the one that matches the pipeline name @@ -236,6 +239,9 @@ def set_main_entity(self, main_entity_filename: str): self.crate.mainEntity.append_to("license", self.crate.license) self.crate.mainEntity.append_to("name", self.crate.name) + # 
remove duplicate entries for name + self.crate.mainEntity["name"] = list(set(self.crate.mainEntity["name"])) + if "dev" in self.version: self.crate.creativeWorkStatus = "InProgress" else: From 0e62e6edcf77f46ff34d640b04019d781828ade2 Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 9 Dec 2024 18:13:37 +0100 Subject: [PATCH 159/164] update changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 313be748b..bb897d76c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -63,6 +63,7 @@ - Remove toList() channel operation from inside onComplete block ([#3304](https://github.com/nf-core/tools/pull/3304)) - build: Setup VS Code tests ([#3292](https://github.com/nf-core/tools/pull/3292)) - Don't break gitpod.yml with template string ([#3332](https://github.com/nf-core/tools/pull/3332)) +- rocrate: remove duplicated entries for name and version ([#3333](https://github.com/nf-core/tools/pull/3333)) ### Version updates From 3e81adb62de4649c17a0c7233c8d3feec77e82e8 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 10 Dec 2024 10:13:51 +0100 Subject: [PATCH 160/164] update version in ro crate on version bump --- nf_core/pipelines/bump_version.py | 4 ++++ nf_core/pipelines/rocrate.py | 22 +++++++++++++++++++ tests/pipelines/test_rocrate.py | 35 +++++++++++++++++++++++++++++++ 3 files changed, 61 insertions(+) diff --git a/nf_core/pipelines/bump_version.py b/nf_core/pipelines/bump_version.py index 3190ed70d..de0342c7f 100644 --- a/nf_core/pipelines/bump_version.py +++ b/nf_core/pipelines/bump_version.py @@ -11,6 +11,7 @@ from ruamel.yaml import YAML import nf_core.utils +from nf_core.pipelines.rocrate import ROCrate from nf_core.utils import Pipeline log = logging.getLogger(__name__) @@ -127,6 +128,9 @@ def bump_pipeline_version(pipeline_obj: Pipeline, new_version: str) -> None: yaml_key=["template", "version"], ) + # update rocrate + ROCrate(pipeline_obj.wf_path).update_rocrate() + def bump_nextflow_version(pipeline_obj: 
Pipeline, new_version: str) -> None: """Bumps the required Nextflow version number of a pipeline. diff --git a/nf_core/pipelines/rocrate.py b/nf_core/pipelines/rocrate.py index 21df0513a..ec2b23e02 100644 --- a/nf_core/pipelines/rocrate.py +++ b/nf_core/pipelines/rocrate.py @@ -336,6 +336,28 @@ def add_main_authors(self, wf_file: rocrate.model.entity.Entity) -> None: if author in authors: wf_file.append_to("maintainer", author_entitity) + def update_rocrate(self) -> bool: + """ + Update the rocrate file + """ + # check if we need to output a json file and/or a zip file based on the file extensions + json_path = None + zip_path = None + # try to find a json file + json_path = Path(self.pipeline_dir, "ro-crate-metadata.json") + if json_path.exists(): + json_path = json_path + else: + json_path = None + + # try to find a zip file + zip_path = Path(self.pipeline_dir, "ro-crate.crate.zip") + if zip_path.exists(): + zip_path = zip_path + else: + zip_path = None + return self.create_rocrate(json_path=json_path, zip_path=zip_path) + def get_orcid(name: str) -> Optional[str]: """ diff --git a/tests/pipelines/test_rocrate.py b/tests/pipelines/test_rocrate.py index 01a77ecd7..ac86e64bd 100644 --- a/tests/pipelines/test_rocrate.py +++ b/tests/pipelines/test_rocrate.py @@ -1,5 +1,6 @@ """Test the nf-core pipelines rocrate command""" +import json import shutil import tempfile from pathlib import Path @@ -12,6 +13,7 @@ import nf_core.pipelines.create.create import nf_core.pipelines.rocrate import nf_core.utils +from nf_core.pipelines.bump_version import bump_pipeline_version from ..test_pipelines import TestPipelines @@ -125,3 +127,36 @@ def test_rocrate_creation_for_fetchngs(self): # Clean up shutil.rmtree(tmp_dir) + + def test_update_rocrate(self): + """Run the nf-core rocrate command with a zip output""" + + assert self.rocrate_obj.create_rocrate(json_path=self.pipeline_dir, zip_path=self.pipeline_dir) + + # read the crate json file + with open(Path(self.pipeline_dir, 
"ro-crate-metadata.json")) as f: + crate = json.load(f) + + # check the old version + self.assertEqual(crate["@graph"][2]["version"][0], "1.0.0dev") + # check creativeWorkStatus is InProgress + self.assertEqual(crate["@graph"][0]["creativeWorkStatus"], "InProgress") + + # bump version + bump_pipeline_version(self.pipeline_obj, "1.1.0") + + # Check that the crate was created + self.assertTrue(Path(self.pipeline_dir, "ro-crate.crate.zip").exists()) + + # Check that the crate was updated + self.assertTrue(Path(self.pipeline_dir, "ro-crate-metadata.json").exists()) + + # read the crate json file + with open(Path(self.pipeline_dir, "ro-crate-metadata.json")) as f: + crate = json.load(f) + + # check that the version was updated + self.assertEqual(crate["@graph"][2]["version"][0], "1.1.0") + + # check creativeWorkStatus is Stable + self.assertEqual(crate["@graph"][0]["creativeWorkStatus"], "Stable") From 6ae3de544a77354c2276d4be16863b65ccaaa823 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 10 Dec 2024 10:14:47 +0100 Subject: [PATCH 161/164] handle new author field --- nf_core/pipelines/rocrate.py | 20 ++++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/nf_core/pipelines/rocrate.py b/nf_core/pipelines/rocrate.py index ec2b23e02..cbbda8d3d 100644 --- a/nf_core/pipelines/rocrate.py +++ b/nf_core/pipelines/rocrate.py @@ -267,14 +267,26 @@ def add_main_authors(self, wf_file: rocrate.model.entity.Entity) -> None: # add author entity to crate try: - authors = self.pipeline_obj.nf_config["manifest.author"].split(",") - # remove spaces - authors = [a.strip() for a in authors] + authors = [] + if "manifest.author" in self.pipeline_obj.nf_config: + authors.extend([a.strip() for a in self.pipeline_obj.nf_config["manifest.author"].split(",")]) + if "manifest.contributor" in self.pipeline_obj.nf_config: + authors.extend( + [ + c.get("name", "").strip() + for c in self.pipeline_obj.nf_config["manifest.contributor"] + if "name" in c + ] + ) + if not 
authors: + raise KeyError("No authors found") # add manifest authors as maintainer to crate except KeyError: - log.error("No author field found in manifest of nextflow.config") + log.error("No author or contributor fields found in manifest of nextflow.config") return + # remove duplicates + authors = list(set(authors)) # look at git contributors for author names try: git_contributors: Set[str] = set() From b976ec32a49f17e51784c425b2fc1a49be3a3307 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 10 Dec 2024 10:16:12 +0100 Subject: [PATCH 162/164] update changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index bb897d76c..9015ac6da 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -64,6 +64,7 @@ - build: Setup VS Code tests ([#3292](https://github.com/nf-core/tools/pull/3292)) - Don't break gitpod.yml with template string ([#3332](https://github.com/nf-core/tools/pull/3332)) - rocrate: remove duplicated entries for name and version ([#3333](https://github.com/nf-core/tools/pull/3333)) +- rocrate: Update crate with version bump and handle new contributor field ([#3334](https://github.com/nf-core/tools/pull/3334)) ### Version updates From 8a78d4bcf489c1073ca4376a93c763e4c4927154 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Matthias=20H=C3=B6rtenhuber?= Date: Tue, 10 Dec 2024 10:51:44 +0100 Subject: [PATCH 163/164] Update nf_core/pipelines/rocrate.py --- nf_core/pipelines/rocrate.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/nf_core/pipelines/rocrate.py b/nf_core/pipelines/rocrate.py index cbbda8d3d..9af306aa2 100644 --- a/nf_core/pipelines/rocrate.py +++ b/nf_core/pipelines/rocrate.py @@ -353,8 +353,6 @@ def update_rocrate(self) -> bool: Update the rocrate file """ # check if we need to output a json file and/or a zip file based on the file extensions - json_path = None - zip_path = None # try to find a json file json_path = Path(self.pipeline_dir, "ro-crate-metadata.json") if json_path.exists(): From 
953055d337811a6c45db9f49b71008485e51e6b0 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 10 Dec 2024 11:22:01 +0100 Subject: [PATCH 164/164] fix type error --- nf_core/pipelines/rocrate.py | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/nf_core/pipelines/rocrate.py b/nf_core/pipelines/rocrate.py index 9af306aa2..bc868273c 100644 --- a/nf_core/pipelines/rocrate.py +++ b/nf_core/pipelines/rocrate.py @@ -354,18 +354,17 @@ def update_rocrate(self) -> bool: """ # check if we need to output a json file and/or a zip file based on the file extensions # try to find a json file - json_path = Path(self.pipeline_dir, "ro-crate-metadata.json") - if json_path.exists(): - json_path = json_path - else: - json_path = None + json_path: Optional[Path] = None + potential_json_path = Path(self.pipeline_dir, "ro-crate-metadata.json") + if potential_json_path.exists(): + json_path = potential_json_path # try to find a zip file - zip_path = Path(self.pipeline_dir, "ro-crate.crate.zip") - if zip_path.exists(): - zip_path = zip_path - else: - zip_path = None + zip_path: Optional[Path] = None + potential_zip_path = Path(self.pipeline_dir, "ro-crate.crate.zip") + if potential_zip_path.exists(): + zip_path = potential_zip_path + return self.create_rocrate(json_path=json_path, zip_path=zip_path)