diff --git a/.editorconfig b/.editorconfig index 449f446a3..f266805d6 100644 --- a/.editorconfig +++ b/.editorconfig @@ -20,3 +20,11 @@ indent_style = unset [**/Makefile] indent_style = unset + +[tests/pipelines/__snapshots__/*] +charset = unset +end_of_line = unset +insert_final_newline = unset +trim_trailing_whitespace = unset +indent_style = unset +indent_size = unset diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml index 5043b37ac..5b627a145 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.yml +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -24,7 +24,7 @@ body: description: Steps to reproduce the behaviour. Please paste the command and output from your terminal. render: console placeholder: | - $ nf-core lint ... + $ nf-core pipelines lint ... Some output where something broke diff --git a/.github/RELEASE_CHECKLIST.md b/.github/RELEASE_CHECKLIST.md index 9a1905c7a..3e0b19ec6 100644 --- a/.github/RELEASE_CHECKLIST.md +++ b/.github/RELEASE_CHECKLIST.md @@ -9,11 +9,10 @@ 7. Create a PR from `dev` to `master` 8. Make sure all CI tests are passing again (additional tests are run on PRs to `master`) 9. Request review (2 approvals required) -10. Run `rich-codex` to regenerate docs screengrabs (actions `workflow_dispatch` button) -11. Merge the PR into `master` -12. Wait for CI tests on the commit to passed -13. (Optional but a good idea) Run a manual sync on `nf-core/testpipeline` and check that CI is passing on the resulting PR. -14. Create a new release copying the `CHANGELOG` for that release into the description section. +10. Merge the PR into `master` +11. Wait for CI tests on the commit to pass +12. (Optional but a good idea) Run a manual sync on `nf-core/testpipeline` and check that CI is passing on the resulting PR. +13. Create a new release copying the `CHANGELOG` for that release into the description section. ## After release @@ -21,3 +20,4 @@ 2. Check that the automatic `PyPi` deployment has worked: [pypi.org/project/nf-core](https://pypi.org/project/nf-core/) 3. Check `BioConda` has an automated PR to bump the version, and merge. eg. [bioconda/bioconda-recipes #20065](https://github.com/bioconda/bioconda-recipes/pull/20065) 4. Create a tools PR to `dev` to bump back to the next development version in `CHANGELOG.md` and `setup.py` and change the gitpod container to `nfcore/gitpod:dev`. +5. 
Run `rich-codex` on the [tools/website repo](https://github.com/nf-core/website/actions/workflows/rich-codex.yml) to regenerate docs screengrabs (actions `workflow_dispatch` button) diff --git a/.github/actions/create-lint-wf/action.yml b/.github/actions/create-lint-wf/action.yml index 0bc5e432e..ecd0eef87 100644 --- a/.github/actions/create-lint-wf/action.yml +++ b/.github/actions/create-lint-wf/action.yml @@ -27,12 +27,12 @@ runs: run: | mkdir -p create-lint-wf && cd create-lint-wf export NXF_WORK=$(pwd) - nf-core --log-file log.txt create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" --plain + nf-core --log-file log.txt pipelines create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" # Try syncing it before we change anything - - name: nf-core sync + - name: nf-core pipelines sync shell: bash - run: nf-core --log-file log.txt sync --dir nf-core-testpipeline/ + run: nf-core --log-file log.txt pipelines sync --dir nf-core-testpipeline/ working-directory: create-lint-wf # Run code style linting @@ -53,26 +53,32 @@ runs: run: find nf-core-testpipeline -type f -exec sed -i '/TODO nf-core:/d' {} \; working-directory: create-lint-wf + # Uncomment includeConfig statement + - name: uncomment include config + shell: bash + run: find nf-core-testpipeline -type f -exec sed -i 's/\/\/ includeConfig/includeConfig/' {} \; + working-directory: create-lint-wf + # Replace zenodo.XXXXXX to pass readme linting - name: replace zenodo.XXXXXX shell: bash run: find nf-core-testpipeline -type f -exec sed -i 's/zenodo.XXXXXX/zenodo.123456/g' {} \; working-directory: create-lint-wf - # Run nf-core linting - - name: nf-core lint + # Run nf-core pipelines linting + - name: nf-core pipelines lint shell: bash - run: nf-core --verbose --log-file log.txt --hide-progress lint --dir nf-core-testpipeline --fail-ignored --fail-warned + run: nf-core --verbose --log-file log.txt --hide-progress pipelines lint --dir nf-core-testpipeline --fail-ignored --fail-warned working-directory: create-lint-wf - - name: nf-core bump-version to release + - name: nf-core pipelines bump-version to release shell: bash - run: nf-core --log-file log.txt bump-version --dir nf-core-testpipeline/ 1.1 + run: nf-core --log-file log.txt pipelines bump-version --dir nf-core-testpipeline/ 1.1 working-directory: create-lint-wf - - name: nf-core lint in release mode + - name: nf-core pipelines lint in release mode shell: bash - run: nf-core --log-file log.txt --hide-progress lint --dir nf-core-testpipeline --fail-ignored --fail-warned --release + run: nf-core --log-file log.txt --hide-progress pipelines lint --dir nf-core-testpipeline --fail-ignored --fail-warned --release working-directory: create-lint-wf - name: Upload log file artifact diff --git a/.github/workflows/changelog.py b/.github/workflows/changelog.py index 471665e4b..24130e65c 100644 --- a/.github/workflows/changelog.py +++ b/.github/workflows/changelog.py @@ -1,3 +1,4 @@ +#! /usr/bin/env python3 """ Taken from https://github.com/MultiQC/MultiQC/blob/main/.github/workflows/changelog.py and updated for nf-core @@ -18,7 +19,7 @@ import re import sys from pathlib import Path -from typing import List +from typing import List, Tuple REPO_URL = "https://github.com/nf-core/tools" @@ -32,7 +33,7 @@ assert pr_number, pr_number # Trim the PR number added when GitHub squashes commits, e.g. 
"Template: Updated (#2026)" -pr_title = pr_title.removesuffix(f" (#{pr_number})") +pr_title = pr_title.removesuffix(f" (#{pr_number})") # type: ignore changelog_path = workspace_path / "CHANGELOG.md" @@ -50,7 +51,7 @@ sys.exit(0) -def _determine_change_type(pr_title) -> tuple[str, str]: +def _determine_change_type(pr_title) -> Tuple[str, str]: """ Determine the type of the PR: Template, Download, Linting, Modules, Subworkflows, or General Returns a tuple of the section name and the module info. @@ -85,7 +86,7 @@ def _determine_change_type(pr_title) -> tuple[str, str]: pr_link = f"([#{pr_number}]({REPO_URL}/pull/{pr_number}))" # Handle manual changelog entries through comments. -if comment := comment.removeprefix("@nf-core-bot changelog").strip(): +if comment := comment.removeprefix("@nf-core-bot changelog").strip(): # type: ignore print(f"Adding manual changelog entry: {comment}") pr_title = comment new_lines = [ diff --git a/.github/workflows/create-lint-wf.yml b/.github/workflows/create-lint-wf.yml index 03b9aa241..e0b4c67cf 100644 --- a/.github/workflows/create-lint-wf.yml +++ b/.github/workflows/create-lint-wf.yml @@ -38,7 +38,7 @@ jobs: strategy: matrix: NXF_VER: - - "23.04.0" + - "24.04.2" - "latest-everything" steps: - name: go to subdirectory and change nextflow workdir @@ -78,13 +78,18 @@ jobs: run: find nf-core-testpipeline -type f -exec sed -i '/TODO nf-core:/d' {} \; working-directory: create-lint-wf + # Uncomment includeConfig statement + - name: uncomment include config + run: find nf-core-testpipeline -type f -exec sed -i 's/\/\/ includeConfig/includeConfig/' {} \; + working-directory: create-lint-wf + # Run the other nf-core commands - - name: nf-core list - run: nf-core --log-file log.txt list + - name: nf-core pipelines list + run: nf-core --log-file log.txt pipelines list working-directory: create-lint-wf - - name: nf-core schema - run: nf-core --log-file log.txt schema build --dir nf-core-testpipeline/ --no-prompts + - name: nf-core pipelines schema + run: nf-core --log-file log.txt pipelines schema build --dir nf-core-testpipeline/ --no-prompts working-directory: create-lint-wf - name: Cleanup work directory diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index 0de7287a5..d8df2f690 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -31,27 +31,43 @@ env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} jobs: + prepare-matrix: + name: Retrieve all template features + runs-on: ubuntu-latest + outputs: + all_features: ${{ steps.create_matrix.outputs.matrix }} + steps: + - name: 🏗 Set up yq + uses: frenck/action-setup-yq@v1 + - name: checkout + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + - name: Create Matrix + id: create_matrix + run: | + echo "matrix=$(yq 'keys | filter(. != "github") | filter(. != "is_nfcore") | filter(. 
!= "test_config") | tojson(0)' nf_core/pipelines/create/template_features.yml)" >> $GITHUB_OUTPUT + RunTestWorkflow: runs-on: ${{ matrix.runner }} + needs: prepare-matrix env: NXF_ANSI_LOG: false strategy: matrix: - TEMPLATE: - - "template_skip_github_badges.yml" - - "template_skip_igenomes.yml" - - "template_skip_ci.yml" + TEMPLATE: ${{ fromJson(needs.prepare-matrix.outputs.all_features) }} runner: # use the runner given by the input if it is dispatched manually, run on github if it is a rerun or on self-hosted by default - ${{ github.event.inputs.runners || github.run_number > 1 && 'ubuntu-latest' || 'self-hosted' }} profile: ["self_hosted_runner"] include: - - TEMPLATE: "template_skip_all.yml" + - TEMPLATE: all runner: ubuntu-latest profile: "docker" - - TEMPLATE: "template_skip_nf_core_configs.yml" + - TEMPLATE: nf_core_configs runner: ubuntu-latest profile: "docker" + exclude: + - TEMPLATE: nf_core_configs + profile: "self_hosted_runner" fail-fast: false steps: @@ -80,33 +96,22 @@ jobs: version: latest-everything # Create template files - - name: Create template skip all (except github) + - name: Create template skip ${{ matrix.TEMPLATE }} run: | mkdir create-test-lint-wf export NXF_WORK=$(pwd) - printf "prefix: my-prefix\nskip: ['ci', 'github_badges', 'igenomes', 'nf_core_configs']" > create-test-lint-wf/template_skip_all.yml - - - name: Create template skip github_badges - run: | - printf "prefix: my-prefix\nskip: github_badges" > create-test-lint-wf/template_skip_github_badges.yml - - - name: Create template skip igenomes - run: | - printf "prefix: my-prefix\nskip: igenomes" > create-test-lint-wf/template_skip_igenomes.yml - - - name: Create template skip ci - run: | - printf "prefix: my-prefix\nskip: ci" > create-test-lint-wf/template_skip_ci.yml - - - name: Create template skip nf_core_configs - run: | - printf "prefix: my-prefix\nskip: nf_core_configs" > create-test-lint-wf/template_skip_nf_core_configs.yml + if [ ${{ matrix.TEMPLATE }} == "all" ] + then + printf "org: my-prefix\nskip_features: ${{ needs.prepare-matrix.outputs.all_features }}" > create-test-lint-wf/template_skip_all.yml + else + printf "org: my-prefix\nskip_features: [${{ matrix.TEMPLATE }}]" > create-test-lint-wf/template_skip_${{ matrix.TEMPLATE }}.yml + fi # Create a pipeline from the template - name: create a pipeline from the template ${{ matrix.TEMPLATE }} run: | cd create-test-lint-wf - nf-core --log-file log.txt create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" --template-yaml ${{ matrix.TEMPLATE }} + nf-core --log-file log.txt pipelines create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" --template-yaml template_skip_${{ matrix.TEMPLATE }}.yml - name: run the pipeline run: | @@ -119,8 +124,8 @@ jobs: rm -rf create-test-lint-wf/results # Try syncing it before we change anything - - name: nf-core sync - run: nf-core --log-file log.txt sync --dir create-test-lint-wf/my-prefix-testpipeline/ + - name: nf-core pipelines sync + run: nf-core --log-file log.txt pipelines sync --dir create-test-lint-wf/my-prefix-testpipeline/ # Run code style linting - name: Run pre-commit @@ -132,24 +137,29 @@ jobs: run: find my-prefix-testpipeline -type f -exec sed -i '/TODO nf-core:/d' {} \; working-directory: create-test-lint-wf + # Uncomment includeConfig statement + - name: uncomment include config + run: find my-prefix-testpipeline -type f -exec sed -i 's/\/\/ includeConfig/includeConfig/' {} \; + working-directory: create-test-lint-wf + # Replace zenodo.XXXXXX to 
pass readme linting - name: replace zenodo.XXXXXX run: find my-prefix-testpipeline -type f -exec sed -i 's/zenodo.XXXXXX/zenodo.123456/g' {} \; working-directory: create-test-lint-wf # Run nf-core linting - - name: nf-core lint - run: nf-core --log-file log.txt --hide-progress lint --dir my-prefix-testpipeline --fail-warned + - name: nf-core pipelines lint + run: nf-core --log-file log.txt --hide-progress pipelines lint --dir my-prefix-testpipeline --fail-warned working-directory: create-test-lint-wf # Run bump-version - - name: nf-core bump-version - run: nf-core --log-file log.txt bump-version --dir my-prefix-testpipeline/ 1.1 + - name: nf-core pipelines bump-version + run: nf-core --log-file log.txt pipelines bump-version --dir my-prefix-testpipeline/ 1.1 working-directory: create-test-lint-wf # Run nf-core linting in release mode - - name: nf-core lint in release mode - run: nf-core --log-file log.txt --hide-progress lint --dir my-prefix-testpipeline --fail-warned --release + - name: nf-core pipelines lint in release mode + run: nf-core --log-file log.txt --hide-progress pipelines lint --dir my-prefix-testpipeline --fail-warned --release working-directory: create-test-lint-wf - name: Tar files diff --git a/.github/workflows/create-test-wf.yml b/.github/workflows/create-test-wf.yml index 87cdf2e7b..782a08ac9 100644 --- a/.github/workflows/create-test-wf.yml +++ b/.github/workflows/create-test-wf.yml @@ -39,7 +39,7 @@ jobs: strategy: matrix: NXF_VER: - - "23.04.0" + - "24.04.2" - "latest-everything" steps: - name: go to working directory @@ -70,8 +70,8 @@ jobs: run: | mkdir create-test-wf && cd create-test-wf export NXF_WORK=$(pwd) - nf-core --log-file log.txt create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" --plain - nextflow run nf-core-testpipeline -profile test,self_hosted_runner --outdir ./results + nf-core --log-file log.txt pipelines create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" + nextflow run nf-core-testpipeline -profile self_hosted_runner,test --outdir ./results - name: Upload log file artifact if: ${{ always() }} diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 70b9cfd0a..dc8803188 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -67,7 +67,7 @@ jobs: - name: List tests id: list_tests run: | - echo "tests=$(find tests/test_* | tac | sed 's/tests\///g' | jq -R -s -c '{test: (split("\n")[:-1])}')" >> $GITHUB_OUTPUT + echo "tests=$(find tests -type f -name "test_*.py" | tac | sed 's/tests\///g' | jq -R -s -c '{test: (split("\n")[:-1])}')" >> $GITHUB_OUTPUT outputs: tests: ${{ steps.list_tests.outputs.tests }} @@ -142,10 +142,22 @@ jobs: exit 1 fi + - name: Store snapshot report + uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4 + if: always() + with: + name: Snapshot Report ${{ matrix.test }} + path: ./snapshot_report.html + + - name: remove slashes from test name + run: | + test=$(echo ${{ matrix.test }} | sed 's/\//__/g') + echo "test=${test}" >> $GITHUB_ENV + - name: Upload coverage uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4 with: - name: coverage_${{ matrix.test }} + name: coverage_${{ env.test }} path: .coverage coverage: diff --git a/.github/workflows/rich-codex.yml b/.github/workflows/rich-codex.yml deleted file mode 100644 index cd12b139d..000000000 --- a/.github/workflows/rich-codex.yml +++ /dev/null @@ -1,39 +0,0 @@ -name: Generate images for docs -on: - workflow_dispatch: -jobs: - 
rich_codex: - runs-on: ubuntu-latest - steps: - - name: Check out the repo - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 - - name: Set up Python - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 - with: - python-version: 3.x - cache: pip - cache-dependency-path: setup.py - - - name: Install Nextflow - uses: nf-core/setup-nextflow@v2 - - - name: Install nf-test - uses: nf-core/setup-nf-test@v1 - - - name: Install nf-core/tools - run: pip install git+https://github.com/nf-core/tools.git@dev - - - name: Generate terminal images with rich-codex - uses: ewels/rich-codex@8ce988cc253c240a3027ba58e33e47640935dd8b # v1 - env: - COLUMNS: 100 - HIDE_PROGRESS: "true" - with: - commit_changes: "true" - clean_img_paths: docs/images/*.svg - terminal_width: 100 - before_command: > - which nextflow && - which nf-core && - nextflow -version && - nf-core --version diff --git a/.github/workflows/sync.yml b/.github/workflows/sync.yml index 55880e813..d453dc930 100644 --- a/.github/workflows/sync.yml +++ b/.github/workflows/sync.yml @@ -19,6 +19,10 @@ on: description: "Force a PR to be created" type: boolean default: false + pipeline: + description: "Pipeline to sync" + type: string + default: "all" # Cancel if a newer run is started concurrency: @@ -35,6 +39,14 @@ jobs: run: | if [ "${{ github.event.inputs.testpipeline }}" == "true" ]; then echo '{"pipeline":["testpipeline"]}' > pipeline_names.json + elif [ "${{ github.event.inputs.pipeline }}" != "all" ]; then + curl -O https://nf-co.re/pipeline_names.json + # check if the pipeline exists + if ! grep -q "\"${{ github.event.inputs.pipeline }}\"" pipeline_names.json; then + echo "Pipeline ${{ github.event.inputs.pipeline }} does not exist" + exit 1 + fi + echo '{"pipeline":["${{ github.event.inputs.pipeline }}"]}' > pipeline_names.json else curl -O https://nf-co.re/pipeline_names.json fi @@ -82,7 +94,7 @@ jobs: run: | git config --global user.email "core@nf-co.re" git config --global user.name "nf-core-bot" - nf-core --log-file sync_log_${{ matrix.pipeline }}.txt sync -d nf-core/${{ matrix.pipeline }} \ + nf-core --log-file sync_log_${{ matrix.pipeline }}.txt pipelines sync -d nf-core/${{ matrix.pipeline }} \ --from-branch dev \ --pull-request \ --username nf-core-bot \ diff --git a/.github/workflows/update-textual-snapshots.yml b/.github/workflows/update-textual-snapshots.yml new file mode 100644 index 000000000..fb936762f --- /dev/null +++ b/.github/workflows/update-textual-snapshots.yml @@ -0,0 +1,93 @@ +name: Update Textual snapshots from a comment +on: + issue_comment: + types: [created] + +jobs: + update-snapshots: + # Only run if comment is on a PR with the main repo, and if it contains the magic keywords + if: > + contains(github.event.comment.html_url, '/pull/') && + contains(github.event.comment.body, '@nf-core-bot update snapshots') && + github.repository == 'nf-core/tools' + runs-on: ubuntu-latest + steps: + # Use the @nf-core-bot token to check out so we can push later + - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + with: + token: ${{ secrets.nf_core_bot_auth_token }} + + # indication that the command is running + - name: React on comment + uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4 + with: + comment-id: ${{ github.event.comment.id }} + reactions: eyes + + # Action runs on the issue comment, so we don't get the PR by default + # Use the gh cli to check out the PR + - name: Checkout Pull Request + run: gh pr checkout ${{ 
github.event.issue.number }} + env: + GITHUB_TOKEN: ${{ secrets.nf_core_bot_auth_token }} + + # Install dependencies and run pytest + - name: Set up Python + uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 + with: + python-version: "3.12" + cache: "pip" + + - name: Install dependencies + run: | + python -m pip install --upgrade pip -r requirements-dev.txt + pip install -e . + + - name: Run pytest to update snapshots + id: pytest + run: | + python3 -m pytest tests/test_create_app.py --snapshot-update --color=yes --durations=0 + continue-on-error: true + + # indication that the run has finished + - name: react if finished successfully + if: steps.pytest.outcome == 'success' + uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4 + with: + comment-id: ${{ github.event.comment.id }} + reactions: "+1" + + - name: Commit & push changes + id: commit-and-push + if: steps.pytest.outcome == 'failure' + run: | + git config user.email "core@nf-co.re" + git config user.name "nf-core-bot" + git config push.default upstream + git add . + git status + git commit -m "[automated] Update Textual snapshots" + git push + + - name: react if snapshots were updated + id: react-if-updated + if: steps.commit-and-push.outcome == 'success' + uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4 + with: + comment-id: ${{ github.event.comment.id }} + reactions: hooray + + - name: react if snapshots were not updated + if: steps.commit-and-push.outcome == 'failure' + uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4 + with: + comment-id: ${{ github.event.comment.id }} + reactions: confused + + - name: comment if snapshots were not updated + if: steps.commit-and-push.outcome == 'failure' + uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4 + with: + issue-number: ${{ github.event.issue.number }} + body: | + @${{ github.actor }} I tried to update the snapshots, but it didn't work. Please update them manually. diff --git a/.gitignore b/.gitignore index 271fdb14e..a3721da86 100644 --- a/.gitignore +++ b/.gitignore @@ -115,3 +115,6 @@ ENV/ pip-wheel-metadata .vscode .*.sw? 
+ +# Textual +snapshot_report.html diff --git a/.gitpod.yml b/.gitpod.yml index 445cb3570..f92457278 100644 --- a/.gitpod.yml +++ b/.gitpod.yml @@ -6,17 +6,14 @@ tasks: python -m pip install -r requirements-dev.txt pre-commit install --install-hooks nextflow self-update - - name: unset JAVA_TOOL_OPTIONS - command: | - unset JAVA_TOOL_OPTIONS vscode: - extensions: # based on nf-core.nf-core-extensionpack + extensions: - esbenp.prettier-vscode # Markdown/CommonMark linting and style checking for Visual Studio Code - EditorConfig.EditorConfig # override user/workspace settings with settings found in .editorconfig files - Gruntfuggly.todo-tree # Display TODO and FIXME in a tree view in the activity bar - mechatroner.rainbow-csv # Highlight columns in csv files in different colors - # - nextflow.nextflow # Nextflow syntax highlighting + - nextflow.nextflow # Nextflow syntax highlighting - oderwat.indent-rainbow # Highlight indentation level - streetsidesoftware.code-spell-checker # Spelling checker for source code - charliermarsh.ruff # Code linter Ruff diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 887cbe027..67aa3204c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.4.3 + rev: v0.6.9 hooks: - id: ruff # linter args: [--fix, --exit-non-zero-on-fix] # sort imports and fix @@ -10,16 +10,16 @@ repos: hooks: - id: prettier additional_dependencies: - - prettier@3.2.5 + - prettier@3.3.3 - repo: https://github.com/editorconfig-checker/editorconfig-checker.python - rev: "2.7.3" + rev: "3.0.3" hooks: - id: editorconfig-checker alias: ec - repo: https://github.com/pre-commit/mirrors-mypy - rev: "v1.10.0" + rev: "v1.11.2" hooks: - id: mypy additional_dependencies: @@ -28,3 +28,4 @@ repos: - types-jsonschema - types-Markdown - types-setuptools + - pydantic diff --git a/.prettierignore b/.prettierignore index b923532bd..cbe7274a4 100644 --- a/.prettierignore +++ b/.prettierignore @@ -4,9 +4,11 @@ slackreport.json docs/api/_build testing nf_core/module-template/meta.yml -nf_core/module-template/tests/tags.yml -nf_core/subworkflow-template/tests/tags.yml nf_core/pipeline-template/nextflow_schema.json +nf_core/pipeline-template/modules.json +nf_core/pipeline-template/tower.yml +nf_core/pipeline-template/.github/ISSUE_TEMPLATE/bug_report.yml +tests/data/pipeline_create_template_skip.yml # don't run on things handled by ruff *.py *.pyc diff --git a/CHANGELOG.md b/CHANGELOG.md index e130d3c37..201665682 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,10 +1,124 @@ # nf-core/tools: Changelog +## [v3.0.0 - Titanium Tapir](https://github.com/nf-core/tools/releases/tag/3.0.0) - [2024-10-08] + +**Highlights** + +- Pipeline commands are renamed from `nf-core <command>` to `nf-core pipelines <command>` to follow the same command structure as the modules and subworkflows commands. +- More customisation for pipeline templates. The template has been divided into features which can be skipped, e.g. you can create a new pipeline without any traces of FastQC in it. +- A new Text User Interface app when running `nf-core pipelines create` to guide you through the process (no worries, you can still use the CLI if you give all values as parameters) +- We replaced nf-validation with nf-schema in the pipeline template +- CI tests now lint with the nf-core tools version matching the template version of the pipeline, to minimise errors in opened PRs with every new tools release. 
+- `nf-core licences` command is deprecated. +- The structure of nf-core/tools pytests has been updated +- The structure of the API docs has been updated + +### Template + +- Change paths to test data ([#2985](https://github.com/nf-core/tools/pull/2985)) +- Run awsfulltest on PRs to `master` with two PR approvals ([#3042](https://github.com/nf-core/tools/pull/3042)) +- Remove deprecated syntax ([#3046](https://github.com/nf-core/tools/pull/3046)) +- Use filename in code block for `params.yml` ([#3055](https://github.com/nf-core/tools/pull/3055)) +- Remove release announcement for non nf-core pipelines ([#3072](https://github.com/nf-core/tools/pull/3072)) +- handle template features with a yaml file ([#3108](https://github.com/nf-core/tools/pull/3108), [#3112](https://github.com/nf-core/tools/pull/3112)) +- add option to exclude code linters for custom pipeline template ([#3084](https://github.com/nf-core/tools/pull/3084)) +- add option to exclude citations for custom pipeline template ([#3101](https://github.com/nf-core/tools/pull/3101) and [#3169](https://github.com/nf-core/tools/pull/3169)) +- add option to exclude gitpod for custom pipeline template ([#3100](https://github.com/nf-core/tools/pull/3100)) +- add option to exclude codespaces from pipeline template ([#3105](https://github.com/nf-core/tools/pull/3105)) +- add option to exclude multiqc from pipeline template ([#3103](https://github.com/nf-core/tools/pull/3103)) +- add option to exclude changelog from custom pipeline template ([#3104](https://github.com/nf-core/tools/pull/3104)) +- add option to exclude license from pipeline template ([#3125](https://github.com/nf-core/tools/pull/3125)) +- add option to exclude email from pipeline template ([#3126](https://github.com/nf-core/tools/pull/3126)) +- add option to exclude nf-schema from the template ([#3116](https://github.com/nf-core/tools/pull/3116)) +- add option to exclude fastqc from pipeline template ([#3129](https://github.com/nf-core/tools/pull/3129)) +- add option to exclude documentation from pipeline template ([#3130](https://github.com/nf-core/tools/pull/3130)) +- add option to exclude test configs from pipeline template ([#3133](https://github.com/nf-core/tools/pull/3133)) +- add option to exclude tower.yml from pipeline template ([#3134](https://github.com/nf-core/tools/pull/3134)) +- Use nf-schema instead of nf-validation ([#3116](https://github.com/nf-core/tools/pull/3116)) +- test pipeline with conda and singularity on PRs to master ([#3149](https://github.com/nf-core/tools/pull/3149)) +- run nf-core lint `--release` on PRs to master ([#3148](https://github.com/nf-core/tools/pull/3148)) +- Add tests to ensure all files are part of a template customisation group and all groups are tested ([#3099](https://github.com/nf-core/tools/pull/3099)) +- Update the syntax of `utils_nfcore_pipeline_pipeline` local subworkflow ([#3166](https://github.com/nf-core/tools/pull/3166)) +- Remove if/else block to include `igenomes.config` ([#3168](https://github.com/nf-core/tools/pull/3168)) +- Fixed release announcement hashtags for Mastodon ([#3176](https://github.com/nf-core/tools/pull/3176)) +- Remove try/catch blocks from `nextflow.config` ([#3167](https://github.com/nf-core/tools/pull/3167)) +- Extend `download_pipeline.yml` to count pre-downloaded container images. 
([#3182](https://github.com/nf-core/tools/pull/3182)) + +### Linting + +- Fix linting fail on nfcore_external_java_deps if nf_schema is used ([#2976](https://github.com/nf-core/tools/pull/2976)) +- Conda module linting: Include package name in log file ([#3014](https://github.com/nf-core/tools/pull/3014)) +- Remove defaults from conda `environment.yml` file. ([#3029](https://github.com/nf-core/tools/pull/3029)) +- Restructure pipeline tests and move pipeline linting into subfolder ([#3070](https://github.com/nf-core/tools/pull/3070)) +- Fix module linting warning for process_high_memory ([#3086](https://github.com/nf-core/tools/issues/3086)) +- Linting will now fail when an unpinned plugin is used ([#3116](https://github.com/nf-core/tools/pull/3116)) +- Linting will now check if the schema is correct for the used validation plugin ([#3116](https://github.com/nf-core/tools/pull/3116)) +- Linting will now check the use of the right validation plugin include statements in the workflow scripts ([#3116](https://github.com/nf-core/tools/pull/3116)) +- Full linting for correct use of nf-schema and nf-validation ([#3116](https://github.com/nf-core/tools/pull/3116)) +- Handle cases where the directory path contains the name of the component ([#3147](https://github.com/nf-core/tools/pull/3147)) +- Don't test conda `environment.yml` `name` attribute (which should no longer be there) ([#3161](https://github.com/nf-core/tools/pull/3161)) + +### Pipeline create command + +- Allow more special characters on the pipeline name for non-nf-core pipelines ([#3008](https://github.com/nf-core/tools/pull/3008)) +- Mock git credentials to generate stable textual snapshots ([#3007](https://github.com/nf-core/tools/pull/3007)) +- Display input textbox with equally spaced grid ([#3038](https://github.com/nf-core/tools/pull/3038)) +- Allow numbers in custom pipeline name ([#3094](https://github.com/nf-core/tools/pull/3094)) + +### Components + +- The `modules_nfcore` tag in the `main.nf.test` file of modules/subworkflows now displays the organization name in custom modules repositories ([#3005](https://github.com/nf-core/tools/pull/3005)) +- Add `--migrate_pytest` option to `nf-core <component> test` command ([#3085](https://github.com/nf-core/tools/pull/3085)) +- Allow spaces at the beginning of include statements ([#3115](https://github.com/nf-core/tools/pull/3115)) +- Add option `--fix` to update the `meta.yml` file of subworkflows ([#3077](https://github.com/nf-core/tools/pull/3077)) + +### Download + +- Fully removed the already deprecated `-t` / `--tower` flag. +- Refactored the CLI for consistency (short flag is usually second word, e.g. 
also for `--container-library` etc.): + +| Old parameter | New parameter | | --------------------------------- | --------------------------------- | | `-d` / `--download-configuration` | `-c` / `--download-configuration` | | `-p` / `--parallel-downloads` | `-d` / `--parallel-downloads` | | new parameter | `-p` / (`--platform`) | + +### General + +- Update output of generation script for API docs to new structure ([#2988](https://github.com/nf-core/tools/pull/2988)) +- Remove `rich-codex.yml` action, images are now generated on the website repo ([#2989](https://github.com/nf-core/tools/pull/2989)) +- Add no clobber and put bash options on their own line ([#2991](https://github.com/nf-core/tools/pull/2991)) +- move pipeline subcommands for v3.0 ([#2983](https://github.com/nf-core/tools/pull/2983)) +- return directory if base_dir is the root directory ([#3003](https://github.com/nf-core/tools/pull/3003)) +- Remove nf-core licences command ([#3012](https://github.com/nf-core/tools/pull/3012)) +- README - absolute image paths ([#3013](https://github.com/nf-core/tools/pull/3013)) +- Add warning deprecation message to top-level commands ([#3036](https://github.com/nf-core/tools/pull/3036)) +- move pipeline commands to functions to avoid duplication ([#3039](https://github.com/nf-core/tools/pull/3039)) +- update output_dir for api docs to new website structure ([#3051](https://github.com/nf-core/tools/pull/3051)) +- Add `--limit-output` argument for modules/subworkflow update ([#3047](https://github.com/nf-core/tools/pull/3047)) +- update api docs to new structure ([#3054](https://github.com/nf-core/tools/pull/3054)) +- handle new jsonschema error type ([#3061](https://github.com/nf-core/tools/pull/3061)) +- Fix number of arguments for pipelines_create within the command_create function ([#3074](https://github.com/nf-core/tools/pull/3074)) +- Add bot action to update textual snapshots and write bot documentation ([#3102](https://github.com/nf-core/tools/pull/3102)) +- Update gitpod setup ([#3136](https://github.com/nf-core/tools/pull/3136)) +- fix syncing a pipeline from current directory ([#3143](https://github.com/nf-core/tools/pull/3143)) +- Patch gitpod conda setup to not use defaults channel ([#3159](https://github.com/nf-core/tools/pull/3159)) + +### Version updates + +- Update pre-commit hook astral-sh/ruff-pre-commit to v0.6.0 ([#3122](https://github.com/nf-core/tools/pull/3122)) +- Update gitpod/workspace-base Docker digest to 92dd1bc ([#2982](https://github.com/nf-core/tools/pull/2982)) +- Update python:3.12-slim Docker digest to 59c7332 ([#3124](https://github.com/nf-core/tools/pull/3124)) +- Update pre-commit hook pre-commit/mirrors-mypy to v1.11.1 ([#3091](https://github.com/nf-core/tools/pull/3091)) +- Update to pytest v8 and move it to dev dependencies ([#3058](https://github.com/nf-core/tools/pull/3058)) +- Update minimal textual version and snapshots ([#2998](https://github.com/nf-core/tools/pull/2998)) + ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] ### Template - Don't cache pip in `linting.yml` ([#2961](https://github.com/nf-core/tools/pull/2961)) +- Lint pipelines with the nf-core template version and post a comment if it is outdated ([#2978](https://github.com/nf-core/tools/pull/2978)) ### General @@ -75,6 +189,8 @@ - fix(collectfile): sort true for methods_description_mqc.yaml ([#2947](https://github.com/nf-core/tools/pull/2947)) - chore(deps): update pre-commit hook astral-sh/ruff-pre-commit to v0.4.3 
([#2951](https://github.com/nf-core/tools/pull/2951)) - Restructure CHANGELOG.md ([#2954](https://github.com/nf-core/tools/pull/2954)) +- fix: ensure path object converted to string before stripping quotes ([#2878](https://github.com/nf-core/tools/pull/2878)) +- Test data uses paths instead of config map ([#2877](https://github.com/nf-core/tools/pull/2877)) ## [v2.13.1 - Tin Puppy Patch](https://github.com/nf-core/tools/releases/tag/2.13) - [2024-02-29] diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 04d327bd8..f9773296c 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -107,8 +107,8 @@ This ensures that any changes we make to either the linting or the template stay You can replicate this process locally with the following commands: ```bash -nf-core create -n testpipeline -d "This pipeline is for testing" -nf-core lint nf-core-testpipeline +nf-core pipelines create -n testpipeline -d "This pipeline is for testing" +nf-core pipelines lint nf-core-testpipeline ``` ## GitHub Codespaces @@ -125,3 +125,40 @@ To get started: Devcontainer specs: - [DevContainer config](.devcontainer/devcontainer.json) + +## nf-core-bot + +nf-core has a bot which you can use to perform certain actions on a PR. + +- Fix linting: + +If the linting tests are failing on a PR to nf-core/tools, you can post a comment with the magic words: + +``` +@nf-core-bot fix linting +``` + +The bot will try to fix the linting, push to your branch, and react to the comment when it starts running (👀) and if the fix was successful (👍🏻) or not (😕). + +- Update the `CHANGELOG.md`: + +The nf-core-bot runs automatically on every PR, updating the `CHANGELOG.md` if it was not updated. It will add the new change using the title of your PR. +If the action didn't run automatically, or you want to provide a different title, you can post a comment with: + +``` +@nf-core-bot changelog +``` + +Optionally followed by the description that you want to add to the changelog. + +- Update Textual snapshots: + +If the Textual snapshots (run by `tests/test_create_app.py`) fail, an HTML report is generated and uploaded as an artifact. +If you are sure that these changes are correct, you can automatically update the snapshots from the PR by posting a comment with the magic words: + +``` +@nf-core-bot update snapshots +``` + +> [!WARNING] +> Please always check the HTML report to make sure that the changes are expected. diff --git a/Dockerfile b/Dockerfile index ae3a4e1a3..8269e9570 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.12-slim@sha256:2be8daddbb82756f7d1f2c7ece706aadcb284bf6ab6d769ea695cc3ed6016743 +FROM python:3.12-slim@sha256:af4e85f1cac90dd3771e47292ea7c8a9830abfabbe4faa5c53f158854c2e819d LABEL authors="phil.ewels@seqera.io,erik.danielsson@scilifelab.se" \ description="Docker image containing requirements for nf-core/tools" diff --git a/MANIFEST.in b/MANIFEST.in index 5ec177b78..ce2e08c09 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -9,3 +9,5 @@ include nf_core/assets/logo/nf-core-repo-logo-base-lightbg.png include nf_core/assets/logo/nf-core-repo-logo-base-darkbg.png include nf_core/assets/logo/placeholder_logo.svg include nf_core/assets/logo/MavenPro-Bold.ttf +include nf_core/pipelines/create/create.tcss +include nf_core/pipelines/create/template_features.yml diff --git a/README.md b/README.md index 522285408..3597f1ea1 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@

[README header logo: `<img alt="nf-core/tools">` markup, with image paths switched to absolute URLs per "README - absolute image paths" above]

@@ -16,64 +16,17 @@ A python package with helper tools for the nf-core community. -> **Read this documentation on the nf-core website: [https://nf-co.re/tools](https://nf-co.re/tools)** - -## Table of contents - -- [`nf-core` tools installation](#installation) -- [`nf-core` tools update](#update-tools) -- [`nf-core list` - List available pipelines](#listing-pipelines) -- [`nf-core launch` - Run a pipeline with interactive parameter prompts](#launch-a-pipeline) -- [`nf-core create-params-file` - Create a parameter file](#create-a-parameter-file) -- [`nf-core download` - Download a pipeline for offline use](#downloading-pipelines-for-offline-use) -- [`nf-core licences` - List software licences in a pipeline](#pipeline-software-licences) -- [`nf-core create` - Create a new pipeline with the nf-core template](#creating-a-new-pipeline) -- [`nf-core lint` - Check pipeline code against nf-core guidelines](#linting-a-workflow) -- [`nf-core schema` - Work with pipeline schema files](#pipeline-schema) -- [`nf-core bump-version` - Update nf-core pipeline version number](#bumping-a-pipeline-version-number) -- [`nf-core sync` - Synchronise pipeline TEMPLATE branches](#sync-a-pipeline-with-the-template) -- [`nf-core create-logo` - Create an nf-core pipeline logo](#create-an-nf-core-pipeline-logo) -- [`nf-core tui` - Explore the nf-core command line graphically](#tools-cli-tui) -- [`nf-core modules` - commands for dealing with DSL2 modules](#modules) - - - [`modules list` - List available modules](#list-modules) - - [`modules list remote` - List remote modules](#list-remote-modules) - - [`modules list local` - List installed modules](#list-installed-modules) - - [`modules info` - Show information about a module](#show-information-about-a-module) - - [`modules install` - Install modules in a pipeline](#install-modules-in-a-pipeline) - - [`modules update` - Update modules in a pipeline](#update-modules-in-a-pipeline) - - [`modules remove` - Remove a module from a pipeline](#remove-a-module-from-a-pipeline) - - [`modules patch` - Create a patch file for a module](#create-a-patch-file-for-a-module) - - [`modules create` - Create a module from the template](#create-a-new-module) - - [`modules lint` - Check a module against nf-core guidelines](#check-a-module-against-nf-core-guidelines) - - [`modules test` - Run the tests for a module](#run-the-tests-for-a-module-using-pytest) - - [`modules bump-versions` - Bump software versions of modules](#bump-bioconda-and-container-versions-of-modules-in) - -- [`nf-core subworkflows` - commands for dealing with subworkflows](#subworkflows) - - [`subworkflows list` - List available subworkflows](#list-subworkflows) - - [`subworkflows list remote` - List remote subworkflows](#list-remote-subworkflows) - - [`subworkflows list local` - List installed subworkflows](#list-installed-subworkflows) - - [`subworkflows info` - Show information about a subworkflow](#show-information-about-a-subworkflow) - - [`subworkflows install` - Install subworkflows in a pipeline](#install-subworkflows-in-a-pipeline) - - [`subworkflows update` - Update subworkflows in a pipeline](#update-subworkflows-in-a-pipeline) - - [`subworkflows remove` - Remove a subworkflow from a pipeline](#remove-a-subworkflow-from-a-pipeline) - - [`subworkflows create` - Create a subworkflow from the template](#create-a-new-subworkflow) - - [`subworkflows lint` - Check a subworkflow against nf-core guidelines](#check-a-subworkflow-against-nf-core-guidelines) - - [`subworkflows test` - Run the tests for a 
subworkflow](#run-the-tests-for-a-subworkflow-using-pytest) -- [Citation](#citation) - The nf-core tools package is written in Python and can be imported and used within other packages. For documentation of the internal Python functions, please refer to the [Tools Python API docs](https://nf-co.re/tools/docs/). ## Installation -### Bioconda +For full installation instructions, please see the [nf-core documentation](https://nf-co.re/docs/nf-core-tools/installation). +Below is a quick-start for those who know what they're doing: -You can install `nf-core/tools` from [bioconda](https://bioconda.github.io/recipes/nf-core/README.html). +### Bioconda -First, install conda and configure the channels to use bioconda -(see the [bioconda documentation](https://bioconda.github.io/index.html#usage)). -Then, just run the conda installation command: +Install [from Bioconda](https://bioconda.github.io/recipes/nf-core/README.html): ```bash conda install nf-core @@ -88,1194 +41,29 @@ conda activate nf-core ### Python Package Index -`nf-core/tools` can also be installed from [PyPI](https://pypi.python.org/pypi/nf-core/) using pip as follows: +Install [from PyPI](https://pypi.python.org/pypi/nf-core/): ```bash pip install nf-core ``` -### Docker image - -There is a docker image that you can use to run `nf-core/tools` that has all of the requirements packaged (including Nextflow) and so should work out of the box. It is called [`nfcore/tools`](https://hub.docker.com/r/nfcore/tools) _**(NB: no hyphen!)**_ - -You can use this container on the command line as follows: - -```bash -docker run -itv `pwd`:`pwd` -w `pwd` -u $(id -u):$(id -g) nfcore/tools -``` - -- `-i` and `-t` are needed for the interactive cli prompts to work (this tells Docker to use a pseudo-tty with stdin attached) -- The `-v` argument tells Docker to bind your current working directory (`pwd`) to the same path inside the container, so that files created there will be saved to your local file system outside of the container. -- `-w` sets the working directory in the container to this path, so that it's the same as your working directory outside of the container. -- `-u` sets your local user account as the user inside the container, so that any files created have the correct ownership permissions - -After the above base command, you can use the regular command line flags that you would use with other types of installation. -For example, to launch the `viralrecon` pipeline: - -```bash -docker run -itv `pwd`:`pwd` -w `pwd` -u $(id -u):$(id -g) nfcore/tools launch viralrecon -r 1.1.0 -``` - -If you use `$NXF_SINGULARITY_CACHEDIR` for downloads, you'll also need to make this folder and environment variable available to the continer: - -```bash -docker run -itv `pwd`:`pwd` -w `pwd` -u $(id -u):$(id -g) -v $NXF_SINGULARITY_CACHEDIR:$NXF_SINGULARITY_CACHEDIR -e NXF_SINGULARITY_CACHEDIR nfcore/tools launch viralrecon -r 1.1.0 -``` - -#### Docker bash alias - -The above base command is a bit of a mouthful to type, to say the least. -To make it easier to use, we highly recommend adding the following bash alias to your `~/.bashrc` file: - -```bash -alias nf-core="docker run -itv `pwd`:`pwd` -w `pwd` -u $(id -u):$(id -g) nfcore/tools" -``` - -Once applied (you may need to reload your shell) you can just use the `nf-core` command instead: - -```bash -nf-core list -``` - -#### Docker versions - -You can use docker image tags to specify the version you would like to use. 
For example, `nfcore/tools:dev` for the latest development version of the code, or `nfcore/tools:1.14` for version `1.14` of tools. -If you omit this, it will default to `:latest`, which should be the latest stable release. - -If you need a specific version of Nextflow inside the container, you can build an image yourself. -Clone the repo locally and check out whatever version of nf-core/tools that you need. -Then build using the `--build-arg NXF_VER` flag as follows: - -```bash -docker build -t nfcore/tools:dev . --build-arg NXF_VER=20.04.0 -``` - ### Development version -If you would like the latest development version of tools, the command is: - ```bash pip install --upgrade --force-reinstall git+https://github.com/nf-core/tools.git@dev ``` -If you intend to make edits to the code, first make a fork of the repository and then clone it locally. -Go to the cloned directory and install with pip (also installs development requirements): +If editing, fork and clone the repo, then install as follows: ```bash pip install --upgrade -r requirements-dev.txt -e . ``` -### Using a specific Python interpreter - -If you prefer, you can also run tools with a specific Python interpreter. -The command line usage and flags are then exactly the same as if you ran with the `nf-core` command. -Note that the module is `nf_core` with an underscore, not a hyphen like the console command. - -For example: - -```bash -python -m nf_core --help -python3 -m nf_core list -~/my_env/bin/python -m nf_core create --name mypipeline --description "This is a new skeleton pipeline" -``` - -### Using with your own Python scripts - -The tools functionality is written in such a way that you can import it into your own scripts. -For example, if you would like to get a list of all available nf-core pipelines: - -```python -import nf_core.list -wfs = nf_core.list.Workflows() -wfs.get_remote_workflows() -for wf in wfs.remote_workflows: - print(wf.full_name) -``` - -Please see [https://nf-co.re/tools/docs/](https://nf-co.re/tools/docs/) for the function documentation. - -### Automatic version check - -nf-core/tools automatically checks the web to see if there is a new version of nf-core/tools available. -If you would prefer to skip this check, set the environment variable `NFCORE_NO_VERSION_CHECK`. For example: - -```bash -export NFCORE_NO_VERSION_CHECK=1 -``` - -### Update tools - -It is advisable to keep nf-core/tools updated to the most recent version. The command to update depends on the system used to install it, for example if you have installed it with conda you can use: - -```bash -conda update nf-core -``` - -if you used pip: - -```bash -pip install --upgrade nf-core -``` - -Please refer to the respective documentation for further details to manage packages, as for example [conda](https://docs.conda.io/projects/conda/en/latest/user-guide/tasks/manage-pkgs.html#updating-packages) or [pip](https://packaging.python.org/en/latest/tutorials/installing-packages/#upgrading-packages). - -### Activate shell completions for nf-core/tools - -Auto-completion for the `nf-core` command is available for bash, zsh and fish. To activate it, add the following lines to the respective shell config files. 
- -| shell | shell config file | command | -| ----- | ----------------------------------------- | -------------------------------------------------- | -| bash | `~/.bashrc` | `eval "$(_NF_CORE_COMPLETE=bash_source nf-core)"` | -| zsh | `~/.zshrc` | `eval "$(_NF_CORE_COMPLETE=zsh_source nf-core)"` | -| fish | `~/.config/fish/completions/nf-core.fish` | `eval (env _NF_CORE_COMPLETE=fish_source nf-core)` | - -After a restart of the shell session you should have auto-completion for the `nf-core` command and all its sub-commands and options. - -> [!NOTE] -> The added line will run the command `nf-core` (which will also slow down startup time of your shell). You should therefore either have the nf-core/tools installed globally. -> You can also wrap it inside `if type nf-core > /dev/null; then ` \ `fi` for bash and zsh or `if command -v nf-core &> /dev/null eval (env _NF_CORE_COMPLETE=fish_source nf-core) end` for fish. You need to then source the config in your environment for the completions to be activated. - -> [!TIP] -> If you see the error `command not found compdef` , be sure that your config file contains the line `autoload -Uz compinit && compinit` before the eval line. - -## Listing pipelines - -The command `nf-core list` shows all available nf-core pipelines along with their latest version, when that was published and how recently the pipeline code was pulled to your local system (if at all). - -An example of the output from the command is as follows: - - - -![`nf-core list`](docs/images/nf-core-list.svg) - -To narrow down the list, supply one or more additional keywords to filter the pipelines based on matches in titles, descriptions and topics: - -![`nf-core list rna rna-seq`](docs/images/nf-core-list-rna.svg) - -You can sort the results by latest release (`-s release`, default), -when you last pulled a local copy (`-s pulled`), -alphabetically (`-s name`), -or number of GitHub stars (`-s stars`). - - - -![`nf-core list -s stars`](docs/images/nf-core-list-stars.svg) - -To return results as JSON output for downstream use, use the `--json` flag. - -Archived pipelines are not returned by default. To include them, use the `--show_archived` flag. - -## Launch a pipeline - -Some nextflow pipelines have a considerable number of command line flags that can be used. -To help with this, you can use the `nf-core launch` command. -You can choose between a web-based graphical interface or an interactive command-line wizard tool to enter the pipeline parameters for your run. -Both interfaces show documentation alongside each parameter and validate your inputs. - -The tool uses the `nextflow_schema.json` file from a pipeline to give parameter descriptions, defaults and grouping. -If no file for the pipeline is found, one will be automatically generated at runtime. - -Nextflow `params` variables are saved in to a JSON file called `nf-params.json` and used by nextflow with the `-params-file` flag. -This makes it easier to reuse these in the future. - -The command takes one argument - either the name of an nf-core pipeline which will be pulled automatically, -or the path to a directory containing a Nextflow pipeline _(can be any pipeline, doesn't have to be nf-core)_. - - - -![`nf-core launch rnaseq -r 3.8.1`](docs/images/nf-core-launch-rnaseq.svg) - -Once complete, the wizard will ask you if you want to launch the Nextflow run. -If not, you can copy and paste the Nextflow command with the `nf-params.json` file of your inputs. 
- -```console -INFO [✓] Input parameters look valid -INFO Nextflow command: - nextflow run nf-core/rnaseq -params-file "nf-params.json" - - -Do you want to run this command now? [y/n]: -``` - -### Launch tool options - -- `-r`, `--revision` - - Specify a pipeline release (or branch / git commit sha) of the project to run -- `-i`, `--id` - - You can use the web GUI for nf-core pipelines by clicking _"Launch"_ on the website. Once filled in you will be given an ID to use with this command which is used to retrieve your inputs. -- `-c`, `--command-only` - - If you prefer not to save your inputs in a JSON file and use `-params-file`, this option will specify all entered params directly in the nextflow command. -- `-p`, `--params-in PATH` - - To use values entered in a previous pipeline run, you can supply the `nf-params.json` file previously generated. - - This will overwrite the pipeline schema defaults before the wizard is launched. -- `-o`, `--params-out PATH` - - Path to save parameters JSON file to. (Default: `nf-params.json`) -- `-a`, `--save-all` - - Without this option the pipeline will ignore any values that match the pipeline schema defaults. - - This option saves _all_ parameters found to the JSON file. -- `-h`, `--show-hidden` - - A pipeline JSON schema can define some parameters as 'hidden' if they are rarely used or for internal pipeline use only. - - This option forces the wizard to show all parameters, including those labelled as 'hidden'. -- `--url` - - Change the URL used for the graphical interface, useful for development work on the website. - -## Create a parameter file - -Sometimes it is easier to manually edit a parameter file than to use the web interface or interactive commandline wizard -provided by `nf-core launch`, for example when running a pipeline with many options on a remote server without a graphical interface. - -You can create a parameter file with all parameters of a pipeline with the `nf-core create-params-file` command. -This file can then be passed to `nextflow` with the `-params-file` flag. - -This command takes one argument - either the name of a nf-core pipeline which will be pulled automatically, -or the path to a directory containing a Nextflow pipeline _(can be any pipeline, doesn't have to be nf-core)_. - -The generated YAML file contains all parameters set to the pipeline default value along with their description in comments. -This template can then be used by uncommenting and modifying the value of parameters you want to pass to a pipline run. - -Hidden options are not included by default, but can be included using the `-x`/`--show-hidden` flag. - -## Downloading pipelines for offline use - -Sometimes you may need to run an nf-core pipeline on a server or HPC system that has no internet connection. -In this case you will need to fetch the pipeline files first, then manually transfer them to your system. - -To make this process easier and ensure accurate retrieval of correctly versioned code and software containers, we have written a download helper tool. - -The `nf-core download` command will download both the pipeline code and the [institutional nf-core/configs](https://github.com/nf-core/configs) files. It can also optionally download any singularity image files that are required. - -If run without any arguments, the download tool will interactively prompt you for the required information. -Each option has a flag, if all are supplied then it will run without any user input needed. 
- - - -![`nf-core download rnaseq -r 3.8 --outdir nf-core-rnaseq -x none -s none -d`](docs/images/nf-core-download.svg) - -Once downloaded, you will see something like the following file structure for the downloaded pipeline: - - - -![`tree -L 2 nf-core-rnaseq/`](docs/images/nf-core-download-tree.svg) - -You can run the pipeline by simply providing the directory path for the `workflow` folder to your `nextflow run` command: - -```bash -nextflow run /path/to/download/nf-core-rnaseq-dev/workflow/ --input mydata.csv --outdir results # usual parameters here -``` - -> [!NOTE] -> If you downloaded Singularity container images, you will need to use `-profile singularity` or have it enabled in your config file. - -### Downloaded nf-core configs - -The pipeline files are automatically updated (`params.custom_config_base` is set to `../configs`), so that the local copy of institutional configs are available when running the pipeline. -So using `-profile ` should work if available within [nf-core/configs](https://github.com/nf-core/configs). - -> [!WARNING] -> This option is not available when downloading a pipeline for use with [Seqera Platform](#adapting-downloads-to-seqera-platform) because the application manages all configurations separately. - -### Downloading Apptainer containers - -If you're using [Singularity](https://apptainer.org) (Apptainer), the `nf-core download` command can also fetch the required container images for you. -To do this, select `singularity` in the prompt or specify `--container-system singularity` in the command. -Your archive / target output directory will then also include a separate folder `singularity-containers`. - -The downloaded workflow files are again edited to add the following line to the end of the pipeline's `nextflow.config` file: - -```nextflow -singularity.cacheDir = "${projectDir}/../singularity-images/" -``` - -This tells Nextflow to use the `singularity-containers` directory relative to the workflow for the singularity image cache directory. -All images should be downloaded there, so Nextflow will use them instead of trying to pull from the internet. - -#### Singularity cache directory - -We highly recommend setting the `$NXF_SINGULARITY_CACHEDIR` environment variable on your system, even if that is a different system to where you will be running Nextflow. - -If found, the tool will fetch the Singularity images to this directory first before copying to the target output archive / directory. -Any images previously fetched will be found there and copied directly - this includes images that may be shared with other pipelines or previous pipeline version downloads or download attempts. - -If you are running the download on the same system where you will be running the pipeline (eg. a shared filesystem where Nextflow won't have an internet connection at a later date), you can choose to _only_ use the cache via a prompt or cli options `--container-cache-utilisation amend`. This instructs `nf-core download` to fetch all Singularity images to the `$NXF_SINGULARITY_CACHEDIR` directory but does _not_ copy them to the workflow archive / directory. The workflow config file is _not_ edited. This means that when you later run the workflow, Nextflow will just use the cache folder directly. - -If you are downloading a workflow for a different system, you can provide information about the contents of its image cache to `nf-core download`. 
To avoid unnecessary container image downloads, choose `--container-cache-utilisation remote` and provide a list of already available images as a plain text file to `--container-cache-index my_list_of_remotely_available_images.txt`. To generate this list on the remote system, run `find $NXF_SINGULARITY_CACHEDIR -name "*.img" > my_list_of_remotely_available_images.txt`. The tool will then only download and copy into your output directory those images that are missing on the remote system.
-
-#### How the Singularity image downloads work
-
-The Singularity image download finds containers using two methods:
-
-1. It runs `nextflow config` on the downloaded workflow to look for a `process.container` statement for the whole pipeline.
-   This is the typical method used for DSL1 pipelines.
-2. It scrapes any files it finds with a `.nf` file extension in the workflow `modules` directory for lines
-   that look like `container = "xxx"`. This is the typical method for DSL2 pipelines, which have one container per process.
-
-Some DSL2 modules have container addresses for docker (eg. `biocontainers/fastqc:0.11.9--0`) and also URLs for direct downloads of a Singularity container (eg. `https://depot.galaxyproject.org/singularity/fastqc:0.11.9--0`).
-Where both are found, the download URL is preferred.
-
-Once a full list of containers is found, they are processed in the following order:
-
-1. If the target image already exists, nothing is done (eg. with `$NXF_SINGULARITY_CACHEDIR` and `--container-cache-utilisation amend` specified)
-2. If found in `$NXF_SINGULARITY_CACHEDIR` and `--container-cache-utilisation copy` is specified, they are copied to the output directory
-3. If they start with `http`, they are downloaded directly within Python (4 at a time by default; you can customise this with `--parallel-downloads`)
-4. If they look like a Docker image name, they are fetched using a `singularity pull` command. Choose the container libraries (registries) to query by providing one or multiple `--container-library` parameter(s). For example, if you call `nf-core download` with `-l quay.io -l ghcr.io -l docker.io`, every image will first be pulled from `quay.io`; `ghcr.io` and then `docker.io` will subsequently be queried for any image that failed before.
-   - This requires Singularity/Apptainer to be installed on the system and is substantially slower.
-
-Note that compressing many GBs of binary files can be slow, so specifying `--compress none` is recommended when downloading Singularity images that are copied to the output directory.
-
-If the download speeds are much slower than your internet connection is capable of, you can set `--parallel-downloads` to a larger number to download more images at once.
-
-### Adapting downloads to Seqera Platform
-
-[Seqera Platform](https://seqera.io/platform/) (formerly _"Nextflow Tower"_) provides a graphical user interface to oversee pipeline runs, gather statistics and configure compute resources. While pipelines added to _Seqera Platform_ are preferably hosted at a Git service, providing them as disconnected, self-reliant repositories is also possible for premises with restricted network access. Choosing the `--platform` flag will download the pipeline in an appropriate form.
-
-Subsequently, the `*.git` folder can be moved to its final destination and linked with a pipeline in _Seqera Platform_ using the `file:/` prefix.
-
-> [!TIP]
-> Even without access to Seqera Platform, pipelines downloaded with the `--platform` flag can be run if the _absolute_ path is specified: `nextflow run -r 2.5 file:/path/to/pipelinedownload.git`. Downloads in this format allow you to include multiple revisions of a pipeline in a single file, but require that the revision (e.g. `-r 2.5`) is always explicitly specified.
-
-Facilities and those who are setting up pipelines for others to use may find the `--tag` argument helpful. It allows customizing the downloaded pipeline with additional tags that can be used to select particular revisions in the Seqera Platform interface. For example, an accredited facility may opt to tag particular revisions according to their structured release management process: `--tag "3.12.0=testing" --tag "3.9.0=validated"` so their staff can easily ensure that the correct version of the pipeline is run in production.
-The `--tag` argument must be followed by a string in a `key=value` format and can be provided multiple times. The `key` must refer to a valid branch, tag or commit SHA. The right-hand side must comply with the naming conventions for Git tags and may not yet exist in the repository.
-
-## Pipeline software licences
-
-Sometimes it's useful to see the software licences of the tools used in a pipeline.
-You can use the `licences` subcommand to fetch and print the software licence from each conda / PyPI package used in an nf-core pipeline.
-
-> [!WARNING]
-> This command does not currently work for newer DSL2 pipelines. This will hopefully be addressed [soon](https://github.com/nf-core/tools/issues/1155).
-
-
-![`nf-core licences deepvariant`](docs/images/nf-core-licences.svg)
-
-## Creating a new pipeline
-
-The `create` subcommand makes a new pipeline using the nf-core base template.
-With a given pipeline name, description and author, it generates a starter pipeline which follows nf-core best practices.
-
-After creating the files, the command initialises the folder as a git repository and makes an initial commit.
-This first "vanilla" commit, which is identical to the output from the templating tool, is important, as it allows us to keep your pipeline in sync with the base template in the future.
-See the [nf-core syncing docs](https://nf-co.re/developers/sync) for more information.
-
-
-![` nf-core create -n nextbigthing -d "This pipeline analyses data from the next big omics technique" -a "Big Steve" --plain`](docs/images/nf-core-create.svg)
-
-Once you have run the command, create a new empty repository on GitHub under your username (not the `nf-core` organisation, yet) and push the commits from your computer using the example commands in the above log.
-You can then continue to edit, commit and push normally as you build your pipeline.
-
-Please see the [nf-core documentation](https://nf-co.re/developers/adding_pipelines) for a full walkthrough of how to create a new nf-core workflow.
-
-> [!TIP]
-> As the log output says, remember to come and discuss your idea for a pipeline as early as possible!
-> See the [documentation](https://nf-co.re/developers/adding_pipelines#join-the-community) for instructions.
-
-Note that if the required arguments for `nf-core create` are not given, it will interactively prompt for them. If you prefer, you can supply them as command line arguments. See `nf-core create --help` for more information.
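-
-For reference, pushing the freshly created pipeline to your own GitHub account (as described above) might look like the following sketch. The repository name and `<username>` placeholder are illustrative; the exact commands to use are printed in the `nf-core create` log:
-
-```bash
-cd nf-core-nextbigthing
-# after creating an empty repository on github.com under your username:
-git remote add origin git@github.com:<username>/nf-core-nextbigthing.git
-git push --all origin
-```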
-
-### Customizing the creation of a pipeline
-
-The `nf-core create` command comes with a number of options that allow you to customize the creation of a pipeline if you intend to not publish it as an
-nf-core pipeline. This can be done in two ways: by using interactive prompts, or by supplying a `template.yml` file using the `--template-yaml ` option.
-Both options allow you to specify a custom pipeline prefix to use instead of the common `nf-core`, as well as selecting parts of the template to be excluded during pipeline creation.
-The interactive prompts will guide you through the pipeline creation process. An example of a `template.yml` file is shown below.
-
-```yaml
-name: coolpipe
-description: A cool pipeline
-author: me
-prefix: myorg
-skip:
-  - github
-  - ci
-  - github_badges
-  - igenomes
-  - nf_core_configs
-```
-
-This will create a pipeline called `coolpipe` in the directory `myorg-coolpipe` (`-`) with `me` as the author. It will exclude all possible parts of the template:
-
-- `github`: removes all files required for GitHub hosting of the pipeline. Specifically, the `.github` folder and `.gitignore` file.
-- `ci`: removes the GitHub continuous integration tests from the pipeline. Specifically, the `.github/workflows/` folder.
-- `github_badges`: removes GitHub badges from the `README.md` file.
-- `igenomes`: removes pipeline options related to iGenomes, including the `conf/igenomes.config` file and all references to it.
-- `nf_core_configs`: excludes `nf_core/configs` repository options, which make multiple config profiles for various institutional clusters available.
-
-To run the pipeline creation silently (i.e. without any prompts) with the nf-core template, you can use the `--plain` option.
-
-## Linting a workflow
-
-The `lint` subcommand checks a given pipeline against all nf-core community guidelines.
-This is the same test that is used on the automated continuous integration tests.
-
-For example, the current version looks something like this:
-
-
-![`nf-core lint`](docs/images/nf-core-lint.svg)
-
-You can use the `-k` / `--key` flag to run only named tests for faster debugging, eg: `nf-core lint -k files_exist -k files_unchanged`. The `nf-core lint` command lints the current working directory by default; to specify another directory you can use `--dir `.
-
-### Linting documentation
-
-Each test result name on the left is a terminal hyperlink.
-In most terminals you can ctrl + click (cmd + click on macOS) these
-links to open documentation specific to this test in your browser.
-
-Alternatively, visit the lint test documentation on the nf-core website and find your test to read more.
-
-### Linting config
-
-It's sometimes desirable to disable certain lint tests, especially if you're using nf-core/tools with your
-own pipeline that is outside of nf-core.
-
-To help with this, you can add a tools config file to your pipeline called `.nf-core.yml` in the pipeline root directory (previously: `.nf-core-lint.yml`).
-Here you can list the names of any tests that you would like to disable and set them to `False`, for example:
-
-```yaml
-lint:
-  actions_awsfulltest: False
-  pipeline_todos: False
-```
-
-Some lint tests allow greater granularity, for example skipping a test only for a specific file.
-This is documented in the test-specific docs but generally involves passing a list, for example:
-
-```yaml
-lint:
-  files_exist:
-    - CODE_OF_CONDUCT.md
-  files_unchanged:
-    - assets/email_template.html
-    - CODE_OF_CONDUCT.md
-```
-
-Note that you have to list all configurations for the `nf-core lint` command under the `lint:` field in the `.nf-core.yml` file, as this file is also used for configuration of other commands.
-
-### Automatically fix errors
-
-Some lint tests can try to automatically fix any issues they find. To enable this functionality, use the `--fix` flag.
-The pipeline must be a `git` repository with no uncommitted changes for this to work.
-This is so that any automated changes can then be reviewed and undone (`git checkout .`) if you disagree.
-
-### Lint results output
-
-The output from `nf-core lint` is designed to be viewed on the command line and is deliberately succinct.
-You can view all passed tests with `--show-passed` or generate JSON / markdown results with the `--json` and `--markdown` flags.
-
-## Pipeline schema
-
-nf-core pipelines have a `nextflow_schema.json` file in their root which describes the different parameters used by the workflow.
-These files allow automated validation of inputs when running the pipeline, are used to generate command-line help and can be used to build interfaces to launch pipelines.
-Pipeline schema files are built according to the [JSONSchema specification](https://json-schema.org/) (Draft 7).
-
-To help developers working with pipeline schema, nf-core tools has four `schema` sub-commands:
-
-- `nf-core schema validate`
-- `nf-core schema build`
-- `nf-core schema docs`
-- `nf-core schema lint`
-
-### Validate pipeline parameters
-
-Nextflow can take input parameters in a JSON or YAML file when running a pipeline using the `-params-file` option.
-This command validates such a file against the pipeline schema.
-
-Usage is `nf-core schema validate `. For example, with the pipeline downloaded [above](#download-pipeline), you can run:
-
-
-![`nf-core schema validate nf-core-rnaseq/3_8 nf-params.json`](docs/images/nf-core-schema-validate.svg)
-
-The `pipeline` option can be a directory containing a pipeline, a path to a schema file or the name of an nf-core pipeline (which will be downloaded using `nextflow pull`).
-
-### Build a pipeline schema
-
-Manually building JSONSchema documents is not trivial and can be very error-prone.
-Instead, the `nf-core schema build` command collects your pipeline parameters and gives interactive prompts about any missing or unexpected params.
-If no existing schema is found, it will create one for you.
-
-Once built, the tool can send the schema to the nf-core website so that you can use a graphical interface to organise and fill in the schema.
-The tool checks the status of your schema on the website and, once complete, saves your changes locally.
-
-Usage is `nf-core schema build -d `, eg:
-
-
-![`nf-core schema build --no-prompts`](docs/images/nf-core-schema-build.svg)
-
-There are four flags that you can use with this command:
-
-- `--dir `: Specify a pipeline directory other than the current working directory
-- `--no-prompts`: Make changes without prompting for confirmation each time. Does not launch web tool.
-- `--web-only`: Skips comparison of the schema against the pipeline parameters and only launches the web tool.
-- `--url `: Supply a custom URL for the online tool. Useful when testing locally.
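-
-For example, a CI script might rebuild and then check the schema entirely non-interactively. The pipeline path and parameter file name below are illustrative:
-
-```bash
-# update the schema from the pipeline parameters, without launching the web builder
-nf-core schema build --dir path/to/pipeline --no-prompts
-
-# validate a parameter file against the pipeline schema
-nf-core schema validate path/to/pipeline nf-params.json
-```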
-
-### Display the documentation for a pipeline schema
-
-To get an overview of the current pipeline schema, you can display the content of the `nextflow_schema.json` with `nf-core schema docs `. This will print the content of your schema in Markdown format to the standard output.
-
-There are four flags that you can use with this command:
-
-- `--output `: Output filename. Defaults to standard out.
-- `--format [markdown|html]`: Format to output docs in.
-- `--force`: Overwrite existing files.
-- `--columns `: CSV list of columns to include in the parameter tables.
-
-### Add new parameters to the pipeline schema
-
-If you want to add a parameter to the schema, you first have to add the parameter and its default value to the `nextflow.config` file with the `params` scope. Afterwards, you run the command `nf-core schema build` to add the parameters to your schema and open the graphical interface to easily modify the schema.
-
-The graphical interface is organised in groups, within which the single parameters are stored. For a better overview you can collapse all groups with the `Collapse groups` button; your new parameters will then be the only remaining ones at the bottom of the page. Now you can either create a new group with the `Add group` button, or drag and drop the parameters into an existing group. To do so, the group has to be expanded. The group title will be displayed if you run your pipeline with the `--help` flag, and its description appears on the parameter page of your pipeline.
-
-Now you can start to change the parameter itself. The `ID` of a new parameter should be defined in lowercase letters without whitespace. The description is a short free-text explanation of the parameter, which appears if you run your pipeline with the `--help` flag. By clicking on the dictionary icon you can add a longer explanation for the parameter page of your pipeline. Usually, these contain a small paragraph about the parameter settings or a data source used, such as databases or references. If you want to specify some conditions for your parameter, like the file extension, you can use the nut icon to open the settings. This menu depends on the `type` you assigned to your parameter. For integers you can define a min and max value, and for strings the file extension can be specified.
-
-The `type` field is one of the most important points in your pipeline schema, since it defines the datatype of your input and how it will be interpreted. This allows extensive testing prior to starting the pipeline.
-
-The basic datatypes for a pipeline schema are:
-
-- `string`
-- `number`
-- `integer`
-- `boolean`
-
-For the `string` type you have three different options in the settings (nut icon): `enumerated values`, `pattern` and `format`. The first option, `enumerated values`, allows you to specify a list of specific input values. The list items have to be separated by a pipe (`|`). The `pattern` and `format` settings can depend on each other. The `format` has to be either a directory or a file path. Depending on the `format` setting selected, specifying the `pattern` setting can be the most efficient and time-saving option, especially for `file paths`. The `number` and `integer` types share the same settings. Similarly to `string`, there is an `enumerated values` option, along with the possibility of specifying a `min` and `max` value. For the `boolean` type there are no further settings, and the default value is usually `false`. The `boolean` value can be switched to `true` by adding the flag to the command.
This parameter type is often used to skip specific sections of a pipeline.
-
-After filling in the schema, click on the `Finished` button in the top right corner; this will automatically update your `nextflow_schema.json`. If this is not working, the schema can be copied from the graphical interface and pasted into your `nextflow_schema.json` file.
-
-### Update existing pipeline schema
-
-It's important to change the default value of a parameter in the `nextflow.config` file first and then in the pipeline schema, because the value in the config file overwrites the value in the pipeline schema. To change any other parameter, use `nf-core schema build --web-only` to open the graphical interface without rebuilding the pipeline schema. Now the parameters can be changed as mentioned above, but keep in mind that changing the parameter datatype depends on the default value specified in the `nextflow.config` file.
-
-### Linting a pipeline schema
-
-The pipeline schema is linted as part of the main pipeline `nf-core lint` command;
-however, sometimes it can be useful to quickly check the syntax of the JSONSchema without running a full lint run.
-
-Usage is `nf-core schema lint ` (defaulting to `nextflow_schema.json`), eg:
-
-
-![`nf-core schema lint`](docs/images/nf-core-schema-lint.svg)
-
-## Bumping a pipeline version number
-
-When releasing a new version of an nf-core pipeline, version numbers have to be updated in several different places. The helper command `nf-core bump-version` automates this for you to avoid manual errors (and frustration!).
-
-The command uses results from the linting process, so will only work with workflows that pass these tests.
-
-Usage is `nf-core bump-version `, eg:
-
-
-![`nf-core bump-version 1.1`](docs/images/nf-core-bump-version.svg)
-
-You can change the directory from the current working directory by specifying `--dir `. To change the required version of Nextflow instead of the pipeline version number, use the flag `--nextflow`.
-
-## Sync a pipeline with the template
-
-Over time, the main nf-core pipeline template is updated. To keep all nf-core pipelines up to date,
-we synchronise these updates automatically when new versions of nf-core/tools are released.
-This is done by maintaining a special `TEMPLATE` branch, containing a vanilla copy of the nf-core template
-with only the variables used when it first ran (name, description etc.). This branch is updated and a
-pull request can be made with just the updates from the main template code.
-
-Note that pipeline synchronisation happens automatically each time nf-core/tools is released, creating an automated pull request on each pipeline.
-**As such, you do not normally need to run this command yourself!**
-
-This command takes a pipeline directory and attempts to run this synchronisation.
-Usage is `nf-core sync`, eg:
-
-
-![`nf-core sync`](docs/images/nf-core-sync.svg)
-
-The sync command tries to check out the `TEMPLATE` branch from the `origin` remote or an existing local branch called `TEMPLATE`.
-It will fail if it cannot do either of these things.
-The `nf-core create` command should make this template automatically when you first start your pipeline.
-Please see the [nf-core website sync documentation](https://nf-co.re/developers/sync) if you have difficulties.
-
-To specify a directory to sync other than the current working directory, use the `--dir ` flag.
-
-By default, the tool will collect workflow variables from the current branch in your pipeline directory.
-You can supply the `--from-branch` flag to specify a different branch.
-
-Finally, if you give the `--pull-request` flag, the command will push any changes to the remote and attempt to create a pull request using the GitHub API.
-The GitHub username and repository name will be fetched from the remote url (see `git remote -v | grep origin`), or can be supplied with `--username` and `--github-repository`.
-
-To create the pull request, a personal access token is required for API authentication.
-These can be created at [https://github.com/settings/tokens](https://github.com/settings/tokens).
-Supply this using the `--auth-token` flag.
-
-## Create an nf-core pipeline logo
-
-The `nf-core create-logo` command creates a logo for your pipeline based on the nf-core template and the pipeline name. You can specify the width of the logo in pixels with the `--width` flag. Additionally, you can specify the output format to be either `png` or `svg` with the `--format` flag. The default format is `png`.
-
-Usage is `nf-core create-logo `, eg:
-
-
-![`nf-core create-logo nextbigthing`](docs/images/nf-core-create-logo.svg)
-
-## Tools CLI TUI
-
-_CLI:_ Command line interface
-_TUI:_ Terminal user interface
-
-The `nf-core` command line interface is fairly large, with a lot of commands and options.
-To make it easier to explore and use, run `nf-core tui` to launch a graphical terminal interface.
-
-This functionality works using [Textualize/trogon](https://github.com/Textualize/trogon)
-and is based on the underlying CLI implementation that uses [Click](https://click.palletsprojects.com/).
-
-## Modules
-
-With the advent of [Nextflow DSL2](https://www.nextflow.io/docs/latest/dsl2.html), we are creating a centralised repository of modules.
-These are software tool process definitions that can be imported into any pipeline.
-This allows multiple pipelines to use the same code for shared tools and gives a greater degree of granularity and unit testing.
-
-The nf-core DSL2 modules repository is at <https://github.com/nf-core/modules>
-
-### Custom remote modules
-
-The modules supercommand comes with two flags for specifying a custom remote:
-
-- `--git-remote `: Specify the repository from which the modules should be fetched as a git URL. Defaults to the github repository of `nf-core/modules`.
-- `--branch `: Specify the branch from which the modules should be fetched. Defaults to the default branch of your repository.
-
-For example, if you want to install the `fastqc` module from the repository `nf-core/modules-test` hosted at `gitlab.com`, you can use the following command:
-
-```terminal
-nf-core modules --git-remote git@gitlab.com:nf-core/modules-test.git install fastqc
-```
-
-Note that a custom remote must follow a similar directory structure to that of `nf-core/modules` for the `nf-core modules` commands to work properly.
-
-The directory where modules are installed will be prompted for, or obtained from `org_path` in the `.nf-core.yml` file if available. If your modules are located at `modules/my-folder/TOOL/SUBTOOL`, your `.nf-core.yml` should have:
-
-```yaml
-org_path: my-folder
-```
-
-Please avoid installing the same tools from two different remotes, as this can lead to further errors.
-
-The modules commands will, during initialisation, try to pull changes from the remote repositories. If you want to disable this, for example
-for performance reasons or if you want to run the commands offline, you can use the flag `--no-pull`. Note however that the commands will
-still need to clone repositories that have previously not been used.
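-
-Putting these options together, a quick offline-friendly session against a custom remote could look like this sketch. The remote URL and module name are illustrative, and the `--no-pull` placement mirrors that of `--git-remote` above:
-
-```bash
-# list what the custom remote offers, then install from it, skipping the initial remote pull
-nf-core modules --git-remote git@gitlab.com:nf-core/modules-test.git --no-pull list remote
-nf-core modules --git-remote git@gitlab.com:nf-core/modules-test.git --no-pull install fastqc
-```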
-
-### Private remote repositories
-
-You can use the modules command with private remote repositories. Make sure that your local `git` is correctly configured with your private remote
-and then specify the remote the same way you would do with a public remote repository.
-
-### List modules
-
-The `nf-core modules list` command provides the subcommands `remote` and `local` for listing modules installed in a remote repository and in the local pipeline respectively. Both subcommands allow you to use a pattern for filtering the modules by keywords, eg: `nf-core modules list `.
-
-#### List remote modules
-
-To list all modules available on [nf-core/modules](https://github.com/nf-core/modules), you can use
-`nf-core modules list remote`, which will print all available modules to the terminal.
-
-
-![`nf-core modules list remote`](docs/images/nf-core-modules-list-remote.svg)
-
-#### List installed modules
-
-To list modules installed in a local pipeline directory you can use `nf-core modules list local`. This will list the modules installed in the current working directory by default. If you want to specify another directory, use the `--dir ` flag.
-
-
-![`nf-core modules list local`](docs/images/nf-core-modules-list-local.svg)
-
-## Show information about a module
-
-For quick help about how a module works, use `nf-core modules info `.
-This shows documentation about the module on the command line, similar to what's available on the
-[nf-core website](https://nf-co.re/modules).
-
-
-![`nf-core modules info abacas`](docs/images/nf-core-modules-info.svg)
-
-### Install modules in a pipeline
-
-You can install modules from [nf-core/modules](https://github.com/nf-core/modules) in your pipeline using `nf-core modules install`.
-A module installed this way will be installed to the `./modules/nf-core/modules` directory.
-
-
-![`nf-core modules install abacas`](docs/images/nf-core-modules-install.svg)
-
-You can pass the module name as an optional argument to `nf-core modules install` instead of using the cli prompt, eg: `nf-core modules install fastqc`. You can specify a pipeline directory other than the current working directory by using the `--dir ` flag.
-
-There are three additional flags that you can use when installing a module:
-
-- `--force`: Overwrite a previously installed version of the module.
-- `--prompt`: Select the module version using a cli prompt.
-- `--sha `: Install the module at a specific commit.
-
-### Update modules in a pipeline
-
-You can update modules installed from a remote repository in your pipeline using `nf-core modules update`.
-
-
-![`nf-core modules update --all --no-preview`](docs/images/nf-core-modules-update.svg)
-
-You can pass the module name as an optional argument to `nf-core modules update` instead of using the cli prompt, eg: `nf-core modules update fastqc`. You can specify a pipeline directory other than the current working directory by using the `--dir ` flag.
-
-There are six additional flags that you can use with this command:
-
-- `--force`: Reinstall module even if it appears to be up to date.
-- `--prompt`: Select the module version using a cli prompt.
-- `--sha `: Install the module at a specific commit from the `nf-core/modules` repository.
-- `--preview/--no-preview`: Show the diff between the installed files and the new version before installing.
-- `--save-diff `: Save diffs to a file instead of updating in place. The diffs can then be applied with `git apply ` (see the example below).
-- `--all`: Use this flag to run the command on all modules in the pipeline.
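-
-For instance, to review an update before applying it, you could save the proposed diff to a file and apply it manually. The module name and file name here are illustrative:
-
-```bash
-# write the proposed changes to a file instead of updating the module in place
-nf-core modules update fastqc --save-diff fastqc-update.diff
-
-# inspect the diff, then apply it
-git apply fastqc-update.diff
-```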
-
-If you don't want to update certain modules or want to update them to specific versions, you can make use of the `.nf-core.yml` configuration file. For example, you can prevent the `star/align` module installed from `nf-core/modules` from being updated by adding the following to the `.nf-core.yml` file:
-
-```yaml
-update:
-  https://github.com/nf-core/modules.git:
-    nf-core:
-      star/align: False
-```
-
-If you want this module to be updated only to a specific version (or downgraded), you could instead specify the version:
-
-```yaml
-update:
-  https://github.com/nf-core/modules.git:
-    nf-core:
-      star/align: "e937c7950af70930d1f34bb961403d9d2aa81c7"
-```
-
-This also works at the repository level. For example, if you want to exclude all modules installed from `nf-core/modules` from being updated you could add:
-
-```yaml
-update:
-  https://github.com/nf-core/modules.git:
-    nf-core: False
-```
-
-or if you want all modules in `nf-core/modules` at a specific version:
-
-```yaml
-update:
-  https://github.com/nf-core/modules.git:
-    nf-core: "e937c7950af70930d1f34bb961403d9d2aa81c7"
-```
-
-Note that the module versions specified in the `.nf-core.yml` file have higher precedence than versions specified with the command line flags, thus aiding you in writing reproducible pipelines.
-
-### Remove a module from a pipeline
-
-To delete a module from your pipeline, run `nf-core modules remove`.
-
-
-![`nf-core modules remove abacas`](docs/images/nf-core-modules-remove.svg)
-
-You can pass the module name as an optional argument to `nf-core modules remove` instead of using the cli prompt, eg: `nf-core modules remove fastqc`. To specify the pipeline directory, use `--dir `.
-
-### Create a patch file for a module
-
-If you want to make a minor change to a locally installed module but still keep it up to date with the remote version, you can create a patch file using `nf-core modules patch`.
-
-
-![`nf-core modules patch fastqc`](docs/images/nf-core-modules-patch.svg)
-
-The generated patches work with `nf-core modules update`: when you install a new version of the module, the command tries to apply
-the patch automatically. The patch application fails if the new version of the module modifies the same lines as the patch. In this case,
-the new version of the module is installed but the old patch file is preserved.
-
-When linting a patched module, the linting command will check the validity of the patch. When running other lint tests the patch is applied in reverse, and the original files are linted.
-
-### Create a new module
-
-This command creates a new nf-core module from the nf-core module template.
-This ensures that your module follows the nf-core guidelines.
-The template contains extensive `TODO` messages to walk you through the changes you need to make to the template.
-
-You can create a new module using `nf-core modules create`.
-
-This command can be used both when writing a module for the shared [nf-core/modules](https://github.com/nf-core/modules) repository,
-and also when creating local modules for a pipeline.
-
-Which type of repository you are working in is detected by the `repository_type` flag in a `.nf-core.yml` file in the root directory,
-set to either `pipeline` or `modules`.
-The command will automatically look through parent directories for this file to set the root path, so that you can run the command in a subdirectory.
-It will start in the current working directory, or whatever is specified with `--dir `.
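-
-As a minimal sketch, marking a repository as a pipeline for this detection could be done like so (assuming the file does not exist yet):
-
-```bash
-# create the .nf-core.yml marker in the repository root
-echo "repository_type: pipeline" > .nf-core.yml
-```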
-
-The `nf-core modules create` command will prompt you with the relevant questions in order to create all of the necessary module files.
-
-
-![`cd modules && nf-core modules create fastqc --author @nf-core-bot  --label process_low --meta --force`](docs/images/nf-core-modules-create.svg)
-
-### Check a module against nf-core guidelines
-
-Run the `nf-core modules lint` command to check modules in the current working directory (pipeline or nf-core/modules clone) against nf-core guidelines.
-
-Use the `--all` flag to run linting on all modules found. Use `--dir ` to specify another directory than the current working directory.
-
-
-![`nf-core modules lint multiqc`](docs/images/nf-core-modules-lint.svg)
-
-### Create a test for a module
-
-All modules on [nf-core/modules](https://github.com/nf-core/modules) have a strict requirement of being unit tested using minimal test data. We use [nf-test](https://code.askimed.com/nf-test/) as our testing framework.
-Each module already comes with a template for the test file in `test/main.nf.test`. Replace the placeholder code in that file with your specific input, output and process. In order to generate the corresponding snapshot after writing your test, you can use the `nf-core modules test` command. This command will run `nf-test test` twice, to also check for snapshot stability, i.e. that the same snapshot is generated on multiple runs.
-
-You can specify the module name in the form TOOL/SUBTOOL in the command or provide it later through interactive prompts.
-
-
-![`nf-core modules test fastqc --no-prompts`](docs/images/nf-core-modules-test.svg)
-
-In case you changed something in the test and want to update the snapshot, run
-
-```bash
-nf-core modules test --update
-```
-
-If you want to run the test only once without checking for snapshot stability, you can use the `--once` flag.
-
-### Bump bioconda and container versions of modules in nf-core/modules
-
-If you are contributing to the `nf-core/modules` repository and want to bump bioconda and container versions of certain modules, you can use the `nf-core modules bump-versions` helper tool. This will bump the bioconda version of a single module or of all modules to the latest version and also fetch the correct Docker and Singularity container tags.
-
-
-![`nf-core modules bump-versions fastqc`](docs/images/nf-core-modules-bump-version.svg)
-
-If you don't want to update certain modules or want to update them to specific versions, you can make use of the `.nf-core.yml` configuration file. For example, you can prevent the `star/align` module from being updated by adding the following to the `.nf-core.yml` file:
-
-```yaml
-bump-versions:
-  star/align: False
-```
-
-If you want this module to be updated only to a specific version (or downgraded), you could instead specify the version:
-
-```yaml
-bump-versions:
-  star/align: "2.6.1d"
-```
-
-## Subworkflows
-
-After the launch of nf-core modules, we now also provide nf-core subworkflows to fully utilize the power of DSL2 modularization.
-Subworkflows are chains of multiple module definitions that can be imported into any pipeline.
-This allows multiple pipelines to use the same code for the same tasks, and gives a greater degree of reusability and unit testing.
-
-To allow us to test modules and subworkflows together, we put the nf-core DSL2 subworkflows into the `subworkflows` directory of the modules repository at <https://github.com/nf-core/modules>.
-
-### Custom remote subworkflows
-
-The subworkflows supercommand released in nf-core/tools version 2.7 comes with two flags for specifying a custom remote repository:
-
-- `--git-remote `: Specify the repository from which the subworkflows should be fetched as a git URL. Defaults to the github repository of `nf-core/modules`.
-- `--branch `: Specify the branch from which the subworkflows should be fetched. Defaults to the default branch of your repository.
-
-For example, if you want to install the `bam_stats_samtools` subworkflow from the repository `nf-core/modules-test` hosted at `gitlab.com` in the branch `subworkflows`, you can use the following command:
-
-```bash
-nf-core subworkflows --git-remote git@gitlab.com:nf-core/modules-test.git --branch subworkflows install bam_stats_samtools
-```
-
-Note that a custom remote must follow a similar directory structure to that of `nf-core/modules` for the `nf-core subworkflows` commands to work properly.
-
-The directory where subworkflows are installed will be prompted for, or obtained from `org_path` in the `.nf-core.yml` file if available. If your subworkflows are located at `subworkflows/my-folder/SUBWORKFLOW_NAME`, your `.nf-core.yml` file should have:
-
-```yaml
-org_path: my-folder
-```
-
-Please avoid installing the same tools from two different remotes, as this can lead to further errors.
-
-The subworkflows commands will, during initialisation, try to pull changes from the remote repositories. If you want to disable this, for example for performance reasons or if you want to run the commands offline, you can use the flag `--no-pull`. Note however that the commands will still need to clone repositories that have previously not been used.
-
-### Private remote repositories
-
-You can use the subworkflows command with private remote repositories. Make sure that your local `git` is correctly configured with your private remote
-and then specify the remote the same way you would do with a public remote repository.
-
-### List subworkflows
-
-The `nf-core subworkflows list` command provides the subcommands `remote` and `local` for listing subworkflows installed in a remote repository and in the local pipeline respectively. Both subcommands allow you to use a pattern for filtering the subworkflows by keywords, eg: `nf-core subworkflows list `.
-
-#### List remote subworkflows
-
-To list all subworkflows available on [nf-core/modules](https://github.com/nf-core/modules), you can use
-`nf-core subworkflows list remote`, which will print all available subworkflows to the terminal.
-
-
-![`nf-core subworkflows list remote`](docs/images/nf-core-subworkflows-list-remote.svg)
-
-#### List installed subworkflows
-
-To list subworkflows installed in a local pipeline directory you can use `nf-core subworkflows list local`. This will list the subworkflows installed in the current working directory by default. If you want to specify another directory, use the `--dir ` flag.
-
-
-![`nf-core subworkflows list local`](docs/images/nf-core-subworkflows-list-local.svg)
-
-## Show information about a subworkflow
-
-For quick help about how a subworkflow works, use `nf-core subworkflows info `.
-This shows documentation about the subworkflow on the command line, similar to what's available on the
-[nf-core website](https://nf-co.re/subworkflows).
-
-
-![`nf-core subworkflows info bam_rseqc`](docs/images/nf-core-subworkflows-info.svg)
-
-### Install subworkflows in a pipeline
-
-You can install subworkflows from [nf-core/modules](https://github.com/nf-core/modules) in your pipeline using `nf-core subworkflows install`.
-A subworkflow installed this way will be installed to the `./subworkflows/nf-core` directory.
-
-
-![`nf-core subworkflows install bam_rseqc`](docs/images/nf-core-subworkflows-install.svg)
-
-You can pass the subworkflow name as an optional argument to `nf-core subworkflows install` like above, or select it from a list of available subworkflows by only running `nf-core subworkflows install`.
-
-There are four additional flags that you can use when installing a subworkflow:
-
-- `--dir`: Pipeline directory, the default is the current working directory.
-- `--force`: Overwrite a previously installed version of the subworkflow.
-- `--prompt`: Select the subworkflow version using a cli prompt.
-- `--sha `: Install the subworkflow at a specific commit.
-
-### Update subworkflows in a pipeline
-
-You can update subworkflows installed from a remote repository in your pipeline using `nf-core subworkflows update`.
-
-
-![`nf-core subworkflows update --all --no-preview`](docs/images/nf-core-subworkflows-update.svg)
-
-You can pass the subworkflow name as an optional argument to `nf-core subworkflows update` like above, or select it from the list of available subworkflows by only running `nf-core subworkflows update`.
-
-There are eight additional flags that you can use with this command:
-
-- `--dir`: Pipeline directory, the default is the current working directory.
-- `--force`: Reinstall subworkflow even if it appears to be up to date.
-- `--prompt`: Select the subworkflow version using a cli prompt.
-- `--sha `: Install the subworkflow at a specific commit from the `nf-core/modules` repository.
-- `--preview/--no-preview`: Show the diff between the installed files and the new version before installing.
-- `--save-diff `: Save diffs to a file instead of updating in place. The diffs can then be applied with `git apply `.
-- `--all`: Use this flag to run the command on all subworkflows in the pipeline.
-- `--update-deps`: Use this flag to automatically update all dependencies of a subworkflow.
-
-If you don't want to update certain subworkflows or want to update them to specific versions, you can make use of the `.nf-core.yml` configuration file. For example, you can prevent the `bam_rseqc` subworkflow installed from `nf-core/modules` from being updated by adding the following to the `.nf-core.yml` file:
-
-```yaml
-update:
-  https://github.com/nf-core/modules.git:
-    nf-core:
-      bam_rseqc: False
-```
-
-If you want this subworkflow to be updated only to a specific version (or downgraded), you could instead specify the version:
-
-```yaml
-update:
-  https://github.com/nf-core/modules.git:
-    nf-core:
-      bam_rseqc: "36a77f7c6decf2d1fb9f639ae982bc148d6828aa"
-```
-
-This also works at the repository level.
For example, if you want to exclude all modules and subworkflows installed from `nf-core/modules` from being updated, you could add:
-
-```yaml
-update:
-  https://github.com/nf-core/modules.git:
-    nf-core: False
-```
-
-or if you want all subworkflows in `nf-core/modules` at a specific version:
-
-```yaml
-update:
-  https://github.com/nf-core/modules.git:
-    nf-core: "e937c7950af70930d1f34bb961403d9d2aa81c7"
-```
-
-Note that the subworkflow versions specified in the `.nf-core.yml` file have higher precedence than versions specified with the command line flags, thus aiding you in writing reproducible pipelines.
-
-### Remove a subworkflow from a pipeline
-
-To delete a subworkflow from your pipeline, run `nf-core subworkflows remove`.
-
-
-![`nf-core subworkflows remove bam_rseqc`](docs/images/nf-core-subworkflows-remove.svg)
-
-You can pass the subworkflow name as an optional argument to `nf-core subworkflows remove` like above, or select it from the list of available subworkflows by only running `nf-core subworkflows remove`. To specify the pipeline directory, use `--dir `.
-
-### Create a new subworkflow
-
-This command creates a new nf-core subworkflow from the nf-core subworkflow template.
-This ensures that your subworkflow follows the nf-core guidelines.
-The template contains extensive `TODO` messages to walk you through the changes you need to make to the template.
-See the [subworkflow documentation](https://nf-co.re/docs/contributing/subworkflows) for more details around creating a new subworkflow, including rules about nomenclature and a step-by-step guide.
-
-You can create a new subworkflow using `nf-core subworkflows create`.
-
-This command can be used both when writing a subworkflow for the shared [nf-core/modules](https://github.com/nf-core/modules) repository,
-and also when creating local subworkflows for a pipeline.
-
-Which type of repository you are working in is detected by the `repository_type` flag in a `.nf-core.yml` file in the root directory,
-set to either `pipeline` or `modules`.
-The command will automatically look through parent directories for this file to set the root path, so that you can run the command in a subdirectory.
-It will start in the current working directory, or whatever is specified with `--dir `.
-
-The `nf-core subworkflows create` command will prompt you with the relevant questions in order to create all of the necessary subworkflow files.
-
-
-![`nf-core subworkflows create bam_stats_samtools --author @nf-core-bot --force`](docs/images/nf-core-subworkflows-create.svg)
-
-### Create a test for a subworkflow
-
-All subworkflows on [nf-core/modules](https://github.com/nf-core/modules) have a strict requirement of being unit tested using minimal test data. We use [nf-test](https://code.askimed.com/nf-test/) as our testing framework.
-Each subworkflow already comes with a template for the test file in `test/main.nf.test`. Replace the placeholder code in that file with your specific input, output and process. In order to generate the corresponding snapshot after writing your test, you can use the `nf-core subworkflows test` command. This command will run `nf-test test` twice, to also check for snapshot stability, i.e. that the same snapshot is generated on multiple runs.
-
-You can specify the subworkflow name in the command or provide it later through interactive prompts.
- - - -![`nf-core subworkflows test bam_rseqc --no-prompts`](docs/images/nf-core-subworkflows-test.svg) - -In case you changed something in the test and want to update the snapshot, run - -```bash -nf-core subworkflows test --update -``` - -If you want to run the test only once without checking for snapshot stability, you can use the `--once` flag. - -### Check a subworkflow against nf-core guidelines - -Run the `nf-core subworkflows lint` command to check subworkflows in the current working directory (a pipeline or a clone of nf-core/modules) against nf-core guidelines. - -Use the `--all` flag to run linting on all subworkflows found. Use `--dir ` to specify a different directory than the current working directory. +## Contributions and Support - +If you would like to contribute to this package, please see the [contributing guidelines](.github/CONTRIBUTING.md). -![`nf-core subworkflows lint bam_stats_samtools`](docs/images/nf-core-subworkflows-lint.svg) +For further information or help, don't hesitate to get in touch on the [Slack `#tools` channel](https://nfcore.slack.com/channels/tools) (you can join with [this invite](https://nf-co.re/join/slack)). ## Citation diff --git a/docs/api/_src/_static/css/custom.css b/docs/api/_src/_static/css/custom.css deleted file mode 100644 index e892dd999..000000000 --- a/docs/api/_src/_static/css/custom.css +++ /dev/null @@ -1,437 +0,0 @@ -@media (prefers-color-scheme: light) { - a, - a:visited { - color: #246eb9; - } - a:hover, - a:focus, - a:active { - color: #c03221; - } - .wy-nav-side { - background-color: #ededed; - } - .wy-nav-top, - .wy-side-nav-search, - .wy-menu-vertical a:active { - background-color: #32ad65; - } - .wy-menu-vertical a { - color: #343434; - } - .wy-menu-vertical a:hover { - background-color: #abacab85; - } - .wy-menu-vertical header, - .wy-menu-vertical p.caption { - color: #32ad65; - } - - .wy-side-nav-search input[type="text"] { - border: none; - } - - code, - .rst-content code.literal { - background-color: rgba(220, 220, 220, 0.4); - color: #c03221; - border: none; - } - .rst-content .note .admonition-title { - background-color: #72757bfc; - } - - html.writer-html4 .rst-content dl:not(.docutils) > dt, - html.writer-html5 - .rst-content - dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) - > dt { - background-color: #32ad65; - border: none; - } - .rst-content .method > dt > code { - color: #f5f6f7; - } - .rst-content div[class^="highlight"], - .rst-content pre.literal-block { - border: none; - } - .highlight { - background-color: #242424; - color: #e5e6e7; - } - .highlight .hll { - color: #e5e6e7; - background-color: #585b60fc; - } - .highlight .k, - .highlight .nt, - .highlight .no { - color: #246eb9; - } - .highlight .s, - .highlight .s1, - .highlight .s2, - .highlight .na { - color: #32ad65; - } - .highlight .nb, - .highlight .o, - .highlight .cm { - color: #ffbe0b; - } - .highlight .c1 { - color: #88898afc; - } - .highlight .nv, - .py > span { - color: #c03221; - } - .py .sig-param, - .py .sig-paren, - .property .pre { - color: #eef0f2; - } - - .method > .py .sig-param, - .method > .py .sig-paren { - color: #32ad65; - } - .btn.btn-neutral { - background-color: #e5e6e7d4 !important; - } - .rst-content .hint .admonition-title, - .rst-content .hint .wy-alert-title, - .rst-content .important .admonition-title, - .rst-content .important .wy-alert-title, - .rst-content .tip .admonition-title, - .rst-content .tip .wy-alert-title, - .rst-content .wy-alert-success.admonition-todo 
.admonition-title, - .rst-content .wy-alert-success.admonition-todo .wy-alert-title, - .rst-content .wy-alert-success.admonition .admonition-title, - .rst-content .wy-alert-success.admonition .wy-alert-title, - .rst-content .wy-alert-success.attention .admonition-title, - .rst-content .wy-alert-success.attention .wy-alert-title, - .rst-content .wy-alert-success.caution .admonition-title, - .rst-content .wy-alert-success.caution .wy-alert-title, - .rst-content .wy-alert-success.danger .admonition-title, - .rst-content .wy-alert-success.danger .wy-alert-title, - .rst-content .wy-alert-success.error .admonition-title, - .rst-content .wy-alert-success.error .wy-alert-title, - .rst-content .wy-alert-success.note .admonition-title, - .rst-content .wy-alert-success.note .wy-alert-title, - .rst-content .wy-alert-success.seealso .admonition-title, - .rst-content .wy-alert-success.seealso .wy-alert-title, - .rst-content .wy-alert-success.warning .admonition-title, - .rst-content .wy-alert-success.warning .wy-alert-title, - .rst-content .wy-alert.wy-alert-success .admonition-title, - .wy-alert.wy-alert-success .rst-content .admonition-title, - .wy-alert.wy-alert-success .wy-alert-title { - background-color: #32ad65; - color: #e5e6e7; - } - - .rst-content .note, - .rst-content .seealso, - .rst-content .wy-alert-info.admonition, - .rst-content .wy-alert-info.admonition-todo, - .rst-content .wy-alert-info.attention, - .rst-content .wy-alert-info.caution, - .rst-content .wy-alert-info.danger, - .rst-content .wy-alert-info.error, - .rst-content .wy-alert-info.hint, - .rst-content .wy-alert-info.important, - .rst-content .wy-alert-info.tip, - .rst-content .wy-alert-info.warning, - .wy-alert.wy-alert-info, - .rst-content .hint, - .rst-content .important, - .rst-content .tip, - .rst-content .wy-alert-success.admonition, - .rst-content .wy-alert-success.admonition-todo, - .rst-content .wy-alert-success.attention, - .rst-content .wy-alert-success.caution, - .rst-content .wy-alert-success.danger, - .rst-content .wy-alert-success.error, - .rst-content .wy-alert-success.note, - .rst-content .wy-alert-success.seealso, - .rst-content .wy-alert-success.warning, - .wy-alert.wy-alert-success { - color: #343434; - background-color: #e3e3e3; - border: none; - } - - .rst-content .admonition-todo .admonition-title, - .rst-content .admonition-todo .wy-alert-title, - .rst-content .attention .admonition-title, - .rst-content .attention .wy-alert-title, - .rst-content .caution .admonition-title, - .rst-content .caution .wy-alert-title, - .rst-content .warning .admonition-title, - .rst-content .warning .wy-alert-title, - .rst-content .wy-alert-warning.admonition .admonition-title, - .rst-content .wy-alert-warning.admonition .wy-alert-title, - .rst-content .wy-alert-warning.danger .admonition-title, - .rst-content .wy-alert-warning.danger .wy-alert-title, - .rst-content .wy-alert-warning.error .admonition-title, - .rst-content .wy-alert-warning.error .wy-alert-title, - .rst-content .wy-alert-warning.hint .admonition-title, - .rst-content .wy-alert-warning.hint .wy-alert-title, - .rst-content .seealso .admonition-title, - .rst-content .seealso .wy-alert-title, - .rst-content .wy-alert-warning.important .admonition-title, - .rst-content .wy-alert-warning.important .wy-alert-title, - .rst-content .wy-alert-warning.note .admonition-title, - .rst-content .wy-alert-warning.note .wy-alert-title, - .rst-content .wy-alert-warning.seealso .admonition-title, - .rst-content .wy-alert-warning.seealso .wy-alert-title, - .rst-content 
.wy-alert-warning.tip .admonition-title, - .rst-content .wy-alert-warning.tip .wy-alert-title, - .rst-content .wy-alert.wy-alert-warning .admonition-title, - .wy-alert.wy-alert-warning .rst-content .admonition-title, - .wy-alert.wy-alert-warning .wy-alert-title { - background-color: #246eb9; - } - .rst-content .admonition-todo, - .rst-content .attention, - .rst-content .caution, - .rst-content .warning, - .rst-content .wy-alert-warning.admonition, - .rst-content .wy-alert-warning.danger, - .rst-content .wy-alert-warning.error, - .rst-content .wy-alert-warning.hint, - .rst-content .wy-alert-warning.important, - .rst-content .wy-alert-warning.note, - .rst-content .wy-alert-warning.seealso, - .rst-content .wy-alert-warning.tip, - .wy-alert.wy-alert-warning { - color: #343434; - background-color: #e3e3e3; - } -} -@media (prefers-color-scheme: dark) { - .wy-nav-content-wrap { - background-color: #181a1b; - } - .wy-nav-top, - .wy-side-nav-search, - .wy-menu-vertical a:active { - background-color: #32ad65; - } - .wy-menu-vertical header, - .wy-menu-vertical p.caption { - color: #32ad65; - } - .wy-nav-side { - background-color: #2e2e2e; - } - .wy-nav-content { - background-color: #343434; - color: #e5e6e7; - } - a, - a:visited { - color: #6fb2e8; - } - a:hover, - a:focus, - a:active { - color: #db9444; - } - code, - .rst-content code.literal { - background-color: rgba(220, 220, 220, 0.1); - color: #db9444; - border: none; - } - .wy-side-nav-search input[type="text"] { - border: none; - background-color: #e5e6e7; - } - .wy-side-nav-search > div.version { - color: #e5e6e7c9; - } - .wy-side-nav-search .wy-dropdown > a, - .wy-side-nav-search > a { - color: #e5e6e7; - } - - html.writer-html4 .rst-content dl:not(.docutils) > dt, - html.writer-html5 - .rst-content - dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) - > dt { - background-color: #32ad65; - border: none; - } - html.writer-html4 .rst-content dl:not(.docutils) dl:not(.field-list) > dt, - html.writer-html5 - .rst-content - dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) - dl:not(.field-list) - > dt { - background-color: #777; - color: #e5e6e7a6; - } - .rst-content code { - color: #e5e6e7d4; - } - .rst-content .method > dt > code { - color: #e5e6e7; - } - .sig-paren { - color: #db9444; - } - .rst-content .note .admonition-title { - background-color: #72757bfc; - } - .rst-content div[class^="highlight"], - .rst-content pre.literal-block { - border: none; - } - - .highlight { - background-color: #242424; - color: #e5e6e7; - } - .highlight .hll { - color: #e5e6e7; - background-color: #585b60fc; - } - .highlight .k, - .highlight .nt, - .highlight .no { - color: #6fb2e8; - } - .highlight .s, - .highlight .s1, - .highlight .s2, - .highlight .na { - color: #32ad65; - } - .highlight .nb, - .highlight .o, - .highlight .cm { - color: #db9444; - } - .highlight .c1 { - color: #88898afc; - } - .highlight .nv { - color: #f9d977; - } - .btn.btn-neutral { - background-color: #e5e6e7d4 !important; - } - - .rst-content .hint .admonition-title, - .rst-content .hint .wy-alert-title, - .rst-content .important .admonition-title, - .rst-content .important .wy-alert-title, - .rst-content .tip .admonition-title, - .rst-content .tip .wy-alert-title, - .rst-content .wy-alert-success.admonition-todo .admonition-title, - .rst-content .wy-alert-success.admonition-todo .wy-alert-title, - .rst-content .wy-alert-success.admonition .admonition-title, - .rst-content .wy-alert-success.admonition 
.wy-alert-title, - .rst-content .wy-alert-success.attention .admonition-title, - .rst-content .wy-alert-success.attention .wy-alert-title, - .rst-content .wy-alert-success.caution .admonition-title, - .rst-content .wy-alert-success.caution .wy-alert-title, - .rst-content .wy-alert-success.danger .admonition-title, - .rst-content .wy-alert-success.danger .wy-alert-title, - .rst-content .wy-alert-success.error .admonition-title, - .rst-content .wy-alert-success.error .wy-alert-title, - .rst-content .wy-alert-success.note .admonition-title, - .rst-content .wy-alert-success.note .wy-alert-title, - .rst-content .wy-alert-success.seealso .admonition-title, - .rst-content .wy-alert-success.seealso .wy-alert-title, - .rst-content .wy-alert-success.warning .admonition-title, - .rst-content .wy-alert-success.warning .wy-alert-title, - .rst-content .wy-alert.wy-alert-success .admonition-title, - .wy-alert.wy-alert-success .rst-content .admonition-title, - .wy-alert.wy-alert-success .wy-alert-title { - background-color: #32ad65; - color: #e5e6e7; - } - - .rst-content .note, - .rst-content .seealso, - .rst-content .wy-alert-info.admonition, - .rst-content .wy-alert-info.admonition-todo, - .rst-content .wy-alert-info.attention, - .rst-content .wy-alert-info.caution, - .rst-content .wy-alert-info.danger, - .rst-content .wy-alert-info.error, - .rst-content .wy-alert-info.hint, - .rst-content .wy-alert-info.important, - .rst-content .wy-alert-info.tip, - .rst-content .wy-alert-info.warning, - .wy-alert.wy-alert-info, - .rst-content .hint, - .rst-content .important, - .rst-content .tip, - .rst-content .wy-alert-success.admonition, - .rst-content .wy-alert-success.admonition-todo, - .rst-content .wy-alert-success.attention, - .rst-content .wy-alert-success.caution, - .rst-content .wy-alert-success.danger, - .rst-content .wy-alert-success.error, - .rst-content .wy-alert-success.note, - .rst-content .wy-alert-success.seealso, - .rst-content .wy-alert-success.warning, - .wy-alert.wy-alert-success { - color: #343434; - background-color: #e3e3e3; - border: none; - } - - .rst-content .admonition-todo .admonition-title, - .rst-content .admonition-todo .wy-alert-title, - .rst-content .attention .admonition-title, - .rst-content .attention .wy-alert-title, - .rst-content .caution .admonition-title, - .rst-content .caution .wy-alert-title, - .rst-content .warning .admonition-title, - .rst-content .warning .wy-alert-title, - .rst-content .wy-alert-warning.admonition .admonition-title, - .rst-content .wy-alert-warning.admonition .wy-alert-title, - .rst-content .wy-alert-warning.danger .admonition-title, - .rst-content .wy-alert-warning.danger .wy-alert-title, - .rst-content .wy-alert-warning.error .admonition-title, - .rst-content .wy-alert-warning.error .wy-alert-title, - .rst-content .wy-alert-warning.hint .admonition-title, - .rst-content .wy-alert-warning.hint .wy-alert-title, - .rst-content .seealso .admonition-title, - .rst-content .seealso .wy-alert-title, - .rst-content .wy-alert-warning.important .admonition-title, - .rst-content .wy-alert-warning.important .wy-alert-title, - .rst-content .wy-alert-warning.note .admonition-title, - .rst-content .wy-alert-warning.note .wy-alert-title, - .rst-content .wy-alert-warning.seealso .admonition-title, - .rst-content .wy-alert-warning.seealso .wy-alert-title, - .rst-content .wy-alert-warning.tip .admonition-title, - .rst-content .wy-alert-warning.tip .wy-alert-title, - .rst-content .wy-alert.wy-alert-warning .admonition-title, - .wy-alert.wy-alert-warning .rst-content 
.admonition-title, - .wy-alert.wy-alert-warning .wy-alert-title { - background-color: #f4a25b; - } - .rst-content .admonition-todo, - .rst-content .attention, - .rst-content .caution, - .rst-content .warning, - .rst-content .wy-alert-warning.admonition, - .rst-content .wy-alert-warning.danger, - .rst-content .wy-alert-warning.error, - .rst-content .wy-alert-warning.hint, - .rst-content .wy-alert-warning.important, - .rst-content .wy-alert-warning.note, - .rst-content .wy-alert-warning.seealso, - .rst-content .wy-alert-warning.tip, - .wy-alert.wy-alert-warning { - color: #343434; - background-color: #e3e3e3; - } -} diff --git a/docs/api/_src/_static/nf-core-logo.png b/docs/api/_src/_static/nf-core-logo.png deleted file mode 100644 index 91ddb58d8..000000000 Binary files a/docs/api/_src/_static/nf-core-logo.png and /dev/null differ diff --git a/docs/api/_src/_templates/layout.html b/docs/api/_src/_templates/layout.html deleted file mode 100644 index 42341a97c..000000000 --- a/docs/api/_src/_templates/layout.html +++ /dev/null @@ -1,4 +0,0 @@ -{% extends '!layout.html' %} {% block header %} - - -{% endblock %} diff --git a/docs/api/_src/api/index.md b/docs/api/_src/api/index.md index a1863f7e3..035a89688 100644 --- a/docs/api/_src/api/index.md +++ b/docs/api/_src/api/index.md @@ -1,8 +1,11 @@ -# API Reference +# nf-core/tools documentation -```{toctree} -:glob: true -:maxdepth: 1 +This API documentation is for the [`nf-core/tools`](https://github.com/nf-core/tools) package. -* -``` +## Contents + +- [Pipeline commands](./pipeline_lint_tests/) (run by `nf-core pipelines lint`) +- [Module commands](./module_lint_tests/) (run by `nf-core modules lint`) +- [Subworkflow commands](./subworkflow_lint_tests/) (run by `nf-core subworkflows lint`) +- [nf-core/tools Python package API reference](./api/) + - [nf-core/tools pipeline commands API reference](./api/pipelines/) diff --git a/docs/api/_src/api/modules.md b/docs/api/_src/api/modules.md deleted file mode 100644 index e3f1e39be..000000000 --- a/docs/api/_src/api/modules.md +++ /dev/null @@ -1,9 +0,0 @@ -# nf_core.modules - -```{eval-rst} -.. automodule:: nf_core.modules - :members: - :undoc-members: - :show-inheritance: - :private-members: -``` diff --git a/docs/api/_src/api/bump_version.md b/docs/api/_src/api/pipelines/bump_version.md similarity index 72% rename from docs/api/_src/api/bump_version.md rename to docs/api/_src/api/pipelines/bump_version.md index 54c377440..cd7dc280f 100644 --- a/docs/api/_src/api/bump_version.md +++ b/docs/api/_src/api/pipelines/bump_version.md @@ -1,7 +1,7 @@ # nf_core.bump_version ```{eval-rst} -.. automodule:: nf_core.bump_version +.. automodule:: nf_core.pipelines.bump_version :members: :undoc-members: :show-inheritance: diff --git a/docs/api/_src/api/create.md b/docs/api/_src/api/pipelines/create.md similarity index 73% rename from docs/api/_src/api/create.md rename to docs/api/_src/api/pipelines/create.md index 5d5f6a62d..576335e95 100644 --- a/docs/api/_src/api/create.md +++ b/docs/api/_src/api/pipelines/create.md @@ -1,7 +1,7 @@ # nf_core.create ```{eval-rst} -.. automodule:: nf_core.create +.. 
automodule:: nf_core.pipelines.create :members: :undoc-members: :show-inheritance: diff --git a/docs/api/_src/api/download.md b/docs/api/_src/api/pipelines/download.md similarity index 73% rename from docs/api/_src/api/download.md rename to docs/api/_src/api/pipelines/download.md index 18ab51376..540fb92c4 100644 --- a/docs/api/_src/api/download.md +++ b/docs/api/_src/api/pipelines/download.md @@ -1,7 +1,7 @@ # nf_core.download ```{eval-rst} -.. automodule:: nf_core.download +.. automodule:: nf_core.pipelines.download :members: :undoc-members: :show-inheritance: diff --git a/docs/api/_src/api/pipelines/index.md b/docs/api/_src/api/pipelines/index.md new file mode 100644 index 000000000..a1863f7e3 --- /dev/null +++ b/docs/api/_src/api/pipelines/index.md @@ -0,0 +1,8 @@ +# API Reference + +```{toctree} +:glob: true +:maxdepth: 1 + +* +``` diff --git a/docs/api/_src/api/launch.md b/docs/api/_src/api/pipelines/launch.md similarity index 73% rename from docs/api/_src/api/launch.md rename to docs/api/_src/api/pipelines/launch.md index eef777ca8..0f7fc03f6 100644 --- a/docs/api/_src/api/launch.md +++ b/docs/api/_src/api/pipelines/launch.md @@ -1,7 +1,7 @@ # nf_core.launch ```{eval-rst} -.. automodule:: nf_core.launch +.. automodule:: nf_core.pipelines.launch :members: :undoc-members: :show-inheritance: diff --git a/docs/api/_src/api/lint.md b/docs/api/_src/api/pipelines/lint.md similarity index 64% rename from docs/api/_src/api/lint.md rename to docs/api/_src/api/pipelines/lint.md index 1380f7ec7..aa62c404b 100644 --- a/docs/api/_src/api/lint.md +++ b/docs/api/_src/api/pipelines/lint.md @@ -1,11 +1,11 @@ # nf_core.lint :::{seealso} -See the [Lint Tests](../pipeline_lint_tests/index.md) docs for information about specific linting functions. +See the [Lint Tests](/docs/nf-core-tools/api_reference/dev/pipeline_lint_tests) docs for information about specific linting functions. ::: ```{eval-rst} -.. automodule:: nf_core.lint +.. automodule:: nf_core.pipelines.lint :members: run_linting :undoc-members: :show-inheritance: diff --git a/docs/api/_src/api/list.md b/docs/api/_src/api/pipelines/list.md similarity index 74% rename from docs/api/_src/api/list.md rename to docs/api/_src/api/pipelines/list.md index 35c819bc5..7df756454 100644 --- a/docs/api/_src/api/list.md +++ b/docs/api/_src/api/pipelines/list.md @@ -1,7 +1,7 @@ # nf_core.list ```{eval-rst} -.. automodule:: nf_core.list +.. automodule:: nf_core.pipelines.list :members: :undoc-members: :show-inheritance: diff --git a/docs/api/_src/api/params-file.md b/docs/api/_src/api/pipelines/params-file.md similarity index 72% rename from docs/api/_src/api/params-file.md rename to docs/api/_src/api/pipelines/params-file.md index c5bbfc0f1..06f27cc59 100644 --- a/docs/api/_src/api/params-file.md +++ b/docs/api/_src/api/pipelines/params-file.md @@ -1,7 +1,7 @@ # nf_core.params_file ```{eval-rst} -.. automodule:: nf_core.params_file +.. automodule:: nf_core.pipelines.params_file :members: :undoc-members: :show-inheritance: diff --git a/docs/api/_src/api/schema.md b/docs/api/_src/api/pipelines/schema.md similarity index 73% rename from docs/api/_src/api/schema.md rename to docs/api/_src/api/pipelines/schema.md index a702d2805..c885d9ed2 100644 --- a/docs/api/_src/api/schema.md +++ b/docs/api/_src/api/pipelines/schema.md @@ -1,7 +1,7 @@ # nf_core.schema ```{eval-rst} -.. automodule:: nf_core.schema +.. 
automodule:: nf_core.pipelines.schema :members: :undoc-members: :show-inheritance: diff --git a/docs/api/_src/api/sync.md b/docs/api/_src/api/pipelines/sync.md similarity index 74% rename from docs/api/_src/api/sync.md rename to docs/api/_src/api/pipelines/sync.md index 8cc02209d..da1f468fe 100644 --- a/docs/api/_src/api/sync.md +++ b/docs/api/_src/api/pipelines/sync.md @@ -1,7 +1,7 @@ # nf_core.sync ```{eval-rst} -.. automodule:: nf_core.sync +.. automodule:: nf_core.pipelines.sync :members: :undoc-members: :show-inheritance: diff --git a/docs/api/_src/api/licences.md b/docs/api/_src/api/pipelines/utils.md similarity index 63% rename from docs/api/_src/api/licences.md rename to docs/api/_src/api/pipelines/utils.md index 95b5f9768..86b8c3f36 100644 --- a/docs/api/_src/api/licences.md +++ b/docs/api/_src/api/pipelines/utils.md @@ -1,7 +1,7 @@ -# nf_core.licences +# nf_core.utils ```{eval-rst} -.. automodule:: nf_core.licences +.. automodule:: nf_core.pipelines.utils :members: :undoc-members: :show-inheritance: diff --git a/docs/api/_src/api/subworkflows.md b/docs/api/_src/api/subworkflows.md deleted file mode 100644 index 438ccd018..000000000 --- a/docs/api/_src/api/subworkflows.md +++ /dev/null @@ -1,9 +0,0 @@ -# nf_core.subworkflows - -```{eval-rst} -.. automodule:: nf_core.subworkflows - :members: - :undoc-members: - :show-inheritance: - :private-members: -``` diff --git a/docs/api/_src/conf.py b/docs/api/_src/conf.py index bfdbd7888..729cf6ba3 100644 --- a/docs/api/_src/conf.py +++ b/docs/api/_src/conf.py @@ -40,7 +40,7 @@ # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. -extensions = ["myst_parser", "sphinx.ext.autodoc", "sphinx.ext.napoleon"] +extensions = ["myst_parser", "sphinx.ext.autodoc", "sphinx.ext.napoleon", "sphinxcontrib.autodoc_pydantic"] # Add any paths that contain templates here, relative to this directory. templates_path = ["./_templates"] diff --git a/docs/api/_src/index.md b/docs/api/_src/index.md index 17bf2407f..d81a0e90d 100644 --- a/docs/api/_src/index.md +++ b/docs/api/_src/index.md @@ -1,22 +1,10 @@ # nf-core/tools documentation -```{toctree} -:caption: 'Contents:' -:glob: true -:hidden: true -:maxdepth: 2 - -pipeline_lint_tests/index.rst -module_lint_tests/index.rst -subworkflow_lint_tests/index.rst -api/index.rst -``` - -This documentation is for the `nf-core/tools` package. +This API documentation is for the [`nf-core/tools`](https://github.com/nf-core/tools) package. 
## Contents -- [Pipeline code lint tests](pipeline_lint_tests/index.md) (run by `nf-core lint`) -- [Module code lint tests](module_lint_tests/index.md) (run by `nf-core modules lint`) -- [Subworkflow code lint tests](subworkflow_lint_tests/index.md) (run by `nf-core subworkflows lint`) -- [nf-core/tools Python package API reference](api/index.md) +- [Pipeline code lint tests](./pipeline_lint_tests/) (run by `nf-core pipelines lint`) +- [Module code lint tests](./module_lint_tests/) (run by `nf-core modules lint`) +- [Subworkflow code lint tests](./subworkflow_lint_tests/) (run by `nf-core subworkflows lint`) +- [nf-core/tools Python package API reference](./api/) diff --git a/docs/api/_src/module_lint_tests/index.md b/docs/api/_src/module_lint_tests/index.md index dee84d06d..7039ba259 100644 --- a/docs/api/_src/module_lint_tests/index.md +++ b/docs/api/_src/module_lint_tests/index.md @@ -1,8 +1,11 @@ -# Module lint tests +# Module Lint Tests -```{toctree} -:glob: true -:maxdepth: 1 - -* -``` + - [environment_yml](./environment_yml/) + - [main_nf](./main_nf/) + - [meta_yml](./meta_yml/) + - [module_changes](./module_changes/) + - [module_deprecations](./module_deprecations/) + - [module_patch](./module_patch/) + - [module_tests](./module_tests/) + - [module_todos](./module_todos/) + - [module_version](./module_version/) diff --git a/docs/api/_src/pipeline_lint_tests/actions_awsfulltest.md b/docs/api/_src/pipeline_lint_tests/actions_awsfulltest.md index c0f0aef5a..4bb47569a 100644 --- a/docs/api/_src/pipeline_lint_tests/actions_awsfulltest.md +++ b/docs/api/_src/pipeline_lint_tests/actions_awsfulltest.md @@ -1,5 +1,5 @@ # actions_awsfulltest ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.actions_awsfulltest +.. automethod:: nf_core.pipelines.lint.PipelineLint.actions_awsfulltest ``` diff --git a/docs/api/_src/pipeline_lint_tests/actions_awstest.md b/docs/api/_src/pipeline_lint_tests/actions_awstest.md index 42441c8ac..b250571a0 100644 --- a/docs/api/_src/pipeline_lint_tests/actions_awstest.md +++ b/docs/api/_src/pipeline_lint_tests/actions_awstest.md @@ -1,5 +1,5 @@ # actions_awstest ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.actions_awstest +.. automethod:: nf_core.pipelines.lint.PipelineLint.actions_awstest ``` diff --git a/docs/api/_src/pipeline_lint_tests/actions_ci.md b/docs/api/_src/pipeline_lint_tests/actions_ci.md index 78ea3aea0..68cbc089a 100644 --- a/docs/api/_src/pipeline_lint_tests/actions_ci.md +++ b/docs/api/_src/pipeline_lint_tests/actions_ci.md @@ -1,5 +1,5 @@ # actions_ci ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.actions_ci +.. automethod:: nf_core.pipelines.lint.PipelineLint.actions_ci ``` diff --git a/docs/api/_src/pipeline_lint_tests/actions_schema_validation.md b/docs/api/_src/pipeline_lint_tests/actions_schema_validation.md index f0c98eb21..860acb2d2 100644 --- a/docs/api/_src/pipeline_lint_tests/actions_schema_validation.md +++ b/docs/api/_src/pipeline_lint_tests/actions_schema_validation.md @@ -1,5 +1,5 @@ # actions_schema_validation ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.actions_schema_validation +.. automethod:: nf_core.pipelines.lint.PipelineLint.actions_schema_validation ``` diff --git a/docs/api/_src/pipeline_lint_tests/base_config.md b/docs/api/_src/pipeline_lint_tests/base_config.md index 4a56ef978..803627ced 100644 --- a/docs/api/_src/pipeline_lint_tests/base_config.md +++ b/docs/api/_src/pipeline_lint_tests/base_config.md @@ -1,5 +1,5 @@ # base_config ```{eval-rst} -.. 
automethod:: nf_core.lint.PipelineLint.base_config +.. automethod:: nf_core.pipelines.lint.PipelineLint.base_config ``` diff --git a/docs/api/_src/pipeline_lint_tests/files_exist.md b/docs/api/_src/pipeline_lint_tests/files_exist.md index 309ea62f0..69890d169 100644 --- a/docs/api/_src/pipeline_lint_tests/files_exist.md +++ b/docs/api/_src/pipeline_lint_tests/files_exist.md @@ -1,5 +1,5 @@ # files_exist ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.files_exist +.. automethod:: nf_core.pipelines.lint.PipelineLint.files_exist ``` diff --git a/docs/api/_src/pipeline_lint_tests/files_unchanged.md b/docs/api/_src/pipeline_lint_tests/files_unchanged.md index 2f3b04fe1..3f626e5b8 100644 --- a/docs/api/_src/pipeline_lint_tests/files_unchanged.md +++ b/docs/api/_src/pipeline_lint_tests/files_unchanged.md @@ -1,5 +1,5 @@ # files_unchanged ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.files_unchanged +.. automethod:: nf_core.pipelines.lint.PipelineLint.files_unchanged ``` diff --git a/docs/api/_src/pipeline_lint_tests/included_configs.md b/docs/api/_src/pipeline_lint_tests/included_configs.md new file mode 100644 index 000000000..f68f7da25 --- /dev/null +++ b/docs/api/_src/pipeline_lint_tests/included_configs.md @@ -0,0 +1,5 @@ +# included_configs + + ```{eval-rst} + .. automethod:: nf_core.pipelines.lint.PipelineLint.included_configs + ``` diff --git a/docs/api/_src/pipeline_lint_tests/index.md b/docs/api/_src/pipeline_lint_tests/index.md index c631610d6..4dd93442d 100644 --- a/docs/api/_src/pipeline_lint_tests/index.md +++ b/docs/api/_src/pipeline_lint_tests/index.md @@ -1,8 +1,27 @@ -# Pipeline lint tests +# Pipeline Lint Tests -```{toctree} -:glob: true -:maxdepth: 1 - -* -``` + - [actions_awsfulltest](./actions_awsfulltest/) + - [actions_awstest](./actions_awstest/) + - [actions_ci](./actions_ci/) + - [actions_schema_validation](./actions_schema_validation/) + - [base_config](./base_config/) + - [files_exist](./files_exist/) + - [files_unchanged](./files_unchanged/) + - [included_configs](./included_configs/) + - [merge_markers](./merge_markers/) + - [modules_config](./modules_config/) + - [modules_json](./modules_json/) + - [modules_structure](./modules_structure/) + - [multiqc_config](./multiqc_config/) + - [nextflow_config](./nextflow_config/) + - [nfcore_yml](./nfcore_yml/) + - [pipeline_name_conventions](./pipeline_name_conventions/) + - [pipeline_todos](./pipeline_todos/) + - [plugin_includes](./plugin_includes/) + - [readme](./readme/) + - [schema_description](./schema_description/) + - [schema_lint](./schema_lint/) + - [schema_params](./schema_params/) + - [system_exit](./system_exit/) + - [template_strings](./template_strings/) + - [version_consistency](./version_consistency/) diff --git a/docs/api/_src/pipeline_lint_tests/merge_markers.md b/docs/api/_src/pipeline_lint_tests/merge_markers.md index b52fdba1a..7b620b4af 100644 --- a/docs/api/_src/pipeline_lint_tests/merge_markers.md +++ b/docs/api/_src/pipeline_lint_tests/merge_markers.md @@ -1,5 +1,5 @@ # merge_markers ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.merge_markers +.. automethod:: nf_core.pipelines.lint.PipelineLint.merge_markers ``` diff --git a/docs/api/_src/pipeline_lint_tests/modules_config.md b/docs/api/_src/pipeline_lint_tests/modules_config.md index 2a4f51c5a..c8eac0cf9 100644 --- a/docs/api/_src/pipeline_lint_tests/modules_config.md +++ b/docs/api/_src/pipeline_lint_tests/modules_config.md @@ -1,5 +1,5 @@ # modules_config ```{eval-rst} -.. 
automethod:: nf_core.lint.PipelineLint.modules_config +.. automethod:: nf_core.pipelines.lint.PipelineLint.modules_config ``` diff --git a/docs/api/_src/pipeline_lint_tests/modules_json.md b/docs/api/_src/pipeline_lint_tests/modules_json.md index 0c0ba71a8..3070f83f5 100644 --- a/docs/api/_src/pipeline_lint_tests/modules_json.md +++ b/docs/api/_src/pipeline_lint_tests/modules_json.md @@ -1,5 +1,5 @@ -# nextflow_config +# modules_json ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.modules_json +.. automethod:: nf_core.pipelines.lint.PipelineLint.modules_json ``` diff --git a/docs/api/_src/pipeline_lint_tests/modules_structure.md b/docs/api/_src/pipeline_lint_tests/modules_structure.md index faa39ca77..8a8391885 100644 --- a/docs/api/_src/pipeline_lint_tests/modules_structure.md +++ b/docs/api/_src/pipeline_lint_tests/modules_structure.md @@ -1,5 +1,5 @@ # modules_structure ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.modules_structure +.. automethod:: nf_core.pipelines.lint.PipelineLint.modules_structure ``` diff --git a/docs/api/_src/pipeline_lint_tests/multiqc_config.md b/docs/api/_src/pipeline_lint_tests/multiqc_config.md index 311f6b304..281957749 100644 --- a/docs/api/_src/pipeline_lint_tests/multiqc_config.md +++ b/docs/api/_src/pipeline_lint_tests/multiqc_config.md @@ -1,5 +1,5 @@ # multiqc_config ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.multiqc_config +.. automethod:: nf_core.pipelines.lint.PipelineLint.multiqc_config ``` diff --git a/docs/api/_src/pipeline_lint_tests/nextflow_config.md b/docs/api/_src/pipeline_lint_tests/nextflow_config.md index e23e96614..98e8df257 100644 --- a/docs/api/_src/pipeline_lint_tests/nextflow_config.md +++ b/docs/api/_src/pipeline_lint_tests/nextflow_config.md @@ -1,5 +1,5 @@ # nextflow_config ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.nextflow_config +.. automethod:: nf_core.pipelines.lint.PipelineLint.nextflow_config ``` diff --git a/docs/api/_src/pipeline_lint_tests/nfcore_yml.md b/docs/api/_src/pipeline_lint_tests/nfcore_yml.md index f7e797a29..226eb4c94 100644 --- a/docs/api/_src/pipeline_lint_tests/nfcore_yml.md +++ b/docs/api/_src/pipeline_lint_tests/nfcore_yml.md @@ -1,5 +1,5 @@ # nfcore_yml ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.nfcore_yml +.. automethod:: nf_core.pipelines.lint.PipelineLint.nfcore_yml ``` diff --git a/docs/api/_src/pipeline_lint_tests/pipeline_name_conventions.md b/docs/api/_src/pipeline_lint_tests/pipeline_name_conventions.md index 0034319d3..09396b6af 100644 --- a/docs/api/_src/pipeline_lint_tests/pipeline_name_conventions.md +++ b/docs/api/_src/pipeline_lint_tests/pipeline_name_conventions.md @@ -1,5 +1,5 @@ # pipeline_name_conventions ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.pipeline_name_conventions +.. automethod:: nf_core.pipelines.lint.PipelineLint.pipeline_name_conventions ``` diff --git a/docs/api/_src/pipeline_lint_tests/pipeline_todos.md b/docs/api/_src/pipeline_lint_tests/pipeline_todos.md index 08e456ea2..8292075b9 100644 --- a/docs/api/_src/pipeline_lint_tests/pipeline_todos.md +++ b/docs/api/_src/pipeline_lint_tests/pipeline_todos.md @@ -1,5 +1,5 @@ # pipeline_todos ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.pipeline_todos +.. 
automethod:: nf_core.pipelines.lint.PipelineLint.pipeline_todos ``` diff --git a/docs/api/_src/pipeline_lint_tests/plugin_includes.md b/docs/api/_src/pipeline_lint_tests/plugin_includes.md new file mode 100644 index 000000000..48bddadc8 --- /dev/null +++ b/docs/api/_src/pipeline_lint_tests/plugin_includes.md @@ -0,0 +1,5 @@ +# plugin_includes + +```{eval-rst} +.. automethod:: nf_core.pipelines.lint.PipelineLint.plugin_includes +``` diff --git a/docs/api/_src/pipeline_lint_tests/readme.md b/docs/api/_src/pipeline_lint_tests/readme.md index 9583a56e0..bf947bb61 100644 --- a/docs/api/_src/pipeline_lint_tests/readme.md +++ b/docs/api/_src/pipeline_lint_tests/readme.md @@ -1,5 +1,5 @@ # readme ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.readme +.. automethod:: nf_core.pipelines.lint.PipelineLint.readme ``` diff --git a/docs/api/_src/pipeline_lint_tests/schema_description.md b/docs/api/_src/pipeline_lint_tests/schema_description.md index 14f756acd..0429b3cc1 100644 --- a/docs/api/_src/pipeline_lint_tests/schema_description.md +++ b/docs/api/_src/pipeline_lint_tests/schema_description.md @@ -1,5 +1,5 @@ # schema_description ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.schema_description +.. automethod:: nf_core.pipelines.lint.PipelineLint.schema_description ``` diff --git a/docs/api/_src/pipeline_lint_tests/schema_lint.md b/docs/api/_src/pipeline_lint_tests/schema_lint.md index 39be2ea65..95bd5cc2f 100644 --- a/docs/api/_src/pipeline_lint_tests/schema_lint.md +++ b/docs/api/_src/pipeline_lint_tests/schema_lint.md @@ -1,5 +1,5 @@ # schema_lint ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.schema_lint +.. automethod:: nf_core.pipelines.lint.PipelineLint.schema_lint ``` diff --git a/docs/api/_src/pipeline_lint_tests/schema_params.md b/docs/api/_src/pipeline_lint_tests/schema_params.md index 80a626e88..8c6594c57 100644 --- a/docs/api/_src/pipeline_lint_tests/schema_params.md +++ b/docs/api/_src/pipeline_lint_tests/schema_params.md @@ -1,5 +1,5 @@ # schema_params ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.schema_params +.. automethod:: nf_core.pipelines.lint.PipelineLint.schema_params ``` diff --git a/docs/api/_src/pipeline_lint_tests/system_exit.md b/docs/api/_src/pipeline_lint_tests/system_exit.md index 3d0ac20f8..9ba67d4d2 100644 --- a/docs/api/_src/pipeline_lint_tests/system_exit.md +++ b/docs/api/_src/pipeline_lint_tests/system_exit.md @@ -1,5 +1,5 @@ # system_exit ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.system_exit +.. automethod:: nf_core.pipelines.lint.PipelineLint.system_exit ``` diff --git a/docs/api/_src/pipeline_lint_tests/template_strings.md b/docs/api/_src/pipeline_lint_tests/template_strings.md index 3d03bfb25..ee334a2a2 100644 --- a/docs/api/_src/pipeline_lint_tests/template_strings.md +++ b/docs/api/_src/pipeline_lint_tests/template_strings.md @@ -1,5 +1,5 @@ # template_strings ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.template_strings +.. automethod:: nf_core.pipelines.lint.PipelineLint.template_strings ``` diff --git a/docs/api/_src/pipeline_lint_tests/version_consistency.md b/docs/api/_src/pipeline_lint_tests/version_consistency.md index e8038f853..868a34870 100644 --- a/docs/api/_src/pipeline_lint_tests/version_consistency.md +++ b/docs/api/_src/pipeline_lint_tests/version_consistency.md @@ -1,5 +1,5 @@ # version_consistency ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.version_consistency +.. 
automethod:: nf_core.pipelines.lint.PipelineLint.version_consistency ``` diff --git a/docs/api/_src/subworkflow_lint_tests/index.md b/docs/api/_src/subworkflow_lint_tests/index.md index 0ecf590c0..da8db49a7 100644 --- a/docs/api/_src/subworkflow_lint_tests/index.md +++ b/docs/api/_src/subworkflow_lint_tests/index.md @@ -1,8 +1,8 @@ -# Subworkflow lint tests +# Subworkflow Lint Tests -```{toctree} -:glob: true -:maxdepth: 1 - -* -``` + - [main_nf](./main_nf/) + - [meta_yml](./meta_yml/) + - [subworkflow_changes](./subworkflow_changes/) + - [subworkflow_tests](./subworkflow_tests/) + - [subworkflow_todos](./subworkflow_todos/) + - [subworkflow_version](./subworkflow_version/) diff --git a/docs/api/generate-api-docs.sh b/docs/api/generate-api-docs.sh index 6b3c3abfa..cc65e1bb1 100644 --- a/docs/api/generate-api-docs.sh +++ b/docs/api/generate-api-docs.sh @@ -28,7 +28,7 @@ done # Set the output directory if not set if [[ -z "$output_dir" ]]; then - output_dir="../src/content/tools/docs" + output_dir="../sites/docs/src/content/api_reference" fi # if no release is specified, use all releases diff --git a/docs/api/make_lint_md.py b/docs/api/make_lint_md.py index 35e38a55c..432f0d16b 100644 --- a/docs/api/make_lint_md.py +++ b/docs/api/make_lint_md.py @@ -1,53 +1,57 @@ #!/usr/bin/env python +from pathlib import Path -import fnmatch -import os - -import nf_core.lint import nf_core.modules.lint +import nf_core.pipelines.lint import nf_core.subworkflows.lint -def make_docs(docs_basedir, lint_tests, md_template): - # Get list of existing .md files - existing_docs = [] - for fn in os.listdir(docs_basedir): - if fnmatch.fnmatch(fn, "*.md") and not fnmatch.fnmatch(fn, "index.md"): - existing_docs.append(os.path.join(docs_basedir, fn)) +def create_docs(docs_basedir, lint_tests, md_template): + docs_basedir.mkdir(parents=True, exist_ok=True) + existing_docs = list(docs_basedir.glob("*.md")) + existing_docs.remove(docs_basedir / "index.md") for test_name in lint_tests: - fn = os.path.join(docs_basedir, f"{test_name}.md") - if os.path.exists(fn): + fn = docs_basedir / f"{test_name}.md" + if fn.exists(): existing_docs.remove(fn) else: with open(fn, "w") as fh: fh.write(md_template.format(test_name)) for fn in existing_docs: - os.remove(fn) + fn.unlink() + + +def create_index_file(basedir, title): + index_file = basedir / "index.md" + with open(index_file, "w") as fh: + fh.write(f"# {title}\n\n") + for fn in sorted(basedir.glob("*.md")): + if fn.name != "index.md": + fh.write(f" - [{fn.stem}](./{fn.stem}/)\n") # Create the pipeline docs -pipeline_docs_basedir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "_src", "pipeline_lint_tests") -make_docs( - pipeline_docs_basedir, - nf_core.lint.PipelineLint._get_all_lint_tests(True), +pipeline_lint_docs_basedir = Path(__file__).resolve().parent / "_src" / "pipeline_lint_tests" +create_docs( + pipeline_lint_docs_basedir, + nf_core.pipelines.lint.PipelineLint._get_all_lint_tests(True), """# {0} -```{{eval-rst}} -.. automethod:: nf_core.lint.PipelineLint.{0} -``` -""", + ```{{eval-rst}} + .. 
automethod:: nf_core.pipelines.lint.PipelineLint.{0} + ``` + """, ) +create_index_file(pipeline_lint_docs_basedir, "Pipeline Lint Tests") -# Create the modules lint docs -modules_docs_basedir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "_src", "module_lint_tests") -make_docs( - modules_docs_basedir, - list( - set(nf_core.modules.lint.ModuleLint.get_all_module_lint_tests(is_pipeline=True)).union( - nf_core.modules.lint.ModuleLint.get_all_module_lint_tests(is_pipeline=False) - ) +# Create the modules docs +modules_lint_docs_basedir = Path(__file__).resolve().parent / "_src" / "module_lint_tests" +create_docs( + modules_lint_docs_basedir, + set(nf_core.modules.lint.ModuleLint.get_all_module_lint_tests(is_pipeline=True)).union( + nf_core.modules.lint.ModuleLint.get_all_module_lint_tests(is_pipeline=False) ), """# {0} @@ -56,15 +60,14 @@ def make_docs(docs_basedir, lint_tests, md_template): ``` """, ) +create_index_file(modules_lint_docs_basedir, "Module Lint Tests") -# Create the subworkflows lint docs -subworkflows_docs_basedir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "_src", "subworkflow_lint_tests") -make_docs( - subworkflows_docs_basedir, - list( - set(nf_core.subworkflows.lint.SubworkflowLint.get_all_subworkflow_lint_tests(is_pipeline=True)).union( - nf_core.subworkflows.lint.SubworkflowLint.get_all_subworkflow_lint_tests(is_pipeline=False) - ) +# Create the subworkflow docs +subworkflow_lint_docs_basedir = Path(__file__).resolve().parent / "_src" / "subworkflow_lint_tests" +create_docs( + subworkflow_lint_docs_basedir, + set(nf_core.subworkflows.lint.SubworkflowLint.get_all_subworkflow_lint_tests(is_pipeline=True)).union( + nf_core.subworkflows.lint.SubworkflowLint.get_all_subworkflow_lint_tests(is_pipeline=False) ), """# {0} @@ -73,3 +76,4 @@ def make_docs(docs_basedir, lint_tests, md_template): ``` """, ) +create_index_file(subworkflow_lint_docs_basedir, "Subworkflow Lint Tests") diff --git a/docs/api/requirements.txt b/docs/api/requirements.txt index abffe3074..1d23f0b27 100644 --- a/docs/api/requirements.txt +++ b/docs/api/requirements.txt @@ -1,3 +1,4 @@ +autodoc_pydantic Sphinx>=3.3.1 sphinxcontrib-napoleon sphinx-markdown-builder diff --git a/docs/images/nf-core-bump-version.svg b/docs/images/nf-core-bump-version.svg deleted file mode 100644 index 2ed97e79a..000000000 --- a/docs/images/nf-core-bump-version.svg +++ /dev/null @@ -1,187 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core bump-version 1.1 - -                                          ,--./,-. 
-          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.14.1 - https://nf-co.re - - -INFO     Changing version number from '1.0dev' to '1.1' -INFO     Updated version in 'nextflow.config' - - version         = '1.0dev' - + version         = '1.1' - - -INFO     Updated version in 'assets/multiqc_config.yml' - - This report has been generated by the <a  -href="https://github.com/nf-core/nextbigthing/tree/dev" target="_blank">nf-core/nextbigthing</a> - + This report has been generated by the <a  -href="https://github.com/nf-core/nextbigthing/releases/tag/1.1"  -target="_blank">nf-core/nextbigthing</a> - - -INFO     Updated version in 'assets/multiqc_config.yml' - - <a href="https://nf-co.re/nextbigthing/dev/docs/output"  -target="_blank">documentation</a>. - + <a href="https://nf-co.re/nextbigthing/1.1/docs/output"  -target="_blank">documentation</a>. - - - - - - diff --git a/docs/images/nf-core-create-logo.svg b/docs/images/nf-core-create-logo.svg deleted file mode 100644 index 8313bf8a2..000000000 --- a/docs/images/nf-core-create-logo.svg +++ /dev/null @@ -1,107 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core create-logo nextbigthing - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.14.1 - https://nf-co.re - - -INFO     Created logo: nf-core-nextbigthing_logo_light.png - - - - diff --git a/docs/images/nf-core-create.svg b/docs/images/nf-core-create.svg deleted file mode 100644 index cd25e8805..000000000 --- a/docs/images/nf-core-create.svg +++ /dev/null @@ -1,162 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core create -n nextbigthing -d "This pipeline analyses data from the next big omics technique"  --a "Big Steve" --plain - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.14.1 - https://nf-co.re - - -INFO     Creating new nf-core pipeline: 'nf-core/nextbigthing' -INFO     Initialising pipeline git repository                                                        -INFO     Done. Remember to add a remote and push to GitHub:                                          - cd /home/runner/work/tools/tools/tmp/nf-core-nextbigthing - git remote add origin git@github.com:USERNAME/REPO_NAME.git  - git push --all origin                                        -INFO     This will also push your newly created dev branch and the TEMPLATE branch for syncing.      -INFO    !!!!!! IMPORTANT !!!!!! - -If you are interested in adding your pipeline to the nf-core community, -PLEASE COME AND TALK TO US IN THE NF-CORE SLACK BEFORE WRITING ANY CODE! 
- -Please read: https://nf-co.re/developers/adding_pipelines#join-the-community - - - - diff --git a/docs/images/nf-core-download-tree.svg b/docs/images/nf-core-download-tree.svg deleted file mode 100644 index fc9585c8c..000000000 --- a/docs/images/nf-core-download-tree.svg +++ /dev/null @@ -1,190 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ tree -L 2 nf-core-rnaseq/ -nf-core-rnaseq/ -├── 3_8 -│   ├── CHANGELOG.md -│   ├── CITATIONS.md -│   ├── CODE_OF_CONDUCT.md -│   ├── LICENSE -│   ├── README.md -│   ├── assets -│   ├── bin -│   ├── conf -│   ├── docs -│   ├── lib -│   ├── main.nf -│   ├── modules -│   ├── modules.json -│   ├── nextflow.config -│   ├── nextflow_schema.json -│   ├── subworkflows -│   ├── tower.yml -│   └── workflows -└── configs -    ├── CITATION.cff -    ├── LICENSE -    ├── README.md -    ├── bin -    ├── conf -    ├── configtest.nf -    ├── docs -    ├── nextflow.config -    ├── nfcore_custom.config -    └── pipeline - -14 directories, 16 files - - - - diff --git a/docs/images/nf-core-download.svg b/docs/images/nf-core-download.svg deleted file mode 100644 index 98e99944e..000000000 --- a/docs/images/nf-core-download.svg +++ /dev/null @@ -1,139 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core download rnaseq -r 3.8 --outdir nf-core-rnaseq -x none -s none -d - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.14.1 - https://nf-co.re - - -WARNING  Could not find GitHub authentication token. Some API requests may fail.                     -INFO     Saving 'nf-core/rnaseq' -          Pipeline revision: '3.8' -          Use containers: 'none' -          Container library: 'quay.io' -          Output directory: 'nf-core-rnaseq' -          Include default institutional configuration: 'True' -INFO     Downloading centralised configs from GitHub                                                 -INFO     Downloading workflow files from GitHub                                                      - - - - diff --git a/docs/images/nf-core-launch-rnaseq.svg b/docs/images/nf-core-launch-rnaseq.svg deleted file mode 100644 index 0b191ff2e..000000000 --- a/docs/images/nf-core-launch-rnaseq.svg +++ /dev/null @@ -1,120 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core launch rnaseq -r 3.8.1 - -                                          ,--./,-. 
-          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.14.1 - https://nf-co.re - - -INFO     NOTE: This tool ignores any pipeline parameter defaults overwritten by Nextflow config      -         files or profiles                                                                           - -INFO     Downloading workflow: nf-core/rnaseq (3.8.1) - - - - diff --git a/docs/images/nf-core-licences.svg b/docs/images/nf-core-licences.svg deleted file mode 100644 index 74aab4fa8..000000000 --- a/docs/images/nf-core-licences.svg +++ /dev/null @@ -1,107 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core licences deepvariant - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.14.1 - https://nf-co.re - - -INFO     Fetching licence information for 8 tools                                                    - - - - diff --git a/docs/images/nf-core-lint.svg b/docs/images/nf-core-lint.svg deleted file mode 100644 index bb7b6b879..000000000 --- a/docs/images/nf-core-lint.svg +++ /dev/null @@ -1,208 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core lint - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.14.1 - https://nf-co.re - - - -INFO     Testing pipeline: . - - -╭─[?] 1 Pipeline Test Ignored────────────────────────────────────────────────────────────────────╮ - -pipeline_todos: pipeline_todos                                                                   - -╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─[!] 1 Pipeline Test Warning────────────────────────────────────────────────────────────────────╮ - -readme: README contains the placeholder zenodo.XXXXXXX. This should be replaced with the zenodo  -doi (after the first release).                                                                   - -╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ - - -╭──────────────────────╮ -LINT RESULTS SUMMARY -├──────────────────────┤ -[✔] 194 Tests Passed -[?]   1 Test Ignored -[!]   1 Test Warning -[✗]   0 Tests Failed -╰──────────────────────╯ - - - - diff --git a/docs/images/nf-core-list-rna.svg b/docs/images/nf-core-list-rna.svg deleted file mode 100644 index c2bc73b4e..000000000 --- a/docs/images/nf-core-list-rna.svg +++ /dev/null @@ -1,176 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core list rna rna-seq - -                                          ,--./,-. 
-          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.14.1 - https://nf-co.re - - -┏━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┓ -Have latest         -Pipeline Name      StarsLatest Release     ReleasedLast Pulledrelease?            -┡━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━╇━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━┩ -│ differentialabunda… │    45 │          1.5.0 │     yesterday │           - │ -                   │ -│ smrnaseq            │    68 │          2.3.1 │   3 weeks ago │           - │ -                   │ -│ rnaseq              │   785 │         3.14.0 │  4 months ago │           - │ -                   │ -│ spatialvi           │    39 │            dev │    3 days ago │           - │ -                   │ -│ scnanoseq           │     5 │            dev │   1 weeks ago │           - │ -                   │ -│ scrnaseq            │   169 │          2.6.0 │     yesterday │           - │ -                   │ -│ circrna             │    40 │            dev │   2 weeks ago │           - │ -                   │ -│ rnafusion           │   129 │          3.0.2 │   4 weeks ago │           - │ -                   │ -│ rnasplice           │    33 │          1.0.3 │  2 months ago │           - │ -                   │ -│ dualrnaseq          │    16 │          1.0.0 │   3 years ago │           - │ -                   │ -│ marsseq             │     5 │          1.0.3 │ 10 months ago │           - │ -                   │ -│ lncpipe             │    30 │            dev │   2 years ago │           - │ -                   │ -│ scflow              │    24 │            dev │   3 years ago │           - │ -                   │ -└─────────────────────┴───────┴────────────────┴───────────────┴─────────────┴─────────────────────┘ - - - - diff --git a/docs/images/nf-core-list-stars.svg b/docs/images/nf-core-list-stars.svg deleted file mode 100644 index 70cb20a71..000000000 --- a/docs/images/nf-core-list-stars.svg +++ /dev/null @@ -1,141 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core list -s stars - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.14.1 - https://nf-co.re - - -┏━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┓ -Have latest         -Pipeline Name      StarsLatest Release     ReleasedLast Pulledrelease?            -┡━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━╇━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━┩ -│ rnaseq              │   785 │         3.14.0 │  4 months ago │           - │ -                   │ -│ sarek               │   340 │          3.4.2 │     yesterday │           - │ -                   │ -│ mag                 │   182 │          2.5.4 │  3 months ago │           - │ -                   │ -│ chipseq             │   173 │          2.0.0 │   2 years ago │           - │ -                   │ -[..truncated..] 
- - - - diff --git a/docs/images/nf-core-list.svg b/docs/images/nf-core-list.svg deleted file mode 100644 index 32680c614..000000000 --- a/docs/images/nf-core-list.svg +++ /dev/null @@ -1,145 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core list - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.14.1 - https://nf-co.re - - -┏━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┓ -Have latest         -Pipeline Name      StarsLatest Release     ReleasedLast Pulledrelease?            -┡━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━╇━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━┩ -│ oncoanalyser        │    14 │            dev │  11 hours ago │           - │ -                   │ -│ metapep             │     6 │            dev │  20 hours ago │           - │ -                   │ -│ reportho            │     1 │            dev │     yesterday │           - │ -                   │ -│ differentialabunda… │    45 │          1.5.0 │     yesterday │           - │ -                   │ -│ smrnaseq            │    68 │          2.3.1 │   3 weeks ago │           - │ -                   │ -[..truncated..] - - - - diff --git a/docs/images/nf-core-logo.png b/docs/images/nf-core-logo.png deleted file mode 100644 index 95a519194..000000000 Binary files a/docs/images/nf-core-logo.png and /dev/null differ diff --git a/docs/images/nf-core-modules-bump-version.svg b/docs/images/nf-core-modules-bump-version.svg deleted file mode 100644 index c71a3091f..000000000 --- a/docs/images/nf-core-modules-bump-version.svg +++ /dev/null @@ -1,144 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core modules bump-versions fastqc - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.14.1 - https://nf-co.re - - - - -╭──────────────────────────────────────────────────────────────────────────────────────────────────╮ -[!] 1 Module version up to date. 
-╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭──────────────────────────────────────────┬───────────────────────────────────────────────────────╮ -Module name                             Update Message                                        -├──────────────────────────────────────────┼───────────────────────────────────────────────────────┤ - fastqc                                    Module version up to date: fastqc                      -╰──────────────────────────────────────────┴───────────────────────────────────────────────────────╯ - - - - diff --git a/docs/images/nf-core-modules-create.svg b/docs/images/nf-core-modules-create.svg deleted file mode 100644 index 973d5eed0..000000000 --- a/docs/images/nf-core-modules-create.svg +++ /dev/null @@ -1,123 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core modules create fastqc --author @nf-core-bot  --label process_low --meta --force - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.14.1 - https://nf-co.re - - - -INFO     Repository type: modules -INFO    Press enter to use default values (shown in brackets)or type your own responses.  -ctrl+click underlined text to open links. -INFO     Using Bioconda package: 'bioconda::fastqc=0.12.1' - - - - diff --git a/docs/images/nf-core-modules-info.svg b/docs/images/nf-core-modules-info.svg deleted file mode 100644 index cc0cf02fd..000000000 --- a/docs/images/nf-core-modules-info.svg +++ /dev/null @@ -1,240 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core modules info abacas - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.14.1 - https://nf-co.re - - - -╭─ Module: abacas  ────────────────────────────────────────────────────────────────────────────────╮ -│ 🌐 Repository: https://github.com/nf-core/modules.git                                            │ -│ 🔧 Tools: abacas                                                                                 │ -│ 📖 Description: contiguate draft genome assembly                                                 │ -╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ -                  ╷                                                                   ╷              -📥 Inputs        Description                                                             Pattern -╺━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━╸ - meta  (map)     │Groovy Map containing sample information e.g. 
[ id:'test',         │ -                  │single_end:false ]                                                 │ -╶─────────────────┼───────────────────────────────────────────────────────────────────┼────────────╴ - scaffold  (file)│Fasta file containing scaffold                                     │*.{fasta,fa} -╶─────────────────┼───────────────────────────────────────────────────────────────────┼────────────╴ - fasta  (file)   │FASTA reference file                                               │*.{fasta,fa} -                  ╵                                                                   ╵              -                  ╷                                                                   ╷              -📤 Outputs       Description                                                             Pattern -╺━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━╸ - meta  (map)     │Groovy Map containing sample information e.g. [ id:'test',         │ -                  │single_end:false ]                                                 │ -╶─────────────────┼───────────────────────────────────────────────────────────────────┼────────────╴ - results  (file) │List containing abacas output files [ 'test.abacas.bin',           │ *.{abacas}* -                  │'test.abacas.fasta', 'test.abacas.gaps', 'test.abacas.gaps.tab',   │ -                  │'test.abacas.nucmer.delta', 'test.abacas.nucmer.filtered.delta',   │ -                  │'test.abacas.nucmer.tiling', 'test.abacas.tab',                    │ -                  │'test.abacas.unused.contigs.out', 'test.abacas.MULTIFASTA.fa' ]    │ -╶─────────────────┼───────────────────────────────────────────────────────────────────┼────────────╴ - versions  (file)│File containing software versions                                  │versions.yml -                  ╵                                                                   ╵              - - 💻  Installation command: nf-core modules install abacas - - - - - diff --git a/docs/images/nf-core-modules-install.svg b/docs/images/nf-core-modules-install.svg deleted file mode 100644 index 975938bdc..000000000 --- a/docs/images/nf-core-modules-install.svg +++ /dev/null @@ -1,126 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core modules install abacas - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.14.1 - https://nf-co.re - - - -INFO     Installing 'abacas' -INFO     Use the following statement to include this module:                                         - - include { ABACAS } from '../modules/nf-core/abacas/main'                                            - - - - - diff --git a/docs/images/nf-core-modules-lint.svg b/docs/images/nf-core-modules-lint.svg deleted file mode 100644 index 692069b4f..000000000 --- a/docs/images/nf-core-modules-lint.svg +++ /dev/null @@ -1,114 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core modules lint multiqc - -                                          ,--./,-. 
-    nf-core/tools version 2.14.1 - https://nf-co.re
-INFO     Linting modules repo: '.'
-INFO     Linting module: 'multiqc'
diff --git a/docs/images/nf-core-modules-list-local.svg b/docs/images/nf-core-modules-list-local.svg
deleted file mode 100644
index ab9cc28c0..000000000
[deleted screengrab of `$ nf-core modules list local`: table of modules installed in '.' (fastqc, SHA 285a505, "Fix FastQC memory allocation (#5432)", 2024-04-05; multiqc, SHA b7ebe95, "Update MQC container (#5006)", 2024-02-29)]
diff --git a/docs/images/nf-core-modules-list-remote.svg b/docs/images/nf-core-modules-list-remote.svg
deleted file mode 100644
index dce6553c9..000000000
[deleted screengrab of `$ nf-core modules list remote`: truncated list of modules available from https://github.com/nf-core/modules.git (master), starting abacas, abricate/run, abricate/summary, adapterremoval, ...]
diff --git a/docs/images/nf-core-modules-patch.svg b/docs/images/nf-core-modules-patch.svg
deleted file mode 100644
index 73358ec2f..000000000
[deleted screengrab of `$ nf-core modules patch fastqc`: diff of 'fastqc/main.nf' changing label 'process_medium' to 'process_low'; other module files unchanged; patch written to 'modules/nf-core/fastqc/fastqc.diff']
diff --git a/docs/images/nf-core-modules-remove.svg b/docs/images/nf-core-modules-remove.svg
deleted file mode 100644
index 7c2db6031..000000000
[deleted screengrab of `$ nf-core modules remove abacas`: removed files for 'abacas' and its dependencies]
diff --git a/docs/images/nf-core-modules-test.svg b/docs/images/nf-core-modules-test.svg
deleted file mode 100644
index f72c920ef..000000000
[deleted screengrab of `$ nf-core modules test fastqc --no-prompts`: INFO Generating nf-test snapshot]
diff --git a/docs/images/nf-core-modules-update.svg b/docs/images/nf-core-modules-update.svg
deleted file mode 100644
index c4800530e..000000000
[deleted screengrab of `$ nf-core modules update --all --no-preview`: 'abacas', 'fastqc' and 'multiqc' already up to date; Updates complete ✨]
diff --git a/docs/images/nf-core-schema-build.svg b/docs/images/nf-core-schema-build.svg
deleted file mode 100644
index 646243917..000000000
[deleted screengrab of `$ nf-core schema build --no-prompts`: default parameters match schema validation; pipeline schema looks valid (found 31 params); writing schema with 32 params to 'nextflow_schema.json']
diff --git a/docs/images/nf-core-schema-lint.svg b/docs/images/nf-core-schema-lint.svg
deleted file mode 100644
index 538c162a5..000000000
[deleted screengrab of `$ nf-core schema lint`: default parameters match schema validation; pipeline schema looks valid (found 32 params)]
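The schema screengrabs summarised above and below all exercise the same mechanism: a pipeline's nextflow_schema.json is an ordinary JSON Schema document, and parameter sets are validated against it. As a rough, self-contained sketch of that check using the jsonschema package (which nf-core/tools depends on), with a deliberately tiny, invented schema and params:

    # Sketch of what `nf-core schema validate` boils down to: validating a
    # params dict against the pipeline's nextflow_schema.json (a JSON Schema
    # document). Schema and params here are invented for illustration.
    from jsonschema import Draft7Validator

    schema = {
        "$schema": "http://json-schema.org/draft-07/schema",
        "type": "object",
        "properties": {
            "input": {"type": "string", "pattern": r".*\.csv$"},
            "max_cpus": {"type": "integer", "minimum": 1},
        },
        "required": ["input"],
    }

    params = {"input": "samplesheet.csv", "max_cpus": 16}

    validator = Draft7Validator(schema)
    errors = list(validator.iter_errors(params))
    if errors:
        for error in errors:
            print(f"[x] {error.message}")
    else:
        print("[ok] Input parameters look valid")

A real pipeline schema also carries nf-core-specific conventions (parameter groups, hidden params, help text), which is what the lint and build commands layer on top of plain JSON Schema validation.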
diff --git a/docs/images/nf-core-schema-validate.svg b/docs/images/nf-core-schema-validate.svg
deleted file mode 100644
index 71de9f660..000000000
[deleted screengrab of `$ nf-core schema validate nf-core-rnaseq/3_8 nf-params.json`: default parameters match schema validation; pipeline schema looks valid (found 93 params); input parameters look valid]
diff --git a/docs/images/nf-core-subworkflows-create.svg b/docs/images/nf-core-subworkflows-create.svg
deleted file mode 100644
index ddf50e89f..000000000
[deleted screengrab of `$ nf-core subworkflows create bam_stats_samtools --author @nf-core-bot --force`: created component template 'bam_stats_samtools' with main.nf, meta.yml, tests/tags.yml and tests/main.nf.test]
diff --git a/docs/images/nf-core-subworkflows-info.svg b/docs/images/nf-core-subworkflows-info.svg
deleted file mode 100644
index 83eda7387..000000000
[deleted screengrab of `$ nf-core subworkflows info bam_rseqc`: description panel ("Subworkflow to run multiple commands in the RSeqC package"), truncated outputs table (readduplication_rscript, tin_txt, versions) and the installation command `nf-core subworkflows install bam_rseqc`]
diff --git a/docs/images/nf-core-subworkflows-install.svg b/docs/images/nf-core-subworkflows-install.svg
deleted file mode 100644
index ee5ef0ec2..000000000
[deleted screengrab of `$ nf-core subworkflows install bam_rseqc`: INFO Installing 'bam_rseqc']
diff --git a/docs/images/nf-core-subworkflows-lint.svg b/docs/images/nf-core-subworkflows-lint.svg
deleted file mode 100644
index 08135d4e7..000000000
[deleted screengrab of `$ nf-core subworkflows lint bam_stats_samtools`: 15 test warnings for leftover TODO strings in main.nf, main.nf.test and meta.yml, plus 'versions not found in snapshot file'; summary: 42 tests passed, 15 test warnings, 0 tests failed]
diff --git a/docs/images/nf-core-subworkflows-list-local.svg b/docs/images/nf-core-subworkflows-list-local.svg
deleted file mode 100644
index 85b5237f6..000000000
[deleted screengrab of `$ nf-core subworkflows list local`: truncated table of subworkflows installed in '.' (utils_nextflow_pipeline, utils_nfcore_pipeline, utils_nfvalidation_plugin) with repository, version SHA, message and date]
diff --git a/docs/images/nf-core-subworkflows-list-remote.svg b/docs/images/nf-core-subworkflows-list-remote.svg
deleted file mode 100644
index 8b7d4e803..000000000
[deleted screengrab of `$ nf-core subworkflows list remote`: truncated list of subworkflows available from https://github.com/nf-core/modules.git (master), starting bam_cnv_wisecondorx, bam_create_som_pon_gatk, ...]
diff --git a/docs/images/nf-core-subworkflows-remove.svg b/docs/images/nf-core-subworkflows-remove.svg
deleted file mode 100644
index 9da913508..000000000
[deleted screengrab of `$ nf-core subworkflows remove bam_rseqc`: removed files for 'rseqc/bamstat', 'rseqc/inferexperiment', 'rseqc/innerdistance', 'rseqc/junctionannotation' and for 'bam_rseqc' and its dependencies]
diff --git a/docs/images/nf-core-subworkflows-test.svg b/docs/images/nf-core-subworkflows-test.svg
deleted file mode 100644
index cbd67278a..000000000
[deleted screengrab of `$ nf-core subworkflows test bam_rseqc --no-prompts`: INFO Generating nf-test snapshot]
diff --git a/docs/images/nf-core-subworkflows-update.svg b/docs/images/nf-core-subworkflows-update.svg
deleted file mode 100644
index d6da3d97a..000000000
[deleted screengrab of `$ nf-core subworkflows update --all --no-preview`: ERROR 'rseqc/junctionsaturation']
diff --git a/docs/images/nf-core-sync.svg b/docs/images/nf-core-sync.svg
deleted file mode 100644
index d5c5b666c..000000000
[deleted screengrab of `$ nf-core sync`: WARNING could not find GitHub authentication token, some API requests may fail; INFO pipeline directory /home/runner/work/tools/tools/tmp/nf-core-nextbigthing; INFO original pipeline repository branch is 'master']
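The nf_core/__main__.py hunks that follow reorganise the CLI so that every Click command is a thin wrapper delegating to a plain function imported from nf_core.commands_pipelines, nf_core.commands_modules or nf_core.commands_subworkflows. A minimal runnable sketch of that pattern is below; the delegated function is inlined here and its body is invented, while the group/command layout mirrors the diff:

    # Sketch of the delegation pattern introduced by the refactor: Click
    # commands in __main__.py stay thin and forward to standalone functions.
    # In the real code the delegate lives in nf_core.commands_pipelines; the
    # stand-in body here is invented for illustration.
    import sys

    import click


    def pipelines_lint(directory, fail_warned):
        """Stand-in for the delegated command function; returns an exit code."""
        click.echo(f"Linting pipeline in {directory!r} (fail_warned={fail_warned})")
        return 0


    @click.group()
    def cli():
        """Top-level nf-core CLI group."""


    @cli.group()
    def pipelines():
        """Commands to manage nf-core pipelines."""


    @pipelines.command("lint")
    @click.option("-d", "--dir", "directory", default=".")
    @click.option("-w", "--fail-warned", is_flag=True)
    def command_pipelines_lint(directory, fail_warned):
        """Thin wrapper: all logic lives in the delegated function."""
        sys.exit(pipelines_lint(directory, fail_warned))


    if __name__ == "__main__":
        cli()

Keeping __main__.py to option parsing only means the command implementations can be imported and tested without going through the CLI.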
diff --git a/docs/images/nfcore-tools_logo_dark.png b/docs/images/nfcore-tools_logo_dark.png
deleted file mode 100644
index 1b9cc02b1..000000000
Binary files a/docs/images/nfcore-tools_logo_dark.png and /dev/null differ
diff --git a/docs/images/nfcore-tools_logo_light.png b/docs/images/nfcore-tools_logo_light.png
deleted file mode 100644
index cc4ccea1c..000000000
Binary files a/docs/images/nfcore-tools_logo_light.png and /dev/null differ
diff --git a/mypy.ini b/mypy.ini
index c48aa5884..5a9522316 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -1,3 +1,4 @@
 [mypy]
 warn_unused_configs = True
 ignore_missing_imports = true
+plugins = pydantic.mypy
diff --git a/nf_core/__main__.py b/nf_core/__main__.py
index 147b0586b..08589fc24 100644
--- a/nf_core/__main__.py
+++ b/nf_core/__main__.py
@@ -4,7 +4,6 @@
 import logging
 import os
 import sys
-from pathlib import Path

 import rich
 import rich.console
@@ -14,10 +13,48 @@
 from trogon import tui

 from nf_core import __version__
-from nf_core.download import DownloadError
-from nf_core.modules.modules_repo import NF_CORE_MODULES_REMOTE
-from nf_core.params_file import ParamsFileBuilder
-from nf_core.utils import check_if_outdated, rich_force_colors, setup_nfcore_dir
+from nf_core.commands_modules import (
+    modules_bump_versions,
+    modules_create,
+    modules_info,
+    modules_install,
+    modules_lint,
+    modules_list_local,
+    modules_list_remote,
+    modules_patch,
+    modules_remove,
+    modules_test,
+    modules_update,
+)
+from nf_core.commands_pipelines import (
+    pipelines_bump_version,
+    pipelines_create,
+    pipelines_create_logo,
+    pipelines_create_params_file,
+    pipelines_download,
+    pipelines_launch,
+    pipelines_lint,
+    pipelines_list,
+    pipelines_schema_build,
+    pipelines_schema_docs,
+    pipelines_schema_lint,
+    pipelines_schema_validate,
+    pipelines_sync,
+)
+from nf_core.commands_subworkflows import (
+    subworkflows_create,
+    subworkflows_info,
+    subworkflows_install,
+    subworkflows_lint,
+    subworkflows_list_local,
+    subworkflows_list_remote,
+    subworkflows_remove,
+    subworkflows_test,
+    subworkflows_update,
+)
+from nf_core.components.components_utils import NF_CORE_MODULES_REMOTE
+from nf_core.pipelines.download import DownloadError
+from nf_core.utils import check_if_outdated, nfcore_logo, rich_force_colors, setup_nfcore_dir

 # Set up logging as the root logger
 # Submodules should all traverse back to this
@@ -32,30 +69,25 @@
 click.rich_click.COMMAND_GROUPS = {
     "nf-core": [
         {
-            "name": "Commands for users",
+            "name": "Commands",
             "commands": [
-                "list",
-                "launch",
-                "create-params-file",
-                "download",
-                "licences",
-                "tui",
-            ],
-        },
-        {
-            "name": "Commands for developers",
-            "commands": [
-                "create",
-                "lint",
+                "pipelines",
                 "modules",
                 "subworkflows",
-                "schema",
-                "create-logo",
-                "bump-version",
-                "sync",
+                "interface",
             ],
         },
     ],
+    "nf-core pipelines": [
+        {
+            "name": "For users",
+            "commands": ["list", "launch", "download", "create-params-file"],
+        },
+        {
+            "name": "For developers",
+            "commands": ["create", "lint", "bump-version", "sync", "schema",
"create-logo"], + }, + ], "nf-core modules": [ { "name": "For pipelines", @@ -63,19 +95,20 @@ }, { "name": "Developing new modules", - "commands": ["create", "lint", "bump-versions", "test"], + "commands": ["create", "lint", "test", "bump-versions"], }, ], "nf-core subworkflows": [ { "name": "For pipelines", - "commands": ["info", "install", "list", "remove", "update"], + "commands": ["list", "info", "install", "update", "remove"], }, { "name": "Developing new subworkflows", - "commands": ["create", "test", "lint"], + "commands": ["create", "lint", "test"], }, ], + "nf-core pipelines schema": [{"name": "Schema commands", "commands": ["validate", "build", "lint", "docs"]}], } click.rich_click.OPTION_GROUPS = { "nf-core modules list local": [{"options": ["--dir", "--json", "--help"]}], @@ -93,7 +126,7 @@ # because they are actually preliminary, but intended program terminations. # (Custom exceptions are cleaner than `sys.exit(1)`, which we used before) def selective_traceback_hook(exctype, value, traceback): - if exctype in {DownloadError}: # extend set as needed + if exctype in {DownloadError, UserWarning, ValueError}: # extend set as needed log.error(value) else: # print the colored traceback for all other exceptions with rich as usual @@ -111,29 +144,26 @@ def normalize_case(ctx, param, component_name): return component_name.casefold() +# Define a custom click group class to sort options and commands in the help message +# TODO: Remove this class and use COMMANDS_BEFORE_OPTIONS when rich-click is updated +# See https://github.com/ewels/rich-click/issues/200 for more information +class CustomRichGroup(click.RichGroup): + def format_options(self, ctx, formatter) -> None: + from rich_click.rich_help_rendering import get_rich_options + + self.format_commands(ctx, formatter) + get_rich_options(self, ctx, formatter) + + def run_nf_core(): # print nf-core header if environment variable is not set if os.environ.get("_NF_CORE_COMPLETE") is None: # Print nf-core header - stderr.print(f"\n[green]{' ' * 42},--.[grey39]/[green],-.", highlight=False) - stderr.print( - "[blue] ___ __ __ __ ___ [green]/,-._.--~\\", - highlight=False, - ) - stderr.print( - r"[blue] |\ | |__ __ / ` / \ |__) |__ [yellow] } {", - highlight=False, - ) - stderr.print( - r"[blue] | \| | \__, \__/ | \ |___ [green]\`-._,-`-,", - highlight=False, - ) - stderr.print( - "[green] `._,._,'\n", - highlight=False, - ) + stderr.print("\n") + for line in nfcore_logo: + stderr.print(line, highlight=False) stderr.print( - f"[grey39] nf-core/tools version {__version__} - [link=https://nf-co.re]https://nf-co.re[/]", + f"\n[grey39] nf-core/tools version {__version__} - [link=https://nf-co.re]https://nf-co.re[/]", highlight=False, ) try: @@ -150,8 +180,11 @@ def run_nf_core(): nf_core_cli(auto_envvar_prefix="NFCORE") -@tui() -@click.group(context_settings=dict(help_option_names=["-h", "--help"])) +@tui( + command="interface", + help="Launch the nf-core interface", +) +@click.group(context_settings=dict(help_option_names=["-h", "--help"]), cls=CustomRichGroup) @click.version_option(__version__) @click.option( "-v", @@ -200,157 +233,124 @@ def nf_core_cli(ctx, verbose, hide_progress, log_file): } -# nf-core list -@nf_core_cli.command("list") -@click.argument("keywords", required=False, nargs=-1, metavar="") -@click.option( - "-s", - "--sort", - type=click.Choice(["release", "pulled", "name", "stars"]), - default="release", - help="How to sort listed pipelines", -) -@click.option("--json", is_flag=True, default=False, help="Print full output as 
JSON") -@click.option("--show-archived", is_flag=True, default=False, help="Print archived workflows") -def list_pipelines(keywords, sort, json, show_archived): +# nf-core pipelines subcommands +@nf_core_cli.group() +@click.pass_context +def pipelines(ctx): """ - List available nf-core pipelines with local info. - - Checks the web for a list of nf-core pipelines with their latest releases. - Shows which nf-core pipelines you have pulled locally and whether they are up to date. + Commands to manage nf-core pipelines. """ - from nf_core.list import list_workflows - - stdout.print(list_workflows(keywords, sort, json, show_archived)) + # ensure that ctx.obj exists and is a dict (in case `cli()` is called + # by means other than the `if` block below) + ctx.ensure_object(dict) -# nf-core launch -@nf_core_cli.command() -@click.argument("pipeline", required=False, metavar="") -@click.option("-r", "--revision", help="Release/branch/SHA of the project to run (if remote)") -@click.option("-i", "--id", help="ID for web-gui launch parameter set") +# nf-core pipelines create +@pipelines.command("create") +@click.pass_context @click.option( - "-c", - "--command-only", - is_flag=True, - default=False, - help="Create Nextflow command with params (no params file)", + "-n", + "--name", + type=str, + help="The name of your new pipeline", ) +@click.option("-d", "--description", type=str, help="A short description of your pipeline") +@click.option("-a", "--author", type=str, help="Name of the main author(s)") +@click.option("--version", type=str, default="1.0.0dev", help="The initial version number to use") +@click.option("-f", "--force", is_flag=True, default=False, help="Overwrite output directory if it already exists") +@click.option("-o", "--outdir", help="Output directory for new pipeline (default: pipeline name)") +@click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") @click.option( - "-o", - "--params-out", - type=click.Path(), - default=os.path.join(os.getcwd(), "nf-params.json"), - help="Path to save run parameters file", + "--organisation", + type=str, + default="nf-core", + help="The name of the GitHub organisation where the pipeline will be hosted (default: nf-core)", ) +def command_pipelines_create(ctx, name, description, author, version, force, outdir, template_yaml, organisation): + """ + Create a new pipeline using the nf-core template. 
+ """ + pipelines_create(ctx, name, description, author, version, force, outdir, template_yaml, organisation) + + +# nf-core pipelines lint +@pipelines.command("lint") @click.option( - "-p", - "--params-in", + "-d", + "--dir", + "directory", type=click.Path(exists=True), - help="Set of input run params to use from a previous run", + default=".", + help=r"Pipeline directory [dim]\[default: current working directory][/]", ) @click.option( - "-a", - "--save-all", + "--release", is_flag=True, - default=False, - help="Save all parameters, even if unchanged from default", + default=os.path.basename(os.path.dirname(os.environ.get("GITHUB_REF", "").strip(" '\""))) == "master" + and os.environ.get("GITHUB_REPOSITORY", "").startswith("nf-core/") + and not os.environ.get("GITHUB_REPOSITORY", "") == "nf-core/tools", + help="Execute additional checks for release-ready workflows.", ) @click.option( - "-x", - "--show-hidden", - is_flag=True, - default=False, - help="Show hidden params which don't normally need changing", + "-f", + "--fix", + type=str, + metavar="", + multiple=True, + help="Attempt to automatically fix specified lint test", ) @click.option( - "-u", - "--url", + "-k", + "--key", type=str, - default="https://nf-co.re/launch", - help="Customise the builder URL (for development work)", + metavar="", + multiple=True, + help="Run only these lint tests", ) -def launch( - pipeline, - id, - revision, - command_only, - params_in, - params_out, - save_all, - show_hidden, - url, -): - """ - Launch a pipeline using a web GUI or command line prompts. - - Uses the pipeline schema file to collect inputs for all available pipeline - parameters. Parameter names, descriptions and help text are shown. - The pipeline schema is used to validate all inputs as they are entered. - - When finished, saves a file with the selected parameters which can be - passed to Nextflow using the -params-file option. - - Run using a remote pipeline name (such as GitHub `user/repo` or a URL), - a local pipeline directory or an ID from the nf-core web launch tool. - """ - from nf_core.launch import Launch - - launcher = Launch( - pipeline, - revision, - command_only, - params_in, - params_out, - save_all, - show_hidden, - url, - id, - ) - if not launcher.launch_pipeline(): - sys.exit(1) - - -# nf-core create-params-file -@nf_core_cli.command() -@click.argument("pipeline", required=False, metavar="") -@click.option("-r", "--revision", help="Release/branch/SHA of the pipeline (if remote)") +@click.option("-p", "--show-passed", is_flag=True, help="Show passing tests on the command line") +@click.option("-i", "--fail-ignored", is_flag=True, help="Convert ignored tests to failures") +@click.option("-w", "--fail-warned", is_flag=True, help="Convert warn tests to failures") @click.option( - "-o", - "--output", + "--markdown", type=str, - default="nf-params.yml", metavar="", - help="Output filename. 
Defaults to `nf-params.yml`.", + help="File to write linting results to (Markdown)", ) -@click.option("-f", "--force", is_flag=True, default=False, help="Overwrite existing files") @click.option( - "-x", - "--show-hidden", - is_flag=True, - default=False, - help="Show hidden params which don't normally need changing", + "--json", + type=str, + metavar="", + help="File to write linting results to (JSON)", ) -def create_params_file(pipeline, revision, output, force, show_hidden): +@click.option( + "--sort-by", + type=click.Choice(["module", "test"]), + default="test", + help="Sort lint output by module or test name.", + show_default=True, +) +@click.pass_context +def command_pipelines_lint( + ctx, + directory, + release, + fix, + key, + show_passed, + fail_ignored, + fail_warned, + markdown, + json, + sort_by, +): """ - Build a parameter file for a pipeline. - - Uses the pipeline schema file to generate a YAML parameters file. - Parameters are set to the pipeline defaults and descriptions are shown in comments. - After the output file is generated, it can then be edited as needed before - passing to nextflow using the `-params-file` option. - - Run using a remote pipeline name (such as GitHub `user/repo` or a URL), - a local pipeline directory. + Check pipeline code against nf-core guidelines. """ - builder = ParamsFileBuilder(pipeline, revision) + pipelines_lint(ctx, directory, release, fix, key, show_passed, fail_ignored, fail_warned, markdown, json, sort_by) - if not builder.write_params_file(output, show_hidden=show_hidden, force=force): - sys.exit(1) - -# nf-core download -@nf_core_cli.command() +# nf-core pipelines download +@pipelines.command("download") @click.argument("pipeline", required=False, metavar="") @click.option( "-r", @@ -366,26 +366,18 @@ def create_params_file(pipeline, revision, output, force, show_hidden): help="Archive compression type", ) @click.option("-f", "--force", is_flag=True, default=False, help="Overwrite existing files") -# TODO: Remove this in a future release. Deprecated in March 2024. -@click.option( - "-t", - "--tower", - is_flag=True, - default=False, - hidden=True, - help="Download for Seqera Platform. DEPRECATED: Please use `--platform` instead.", -) @click.option( + "-p", "--platform", is_flag=True, default=False, help="Download for Seqera Platform (formerly Nextflow Tower)", ) @click.option( - "-d", + "-c", "--download-configuration", - is_flag=True, - default=False, + type=click.Choice(["yes", "no"]), + default="no", help="Include configuration profiles in download. Not available with `--platform`", ) @click.option( @@ -420,19 +412,20 @@ def create_params_file(pipeline, revision, output, force, show_hidden): help="List of images already available in a remote `singularity.cacheDir`.", ) @click.option( - "-p", + "-d", "--parallel-downloads", type=int, default=4, help="Number of parallel image downloads", ) -def download( +@click.pass_context +def command_pipelines_download( + ctx, pipeline, revision, outdir, compress, force, - tower, platform, download_configuration, tag, @@ -444,22 +437,15 @@ def download( ): """ Download a pipeline, nf-core/configs and pipeline singularity images. - - Collects all files in a single archive and configures the downloaded - workflow to use relative paths to the configs and singularity images. """ - from nf_core.download import DownloadWorkflow - - if tower: - log.warning("[red]The `-t` / `--tower` flag is deprecated. 
Please use `--platform` instead.[/]") - - dl = DownloadWorkflow( + pipelines_download( + ctx, pipeline, revision, outdir, compress, force, - tower or platform, # True if either specified + platform, download_configuration, tag, container_system, @@ -468,389 +454,456 @@ def download( container_cache_index, parallel_downloads, ) - dl.download_workflow() - - -# nf-core licences -@nf_core_cli.command() -@click.argument("pipeline", required=True, metavar="") -@click.option("--json", is_flag=True, default=False, help="Print output in JSON") -def licences(pipeline, json): - """ - List software licences for a given workflow (DSL1 only). - - Checks the pipeline environment.yml file which lists all conda software packages, which is not available for DSL2 workflows. Therefore, this command only supports DSL1 workflows (for now). - Each of these is queried against the anaconda.org API to find the licence. - Package name, version and licence is printed to the command line. - """ - from nf_core.licences import WorkflowLicences - lic = WorkflowLicences(pipeline) - lic.as_json = json - try: - stdout.print(lic.run_licences()) - except LookupError as e: - log.error(e) - sys.exit(1) - -# nf-core create -@nf_core_cli.command() +# nf-core pipelines create-params-file +@pipelines.command("create-params-file") +@click.argument("pipeline", required=False, metavar="") +@click.option("-r", "--revision", help="Release/branch/SHA of the pipeline (if remote)") @click.option( - "-n", - "--name", + "-o", + "--output", type=str, - help="The name of your new pipeline", + default="nf-params.yml", + metavar="", + help="Output filename. Defaults to `nf-params.yml`.", ) -@click.option("-d", "--description", type=str, help="A short description of your pipeline") -@click.option("-a", "--author", type=str, help="Name of the main author(s)") -@click.option("--version", type=str, default="1.0dev", help="The initial version number to use") +@click.option("-f", "--force", is_flag=True, default=False, help="Overwrite existing files") @click.option( - "-f", - "--force", + "-x", + "--show-hidden", is_flag=True, default=False, - help="Overwrite output directory if it already exists", + help="Show hidden params which don't normally need changing", ) -@click.option("-o", "--outdir", help="Output directory for new pipeline (default: pipeline name)") -@click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") -@click.option("--plain", is_flag=True, help="Use the standard nf-core template") -def create(name, description, author, version, force, outdir, template_yaml, plain): +@click.pass_context +def command_pipelines_create_params_file(ctx, pipeline, revision, output, force, show_hidden): """ - Create a new pipeline using the nf-core template. - - Uses the nf-core template to make a skeleton Nextflow pipeline with all required - files, boilerplate code and best-practices. - """ - from nf_core.create import PipelineCreate - - try: - create_obj = PipelineCreate( - name, - description, - author, - version=version, - force=force, - outdir=outdir, - template_yaml_path=template_yaml, - plain=plain, - ) - create_obj.init_pipeline() - except UserWarning as e: - log.error(e) - sys.exit(1) + Build a parameter file for a pipeline. 
+ """ + pipelines_create_params_file(ctx, pipeline, revision, output, force, show_hidden) -# nf-core lint -@nf_core_cli.command() +# nf-core pipelines launch +@pipelines.command("launch") +@click.argument("pipeline", required=False, metavar="") +@click.option("-r", "--revision", help="Release/branch/SHA of the project to run (if remote)") +@click.option("-i", "--id", help="ID for web-gui launch parameter set") @click.option( - "-d", - "--dir", - type=click.Path(exists=True), - default=".", - help=r"Pipeline directory [dim]\[default: current working directory][/]", + "-c", + "--command-only", + is_flag=True, + default=False, + help="Create Nextflow command with params (no params file)", ) @click.option( - "--release", - is_flag=True, - default=os.path.basename(os.path.dirname(os.environ.get("GITHUB_REF", "").strip(" '\""))) == "master" - and os.environ.get("GITHUB_REPOSITORY", "").startswith("nf-core/") - and not os.environ.get("GITHUB_REPOSITORY", "") == "nf-core/tools", - help="Execute additional checks for release-ready workflows.", + "-o", + "--params-out", + type=click.Path(), + default=os.path.join(os.getcwd(), "nf-params.json"), + help="Path to save run parameters file", ) @click.option( - "-f", - "--fix", - type=str, - metavar="", - multiple=True, - help="Attempt to automatically fix specified lint test", + "-p", + "--params-in", + type=click.Path(exists=True), + help="Set of input run params to use from a previous run", ) @click.option( - "-k", - "--key", - type=str, - metavar="", - multiple=True, - help="Run only these lint tests", + "-a", + "--save-all", + is_flag=True, + default=False, + help="Save all parameters, even if unchanged from default", ) -@click.option("-p", "--show-passed", is_flag=True, help="Show passing tests on the command line") -@click.option("-i", "--fail-ignored", is_flag=True, help="Convert ignored tests to failures") -@click.option("-w", "--fail-warned", is_flag=True, help="Convert warn tests to failures") @click.option( - "--markdown", - type=str, - metavar="", - help="File to write linting results to (Markdown)", + "-x", + "--show-hidden", + is_flag=True, + default=False, + help="Show hidden params which don't normally need changing", ) @click.option( - "--json", + "-u", + "--url", type=str, - metavar="", - help="File to write linting results to (JSON)", -) -@click.option( - "--sort-by", - type=click.Choice(["module", "test"]), - default="test", - help="Sort lint output by module or test name.", - show_default=True, + default="https://nf-co.re/launch", + help="Customise the builder URL (for development work)", ) @click.pass_context -def lint( +def command_pipelines_launch( ctx, - dir, - release, - fix, - key, - show_passed, - fail_ignored, - fail_warned, - markdown, - json, - sort_by, + pipeline, + id, + revision, + command_only, + params_in, + params_out, + save_all, + show_hidden, + url, ): """ - Check pipeline code against nf-core guidelines. + Launch a pipeline using a web GUI or command line prompts. + """ + pipelines_launch(ctx, pipeline, id, revision, command_only, params_in, params_out, save_all, show_hidden, url) - Runs a large number of automated tests to ensure that the supplied pipeline - meets the nf-core guidelines. Documentation of all lint tests can be found - on the nf-core website: [link=https://nf-co.re/tools/docs/]https://nf-co.re/tools/docs/[/] - - You can ignore tests using a file called [blue].nf-core.yml[/] [i](if you have a good reason!)[/]. - See the documentation for details. 
- """ - from nf_core.lint import run_linting - from nf_core.utils import is_pipeline_directory - - # Check if pipeline directory is a pipeline - try: - is_pipeline_directory(dir) - except UserWarning as e: - log.error(e) - sys.exit(1) - - # Run the lint tests! - try: - lint_obj, module_lint_obj, subworkflow_lint_obj = run_linting( - dir, - release, - fix, - key, - show_passed, - fail_ignored, - fail_warned, - sort_by, - markdown, - json, - ctx.obj["hide_progress"], - ) - swf_failed = 0 - if subworkflow_lint_obj is not None: - swf_failed = len(subworkflow_lint_obj.failed) - if len(lint_obj.failed) + len(module_lint_obj.failed) + swf_failed > 0: - sys.exit(1) - except AssertionError as e: - log.critical(e) - sys.exit(1) - except UserWarning as e: - log.error(e) - sys.exit(1) + +# nf-core pipelines list +@pipelines.command("list") +@click.argument("keywords", required=False, nargs=-1, metavar="") +@click.option( + "-s", + "--sort", + type=click.Choice(["release", "pulled", "name", "stars"]), + default="release", + help="How to sort listed pipelines", +) +@click.option("--json", is_flag=True, default=False, help="Print full output as JSON") +@click.option("--show-archived", is_flag=True, default=False, help="Print archived workflows") +@click.pass_context +def command_pipelines_list(ctx, keywords, sort, json, show_archived): + """ + List available nf-core pipelines with local info. + """ + pipelines_list(ctx, keywords, sort, json, show_archived) -# nf-core modules subcommands -@nf_core_cli.group() +# nf-core pipelines sync +@pipelines.command("sync") +@click.pass_context @click.option( - "-g", - "--git-remote", - type=str, - default=NF_CORE_MODULES_REMOTE, - help="Remote git repo to fetch files from", + "-d", + "--dir", + "directory", + type=click.Path(exists=True), + default=".", + help=r"Pipeline directory. [dim]\[default: current working directory][/]", ) @click.option( "-b", - "--branch", + "--from-branch", type=str, - default=None, - help="Branch of git repository hosting modules.", + help="The git branch to use to fetch workflow variables.", ) @click.option( - "-N", - "--no-pull", + "-p", + "--pull-request", is_flag=True, default=False, - help="Do not pull in latest changes to local clone of modules repository.", + help="Make a GitHub pull-request with the changes.", ) -@click.pass_context -def modules(ctx, git_remote, branch, no_pull): +@click.option( + "--force_pr", + is_flag=True, + default=False, + help="Force the creation of a pull-request, even if there are no changes.", +) +@click.option("-g", "--github-repository", type=str, help="GitHub PR: target repository.") +@click.option("-u", "--username", type=str, help="GitHub PR: auth username.") +@click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") +def command_pipelines_sync( + ctx, directory, from_branch, pull_request, github_repository, username, template_yaml, force_pr +): """ - Commands to manage Nextflow DSL2 modules (tool wrappers). + Sync a pipeline [cyan i]TEMPLATE[/] branch with the nf-core template. 
""" - # ensure that ctx.obj exists and is a dict (in case `cli()` is called - # by means other than the `if` block below) - ctx.ensure_object(dict) + pipelines_sync(ctx, directory, from_branch, pull_request, github_repository, username, template_yaml, force_pr) - # Place the arguments in a context object - ctx.obj["modules_repo_url"] = git_remote - ctx.obj["modules_repo_branch"] = branch - ctx.obj["modules_repo_no_pull"] = no_pull + +# nf-core pipelines bump-version +@pipelines.command("bump-version") +@click.pass_context +@click.argument("new_version", required=True, metavar="") +@click.option( + "-d", + "--dir", + "directory", + type=click.Path(exists=True), + default=".", + help=r"Pipeline directory. [dim]\[default: current working directory][/]", +) +@click.option( + "-n", + "--nextflow", + is_flag=True, + default=False, + help="Bump required nextflow version instead of pipeline version", +) +def command_pipelines_bump_version(ctx, new_version, directory, nextflow): + """ + Update nf-core pipeline version number with `nf-core pipelines bump-version`. + """ + pipelines_bump_version(ctx, new_version, directory, nextflow) -# nf-core subworkflows click command -@nf_core_cli.group() +# nf-core pipelines create-logo +@pipelines.command("create-logo") +@click.argument("logo-text", metavar="") +@click.option("-d", "--dir", "directory", type=click.Path(), default=".", help="Directory to save the logo in.") @click.option( - "-g", - "--git-remote", + "-n", + "--name", type=str, - default=NF_CORE_MODULES_REMOTE, - help="Remote git repo to fetch files from", + help="Name of the output file (with or without '.png' suffix).", ) @click.option( - "-b", - "--branch", - type=str, - default=None, - help="Branch of git repository hosting modules.", + "--theme", + type=click.Choice(["light", "dark"]), + default="light", + help="Theme for the logo.", + show_default=True, ) @click.option( - "-N", - "--no-pull", + "--width", + type=int, + default=2300, + help="Width of the logo in pixels.", + show_default=True, +) +@click.option( + "--format", + type=click.Choice(["png", "svg"]), + default="png", + help="Image format of the logo, either PNG or SVG.", + show_default=True, +) +@click.option( + "-f", + "--force", is_flag=True, default=False, - help="Do not pull in latest changes to local clone of modules repository.", + help="Overwrite any files if they already exist", ) -@click.pass_context -def subworkflows(ctx, git_remote, branch, no_pull): +def command_pipelines_create_logo(logo_text, directory, name, theme, width, format, force): """ - Commands to manage Nextflow DSL2 subworkflows (tool wrappers). + Generate a logo with the nf-core logo template. """ - # ensure that ctx.obj exists and is a dict (in case `cli()` is called - # by means other than the `if` block below) - ctx.ensure_object(dict) - - # Place the arguments in a context object - ctx.obj["modules_repo_url"] = git_remote - ctx.obj["modules_repo_branch"] = branch - ctx.obj["modules_repo_no_pull"] = no_pull + pipelines_create_logo(logo_text, directory, name, theme, width, format, force) -# nf-core modules list subcommands -@modules.group("list") -@click.pass_context -def modules_list(ctx): - """ - List modules in a local pipeline or remote repository. +# nf-core pipelines schema subcommands +@pipelines.group("schema") +def pipeline_schema(): """ - pass - + Suite of tools for developers to manage pipeline schema. 
-# nf-core modules list remote -@modules_list.command("remote") -@click.pass_context -@click.argument("keywords", required=False, nargs=-1, metavar="") -@click.option("-j", "--json", is_flag=True, help="Print as JSON to stdout") -def modules_list_remote(ctx, keywords, json): - """ - List modules in a remote GitHub repo [dim i](e.g [link=https://github.com/nf-core/modules]nf-core/modules[/])[/]. + All nf-core pipelines should have a nextflow_schema.json file in their + root directory that describes the different pipeline parameters. """ - from nf_core.modules import ModuleList - - try: - module_list = ModuleList( - None, - True, - ctx.obj["modules_repo_url"], - ctx.obj["modules_repo_branch"], - ctx.obj["modules_repo_no_pull"], - ) - stdout.print(module_list.list_components(keywords, json)) - except (UserWarning, LookupError) as e: - log.critical(e) - sys.exit(1) + pass -# nf-core modules list local -@modules_list.command("local") -@click.pass_context -@click.argument("keywords", required=False, nargs=-1, metavar="") -@click.option("-j", "--json", is_flag=True, help="Print as JSON to stdout") -@click.option( - "-d", - "--dir", - type=click.Path(exists=True), - default=".", - help=r"Pipeline directory. [dim]\[default: Current working directory][/]", -) -def modules_list_local(ctx, keywords, json, dir): # pylint: disable=redefined-builtin +# nf-core pipelines schema validate +@pipeline_schema.command("validate") +@click.argument("pipeline", required=True, metavar="") +@click.argument("params", type=click.Path(exists=True), required=True, metavar="") +def command_pipelines_schema_validate(pipeline, params): """ - List modules installed locally in a pipeline + Validate a set of parameters against a pipeline schema. """ - from nf_core.modules import ModuleList - - try: - module_list = ModuleList( - dir, - False, - ctx.obj["modules_repo_url"], - ctx.obj["modules_repo_branch"], - ctx.obj["modules_repo_no_pull"], - ) - stdout.print(module_list.list_components(keywords, json)) - except (UserWarning, LookupError) as e: - log.error(e) - sys.exit(1) + pipelines_schema_validate(pipeline, params) -# nf-core modules install -@modules.command("install") -@click.pass_context -@click.argument("tool", type=str, callback=normalize_case, required=False, metavar=" or ") +# nf-core pipelines schema build +@pipeline_schema.command("build") @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. [dim]\[default: current working directory][/]", ) @click.option( - "-p", - "--prompt", + "--no-prompts", is_flag=True, - default=False, - help="Prompt for the version of the module", + help="Do not confirm changes, just update parameters and exit", ) @click.option( - "-f", - "--force", + "--web-only", is_flag=True, - default=False, + help="Skip building using Nextflow config, just launch the web tool", +) +@click.option( + "--url", + type=str, + default="https://nf-co.re/pipeline_schema_builder", + help="Customise the builder URL (for development work)", +) +def command_pipelines_schema_build(directory, no_prompts, web_only, url): + """ + Interactively build a pipeline schema from Nextflow params. + """ + pipelines_schema_build(directory, no_prompts, web_only, url) + + +# nf-core pipelines schema lint +@pipeline_schema.command("lint") +@click.argument( + "schema_path", + type=click.Path(exists=True), + default="nextflow_schema.json", + metavar="", +) +def command_pipelines_schema_lint(schema_path): + """ + Check that a given pipeline schema is valid. 
+ """ + pipelines_schema_lint(schema_path) + + +# nf-core pipelines schema docs +@pipeline_schema.command("docs") +@click.argument( + "schema_path", + type=click.Path(exists=True), + default="nextflow_schema.json", + required=False, + metavar="", +) +@click.option( + "-o", + "--output", + type=str, + metavar="", + help="Output filename. Defaults to standard out.", +) +@click.option( + "-x", + "--format", + type=click.Choice(["markdown", "html"]), + default="markdown", + help="Format to output docs in.", +) +@click.option("-f", "--force", is_flag=True, default=False, help="Overwrite existing files") +@click.option( + "-c", + "--columns", + type=str, + metavar="", + help="CSV list of columns to include in the parameter tables (parameter,description,type,default,required,hidden)", + default="parameter,description,type,default,required,hidden", +) +def command_pipelines_schema_docs(schema_path, output, format, force, columns): + """ + Outputs parameter documentation for a pipeline schema. + """ + pipelines_schema_docs(schema_path, output, format, force, columns) + + +# nf-core modules subcommands +@nf_core_cli.group() +@click.option( + "-g", + "--git-remote", + type=str, + default=NF_CORE_MODULES_REMOTE, + help="Remote git repo to fetch files from", +) +@click.option( + "-b", + "--branch", + type=str, + default=None, + help="Branch of git repository hosting modules.", +) +@click.option( + "-N", + "--no-pull", + is_flag=True, + default=False, + help="Do not pull in latest changes to local clone of modules repository.", +) +@click.pass_context +def modules(ctx, git_remote, branch, no_pull): + """ + Commands to manage Nextflow DSL2 modules (tool wrappers). + """ + # ensure that ctx.obj exists and is a dict (in case `cli()` is called + # by means other than the `if` block below) + ctx.ensure_object(dict) + + # Place the arguments in a context object + ctx.obj["modules_repo_url"] = git_remote + ctx.obj["modules_repo_branch"] = branch + ctx.obj["modules_repo_no_pull"] = no_pull + + +# nf-core modules list subcommands +@modules.group("list") +@click.pass_context +def modules_list(ctx): + """ + List modules in a local pipeline or remote repository. + """ + pass + + +# nf-core modules list remote +@modules_list.command("remote") +@click.pass_context +@click.argument("keywords", required=False, nargs=-1, metavar="") +@click.option("-j", "--json", is_flag=True, help="Print as JSON to stdout") +def command_modules_list_remote(ctx, keywords, json): + """ + List modules in a remote GitHub repo [dim i](e.g [link=https://github.com/nf-core/modules]nf-core/modules[/])[/]. + """ + modules_list_remote(ctx, keywords, json) + + +# nf-core modules list local +@modules_list.command("local") +@click.pass_context +@click.argument("keywords", required=False, nargs=-1, metavar="") +@click.option("-j", "--json", is_flag=True, help="Print as JSON to stdout") +@click.option( + "-d", + "--dir", + "directory", + type=click.Path(exists=True), + default=".", + help=r"Pipeline directory. 
[dim]\[default: Current working directory][/]", +) +def command_modules_list_local(ctx, keywords, json, directory): # pylint: disable=redefined-builtin + """ + List modules installed locally in a pipeline + """ + modules_list_local(ctx, keywords, json, directory) + + +# nf-core modules install +@modules.command("install") +@click.pass_context +@click.argument("tool", type=str, callback=normalize_case, required=False, metavar=" or ") +@click.option( + "-d", + "--dir", + "directory", + type=click.Path(exists=True), + default=".", + help=r"Pipeline directory. [dim]\[default: current working directory][/]", +) +@click.option( + "-p", + "--prompt", + is_flag=True, + default=False, + help="Prompt for the version of the module", +) +@click.option( + "-f", + "--force", + is_flag=True, + default=False, help="Force reinstallation of module if it already exists", ) @click.option("-s", "--sha", type=str, metavar="", help="Install module at commit SHA") -def modules_install(ctx, tool, dir, prompt, force, sha): +def command_modules_install(ctx, tool, directory, prompt, force, sha): """ Install DSL2 modules within a pipeline. - - Fetches and installs module files from a remote repo e.g. nf-core/modules. """ - from nf_core.modules import ModuleInstall - - try: - module_install = ModuleInstall( - dir, - force, - prompt, - sha, - ctx.obj["modules_repo_url"], - ctx.obj["modules_repo_branch"], - ctx.obj["modules_repo_no_pull"], - ) - exit_status = module_install.install(tool) - if not exit_status: - sys.exit(1) - except (UserWarning, LookupError) as e: - log.error(e) - sys.exit(1) + modules_install(ctx, tool, directory, prompt, force, sha) # nf-core modules update @@ -873,6 +926,13 @@ def modules_install(ctx, tool, dir, prompt, force, sha): default=False, help="Prompt for the version of the module", ) +@click.option( + "--limit-output", + "limit_output", + is_flag=True, + default=False, + help="Limit output to only the difference in main.nf", +) @click.option("-s", "--sha", type=str, metavar="", help="Install module at commit SHA") @click.option( "-a", @@ -904,7 +964,7 @@ def modules_install(ctx, tool, dir, prompt, force, sha): default=False, help="Automatically update all linked modules and subworkflows without asking for confirmation", ) -def modules_update( +def command_modules_update( ctx, tool, directory, @@ -915,71 +975,32 @@ def modules_update( preview, save_diff, update_deps, + limit_output, ): """ Update DSL2 modules within a pipeline. - - Fetches and updates module files from a remote repo e.g. nf-core/modules. - """ - from nf_core.modules import ModuleUpdate - - try: - module_install = ModuleUpdate( - directory, - force, - prompt, - sha, - install_all, - preview, - save_diff, - update_deps, - ctx.obj["modules_repo_url"], - ctx.obj["modules_repo_branch"], - ctx.obj["modules_repo_no_pull"], - ) - exit_status = module_install.update(tool) - if not exit_status and install_all: - sys.exit(1) - except (UserWarning, LookupError) as e: - log.error(e) - sys.exit(1) + """ + modules_update(ctx, tool, directory, force, prompt, sha, install_all, preview, save_diff, update_deps, limit_output) # nf-core modules patch -@modules.command() +@modules.command("patch") @click.pass_context @click.argument("tool", type=str, callback=normalize_case, required=False, metavar=" or ") @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. 
[dim]\[default: current working directory][/]", ) @click.option("-r", "--remove", is_flag=True, default=False) -def patch(ctx, tool, dir, remove): +def command_modules_patch(ctx, tool, directory, remove): """ Create a patch file for minor changes in a module - - Checks if a module has been modified locally and creates a patch file - describing how the module has changed from the remote version """ - from nf_core.modules import ModulePatch - - try: - module_patch = ModulePatch( - dir, - ctx.obj["modules_repo_url"], - ctx.obj["modules_repo_branch"], - ctx.obj["modules_repo_no_pull"], - ) - if remove: - module_patch.remove(tool) - else: - module_patch.patch(tool) - except (UserWarning, LookupError) as e: - log.error(e) - sys.exit(1) + modules_patch(ctx, tool, directory, remove) # nf-core modules remove @@ -989,34 +1010,23 @@ def patch(ctx, tool, dir, remove): @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. [dim]\[default: current working directory][/]", ) -def modules_remove(ctx, dir, tool): +def command_modules_remove(ctx, directory, tool): """ Remove a module from a pipeline. """ - from nf_core.modules import ModuleRemove - - try: - module_remove = ModuleRemove( - dir, - ctx.obj["modules_repo_url"], - ctx.obj["modules_repo_branch"], - ctx.obj["modules_repo_no_pull"], - ) - module_remove.remove(tool) - except (UserWarning, LookupError) as e: - log.critical(e) - sys.exit(1) + modules_remove(ctx, directory, tool) # nf-core modules create @modules.command("create") @click.pass_context @click.argument("tool", type=str, required=False, metavar=" or ") -@click.option("-d", "--dir", type=click.Path(exists=True), default=".", metavar="") +@click.option("-d", "--dir", "directory", type=click.Path(exists=True), default=".", metavar="") @click.option( "-a", "--author", @@ -1079,10 +1089,10 @@ def modules_remove(ctx, dir, tool): default=False, help="Migrate a module with pytest tests to nf-test", ) -def create_module( +def command_modules_create( ctx, tool, - dir, + directory, author, label, meta, @@ -1095,54 +1105,38 @@ def create_module( ): """ Create a new DSL2 module from the nf-core template. - - If the specified directory is a pipeline, this function creates a file called - 'modules/local/tool_subtool.nf' - - If the specified directory is a clone of nf-core/modules, it creates or modifies files - in 'modules/', 'tests/modules' and 'tests/config/pytest_modules.yml' - """ - # Combine two bool flags into one variable - has_meta = None - if meta and no_meta: - log.critical("Both arguments '--meta' and '--no-meta' given. Please pick one.") - elif meta: - has_meta = True - elif no_meta: - has_meta = False - - from nf_core.modules import ModuleCreate - - # Run function - try: - module_create = ModuleCreate( - dir, - tool, - author, - label, - has_meta, - force, - conda_name, - conda_package_version, - empty_template, - migrate_pytest, - ) - module_create.create() - except UserWarning as e: - log.critical(e) - sys.exit(1) - except LookupError as e: - log.error(e) - sys.exit(1) + """ + modules_create( + ctx, + tool, + directory, + author, + label, + meta, + no_meta, + force, + conda_name, + conda_package_version, + empty_template, + migrate_pytest, + ) # nf-core modules test @modules.command("test") @click.pass_context @click.argument("tool", type=str, callback=normalize_case, required=False, metavar=" or ") +@click.option( + "-v", + "--verbose", + is_flag=True, + default=False, + help="Print verbose output to the console. 
Sets `--debug` inside the nf-test command.", +) @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", metavar="", @@ -1168,31 +1162,19 @@ def create_module( default=None, help="Run tests with a specific profile", ) -def test_module(ctx, tool, dir, no_prompts, update, once, profile): +@click.option( + "--migrate-pytest", + is_flag=True, + default=False, + help="Migrate a module with pytest tests to nf-test", +) +def command_modules_test(ctx, tool, directory, no_prompts, update, once, profile, migrate_pytest, verbose): """ Run nf-test for a module. - - Given the name of a module, runs the nf-test command to test the module and generate snapshots. - """ - from nf_core.components.components_test import ComponentsTest - - try: - module_tester = ComponentsTest( - component_type="modules", - component_name=tool, - directory=dir, - no_prompts=no_prompts, - update=update, - once=once, - remote_url=ctx.obj["modules_repo_url"], - branch=ctx.obj["modules_repo_branch"], - verbose=ctx.obj["verbose"], - profile=profile, - ) - module_tester.run() - except (UserWarning, LookupError) as e: - log.critical(e) - sys.exit(1) + """ + if verbose: + ctx.obj["verbose"] = verbose + modules_test(ctx, tool, directory, no_prompts, update, once, profile, migrate_pytest) # nf-core modules lint @@ -1202,6 +1184,7 @@ def test_module(ctx, tool, dir, no_prompts, update, once, profile): @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", metavar="", @@ -1238,48 +1221,14 @@ def test_module(ctx, tool, dir, no_prompts, update, once, profile): is_flag=True, help="Fix the module version if a newer version is available", ) -def modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed, sort_by, fix_version): +@click.option("--fix", is_flag=True, help="Fix all linting tests if possible.") +def command_modules_lint( + ctx, tool, directory, registry, key, all, fail_warned, local, passed, sort_by, fix_version, fix +): """ Lint one or more modules in a directory. - - Checks DSL2 module code against nf-core guidelines to ensure - that all modules follow the same standards. - - Test modules within a pipeline or a clone of the - nf-core/modules repository. """ - from nf_core.components.lint import LintExceptionError - from nf_core.modules import ModuleLint - - try: - module_lint = ModuleLint( - dir, - fail_warned=fail_warned, - registry=ctx.params["registry"], - remote_url=ctx.obj["modules_repo_url"], - branch=ctx.obj["modules_repo_branch"], - no_pull=ctx.obj["modules_repo_no_pull"], - hide_progress=ctx.obj["hide_progress"], - ) - module_lint.lint( - module=tool, - registry=registry, - key=key, - all_modules=all, - print_results=True, - local=local, - show_passed=passed, - sort_by=sort_by, - fix_version=fix_version, - ) - if len(module_lint.failed) > 0: - sys.exit(1) - except LintExceptionError as e: - log.error(e) - sys.exit(1) - except (UserWarning, LookupError) as e: - log.critical(e) - sys.exit(1) + modules_lint(ctx, tool, directory, registry, key, all, fail_warned, local, passed, sort_by, fix_version, fix) # nf-core modules info @@ -1289,80 +1238,83 @@ def modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed, @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. [dim]\[default: Current working directory][/]", ) -def modules_info(ctx, tool, dir): +def command_modules_info(ctx, tool, directory): """ Show developer usage information about a given module. 
- - Parses information from a module's [i]meta.yml[/] and renders help - on the command line. A handy equivalent to searching the - [link=https://nf-co.re/modules]nf-core website[/]. - - If run from a pipeline and a local copy of the module is found, the command - will print this usage info. - If not, usage from the remote modules repo will be shown. """ - from nf_core.modules import ModuleInfo - - try: - module_info = ModuleInfo( - dir, - tool, - ctx.obj["modules_repo_url"], - ctx.obj["modules_repo_branch"], - ctx.obj["modules_repo_no_pull"], - ) - stdout.print(module_info.get_component_info()) - except (UserWarning, LookupError) as e: - log.error(e) - sys.exit(1) + modules_info(ctx, tool, directory) # nf-core modules bump-versions -@modules.command() +@modules.command("bump-versions") @click.pass_context @click.argument("tool", type=str, callback=normalize_case, required=False, metavar=" or ") @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", metavar="", ) @click.option("-a", "--all", is_flag=True, help="Run on all modules") @click.option("-s", "--show-all", is_flag=True, help="Show up-to-date modules in results too") -def bump_versions(ctx, tool, dir, all, show_all): +def command_modules_bump_versions(ctx, tool, directory, all, show_all): """ Bump versions for one or more modules in a clone of the nf-core/modules repo. """ - from nf_core.modules.bump_versions import ModuleVersionBumper - from nf_core.modules.modules_utils import ModuleExceptionError + modules_bump_versions(ctx, tool, directory, all, show_all) - try: - version_bumper = ModuleVersionBumper( - dir, - ctx.obj["modules_repo_url"], - ctx.obj["modules_repo_branch"], - ctx.obj["modules_repo_no_pull"], - ) - version_bumper.bump_versions(module=tool, all_modules=all, show_uptodate=show_all) - except ModuleExceptionError as e: - log.error(e) - sys.exit(1) - except (UserWarning, LookupError) as e: - log.critical(e) - sys.exit(1) + +# nf-core subworkflows click command +@nf_core_cli.group() +@click.option( + "-g", + "--git-remote", + type=str, + default=NF_CORE_MODULES_REMOTE, + help="Remote git repo to fetch files from", +) +@click.option( + "-b", + "--branch", + type=str, + default=None, + help="Branch of git repository hosting modules.", +) +@click.option( + "-N", + "--no-pull", + is_flag=True, + default=False, + help="Do not pull in latest changes to local clone of modules repository.", +) +@click.pass_context +def subworkflows(ctx, git_remote, branch, no_pull): + """ + Commands to manage Nextflow DSL2 subworkflows (tool wrappers). 
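(Editor's aside: the `modules` and `subworkflows` groups share one Click idiom, the group callback stores shared options in `ctx.obj` so every subcommand can read them. A self-contained sketch of that idiom; `demo` and `repo_url` are made-up names, not part of this diff.)

import click


@click.group()
@click.option("--git-remote", default="https://github.com/nf-core/modules.git")
@click.pass_context
def demo(ctx, git_remote):
    """Toy group mirroring how `modules`/`subworkflows` share state."""
    ctx.ensure_object(dict)  # create ctx.obj if the group was invoked without one
    ctx.obj["repo_url"] = git_remote  # every subcommand can now read this


@demo.command("show")
@click.pass_context
def show(ctx):
    """Subcommand reading the option stored by the parent group."""
    click.echo(ctx.obj["repo_url"])


if __name__ == "__main__":
    demo(obj={})  # seed the context object explicitly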
+ """ + # ensure that ctx.obj exists and is a dict (in case `cli()` is called + # by means other than the `if` block below) + ctx.ensure_object(dict) + + # Place the arguments in a context object + ctx.obj["modules_repo_url"] = git_remote + ctx.obj["modules_repo_branch"] = branch + ctx.obj["modules_repo_no_pull"] = no_pull # nf-core subworkflows create @subworkflows.command("create") @click.pass_context @click.argument("subworkflow", type=str, required=False, metavar="subworkflow name") -@click.option("-d", "--dir", type=click.Path(exists=True), default=".", metavar="") +@click.option("-d", "--dir", "directory", type=click.Path(exists=True), default=".", metavar="") @click.option( "-a", "--author", @@ -1383,28 +1335,11 @@ def bump_versions(ctx, tool, dir, all, show_all): default=False, help="Migrate a module with pytest tests to nf-test", ) -def create_subworkflow(ctx, subworkflow, dir, author, force, migrate_pytest): +def command_subworkflows_create(ctx, subworkflow, directory, author, force, migrate_pytest): """ Create a new subworkflow from the nf-core template. - - If the specified directory is a pipeline, this function creates a file called - 'subworkflows/local/.nf' - - If the specified directory is a clone of nf-core/modules, it creates or modifies files - in 'subworkflows/', 'tests/subworkflows' and 'tests/config/pytest_modules.yml' """ - from nf_core.subworkflows import SubworkflowCreate - - # Run function - try: - subworkflow_create = SubworkflowCreate(dir, subworkflow, author, force, migrate_pytest) - subworkflow_create.create() - except UserWarning as e: - log.critical(e) - sys.exit(1) - except LookupError as e: - log.error(e) - sys.exit(1) + subworkflows_create(ctx, subworkflow, directory, author, force, migrate_pytest) # nf-core subworkflows test @@ -1414,6 +1349,7 @@ def create_subworkflow(ctx, subworkflow, dir, author, force, migrate_pytest): @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", metavar="", @@ -1439,31 +1375,17 @@ def create_subworkflow(ctx, subworkflow, dir, author, force, migrate_pytest): default=None, help="Run tests with a specific profile", ) -def test_subworkflow(ctx, subworkflow, dir, no_prompts, update, once, profile): - """ - Run nf-test for a subworkflow. - - Given the name of a subworkflow, runs the nf-test command to test the subworkflow and generate snapshots. - """ - from nf_core.components.components_test import ComponentsTest - - try: - sw_tester = ComponentsTest( - component_type="subworkflows", - component_name=subworkflow, - directory=dir, - no_prompts=no_prompts, - update=update, - once=once, - remote_url=ctx.obj["modules_repo_url"], - branch=ctx.obj["modules_repo_branch"], - verbose=ctx.obj["verbose"], - profile=profile, - ) - sw_tester.run() - except (UserWarning, LookupError) as e: - log.critical(e) - sys.exit(1) +@click.option( + "--migrate-pytest", + is_flag=True, + default=False, + help="Migrate a subworkflow with pytest tests to nf-test", +) +def command_subworkflows_test(ctx, subworkflow, directory, no_prompts, update, once, profile, migrate_pytest): + """ + Run nf-test for a subworkflow. 
+ """ + subworkflows_test(ctx, subworkflow, directory, no_prompts, update, once, profile, migrate_pytest) # nf-core subworkflows list subcommands @@ -1481,25 +1403,11 @@ def subworkflows_list(ctx): @click.pass_context @click.argument("keywords", required=False, nargs=-1, metavar="") @click.option("-j", "--json", is_flag=True, help="Print as JSON to stdout") -def subworkflows_list_remote(ctx, keywords, json): +def command_subworkflows_list_remote(ctx, keywords, json): """ List subworkflows in a remote GitHub repo [dim i](e.g [link=https://github.com/nf-core/modules]nf-core/modules[/])[/]. """ - from nf_core.subworkflows import SubworkflowList - - try: - subworkflow_list = SubworkflowList( - None, - True, - ctx.obj["modules_repo_url"], - ctx.obj["modules_repo_branch"], - ctx.obj["modules_repo_no_pull"], - ) - - stdout.print(subworkflow_list.list_components(keywords, json)) - except (UserWarning, LookupError) as e: - log.critical(e) - sys.exit(1) + subworkflows_list_remote(ctx, keywords, json) # nf-core subworkflows list local @@ -1510,28 +1418,16 @@ def subworkflows_list_remote(ctx, keywords, json): @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. [dim]\[default: Current working directory][/]", ) -def subworkflows_list_local(ctx, keywords, json, dir): # pylint: disable=redefined-builtin +def command_subworkflows_list_local(ctx, keywords, json, directory): # pylint: disable=redefined-builtin """ List subworkflows installed locally in a pipeline """ - from nf_core.subworkflows import SubworkflowList - - try: - subworkflow_list = SubworkflowList( - dir, - False, - ctx.obj["modules_repo_url"], - ctx.obj["modules_repo_branch"], - ctx.obj["modules_repo_no_pull"], - ) - stdout.print(subworkflow_list.list_components(keywords, json)) - except (UserWarning, LookupError) as e: - log.error(e) - sys.exit(1) + subworkflows_list_local(ctx, keywords, json, directory) # nf-core subworkflows lint @@ -1541,6 +1437,7 @@ def subworkflows_list_local(ctx, keywords, json, dir): # pylint: disable=redefi @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", metavar="", @@ -1572,47 +1469,14 @@ def subworkflows_list_local(ctx, keywords, json, dir): # pylint: disable=redefi help="Sort lint output by subworkflow or test name.", show_default=True, ) -def subworkflows_lint(ctx, subworkflow, dir, registry, key, all, fail_warned, local, passed, sort_by): +@click.option("--fix", is_flag=True, help="Fix all linting tests if possible.") +def command_subworkflows_lint( + ctx, subworkflow, directory, registry, key, all, fail_warned, local, passed, sort_by, fix +): """ Lint one or more subworkflows in a directory. - - Checks DSL2 subworkflow code against nf-core guidelines to ensure - that all subworkflows follow the same standards. - - Test subworkflows within a pipeline or a clone of the - nf-core/modules repository. 
""" - from nf_core.components.lint import LintExceptionError - from nf_core.subworkflows import SubworkflowLint - - try: - subworkflow_lint = SubworkflowLint( - dir, - fail_warned=fail_warned, - registry=ctx.params["registry"], - remote_url=ctx.obj["modules_repo_url"], - branch=ctx.obj["modules_repo_branch"], - no_pull=ctx.obj["modules_repo_no_pull"], - hide_progress=ctx.obj["hide_progress"], - ) - subworkflow_lint.lint( - subworkflow=subworkflow, - registry=registry, - key=key, - all_subworkflows=all, - print_results=True, - local=local, - show_passed=passed, - sort_by=sort_by, - ) - if len(subworkflow_lint.failed) > 0: - sys.exit(1) - except LintExceptionError as e: - log.error(e) - sys.exit(1) - except (UserWarning, LookupError) as e: - log.critical(e) - sys.exit(1) + subworkflows_lint(ctx, subworkflow, directory, registry, key, all, fail_warned, local, passed, sort_by, fix) # nf-core subworkflows info @@ -1622,36 +1486,16 @@ def subworkflows_lint(ctx, subworkflow, dir, registry, key, all, fail_warned, lo @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. [dim]\[default: Current working directory][/]", ) -def subworkflows_info(ctx, subworkflow, dir): +def command_subworkflows_info(ctx, subworkflow, directory): """ Show developer usage information about a given subworkflow. - - Parses information from a subworkflow's [i]meta.yml[/] and renders help - on the command line. A handy equivalent to searching the - [link=https://nf-co.re/modules]nf-core website[/]. - - If run from a pipeline and a local copy of the subworkflow is found, the command - will print this usage info. - If not, usage from the remote subworkflows repo will be shown. """ - from nf_core.subworkflows import SubworkflowInfo - - try: - subworkflow_info = SubworkflowInfo( - dir, - subworkflow, - ctx.obj["modules_repo_url"], - ctx.obj["modules_repo_branch"], - ctx.obj["modules_repo_no_pull"], - ) - stdout.print(subworkflow_info.get_component_info()) - except (UserWarning, LookupError) as e: - log.error(e) - sys.exit(1) + subworkflows_info(ctx, subworkflow, directory) # nf-core subworkflows install @@ -1661,6 +1505,7 @@ def subworkflows_info(ctx, subworkflow, dir): @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. [dim]\[default: current working directory][/]", @@ -1686,30 +1531,11 @@ def subworkflows_info(ctx, subworkflow, dir): metavar="", help="Install subworkflow at commit SHA", ) -def subworkflows_install(ctx, subworkflow, dir, prompt, force, sha): +def command_subworkflows_install(ctx, subworkflow, directory, prompt, force, sha): """ Install DSL2 subworkflow within a pipeline. - - Fetches and installs subworkflow files from a remote repo e.g. nf-core/modules. """ - from nf_core.subworkflows import SubworkflowInstall - - try: - subworkflow_install = SubworkflowInstall( - dir, - force, - prompt, - sha, - ctx.obj["modules_repo_url"], - ctx.obj["modules_repo_branch"], - ctx.obj["modules_repo_no_pull"], - ) - exit_status = subworkflow_install.install(subworkflow) - if not exit_status: - sys.exit(1) - except (UserWarning, LookupError) as e: - log.error(e) - sys.exit(1) + subworkflows_install(ctx, subworkflow, directory, prompt, force, sha) # nf-core subworkflows remove @@ -1719,27 +1545,16 @@ def subworkflows_install(ctx, subworkflow, dir, prompt, force, sha): @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. 
[dim]\[default: current working directory][/]", ) -def subworkflows_remove(ctx, dir, subworkflow): +def command_subworkflows_remove(ctx, directory, subworkflow): """ Remove a subworkflow from a pipeline. """ - from nf_core.subworkflows import SubworkflowRemove - - try: - module_remove = SubworkflowRemove( - dir, - ctx.obj["modules_repo_url"], - ctx.obj["modules_repo_branch"], - ctx.obj["modules_repo_no_pull"], - ) - module_remove.remove(subworkflow) - except (UserWarning, LookupError) as e: - log.critical(e) - sys.exit(1) + subworkflows_remove(ctx, directory, subworkflow) # nf-core subworkflows update @@ -1749,6 +1564,7 @@ def subworkflows_remove(ctx, dir, subworkflow): @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. [dim]\[default: current working directory][/]", ) @@ -1768,6 +1584,14 @@ def subworkflows_remove(ctx, dir, subworkflow): metavar="", help="Install subworkflow at commit SHA", ) +@click.option( + "-l", + "--limit-output", + "limit_output", + is_flag=True, + default=False, + help="Limit output to only the difference in main.nf", +) @click.option( "-a", "--all", @@ -1798,10 +1622,10 @@ def subworkflows_remove(ctx, dir, subworkflow): default=False, help="Automatically update all linked modules and subworkflows without asking for confirmation", ) -def subworkflows_update( +def command_subworkflows_update( ctx, subworkflow, - dir, + directory, force, prompt, sha, @@ -1809,84 +1633,48 @@ def subworkflows_remove(ctx, dir, subworkflow): preview, save_diff, update_deps, + limit_output, ): """ Update DSL2 subworkflow within a pipeline. + """ + subworkflows_update( + ctx, subworkflow, directory, force, prompt, sha, install_all, preview, save_diff, update_deps, limit_output + ) - Fetches and updates subworkflow files from a remote repo e.g. nf-core/modules. - """ - from nf_core.subworkflows import SubworkflowUpdate - - try: - subworkflow_install = SubworkflowUpdate( - dir, - force, - prompt, - sha, - install_all, - preview, - save_diff, - update_deps, - ctx.obj["modules_repo_url"], - ctx.obj["modules_repo_branch"], - ctx.obj["modules_repo_no_pull"], - ) - exit_status = subworkflow_install.update(subworkflow) - if not exit_status and install_all: - sys.exit(1) - except (UserWarning, LookupError) as e: - log.error(e) - sys.exit(1) +## DEPRECATED commands since v3.0.0 -# nf-core schema subcommands -@nf_core_cli.group() + + +# nf-core schema subcommands (deprecated) +@nf_core_cli.group(deprecated=True, hidden=True) def schema(): """ - Suite of tools for developers to manage pipeline schema. - - All nf-core pipelines should have a nextflow_schema.json file in their - root directory that describes the different pipeline parameters. + Use `nf-core pipelines schema ` instead. """ pass -# nf-core schema validate -@schema.command() +# nf-core schema validate (deprecated) +@schema.command("validate", deprecated=True) @click.argument("pipeline", required=True, metavar="") @click.argument("params", type=click.Path(exists=True), required=True, metavar="") -def validate(pipeline, params): +def command_schema_validate(pipeline, params): """ - Validate a set of parameters against a pipeline schema. - - Nextflow can be run using the -params-file flag, which loads - script parameters from a JSON file. - - This command takes such a file and validates it against the pipeline - schema, checking whether all schema rules are satisfied. + Use `nf-core pipelines schema validate` instead. 
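(Editor's aside: every deprecated top-level command in this PR has the same shape, keep the old name registered but hidden, log a warning, then delegate to the new implementation. A minimal sketch of that shape, with placeholder names `cli`, `do_thing` and `new-thing`:)

import logging

import click

log = logging.getLogger(__name__)


@click.group()
def cli():
    pass


def do_thing(name):
    """The real logic lives in exactly one place."""
    click.echo(f"doing the thing for {name}")


@cli.command("new-thing")
@click.argument("name")
def command_new_thing(name):
    do_thing(name)


@cli.command("old-thing", deprecated=True, hidden=True)  # hidden from --help, marked deprecated
@click.argument("name")
def command_old_thing(name):
    """Use `new-thing` instead."""
    log.warning("`old-thing` is deprecated. Use `new-thing` instead.")
    do_thing(name)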
""" - from nf_core.schema import PipelineSchema - - schema_obj = PipelineSchema() - try: - schema_obj.get_schema_path(pipeline) - # Load and check schema - schema_obj.load_lint_schema() - except AssertionError as e: - log.error(e) - sys.exit(1) - schema_obj.load_input_params(params) - try: - schema_obj.validate_params() - except AssertionError: - sys.exit(1) + log.warning( + "The `[magenta]nf-core schema validate[/]` command is deprecated. Use `[magenta]nf-core pipelines schema validate[/]` instead." + ) + pipelines_schema_validate(pipeline, params) -# nf-core schema build -@schema.command() +# nf-core schema build (deprecated) +@schema.command("build", deprecated=True) @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. [dim]\[default: current working directory][/]", @@ -1907,65 +1695,36 @@ def validate(pipeline, params): default="https://nf-co.re/pipeline_schema_builder", help="Customise the builder URL (for development work)", ) -def build(dir, no_prompts, web_only, url): +def command_schema_build(directory, no_prompts, web_only, url): """ - Interactively build a pipeline schema from Nextflow params. - - Automatically detects parameters from the pipeline config and main.nf and - compares these to the pipeline schema. Prompts to add or remove parameters - if the two do not match one another. - - Once all parameters are accounted for, can launch a web GUI tool on the - https://nf-co.re website where you can annotate and organise parameters. - Listens for this to be completed and saves the updated schema. + Use `nf-core pipelines schema build` instead. """ - from nf_core.schema import PipelineSchema - - try: - schema_obj = PipelineSchema() - if schema_obj.build_schema(dir, no_prompts, web_only, url) is False: - sys.exit(1) - except (UserWarning, AssertionError) as e: - log.error(e) - sys.exit(1) + log.warning( + "The `[magenta]nf-core schema build[/]` command is deprecated. Use `[magenta]nf-core pipelines schema build[/]` instead." + ) + pipelines_schema_build(directory, no_prompts, web_only, url) -# nf-core schema lint -@schema.command("lint") +# nf-core schema lint (deprecated) +@schema.command("lint", deprecated=True) @click.argument( "schema_path", type=click.Path(exists=True), default="nextflow_schema.json", metavar="", ) -def schema_lint(schema_path): +def command_schema_lint(schema_path): """ - Check that a given pipeline schema is valid. - - Checks whether the pipeline schema validates as JSON Schema Draft 7 - and adheres to the additional nf-core schema requirements. - - This function runs as part of the nf-core lint command, this is a convenience - command that does just the schema linting nice and quickly. - - If no schema path is provided, "nextflow_schema.json" will be used (if it exists). + Use `nf-core pipelines schema lint` instead. """ - from nf_core.schema import PipelineSchema - - schema_obj = PipelineSchema() - try: - schema_obj.get_schema_path(schema_path) - schema_obj.load_lint_schema() - # Validate title and description - just warnings as schema should still work fine - try: - schema_obj.validate_schema_title_description() - except AssertionError as e: - log.warning(e) - except AssertionError: - sys.exit(1) + log.warning( + "The `[magenta]nf-core schema lint[/]` command is deprecated. Use `[magenta]nf-core pipelines schema lint[/]` instead." 
+ ) + pipelines_schema_lint(schema_path) -@schema.command() +# nf-core schema docs (deprecated) +@schema.command("docs", deprecated=True) @click.argument( "schema_path", type=click.Path(exists=True), @@ -1996,78 +1755,20 @@ def schema_lint(schema_path): help="CSV list of columns to include in the parameter tables (parameter,description,type,default,required,hidden)", default="parameter,description,type,default,required,hidden", ) -def docs(schema_path, output, format, force, columns): +def command_schema_docs(schema_path, output, format, force, columns): """ - Outputs parameter documentation for a pipeline schema. + Use `nf-core pipelines schema docs` instead. """ - if not os.path.exists(schema_path): - log.error("Could not find 'nextflow_schema.json' in current directory. Please specify a path.") - sys.exit(1) - - from nf_core.schema import PipelineSchema - - schema_obj = PipelineSchema() - # Assume we're in a pipeline dir root if schema path not set - schema_obj.get_schema_path(schema_path) - schema_obj.load_schema() - schema_obj.print_documentation(output, format, force, columns.split(",")) - - -# nf-core bump-version -@nf_core_cli.command("bump-version") -@click.argument("new_version", required=True, metavar="") -@click.option( - "-d", - "--dir", - type=click.Path(exists=True), - default=".", - help=r"Pipeline directory. [dim]\[default: current working directory][/]", -) -@click.option( - "-n", - "--nextflow", - is_flag=True, - default=False, - help="Bump required nextflow version instead of pipeline version", -) -def bump_version(new_version, dir, nextflow): - """ - Update nf-core pipeline version number. - - The pipeline version number is mentioned in a lot of different places - in nf-core pipelines. This tool updates the version for you automatically, - so that you don't accidentally miss any. - - Should be used for each pipeline release, and again for the next - development version after release. - - As well as the pipeline version, you can also change the required version of Nextflow. - """ - from nf_core.bump_version import bump_nextflow_version, bump_pipeline_version - from nf_core.utils import Pipeline, is_pipeline_directory - - try: - # Check if pipeline directory contains necessary files - is_pipeline_directory(dir) - - # Make a pipeline object and load config etc - pipeline_obj = Pipeline(dir) - pipeline_obj._load() - - # Bump the pipeline version number - if not nextflow: - bump_pipeline_version(pipeline_obj, new_version) - else: - bump_nextflow_version(pipeline_obj, new_version) - except UserWarning as e: - log.error(e) - sys.exit(1) + log.warning( + "The `[magenta]nf-core schema docs[/]` command is deprecated. Use `[magenta]nf-core pipelines schema docs[/]` instead." 
+ ) + pipelines_schema_docs(schema_path, output, format, force, columns) -# nf-core create-logo -@nf_core_cli.command("create-logo") +# nf-core create-logo (deprecated) +@nf_core_cli.command("create-logo", deprecated=True, hidden=True) @click.argument("logo-text", metavar="") -@click.option("-d", "--dir", type=click.Path(), default=".", help="Directory to save the logo in.") +@click.option("-d", "--dir", "directory", type=click.Path(), default=".", help="Directory to save the logo in.") @click.option( "-n", "--name", @@ -2102,34 +1803,23 @@ def bump_version(new_version, dir, nextflow): default=False, help="Overwrite any files if they already exist", ) -def logo(logo_text, dir, name, theme, width, format, force): +def command_create_logo(logo_text, directory, name, theme, width, format, force): """ - Generate a logo with the nf-core logo template. - - This command generates an nf-core pipeline logo, using the supplied + Use `nf-core pipelines create-logo` instead. """ - from nf_core.create_logo import create_logo - - try: - if dir == ".": - dir = Path.cwd() - logo_path = create_logo(logo_text, dir, name, theme, width, format, force) - # Print path to logo relative to current working directory - try: - logo_path = Path(logo_path).relative_to(Path.cwd()) - except ValueError: - logo_path = Path(logo_path) - log.info(f"Created logo: [magenta]{logo_path}[/]") - except UserWarning as e: - log.error(e) - sys.exit(1) + log.warning( + "The `[magenta]nf-core create-logo[/]` command is deprecated. Use `[magenta]nf-core pipelines create-logo[/]` instead." + ) + pipelines_create_logo(logo_text, directory, name, theme, width, format, force) -# nf-core sync -@nf_core_cli.command("sync") +# nf-core sync (deprecated) +@nf_core_cli.command("sync", hidden=True, deprecated=True) +@click.pass_context @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. [dim]\[default: current working directory][/]", ) @@ -2156,32 +1846,391 @@ def logo(logo_text, dir, name, theme, width, format, force): @click.option("-g", "--github-repository", type=str, help="GitHub PR: target repository.") @click.option("-u", "--username", type=str, help="GitHub PR: auth username.") @click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") -def sync(dir, from_branch, pull_request, github_repository, username, template_yaml, force_pr): +def command_sync(ctx, directory, from_branch, pull_request, github_repository, username, template_yaml, force_pr): """ - Sync a pipeline [cyan i]TEMPLATE[/] branch with the nf-core template. + Use `nf-core pipelines sync` instead. + """ + log.warning( + "The `[magenta]nf-core sync[/]` command is deprecated. Use `[magenta]nf-core pipelines sync[/]` instead." + ) + pipelines_sync(ctx, directory, from_branch, pull_request, github_repository, username, template_yaml, force_pr) + + +# nf-core bump-version (deprecated) +@nf_core_cli.command("bump-version", hidden=True, deprecated=True) +@click.pass_context +@click.argument("new_version", default="") +@click.option( + "-d", + "--dir", + "directory", + type=click.Path(exists=True), + default=".", + help=r"Pipeline directory. [dim]\[default: current working directory][/]", +) +@click.option( + "-n", + "--nextflow", + is_flag=True, + default=False, + help="Bump required nextflow version instead of pipeline version", +) +def command_bump_version(ctx, new_version, directory, nextflow): + """ + Use `nf-core pipelines bump-version` instead. 
+ """ + log.warning( + "The `[magenta]nf-core bump-version[/]` command is deprecated. Use `[magenta]nf-core pipelines bump-version[/]` instead." + ) + pipelines_bump_version(ctx, new_version, directory, nextflow) + + +# nf-core list (deprecated) +@nf_core_cli.command("list", deprecated=True, hidden=True) +@click.argument("keywords", required=False, nargs=-1, metavar="") +@click.option( + "-s", + "--sort", + type=click.Choice(["release", "pulled", "name", "stars"]), + default="release", + help="How to sort listed pipelines", +) +@click.option("--json", is_flag=True, default=False, help="Print full output as JSON") +@click.option("--show-archived", is_flag=True, default=False, help="Print archived workflows") +@click.pass_context +def command_list(ctx, keywords, sort, json, show_archived): + """ + DEPREUse `nf-core pipelines list` instead.CATED + """ + log.warning( + "The `[magenta]nf-core list[/]` command is deprecated. Use `[magenta]nf-core pipelines list[/]` instead." + ) + pipelines_list(ctx, keywords, sort, json, show_archived) + + +# nf-core launch (deprecated) +@nf_core_cli.command("launch", deprecated=True, hidden=True) +@click.argument("pipeline", required=False, metavar="") +@click.option("-r", "--revision", help="Release/branch/SHA of the project to run (if remote)") +@click.option("-i", "--id", help="ID for web-gui launch parameter set") +@click.option( + "-c", + "--command-only", + is_flag=True, + default=False, + help="Create Nextflow command with params (no params file)", +) +@click.option( + "-o", + "--params-out", + type=click.Path(), + default=os.path.join(os.getcwd(), "nf-params.json"), + help="Path to save run parameters file", +) +@click.option( + "-p", + "--params-in", + type=click.Path(exists=True), + help="Set of input run params to use from a previous run", +) +@click.option( + "-a", + "--save-all", + is_flag=True, + default=False, + help="Save all parameters, even if unchanged from default", +) +@click.option( + "-x", + "--show-hidden", + is_flag=True, + default=False, + help="Show hidden params which don't normally need changing", +) +@click.option( + "-u", + "--url", + type=str, + default="https://nf-co.re/launch", + help="Customise the builder URL (for development work)", +) +@click.pass_context +def command_launch( + ctx, + pipeline, + id, + revision, + command_only, + params_in, + params_out, + save_all, + show_hidden, + url, +): + """ + Use `nf-core pipelines launch` instead. + """ + log.warning( + "The `[magenta]nf-core launch[/]` command is deprecated. Use `[magenta]nf-core pipelines launch[/]` instead." + ) + pipelines_launch(ctx, pipeline, id, revision, command_only, params_in, params_out, save_all, show_hidden, url) + + +# nf-core create-params-file (deprecated) +@nf_core_cli.command("create-params-file", deprecated=True, hidden=True) +@click.argument("pipeline", required=False, metavar="") +@click.option("-r", "--revision", help="Release/branch/SHA of the pipeline (if remote)") +@click.option( + "-o", + "--output", + type=str, + default="nf-params.yml", + metavar="", + help="Output filename. Defaults to `nf-params.yml`.", +) +@click.option("-f", "--force", is_flag=True, default=False, help="Overwrite existing files") +@click.option( + "-x", + "--show-hidden", + is_flag=True, + default=False, + help="Show hidden params which don't normally need changing", +) +def command_create_params_file(pipeline, revision, output, force, show_hidden): + """ + Use `nf-core pipelines create-params-file` instead. 
+ """ + log.warning( + "The `[magenta]nf-core create-params-file[/]` command is deprecated. Use `[magenta]nf-core pipelines create-params-file[/]` instead." + ) + pipelines_create_params_file(pipeline, revision, output, force, show_hidden) - To keep nf-core pipelines up to date with improvements in the main - template, we use a method of synchronisation that uses a special - git branch called [cyan i]TEMPLATE[/]. - This command updates the [cyan i]TEMPLATE[/] branch with the latest version of - the nf-core template, so that these updates can be synchronised with - the pipeline. It is run automatically for all pipelines when ever a - new release of [link=https://github.com/nf-core/tools]nf-core/tools[/link] (and the included template) is made. +# nf-core download (deprecated) +@nf_core_cli.command("download", deprecated=True, hidden=True) +@click.argument("pipeline", required=False, metavar="") +@click.option( + "-r", + "--revision", + multiple=True, + help="Pipeline release to download. Multiple invocations are possible, e.g. `-r 1.1 -r 1.2`", +) +@click.option("-o", "--outdir", type=str, help="Output directory") +@click.option( + "-x", + "--compress", + type=click.Choice(["tar.gz", "tar.bz2", "zip", "none"]), + help="Archive compression type", +) +@click.option("-f", "--force", is_flag=True, default=False, help="Overwrite existing files") +@click.option( + "-t", + "--tower", + is_flag=True, + default=False, + hidden=True, + help="Download for Seqera Platform. DEPRECATED: Please use `--platform` instead.", +) +@click.option( + "--platform", + is_flag=True, + default=False, + help="Download for Seqera Platform (formerly Nextflow Tower)", +) +@click.option( + "-d", + "--download-configuration", + is_flag=True, + default=False, + help="Include configuration profiles in download. Not available with `--platform`", +) +@click.option( + "--tag", + multiple=True, + help="Add custom alias tags to `--platform` downloads. For example, `--tag \"3.10=validated\"` adds the custom 'validated' tag to the 3.10 release.", +) +@click.option( + "-s", + "--container-system", + type=click.Choice(["none", "singularity"]), + help="Download container images of required software.", +) +@click.option( + "-l", + "--container-library", + multiple=True, + help="Container registry/library or mirror to pull images from.", +) +@click.option( + "-u", + "--container-cache-utilisation", + type=click.Choice(["amend", "copy", "remote"]), + help="Utilise a `singularity.cacheDir` in the download process, if applicable.", +) +@click.option( + "-i", + "--container-cache-index", + type=str, + help="List of images already available in a remote `singularity.cacheDir`.", +) +@click.option( + "-p", + "--parallel-downloads", + type=int, + default=4, + help="Number of parallel image downloads", +) +@click.pass_context +def command_download( + ctx, + pipeline, + revision, + outdir, + compress, + force, + tower, + platform, + download_configuration, + tag, + container_system, + container_library, + container_cache_utilisation, + container_cache_index, + parallel_downloads, +): """ - from nf_core.sync import PipelineSync, PullRequestExceptionError, SyncExceptionError - from nf_core.utils import is_pipeline_directory + Use `nf-core pipelines download` instead. + """ + log.warning( + "The `[magenta]nf-core download[/]` command is deprecated. Use `[magenta]nf-core pipelines download[/]` instead." 
+ ) + pipelines_download( + ctx, + pipeline, + revision, + outdir, + compress, + force, + platform or tower, + download_configuration, + tag, + container_system, + container_library, + container_cache_utilisation, + container_cache_index, + parallel_downloads, + ) - # Check if pipeline directory contains necessary files - is_pipeline_directory(dir) - # Sync the given pipeline dir - sync_obj = PipelineSync(dir, from_branch, pull_request, github_repository, username, template_yaml, force_pr) - try: - sync_obj.sync() - except (SyncExceptionError, PullRequestExceptionError) as e: - log.error(e) - sys.exit(1) +# nf-core lint (deprecated) +@nf_core_cli.command("lint", hidden=True, deprecated=True) +@click.option( + "-d", + "--dir", + "directory", + type=click.Path(exists=True), + default=".", + help=r"Pipeline directory [dim]\[default: current working directory][/]", +) +@click.option( + "--release", + is_flag=True, + default=os.path.basename(os.path.dirname(os.environ.get("GITHUB_REF", "").strip(" '\""))) == "master" + and os.environ.get("GITHUB_REPOSITORY", "").startswith("nf-core/") + and not os.environ.get("GITHUB_REPOSITORY", "") == "nf-core/tools", + help="Execute additional checks for release-ready workflows.", +) +@click.option( + "-f", + "--fix", + type=str, + metavar="", + multiple=True, + help="Attempt to automatically fix specified lint test", +) +@click.option( + "-k", + "--key", + type=str, + metavar="", + multiple=True, + help="Run only these lint tests", +) +@click.option("-p", "--show-passed", is_flag=True, help="Show passing tests on the command line") +@click.option("-i", "--fail-ignored", is_flag=True, help="Convert ignored tests to failures") +@click.option("-w", "--fail-warned", is_flag=True, help="Convert warn tests to failures") +@click.option( + "--markdown", + type=str, + metavar="", + help="File to write linting results to (Markdown)", +) +@click.option( + "--json", + type=str, + metavar="", + help="File to write linting results to (JSON)", +) +@click.option( + "--sort-by", + type=click.Choice(["module", "test"]), + default="test", + help="Sort lint output by module or test name.", + show_default=True, +) +@click.pass_context +def command_lint( + ctx, + directory, + release, + fix, + key, + show_passed, + fail_ignored, + fail_warned, + markdown, + json, + sort_by, +): + """ + Use `nf-core pipelines lint` instead. + """ + log.warning( + "The `[magenta]nf-core lint[/]` command is deprecated. Use `[magenta]nf-core pipelines lint[/]` instead." 
+ ) + pipelines_lint(ctx, directory, release, fix, key, show_passed, fail_ignored, fail_warned, markdown, json, sort_by) + + +# nf-core create (deprecated) +@nf_core_cli.command("create", hidden=True, deprecated=True) +@click.option( + "-n", + "--name", + type=str, + help="The name of your new pipeline", +) +@click.option("-d", "--description", type=str, help="A short description of your pipeline") +@click.option("-a", "--author", type=str, help="Name of the main author(s)") +@click.option("--version", type=str, default="1.0.0dev", help="The initial version number to use") +@click.option("-f", "--force", is_flag=True, default=False, help="Overwrite output directory if it already exists") +@click.option("-o", "--outdir", help="Output directory for new pipeline (default: pipeline name)") +@click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") +@click.option("--plain", is_flag=True, help="Use the standard nf-core template") +@click.option( + "--organisation", + type=str, + default="nf-core", + help="The name of the GitHub organisation where the pipeline will be hosted (default: nf-core)", +) +@click.pass_context +def command_create(ctx, name, description, author, version, force, outdir, template_yaml, plain, organisation): + """ + Use `nf-core pipelines create` instead. + """ + log.warning( + "The `[magenta]nf-core create[/]` command is deprecated. Use `[magenta]nf-core pipelines create[/]` instead." + ) + pipelines_create(ctx, name, description, author, version, force, outdir, template_yaml, organisation) # Main script is being run - launch the CLI diff --git a/nf_core/commands_modules.py b/nf_core/commands_modules.py new file mode 100644 index 000000000..33b1f7516 --- /dev/null +++ b/nf_core/commands_modules.py @@ -0,0 +1,358 @@ +import logging +import sys + +import rich + +from nf_core.utils import rich_force_colors + +log = logging.getLogger(__name__) +stdout = rich.console.Console(force_terminal=rich_force_colors()) + + +def modules_list_remote(ctx, keywords, json): + """ + List modules in a remote GitHub repo [dim i](e.g [link=https://github.com/nf-core/modules]nf-core/modules[/])[/]. + """ + from nf_core.modules.list import ModuleList + + try: + module_list = ModuleList( + ".", + True, + ctx.obj["modules_repo_url"], + ctx.obj["modules_repo_branch"], + ctx.obj["modules_repo_no_pull"], + ) + stdout.print(module_list.list_components(keywords, json)) + except (UserWarning, LookupError) as e: + log.critical(e) + sys.exit(1) + + +def modules_list_local(ctx, keywords, json, directory): # pylint: disable=redefined-builtin + """ + List modules installed locally in a pipeline + """ + from nf_core.modules.list import ModuleList + + try: + module_list = ModuleList( + directory, + False, + ctx.obj["modules_repo_url"], + ctx.obj["modules_repo_branch"], + ctx.obj["modules_repo_no_pull"], + ) + stdout.print(module_list.list_components(keywords, json)) + except (UserWarning, LookupError) as e: + log.error(e) + sys.exit(1) + + +def modules_install(ctx, tool, directory, prompt, force, sha): + """ + Install DSL2 modules within a pipeline. + + Fetches and installs module files from a remote repo e.g. nf-core/modules. 
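(Editor's aside: thin wrappers like the ones in this file are easy to smoke-test with Click's own test runner. The import path below matches the `nf_core_cli` group defined in `nf_core/__main__.py` above, but the test itself is an illustration, not part of the PR.)

from click.testing import CliRunner

from nf_core.__main__ import nf_core_cli  # the root Click group from this diff


def test_modules_install_help():
    """`--help` exercises the wrapper without touching a pipeline or the network."""
    result = CliRunner().invoke(nf_core_cli, ["modules", "install", "--help"])
    assert result.exit_code == 0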
+ """ + from nf_core.modules.install import ModuleInstall + + try: + module_install = ModuleInstall( + directory, + force, + prompt, + sha, + ctx.obj["modules_repo_url"], + ctx.obj["modules_repo_branch"], + ctx.obj["modules_repo_no_pull"], + ) + exit_status = module_install.install(tool) + if not exit_status: + sys.exit(1) + except (UserWarning, LookupError) as e: + log.error(e) + sys.exit(1) + + +def modules_update( + ctx, + tool, + directory, + force, + prompt, + sha, + install_all, + preview, + save_diff, + update_deps, + limit_output, +): + """ + Update DSL2 modules within a pipeline. + + Fetches and updates module files from a remote repo e.g. nf-core/modules. + """ + from nf_core.modules.update import ModuleUpdate + + try: + module_install = ModuleUpdate( + directory, + force, + prompt, + sha, + install_all, + preview, + save_diff, + update_deps, + ctx.obj["modules_repo_url"], + ctx.obj["modules_repo_branch"], + ctx.obj["modules_repo_no_pull"], + limit_output, + ) + exit_status = module_install.update(tool) + if not exit_status and install_all: + sys.exit(1) + except (UserWarning, LookupError) as e: + log.error(e) + sys.exit(1) + + +def modules_patch(ctx, tool, directory, remove): + """ + Create a patch file for minor changes in a module + + Checks if a module has been modified locally and creates a patch file + describing how the module has changed from the remote version + """ + from nf_core.modules.patch import ModulePatch + + try: + module_patch = ModulePatch( + directory, + ctx.obj["modules_repo_url"], + ctx.obj["modules_repo_branch"], + ctx.obj["modules_repo_no_pull"], + ) + if remove: + module_patch.remove(tool) + else: + module_patch.patch(tool) + except (UserWarning, LookupError) as e: + log.error(e) + sys.exit(1) + + +def modules_remove(ctx, directory, tool): + """ + Remove a module from a pipeline. + """ + from nf_core.modules.remove import ModuleRemove + + try: + module_remove = ModuleRemove( + directory, + ctx.obj["modules_repo_url"], + ctx.obj["modules_repo_branch"], + ctx.obj["modules_repo_no_pull"], + ) + module_remove.remove(tool) + except (UserWarning, LookupError) as e: + log.critical(e) + sys.exit(1) + + +def modules_create( + ctx, + tool, + directory, + author, + label, + meta, + no_meta, + force, + conda_name, + conda_package_version, + empty_template, + migrate_pytest, +): + """ + Create a new DSL2 module from the nf-core template. + + If the specified directory is a pipeline, this function creates a file called + 'modules/local/tool_subtool.nf' + + If the specified directory is a clone of nf-core/modules, it creates or modifies files + in 'modules/', 'tests/modules' and 'tests/config/pytest_modules.yml' + """ + # Combine two bool flags into one variable + has_meta = None + if meta and no_meta: + log.critical("Both arguments '--meta' and '--no-meta' given. Please pick one.") + elif meta: + has_meta = True + elif no_meta: + has_meta = False + + from nf_core.modules.create import ModuleCreate + + # Run function + try: + module_create = ModuleCreate( + directory, + tool, + author, + label, + has_meta, + force, + conda_name, + conda_package_version, + empty_template, + migrate_pytest, + ) + module_create.create() + except UserWarning as e: + log.critical(e) + sys.exit(1) + except LookupError as e: + log.error(e) + sys.exit(1) + + +def modules_test(ctx, tool, directory, no_prompts, update, once, profile, migrate_pytest): + """ + Run nf-test for a module. + + Given the name of a module, runs the nf-test command to test the module and generate snapshots. 
+ """ + from nf_core.components.components_test import ComponentsTest + + if migrate_pytest: + modules_create( + ctx, + tool, + directory, + author="", + label="", + meta=True, + no_meta=False, + force=False, + conda_name=None, + conda_package_version=None, + empty_template=False, + migrate_pytest=migrate_pytest, + ) + try: + module_tester = ComponentsTest( + component_type="modules", + component_name=tool, + directory=directory, + no_prompts=no_prompts, + update=update, + once=once, + remote_url=ctx.obj["modules_repo_url"], + branch=ctx.obj["modules_repo_branch"], + verbose=ctx.obj["verbose"], + profile=profile, + ) + module_tester.run() + except (UserWarning, LookupError) as e: + log.critical(e) + sys.exit(1) + + +def modules_lint(ctx, tool, directory, registry, key, all, fail_warned, local, passed, sort_by, fix_version, fix): + """ + Lint one or more modules in a directory. + + Checks DSL2 module code against nf-core guidelines to ensure + that all modules follow the same standards. + + Test modules within a pipeline or a clone of the + nf-core/modules repository. + """ + from nf_core.components.lint import LintExceptionError + from nf_core.modules.lint import ModuleLint + + try: + module_lint = ModuleLint( + directory, + fail_warned=fail_warned, + fix=fix, + registry=ctx.params["registry"], + remote_url=ctx.obj["modules_repo_url"], + branch=ctx.obj["modules_repo_branch"], + no_pull=ctx.obj["modules_repo_no_pull"], + hide_progress=ctx.obj["hide_progress"], + ) + module_lint.lint( + module=tool, + registry=registry, + key=key, + all_modules=all, + print_results=True, + local=local, + show_passed=passed, + sort_by=sort_by, + fix_version=fix_version, + ) + if len(module_lint.failed) > 0: + sys.exit(1) + except LintExceptionError as e: + log.error(e) + sys.exit(1) + except (UserWarning, LookupError) as e: + log.critical(e) + sys.exit(1) + + +def modules_info(ctx, tool, directory): + """ + Show developer usage information about a given module. + + Parses information from a module's [i]meta.yml[/] and renders help + on the command line. A handy equivalent to searching the + [link=https://nf-co.re/modules]nf-core website[/]. + + If run from a pipeline and a local copy of the module is found, the command + will print this usage info. + If not, usage from the remote modules repo will be shown. + """ + from nf_core.modules.info import ModuleInfo + + try: + module_info = ModuleInfo( + directory, + tool, + ctx.obj["modules_repo_url"], + ctx.obj["modules_repo_branch"], + ctx.obj["modules_repo_no_pull"], + ) + stdout.print(module_info.get_component_info()) + except (UserWarning, LookupError) as e: + log.error(e) + sys.exit(1) + + +def modules_bump_versions(ctx, tool, directory, all, show_all): + """ + Bump versions for one or more modules in a clone of + the nf-core/modules repo. 
+ """ + from nf_core.modules.bump_versions import ModuleVersionBumper + from nf_core.modules.modules_utils import ModuleExceptionError + + try: + version_bumper = ModuleVersionBumper( + directory, + ctx.obj["modules_repo_url"], + ctx.obj["modules_repo_branch"], + ctx.obj["modules_repo_no_pull"], + ) + version_bumper.bump_versions(module=tool, all_modules=all, show_uptodate=show_all) + except ModuleExceptionError as e: + log.error(e) + sys.exit(1) + except (UserWarning, LookupError) as e: + log.critical(e) + sys.exit(1) diff --git a/nf_core/commands_pipelines.py b/nf_core/commands_pipelines.py new file mode 100644 index 000000000..1186935e5 --- /dev/null +++ b/nf_core/commands_pipelines.py @@ -0,0 +1,429 @@ +import logging +import os +import sys +from pathlib import Path + +import rich + +from nf_core.pipelines.params_file import ParamsFileBuilder +from nf_core.utils import rich_force_colors + +log = logging.getLogger(__name__) + +stdout = rich.console.Console(force_terminal=rich_force_colors()) + +## nf-core pipelines command functions ## + + +# nf-core pipelines create +def pipelines_create(ctx, name, description, author, version, force, outdir, template_yaml, organisation): + """ + Create a new pipeline using the nf-core template. + + Uses the nf-core template to make a skeleton Nextflow pipeline with all required + files, boilerplate code and best-practices. + \n\n + Run without any command line arguments to use an interactive interface. + """ + from nf_core.pipelines.create import PipelineCreateApp + from nf_core.pipelines.create.create import PipelineCreate + + if (name and description and author) or (template_yaml): + # If all command arguments are used, run without the interactive interface + try: + create_obj = PipelineCreate( + name, + description, + author, + version=version, + force=force, + outdir=outdir, + template_config=template_yaml, + organisation=organisation, + ) + create_obj.init_pipeline() + except UserWarning as e: + log.error(e) + sys.exit(1) + elif name or description or author or version != "1.0.0dev" or force or outdir or organisation != "nf-core": + log.error( + "[red]Partial arguments supplied.[/] " + "Run without [i]any[/] arguments for an interactive interface, " + "or with at least name + description + author to use non-interactively." + ) + sys.exit(1) + else: + log.info("Launching interactive nf-core pipeline creation tool.") + app = PipelineCreateApp() + app.run() + sys.exit(app.return_code or 0) + + +# nf-core pipelines bump-version +def pipelines_bump_version(ctx, new_version, directory, nextflow): + """ + Update nf-core pipeline version number. + + The pipeline version number is mentioned in a lot of different places + in nf-core pipelines. This tool updates the version for you automatically, + so that you don't accidentally miss any. + + Should be used for each pipeline release, and again for the next + development version after release. + + As well as the pipeline version, you can also change the required version of Nextflow. 
+ """ + from nf_core.pipelines.bump_version import bump_nextflow_version, bump_pipeline_version + from nf_core.utils import Pipeline, is_pipeline_directory + + try: + # Check if pipeline directory contains necessary files + is_pipeline_directory(directory) + + # Make a pipeline object and load config etc + pipeline_obj = Pipeline(directory) + pipeline_obj._load() + + # Bump the pipeline version number + if not nextflow: + bump_pipeline_version(pipeline_obj, new_version) + else: + bump_nextflow_version(pipeline_obj, new_version) + except UserWarning as e: + log.error(e) + sys.exit(1) + + +# nf-core pipelines lint +def pipelines_lint( + ctx, + directory, + release, + fix, + key, + show_passed, + fail_ignored, + fail_warned, + markdown, + json, + sort_by, +): + """ + Check pipeline code against nf-core guidelines. + + Runs a large number of automated tests to ensure that the supplied pipeline + meets the nf-core guidelines. Documentation of all lint tests can be found + on the nf-core website: [link=https://nf-co.re/tools/docs/]https://nf-co.re/tools/docs/[/] + + You can ignore tests using a file called [blue].nf-core.yml[/] [i](if you have a good reason!)[/]. + See the documentation for details. + """ + from nf_core.pipelines.lint import run_linting + from nf_core.utils import is_pipeline_directory + + # Check if pipeline directory is a pipeline + try: + is_pipeline_directory(directory) + except UserWarning as e: + log.error(e) + sys.exit(1) + + # Run the lint tests! + try: + lint_obj, module_lint_obj, subworkflow_lint_obj = run_linting( + directory, + release, + fix, + key, + show_passed, + fail_ignored, + fail_warned, + sort_by, + markdown, + json, + ctx.obj["hide_progress"], + ) + swf_failed = 0 + module_failed = 0 + if subworkflow_lint_obj is not None: + swf_failed = len(subworkflow_lint_obj.failed) + if module_lint_obj is not None: + module_failed = len(module_lint_obj.failed) + if len(lint_obj.failed) + module_failed + swf_failed > 0: + sys.exit(1) + except AssertionError as e: + log.critical(e) + sys.exit(1) + except UserWarning as e: + log.error(e) + sys.exit(1) + + +# nf-core pipelines download +def pipelines_download( + ctx, + pipeline, + revision, + outdir, + compress, + force, + platform, + download_configuration, + tag, + container_system, + container_library, + container_cache_utilisation, + container_cache_index, + parallel_downloads, +): + """ + Download a pipeline, nf-core/configs and pipeline singularity images. + + Collects all files in a single archive and configures the downloaded + workflow to use relative paths to the configs and singularity images. + """ + from nf_core.pipelines.download import DownloadWorkflow + + dl = DownloadWorkflow( + pipeline, + revision, + outdir, + compress, + force, + platform, + download_configuration, + tag, + container_system, + container_library, + container_cache_utilisation, + container_cache_index, + parallel_downloads, + ) + dl.download_workflow() + + +# nf-core pipelines create-params-file +def pipelines_create_params_file(ctx, pipeline, revision, output, force, show_hidden): + """ + Build a parameter file for a pipeline. + + Uses the pipeline schema file to generate a YAML parameters file. + Parameters are set to the pipeline defaults and descriptions are shown in comments. + After the output file is generated, it can then be edited as needed before + passing to nextflow using the `-params-file` option. + + Run using a remote pipeline name (such as GitHub `user/repo` or a URL), + a local pipeline directory. 
+    """
+    builder = ParamsFileBuilder(pipeline, revision)
+
+    if not builder.write_params_file(output, show_hidden=show_hidden, force=force):
+        sys.exit(1)
+
+
+# nf-core pipelines launch
+def pipelines_launch(
+    ctx,
+    pipeline,
+    id,
+    revision,
+    command_only,
+    params_in,
+    params_out,
+    save_all,
+    show_hidden,
+    url,
+):
+    """
+    Launch a pipeline using a web GUI or command line prompts.
+
+    Uses the pipeline schema file to collect inputs for all available pipeline
+    parameters. Parameter names, descriptions and help text are shown.
+    The pipeline schema is used to validate all inputs as they are entered.
+
+    When finished, saves a file with the selected parameters which can be
+    passed to Nextflow using the -params-file option.
+
+    Run using a remote pipeline name (such as GitHub `user/repo` or a URL),
+    a local pipeline directory or an ID from the nf-core web launch tool.
+    """
+    from nf_core.pipelines.launch import Launch
+
+    launcher = Launch(
+        pipeline,
+        revision,
+        command_only,
+        params_in,
+        params_out,
+        save_all,
+        show_hidden,
+        url,
+        id,
+    )
+    if not launcher.launch_pipeline():
+        sys.exit(1)
+
+
+# nf-core pipelines list
+def pipelines_list(ctx, keywords, sort, json, show_archived):
+    """
+    List available nf-core pipelines with local info.
+
+    Checks the web for a list of nf-core pipelines with their latest releases.
+    Shows which nf-core pipelines you have pulled locally and whether they are up to date.
+    """
+    from nf_core.pipelines.list import list_workflows
+
+    stdout.print(list_workflows(keywords, sort, json, show_archived))
+
+
+# nf-core pipelines sync
+def pipelines_sync(ctx, directory, from_branch, pull_request, github_repository, username, template_yaml, force_pr):
+    """
+    Sync a pipeline [cyan i]TEMPLATE[/] branch with the nf-core template.
+
+    To keep nf-core pipelines up to date with improvements in the main
+    template, we use a method of synchronisation that uses a special
+    git branch called [cyan i]TEMPLATE[/].
+
+    This command updates the [cyan i]TEMPLATE[/] branch with the latest version of
+    the nf-core template, so that these updates can be synchronised with
+    the pipeline. It is run automatically for all pipelines whenever a
+    new release of [link=https://github.com/nf-core/tools]nf-core/tools[/link] (and the included template) is made.
+    """
+    from nf_core.pipelines.sync import PipelineSync, PullRequestExceptionError, SyncExceptionError
+    from nf_core.utils import is_pipeline_directory
+
+    try:
+        # Check if pipeline directory contains necessary files
+        is_pipeline_directory(directory)
+        # Sync the given pipeline dir
+        sync_obj = PipelineSync(
+            directory, from_branch, pull_request, github_repository, username, template_yaml, force_pr
+        )
+        sync_obj.sync()
+    except (SyncExceptionError, PullRequestExceptionError) as e:
+        log.error(e)
+        sys.exit(1)
+
+
+# nf-core pipelines create-logo
+def pipelines_create_logo(logo_text, directory, name, theme, width, format, force):
+    """
+    Generate a logo with the nf-core logo template.
+
+    This command generates an nf-core pipeline logo, using the supplied <logo_text>.
+    """
+    from nf_core.pipelines.create_logo import create_logo
+
+    try:
+        if directory == ".":
+            directory = Path.cwd()
+        logo_path = create_logo(logo_text, directory, name, theme, width, format, force)
+        # Print path to logo relative to current working directory
+        try:
+            logo_path = Path(logo_path).relative_to(Path.cwd())
+        except ValueError:
+            logo_path = Path(logo_path)
+        log.info(f"Created logo: [magenta]{logo_path}[/]")
+    except UserWarning as e:
+        log.error(e)
+        sys.exit(1)
+
+
+# nf-core pipelines schema validate
+def pipelines_schema_validate(pipeline, params):
+    """
+    Validate a set of parameters against a pipeline schema.
+
+    Nextflow can be run using the -params-file flag, which loads
+    script parameters from a JSON file.
+
+    This command takes such a file and validates it against the pipeline
+    schema, checking whether all schema rules are satisfied.
+    """
+    from nf_core.pipelines.schema import PipelineSchema
+
+    schema_obj = PipelineSchema()
+    try:
+        schema_obj.get_schema_path(pipeline)
+        # Load and check schema
+        schema_obj.load_lint_schema()
+    except AssertionError as e:
+        log.error(e)
+        sys.exit(1)
+    schema_obj.load_input_params(params)
+    try:
+        schema_obj.validate_params()
+    except AssertionError:
+        sys.exit(1)
+
+
+# nf-core pipelines schema build
+def pipelines_schema_build(directory, no_prompts, web_only, url):
+    """
+    Interactively build a pipeline schema from Nextflow params.
+
+    Automatically detects parameters from the pipeline config and main.nf and
+    compares these to the pipeline schema. Prompts to add or remove parameters
+    if the two do not match one another.
+
+    Once all parameters are accounted for, it can launch a web GUI tool on the
+    https://nf-co.re website where you can annotate and organise parameters.
+    Listens for this to be completed and saves the updated schema.
+    """
+    from nf_core.pipelines.schema import PipelineSchema
+
+    try:
+        schema_obj = PipelineSchema()
+        if schema_obj.build_schema(directory, no_prompts, web_only, url) is False:
+            sys.exit(1)
+    except (UserWarning, AssertionError) as e:
+        log.error(e)
+        sys.exit(1)
+
+
+# nf-core pipelines schema lint
+def pipelines_schema_lint(schema_path):
+    """
+    Check that a given pipeline schema is valid.
+
+    Checks whether the pipeline schema validates as JSON Schema Draft 7
+    and adheres to the additional nf-core pipelines schema requirements.
+
+    These checks also run as part of the nf-core pipelines lint command;
+    this convenience command does just the schema linting, nice and quickly.
+
+    If no schema path is provided, "nextflow_schema.json" will be used (if it exists).
+    """
+    from nf_core.pipelines.schema import PipelineSchema
+
+    schema_obj = PipelineSchema()
+    try:
+        schema_obj.get_schema_path(schema_path)
+        schema_obj.load_lint_schema()
+        # Validate title and description - just warnings as schema should still work fine
+        try:
+            schema_obj.validate_schema_title_description()
+        except AssertionError as e:
+            log.warning(e)
+    except AssertionError:
+        sys.exit(1)
+
+
+# nf-core pipelines schema docs
+def pipelines_schema_docs(schema_path, output, format, force, columns):
+    """
+    Outputs parameter documentation for a pipeline schema.
+    """
+    if not os.path.exists(schema_path):
+        log.error("Could not find 'nextflow_schema.json' in current directory. Please specify a path.")
+        sys.exit(1)
+
+    from nf_core.pipelines.schema import PipelineSchema
+
+    schema_obj = PipelineSchema()
+    # Assume we're in a pipeline dir root if schema path not set
+    schema_obj.get_schema_path(schema_path)
+    schema_obj.load_schema()
+    schema_obj.print_documentation(output, format, force, columns.split(","))
diff --git a/nf_core/commands_subworkflows.py b/nf_core/commands_subworkflows.py
new file mode 100644
index 000000000..8e90a8116
--- /dev/null
+++ b/nf_core/commands_subworkflows.py
@@ -0,0 +1,264 @@
+import logging
+import sys
+
+import rich
+
+from nf_core.utils import rich_force_colors
+
+log = logging.getLogger(__name__)
+
+stdout = rich.console.Console(force_terminal=rich_force_colors())
+
+
+def subworkflows_create(ctx, subworkflow, directory, author, force, migrate_pytest):
+    """
+    Create a new subworkflow from the nf-core template.
+
+    If the specified directory is a pipeline, this function creates a file called
+    'subworkflows/local/<subworkflow_name>.nf'
+
+    If the specified directory is a clone of nf-core/modules, it creates or modifies files
+    in 'subworkflows/', 'tests/subworkflows' and 'tests/config/pytest_modules.yml'
+    """
+    from nf_core.subworkflows import SubworkflowCreate
+
+    # Run function
+    try:
+        subworkflow_create = SubworkflowCreate(directory, subworkflow, author, force, migrate_pytest)
+        subworkflow_create.create()
+    except UserWarning as e:
+        log.critical(e)
+        sys.exit(1)
+    except LookupError as e:
+        log.error(e)
+        sys.exit(1)
+
+
+def subworkflows_test(ctx, subworkflow, directory, no_prompts, update, once, profile, migrate_pytest):
+    """
+    Run nf-test for a subworkflow.
+
+    Given the name of a subworkflow, runs the nf-test command to test the subworkflow and generate snapshots.
+    """
+    from nf_core.components.components_test import ComponentsTest
+
+    if migrate_pytest:
+        subworkflows_create(ctx, subworkflow, directory, None, False, True)
+    try:
+        sw_tester = ComponentsTest(
+            component_type="subworkflows",
+            component_name=subworkflow,
+            directory=directory,
+            no_prompts=no_prompts,
+            update=update,
+            once=once,
+            remote_url=ctx.obj["modules_repo_url"],
+            branch=ctx.obj["modules_repo_branch"],
+            verbose=ctx.obj["verbose"],
+            profile=profile,
+        )
+        sw_tester.run()
+    except (UserWarning, LookupError) as e:
+        log.critical(e)
+        sys.exit(1)
+
+
+def subworkflows_list_remote(ctx, keywords, json):
+    """
+    List subworkflows in a remote GitHub repo [dim i](e.g. [link=https://github.com/nf-core/modules]nf-core/modules[/])[/].
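+
+    For example, optionally filtering with an (illustrative) keyword:
+
+        nf-core subworkflows list remote
+        nf-core subworkflows list remote bam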
+ """ + from nf_core.subworkflows import SubworkflowList + + try: + subworkflow_list = SubworkflowList( + ".", + True, + ctx.obj["modules_repo_url"], + ctx.obj["modules_repo_branch"], + ctx.obj["modules_repo_no_pull"], + ) + + stdout.print(subworkflow_list.list_components(keywords, json)) + except (UserWarning, LookupError) as e: + log.critical(e) + sys.exit(1) + + +def subworkflows_list_local(ctx, keywords, json, directory): # pylint: disable=redefined-builtin + """ + List subworkflows installed locally in a pipeline + """ + from nf_core.subworkflows import SubworkflowList + + try: + subworkflow_list = SubworkflowList( + directory, + False, + ctx.obj["modules_repo_url"], + ctx.obj["modules_repo_branch"], + ctx.obj["modules_repo_no_pull"], + ) + stdout.print(subworkflow_list.list_components(keywords, json)) + except (UserWarning, LookupError) as e: + log.error(e) + sys.exit(1) + + +def subworkflows_lint(ctx, subworkflow, directory, registry, key, all, fail_warned, local, passed, sort_by, fix): + """ + Lint one or more subworkflows in a directory. + + Checks DSL2 subworkflow code against nf-core guidelines to ensure + that all subworkflows follow the same standards. + + Test subworkflows within a pipeline or a clone of the + nf-core/modules repository. + """ + from nf_core.components.lint import LintExceptionError + from nf_core.subworkflows import SubworkflowLint + + try: + subworkflow_lint = SubworkflowLint( + directory, + fail_warned=fail_warned, + fix=fix, + registry=ctx.params["registry"], + remote_url=ctx.obj["modules_repo_url"], + branch=ctx.obj["modules_repo_branch"], + no_pull=ctx.obj["modules_repo_no_pull"], + hide_progress=ctx.obj["hide_progress"], + ) + subworkflow_lint.lint( + subworkflow=subworkflow, + registry=registry, + key=key, + all_subworkflows=all, + print_results=True, + local=local, + show_passed=passed, + sort_by=sort_by, + ) + if len(subworkflow_lint.failed) > 0: + sys.exit(1) + except LintExceptionError as e: + log.error(e) + sys.exit(1) + except (UserWarning, LookupError) as e: + log.critical(e) + sys.exit(1) + + +def subworkflows_info(ctx, subworkflow, directory): + """ + Show developer usage information about a given subworkflow. + + Parses information from a subworkflow's [i]meta.yml[/] and renders help + on the command line. A handy equivalent to searching the + [link=https://nf-co.re/modules]nf-core website[/]. + + If run from a pipeline and a local copy of the subworkflow is found, the command + will print this usage info. + If not, usage from the remote subworkflows repo will be shown. + """ + from nf_core.subworkflows import SubworkflowInfo + + try: + subworkflow_info = SubworkflowInfo( + directory, + subworkflow, + ctx.obj["modules_repo_url"], + ctx.obj["modules_repo_branch"], + ctx.obj["modules_repo_no_pull"], + ) + stdout.print(subworkflow_info.get_component_info()) + except (UserWarning, LookupError) as e: + log.error(e) + sys.exit(1) + + +def subworkflows_install(ctx, subworkflow, directory, prompt, force, sha): + """ + Install DSL2 subworkflow within a pipeline. + + Fetches and installs subworkflow files from a remote repo e.g. nf-core/modules. 
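+
+    For example (the subworkflow name is illustrative):
+
+        nf-core subworkflows install bam_sort_stats_samtools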
+ """ + from nf_core.subworkflows import SubworkflowInstall + + try: + subworkflow_install = SubworkflowInstall( + directory, + force, + prompt, + sha, + ctx.obj["modules_repo_url"], + ctx.obj["modules_repo_branch"], + ctx.obj["modules_repo_no_pull"], + ) + exit_status = subworkflow_install.install(subworkflow) + if not exit_status: + sys.exit(1) + except (UserWarning, LookupError) as e: + log.error(e) + sys.exit(1) + + +def subworkflows_remove(ctx, directory, subworkflow): + """ + Remove a subworkflow from a pipeline. + """ + from nf_core.subworkflows import SubworkflowRemove + + try: + module_remove = SubworkflowRemove( + directory, + ctx.obj["modules_repo_url"], + ctx.obj["modules_repo_branch"], + ctx.obj["modules_repo_no_pull"], + ) + module_remove.remove(subworkflow) + except (UserWarning, LookupError) as e: + log.critical(e) + sys.exit(1) + + +def subworkflows_update( + ctx, + subworkflow, + directory, + force, + prompt, + sha, + install_all, + preview, + save_diff, + update_deps, + limit_output, +): + """ + Update DSL2 subworkflow within a pipeline. + + Fetches and updates subworkflow files from a remote repo e.g. nf-core/modules. + """ + from nf_core.subworkflows import SubworkflowUpdate + + try: + subworkflow_install = SubworkflowUpdate( + directory, + force, + prompt, + sha, + install_all, + preview, + save_diff, + update_deps, + ctx.obj["modules_repo_url"], + ctx.obj["modules_repo_branch"], + ctx.obj["modules_repo_no_pull"], + limit_output, + ) + exit_status = subworkflow_install.update(subworkflow) + if not exit_status and install_all: + sys.exit(1) + except (UserWarning, LookupError) as e: + log.error(e) + sys.exit(1) diff --git a/nf_core/components/components_command.py b/nf_core/components/components_command.py index 4df67639e..f25fb33a6 100644 --- a/nf_core/components/components_command.py +++ b/nf_core/components/components_command.py @@ -22,7 +22,7 @@ class ComponentCommand: def __init__( self, component_type: str, - dir: str, + directory: Union[str, Path] = ".", remote_url: Optional[str] = None, branch: Optional[str] = None, no_pull: bool = False, @@ -32,11 +32,13 @@ def __init__( """ Initialise the ComponentClass object """ - self.component_type = component_type - self.dir = dir + self.component_type: str = component_type + self.directory: Path = Path(directory) self.modules_repo = ModulesRepo(remote_url, branch, no_pull, hide_progress) - self.hide_progress = hide_progress - self.no_prompts = no_prompts + self.hide_progress: bool = hide_progress + self.no_prompts: bool = no_prompts + self.repo_type: Optional[str] = None + self.org: str = "" self._configure_repo_and_paths() def _configure_repo_and_paths(self, nf_dir_req: bool = True) -> None: @@ -47,18 +49,17 @@ def _configure_repo_and_paths(self, nf_dir_req: bool = True) -> None: Args: nf_dir_req (bool, optional): Whether this command requires being run in the nf-core modules repo or a nf-core pipeline repository. Defaults to True. 
""" - try: - if self.dir: - self.dir, self.repo_type, self.org = get_repo_info(self.dir, use_prompt=not self.no_prompts) - else: - self.repo_type = None - self.org = "" + if self.directory: + if self.directory == Path(".") and not nf_dir_req: + self.no_prompts = True + self.directory, self.repo_type, self.org = get_repo_info(self.directory, use_prompt=not self.no_prompts) except UserWarning: if nf_dir_req: raise - self.repo_type = None - self.org = "" + except FileNotFoundError: + raise + self.default_modules_path = Path("modules", self.org) self.default_tests_path = Path("tests", "modules", self.org) self.default_subworkflows_path = Path("subworkflows", self.org) @@ -68,7 +69,7 @@ def get_local_components(self) -> List[str]: """ Get the local modules/subworkflows in a pipeline """ - local_component_dir = Path(self.dir, self.component_type, "local") + local_component_dir = Path(self.directory, self.component_type, "local") return [ str(path.relative_to(local_component_dir)) for path in local_component_dir.iterdir() if path.suffix == ".nf" ] @@ -78,12 +79,12 @@ def get_components_clone_modules(self) -> List[str]: Get the modules/subworkflows repository available in a clone of nf-core/modules """ if self.component_type == "modules": - component_base_path = Path(self.dir, self.default_modules_path) + component_base_path = Path(self.directory, self.default_modules_path) elif self.component_type == "subworkflows": - component_base_path = Path(self.dir, self.default_subworkflows_path) + component_base_path = Path(self.directory, self.default_subworkflows_path) return [ - str(Path(dir).relative_to(component_base_path)) - for dir, _, files in os.walk(component_base_path) + str(Path(directory).relative_to(component_base_path)) + for directory, _, files in os.walk(component_base_path) if "main.nf" in files ] @@ -91,41 +92,41 @@ def has_valid_directory(self) -> bool: """Check that we were given a pipeline or clone of nf-core/modules""" if self.repo_type == "modules": return True - if self.dir is None or not os.path.exists(self.dir): - log.error(f"Could not find directory: {self.dir}") + if not self.directory.exists(): + log.error(f"Could not find directory: {self.directory}") return False - main_nf = os.path.join(self.dir, "main.nf") - nf_config = os.path.join(self.dir, "nextflow.config") - if not os.path.exists(main_nf) and not os.path.exists(nf_config): - if Path(self.dir).resolve().parts[-1].startswith("nf-core"): - raise UserWarning(f"Could not find a 'main.nf' or 'nextflow.config' file in '{self.dir}'") - log.warning(f"Could not find a 'main.nf' or 'nextflow.config' file in '{self.dir}'") + main_nf = Path(self.directory, "main.nf") + nf_config = Path(self.directory, "nextflow.config") + if not main_nf.exists() and not nf_config.exists(): + if self.directory.resolve().parts[-1].startswith("nf-core"): + raise UserWarning(f"Could not find a 'main.nf' or 'nextflow.config' file in '{self.directory}'") + log.warning(f"Could not find a 'main.nf' or 'nextflow.config' file in '{self.directory}'") return True def has_modules_file(self) -> None: """Checks whether a module.json file has been created and creates one if it is missing""" - modules_json_path = os.path.join(self.dir, "modules.json") - if not os.path.exists(modules_json_path): + modules_json_path = Path(self.directory, "modules.json") + if not modules_json_path.exists(): log.info("Creating missing 'module.json' file.") - ModulesJson(self.dir).create() + ModulesJson(self.directory).create() - def clear_component_dir(self, component_name: str, 
component_dir: str) -> bool: + def clear_component_dir(self, component_name: str, component_dir: Union[str, Path]) -> bool: """ Removes all files in the module/subworkflow directory Args: component_name (str): The name of the module/subworkflow - component_dir (str): The path to the module/subworkflow in the module repository + component_dir (str, Path): The path to the module/subworkflow """ try: shutil.rmtree(component_dir) # remove all empty directories - for dir_path, dir_names, filenames in os.walk(self.dir, topdown=False): + for dir_path, dir_names, filenames in os.walk(self.directory, topdown=False): if not dir_names and not filenames: try: - os.rmdir(dir_path) + Path(dir_path).rmdir() except OSError: pass else: @@ -147,7 +148,7 @@ def components_from_repo(self, install_dir: str) -> List[str]: Returns: [str]: The names of the modules/subworkflows """ - repo_dir = Path(self.dir, self.component_type, install_dir) + repo_dir = Path(self.directory, self.component_type, install_dir) if not repo_dir.exists(): raise LookupError(f"Nothing installed from {install_dir} in pipeline") @@ -156,7 +157,7 @@ def components_from_repo(self, install_dir: str) -> List[str]: ] def install_component_files( - self, component_name: str, component_version: str, modules_repo: ModulesRepo, install_dir: str + self, component_name: str, component_version: str, modules_repo: ModulesRepo, install_dir: Union[str, Path] ) -> bool: """ Installs a module/subworkflow into the given directory @@ -165,7 +166,7 @@ def install_component_files( component_name (str): The name of the module/subworkflow component_version (str): Git SHA for the version of the module/subworkflow to be installed modules_repo (ModulesRepo): A correctly configured ModulesRepo object - install_dir (str): The path to where the module/subworkflow should be installed (should be the 'modules/' or 'subworkflows/' dir of the pipeline) + install_dir (str, Path): The path to where the module/subworkflow should be installed (should be the 'modules/' or 'subworkflows/' dir of the pipeline) Returns: (bool): Whether the operation was successful of not @@ -180,8 +181,11 @@ def load_lint_config(self) -> None: Add parsed config to the `self.lint_config` class attribute. 
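+
+        For illustration, a minimal lint override in `.nf-core.yml` could look
+        like this (the test name and ignored file are examples):
+
+            lint:
+                files_exist:
+                    - CODE_OF_CONDUCT.md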
""" - _, tools_config = nf_core.utils.load_tools_config(self.dir) - self.lint_config = tools_config.get("lint", {}) + _, tools_config = nf_core.utils.load_tools_config(self.directory) + if tools_config is None: + raise UserWarning("Could not load `.nf-core.yml` file.") + else: + self.lint_config = tools_config.get("lint", {}) def check_modules_structure(self) -> None: """ @@ -193,9 +197,9 @@ def check_modules_structure(self) -> None: """ if self.repo_type == "pipeline": wrong_location_modules: List[Path] = [] - for directory, _, files in os.walk(Path(self.dir, "modules")): + for directory, _, files in os.walk(Path(self.directory, "modules")): if "main.nf" in files: - module_path = Path(directory).relative_to(Path(self.dir, "modules")) + module_path = Path(directory).relative_to(Path(self.directory, "modules")) parts = module_path.parts # Check that there are modules installed directly under the 'modules' directory if parts[1] == "modules": @@ -215,9 +219,9 @@ def check_modules_structure(self) -> None: wrong_dir = Path(modules_dir, module) shutil.move(str(wrong_dir), str(correct_dir)) log.info(f"Moved {wrong_dir} to {correct_dir}.") - shutil.rmtree(Path(self.dir, "modules", self.modules_repo.repo_path, "modules")) + shutil.rmtree(Path(self.directory, "modules", self.modules_repo.repo_path, "modules")) # Regenerate modules.json file - modules_json = ModulesJson(self.dir) + modules_json = ModulesJson(self.directory) modules_json.check_up_to_date() def check_patch_paths(self, patch_path: Path, module_name: str) -> None: @@ -243,12 +247,16 @@ def check_patch_paths(self, patch_path: Path, module_name: str) -> None: for line in lines: fh.write(line) # Update path in modules.json if the file is in the correct format - modules_json = ModulesJson(self.dir) + modules_json = ModulesJson(self.directory) modules_json.load() - if modules_json.has_git_url_and_modules() and modules_json.modules_json is not None: + if ( + modules_json.has_git_url_and_modules() + and self.modules_repo.repo_path is not None + and modules_json.modules_json is not None + ): modules_json.modules_json["repos"][self.modules_repo.remote_url]["modules"][ self.modules_repo.repo_path - ][module_name]["patch"] = str(patch_path.relative_to(Path(self.dir).resolve())) + ][module_name]["patch"] = str(patch_path.relative_to(self.directory.resolve())) modules_json.dump() def check_if_in_include_stmts(self, component_path: str) -> Dict[str, List[Dict[str, Union[int, str]]]]: @@ -262,7 +270,7 @@ def check_if_in_include_stmts(self, component_path: str) -> Dict[str, List[Dict[ """ include_stmts: Dict[str, List[Dict[str, Union[int, str]]]] = {} if self.repo_type == "pipeline": - workflow_files = Path(self.dir, "workflows").glob("*.nf") + workflow_files = Path(self.directory, "workflows").glob("*.nf") for workflow_file in workflow_files: with open(workflow_file) as fh: # Check if component path is in the file using mmap diff --git a/nf_core/components/components_test.py b/nf_core/components/components_test.py index 9b81f54f0..57c0034ba 100644 --- a/nf_core/components/components_test.py +++ b/nf_core/components/components_test.py @@ -93,7 +93,7 @@ def run(self) -> None: os.environ["NFT_DIFF_ARGS"] = ( "--line-numbers --expand-tabs=2" # taken from https://code.askimed.com/nf-test/docs/assertions/snapshots/#snapshot-differences ) - with nf_core.utils.set_wd(Path(self.dir)): + with nf_core.utils.set_wd(self.directory): self.check_snapshot_stability() if len(self.errors) > 0: errors = "\n - ".join(self.errors) @@ -126,7 +126,7 @@ def check_inputs(self) 
-> None: self.component_dir = Path(self.component_type, self.modules_repo.repo_path, *self.component_name.split("/")) # First, sanity check that the module directory exists - if not Path(self.dir, self.component_dir).is_dir(): + if not Path(self.directory, self.component_dir).is_dir(): raise UserWarning( f"Cannot find directory '{self.component_dir}'.{' Should be TOOL/SUBTOOL or TOOL' if self.component_type == 'modules' else ''}" ) diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index 01650a643..67e05e0ce 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -1,33 +1,43 @@ import logging import re from pathlib import Path -from typing import List, Optional, Tuple +from typing import TYPE_CHECKING, List, Optional, Tuple, Union import questionary +import requests import rich.prompt +if TYPE_CHECKING: + from nf_core.modules.modules_repo import ModulesRepo + import nf_core.utils -from nf_core.modules.modules_repo import ModulesRepo log = logging.getLogger(__name__) +# Constants for the nf-core/modules repo used throughout the module files +NF_CORE_MODULES_NAME = "nf-core" +NF_CORE_MODULES_REMOTE = "https://github.com/nf-core/modules.git" +NF_CORE_MODULES_DEFAULT_BRANCH = "master" + -def get_repo_info(directory: str, use_prompt: Optional[bool] = True) -> Tuple[str, Optional[str], str]: +def get_repo_info(directory: Path, use_prompt: Optional[bool] = True) -> Tuple[Path, Optional[str], str]: """ Determine whether this is a pipeline repository or a clone of nf-core/modules """ # Verify that the pipeline dir exists - if directory is None or not Path(directory).is_dir(): + if not Path(directory).is_dir(): raise UserWarning(f"Could not find directory: {directory}") # Try to find the root directory - base_dir: str = nf_core.utils.determine_base_dir(directory) + base_dir: Path = nf_core.utils.determine_base_dir(directory) # Figure out the repository type from the .nf-core.yml config file if we can config_fn, tools_config = nf_core.utils.load_tools_config(base_dir) - repo_type: Optional[str] = tools_config.get("repository_type", None) + if config_fn is None: + raise UserWarning(f"Could not find a config file in directory: {base_dir}") + repo_type = getattr(tools_config, "repository_type", None) or None # If not set, prompt the user if not repo_type and use_prompt: @@ -54,14 +64,12 @@ def get_repo_info(directory: str, use_prompt: Optional[bool] = True) -> Tuple[st # Check if it's a valid answer if repo_type not in ["pipeline", "modules"]: - raise UserWarning(f"Invalid repository type: '{repo_type}'") - + raise UserWarning(f"Invalid repository type: '{repo_type}', must be 'pipeline' or 'modules'") + org: str = "" # Check for org if modules repo - if repo_type == "pipeline": - org = "" - elif repo_type == "modules": - org = tools_config.get("org_path", None) - if org is None: + if repo_type == "modules": + org = getattr(tools_config, "org_path", "") or "" + if org == "": log.warning("Organisation path not defined in %s [key: org_path]", config_fn.name) org = questionary.text( "What is the organisation path under which modules and subworkflows are stored?", @@ -82,7 +90,10 @@ def get_repo_info(directory: str, use_prompt: Optional[bool] = True) -> Tuple[st def prompt_component_version_sha( - component_name: str, component_type: str, modules_repo: ModulesRepo, installed_sha: Optional[str] = None + component_name: str, + component_type: str, + modules_repo: "ModulesRepo", + installed_sha: Optional[str] = None, ) -> str: 
""" Creates an interactive questionary prompt for selecting the module/subworkflow version @@ -101,7 +112,7 @@ def prompt_component_version_sha( git_sha = "" page_nbr = 1 - all_commits = modules_repo.get_component_git_log(component_name, component_type) + all_commits = iter(modules_repo.get_component_git_log(component_name, component_type)) next_page_commits = [next(all_commits, None) for _ in range(10)] next_page_commits = [commit for commit in next_page_commits if commit is not None] @@ -132,7 +143,7 @@ def prompt_component_version_sha( return git_sha -def get_components_to_install(subworkflow_dir: str) -> Tuple[List[str], List[str]]: +def get_components_to_install(subworkflow_dir: Union[str, Path]) -> Tuple[List[str], List[str]]: """ Parse the subworkflow main.nf file to retrieve all imported modules and subworkflows. """ @@ -143,7 +154,7 @@ def get_components_to_install(subworkflow_dir: str) -> Tuple[List[str], List[str regex = re.compile( r"include(?: *{ *)([a-zA-Z\_0-9]*)(?: *as *)?(?:[a-zA-Z\_0-9]*)?(?: *})(?: *from *)(?:'|\")(.*)(?:'|\")" ) - match = regex.match(line) + match = regex.search(line) if match and len(match.groups()) == 2: name, link = match.groups() if link.startswith("../../../"): @@ -152,3 +163,29 @@ def get_components_to_install(subworkflow_dir: str) -> Tuple[List[str], List[str elif link.startswith("../"): subworkflows.append(name.lower()) return modules, subworkflows + + +def get_biotools_id(tool_name) -> str: + """ + Try to find a bio.tools ID for 'tool' + """ + url = f"https://bio.tools/api/t/?q={tool_name}&format=json" + try: + # Send a GET request to the API + response = requests.get(url) + response.raise_for_status() # Raise an error for bad status codes + # Parse the JSON response + data = response.json() + + # Iterate through the tools in the response to find the tool name + for tool in data["list"]: + if tool["name"].lower() == tool_name: + return tool["biotoolsCURIE"] + + # If the tool name was not found in the response + log.warning(f"Could not find a bio.tools ID for '{tool_name}'") + return "" + + except requests.exceptions.RequestException as e: + log.warning(f"Could not find a bio.tools ID for '{tool_name}': {e}") + return "" diff --git a/nf_core/components/create.py b/nf_core/components/create.py index 6c9c01b49..c0095da23 100644 --- a/nf_core/components/create.py +++ b/nf_core/components/create.py @@ -14,13 +14,15 @@ import jinja2 import questionary import rich +import rich.prompt import yaml from packaging.version import parse as parse_version import nf_core import nf_core.utils from nf_core.components.components_command import ComponentCommand -from nf_core.lint_utils import run_prettier_on_file +from nf_core.components.components_utils import get_biotools_id +from nf_core.pipelines.lint_utils import run_prettier_on_file log = logging.getLogger(__name__) @@ -29,7 +31,7 @@ class ComponentCreate(ComponentCommand): def __init__( self, component_type: str, - directory: str = ".", + directory: Path = Path("."), component: str = "", author: Optional[str] = None, process_label: Optional[str] = None, @@ -60,8 +62,9 @@ def __init__( self.file_paths: Dict[str, Path] = {} self.not_empty_template = not empty_template self.migrate_pytest = migrate_pytest + self.tool_identifier = "" - def create(self): + def create(self) -> bool: """ Create a new DSL2 module or subworkflow from the nf-core template. 
@@ -88,8 +91,7 @@ def create(self): ├── meta.yml ├── environment.yml └── tests - ├── main.nf.test - └── tags.yml + └── main.nf.test ``` The function will attempt to automatically find a Bioconda package called @@ -102,17 +104,16 @@ def create(self): ├── main.nf ├── meta.yml └── tests - ├── main.nf.test - └── tags.yml + └── main.nf.test ``` """ - if self.component_type == "modules": # Check modules directory structure self.check_modules_structure() # Check whether the given directory is a nf-core pipeline or a clone of nf-core/modules + log.info(f"Repository type: [blue]{self.repo_type}") if self.directory != ".": log.info(f"Base directory: '{self.directory}'") @@ -150,6 +151,8 @@ def create(self): if self.component_type == "modules": # Try to find a bioconda package for 'component' self._get_bioconda_tool() + # Try to find a biotools entry for 'component' + self.tool_identifier = get_biotools_id(self.component) # Prompt for GitHub username self._get_username() @@ -157,8 +160,12 @@ def create(self): if self.component_type == "modules": self._get_module_structure_components() + # Add a valid organization name for nf-test tags + not_alphabet = re.compile(r"[^a-zA-Z]") + self.org_alphabet = not_alphabet.sub("", self.org) + # Create component template with jinja2 - self._render_template() + assert self._render_template() log.info(f"Created component template: '{self.component_name}'") if self.migrate_pytest: @@ -168,7 +175,9 @@ def create(self): self._print_and_delete_pytest_files() new_files = [str(path) for path in self.file_paths.values()] + log.info("Created following files:\n " + "\n ".join(new_files)) + return True def _get_bioconda_tool(self): """ @@ -228,7 +237,14 @@ def _get_bioconda_tool(self): log.info(f"Could not find a Docker/Singularity container ({e})") def _get_module_structure_components(self): - process_label_defaults = ["process_single", "process_low", "process_medium", "process_high", "process_long"] + process_label_defaults = [ + "process_single", + "process_low", + "process_medium", + "process_high", + "process_long", + "process_high_memory", + ] if self.process_label is None: log.info( "Provide an appropriate resource label for the process, taken from the " @@ -252,17 +268,19 @@ def _get_module_structure_components(self): ) while self.has_meta is None: self.has_meta = rich.prompt.Confirm.ask( - "[violet]Will the module require a meta map of sample information?", default=True + "[violet]Will the module require a meta map of sample information?", + default=True, ) - def _render_template(self): + def _render_template(self) -> Optional[bool]: """ Create new module/subworkflow files with Jinja2. 
""" object_attrs = vars(self) # Run jinja2 for each file in the template folder env = jinja2.Environment( - loader=jinja2.PackageLoader("nf_core", f"{self.component_type[:-1]}-template"), keep_trailing_newline=True + loader=jinja2.PackageLoader("nf_core", f"{self.component_type[:-1]}-template"), + keep_trailing_newline=True, ) for template_fn, dest_fn in self.file_paths.items(): log.debug(f"Rendering template file: '{template_fn}'") @@ -286,6 +304,7 @@ def _render_template(self): Path(nf_core.__file__).parent / f"{self.component_type[:-1]}-template" / template_fn ).stat() dest_fn.chmod(template_stat.st_mode) + return True def _collect_name_prompt(self): """ @@ -329,7 +348,7 @@ def _collect_name_prompt(self): elif self.component_type == "subworkflows": self.component = rich.prompt.Prompt.ask("[violet]Name of subworkflow").strip() - def _get_component_dirs(self): + def _get_component_dirs(self) -> Dict[str, Path]: """Given a directory and a tool/subtool or subworkflow, set the file paths and check if they already exist Returns dict: keys are relative paths to template files, vals are target paths. @@ -361,9 +380,8 @@ def _get_component_dirs(self): # Set file paths file_paths["main.nf"] = component_file - if self.repo_type == "modules": + elif self.repo_type == "modules": component_dir = Path(self.directory, self.component_type, self.org, self.component_dir) - # Check if module/subworkflow directories exist already if component_dir.exists() and not self.force_overwrite and not self.migrate_pytest: raise UserWarning( @@ -372,7 +390,13 @@ def _get_component_dirs(self): if self.component_type == "modules": # If a subtool, check if there is a module called the base tool name already - parent_tool_main_nf = Path(self.directory, self.component_type, self.org, self.component, "main.nf") + parent_tool_main_nf = Path( + self.directory, + self.component_type, + self.org, + self.component, + "main.nf", + ) if self.subtool and parent_tool_main_nf.exists() and not self.migrate_pytest: raise UserWarning( f"Module '{parent_tool_main_nf}' exists already, cannot make subtool '{self.component_name}'" @@ -386,15 +410,15 @@ def _get_component_dirs(self): raise UserWarning( f"Module subtool '{tool_glob[0]}' exists already, cannot make tool '{self.component_name}'" ) - # Set file paths # For modules - can be tool/ or tool/subtool/ so can't do in template directory structure file_paths["main.nf"] = component_dir / "main.nf" file_paths["meta.yml"] = component_dir / "meta.yml" if self.component_type == "modules": file_paths["environment.yml"] = component_dir / "environment.yml" - file_paths["tests/tags.yml"] = component_dir / "tests" / "tags.yml" file_paths["tests/main.nf.test.j2"] = component_dir / "tests" / "main.nf.test" + else: + raise ValueError("`repo_type` not set correctly") return file_paths @@ -428,11 +452,15 @@ def _copy_old_files(self, component_old_path): shutil.copyfile(component_old_path / "meta.yml", self.file_paths["meta.yml"]) if self.component_type == "modules": log.debug("Copying original environment.yml file") - shutil.copyfile(component_old_path / "environment.yml", self.file_paths["environment.yml"]) + shutil.copyfile( + component_old_path / "environment.yml", + self.file_paths["environment.yml"], + ) if (component_old_path / "templates").is_dir(): log.debug("Copying original templates directory") shutil.copytree( - component_old_path / "templates", self.file_paths["environment.yml"].parent / "templates" + component_old_path / "templates", + self.file_paths["environment.yml"].parent / 
"templates", ) # Create a nextflow.config file if it contains information other than publishDir pytest_dir = Path(self.directory, "tests", self.component_type, self.org, self.component_dir) @@ -447,7 +475,14 @@ def _copy_old_files(self, component_old_path): if len(config_lines) > 11: log.debug("Copying nextflow.config file from pytest tests") with open( - Path(self.directory, self.component_type, self.org, self.component_dir, "tests", "nextflow.config"), + Path( + self.directory, + self.component_type, + self.org, + self.component_dir, + "tests", + "nextflow.config", + ), "w+", ) as ofh: ofh.write(config_lines) diff --git a/nf_core/components/info.py b/nf_core/components/info.py index 54fc0004d..f3e5bf617 100644 --- a/nf_core/components/info.py +++ b/nf_core/components/info.py @@ -1,6 +1,7 @@ import logging import os from pathlib import Path +from typing import Dict, List, Optional, Tuple, Union import questionary import yaml @@ -14,8 +15,8 @@ import nf_core.utils from nf_core.components.components_command import ComponentCommand +from nf_core.components.components_utils import NF_CORE_MODULES_REMOTE from nf_core.modules.modules_json import ModulesJson -from nf_core.modules.modules_repo import NF_CORE_MODULES_REMOTE log = logging.getLogger(__name__) @@ -57,38 +58,39 @@ class ComponentInfo(ComponentCommand): def __init__( self, - component_type, - pipeline_dir, - component_name, - remote_url=None, - branch=None, - no_pull=False, + component_type: str, + pipeline_dir: Union[str, Path], + component_name: str, + remote_url: Optional[str] = None, + branch: Optional[str] = None, + no_pull: bool = False, ): super().__init__(component_type, pipeline_dir, remote_url, branch, no_pull) - self.meta = None - self.local_path = None - self.remote_location = None - self.local = None + self.meta: Optional[Dict] = None + self.local_path: Optional[Path] = None + self.remote_location: Optional[str] = None + self.local: bool = False + self.modules_json: Optional[ModulesJson] = None if self.repo_type == "pipeline": # Check modules directory structure if self.component_type == "modules": self.check_modules_structure() # Check modules.json up to date - self.modules_json = ModulesJson(self.dir) + self.modules_json = ModulesJson(self.directory) self.modules_json.check_up_to_date() else: self.modules_json = None self.component = self.init_mod_name(component_name) - def _configure_repo_and_paths(self, nf_dir_req=False): + def _configure_repo_and_paths(self, nf_dir_req=False) -> None: """ Override the default with nf_dir_req set to False to allow info to be run from anywhere and still return remote info """ return super()._configure_repo_and_paths(nf_dir_req) - def init_mod_name(self, component): + def init_mod_name(self, component: Optional[str]) -> str: """ Makes sure that we have a module/subworkflow name before proceeding. 
@@ -102,18 +104,22 @@ def init_mod_name(self, component): if self.local: if self.repo_type == "modules": components = self.get_components_clone_modules() - else: - components = self.modules_json.get_all_components(self.component_type).get( - self.modules_repo.remote_url, {} - ) + elif self.repo_type == "pipeline": + assert self.modules_json is not None # mypy + all_components: List[Tuple[str, str]] = self.modules_json.get_all_components( + self.component_type + ).get(self.modules_repo.remote_url, []) + components = [ component if directory == self.modules_repo.repo_path else f"{directory}/{component}" - for directory, component in components + for directory, component in all_components ] if not components: raise UserWarning( f"No {self.component_type[:-1]} installed from '{self.modules_repo.remote_url}'" ) + else: + raise UserWarning("Unknown repository type") else: components = self.modules_repo.get_avail_components(self.component_type) components.sort() @@ -131,15 +137,17 @@ def init_mod_name(self, component): ).unsafe_ask() else: if self.repo_type == "pipeline": + assert self.modules_json is not None # mypy # check if the module is locally installed local_paths = self.modules_json.get_all_components(self.component_type).get( - self.modules_repo.remote_url, {} - ) - for directory, comp in local_paths: - if comp == component: - component_base_path = Path(self.dir, self.component_type) - self.local_path = Path(component_base_path, directory, component) - break + self.modules_repo.remote_url + ) # type: ignore + if local_paths is not None: + for directory, comp in local_paths: + if comp == component: + component_base_path = Path(self.directory, self.component_type) + self.local_path = Path(component_base_path, directory, component) + break if self.local_path: self.local = True @@ -162,24 +170,26 @@ def get_component_info(self): return self.generate_component_info_help() - def get_local_yaml(self): + def get_local_yaml(self) -> Optional[Dict]: """Attempt to get the meta.yml file from a locally installed module/subworkflow. 
Returns: - dict or bool: Parsed meta.yml found, False otherwise + Optional[dict]: Parsed meta.yml if found, None otherwise """ if self.repo_type == "pipeline": + assert self.modules_json is not None # mypy # Try to find and load the meta.yml file - component_base_path = Path(self.dir, self.component_type) + component_base_path = Path(self.directory, self.component_type) # Check that we have any modules/subworkflows installed from this repo components = self.modules_json.get_all_components(self.component_type).get(self.modules_repo.remote_url) - component_names = [component for _, component in components] if components is None: raise LookupError(f"No {self.component_type[:-1]} installed from {self.modules_repo.remote_url}") + component_names = [component for _, component in components] + if self.component in component_names: - install_dir = [dir for dir, module in components if module == self.component][0] + install_dir = [directory for directory, module in components if module == self.component][0] comp_dir = Path(component_base_path, install_dir, self.component) meta_fn = Path(comp_dir, "meta.yml") if meta_fn.exists(): @@ -190,7 +200,7 @@ def get_local_yaml(self): log.debug(f"{self.component_type[:-1].title()} '{self.component}' meta.yml not found locally") else: - component_base_path = Path(self.dir, self.component_type, self.org) + component_base_path = Path(self.directory, self.component_type, self.org) if self.component in os.listdir(component_base_path): comp_dir = Path(component_base_path, self.component) meta_fn = Path(comp_dir, "meta.yml") @@ -203,7 +213,7 @@ def get_local_yaml(self): return None - def get_remote_yaml(self): + def get_remote_yaml(self) -> Optional[dict]: """Attempt to get the meta.yml file from a remote repo. Returns: @@ -211,14 +221,33 @@ def get_remote_yaml(self): """ # Check if our requested module/subworkflow is there if self.component not in self.modules_repo.get_avail_components(self.component_type): - return False + return None file_contents = self.modules_repo.get_meta_yml(self.component_type, self.component) if file_contents is None: - return False + return None self.remote_location = self.modules_repo.remote_url return yaml.safe_load(file_contents) + def generate_params_table(self, type) -> Table: + "Generate a rich table for inputs and outputs" + table = Table(expand=True, show_lines=True, box=box.MINIMAL_HEAVY_HEAD, padding=0) + table.add_column(f":inbox_tray: {type}") + table.add_column("Description") + if self.component_type == "modules": + table.add_column("Pattern", justify="right", style="green") + elif self.component_type == "subworkflows": + table.add_column("Structure", justify="right", style="green") + return table + + def get_channel_structure(self, structure: dict) -> str: + "Get the structure of a channel" + structure_str = "" + for key, info in structure.items(): + pattern = f" - {info['pattern']}" if info.get("pattern") else "" + structure_str += f"{key} ({info['type']}{pattern})" + return structure_str + def generate_component_info_help(self): """Take the parsed meta.yml and generate rich help. 
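To make the new subworkflow rendering concrete, this is what the `get_channel_structure()` logic above produces for a small, illustrative structure dict; note that, as written, entries are concatenated back-to-back without a separator:

```python
# Illustrative channel structure, mimicking a subworkflow meta.yml entry.
structure = {
    "meta": {"type": "map"},
    "bam": {"type": "file", "pattern": "*.bam"},
}

# Same loop as get_channel_structure() above.
structure_str = ""
for key, info in structure.items():
    pattern = f" - {info['pattern']}" if info.get("pattern") else ""
    structure_str += f"{key} ({info['type']}{pattern})"

print(structure_str)  # -> meta (map)bam (file - *.bam)
```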
@@ -242,7 +271,8 @@ def generate_component_info_help(self): "\n" ) ) - + if self.meta is None: + raise UserWarning("No meta.yml file found") if self.meta.get("tools"): tools_strings = [] for tool in self.meta["tools"]: @@ -266,33 +296,48 @@ def generate_component_info_help(self): # Inputs if self.meta.get("input"): - inputs_table = Table(expand=True, show_lines=True, box=box.MINIMAL_HEAVY_HEAD, padding=0) - inputs_table.add_column(":inbox_tray: Inputs") - inputs_table.add_column("Description") - inputs_table.add_column("Pattern", justify="right", style="green") - for input in self.meta["input"]: - for key, info in input.items(): - inputs_table.add_row( - f"[orange1 on black] {key} [/][dim i] ({info['type']})", - Markdown(info["description"] if info["description"] else ""), - info.get("pattern", ""), - ) + inputs_table = self.generate_params_table("Inputs") + for i, input in enumerate(self.meta["input"]): + inputs_table.add_row(f"[italic]input[{i}][/]", "", "") + if self.component_type == "modules": + for element in input: + for key, info in element.items(): + inputs_table.add_row( + f"[orange1 on black] {key} [/][dim i] ({info['type']})", + Markdown(info["description"] if info["description"] else ""), + info.get("pattern", ""), + ) + elif self.component_type == "subworkflows": + for key, info in input.items(): + inputs_table.add_row( + f"[orange1 on black] {key} [/][dim i]", + Markdown(info["description"] if info["description"] else ""), + self.get_channel_structure(info["structure"]) if info.get("structure") else "", + ) renderables.append(inputs_table) # Outputs if self.meta.get("output"): - outputs_table = Table(expand=True, show_lines=True, box=box.MINIMAL_HEAVY_HEAD, padding=0) - outputs_table.add_column(":outbox_tray: Outputs") - outputs_table.add_column("Description") - outputs_table.add_column("Pattern", justify="right", style="green") + outputs_table = self.generate_params_table("Outputs") for output in self.meta["output"]: - for key, info in output.items(): - outputs_table.add_row( - f"[orange1 on black] {key} [/][dim i] ({info['type']})", - Markdown(info["description"] if info["description"] else ""), - info.get("pattern", ""), - ) + if self.component_type == "modules": + for ch_name, elements in output.items(): + outputs_table.add_row(f"{ch_name}", "", "") + for element in elements: + for key, info in element.items(): + outputs_table.add_row( + f"[orange1 on black] {key} [/][dim i] ({info['type']})", + Markdown(info["description"] if info["description"] else ""), + info.get("pattern", ""), + ) + elif self.component_type == "subworkflows": + for key, info in output.items(): + outputs_table.add_row( + f"[orange1 on black] {key} [/][dim i]", + Markdown(info["description"] if info["description"] else ""), + self.get_channel_structure(info["structure"]) if info.get("structure") else "", + ) renderables.append(outputs_table) @@ -306,22 +351,22 @@ def generate_component_info_help(self): ) # Print include statement - if self.local_path: - install_folder = Path(self.dir, self.component_type, self.modules_repo.repo_path) + if self.local_path and self.modules_repo.repo_path is not None: + install_folder = Path(self.directory, self.component_type, self.modules_repo.repo_path) component_name = "_".join(self.component.upper().split("/")) renderables.append( Text.from_markup(f"\n [blue]Use the following statement to include this {self.component_type[:-1]}:") ) renderables.append( Syntax( - f"include {{ {component_name} }} from '../{Path(install_folder, 
self.component).relative_to(self.dir)}/main'", + f"include {{ {component_name} }} from '../{Path(install_folder, self.component).relative_to(self.directory)}/main'", "groovy", theme="ansi_dark", padding=1, ) ) if self.component_type == "subworkflows": - subworkflow_config = Path(install_folder, self.component, "nextflow.config").relative_to(self.dir) + subworkflow_config = Path(install_folder, self.component, "nextflow.config").relative_to(self.directory) if os.path.isfile(subworkflow_config): renderables.append( Text.from_markup("\n [blue]Add the following config statement to use this subworkflow:") diff --git a/nf_core/components/install.py b/nf_core/components/install.py index 6385ee409..5bdcd1ebd 100644 --- a/nf_core/components/install.py +++ b/nf_core/components/install.py @@ -1,20 +1,25 @@ import logging import os from pathlib import Path +from typing import List, Optional, Union import questionary +from rich import print from rich.console import Console +from rich.markdown import Markdown +from rich.panel import Panel from rich.syntax import Syntax +import nf_core.components import nf_core.modules.modules_utils import nf_core.utils from nf_core.components.components_command import ComponentCommand from nf_core.components.components_utils import ( + NF_CORE_MODULES_NAME, get_components_to_install, prompt_component_version_sha, ) from nf_core.modules.modules_json import ModulesJson -from nf_core.modules.modules_repo import NF_CORE_MODULES_NAME log = logging.getLogger(__name__) @@ -22,26 +27,26 @@ class ComponentInstall(ComponentCommand): def __init__( self, - pipeline_dir, - component_type, - force=False, - prompt=False, - sha=None, - remote_url=None, - branch=None, - no_pull=False, - installed_by=False, + pipeline_dir: Union[str, Path], + component_type: str, + force: bool = False, + prompt: bool = False, + sha: Optional[str] = None, + remote_url: Optional[str] = None, + branch: Optional[str] = None, + no_pull: bool = False, + installed_by: Optional[List[str]] = None, ): super().__init__(component_type, pipeline_dir, remote_url, branch, no_pull) self.force = force self.prompt = prompt self.sha = sha - if installed_by: + if installed_by is not None: self.installed_by = installed_by else: - self.installed_by = self.component_type + self.installed_by = [self.component_type] - def install(self, component, silent=False): + def install(self, component: str, silent: bool = False) -> bool: if self.repo_type == "modules": log.error(f"You cannot install a {component} in a clone of nf-core/modules") return False @@ -54,7 +59,7 @@ def install(self, component, silent=False): self.check_modules_structure() # Verify that 'modules.json' is consistent with the installed modules and subworkflows - modules_json = ModulesJson(self.dir) + modules_json = ModulesJson(self.directory) if not silent: modules_json.check_up_to_date() @@ -66,9 +71,22 @@ def install(self, component, silent=False): # Verify SHA if not self.modules_repo.verify_sha(self.prompt, self.sha): + err_msg = f"SHA '{self.sha}' is not a valid commit SHA for the repository '{self.modules_repo.remote_url}'" + log.error(err_msg) + return False + + # verify self.modules_repo entries: + if self.modules_repo is None: + err_msg = "Could not find a valid modules repository." + log.error(err_msg) + return False + if self.modules_repo.repo_path is None: + err_msg = "Could not find a valid modules repository path." 
+ log.error(err_msg) return False # Check and verify component name + component = self.collect_and_verify_name(component, self.modules_repo) if not component: return False @@ -79,7 +97,7 @@ def install(self, component, silent=False): ) # Set the install folder based on the repository name - install_folder = Path(self.dir, self.component_type, self.modules_repo.repo_path) + install_folder = Path(self.directory, self.component_type, self.modules_repo.repo_path) # Compute the component directory component_dir = Path(install_folder, component) @@ -95,8 +113,11 @@ def install(self, component, silent=False): modules_json.load() modules_json.update(self.component_type, self.modules_repo, component, current_version, self.installed_by) return False - - version = self.get_version(component, self.sha, self.prompt, current_version, self.modules_repo) + try: + version = self.get_version(component, self.sha, self.prompt, current_version, self.modules_repo) + except UserWarning as e: + log.error(e) + return False if not version: return False @@ -134,15 +155,15 @@ def install(self, component, silent=False): log.info(f"Use the following statement to include this {self.component_type[:-1]}:") Console().print( Syntax( - f"include {{ {component_name} }} from '../{Path(install_folder, component).relative_to(self.dir)}/main'", + f"include {{ {component_name} }} from '../{Path(install_folder, component).relative_to(self.directory)}/main'", "groovy", theme="ansi_dark", padding=1, ) ) if self.component_type == "subworkflows": - subworkflow_config = Path(install_folder, component, "nextflow.config").relative_to(self.dir) - if os.path.isfile(subworkflow_config): + subworkflow_config = Path(install_folder, component, "nextflow.config").relative_to(self.directory) + if subworkflow_config.is_file(): log.info("Add the following config statement to use this subworkflow:") Console().print( Syntax(f"includeConfig '{subworkflow_config}'", "groovy", theme="ansi_dark", padding=1) @@ -156,19 +177,21 @@ def install_included_components(self, subworkflow_dir): modules_to_install, subworkflows_to_install = get_components_to_install(subworkflow_dir) for s_install in subworkflows_to_install: original_installed = self.installed_by - self.installed_by = Path(subworkflow_dir).parts[-1] + self.installed_by = [Path(subworkflow_dir).parts[-1]] self.install(s_install, silent=True) self.installed_by = original_installed for m_install in modules_to_install: original_component_type = self.component_type self.component_type = "modules" original_installed = self.installed_by - self.installed_by = Path(subworkflow_dir).parts[-1] + self.installed_by = [Path(subworkflow_dir).parts[-1]] self.install(m_install, silent=True) self.component_type = original_component_type self.installed_by = original_installed - def collect_and_verify_name(self, component, modules_repo): + def collect_and_verify_name( + self, component: Optional[str], modules_repo: "nf_core.modules.modules_repo.ModulesRepo" + ) -> str: """ Collect component name. Check that the supplied name is an available module/subworkflow. 
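Taken together, the typed constructor and boolean `install()` return value make a programmatic install straightforward. A hypothetical sketch mirroring the CLI wiring, run from a pipeline root (the module name is illustrative):

```python
from nf_core.components.install import ComponentInstall

# Install one module the same way the CLI entry point does.
installer = ComponentInstall(".", "modules", force=False, prompt=False)
if not installer.install("fastqc"):
    raise SystemExit(1)
```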
@@ -180,18 +203,29 @@ def collect_and_verify_name(self, component, modules_repo): style=nf_core.utils.nfcore_question_style, ).unsafe_ask() + if component is None: + return "" + # Check that the supplied name is an available module/subworkflow if component and component not in modules_repo.get_avail_components(self.component_type, commit=self.sha): - log.error( - f"{self.component_type[:-1].title()} '{component}' not found in list of available {self.component_type}." + log.error(f"{self.component_type[:-1].title()} '{component}' not found in available {self.component_type}") + print( + Panel( + Markdown( + f"Use the command `nf-core {self.component_type} list` to view available {self.component_type}." + ), + title="info", + title_align="left", + style="blue", + padding=1, + ) ) - log.info(f"Use the command 'nf-core {self.component_type} list' to view available software") - return False + + raise ValueError if not modules_repo.component_exists(component, self.component_type, commit=self.sha): warn_msg = f"{self.component_type[:-1].title()} '{component}' not found in remote '{modules_repo.remote_url}' ({modules_repo.branch})" log.warning(warn_msg) - return False return component @@ -261,9 +295,9 @@ def clean_modules_json(self, component, modules_repo, modules_json): Remove installed version of module/subworkflow from modules.json """ for repo_url, repo_content in modules_json.modules_json["repos"].items(): - for dir, dir_components in repo_content[self.component_type].items(): + for directory, dir_components in repo_content[self.component_type].items(): for name, component_values in dir_components.items(): - if name == component and dir == modules_repo.repo_path: + if name == component and directory == modules_repo.repo_path: repo_to_remove = repo_url log.debug( f"Removing {self.component_type[:-1]} '{modules_repo.repo_path}/{component}' from repo '{repo_to_remove}' from modules.json." 
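For reference, `clean_modules_json()` above walks a modules.json shaped roughly like this; the remote URL, SHA and component entry are illustrative:

```python
# repos -> remote URL -> component type -> org directory -> component entry
modules_json = {
    "repos": {
        "https://github.com/nf-core/modules.git": {
            "modules": {
                "nf-core": {
                    "fastqc": {
                        "branch": "master",
                        "git_sha": "abc1234",  # illustrative SHA
                        "installed_by": ["modules"],
                    },
                },
            },
        },
    },
}
```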
@@ -285,7 +319,7 @@ def check_alternate_remotes(self, modules_json): modules_json.load() for repo_url, repo_content in modules_json.modules_json.get("repos", dict()).items(): for component_type in repo_content: - for dir in repo_content.get(component_type, dict()).keys(): - if dir == self.modules_repo.repo_path and repo_url != self.modules_repo.remote_url: + for directory in repo_content.get(component_type, dict()).keys(): + if directory == self.modules_repo.repo_path and repo_url != self.modules_repo.remote_url: return True return False diff --git a/nf_core/components/lint/__init__.py b/nf_core/components/lint/__init__.py index 564dcfaf6..fcc3b414d 100644 --- a/nf_core/components/lint/__init__.py +++ b/nf_core/components/lint/__init__.py @@ -7,6 +7,7 @@ import operator import os from pathlib import Path +from typing import List, Optional, Tuple, Union import rich.box import rich.console @@ -19,8 +20,9 @@ import nf_core.utils from nf_core.components.components_command import ComponentCommand from nf_core.components.nfcore_component import NFCoreComponent -from nf_core.lint_utils import console from nf_core.modules.modules_json import ModulesJson +from nf_core.pipelines.lint_utils import console +from nf_core.utils import LintConfigType from nf_core.utils import plural_s as _s log = logging.getLogger(__name__) @@ -36,15 +38,14 @@ class LintExceptionError(Exception): class LintResult: """An object to hold the results of a lint test""" - def __init__(self, component, lint_test, message, file_path): + def __init__(self, component: NFCoreComponent, lint_test: str, message: str, file_path: Path): self.component = component self.lint_test = lint_test self.message = message self.file_path = file_path - self.component_name = component.component_name + self.component_name: str = component.component_name -@rich.repr.auto class ComponentLint(ComponentCommand): """ An object for linting modules and subworkflows either in a clone of the 'nf-core/modules' @@ -53,18 +54,19 @@ class ComponentLint(ComponentCommand): def __init__( self, - component_type, - dir, - fail_warned=False, - remote_url=None, - branch=None, - no_pull=False, - registry=None, - hide_progress=False, + component_type: str, + directory: Union[str, Path], + fail_warned: bool = False, + fix: bool = False, + remote_url: Optional[str] = None, + branch: Optional[str] = None, + no_pull: bool = False, + registry: Optional[str] = None, + hide_progress: bool = False, ): super().__init__( component_type, - dir=dir, + directory=directory, remote_url=remote_url, branch=branch, no_pull=no_pull, @@ -72,38 +74,51 @@ def __init__( ) self.fail_warned = fail_warned - self.passed = [] - self.warned = [] - self.failed = [] + self.fix = fix + self.passed: List[LintResult] = [] + self.warned: List[LintResult] = [] + self.failed: List[LintResult] = [] + self.all_local_components: List[NFCoreComponent] = [] + + self.lint_config: Optional[LintConfigType] = None + self.modules_json: Optional[ModulesJson] = None + if self.component_type == "modules": self.lint_tests = self.get_all_module_lint_tests(self.repo_type == "pipeline") else: self.lint_tests = self.get_all_subworkflow_lint_tests(self.repo_type == "pipeline") + if self.repo_type is None: + raise LookupError( + "Could not determine repository type. 
Please check the repository type in the nf-core.yml" + ) + if self.repo_type == "pipeline": - modules_json = ModulesJson(self.dir) + modules_json = ModulesJson(self.directory) modules_json.check_up_to_date() - self.all_remote_components = [] + self.all_remote_components: List[NFCoreComponent] = [] for repo_url, components in modules_json.get_all_components(self.component_type).items(): if remote_url is not None and remote_url != repo_url: continue + if isinstance(components, str): + raise LookupError( + f"Error parsing modules.json: {components}. " f"Please check the file for errors or try again." + ) for org, comp in components: self.all_remote_components.append( NFCoreComponent( comp, repo_url, - Path(self.dir, self.component_type, org, comp), + Path(self.directory, self.component_type, org, comp), self.repo_type, - Path(self.dir), + self.directory, self.component_type, ) ) if not self.all_remote_components: - raise LookupError( - f"No {self.component_type} from {self.modules_repo.remote_url} installed in pipeline." - ) - local_component_dir = Path(self.dir, self.component_type, "local") - self.all_local_components = [] + log.warning(f"No {self.component_type} from {self.modules_repo.remote_url} installed in pipeline.") + local_component_dir = Path(self.directory, self.component_type, "local") + if local_component_dir.exists(): self.all_local_components = [ NFCoreComponent( @@ -111,38 +126,42 @@ def __init__( None, Path(local_component_dir, comp), self.repo_type, - Path(self.dir), + self.directory, self.component_type, remote_component=False, ) for comp in self.get_local_components() ] - self.config = nf_core.utils.fetch_wf_config(self.dir, cache_config=True) - else: + self.config = nf_core.utils.fetch_wf_config(self.directory, cache_config=True) + self._set_registry(registry) + + elif self.repo_type == "modules": component_dir = Path( - self.dir, + self.directory, self.default_modules_path if self.component_type == "modules" else self.default_subworkflows_path, ) self.all_remote_components = [ - NFCoreComponent(m, None, component_dir / m, self.repo_type, Path(self.dir), self.component_type) + NFCoreComponent(m, None, component_dir / m, self.repo_type, self.directory, self.component_type) for m in self.get_components_clone_modules() ] self.all_local_components = [] if not self.all_remote_components: - raise LookupError(f"No {self.component_type} in '{self.component_type}' directory") + log.warning(f"No {self.component_type} in '{self.component_type}' directory") # This could be better, perhaps glob for all nextflow.config files in? 
- self.config = nf_core.utils.fetch_wf_config(Path(self.dir).joinpath("tests", "config"), cache_config=True) + self.config = nf_core.utils.fetch_wf_config(self.directory / "tests" / "config", cache_config=True) + self._set_registry(registry) + + def __repr__(self) -> str: + return f"ComponentLint({self.component_type}, {self.directory})" + def _set_registry(self, registry) -> None: if registry is None: self.registry = self.config.get("docker.registry", "quay.io") else: self.registry = registry log.debug(f"Registry set to {self.registry}") - self.lint_config = None - self.modules_json = None - @staticmethod def get_all_module_lint_tests(is_pipeline): if is_pipeline: @@ -168,7 +187,7 @@ def get_all_subworkflow_lint_tests(is_pipeline): def set_up_pipeline_files(self): self.load_lint_config() - self.modules_json = ModulesJson(self.dir) + self.modules_json = ModulesJson(self.directory) self.modules_json.load() # Only continue if a lint config has been loaded @@ -243,7 +262,7 @@ def format_result(test_results, table): module_name = lint_result.component_name # Make the filename clickable to open in VSCode - file_path = os.path.relpath(lint_result.file_path, self.dir) + file_path = os.path.relpath(lint_result.file_path, self.directory) file_path_link = f"[link=vscode://file/{os.path.abspath(file_path)}]{file_path}[/link]" table.add_row( diff --git a/nf_core/components/list.py b/nf_core/components/list.py index f5f2744e1..4c20e6086 100644 --- a/nf_core/components/list.py +++ b/nf_core/components/list.py @@ -1,11 +1,12 @@ import json import logging -from typing import Dict, List, Optional, Tuple, Union, cast +from pathlib import Path +from typing import Dict, List, Optional, Union, cast import rich.table from nf_core.components.components_command import ComponentCommand -from nf_core.modules.modules_json import ModulesJson +from nf_core.modules.modules_json import ModulesJson, ModulesJsonModuleEntry from nf_core.modules.modules_repo import ModulesRepo log = logging.getLogger(__name__) @@ -15,16 +16,27 @@ class ComponentList(ComponentCommand): def __init__( self, component_type: str, - pipeline_dir: str, + pipeline_dir: Union[str, Path] = ".", remote: bool = True, remote_url: Optional[str] = None, branch: Optional[str] = None, no_pull: bool = False, ) -> None: - super().__init__(component_type, pipeline_dir, remote_url, branch, no_pull) self.remote = remote + super().__init__(component_type, pipeline_dir, remote_url, branch, no_pull) + + def _configure_repo_and_paths(self, nf_dir_req: bool = True) -> None: + """ + Override the default with nf_dir_req set to False to allow + info to be run from anywhere and still return remote info + """ + if self.remote: + nf_dir_req = False + return super()._configure_repo_and_paths(nf_dir_req) - def list_components(self, keywords: Optional[List[str]] = None, print_json=False) -> Union[rich.table.Table, str]: + def list_components( + self, keywords: Optional[List[str]] = None, print_json: bool = False + ) -> Union[rich.table.Table, str]: keywords = keywords or [] """ Get available modules/subworkflows names from GitHub tree for repo @@ -87,18 +99,18 @@ def pattern_msg(keywords: List[str]) -> str: return "" # Verify that 'modules.json' is consistent with the installed modules - modules_json: ModulesJson = ModulesJson(self.dir) + modules_json: ModulesJson = ModulesJson(self.directory) modules_json.check_up_to_date() # Filter by keywords - repos_with_comps: Dict[str, List[Tuple[str, str]]] = { + repos_with_comps = { repo_url: [comp for comp in components if all(k in 
comp[1] for k in keywords)] for repo_url, components in modules_json.get_all_components(self.component_type).items() } # Nothing found if sum(map(len, repos_with_comps)) == 0: - log.info(f"No nf-core {self.component_type} found in '{self.dir}'{pattern_msg(keywords)}") + log.info(f"No nf-core {self.component_type} found in '{self.directory}'{pattern_msg(keywords)}") return "" table.add_column("Repository") @@ -110,7 +122,7 @@ def pattern_msg(keywords: List[str]) -> str: modules_json_file = modules_json.modules_json for repo_url, component_with_dir in sorted(repos_with_comps.items()): - repo_entry: Dict[str, Dict[str, Dict[str, Dict[str, Union[str, List[str]]]]]] + repo_entry: Dict[str, Dict[str, Dict[str, ModulesJsonModuleEntry]]] if modules_json_file is None: log.warning(f"Modules JSON file '{modules_json.modules_json_path}' is missing. ") continue @@ -160,5 +172,5 @@ def pattern_msg(keywords: List[str]) -> str: f"{pattern_msg(keywords)}:\n" ) else: - log.info(f"{self.component_type.capitalize()} installed in '{self.dir}'{pattern_msg(keywords)}:\n") + log.info(f"{self.component_type.capitalize()} installed in '{self.directory}'{pattern_msg(keywords)}:\n") return table diff --git a/nf_core/components/nfcore_component.py b/nf_core/components/nfcore_component.py index bcba068af..37e43a536 100644 --- a/nf_core/components/nfcore_component.py +++ b/nf_core/components/nfcore_component.py @@ -5,7 +5,7 @@ import logging import re from pathlib import Path -from typing import Union +from typing import Any, Dict, List, Optional, Tuple, Union log = logging.getLogger(__name__) @@ -17,7 +17,14 @@ class NFCoreComponent: """ def __init__( - self, component_name, repo_url, component_dir, repo_type, base_dir, component_type, remote_component=True + self, + component_name: str, + repo_url: Optional[str], + component_dir: Path, + repo_type: Optional[str], + base_dir: Path, + component_type: str, + remote_component: bool = True, ): """ Initialize the object @@ -34,32 +41,42 @@ def __init__( remote_component (bool): Whether the module is to be treated as a nf-core or local component """ + self.component_type = component_type self.component_name = component_name self.repo_url = repo_url self.component_dir = component_dir self.repo_type = repo_type self.base_dir = base_dir - self.passed = [] - self.warned = [] - self.failed = [] - self.inputs = [] - self.outputs = [] - self.has_meta = False - self.git_sha = None - self.is_patched = False + self.passed: List[Tuple[str, str, Path]] = [] + self.warned: List[Tuple[str, str, Path]] = [] + self.failed: List[Tuple[str, str, Path]] = [] + self.inputs: List[List[Dict[str, Dict[str, str]]]] = [] + self.outputs: List[str] = [] + self.has_meta: bool = False + self.git_sha: Optional[str] = None + self.is_patched: bool = False + self.branch: Optional[str] = None + self.workflow_name: Optional[str] = None if remote_component: # Initialize the important files - self.main_nf = Path(self.component_dir, "main.nf") - self.meta_yml = Path(self.component_dir, "meta.yml") + self.main_nf: Path = Path(self.component_dir, "main.nf") + self.meta_yml: Optional[Path] = Path(self.component_dir, "meta.yml") self.process_name = "" - self.environment_yml = Path(self.component_dir, "environment.yml") + self.environment_yml: Optional[Path] = Path(self.component_dir, "environment.yml") + + component_list = self.component_name.split("/") + + name_index = len(self.component_dir.parts) - 1 - self.component_dir.parts[::-1].index(component_list[0]) + if len(component_list) != 1 and component_list[0] == 
component_list[1]: + # Handle cases where the subtool has the same name as the tool + name_index -= 1 + + repo_dir = self.component_dir.parts[:name_index][-1] - repo_dir = self.component_dir.parts[: self.component_dir.parts.index(self.component_name.split("/")[0])][-1] self.org = repo_dir self.nftest_testdir = Path(self.component_dir, "tests") self.nftest_main_nf = Path(self.nftest_testdir, "main.nf.test") - self.tags_yml = Path(self.nftest_testdir, "tags.yml") if self.repo_type == "pipeline": patch_fn = f"{self.component_name.replace('/', '-')}.diff" @@ -73,12 +90,15 @@ def __init__( self.component_name = self.component_dir.stem # These attributes are only used by nf-core modules # so just initialize them to None - self.meta_yml = "" - self.environment_yml = "" + self.meta_yml = None + self.environment_yml = None self.test_dir = None self.test_yml = None self.test_main_nf = None + def __repr__(self) -> str: + return f"<NFCoreComponent {self.component_name} {self.component_dir} {self.repo_url}>" + def _get_main_nf_tags(self, test_main_nf: Union[Path, str]): """Collect all tags from the main.nf.test file.""" tags = [] @@ -149,48 +169,97 @@ def _get_included_components_in_chained_tests(self, main_nf_test: Union[Path, st included_components.append(component) return included_components - def get_inputs_from_main_nf(self): + def get_inputs_from_main_nf(self) -> None: """Collect all inputs from the main.nf file.""" - inputs = [] + inputs: Any = [] # Can be 'list[list[dict[str, dict[str, str]]]]' or 'list[str]' with open(self.main_nf) as f: data = f.read() - # get input values from main.nf after "input:", which can be formatted as tuple val(foo) path(bar) or val foo or val bar or path bar or path foo - # regex matches: - # val(foo) - # path(bar) - # val foo - # val bar - # path bar - # path foo - # don't match anything inside comments or after "output:" - if "input:" not in data: - log.debug(f"Could not find any inputs in {self.main_nf}") - return inputs - input_data = data.split("input:")[1].split("output:")[0] - regex = r"(val|path)\s*(\(([^)]+)\)|\s*([^)\s,]+))" - matches = re.finditer(regex, input_data, re.MULTILINE) - for _, match in enumerate(matches, start=1): - if match.group(3): - input_val = match.group(3).split(",")[0] # handle `files, stageAs: "inputs/*"` cases - inputs.append(input_val) - elif match.group(4): - input_val = match.group(4).split(",")[0] # handle `files, stageAs: "inputs/*"` cases - inputs.append(input_val) - log.debug(f"Found {len(inputs)} inputs in {self.main_nf}") - self.inputs = inputs + if self.component_type == "modules": + # get input values from main.nf after "input:", which can be formatted as tuple val(foo) path(bar) or val foo or val bar or path bar or path foo + # regex matches: + # val(foo) + # path(bar) + # val foo + # val bar + # path bar + # path foo + # don't match anything inside comments or after "output:" + if "input:" not in data: + log.debug(f"Could not find any inputs in {self.main_nf}") + return + input_data = data.split("input:")[1].split("output:")[0] + for line in input_data.split("\n"): + channel_elements: Any = [] + regex = r"(val|path)\s*(\(([^)]+)\)|\s*([^)\s,]+))" + matches = re.finditer(regex, line) + for _, match in enumerate(matches, start=1): + input_val = None + if match.group(3): + input_val = match.group(3).split(",")[0] # handle `files, stageAs: "inputs/*"` cases + elif match.group(4): + input_val = match.group(4).split(",")[0] # handle `files, stageAs: "inputs/*"` cases + if input_val: + channel_elements.append({input_val: {}}) + if len(channel_elements) > 0: + inputs.append(channel_elements) + 
log.debug(f"Found {len(inputs)} inputs in {self.main_nf}") + self.inputs = inputs + elif self.component_type == "subworkflows": + # get input values from main.nf after "take:" + if "take:" not in data: + log.debug(f"Could not find any inputs in {self.main_nf}") + return + # get all lines between "take" and "main" or "emit" + input_data = data.split("take:")[1].split("main:")[0].split("emit:")[0] + for line in input_data.split("\n"): + try: + inputs.append(line.split()[0]) + except IndexError: + pass # Empty lines + log.debug(f"Found {len(inputs)} inputs in {self.main_nf}") + self.inputs = inputs def get_outputs_from_main_nf(self): outputs = [] with open(self.main_nf) as f: data = f.read() - # get output values from main.nf after "output:". the names are always after "emit:" - if "output:" not in data: - log.debug(f"Could not find any outputs in {self.main_nf}") - return outputs - output_data = data.split("output:")[1].split("when:")[0] - regex = r"emit:\s*([^)\s,]+)" - matches = re.finditer(regex, output_data, re.MULTILINE) - for _, match in enumerate(matches, start=1): - outputs.append(match.group(1)) - log.debug(f"Found {len(outputs)} outputs in {self.main_nf}") - self.outputs = outputs + if self.component_type == "modules": + # get output values from main.nf after "output:". the names are always after "emit:" + if "output:" not in data: + log.debug(f"Could not find any outputs in {self.main_nf}") + return outputs + output_data = data.split("output:")[1].split("when:")[0] + regex_emit = r"emit:\s*([^)\s,]+)" + regex_elements = r"(val|path|env|stdout)\s*(\(([^)]+)\)|\s*([^)\s,]+))" + for line in output_data.split("\n"): + match_emit = re.search(regex_emit, line) + matches_elements = re.finditer(regex_elements, line) + if not match_emit: + continue + output_channel = {match_emit.group(1): []} + for _, match_element in enumerate(matches_elements, start=1): + output_val = None + if match_element.group(3): + output_val = match_element.group(3) + elif match_element.group(4): + output_val = match_element.group(4) + if output_val: + output_val = output_val.strip("'").strip('"') # remove quotes + output_channel[match_emit.group(1)].append({output_val: {}}) + outputs.append(output_channel) + log.debug(f"Found {len(outputs)} outputs in {self.main_nf}") + self.outputs = outputs + elif self.component_type == "subworkflows": + # get output values from main.nf after "emit:". Can be named outputs or not. 
+ if "emit:" not in data: + log.debug(f"Could not find any outputs in {self.main_nf}") + return outputs + output_data = data.split("emit:")[1].split("}")[0] + for line in output_data.split("\n"): + try: + outputs.append(line.split("=")[0].split()[0]) + except IndexError: + # Empty lines + pass + log.debug(f"Found {len(outputs)} outputs in {self.main_nf}") + self.outputs = outputs diff --git a/nf_core/components/patch.py b/nf_core/components/patch.py index 55d574745..41fccd8be 100644 --- a/nf_core/components/patch.py +++ b/nf_core/components/patch.py @@ -15,7 +15,7 @@ class ComponentPatch(ComponentCommand): - def __init__(self, pipeline_dir, component_type, remote_url=None, branch=None, no_pull=False, installed_by=False): + def __init__(self, pipeline_dir, component_type, remote_url=None, branch=None, no_pull=False, installed_by=None): super().__init__(component_type, pipeline_dir, remote_url, branch, no_pull) self.modules_json = ModulesJson(pipeline_dir) @@ -30,10 +30,14 @@ def _parameter_checks(self, component): raise UserWarning("The command was not run in a valid pipeline directory.") components = self.modules_json.get_all_components(self.component_type).get(self.modules_repo.remote_url) + if components is None: + raise UserWarning( + f"No {self.component_type[:-1]}s found in the 'modules.json' file for the remote '{self.modules_repo.remote_url}'" + ) component_names = [component for _, component in components] if component is not None and component not in component_names: - component_dir = [dir for dir, m in components if m == component][0] + component_dir = [d for d, m in components if m == component][0] raise UserWarning( f"{self.component_type[:-1].title()} '{Path(self.component_type, component_dir, component)}' does not exist in the pipeline" ) @@ -84,8 +88,8 @@ def patch(self, component=None): patch_filename = f"{component.replace('/', '-')}.diff" component_relpath = Path(self.component_type, component_dir, component) patch_relpath = Path(component_relpath, patch_filename) - component_current_dir = Path(self.dir, component_relpath) - patch_path = Path(self.dir, patch_relpath) + component_current_dir = Path(self.directory, component_relpath) + patch_path = Path(self.directory, patch_relpath) if patch_path.exists(): remove = questionary.confirm( @@ -185,8 +189,8 @@ def remove(self, component): patch_filename = f"{component.replace('/', '-')}.diff" component_relpath = Path(self.component_type, component_dir, component) patch_relpath = Path(component_relpath, patch_filename) - patch_path = Path(self.dir, patch_relpath) - component_path = Path(self.dir, component_relpath) + patch_path = Path(self.directory, patch_relpath) + component_path = Path(self.directory, component_relpath) if patch_path.exists(): remove = questionary.confirm( diff --git a/nf_core/components/remove.py b/nf_core/components/remove.py index 8d884db6c..c2c584391 100644 --- a/nf_core/components/remove.py +++ b/nf_core/components/remove.py @@ -58,10 +58,10 @@ def remove(self, component, removed_by=None, removed_components=None, force=Fals removed_components = [] # Get the module/subworkflow directory - component_dir = Path(self.dir, self.component_type, repo_path, component) + component_dir = Path(self.directory, self.component_type, repo_path, component) # Load the modules.json file - modules_json = ModulesJson(self.dir) + modules_json = ModulesJson(self.directory) modules_json.load() # Verify that the module/subworkflow is actually installed @@ -98,9 +98,16 @@ def remove(self, component, removed_by=None, 
removed_components=None, force=Fals for file, stmts in include_stmts.items(): renderables = [] for stmt in stmts: + # check that the line number is integer + if not isinstance(stmt["line_number"], int): + log.error( + f"Could not parse line number '{stmt['line_number']}' in '{file}'. Please report this issue." + ) + continue + renderables.append( Syntax( - stmt["line"], + str(stmt["line"]), "groovy", theme="ansi_dark", line_numbers=True, @@ -123,7 +130,7 @@ def remove(self, component, removed_by=None, removed_components=None, force=Fals style=nf_core.utils.nfcore_question_style, ).unsafe_ask(): # add the component back to modules.json - if not ComponentInstall(self.dir, self.component_type, force=True).install( + if not ComponentInstall(self.directory, self.component_type, force=True).install( component, silent=True ): log.warning( @@ -133,7 +140,9 @@ def remove(self, component, removed_by=None, removed_components=None, force=Fals return removed # Remove the component files of all entries removed from modules.json removed = ( - True if self.clear_component_dir(component, Path(self.dir, removed_component_dir)) or removed else False + True + if self.clear_component_dir(component, Path(self.directory, removed_component_dir)) or removed + else False ) removed_components.append(component) diff --git a/nf_core/components/update.py b/nf_core/components/update.py index a54c47232..3e4694adc 100644 --- a/nf_core/components/update.py +++ b/nf_core/components/update.py @@ -38,6 +38,7 @@ def __init__( remote_url=None, branch=None, no_pull=False, + limit_output=False, ): super().__init__(component_type, pipeline_dir, remote_url, branch, no_pull) self.force = force @@ -46,10 +47,11 @@ def __init__( self.update_all = update_all self.show_diff = show_diff self.save_diff_fn = save_diff_fn + self.limit_output = limit_output self.update_deps = update_deps self.component = None self.update_config = None - self.modules_json = ModulesJson(self.dir) + self.modules_json = ModulesJson(self.directory) self.branch = branch def _parameter_checks(self): @@ -75,6 +77,8 @@ def _parameter_checks(self): if not self.has_valid_directory(): raise UserWarning("The command was not run in a valid pipeline directory.") + if self.limit_output and not (self.save_diff_fn or self.show_diff): + raise UserWarning("The '--limit-output' flag can only be used with '--preview' or '--save-diff'.") def update(self, component=None, silent=False, updated=None, check_diff_exist=True) -> bool: """Updates a specified module/subworkflow or all modules/subworkflows in a pipeline. 
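The update.py changes above thread a new limit_output option through ComponentUpdate and reject it up front unless a diff will actually be previewed or saved. A tiny standalone illustration of that guard follows; the function name and call sites are ours, while the condition and message are copied from the diff:

    from typing import Optional

    def check_limit_output(limit_output: bool, show_diff: bool, save_diff_fn: Optional[str]) -> None:
        # Mirrors the check added to ComponentUpdate._parameter_checks().
        if limit_output and not (save_diff_fn or show_diff):
            raise UserWarning("The '--limit-output' flag can only be used with '--preview' or '--save-diff'.")

    check_limit_output(False, False, None)  # fine: flag not set
    check_limit_output(True, True, None)    # fine: previewing a diff
    try:
        check_limit_output(True, False, None)
    except UserWarning as e:
        print(e)  # rejected: nothing to limit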
@@ -92,9 +96,8 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr if updated is None: updated = [] - _, tool_config = nf_core.utils.load_tools_config(self.dir) - self.update_config = tool_config.get("update", {}) - + _, tool_config = nf_core.utils.load_tools_config(self.directory) + self.update_config = getattr(tool_config, "update", {}) or {} self._parameter_checks() # Check modules directory structure @@ -124,7 +127,6 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr components_info = ( self.get_all_components_info() if self.update_all else [self.get_single_component_info(component)] ) - # Save the current state of the modules.json old_modules_json = self.modules_json.get_modules_json() @@ -168,7 +170,7 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr component_install_dir = install_tmp_dir / component # Compute the component directory - component_dir = os.path.join(self.dir, self.component_type, modules_repo.repo_path, component) + component_dir = Path(self.directory, self.component_type, modules_repo.repo_path, component) if sha is not None: version = sha @@ -231,6 +233,7 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr version, dsp_from_dir=component_dir, dsp_to_dir=component_dir, + limit_output=self.limit_output, ) updated.append(component) except UserWarning as e: @@ -271,8 +274,8 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr version, dsp_from_dir=component_dir, dsp_to_dir=component_dir, + limit_output=self.limit_output, ) - # Ask the user if they want to install the component dry_run = not questionary.confirm( f"Update {self.component_type[:-1]} '{component}'?", @@ -314,7 +317,7 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr self.save_diff_fn, old_modules_json, self.modules_json.get_modules_json(), - Path(self.dir, "modules.json"), + Path(self.directory, "modules.json"), ) if exit_value and not silent: log.info( @@ -389,25 +392,26 @@ def get_single_component_info(self, component): sha = self.sha config_entry = None - if any( - [ - entry.count("/") == 1 - and (entry.endswith("modules") or entry.endswith("subworkflows")) - and not (entry.endswith(".git") or entry.endswith(".git/")) - for entry in self.update_config.keys() - ] - ): - raise UserWarning( - "Your '.nf-core.yml' file format is outdated. " - "The format should be of the form:\n" - "update:\n <repo_url>:\n <component_dir>:\n <component>:" - ) - if isinstance(self.update_config.get(self.modules_repo.remote_url, {}), str): - # If the repo entry is a string, it's the sha to update to - config_entry = self.update_config.get(self.modules_repo.remote_url, {}) - elif component in self.update_config.get(self.modules_repo.remote_url, {}).get(install_dir, {}): - # If the component to update is in .nf-core.yml config file - config_entry = self.update_config[self.modules_repo.remote_url][install_dir].get(component) + if self.update_config is not None: + if any( + [ + entry.count("/") == 1 + and (entry.endswith("modules") or entry.endswith("subworkflows")) + and not (entry.endswith(".git") or entry.endswith(".git/")) + for entry in self.update_config.keys() + ] + ): + raise UserWarning( + "Your '.nf-core.yml' file format is outdated. 
" + "The format should be of the form:\n" + "update:\n :\n :\n :" + ) + if isinstance(self.update_config.get(self.modules_repo.remote_url, {}), str): + # If the repo entry is a string, it's the sha to update to + config_entry = self.update_config.get(self.modules_repo.remote_url, {}) + elif component in self.update_config.get(self.modules_repo.remote_url, {}).get(install_dir, {}): + # If the component to update is in .nf-core.yml config file + config_entry = self.update_config[self.modules_repo.remote_url][install_dir].get(component) if config_entry is not None and config_entry is not True: if config_entry is False: log.warn( @@ -472,8 +476,11 @@ def get_all_components_info(self, branch=None): components_info = {} # Loop through all the modules/subworkflows in the pipeline # and check if they have an entry in the '.nf-core.yml' file + for repo_name, components in self.modules_json.get_all_components(self.component_type).items(): - if repo_name not in self.update_config or self.update_config[repo_name] is True: + if isinstance(self.update_config, dict) and ( + repo_name not in self.update_config or self.update_config[repo_name] is True + ): # There aren't restrictions for the repository in .nf-core.yml file components_info[repo_name] = {} for component_dir, component in components: @@ -497,7 +504,7 @@ def get_all_components_info(self, branch=None): ), ) ] - elif isinstance(self.update_config[repo_name], dict): + elif isinstance(self.update_config, dict) and isinstance(self.update_config[repo_name], dict): # If it is a dict, then there are entries for individual components or component directories for component_dir in set([dir for dir, _ in components]): if isinstance(self.update_config[repo_name][component_dir], str): @@ -529,8 +536,8 @@ def get_all_components_info(self, branch=None): if self.sha is not None: overridden_repos.append(repo_name) elif self.update_config[repo_name][component_dir] is False: - for dir, component in components: - if dir == component_dir: + for directory, component in components: + if directory == component_dir: skipped_components.append(f"{component_dir}/{components}") elif isinstance(self.update_config[repo_name][component_dir], dict): # If it's a dict, there are entries for individual components @@ -590,7 +597,7 @@ def get_all_components_info(self, branch=None): raise UserWarning( f"{self.component_type[:-1].title()} '{component}' in '{component_dir}' has an invalid entry in '.nf-core.yml'" ) - elif isinstance(self.update_config[repo_name], str): + elif isinstance(self.update_config, dict) and isinstance(self.update_config[repo_name], str): # If a string is given it is the commit SHA to which we should update to custom_sha = self.update_config[repo_name] components_info[repo_name] = {} @@ -617,10 +624,10 @@ def get_all_components_info(self, branch=None): ] if self.sha is not None: overridden_repos.append(repo_name) - elif self.update_config[repo_name] is False: + elif isinstance(self.update_config, dict) and self.update_config[repo_name] is False: skipped_repos.append(repo_name) else: - raise UserWarning(f"Repo '{repo_name}' has an invalid entry in '.nf-core.yml'") + log.debug(f"no update config for {repo_name} in `.nf-core.yml`") if skipped_repos: skipped_str = "', '".join(skipped_repos) @@ -706,8 +713,10 @@ def setup_diff_file(self, check_diff_exist=True): self.save_diff_fn = questionary.path( "Enter the filename: ", style=nf_core.utils.nfcore_question_style ).unsafe_ask() - - self.save_diff_fn = Path(self.save_diff_fn) + if self.save_diff_fn is not None: + 
self.save_diff_fn = Path(self.save_diff_fn) + else: + raise UserWarning("No filename provided for saving the diff file") if not check_diff_exist: # This guarantees that the file exists after calling the function @@ -738,7 +747,7 @@ def move_files_from_tmp_dir(self, component: str, install_folder: str, repo_path """ temp_component_dir = Path(install_folder, component) files = [file_path for file_path in temp_component_dir.rglob("*") if file_path.is_file()] - pipeline_path = Path(self.dir, self.component_type, repo_path, component) + pipeline_path = Path(self.directory, self.component_type, repo_path, component) if pipeline_path.exists(): pipeline_files = [f.name for f in pipeline_path.iterdir() if f.is_file()] @@ -746,7 +755,7 @@ def move_files_from_tmp_dir(self, component: str, install_folder: str, repo_path config_files = [f for f in pipeline_files if str(f).endswith(".config")] for config_file in config_files: log.debug(f"Moving '{component}/{config_file}' to updated component") - shutil.move(pipeline_path / config_file, temp_component_dir / config_file) + shutil.move(str(pipeline_path / config_file), temp_component_dir / config_file) files.append(temp_component_dir / config_file) else: @@ -763,7 +772,7 @@ def move_files_from_tmp_dir(self, component: str, install_folder: str, repo_path log.debug(f"Moving '{file}' to updated component") dest = Path(pipeline_path, file) dest.parent.mkdir(parents=True, exist_ok=True) - shutil.move(path, dest) + shutil.move(str(path), dest) log.info(f"Updating '{repo_path}/{component}'") log.debug(f"Updating {self.component_type[:-1]} '{component}' to {new_version} from {repo_path}") @@ -789,7 +798,7 @@ def try_apply_patch( component_fullname = str(Path(repo_path, component)) log.info(f"Found patch for {self.component_type[:-1]} '{component_fullname}'. Trying to apply it to new files") - patch_path = Path(self.dir / patch_relpath) + patch_path = Path(self.directory / patch_relpath) component_relpath = Path(self.component_type, repo_path, component) # Check that paths in patch file are updated @@ -829,6 +838,7 @@ def try_apply_patch( for_git=False, dsp_from_dir=component_relpath, dsp_to_dir=component_relpath, + limit_output=self.limit_output, ) # Move the patched files to the install dir @@ -875,7 +885,13 @@ def get_components_to_update(self, component): return modules_to_update, subworkflows_to_update - def update_linked_components(self, modules_to_update, subworkflows_to_update, updated=None, check_diff_exist=True): + def update_linked_components( + self, + modules_to_update, + subworkflows_to_update, + updated=None, + check_diff_exist=True, + ): """ Update modules and subworkflows linked to the component being updated. 
""" @@ -883,7 +899,12 @@ def update_linked_components(self, modules_to_update, subworkflows_to_update, up if s_update in updated: continue original_component_type, original_update_all = self._change_component_type("subworkflows") - self.update(s_update, silent=True, updated=updated, check_diff_exist=check_diff_exist) + self.update( + s_update, + silent=True, + updated=updated, + check_diff_exist=check_diff_exist, + ) self._reset_component_type(original_component_type, original_update_all) for m_update in modules_to_update: @@ -891,7 +912,12 @@ def update_linked_components(self, modules_to_update, subworkflows_to_update, up continue original_component_type, original_update_all = self._change_component_type("modules") try: - self.update(m_update, silent=True, updated=updated, check_diff_exist=check_diff_exist) + self.update( + m_update, + silent=True, + updated=updated, + check_diff_exist=check_diff_exist, + ) except LookupError as e: # If the module to be updated is not available, check if there has been a name change if "not found in list of available" in str(e): @@ -905,29 +931,31 @@ def update_linked_components(self, modules_to_update, subworkflows_to_update, up def manage_changes_in_linked_components(self, component, modules_to_update, subworkflows_to_update): """Check for linked components added or removed in the new subworkflow version""" if self.component_type == "subworkflows": - subworkflow_directory = Path(self.dir, self.component_type, self.modules_repo.repo_path, component) + subworkflow_directory = Path(self.directory, self.component_type, self.modules_repo.repo_path, component) included_modules, included_subworkflows = get_components_to_install(subworkflow_directory) # If a module/subworkflow has been removed from the subworkflow for module in modules_to_update: if module not in included_modules: log.info(f"Removing module '{module}' which is not included in '{component}' anymore.") - remove_module_object = ComponentRemove("modules", self.dir) + remove_module_object = ComponentRemove("modules", self.directory) remove_module_object.remove(module, removed_by=component) for subworkflow in subworkflows_to_update: if subworkflow not in included_subworkflows: log.info(f"Removing subworkflow '{subworkflow}' which is not included in '{component}' anymore.") - remove_subworkflow_object = ComponentRemove("subworkflows", self.dir) + remove_subworkflow_object = ComponentRemove("subworkflows", self.directory) remove_subworkflow_object.remove(subworkflow, removed_by=component) # If a new module/subworkflow is included in the subworklfow and wasn't included before for module in included_modules: if module not in modules_to_update: log.info(f"Installing newly included module '{module}' for '{component}'") - install_module_object = ComponentInstall(self.dir, "modules", installed_by=component) + install_module_object = ComponentInstall(self.directory, "modules", installed_by=component) install_module_object.install(module, silent=True) for subworkflow in included_subworkflows: if subworkflow not in subworkflows_to_update: log.info(f"Installing newly included subworkflow '{subworkflow}' for '{component}'") - install_subworkflow_object = ComponentInstall(self.dir, "subworkflows", installed_by=component) + install_subworkflow_object = ComponentInstall( + self.directory, "subworkflows", installed_by=component + ) install_subworkflow_object.install(subworkflow, silent=True) def _change_component_type(self, new_component_type): diff --git a/nf_core/create.py b/nf_core/create.py deleted file mode 
100644 index b420b1c86..000000000 --- a/nf_core/create.py +++ /dev/null @@ -1,567 +0,0 @@ -"""Creates a nf-core pipeline matching the current -organization's specification based on a template. -""" - -import configparser -import logging -import os -import re -import shutil -import sys -from pathlib import Path - -import git -import jinja2 -import questionary -import yaml - -import nf_core -import nf_core.schema -import nf_core.utils -from nf_core.create_logo import create_logo -from nf_core.lint_utils import run_prettier_on_file - -log = logging.getLogger(__name__) - - -class PipelineCreate: - """Creates a nf-core pipeline a la carte from the nf-core best-practice template. - - Args: - name (str): Name for the pipeline. - description (str): Description for the pipeline. - author (str): Authors name of the pipeline. - version (str): Version flag. Semantic versioning only. Defaults to `1.0dev`. - no_git (bool): Prevents the creation of a local Git repository for the pipeline. Defaults to False. - force (bool): Overwrites a given workflow directory with the same name. Defaults to False. - May the force be with you. - outdir (str): Path to the local output directory. - template_yaml_path (str): Path to template.yml file for pipeline creation settings. - plain (bool): If true the Git repository will be initialized plain. - default_branch (str): Specifies the --initial-branch name. - """ - - def __init__( - self, - name, - description, - author, - version="1.0dev", - no_git=False, - force=False, - outdir=None, - template_yaml_path=None, - plain=False, - default_branch=None, - ): - self.template_params, skip_paths_keys, self.template_yaml = self.create_param_dict( - name, description, author, version, template_yaml_path, plain, outdir if outdir else "." - ) - - skippable_paths = { - "github": [ - ".github/", - ".gitignore", - ], - "ci": [".github/workflows/"], - "igenomes": ["conf/igenomes.config"], - "branded": [ - ".github/ISSUE_TEMPLATE/config", - "CODE_OF_CONDUCT.md", - ".github/workflows/awsfulltest.yml", - ".github/workflows/awstest.yml", - ], - } - # Get list of files we're skipping with the supplied skip keys - self.skip_paths = set(sp for k in skip_paths_keys for sp in skippable_paths[k]) - - # Set convenience variables - self.name = self.template_params["name"] - - # Set fields used by the class methods - self.no_git = ( - no_git if self.template_params["github"] else True - ) # Set to True if template was configured without github hosting - self.default_branch = default_branch - self.force = force - if outdir is None: - outdir = os.path.join(os.getcwd(), self.template_params["name_noslash"]) - self.outdir = Path(outdir) - - def create_param_dict(self, name, description, author, version, template_yaml_path, plain, pipeline_dir): - """Creates a dictionary of parameters for the new pipeline. - - Args: - name (str): Name for the pipeline. - description (str): Description for the pipeline. - author (str): Authors name of the pipeline. - version (str): Version flag. - template_yaml_path (str): Path to YAML file containing template parameters. - plain (bool): If true the pipeline template will be initialized plain, without customisation. - pipeline_dir (str): Path to the pipeline directory. 
- """ - # Try reading config file - _, config_yml = nf_core.utils.load_tools_config(pipeline_dir) - - # Obtain template customization info from template yaml file or `.nf-core.yml` config file - try: - if template_yaml_path is not None: - with open(template_yaml_path) as f: - template_yaml = yaml.safe_load(f) - elif "template" in config_yml: - template_yaml = config_yml["template"] - else: - template_yaml = {} - except FileNotFoundError: - raise UserWarning(f"Template YAML file '{template_yaml_path}' not found.") - - param_dict = {} - # Get the necessary parameters either from the template or command line arguments - param_dict["name"] = self.get_param("name", name, template_yaml, template_yaml_path) - param_dict["description"] = self.get_param("description", description, template_yaml, template_yaml_path) - param_dict["author"] = self.get_param("author", author, template_yaml, template_yaml_path) - - if "version" in template_yaml: - if version is not None: - log.info(f"Overriding --version with version found in {template_yaml_path}") - version = template_yaml["version"] - param_dict["version"] = version - - # Define the different template areas, and what actions to take for each - # if they are skipped - template_areas = { - "github": {"name": "GitHub hosting", "file": True, "content": False}, - "ci": {"name": "GitHub CI", "file": True, "content": False}, - "github_badges": {"name": "GitHub badges", "file": False, "content": True}, - "igenomes": {"name": "iGenomes config", "file": True, "content": True}, - "nf_core_configs": {"name": "nf-core/configs", "file": False, "content": True}, - } - - # Once all necessary parameters are set, check if the user wants to customize the template more - if template_yaml_path is None and not plain: - customize_template = questionary.confirm( - "Do you want to customize which parts of the template are used?", - style=nf_core.utils.nfcore_question_style, - default=False, - ).unsafe_ask() - if customize_template: - template_yaml.update(self.customize_template(template_areas)) - - # Now look in the template for more options, otherwise default to nf-core defaults - param_dict["prefix"] = template_yaml.get("prefix", "nf-core") - param_dict["branded"] = param_dict["prefix"] == "nf-core" - - skip_paths = [] if param_dict["branded"] else ["branded"] - - for t_area in template_areas: - areas_to_skip = template_yaml.get("skip", []) - if isinstance(areas_to_skip, str): - areas_to_skip = [areas_to_skip] - if t_area in areas_to_skip: - if template_areas[t_area]["file"]: - skip_paths.append(t_area) - param_dict[t_area] = False - else: - param_dict[t_area] = True - # If github is selected, exclude also github_badges - if not param_dict["github"]: - param_dict["github_badges"] = False - - # Set the last parameters based on the ones provided - param_dict["short_name"] = ( - param_dict["name"].lower().replace(r"/\s+/", "-").replace(f"{param_dict['prefix']}/", "").replace("/", "-") - ) - param_dict["name"] = f"{param_dict['prefix']}/{param_dict['short_name']}" - param_dict["name_noslash"] = param_dict["name"].replace("/", "-") - param_dict["prefix_nodash"] = param_dict["prefix"].replace("-", "") - param_dict["name_docker"] = param_dict["name"].replace(param_dict["prefix"], param_dict["prefix_nodash"]) - param_dict["logo_light"] = f"nf-core-{param_dict['short_name']}_logo_light.png" - param_dict["logo_dark"] = f"nf-core-{param_dict['short_name']}_logo_dark.png" - param_dict["version"] = version - - if ( - "lint" in config_yml - and "nextflow_config" in config_yml["lint"] - 
and "manifest.name" in config_yml["lint"]["nextflow_config"] - ): - return param_dict, skip_paths, template_yaml - - # Check that the pipeline name matches the requirements - if not re.match(r"^[a-z]+$", param_dict["short_name"]): - if param_dict["prefix"] == "nf-core": - raise UserWarning("[red]Invalid workflow name: must be lowercase without punctuation.") - else: - log.warning( - "Your workflow name is not lowercase without punctuation. This may cause Nextflow errors.\nConsider changing the name to avoid special characters." - ) - - return param_dict, skip_paths, template_yaml - - def customize_template(self, template_areas): - """Customizes the template parameters. - - Args: - template_areas (list): List of available template areas to skip. - """ - template_yaml = {} - prefix = questionary.text("Pipeline prefix", style=nf_core.utils.nfcore_question_style).unsafe_ask() - while not re.match(r"^[a-zA-Z_][a-zA-Z0-9-_]*$", prefix): - log.error("[red]Pipeline prefix cannot start with digit or hyphen and cannot contain punctuation.[/red]") - prefix = questionary.text( - "Please provide a new pipeline prefix", style=nf_core.utils.nfcore_question_style - ).unsafe_ask() - template_yaml["prefix"] = prefix - - choices = [{"name": template_areas[area]["name"], "value": area} for area in template_areas] - template_yaml["skip"] = questionary.checkbox( - "Skip template areas?", choices=choices, style=nf_core.utils.nfcore_question_style - ).unsafe_ask() - return template_yaml - - def get_param(self, param_name, passed_value, template_yaml, template_yaml_path): - if param_name in template_yaml: - if passed_value is not None: - log.info(f"overriding --{param_name} with name found in {template_yaml_path}") - passed_value = template_yaml[param_name] - if passed_value is None: - passed_value = getattr(self, f"prompt_wf_{param_name}")() - return passed_value - - def prompt_wf_name(self): - wf_name = questionary.text("Workflow name", style=nf_core.utils.nfcore_question_style).unsafe_ask() - while not re.match(r"^[a-z]+$", wf_name): - log.error("[red]Invalid workflow name: must be lowercase without punctuation.") - wf_name = questionary.text( - "Please provide a new workflow name", style=nf_core.utils.nfcore_question_style - ).unsafe_ask() - return wf_name - - def prompt_wf_description(self): - wf_description = questionary.text("Description", style=nf_core.utils.nfcore_question_style).unsafe_ask() - return wf_description - - def prompt_wf_author(self): - wf_author = questionary.text("Author", style=nf_core.utils.nfcore_question_style).unsafe_ask() - return wf_author - - def init_pipeline(self): - """Creates the nf-core pipeline.""" - - # Make the new pipeline - self.render_template() - - # Init the git repository and make the first commit - if not self.no_git: - self.git_init_pipeline() - - if self.template_params["branded"]: - log.info( - "[green bold]!!!!!! 
IMPORTANT !!!!!!\n\n" - "[green not bold]If you are interested in adding your pipeline to the nf-core community,\n" - "PLEASE COME AND TALK TO US IN THE NF-CORE SLACK BEFORE WRITING ANY CODE!\n\n" - "[default]Please read: [link=https://nf-co.re/developers/adding_pipelines#join-the-community]" - "https://nf-co.re/developers/adding_pipelines#join-the-community[/link]" - ) - - def render_template(self): - """Runs Jinja to create a new nf-core pipeline.""" - log.info(f"Creating new nf-core pipeline: '{self.name}'") - - # Check if the output directory exists - if self.outdir.exists(): - if self.force: - log.warning(f"Output directory '{self.outdir}' exists - continuing as --force specified") - else: - log.error(f"Output directory '{self.outdir}' exists!") - log.info("Use -f / --force to overwrite existing files") - sys.exit(1) - else: - os.makedirs(self.outdir) - - # Run jinja2 for each file in the template folder - env = jinja2.Environment( - loader=jinja2.PackageLoader("nf_core", "pipeline-template"), keep_trailing_newline=True - ) - template_dir = os.path.join(os.path.dirname(__file__), "pipeline-template") - object_attrs = self.template_params - object_attrs["nf_core_version"] = nf_core.__version__ - - # Can't use glob.glob() as need recursive hidden dotfiles - https://stackoverflow.com/a/58126417/713980 - template_files = list(Path(template_dir).glob("**/*")) - template_files += list(Path(template_dir).glob("*")) - ignore_strs = [".pyc", "__pycache__", ".pyo", ".pyd", ".DS_Store", ".egg"] - short_name = self.template_params["short_name"] - rename_files = { - "workflows/pipeline.nf": f"workflows/{short_name}.nf", - "subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf": f"subworkflows/local/utils_nfcore_{short_name}_pipeline/main.nf", - } - - # Set the paths to skip according to customization - for template_fn_path_obj in template_files: - template_fn_path = str(template_fn_path_obj) - - # Skip files that are in the self.skip_paths list - for skip_path in self.skip_paths: - if os.path.relpath(template_fn_path, template_dir).startswith(skip_path): - break - else: - if os.path.isdir(template_fn_path): - continue - if any([s in template_fn_path for s in ignore_strs]): - log.debug(f"Ignoring '{template_fn_path}' in jinja2 template creation") - continue - - # Set up vars and directories - template_fn = os.path.relpath(template_fn_path, template_dir) - output_path = self.outdir / template_fn - if template_fn in rename_files: - output_path = self.outdir / rename_files[template_fn] - os.makedirs(os.path.dirname(output_path), exist_ok=True) - - try: - # Just copy binary files - if nf_core.utils.is_file_binary(template_fn_path): - raise AttributeError(f"Binary file: {template_fn_path}") - - # Got this far - render the template - log.debug(f"Rendering template file: '{template_fn}'") - j_template = env.get_template(template_fn) - rendered_output = j_template.render(object_attrs) - - # Write to the pipeline output file - with open(output_path, "w") as fh: - log.debug(f"Writing to output file: '{output_path}'") - fh.write(rendered_output) - - # Copy the file directly instead of using Jinja - except (AttributeError, UnicodeDecodeError) as e: - log.debug(f"Copying file without Jinja: '{output_path}' - {e}") - shutil.copy(template_fn_path, output_path) - - # Something else went wrong - except Exception as e: - log.error(f"Copying raw file as error rendering with Jinja: '{output_path}' - {e}") - shutil.copy(template_fn_path, output_path) - - # Mirror file permissions - template_stat = 
os.stat(template_fn_path) - os.chmod(output_path, template_stat.st_mode) - - # Remove all unused parameters in the nextflow schema - if not self.template_params["igenomes"] or not self.template_params["nf_core_configs"]: - self.update_nextflow_schema() - - if self.template_params["branded"]: - # Make a logo and save it, if it is a nf-core pipeline - self.make_pipeline_logo() - else: - if self.template_params["github"]: - # Remove field mentioning nf-core docs - # in the github bug report template - self.remove_nf_core_in_bug_report_template() - - # Update the .nf-core.yml with linting configurations - self.fix_linting() - - if self.template_yaml: - config_fn, config_yml = nf_core.utils.load_tools_config(self.outdir) - with open(self.outdir / config_fn, "w") as fh: - config_yml.update(template=self.template_yaml) - yaml.safe_dump(config_yml, fh) - log.debug(f"Dumping pipeline template yml to pipeline config file '{config_fn.name}'") - run_prettier_on_file(self.outdir / config_fn) - - def update_nextflow_schema(self): - """ - Removes unused parameters from the nextflow schema. - """ - schema_path = self.outdir / "nextflow_schema.json" - - schema = nf_core.schema.PipelineSchema() - schema.schema_filename = schema_path - schema.no_prompts = True - schema.load_schema() - schema.get_wf_params() - schema.remove_schema_notfound_configs() - schema.save_schema(suppress_logging=True) - run_prettier_on_file(schema_path) - - def remove_nf_core_in_bug_report_template(self): - """ - Remove the field mentioning nf-core documentation - in the github bug report template - """ - bug_report_path = self.outdir / ".github" / "ISSUE_TEMPLATE" / "bug_report.yml" - - with open(bug_report_path) as fh: - contents = yaml.load(fh, Loader=yaml.FullLoader) - - # Remove the first item in the body, which is the information about the docs - contents["body"].pop(0) - - with open(bug_report_path, "w") as fh: - yaml.dump(contents, fh, default_flow_style=False, sort_keys=False) - - run_prettier_on_file(bug_report_path) - - def fix_linting(self): - """ - Updates the .nf-core.yml with linting configurations - for a customized pipeline. 
- """ - # Create a lint config - short_name = self.template_params["short_name"] - lint_config = { - "files_exist": [ - "CODE_OF_CONDUCT.md", - f"assets/nf-core-{short_name}_logo_light.png", - f"docs/images/nf-core-{short_name}_logo_light.png", - f"docs/images/nf-core-{short_name}_logo_dark.png", - ".github/ISSUE_TEMPLATE/config.yml", - ".github/workflows/awstest.yml", - ".github/workflows/awsfulltest.yml", - ], - "files_unchanged": [ - "CODE_OF_CONDUCT.md", - f"assets/nf-core-{short_name}_logo_light.png", - f"docs/images/nf-core-{short_name}_logo_light.png", - f"docs/images/nf-core-{short_name}_logo_dark.png", - ], - "nextflow_config": [ - "manifest.name", - "manifest.homePage", - ], - "multiqc_config": ["report_comment"], - } - - # Add GitHub hosting specific configurations - if not self.template_params["github"]: - lint_config["files_exist"].extend( - [ - ".github/ISSUE_TEMPLATE/bug_report.yml", - ".github/ISSUE_TEMPLATE/feature_request.yml", - ".github/PULL_REQUEST_TEMPLATE.md", - ".github/CONTRIBUTING.md", - ".github/.dockstore.yml", - ".gitignore", - ] - ) - lint_config["files_unchanged"].extend( - [ - ".github/ISSUE_TEMPLATE/bug_report.yml", - ".github/ISSUE_TEMPLATE/config.yml", - ".github/ISSUE_TEMPLATE/feature_request.yml", - ".github/PULL_REQUEST_TEMPLATE.md", - ".github/workflows/branch.yml", - ".github/workflows/linting_comment.yml", - ".github/workflows/linting.yml", - ".github/CONTRIBUTING.md", - ".github/.dockstore.yml", - ] - ) - - # Add CI specific configurations - if not self.template_params["ci"]: - lint_config["files_exist"].extend( - [ - ".github/workflows/branch.yml", - ".github/workflows/ci.yml", - ".github/workflows/linting_comment.yml", - ".github/workflows/linting.yml", - ] - ) - - # Add custom config specific configurations - if not self.template_params["nf_core_configs"]: - lint_config["files_exist"].extend(["conf/igenomes.config"]) - lint_config["nextflow_config"].extend( - [ - "process.cpus", - "process.memory", - "process.time", - "custom_config", - ] - ) - - # Add igenomes specific configurations - if not self.template_params["igenomes"]: - lint_config["files_exist"].extend(["conf/igenomes.config"]) - - # Add github badges specific configurations - if not self.template_params["github_badges"] or not self.template_params["github"]: - lint_config["readme"] = ["nextflow_badge"] - - # If the pipeline is unbranded - if not self.template_params["branded"]: - lint_config["files_unchanged"].extend([".github/ISSUE_TEMPLATE/bug_report.yml"]) - - # Add the lint content to the preexisting nf-core config - config_fn, nf_core_yml = nf_core.utils.load_tools_config(self.outdir) - nf_core_yml["lint"] = lint_config - with open(self.outdir / config_fn, "w") as fh: - yaml.dump(nf_core_yml, fh, default_flow_style=False, sort_keys=False) - - run_prettier_on_file(os.path.join(self.outdir, config_fn)) - - def make_pipeline_logo(self): - """Fetch a logo for the new pipeline from the nf-core website""" - email_logo_path = Path(self.outdir) / "assets" - create_logo(text=self.template_params["short_name"], dir=email_logo_path, theme="light", force=self.force) - for theme in ["dark", "light"]: - readme_logo_path = Path(self.outdir) / "docs" / "images" - create_logo( - text=self.template_params["short_name"], dir=readme_logo_path, width=600, theme=theme, force=self.force - ) - - def git_init_pipeline(self): - """Initialises the new pipeline as a Git repository and submits first commit. - - Raises: - UserWarning: if Git default branch is set to 'dev' or 'TEMPLATE'. 
- """ - default_branch = self.default_branch - try: - default_branch = default_branch or git.config.GitConfigParser().get_value("init", "defaultBranch") - except configparser.Error: - log.debug("Could not read init.defaultBranch") - if default_branch in ["dev", "TEMPLATE"]: - raise UserWarning( - f"Your Git defaultBranch '{default_branch}' is incompatible with nf-core.\n" - "'dev' and 'TEMPLATE' can not be used as default branch name.\n" - "Set the default branch name with " - "[white on grey23] git config --global init.defaultBranch [/]\n" - "Or set the default_branch parameter in this class.\n" - "Pipeline git repository will not be initialised." - ) - - log.info("Initialising pipeline git repository") - repo = git.Repo.init(self.outdir) - repo.git.add(A=True) - repo.index.commit(f"initial template build from nf-core/tools, version {nf_core.__version__}") - if default_branch: - repo.active_branch.rename(default_branch) - try: - repo.git.branch("TEMPLATE") - repo.git.branch("dev") - - except git.GitCommandError as e: - if "already exists" in e.stderr: - log.debug("Branches 'TEMPLATE' and 'dev' already exist") - if self.force: - log.debug("Force option set - deleting branches") - repo.git.branch("-D", "TEMPLATE") - repo.git.branch("-D", "dev") - repo.git.branch("TEMPLATE") - repo.git.branch("dev") - else: - log.error( - "Branches 'TEMPLATE' and 'dev' already exist. Use --force to overwrite existing branches." - ) - sys.exit(1) - log.info( - "Done. Remember to add a remote and push to GitHub:\n" - f"[white on grey23] cd {self.outdir} \n" - " git remote add origin git@github.com:USERNAME/REPO_NAME.git \n" - " git push --all origin " - ) - log.info("This will also push your newly created dev branch and the TEMPLATE branch for syncing.") diff --git a/nf_core/gitpod/gitpod.Dockerfile b/nf_core/gitpod/gitpod.Dockerfile index 6afca0e47..2a9fbb0ed 100644 --- a/nf_core/gitpod/gitpod.Dockerfile +++ b/nf_core/gitpod/gitpod.Dockerfile @@ -1,7 +1,8 @@ # Test build locally before making a PR # docker build -t gitpod:test -f nf_core/gitpod/gitpod.Dockerfile . -FROM gitpod/workspace-base@sha256:124f2b8cbefe9b4abbb6a14538da8846770dde20b93f038d9551b6230aec1d1c +# See https://docs.renovatebot.com/docker/#digest-pinning for why a digest is used. 
+FROM gitpod/workspace-base@sha256:2cc134fe5bd7d8fdbe44cab294925d4bc6d2d178d94624f4c376584a22d1f7b6 USER root @@ -40,12 +41,11 @@ RUN chown -R gitpod:gitpod /opt/conda /usr/src/nf_core # Change user to gitpod USER gitpod -# Install nextflow, nf-core, Mamba, and pytest-workflow -RUN conda config --add channels defaults && \ - conda config --add channels bioconda && \ +# Install nextflow, nf-core, nf-test, and other useful tools +RUN conda config --add channels bioconda && \ conda config --add channels conda-forge && \ conda config --set channel_priority strict && \ - conda install --quiet --yes --name base \ + conda install --quiet --yes --update-all --name base \ nextflow \ nf-test \ prettier \ @@ -63,3 +63,4 @@ RUN nextflow self-update && \ # Setup pdiff for nf-test diffs ENV NFT_DIFF="pdiff" ENV NFT_DIFF_ARGS="--line-numbers --expand-tabs=2" +ENV JAVA_TOOL_OPTIONS= diff --git a/nf_core/licences.py b/nf_core/licences.py deleted file mode 100644 index be737280f..000000000 --- a/nf_core/licences.py +++ /dev/null @@ -1,115 +0,0 @@ -"""Lists software licences for a given workflow.""" - -import json -import logging -import os - -import requests -import rich.console -import rich.table -import yaml - -import nf_core.utils - -log = logging.getLogger(__name__) - - -class WorkflowLicences: - """A nf-core workflow licenses collection. - - Tries to retrieve the license information from all dependencies - of a given nf-core pipeline. - - A condensed overview with license per dependency can be printed out. - - Args: - pipeline (str): An existing nf-core pipeline name, like `nf-core/hlatyping` - or short `hlatyping`. - """ - - def __init__(self, pipeline): - self.pipeline = pipeline - self.conda_config = None - if self.pipeline.startswith("nf-core/"): - self.pipeline = self.pipeline[8:] - self.conda_packages = {} - self.conda_package_licences = {} - self.as_json = False - - def run_licences(self): - """ - Run the nf-core licences action - """ - self.get_environment_file() - self.fetch_conda_licences() - return self.print_licences() - - def get_environment_file(self): - """Get the conda environment file for the pipeline""" - if os.path.exists(self.pipeline): - pipeline_obj = nf_core.utils.Pipeline(self.pipeline) - pipeline_obj._load() - if pipeline_obj._fp("environment.yml") not in pipeline_obj.files: - raise LookupError( - "No `environment.yml` file found. (Note: DSL2 pipelines are currently not supported by this command.)" - ) - self.conda_config = pipeline_obj.conda_config - else: - env_url = f"https://raw.githubusercontent.com/nf-core/{self.pipeline}/master/environment.yml" - log.debug(f"Fetching environment.yml file: {env_url}") - response = requests.get(env_url) - # Check that the pipeline exists - if response.status_code == 404: - raise LookupError( - f"Couldn't find pipeline conda file: {env_url}. 
(Note: DSL2 pipelines are currently not supported by this command.)" - ) - self.conda_config = yaml.safe_load(response.text) - - def fetch_conda_licences(self): - """Fetch package licences from Anaconda and PyPi.""" - - # Check conda dependency list - deps = self.conda_config.get("dependencies", []) - deps_data = {} - log.info(f"Fetching licence information for {len(deps)} tools") - for dep in deps: - try: - if isinstance(dep, str): - dep_channels = self.conda_config.get("channels", []) - deps_data[dep] = nf_core.utils.anaconda_package(dep, dep_channels) - elif isinstance(dep, dict): - deps_data[dep] = nf_core.utils.pip_package(dep) - except ValueError: - log.error(f"Couldn't get licence information for {dep}") - - for dep, data in deps_data.items(): - _, depver = dep.split("=", 1) - self.conda_package_licences[dep] = nf_core.utils.parse_anaconda_licence(data, depver) - - def print_licences(self): - """Prints the fetched license information. - - Args: - as_json (boolean): Prints the information in JSON. Defaults to False. - """ - log.info("Warning: This tool only prints licence information for the software tools packaged using conda.") - log.info("The pipeline may use other software and dependencies not described here. ") - - if self.as_json: - return json.dumps(self.conda_package_licences, indent=4) - else: - table = rich.table.Table("Package Name", "Version", "Licence") - licence_list = [] - for dep, licences in self.conda_package_licences.items(): - depname, depver = dep.split("=", 1) - try: - depname = depname.split("::")[1] - except IndexError: - pass - licence_list.append([depname, depver, ", ".join(licences)]) - # Sort by licence, then package name - licence_list = sorted(sorted(licence_list), key=lambda x: x[2]) - # Add table rows - for lic in licence_list: - table.add_row(*lic) - return table diff --git a/nf_core/module-template/environment.yml b/nf_core/module-template/environment.yml index dcf510aff..a8a40a8e0 100644 --- a/nf_core/module-template/environment.yml +++ b/nf_core/module-template/environment.yml @@ -1,9 +1,7 @@ --- # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json -name: "{{ component_name_underscore }}" channels: - conda-forge - bioconda - - defaults dependencies: - "{{ bioconda if bioconda else 'YOUR-TOOL-HERE' }}" diff --git a/nf_core/module-template/meta.yml b/nf_core/module-template/meta.yml index 9d3f3c1c1..d9d1cc8ae 100644 --- a/nf_core/module-template/meta.yml +++ b/nf_core/module-template/meta.yml @@ -20,48 +20,67 @@ tools: tool_dev_url: "{{ tool_dev_url }}" doi: "" licence: {{ tool_licence }} + identifier: {{ tool_identifier }} {% if not_empty_template -%} ## TODO nf-core: Add a description of all of the variables used as input {% endif -%} input: #{% if has_meta %} Only when we have meta - - meta: - type: map - description: | - Groovy Map containing sample information - e.g. `[ id:'sample1', single_end:false ]` + - - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
`[ id:'sample1', single_end:false ]` {% endif %} {% if not_empty_template -%} ## TODO nf-core: Delete / customise this example input {%- endif %} - - {{ 'bam:' if not_empty_template else "input:" }} - type: file - description: {{ 'Sorted BAM/CRAM/SAM file' if not_empty_template else "" }} - pattern: {{ '"*.{bam,cram,sam}"' if not_empty_template else "" }} + - {{ 'bam:' if not_empty_template else "input:" }} + type: file + description: {{ 'Sorted BAM/CRAM/SAM file' if not_empty_template else "" }} + pattern: {{ '"*.{bam,cram,sam}"' if not_empty_template else "" }} + ontologies: + {% if not_empty_template -%} + - edam: "http://edamontology.org/format_25722" + - edam: "http://edamontology.org/format_2573" + - edam: "http://edamontology.org/format_3462" + {% else %} + - edam: "" + {%- endif %} {% if not_empty_template -%} ## TODO nf-core: Add a description of all of the variables used as output {% endif -%} output: + - {{ 'bam:' if not_empty_template else "output:" }} #{% if has_meta -%} Only when we have meta - - meta: - type: map - description: | - Groovy Map containing sample information - e.g. `[ id:'sample1', single_end:false ]` - {% endif %} - - versions: - type: file - description: File containing software versions - pattern: "versions.yml" + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. `[ id:'sample1', single_end:false ]` + {%- endif %} {% if not_empty_template -%} - ## TODO nf-core: Delete / customise this example output + ## TODO nf-core: Delete / customise this example output {%- endif %} - - {{ 'bam:' if not_empty_template else "output:" }} - type: file - description: {{ 'Sorted BAM/CRAM/SAM file' if not_empty_template else "" }} - pattern: {{ '"*.{bam,cram,sam}"' if not_empty_template else "" }} + - {{ '"*.bam":' if not_empty_template else '"*":' }} + type: file + description: {{ 'Sorted BAM/CRAM/SAM file' if not_empty_template else "" }} + pattern: {{ '"*.{bam,cram,sam}"' if not_empty_template else "" }} + ontologies: + {% if not_empty_template -%} + - edam: "http://edamontology.org/format_25722" + - edam: "http://edamontology.org/format_2573" + - edam: "http://edamontology.org/format_3462" + {% else -%} + - edam: "" + {%- endif %} + - versions: + - "versions.yml": + type: file + description: File containing software versions + pattern: "versions.yml" authors: - "{{ author }}" diff --git a/nf_core/module-template/tests/main.nf.test.j2 b/nf_core/module-template/tests/main.nf.test.j2 index f31e92d65..a50ecc6a0 100644 --- a/nf_core/module-template/tests/main.nf.test.j2 +++ b/nf_core/module-template/tests/main.nf.test.j2 @@ -7,7 +7,7 @@ nextflow_process { process "{{ component_name_underscore|upper }}" tag "modules" - tag "modules_nfcore" + tag "modules_{{ org_alphabet }}" {%- if subtool %} tag "{{ component }}" {%- endif %} @@ -28,10 +28,10 @@ nextflow_process { {% if has_meta %} input[0] = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.paired_end.sorted.bam', checkIfExists: true), ] {%- else %} - input[0] = file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true) + input[0] = file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.paired_end.sorted.bam', checkIfExists: true), {%- endif %} """ } @@ -60,10 +60,10 @@ nextflow_process { {% if has_meta %} input[0] = [ [ id:'test', single_end:false ], // meta map - 
file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) - ] + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.paired_end.sorted.bam', checkIfExists: true), + ] {%- else %} - input[0] = file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true) + input[0] = file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.paired_end.sorted.bam', checkIfExists: true), {%- endif %} """ } diff --git a/nf_core/module-template/tests/tags.yml b/nf_core/module-template/tests/tags.yml deleted file mode 100644 index e7fac9f5b..000000000 --- a/nf_core/module-template/tests/tags.yml +++ /dev/null @@ -1,2 +0,0 @@ -{{ component_dir }}: - - "modules/{{ org }}/{{ component_dir }}/**" diff --git a/nf_core/modules/__init__.py b/nf_core/modules/__init__.py index 6be871ece..e69de29bb 100644 --- a/nf_core/modules/__init__.py +++ b/nf_core/modules/__init__.py @@ -1,12 +0,0 @@ -from .bump_versions import ModuleVersionBumper -from .create import ModuleCreate -from .info import ModuleInfo -from .install import ModuleInstall -from .lint import ModuleLint -from .list import ModuleList -from .modules_json import ModulesJson -from .modules_repo import ModulesRepo -from .modules_utils import ModuleExceptionError -from .patch import ModulePatch -from .remove import ModuleRemove -from .update import ModuleUpdate diff --git a/nf_core/modules/bump_versions.py b/nf_core/modules/bump_versions.py index 9b54174d5..d98eac7cd 100644 --- a/nf_core/modules/bump_versions.py +++ b/nf_core/modules/bump_versions.py @@ -6,7 +6,8 @@ import logging import os import re -from typing import Any, Dict, List, Optional, Tuple, Union +from pathlib import Path +from typing import List, Optional, Tuple, Union import questionary import yaml @@ -21,16 +22,16 @@ import nf_core.utils from nf_core.components.components_command import ComponentCommand from nf_core.components.nfcore_component import NFCoreComponent -from nf_core.utils import custom_yaml_dumper, rich_force_colors +from nf_core.utils import NFCoreYamlConfig, custom_yaml_dumper, rich_force_colors from nf_core.utils import plural_s as _s log = logging.getLogger(__name__) -class ModuleVersionBumper(ComponentCommand): # type: ignore[misc] +class ModuleVersionBumper(ComponentCommand): def __init__( self, - pipeline_dir: str, + pipeline_dir: Union[str, Path], remote_url: Optional[str] = None, branch: Optional[str] = None, no_pull: bool = False, @@ -42,7 +43,7 @@ def __init__( self.failed: List[Tuple[str, str]] = [] self.ignored: List[Tuple[str, str]] = [] self.show_up_to_date: Optional[bool] = None - self.tools_config: Dict[str, Any] = {} + self.tools_config: Optional[NFCoreYamlConfig] def bump_versions( self, module: Union[str, None] = None, all_modules: bool = False, show_uptodate: bool = False @@ -75,10 +76,10 @@ def bump_versions( ) # Get list of all modules - _, nfcore_modules = nf_core.modules.modules_utils.get_installed_modules(self.dir) + _, nfcore_modules = nf_core.modules.modules_utils.get_installed_modules(self.directory) # Load the .nf-core.yml config - _, self.tools_config = nf_core.utils.load_tools_config(self.dir) + _, self.tools_config = nf_core.utils.load_tools_config(self.directory) # Prompt for module or all if module is None and not all_modules: @@ -159,7 +160,7 @@ def bump_module_version(self, module: NFCoreComponent) -> bool: return False # Don't update if blocked in blacklist - self.bump_versions_config = self.tools_config.get("bump-versions", {}) + 
self.bump_versions_config = getattr(self.tools_config, "bump-versions", {}) or {} if module.component_name in self.bump_versions_config: config_version = self.bump_versions_config[module.component_name] if not config_version: @@ -176,7 +177,12 @@ def bump_module_version(self, module: NFCoreComponent) -> bool: try: response = nf_core.utils.anaconda_package(bp) except (LookupError, ValueError): - self.failed.append((f"Conda version not specified correctly: {module.main_nf}", module.component_name)) + self.failed.append( + ( + f"Conda version not specified correctly: {Path(module.main_nf).relative_to(self.directory)}", + module.component_name, + ) + ) return False # Check that required version is available at all @@ -239,9 +245,14 @@ def bump_module_version(self, module: NFCoreComponent) -> bool: fh.write(content) # change version in environment.yml + if not module.environment_yml: + log.error(f"Could not read `environment.yml` of {module.component_name} module.") + return False with open(module.environment_yml) as fh: env_yml = yaml.safe_load(fh) - re.sub(bioconda_packages[0], f"'bioconda::{bioconda_tool_name}={last_ver}'", env_yml["dependencies"]) + env_yml["dependencies"][0] = re.sub( + bioconda_packages[0], f"bioconda::{bioconda_tool_name}={last_ver}", env_yml["dependencies"][0] + ) with open(module.environment_yml, "w") as fh: yaml.dump(env_yml, fh, default_flow_style=False, Dumper=custom_yaml_dumper()) @@ -263,11 +274,11 @@ def get_bioconda_version(self, module: NFCoreComponent) -> List[str]: """ # Check whether file exists and load it bioconda_packages = [] - try: + if module.environment_yml is not None and module.environment_yml.exists(): with open(module.environment_yml) as fh: env_yml = yaml.safe_load(fh) bioconda_packages = env_yml.get("dependencies", []) - except FileNotFoundError: + else: log.error(f"Could not read `environment.yml` of {module.component_name} module.") return bioconda_packages diff --git a/nf_core/modules/install.py b/nf_core/modules/install.py index e1755cee9..7055abe49 100644 --- a/nf_core/modules/install.py +++ b/nf_core/modules/install.py @@ -11,7 +11,7 @@ def __init__( remote_url=None, branch=None, no_pull=False, - installed_by=False, + installed_by=None, ): super().__init__( pipeline_dir, diff --git a/nf_core/modules/lint/__init__.py b/nf_core/modules/lint/__init__.py index f96683089..49012cff4 100644 --- a/nf_core/modules/lint/__init__.py +++ b/nf_core/modules/lint/__init__.py @@ -8,17 +8,35 @@ import logging import os +from pathlib import Path +from typing import List, Optional, Union import questionary import rich +import rich.progress +import ruamel.yaml +import nf_core.components +import nf_core.components.nfcore_component import nf_core.modules.modules_utils import nf_core.utils +from nf_core.components.components_utils import get_biotools_id from nf_core.components.lint import ComponentLint, LintExceptionError, LintResult -from nf_core.lint_utils import console +from nf_core.components.nfcore_component import NFCoreComponent +from nf_core.pipelines.lint_utils import console, run_prettier_on_file log = logging.getLogger(__name__) +from .environment_yml import environment_yml +from .main_nf import main_nf +from .meta_yml import meta_yml, obtain_correct_and_specified_inputs, obtain_correct_and_specified_outputs, read_meta_yml +from .module_changes import module_changes +from .module_deprecations import module_deprecations +from .module_patch import module_patch +from .module_tests import module_tests +from .module_todos import module_todos +from 
.module_version import module_version + class ModuleLint(ComponentLint): """ @@ -27,30 +45,35 @@ class ModuleLint(ComponentLint): """ # Import lint functions - from .environment_yml import environment_yml # type: ignore[misc] - from .main_nf import main_nf # type: ignore[misc] - from .meta_yml import meta_yml # type: ignore[misc] - from .module_changes import module_changes # type: ignore[misc] - from .module_deprecations import module_deprecations # type: ignore[misc] - from .module_patch import module_patch # type: ignore[misc] - from .module_tests import module_tests # type: ignore[misc] - from .module_todos import module_todos # type: ignore[misc] - from .module_version import module_version # type: ignore[misc] + environment_yml = environment_yml + main_nf = main_nf + meta_yml = meta_yml + obtain_correct_and_specified_inputs = obtain_correct_and_specified_inputs + obtain_correct_and_specified_outputs = obtain_correct_and_specified_outputs + read_meta_yml = read_meta_yml + module_changes = module_changes + module_deprecations = module_deprecations + module_patch = module_patch + module_tests = module_tests + module_todos = module_todos + module_version = module_version def __init__( self, - dir, - fail_warned=False, - remote_url=None, - branch=None, - no_pull=False, - registry=None, - hide_progress=False, + directory: Union[str, Path], + fail_warned: bool = False, + fix: bool = False, + remote_url: Optional[str] = None, + branch: Optional[str] = None, + no_pull: bool = False, + registry: Optional[str] = None, + hide_progress: bool = False, ): super().__init__( component_type="modules", - dir=dir, + directory=directory, fail_warned=fail_warned, + fix=fix, remote_url=remote_url, branch=branch, no_pull=no_pull, @@ -94,7 +117,7 @@ def lint( """ # TODO: consider unifying modules and subworkflows lint() function and add it to the ComponentLint class # Prompt for module or all - if module is None and not all_modules: + if module is None and not all_modules and len(self.all_remote_components) > 0: questions = [ { "type": "list", @@ -127,9 +150,9 @@ def lint( remote_modules = self.all_remote_components if self.repo_type == "modules": - log.info(f"Linting modules repo: [magenta]'{self.dir}'") + log.info(f"Linting modules repo: [magenta]'{self.directory}'") else: - log.info(f"Linting pipeline: [magenta]'{self.dir}'") + log.info(f"Linting pipeline: [magenta]'{self.directory}'") if module: log.info(f"Linting module: [magenta]'{module}'") @@ -154,7 +177,9 @@ def lint( self._print_results(show_passed=show_passed, sort_by=sort_by) self.print_summary() - def lint_modules(self, modules, registry="quay.io", local=False, fix_version=False): + def lint_modules( + self, modules: List[NFCoreComponent], registry: str = "quay.io", local: bool = False, fix_version: bool = False + ) -> None: """ Lint a list of modules @@ -182,9 +207,15 @@ def lint_modules(self, modules, registry="quay.io", local=False, fix_version=Fal for mod in modules: progress_bar.update(lint_progress, advance=1, test_name=mod.component_name) - self.lint_module(mod, progress_bar, registry=registry, local=local, fix_version=fix_version) + self.lint_module(mod, progress_bar, local=local, fix_version=fix_version) - def lint_module(self, mod, progress_bar, registry, local=False, fix_version=False): + def lint_module( + self, + mod: NFCoreComponent, + progress_bar: rich.progress.Progress, + local: bool = False, + fix_version: bool = False, + ): """ Perform linting on one module @@ -213,7 +244,13 @@ def lint_module(self, mod, progress_bar, 
registry, local=False, fix_version=Fals
         # Otherwise run all the lint tests
         else:
-            if self.repo_type == "pipeline" and self.modules_json:
+            mod.get_inputs_from_main_nf()
+            mod.get_outputs_from_main_nf()
+            # Update meta.yml file if requested
+            if self.fix:
+                self.update_meta_yml_file(mod)
+
+            if self.repo_type == "pipeline" and self.modules_json and mod.repo_url:
                 # Set correct sha
                 version = self.modules_json.get_module_version(mod.component_name, mod.repo_url, mod.org)
                 mod.git_sha = version
@@ -232,3 +269,104 @@ def lint_module(self, mod, progress_bar, registry, local=False, fix_version=Fals
         self.failed += warned
         self.failed += [LintResult(mod, *m) for m in mod.failed]
+
+    def update_meta_yml_file(self, mod):
+        """
+        Update the meta.yml file with the correct inputs and outputs
+        """
+        meta_yml = self.read_meta_yml(mod)
+        corrected_meta_yml = meta_yml.copy()
+        yaml = ruamel.yaml.YAML()
+        yaml.preserve_quotes = True
+        yaml.indent(mapping=2, sequence=2, offset=0)
+
+        # Obtain inputs and outputs from main.nf and meta.yml
+        # Used to compare only the structure of channels and elements
+        # Do not compare features to allow for custom features in meta.yml (i.e. pattern)
+        if "input" in meta_yml:
+            correct_inputs, meta_inputs = self.obtain_correct_and_specified_inputs(mod, meta_yml)
+        if "output" in meta_yml:
+            correct_outputs, meta_outputs = self.obtain_correct_and_specified_outputs(mod, meta_yml)
+
+        if "input" in meta_yml and correct_inputs != meta_inputs:
+            log.debug(
+                f"Correct inputs: '{correct_inputs}' differ from current inputs: '{meta_inputs}' in '{mod.meta_yml}'"
+            )
+            corrected_meta_yml["input"] = mod.inputs.copy()  # list of lists (channels) of dicts (elements)
+            for i, channel in enumerate(corrected_meta_yml["input"]):
+                for j, element in enumerate(channel):
+                    element_name = list(element.keys())[0]
+                    for k, meta_element in enumerate(meta_yml["input"]):
+                        try:
+                            # Handle old format of meta.yml: list of dicts (channels)
+                            if element_name in meta_element.keys():
+                                # Copy current features of that input element from meta.yml
+                                for feature in meta_element[element_name].keys():
+                                    if feature not in element[element_name].keys():
+                                        corrected_meta_yml["input"][i][j][element_name][feature] = meta_element[
+                                            element_name
+                                        ][feature]
+                                break
+                        except AttributeError:
+                            # Handle new format of meta.yml: list of lists (channels) of elements (dicts)
+                            for x, meta_ch_element in enumerate(meta_element):
+                                if element_name in meta_ch_element.keys():
+                                    # Copy current features of that input element from meta.yml
+                                    for feature in meta_element[x][element_name].keys():
+                                        if feature not in element[element_name].keys():
+                                            corrected_meta_yml["input"][i][j][element_name][feature] = meta_element[x][
+                                                element_name
+                                            ][feature]
+                                    break
+
+        if "output" in meta_yml and correct_outputs != meta_outputs:
+            log.debug(
+                f"Correct outputs: '{correct_outputs}' differ from current outputs: '{meta_outputs}' in '{mod.meta_yml}'"
+            )
+            corrected_meta_yml["output"] = mod.outputs.copy()  # list of dicts (channels) with list of dicts (elements)
+            for i, channel in enumerate(corrected_meta_yml["output"]):
+                ch_name = list(channel.keys())[0]
+                for j, element in enumerate(channel[ch_name]):
+                    element_name = list(element.keys())[0]
+                    for k, meta_element in enumerate(meta_yml["output"]):
+                        if element_name in meta_element.keys():
+                            # Copy current features of that output element from meta.yml
+                            for feature in meta_element[element_name].keys():
+                                if feature not in element[element_name].keys():
+                                    corrected_meta_yml["output"][i][ch_name][j][element_name][feature] = meta_element[
+                                        element_name
+                                    ][feature]
+                            break
+                        elif ch_name in meta_element.keys():
+                            # When the previous output element was using the name of the channel
+                            # Copy current features of that output element from meta.yml
+                            try:
+                                # Handle old format of meta.yml
+                                for feature in meta_element[ch_name].keys():
+                                    if feature not in element[element_name].keys():
+                                        corrected_meta_yml["output"][i][ch_name][j][element_name][feature] = (
+                                            meta_element[ch_name][feature]
+                                        )
+                            except AttributeError:
+                                # Handle new format of meta.yml
+                                for x, meta_ch_element in enumerate(meta_element[ch_name]):
+                                    for meta_ch_element_name in meta_ch_element.keys():
+                                        for feature in meta_ch_element[meta_ch_element_name].keys():
+                                            if feature not in element[element_name].keys():
+                                                corrected_meta_yml["output"][i][ch_name][j][element_name][feature] = (
+                                                    meta_ch_element[meta_ch_element_name]
+                                                )
+                            break
+
+        # Add bio.tools identifier
+        for i, tool in enumerate(corrected_meta_yml["tools"]):
+            tool_name = list(tool.keys())[0]
+            if "identifier" not in tool[tool_name]:
+                corrected_meta_yml["tools"][i][tool_name]["identifier"] = get_biotools_id(
+                    mod.component_name if "/" not in mod.component_name else mod.component_name.split("/")[0]
+                )
+
+        with open(mod.meta_yml, "w") as fh:
+            log.info(f"Updating {mod.meta_yml}")
+            yaml.dump(corrected_meta_yml, fh)
+            run_prettier_on_file(fh.name)
diff --git a/nf_core/modules/lint/environment_yml.py b/nf_core/modules/lint/environment_yml.py
index 92281d99c..4488b0bef 100644
--- a/nf_core/modules/lint/environment_yml.py
+++ b/nf_core/modules/lint/environment_yml.py
@@ -5,7 +5,7 @@
 import yaml
 from jsonschema import exceptions, validators
 
-from nf_core.components.lint import ComponentLint
+from nf_core.components.lint import ComponentLint, LintExceptionError
 from nf_core.components.nfcore_component import NFCoreComponent
 from nf_core.utils import custom_yaml_dumper
 
@@ -22,8 +22,10 @@ def environment_yml(module_lint_object: ComponentLint, module: NFCoreComponent)
     """
     env_yml = None
    # load the environment.yml file
+    if module.environment_yml is None:
+        raise LintExceptionError("Module does not have an `environment.yml` file")
     try:
-        with open(Path(module.component_dir, "environment.yml")) as fh:
+        with open(module.environment_yml) as fh:
             env_yml = yaml.safe_load(fh)
 
         module.passed.append(("environment_yml_exists", "Module's `environment.yml` exists", module.environment_yml))
@@ -60,7 +62,7 @@ def environment_yml(module_lint_object: ComponentLint, module: NFCoreComponent)
         hint = ""
         if len(e.path) > 0:
             hint = f"\nCheck the entry for `{e.path[0]}`."
- if e.schema.get("message"): + if e.schema and isinstance(e.schema, dict) and "message" in e.schema: e.message = e.schema["message"] module.failed.append( ( @@ -88,42 +90,3 @@ def environment_yml(module_lint_object: ComponentLint, module: NFCoreComponent) env_yml["dependencies"].sort() with open(Path(module.component_dir, "environment.yml"), "w") as fh: yaml.dump(env_yml, fh, Dumper=custom_yaml_dumper()) - - # Check that the name in the environment.yml file matches the name in the meta.yml file - with open(Path(module.component_dir, "meta.yml")) as fh: - meta_yml = yaml.safe_load(fh) - - if env_yml["name"] == meta_yml["name"]: - module.passed.append( - ( - "environment_yml_name", - "The module's `environment.yml` name matches module name", - module.environment_yml, - ) - ) - else: - module.failed.append( - ( - "environment_yml_name", - f"Conflicting process name between environment.yml (`{env_yml['name']}`) and meta.yml (`{module.component_name}`)", - module.environment_yml, - ) - ) - - # Check that the name is lowercase - if env_yml["name"] == env_yml["name"].lower(): - module.passed.append( - ( - "environment_yml_name_lowercase", - "The module's `environment.yml` name is lowercase", - module.environment_yml, - ) - ) - else: - module.failed.append( - ( - "environment_yml_name_lowercase", - "The module's `environment.yml` name is not lowercase", - module.environment_yml, - ) - ) diff --git a/nf_core/modules/lint/main_nf.py b/nf_core/modules/lint/main_nf.py index fd4d81f7f..dbc1bed73 100644 --- a/nf_core/modules/lint/main_nf.py +++ b/nf_core/modules/lint/main_nf.py @@ -6,19 +6,24 @@ import re import sqlite3 from pathlib import Path +from typing import List, Tuple from urllib.parse import urlparse, urlunparse import requests import yaml +from rich.progress import Progress import nf_core import nf_core.modules.modules_utils +from nf_core.components.nfcore_component import NFCoreComponent from nf_core.modules.modules_differ import ModulesDiffer log = logging.getLogger(__name__) -def main_nf(module_lint_object, module, fix_version, registry, progress_bar): +def main_nf( + module_lint_object, module: NFCoreComponent, fix_version: bool, registry: str, progress_bar: Progress +) -> Tuple[List[str], List[str]]: """ Lint a ``main.nf`` module file @@ -38,12 +43,12 @@ def main_nf(module_lint_object, module, fix_version, registry, progress_bar): of ``software`` and ``prefix`` """ - inputs = [] - outputs = [] + inputs: List[str] = [] + outputs: List[str] = [] # Check if we have a patch file affecting the 'main.nf' file # otherwise read the lines directly from the module - lines = None + lines: List[str] = [] if module.is_patched: lines = ModulesDiffer.try_apply_patch( module.component_name, @@ -51,8 +56,9 @@ def main_nf(module_lint_object, module, fix_version, registry, progress_bar): module.patch_path, Path(module.component_dir).relative_to(module.base_dir), reverse=True, - ).get("main.nf") - if lines is None: + ).get("main.nf", [""]) + + if len(lines) == 0: try: # Check whether file exists and load it with open(module.main_nf) as fh: @@ -60,10 +66,14 @@ def main_nf(module_lint_object, module, fix_version, registry, progress_bar): module.passed.append(("main_nf_exists", "Module file exists", module.main_nf)) except FileNotFoundError: module.failed.append(("main_nf_exists", "Module file does not exist", module.main_nf)) - return + raise FileNotFoundError(f"Module file does not exist: {module.main_nf}") deprecated_i = ["initOptions", "saveFiles", "getSoftwareName", "getProcessName", "publishDir"] - 
lines_j = "\n".join(lines) + if len(lines) > 0: + lines_j = "\n".join(lines) + else: + lines_j = "" + for i in deprecated_i: if i in lines_j: module.failed.append( @@ -81,7 +91,8 @@ def main_nf(module_lint_object, module, fix_version, registry, progress_bar): script_lines = [] shell_lines = [] when_lines = [] - for line in lines: + iter_lines = iter(lines) + for line in iter_lines: if re.search(r"^\s*process\s*\w*\s*{", line) and state == "module": state = "process" if re.search(r"input\s*:", line) and state in ["process"]: @@ -104,6 +115,13 @@ def main_nf(module_lint_object, module, fix_version, registry, progress_bar): if state == "process" and not _is_empty(line): process_lines.append(line) if state == "input" and not _is_empty(line): + # allow multiline tuples + if "tuple" in line and line.count("(") <= 1: + joint_tuple = line + while re.sub(r"\s", "", line) != ")": + joint_tuple = joint_tuple + line + line = next(iter_lines) + line = joint_tuple inputs.extend(_parse_input(module, line)) if state == "output" and not _is_empty(line): outputs += _parse_output(module, line) @@ -367,14 +385,14 @@ def check_process_section(self, lines, registry, fix_version, progress_bar): # response = _bioconda_package(bp) response = nf_core.utils.anaconda_package(bp) except LookupError: - self.warned.append(("bioconda_version", "Conda version not specified correctly", self.main_nf)) + self.warned.append(("bioconda_version", f"Conda version not specified correctly: {bp}", self.main_nf)) except ValueError: - self.failed.append(("bioconda_version", "Conda version not specified correctly", self.main_nf)) + self.failed.append(("bioconda_version", f"Conda version not specified correctly: {bp}", self.main_nf)) else: # Check that required version is available at all if bioconda_version not in response.get("versions"): self.failed.append( - ("bioconda_version", f"Conda package had unknown version: `{bioconda_version}`", self.main_nf) + ("bioconda_version", f"Conda package {bp} had unknown version: `{bioconda_version}`", self.main_nf) ) continue # No need to test for latest version, continue linting # Check version is latest available @@ -423,14 +441,21 @@ def check_process_section(self, lines, registry, fix_version, progress_bar): def check_process_labels(self, lines): - correct_process_labels = ["process_single", "process_low", "process_medium", "process_high", "process_long"] + correct_process_labels = [ + "process_single", + "process_low", + "process_medium", + "process_high", + "process_long", + "process_high_memory", + ] all_labels = [line.strip() for line in lines if line.lstrip().startswith("label ")] bad_labels = [] good_labels = [] if len(all_labels) > 0: for label in all_labels: try: - label = re.match(r"^label\s+'?([a-zA-Z0-9_-]+)'?$", label).group(1) + label = re.match(r"^label\s+'?\"?([a-zA-Z0-9_-]+)'?\"?$", label).group(1) except AttributeError: self.warned.append( ( diff --git a/nf_core/modules/lint/meta_yml.py b/nf_core/modules/lint/meta_yml.py index 481d50b3e..4ad728d10 100644 --- a/nf_core/modules/lint/meta_yml.py +++ b/nf_core/modules/lint/meta_yml.py @@ -1,13 +1,17 @@ import json +import logging from pathlib import Path +from typing import Union -import yaml +import ruamel.yaml from jsonschema import exceptions, validators -from nf_core.components.lint import ComponentLint +from nf_core.components.lint import ComponentLint, LintExceptionError from nf_core.components.nfcore_component import NFCoreComponent from nf_core.modules.modules_differ import ModulesDiffer +log = 
logging.getLogger(__name__) + def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None: """ @@ -39,11 +43,9 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None """ - module.get_inputs_from_main_nf() - module.get_outputs_from_main_nf() # Check if we have a patch file, get original file in that case - meta_yaml = None - if module.is_patched: + meta_yaml = read_meta_yml(module_lint_object, module) + if module.is_patched and module_lint_object.modules_repo.repo_path is not None: lines = ModulesDiffer.try_apply_patch( module.component_name, module_lint_object.modules_repo.repo_path, @@ -52,15 +54,15 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None reverse=True, ).get("meta.yml") if lines is not None: + yaml = ruamel.yaml.YAML() meta_yaml = yaml.safe_load("".join(lines)) + if module.meta_yml is None: + raise LintExceptionError("Module does not have a `meta.yml` file") if meta_yaml is None: - try: - with open(module.meta_yml) as fh: - meta_yaml = yaml.safe_load(fh) - module.passed.append(("meta_yml_exists", "Module `meta.yml` exists", module.meta_yml)) - except FileNotFoundError: - module.failed.append(("meta_yml_exists", "Module `meta.yml` does not exist", module.meta_yml)) - return + module.failed.append(("meta_yml_exists", "Module `meta.yml` does not exist", module.meta_yml)) + return + else: + module.passed.append(("meta_yml_exists", "Module `meta.yml` exists", module.meta_yml)) # Confirm that the meta.yml file is valid according to the JSON schema valid_meta_yml = False @@ -76,7 +78,7 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None hint = f"\nCheck the entry for `{e.path[0]}`." if e.message.startswith("None is not of type 'object'") and len(e.path) > 2: hint = f"\nCheck that the child entries of {str(e.path[0])+'.'+str(e.path[2])} are indented correctly." 
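Since this file now loads `meta.yml` with ruamel.yaml (see the imports above and `read_meta_yml` below), a minimal round-trip sketch shows the motivation: quoting and layout survive a rewrite. The data and the `identifier` edit here are illustrative:

import io

import ruamel.yaml

yaml = ruamel.yaml.YAML()
yaml.preserve_quotes = True
yaml.indent(mapping=2, sequence=2, offset=0)

doc = yaml.load('tools:\n  - mytool:\n      description: "An example tool"\n')
doc["tools"][0]["mytool"]["identifier"] = "biotools:mytool"  # hypothetical value

buf = io.StringIO()
yaml.dump(doc, buf)
print(buf.getvalue())  # the double quotes around "An example tool" survive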
- if e.schema.get("message"): + if e.schema and isinstance(e.schema, dict) and "message" in e.schema: e.message = e.schema["message"] incorrect_value = meta_yaml for key in e.path: @@ -91,79 +93,8 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None ) ) - # Confirm that all input and output channels are specified + # Confirm that all input and output channels are correctly specified if valid_meta_yml: - if "input" in meta_yaml: - meta_input = [list(x.keys())[0] for x in meta_yaml["input"]] - for input in module.inputs: - if input in meta_input: - module.passed.append(("meta_input_main_only", f"`{input}` specified", module.meta_yml)) - else: - module.warned.append( - ( - "meta_input_main_only", - f"`{input}` is present as an input in the `main.nf`, but missing in `meta.yml`", - module.meta_yml, - ) - ) - # check if there are any inputs in meta.yml that are not in main.nf - for input in meta_input: - if input in module.inputs: - module.passed.append( - ( - "meta_input_meta_only", - f"`{input}` is present as an input in `meta.yml` and `main.nf`", - module.meta_yml, - ) - ) - else: - module.warned.append( - ( - "meta_input_meta_only", - f"`{input}` is present as an input in `meta.yml` but not in `main.nf`", - module.meta_yml, - ) - ) - - if "output" in meta_yaml and meta_yaml["output"] is not None: - meta_output = [list(x.keys())[0] for x in meta_yaml["output"]] - for output in module.outputs: - if output in meta_output: - module.passed.append(("meta_output_main_only", f"`{output}` specified", module.meta_yml)) - else: - module.warned.append( - ( - "meta_output_main_only", - f"`{output}` is present as an output in the `main.nf`, but missing in `meta.yml`", - module.meta_yml, - ) - ) - # check if there are any outputs in meta.yml that are not in main.nf - for output in meta_output: - if output in module.outputs: - module.passed.append( - ( - "meta_output_meta_only", - f"`{output}` is present as an output in `meta.yml` and `main.nf`", - module.meta_yml, - ) - ) - elif output == "meta": - module.passed.append( - ( - "meta_output_meta_only", - f"`{output}` is skipped for `meta.yml` outputs", - module.meta_yml, - ) - ) - else: - module.warned.append( - ( - "meta_output_meta_only", - f"`{output}` is present as an output in `meta.yml` but not in `main.nf`", - module.meta_yml, - ) - ) # confirm that the name matches the process name in main.nf if meta_yaml["name"].upper() == module.process_name: module.passed.append( @@ -181,3 +112,179 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None module.meta_yml, ) ) + # Check that inputs are specified in meta.yml + if len(module.inputs) > 0 and "input" not in meta_yaml: + module.failed.append( + ( + "meta_input", + "Inputs not specified in module `meta.yml`", + module.meta_yml, + ) + ) + elif len(module.inputs) > 0: + module.passed.append( + ( + "meta_input", + "Inputs specified in module `meta.yml`", + module.meta_yml, + ) + ) + else: + log.debug(f"No inputs specified in module `main.nf`: {module.component_name}") + # Check that all inputs are correctly specified + if "input" in meta_yaml: + correct_inputs, meta_inputs = obtain_correct_and_specified_inputs(module_lint_object, module, meta_yaml) + + if correct_inputs == meta_inputs: + module.passed.append( + ( + "correct_meta_inputs", + "Correct inputs specified in module `meta.yml`", + module.meta_yml, + ) + ) + else: + module.failed.append( + ( + "correct_meta_inputs", + f"Module `meta.yml` does not match `main.nf`. 
Inputs should contain: {correct_inputs}\nRun `nf-core modules lint --fix` to update the `meta.yml` file.", + module.meta_yml, + ) + ) + + # Check that outputs are specified in meta.yml + if len(module.outputs) > 0 and "output" not in meta_yaml: + module.failed.append( + ( + "meta_output", + "Outputs not specified in module `meta.yml`", + module.meta_yml, + ) + ) + elif len(module.outputs) > 0: + module.passed.append( + ( + "meta_output", + "Outputs specified in module `meta.yml`", + module.meta_yml, + ) + ) + # Check that all outputs are correctly specified + if "output" in meta_yaml: + correct_outputs, meta_outputs = obtain_correct_and_specified_outputs(module_lint_object, module, meta_yaml) + + if correct_outputs == meta_outputs: + module.passed.append( + ( + "correct_meta_outputs", + "Correct outputs specified in module `meta.yml`", + module.meta_yml, + ) + ) + else: + module.failed.append( + ( + "correct_meta_outputs", + f"Module `meta.yml` does not match `main.nf`. Outputs should contain: {correct_outputs}\nRun `nf-core modules lint --fix` to update the `meta.yml` file.", + module.meta_yml, + ) + ) + + +def read_meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> Union[dict, None]: + """ + Read a `meta.yml` file and return it as a dictionary + + Args: + module_lint_object (ComponentLint): The lint object for the module + module (NFCoreComponent): The module to read + + Returns: + dict: The `meta.yml` file as a dictionary + """ + meta_yaml = None + yaml = ruamel.yaml.YAML() + yaml.preserve_quotes = True + # Check if we have a patch file, get original file in that case + if module.is_patched: + lines = ModulesDiffer.try_apply_patch( + module.component_name, + module_lint_object.modules_repo.repo_path, + module.patch_path, + Path(module.component_dir).relative_to(module.base_dir), + reverse=True, + ).get("meta.yml") + if lines is not None: + meta_yaml = yaml.load("".join(lines)) + if meta_yaml is None: + if module.meta_yml is None: + return None + with open(module.meta_yml) as fh: + meta_yaml = yaml.load(fh) + return meta_yaml + + +def obtain_correct_and_specified_inputs(_, module, meta_yaml): + """ + Obtain the list of correct inputs and the elements of each input channel. + + Args: + module (object): The module object. + meta_yaml (dict): The meta.yml dictionary. + + Returns: + tuple: A tuple containing two lists. The first list contains the correct inputs, + and the second list contains the inputs specified in meta.yml. + """ + correct_inputs = [] + for input_channel in module.inputs: + channel_elements = [] + for element in input_channel: + channel_elements.append(list(element.keys())[0]) + correct_inputs.append(channel_elements) + + meta_inputs = [] + for input_channel in meta_yaml["input"]: + if isinstance(input_channel, list): # Correct format + channel_elements = [] + for element in input_channel: + channel_elements.append(list(element.keys())[0]) + meta_inputs.append(channel_elements) + elif isinstance(input_channel, dict): # Old format + meta_inputs.append(list(input_channel.keys())[0]) + + return correct_inputs, meta_inputs + + +def obtain_correct_and_specified_outputs(_, module, meta_yaml): + """ + Obtain the dictionary of correct outputs and elements of each output channel. + + Args: + module (object): The module object. + meta_yaml (dict): The meta.yml dictionary. + + Returns: + correct_outputs (dict): A dictionary containing the correct outputs and their elements. + meta_outputs (dict): A dictionary containing the outputs specified in meta.yml. 
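Both helpers reduce channels to bare element names before `main.nf` and `meta.yml` are compared. A self-contained sketch of that reduction, using made-up channel data in the new list-of-lists layout:

meta_input = [
    [{"meta": {"type": "map"}}, {"bam": {"type": "file"}}],  # channel 1
    [{"fasta": {"type": "file"}}],  # channel 2
]

def channel_element_names(channels):
    # Each channel is a list of single-key dicts; keep only the key names
    return [[next(iter(element)) for element in channel] for channel in channels]

print(channel_element_names(meta_input))  # [['meta', 'bam'], ['fasta']]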
+ """ + correct_outputs = {} + for output_channel in module.outputs: + channel_name = list(output_channel.keys())[0] + channel_elements = [] + for element in output_channel[channel_name]: + channel_elements.append(list(element.keys())[0]) + correct_outputs[channel_name] = channel_elements + + meta_outputs = {} + for output_channel in meta_yaml["output"]: + channel_name = list(output_channel.keys())[0] + if isinstance(output_channel[channel_name], list): # Correct format + channel_elements = [] + for element in output_channel[channel_name]: + channel_elements.append(list(element.keys())[0]) + meta_outputs[channel_name] = channel_elements + elif isinstance(output_channel[channel_name], dict): # Old format + meta_outputs[channel_name] = [] + + return correct_outputs, meta_outputs diff --git a/nf_core/modules/lint/module_tests.py b/nf_core/modules/lint/module_tests.py index b2b6c2221..6722c1212 100644 --- a/nf_core/modules/lint/module_tests.py +++ b/nf_core/modules/lint/module_tests.py @@ -4,6 +4,7 @@ import json import logging +import re from pathlib import Path import yaml @@ -18,7 +19,7 @@ def module_tests(_, module: NFCoreComponent): Lint the tests of a module in ``nf-core/modules`` It verifies that the test directory exists - and contains a ``main.nf.test`` a ``main.nf.test.snap`` and ``tags.yml``. + and contains a ``main.nf.test`` and a ``main.nf.test.snap`` """ repo_dir = module.component_dir.parts[: module.component_dir.parts.index(module.component_name.split("/")[0])][-1] @@ -29,9 +30,21 @@ def module_tests(_, module: NFCoreComponent): module.passed.append(("test_dir_exists", "nf-test test directory exists", module.nftest_testdir)) else: if is_pytest: - module.warned.append(("test_dir_exists", "nf-test directory is missing", module.nftest_testdir)) + module.warned.append( + ( + "test_dir_exists", + "nf-test directory is missing", + module.nftest_testdir, + ) + ) else: - module.failed.append(("test_dir_exists", "nf-test directory is missing", module.nftest_testdir)) + module.failed.append( + ( + "test_dir_exists", + "nf-test directory is missing", + module.nftest_testdir, + ) + ) return # Lint the test main.nf file @@ -39,18 +52,35 @@ def module_tests(_, module: NFCoreComponent): module.passed.append(("test_main_nf_exists", "test `main.nf.test` exists", module.nftest_main_nf)) else: if is_pytest: - module.warned.append(("test_main_nf_exists", "test `main.nf.test` does not exist", module.nftest_main_nf)) + module.warned.append( + ( + "test_main_nf_exists", + "test `main.nf.test` does not exist", + module.nftest_main_nf, + ) + ) else: - module.failed.append(("test_main_nf_exists", "test `main.nf.test` does not exist", module.nftest_main_nf)) + module.failed.append( + ( + "test_main_nf_exists", + "test `main.nf.test` does not exist", + module.nftest_main_nf, + ) + ) if module.nftest_main_nf.is_file(): # Check if main.nf.test.snap file exists, if 'snap(' is inside main.nf.test with open(module.nftest_main_nf) as fh: if "snapshot(" in fh.read(): snap_file = module.nftest_testdir / "main.nf.test.snap" + if snap_file.is_file(): module.passed.append( - ("test_snapshot_exists", "snapshot file `main.nf.test.snap` exists", snap_file) + ( + "test_snapshot_exists", + "snapshot file `main.nf.test.snap` exists", + snap_file, + ) ) # Validate no empty files with open(snap_file) as snap_fh: @@ -133,11 +163,18 @@ def module_tests(_, module: NFCoreComponent): ) else: module.failed.append( - ("test_snapshot_exists", "snapshot file `main.nf.test.snap` does not exist", snap_file) + ( + 
"test_snapshot_exists", + "snapshot file `main.nf.test.snap` does not exist", + snap_file, + ) ) # Verify that tags are correct. main_nf_tags = module._get_main_nf_tags(module.nftest_main_nf) - required_tags = ["modules", "modules_nfcore", module.component_name] + not_alphabet = re.compile(r"[^a-zA-Z]") + org_alp = not_alphabet.sub("", module.org) + org_alphabet = org_alp if org_alp != "" else "nfcore" + required_tags = ["modules", f"modules_{org_alphabet}", module.component_name] if module.component_name.count("/") == 1: required_tags.append(module.component_name.split("/")[0]) chained_components_tags = module._get_included_components_in_chained_tests(module.nftest_main_nf) @@ -148,7 +185,13 @@ def module_tests(_, module: NFCoreComponent): if tag not in main_nf_tags: missing_tags.append(tag) if len(missing_tags) == 0: - module.passed.append(("test_main_tags", "Tags adhere to guidelines", module.nftest_main_nf)) + module.passed.append( + ( + "test_main_tags", + "Tags adhere to guidelines", + module.nftest_main_nf, + ) + ) else: module.failed.append( ( @@ -174,41 +217,20 @@ def module_tests(_, module: NFCoreComponent): ) else: module.passed.append( - ("test_pytest_yml", "module with nf-test not in pytest_modules.yml", pytest_yml_path) - ) - except FileNotFoundError: - module.warned.append(("test_pytest_yml", "Could not open pytest_modules.yml file", pytest_yml_path)) - - if module.tags_yml.is_file(): - # Check that tags.yml exists and it has the correct entry - module.passed.append(("test_tags_yml_exists", "file `tags.yml` exists", module.tags_yml)) - with open(module.tags_yml) as fh: - tags_yml = yaml.safe_load(fh) - if module.component_name in tags_yml.keys(): - module.passed.append(("test_tags_yml", "correct entry in tags.yml", module.tags_yml)) - if f"modules/{module.org}/{module.component_name}/**" in tags_yml[module.component_name]: - module.passed.append(("test_tags_yml", "correct path in tags.yml", module.tags_yml)) - else: - module.failed.append( ( - "test_tags_yml", - f"incorrect path in tags.yml, expected `modules/{module.org}/{module.component_name}/**`, got `{tags_yml[module.component_name][0]}`", - module.tags_yml, + "test_pytest_yml", + "module with nf-test not in pytest_modules.yml", + pytest_yml_path, ) ) - else: - module.failed.append( - ( - "test_tags_yml", - f"incorrect key in tags.yml, should be `{module.component_name}`, got `{list(tags_yml.keys())[0]}`.", - module.tags_yml, - ) + except FileNotFoundError: + module.warned.append( + ( + "test_pytest_yml", + "Could not open pytest_modules.yml file", + pytest_yml_path, ) - else: - if is_pytest: - module.warned.append(("test_tags_yml_exists", "file `tags.yml` does not exist", module.tags_yml)) - else: - module.failed.append(("test_tags_yml_exists", "file `tags.yml` does not exist", module.tags_yml)) + ) # Check that the old test directory does not exist if not is_pytest: @@ -222,4 +244,10 @@ def module_tests(_, module: NFCoreComponent): ) ) else: - module.passed.append(("test_old_test_dir", "Old pytests don't exist for this module", old_test_dir)) + module.passed.append( + ( + "test_old_test_dir", + "Old pytests don't exist for this module", + old_test_dir, + ) + ) diff --git a/nf_core/modules/lint/module_todos.py b/nf_core/modules/lint/module_todos.py index c9c90ec3d..a07005df0 100644 --- a/nf_core/modules/lint/module_todos.py +++ b/nf_core/modules/lint/module_todos.py @@ -1,6 +1,6 @@ import logging -from nf_core.lint.pipeline_todos import pipeline_todos +from nf_core.pipelines.lint.pipeline_todos import pipeline_todos 
log = logging.getLogger(__name__) diff --git a/nf_core/modules/lint/module_version.py b/nf_core/modules/lint/module_version.py index d08658f5d..207d5e941 100644 --- a/nf_core/modules/lint/module_version.py +++ b/nf_core/modules/lint/module_version.py @@ -6,13 +6,15 @@ from pathlib import Path import nf_core +import nf_core.modules.lint import nf_core.modules.modules_repo import nf_core.modules.modules_utils +from nf_core.modules.modules_utils import NFCoreComponent log = logging.getLogger(__name__) -def module_version(module_lint_object, module): +def module_version(module_lint_object: "nf_core.modules.lint.ModuleLint", module: "NFCoreComponent"): """ Verifies that the module has a version specified in the ``modules.json`` file @@ -20,8 +22,9 @@ def module_version(module_lint_object, module): containing a commit SHA. If that is true, it verifies that there are no newer version of the module available. """ - - modules_json_path = Path(module_lint_object.dir, "modules.json") + assert module_lint_object.modules_json is not None # mypy + assert module.repo_url is not None # mypy + modules_json_path = Path(module_lint_object.directory, "modules.json") # Verify that a git_sha exists in the `modules.json` file for this module version = module_lint_object.modules_json.get_module_version(module.component_name, module.repo_url, module.org) if version is None: @@ -38,8 +41,8 @@ def module_version(module_lint_object, module): ) modules_repo = nf_core.modules.modules_repo.ModulesRepo(remote_url=module.repo_url, branch=module.branch) - module_git_log = modules_repo.get_component_git_log(module.component_name, "modules") - if version == next(module_git_log)["git_sha"]: + module_git_log = list(modules_repo.get_component_git_log(module.component_name, "modules")) + if version == module_git_log[0]["git_sha"]: module.passed.append(("module_version", "Module is the latest version", module.component_dir)) else: module.warned.append(("module_version", "New version available", module.component_dir)) diff --git a/nf_core/modules/list.py b/nf_core/modules/list.py index c7dc943f9..68da570f6 100644 --- a/nf_core/modules/list.py +++ b/nf_core/modules/list.py @@ -1,4 +1,6 @@ import logging +from pathlib import Path +from typing import Optional, Union from nf_core.components.list import ComponentList @@ -6,5 +8,12 @@ class ModuleList(ComponentList): - def __init__(self, pipeline_dir, remote=True, remote_url=None, branch=None, no_pull=False): + def __init__( + self, + pipeline_dir: Union[str, Path] = ".", + remote: bool = True, + remote_url: Optional[str] = None, + branch: Optional[str] = None, + no_pull: bool = False, + ): super().__init__("modules", pipeline_dir, remote, remote_url, branch, no_pull) diff --git a/nf_core/modules/modules_differ.py b/nf_core/modules/modules_differ.py index dc2b163dd..f9ba9d30c 100644 --- a/nf_core/modules/modules_differ.py +++ b/nf_core/modules/modules_differ.py @@ -4,6 +4,7 @@ import logging import os from pathlib import Path +from typing import Dict, List, Union from rich.console import Console from rich.syntax import Syntax @@ -133,6 +134,7 @@ def write_diff_file( for_git=True, dsp_from_dir=None, dsp_to_dir=None, + limit_output=False, ): """ Writes the diffs of a module to the diff file. 
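The `limit_output` flag threaded through the hunks below boils down to a suffix check: only `.nf` files keep their full diff, everything else is reported as changed but not printed. A tiny sketch with made-up paths:

from pathlib import Path

def show_full_diff(file: Path, limit_output: bool) -> bool:
    # With limit_output set, non-.nf files are summarised rather than printed
    return not limit_output or file.suffix == ".nf"

for f in [Path("main.nf"), Path("meta.yml"), Path("environment.yml")]:
    print(f, show_full_diff(f, limit_output=True))
# main.nf True, meta.yml False, environment.yml False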
@@ -154,6 +156,7 @@ def write_diff_file( adds a/ and b/ prefixes to the file paths dsp_from_dir (str | Path): The 'from' directory displayed in the diff dsp_to_dir (str | Path): The 'to' directory displayed in the diff + limit_output (bool): If true, don't write the diff for files other than main.nf """ if dsp_from_dir is None: dsp_from_dir = from_dir @@ -174,9 +177,22 @@ def write_diff_file( else: fh.write(f"Changes in module '{Path(repo_path, module)}'\n") - for _, (diff_status, diff) in diffs.items(): - if diff_status != ModulesDiffer.DiffEnum.UNCHANGED: + for file, (diff_status, diff) in diffs.items(): + if diff_status == ModulesDiffer.DiffEnum.UNCHANGED: + # The files are identical + fh.write(f"'{Path(dsp_from_dir, file)}' is unchanged\n") + elif diff_status == ModulesDiffer.DiffEnum.CREATED: + # The file was created between the commits + fh.write(f"'{Path(dsp_from_dir, file)}' was created\n") + elif diff_status == ModulesDiffer.DiffEnum.REMOVED: + # The file was removed between the commits + fh.write(f"'{Path(dsp_from_dir, file)}' was removed\n") + elif limit_output and not file.suffix == ".nf": + # Skip printing the diff for files other than main.nf + fh.write(f"Changes in '{Path(module, file)}' but not shown\n") + else: # The file has changed write the diff lines to the file + fh.write(f"Changes in '{Path(module, file)}':\n") for line in diff: fh.write(line) fh.write("\n") @@ -219,7 +235,15 @@ def append_modules_json_diff(diff_path, old_modules_json, new_modules_json, modu @staticmethod def print_diff( - module, repo_path, from_dir, to_dir, current_version=None, new_version=None, dsp_from_dir=None, dsp_to_dir=None + module, + repo_path, + from_dir, + to_dir, + current_version=None, + new_version=None, + dsp_from_dir=None, + dsp_to_dir=None, + limit_output=False, ): """ Prints the diffs between two module versions to the terminal @@ -234,6 +258,7 @@ def print_diff( new_version (str): The version of the module the diff is computed against dsp_from_dir (str | Path): The 'from' directory displayed in the diff dsp_to_dir (str | Path): The 'to' directory displayed in the diff + limit_output (bool): If true, don't print the diff for files other than main.nf """ if dsp_from_dir is None: dsp_from_dir = from_dir @@ -261,6 +286,9 @@ def print_diff( elif diff_status == ModulesDiffer.DiffEnum.REMOVED: # The file was removed between the commits log.info(f"'{Path(dsp_from_dir, file)}' was removed") + elif limit_output and not file.suffix == ".nf": + # Skip printing the diff for files other than main.nf + log.info(f"Changes in '{Path(module, file)}' but not shown") else: # The file has changed log.info(f"Changes in '{Path(module, file)}':") @@ -268,7 +296,7 @@ def print_diff( console.print(Syntax("".join(diff), "diff", theme="ansi_dark", padding=1)) @staticmethod - def per_file_patch(patch_fn): + def per_file_patch(patch_fn: Union[str, Path]) -> Dict[str, List[str]]: """ Splits a patch file for several files into one patch per file. @@ -284,7 +312,7 @@ def per_file_patch(patch_fn): patches = {} i = 0 - patch_lines = [] + patch_lines: List[str] = [] key = "preamble" while i < len(lines): line = lines[i] @@ -363,13 +391,13 @@ def get_new_and_old_lines(patch): def try_apply_single_patch(file_lines, patch, reverse=False): """ Tries to apply a patch to a modified file. 
Since the line numbers in - the patch does not agree if the file is modified, the old and new + the patch do not agree if the file is modified, the old and new lines in the patch are reconstructed and then we look for the old lines in the modified file. If all hunks in the patch are found in the new file, it is updated with the new lines from the patch file. Args: - new_fn (str | Path): Path to the modified file + file_lines ([str]): The lines of the file to be patched patch (str | Path): (Outdated) patch for the file reverse (bool): Apply the patch in reverse @@ -423,7 +451,9 @@ return patched_new_lines @staticmethod - def try_apply_patch(module, repo_path, patch_path, module_dir, reverse=False): + def try_apply_patch( + module: str, repo_path: Union[str, Path], patch_path: Union[str, Path], module_dir: Path, reverse: bool = False + ) -> Dict[str, List[str]]: """ Try applying a full patch file to a module @@ -432,6 +462,7 @@ def try_apply_patch(module, repo_path, patch_path, module_dir, reverse=False): repo_path (str): Name of the repository where the module resides patch_path (str): The absolute path to the patch file to be applied module_dir (Path): The directory containing the module + reverse (bool): Apply the patch in reverse Returns: dict[str, str]: A dictionary with file paths (relative to the pipeline dir) diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 7d78268e9..05c64b6de 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -6,46 +6,57 @@ import shutil import tempfile from pathlib import Path +from typing import Dict, List, Optional, Tuple, Union import git import questionary import rich.prompt from git.exc import GitCommandError +from typing_extensions import NotRequired, TypedDict # for py<3.11 import nf_core.utils -from nf_core.components.components_utils import get_components_to_install -from nf_core.lint_utils import dump_json_with_prettier -from nf_core.modules.modules_repo import ( - NF_CORE_MODULES_NAME, - NF_CORE_MODULES_REMOTE, - ModulesRepo, -) +from nf_core.components.components_utils import NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE, get_components_to_install +from nf_core.modules.modules_repo import ModulesRepo +from nf_core.pipelines.lint_utils import dump_json_with_prettier from .modules_differ import ModulesDiffer log = logging.getLogger(__name__) +class ModulesJsonModuleEntry(TypedDict): + branch: str + git_sha: str + installed_by: List[str] + patch: NotRequired[str] + + +class ModulesJsonType(TypedDict): + name: str + homePage: str + repos: Dict[str, Dict[str, Dict[str, Dict[str, ModulesJsonModuleEntry]]]] + + class ModulesJson: """ An object for handling a 'modules.json' file in a pipeline """ - def __init__(self, pipeline_dir: str): + def __init__(self, pipeline_dir: Union[str, Path]) -> None: """ Initialise the object.
Args: pipeline_dir (str): The pipeline directory """ - self.dir = pipeline_dir - self.modules_dir = Path(self.dir, "modules") - self.subworkflows_dir = Path(self.dir, "subworkflows") - self.modules_json_path = Path(self.dir, "modules.json") - self.modules_json = None + self.directory = Path(pipeline_dir) + self.modules_dir = self.directory / "modules" + self.subworkflows_dir = self.directory / "subworkflows" + self.modules_json_path = self.directory / "modules.json" + self.modules_json: Optional[ModulesJsonType] = None self.pipeline_modules = None self.pipeline_subworkflows = None - self.pipeline_components = None + self.pipeline_components: Optional[Dict[str, List[Tuple[str, str]]]] = None def __str__(self): if self.modules_json is None: @@ -55,30 +66,29 @@ def __str__(self): def __repr__(self): return self.__str__() - def create(self): + def create(self) -> None: """ Creates the modules.json file from the modules and subworkflows installed in the pipeline directory Raises: UserWarning: If the creation fails """ - pipeline_config = nf_core.utils.fetch_wf_config(self.dir) + pipeline_config = nf_core.utils.fetch_wf_config(self.directory) pipeline_name = pipeline_config.get("manifest.name", "") pipeline_url = pipeline_config.get("manifest.homePage", "") - new_modules_json = {"name": pipeline_name.strip("'"), "homePage": pipeline_url.strip("'"), "repos": {}} + new_modules_json = ModulesJsonType(name=pipeline_name, homePage=pipeline_url, repos={}) if not self.modules_dir.exists(): if rich.prompt.Confirm.ask( "[bold][blue]?[/] Can't find a ./modules directory. Would you like me to create one?", default=True ): - log.info(f"Creating ./modules directory in '{self.dir}'") + log.info(f"Creating ./modules directory in '{self.directory}'") self.modules_dir.mkdir() else: raise UserWarning("Cannot proceed without a ./modules directory.") # Get repositories repos, _ = self.get_pipeline_module_repositories("modules", self.modules_dir) - # Get all module/subworkflow names in the repos repo_module_names = self.get_component_names_from_repo(repos, self.modules_dir) repo_subworkflow_names = self.get_component_names_from_repo(repos, self.subworkflows_dir) @@ -104,7 +114,9 @@ def create(self): self.modules_json = new_modules_json self.dump() - def get_component_names_from_repo(self, repos, directory): + def get_component_names_from_repo( + self, repos: Dict[str, Dict[str, Dict[str, Dict[str, Dict[str, Union[str, List[str]]]]]]], directory: Path + ) -> List[Tuple[str, List[str], str]]: """ Get component names from repositories in a pipeline. @@ -118,6 +130,10 @@ def get_component_names_from_repo(self, repos, directory): names = [] for repo_url in repos: modules_repo = ModulesRepo(repo_url) + if modules_repo is None: + raise UserWarning(f"Could not find module repository for '{repo_url}' in '{directory}'") + if modules_repo.repo_path is None: + raise UserWarning(f"Could not find module repository path for '{repo_url}' in '{directory}'") components = ( repo_url, [ @@ -130,7 +146,9 @@ def get_component_names_from_repo(self, repos, directory): names.append(components) return names - def get_pipeline_module_repositories(self, component_type, directory, repos=None): + def get_pipeline_module_repositories( + self, component_type: str, directory: Path, repos: Optional[Dict] = None + ) -> Tuple[Dict[str, Dict[str, Dict[str, Dict[str, Dict[str, Union[str, List[str]]]]]]], Dict[Path, Path]]: """ Finds all module repositories in the modules and subworkflows directory. Ignores the local modules/subworkflows. 
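To make the nesting that `ModulesJsonType` (introduced above) encodes concrete: the four dictionary levels under `repos` are remote URL → component type → install directory (org path) → component name. Below is a minimal sketch of a payload matching that shape; the pipeline name, remote, module, and SHA are hypothetical placeholders, not taken from any real pipeline.

```python
# Reusing the ModulesJsonModuleEntry / ModulesJsonType shapes from the patch
# above; every value here is invented for illustration only.
fastqc_entry = {
    "branch": "master",
    "git_sha": "0123456789abcdef0123456789abcdef01234567",  # placeholder SHA
    "installed_by": ["modules"],
    # "patch": "modules/nf-core/fastqc/fastqc.diff",  # optional (NotRequired)
}

modules_json = {
    "name": "nf-core/testpipeline",  # manifest.name
    "homePage": "https://github.com/nf-core/testpipeline",  # manifest.homePage
    "repos": {
        "https://github.com/nf-core/modules.git": {  # remote URL
            "modules": {  # component type
                "nf-core": {  # install directory (org path)
                    "fastqc": fastqc_entry,  # component name
                }
            }
        }
    },
}
```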
@@ -152,6 +170,7 @@ def get_pipeline_module_repositories(self, component_type, directory, repos=None # The function might rename some directories, keep track of them renamed_dirs = {} # Check if there are any untracked repositories + dirs_not_covered = self.dir_tree_uncovered(directory, [Path(ModulesRepo(url).repo_path) for url in repos]) if len(dirs_not_covered) > 0: log.info(f"Found custom {component_type[:-1]} repositories when creating 'modules.json'") @@ -180,6 +199,8 @@ def get_pipeline_module_repositories(self, component_type, directory, repos=None # Verify that there is a directory corresponding to the remote nrepo_name = ModulesRepo(nrepo_remote).repo_path + if nrepo_name is None: + raise UserWarning(f"Could not find the repository name for '{nrepo_remote}'") if not (directory / nrepo_name).exists(): log.info( "The provided remote does not seem to correspond to a local directory. " @@ -243,7 +264,13 @@ def dir_tree_uncovered(self, components_directory, repos): depth += 1 return dirs_not_covered - def determine_branches_and_shas(self, component_type, install_dir, remote_url, components): + def determine_branches_and_shas( + self, + component_type: str, + install_dir: Union[str, Path], + remote_url: str, + components: List[str], + ) -> Dict[str, ModulesJsonModuleEntry]: """ Determines what branch and commit sha each module/subworkflow in the pipeline belongs to @@ -264,14 +291,16 @@ def determine_branches_and_shas(self, component_type, install_dir, remote_url, c repo_path = self.modules_dir / install_dir elif component_type == "subworkflows": repo_path = self.subworkflows_dir / install_dir + else: + raise ValueError(f"Unknown component type '{component_type}'") # Get the branches present in the repository, as well as the default branch available_branches = ModulesRepo.get_remote_branches(remote_url) sb_local = [] dead_components = [] - repo_entry = {} + repo_entry: Dict[str, ModulesJsonModuleEntry] = {} for component in sorted(components): modules_repo = default_modules_repo - component_path = repo_path / component + component_path = Path(repo_path, component) correct_commit_sha = None tried_branches = {default_modules_repo.branch} found_sha = False @@ -323,7 +352,7 @@ def determine_branches_and_shas(self, component_type, install_dir, remote_url, c else: found_sha = True break - if found_sha: + if found_sha and correct_commit_sha is not None: repo_entry[component] = { "branch": modules_repo.branch, "git_sha": correct_commit_sha, @@ -333,7 +362,7 @@ def determine_branches_and_shas(self, component_type, install_dir, remote_url, c # Clean up the modules/subworkflows we were unable to find the sha for for component in sb_local: log.debug(f"Moving {component_type[:-1]} '{Path(install_dir, component)}' to 'local' directory") - self.move_component_to_local(component_type, component, install_dir) + self.move_component_to_local(component_type, component, str(install_dir)) for component in dead_components: log.debug(f"Removing {component_type[:-1]} '{Path(install_dir, component)}'") @@ -341,7 +370,13 @@ def determine_branches_and_shas(self, component_type, install_dir, remote_url, c return repo_entry - def find_correct_commit_sha(self, component_type, component_name, component_path, modules_repo): + def find_correct_commit_sha( + self, + component_type: str, + component_name: Union[str, Path], + component_path: Union[str, Path], + modules_repo: ModulesRepo, + ) -> Optional[str]: """ Returns the SHA for the latest commit where the local files are identical to the remote files Args: @@ -369,11
+404,12 @@ def find_correct_commit_sha(self, component_type, component_name, component_path return commit_sha return None - def move_component_to_local(self, component_type, component, repo_name): + def move_component_to_local(self, component_type: str, component: str, repo_name: str): """ Move a module/subworkflow to the 'local' directory Args: + component_type (str): The type of component, either 'modules' or 'subworkflows' component (str): The name of the module/subworkflow repo_name (str): The name of the repository the module resides in """ @@ -381,10 +417,12 @@ def move_component_to_local(self, component_type, component, repo_name): directory = self.modules_dir elif component_type == "subworkflows": directory = self.subworkflows_dir + else: + raise ValueError(f"Unknown component type '{component_type}'") current_path = directory / repo_name / component local_dir = directory / "local" if not local_dir.exists(): - local_dir.mkdir() + local_dir.mkdir(parents=True) to_name = component # Check if there is already a subdirectory with the name @@ -392,19 +430,20 @@ def move_component_to_local(self, component_type, component, repo_name): # Add a time suffix to the path to make it unique # (do it again and again if it didn't work out...) to_name += f"-{datetime.datetime.now().strftime('%y%m%d%H%M%S')}" - shutil.move(current_path, local_dir / to_name) + shutil.move(str(current_path), local_dir / to_name) - def unsynced_components(self): + def unsynced_components(self) -> Tuple[List[str], List[str], Dict]: """ Compute the difference between the modules/subworkflows in the directory and the modules/subworkflows in the 'modules.json' file. This is done by looking at all directories containing a 'main.nf' file Returns: - (untrack_dirs ([ Path ]), missing_installation (dict)): Directories that are not tracked + (untrack_dirs ([ str ]), missing_installation (dict)): Directories that are not tracked by the modules.json file, and modules/subworkflows in the modules.json where the installation directory is missing """ + assert self.modules_json is not None # mypy # Add all modules from modules.json to missing_installation missing_installation = copy.deepcopy(self.modules_json["repos"]) # Obtain the path of all installed modules @@ -428,14 +467,28 @@ def unsynced_components(self): return untracked_dirs_modules, untracked_dirs_subworkflows, missing_installation - def parse_dirs(self, dirs, missing_installation, component_type): + def parse_dirs(self, dirs: List[Path], missing_installation: Dict, component_type: str) -> Tuple[List[str], Dict]: + """ + Parse directories and check if they are tracked in the modules.json file + + Args: + dirs ([ Path ]): List of directories to check + missing_installation (dict): Dictionary with the modules.json entries + component_type (str): The type of component, either 'modules' or 'subworkflows' + + Returns: + (untracked_dirs ([ Path ]), missing_installation (dict)): List of directories that are not tracked + by the modules.json file, and the updated missing_installation dictionary + """ + untracked_dirs = [] for dir_ in dirs: # Check if the module/subworkflows directory exists in modules.json install_dir = dir_.parts[0] - component = str(Path(*dir_.parts[1:])) + component = "/".join(dir_.parts[1:]) component_in_file = False - git_url = None + git_url = "" + for repo in missing_installation: if component_type in missing_installation[repo]: if install_dir in missing_installation[repo][component_type]: @@ -452,9 +505,7 @@ def parse_dirs(self, dirs, missing_installation, 
component_type): # Check if the entry has a git sha and branch before removing components_dict = module_repo[component_type][install_dir] if "git_sha" not in components_dict[component] or "branch" not in components_dict[component]: - self.determine_branches_and_shas( - component_type, component, git_url, module_repo["base_path"], [component] - ) + self.determine_branches_and_shas(component_type, component, git_url, [component]) # Remove the module/subworkflow from modules/subworkflows without installation module_repo[component_type][install_dir].pop(component) if len(module_repo[component_type][install_dir]) == 0: @@ -469,13 +520,14 @@ def parse_dirs(self, dirs, missing_installation, component_type): return untracked_dirs, missing_installation - def has_git_url_and_modules(self): + def has_git_url_and_modules(self) -> bool: """ Check that all repo entries in the modules.json have a git url and a modules dict entry Returns: (bool): True if they are found for all repos, False otherwise """ + assert self.modules_json is not None # mypy for repo_url, repo_entry in self.modules_json.get("repos", {}).items(): if "modules" not in repo_entry: if "subworkflows" in repo_entry: @@ -557,6 +609,8 @@ def check_up_to_date(self): self.load() if not self.has_git_url_and_modules(): raise UserWarning + + assert self.modules_json is not None # mypy # check that all "installed_by" entries are lists and not strings # [these strings come from an older dev version, so this check can probably be removed in a future release] for _, repo_entry in self.modules_json.get("repos", {}).items(): @@ -600,7 +654,7 @@ def check_up_to_date(self): if len(subworkflows_missing_from_modules_json) > 0: dump_modules_json = True self.resolve_missing_from_modules_json(subworkflows_missing_from_modules_json, "subworkflows") - + assert self.modules_json is not None # mypy # If the "installed_by" value is not present for modules/subworkflows, add it.
for repo, repo_content in self.modules_json["repos"].items(): for component_type, dir_content in repo_content.items(): @@ -625,8 +679,9 @@ def check_up_to_date(self): if dump_modules_json: self.dump(run_prettier=True) + return True - def load(self): + def load(self) -> None: """ Loads the modules.json file into the variable 'modules_json' @@ -647,14 +702,14 @@ def load(self): def update( self, - component_type, - modules_repo, - component_name, - component_version, - installed_by, - installed_by_log=None, - write_file=True, - ): + component_type: str, + modules_repo: ModulesRepo, + component_name: str, + component_version: str, + installed_by: Optional[List[str]], + installed_by_log: Optional[List[str]] = None, + write_file: bool = True, + ) -> bool: """ Updates the 'module.json' file with new module/subworkflow info @@ -674,23 +729,23 @@ def update( if self.modules_json is None: self.load() + assert self.modules_json is not None # mypy repo_name = modules_repo.repo_path remote_url = modules_repo.remote_url branch = modules_repo.branch + if remote_url not in self.modules_json["repos"]: self.modules_json["repos"][remote_url] = {component_type: {repo_name: {}}} if component_type not in self.modules_json["repos"][remote_url]: self.modules_json["repos"][remote_url][component_type] = {repo_name: {}} repo_component_entry = self.modules_json["repos"][remote_url][component_type][repo_name] if component_name not in repo_component_entry: - repo_component_entry[component_name] = {} + repo_component_entry[component_name] = {"branch": "", "git_sha": "", "installed_by": []} repo_component_entry[component_name]["git_sha"] = component_version repo_component_entry[component_name]["branch"] = branch try: if installed_by not in repo_component_entry[component_name]["installed_by"] and installed_by is not None: - repo_component_entry[component_name]["installed_by"].append(installed_by) - except KeyError: - repo_component_entry[component_name]["installed_by"] = [installed_by] + repo_component_entry[component_name]["installed_by"] += installed_by finally: new_installed_by = repo_component_entry[component_name]["installed_by"] + list(installed_by_log) repo_component_entry[component_name]["installed_by"] = sorted([*set(new_installed_by)]) @@ -756,6 +811,8 @@ def add_patch_entry(self, module_name, repo_url, install_dir, patch_filename, wr """ if self.modules_json is None: self.load() + assert self.modules_json is not None # mypy + if repo_url not in self.modules_json["repos"]: raise LookupError(f"Repo '{repo_url}' not present in 'modules.json'") if module_name not in self.modules_json["repos"][repo_url]["modules"][install_dir]: @@ -767,6 +824,8 @@ def add_patch_entry(self, module_name, repo_url, install_dir, patch_filename, wr def remove_patch_entry(self, module_name, repo_url, install_dir, write_file=True): if self.modules_json is None: self.load() + assert self.modules_json is not None # mypy + try: del self.modules_json["repos"][repo_url]["modules"][install_dir][module_name]["patch"] except KeyError: @@ -788,6 +847,7 @@ def get_patch_fn(self, module_name, repo_url, install_dir): """ if self.modules_json is None: self.load() + assert self.modules_json is not None # mypy path = ( self.modules_json["repos"] .get(repo_url, {}) @@ -815,7 +875,7 @@ def try_apply_patch_reverse(self, module, repo_name, patch_relpath, module_dir): LookupError: If patch was not applied """ module_fullname = str(Path(repo_name, module)) - patch_path = Path(self.dir / patch_relpath) + patch_path = Path(self.directory / patch_relpath) try: 
new_files = ModulesDiffer.try_apply_patch(module, repo_name, patch_path, module_dir, reverse=True) @@ -844,6 +904,8 @@ def repo_present(self, repo_name): """ if self.modules_json is None: self.load() + assert self.modules_json is not None # mypy + return repo_name in self.modules_json.get("repos", {}) def module_present(self, module_name, repo_url, install_dir): @@ -858,11 +920,12 @@ def module_present(self, module_name, repo_url, install_dir): """ if self.modules_json is None: self.load() + assert self.modules_json is not None # mypy return module_name in self.modules_json.get("repos", {}).get(repo_url, {}).get("modules", {}).get( install_dir, {} ) - def get_modules_json(self): + def get_modules_json(self) -> ModulesJsonType: """ Returns a copy of the loaded modules.json @@ -871,6 +934,7 @@ def get_modules_json(self): """ if self.modules_json is None: self.load() + assert self.modules_json is not None # mypy return copy.deepcopy(self.modules_json) def get_component_version(self, component_type, component_name, repo_url, install_dir): @@ -887,6 +951,7 @@ def get_component_version(self, component_type, component_name, repo_url, instal """ if self.modules_json is None: self.load() + assert self.modules_json is not None # mypy return ( self.modules_json.get("repos", {}) .get(repo_url, {}) @@ -896,7 +961,7 @@ def get_component_version(self, component_type, component_name, repo_url, instal .get("git_sha", None) ) - def get_module_version(self, module_name, repo_url, install_dir): + def get_module_version(self, module_name: str, repo_url: str, install_dir: str) -> Optional[str]: """ Returns the version of a module @@ -910,14 +975,12 @@ def get_module_version(self, module_name, repo_url, install_dir): """ if self.modules_json is None: self.load() - return ( - self.modules_json.get("repos", {}) - .get(repo_url, {}) - .get("modules", {}) - .get(install_dir, {}) - .get(module_name, {}) - .get("git_sha", None) - ) + assert self.modules_json is not None # mypy + try: + sha = self.modules_json["repos"][repo_url]["modules"][install_dir][module_name]["git_sha"] + except KeyError: + sha = None + return sha def get_subworkflow_version(self, subworkflow_name, repo_url, install_dir): """ @@ -933,6 +996,7 @@ def get_subworkflow_version(self, subworkflow_name, repo_url, install_dir): """ if self.modules_json is None: self.load() + assert self.modules_json is not None # mypy return ( self.modules_json.get("repos", {}) .get(repo_url, {}) @@ -942,7 +1006,7 @@ def get_subworkflow_version(self, subworkflow_name, repo_url, install_dir): .get("git_sha", None) ) - def get_all_components(self, component_type): + def get_all_components(self, component_type: str) -> Dict[str, List[Tuple[(str, str)]]]: """ Retrieves all pipeline modules/subworkflows that are reported in the modules.json @@ -952,12 +1016,14 @@ def get_all_components(self, component_type): """ if self.modules_json is None: self.load() + assert self.modules_json is not None # mypy + if self.pipeline_components is None: self.pipeline_components = {} for repo, repo_entry in self.modules_json.get("repos", {}).items(): if component_type in repo_entry: - for dir, components in repo_entry[component_type].items(): - self.pipeline_components[repo] = [(dir, m) for m in components] + for directory, components in repo_entry[component_type].items(): + self.pipeline_components[repo] = [(directory, m) for m in components] return self.pipeline_components @@ -985,6 +1051,7 @@ def get_dependent_components( if self.modules_json is None: self.load() + assert self.modules_json 
is not None # mypy component_types = ["modules"] if component_type == "modules" else ["modules", "subworkflows"] # Find all components that have an 'installed_by' entry for a given component; recursively call this function for subworkflows for type in component_types: @@ -1014,17 +1081,18 @@ def get_installed_by_entries(self, component_type, name): """ if self.modules_json is None: self.load() + assert self.modules_json is not None # mypy installed_by_entries = {} - for repo_url, repo_entry in self.modules_json.get("repos", {}).items(): + for _, repo_entry in self.modules_json.get("repos", {}).items(): if component_type in repo_entry: - for install_dir, components in repo_entry[component_type].items(): + for _, components in repo_entry[component_type].items(): if name in components: installed_by_entries = components[name]["installed_by"] break return installed_by_entries - def get_component_branch(self, component_type, component, repo_url, install_dir): + def get_component_branch(self, component_type: str, component: str, repo_url: str, install_dir: str) -> str: """ Gets the branch from which the module/subworkflow was installed @@ -1035,14 +1103,11 @@ def get_component_branch(self, component_type, component, repo_url, install_dir) """ if self.modules_json is None: self.load() - branch = ( - self.modules_json["repos"] - .get(repo_url, {}) - .get(component_type, {}) - .get(install_dir, {}) - .get(component, {}) - .get("branch") - ) + assert self.modules_json is not None # mypy + try: + branch = self.modules_json["repos"][repo_url][component_type][install_dir][component]["branch"] + except (KeyError, TypeError): + branch = None if branch is None: raise LookupError( f"Could not find branch information for component '{Path(install_dir, component)}'." @@ -1063,7 +1128,7 @@ def dump(self, run_prettier: bool = False) -> None: with open(self.modules_json_path, "w") as fh: json.dump(self.modules_json, fh, indent=4) - def resolve_missing_installation(self, missing_installation, component_type): + def resolve_missing_installation(self, missing_installation: Dict, component_type: str) -> None: missing_but_in_mod_json = [ f"'{component_type}/{install_dir}/{component}'" for repo_url, contents in missing_installation.items() @@ -1094,7 +1159,8 @@ def resolve_missing_installation(self, missing_installation, component_type): log.info( f"Was unable to reinstall some {component_type}.
Removing 'modules.json' entries: {', '.join(uninstallable_components)}" ) - + if self.modules_json is None: + raise UserWarning("No modules.json file found") for (repo_url, install_dir), component_entries in remove_from_mod_json.items(): for component in component_entries: self.modules_json["repos"][repo_url][component_type][install_dir].pop(component) @@ -1111,14 +1177,15 @@ def resolve_missing_from_modules_json(self, missing_from_modules_json, component log.info( f"Recomputing commit SHAs for {component_type} which were missing from 'modules.json': {', '.join(format_missing)}" ) - + assert self.modules_json is not None # mypy # Get the remotes we are missing tracked_repos = {repo_url: (repo_entry) for repo_url, repo_entry in self.modules_json["repos"].items()} repos, _ = self.get_pipeline_module_repositories(component_type, self.modules_dir, tracked_repos) # Get tuples of components that miss installation and their install directory + def components_with_repos(): - for dir in missing_from_modules_json: + for directory in missing_from_modules_json: for repo_url in repos: modules_repo = ModulesRepo(repo_url) paths_in_directory = [] @@ -1128,12 +1195,12 @@ def components_with_repos(): ) for dir_name, _, _ in os.walk(repo_url_path): if component_type == "modules": - if len(Path(dir).parts) > 1: # The module name is TOOL/SUBTOOL + if len(Path(directory).parts) > 1: # The module name is TOOL/SUBTOOL paths_in_directory.append(str(Path(*Path(dir_name).parts[-2:]))) pass paths_in_directory.append(Path(dir_name).parts[-1]) - if dir in paths_in_directory: - yield (modules_repo.repo_path, dir) + if directory in paths_in_directory: + yield (modules_repo.repo_path, directory) # Add all components into a dictionary with install directories repos_with_components = {} @@ -1184,7 +1251,7 @@ def recreate_dependencies(self, repo, org, subworkflow): sw_path = Path(self.subworkflows_dir, org, subworkflow) dep_mods, dep_subwfs = get_components_to_install(sw_path) - + assert self.modules_json is not None # mypy for dep_mod in dep_mods: installed_by = self.modules_json["repos"][repo]["modules"][org][dep_mod]["installed_by"] if installed_by == ["modules"]: diff --git a/nf_core/modules/modules_repo.py b/nf_core/modules/modules_repo.py index 969492027..357fc49cc 100644 --- a/nf_core/modules/modules_repo.py +++ b/nf_core/modules/modules_repo.py @@ -1,24 +1,23 @@ import logging import os import shutil +from pathlib import Path +from typing import Optional import git import rich import rich.progress +import rich.prompt from git.exc import GitCommandError, InvalidGitRepositoryError import nf_core.modules.modules_json import nf_core.modules.modules_utils +from nf_core.components.components_utils import NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE from nf_core.synced_repo import RemoteProgressbar, SyncedRepo from nf_core.utils import NFCORE_CACHE_DIR, NFCORE_DIR, load_tools_config log = logging.getLogger(__name__) -# Constants for the nf-core/modules repo used throughout the module files -NF_CORE_MODULES_NAME = "nf-core" -NF_CORE_MODULES_REMOTE = "https://github.com/nf-core/modules.git" -NF_CORE_MODULES_DEFAULT_BRANCH = "master" - class ModulesRepo(SyncedRepo): """ @@ -35,7 +34,13 @@ class ModulesRepo(SyncedRepo): local_repo_statuses = {} no_pull_global = False - def __init__(self, remote_url=None, branch=None, no_pull=False, hide_progress=False): + def __init__( + self, + remote_url: Optional[str] = None, + branch: Optional[str] = None, + no_pull: bool = False, + hide_progress: bool = False, + ) -> None: """ Initializes 
the object and clones the git repository if it is not already present """ @@ -54,18 +59,21 @@ def __init__(self, remote_url=None, branch=None, no_pull=False, hide_progress=Fa self.setup_local_repo(remote_url, branch, hide_progress) config_fn, repo_config = load_tools_config(self.local_repo_dir) + if config_fn is None or repo_config is None: + raise UserWarning(f"Could not find a configuration file in {self.local_repo_dir}") try: - self.repo_path = repo_config["org_path"] + self.repo_path = repo_config.org_path except KeyError: raise UserWarning(f"'org_path' key not present in {config_fn.name}") # Verify that the repo seems to be correctly configured if self.repo_path != NF_CORE_MODULES_NAME or self.branch: self.verify_branch() - + if self.repo_path is None: + raise UserWarning(f"Could not find the org_path in the configuration file: {config_fn.name}") # Convenience variable - self.modules_dir = os.path.join(self.local_repo_dir, "modules", self.repo_path) - self.subworkflows_dir = os.path.join(self.local_repo_dir, "subworkflows", self.repo_path) + self.modules_dir = Path(self.local_repo_dir, "modules", self.repo_path) + self.subworkflows_dir = Path(self.local_repo_dir, "subworkflows", self.repo_path) self.avail_module_names = None @@ -86,7 +94,7 @@ def setup_local_repo(self, remote, branch, hide_progress=True, in_cache=False): branch (str): name of branch to use Sets self.repo """ - self.local_repo_dir = os.path.join(NFCORE_DIR if not in_cache else NFCORE_CACHE_DIR, self.fullname) + self.local_repo_dir = Path(NFCORE_DIR if not in_cache else NFCORE_CACHE_DIR, self.fullname) try: if not os.path.exists(self.local_repo_dir): try: diff --git a/nf_core/modules/modules_utils.py b/nf_core/modules/modules_utils.py index 6796de41e..ecfe5f24e 100644 --- a/nf_core/modules/modules_utils.py +++ b/nf_core/modules/modules_utils.py @@ -36,7 +36,7 @@ def repo_full_name_from_remote(remote_url: str) -> str: return path -def get_installed_modules(dir: str, repo_type="modules") -> Tuple[List[str], List[NFCoreComponent]]: +def get_installed_modules(directory: Path, repo_type="modules") -> Tuple[List[str], List[NFCoreComponent]]: """ Make a list of all modules installed in this repository @@ -52,15 +52,15 @@ def get_installed_modules(dir: str, repo_type="modules") -> Tuple[List[str], Lis # initialize lists local_modules: List[str] = [] nfcore_modules_names: List[str] = [] - local_modules_dir: Optional[str] = None - nfcore_modules_dir = os.path.join(dir, "modules", "nf-core") + local_modules_dir: Optional[Path] = None + nfcore_modules_dir = Path(directory, "modules", "nf-core") # Get local modules if repo_type == "pipeline": - local_modules_dir = os.path.join(dir, "modules", "local") + local_modules_dir = Path(directory, "modules", "local") # Filter local modules - if os.path.exists(local_modules_dir): + if local_modules_dir.exists(): local_modules = os.listdir(local_modules_dir) local_modules = sorted([x for x in local_modules if x.endswith(".nf")]) @@ -89,7 +89,7 @@ def get_installed_modules(dir: str, repo_type="modules") -> Tuple[List[str], Lis "nf-core/modules", Path(nfcore_modules_dir, m), repo_type=repo_type, - base_dir=Path(dir), + base_dir=directory, component_type="modules", ) for m in nfcore_modules_names diff --git a/nf_core/modules/patch.py b/nf_core/modules/patch.py index b4e86f2d1..bbad0d428 100644 --- a/nf_core/modules/patch.py +++ b/nf_core/modules/patch.py @@ -6,5 +6,5 @@ class ModulePatch(ComponentPatch): - def __init__(self, pipeline_dir, remote_url=None, branch=None, no_pull=False, 
installed_by=False): + def __init__(self, pipeline_dir, remote_url=None, branch=None, no_pull=False, installed_by=None): super().__init__(pipeline_dir, "modules", remote_url, branch, no_pull, installed_by) diff --git a/nf_core/modules/update.py b/nf_core/modules/update.py index 9d53bf201..f6cf5235a 100644 --- a/nf_core/modules/update.py +++ b/nf_core/modules/update.py @@ -15,6 +15,7 @@ def __init__( remote_url=None, branch=None, no_pull=False, + limit_output=False, ): super().__init__( pipeline_dir, @@ -29,4 +30,5 @@ def __init__( remote_url, branch, no_pull, + limit_output, ) diff --git a/nf_core/pipeline-template/.editorconfig b/nf_core/pipeline-template/.editorconfig index 72dda289a..1db986f5b 100644 --- a/nf_core/pipeline-template/.editorconfig +++ b/nf_core/pipeline-template/.editorconfig @@ -11,6 +11,7 @@ indent_style = space [*.{md,yml,yaml,html,css,scss,js}] indent_size = 2 +{% if modules %} # These files are edited and tested upstream in nf-core/modules [/modules/nf-core/**] charset = unset @@ -24,9 +25,12 @@ end_of_line = unset insert_final_newline = unset trim_trailing_whitespace = unset indent_style = unset +{% endif %} +{% if email %} [/assets/email*] indent_size = unset +{% endif %} # ignore python and markdown [*.{py,md}] diff --git a/nf_core/pipeline-template/.github/CONTRIBUTING.md b/nf_core/pipeline-template/.github/CONTRIBUTING.md index 6244a6544..63cddcb7f 100644 --- a/nf_core/pipeline-template/.github/CONTRIBUTING.md +++ b/nf_core/pipeline-template/.github/CONTRIBUTING.md @@ -9,7 +9,7 @@ Please use the pre-filled template to save time. However, don't be put off by this template - other more general issues and suggestions are welcome! Contributions to the code are even more welcome ;) -{% if branded -%} +{% if is_nfcore -%} > [!NOTE] > If you need help using or modifying {{ name }} then the best place to ask is on the nf-core Slack [#{{ short_name }}](https://nfcore.slack.com/channels/{{ short_name }}) channel ([join our Slack here](https://nf-co.re/join/slack)). @@ -23,37 +23,42 @@ If you'd like to write some code for {{ name }}, the standard workflow is as fol 1. Check that there isn't already an issue about your idea in the [{{ name }} issues](https://github.com/{{ name }}/issues) to avoid duplicating work. If there isn't one already, please create one so that others know you're working on this 2. [Fork](https://help.github.com/en/github/getting-started-with-github/fork-a-repo) the [{{ name }} repository](https://github.com/{{ name }}) to your GitHub account 3. Make the necessary changes / additions within your forked repository following [Pipeline conventions](#pipeline-contribution-conventions) -4. Use `nf-core schema build` and add any new parameters to the pipeline JSON schema (requires [nf-core tools](https://github.com/nf-core/tools) >= 1.10). +4. Use `nf-core pipelines schema build` and add any new parameters to the pipeline JSON schema (requires [nf-core tools](https://github.com/nf-core/tools) >= 1.10). 5. Submit a Pull Request against the `dev` branch and wait for the code to be reviewed and merged If you're not used to this workflow with git, you can start with some [docs from GitHub](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests) or even their [excellent `git` resources](https://try.github.io/). ## Tests +{%- if test_config %} You have the option to test your changes locally by running the pipeline. To receive warnings about process selectors and other `debug` information, it is recommended to use the debug profile.
Execute all the tests with the following command: ```bash nf-test test --profile debug,test,docker --verbose ``` +{% endif -%} When you create a pull request with changes, [GitHub Actions](https://github.com/features/actions) will run automatic tests. Typically, pull-requests are only fully reviewed when these tests are passing, though of course we can help out before then. -There are typically two types of tests that run: +{% if test_config %}There are typically two types of tests that run:{% endif %} ### Lint tests `nf-core` has a [set of guidelines](https://nf-co.re/developers/guidelines) which all pipelines must adhere to. -To enforce these and ensure that all pipelines stay in sync, we have developed a helper tool which runs checks on the pipeline code. This is in the [nf-core/tools repository](https://github.com/nf-core/tools) and once installed can be run locally with the `nf-core lint` command. +To enforce these and ensure that all pipelines stay in sync, we have developed a helper tool which runs checks on the pipeline code. This is in the [nf-core/tools repository](https://github.com/nf-core/tools) and once installed can be run locally with the `nf-core pipelines lint` command. If any failures or warnings are encountered, please follow the listed URL for more documentation. +{%- if test_config %} + ### Pipeline tests Each `nf-core` pipeline should be set up with a minimal set of test-data. `GitHub Actions` then runs the pipeline on this data to ensure that it exits successfully. If there are any failures then the automated tests fail. These tests are run both with the latest available version of `Nextflow` and also the minimum required version that is stated in the pipeline code. +{%- endif %} ## Patch @@ -63,7 +68,7 @@ These tests are run both with the latest available version of `Nextflow` and als - Fix the bug, and bump version (X.Y.Z+1). - A PR should be made on `master` from patch to directly address this particular bug. -{% if branded -%} +{% if is_nfcore -%} ## Getting help @@ -83,18 +88,20 @@ If you wish to contribute a new step, please use the following coding standards: 2. Write the process block (see below). 3. Define the output channel if needed (see below). 4. Add any new parameters to `nextflow.config` with a default (see below). -5. Add any new parameters to `nextflow_schema.json` with help text (via the `nf-core schema build` tool). +5. Add any new parameters to `nextflow_schema.json` with help text (via the `nf-core pipelines schema build` tool). 6. Add sanity checks and validation for all relevant parameters. 7. Perform local tests to validate that the new code works as expected. 8. If applicable, add a new test command in `.github/workflow/ci.yml`. + {%- if multiqc %} 9. Update MultiQC config `assets/multiqc_config.yml` so relevant suffixes, file name clean up and module plots are in the appropriate order. If applicable, add a [MultiQC](https://multiqc.info/) module. 10. Add a description of the output files and if relevant any appropriate images from the MultiQC report to `docs/output.md`. + {%- endif %} ### Default values Parameters should be initialised / defined with default values in `nextflow.config` under the `params` scope. -Once there, use `nf-core schema build` to add to `nextflow_schema.json`. +Once there, use `nf-core pipelines schema build` to add to `nextflow_schema.json`.
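As an illustration of the previous step, this is roughly the kind of JSON Schema property that `nf-core pipelines schema build` adds to `nextflow_schema.json` for a newly introduced parameter; the `--min_reads` parameter, its default, and the help text below are hypothetical placeholders, not part of any real pipeline.

```python
import json

# Hypothetical parameter: first given a default in nextflow.config under the
# `params` scope, then described in nextflow_schema.json via
# `nf-core pipelines schema build`.
new_property = {
    "min_reads": {
        "type": "integer",
        "default": 1,
        "description": "Minimum number of reads required per sample.",
        "help_text": "Samples with fewer reads are skipped before processing.",
    }
}

print(json.dumps(new_property, indent=4))
```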
### Default processes resource requirements @@ -111,11 +118,12 @@ Please use the following naming schemes, to make it easy to understand what is g ### Nextflow version bumping -If you are using a new feature from core Nextflow, you may bump the minimum required version of nextflow in the pipeline with: `nf-core bump-version --nextflow . [min-nf-version]` +If you are using a new feature from core Nextflow, you may bump the minimum required version of nextflow in the pipeline with: `nf-core pipelines bump-version --nextflow . [min-nf-version]` ### Images and figures For overview images and other documents we follow the nf-core [style guidelines and examples](https://nf-co.re/developers/design_guidelines). +{%- if codespaces %} ## GitHub Codespaces @@ -131,3 +139,4 @@ To get started: Devcontainer specs: - [DevContainer config](.devcontainer/devcontainer.json) + {% endif %} diff --git a/nf_core/pipeline-template/.github/ISSUE_TEMPLATE/bug_report.yml b/nf_core/pipeline-template/.github/ISSUE_TEMPLATE/bug_report.yml index 063690f29..412f5bd3b 100644 --- a/nf_core/pipeline-template/.github/ISSUE_TEMPLATE/bug_report.yml +++ b/nf_core/pipeline-template/.github/ISSUE_TEMPLATE/bug_report.yml @@ -2,6 +2,7 @@ name: Bug report description: Report something that is broken or incorrect labels: bug body: +{%- if is_nfcore %} - type: markdown attributes: value: | @@ -9,6 +10,7 @@ body: - [nf-core website: troubleshooting](https://nf-co.re/usage/troubleshooting) - [{{ name }} pipeline documentation](https://nf-co.re/{{ short_name }}/usage) +{%- endif %} - type: textarea id: description diff --git a/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md b/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md index 4f01a9799..c96f2dd4c 100644 --- a/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md +++ b/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md @@ -16,12 +16,14 @@ Learn more about contributing: [CONTRIBUTING.md](https://github.com/{{ name }}/t - [ ] This comment contains a description of changes (with reason). - [ ] If you've fixed a bug or added code that should be tested, add tests! - [ ] If you've added a new tool - have you followed the pipeline conventions in the [contribution docs](https://github.com/{{ name }}/tree/master/.github/CONTRIBUTING.md) - {%- if branded %} + {%- if is_nfcore %} - [ ] If necessary, also make a PR on the {{ name }} _branch_ on the [nf-core/test-datasets](https://github.com/nf-core/test-datasets) repository. {%- endif %} -- [ ] Make sure your code lints (`nf-core lint`). +- [ ] Make sure your code lints (`nf-core pipelines lint`). + {%- if test_config %} - [ ] Ensure the test suite passes (`nextflow run . -profile test,docker --outdir `). - [ ] Check for unexpected warnings in debug mode (`nextflow run . -profile debug,test,docker --outdir `). + {%- endif %} - [ ] Usage Documentation in `docs/usage.md` is updated. - [ ] Output Documentation in `docs/output.md` is updated. - [ ] `CHANGELOG.md` is updated. diff --git a/nf_core/pipeline-template/.github/workflows/awsfulltest.yml b/nf_core/pipeline-template/.github/workflows/awsfulltest.yml index 56ecb6030..dc0450be4 100644 --- a/nf_core/pipeline-template/.github/workflows/awsfulltest.yml +++ b/nf_core/pipeline-template/.github/workflows/awsfulltest.yml @@ -1,18 +1,33 @@ name: nf-core AWS full size tests -# This workflow is triggered on published releases. +# This workflow is triggered on PRs opened against the master branch. 
# It can be additionally triggered manually with GitHub actions workflow dispatch button. # It runs the -profile 'test_full' on AWS batch on: - release: - types: [published] + pull_request: + branches: + - master workflow_dispatch: + pull_request_review: + types: [submitted] + jobs: run-platform: name: Run AWS full tests - if: github.repository == '{{ name }}' + if: github.repository == '{{ name }}' && github.event.review.state == 'approved' runs-on: ubuntu-latest steps: + - uses: octokit/request-action@v2.x + id: check_approvals + with: + route: GET /repos/{%- raw -%}${{ github.repository }}/pulls/${{ github.event.pull_request.number }}/reviews + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - id: test_variables + run: | + JSON_RESPONSE='${{ steps.check_approvals.outputs.data }}'{% endraw %} + CURRENT_APPROVALS_COUNT=$(echo $JSON_RESPONSE | jq -c '[.[] | select(.state | contains("APPROVED")) ] | length') + test $CURRENT_APPROVALS_COUNT -ge 2 || exit 1 # At least 2 approvals are required - name: Launch workflow via Seqera Platform uses: seqeralabs/action-tower-launch@v2 # TODO nf-core: You can customise AWS full pipeline tests as required diff --git a/nf_core/pipeline-template/.github/workflows/ci.yml b/nf_core/pipeline-template/.github/workflows/ci.yml index 6b2547765..eefb59496 100644 --- a/nf_core/pipeline-template/.github/workflows/ci.yml +++ b/nf_core/pipeline-template/.github/workflows/ci.yml @@ -7,6 +7,7 @@ on: pull_request: release: types: [published] + workflow_dispatch: env: NXF_ANSI_LOG: false @@ -24,7 +25,7 @@ jobs: strategy: matrix: NXF_VER: - - "23.04.0" + - "24.04.2" - "latest-everything" steps: - name: Check out pipeline code @@ -38,9 +39,21 @@ - name: Disk space cleanup uses: jlumbroso/free-disk-space@54081f138730dfa15788a46383842cd2f914a1be # v1.3.1 - - name: Run pipeline with test data + - name: Run pipeline with test data (docker) # TODO nf-core: You can customise CI pipeline run tests as required # For example: adding multiple test runs with different parameters # Remember that you can parallelise this by using strategy.matrix run: | nextflow run ${GITHUB_WORKSPACE} -profile test,docker --outdir ./results + + - name: Run pipeline with test data (singularity) + # TODO nf-core: You can customise CI pipeline run tests as required + run: | + nextflow run ${GITHUB_WORKSPACE} -profile test,singularity --outdir ./results + if: "{% raw %}${{ github.base_ref == 'master' }}{% endraw %}" + + - name: Run pipeline with test data (conda) + # TODO nf-core: You can customise CI pipeline run tests as required + run: | + nextflow run ${GITHUB_WORKSPACE} -profile test,conda --outdir ./results + if: "{% raw %}${{ github.base_ref == 'master' }}{% endraw %}" diff --git a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml index ebea16c5c..29b994754 100644 --- a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml +++ b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml @@ -1,4 +1,4 @@ -name: Test successful pipeline download with 'nf-core download' +name: Test successful pipeline download with 'nf-core pipelines download' # Run the workflow when: # - dispatched manually @@ -8,7 +8,7 @@ on: workflow_dispatch: inputs: testbranch: - description: "The specific branch you wish to utilize for the test execution of nf-core download." + description: "The specific branch you wish to utilize for the test execution of nf-core pipelines download."
required: true default: "dev" pull_request: @@ -39,9 +39,11 @@ jobs: with: python-version: "3.12" architecture: "x64" - - uses: eWaterCycle/setup-singularity@931d4e31109e875b13309ae1d07c70ca8fbc8537 # v7 + + - name: Setup Apptainer + uses: eWaterCycle/setup-apptainer@4bb22c52d4f63406c49e94c804632975787312b3 # v2.0.0 with: - singularity-version: 3.8.3 + apptainer-version: 1.3.4 - name: Install dependencies run: | @@ -54,33 +56,65 @@ jobs: echo "REPOTITLE_LOWERCASE=$(basename ${GITHUB_REPOSITORY,,})" >> ${GITHUB_ENV} echo "{% raw %}REPO_BRANCH=${{ github.event.inputs.testbranch || 'dev' }}" >> ${GITHUB_ENV} + - name: Make a cache directory for the container images + run: | + mkdir -p ./singularity_container_images + - name: Download the pipeline env: - NXF_SINGULARITY_CACHEDIR: ./ + NXF_SINGULARITY_CACHEDIR: ./singularity_container_images run: | - nf-core download ${{ env.REPO_LOWERCASE }} \ + nf-core pipelines download ${{ env.REPO_LOWERCASE }} \ --revision ${{ env.REPO_BRANCH }} \ --outdir ./${{ env.REPOTITLE_LOWERCASE }} \ --compress "none" \ --container-system 'singularity' \ - --container-library "quay.io" -l "docker.io" -l "ghcr.io" \ + --container-library "quay.io" -l "docker.io" -l "community.wave.seqera.io" \ --container-cache-utilisation 'amend' \ - --download-configuration + --download-configuration 'yes' - name: Inspect download - run: tree ./${{ env.REPOTITLE_LOWERCASE }} + run: tree ./${{ env.REPOTITLE_LOWERCASE }}{% endraw %}{% if test_config %}{% raw %} + + - name: Count the downloaded number of container images + id: count_initial + run: | + image_count=$(ls -1 ./singularity_container_images | wc -l | xargs) + echo "Initial container image count: $image_count" + echo "IMAGE_COUNT_INITIAL=$image_count" >> ${GITHUB_ENV} - name: Run the downloaded pipeline (stub) id: stub_run_pipeline continue-on-error: true env: - NXF_SINGULARITY_CACHEDIR: ./ + NXF_SINGULARITY_CACHEDIR: ./singularity_container_images NXF_SINGULARITY_HOME_MOUNT: true run: nextflow run ./${{ env.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ env.REPO_BRANCH }}) -stub -profile test,singularity --outdir ./results - name: Run the downloaded pipeline (stub run not supported) id: run_pipeline if: ${{ job.steps.stub_run_pipeline.status == failure() }} env: - NXF_SINGULARITY_CACHEDIR: ./ + NXF_SINGULARITY_CACHEDIR: ./singularity_container_images NXF_SINGULARITY_HOME_MOUNT: true - run: nextflow run ./${{ env.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ env.REPO_BRANCH }}) -profile test,singularity --outdir ./results{% endraw %} + run: nextflow run ./${{ env.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ env.REPO_BRANCH }}) -profile test,singularity --outdir ./results + + - name: Count the downloaded number of container images + id: count_afterwards + run: | + image_count=$(ls -1 ./singularity_container_images | wc -l | xargs) + echo "Post-pipeline run container image count: $image_count" + echo "IMAGE_COUNT_AFTER=$image_count" >> ${GITHUB_ENV} + + - name: Compare container image counts + run: | + if [ "${{ env.IMAGE_COUNT_INITIAL }}" -ne "${{ env.IMAGE_COUNT_AFTER }}" ]; then + initial_count=${{ env.IMAGE_COUNT_INITIAL }} + final_count=${{ env.IMAGE_COUNT_AFTER }} + difference=$((final_count - initial_count)) + echo "$difference additional container images were downloaded at runtime. The pipeline has no support for offline runs!" + tree ./singularity_container_images + exit 1 + else + echo "The pipeline can be downloaded successfully!"
+ fi + {% endraw %}{% endif %} diff --git a/nf_core/pipeline-template/.github/workflows/linting.yml b/nf_core/pipeline-template/.github/workflows/linting.yml index b2cde075f..dbba830ec 100644 --- a/nf_core/pipeline-template/.github/workflows/linting.yml +++ b/nf_core/pipeline-template/.github/workflows/linting.yml @@ -1,6 +1,6 @@ name: nf-core linting # This workflow is triggered on pushes and PRs to the repository. -# It runs the `nf-core lint` and markdown lint tests to ensure +# It runs the `nf-core pipelines lint` and markdown lint tests to ensure # that the code meets the nf-core guidelines. {%- raw %} on: push: @@ -41,17 +41,32 @@ jobs: python-version: "3.12" architecture: "x64" + - name: read .nf-core.yml + uses: pietrobolcato/action-read-yaml@1.0.0 + id: read_yml + with: + config: ${{ github.workspace }}/.nf-core.yml + - name: Install dependencies run: | python -m pip install --upgrade pip - pip install nf-core + pip install nf-core==${{ steps.read_yml.outputs['nf_core_version'] }} + + - name: Run nf-core pipelines lint + if: ${{ github.base_ref != 'master' }} + env: + GITHUB_COMMENTS_URL: ${{ github.event.pull_request.comments_url }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GITHUB_PR_COMMIT: ${{ github.event.pull_request.head.sha }} + run: nf-core -l lint_log.txt pipelines lint --dir ${GITHUB_WORKSPACE} --markdown lint_results.md - - name: Run nf-core lint + - name: Run nf-core pipelines lint --release + if: ${{ github.base_ref == 'master' }} env: GITHUB_COMMENTS_URL: ${{ github.event.pull_request.comments_url }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_PR_COMMIT: ${{ github.event.pull_request.head.sha }} - run: nf-core -l lint_log.txt lint --dir ${GITHUB_WORKSPACE} --markdown lint_results.md + run: nf-core -l lint_log.txt pipelines lint --release --dir ${GITHUB_WORKSPACE} --markdown lint_results.md - name: Save PR number if: ${{ always() }} diff --git a/nf_core/pipeline-template/.github/workflows/linting_comment.yml b/nf_core/pipeline-template/.github/workflows/linting_comment.yml index ea408fd6f..908dcea15 100644 --- a/nf_core/pipeline-template/.github/workflows/linting_comment.yml +++ b/nf_core/pipeline-template/.github/workflows/linting_comment.yml @@ -11,7 +11,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Download lint results - uses: dawidd6/action-download-artifact@09f2f74827fd3a8607589e5ad7f9398816f540fe # v3 + uses: dawidd6/action-download-artifact@bf251b5aa9c2f7eeb574a96ee720e24f801b7c11 # v6 with: workflow: linting.yml workflow_conclusion: completed diff --git a/nf_core/pipeline-template/.github/workflows/release-announcements.yml b/nf_core/pipeline-template/.github/workflows/release-announcements.yml index 8fee061fd..035ed63bb 100644 --- a/nf_core/pipeline-template/.github/workflows/release-announcements.yml +++ b/nf_core/pipeline-template/.github/workflows/release-announcements.yml @@ -12,7 +12,7 @@ jobs: - name: get topics and convert to hashtags id: get_topics run: | - echo "topics=$(curl -s https://nf-co.re/pipelines.json | jq -r '.remote_workflows[] | select(.full_name == "${{ github.repository }}") | .topics[]' | awk '{print "#"$0}' | tr '\n' ' ')" >> $GITHUB_OUTPUT + echo "topics=$(curl -s https://nf-co.re/pipelines.json | jq -r '.remote_workflows[] | select(.full_name == "${{ github.repository }}") | .topics[]' | awk '{print "#"$0}' | tr '\n' ' ')" | sed 's/-//g' >> $GITHUB_OUTPUT - uses: rzr/fediverse-action@master with: diff --git a/nf_core/pipeline-template/.github/workflows/template_version_comment.yml
b/nf_core/pipeline-template/.github/workflows/template_version_comment.yml new file mode 100644 index 000000000..58db2eb63 --- /dev/null +++ b/nf_core/pipeline-template/.github/workflows/template_version_comment.yml @@ -0,0 +1,43 @@ +name: nf-core template version comment +# This workflow is triggered on PRs to check if the pipeline template version matches the latest nf-core version. +# It posts a comment to the PR, even if it comes from a fork.{%- raw %} + +on: pull_request_target + +jobs: + template_version: + runs-on: ubuntu-latest + steps: + - name: Check out pipeline code + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + + - name: Read template version from .nf-core.yml + uses: pietrobolcato/action-read-yaml@1.0.0 + id: read_yml + with: + config: ${{ github.workspace }}/.nf-core.yml + + - name: Install nf-core + run: | + python -m pip install --upgrade pip + pip install nf-core==${{ steps.read_yml.outputs['nf_core_version'] }} + + - name: Check nf-core outdated + id: nf_core_outdated + run: echo "OUTPUT=$(pip list --outdated | grep nf-core)" >> ${GITHUB_ENV} + + - name: Post nf-core template version comment + uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2 + if: | + ${{ env.OUTPUT != '' }} + with: + repo-token: ${{ secrets.NF_CORE_BOT_AUTH_TOKEN }} + allow-repeats: false + message: | + ## :warning: Newer version of the nf-core template is available. + + Your pipeline is using an old version of the nf-core template: ${{ steps.read_yml.outputs['nf_core_version'] }}. + Please update your pipeline to the latest version. + + For more documentation on how to update your pipeline, please see the [nf-core documentation](https://github.com/nf-core/tools?tab=readme-ov-file#sync-a-pipeline-with-the-template) and [Synchronisation documentation](https://nf-co.re/docs/contributing/sync).
+ #{%- endraw %} diff --git a/nf_core/pipeline-template/.gitpod.yml b/nf_core/pipeline-template/.gitpod.yml index 105a1821a..5907fb59c 100644 --- a/nf_core/pipeline-template/.gitpod.yml +++ b/nf_core/pipeline-template/.gitpod.yml @@ -4,17 +4,15 @@ tasks: command: | pre-commit install --install-hooks nextflow self-update - - name: unset JAVA_TOOL_OPTIONS - command: | - unset JAVA_TOOL_OPTIONS vscode: extensions: # based on nf-core.nf-core-extensionpack + #{%- if code_linters -%} - esbenp.prettier-vscode # Markdown/CommonMark linting and style checking for Visual Studio Code - - EditorConfig.EditorConfig # override user/workspace settings with settings found in .editorconfig files + - EditorConfig.EditorConfig # override user/workspace settings with settings found in .editorconfig files{% endif %} - Gruntfuggly.todo-tree # Display TODO and FIXME in a tree view in the activity bar - mechatroner.rainbow-csv # Highlight columns in csv files in different colors - # - nextflow.nextflow # Nextflow syntax highlighting + - nextflow.nextflow # Nextflow syntax highlighting - oderwat.indent-rainbow # Highlight indentation level - streetsidesoftware.code-spell-checker # Spelling checker for source code - charliermarsh.ruff # Code linter Ruff diff --git a/nf_core/pipeline-template/.pre-commit-config.yaml b/nf_core/pipeline-template/.pre-commit-config.yaml index 4dc0f1dcd..9e9f0e1c4 100644 --- a/nf_core/pipeline-template/.pre-commit-config.yaml +++ b/nf_core/pipeline-template/.pre-commit-config.yaml @@ -7,7 +7,7 @@ repos: - prettier@3.2.5 - repo: https://github.com/editorconfig-checker/editorconfig-checker.python - rev: "2.7.3" + rev: "3.0.3" hooks: - id: editorconfig-checker alias: ec diff --git a/nf_core/pipeline-template/.prettierignore b/nf_core/pipeline-template/.prettierignore index 437d763d0..c8e8ad9e1 100644 --- a/nf_core/pipeline-template/.prettierignore +++ b/nf_core/pipeline-template/.prettierignore @@ -1,6 +1,12 @@ +{%- if email %} email_template.html +{%- endif %} +{%- if adaptivecard %} adaptivecard.json +{%- endif %} +{%- if slackreport %} slackreport.json +{%- endif %} .nextflow* work/ data/ diff --git a/nf_core/pipeline-template/CITATIONS.md b/nf_core/pipeline-template/CITATIONS.md index 6edf8f620..01e97c141 100644 --- a/nf_core/pipeline-template/CITATIONS.md +++ b/nf_core/pipeline-template/CITATIONS.md @@ -8,15 +8,19 @@ > Di Tommaso P, Chatzou M, Floden EW, Barja PP, Palumbo E, Notredame C. Nextflow enables reproducible computational workflows. Nat Biotechnol. 2017 Apr 11;35(4):316-319. doi: 10.1038/nbt.3820. PubMed PMID: 28398311. +{%- if citations %} + ## Pipeline tools -- [FastQC](https://www.bioinformatics.babraham.ac.uk/projects/fastqc/) +{% if fastqc %}- [FastQC](https://www.bioinformatics.babraham.ac.uk/projects/fastqc/) - > Andrews, S. (2010). FastQC: A Quality Control Tool for High Throughput Sequence Data [Online]. +> Andrews, S. (2010). FastQC: A Quality Control Tool for High Throughput Sequence Data [Online]. +> {% endif %} -- [MultiQC](https://pubmed.ncbi.nlm.nih.gov/27312411/) +{% if multiqc %}- [MultiQC](https://pubmed.ncbi.nlm.nih.gov/27312411/) - > Ewels P, Magnusson M, Lundin S, Käller M. MultiQC: summarize analysis results for multiple tools and samples in a single report. Bioinformatics. 2016 Oct 1;32(19):3047-8. doi: 10.1093/bioinformatics/btw354. Epub 2016 Jun 16. PubMed PMID: 27312411; PubMed Central PMCID: PMC5039924. +> Ewels P, Magnusson M, Lundin S, Käller M. MultiQC: summarize analysis results for multiple tools and samples in a single report. Bioinformatics. 
2016 Oct 1;32(19):3047-8. doi: 10.1093/bioinformatics/btw354. Epub 2016 Jun 16. PubMed PMID: 27312411; PubMed Central PMCID: PMC5039924. +> {% endif %} ## Software packaging/containerisation tools @@ -39,3 +43,4 @@ - [Singularity](https://pubmed.ncbi.nlm.nih.gov/28494014/) > Kurtzer GM, Sochat V, Bauer MW. Singularity: Scientific containers for mobility of compute. PLoS One. 2017 May 11;12(5):e0177459. doi: 10.1371/journal.pone.0177459. eCollection 2017. PubMed PMID: 28494014; PubMed Central PMCID: PMC5426675. + > {%- endif %} diff --git a/nf_core/pipeline-template/README.md b/nf_core/pipeline-template/README.md index 88e0f1719..bcf159f46 100644 --- a/nf_core/pipeline-template/README.md +++ b/nf_core/pipeline-template/README.md @@ -1,4 +1,4 @@ -{% if branded -%} +{% if is_nfcore -%}

@@ -7,26 +7,30 @@

+{% else %} + +# {{ name }} + {% endif -%} {% if github_badges -%} [![GitHub Actions CI Status](https://github.com/{{ name }}/actions/workflows/ci.yml/badge.svg)](https://github.com/{{ name }}/actions/workflows/ci.yml) [![GitHub Actions Linting Status](https://github.com/{{ name }}/actions/workflows/linting.yml/badge.svg)](https://github.com/{{ name }}/actions/workflows/linting.yml){% endif -%} -{% if branded -%}[![AWS CI](https://img.shields.io/badge/CI%20tests-full%20size-FF9900?labelColor=000000&logo=Amazon%20AWS)](https://nf-co.re/{{ short_name }}/results){% endif -%} +{% if is_nfcore -%}[![AWS CI](https://img.shields.io/badge/CI%20tests-full%20size-FF9900?labelColor=000000&logo=Amazon%20AWS)](https://nf-co.re/{{ short_name }}/results){% endif -%} {%- if github_badges -%} [![Cite with Zenodo](http://img.shields.io/badge/DOI-10.5281/zenodo.XXXXXXX-1073c8?labelColor=000000)](https://doi.org/10.5281/zenodo.XXXXXXX) [![nf-test](https://img.shields.io/badge/unit_tests-nf--test-337ab7.svg)](https://www.nf-test.com) -[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A523.04.0-23aa62.svg)](https://www.nextflow.io/) +[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A524.04.2-23aa62.svg)](https://www.nextflow.io/) [![run with conda](http://img.shields.io/badge/run%20with-conda-3EB049?labelColor=000000&logo=anaconda)](https://docs.conda.io/en/latest/) [![run with docker](https://img.shields.io/badge/run%20with-docker-0db7ed?labelColor=000000&logo=docker)](https://www.docker.com/) [![run with singularity](https://img.shields.io/badge/run%20with-singularity-1d355c.svg?labelColor=000000)](https://sylabs.io/docs/) [![Launch on Seqera Platform](https://img.shields.io/badge/Launch%20%F0%9F%9A%80-Seqera%20Platform-%234256e7)](https://cloud.seqera.io/launch?pipeline=https://github.com/{{ name }}) {% endif -%} -{%- if branded -%}[![Get help on Slack](http://img.shields.io/badge/slack-nf--core%20%23{{ short_name }}-4A154B?labelColor=000000&logo=slack)](https://nfcore.slack.com/channels/{{ short_name }}){% endif -%} -{%- if branded -%}[![Follow on Twitter](http://img.shields.io/badge/twitter-%40nf__core-1DA1F2?labelColor=000000&logo=twitter)](https://twitter.com/nf_core){% endif -%} -{%- if branded -%}[![Follow on Mastodon](https://img.shields.io/badge/mastodon-nf__core-6364ff?labelColor=FFFFFF&logo=mastodon)](https://mstdn.science/@nf_core){% endif -%} -{%- if branded -%}[![Watch on YouTube](http://img.shields.io/badge/youtube-nf--core-FF0000?labelColor=000000&logo=youtube)](https://www.youtube.com/c/nf-core) +{%- if is_nfcore -%}[![Get help on Slack](http://img.shields.io/badge/slack-nf--core%20%23{{ short_name }}-4A154B?labelColor=000000&logo=slack)](https://nfcore.slack.com/channels/{{ short_name }}){% endif -%} +{%- if is_nfcore -%}[![Follow on Twitter](http://img.shields.io/badge/twitter-%40nf__core-1DA1F2?labelColor=000000&logo=twitter)](https://twitter.com/nf_core){% endif -%} +{%- if is_nfcore -%}[![Follow on Mastodon](https://img.shields.io/badge/mastodon-nf__core-6364ff?labelColor=FFFFFF&logo=mastodon)](https://mstdn.science/@nf_core){% endif -%} +{%- if is_nfcore -%}[![Watch on YouTube](http://img.shields.io/badge/youtube-nf--core-FF0000?labelColor=000000&logo=youtube)](https://www.youtube.com/c/nf-core) {% endif -%} @@ -44,13 +48,13 @@ workflows use the "tube map" design for that. See https://nf-co.re/docs/contributing/design_guidelines#examples for examples. --> -1. Read QC ([`FastQC`](https://www.bioinformatics.babraham.ac.uk/projects/fastqc/)) -2. 
Present QC for raw reads ([`MultiQC`](http://multiqc.info/)) +{% if fastqc %}1. Read QC ([`FastQC`](https://www.bioinformatics.babraham.ac.uk/projects/fastqc/)){% endif %} +{% if multiqc %}2. Present QC for raw reads ([`MultiQC`](http://multiqc.info/)){% endif %} ## Usage > [!NOTE] -> If you are new to Nextflow and nf-core, please refer to [this page](https://nf-co.re/docs/usage/installation) on how to set-up Nextflow. Make sure to [test your setup](https://nf-co.re/docs/usage/introduction#how-to-run-a-pipeline) with `-profile test` before running the workflow on actual data. +> If you are new to Nextflow and nf-core, please refer to [this page](https://nf-co.re/docs/usage/installation) on how to set up Nextflow. {% if test_config %}Make sure to [test your setup](https://nf-co.re/docs/usage/introduction#how-to-run-a-pipeline) with `-profile test` before running the workflow on actual data.{% endif %} - +{% if citations %} An extensive list of references for the tools used by the pipeline can be found in the [`CITATIONS.md`](CITATIONS.md) file. - -{% if branded -%} +{% endif %} +{% if is_nfcore -%} You can cite the `nf-core` publication as follows: {% else -%} diff --git a/nf_core/pipeline-template/assets/email_template.txt b/nf_core/pipeline-template/assets/email_template.txt index 25b12e8ce..7927d4503 100644 --- a/nf_core/pipeline-template/assets/email_template.txt +++ b/nf_core/pipeline-template/assets/email_template.txt @@ -1,4 +1,4 @@ -{% if branded -%} +{% if is_nfcore -%} ---------------------------------------------------- ,--./,-. ___ __ __ __ ___ /,-._.--~\\ diff --git a/nf_core/pipeline-template/assets/multiqc_config.yml b/nf_core/pipeline-template/assets/multiqc_config.yml index b13b7ae07..e6fd87898 100644 --- a/nf_core/pipeline-template/assets/multiqc_config.yml +++ b/nf_core/pipeline-template/assets/multiqc_config.yml @@ -1,13 +1,13 @@ report_comment: > {% if 'dev' in version -%} This report has been generated by the {{ name }} - analysis pipeline.{% if branded %} For information about how to interpret these results, please see the documentation.{% endif %} - {%- else %} + {%- else -%} This report has been generated by the {{ name }} - analysis pipeline.{% if is_nfcore %} For information about how to interpret these results, please see the documentation.{% endif %} - {% endif %} + {%- endif %} report_section_order: "{{ name_noslash }}-methods-description": order: -1000 diff --git a/nf_core/pipeline-template/assets/schema_input.json b/nf_core/pipeline-template/assets/schema_input.json index e76b95fa9..28a468ada 100644 --- a/nf_core/pipeline-template/assets/schema_input.json +++ b/nf_core/pipeline-template/assets/schema_input.json @@ -1,5 +1,5 @@ { - "$schema": "http://json-schema.org/draft-07/schema", + "$schema": "https://json-schema.org/draft/2020-12/schema", "$id": "https://raw.githubusercontent.com/{{ name }}/master/assets/schema_input.json", "title": "{{ name }} pipeline - params.input schema", "description": "Schema for the file provided with params.input", diff --git a/nf_core/pipeline-template/assets/sendmail_template.txt b/nf_core/pipeline-template/assets/sendmail_template.txt index 3e59cd2d6..5257815f7 100644 --- a/nf_core/pipeline-template/assets/sendmail_template.txt +++ b/nf_core/pipeline-template/assets/sendmail_template.txt @@ -26,6 +26,7 @@
Content-Disposition: inline; filename="{{ name_noslash }}_logo_light.png" join( '\n' ) %> <% +{%- if multiqc %} if (mqcFile){ def mqcFileObj = new File("$mqcFile") if (mqcFileObj.length() < mqcMaxSize){ @@ -48,6 +49,7 @@ ${mqcFileObj. join( '\n' )} """ }} +{%- endif %} %> --nfcoremimeboundary-- diff --git a/nf_core/pipeline-template/conf/base.config b/nf_core/pipeline-template/conf/base.config index 9c62bf063..fa292339e 100644 --- a/nf_core/pipeline-template/conf/base.config +++ b/nf_core/pipeline-template/conf/base.config @@ -11,9 +11,9 @@ process { // TODO nf-core: Check the defaults for all processes - cpus = { check_max( 1 * task.attempt, 'cpus' ) } - memory = { check_max( 6.GB * task.attempt, 'memory' ) } - time = { check_max( 4.h * task.attempt, 'time' ) } + cpus = { 1 * task.attempt } + memory = { 6.GB * task.attempt } + time = { 4.h * task.attempt } errorStrategy = { task.exitStatus in ((130..145) + 104) ? 'retry' : 'finish' } maxRetries = 1 @@ -27,30 +27,30 @@ process { // TODO nf-core: Customise requirements for specific processes. // See https://www.nextflow.io/docs/latest/config.html#config-process-selectors withLabel:process_single { - cpus = { check_max( 1 , 'cpus' ) } - memory = { check_max( 6.GB * task.attempt, 'memory' ) } - time = { check_max( 4.h * task.attempt, 'time' ) } + cpus = { 1 } + memory = { 6.GB * task.attempt } + time = { 4.h * task.attempt } } withLabel:process_low { - cpus = { check_max( 2 * task.attempt, 'cpus' ) } - memory = { check_max( 12.GB * task.attempt, 'memory' ) } - time = { check_max( 4.h * task.attempt, 'time' ) } + cpus = { 2 * task.attempt } + memory = { 12.GB * task.attempt } + time = { 4.h * task.attempt } } withLabel:process_medium { - cpus = { check_max( 6 * task.attempt, 'cpus' ) } - memory = { check_max( 36.GB * task.attempt, 'memory' ) } - time = { check_max( 8.h * task.attempt, 'time' ) } + cpus = { 6 * task.attempt } + memory = { 36.GB * task.attempt } + time = { 8.h * task.attempt } } withLabel:process_high { - cpus = { check_max( 12 * task.attempt, 'cpus' ) } - memory = { check_max( 72.GB * task.attempt, 'memory' ) } - time = { check_max( 16.h * task.attempt, 'time' ) } + cpus = { 12 * task.attempt } + memory = { 72.GB * task.attempt } + time = { 16.h * task.attempt } } withLabel:process_long { - time = { check_max( 20.h * task.attempt, 'time' ) } + time = { 20.h * task.attempt } } withLabel:process_high_memory { - memory = { check_max( 200.GB * task.attempt, 'memory' ) } + memory = { 200.GB * task.attempt } } withLabel:error_ignore { errorStrategy = 'ignore' diff --git a/nf_core/pipeline-template/conf/igenomes_ignored.config b/nf_core/pipeline-template/conf/igenomes_ignored.config new file mode 100644 index 000000000..b4034d824 --- /dev/null +++ b/nf_core/pipeline-template/conf/igenomes_ignored.config @@ -0,0 +1,9 @@ +/* +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + Nextflow config file for iGenomes paths +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + Empty genomes dictionary to use when igenomes is ignored. 
+---------------------------------------------------------------------------------------- +*/ + +params.genomes = [:] diff --git a/nf_core/pipeline-template/conf/modules.config b/nf_core/pipeline-template/conf/modules.config index d203d2b6e..35e861d9b 100644 --- a/nf_core/pipeline-template/conf/modules.config +++ b/nf_core/pipeline-template/conf/modules.config @@ -18,10 +18,13 @@ process { saveAs: { filename -> filename.equals('versions.yml') ? null : filename } ] + {% if fastqc -%} withName: FASTQC { ext.args = '--quiet' } + {%- endif %} + {%- if multiqc %} withName: 'MULTIQC' { ext.args = { params.multiqc_title ? "--title \"$params.multiqc_title\"" : '' } publishDir = [ @@ -30,5 +33,6 @@ process { saveAs: { filename -> filename.equals('versions.yml') ? null : filename } ] } + {%- endif %} } diff --git a/nf_core/pipeline-template/conf/test.config b/nf_core/pipeline-template/conf/test.config index 827e21b7b..bea6f670d 100644 --- a/nf_core/pipeline-template/conf/test.config +++ b/nf_core/pipeline-template/conf/test.config @@ -10,15 +10,18 @@ ---------------------------------------------------------------------------------------- */ +process { + resourceLimits = [ + cpus: 4, + memory: '15.GB', + time: '1.h' + ] +} + params { config_profile_name = 'Test profile' config_profile_description = 'Minimal test dataset to check pipeline function' - // Limit resources so that this can run on GitHub Actions - max_cpus = 2 - max_memory = '6.GB' - max_time = '6.h' - // Input data // TODO nf-core: Specify the paths to your test data on nf-core/test-datasets // TODO nf-core: Give any required params for the test so that command line flags are not needed diff --git a/nf_core/pipeline-template/docs/README.md b/nf_core/pipeline-template/docs/README.md index e94889c53..9a237c1ad 100644 --- a/nf_core/pipeline-template/docs/README.md +++ b/nf_core/pipeline-template/docs/README.md @@ -6,7 +6,7 @@ The {{ name }} documentation is split into the following pages: - An overview of how the pipeline works, how to run it and a description of all of the different command-line flags. - [Output](output.md) - An overview of the different results produced by the pipeline and how to interpret them. 
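The `resourceLimits` block added to `conf/test.config` above replaces the old `check_max()` helper and the `--max_cpus` / `--max_memory` / `--max_time` parameters: from Nextflow 24.04 the runtime itself clamps any request that exceeds the configured ceiling. A minimal sketch of that interaction, with illustrative values mirroring `conf/base.config`:

```groovy
// Illustrative only: with resourceLimits set, over-sized requests are capped
// by Nextflow rather than failing or needing a check_max()-style helper.
process {
    resourceLimits = [ cpus: 4, memory: 15.GB, time: 1.h ]

    withLabel: 'process_high' {
        cpus   = { 12 * task.attempt }    // requests 12, runs with 4
        memory = { 72.GB * task.attempt } // requests 72.GB, runs with 15.GB
        time   = { 16.h * task.attempt }  // requests 16.h, runs with 1.h
    }
}
```

Because the cap is applied after the retry multiplier, `task.attempt` scaling still works on smaller hosts without overshooting the limits.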
- {%- if branded %} + {%- if is_nfcore %} You can find a lot more documentation about installing, configuring and running nf-core pipelines on the website: [https://nf-co.re](https://nf-co.re) {% else %} diff --git a/nf_core/pipeline-template/docs/images/mqc_fastqc_adapter.png b/nf_core/pipeline-template/docs/images/mqc_fastqc_adapter.png deleted file mode 100755 index 361d0e47a..000000000 Binary files a/nf_core/pipeline-template/docs/images/mqc_fastqc_adapter.png and /dev/null differ diff --git a/nf_core/pipeline-template/docs/images/mqc_fastqc_counts.png b/nf_core/pipeline-template/docs/images/mqc_fastqc_counts.png deleted file mode 100755 index cb39ebb80..000000000 Binary files a/nf_core/pipeline-template/docs/images/mqc_fastqc_counts.png and /dev/null differ diff --git a/nf_core/pipeline-template/docs/images/mqc_fastqc_quality.png b/nf_core/pipeline-template/docs/images/mqc_fastqc_quality.png deleted file mode 100755 index a4b89bf56..000000000 Binary files a/nf_core/pipeline-template/docs/images/mqc_fastqc_quality.png and /dev/null differ diff --git a/nf_core/pipeline-template/docs/output.md b/nf_core/pipeline-template/docs/output.md index 53b0e242e..083c46ecd 100644 --- a/nf_core/pipeline-template/docs/output.md +++ b/nf_core/pipeline-template/docs/output.md @@ -2,7 +2,7 @@ ## Introduction -This document describes the output produced by the pipeline. Most of the plots are taken from the MultiQC report, which summarises results at the end of the pipeline. +This document describes the output produced by the pipeline. {% if multiqc %}Most of the plots are taken from the MultiQC report, which summarises results at the end of the pipeline.{% endif %} The directories listed below will be created in the results directory after the pipeline has finished. All paths are relative to the top-level results directory. @@ -12,10 +12,13 @@ The directories listed below will be created in the results directory after the The pipeline is built using [Nextflow](https://www.nextflow.io/) and processes data using the following steps: -- [FastQC](#fastqc) - Raw read QC -- [MultiQC](#multiqc) - Aggregate report describing results and QC from the whole pipeline +{% if fastqc %}- [FastQC](#fastqc) - Raw read QC{% endif %} +{% if multiqc %}- [MultiQC](#multiqc) - Aggregate report describing results and QC from the whole pipeline{% endif %} + - [Pipeline information](#pipeline-information) - Report metrics generated during the workflow execution +{%- if fastqc %} + ### FastQC
@@ -28,16 +31,8 @@ The pipeline is built using [Nextflow](https://www.nextflow.io/) and processes d
[FastQC](http://www.bioinformatics.babraham.ac.uk/projects/fastqc/) gives general quality metrics about your sequenced reads. It provides information about the quality score distribution across your reads, per base sequence content (%A/T/G/C), adapter contamination and overrepresented sequences. For further reading and documentation see the [FastQC help pages](http://www.bioinformatics.babraham.ac.uk/projects/fastqc/Help/). - -![MultiQC - FastQC sequence counts plot](images/mqc_fastqc_counts.png) - -![MultiQC - FastQC mean quality scores plot](images/mqc_fastqc_quality.png) - -![MultiQC - FastQC adapter content plot](images/mqc_fastqc_adapter.png) - -:::note -The FastQC plots displayed in the MultiQC report shows _untrimmed_ reads. They may contain adapter sequence and potentially regions with low quality. -::: +{%- endif %} +{%- if multiqc %} ### MultiQC @@ -54,6 +49,7 @@ The FastQC plots displayed in the MultiQC report shows _untrimmed_ reads. They m [MultiQC](http://multiqc.info) is a visualization tool that generates a single HTML report summarising all samples in your project. Most of the pipeline QC results are visualised in the report and further statistics are available in the report data directory. Results generated by MultiQC collate pipeline QC from supported tools e.g. FastQC. The pipeline has special steps which also allow the software versions to be reported in the MultiQC output for future traceability. For more information about how to use MultiQC reports, see <http://multiqc.info>. +{% endif %} ### Pipeline information @@ -62,7 +58,8 @@ Results generated by MultiQC collate pipeline QC from supported tools e.g. FastQ - `pipeline_info/` - Reports generated by Nextflow: `execution_report.html`, `execution_timeline.html`, `execution_trace.txt` and `pipeline_dag.dot`/`pipeline_dag.svg`. - - Reports generated by the pipeline: `pipeline_report.html`, `pipeline_report.txt` and `software_versions.yml`. The `pipeline_report*` files will only be present if the `--email` / `--email_on_fail` parameter's are used when running the pipeline. + {%- if email %} + - Reports generated by the pipeline: `pipeline_report.html`, `pipeline_report.txt` and `software_versions.yml`. The `pipeline_report*` files will only be present if the `--email` / `--email_on_fail` parameters are used when running the pipeline. {% endif %} - Reformatted samplesheet files used as input to the pipeline: `samplesheet.valid.csv`. - Parameters used by the pipeline run: `params.json`. diff --git a/nf_core/pipeline-template/docs/usage.md b/nf_core/pipeline-template/docs/usage.md index d46dfca04..ae2761797 100644 --- a/nf_core/pipeline-template/docs/usage.md +++ b/nf_core/pipeline-template/docs/usage.md @@ -1,6 +1,6 @@ # {{ name }}: Usage -{% if branded -%} +{% if is_nfcore -%} ## :warning: Please read this documentation on the nf-core website: [https://nf-co.re/{{ short_name }}/usage](https://nf-co.re/{{ short_name }}/usage) @@ -89,9 +89,9 @@ The above pipeline run specified with a params file in yaml format: nextflow run {{ name }} -profile docker -params-file params.yaml ``` -with `params.yaml` containing: +with: -```yaml +```yaml title="params.yaml" input: './samplesheet.csv' outdir: './results/' genome: 'GRCh37' @@ -114,7 +114,7 @@ It is a good idea to specify a pipeline version when running the pipeline on you First, go to the [{{ name }} releases page](https://github.com/{{ name }}/releases) and find the latest pipeline version - numeric only (eg. `1.3.1`). Then specify this when running the pipeline with `-r` (one hyphen) - eg.
`-r 1.3.1`. Of course, you can switch to another version by changing the number after the `-r` flag. -This version number will be logged in reports when you run the pipeline, so that you'll know what you used when you look back in the future. For example, at the bottom of the MultiQC reports. +This version number will be logged in reports when you run the pipeline, so that you'll know what you used when you look back in the future. {% if multiqc %}For example, at the bottom of the MultiQC reports.{% endif %} To further assist in reproducibility, you can share and re-use [parameter files](#running-the-pipeline) to repeat pipeline runs with the same settings without having to write out a command with every single parameter. @@ -148,9 +148,12 @@ They are loaded in sequence, so later profiles can overwrite earlier profiles. If `-profile` is not specified, the pipeline will run locally and expect all software to be installed and available on the `PATH`. This is _not_ recommended, since it can lead to different results on different machines dependent on the computer environment. {%- if test_config %} + - `test` - A profile with a complete configuration for automated testing - Includes links to test data so needs no other parameters + {%- endif %} - `docker` - A generic configuration profile to be used with [Docker](https://docker.com/) - `singularity` @@ -207,14 +210,6 @@ See the main [Nextflow documentation](https://www.nextflow.io/docs/latest/config If you have any questions or issues please send us a message on [Slack](https://nf-co.re/join/slack) on the [`#configs` channel](https://nfcore.slack.com/channels/configs). -## Azure Resource Requests - -To be used with the `azurebatch` profile by specifying the `-profile azurebatch`. -We recommend providing a compute `params.vm_type` of `Standard_D16_v3` VMs by default but these options can be changed if required. - -Note that the choice of VM size depends on your quota and the overall workload during the analysis. -For a thorough list, please refer the [Azure Sizes for virtual machines in Azure](https://docs.microsoft.com/en-us/azure/virtual-machines/sizes).
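Since profiles are applied in the order given and later ones override earlier ones, a site-specific profile can be layered under the generic container profiles. A hedged sketch of such a custom config; the profile name, executor and queue below are hypothetical:

```groovy
// Hypothetical site config, e.g. passed with -c my_site.config and selected with:
//   nextflow run <pipeline> -profile my_cluster,docker
// Profiles load in sequence, so docker's settings are applied after my_cluster's.
profiles {
    my_cluster {
        process.executor = 'slurm'  // submit tasks to SLURM instead of running locally
        process.queue    = 'short'  // hypothetical queue name
    }
}
```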
- {% endif -%} ## Running in the background diff --git a/nf_core/pipeline-template/main.nf b/nf_core/pipeline-template/main.nf index 2590f7467..6516ebf90 100644 --- a/nf_core/pipeline-template/main.nf +++ b/nf_core/pipeline-template/main.nf @@ -4,15 +4,13 @@ {{ name }} ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Github : https://github.com/{{ name }} -{%- if branded %} +{%- if is_nfcore %} Website: https://nf-co.re/{{ short_name }} Slack : https://nfcore.slack.com/channels/{{ short_name }} {%- endif %} ---------------------------------------------------------------------------------------- */ -nextflow.enable.dsl = 2 - /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ IMPORT FUNCTIONS / MODULES / SUBWORKFLOWS / WORKFLOWS @@ -20,9 +18,10 @@ nextflow.enable.dsl = 2 */ include { {{ short_name|upper }} } from './workflows/{{ short_name }}' +{%- if modules %} include { PIPELINE_INITIALISATION } from './subworkflows/local/utils_nfcore_{{ short_name }}_pipeline' include { PIPELINE_COMPLETION } from './subworkflows/local/utils_nfcore_{{ short_name }}_pipeline' -{% if igenomes %} +{%- if igenomes %} include { getGenomeAttribute } from './subworkflows/local/utils_nfcore_{{ short_name }}_pipeline' /* @@ -35,7 +34,7 @@ include { getGenomeAttribute } from './subworkflows/local/utils_nfcore_{{ s // This is an example of how to use getGenomeAttribute() to fetch parameters // from igenomes.config using `--genome` params.fasta = getGenomeAttribute('fasta') -{% endif %} +{% endif %}{% endif %} /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ NAMED WORKFLOWS FOR PIPELINE @@ -58,10 +57,10 @@ workflow {{ prefix_nodash|upper }}_{{ short_name|upper }} { {{ short_name|upper }} ( samplesheet ) - +{%- if multiqc %}{%- if modules %} emit: multiqc_report = {{ short_name|upper }}.out.multiqc_report // channel: /path/to/multiqc_report.html - +{%- endif %}{%- endif %} } /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -73,38 +72,46 @@ workflow { main: + {%- if modules %} // // SUBWORKFLOW: Run initialisation tasks // PIPELINE_INITIALISATION ( params.version, - params.help, params.validate_params, params.monochrome_logs, args, params.outdir, params.input ) - + {% endif %} // // WORKFLOW: Run main workflow // {{ prefix_nodash|upper }}_{{ short_name|upper }} ( + {%- if modules %} PIPELINE_INITIALISATION.out.samplesheet + {%- else %} + params.input + {%- endif %} ) + {%- if modules %} // // SUBWORKFLOW: Run completion tasks // PIPELINE_COMPLETION ( + {%- if email %} params.email, params.email_on_fail, params.plaintext_email, + {%- endif %} params.outdir, params.monochrome_logs, - params.hook_url, - {{ prefix_nodash|upper }}_{{ short_name|upper }}.out.multiqc_report + {% if adaptivecard or slackreport %}params.hook_url,{% endif %} + {% if multiqc %}{{ prefix_nodash|upper }}_{{ short_name|upper }}.out.multiqc_report{% endif %} ) + {%- endif %} } /* diff --git a/nf_core/pipeline-template/modules.json b/nf_core/pipeline-template/modules.json index 9137c5967..9bc344e7d 100644 --- a/nf_core/pipeline-template/modules.json +++ b/nf_core/pipeline-template/modules.json @@ -5,35 +5,37 @@ "https://github.com/nf-core/modules.git": { "modules": { "nf-core": { + {%- if fastqc %} "fastqc": { "branch": "master", - "git_sha": "285a50500f9e02578d90b3ce6382ea3c30216acd", + "git_sha": "666652151335353eef2fcd58880bcef5bc2928e1", "installed_by": ["modules"] - }, + }{% endif 
%}{%- if multiqc %}{% if fastqc %},{% endif %} "multiqc": { "branch": "master", - "git_sha": "b7ebe95761cd389603f9cc0e0dc384c0f663815a", + "git_sha": "666652151335353eef2fcd58880bcef5bc2928e1", "installed_by": ["modules"] } + {%- endif %} } }, "subworkflows": { "nf-core": { "utils_nextflow_pipeline": { "branch": "master", - "git_sha": "5caf7640a9ef1d18d765d55339be751bb0969dfa", + "git_sha": "d20fb2a9cc3e2835e9d067d1046a63252eb17352", "installed_by": ["subworkflows"] }, "utils_nfcore_pipeline": { "branch": "master", - "git_sha": "92de218a329bfc9a9033116eb5f65fd270e72ba3", + "git_sha": "2fdce49d30c0254f76bc0f13c55c17455c1251ab", "installed_by": ["subworkflows"] - }, - "utils_nfvalidation_plugin": { + }{% if nf_schema %}, + "utils_nfschema_plugin": { "branch": "master", - "git_sha": "5caf7640a9ef1d18d765d55339be751bb0969dfa", + "git_sha": "bbd5a41f4535a8defafe6080e00ea74c45f4f96c", "installed_by": ["subworkflows"] - } + }{% endif %} } } } diff --git a/nf_core/pipeline-template/modules/nf-core/fastqc/environment.yml b/nf_core/pipeline-template/modules/nf-core/fastqc/environment.yml index 1787b38a9..691d4c763 100644 --- a/nf_core/pipeline-template/modules/nf-core/fastqc/environment.yml +++ b/nf_core/pipeline-template/modules/nf-core/fastqc/environment.yml @@ -1,7 +1,5 @@ -name: fastqc channels: - conda-forge - bioconda - - defaults dependencies: - bioconda::fastqc=0.12.1 diff --git a/nf_core/pipeline-template/modules/nf-core/fastqc/main.nf b/nf_core/pipeline-template/modules/nf-core/fastqc/main.nf index d79f1c862..d8989f481 100644 --- a/nf_core/pipeline-template/modules/nf-core/fastqc/main.nf +++ b/nf_core/pipeline-template/modules/nf-core/fastqc/main.nf @@ -26,7 +26,10 @@ process FASTQC { def rename_to = old_new_pairs*.join(' ').join(' ') def renamed_files = old_new_pairs.collect{ old_name, new_name -> new_name }.join(' ') - def memory_in_mb = MemoryUnit.of("${task.memory}").toUnit('MB') + // The total amount of RAM allocated by FastQC is equal to the number of threads defined (--threads) times the amount of RAM defined (--memory) + // https://github.com/s-andrews/FastQC/blob/1faeea0412093224d7f6a07f777fad60a5650795/fastqc#L211-L222 + // Dividing task.memory by task.cpus keeps the total within the amount of RAM requested by the label + def memory_in_mb = MemoryUnit.of("${task.memory}").toUnit('MB') / task.cpus // FastQC memory value allowed range (100 - 10000) def fastqc_memory = memory_in_mb > 10000 ? 10000 : (memory_in_mb < 100 ? 100 : memory_in_mb) diff --git a/nf_core/pipeline-template/modules/nf-core/fastqc/meta.yml b/nf_core/pipeline-template/modules/nf-core/fastqc/meta.yml index ee5507e06..4827da7af 100644 --- a/nf_core/pipeline-template/modules/nf-core/fastqc/meta.yml +++ b/nf_core/pipeline-template/modules/nf-core/fastqc/meta.yml @@ -16,35 +16,44 @@ tools: homepage: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/ documentation: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/Help/ licence: ["GPL-2.0-only"] + identifier: biotools:fastqc input: - - meta: - type: map - description: | - Groovy Map containing sample information - e.g. [ id:'test', single_end:false ] - - reads: - type: file - description: | - List of input FastQ files of size 1 and 2 for single-end and paired-end data, - respectively. + - - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: | + List of input FastQ files of size 1 and 2 for single-end and paired-end data, + respectively.
output: - - meta: - type: map - description: | - Groovy Map containing sample information - e.g. [ id:'test', single_end:false ] - html: - type: file - description: FastQC report - pattern: "*_{fastqc.html}" + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - "*.html": + type: file + description: FastQC report + pattern: "*_{fastqc.html}" - zip: - type: file - description: FastQC report archive - pattern: "*_{fastqc.zip}" + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - "*.zip": + type: file + description: FastQC report archive + pattern: "*_{fastqc.zip}" - versions: - type: file - description: File containing software versions - pattern: "versions.yml" + - versions.yml: + type: file + description: File containing software versions + pattern: "versions.yml" authors: - "@drpatelh" - "@grst" diff --git a/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test b/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test index 70edae4d9..e9d79a074 100644 --- a/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test +++ b/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test @@ -23,17 +23,14 @@ nextflow_process { then { assertAll ( - { assert process.success }, - - // NOTE The report contains the date inside it, which means that the md5sum is stable per day, but not longer than that. So you can't md5sum it. - // looks like this:
<div id="header_filename">Mon 2 Oct 2023<br/>test.gz</div>
- https://github.com/nf-core/modules/pull/3903#issuecomment-1743620039 - - { assert process.out.html[0][1] ==~ ".*/test_fastqc.html" }, - { assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" }, - { assert path(process.out.html[0][1]).text.contains("<tr><td>File type</td><td>Conventional base calls</td></tr>") }, - - { assert snapshot(process.out.versions).match("fastqc_versions_single") } + { assert process.success }, + // NOTE The report contains the date inside it, which means that the md5sum is stable per day, but not longer than that. So you can't md5sum it. + // looks like this:
<div id="header_filename">Mon 2 Oct 2023<br/>test.gz</div>
+ https://github.com/nf-core/modules/pull/3903#issuecomment-1743620039 + { assert process.out.html[0][1] ==~ ".*/test_fastqc.html" }, + { assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" }, + { assert path(process.out.html[0][1]).text.contains("<tr><td>File type</td><td>Conventional base calls</td></tr>") }, + { assert snapshot(process.out.versions).match() } ) } } @@ -54,16 +51,14 @@ then { assertAll ( - { assert process.success }, - - { assert process.out.html[0][1][0] ==~ ".*/test_1_fastqc.html" }, - { assert process.out.html[0][1][1] ==~ ".*/test_2_fastqc.html" }, - { assert process.out.zip[0][1][0] ==~ ".*/test_1_fastqc.zip" }, - { assert process.out.zip[0][1][1] ==~ ".*/test_2_fastqc.zip" }, - { assert path(process.out.html[0][1][0]).text.contains("<tr><td>File type</td><td>Conventional base calls</td></tr>") }, - { assert path(process.out.html[0][1][1]).text.contains("<tr><td>File type</td><td>Conventional base calls</td></tr>") }, - - { assert snapshot(process.out.versions).match("fastqc_versions_paired") } + { assert process.success }, + { assert process.out.html[0][1][0] ==~ ".*/test_1_fastqc.html" }, + { assert process.out.html[0][1][1] ==~ ".*/test_2_fastqc.html" }, + { assert process.out.zip[0][1][0] ==~ ".*/test_1_fastqc.zip" }, + { assert process.out.zip[0][1][1] ==~ ".*/test_2_fastqc.zip" }, + { assert path(process.out.html[0][1][0]).text.contains("<tr><td>File type</td><td>Conventional base calls</td></tr>") }, + { assert path(process.out.html[0][1][1]).text.contains("<tr><td>File type</td><td>Conventional base calls</td></tr>") }, + { assert snapshot(process.out.versions).match() } ) } } @@ -83,13 +78,11 @@ then { assertAll ( - { assert process.success }, - - { assert process.out.html[0][1] ==~ ".*/test_fastqc.html" }, - { assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" }, - { assert path(process.out.html[0][1]).text.contains("<tr><td>File type</td><td>Conventional base calls</td></tr>") }, - - { assert snapshot(process.out.versions).match("fastqc_versions_interleaved") } + { assert process.success }, + { assert process.out.html[0][1] ==~ ".*/test_fastqc.html" }, + { assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" }, + { assert path(process.out.html[0][1]).text.contains("<tr><td>File type</td><td>Conventional base calls</td></tr>") }, + { assert snapshot(process.out.versions).match() } ) } } @@ -109,13 +102,11 @@ then { assertAll ( - { assert process.success }, - - { assert process.out.html[0][1] ==~ ".*/test_fastqc.html" }, - { assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" }, - { assert path(process.out.html[0][1]).text.contains("<tr><td>File type</td><td>Conventional base calls</td></tr>") }, - - { assert snapshot(process.out.versions).match("fastqc_versions_bam") } + { assert process.success }, + { assert process.out.html[0][1] ==~ ".*/test_fastqc.html" }, + { assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" }, + { assert path(process.out.html[0][1]).text.contains("<tr><td>File type</td><td>Conventional base calls</td></tr>") }, + { assert snapshot(process.out.versions).match() } ) } } @@ -138,22 +129,20 @@ then { assertAll ( - { assert process.success }, - - { assert process.out.html[0][1][0] ==~ ".*/test_1_fastqc.html" }, - { assert process.out.html[0][1][1] ==~ ".*/test_2_fastqc.html" }, - { assert process.out.html[0][1][2] ==~ ".*/test_3_fastqc.html" }, - { assert process.out.html[0][1][3] ==~ ".*/test_4_fastqc.html" }, - { assert process.out.zip[0][1][0] ==~ ".*/test_1_fastqc.zip" }, - { assert process.out.zip[0][1][1] ==~ ".*/test_2_fastqc.zip" }, - { assert process.out.zip[0][1][2] ==~ ".*/test_3_fastqc.zip" }, - { assert process.out.zip[0][1][3] ==~ ".*/test_4_fastqc.zip" }, - { assert
path(process.out.html[0][1][0]).text.contains("<tr><td>File type</td><td>Conventional base calls</td></tr>") }, - { assert path(process.out.html[0][1][1]).text.contains("<tr><td>File type</td><td>Conventional base calls</td></tr>") }, - { assert path(process.out.html[0][1][2]).text.contains("<tr><td>File type</td><td>Conventional base calls</td></tr>") }, - { assert path(process.out.html[0][1][3]).text.contains("<tr><td>File type</td><td>Conventional base calls</td></tr>") }, - - { assert snapshot(process.out.versions).match("fastqc_versions_multiple") } + { assert process.success }, + { assert process.out.html[0][1][0] ==~ ".*/test_1_fastqc.html" }, + { assert process.out.html[0][1][1] ==~ ".*/test_2_fastqc.html" }, + { assert process.out.html[0][1][2] ==~ ".*/test_3_fastqc.html" }, + { assert process.out.html[0][1][3] ==~ ".*/test_4_fastqc.html" }, + { assert process.out.zip[0][1][0] ==~ ".*/test_1_fastqc.zip" }, + { assert process.out.zip[0][1][1] ==~ ".*/test_2_fastqc.zip" }, + { assert process.out.zip[0][1][2] ==~ ".*/test_3_fastqc.zip" }, + { assert process.out.zip[0][1][3] ==~ ".*/test_4_fastqc.zip" }, + { assert path(process.out.html[0][1][0]).text.contains("<tr><td>File type</td><td>Conventional base calls</td></tr>") }, + { assert path(process.out.html[0][1][1]).text.contains("<tr><td>File type</td><td>Conventional base calls</td></tr>") }, + { assert path(process.out.html[0][1][2]).text.contains("<tr><td>File type</td><td>Conventional base calls</td></tr>") }, + { assert path(process.out.html[0][1][3]).text.contains("<tr><td>File type</td><td>Conventional base calls</td></tr>") }, + { assert snapshot(process.out.versions).match() } ) } } @@ -173,21 +162,18 @@ then { assertAll ( - { assert process.success }, - - { assert process.out.html[0][1] ==~ ".*/mysample_fastqc.html" }, - { assert process.out.zip[0][1] ==~ ".*/mysample_fastqc.zip" }, - { assert path(process.out.html[0][1]).text.contains("<tr><td>File type</td><td>Conventional base calls</td></tr>") }, - - { assert snapshot(process.out.versions).match("fastqc_versions_custom_prefix") } + { assert process.success }, + { assert process.out.html[0][1] ==~ ".*/mysample_fastqc.html" }, + { assert process.out.zip[0][1] ==~ ".*/mysample_fastqc.zip" }, + { assert path(process.out.html[0][1]).text.contains("<tr><td>File type</td><td>Conventional base calls</td></tr>") }, + { assert snapshot(process.out.versions).match() } ) } } test("sarscov2 single-end [fastq] - stub") { - options "-stub" - + options "-stub" when { process { """ @@ -201,12 +187,123 @@ nextflow_process { then { assertAll ( - { assert process.success }, - { assert snapshot(process.out.html.collect { file(it[1]).getName() } + - process.out.zip.collect { file(it[1]).getName() } + - process.out.versions ).match("fastqc_stub") } + { assert process.success }, + { assert snapshot(process.out).match() } ) } } + test("sarscov2 paired-end [fastq] - stub") { + + options "-stub" + when { + process { + """ + input[0] = Channel.of([ + [id: 'test', single_end: false], // meta map + [ file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_1.fastq.gz', checkIfExists: true), + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_2.fastq.gz', checkIfExists: true) ] + ]) + """ + } + } + + then { + assertAll ( + { assert process.success }, + { assert snapshot(process.out).match() } + ) + } + } + + test("sarscov2 interleaved [fastq] - stub") { + + options "-stub" + when { + process { + """ + input[0] = Channel.of([ + [id: 'test', single_end: false], // meta map + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_interleaved.fastq.gz', checkIfExists: true) + ]) + """ + } + } + + then { + assertAll ( + { assert process.success }, + { assert
snapshot(process.out).match() } + ) + } + } + + test("sarscov2 paired-end [bam] - stub") { + + options "-stub" + when { + process { + """ + input[0] = Channel.of([ + [id: 'test', single_end: false], // meta map + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.paired_end.sorted.bam', checkIfExists: true) + ]) + """ + } + } + + then { + assertAll ( + { assert process.success }, + { assert snapshot(process.out).match() } + ) + } + } + + test("sarscov2 multiple [fastq] - stub") { + + options "-stub" + when { + process { + """ + input[0] = Channel.of([ + [id: 'test', single_end: false], // meta map + [ file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_1.fastq.gz', checkIfExists: true), + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_2.fastq.gz', checkIfExists: true), + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test2_1.fastq.gz', checkIfExists: true), + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test2_2.fastq.gz', checkIfExists: true) ] + ]) + """ + } + } + + then { + assertAll ( + { assert process.success }, + { assert snapshot(process.out).match() } + ) + } + } + + test("sarscov2 custom_prefix - stub") { + + options "-stub" + when { + process { + """ + input[0] = Channel.of([ + [ id:'mysample', single_end:true ], // meta map + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_1.fastq.gz', checkIfExists: true) + ]) + """ + } + } + + then { + assertAll ( + { assert process.success }, + { assert snapshot(process.out).match() } + ) + } + } } diff --git a/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test.snap b/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test.snap index 86f7c3115..d5db3092f 100644 --- a/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test.snap +++ b/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test.snap @@ -1,88 +1,392 @@ { - "fastqc_versions_interleaved": { + "sarscov2 custom_prefix": { "content": [ [ "versions.yml:md5,e1cc25ca8af856014824abd842e93978" ] ], "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" + "nf-test": "0.9.0", + "nextflow": "24.04.3" }, - "timestamp": "2024-01-31T17:40:07.293713" + "timestamp": "2024-07-22T11:02:16.374038" }, - "fastqc_stub": { + "sarscov2 single-end [fastq] - stub": { "content": [ - [ - "test.html", - "test.zip", - "versions.yml:md5,e1cc25ca8af856014824abd842e93978" - ] + { + "0": [ + [ + { + "id": "test", + "single_end": true + }, + "test.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "1": [ + [ + { + "id": "test", + "single_end": true + }, + "test.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "2": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "html": [ + [ + { + "id": "test", + "single_end": true + }, + "test.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "versions": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "zip": [ + [ + { + "id": "test", + "single_end": true + }, + "test.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ] + } + ], + "meta": { + "nf-test": "0.9.0", + "nextflow": "24.04.3" + }, + "timestamp": "2024-07-22T11:02:24.993809" + }, + "sarscov2 custom_prefix - stub": { + "content": [ + { + "0": [ + [ + { + "id": "mysample", + "single_end": true + }, + "mysample.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "1": [ + [ + { + "id": "mysample", + "single_end": true + }, + 
"mysample.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "2": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "html": [ + [ + { + "id": "mysample", + "single_end": true + }, + "mysample.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "versions": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "zip": [ + [ + { + "id": "mysample", + "single_end": true + }, + "mysample.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ] + } ], "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" + "nf-test": "0.9.0", + "nextflow": "24.04.3" }, - "timestamp": "2024-01-31T17:31:01.425198" + "timestamp": "2024-07-22T11:03:10.93942" }, - "fastqc_versions_multiple": { + "sarscov2 interleaved [fastq]": { "content": [ [ "versions.yml:md5,e1cc25ca8af856014824abd842e93978" ] ], "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" + "nf-test": "0.9.0", + "nextflow": "24.04.3" }, - "timestamp": "2024-01-31T17:40:55.797907" + "timestamp": "2024-07-22T11:01:42.355718" }, - "fastqc_versions_bam": { + "sarscov2 paired-end [bam]": { "content": [ [ "versions.yml:md5,e1cc25ca8af856014824abd842e93978" ] ], "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" + "nf-test": "0.9.0", + "nextflow": "24.04.3" }, - "timestamp": "2024-01-31T17:40:26.795862" + "timestamp": "2024-07-22T11:01:53.276274" }, - "fastqc_versions_single": { + "sarscov2 multiple [fastq]": { "content": [ [ "versions.yml:md5,e1cc25ca8af856014824abd842e93978" ] ], "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" + "nf-test": "0.9.0", + "nextflow": "24.04.3" }, - "timestamp": "2024-01-31T17:39:27.043675" + "timestamp": "2024-07-22T11:02:05.527626" }, - "fastqc_versions_paired": { + "sarscov2 paired-end [fastq]": { "content": [ [ "versions.yml:md5,e1cc25ca8af856014824abd842e93978" ] ], "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" + "nf-test": "0.9.0", + "nextflow": "24.04.3" + }, + "timestamp": "2024-07-22T11:01:31.188871" + }, + "sarscov2 paired-end [fastq] - stub": { + "content": [ + { + "0": [ + [ + { + "id": "test", + "single_end": false + }, + "test.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "1": [ + [ + { + "id": "test", + "single_end": false + }, + "test.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "2": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "html": [ + [ + { + "id": "test", + "single_end": false + }, + "test.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "versions": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "zip": [ + [ + { + "id": "test", + "single_end": false + }, + "test.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ] + } + ], + "meta": { + "nf-test": "0.9.0", + "nextflow": "24.04.3" + }, + "timestamp": "2024-07-22T11:02:34.273566" + }, + "sarscov2 multiple [fastq] - stub": { + "content": [ + { + "0": [ + [ + { + "id": "test", + "single_end": false + }, + "test.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "1": [ + [ + { + "id": "test", + "single_end": false + }, + "test.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "2": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "html": [ + [ + { + "id": "test", + "single_end": false + }, + "test.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "versions": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "zip": [ + [ + { + "id": "test", + "single_end": false + }, + "test.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ] + } + ], + "meta": { + "nf-test": "0.9.0", + "nextflow": "24.04.3" }, - "timestamp": 
"2024-01-31T17:39:47.584191" + "timestamp": "2024-07-22T11:03:02.304411" }, - "fastqc_versions_custom_prefix": { + "sarscov2 single-end [fastq]": { "content": [ [ "versions.yml:md5,e1cc25ca8af856014824abd842e93978" ] ], "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" + "nf-test": "0.9.0", + "nextflow": "24.04.3" + }, + "timestamp": "2024-07-22T11:01:19.095607" + }, + "sarscov2 interleaved [fastq] - stub": { + "content": [ + { + "0": [ + [ + { + "id": "test", + "single_end": false + }, + "test.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "1": [ + [ + { + "id": "test", + "single_end": false + }, + "test.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "2": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "html": [ + [ + { + "id": "test", + "single_end": false + }, + "test.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "versions": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "zip": [ + [ + { + "id": "test", + "single_end": false + }, + "test.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ] + } + ], + "meta": { + "nf-test": "0.9.0", + "nextflow": "24.04.3" + }, + "timestamp": "2024-07-22T11:02:44.640184" + }, + "sarscov2 paired-end [bam] - stub": { + "content": [ + { + "0": [ + [ + { + "id": "test", + "single_end": false + }, + "test.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "1": [ + [ + { + "id": "test", + "single_end": false + }, + "test.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "2": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "html": [ + [ + { + "id": "test", + "single_end": false + }, + "test.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "versions": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "zip": [ + [ + { + "id": "test", + "single_end": false + }, + "test.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ] + } + ], + "meta": { + "nf-test": "0.9.0", + "nextflow": "24.04.3" }, - "timestamp": "2024-01-31T17:41:14.576531" + "timestamp": "2024-07-22T11:02:53.550742" } } \ No newline at end of file diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/environment.yml b/nf_core/pipeline-template/modules/nf-core/multiqc/environment.yml index ca39fb67e..f1cd99b07 100644 --- a/nf_core/pipeline-template/modules/nf-core/multiqc/environment.yml +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/environment.yml @@ -1,7 +1,5 @@ -name: multiqc channels: - conda-forge - bioconda - - defaults dependencies: - - bioconda::multiqc=1.21 + - bioconda::multiqc=1.24.1 diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/main.nf b/nf_core/pipeline-template/modules/nf-core/multiqc/main.nf index 47ac352f9..b9ccebdbb 100644 --- a/nf_core/pipeline-template/modules/nf-core/multiqc/main.nf +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/main.nf @@ -3,14 +3,16 @@ process MULTIQC { conda "${moduleDir}/environment.yml" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/multiqc:1.21--pyhdfd78af_0' : - 'biocontainers/multiqc:1.21--pyhdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/multiqc:1.25--pyhdfd78af_0' : + 'biocontainers/multiqc:1.25--pyhdfd78af_0' }" input: path multiqc_files, stageAs: "?/*" path(multiqc_config) path(extra_multiqc_config) path(multiqc_logo) + path(replace_names) + path(sample_names) output: path "*multiqc_report.html", emit: report @@ -23,16 +25,22 @@ process MULTIQC { script: def args = task.ext.args ?: '' + def prefix = task.ext.prefix ? "--filename ${task.ext.prefix}.html" : '' def config = multiqc_config ? "--config $multiqc_config" : '' def extra_config = extra_multiqc_config ? "--config $extra_multiqc_config" : '' - def logo = multiqc_logo ? /--cl-config 'custom_logo: "${multiqc_logo}"'/ : '' + def logo = multiqc_logo ? "--cl-config 'custom_logo: \"${multiqc_logo}\"'" : '' + def replace = replace_names ? "--replace-names ${replace_names}" : '' + def samples = sample_names ? "--sample-names ${sample_names}" : '' """ multiqc \\ --force \\ $args \\ $config \\ + $prefix \\ $extra_config \\ $logo \\ + $replace \\ + $samples \\ . cat <<-END_VERSIONS > versions.yml diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/meta.yml b/nf_core/pipeline-template/modules/nf-core/multiqc/meta.yml index 45a9bc35e..b16c18792 100644 --- a/nf_core/pipeline-template/modules/nf-core/multiqc/meta.yml +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/meta.yml @@ -1,5 +1,6 @@ name: multiqc -description: Aggregate results from bioinformatics analyses across many samples into a single report +description: Aggregate results from bioinformatics analyses across many samples into + a single report keywords: - QC - bioinformatics tools @@ -12,40 +13,59 @@ tools: homepage: https://multiqc.info/ documentation: https://multiqc.info/docs/ licence: ["GPL-3.0-or-later"] + identifier: biotools:multiqc input: - - multiqc_files: - type: file - description: | - List of reports / files recognised by MultiQC, for example the html and zip output of FastQC - - multiqc_config: - type: file - description: Optional config yml for MultiQC - pattern: "*.{yml,yaml}" - - extra_multiqc_config: - type: file - description: Second optional config yml for MultiQC. Will override common sections in multiqc_config. - pattern: "*.{yml,yaml}" - - multiqc_logo: - type: file - description: Optional logo file for MultiQC - pattern: "*.{png}" + - - multiqc_files: + type: file + description: | + List of reports / files recognised by MultiQC, for example the html and zip output of FastQC + - - multiqc_config: + type: file + description: Optional config yml for MultiQC + pattern: "*.{yml,yaml}" + - - extra_multiqc_config: + type: file + description: Second optional config yml for MultiQC. Will override common sections + in multiqc_config. + pattern: "*.{yml,yaml}" + - - multiqc_logo: + type: file + description: Optional logo file for MultiQC + pattern: "*.{png}" + - - replace_names: + type: file + description: | + Optional two-column sample renaming file. First column a set of + patterns, second column a set of corresponding replacements. Passed via + MultiQC's `--replace-names` option. + pattern: "*.{tsv}" + - - sample_names: + type: file + description: | + Optional TSV file with headers, passed to the MultiQC --sample_names + argument. 
+ pattern: "*.{tsv}" output: - report: - type: file - description: MultiQC report file - pattern: "multiqc_report.html" + - "*multiqc_report.html": + type: file + description: MultiQC report file + pattern: "multiqc_report.html" - data: - type: directory - description: MultiQC data dir - pattern: "multiqc_data" + - "*_data": + type: directory + description: MultiQC data dir + pattern: "multiqc_data" - plots: - type: file - description: Plots created by MultiQC - pattern: "*_data" + - "*_plots": + type: file + description: Plots created by MultiQC + pattern: "*_data" - versions: - type: file - description: File containing software versions - pattern: "versions.yml" + - versions.yml: + type: file + description: File containing software versions + pattern: "versions.yml" authors: - "@abhi18av" - "@bunop" diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test b/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test index f1c4242ef..33316a7dd 100644 --- a/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test @@ -8,6 +8,8 @@ nextflow_process { tag "modules_nfcore" tag "multiqc" + config "./nextflow.config" + test("sarscov2 single-end [fastqc]") { when { @@ -17,6 +19,8 @@ nextflow_process { input[1] = [] input[2] = [] input[3] = [] + input[4] = [] + input[5] = [] """ } } @@ -41,6 +45,8 @@ nextflow_process { input[1] = Channel.of(file("https://github.com/nf-core/tools/raw/dev/nf_core/pipeline-template/assets/multiqc_config.yml", checkIfExists: true)) input[2] = [] input[3] = [] + input[4] = [] + input[5] = [] """ } } @@ -66,6 +72,8 @@ nextflow_process { input[1] = [] input[2] = [] input[3] = [] + input[4] = [] + input[5] = [] """ } } diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test.snap b/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test.snap index bfebd8029..b779e4692 100644 --- a/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test.snap +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test.snap @@ -2,14 +2,14 @@ "multiqc_versions_single": { "content": [ [ - "versions.yml:md5,21f35ee29416b9b3073c28733efe4b7d" + "versions.yml:md5,8c8724363a5efe0c6f43ab34faa57efd" ] ], "meta": { "nf-test": "0.8.4", - "nextflow": "23.10.1" + "nextflow": "24.04.2" }, - "timestamp": "2024-02-29T08:48:55.657331" + "timestamp": "2024-07-10T12:41:34.562023" }, "multiqc_stub": { "content": [ @@ -17,25 +17,25 @@ "multiqc_report.html", "multiqc_data", "multiqc_plots", - "versions.yml:md5,21f35ee29416b9b3073c28733efe4b7d" + "versions.yml:md5,8c8724363a5efe0c6f43ab34faa57efd" ] ], "meta": { "nf-test": "0.8.4", - "nextflow": "23.10.1" + "nextflow": "24.04.2" }, - "timestamp": "2024-02-29T08:49:49.071937" + "timestamp": "2024-07-10T11:27:11.933869532" }, "multiqc_versions_config": { "content": [ [ - "versions.yml:md5,21f35ee29416b9b3073c28733efe4b7d" + "versions.yml:md5,8c8724363a5efe0c6f43ab34faa57efd" ] ], "meta": { "nf-test": "0.8.4", - "nextflow": "23.10.1" + "nextflow": "24.04.2" }, - "timestamp": "2024-02-29T08:49:25.457567" + "timestamp": "2024-07-10T11:26:56.709849369" } -} \ No newline at end of file +} diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/tests/nextflow.config b/nf_core/pipeline-template/modules/nf-core/multiqc/tests/nextflow.config new file mode 100644 index 000000000..c537a6a3e --- /dev/null +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/tests/nextflow.config @@ 
-0,0 +1,5 @@ +process { + withName: 'MULTIQC' { + ext.prefix = null + } +} diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index 7648a5ebc..4c816a2a2 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -20,69 +20,62 @@ params { igenomes_ignore = false {%- endif %} + {%- if multiqc %} // MultiQC options multiqc_config = null multiqc_title = null multiqc_logo = null max_multiqc_email_size = '25.MB' - multiqc_methods_description = null + {% if citations %}multiqc_methods_description = null{% endif %} + {%- endif %} // Boilerplate options outdir = null - publish_dir_mode = 'copy' + {% if modules %}publish_dir_mode = 'copy'{% endif %} + {%- if email %} email = null email_on_fail = null plaintext_email = false - monochrome_logs = false - hook_url = null - help = false + {%- endif %} + {% if modules %}monochrome_logs = false{% endif %} + {% if slackreport or adaptivecard %}hook_url = null{% endif %} + {% if nf_schema %}help = false + help_full = false + show_hidden = false{% endif %} version = false - pipelines_testdata_base_path = 'https://raw.githubusercontent.com/nf-core/test-datasets/' + {% if test_config %}pipelines_testdata_base_path = 'https://raw.githubusercontent.com/nf-core/test-datasets/'{% endif %} + {%- if nf_core_configs %} // Config options config_profile_name = null config_profile_description = null - {%- if nf_core_configs %} custom_config_version = 'master' custom_config_base = "https://raw.githubusercontent.com/nf-core/configs/${params.custom_config_version}" config_profile_contact = null config_profile_url = null {%- endif %} - // Max resource options - // Defaults only, expecting to be overwritten - max_memory = '128.GB' - max_cpus = 16 - max_time = '240.h' - + {%- if nf_schema %} // Schema validation default options - validationFailUnrecognisedParams = false - validationLenientMode = false - validationSchemaIgnoreParams = 'genomes,igenomes_base' - validationShowHiddenParams = false - validate_params = true - + validate_params = true + {% endif %} } - +{% if modules %} // Load base.config by default for all pipelines includeConfig 'conf/base.config' +{%- else %} +process { + // TODO nf-core: Check the defaults for all processes + cpus = { 1 * task.attempt } + memory = { 6.GB * task.attempt } + time = { 4.h * task.attempt } -{% if nf_core_configs -%} -// Load nf-core custom profiles from different Institutions -try { - includeConfig "${params.custom_config_base}/nfcore_custom.config" -} catch (Exception e) { - System.err.println("WARNING: Could not load nf-core/config profiles: ${params.custom_config_base}/nfcore_custom.config") + errorStrategy = { task.exitStatus in ((130..145) + 104) ? 'retry' : 'finish' } + maxRetries = 1 + maxErrors = '-1' } - -// Load {{ name }} custom profiles from different institutions. 
-try { - includeConfig "${params.custom_config_base}/pipeline/{{ short_name }}.config" -} catch (Exception e) { - System.err.println("WARNING: Could not load nf-core/config/{{ short_name }} profiles: ${params.custom_config_base}/pipeline/{{ short_name }}.config") -} -{% endif -%} +{% endif %} profiles { debug { @@ -98,7 +91,7 @@ profiles { podman.enabled = false shifter.enabled = false charliecloud.enabled = false - conda.channels = ['conda-forge', 'bioconda', 'defaults'] + conda.channels = ['conda-forge', 'bioconda'] apptainer.enabled = false } mamba { @@ -178,35 +171,40 @@ profiles { wave.freeze = true wave.strategy = 'conda,container' } + {%- if gitpod %} gitpod { executor.name = 'local' executor.cpus = 4 executor.memory = 8.GB } + {%- endif %} + {%- if test_config %} test { includeConfig 'conf/test.config' } test_full { includeConfig 'conf/test_full.config' } + {%- endif %} } -// Set default registry for Apptainer, Docker, Podman and Singularity independent of -profile -// Will not be used unless Apptainer / Docker / Podman / Singularity are enabled -// Set to your registry if you have a mirror of containers -apptainer.registry = 'quay.io' -docker.registry = 'quay.io' -podman.registry = 'quay.io' -singularity.registry = 'quay.io' +{% if nf_core_configs -%} +// Load nf-core custom profiles from different Institutions +includeConfig !System.getenv('NXF_OFFLINE') && params.custom_config_base ? "${params.custom_config_base}/nfcore_custom.config" : "/dev/null" -// Nextflow plugins -plugins { - id 'nf-validation@1.1.3' // Validation of pipeline parameters and creation of an input channel from a sample sheet -} +// Load {{ name }} custom profiles from different institutions. +// TODO nf-core: Optionally, you can add a pipeline-specific nf-core config at https://github.com/nf-core/configs +// includeConfig !System.getenv('NXF_OFFLINE') && params.custom_config_base ? "${params.custom_config_base}/pipeline/{{ short_name }}.config" : "/dev/null" +{% endif -%} + +// Set default registry for Apptainer, Docker, Podman, Charliecloud and Singularity independent of -profile +// Will not be used unless Apptainer / Docker / Podman / Charliecloud / Singularity are enabled +// Set to your registry if you have a mirror of containers +apptainer.registry = 'quay.io' +docker.registry = 'quay.io' +podman.registry = 'quay.io' +singularity.registry = 'quay.io' +charliecloud.registry = 'quay.io' {% if igenomes -%} // Load igenomes.config if required -if (!params.igenomes_ignore) { - includeConfig 'conf/igenomes.config' -} else { - params.genomes = [:] -} +includeConfig !params.igenomes_ignore ? 'conf/igenomes.config' : 'conf/igenomes_ignored.config' {% endif -%} // Export these variables to prevent local Python/R libraries from conflicting with those in the container @@ -220,8 +218,15 @@ env { JULIA_DEPOT_PATH = "/usr/local/share/julia" } -// Capture exit codes from upstream processes when piping -process.shell = ['/bin/bash', '-euo', 'pipefail'] +// Set bash options +process.shell = """\ +bash + +set -e # Exit if a tool returns a non-zero status/exit code +set -u # Treat unset variables and parameters as an error +set -o pipefail # Returns the status of the last command to exit with a non-zero status or zero if all successfully execute +set -C # No clobber - prevent output redirection from overwriting files. +""" // Disable process selector warnings by default. Use debug profile to enable warnings. 
nextflow.enable.configProcessNamesValidation = false @@ -250,43 +255,50 @@ manifest { homePage = 'https://github.com/{{ name }}' description = """{{ description }}""" mainScript = 'main.nf' - nextflowVersion = '!>=23.04.0' + nextflowVersion = '!>=24.04.2' version = '{{ version }}' doi = '' } -// Load modules.config for DSL2 module specific options -includeConfig 'conf/modules.config' +{% if nf_schema -%} +// Nextflow plugins +plugins { + id 'nf-schema@2.1.1' // Validation of pipeline parameters and creation of an input channel from a sample sheet +} -// Function to ensure that resource requirements don't go beyond -// a maximum limit -def check_max(obj, type) { - if (type == 'memory') { - try { - if (obj.compareTo(params.max_memory as nextflow.util.MemoryUnit) == 1) - return params.max_memory as nextflow.util.MemoryUnit - else - return obj - } catch (all) { - println " ### ERROR ### Max memory '${params.max_memory}' is not valid! Using default value: $obj" - return obj - } - } else if (type == 'time') { - try { - if (obj.compareTo(params.max_time as nextflow.util.Duration) == 1) - return params.max_time as nextflow.util.Duration - else - return obj - } catch (all) { - println " ### ERROR ### Max time '${params.max_time}' is not valid! Using default value: $obj" - return obj - } - } else if (type == 'cpus') { - try { - return Math.min( obj, params.max_cpus as int ) - } catch (all) { - println " ### ERROR ### Max cpus '${params.max_cpus}' is not valid! Using default value: $obj" - return obj - } - } +validation { + defaultIgnoreParams = ["genomes"] + help { + enabled = true + command = "nextflow run $manifest.name -profile --input samplesheet.csv --outdir " + fullParameter = "help_full" + showHiddenParameter = "show_hidden" + {%- if is_nfcore %} + beforeText = """ +-\033[2m----------------------------------------------------\033[0m- + \033[0;32m,--.\033[0;30m/\033[0;32m,-.\033[0m +\033[0;34m ___ __ __ __ ___ \033[0;32m/,-._.--~\'\033[0m +\033[0;34m |\\ | |__ __ / ` / \\ |__) |__ \033[0;33m} {\033[0m +\033[0;34m | \\| | \\__, \\__/ | \\ |___ \033[0;32m\\`-._,-`-,\033[0m + \033[0;32m`._,._,\'\033[0m +\033[0;35m ${manifest.name} ${manifest.version}\033[0m +-\033[2m----------------------------------------------------\033[0m- +""" + afterText = """${manifest.doi ? "* The pipeline\n" : ""}${manifest.doi.tokenize(",").collect { " https://doi.org/${it.trim().replace('https://doi.org/','')}"}.join("\n")}${manifest.doi ? 
"\n" : ""} +* The nf-core framework + https://doi.org/10.1038/s41587-020-0439-x + +* Software dependencies + https://github.com/${manifest.name}/blob/master/CITATIONS.md +"""{% endif %} + }{% if is_nfcore %} + summary { + beforeText = validation.help.beforeText + afterText = validation.help.afterText + }{% endif %} } +{% endif -%} +{%- if modules %} +// Load modules.config for DSL2 module specific options +includeConfig 'conf/modules.config' +{% endif %} diff --git a/nf_core/pipeline-template/nextflow_schema.json b/nf_core/pipeline-template/nextflow_schema.json index ae7c0b715..4136a0b49 100644 --- a/nf_core/pipeline-template/nextflow_schema.json +++ b/nf_core/pipeline-template/nextflow_schema.json @@ -1,10 +1,10 @@ { - "$schema": "http://json-schema.org/draft-07/schema", + "$schema": "https://json-schema.org/draft/2020-12/schema", "$id": "https://raw.githubusercontent.com/{{ name }}/master/nextflow_schema.json", "title": "{{ name }} pipeline parameters", "description": "{{ description }}", "type": "object", - "definitions": { + "$defs": { "input_output_options": { "title": "Input/output options", "type": "object", @@ -20,7 +20,7 @@ "mimetype": "text/csv", "pattern": "^\\S+\\.csv$", "description": "Path to comma-separated file containing information about the samples in the experiment.", - "help_text": "You will need to create a design file with information about the samples in your experiment before running the pipeline. Use this parameter to specify its location. It has to be a comma-separated file with 3 columns, and a header row.{% if branded %} See [usage docs](https://nf-co.re/{{ short_name }}/usage#samplesheet-input).{% endif %}", + "help_text": "You will need to create a design file with information about the samples in your experiment before running the pipeline. Use this parameter to specify its location. It has to be a comma-separated file with 3 columns, and a header row.{% if is_nfcore %} See [usage docs](https://nf-co.re/{{ short_name }}/usage#samplesheet-input).{% endif %}", "fa_icon": "fas fa-file-csv" }, "outdir": { @@ -28,19 +28,19 @@ "format": "directory-path", "description": "The output directory where the results will be saved. You have to use absolute paths to storage on Cloud infrastructure.", "fa_icon": "fas fa-folder-open" - }, + }{% if email %}, "email": { "type": "string", "description": "Email address for completion summary.", "fa_icon": "fas fa-envelope", "help_text": "Set this parameter to your e-mail address to get a summary e-mail with details of the run sent to you when the workflow exits. If set in your user config file (`~/.nextflow/config`) then you don't need to specify this on the command line for every run.", "pattern": "^([a-zA-Z0-9_\\-\\.]+)@([a-zA-Z0-9_\\-\\.]+)\\.([a-zA-Z]{2,5})$" - }, + }{% endif %}{% if multiqc %}, "multiqc_title": { "type": "string", "description": "MultiQC report title. Printed as page header, used for filename if not otherwise specified.", "fa_icon": "fas fa-file-signature" - } + }{% endif %} } }, {%- if igenomes %} @@ -56,6 +56,7 @@ "fa_icon": "fas fa-book", "help_text": "If using a reference genome configured in the pipeline using iGenomes, use this parameter to give the ID for the reference. This is then used to build the full paths for all required reference genome files e.g. `--genome GRCh38`. \n\nSee the [nf-core website docs](https://nf-co.re/usage/reference_genomes) for more details." 
}, + {%- if modules %} "fasta": { "type": "string", "format": "file-path", @@ -66,16 +67,26 @@ "help_text": "This parameter is *mandatory* if `--genome` is not specified. If you don't have a BWA index available this will be generated for you automatically. Combine with `--save_reference` to save BWA index for future runs.", "fa_icon": "far fa-file-code" }, + {%- endif %} "igenomes_ignore": { "type": "boolean", "description": "Do not load the iGenomes reference config.", "fa_icon": "fas fa-ban", "hidden": true, "help_text": "Do not load `igenomes.config` when running the pipeline. You may choose this option if you observe clashes between custom parameters and those supplied in `igenomes.config`." + }, + "igenomes_base": { + "type": "string", + "format": "directory-path", + "description": "The base path to the igenomes reference files", + "fa_icon": "fas fa-ban", + "hidden": true, + "default": "s3://ngi-igenomes/igenomes/" } } }, {%- endif %} + {%- if nf_core_configs %} "institutional_config_options": { "title": "Institutional config options", "type": "object", @@ -124,41 +135,7 @@ } } }, - "max_job_request_options": { - "title": "Max job request options", - "type": "object", - "fa_icon": "fab fa-acquisitions-incorporated", - "description": "Set the top limit for requested resources for any single job.", - "help_text": "If you are running on a smaller system, a pipeline step requesting more resources than are available may cause the Nextflow to stop the run with an error. These options allow you to cap the maximum resources requested by any single job so that the pipeline will run on your system.\n\nNote that you can not _increase_ the resources requested by any job using these options. For that you will need your own configuration file. See [the nf-core website](https://nf-co.re/usage/configuration) for details.", - "properties": { - "max_cpus": { - "type": "integer", - "description": "Maximum number of CPUs that can be requested for any single job.", - "default": 16, - "fa_icon": "fas fa-microchip", - "hidden": true, - "help_text": "Use to set an upper-limit for the CPU requirement for each process. Should be an integer e.g. `--max_cpus 1`" - }, - "max_memory": { - "type": "string", - "description": "Maximum amount of memory that can be requested for any single job.", - "default": "128.GB", - "fa_icon": "fas fa-memory", - "pattern": "^\\d+(\\.\\d+)?\\.?\\s*(K|M|G|T)?B$", - "hidden": true, - "help_text": "Use to set an upper-limit for the memory requirement for each process. Should be a string in the format integer-unit e.g. `--max_memory '8.GB'`" - }, - "max_time": { - "type": "string", - "description": "Maximum amount of time that can be requested for any single job.", - "default": "240.h", - "fa_icon": "far fa-clock", - "pattern": "^(\\d+\\.?\\s*(s|m|h|d|day)\\s*)+$", - "hidden": true, - "help_text": "Use to set an upper-limit for the time requirement for each process. Should be a string in the format integer-unit e.g. 
`--max_time '2.h'`" - } - } - }, + {%- endif %} "generic_options": { "title": "Generic options", "type": "object", @@ -166,18 +143,13 @@ "description": "Less common options for the pipeline, typically set in a config file.", "help_text": "These options are common to all nf-core pipelines and allow you to customise some of the core preferences for how the pipeline runs.\n\nTypically these options would be set in a Nextflow config file loaded for all pipeline runs, such as `~/.nextflow/config`.", "properties": { - "help": { - "type": "boolean", - "description": "Display help text.", - "fa_icon": "fas fa-question-circle", - "hidden": true - }, "version": { "type": "boolean", "description": "Display version and exit.", "fa_icon": "fas fa-question-circle", "hidden": true }, + {%- if modules %} "publish_dir_mode": { "type": "string", "default": "copy", @@ -186,7 +158,7 @@ "fa_icon": "fas fa-copy", "enum": ["symlink", "rellink", "link", "copy", "copyNoFollow", "move"], "hidden": true - }, + },{% endif %}{% if email %} "email_on_fail": { "type": "string", "description": "Email address for completion summary, only when pipeline fails.", @@ -200,7 +172,8 @@ "description": "Send plain-text email instead of HTML.", "fa_icon": "fas fa-remove-format", "hidden": true - }, + },{% endif %} + {%- if multiqc %} "max_multiqc_email_size": { "type": "string", "description": "File size limit when attaching MultiQC reports to summary emails.", @@ -208,20 +181,23 @@ "default": "25.MB", "fa_icon": "fas fa-file-upload", "hidden": true - }, + },{% endif %} + {%- if modules %} "monochrome_logs": { "type": "boolean", "description": "Do not use coloured log outputs.", "fa_icon": "fas fa-palette", "hidden": true - }, + },{% endif %} + {%- if slackreport or adaptivecard %} "hook_url": { "type": "string", "description": "Incoming hook URL for messaging service", "fa_icon": "fas fa-people-group", "help_text": "Incoming hook URL for messaging service. Currently, MS Teams and Slack are supported.", "hidden": true - }, + },{% endif %} + {%- if multiqc %} "multiqc_config": { "type": "string", "format": "file-path", @@ -234,65 +210,41 @@ "description": "Custom logo file to supply to MultiQC. File name must also be set in the MultiQC config file", "fa_icon": "fas fa-image", "hidden": true - }, + },{% if citations %} "multiqc_methods_description": { "type": "string", "description": "Custom MultiQC yaml file containing HTML including a methods description.", "fa_icon": "fas fa-cog" - }, + },{% endif %}{% endif %} "validate_params": { "type": "boolean", "description": "Boolean whether to validate parameters against the schema at runtime", "default": true, "fa_icon": "fas fa-check-square", "hidden": true - }, - "validationShowHiddenParams": { - "type": "boolean", - "fa_icon": "far fa-eye-slash", - "description": "Show all params when using `--help`", - "hidden": true, - "help_text": "By default, parameters set as _hidden_ in the schema are not shown on the command line when a user runs with `--help`. Specifying this option will tell the pipeline to show all parameters." - }, - "validationFailUnrecognisedParams": { - "type": "boolean", - "fa_icon": "far fa-check-circle", - "description": "Validation of parameters fails when an unrecognised parameter is found.", - "hidden": true, - "help_text": "By default, when an unrecognised parameter is found, it returns a warinig." 
- }, - "validationLenientMode": { - "type": "boolean", - "fa_icon": "far fa-check-circle", - "description": "Validation of parameters in lenient more.", - "hidden": true, - "help_text": "Allows string values that are parseable as numbers or booleans. For further information see [JSONSchema docs](https://github.com/everit-org/json-schema#lenient-mode)." - }, + }{% if test_config %}, "pipelines_testdata_base_path": { "type": "string", "fa_icon": "far fa-check-circle", "description": "Base URL or local path to location of pipeline test dataset files", "default": "https://raw.githubusercontent.com/nf-core/test-datasets/", "hidden": true - } + }{% endif %} } } }, "allOf": [ { - "$ref": "#/definitions/input_output_options" + "$ref": "#/$defs/input_output_options" }, {% if igenomes %}{ - "$ref": "#/definitions/reference_genome_options" + "$ref": "#/$defs/reference_genome_options" + },{% endif %} + {% if nf_core_configs %}{ + "$ref": "#/$defs/institutional_config_options" },{% endif %} { - "$ref": "#/definitions/institutional_config_options" - }, - { - "$ref": "#/definitions/max_job_request_options" - }, - { - "$ref": "#/definitions/generic_options" + "$ref": "#/$defs/generic_options" } ] } diff --git a/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf index a4bfb9f8b..9a58e489e 100644 --- a/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf +++ b/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf @@ -8,17 +8,18 @@ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ -include { UTILS_NFVALIDATION_PLUGIN } from '../../nf-core/utils_nfvalidation_plugin' -include { paramsSummaryMap } from 'plugin/nf-validation' -include { fromSamplesheet } from 'plugin/nf-validation' -include { UTILS_NEXTFLOW_PIPELINE } from '../../nf-core/utils_nextflow_pipeline' +{% if nf_schema %}include { UTILS_NFSCHEMA_PLUGIN } from '../../nf-core/utils_nfschema_plugin' +include { paramsSummaryMap } from 'plugin/nf-schema' +include { samplesheetToList } from 'plugin/nf-schema'{% endif %} +{%- if email %} include { completionEmail } from '../../nf-core/utils_nfcore_pipeline' +{%- endif %} include { completionSummary } from '../../nf-core/utils_nfcore_pipeline' -include { dashedLine } from '../../nf-core/utils_nfcore_pipeline' -include { nfCoreLogo } from '../../nf-core/utils_nfcore_pipeline' +{%- if adaptivecard or slackreport %} include { imNotification } from '../../nf-core/utils_nfcore_pipeline' +{%- endif %} include { UTILS_NFCORE_PIPELINE } from '../../nf-core/utils_nfcore_pipeline' -include { workflowCitation } from '../../nf-core/utils_nfcore_pipeline' +include { UTILS_NEXTFLOW_PIPELINE } from '../../nf-core/utils_nextflow_pipeline' /* ======================================================================================== @@ -30,7 +31,6 @@ workflow PIPELINE_INITIALISATION { take: version // boolean: Display version and exit - help // boolean: Display help text validate_params // boolean: Boolean whether to validate parameters against the schema at runtime monochrome_logs // boolean: Do not use coloured log outputs nextflow_cli_args // array: List of positional nextflow CLI args @@ -51,20 +51,16 @@ workflow PIPELINE_INITIALISATION { workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1 ) + {% if nf_schema %} // // Validate parameters and generate parameter summary to stdout // - 
pre_help_text = nfCoreLogo(monochrome_logs) - post_help_text = '\n' + workflowCitation() + '\n' + dashedLine(monochrome_logs) - def String workflow_command = "nextflow run ${workflow.manifest.name} -profile --input samplesheet.csv --outdir " - UTILS_NFVALIDATION_PLUGIN ( - help, - workflow_command, - pre_help_text, - post_help_text, + UTILS_NFSCHEMA_PLUGIN ( + workflow, validate_params, - "nextflow_schema.json" + null ) + {% endif %} // // Check config provided to the pipeline @@ -83,8 +79,14 @@ workflow PIPELINE_INITIALISATION { // // Create channel from input file provided through params.input // - Channel - .fromSamplesheet("input") + + Channel{% if nf_schema %} + .fromList(samplesheetToList(params.input, "${projectDir}/assets/schema_input.json")){% else %} + .fromPath(params.input) + .splitCsv(header: true, strip: true) + .map { row -> + [[id:row.sample], row.fastq_1, row.fastq_2] + }{% endif %} .map { meta, fastq_1, fastq_2 -> if (!fastq_2) { @@ -94,8 +96,8 @@ workflow PIPELINE_INITIALISATION { } } .groupTuple() - .map { - validateInputSamplesheet(it) + .map { samplesheet -> + validateInputSamplesheet(samplesheet) } .map { meta, fastqs -> @@ -117,31 +119,48 @@ workflow PIPELINE_INITIALISATION { workflow PIPELINE_COMPLETION { take: + {%- if email %} email // string: email address email_on_fail // string: email address sent on pipeline failure plaintext_email // boolean: Send plain-text email instead of HTML + {% endif %} outdir // path: Path to output directory where results will be published monochrome_logs // boolean: Disable ANSI colour codes in log output - hook_url // string: hook URL for notifications - multiqc_report // string: Path to MultiQC report + {% if adaptivecard or slackreport %}hook_url // string: hook URL for notifications{% endif %} + {% if multiqc %}multiqc_report // string: Path to MultiQC report{% endif %} main: - + {%- if nf_schema %} summary_params = paramsSummaryMap(workflow, parameters_schema: "nextflow_schema.json") + {%- else %} + summary_params = [:] + {%- endif %} // // Completion email and summary // workflow.onComplete { + {%- if email %} if (email || email_on_fail) { - completionEmail(summary_params, email, email_on_fail, plaintext_email, outdir, monochrome_logs, multiqc_report.toList()) + completionEmail( + summary_params, + email, + email_on_fail, + plaintext_email, + outdir, + monochrome_logs, + {% if multiqc %}multiqc_report.toList(){% else %}[]{% endif %} + ) } + {%- endif %} completionSummary(monochrome_logs) + {%- if adaptivecard or slackreport %} if (hook_url) { imNotification(summary_params, hook_url) } + {%- endif %} } workflow.onError { @@ -171,7 +190,7 @@ def validateInputSamplesheet(input) { def (metas, fastqs) = input[1..2] // Check that multiple runs of the same sample are of the same datatype i.e. single-end / paired-end - def endedness_ok = metas.collect{ it.single_end }.unique().size == 1 + def endedness_ok = metas.collect{ meta -> meta.single_end }.unique().size == 1 if (!endedness_ok) { error("Please check input samplesheet -> Multiple runs of a sample must be of the same datatype i.e. single-end or paired-end: ${metas[0].id}") } @@ -206,7 +225,7 @@ def genomeExistsError() { } } {%- endif %} - +{%- if citations or multiqc %} // // Generate methods description for MultiQC // @@ -216,8 +235,8 @@ def toolCitationText() { // Uncomment function in methodsDescriptionText to render in MultiQC report def citation_text = [ "Tools used in the workflow included:", - "FastQC (Andrews 2010),", - "MultiQC (Ewels et al. 
2016)", + {% if fastqc %}"FastQC (Andrews 2010),",{% endif %} + {% if multiqc %}"MultiQC (Ewels et al. 2016)",{% endif %} "." ].join(' ').trim() @@ -229,8 +248,8 @@ def toolBibliographyText() { // Can use ternary operators to dynamically construct based conditions, e.g. params["run_xyz"] ? "
  • Author (2023) Pub name, Journal, DOI
  • " : "", // Uncomment function in methodsDescriptionText to render in MultiQC report def reference_text = [ - "
  • Andrews S, (2010) FastQC, URL: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/).
  • ", - "
  • Ewels, P., Magnusson, M., Lundin, S., & Käller, M. (2016). MultiQC: summarize analysis results for multiple tools and samples in a single report. Bioinformatics , 32(19), 3047–3048. doi: /10.1093/bioinformatics/btw354
  • " + {% if fastqc %}"
  • Andrews S, (2010) FastQC, URL: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/).
  • ",{% endif %} + {% if multiqc %}"
  • Ewels, P., Magnusson, M., Lundin, S., & Käller, M. (2016). MultiQC: summarize analysis results for multiple tools and samples in a single report. Bioinformatics , 32(19), 3047–3048. doi: /10.1093/bioinformatics/btw354
  • "{% endif %} ].join(' ').trim() return reference_text @@ -248,8 +267,10 @@ def methodsDescriptionText(mqc_methods_yaml) { // Removing `https://doi.org/` to handle pipelines using DOIs vs DOI resolvers // Removing ` ` since the manifest.doi is a string and not a proper list def temp_doi_ref = "" - String[] manifest_doi = meta.manifest_map.doi.tokenize(",") - for (String doi_ref: manifest_doi) temp_doi_ref += "(doi: ${doi_ref.replace("https://doi.org/", "").replace(" ", "")}), " + def manifest_doi = meta.manifest_map.doi.tokenize(",") + manifest_doi.each { doi_ref -> + temp_doi_ref += "(doi: ${doi_ref.replace("https://doi.org/", "").replace(" ", "")}), " + } meta["doi_text"] = temp_doi_ref.substring(0, temp_doi_ref.length() - 2) } else meta["doi_text"] = "" meta["nodoi_text"] = meta.manifest_map.doi ? "" : "
  • If available, make sure to update the text to include the Zenodo DOI of version of the pipeline used.
  • " @@ -270,3 +291,4 @@ def methodsDescriptionText(mqc_methods_yaml) { return description_html.toString() } +{% endif %} diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf index ac31f28f6..28e32b200 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf @@ -2,10 +2,6 @@ // Subworkflow with functionality that may be useful for any Nextflow pipeline // -import org.yaml.snakeyaml.Yaml -import groovy.json.JsonOutput -import nextflow.extension.FilesEx - /* ======================================================================================== SUBWORKFLOW DEFINITION @@ -58,7 +54,7 @@ workflow UTILS_NEXTFLOW_PIPELINE { // Generate version string // def getWorkflowVersion() { - String version_string = "" + def version_string = "" as String if (workflow.manifest.version) { def prefix_v = workflow.manifest.version[0] != 'v' ? 'v' : '' version_string += "${prefix_v}${workflow.manifest.version}" @@ -79,10 +75,10 @@ def dumpParametersToJSON(outdir) { def timestamp = new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss') def filename = "params_${timestamp}.json" def temp_pf = new File(workflow.launchDir.toString(), ".${filename}") - def jsonStr = JsonOutput.toJson(params) - temp_pf.text = JsonOutput.prettyPrint(jsonStr) + def jsonStr = groovy.json.JsonOutput.toJson(params) + temp_pf.text = groovy.json.JsonOutput.prettyPrint(jsonStr) - FilesEx.copyTo(temp_pf.toPath(), "${outdir}/pipeline_info/params_${timestamp}.json") + nextflow.extension.FilesEx.copyTo(temp_pf.toPath(), "${outdir}/pipeline_info/params_${timestamp}.json") temp_pf.delete() } @@ -90,7 +86,7 @@ def dumpParametersToJSON(outdir) { // When running with -profile conda, warn if channels have not been set-up appropriately // def checkCondaChannels() { - Yaml parser = new Yaml() + def parser = new org.yaml.snakeyaml.Yaml() def channels = [] try { def config = parser.load("conda config --show channels".execute().text) @@ -102,14 +98,16 @@ def checkCondaChannels() { // Check that all channels are present // This channel list is ordered by required channel priority. 
- def required_channels_in_order = ['conda-forge', 'bioconda', 'defaults'] + def required_channels_in_order = ['conda-forge', 'bioconda'] def channels_missing = ((required_channels_in_order as Set) - (channels as Set)) as Boolean // Check that they are in the right order def channel_priority_violation = false - def n = required_channels_in_order.size() - for (int i = 0; i < n - 1; i++) { - channel_priority_violation |= !(channels.indexOf(required_channels_in_order[i]) < channels.indexOf(required_channels_in_order[i+1])) + + required_channels_in_order.eachWithIndex { channel, index -> + if (index < required_channels_in_order.size() - 1) { + channel_priority_violation |= !(channels.indexOf(channel) < channels.indexOf(required_channels_in_order[index+1])) + } } if (channels_missing | channel_priority_violation) { diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config index d0a926bf6..a09572e5b 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config @@ -3,7 +3,7 @@ manifest { author = """nf-core""" homePage = 'https://127.0.0.1' description = """Dummy pipeline""" - nextflowVersion = '!>=23.04.0' + nextflowVersion = '!>=23.04.0' version = '9.9.9' doi = 'https://doi.org/10.5281/zenodo.5070524' } diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf index 14558c392..cbd8495bb 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf @@ -2,9 +2,6 @@ // Subworkflow with utility functions specific to the nf-core pipeline template // -import org.yaml.snakeyaml.Yaml -import nextflow.extension.FilesEx - /* ======================================================================================== SUBWORKFLOW DEFINITION @@ -34,7 +31,7 @@ workflow UTILS_NFCORE_PIPELINE { // Warn if a -profile or Nextflow config has not been provided to run the pipeline // def checkConfigProvided() { - valid_config = true + def valid_config = true as Boolean if (workflow.profile == 'standard' && workflow.configFiles.size() <= 1) { log.warn "[$workflow.manifest.name] You are attempting to run the pipeline without any custom configuration!\n\n" + "This will be dependent on your local compute environment but can be achieved via one or more of the following:\n" + @@ -66,11 +63,13 @@ def checkProfileProvided(nextflow_cli_args) { // def workflowCitation() { def temp_doi_ref = "" - String[] manifest_doi = workflow.manifest.doi.tokenize(",") + def manifest_doi = workflow.manifest.doi.tokenize(",") // Using a loop to handle multiple DOIs // Removing `https://doi.org/` to handle pipelines using DOIs vs DOI resolvers // Removing ` ` since the manifest.doi is a string and not a proper list - for (String doi_ref: manifest_doi) temp_doi_ref += " https://doi.org/${doi_ref.replace('https://doi.org/', '').replace(' ', '')}\n" + manifest_doi.each { doi_ref -> + temp_doi_ref += " https://doi.org/${doi_ref.replace('https://doi.org/', '').replace(' ', '')}\n" + } return "If you use ${workflow.manifest.name} for your analysis please cite:\n\n" + "* The pipeline\n" + temp_doi_ref + "\n" + @@ -84,7 +83,7 @@ def workflowCitation() { 
// Generate workflow version string // def getWorkflowVersion() { - String version_string = "" + def version_string = "" as String if (workflow.manifest.version) { def prefix_v = workflow.manifest.version[0] != 'v' ? 'v' : '' version_string += "${prefix_v}${workflow.manifest.version}" @@ -102,8 +101,8 @@ def getWorkflowVersion() { // Get software versions for pipeline // def processVersionsFromYAML(yaml_file) { - Yaml yaml = new Yaml() - versions = yaml.load(yaml_file).collectEntries { k, v -> [ k.tokenize(':')[-1], v ] } + def yaml = new org.yaml.snakeyaml.Yaml() + def versions = yaml.load(yaml_file).collectEntries { k, v -> [ k.tokenize(':')[-1], v ] } return yaml.dumpAsMap(versions).trim() } @@ -124,7 +123,7 @@ def workflowVersionToYAML() { def softwareVersionsToYAML(ch_versions) { return ch_versions .unique() - .map { processVersionsFromYAML(it) } + .map { version -> processVersionsFromYAML(version) } .unique() .mix(Channel.of(workflowVersionToYAML())) } @@ -134,19 +133,19 @@ def softwareVersionsToYAML(ch_versions) { // def paramsSummaryMultiqc(summary_params) { def summary_section = '' - for (group in summary_params.keySet()) { + summary_params.keySet().each { group -> def group_params = summary_params.get(group) // This gets the parameters of that particular group if (group_params) { summary_section += "
    <p style=\"font-size:110%\"><b>$group</b></p>\n" summary_section += "    <dl class=\"dl-horizontal\">\n" - for (param in group_params.keySet()) { + group_params.keySet().sort().each { param -> summary_section += "        <dt>$param</dt><dd><samp>${group_params.get(param) ?: '<span style=\"color:#999999;\">N/A</a>'}</samp></dd>\n" } summary_section += "    </dl>\n" } } - String yaml_file_text = "id: '${workflow.manifest.name.replace('/','-')}-summary'\n" + def yaml_file_text = "id: '${workflow.manifest.name.replace('/','-')}-summary'\n" as String yaml_file_text += "description: ' - this information is collected when the pipeline is started.'\n" yaml_file_text += "section_name: '${workflow.manifest.name} Workflow Summary'\n" yaml_file_text += "section_href: 'https://github.com/${workflow.manifest.name}'\n" @@ -161,7 +160,7 @@ def paramsSummaryMultiqc(summary_params) { // nf-core logo // def nfCoreLogo(monochrome_logs=true) { - Map colors = logColours(monochrome_logs) + def colors = logColours(monochrome_logs) as Map String.format( """\n ${dashedLine(monochrome_logs)} @@ -180,7 +179,7 @@ // Return dashed line // def dashedLine(monochrome_logs=true) { - Map colors = logColours(monochrome_logs) + def colors = logColours(monochrome_logs) as Map return "-${colors.dim}----------------------------------------------------${colors.reset}-" } @@ -188,7 +187,7 @@ // ANSII colours used for terminal logging // def logColours(monochrome_logs=true) { - Map colorcodes = [:] + def colorcodes = [:] as Map // Reset / Meta colorcodes['reset'] = monochrome_logs ? '' : "\033[0m" @@ -287,7 +286,7 @@ def completionEmail(summary_params, email, email_on_fail, plaintext_email, outdi } def summary = [:] - for (group in summary_params.keySet()) { + summary_params.keySet().sort().each { group -> summary << summary_params[group] } @@ -344,10 +343,10 @@ def completionEmail(summary_params, email, email_on_fail, plaintext_email, outdi def sendmail_html = sendmail_template.toString() // Send the HTML e-mail - Map colors = logColours(monochrome_logs) + def colors = logColours(monochrome_logs) as Map if (email_address) { try { - if (plaintext_email) { throw GroovyException('Send plaintext e-mail, not HTML') } + if (plaintext_email) { throw new org.codehaus.groovy.GroovyException('Send plaintext e-mail, not HTML') } // Try to send HTML e-mail using sendmail def sendmail_tf = new File(workflow.launchDir.toString(), ".sendmail_tmp.html") sendmail_tf.withWriter { w -> w << sendmail_html } @@ -364,13 +363,13 @@ def completionEmail(summary_params, email, email_on_fail, plaintext_email, outdi // Write summary e-mail HTML to a file def output_hf = new File(workflow.launchDir.toString(), ".pipeline_report.html") output_hf.withWriter { w -> w << email_html } - FilesEx.copyTo(output_hf.toPath(), "${outdir}/pipeline_info/pipeline_report.html"); + nextflow.extension.FilesEx.copyTo(output_hf.toPath(), "${outdir}/pipeline_info/pipeline_report.html"); output_hf.delete() // Write summary e-mail TXT to a file def output_tf = new File(workflow.launchDir.toString(), ".pipeline_report.txt") output_tf.withWriter { w -> w << email_txt } - FilesEx.copyTo(output_tf.toPath(), "${outdir}/pipeline_info/pipeline_report.txt"); + nextflow.extension.FilesEx.copyTo(output_tf.toPath(), "${outdir}/pipeline_info/pipeline_report.txt"); output_tf.delete() } @@ -378,7 +377,7 @@ // Print pipeline summary on completion // def completionSummary(monochrome_logs=true) { - Map colors = logColours(monochrome_logs) + def colors = logColours(monochrome_logs) as Map if (workflow.success) { if (workflow.stats.ignoredCount == 0) { log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Pipeline completed successfully${colors.reset}-" @@ -395,7 +394,7 @@ def 
completionSummary(monochrome_logs=true) { // def imNotification(summary_params, hook_url) { def summary = [:] - for (group in summary_params.keySet()) { + summary_params.keySet().sort().each { group -> summary << summary_params[group] } diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/main.nf b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/main.nf new file mode 100644 index 000000000..4994303ea --- /dev/null +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/main.nf @@ -0,0 +1,46 @@ +// +// Subworkflow that uses the nf-schema plugin to validate parameters and render the parameter summary +// + +include { paramsSummaryLog } from 'plugin/nf-schema' +include { validateParameters } from 'plugin/nf-schema' + +workflow UTILS_NFSCHEMA_PLUGIN { + + take: + input_workflow // workflow: the workflow object used by nf-schema to get metadata from the workflow + validate_params // boolean: validate the parameters + parameters_schema // string: path to the parameters JSON schema. + // this has to be the same as the schema given to `validation.parametersSchema` + // when this input is empty it will automatically use the configured schema or + // "${projectDir}/nextflow_schema.json" as default. This input should not be empty + // for meta pipelines + + main: + + // + // Print parameter summary to stdout. This will display the parameters + // that differ from the default given in the JSON schema + // + if(parameters_schema) { + log.info paramsSummaryLog(input_workflow, parameters_schema:parameters_schema) + } else { + log.info paramsSummaryLog(input_workflow) + } + + // + // Validate the parameters using nextflow_schema.json or the schema + // given via the validation.parametersSchema configuration option + // + if(validate_params) { + if(parameters_schema) { + validateParameters(parameters_schema:parameters_schema) + } else { + validateParameters() + } + } + + emit: + dummy_emit = true +} + diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/meta.yml b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/meta.yml new file mode 100644 index 000000000..f7d9f0288 --- /dev/null +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/meta.yml @@ -0,0 +1,35 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json +name: "utils_nfschema_plugin" +description: Run nf-schema to validate parameters and create a summary of changed parameters +keywords: + - validation + - JSON schema + - plugin + - parameters + - summary +components: [] +input: + - input_workflow: + type: object + description: | + The workflow object of the used pipeline. + This object contains meta data used to create the params summary log + - validate_params: + type: boolean + description: Validate the parameters and error if invalid. + - parameters_schema: + type: string + description: | + Path to the parameters JSON schema. + This has to be the same as the schema given to the `validation.parametersSchema` config + option. When this input is empty it will automatically use the configured schema or + "${projectDir}/nextflow_schema.json" as default. The schema should not be given in this way + for meta pipelines. 
+output: + - dummy_emit: + type: boolean + description: Dummy emit to make nf-core subworkflows lint happy +authors: + - "@nvnieuwk" +maintainers: + - "@nvnieuwk" diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test new file mode 100644 index 000000000..842dc432a --- /dev/null +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test @@ -0,0 +1,117 @@ +nextflow_workflow { + + name "Test Subworkflow UTILS_NFSCHEMA_PLUGIN" + script "../main.nf" + workflow "UTILS_NFSCHEMA_PLUGIN" + + tag "subworkflows" + tag "subworkflows_nfcore" + tag "subworkflows/utils_nfschema_plugin" + tag "plugin/nf-schema" + + config "./nextflow.config" + + test("Should run nothing") { + + when { + + params { + test_data = '' + } + + workflow { + """ + validate_params = false + input[0] = workflow + input[1] = validate_params + input[2] = "" + """ + } + } + + then { + assertAll( + { assert workflow.success } + ) + } + } + + test("Should validate params") { + + when { + + params { + test_data = '' + outdir = 1 + } + + workflow { + """ + validate_params = true + input[0] = workflow + input[1] = validate_params + input[2] = "" + """ + } + } + + then { + assertAll( + { assert workflow.failed }, + { assert workflow.stdout.any { it.contains('ERROR ~ Validation of pipeline parameters failed!') } } + ) + } + } + + test("Should run nothing - custom schema") { + + when { + + params { + test_data = '' + } + + workflow { + """ + validate_params = false + input[0] = workflow + input[1] = validate_params + input[2] = "${projectDir}/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow_schema.json" + """ + } + } + + then { + assertAll( + { assert workflow.success } + ) + } + } + + test("Should validate params - custom schema") { + + when { + + params { + test_data = '' + outdir = 1 + } + + workflow { + """ + validate_params = true + input[0] = workflow + input[1] = validate_params + input[2] = "${projectDir}/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow_schema.json" + """ + } + } + + then { + assertAll( + { assert workflow.failed }, + { assert workflow.stdout.any { it.contains('ERROR ~ Validation of pipeline parameters failed!') } } + ) + } + } +} diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow.config b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow.config new file mode 100644 index 000000000..0907ac58f --- /dev/null +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow.config @@ -0,0 +1,8 @@ +plugins { + id "nf-schema@2.1.0" +} + +validation { + parametersSchema = "${projectDir}/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow_schema.json" + monochromeLogs = true +} \ No newline at end of file diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/nextflow_schema.json b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow_schema.json similarity index 95% rename from nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/nextflow_schema.json rename to nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow_schema.json index 7626c1c93..331e0d2f4 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/nextflow_schema.json +++ 
b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow_schema.json @@ -1,10 +1,10 @@ { - "$schema": "http://json-schema.org/draft-07/schema", + "$schema": "https://json-schema.org/draft/2020-12/schema", "$id": "https://raw.githubusercontent.com/./master/nextflow_schema.json", "title": ". pipeline parameters", "description": "", "type": "object", - "definitions": { + "$defs": { "input_output_options": { "title": "Input/output options", "type": "object", @@ -87,10 +87,10 @@ }, "allOf": [ { - "$ref": "#/definitions/input_output_options" + "$ref": "#/$defs/input_output_options" }, { - "$ref": "#/definitions/generic_options" + "$ref": "#/$defs/generic_options" } ] } diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/main.nf b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/main.nf deleted file mode 100644 index 2585b65d1..000000000 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/main.nf +++ /dev/null @@ -1,62 +0,0 @@ -// -// Subworkflow that uses the nf-validation plugin to render help text and parameter summary -// - -/* -======================================================================================== - IMPORT NF-VALIDATION PLUGIN -======================================================================================== -*/ - -include { paramsHelp } from 'plugin/nf-validation' -include { paramsSummaryLog } from 'plugin/nf-validation' -include { validateParameters } from 'plugin/nf-validation' - -/* -======================================================================================== - SUBWORKFLOW DEFINITION -======================================================================================== -*/ - -workflow UTILS_NFVALIDATION_PLUGIN { - - take: - print_help // boolean: print help - workflow_command // string: default commmand used to run pipeline - pre_help_text // string: string to be printed before help text and summary log - post_help_text // string: string to be printed after help text and summary log - validate_params // boolean: validate parameters - schema_filename // path: JSON schema file, null to use default value - - main: - - log.debug "Using schema file: ${schema_filename}" - - // Default values for strings - pre_help_text = pre_help_text ?: '' - post_help_text = post_help_text ?: '' - workflow_command = workflow_command ?: '' - - // - // Print help message if needed - // - if (print_help) { - log.info pre_help_text + paramsHelp(workflow_command, parameters_schema: schema_filename) + post_help_text - System.exit(0) - } - - // - // Print parameter summary to stdout - // - log.info pre_help_text + paramsSummaryLog(workflow, parameters_schema: schema_filename) + post_help_text - - // - // Validate parameters relative to the parameter JSON schema - // - if (validate_params){ - validateParameters(parameters_schema: schema_filename) - } - - emit: - dummy_emit = true -} diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/meta.yml b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/meta.yml deleted file mode 100644 index 3d4a6b04f..000000000 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/meta.yml +++ /dev/null @@ -1,44 +0,0 @@ -# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json -name: "UTILS_NFVALIDATION_PLUGIN" -description: Use nf-validation to initiate and validate a pipeline -keywords: - - 
utility - - pipeline - - initialise - - validation -components: [] -input: - - print_help: - type: boolean - description: | - Print help message and exit - - workflow_command: - type: string - description: | - The command to run the workflow e.g. "nextflow run main.nf" - - pre_help_text: - type: string - description: | - Text to print before the help message - - post_help_text: - type: string - description: | - Text to print after the help message - - validate_params: - type: boolean - description: | - Validate the parameters and error if invalid. - - schema_filename: - type: string - description: | - The filename of the schema to validate against. -output: - - dummy_emit: - type: boolean - description: | - Dummy emit to make nf-core subworkflows lint happy -authors: - - "@adamrtalbot" -maintainers: - - "@adamrtalbot" - - "@maxulysse" diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/main.nf.test b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/main.nf.test deleted file mode 100644 index 5784a33f2..000000000 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/main.nf.test +++ /dev/null @@ -1,200 +0,0 @@ -nextflow_workflow { - - name "Test Workflow UTILS_NFVALIDATION_PLUGIN" - script "../main.nf" - workflow "UTILS_NFVALIDATION_PLUGIN" - tag "subworkflows" - tag "subworkflows_nfcore" - tag "plugin/nf-validation" - tag "'plugin/nf-validation'" - tag "utils_nfvalidation_plugin" - tag "subworkflows/utils_nfvalidation_plugin" - - test("Should run nothing") { - - when { - - params { - monochrome_logs = true - test_data = '' - } - - workflow { - """ - help = false - workflow_command = null - pre_help_text = null - post_help_text = null - validate_params = false - schema_filename = "$moduleTestDir/nextflow_schema.json" - - input[0] = help - input[1] = workflow_command - input[2] = pre_help_text - input[3] = post_help_text - input[4] = validate_params - input[5] = schema_filename - """ - } - } - - then { - assertAll( - { assert workflow.success } - ) - } - } - - test("Should run help") { - - - when { - - params { - monochrome_logs = true - test_data = '' - } - workflow { - """ - help = true - workflow_command = null - pre_help_text = null - post_help_text = null - validate_params = false - schema_filename = "$moduleTestDir/nextflow_schema.json" - - input[0] = help - input[1] = workflow_command - input[2] = pre_help_text - input[3] = post_help_text - input[4] = validate_params - input[5] = schema_filename - """ - } - } - - then { - assertAll( - { assert workflow.success }, - { assert workflow.exitStatus == 0 }, - { assert workflow.stdout.any { it.contains('Input/output options') } }, - { assert workflow.stdout.any { it.contains('--outdir') } } - ) - } - } - - test("Should run help with command") { - - when { - - params { - monochrome_logs = true - test_data = '' - } - workflow { - """ - help = true - workflow_command = "nextflow run noorg/doesntexist" - pre_help_text = null - post_help_text = null - validate_params = false - schema_filename = "$moduleTestDir/nextflow_schema.json" - - input[0] = help - input[1] = workflow_command - input[2] = pre_help_text - input[3] = post_help_text - input[4] = validate_params - input[5] = schema_filename - """ - } - } - - then { - assertAll( - { assert workflow.success }, - { assert workflow.exitStatus == 0 }, - { assert workflow.stdout.any { it.contains('nextflow run noorg/doesntexist') } }, - { assert workflow.stdout.any { it.contains('Input/output options') 
} }, - { assert workflow.stdout.any { it.contains('--outdir') } } - ) - } - } - - test("Should run help with extra text") { - - - when { - - params { - monochrome_logs = true - test_data = '' - } - workflow { - """ - help = true - workflow_command = "nextflow run noorg/doesntexist" - pre_help_text = "pre-help-text" - post_help_text = "post-help-text" - validate_params = false - schema_filename = "$moduleTestDir/nextflow_schema.json" - - input[0] = help - input[1] = workflow_command - input[2] = pre_help_text - input[3] = post_help_text - input[4] = validate_params - input[5] = schema_filename - """ - } - } - - then { - assertAll( - { assert workflow.success }, - { assert workflow.exitStatus == 0 }, - { assert workflow.stdout.any { it.contains('pre-help-text') } }, - { assert workflow.stdout.any { it.contains('nextflow run noorg/doesntexist') } }, - { assert workflow.stdout.any { it.contains('Input/output options') } }, - { assert workflow.stdout.any { it.contains('--outdir') } }, - { assert workflow.stdout.any { it.contains('post-help-text') } } - ) - } - } - - test("Should validate params") { - - when { - - params { - monochrome_logs = true - test_data = '' - outdir = 1 - } - workflow { - """ - help = false - workflow_command = null - pre_help_text = null - post_help_text = null - validate_params = true - schema_filename = "$moduleTestDir/nextflow_schema.json" - - input[0] = help - input[1] = workflow_command - input[2] = pre_help_text - input[3] = post_help_text - input[4] = validate_params - input[5] = schema_filename - """ - } - } - - then { - assertAll( - { assert workflow.failed }, - { assert workflow.stdout.any { it.contains('ERROR ~ ERROR: Validation of pipeline parameters failed!') } } - ) - } - } -} diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/tags.yml b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/tags.yml deleted file mode 100644 index 60b1cfff4..000000000 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/tags.yml +++ /dev/null @@ -1,2 +0,0 @@ -subworkflows/utils_nfvalidation_plugin: - - subworkflows/nf-core/utils_nfvalidation_plugin/** diff --git a/nf_core/pipeline-template/tower.yml b/nf_core/pipeline-template/tower.yml index 787aedfe9..2ddbef770 100644 --- a/nf_core/pipeline-template/tower.yml +++ b/nf_core/pipeline-template/tower.yml @@ -1,5 +1,7 @@ reports: + {%- if multiqc %} multiqc_report.html: display: "MultiQC HTML report" + {%- endif %} samplesheet.csv: display: "Auto-created samplesheet with collated metadata and FASTQ paths" diff --git a/nf_core/pipeline-template/workflows/pipeline.nf b/nf_core/pipeline-template/workflows/pipeline.nf index de0f21fe3..f878bb31a 100644 --- a/nf_core/pipeline-template/workflows/pipeline.nf +++ b/nf_core/pipeline-template/workflows/pipeline.nf @@ -4,12 +4,14 @@ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ -include { FASTQC } from '../modules/nf-core/fastqc/main' -include { MULTIQC } from '../modules/nf-core/multiqc/main' -include { paramsSummaryMap } from 'plugin/nf-validation' -include { paramsSummaryMultiqc } from '../subworkflows/nf-core/utils_nfcore_pipeline' +{%- if modules %} +{% if fastqc %}include { FASTQC } from '../modules/nf-core/fastqc/main'{% endif %} +{% if multiqc %}include { MULTIQC } from '../modules/nf-core/multiqc/main'{% endif %} +{% if nf_schema %}include { paramsSummaryMap } from 'plugin/nf-schema'{% endif %} +{% if multiqc %}include { 
paramsSummaryMultiqc } from '../subworkflows/nf-core/utils_nfcore_pipeline'{% endif %} include { softwareVersionsToYAML } from '../subworkflows/nf-core/utils_nfcore_pipeline' -include { methodsDescriptionText } from '../subworkflows/local/utils_nfcore_{{ short_name }}_pipeline' +{% if citations or multiqc %}include { methodsDescriptionText } from '../subworkflows/local/utils_nfcore_{{ short_name }}_pipeline'{% endif %} +{%- endif %} /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -22,19 +24,22 @@ workflow {{ short_name|upper }} { take: ch_samplesheet // channel: samplesheet read in from --input + {%- if modules %} main: ch_versions = Channel.empty() - ch_multiqc_files = Channel.empty() + {% if multiqc %}ch_multiqc_files = Channel.empty(){% endif %} + {%- if fastqc %} // // MODULE: Run FastQC // FASTQC ( ch_samplesheet ) - ch_multiqc_files = ch_multiqc_files.mix(FASTQC.out.zip.collect{it[1]}) + {% if multiqc %}ch_multiqc_files = ch_multiqc_files.mix(FASTQC.out.zip.collect{it[1]}){% endif %} ch_versions = ch_versions.mix(FASTQC.out.versions.first()) + {%- endif %} // // Collate and save software versions @@ -42,11 +47,12 @@ workflow {{ short_name|upper }} { softwareVersionsToYAML(ch_versions) .collectFile( storeDir: "${params.outdir}/pipeline_info", - name: 'nf_core_pipeline_software_mqc_versions.yml', + name: {% if is_nfcore %}'nf_core_' {% else %} '' {% endif %} + 'pipeline_software_' + {% if multiqc %} 'mqc_' {% else %} '' {% endif %} + 'versions.yml', sort: true, newLine: true ).set { ch_collated_versions } +{% if multiqc %} // // MODULE: MultiQC // @@ -59,36 +65,45 @@ workflow {{ short_name|upper }} { Channel.fromPath(params.multiqc_logo, checkIfExists: true) : Channel.empty() + {% if nf_schema %} summary_params = paramsSummaryMap( workflow, parameters_schema: "nextflow_schema.json") ch_workflow_summary = Channel.value(paramsSummaryMultiqc(summary_params)) + ch_multiqc_files = ch_multiqc_files.mix( + ch_workflow_summary.collectFile(name: 'workflow_summary_mqc.yaml')) + {% endif %} + {%- if citations %} ch_multiqc_custom_methods_description = params.multiqc_methods_description ? 
file(params.multiqc_methods_description, checkIfExists: true) : file("$projectDir/assets/methods_description_template.yml", checkIfExists: true) ch_methods_description = Channel.value( methodsDescriptionText(ch_multiqc_custom_methods_description)) + {%- endif %} - ch_multiqc_files = ch_multiqc_files.mix( - ch_workflow_summary.collectFile(name: 'workflow_summary_mqc.yaml')) ch_multiqc_files = ch_multiqc_files.mix(ch_collated_versions) + {%- if citations %} ch_multiqc_files = ch_multiqc_files.mix( ch_methods_description.collectFile( name: 'methods_description_mqc.yaml', sort: true ) ) + {%- endif %} MULTIQC ( ch_multiqc_files.collect(), ch_multiqc_config.toList(), ch_multiqc_custom_config.toList(), - ch_multiqc_logo.toList() + ch_multiqc_logo.toList(), + [], + [] ) - +{% endif %} emit: - multiqc_report = MULTIQC.out.report.toList() // channel: /path/to/multiqc_report.html + {%- if multiqc %}multiqc_report = MULTIQC.out.report.toList() // channel: /path/to/multiqc_report.html{% endif %} versions = ch_versions // channel: [ path(versions.yml) ] +{% endif %} } /* diff --git a/nf_core/pipelines/__init__.py b/nf_core/pipelines/__init__.py new file mode 100644 index 000000000..bc981c449 --- /dev/null +++ b/nf_core/pipelines/__init__.py @@ -0,0 +1 @@ +from .create import PipelineCreateApp diff --git a/nf_core/bump_version.py b/nf_core/pipelines/bump_version.py similarity index 98% rename from nf_core/bump_version.py rename to nf_core/pipelines/bump_version.py index c5e8931fb..18aa86932 100644 --- a/nf_core/bump_version.py +++ b/nf_core/pipelines/bump_version.py @@ -176,7 +176,7 @@ def update_file_version(filename: Union[str, Path], pipeline_obj: Pipeline, patt Args: filename (str): File to scan. - pipeline_obj (nf_core.lint.PipelineLint): A PipelineLint object that holds information + pipeline_obj (nf_core.pipelines.lint.PipelineLint): A PipelineLint object that holds information about the pipeline contents and build files. pattern (str): Regex pattern to apply. 
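For context on the hunk above: the version-bumping logic moving to nf_core/pipelines/bump_version.py is regex-driven, in that update_file_version() scans a target file against a pattern and rewrites the matching version strings in place. Below is a minimal, self-contained sketch of that idea, assuming a plain (filename, pattern, replacement) interface; the helper name and signature are illustrative only, not the module's actual Pipeline-object API:

#!/usr/bin/env python3
"""Illustrative sketch of regex-based version bumping; not the real nf-core implementation."""
import re
from pathlib import Path


def bump_version_in_file(filename: str, pattern: str, replacement: str) -> int:
    """Rewrite every regex match in `filename` with `replacement`; return the match count."""
    path = Path(filename)
    content = path.read_text()
    # re.subn returns the rewritten text plus the number of substitutions made
    updated, n_subs = re.subn(pattern, replacement, content)
    if n_subs:  # only touch the file when something actually matched
        path.write_text(updated)
    return n_subs


# Hypothetical usage: bump the manifest version in a pipeline's nextflow.config
# bump_version_in_file("nextflow.config", r"version\s*=\s*'[^']+'", "version = '1.1'")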
diff --git a/nf_core/pipelines/create/__init__.py b/nf_core/pipelines/create/__init__.py new file mode 100644 index 000000000..8b0edf34c --- /dev/null +++ b/nf_core/pipelines/create/__init__.py @@ -0,0 +1,107 @@ +"""A Textual app to create a pipeline.""" + +import logging +from pathlib import Path + +import click +import yaml +from textual.app import App +from textual.widgets import Button + +import nf_core +from nf_core.pipelines.create import utils +from nf_core.pipelines.create.basicdetails import BasicDetails +from nf_core.pipelines.create.custompipeline import CustomPipeline +from nf_core.pipelines.create.finaldetails import FinalDetails +from nf_core.pipelines.create.githubexit import GithubExit +from nf_core.pipelines.create.githubrepo import GithubRepo +from nf_core.pipelines.create.githubrepoquestion import GithubRepoQuestion +from nf_core.pipelines.create.loggingscreen import LoggingScreen +from nf_core.pipelines.create.nfcorepipeline import NfcorePipeline +from nf_core.pipelines.create.pipelinetype import ChoosePipelineType +from nf_core.pipelines.create.welcome import WelcomeScreen + +log_handler = utils.CustomLogHandler( + console=utils.LoggingConsole(classes="log_console"), + rich_tracebacks=True, + show_time=False, + show_path=False, + markup=True, + tracebacks_suppress=[click], +) +logging.basicConfig( + level="INFO", + handlers=[log_handler], + format="%(message)s", +) +log_handler.setLevel("INFO") + + +class PipelineCreateApp(App[utils.CreateConfig]): + """A Textual app to create nf-core pipelines.""" + + CSS_PATH = "create.tcss" + TITLE = "nf-core create" + SUB_TITLE = "Create a new pipeline with the nf-core pipeline template" + BINDINGS = [ + ("d", "toggle_dark", "Toggle dark mode"), + ("q", "quit", "Quit"), + ] + SCREENS = { + "welcome": WelcomeScreen(), + "basic_details": BasicDetails(), + "choose_type": ChoosePipelineType(), + "type_custom": CustomPipeline(), + "type_nfcore": NfcorePipeline(), + "final_details": FinalDetails(), + "logging": LoggingScreen(), + "github_repo_question": GithubRepoQuestion(), + "github_repo": GithubRepo(), + "github_exit": GithubExit(), + } + + # Initialise config as empty + TEMPLATE_CONFIG = utils.CreateConfig() + + # Initialise pipeline type + NFCORE_PIPELINE = True + + # Log handler + LOG_HANDLER = log_handler + # Logging state + LOGGING_STATE = None + + # Template features + template_features_yml = utils.load_features_yaml() + + def on_mount(self) -> None: + self.push_screen("welcome") + + def on_button_pressed(self, event: Button.Pressed) -> None: + """Handle all button pressed events.""" + if event.button.id == "start": + self.push_screen("choose_type") + elif event.button.id == "type_nfcore": + self.NFCORE_PIPELINE = True + utils.NFCORE_PIPELINE_GLOBAL = True + self.push_screen("basic_details") + elif event.button.id == "type_custom": + self.NFCORE_PIPELINE = False + utils.NFCORE_PIPELINE_GLOBAL = False + self.push_screen("basic_details") + elif event.button.id == "continue": + self.push_screen("final_details") + elif event.button.id == "github_repo": + self.push_screen("github_repo") + elif event.button.id == "close_screen": + self.push_screen("github_repo_question") + elif event.button.id == "exit": + self.push_screen("github_exit") + if event.button.id == "close_app": + self.exit(return_code=0) + if event.button.id == "back": + self.pop_screen() + + def action_toggle_dark(self) -> None: + """An action to toggle dark mode.""" + self.dark: bool = not self.dark diff --git a/nf_core/pipelines/create/basicdetails.py
b/nf_core/pipelines/create/basicdetails.py new file mode 100644 index 000000000..09484fa2e --- /dev/null +++ b/nf_core/pipelines/create/basicdetails.py @@ -0,0 +1,110 @@ +"""A Textual app to create a pipeline.""" + +from pathlib import Path +from textwrap import dedent + +from textual import on +from textual.app import ComposeResult +from textual.containers import Center, Horizontal +from textual.screen import Screen +from textual.widgets import Button, Footer, Header, Input, Markdown + +from nf_core.pipelines.create.utils import CreateConfig, TextInput, add_hide_class, remove_hide_class + +pipeline_exists_warn = """ +> ⚠️ **The pipeline you are trying to create already exists.** +> +> If you continue, you will **overwrite** the existing pipeline. +> Please change the pipeline or organisation name to create a different pipeline. +""" + + +class BasicDetails(Screen): + """Name, description, author, etc.""" + + def compose(self) -> ComposeResult: + yield Header() + yield Footer() + yield Markdown( + dedent( + """ + # Basic details + """ + ) + ) + with Horizontal(): + yield TextInput( + "org", + "Organisation", + "GitHub organisation", + "nf-core", + classes="column", + disabled=self.parent.NFCORE_PIPELINE, + ) + yield TextInput( + "name", + "Pipeline Name", + "Workflow name", + classes="column", + ) + + yield TextInput( + "description", + "Description", + "A short description of your pipeline.", + ) + yield TextInput( + "author", + "Author(s)", + "Name of the main author / authors", + ) + yield Markdown(dedent(pipeline_exists_warn), id="exist_warn", classes="hide") + yield Center( + Button("Back", id="back", variant="default"), + Button("Next", id="next", variant="success"), + classes="cta", + ) + + @on(Input.Changed) + @on(Input.Submitted) + def show_exists_warn(self): + """Check if the pipeline exists on every input change or submission. + If the pipeline exists, show a warning message saying that it will be overwritten.""" + config = {} + for text_input in self.query("TextInput"): + this_input = text_input.query_one(Input) + config[text_input.field_id] = this_input.value + if Path(config["org"] + "-" + config["name"]).is_dir(): + remove_hide_class(self.parent, "exist_warn") + else: + add_hide_class(self.parent, "exist_warn") + + def on_screen_resume(self): + """Hide warn message on screen resume.
+ Update displayed value on screen resume.""" + add_hide_class(self.parent, "exist_warn") + for text_input in self.query("TextInput"): + if text_input.field_id == "org": + text_input.disabled = self.parent.NFCORE_PIPELINE + + @on(Button.Pressed) + def on_button_pressed(self, event: Button.Pressed) -> None: + """Save fields to the config.""" + config = {} + for text_input in self.query("TextInput"): + this_input = text_input.query_one(Input) + validation_result = this_input.validate(this_input.value) + config[text_input.field_id] = this_input.value + if not validation_result.is_valid: + text_input.query_one(".validation_msg").update("\n".join(validation_result.failure_descriptions)) + else: + text_input.query_one(".validation_msg").update("") + try: + self.parent.TEMPLATE_CONFIG = CreateConfig(**config) + if event.button.id == "next": + if self.parent.NFCORE_PIPELINE: + self.parent.push_screen("type_nfcore") + else: + self.parent.push_screen("type_custom") + except ValueError: + pass diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py new file mode 100644 index 000000000..8ab547c1c --- /dev/null +++ b/nf_core/pipelines/create/create.py @@ -0,0 +1,471 @@ +"""Creates an nf-core pipeline matching the current +organization's specification based on a template. +""" + +import configparser +import logging +import os +import re +import shutil +from pathlib import Path +from typing import Dict, List, Optional, Tuple, Union, cast + +import git +import git.config +import jinja2 +import yaml + +import nf_core +import nf_core.pipelines.schema +import nf_core.utils +from nf_core.pipelines.create.utils import CreateConfig, features_yml_path, load_features_yaml +from nf_core.pipelines.create_logo import create_logo +from nf_core.pipelines.lint_utils import run_prettier_on_file +from nf_core.utils import LintConfigType, NFCoreTemplateConfig + +log = logging.getLogger(__name__) + + +class PipelineCreate: + """Creates an nf-core pipeline à la carte from the nf-core best-practice template. + + Args: + name (str): Name for the pipeline. + description (str): Description for the pipeline. + author (str): Author's name for the pipeline. + version (str): Version flag. Semantic versioning only. Defaults to `1.0.0dev`. + no_git (bool): Prevents the creation of a local Git repository for the pipeline. Defaults to False. + force (bool): Overwrites a given workflow directory with the same name. Defaults to False. Used for tests and sync command. + May the force be with you. + outdir (str): Path to the local output directory. + template_config (str|CreateConfig): Path to a template.yml file with pipeline creation settings, or a pydantic model with the customisations for pipeline creation settings. + organisation (str): Name of the GitHub organisation to create the pipeline. Will be the prefix of the pipeline. + from_config_file (bool): If true, the pipeline will be created from the `.nf-core.yml` config file. Used for tests and sync command. + default_branch (str): Specifies the --initial-branch name.
+ """ + + def __init__( + self, + name: Optional[str] = None, + description: Optional[str] = None, + author: Optional[str] = None, + version: str = "1.0.0dev", + no_git: bool = False, + force: bool = False, + outdir: Optional[Union[Path, str]] = None, + template_config: Optional[Union[CreateConfig, str, Path]] = None, + organisation: str = "nf-core", + from_config_file: bool = False, + default_branch: Optional[str] = None, + is_interactive: bool = False, + ) -> None: + if isinstance(template_config, CreateConfig): + self.config = template_config + elif from_config_file: + # Try reading config file + try: + _, config_yml = nf_core.utils.load_tools_config(outdir if outdir else Path().cwd()) + # Obtain a CreateConfig object from `.nf-core.yml` config file + if config_yml is not None and getattr(config_yml, "template", None) is not None: + self.config = CreateConfig(**config_yml["template"].model_dump()) + else: + raise UserWarning("The template configuration was not provided in '.nf-core.yml'.") + # Update the output directory + self.config.outdir = outdir if outdir else Path().cwd() + except (FileNotFoundError, UserWarning): + log.debug("The '.nf-core.yml' configuration file was not found.") + elif (name and description and author) or ( + template_config and (isinstance(template_config, str) or isinstance(template_config, Path)) + ): + # Obtain a CreateConfig object from the template yaml file + self.config = self.check_template_yaml_info(template_config, name, description, author) + self.update_config(organisation, version, force, outdir) + else: + raise UserWarning("The template configuration was not provided.") + + # Read features yaml file + self.template_features_yml = load_features_yaml() + + if self.config.outdir is None: + self.config.outdir = str(Path.cwd()) + self.jinja_params, self.skip_areas = self.obtain_jinja_params_dict( + self.config.skip_features or [], str(self.config.outdir) + ) + + # format strings in features yaml + short_name = self.jinja_params["short_name"] + env = jinja2.Environment(loader=jinja2.PackageLoader("nf_core", "pipelines"), keep_trailing_newline=True) + features_template = env.get_template( + str(features_yml_path.relative_to(Path(nf_core.__file__).parent / "pipelines")) + ) + rendered_features = features_template.render({"short_name": short_name}) + self.template_features_yml = yaml.safe_load(rendered_features) + + # Get list of files we're skipping with the supplied skip keys + self.skip_paths = set(sp for k in self.skip_areas for sp in self.template_features_yml[k]["skippable_paths"]) + + # Set convenience variables + self.name = self.config.name + + # Set fields used by the class methods + self.no_git = no_git + self.default_branch = default_branch + self.is_interactive = is_interactive + self.force = self.config.force + + if self.config.outdir == ".": + self.outdir = Path(self.config.outdir, self.jinja_params["name_noslash"]).absolute() + else: + self.outdir = Path(self.config.outdir).absolute() + + def check_template_yaml_info(self, template_yaml, name, description, author): + """Ensure that the provided template yaml file contains the necessary information. + + Args: + template_yaml (str): Template yaml file. + name (str): Name for the pipeline. + description (str): Description for the pipeline. + author (str): Authors name of the pipeline. + + Returns: + CreateConfig: Pydantic model for the nf-core create config. + + Raises: + UserWarning: if template yaml file does not contain all the necessary information. 
+ UserWarning: if template yaml file does not exist. + """ + # Obtain template customization info from template yaml file or `.nf-core.yml` config file + config = CreateConfig() + if template_yaml: + try: + with open(template_yaml) as f: + template_yaml = yaml.safe_load(f) + config = CreateConfig(**template_yaml) + except FileNotFoundError: + raise UserWarning(f"Template YAML file '{template_yaml}' not found.") + + # Check required fields + missing_fields = [] + if config.name is None and name is None: + missing_fields.append("name") + elif config.name is None: + config.name = name + if config.description is None and description is None: + missing_fields.append("description") + elif config.description is None: + config.description = description + if config.author is None and author is None: + missing_fields.append("author") + elif config.author is None: + config.author = author + if len(missing_fields) > 0: + raise UserWarning( + f"Template YAML file does not contain the following required fields: {', '.join(missing_fields)}" + ) + + return config + + def update_config(self, organisation, version, force, outdir): + """Updates the config file with arguments provided through command line. + + Args: + organisation (str): Name of the GitHub organisation to create the pipeline. + version (str): Version of the pipeline. + force (bool): Overwrites a given workflow directory with the same name. + outdir (str): Path to the local output directory. + """ + if self.config.org is None: + self.config.org = organisation + if self.config.version is None: + self.config.version = version if version else "1.0.0dev" + if self.config.force is None: + self.config.force = force if force else False + if self.config.outdir is None: + self.config.outdir = outdir if outdir else "." + if self.config.is_nfcore is None or self.config.is_nfcore == "null": + self.config.is_nfcore = self.config.org == "nf-core" + + def obtain_jinja_params_dict( + self, features_to_skip: List[str], pipeline_dir: Union[str, Path] + ) -> Tuple[Dict, List[str]]: + """Creates a dictionary of parameters for the new pipeline. + + Args: + features_to_skip (list): List of template features/areas to skip. + pipeline_dir (str): Path to the pipeline directory. + + Returns: + jinja_params (dict): Dictionary of template areas to skip with values true/false. + skip_areas (list): List of template areas which contain paths to skip. 
+ """ + # Try reading config file + try: + _, config_yml = nf_core.utils.load_tools_config(pipeline_dir) + except UserWarning: + config_yml = None + + # Set the parameters for the jinja template + jinja_params = self.config.model_dump() + + # Add template areas to jinja params and create list of areas with paths to skip + skip_areas = [] + for t_area in self.template_features_yml.keys(): + if t_area in features_to_skip: + if self.template_features_yml[t_area]["skippable_paths"]: + skip_areas.append(t_area) + jinja_params[t_area] = False + else: + jinja_params[t_area] = True + + # Add is_nfcore as an area to skip for non-nf-core pipelines, to skip all nf-core files + if not self.config.is_nfcore: + skip_areas.append("is_nfcore") + jinja_params["is_nfcore"] = False + + # Set the last parameters based on the ones provided + jinja_params["short_name"] = ( + jinja_params["name"].lower().replace(r"/\s+/", "-").replace(f"{jinja_params['org']}/", "").replace("/", "-") + ) + jinja_params["name"] = f"{jinja_params['org']}/{jinja_params['short_name']}" + jinja_params["name_noslash"] = jinja_params["name"].replace("/", "-") + jinja_params["prefix_nodash"] = jinja_params["org"].replace("-", "") + jinja_params["name_docker"] = jinja_params["name"].replace(jinja_params["org"], jinja_params["prefix_nodash"]) + jinja_params["logo_light"] = f"{jinja_params['name_noslash']}_logo_light.png" + jinja_params["logo_dark"] = f"{jinja_params['name_noslash']}_logo_dark.png" + if config_yml is not None: + if ( + hasattr(config_yml, "lint") + and hasattr(config_yml["lint"], "nextflow_config") + and hasattr(config_yml["lint"]["nextflow_config"], "manifest.name") + ): + return jinja_params, skip_areas + + # Check that the pipeline name matches the requirements + if not re.match(r"^[a-z]+$", jinja_params["short_name"]): + if jinja_params["is_nfcore"]: + raise UserWarning("[red]Invalid workflow name: must be lowercase without punctuation.") + else: + log.warning( + "Your workflow name is not lowercase without punctuation. This may cause Nextflow errors.\nConsider changing the name to avoid special characters." + ) + + return jinja_params, skip_areas + + def init_pipeline(self): + """Creates the nf-core pipeline.""" + # Make the new pipeline + self.render_template() + + # Init the git repository and make the first commit + if not self.no_git: + self.git_init_pipeline() + + if self.config.is_nfcore and not self.is_interactive: + log.info( + "[green bold]!!!!!! 
IMPORTANT !!!!!!\n\n" + "[green not bold]If you are interested in adding your pipeline to the nf-core community,\n" + "PLEASE COME AND TALK TO US IN THE NF-CORE SLACK BEFORE WRITING ANY CODE!\n\n" + "[default]Please read: [link=https://nf-co.re/docs/tutorials/adding_a_pipeline/overview#join-the-community]" + "https://nf-co.re/docs/tutorials/adding_a_pipeline/overview#join-the-community[/link]" + ) + + def render_template(self) -> None: + """Runs Jinja to create a new nf-core pipeline.""" + log.info(f"Creating new pipeline: '{self.name}'") + + # Check if the output directory exists + if self.outdir.exists(): + if self.force: + log.warning(f"Output directory '{self.outdir}' exists - continuing as --force specified") + else: + log.error(f"Output directory '{self.outdir}' exists!") + log.info("Use -f / --force to overwrite existing files") + raise UserWarning(f"Output directory '{self.outdir}' exists!") + else: + self.outdir.mkdir(parents=True, exist_ok=True) + + # Run jinja2 for each file in the template folder + env = jinja2.Environment( + loader=jinja2.PackageLoader("nf_core", "pipeline-template"), keep_trailing_newline=True + ) + template_dir = Path(nf_core.__file__).parent / "pipeline-template" + object_attrs = self.jinja_params + object_attrs["nf_core_version"] = nf_core.__version__ + + # Can't use glob.glob() as need recursive hidden dotfiles - https://stackoverflow.com/a/58126417/713980 + template_files = list(Path(template_dir).glob("**/*")) + template_files += list(Path(template_dir).glob("*")) + ignore_strs = [".pyc", "__pycache__", ".pyo", ".pyd", ".DS_Store", ".egg"] + short_name = self.jinja_params["short_name"] + rename_files: Dict[str, str] = { + "workflows/pipeline.nf": f"workflows/{short_name}.nf", + "subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf": f"subworkflows/local/utils_nfcore_{short_name}_pipeline/main.nf", + } + + # Set the paths to skip according to customization + for template_fn_path in template_files: + # Skip files that are in the self.skip_paths list + for skip_path in self.skip_paths: + if str(template_fn_path.relative_to(template_dir)).startswith(skip_path): + break + else: + if template_fn_path.is_dir(): + continue + if any([s in str(template_fn_path) for s in ignore_strs]): + log.debug(f"Ignoring '{template_fn_path}' in jinja2 template creation") + continue + + # Set up vars and directories + template_fn = template_fn_path.relative_to(template_dir) + output_path = self.outdir / template_fn + + if str(template_fn) in rename_files: + output_path = self.outdir / rename_files[str(template_fn)] + output_path.parent.mkdir(parents=True, exist_ok=True) + + try: + # Just copy binary files + if nf_core.utils.is_file_binary(template_fn_path): + raise AttributeError(f"Binary file: {template_fn_path}") + + # Got this far - render the template + log.debug(f"Rendering template file: '{template_fn}'") + j_template = env.get_template(str(template_fn)) + rendered_output = j_template.render(object_attrs) + + # Write to the pipeline output file + with open(output_path, "w") as fh: + log.debug(f"Writing to output file: '{output_path}'") + fh.write(rendered_output) + + # Copy the file directly instead of using Jinja + except (AttributeError, UnicodeDecodeError) as e: + log.debug(f"Copying file without Jinja: '{output_path}' - {e}") + shutil.copy(template_fn_path, output_path) + + # Something else went wrong + except Exception as e: + log.error(f"Copying raw file as error rendering with Jinja: '{output_path}' - {e}") + shutil.copy(template_fn_path, output_path) + + # 
Mirror file permissions + template_stat = os.stat(template_fn_path) + os.chmod(output_path, template_stat.st_mode) + + if self.config.is_nfcore: + # Make a logo and save it, if it is a nf-core pipeline + self.make_pipeline_logo() + + # Update the .nf-core.yml with linting configurations + self.fix_linting() + + if self.config: + config_fn, config_yml = nf_core.utils.load_tools_config(self.outdir) + if config_fn is not None and config_yml is not None: + with open(str(config_fn), "w") as fh: + config_yml.template = NFCoreTemplateConfig(**self.config.model_dump()) + yaml.safe_dump(config_yml.model_dump(), fh) + log.debug(f"Dumping pipeline template yml to pipeline config file '{config_fn.name}'") + + # Run prettier on files + run_prettier_on_file([str(f) for f in self.outdir.glob("**/*")]) + + def fix_linting(self): + """ + Updates the .nf-core.yml with linting configurations + for a customized pipeline. + """ + # Create a lint config + lint_config = {} + for area in (self.config.skip_features or []) + self.skip_areas: + try: + for lint_test in self.template_features_yml[area]["linting"]: + try: + if self.template_features_yml[area]["linting"][lint_test]: + lint_config.setdefault(lint_test, []).extend( + self.template_features_yml[area]["linting"][lint_test] + ) + else: + lint_config[lint_test] = False + except AttributeError: + pass # When linting is False + except KeyError: + pass # Areas without linting + + # Add the lint content to the preexisting nf-core config + config_fn, nf_core_yml = nf_core.utils.load_tools_config(self.outdir) + if config_fn is not None and nf_core_yml is not None: + nf_core_yml.lint = cast(LintConfigType, lint_config) + with open(self.outdir / config_fn, "w") as fh: + yaml.dump(nf_core_yml.model_dump(), fh, default_flow_style=False, sort_keys=False) + + def make_pipeline_logo(self): + """Fetch a logo for the new pipeline from the nf-core website""" + email_logo_path = Path(self.outdir) / "assets" + create_logo( + text=self.jinja_params["short_name"], directory=email_logo_path, theme="light", force=bool(self.force) + ) + for theme in ["dark", "light"]: + readme_logo_path = Path(self.outdir) / "docs" / "images" + create_logo( + text=self.jinja_params["short_name"], + directory=readme_logo_path, + width=600, + theme=theme, + force=bool(self.force), + ) + + def git_init_pipeline(self) -> None: + """Initialises the new pipeline as a Git repository and submits first commit. + + Raises: + UserWarning: if Git default branch is set to 'dev' or 'TEMPLATE'. + """ + default_branch: Optional[str] = self.default_branch + try: + default_branch = default_branch or str(git.config.GitConfigParser().get_value("init", "defaultBranch")) + except configparser.Error: + log.debug("Could not read init.defaultBranch") + if default_branch in ["dev", "TEMPLATE"]: + raise UserWarning( + f"Your Git defaultBranch '{default_branch}' is incompatible with nf-core.\n" + "'dev' and 'TEMPLATE' can not be used as default branch name.\n" + "Set the default branch name with " + "[white on grey23] git config --global init.defaultBranch [/]\n" + "Or set the default_branch parameter in this class.\n" + "Pipeline git repository will not be initialised." 
+ ) + + log.info("Initialising local pipeline git repository") + repo = git.Repo.init(self.outdir) + repo.git.add(A=True) + repo.index.commit(f"initial template build from nf-core/tools, version {nf_core.__version__}") + if default_branch: + repo.active_branch.rename(default_branch) + try: + repo.git.branch("TEMPLATE") + repo.git.branch("dev") + + except git.GitCommandError as e: + if "already exists" in e.stderr: + log.debug("Branches 'TEMPLATE' and 'dev' already exist") + if self.force: + log.debug("Force option set - deleting branches") + repo.git.branch("-D", "TEMPLATE") + repo.git.branch("-D", "dev") + repo.git.branch("TEMPLATE") + repo.git.branch("dev") + else: + raise UserWarning( + "Branches 'TEMPLATE' and 'dev' already exist. Use --force to overwrite existing branches." + ) + if self.is_interactive: + log.info(f"Pipeline created: ./{self.outdir.relative_to(Path.cwd())}") + else: + log.info( + "Done. Remember to add a remote and push to GitHub:\n" + f"[white on grey23] cd {self.outdir} \n" + " git remote add origin git@github.com:USERNAME/REPO_NAME.git \n" + " git push --all origin " + ) + log.info("This will also push your newly created dev branch and the TEMPLATE branch for syncing.") diff --git a/nf_core/pipelines/create/create.tcss b/nf_core/pipelines/create/create.tcss new file mode 100644 index 000000000..747be3f75 --- /dev/null +++ b/nf_core/pipelines/create/create.tcss @@ -0,0 +1,141 @@ +#logo { + width: 100%; + content-align-horizontal: center; + content-align-vertical: middle; +} +.cta { + layout: horizontal; + margin-bottom: 1; +} +.cta Button { + margin: 0 3; +} + +.pipeline-type-grid { + height: auto; + margin-bottom: 2; +} + +.custom_grid { + height: auto; +} +.custom_grid Switch { + width: auto; +} +.custom_grid Static { + width: 1fr; + margin: 1 8; +} +.custom_grid Button { + width: auto; +} + +.text-input-grid { + padding: 1 1 1 1; + grid-size: 1 3; + grid-rows: 3 3 auto; + height: auto; +} +.field_help { + padding: 1 1 0 1; + color: $text-muted; + text-style: italic; +} +.validation_msg { + padding: 0 1; + color: $error; +} +.-valid { + border: tall $success-darken-3; +} + +Horizontal{ + width: 100%; + height: auto; +} +.column { + width: 1fr; +} + +HorizontalScroll { + width: 100%; +} +.feature_subtitle { + color: grey; +} + +Vertical{ + height: auto; +} + +.features-container { + padding: 0 4 1 4; +} + +/* Display help messages */ + +.help_box { + background: #333333; + padding: 1 3 0 3; + margin: 0 5 2 5; + overflow-y: auto; + transition: height 50ms; + display: none; + height: 0; +} +.displayed .help_box { + display: block; + height: 12; +} +#show_help { + display: block; +} +#hide_help { + display: none; +} +.displayed #show_help { + display: none; +} +.displayed #hide_help { + display: block; +} + +/* Show password */ + +#show_password { + display: block; +} +#hide_password { + display: none; +} +.displayed #show_password { + display: none; +} +.displayed #hide_password { + display: block; +} + +/* Logging console */ + +.log_console { + height: auto; + background: #333333; + padding: 1 3; + margin: 0 4 2 4; +} + +.hide { + display: none; +} + +/* Layouts */ +.col-2 { + grid-size: 2 1; +} + +.ghrepo-cols { + margin: 0 4; +} +.ghrepo-cols Button { + margin-top: 2; +} diff --git a/nf_core/pipelines/create/custompipeline.py b/nf_core/pipelines/create/custompipeline.py new file mode 100644 index 000000000..5debcfee7 --- /dev/null +++ b/nf_core/pipelines/create/custompipeline.py @@ -0,0 +1,47 @@ +from textwrap import dedent + +from textual import on +from 
textual.app import ComposeResult +from textual.containers import Center, ScrollableContainer +from textual.screen import Screen +from textual.widgets import Button, Footer, Header, Markdown, Switch + +from nf_core.pipelines.create.utils import PipelineFeature + + +class CustomPipeline(Screen): + """Select the template features to include in a custom pipeline.""" + + def compose(self) -> ComposeResult: + yield Header() + yield Footer() + yield Markdown( + dedent( + """ + # Template features + """ + ) + ) + yield ScrollableContainer(id="features") + yield Center( + Button("Back", id="back", variant="default"), + Button("Continue", id="continue", variant="success"), + classes="cta", + ) + + def on_mount(self) -> None: + for name, feature in self.parent.template_features_yml.items(): + if feature["custom_pipelines"]: + self.query_one("#features").mount( + PipelineFeature(feature["help_text"], feature["short_description"], feature["description"], name) + ) + + @on(Button.Pressed, "#continue") + def on_button_pressed(self, event: Button.Pressed) -> None: + """Save fields to the config.""" + skip = [] + for feature_input in self.query("PipelineFeature"): + this_switch = feature_input.query_one(Switch) + if not this_switch.value: + skip.append(this_switch.id) + self.parent.TEMPLATE_CONFIG.__dict__.update({"skip_features": skip, "is_nfcore": False}) diff --git a/nf_core/pipelines/create/finaldetails.py b/nf_core/pipelines/create/finaldetails.py new file mode 100644 index 000000000..bd15cf9dd --- /dev/null +++ b/nf_core/pipelines/create/finaldetails.py @@ -0,0 +1,110 @@ +"""A Textual app to create a pipeline.""" + +from pathlib import Path +from textwrap import dedent + +from textual import on, work +from textual.app import ComposeResult +from textual.containers import Center, Horizontal +from textual.screen import Screen +from textual.widgets import Button, Footer, Header, Input, Markdown + +from nf_core.pipelines.create.create import PipelineCreate +from nf_core.pipelines.create.utils import ShowLogs, TextInput, add_hide_class, remove_hide_class + +pipeline_exists_warn = """ +> ⚠️ **The pipeline you are trying to create already exists.** +> +> If you continue, you will **overwrite** the existing pipeline. +> Please change the pipeline or organisation name to create a different pipeline. +> Alternatively, provide a different output directory.
+""" + + +class FinalDetails(Screen): + """Name, description, author, etc.""" + + def compose(self) -> ComposeResult: + yield Header() + yield Footer() + yield Markdown( + dedent( + """ + # Final details + """ + ) + ) + + with Horizontal(): + yield TextInput( + "version", + "Version", + "First version of the pipeline", + "1.0.0dev", + classes="column", + ) + yield TextInput( + "outdir", + "Output directory", + "Path to the output directory where the pipeline will be created", + ".", + classes="column", + ) + + yield Markdown(dedent(pipeline_exists_warn), id="exist_warn", classes="hide") + + yield Center( + Button("Back", id="back", variant="default"), + Button("Finish", id="finish", variant="success"), + classes="cta", + ) + + @on(Button.Pressed, "#finish") + def on_button_pressed(self, event: Button.Pressed) -> None: + """Save fields to the config.""" + new_config = {} + for text_input in self.query("TextInput"): + this_input = text_input.query_one(Input) + validation_result = this_input.validate(this_input.value) + new_config[text_input.field_id] = this_input.value + if not validation_result.is_valid: + text_input.query_one(".validation_msg").update("\n".join(validation_result.failure_descriptions)) + else: + text_input.query_one(".validation_msg").update("") + try: + self.parent.TEMPLATE_CONFIG.__dict__.update(new_config) + except ValueError: + pass + + # Create the new pipeline + self._create_pipeline() + self.parent.LOGGING_STATE = "pipeline created" + self.parent.push_screen("logging") + + @on(Input.Changed) + @on(Input.Submitted) + def show_exists_warn(self): + """Check if the pipeline exists on every input change or submitted. + If the pipeline exists, show warning message saying that it will be overriden.""" + outdir = "" + for text_input in self.query("TextInput"): + this_input = text_input.query_one(Input) + if text_input.field_id == "outdir": + outdir = this_input.value + if Path(outdir, self.parent.TEMPLATE_CONFIG.org + "-" + self.parent.TEMPLATE_CONFIG.name).is_dir(): + remove_hide_class(self.parent, "exist_warn") + + def on_screen_resume(self): + """Hide warn message on screen resume.""" + add_hide_class(self.parent, "exist_warn") + + @work(thread=True, exclusive=True) + def _create_pipeline(self) -> None: + """Create the pipeline.""" + self.post_message(ShowLogs()) + create_obj = PipelineCreate( + template_config=self.parent.TEMPLATE_CONFIG, + is_interactive=True, + ) + create_obj.init_pipeline() + remove_hide_class(self.parent, "close_screen") diff --git a/nf_core/pipelines/create/githubexit.py b/nf_core/pipelines/create/githubexit.py new file mode 100644 index 000000000..3dac88cc5 --- /dev/null +++ b/nf_core/pipelines/create/githubexit.py @@ -0,0 +1,48 @@ +from textwrap import dedent + +from textual.app import ComposeResult +from textual.containers import Center +from textual.screen import Screen +from textual.widgets import Button, Footer, Header, Markdown, Static + +from nf_core.utils import nfcore_logo + +exit_help_text_markdown = """ +If you would like to create the GitHub repository later, you can do it manually by following these steps: + +1. Create a new GitHub repository +2. Add the remote to your local repository: + ```bash + cd + git remote add origin git@github.com:/.git + ``` +3. Push the code to the remote: + ```bash + git push --all origin + ``` + > 💡 Note the `--all` flag: this is needed to push all branches to the remote. 
+""" + + +class GithubExit(Screen): + """A screen to show a help text when a GitHub repo is NOT created.""" + + def compose(self) -> ComposeResult: + yield Header() + yield Footer() + yield Markdown( + dedent( + """ + # HowTo create a GitHub repository + """ + ) + ) + yield Static( + "\n" + "\n".join(nfcore_logo) + "\n", + id="logo", + ) + yield Markdown(exit_help_text_markdown) + yield Center( + Button("Close", id="close_app", variant="success"), + classes="cta", + ) diff --git a/nf_core/pipelines/create/githubrepo.py b/nf_core/pipelines/create/githubrepo.py new file mode 100644 index 000000000..99e7b09ab --- /dev/null +++ b/nf_core/pipelines/create/githubrepo.py @@ -0,0 +1,253 @@ +import logging +import os +from pathlib import Path +from textwrap import dedent + +import git +import yaml +from github import Github, GithubException, UnknownObjectException +from textual import on, work +from textual.app import ComposeResult +from textual.containers import Center, Horizontal, Vertical +from textual.message import Message +from textual.screen import Screen +from textual.widgets import Button, Footer, Header, Input, Markdown, Static, Switch + +from nf_core.pipelines.create.utils import ShowLogs, TextInput, remove_hide_class + +log = logging.getLogger(__name__) + +github_org_help = """ +> ⚠️ **You can't create a repository directly in the nf-core organisation.** +> +> Please create the pipeline repo to an organisation where you have access or use your user account. +> A core-team member will be able to transfer the repo to nf-core once the development has started. + +> 💡 Your GitHub user account will be used by default if `nf-core` is given as the org name. +""" + + +class GithubRepo(Screen): + """Create a GitHub repository and push all branches.""" + + def compose(self) -> ComposeResult: + yield Header() + yield Footer() + gh_user, gh_token = self._get_github_credentials() + github_text_markdown = dedent( + """ + # Create GitHub repository + + Now that we have created a new pipeline locally, we can + create a new GitHub repository and push the code to it. 
+ """ + ) + if gh_user: + github_text_markdown += f">\n> 💡 _Found GitHub username {'and token ' if gh_token else ''}in local [GitHub CLI](https://cli.github.com/) config_\n>\n" + yield Markdown(github_text_markdown) + with Horizontal(classes="ghrepo-cols"): + yield TextInput( + "gh_username", + "GitHub username", + "Your GitHub username", + default=gh_user[0] if gh_user is not None else "GitHub username", + classes="column", + ) + yield TextInput( + "token", + "GitHub token", + "Your GitHub [link=https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens]personal access token[/link] for login.", + default=gh_token if gh_token is not None else "GitHub token", + password=True, + classes="column", + ) + yield Button("Show", id="show_password") + yield Button("Hide", id="hide_password") + with Horizontal(classes="ghrepo-cols"): + yield TextInput( + "repo_org", + "Organisation name", + "The name of the organisation where the GitHub repo will be cretaed", + default=self.parent.TEMPLATE_CONFIG.org, + classes="column", + ) + yield TextInput( + "repo_name", + "Repository name", + "The name of the new GitHub repository", + default=self.parent.TEMPLATE_CONFIG.name, + classes="column", + ) + if self.parent.TEMPLATE_CONFIG.is_nfcore: + yield Markdown(dedent(github_org_help)) + with Horizontal(classes="ghrepo-cols"): + yield Switch(value=False, id="private") + with Vertical(): + yield Static("Private", classes="") + yield Static("Select to make the new GitHub repo private.", classes="feature_subtitle") + yield Center( + Button("Back", id="back", variant="default"), + Button("Create GitHub repo", id="create_github", variant="success"), + Button("Finish without creating a repo", id="exit", variant="primary"), + classes="cta", + ) + + def on_button_pressed(self, event: Button.Pressed) -> None: + """Create a GitHub repo or show help message and exit""" + if event.button.id == "show_password": + self.add_class("displayed") + text_input = self.query_one("#token", TextInput) + text_input.query_one(Input).password = False + elif event.button.id == "hide_password": + self.remove_class("displayed") + text_input = self.query_one("#token", TextInput) + text_input.query_one(Input).password = True + elif event.button.id == "create_github": + # Create a GitHub repo + + # Save GitHub username, token and repo name + github_variables = {} + for text_input in self.query("TextInput"): + this_input = text_input.query_one(Input) + github_variables[text_input.field_id] = this_input.value + # Save GitHub repo config + for switch_input in self.query("Switch"): + github_variables[switch_input.id] = switch_input.value + + # Pipeline git repo + pipeline_repo = git.Repo.init( + Path(self.parent.TEMPLATE_CONFIG.outdir) + / Path(self.parent.TEMPLATE_CONFIG.org + "-" + self.parent.TEMPLATE_CONFIG.name) + ) + + # GitHub authentication + if github_variables["token"]: + github_auth = self._github_authentication(github_variables["gh_username"], github_variables["token"]) + else: + raise UserWarning( + f"Could not authenticate to GitHub with user name '{github_variables['gh_username']}'." + "Please provide an authentication token or set the environment variable 'GITHUB_AUTH_TOKEN'." 
+ ) + + user = github_auth.get_user() + org = None + # Make sure that the authentication was successful + try: + user.login + log.debug("GitHub authentication successful") + except GithubException: + raise UserWarning( + f"Could not authenticate to GitHub with user name '{github_variables['gh_username']}'." + "Please make sure that the provided user name and token are correct." + ) + + # Check if organisation exists + # If the organisation is nf-core or it doesn't exist, the repo will be created in the user account + if github_variables["repo_org"] != "nf-core": + try: + org = github_auth.get_organization(github_variables["repo_org"]) + log.info( + f"Repo will be created in the GitHub organisation account '{github_variables['repo_org']}'" + ) + except UnknownObjectException: + log.warn(f"Provided organisation '{github_variables['repo_org']}' not found. ") + + # Create the repo + try: + if org: + self._create_repo_and_push( + org, + github_variables["repo_name"], + pipeline_repo, + github_variables["private"], + ) + else: + # Create the repo in the user's account + log.info( + f"Repo will be created in the GitHub organisation account '{github_variables['gh_username']}'" + ) + self._create_repo_and_push( + user, + github_variables["repo_name"], + pipeline_repo, + github_variables["private"], + ) + except UserWarning as e: + log.error(f"There was an error with message: {e}") + self.parent.push_screen("github_exit") + + self.parent.LOGGING_STATE = "repo created" + self.parent.push_screen("logging") + + class RepoExists(Message): + """Custom message to indicate that the GitHub repo already exists.""" + + pass + + @on(RepoExists) + def show_github_info_button(self) -> None: + remove_hide_class(self.parent, "exit") + remove_hide_class(self.parent, "back") + + @work(thread=True, exclusive=True) + def _create_repo_and_push(self, org, repo_name, pipeline_repo, private): + """Create a GitHub repository and push all branches.""" + self.post_message(ShowLogs()) + # Check if repo already exists + try: + repo = org.get_repo(repo_name) + # Check if it has a commit history + try: + repo.get_commits().totalCount + raise UserWarning(f"GitHub repository '{repo_name}' already exists") + except GithubException: + # Repo is empty + repo_exists = True + except UserWarning as e: + # Repo already exists + log.error(e) + self.post_message(self.RepoExists()) + return + except UnknownObjectException: + # Repo doesn't exist + repo_exists = False + + # Create the repo + if not repo_exists: + repo = org.create_repo(repo_name, description=self.parent.TEMPLATE_CONFIG.description, private=private) + log.info(f"GitHub repository '{repo_name}' created successfully") + remove_hide_class(self.parent, "close_app") + + # Add the remote + try: + pipeline_repo.create_remote("origin", repo.clone_url) + except git.exc.GitCommandError: + # Remote already exists + pass + # Push all branches + pipeline_repo.remotes.origin.push(all=True).raise_if_error() + + def _github_authentication(self, gh_username, gh_token): + """Authenticate to GitHub""" + log.debug(f"Authenticating GitHub as {gh_username}") + github_auth = Github(gh_username, gh_token) + return github_auth + + def _get_github_credentials(self): + """Get GitHub credentials""" + gh_user = None + gh_token = None + # Use gh CLI config if installed + gh_cli_config_fn = os.path.expanduser("~/.config/gh/hosts.yml") + if os.path.exists(gh_cli_config_fn): + try: + with open(gh_cli_config_fn) as fh: + gh_cli_config = yaml.safe_load(fh) + gh_user = (gh_cli_config["github.com"]["user"],) + 
gh_token = gh_cli_config["github.com"]["oauth_token"] + except KeyError: + pass + # If gh CLI not installed, try to get credentials from environment variables + elif os.environ.get("GITHUB_TOKEN") is not None: + gh_token = self.auth = os.environ["GITHUB_TOKEN"] + return (gh_user, gh_token) diff --git a/nf_core/pipelines/create/githubrepoquestion.py b/nf_core/pipelines/create/githubrepoquestion.py new file mode 100644 index 000000000..ded33d188 --- /dev/null +++ b/nf_core/pipelines/create/githubrepoquestion.py @@ -0,0 +1,36 @@ +import logging +from textwrap import dedent + +from textual.app import ComposeResult +from textual.containers import Center +from textual.screen import Screen +from textual.widgets import Button, Footer, Header, Markdown + +log = logging.getLogger(__name__) + +github_text_markdown = """ +After creating the pipeline template locally, we can create a GitHub repository and push the code to it. + +Do you want to create a GitHub repository? +""" + + +class GithubRepoQuestion(Screen): + """Ask if the user wants to create a GitHub repository.""" + + def compose(self) -> ComposeResult: + yield Header() + yield Footer() + yield Markdown( + dedent( + """ + # Create GitHub repository + """ + ) + ) + yield Markdown(dedent(github_text_markdown)) + yield Center( + Button("Create GitHub repo", id="github_repo", variant="success"), + Button("Finish without creating a repo", id="exit", variant="primary"), + classes="cta", + ) diff --git a/nf_core/pipelines/create/loggingscreen.py b/nf_core/pipelines/create/loggingscreen.py new file mode 100644 index 000000000..f862dccea --- /dev/null +++ b/nf_core/pipelines/create/loggingscreen.py @@ -0,0 +1,48 @@ +from textwrap import dedent + +from textual.app import ComposeResult +from textual.containers import Center +from textual.screen import Screen +from textual.widgets import Button, Footer, Header, Markdown, Static + +from nf_core.pipelines.create.utils import add_hide_class +from nf_core.utils import nfcore_logo + + +class LoggingScreen(Screen): + """A screen to show the final logs.""" + + def compose(self) -> ComposeResult: + yield Header() + yield Footer() + yield Markdown( + dedent( + """ + # Logging + """ + ) + ) + yield Static( + "\n" + "\n".join(nfcore_logo) + "\n", + id="logo", + ) + yield Markdown("Creating...") + yield Center(self.parent.LOG_HANDLER.console) + yield Center( + Button("Back", id="back", variant="default", classes="hide"), + Button("Continue", id="close_screen", variant="success", classes="hide"), + Button("Continue", id="exit", variant="success", classes="hide"), + Button("Close App", id="close_app", variant="success", classes="hide"), + classes="cta", + ) + + def on_screen_resume(self): + """Hide all buttons as disabled on screen resume.""" + button_ids = ["back", "close_screen", "exit", "close_app"] + for button in self.query("Button"): + if button.id in button_ids: + add_hide_class(self.parent, button.id) + + def on_screen_suspend(self): + """Clear console on screen suspend.""" + self.parent.LOG_HANDLER.console.clear() diff --git a/nf_core/pipelines/create/nfcorepipeline.py b/nf_core/pipelines/create/nfcorepipeline.py new file mode 100644 index 000000000..ebb986698 --- /dev/null +++ b/nf_core/pipelines/create/nfcorepipeline.py @@ -0,0 +1,47 @@ +from textwrap import dedent + +from textual import on +from textual.app import ComposeResult +from textual.containers import Center, ScrollableContainer +from textual.screen import Screen +from textual.widgets import Button, Footer, Header, Markdown, Switch + +from 
nf_core.pipelines.create.utils import PipelineFeature + + +class NfcorePipeline(Screen): + """Select the template features to include in an nf-core pipeline.""" + + def compose(self) -> ComposeResult: + yield Header() + yield Footer() + yield Markdown( + dedent( + """ + # Template features + """ + ) + ) + yield ScrollableContainer(id="features") + yield Center( + Button("Back", id="back", variant="default"), + Button("Continue", id="continue", variant="success"), + classes="cta", + ) + + def on_mount(self) -> None: + for name, feature in self.parent.template_features_yml.items(): + if feature["nfcore_pipelines"]: + self.query_one("#features").mount( + PipelineFeature(feature["help_text"], feature["short_description"], feature["description"], name) + ) + + @on(Button.Pressed, "#continue") + def on_button_pressed(self, event: Button.Pressed) -> None: + """Save fields to the config.""" + skip = [] + for feature_input in self.query("PipelineFeature"): + this_switch = feature_input.query_one(Switch) + if not this_switch.value: + skip.append(this_switch.id) + self.parent.TEMPLATE_CONFIG.__dict__.update({"skip_features": skip, "is_nfcore": True}) diff --git a/nf_core/pipelines/create/pipelinetype.py b/nf_core/pipelines/create/pipelinetype.py new file mode 100644 index 000000000..48914e855 --- /dev/null +++ b/nf_core/pipelines/create/pipelinetype.py @@ -0,0 +1,56 @@ +from textual.app import ComposeResult +from textual.containers import Center, Grid +from textual.screen import Screen +from textual.widgets import Button, Footer, Header, Markdown + +markdown_intro = """ +# Choose pipeline type +""" + +markdown_type_nfcore = """ +## Choose _"nf-core"_ if: + +* You want your pipeline to be part of the nf-core community +* You think that there's an outside chance that it ever _could_ be part of nf-core +""" +markdown_type_custom = """ +## Choose _"Custom"_ if: + +* Your pipeline will _never_ be part of nf-core +* You want full control over *all* features that are included from the template + (including those that are mandatory for nf-core). +""" + +markdown_details = """ +## What's the difference? + +Choosing _"nf-core"_ effectively pre-selects the following template features: + +* GitHub Actions continuous-integration configuration files: + * Pipeline test runs: Small-scale (GitHub) and large-scale (AWS) + * Code formatting checks with [Prettier](https://prettier.io/) + * Auto-fix linting functionality using [@nf-core-bot](https://github.com/nf-core-bot) + * Marking old issues as stale +* Inclusion of [shared nf-core configuration profiles](https://nf-co.re/configs) +""" + + +class ChoosePipelineType(Screen): + """Choose whether this will be an nf-core pipeline or not.""" + + def compose(self) -> ComposeResult: + yield Header() + yield Footer() + yield Markdown(markdown_intro) + yield Grid( + Center( + Markdown(markdown_type_nfcore), + Center(Button("nf-core", id="type_nfcore", variant="success")), + ), + Center( + Markdown(markdown_type_custom), + Center(Button("Custom", id="type_custom", variant="primary")), + ), + classes="col-2 pipeline-type-grid", + ) + yield Markdown(markdown_details) diff --git a/nf_core/pipelines/create/template_features.yml b/nf_core/pipelines/create/template_features.yml new file mode 100644 index 000000000..3eb654726 --- /dev/null +++ b/nf_core/pipelines/create/template_features.yml @@ -0,0 +1,434 @@ +github: + skippable_paths: + - ".github" + - ".gitattributes" + short_description: "Use a GitHub repository." + description: "Create a GitHub repository for the pipeline."
+ help_text: | + This will create a GitHub repository for the pipeline. + + The repository will include: + - Continuous Integration (CI) tests + - Issues and pull requests templates + + The initialisation of a git repository is required to use nf-core/tools. + This means that even if you unselect this option, your pipeline will still contain a `.git` directory and `.gitignore` file. + linting: + files_exist: + - ".github/ISSUE_TEMPLATE/bug_report.yml" + - ".github/ISSUE_TEMPLATE/feature_request.yml" + - ".github/PULL_REQUEST_TEMPLATE.md" + - ".github/CONTRIBUTING.md" + - ".github/.dockstore.yml" + files_unchanged: + - ".github/ISSUE_TEMPLATE/bug_report.yml" + - ".github/ISSUE_TEMPLATE/config.yml" + - ".github/ISSUE_TEMPLATE/feature_request.yml" + - ".github/PULL_REQUEST_TEMPLATE.md" + - ".github/workflows/branch.yml" + - ".github/workflows/linting_comment.yml" + - ".github/workflows/linting.yml" + - ".github/CONTRIBUTING.md" + - ".github/.dockstore.yml" + readme: + - "nextflow_badge" + nfcore_pipelines: False + custom_pipelines: True +ci: + skippable_paths: + - ".github/workflows/" + short_description: "Add GitHub CI tests" + description: "The pipeline will include several GitHub actions for Continuous Integration (CI) testing" + help_text: | + nf-core provides a set of Continuous Integration (CI) tests for GitHub. + When you open a pull request (PR) on your pipeline repository, these tests will run automatically. + + There are different types of tests: + * Linting tests check that your code is formatted correctly and that it adheres to nf-core standards. + For code linting they will use [prettier](https://prettier.io/). + * Pipeline tests run your pipeline on a small dataset to check that it works. + These tests are run with a small test dataset on GitHub and a larger test dataset on AWS + * Marking old issues as stale + linting: + files_exist: + - ".github/workflows/branch.yml" + - ".github/workflows/ci.yml" + - ".github/workflows/linting_comment.yml" + - ".github/workflows/linting.yml" + nfcore_pipelines: False + custom_pipelines: True +igenomes: + skippable_paths: + - "conf/igenomes.config" + - "conf/igenomes_ignored.config" + short_description: "Use reference genomes" + description: "The pipeline will be configured to use a copy of the most common reference genome files from iGenomes" + help_text: | + nf-core pipelines are configured to use a copy of the most common reference genome files. + + By selecting this option, your pipeline will include a configuration file specifying the paths to these files. + + The required code to use these files will also be included in the template. + When the pipeline user provides an appropriate genome key, + the pipeline will automatically download the required reference files. + + For more information about reference genomes in nf-core pipelines, + see the [nf-core docs](https://nf-co.re/docs/usage/reference_genomes).
+ linting: + files_exist: + - "conf/igenomes.config" + - "conf/igenomes_ignored.config" + nfcore_pipelines: True + custom_pipelines: True +github_badges: + skippable_paths: False + short_description: "Add GitHub badges" + description: "The README.md file of the pipeline will include GitHub badges" + help_text: | + The pipeline `README.md` will include badges for: + * AWS CI Tests + * Zenodo DOI + * Nextflow + * Conda + * Docker + * Singularity + * Launching on Nextflow Tower + linting: + readme: + - "nextflow_badge" + nfcore_pipelines: False + custom_pipelines: True +nf_core_configs: + skippable_paths: False + short_description: "Add configuration files" + description: "The pipeline will include configuration profiles containing custom parameters required to run nf-core pipelines at different institutions" + help_text: | + nf-core has a repository with a collection of configuration profiles. + + Those config files define a set of parameters which are specific to compute environments at different institutions. + They can be used within all nf-core pipelines. + If you are likely to be running nf-core pipelines regularly, it is a good idea to use or create a custom config file for your organisation. + + For more information about nf-core configuration profiles, see the [nf-core/configs repository](https://github.com/nf-core/configs) + linting: + files_exist: + - "conf/igenomes.config" + nextflow_config: + - "process.cpus" + - "process.memory" + - "process.time" + - "custom_config" + - "params.custom_config_version" + - "params.custom_config_base" + included_configs: False + nfcore_pipelines: False + custom_pipelines: True +is_nfcore: + skippable_paths: + - ".github/ISSUE_TEMPLATE/config" + - "CODE_OF_CONDUCT.md" + - ".github/workflows/awsfulltest.yml" + - ".github/workflows/awstest.yml" + - ".github/workflows/release-announcements.yml" + short_description: "A custom pipeline which won't be part of the nf-core organisation but will be compatible with nf-core/tools." + description: "" + help_text: "" + linting: + files_exist: + - "CODE_OF_CONDUCT.md" + - "assets/nf-core-{{short_name}}_logo_light.png" + - "docs/images/nf-core-{{short_name}}_logo_light.png" + - "docs/images/nf-core-{{short_name}}_logo_dark.png" + - ".github/ISSUE_TEMPLATE/config.yml" + - ".github/workflows/awstest.yml" + - ".github/workflows/awsfulltest.yml" + files_unchanged: + - "CODE_OF_CONDUCT.md" + - "assets/nf-core-{{short_name}}_logo_light.png" + - "docs/images/nf-core-{{short_name}}_logo_light.png" + - "docs/images/nf-core-{{short_name}}_logo_dark.png" + - ".github/ISSUE_TEMPLATE/bug_report.yml" + nextflow_config: + - "manifest.name" + - "manifest.homePage" + - "validation.help.beforeText" + - "validation.help.afterText" + - "validation.summary.beforeText" + - "validation.summary.afterText" + multiqc_config: + - "report_comment" + nfcore_pipelines: False + custom_pipelines: False +code_linters: + skippable_paths: + - ".editorconfig" + - ".pre-commit-config.yaml" + - ".prettierignore" + - ".prettierrc.yml" + - ".github/workflows/fix-linting.yml" + short_description: "Use code linters" + description: "The pipeline will include code linters and CI tests to lint your code: pre-commit, editor-config and prettier." + help_text: | + Pipelines include code linters to check the formatting of your code in order to harmonize code styles between developers. + Linters will check all non-ignored files, e.g., JSON, YAML, Nextflow or Python files in your repository.
+ The available code linters are: + + - pre-commit (https://pre-commit.com/): used to run all code-linters on every PR and on every commit if you run `pre-commit install` to install it in your local repository. + - editor-config (https://github.com/editorconfig-checker/editorconfig-checker): checks rules such as indentation or trailing spaces. + - prettier (https://github.com/prettier/prettier): enforces a consistent style (indentation, quoting, line length, etc.). + linting: + files_exist: + - ".editorconfig" + - ".prettierignore" + - ".prettierrc.yml" + nfcore_pipelines: False + custom_pipelines: True +citations: + skippable_paths: + - "assets/methods_description_template.yml" + - "CITATIONS.md" + short_description: "Include citations" + description: "Include pipeline tools citations in CITATIONS.md and a method description in the MultiQC report (if enabled)." + help_text: | + If adding citations, the pipeline template will contain a `CITATIONS.md` file to add the citations of all tools used in the pipeline. + + Additionally, it will include a YAML file (`assets/methods_description_template.yml`) to add a Materials & Methods section describing the tools used in the pipeline, + and the logic to add this section to the output MultiQC report (if the report is generated). + linting: + files_exist: + - "CITATIONS.md" + nfcore_pipelines: False + custom_pipelines: True +gitpod: + skippable_paths: + - ".gitpod.yml" + short_description: "Include a gitpod environment" + description: "Include the configuration required to use Gitpod." + help_text: | + Gitpod (https://www.gitpod.io/) provides standardized and automated development environments. + + Including this in your pipeline will provide an environment with the latest version of nf-core/tools installed and all its requirements. + This is useful to have all the tools ready for pipeline development. + nfcore_pipelines: False + custom_pipelines: True +codespaces: + skippable_paths: + - ".devcontainer/devcontainer.json" + short_description: "Include GitHub Codespaces" + description: "The pipeline will include a devcontainer configuration for GitHub Codespaces, providing a development environment with nf-core/tools and Nextflow installed." + help_text: | + The pipeline will include a devcontainer configuration. + The devcontainer will create a GitHub Codespace for Nextflow development with nf-core/tools and Nextflow installed. + + GitHub Codespaces (https://github.com/features/codespaces) is an online developer environment that runs in your browser, complete with VSCode and a terminal. + linting: + files_unchanged: + - ".github/CONTRIBUTING.md" + nfcore_pipelines: False + custom_pipelines: True +multiqc: + skippable_paths: + - "assets/multiqc_config.yml" + - "assets/methods_description_template.yml" + - "modules/nf-core/multiqc/" + short_description: "Use MultiQC" + description: "The pipeline will include the MultiQC module which generates an HTML report for quality control." + help_text: | + MultiQC is a visualization tool that generates a single HTML report summarising all samples in your project. Most of the pipeline quality control results can be visualised in the report and further statistics are available in the report data directory. + + The pipeline will include the MultiQC module and will have special steps which also allow the software versions to be reported in the MultiQC output for future traceability. For more information about how to use MultiQC reports, see http://multiqc.info.
+  linting:
+    files_unchanged:
+      - ".github/CONTRIBUTING.md"
+      - "assets/sendmail_template.txt"
+    files_exist:
+      - "assets/multiqc_config.yml"
+    multiqc_config: False
+  nfcore_pipelines: True
+  custom_pipelines: True
+fastqc:
+  skippable_paths:
+    - "modules/nf-core/fastqc/"
+  short_description: "Use FastQC"
+  description: "The pipeline will include the FastQC module which performs quality control analysis of input FASTQ files."
+  help_text: |
+    FastQC is a tool which provides quality control checks on raw sequencing data.
+    The pipeline will include the FastQC module.
+  nfcore_pipelines: True
+  custom_pipelines: True
+modules:
+  skippable_paths:
+    - "conf/base.config"
+    - "conf/modules.config"
+    - "modules.json"
+    - "modules"
+    - "subworkflows"
+  short_description: "Use nf-core components"
+  description: "Include all required files to use nf-core modules and subworkflows"
+  help_text: |
+    It is *recommended* to use this feature if you want to use modules and subworkflows in your pipeline.
+    This will add all required files to use nf-core components or any compatible components from private repos by using the `nf-core modules` and `nf-core subworkflows` commands.
+  linting:
+    nfcore_components: False
+    modules_json: False
+    base_config: False
+    modules_config: False
+    files_exist:
+      - "conf/base.config"
+      - "conf/modules.config"
+      - "modules.json"
+  nfcore_pipelines: False
+  custom_pipelines: True
+changelog:
+  skippable_paths:
+    - "CHANGELOG.md"
+  short_description: "Add a changelog"
+  description: "Add a CHANGELOG.md file."
+  help_text: |
+    Having a `CHANGELOG.md` file in the pipeline root directory is useful to track the changes added to each version.
+
+    You can read more information on the recommended format here: https://keepachangelog.com/en/1.0.0/
+  linting:
+    files_exist:
+      - "CHANGELOG.md"
+  nfcore_pipelines: False
+  custom_pipelines: True
+nf_schema:
+  skippable_paths:
+    - "subworkflows/nf-core/utils_nfschema_plugin"
+    - "nextflow_schema.json"
+    - "assets/schema_input.json"
+    - "assets/samplesheet.csv"
+  short_description: "Use nf-schema"
+  description: "Use the nf-schema Nextflow plugin for this pipeline."
+  help_text: |
+    [nf-schema](https://nextflow-io.github.io/nf-schema/latest/) is used to validate input parameters based on a JSON schema.
+    It also provides helper functionality to create help messages, get a summary
+    of changed parameters and validate and convert a samplesheet to a channel.
+  linting:
+    files_exist:
+      - "nextflow_schema.json"
+    schema_params: False
+    schema_lint: False
+    schema_description: False
+    nextflow_config: False
+  nfcore_pipelines: True
+  custom_pipelines: True
+license:
+  skippable_paths:
+    - "LICENSE"
+  short_description: "Add a license file"
+  description: "Add the MIT license file."
+  help_text: |
+    To protect the copyright of the pipeline, you can add a LICENSE file.
+    This option adds the MIT License. You can read the conditions here: https://opensource.org/license/MIT
+  linting:
+    files_exist:
+      - "LICENSE"
+    files_unchanged:
+      - "LICENSE"
+  nfcore_pipelines: False
+  custom_pipelines: True
+email:
+  skippable_paths:
+    - "assets/email_template.html"
+    - "assets/sendmail_template.txt"
+    - "assets/email_template.txt"
+  short_description: "Enable email updates"
+  description: "Enable sending emails on pipeline completion."
+  help_text: |
+    Enable the option of sending an email which will include pipeline execution reports on pipeline completion.
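+
+    For example (illustrative only), the address can then be set like any other pipeline parameter, e.g. in a YAML file passed with `-params-file`:
+
+    ```yaml
+    email: "you@example.com"
+    ```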
+  linting:
+    files_exist:
+      - "assets/email_template.html"
+      - "assets/sendmail_template.txt"
+      - "assets/email_template.txt"
+    files_unchanged:
+      - ".prettierignore"
+  nfcore_pipelines: False
+  custom_pipelines: True
+adaptivecard:
+  skippable_paths:
+    - "assets/adaptivecard.json"
+  short_description: "Support Microsoft Teams notifications"
+  description: "Enable pipeline status update messages through Microsoft Teams"
+  help_text: |
+    This adds an Adaptive Card, a snippet of user interface.
+    This Adaptive Card is used as a template for pipeline update messages and it is compatible with Microsoft Teams.
+  linting:
+    files_unchanged:
+      - ".prettierignore"
+  nfcore_pipelines: False
+  custom_pipelines: True
+slackreport:
+  skippable_paths:
+    - "assets/slackreport.json"
+  short_description: "Support Slack notifications"
+  description: "Enable pipeline status update messages through Slack"
+  help_text: |
+    This adds a JSON file used as a template for pipeline update messages in Slack.
+  linting:
+    files_unchanged:
+      - ".prettierignore"
+  nfcore_pipelines: False
+  custom_pipelines: True
+documentation:
+  skippable_paths:
+    - "docs"
+  short_description: "Add documentation"
+  description: "Add documentation to the pipeline"
+  help_text: |
+    This will add documentation markdown files where you can describe your pipeline.
+    It includes:
+    - docs/README.md: A README file where you can describe the structure of your documentation.
+    - docs/output.md: A file where you can explain the output generated by the pipeline.
+    - docs/usage.md: A file where you can explain the usage of the pipeline and its parameters.
+
+    These files come with an example documentation structure already written.
+  linting:
+    files_exist:
+      - "docs/output.md"
+      - "docs/README.md"
+      - "docs/usage.md"
+  nfcore_pipelines: False
+  custom_pipelines: True
+test_config:
+  skippable_paths:
+    - "conf/test.config"
+    - "conf/test_full.config"
+    - ".github/workflows/awsfulltest.yml"
+    - ".github/workflows/awstest.yml"
+    - ".github/workflows/ci.yml"
+  short_description: "Add testing profiles"
+  description: "Add two default testing profiles"
+  help_text: |
+    This will add two default testing profiles to run the pipeline with different inputs.
+    You can customise them and add other test profiles.
+
+    These profiles can be used to run the pipeline with a minimal testing dataset with `nextflow run -profile test`.
+
+    The pipeline will include two profiles: `test` and `test_full`.
+    In nf-core, we typically use the `test` profile to run the pipeline with a minimal dataset and the `test_full` profile to run the pipeline with a larger dataset that simulates a real-world scenario.
+  linting:
+    files_exist:
+      - "conf/test.config"
+      - "conf/test_full.config"
+      - ".github/workflows/ci.yml"
+    nextflow_config: False
+    files_unchanged:
+      - ".github/CONTRIBUTING.md"
+      - ".github/PULL_REQUEST_TEMPLATE.md"
+  nfcore_pipelines: False
+  custom_pipelines: True
+seqera_platform:
+  skippable_paths:
+    - "tower.yml"
+  short_description: "Add Seqera Platform output"
+  description: "Add a YAML file to specify which output files to upload when launching a pipeline from the Seqera Platform"
+  help_text: |
+    When launching a pipeline with the Seqera Platform, a `tower.yml` file can be used to add configuration options.
+
+    In the pipeline template, this file is used to specify the output files of your pipeline which will be shown on the reports tab of Seqera Platform.
+    You can extend this file by adding any other desired configuration.
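+
+    For example, a minimal `tower.yml` sketch registering the MultiQC report could look like:
+
+    ```yaml
+    reports:
+      multiqc_report.html:
+        display: "MultiQC HTML report"
+    ```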
+ nfcore_pipelines: False + custom_pipelines: True diff --git a/nf_core/pipelines/create/utils.py b/nf_core/pipelines/create/utils.py new file mode 100644 index 000000000..9b331c2a3 --- /dev/null +++ b/nf_core/pipelines/create/utils.py @@ -0,0 +1,255 @@ +import re +from contextlib import contextmanager +from contextvars import ContextVar +from logging import LogRecord +from pathlib import Path +from typing import Any, Dict, Iterator, Union + +import yaml +from pydantic import ConfigDict, ValidationError, ValidationInfo, field_validator +from rich.logging import RichHandler +from textual import on +from textual._context import active_app +from textual.app import ComposeResult +from textual.containers import Grid, HorizontalScroll +from textual.message import Message +from textual.validation import ValidationResult, Validator +from textual.widget import Widget +from textual.widgets import Button, Input, Markdown, RichLog, Static, Switch + +import nf_core +from nf_core.utils import NFCoreTemplateConfig + +# Use ContextVar to define a context on the model initialization +_init_context_var: ContextVar = ContextVar("_init_context_var", default={}) + + +@contextmanager +def init_context(value: Dict[str, Any]) -> Iterator[None]: + token = _init_context_var.set(value) + try: + yield + finally: + _init_context_var.reset(token) + + +# Define a global variable to store the pipeline type +NFCORE_PIPELINE_GLOBAL: bool = True + +# YAML file describing template features +features_yml_path = Path(nf_core.__file__).parent / "pipelines" / "create" / "template_features.yml" + + +class CreateConfig(NFCoreTemplateConfig): + """Pydantic model for the nf-core create config.""" + + model_config = ConfigDict(extra="allow") + + def __init__(self, /, **data: Any) -> None: + """Custom init method to allow using a context on the model initialization.""" + self.__pydantic_validator__.validate_python( + data, + self_instance=self, + context=_init_context_var.get(), + ) + + @field_validator("name") + @classmethod + def name_nospecialchars(cls, v: str, info: ValidationInfo) -> str: + """Check that the pipeline name is simple.""" + context = info.context + if context and context["is_nfcore"]: + if not re.match(r"^[a-z]+$", v): + raise ValueError("Must be lowercase without punctuation.") + else: + if not re.match(r"^[-\w]+$", v): + raise ValueError("Must not contain special characters. Only '-' or '_' are allowed.") + return v + + @field_validator("org", "description", "author", "version", "outdir") + @classmethod + def notempty(cls, v: str) -> str: + """Check that string values are not empty.""" + if v.strip() == "": + raise ValueError("Cannot be left empty.") + return v + + @field_validator("version") + @classmethod + def version_nospecialchars(cls, v: str) -> str: + """Check that the pipeline version is simple.""" + if not re.match(r"^([0-9]+)(\.?([0-9]+))*(dev)?$", v): + raise ValueError( + "Must contain at least one number, and can be prefixed by 'dev'. Do not use a 'v' prefix or spaces." + ) + return v + + @field_validator("outdir") + @classmethod + def path_valid(cls, v: str) -> str: + """Check that a path is valid.""" + if not Path(v).is_dir(): + raise ValueError("Must be a valid path.") + return v + + +class TextInput(Static): + """Widget for text inputs. + + Provides standard interface for a text input with help text + and validation messages. + """ + + def __init__(self, field_id, placeholder, description, default="", password=False, **kwargs) -> None: + """Initialise the widget with our values. 
+
+        Pass on kwargs upstream for standard usage."""
+        super().__init__(**kwargs)
+        self.field_id: str = field_id
+        self.id: str = field_id
+        self.placeholder: str = placeholder
+        self.description: str = description
+        self.default: str = default
+        self.password: bool = password
+
+    def compose(self) -> ComposeResult:
+        yield Grid(
+            Static(self.description, classes="field_help"),
+            Input(
+                placeholder=self.placeholder,
+                validators=[ValidateConfig(self.field_id)],
+                value=self.default,
+                password=self.password,
+            ),
+            Static(classes="validation_msg"),
+            classes="text-input-grid",
+        )
+
+    @on(Input.Changed)
+    @on(Input.Submitted)
+    def show_invalid_reasons(self, event: Union[Input.Changed, Input.Submitted]) -> None:
+        """Validate the text input and show errors if invalid."""
+        # query_one() is typed to return any Widget, so narrow to Static before updating
+        val_msg = self.query_one(".validation_msg")
+        if not isinstance(val_msg, Static):
+            raise ValueError("Validation message not found.")
+
+        if event.validation_result is not None and not event.validation_result.is_valid:
+            val_msg.update("\n".join(event.validation_result.failure_descriptions))
+        else:
+            # Clear any previous error message once the input is valid again
+            val_msg.update("")
+
+
+class ValidateConfig(Validator):
+    """Validate any config value, using Pydantic."""
+
+    def __init__(self, key) -> None:
+        """Initialise the validator with the model key to validate."""
+        super().__init__()
+        self.key = key
+
+    def validate(self, value: str) -> ValidationResult:
+        """Try creating a Pydantic object with this key set to this value.
+
+        If it fails, return the error messages."""
+        try:
+            with init_context({"is_nfcore": NFCORE_PIPELINE_GLOBAL}):
+                CreateConfig(**{f"{self.key}": value})
+            return self.success()
+        except ValidationError as e:
+            return self.failure(", ".join([err["msg"] for err in e.errors()]))
+
+
+class HelpText(Markdown):
+    """A class to show a text box with help text."""
+
+    def __init__(self, **kwargs) -> None:
+        super().__init__(**kwargs)
+
+    def show(self) -> None:
+        """Method to show the help text box."""
+        self.add_class("displayed")
+
+    def hide(self) -> None:
+        """Method to hide the help text box."""
+        self.remove_class("displayed")
+
+
+class PipelineFeature(Static):
+    """Widget for the selection of pipeline features."""
+
+    def __init__(self, markdown: str, title: str, subtitle: str, field_id: str, **kwargs) -> None:
+        super().__init__(**kwargs)
+        self.markdown = markdown
+        self.title = title
+        self.subtitle = subtitle
+        self.field_id = field_id
+
+    def on_button_pressed(self, event: Button.Pressed) -> None:
+        """When the button is pressed, change the type of the button."""
+        if event.button.id == "show_help":
+            self.add_class("displayed")
+        elif event.button.id == "hide_help":
+            self.remove_class("displayed")
+
+    def compose(self) -> ComposeResult:
+        """
+        Create child widgets.
+
+        Displayed row with a switch, a short text description and a help button.
+        Hidden row with a help text box.
+        """
+        yield HorizontalScroll(
+            Switch(value=True, id=self.field_id),
+            Static(self.title, classes="feature_title"),
+            Static(self.subtitle, classes="feature_subtitle"),
+            Button("Show help", id="show_help", variant="primary"),
+            Button("Hide help", id="hide_help"),
+            classes="custom_grid",
+        )
+        yield HelpText(markdown=self.markdown, classes="help_box")
+
+
+class LoggingConsole(RichLog):
+    file = False
+    console: Widget
+
+    def print(self, content):
+        self.write(content)
+
+
+class CustomLogHandler(RichHandler):
+    """A Logging handler which extends RichHandler to write to a Widget and handle a Textual App."""
+
+    def emit(self, record: LogRecord) -> None:
+        """Invoked by logging."""
+        try:
+            _app = active_app.get()
+        except LookupError:
+            pass
+        else:
+            super().emit(record)
+
+
+class ShowLogs(Message):
+    """Custom message to show the logging messages."""
+
+    pass
+
+
+## Functions
+def add_hide_class(app, widget_id: str) -> None:
+    """Add the 'hide' class to a widget so that it is not displayed."""
+    app.get_widget_by_id(widget_id).add_class("hide")
+
+
+def remove_hide_class(app, widget_id: str) -> None:
+    """Remove the 'hide' class from a widget so that it is displayed."""
+    app.get_widget_by_id(widget_id).remove_class("hide")
+
+
+def load_features_yaml() -> Dict:
+    """Load the YAML file describing template features."""
+    with open(features_yml_path) as fh:
+        return yaml.safe_load(fh)
diff --git a/nf_core/pipelines/create/welcome.py b/nf_core/pipelines/create/welcome.py
new file mode 100644
index 000000000..1da0a3c01
--- /dev/null
+++ b/nf_core/pipelines/create/welcome.py
@@ -0,0 +1,37 @@
+from textual.app import ComposeResult
+from textual.containers import Center
+from textual.screen import Screen
+from textual.widgets import Button, Footer, Header, Markdown, Static
+
+from nf_core.utils import nfcore_logo
+
+markdown = """
+# Welcome to the nf-core pipeline creation wizard
+
+This app will help you create a new Nextflow pipeline
+from the [nf-core/tools pipeline template](https://github.com/nf-core/tools).
+
+The template helps anyone benefit from nf-core best practices,
+and is a requirement for nf-core pipelines.
+
+> 💡 If you want to add a pipeline to nf-core, please
+> [join on Slack](https://nf-co.re/join) and discuss your plans with the
+> community as early as possible; _**ideally before you start on your pipeline!**_
+> See the [nf-core guidelines](https://nf-co.re/docs/contributing/guidelines)
+> and the [#new-pipelines](https://nfcore.slack.com/channels/new-pipelines)
+> Slack channel for more information.
+""" + + +class WelcomeScreen(Screen): + """A welcome screen for the app.""" + + def compose(self) -> ComposeResult: + yield Header() + yield Footer() + yield Static( + "\n" + "\n".join(nfcore_logo) + "\n", + id="logo", + ) + yield Markdown(markdown) + yield Center(Button("Let's go!", id="start", variant="success"), classes="cta") diff --git a/nf_core/create_logo.py b/nf_core/pipelines/create_logo.py similarity index 80% rename from nf_core/create_logo.py rename to nf_core/pipelines/create_logo.py index 1e96b7032..c54d8f208 100644 --- a/nf_core/create_logo.py +++ b/nf_core/pipelines/create_logo.py @@ -1,6 +1,6 @@ import logging from pathlib import Path -from typing import Union +from typing import Optional, Union from PIL import Image, ImageDraw, ImageFont @@ -12,7 +12,7 @@ def create_logo( text: str, - dir: Union[Path, str], + directory: Union[Path, str], filename: str = "", theme: str = "light", width: int = 2300, @@ -20,13 +20,12 @@ def create_logo( force: bool = False, ) -> Path: """Create a logo for a pipeline.""" - if not text: raise UserWarning("Please provide the name of the text to put on the logo.") - dir = Path(dir) - if not dir.is_dir(): - log.debug(f"Creating directory {dir}") - dir.mkdir(parents=True, exist_ok=True) + directory = Path(directory) + if not directory.is_dir(): + log.debug(f"Creating directory {directory}") + directory.mkdir(parents=True, exist_ok=True) assets = Path(nf_core.__file__).parent / "assets/logo" if format == "svg": @@ -44,7 +43,7 @@ def create_logo( # save the svg logo_filename = f"nf-core-{text}_logo_{theme}.svg" if not filename else filename logo_filename = f"{logo_filename}.svg" if not logo_filename.lower().endswith(".svg") else logo_filename - logo_path = Path(dir, logo_filename) + logo_path = Path(directory, logo_filename) with open(logo_path, "w") as fh: fh.write(svg) @@ -52,7 +51,7 @@ def create_logo( logo_filename = f"nf-core-{text}_logo_{theme}.png" if not filename else filename logo_filename = f"{logo_filename}.png" if not logo_filename.lower().endswith(".png") else logo_filename cache_name = f"nf-core-{text}_logo_{theme}_{width}.png" - logo_path = Path(dir, logo_filename) + logo_path = Path(directory, logo_filename) # Check if we haven't already created this logo if logo_path.is_file() and not force: @@ -60,11 +59,11 @@ def create_logo( return logo_path # cache file cache_path = Path(NFCORE_CACHE_DIR, "logo", cache_name) - img = None + img: Optional[Image.Image] = None if cache_path.is_file(): log.debug(f"Logo already exists in cache at: {cache_path}. 
Reusing this file.") - img = Image.open(str(cache_path)) - if not img: + img = Image.open(cache_path) + if img is None: log.debug(f"Creating logo for {text}") # make sure the figure fits the text @@ -82,7 +81,7 @@ def create_logo( template_fn = "nf-core-repo-logo-base-darkbg.png" template_path = assets / template_fn - img = Image.open(str(template_path)) + img = Image.open(template_path) # get the height of the template image height = img.size[1] @@ -91,11 +90,14 @@ def create_logo( color = theme == "dark" and (250, 250, 250) or (5, 5, 5) draw.text((110, 465), text, color, font=font) - # Crop to max width - img = img.crop((0, 0, max_width, height)) + if img is not None: + # Crop to max width + img = img.crop((0, 0, max_width, height)) - # Resize - img = img.resize((width, int((width / max_width) * height))) + # Resize + img = img.resize((width, int((width / max_width) * height))) + else: + log.error("Failed to create logo, no image object created.") # Save to cache Path(cache_path.parent).mkdir(parents=True, exist_ok=True) diff --git a/nf_core/download.py b/nf_core/pipelines/download.py similarity index 98% rename from nf_core/download.py rename to nf_core/pipelines/download.py index f5ab3a0f5..b9028d4b3 100644 --- a/nf_core/download.py +++ b/nf_core/pipelines/download.py @@ -10,7 +10,8 @@ import tarfile import textwrap from datetime import datetime -from typing import List, Optional, Tuple +from pathlib import Path +from typing import Any, Dict, List, Optional, Tuple from zipfile import ZipFile import git @@ -20,10 +21,11 @@ import rich import rich.progress from git.exc import GitCommandError, InvalidGitRepositoryError -from pkg_resources import parse_version as version_parser +from packaging.version import Version import nf_core -import nf_core.list +import nf_core.modules.modules_utils +import nf_core.pipelines.list import nf_core.utils from nf_core.synced_repo import RemoteProgressbar, SyncedRepo from nf_core.utils import ( @@ -42,7 +44,7 @@ class DownloadError(RuntimeError): - """A custom exception that is raised when nf-core download encounters a problem that we already took into consideration. + """A custom exception that is raised when nf-core pipelines download encounters a problem that we already took into consideration. In this case, we do not want to print the traceback, but give the user some concise, helpful feedback instead. """ @@ -130,10 +132,9 @@ def __init__( self.compress_type = compress_type self.force = force self.platform = platform - # if flag is not specified, do not assume deliberate choice and prompt config inclusion interactively. - # this implies that non-interactive "no" choice is only possible implicitly (e.g. with --platform or if prompt is suppressed by !stderr.is_interactive). - # only alternative would have been to make it a parameter with argument, e.g. -d="yes" or -d="no". - self.include_configs = True if download_configuration else False if bool(platform) else None + self.fullname: Optional[str] = None + # downloading configs is not supported for Seqera Platform downloads. + self.include_configs = True if download_configuration == "yes" and not bool(platform) else False # Additional tags to add to the downloaded pipeline. This enables to mark particular commits or revisions with # additional tags, e.g. "stable", "testing", "validated", "production" etc. Since this requires a git-repo, it is only # available for the bare / Seqera Platform download. 
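For clarity, the new `include_configs` expression in the `__init__` hunk above reduces to the following truth table (a standalone sketch with a hypothetical helper name, not code from this PR):

```python
def resolve_include_configs(download_configuration: str, platform: bool) -> bool:
    """Institutional configs are bundled only when explicitly requested,
    and never for Seqera Platform (bare repository) downloads."""
    return download_configuration == "yes" and not platform
```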
@@ -160,8 +161,8 @@ def __init__( # allows to specify a container library / registry or a respective mirror to download images from self.parallel_downloads = parallel_downloads - self.wf_revisions = {} - self.wf_branches = {} + self.wf_revisions = [] + self.wf_branches: Dict[str, Any] = {} self.wf_sha = {} self.wf_download_url = {} self.nf_config = {} @@ -169,7 +170,7 @@ def __init__( self.containers_remote = [] # stores the remote images provided in the file. # Fetch remote workflows - self.wfs = nf_core.list.Workflows() + self.wfs = nf_core.pipelines.list.Workflows() self.wfs.get_remote_workflows() def download_workflow(self): @@ -230,7 +231,7 @@ def download_workflow(self): summary_log.append(f"Enabled for Seqera Platform: '{self.platform}'") # Check that the outdir doesn't already exist - if os.path.exists(self.outdir): + if self.outdir is not None and os.path.exists(self.outdir): if not self.force: raise DownloadError( f"Output directory '{self.outdir}' already exists (use [red]--force[/] to overwrite)" @@ -338,7 +339,7 @@ def prompt_pipeline_name(self): stderr.print("Specify the name of a nf-core pipeline or a GitHub repository name (user/repo).") self.pipeline = nf_core.utils.prompt_remote_pipeline_name(self.wfs) - def prompt_revision(self): + def prompt_revision(self) -> None: """ Prompt for pipeline revision / branch Prompt user for revision tag if '--revision' was not set @@ -505,7 +506,7 @@ def prompt_singularity_cachedir_creation(self): with open(os.path.expanduser(shellprofile_path), "a") as f: f.write( "\n\n#######################################\n" - f"## Added by `nf-core download` v{nf_core.__version__} ##\n" + f"## Added by `nf-core pipelines download` v{nf_core.__version__} ##\n" + f'export NXF_SINGULARITY_CACHEDIR="{cachedir_path}"' + "\n#######################################\n" ) @@ -687,7 +688,7 @@ def wf_use_local_configs(self, revision_dirname): # Append the singularity.cacheDir to the end if we need it if self.container_system == "singularity" and self.container_cache_utilisation == "copy": nfconfig += ( - f"\n\n// Added by `nf-core download` v{nf_core.__version__} //\n" + f"\n\n// Added by `nf-core pipelines download` v{nf_core.__version__} //\n" + 'singularity.cacheDir = "${projectDir}/../singularity-images/"' + "\n///////////////////////////////////////" ) @@ -697,7 +698,7 @@ def wf_use_local_configs(self, revision_dirname): with open(nfconfig_fn, "w") as nfconfig_fh: nfconfig_fh.write(nfconfig) - def find_container_images(self, workflow_directory): + def find_container_images(self, workflow_directory: str) -> None: """Find container image names for workflow. Starts by using `nextflow config` to pull out any process.container @@ -716,7 +717,7 @@ def find_container_images(self, workflow_directory): module_findings = [] # Use linting code to parse the pipeline nextflow config - self.nf_config = nf_core.utils.fetch_wf_config(workflow_directory) + self.nf_config = nf_core.utils.fetch_wf_config(Path(workflow_directory)) # Find any config variables that look like a container for k, v in self.nf_config.items(): @@ -745,7 +746,7 @@ def find_container_images(self, workflow_directory): self.nf_config is needed, because we need to restart search over raw input if no proper container matches are found. 
""" - config_findings.append((k, v.strip('"').strip("'"), self.nf_config, "Nextflow configs")) + config_findings.append((k, v.strip("'\""), self.nf_config, "Nextflow configs")) # rectify the container paths found in the config # Raw config_findings may yield multiple containers, so better create a shallow copy of the list, since length of input and output may be different ?!? @@ -1007,7 +1008,7 @@ def gather_registries(self, workflow_directory: str) -> None: # should exist, because find_container_images() is always called before if not self.nf_config: - self.nf_config = nf_core.utils.fetch_wf_config(workflow_directory) + self.nf_config = nf_core.utils.fetch_wf_config(Path(workflow_directory)) # Select registries defined in pipeline config configured_registries = [ @@ -1385,7 +1386,7 @@ def singularity_pull_image( # where the output of 'singularity pull' is first generated before being copied to the NXF_SINGULARITY_CACHDIR. # if not defined by the Singularity administrators, then use the temporary directory to avoid storing the images in the work directory. if os.environ.get("SINGULARITY_CACHEDIR") is None: - os.environ["SINGULARITY_CACHEDIR"] = NFCORE_CACHE_DIR + os.environ["SINGULARITY_CACHEDIR"] = str(NFCORE_CACHE_DIR) # Sometimes, container still contain an explicit library specification, which # resulted in attempted pulls e.g. from docker://quay.io/quay.io/qiime2/core:2022.11 @@ -1693,13 +1694,11 @@ def tidy_tags_and_branches(self): else: # desired revisions may contain arbitrary branch names that do not correspond to valid sematic versioning patterns. valid_versions = [ - version_parser(v) - for v in desired_revisions - if re.match(r"\d+\.\d+(?:\.\d+)*(?:[\w\-_])*", v) + Version(v) for v in desired_revisions if re.match(r"\d+\.\d+(?:\.\d+)*(?:[\w\-_])*", v) ] # valid versions sorted in ascending order, last will be aliased as "latest". latest = sorted(valid_versions)[-1] - self.repo.create_head("latest", latest) + self.repo.create_head("latest", str(latest)) self.checkout(latest) if self.repo.head.is_detached: self.repo.head.reset(index=True, working_tree=True) @@ -1730,7 +1729,7 @@ def __add_additional_tags(self) -> None: # Although "dev-null" is a syntactically-valid local-part that is equally valid for delivery, # and only the receiving MTA can decide whether to accept it, it is to my best knowledge configured with # a Postfix discard mail delivery agent (https://www.postfix.org/discard.8.html), so incoming mails should be sinkholed. - self.ensure_git_user_config(f"nf-core download v{nf_core.__version__}", "dev-null@example.com") + self.ensure_git_user_config(f"nf-core pipelines download v{nf_core.__version__}", "dev-null@example.com") for additional_tag in self.additional_tags: # A valid git branch or tag name can contain alphanumeric characters, underscores, hyphens, and dots. @@ -1740,7 +1739,9 @@ def __add_additional_tags(self) -> None: if self.repo.is_valid_object(anchor) and not self.repo.is_valid_object(tag): try: self.repo.create_tag( - tag, ref=anchor, message=f"Synonynmous tag to {anchor}; added by `nf-core download`." 
+                        tag,
+                        ref=anchor,
+                        message=f"Synonymous tag to {anchor}; added by `nf-core pipelines download`.",
                     )
                 except (GitCommandError, InvalidGitRepositoryError) as e:
                     log.error(f"[red]Additional tag(s) could not be applied:[/]\n{e}\n")
diff --git a/nf_core/launch.py b/nf_core/pipelines/launch.py
similarity index 95%
rename from nf_core/launch.py
rename to nf_core/pipelines/launch.py
index bc0cd58ae..a80639ea9 100644
--- a/nf_core/launch.py
+++ b/nf_core/pipelines/launch.py
@@ -7,15 +7,16 @@
 import re
 import subprocess
 import webbrowser
+from pathlib import Path

 import questionary
 from rich.console import Console
 from rich.markdown import Markdown
 from rich.prompt import Confirm

-import nf_core.schema
+import nf_core.pipelines.schema
 import nf_core.utils
-from nf_core.lint_utils import dump_json_with_prettier
+from nf_core.pipelines.lint_utils import dump_json_with_prettier

 log = logging.getLogger(__name__)

@@ -38,7 +39,7 @@ def __init__(
         """Initialise the Launcher class

         Args:
-            schema: An nf_core.schema.PipelineSchema() object
+            schema: An nf_core.pipelines.schema.PipelineSchema() object
         """

         self.pipeline = pipeline
@@ -46,7 +47,7 @@ def __init__(
         self.schema_obj = None
         self.use_params_file = False if command_only else True
         self.params_in = params_in
-        self.params_out = params_out if params_out else os.path.join(os.getcwd(), "nf-params.json")
+        self.params_out = params_out if params_out else Path.cwd() / "nf-params.json"
         self.save_all = save_all
         self.show_hidden = show_hidden
         self.web_schema_launch_url = url if url else "https://nf-co.re/launch"
@@ -59,7 +60,7 @@ def __init__(
         self.nextflow_cmd = None

         # Fetch remote workflows
-        self.wfs = nf_core.list.Workflows()
+        self.wfs = nf_core.pipelines.list.Workflows()
         self.wfs.get_remote_workflows()

         # Prepend property names with a single hyphen in case we have parameters with the same ID
@@ -138,7 +139,7 @@ def launch_pipeline(self):

         # Check if we have a web ID
         if self.web_id is not None:
-            self.schema_obj = nf_core.schema.PipelineSchema()
+            self.schema_obj = nf_core.pipelines.schema.PipelineSchema()
             try:
                 if not self.get_web_launch_response():
                     log.info(
@@ -191,7 +192,7 @@ def get_pipeline_schema(self):
         """Load and validate the schema from the supplied pipeline"""

         # Set up the schema
-        self.schema_obj = nf_core.schema.PipelineSchema()
+        self.schema_obj = nf_core.pipelines.schema.PipelineSchema()

         # Check if this is a local directory
         localpath = os.path.abspath(os.path.expanduser(self.pipeline))
@@ -262,15 +263,21 @@ def set_schema_inputs(self):

     def merge_nxf_flag_schema(self):
         """Take the Nextflow flag schema and merge it with the pipeline schema"""
+        if "allOf" not in self.schema_obj.schema:
+            self.schema_obj.schema["allOf"] = []
         # Add the coreNextflow subschema to the schema definitions
-        if "definitions" not in self.schema_obj.schema:
-            self.schema_obj.schema["definitions"] = {}
-        self.schema_obj.schema["definitions"].update(self.nxf_flag_schema)
+        if "$defs" in self.schema_obj.schema or "definitions" not in self.schema_obj.schema:
+            if "$defs" not in self.schema_obj.schema:
+                self.schema_obj.schema["$defs"] = {}
+            self.schema_obj.schema["$defs"].update(self.nxf_flag_schema)
+            self.schema_obj.schema["allOf"].insert(0, {"$ref": "#/$defs/coreNextflow"})
+
+        if "definitions" in self.schema_obj.schema:
+            self.schema_obj.schema["definitions"].update(self.nxf_flag_schema)
+            self.schema_obj.schema["allOf"].insert(0, {"$ref": "#/definitions/coreNextflow"})
         # Add the new defintion to the allOf key so that it's included in validation
         # Put it at the start of the list
so that it comes first
-        if "allOf" not in self.schema_obj.schema:
-            self.schema_obj.schema["allOf"] = []
-        self.schema_obj.schema["allOf"].insert(0, {"$ref": "#/definitions/coreNextflow"})

     def prompt_web_gui(self):
         """Ask whether to use the web-based or cli wizard to collect params"""
@@ -340,7 +347,7 @@ def get_web_launch_response(self):
         elif web_response["status"] == "waiting_for_user":
             return False
         elif web_response["status"] == "launch_params_complete":
-            log.info("Found completed parameters from nf-core launch GUI")
+            log.info("Found completed parameters from nf-core pipelines launch GUI")
             try:
                 # Set everything that we can with the cache results
                 # NB: If using web builder, may have only run with --id and nothing else
@@ -378,7 +385,8 @@ def sanitise_web_response(self):
         for param_id, param_obj in self.schema_obj.schema.get("properties", {}).items():
             questionary_objects[param_id] = self.single_param_to_questionary(param_id, param_obj, print_help=False)

-        for _, definition in self.schema_obj.schema.get("definitions", {}).items():
+        definitions_schemas = self.schema_obj.schema.get("$defs", self.schema_obj.schema.get("definitions", {})).items()
+        for _, definition in definitions_schemas:
             for param_id, param_obj in definition.get("properties", {}).items():
                 questionary_objects[param_id] = self.single_param_to_questionary(param_id, param_obj, print_help=False)

@@ -398,9 +406,10 @@ def prompt_schema(self):
         """Go through the pipeline schema and prompt user to change defaults"""
         answers = {}
         # Start with the subschema in the definitions - use order of allOf
+        definitions_schemas = self.schema_obj.schema.get("$defs", self.schema_obj.schema.get("definitions", {}))
         for allOf in self.schema_obj.schema.get("allOf", []):
             d_key = allOf["$ref"][14:]
-            answers.update(self.prompt_group(d_key, self.schema_obj.schema["definitions"][d_key]))
+            answers.update(self.prompt_group(d_key, definitions_schemas[d_key]))

         # Top level schema params
         for param_id, param_obj in self.schema_obj.schema.get("properties", {}).items():
@@ -697,7 +706,7 @@ def build_command(self):
         # Write the user selection to a file and run nextflow with that
         if self.use_params_file:
             dump_json_with_prettier(self.params_out, self.schema_obj.input_params)
-            self.nextflow_cmd += f' -params-file "{os.path.relpath(self.params_out)}"'
+            self.nextflow_cmd += f' -params-file "{Path(self.params_out)}"'

         # Call nextflow with a list of command line flags
         else:
diff --git a/nf_core/lint/__init__.py b/nf_core/pipelines/lint/__init__.py
similarity index 78%
rename from nf_core/lint/__init__.py
rename to nf_core/pipelines/lint/__init__.py
index 9292a07fd..8cc7c37cb 100644
--- a/nf_core/lint/__init__.py
+++ b/nf_core/pipelines/lint/__init__.py
@@ -9,7 +9,7 @@
 import logging
 import os
 from pathlib import Path
-from typing import List, Tuple, Union
+from typing import List, Optional, Tuple, Union

 import git
 import rich
@@ -20,16 +20,41 @@
 from rich.panel import Panel
 from rich.table import Table

-import nf_core.lint_utils
 import nf_core.modules.lint
+import nf_core.pipelines.lint_utils
 import nf_core.subworkflows.lint
 import nf_core.utils
 from nf_core import __version__
 from nf_core.components.lint import ComponentLint
-from nf_core.lint_utils import console
+from nf_core.pipelines.lint_utils import console
 from nf_core.utils import plural_s as _s
 from nf_core.utils import strip_ansi_codes

+from .actions_awsfulltest import actions_awsfulltest
+from .actions_awstest import actions_awstest
+from .actions_ci import actions_ci
+from
.actions_schema_validation import actions_schema_validation +from .configs import base_config, modules_config +from .files_exist import files_exist +from .files_unchanged import files_unchanged +from .included_configs import included_configs +from .merge_markers import merge_markers +from .modules_json import modules_json +from .modules_structure import modules_structure +from .multiqc_config import multiqc_config +from .nextflow_config import nextflow_config +from .nfcore_yml import nfcore_yml +from .pipeline_name_conventions import pipeline_name_conventions +from .pipeline_todos import pipeline_todos +from .plugin_includes import plugin_includes +from .readme import readme +from .schema_description import schema_description +from .schema_lint import schema_lint +from .schema_params import schema_params +from .system_exit import system_exit +from .template_strings import template_strings +from .version_consistency import version_consistency + log = logging.getLogger(__name__) @@ -52,32 +77,32 @@ class PipelineLint(nf_core.utils.Pipeline): warned (list): A list of tuples of the form: ``(, )`` """ - from .actions_awsfulltest import actions_awsfulltest # type: ignore[misc] - from .actions_awstest import actions_awstest # type: ignore[misc] - from .actions_ci import actions_ci # type: ignore[misc] - from .actions_schema_validation import ( # type: ignore[misc] - actions_schema_validation, - ) - from .configs import base_config, modules_config # type: ignore[misc] - from .files_exist import files_exist # type: ignore[misc] - from .files_unchanged import files_unchanged # type: ignore[misc] - from .merge_markers import merge_markers # type: ignore[misc] - from .modules_json import modules_json # type: ignore[misc] - from .modules_structure import modules_structure # type: ignore[misc] - from .multiqc_config import multiqc_config # type: ignore[misc] - from .nextflow_config import nextflow_config # type: ignore[misc] - from .nfcore_yml import nfcore_yml # type: ignore[misc] - from .pipeline_name_conventions import ( # type: ignore[misc] - pipeline_name_conventions, - ) - from .pipeline_todos import pipeline_todos # type: ignore[misc] - from .readme import readme # type: ignore[misc] - from .schema_description import schema_description # type: ignore[misc] - from .schema_lint import schema_lint # type: ignore[misc] - from .schema_params import schema_params # type: ignore[misc] - from .system_exit import system_exit # type: ignore[misc] - from .template_strings import template_strings # type: ignore[misc] - from .version_consistency import version_consistency # type: ignore[misc] + # Import all linting tests as methods for this class + actions_awsfulltest = actions_awsfulltest + actions_awstest = actions_awstest + actions_ci = actions_ci + actions_schema_validation = actions_schema_validation + base_config = base_config + modules_config = modules_config + files_exist = files_exist + files_unchanged = files_unchanged + merge_markers = merge_markers + modules_json = modules_json + modules_structure = modules_structure + multiqc_config = multiqc_config + nextflow_config = nextflow_config + nfcore_yml = nfcore_yml + pipeline_name_conventions = pipeline_name_conventions + pipeline_todos = pipeline_todos + plugin_includes = plugin_includes + readme = readme + schema_description = schema_description + schema_lint = schema_lint + schema_params = schema_params + system_exit = system_exit + template_strings = template_strings + version_consistency = version_consistency + included_configs = included_configs 
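+
+    # NB: plain assignment binds each imported function as an instance method
+    # (it receives `self` when called), matching the behaviour of the previous
+    # in-class imports without needing the `# type: ignore[misc]` pragmas.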
def __init__( self, wf_path, release_mode=False, fix=(), key=None, fail_ignored=False, fail_warned=False, hide_progress=False @@ -114,6 +139,7 @@ def _get_all_lint_tests(release_mode): "actions_awsfulltest", "readme", "pipeline_todos", + "plugin_includes", "pipeline_name_conventions", "template_strings", "schema_lint", @@ -128,17 +154,17 @@ def _get_all_lint_tests(release_mode): "base_config", "modules_config", "nfcore_yml", - ] + (["version_consistency"] if release_mode else []) + ] + (["version_consistency", "included_configs"] if release_mode else []) - def _load(self): + def _load(self) -> bool: """Load information about the pipeline into the PipelineLint object""" # Load everything using the parent object super()._load() # Load lint object specific stuff - self._load_lint_config() + return self._load_lint_config() - def _load_lint_config(self): + def _load_lint_config(self) -> bool: """Parse a pipeline lint config file. Load the '.nf-core.yml' config file and extract @@ -147,14 +173,20 @@ def _load_lint_config(self): Add parsed config to the `self.lint_config` class attribute. """ _, tools_config = nf_core.utils.load_tools_config(self.wf_path) - self.lint_config = tools_config.get("lint", {}) + self.lint_config = getattr(tools_config, "lint", {}) or {} + is_correct = True # Check if we have any keys that don't match lint test names - for k in self.lint_config: - if k not in self.lint_tests: - log.warning(f"Found unrecognised test name '{k}' in pipeline lint config") + if self.lint_config is not None: + for k in self.lint_config: + if k != "nfcore_components" and k not in self.lint_tests: + # nfcore_components is an exception to allow custom pipelines without nf-core components + log.warning(f"Found unrecognised test name '{k}' in pipeline lint config") + is_correct = False + + return is_correct - def _lint_pipeline(self): + def _lint_pipeline(self) -> None: """Main linting function. Takes the pipeline directory as the primary input and iterates through @@ -219,7 +251,8 @@ def _lint_pipeline(self): "Running lint checks", total=len(self.lint_tests), test_name=self.lint_tests[0] ) for test_name in self.lint_tests: - if self.lint_config.get(test_name, {}) is False: + lint_test = self.lint_config.get(test_name, {}) if self.lint_config is not None else {} + if lint_test is False: log.debug(f"Skipping lint test '{test_name}'") self.ignored.append((test_name, test_name)) continue @@ -439,7 +472,7 @@ def _get_results_md(self): comment_body_text = f"Posted for pipeline commit {self.git_sha[:7]}" if self.git_sha is not None else "" timestamp = now.strftime("%Y-%m-%d %H:%M:%S") markdown = ( - f"## `nf-core lint` overall result: {overall_result}\n\n" + f"## `nf-core pipelines lint` overall result: {overall_result}\n\n" f"{comment_body_text}\n\n" f"```diff{test_passed_count}{test_ignored_count}{test_fixed_count}{test_warning_count}{test_failure_count}" "\n```\n\n" @@ -516,7 +549,7 @@ def run_linting( md_fn=None, json_fn=None, hide_progress: bool = False, -) -> Tuple[PipelineLint, ComponentLint, Union[ComponentLint, None]]: +) -> Tuple[PipelineLint, Optional[ComponentLint], Optional[ComponentLint]]: """Runs all nf-core linting checks on a given Nextflow pipeline project in either `release` mode or `normal` mode (default). Returns an object of type :class:`PipelineLint` after finished. 
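Note that `_load_lint_config` above treats `nfcore_components` as a special key: it is not a lint test itself but a switch, consumed in the next hunk of `run_linting`, that skips module and subworkflow linting for pipelines without nf-core components. A minimal sketch of the corresponding `.nf-core.yml` section (illustrative values):

```yaml
lint:
  nfcore_components: false
  files_exist:
    - conf/igenomes.config
```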
@@ -559,44 +592,47 @@ def run_linting( # Load the various pipeline configs lint_obj._load_lint_config() - lint_obj._load_pipeline_config() - lint_obj._list_files() + lint_obj.load_pipeline_config() - # Create the modules lint object - module_lint_obj = nf_core.modules.lint.ModuleLint(pipeline_dir, hide_progress=hide_progress) - # Create the subworkflows lint object - try: - subworkflow_lint_obj = nf_core.subworkflows.lint.SubworkflowLint(pipeline_dir, hide_progress=hide_progress) - except LookupError: + if "nfcore_components" in lint_obj.lint_config and not lint_obj.lint_config["nfcore_components"]: + module_lint_obj = None subworkflow_lint_obj = None - - # Verify that the pipeline is correctly configured and has a modules.json file - module_lint_obj.has_valid_directory() - module_lint_obj.has_modules_file() - # Run only the tests we want - if key: - # Select only the module lint tests - module_lint_tests = list( - set(key).intersection(set(nf_core.modules.lint.ModuleLint.get_all_module_lint_tests(is_pipeline=True))) - ) - # Select only the subworkflow lint tests - subworkflow_lint_tests = list( - set(key).intersection( - set(nf_core.subworkflows.lint.SubworkflowLint.get_all_subworkflow_lint_tests(is_pipeline=True)) - ) - ) else: - # If no key is supplied, run the default modules tests - module_lint_tests = list(("module_changes", "module_version")) - subworkflow_lint_tests = list(("subworkflow_changes", "subworkflow_version")) - module_lint_obj.filter_tests_by_key(module_lint_tests) - if subworkflow_lint_obj is not None: - subworkflow_lint_obj.filter_tests_by_key(subworkflow_lint_tests) - - # Set up files for component linting test - module_lint_obj.set_up_pipeline_files() - if subworkflow_lint_obj is not None: - subworkflow_lint_obj.set_up_pipeline_files() + # Create the modules lint object + module_lint_obj = nf_core.modules.lint.ModuleLint(pipeline_dir, hide_progress=hide_progress) + # Create the subworkflows lint object + try: + subworkflow_lint_obj = nf_core.subworkflows.lint.SubworkflowLint(pipeline_dir, hide_progress=hide_progress) + except LookupError: + subworkflow_lint_obj = None + + # Verify that the pipeline is correctly configured and has a modules.json file + module_lint_obj.has_valid_directory() + module_lint_obj.has_modules_file() + # Run only the tests we want + if key: + # Select only the module lint tests + module_lint_tests = list( + set(key).intersection(set(nf_core.modules.lint.ModuleLint.get_all_module_lint_tests(is_pipeline=True))) + ) + # Select only the subworkflow lint tests + subworkflow_lint_tests = list( + set(key).intersection( + set(nf_core.subworkflows.lint.SubworkflowLint.get_all_subworkflow_lint_tests(is_pipeline=True)) + ) + ) + else: + # If no key is supplied, run the default modules tests + module_lint_tests = list(("module_changes", "module_version")) + subworkflow_lint_tests = list(("subworkflow_changes", "subworkflow_version")) + module_lint_obj.filter_tests_by_key(module_lint_tests) + if subworkflow_lint_obj is not None: + subworkflow_lint_obj.filter_tests_by_key(subworkflow_lint_tests) + + # Set up files for component linting test + module_lint_obj.set_up_pipeline_files() + if subworkflow_lint_obj is not None: + subworkflow_lint_obj.set_up_pipeline_files() # Run the pipeline linting tests try: @@ -606,13 +642,14 @@ def run_linting( log.info("Stopping tests...") return lint_obj, module_lint_obj, subworkflow_lint_obj - # Run the module lint tests - if len(module_lint_obj.all_local_components) > 0: - 
module_lint_obj.lint_modules(module_lint_obj.all_local_components, local=True) - if len(module_lint_obj.all_remote_components) > 0: - module_lint_obj.lint_modules(module_lint_obj.all_remote_components, local=False) - # Run the subworkflows lint tests + if module_lint_obj is not None: + # Run the module lint tests + if len(module_lint_obj.all_local_components) > 0: + module_lint_obj.lint_modules(module_lint_obj.all_local_components, local=True) + if len(module_lint_obj.all_remote_components) > 0: + module_lint_obj.lint_modules(module_lint_obj.all_remote_components, local=False) if subworkflow_lint_obj is not None: + # Run the subworkflows lint tests if len(subworkflow_lint_obj.all_local_components) > 0: subworkflow_lint_obj.lint_subworkflows(subworkflow_lint_obj.all_local_components, local=True) if len(subworkflow_lint_obj.all_remote_components) > 0: @@ -620,11 +657,12 @@ def run_linting( # Print the results lint_obj._print_results(show_passed) - module_lint_obj._print_results(show_passed, sort_by=sort_by) + if module_lint_obj is not None: + module_lint_obj._print_results(show_passed, sort_by=sort_by) if subworkflow_lint_obj is not None: subworkflow_lint_obj._print_results(show_passed, sort_by=sort_by) - nf_core.lint_utils.print_joint_summary(lint_obj, module_lint_obj, subworkflow_lint_obj) - nf_core.lint_utils.print_fixes(lint_obj) + nf_core.pipelines.lint_utils.print_joint_summary(lint_obj, module_lint_obj, subworkflow_lint_obj) + nf_core.pipelines.lint_utils.print_fixes(lint_obj) # Save results to Markdown file if md_fn is not None: diff --git a/nf_core/lint/actions_awsfulltest.py b/nf_core/pipelines/lint/actions_awsfulltest.py similarity index 87% rename from nf_core/lint/actions_awsfulltest.py rename to nf_core/pipelines/lint/actions_awsfulltest.py index d5a061c93..7ea167f6c 100644 --- a/nf_core/lint/actions_awsfulltest.py +++ b/nf_core/pipelines/lint/actions_awsfulltest.py @@ -1,9 +1,10 @@ -import os +from pathlib import Path +from typing import Dict, List import yaml -def actions_awsfulltest(self): +def actions_awsfulltest(self) -> Dict[str, List[str]]: """Checks the GitHub Actions awsfulltest is valid. In addition to small test datasets run on GitHub Actions, we provide the possibility of testing the pipeline on full size datasets on AWS. @@ -29,8 +30,8 @@ def actions_awsfulltest(self): warned = [] failed = [] - fn = os.path.join(self.wf_path, ".github", "workflows", "awsfulltest.yml") - if os.path.isfile(fn): + fn = Path(self.wf_path, ".github", "workflows", "awsfulltest.yml") + if fn.is_file(): try: with open(fn) as fh: wf = yaml.safe_load(fh) @@ -41,7 +42,9 @@ def actions_awsfulltest(self): # Check that the action is only turned on for published releases try: - if wf[True]["release"]["types"] != ["published"]: + if wf[True]["pull_request"]["branches"] != ["master"]: + raise AssertionError() + if wf[True]["pull_request_review"]["types"] != ["submitted"]: raise AssertionError() if "workflow_dispatch" not in wf[True]: raise AssertionError() diff --git a/nf_core/lint/actions_awstest.py b/nf_core/pipelines/lint/actions_awstest.py similarity index 93% rename from nf_core/lint/actions_awstest.py rename to nf_core/pipelines/lint/actions_awstest.py index 7c5599894..7e4c0fc49 100644 --- a/nf_core/lint/actions_awstest.py +++ b/nf_core/pipelines/lint/actions_awstest.py @@ -1,4 +1,4 @@ -import os +from pathlib import Path import yaml @@ -22,8 +22,8 @@ def actions_awstest(self): * Must be turned on for ``workflow_dispatch``. 
""" - fn = os.path.join(self.wf_path, ".github", "workflows", "awstest.yml") - if not os.path.isfile(fn): + fn = Path(self.wf_path, ".github", "workflows", "awstest.yml") + if not fn.is_file(): return {"ignored": [f"'awstest.yml' workflow not found: `{fn}`"]} try: diff --git a/nf_core/lint/actions_ci.py b/nf_core/pipelines/lint/actions_ci.py similarity index 96% rename from nf_core/lint/actions_ci.py rename to nf_core/pipelines/lint/actions_ci.py index a3e7d54b6..74f433ef8 100644 --- a/nf_core/lint/actions_ci.py +++ b/nf_core/pipelines/lint/actions_ci.py @@ -1,4 +1,4 @@ -import os +from pathlib import Path import yaml @@ -40,10 +40,10 @@ def actions_ci(self): """ passed = [] failed = [] - fn = os.path.join(self.wf_path, ".github", "workflows", "ci.yml") + fn = Path(self.wf_path, ".github", "workflows", "ci.yml") # Return an ignored status if we can't find the file - if not os.path.isfile(fn): + if not fn.is_file(): return {"ignored": ["'.github/workflows/ci.yml' not found"]} try: diff --git a/nf_core/lint/actions_schema_validation.py b/nf_core/pipelines/lint/actions_schema_validation.py similarity index 91% rename from nf_core/lint/actions_schema_validation.py rename to nf_core/pipelines/lint/actions_schema_validation.py index 7e878a1af..a057d8058 100644 --- a/nf_core/lint/actions_schema_validation.py +++ b/nf_core/pipelines/lint/actions_schema_validation.py @@ -1,6 +1,5 @@ -import glob import logging -import os +from pathlib import Path from typing import Any, Dict, List import jsonschema @@ -23,10 +22,10 @@ def actions_schema_validation(self) -> Dict[str, List[str]]: warned: List[str] = [] # Only show error messages from schema - logging.getLogger("nf_core.schema").setLevel(logging.ERROR) + logging.getLogger("nf_core.pipelines.schema").setLevel(logging.ERROR) # Get all workflow files - action_workflows = glob.glob(os.path.join(self.wf_path, ".github/workflows/*.y*ml")) + action_workflows = list(Path(self.wf_path).glob(".github/workflows/*.y*ml")) # Load the GitHub workflow schema r = requests.get("https://json.schemastore.org/github-workflow", allow_redirects=True) @@ -40,7 +39,7 @@ def actions_schema_validation(self) -> Dict[str, List[str]]: # Validate all workflows against the schema for wf_path in action_workflows: - wf = os.path.basename(wf_path) + wf = wf_path.name # load workflow try: diff --git a/nf_core/lint/configs.py b/nf_core/pipelines/lint/configs.py similarity index 98% rename from nf_core/lint/configs.py rename to nf_core/pipelines/lint/configs.py index 274152919..f0fa1170c 100644 --- a/nf_core/lint/configs.py +++ b/nf_core/pipelines/lint/configs.py @@ -3,7 +3,7 @@ from pathlib import Path from typing import Dict, List -from nf_core.lint_utils import ignore_file +from nf_core.pipelines.lint_utils import ignore_file log = logging.getLogger(__name__) diff --git a/nf_core/lint/files_exist.py b/nf_core/pipelines/lint/files_exist.py similarity index 84% rename from nf_core/lint/files_exist.py rename to nf_core/pipelines/lint/files_exist.py index d801caf70..9dd307d8b 100644 --- a/nf_core/lint/files_exist.py +++ b/nf_core/pipelines/lint/files_exist.py @@ -1,11 +1,11 @@ import logging from pathlib import Path -from typing import Dict, List, Tuple, Union +from typing import Dict, List, Union log = logging.getLogger(__name__) -def files_exist(self) -> Dict[str, Union[List[str], bool]]: +def files_exist(self) -> Dict[str, List[str]]: """Checks a given pipeline directory for required files. 
Iterates through the pipeline's directory content and checks that specified @@ -87,6 +87,7 @@ def files_exist(self) -> Dict[str, Union[List[str], bool]]: lib/Workflow.groovy lib/WorkflowMain.groovy lib/WorkflowPIPELINE.groovy + lib/nfcore_external_java_deps.jar parameters.settings.json pipeline_template.yml # saving information in .nf-core.yml Singularity @@ -98,13 +99,7 @@ def files_exist(self) -> Dict[str, Union[List[str], bool]]: .travis.yml - Files that *must not* be present if a certain entry is present in ``nextflow.config``: - - .. code-block:: bash - - lib/nfcore_external_java_deps.jar # if "nf-validation" is in nextflow.config - - .. tip:: You can configure the ``nf-core lint`` tests to ignore any of these checks by setting + .. tip:: You can configure the ``nf-core pipelines lint`` tests to ignore any of these checks by setting the ``files_exist`` key as follows in your ``.nf-core.yml`` config file. For example: .. code-block:: yaml @@ -172,6 +167,7 @@ def files_exist(self) -> Dict[str, Union[List[str], bool]]: [Path("assets", "multiqc_config.yml")], [Path("conf", "base.config")], [Path("conf", "igenomes.config")], + [Path("conf", "igenomes_ignored.config")], [Path(".github", "workflows", "awstest.yml")], [Path(".github", "workflows", "awsfulltest.yml")], [Path("modules.json")], @@ -198,14 +194,12 @@ def files_exist(self) -> Dict[str, Union[List[str], bool]]: Path("parameters.settings.json"), Path("pipeline_template.yml"), # saving information in .nf-core.yml Path("Singularity"), + Path("lib", "nfcore_external_java_deps.jar"), ] files_warn_ifexists = [Path(".travis.yml")] - files_fail_ifinconfig: List[Tuple[Path, Dict[str, str]]] = [ - (Path("lib", "nfcore_external_java_deps.jar"), {"plugins": "nf-validation"}), - ] # Remove files that should be ignored according to the linting config - ignore_files = self.lint_config.get("files_exist", []) + ignore_files = self.lint_config.get("files_exist", []) if self.lint_config is not None else [] def pf(file_path: Union[str, Path]) -> Path: return Path(self.wf_path, file_path) @@ -241,23 +235,7 @@ def pf(file_path: Union[str, Path]) -> Path: failed.append(f"File must be removed: {self._wrap_quotes(file)}") else: passed.append(f"File not found check: {self._wrap_quotes(file)}") - # Files that cause an error if they exists together with a certain entry in nextflow.config - for file_cond in files_fail_ifinconfig: - if str(file_cond[0]) in ignore_files: - continue - in_config = False - config_key, config_value = list(file_cond[1].items())[0] - if config_key in self.nf_config and config_value in self.nf_config[config_key]: - log.debug(f"Found {config_key} in nextflow.config with value {config_value}") - in_config = True - if pf(file_cond[0]).is_file() and in_config: - failed.append(f"File must be removed: {self._wrap_quotes(file_cond[0])}") - elif pf(file_cond[0]).is_file() and not in_config: - passed.append(f"File found check: {self._wrap_quotes(file_cond[0])}") - elif not pf(file_cond[0]).is_file() and not in_config: - failed.append(f"File not found check: {self._wrap_quotes(file_cond[0])}") - elif not pf(file_cond[0]).is_file() and in_config: - passed.append(f"File not found check: {self._wrap_quotes(file_cond[0])}") + # Files that cause a warning if they exist for file in files_warn_ifexists: if str(file) in ignore_files: diff --git a/nf_core/lint/files_unchanged.py b/nf_core/pipelines/lint/files_unchanged.py similarity index 94% rename from nf_core/lint/files_unchanged.py rename to nf_core/pipelines/lint/files_unchanged.py index 
014c2b5f0..300b3674b 100644 --- a/nf_core/lint/files_unchanged.py +++ b/nf_core/pipelines/lint/files_unchanged.py @@ -8,7 +8,7 @@ import yaml -import nf_core.create +import nf_core.pipelines.create.create log = logging.getLogger(__name__) @@ -49,7 +49,7 @@ def files_unchanged(self) -> Dict[str, Union[List[str], bool]]: .prettierignore - .. tip:: You can configure the ``nf-core lint`` tests to ignore any of these checks by setting + .. tip:: You can configure the ``nf-core pipelines lint`` tests to ignore any of these checks by setting the ``files_unchanged`` key as follows in your ``.nf-core.yml`` config file. For example: .. code-block:: yaml @@ -109,26 +109,28 @@ def files_unchanged(self) -> Dict[str, Union[List[str], bool]]: ] # Only show error messages from pipeline creation - logging.getLogger("nf_core.create").setLevel(logging.ERROR) + logging.getLogger("nf_core.pipelines.create").setLevel(logging.ERROR) # Generate a new pipeline with nf-core create that we can compare to - tmp_dir = tempfile.mkdtemp() + tmp_dir = Path(tempfile.TemporaryDirectory().name) + tmp_dir.mkdir(parents=True) # Create a template.yaml file for the pipeline creation template_yaml = { "name": short_name, "description": self.nf_config["manifest.description"].strip("\"'"), "author": self.nf_config["manifest.author"].strip("\"'"), - "prefix": prefix, + "org": prefix, } template_yaml_path = Path(tmp_dir, "template.yaml") + with open(template_yaml_path, "w") as fh: yaml.dump(template_yaml, fh, default_flow_style=False) test_pipeline_dir = Path(tmp_dir, f"{prefix}-{short_name}") - create_obj = nf_core.create.PipelineCreate( - None, None, None, no_git=True, outdir=test_pipeline_dir, template_yaml_path=template_yaml_path + create_obj = nf_core.pipelines.create.create.PipelineCreate( + None, None, None, no_git=True, outdir=test_pipeline_dir, template_config=template_yaml_path ) create_obj.init_pipeline() @@ -141,7 +143,7 @@ def _tf(file_path: Union[str, Path]) -> Path: """Helper function - get file path for template file""" return Path(test_pipeline_dir, file_path) - ignore_files = self.lint_config.get("files_unchanged", []) + ignore_files = self.lint_config.get("files_unchanged", []) if self.lint_config is not None else [] # Files that must be completely unchanged from template for files in files_exact: diff --git a/nf_core/pipelines/lint/included_configs.py b/nf_core/pipelines/lint/included_configs.py new file mode 100644 index 000000000..75c4594f4 --- /dev/null +++ b/nf_core/pipelines/lint/included_configs.py @@ -0,0 +1,36 @@ +from pathlib import Path + + +def included_configs(self): + """Check that the pipeline nextflow.config includes the pipeline custom configs. + + If the include line is uncommented, the test passes. + If the include line is commented, the test fails. + If the include line is missing, the test warns. + + Can be skipped by adding the following to the .nf-core.yml file: + lint: + included_configs: False + """ + passed = [] + failed = [] + warned = [] + + config_file = Path(self.wf_path / "nextflow.config") + + with open(config_file) as fh: + config = fh.read() + if ( + f"// includeConfig !System.getenv('NXF_OFFLINE') && params.custom_config_base ? \"${{params.custom_config_base}}/pipeline/{self.pipeline_name}.config\"" + in config + ): + failed.append("Pipeline config does not include custom configs. Please uncomment the includeConfig line.") + elif ( + f"includeConfig !System.getenv('NXF_OFFLINE') && params.custom_config_base ? 
\"${{params.custom_config_base}}/pipeline/{self.pipeline_name}.config\"" + in config + ): + passed.append("Pipeline config includes custom configs.") + else: + warned.append("Pipeline config does not include custom configs. Please add the includeConfig line.") + + return {"passed": passed, "failed": failed, "warned": warned} diff --git a/nf_core/lint/merge_markers.py b/nf_core/pipelines/lint/merge_markers.py similarity index 63% rename from nf_core/lint/merge_markers.py rename to nf_core/pipelines/lint/merge_markers.py index d57b63fd1..1c3d70a76 100644 --- a/nf_core/lint/merge_markers.py +++ b/nf_core/pipelines/lint/merge_markers.py @@ -1,6 +1,7 @@ import fnmatch import logging import os +from pathlib import Path import nf_core.utils @@ -35,36 +36,36 @@ def merge_markers(self): failed = [] ignored = [] - ignored_config = self.lint_config.get("merge_markers", []) + ignored_config = self.lint_config.get("merge_markers", []) if self.lint_config is not None else [] ignore = [".git"] - if os.path.isfile(os.path.join(self.wf_path, ".gitignore")): - with open(os.path.join(self.wf_path, ".gitignore"), encoding="latin1") as fh: + if Path(self.wf_path, ".gitignore").is_file(): + with open(Path(self.wf_path, ".gitignore"), encoding="latin1") as fh: for line in fh: - ignore.append(os.path.basename(line.strip().rstrip("/"))) + ignore.append(Path(line.strip().rstrip("/")).name) for root, dirs, files in os.walk(self.wf_path, topdown=True): # Ignore files for i_base in ignore: - i = os.path.join(root, i_base) - dirs[:] = [d for d in dirs if not fnmatch.fnmatch(os.path.join(root, d), i)] - files[:] = [f for f in files if not fnmatch.fnmatch(os.path.join(root, f), i)] + i = str(Path(root, i_base)) + dirs[:] = [d for d in dirs if not fnmatch.fnmatch(str(Path(root, d)), i)] + files[:] = [f for f in files if not fnmatch.fnmatch(str(Path(root, f)), i)] for fname in files: # File ignored in config - if os.path.relpath(os.path.join(root, fname), self.wf_path) in ignored_config: - ignored.append(f"Ignoring file `{os.path.join(root, fname)}`") + if str(Path(root, fname).relative_to(self.wf_path)) in ignored_config: + ignored.append(f"Ignoring file `{Path(root, fname)}`") continue # Skip binary files - if nf_core.utils.is_file_binary(os.path.join(root, fname)): + if nf_core.utils.is_file_binary(Path(root, fname)): continue try: - with open(os.path.join(root, fname), encoding="latin1") as fh: + with open(Path(root, fname), encoding="latin1") as fh: for line in fh: if ">>>>>>>" in line: - failed.append(f"Merge marker '>>>>>>>' in `{os.path.join(root, fname)}`: {line[:30]}") + failed.append(f"Merge marker '>>>>>>>' in `{Path(root, fname)}`: {line[:30]}") if "<<<<<<<" in line: - failed.append(f"Merge marker '<<<<<<<' in `{os.path.join(root, fname)}`: {line[:30]}") + failed.append(f"Merge marker '<<<<<<<' in `{Path(root, fname)}`: {line[:30]}") except FileNotFoundError: - log.debug(f"Could not open file {os.path.join(root, fname)} in merge_markers lint test") + log.debug(f"Could not open file {Path(root, fname)} in merge_markers lint test") if len(failed) == 0: passed.append("No merge markers found in pipeline files") return {"passed": passed, "failed": failed, "ignored": ignored} diff --git a/nf_core/lint/modules_json.py b/nf_core/pipelines/lint/modules_json.py similarity index 88% rename from nf_core/lint/modules_json.py rename to nf_core/pipelines/lint/modules_json.py index dd0a59d55..2b7c26684 100644 --- a/nf_core/lint/modules_json.py +++ b/nf_core/pipelines/lint/modules_json.py @@ -1,9 +1,10 @@ from pathlib import 
Path +from typing import Dict, List, Union -from nf_core.modules.modules_json import ModulesJson +from nf_core.modules.modules_json import ModulesJson, ModulesJsonType -def modules_json(self): +def modules_json(self) -> Dict[str, List[str]]: """Make sure all modules described in the ``modules.json`` file are actually installed Every module installed from ``nf-core/modules`` must have an entry in the ``modules.json`` file @@ -18,10 +19,10 @@ def modules_json(self): # Load pipeline modules and modules.json _modules_json = ModulesJson(self.wf_path) _modules_json.load() - modules_json_dict = _modules_json.modules_json + modules_json_dict: Union[ModulesJsonType, None] = _modules_json.modules_json modules_dir = Path(self.wf_path, "modules") - if _modules_json: + if _modules_json and modules_json_dict is not None: all_modules_passed = True for repo in modules_json_dict["repos"].keys(): diff --git a/nf_core/lint/modules_structure.py b/nf_core/pipelines/lint/modules_structure.py similarity index 100% rename from nf_core/lint/modules_structure.py rename to nf_core/pipelines/lint/modules_structure.py diff --git a/nf_core/lint/multiqc_config.py b/nf_core/pipelines/lint/multiqc_config.py similarity index 99% rename from nf_core/lint/multiqc_config.py rename to nf_core/pipelines/lint/multiqc_config.py index 8b4fa2120..2b0fc7902 100644 --- a/nf_core/lint/multiqc_config.py +++ b/nf_core/pipelines/lint/multiqc_config.py @@ -3,7 +3,7 @@ import yaml -from nf_core.lint_utils import ignore_file +from nf_core.pipelines.lint_utils import ignore_file def multiqc_config(self) -> Dict[str, List[str]]: diff --git a/nf_core/lint/nextflow_config.py b/nf_core/pipelines/lint/nextflow_config.py similarity index 82% rename from nf_core/lint/nextflow_config.py rename to nf_core/pipelines/lint/nextflow_config.py index 47b7d78f5..dd45621bc 100644 --- a/nf_core/lint/nextflow_config.py +++ b/nf_core/pipelines/lint/nextflow_config.py @@ -1,14 +1,15 @@ +import ast import logging -import os import re from pathlib import Path +from typing import Dict, List, Optional, Union -from nf_core.schema import PipelineSchema +from nf_core.pipelines.schema import PipelineSchema log = logging.getLogger(__name__) -def nextflow_config(self): +def nextflow_config(self) -> Dict[str, List[str]]: """Checks the pipeline configuration for required variables. All nf-core pipelines are required to be configured with a minimal set of variable @@ -26,7 +27,7 @@ def nextflow_config(self): * ``manifest.version`` * The version of this pipeline. This should correspond to a `GitHub release `_. - * If ``--release`` is set when running ``nf-core lint``, the version number must not contain the string ``dev`` + * If ``--release`` is set when running ``nf-core pipelines lint``, the version number must not contain the string ``dev`` * If ``--release`` is _not_ set, the version should end in ``dev`` (warning triggered if not) * ``manifest.nextflowVersion`` @@ -65,14 +66,6 @@ def nextflow_config(self): * Should always be set to default value: ``https://raw.githubusercontent.com/nf-core/configs/${params.custom_config_version}`` - * ``params.validationShowHiddenParams`` - - * Determines whether boilerplate params are showed by schema. Set to ``false`` by default - - * ``params.validationSchemaIgnoreParams`` - - * A comma separated string of inputs the schema validation should ignore. 
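For context on the options removed from this docstring: with nf-schema they move into the plugin's ``validation`` config scope, which the plugin lint added a few hunks below reads from ``self.nf_config`` after parsing the ``plugins`` list with ``ast.literal_eval``. A minimal sketch of that parsing step, using an illustrative config value in the flat form returned by ``nf_core.utils.fetch_wf_config`` (the version pin is an assumption for the example):

.. code-block:: python

    import ast

    # Illustrative flat config, shaped like nf_core.utils.fetch_wf_config output
    nf_config = {"plugins": "['nf-schema@2.0.0']"}

    config_plugins = ast.literal_eval(nf_config.get("plugins", "[]"))
    found_plugins = []
    for plugin in config_plugins:
        # An unpinned entry such as 'nf-schema' fails the new lint check
        assert "@" in plugin, f"Plugin '{plugin}' does not have a pinned version"
        found_plugins.append(plugin.split("@")[0])

    assert found_plugins == ["nf-schema"]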
- **The following variables throw warnings if missing:** * ``manifest.mainScript``: The filename of the main pipeline script (should be ``main.nf``) @@ -87,6 +80,9 @@ def nextflow_config(self): * ``params.nf_required_version``: The old method for specifying the minimum Nextflow version. Replaced by ``manifest.nextflowVersion`` * ``params.container``: The old method for specifying the dockerhub container address. Replaced by ``process.container`` * ``igenomesIgnore``: Changed to ``igenomes_ignore`` + * ``params.max_cpus``: Old method of specifying the maximum number of CPUs a process can request. Replaced by the native Nextflow ``resourceLimits`` directive in config files. + * ``params.max_memory``: Old method of specifying the maximum amount of memory a process can request. Replaced by the native Nextflow ``resourceLimits`` directive. + * ``params.max_time``: Old method of specifying the maximum amount of time a process can request. Replaced by the native Nextflow ``resourceLimits`` directive. .. tip:: The ``snake_case`` convention should now be used when defining pipeline parameters @@ -151,8 +147,6 @@ def nextflow_config(self): ["process.time"], ["params.outdir"], ["params.input"], - ["params.validationShowHiddenParams"], - ["params.validationSchemaIgnoreParams"], ] # Throw a warning if these are missing config_warn = [ @@ -170,10 +164,56 @@ def nextflow_config(self): "params.igenomesIgnore", "params.name", "params.enable_conda", + "params.max_cpus", + "params.max_memory", + "params.max_time", ] + # Lint for plugins + config_plugins = ast.literal_eval(self.nf_config.get("plugins", "[]")) + found_plugins = [] + for plugin in config_plugins: + if "@" not in plugin: + failed.append(f"Plugin '{plugin}' does not have a pinned version") + found_plugins.append(plugin.split("@")[0]) + + if "nf-validation" in found_plugins or "nf-schema" in found_plugins: + if "nf-validation" in found_plugins and "nf-schema" in found_plugins: + failed.append("nextflow.config contains both nf-validation and nf-schema") + + if "nf-schema" in found_plugins: + passed.append("Found nf-schema plugin") + if self.nf_config.get("validation.help.enabled", "false") == "false": + failed.append( + "The help message has not been enabled. Set the `validation.help.enabled` configuration option to `true` to enable help messages" + ) + config_fail.extend([["validation.help.enabled"]]) + config_warn.extend( + [ + ["validation.help.beforeText"], + ["validation.help.afterText"], + ["validation.help.command"], + ["validation.summary.beforeText"], + ["validation.summary.afterText"], + ] + ) + config_fail_ifdefined.extend( + [ + "params.validationFailUnrecognisedParams", + "params.validationLenientMode", + "params.validationSchemaIgnoreParams", + "params.validationShowHiddenParams", + ] + ) + + if "nf-validation" in found_plugins: + passed.append("Found nf-validation plugin") + warned.append( + "nf-validation has been detected in the pipeline. 
Please migrate to nf-schema: https://nextflow-io.github.io/nf-schema/latest/migration_guide/" + ) + # Remove field that should be ignored according to the linting config - ignore_configs = self.lint_config.get("nextflow_config", []) + ignore_configs = self.lint_config.get("nextflow_config", []) if self.lint_config is not None else [] for cfs in config_fail: for cf in cfs: @@ -205,12 +245,13 @@ def nextflow_config(self): failed.append(f"Config variable (incorrectly) found: {self._wrap_quotes(cf)}") # Check and warn if the process configuration is done with deprecated syntax + process_with_deprecated_syntax = list( set( [ - re.search(r"^(process\.\$.*?)\.+.*$", ck).group(1) + match.group(1) for ck in self.nf_config.keys() - if re.match(r"^(process\.\$.*?)\.+.*$", ck) + if (match := re.match(r"^(process\.\$.*?)\.+.*$", ck)) is not None ] ) ) @@ -305,7 +346,7 @@ def nextflow_config(self): failed.append(f"Config `params.custom_config_base` is not set to `{custom_config_base}`") # Check that lines for loading custom profiles exist - lines = [ + old_lines = [ r"// Load nf-core custom profiles from different Institutions", r"try {", r'includeConfig "${params.custom_config_base}/nfcore_custom.config"', @@ -313,11 +354,19 @@ def nextflow_config(self): r'System.err.println("WARNING: Could not load nf-core/config profiles: ${params.custom_config_base}/nfcore_custom.config")', r"}", ] - path = os.path.join(self.wf_path, "nextflow.config") + lines = [ + r"// Load nf-core custom profiles from different Institutions", + r'''includeConfig !System.getenv('NXF_OFFLINE') && params.custom_config_base ? "${params.custom_config_base}/nfcore_custom.config" : "/dev/null"''', + ] + path = Path(self.wf_path, "nextflow.config") i = 0 with open(path) as f: for line in f: - if lines[i] in line: + if old_lines[i] in line: + i += 1 + if i == len(old_lines): + break + elif lines[i] in line: i += 1 if i == len(lines): break @@ -325,6 +374,12 @@ def nextflow_config(self): i = 0 if i == len(lines): passed.append("Lines for loading custom profiles found") + elif i == len(old_lines): + failed.append( + "Old lines for loading custom profiles found. 
File should contain: ```groovy\n{}".format( + "\n".join(lines) + ) + ) else: lines[2] = f"\t{lines[2]}" lines[4] = f"\t{lines[4]}" @@ -335,7 +390,7 @@ def nextflow_config(self): ) # Check for the availability of the "test" configuration profile by parsing nextflow.config - with open(os.path.join(self.wf_path, "nextflow.config")) as f: + with open(Path(self.wf_path, "nextflow.config")) as f: content = f.read() # Remove comments @@ -379,8 +434,8 @@ def nextflow_config(self): if param in ignore_defaults: ignored.append(f"Config default ignored: {param}") elif param in self.nf_config.keys(): - config_default = None - schema_default = None + config_default: Optional[Union[str, float, int]] = None + schema_default: Optional[Union[str, float, int]] = None if schema.schema_types[param_name] == "boolean": schema_default = str(schema.schema_defaults[param_name]).lower() config_default = str(self.nf_config[param]).lower() diff --git a/nf_core/lint/nfcore_yml.py b/nf_core/pipelines/lint/nfcore_yml.py similarity index 96% rename from nf_core/lint/nfcore_yml.py rename to nf_core/pipelines/lint/nfcore_yml.py index f23b2f1a8..e0d5fb200 100644 --- a/nf_core/lint/nfcore_yml.py +++ b/nf_core/pipelines/lint/nfcore_yml.py @@ -27,8 +27,7 @@ def nfcore_yml(self) -> Dict[str, List[str]]: ignored: List[str] = [] # Remove field that should be ignored according to the linting config - ignore_configs = self.lint_config.get(".nf-core", []) - + ignore_configs = self.lint_config.get(".nf-core", []) if self.lint_config is not None else [] try: with open(Path(self.wf_path, ".nf-core.yml")) as fh: content = fh.read() diff --git a/nf_core/lint/pipeline_name_conventions.py b/nf_core/pipelines/lint/pipeline_name_conventions.py similarity index 100% rename from nf_core/lint/pipeline_name_conventions.py rename to nf_core/pipelines/lint/pipeline_name_conventions.py diff --git a/nf_core/lint/pipeline_todos.py b/nf_core/pipelines/lint/pipeline_todos.py similarity index 82% rename from nf_core/lint/pipeline_todos.py rename to nf_core/pipelines/lint/pipeline_todos.py index ba6ec7915..0535069f9 100644 --- a/nf_core/lint/pipeline_todos.py +++ b/nf_core/pipelines/lint/pipeline_todos.py @@ -1,6 +1,7 @@ import fnmatch import logging import os +from pathlib import Path log = logging.getLogger(__name__) @@ -39,19 +40,19 @@ def pipeline_todos(self, root_dir=None): root_dir = self.wf_path ignore = [".git"] - if os.path.isfile(os.path.join(root_dir, ".gitignore")): - with open(os.path.join(root_dir, ".gitignore"), encoding="latin1") as fh: + if Path(root_dir, ".gitignore").is_file(): + with open(Path(root_dir, ".gitignore"), encoding="latin1") as fh: for line in fh: - ignore.append(os.path.basename(line.strip().rstrip("/"))) + ignore.append(Path(line.strip().rstrip("/")).name) for root, dirs, files in os.walk(root_dir, topdown=True): # Ignore files for i_base in ignore: - i = os.path.join(root, i_base) - dirs[:] = [d for d in dirs if not fnmatch.fnmatch(os.path.join(root, d), i)] - files[:] = [f for f in files if not fnmatch.fnmatch(os.path.join(root, f), i)] + i = str(Path(root, i_base)) + dirs[:] = [d for d in dirs if not fnmatch.fnmatch(str(Path(root, d)), i)] + files[:] = [f for f in files if not fnmatch.fnmatch(str(Path(root, f)), i)] for fname in files: try: - with open(os.path.join(root, fname), encoding="latin1") as fh: + with open(Path(root, fname), encoding="latin1") as fh: for line in fh: if "TODO nf-core" in line: line = ( @@ -63,7 +64,7 @@ def pipeline_todos(self, root_dir=None): .strip() ) warned.append(f"TODO string in 
`{fname}`: _{line}_") - file_paths.append(os.path.join(root, fname)) + file_paths.append(Path(root, fname)) except FileNotFoundError: log.debug(f"Could not open file {fname} in pipeline_todos lint test") diff --git a/nf_core/pipelines/lint/plugin_includes.py b/nf_core/pipelines/lint/plugin_includes.py new file mode 100644 index 000000000..4fc40ae26 --- /dev/null +++ b/nf_core/pipelines/lint/plugin_includes.py @@ -0,0 +1,44 @@ +import ast +import glob +import logging +import re +from typing import Dict, List + +log = logging.getLogger(__name__) + + +def plugin_includes(self) -> Dict[str, List[str]]: + """Checks the include statements in all *.nf files for plugin includes + + When nf-schema is used in an nf-core pipeline, the include statements of the plugin + functions have to use nf-schema instead of nf-validation, and vice versa + """ + config_plugins = [plugin.split("@")[0] for plugin in ast.literal_eval(self.nf_config.get("plugins", "[]"))] + validation_plugin = "nf-validation" if "nf-validation" in config_plugins else "nf-schema" + + passed: List[str] = [] + warned: List[str] = [] + failed: List[str] = [] + ignored: List[str] = [] + + plugin_include_pattern = re.compile(r"^include\s*{[^}]+}\s*from\s*[\"']plugin/([^\"']+)[\"']\s*$", re.MULTILINE) + workflow_files = [ + file for file in glob.glob(f"{self.wf_path}/**/*.nf", recursive=True) if not file.startswith("./modules/") + ] + test_passed = True + for file in workflow_files: + with open(file) as of: + plugin_includes = re.findall(plugin_include_pattern, of.read()) + for include in plugin_includes: + if include not in ["nf-validation", "nf-schema"]: + continue + if include != validation_plugin: + test_passed = False + failed.append( + f"Found a `{include}` plugin import in `{file[2:]}`, but `{validation_plugin}` was used in `nextflow.config`" + ) + + if test_passed: + passed.append("No wrong validation plugin imports have been found") + + return {"passed": passed, "warned": warned, "failed": failed, "ignored": ignored} diff --git a/nf_core/lint/readme.py b/nf_core/pipelines/lint/readme.py similarity index 92% rename from nf_core/lint/readme.py rename to nf_core/pipelines/lint/readme.py index cade9ca3e..bdfad5200 100644 --- a/nf_core/lint/readme.py +++ b/nf_core/pipelines/lint/readme.py @@ -1,5 +1,5 @@ -import os import re +from pathlib import Path def readme(self): @@ -29,14 +29,14 @@ def readme(self): failed = [] # Remove field that should be ignored according to the linting config - ignore_configs = self.lint_config.get("readme", []) + ignore_configs = self.lint_config.get("readme", []) if self.lint_config is not None else [] - with open(os.path.join(self.wf_path, "README.md")) as fh: + with open(Path(self.wf_path, "README.md")) as fh: content = fh.read() if "nextflow_badge" not in ignore_configs: # Check that there is a readme badge showing the minimum required version of Nextflow - # [![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A523.04.0-23aa62.svg)](https://www.nextflow.io/) + # [![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A524.04.2-23aa62.svg)](https://www.nextflow.io/) # and that it has the correct version nf_badge_re = r"\[!\[Nextflow\]\(https://img\.shields\.io/badge/nextflow%20DSL2-!?(?:%E2%89%A5|%3E%3D)([\d\.]+)-23aa62\.svg\)\]\(https://www\.nextflow\.io/\)" match = re.search(nf_badge_re, content) diff --git a/nf_core/lint/schema_description.py b/nf_core/pipelines/lint/schema_description.py similarity index 85% rename from nf_core/lint/schema_description.py rename to 
nf_core/pipelines/lint/schema_description.py index 90735f609..b586cc524 100644 --- a/nf_core/lint/schema_description.py +++ b/nf_core/pipelines/lint/schema_description.py @@ -1,4 +1,4 @@ -import nf_core.schema +import nf_core.pipelines.schema def schema_description(self): @@ -17,14 +17,14 @@ def schema_description(self): # First, get the top-level config options for the pipeline # Schema object already created in the `schema_lint` test - self.schema_obj = nf_core.schema.PipelineSchema() + self.schema_obj = nf_core.pipelines.schema.PipelineSchema() self.schema_obj.get_schema_path(self.wf_path) self.schema_obj.get_wf_params() self.schema_obj.no_prompts = True self.schema_obj.load_lint_schema() # Get parameters that should be ignored according to the linting config - ignore_params = self.lint_config.get("schema_description", []) + ignore_params = self.lint_config.get("schema_description", []) if self.lint_config is not None else [] # Get ungrouped params if "properties" in self.schema_obj.schema.keys(): @@ -36,8 +36,9 @@ def schema_description(self): warned.append(f"Ungrouped param in schema: `{up}`") # Iterate over groups and add warning for parameters without a description - for group_key in self.schema_obj.schema["definitions"].keys(): - group = self.schema_obj.schema["definitions"][group_key] + defs_notation = self.schema_obj.defs_notation + for group_key in self.schema_obj.schema[defs_notation].keys(): + group = self.schema_obj.schema[defs_notation][group_key] for param_key, param in group["properties"].items(): if param_key in ignore_params: ignored.append(f"Ignoring description check for param in schema: `{param_key}`") diff --git a/nf_core/lint/schema_lint.py b/nf_core/pipelines/lint/schema_lint.py similarity index 73% rename from nf_core/lint/schema_lint.py rename to nf_core/pipelines/lint/schema_lint.py index 178063d5d..4007bf8fe 100644 --- a/nf_core/lint/schema_lint.py +++ b/nf_core/pipelines/lint/schema_lint.py @@ -1,6 +1,6 @@ import logging -import nf_core.schema +import nf_core.pipelines.schema def schema_lint(self): @@ -10,32 +10,32 @@ def schema_lint(self): pipeline parameters (eg. ``params.something``, ``--something``). .. tip:: Reminder: you should generally never need to edit this JSON file by hand. - The ``nf-core schema build`` command can create *and edit* the file for you + The ``nf-core pipelines schema build`` command can create *and edit* the file for you to keep it up to date, with a friendly user-interface for customisation. The lint test checks the schema for the following: * Schema should be a valid JSON file - * Schema should adhere to `JSONSchema <https://json-schema.org/>`_, Draft 7. + * Schema should adhere to `JSONSchema <https://json-schema.org/>`_, Draft 7 or Draft 2020-12. 
* Parameters can be described in two places: * As ``properties`` in the top-level schema object - * As ``properties`` within subschemas listed in a top-level ``definitions`` objects + * As ``properties`` within subschemas listed in a top-level ``definitions`` (draft 7) or ``$defs`` (draft 2020-12) object * The schema must describe at least one parameter * There must be no duplicate parameter IDs across the schema and definition subschema - * All subschema in ``definitions`` must be referenced in the top-level ``allOf`` key + * All subschema in ``definitions`` or ``$defs`` must be referenced in the top-level ``allOf`` key * The top-level ``allOf`` key must not describe any non-existent definitions * Default parameters in the schema must be valid * Core top-level schema attributes should exist and be set as follows: - * ``$schema``: ``https://json-schema.org/draft-07/schema`` + * ``$schema``: ``https://json-schema.org/draft-07/schema`` or ``https://json-schema.org/draft/2020-12/schema`` * ``$id``: URL to the raw schema file, eg. ``https://raw.githubusercontent.com/YOURPIPELINE/master/nextflow_schema.json`` * ``title``: ``YOURPIPELINE pipeline parameters`` * ``description``: The pipeline config ``manifest.description`` * That the ``input`` property is defined and has a mimetype. A list of common mimetypes can be found `here `_. - For example, an *extremely* minimal schema could look like this: + For example, an *extremely* minimal schema could look like this (draft 7): .. code-block:: json @@ -57,18 +57,40 @@ def schema_lint(self): "properties": { "first_param": { "type": "string" } }, "definitions": { "my_first_group": { "properties": { "second_param": { "type": "string" } } } }, "allOf": [{"$ref": "#/definitions/my_first_group"}] } + Or this (draft 2020-12): + + .. code-block:: json + + { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "https://raw.githubusercontent.com/YOURPIPELINE/master/nextflow_schema.json", + "title": "YOURPIPELINE pipeline parameters", + "description": "This pipeline is for testing", + "properties": { + "first_param": { "type": "string" } + }, + "$defs": { + "my_first_group": { + "properties": { + "second_param": { "type": "string" } + } + } + }, + "allOf": [{"$ref": "#/$defs/my_first_group"}] + } + .. 
tip:: You can check your pipeline schema without having to run the entire pipeline lint - by running ``nf-core schema lint`` instead of ``nf-core lint`` + by running ``nf-core pipelines schema lint`` instead of ``nf-core pipelines lint`` """ passed = [] warned = [] failed = [] # Only show error messages from schema - logging.getLogger("nf_core.schema").setLevel(logging.ERROR) + logging.getLogger("nf_core.pipelines.schema").setLevel(logging.ERROR) # Lint the schema - self.schema_obj = nf_core.schema.PipelineSchema() + self.schema_obj = nf_core.pipelines.schema.PipelineSchema() self.schema_obj.get_schema_path(self.wf_path) try: diff --git a/nf_core/lint/schema_params.py b/nf_core/pipelines/lint/schema_params.py similarity index 95% rename from nf_core/lint/schema_params.py rename to nf_core/pipelines/lint/schema_params.py index 9280fe470..4c569defd 100644 --- a/nf_core/lint/schema_params.py +++ b/nf_core/pipelines/lint/schema_params.py @@ -1,4 +1,4 @@ -import nf_core.schema +import nf_core.pipelines.schema def schema_params(self): @@ -15,7 +15,7 @@ def schema_params(self): failed = [] # First, get the top-level config options for the pipeline - self.schema_obj = nf_core.schema.PipelineSchema() + self.schema_obj = nf_core.pipelines.schema.PipelineSchema() self.schema_obj.get_schema_path(self.wf_path) self.schema_obj.get_wf_params() self.schema_obj.no_prompts = True diff --git a/nf_core/lint/system_exit.py b/nf_core/pipelines/lint/system_exit.py similarity index 100% rename from nf_core/lint/system_exit.py rename to nf_core/pipelines/lint/system_exit.py diff --git a/nf_core/lint/template_strings.py b/nf_core/pipelines/lint/template_strings.py similarity index 94% rename from nf_core/lint/template_strings.py rename to nf_core/pipelines/lint/template_strings.py index 9b015bc20..37a1f64da 100644 --- a/nf_core/lint/template_strings.py +++ b/nf_core/pipelines/lint/template_strings.py @@ -38,11 +38,12 @@ def template_strings(self): failed = [] ignored = [] # Files that should be ignored according to the linting config - ignore_files = self.lint_config.get("template_strings", []) + ignore_files = self.lint_config.get("template_strings", []) if self.lint_config is not None else [] + files = self.list_files() # Loop through files, searching for string num_matches = 0 - for fn in self.files: + for fn in files: if str(fn.relative_to(self.wf_path)) in ignore_files: ignored.append(f"Ignoring Jinja template strings in file `{fn}`") continue diff --git a/nf_core/lint/version_consistency.py b/nf_core/pipelines/lint/version_consistency.py similarity index 98% rename from nf_core/lint/version_consistency.py rename to nf_core/pipelines/lint/version_consistency.py index e396ca9e7..5fe24ed72 100644 --- a/nf_core/lint/version_consistency.py +++ b/nf_core/pipelines/lint/version_consistency.py @@ -4,7 +4,7 @@ def version_consistency(self): """Pipeline and container version number consistency. - .. note:: This test only runs when the ``--release`` flag is set for ``nf-core lint``, + .. note:: This test only runs when the ``--release`` flag is set for ``nf-core pipelines lint``, or ``$GITHUB_REF`` is equal to ``master``. 
This lint fetches the pipeline version number from three possible locations: diff --git a/nf_core/lint_utils.py b/nf_core/pipelines/lint_utils.py similarity index 73% rename from nf_core/lint_utils.py rename to nf_core/pipelines/lint_utils.py index 167600bfc..b4c56c600 100644 --- a/nf_core/lint_utils.py +++ b/nf_core/pipelines/lint_utils.py @@ -2,9 +2,10 @@ import logging import subprocess from pathlib import Path -from typing import List +from typing import List, Union import rich +import yaml from rich.console import Console from rich.table import Table @@ -22,15 +23,22 @@ def print_joint_summary(lint_obj, module_lint_obj, subworkflow_lint_obj): swf_passed = 0 swf_warned = 0 swf_failed = 0 + module_passed = 0 + module_warned = 0 + module_failed = 0 if subworkflow_lint_obj is not None: swf_passed = len(subworkflow_lint_obj.passed) swf_warned = len(subworkflow_lint_obj.warned) swf_failed = len(subworkflow_lint_obj.failed) - nbr_passed = len(lint_obj.passed) + len(module_lint_obj.passed) + swf_passed + if module_lint_obj is not None: + module_passed = len(module_lint_obj.passed) + module_warned = len(module_lint_obj.warned) + module_failed = len(module_lint_obj.failed) + nbr_passed = len(lint_obj.passed) + module_passed + swf_passed nbr_ignored = len(lint_obj.ignored) nbr_fixed = len(lint_obj.fixed) - nbr_warned = len(lint_obj.warned) + len(module_lint_obj.warned) + swf_warned - nbr_failed = len(lint_obj.failed) + len(module_lint_obj.failed) + swf_failed + nbr_warned = len(lint_obj.warned) + module_warned + swf_warned + nbr_failed = len(lint_obj.failed) + module_failed + swf_failed summary_colour = "red" if nbr_failed > 0 else "green" table = Table(box=rich.box.ROUNDED, style=summary_colour) @@ -50,7 +58,7 @@ def print_fixes(lint_obj): if lint_obj.could_fix: fix_flags = "".join([f" --fix {fix}" for fix in lint_obj.could_fix]) wf_dir = "" if lint_obj.wf_path == "." else f"--dir {lint_obj.wf_path}" - fix_cmd = f"nf-core lint {wf_dir} {fix_flags}" + fix_cmd = f"nf-core pipelines lint {wf_dir} {fix_flags}" console.print( "\nTip: Some of these linting errors can automatically be resolved with the following command:\n\n" f"[blue] {fix_cmd}\n" @@ -62,7 +70,7 @@ def print_fixes(lint_obj): ) -def run_prettier_on_file(file): +def run_prettier_on_file(file: Union[Path, str, List[str]]) -> None: """Run the pre-commit hook prettier on a file. Args: @@ -73,12 +81,15 @@ def run_prettier_on_file(file): """ nf_core_pre_commit_config = Path(nf_core.__file__).parent / ".pre-commit-prettier-config.yaml" + args = ["pre-commit", "run", "--config", str(nf_core_pre_commit_config), "prettier"] + if isinstance(file, List): + args.extend(["--files", *file]) + else: + args.extend(["--files", str(file)]) + try: - subprocess.run( - ["pre-commit", "run", "--config", nf_core_pre_commit_config, "prettier", "--files", file], - capture_output=True, - check=True, - ) + subprocess.run(args, capture_output=True, check=True) + log.debug(f"${subprocess.STDOUT}") except subprocess.CalledProcessError as e: if ": SyntaxError: " in e.stdout.decode(): log.critical(f"Can't format {file} because it has a syntax error.\n{e.stdout.decode()}") @@ -104,15 +115,31 @@ def dump_json_with_prettier(file_name, file_content): run_prettier_on_file(file_name) +def dump_yaml_with_prettier(file_name: Union[Path, str], file_content: dict) -> None: + """Dump a YAML file and run prettier on it. + + Args: + file_name (Path | str): A file identifier as a string or pathlib.Path. 
+ file_content (dict): Content to dump into the YAML file + """ + with open(file_name, "w") as fh: + yaml.safe_dump(file_content, fh) + run_prettier_on_file(file_name) + + def ignore_file(lint_name: str, file_path: Path, dir_path: Path) -> List[List[str]]: """Ignore a file and add the result to the ignored list. Return the passed, failed, ignored and ignore_configs lists.""" passed: List[str] = [] failed: List[str] = [] ignored: List[str] = [] - _, lint_conf = nf_core.utils.load_tools_config(dir_path) - lint_conf = lint_conf.get("lint", {}) - ignore_entry: List[str] | bool = lint_conf.get(lint_name, []) + _, pipeline_conf = nf_core.utils.load_tools_config(dir_path) + lint_conf = getattr(pipeline_conf, "lint", None) or None + + if lint_conf is None: + ignore_entry: List[str] = [] + else: + ignore_entry = lint_conf.get(lint_name, []) full_path = dir_path / file_path # Return a failed status if we can't find the file if not full_path.is_file(): diff --git a/nf_core/list.py b/nf_core/pipelines/list.py similarity index 100% rename from nf_core/list.py rename to nf_core/pipelines/list.py diff --git a/nf_core/params_file.py b/nf_core/pipelines/params_file.py similarity index 97% rename from nf_core/params_file.py rename to nf_core/pipelines/params_file.py index 78798b065..d61b7cfbc 100644 --- a/nf_core/params_file.py +++ b/nf_core/pipelines/params_file.py @@ -8,9 +8,9 @@ import questionary -import nf_core.list +import nf_core.pipelines.list import nf_core.utils -from nf_core.schema import PipelineSchema +from nf_core.pipelines.schema import PipelineSchema log = logging.getLogger(__name__) @@ -97,7 +97,7 @@ def __init__( self.schema_obj: Optional[PipelineSchema] = None # Fetch remote workflows - self.wfs = nf_core.list.Workflows() + self.wfs = nf_core.pipelines.list.Workflows() self.wfs.get_remote_workflows() def get_pipeline(self): @@ -124,7 +124,7 @@ def get_pipeline(self): ).unsafe_ask() # Get the schema - self.schema_obj = nf_core.schema.PipelineSchema() + self.schema_obj = nf_core.pipelines.schema.PipelineSchema() self.schema_obj.get_schema_path(self.pipeline, local_only=False, revision=self.pipeline_revision) self.schema_obj.get_wf_params() diff --git a/nf_core/refgenie.py b/nf_core/pipelines/refgenie.py similarity index 96% rename from nf_core/refgenie.py rename to nf_core/pipelines/refgenie.py index de9201bcd..426ca5eb7 100644 --- a/nf_core/refgenie.py +++ b/nf_core/pipelines/refgenie.py @@ -181,13 +181,17 @@ def update_config(rgc): log.info("Could not determine path to 'refgenie_genomes.config' file.") return False + if refgenie_genomes_config_file is None: + log.info("Could not determine path to 'refgenie_genomes.config' file.") + return False + # Save the updated genome config try: with open(refgenie_genomes_config_file, "w+") as fh: fh.write(refgenie_genomes) log.info(f"Updated nf-core genomes config: {refgenie_genomes_config_file}") except FileNotFoundError: - log.warning(f"Could not write to {refgenie_genomes_config_file}") + log.info(f"Could not write to {refgenie_genomes_config_file}") return False # Add include statement to NXF_HOME/config diff --git a/nf_core/schema.py b/nf_core/pipelines/schema.py similarity index 84% rename from nf_core/schema.py rename to nf_core/pipelines/schema.py index 4f5acfa0a..3aec815c7 100644 --- a/nf_core/schema.py +++ b/nf_core/pipelines/schema.py @@ -16,9 +16,9 @@ from rich.prompt import Confirm from rich.syntax import Syntax -import nf_core.list +import nf_core.pipelines.list import nf_core.utils -from nf_core.lint_utils import 
dump_json_with_prettier, run_prettier_on_file +from nf_core.pipelines.lint_utils import dump_json_with_prettier, run_prettier_on_file log = logging.getLogger(__name__) @@ -32,7 +32,7 @@ def __init__(self): self.schema = {} self.pipeline_dir = "" - self.schema_filename = "" + self._schema_filename = "" self.schema_defaults = {} self.schema_types = {} self.schema_params = {} @@ -46,6 +46,69 @@ def __init__(self): self.web_schema_build_url = "https://nf-co.re/pipeline_schema_builder" self.web_schema_build_web_url = None self.web_schema_build_api_url = None + self.validation_plugin = None + self.schema_draft = None + self.defs_notation = None + self.ignored_params = [] + + # Update the validation plugin code every time the schema gets changed + def set_schema_filename(self, schema: str) -> None: + self._schema_filename = schema + self._update_validation_plugin_from_config() + + def get_schema_filename(self) -> str: + return self._schema_filename + + def del_schema_filename(self) -> None: + del self._schema_filename + + schema_filename = property(get_schema_filename, set_schema_filename, del_schema_filename) + + def _update_validation_plugin_from_config(self) -> None: + plugin = "nf-schema" + if self.schema_filename: + conf = nf_core.utils.fetch_wf_config(Path(self.schema_filename).parent) + else: + conf = nf_core.utils.fetch_wf_config(Path(self.pipeline_dir)) + + plugins = str(conf.get("plugins", "")).strip("'\"").strip(" ").split(",") + plugin_found = False + for plugin_instance in plugins: + if "nf-schema" in plugin_instance: + plugin = "nf-schema" + plugin_found = True + break + elif "nf-validation" in plugin_instance: + plugin = "nf-validation" + plugin_found = True + break + + if not plugin_found: + log.info( + "Could not find nf-schema or nf-validation in the pipeline config. Defaulting to nf-schema notation for the JSON schema." 
+ ) + + self.validation_plugin = plugin + # Previous versions of nf-schema used "defs", but it's advised to use "$defs" + if plugin == "nf-schema": + self.defs_notation = "$defs" + ignored_params = [ + conf.get("validation.help.shortParameter", "help"), + conf.get("validation.help.fullParameter", "helpFull"), + conf.get("validation.help.showHiddenParameter", "showHidden"), + ] # Help parameter should be ignored by default + ignored_params_config = conf.get("validation", {}).get("defaultIgnoreParams", []) + if len(ignored_params_config) > 0: + ignored_params.extend(ignored_params_config) + self.ignored_params = ignored_params + self.schema_draft = "https://json-schema.org/draft/2020-12/schema" + + else: + self.defs_notation = "definitions" + self.schema_draft = "https://json-schema.org/draft-07/schema" + self.get_wf_params() + self.ignored_params = self.pipeline_params.get("validationSchemaIgnoreParams", "").strip("\"'").split(",") + self.ignored_params.append("validationSchemaIgnoreParams") def get_schema_path( self, path: Union[str, Path], local_only: bool = False, revision: Union[str, None] = None @@ -66,7 +129,7 @@ def get_schema_path( # Path does not exist - assume a name of a remote workflow elif not local_only: - self.pipeline_dir = nf_core.list.get_local_wf(path, revision=revision) + self.pipeline_dir = nf_core.pipelines.list.get_local_wf(path, revision=revision) self.schema_filename = Path(self.pipeline_dir or "", "nextflow_schema.json") # check if the schema file exists if not self.schema_filename.exists(): @@ -116,6 +179,8 @@ def load_schema(self): self.schema = json.load(fh) self.schema_defaults = {} self.schema_params = {} + if "$schema" not in self.schema: + raise AssertionError("Schema missing top-level `$schema` attribute") log.debug(f"JSON file loaded: {self.schema_filename}") def sanitise_param_default(self, param): @@ -168,10 +233,11 @@ def get_schema_defaults(self) -> None: if param["default"] is not None: self.schema_defaults[p_key] = param["default"] + # TODO add support for nested parameters # Grouped schema properties in subschema definitions - for defn_name, definition in self.schema.get("definitions", {}).items(): + for defn_name, definition in self.schema.get(self.defs_notation, {}).items(): for p_key, param in definition.get("properties", {}).items(): - self.schema_params[p_key] = ("definitions", defn_name, "properties", p_key) + self.schema_params[p_key] = (self.defs_notation, defn_name, "properties", p_key) if "default" in param: param = self.sanitise_param_default(param) if param["default"] is not None: @@ -182,7 +248,7 @@ def get_schema_types(self) -> None: for name, param in self.schema.get("properties", {}).items(): if "type" in param: self.schema_types[name] = param["type"] - for _, definition in self.schema.get("definitions", {}).items(): + for _, definition in self.schema.get(self.defs_notation, {}).items(): for name, param in definition.get("properties", {}).items(): if "type" in param: self.schema_types[name] = param["type"] @@ -191,7 +257,7 @@ def save_schema(self, suppress_logging=False): """Save a pipeline schema to a file""" # Write results to a JSON file num_params = len(self.schema.get("properties", {})) - num_params += sum(len(d.get("properties", {})) for d in self.schema.get("definitions", {}).values()) + num_params += sum(len(d.get("properties", {})) for d in self.schema.get(self.defs_notation, {}).values()) if not suppress_logging: log.info(f"Writing schema with {num_params} params: '{self.schema_filename}'") 
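The ``defs_notation``-aware counting above behaves the same for both drafts; a toy sketch with an invented draft 2020-12 schema (not taken from any pipeline):

.. code-block:: python

    schema = {
        "properties": {"first_param": {"type": "string"}},
        "$defs": {"my_group": {"properties": {"second_param": {"type": "string"}}}},
    }
    defs_notation = "$defs"  # "definitions" when the pipeline still uses nf-validation / draft-07

    num_params = len(schema.get("properties", {}))
    num_params += sum(len(d.get("properties", {})) for d in schema.get(defs_notation, {}).values())
    assert num_params == 2  # one top-level param plus one grouped param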
dump_json_with_prettier(self.schema_filename, self.schema) @@ -248,13 +314,14 @@ def validate_default_params(self): if self.schema is None: log.error("[red][✗] Pipeline schema not found") try: + # TODO add support for nested parameters # Make copy of schema and remove required flags schema_no_required = copy.deepcopy(self.schema) if "required" in schema_no_required: schema_no_required.pop("required") - for group_key, group in schema_no_required.get("definitions", {}).items(): + for group_key, group in schema_no_required.get(self.defs_notation, {}).items(): if "required" in group: - schema_no_required["definitions"][group_key].pop("required") + schema_no_required[self.defs_notation][group_key].pop("required") jsonschema.validate(self.schema_defaults, schema_no_required) except jsonschema.exceptions.ValidationError as e: raise AssertionError(f"Default parameters are invalid: {e.message}") @@ -269,17 +336,11 @@ def validate_default_params(self): if self.pipeline_params == {}: self.get_wf_params() - # Collect parameters to ignore - if "validationSchemaIgnoreParams" in self.pipeline_params: - params_ignore = self.pipeline_params.get("validationSchemaIgnoreParams", "").strip("\"'").split(",") - else: - params_ignore = [] - # Go over group keys - for group_key, group in schema_no_required.get("definitions", {}).items(): + for group_key, group in schema_no_required.get(self.defs_notation, {}).items(): group_properties = group.get("properties") for param in group_properties: - if param in params_ignore: + if param in self.ignored_params: continue if param in self.pipeline_params: self.validate_config_default_parameter(param, group_properties[param], self.pipeline_params[param]) @@ -292,7 +353,7 @@ def validate_default_params(self): ungrouped_properties = self.schema.get("properties") if ungrouped_properties: for param in ungrouped_properties: - if param in params_ignore: + if param in self.ignored_params: continue if param in self.pipeline_params: self.validate_config_default_parameter( @@ -312,7 +373,7 @@ def validate_config_default_parameter(self, param, schema_param, config_default) # If we have a default in the schema, check it matches the config if "default" in schema_param and ( (schema_param["type"] == "boolean" and str(config_default).lower() != str(schema_param["default"]).lower()) - and (str(schema_param["default"]) != str(config_default).strip('"').strip("'")) + and (str(schema_param["default"]) != str(config_default).strip("'\"")) ): # Check that we are not deferring the execution of this parameter in the schema default with squiggly brakcets if schema_param["type"] != "string" or "{" not in schema_param["default"]: @@ -359,39 +420,66 @@ def validate_schema(self, schema=None): """ if schema is None: schema = self.schema - try: - jsonschema.Draft7Validator.check_schema(schema) - log.debug("JSON Schema Draft7 validated") - except jsonschema.exceptions.SchemaError as e: - raise AssertionError(f"Schema does not validate as Draft 7 JSON Schema:\n {e}") + + if "$schema" not in schema: + raise AssertionError("Schema missing top-level `$schema` attribute") + schema_draft = schema["$schema"] + if self.schema_draft != schema_draft: + raise AssertionError(f"Schema is using the wrong draft: {schema_draft}, should be {self.schema_draft}") + if self.schema_draft == "https://json-schema.org/draft-07/schema": + try: + jsonschema.Draft7Validator.check_schema(schema) + log.debug("JSON Schema Draft7 validated") + except jsonschema.exceptions.SchemaError as e: + raise AssertionError(f"Schema does not 
validate as Draft 7 JSON Schema:\n {e}") + elif self.schema_draft == "https://json-schema.org/draft/2020-12/schema": + try: + jsonschema.Draft202012Validator.check_schema(schema) + log.debug("JSON Schema Draft2020-12 validated") + except jsonschema.exceptions.SchemaError as e: + raise AssertionError(f"Schema does not validate as Draft 2020-12 JSON Schema:\n {e}") + else: + raise AssertionError( + f"Schema `$schema` should be `https://json-schema.org/draft/2020-12/schema` or `https://json-schema.org/draft-07/schema` \n Found `{schema_draft}`" + ) param_keys = list(schema.get("properties", {}).keys()) num_params = len(param_keys) - for d_key, d_schema in schema.get("definitions", {}).items(): + + # Add a small check for older nf-schema JSON schemas + if "defs" in schema: + raise AssertionError( + f'Using "defs" for schema definitions is not supported. Please use "{self.defs_notation}" instead' + ) + + for d_key, d_schema in schema.get(self.defs_notation, {}).items(): # Check that this definition is mentioned in allOf if "allOf" not in schema: raise AssertionError("Schema has definitions, but no allOf key") in_allOf = False for allOf in schema.get("allOf", []): - if allOf["$ref"] == f"#/definitions/{d_key}": + if allOf["$ref"] == f"#/{self.defs_notation}/{d_key}": in_allOf = True if not in_allOf: - raise AssertionError(f"Definition subschema `{d_key}` not included in schema `allOf`") + raise AssertionError( + f"Definition subschema `#/{self.defs_notation}/{d_key}` not included in schema `allOf`" + ) + # TODO add support for nested parameters for d_param_id in d_schema.get("properties", {}): # Check that we don't have any duplicate parameter IDs in different definitions if d_param_id in param_keys: - raise AssertionError(f"Duplicate parameter found in schema `definitions`: `{d_param_id}`") + raise AssertionError(f"Duplicate parameter found in schema `{self.defs_notation}`: `{d_param_id}`") param_keys.append(d_param_id) num_params += 1 # Check that everything in allOf exists for allOf in schema.get("allOf", []): - if "definitions" not in schema: - raise AssertionError("Schema has allOf, but no definitions") - def_key = allOf["$ref"][14:] - if def_key not in schema.get("definitions", {}): - raise AssertionError(f"Subschema `{def_key}` found in `allOf` but not `definitions`") + _, allof_defs_notation, def_key = allOf["$ref"].split("/") # "#/<defs notation>/<def key>" + if allof_defs_notation not in schema: + raise AssertionError(f"Schema has allOf, but no {allof_defs_notation}") + if def_key not in schema.get(allof_defs_notation, {}): + raise AssertionError(f"Subschema `{def_key}` found in `allOf` but not `{allof_defs_notation}`") # Check that the schema describes at least one parameter if num_params == 0: @@ -402,7 +490,7 @@ def validate_schema(self, schema=None): """ Extra validation command for linting. - Checks that the schema "$id", "title" and "description" attributes match the piipeline config. + Checks that the schema "$id", "title" and "description" attributes match the pipeline config. 
""" if schema is None: schema = self.schema @@ -410,12 +498,6 @@ def validate_schema_title_description(self, schema=None): log.debug("Pipeline schema not set - skipping validation of top-level attributes") return None - if "$schema" not in self.schema: - raise AssertionError("Schema missing top-level `$schema` attribute") - schema_attr = "http://json-schema.org/draft-07/schema" - if self.schema["$schema"] != schema_attr: - raise AssertionError(f"Schema `$schema` should be `{schema_attr}`\n Found `{self.schema['$schema']}`") - if self.pipeline_manifest == {}: self.get_wf_params() @@ -465,9 +547,9 @@ def check_for_input_mimetype(self): if "input" not in self.schema_params: raise LookupError("Parameter `input` not found in schema") # Check that the input parameter is defined in the right place - if "input" not in self.schema.get("definitions", {}).get("input_output_options", {}).get("properties", {}): + if "input" not in self.schema.get(self.defs_notation, {}).get("input_output_options", {}).get("properties", {}): raise LookupError("Parameter `input` is not defined in the correct subschema (input_output_options)") - input_entry = self.schema["definitions"]["input_output_options"]["properties"]["input"] + input_entry = self.schema[self.defs_notation]["input_output_options"]["properties"]["input"] if "mimetype" not in input_entry: return None mimetype = input_entry["mimetype"] @@ -519,7 +601,7 @@ def schema_to_markdown(self, columns): out = f"# {self.schema['title']}\n\n" out += f"{self.schema['description']}\n" # Grouped parameters - for definition in self.schema.get("definitions", {}).values(): + for definition in self.schema.get(self.defs_notation, {}).values(): out += f"\n## {definition.get('title', {})}\n\n" out += f"{definition.get('description', '')}\n\n" required = definition.get("required", []) @@ -701,15 +783,15 @@ def remove_schema_empty_definitions(self): """ # Identify and remove empty definitions from the schema empty_definitions = [] - for d_key, d_schema in list(self.schema.get("definitions", {}).items()): + for d_key, d_schema in list(self.schema.get(self.defs_notation, {}).items()): if not d_schema.get("properties"): - del self.schema["definitions"][d_key] + del self.schema[self.defs_notation][d_key] empty_definitions.append(d_key) log.warning(f"Removing empty group: '{d_key}'") # Remove "allOf" group with empty definitions from the schema for d_key in empty_definitions: - allOf = {"$ref": f"#/definitions/{d_key}"} + allOf = {"$ref": f"#/{self.defs_notation}/{d_key}"} if allOf in self.schema.get("allOf", []): self.schema["allOf"].remove(allOf) @@ -718,8 +800,8 @@ def remove_schema_empty_definitions(self): del self.schema["allOf"] # If we don't have anything left in "definitions", remove it - if self.schema.get("definitions") == {}: - del self.schema["definitions"] + if self.schema.get(self.defs_notation) == {}: + del self.schema[self.defs_notation] def remove_schema_notfound_configs(self): """ @@ -729,9 +811,9 @@ def remove_schema_notfound_configs(self): # Top-level properties self.schema, params_removed = self.remove_schema_notfound_configs_single_schema(self.schema) # Sub-schemas in definitions - for d_key, definition in self.schema.get("definitions", {}).items(): + for d_key, definition in self.schema.get(self.defs_notation, {}).items(): cleaned_schema, p_removed = self.remove_schema_notfound_configs_single_schema(definition) - self.schema["definitions"][d_key] = cleaned_schema + self.schema[self.defs_notation][d_key] = cleaned_schema params_removed.extend(p_removed) return 
params_removed @@ -783,13 +865,12 @@ def add_schema_found_configs(self): Update defaults if they have changed """ params_added = [] - params_ignore = self.pipeline_params.get("validationSchemaIgnoreParams", "").strip("\"'").split(",") - params_ignore.append("validationSchemaIgnoreParams") + for p_key, p_val in self.pipeline_params.items(): s_key = self.schema_params.get(p_key) # Check if key is in schema parameters # Key is in pipeline but not in schema or ignored from schema - if p_key not in self.schema_params and p_key not in params_ignore: + if p_key not in self.schema_params and p_key not in self.ignored_params: if ( self.no_prompts or self.schema_from_scratch @@ -822,7 +903,7 @@ def add_schema_found_configs(self): elif ( s_key and (p_key not in self.schema_defaults) - and (p_key not in params_ignore) + and (p_key not in self.ignored_params) and (p_def := self.build_schema_param(p_val).get("default")) ): if self.no_prompts or Confirm.ask( @@ -912,7 +993,7 @@ def get_web_builder_response(self): if web_response["status"] == "waiting_for_user": return False if web_response["status"] == "web_builder_edited": - log.info("Found saved status from nf-core schema builder") + log.info("Found saved status from nf-core pipelines schema builder") try: self.schema = web_response["schema"] self.remove_schema_empty_definitions() diff --git a/nf_core/sync.py b/nf_core/pipelines/sync.py similarity index 85% rename from nf_core/sync.py rename to nf_core/pipelines/sync.py index 5e7b198d8..12b29f15e 100644 --- a/nf_core/sync.py +++ b/nf_core/pipelines/sync.py @@ -5,19 +5,23 @@ import os import re import shutil +from pathlib import Path +from typing import Dict, Optional, Union import git import questionary import requests +import requests.auth import requests_cache import rich import yaml from git import GitCommandError, InvalidGitRepositoryError import nf_core -import nf_core.create -import nf_core.list +import nf_core.pipelines.create.create +import nf_core.pipelines.list import nf_core.utils +from nf_core.pipelines.lint_utils import dump_yaml_with_prettier log = logging.getLogger(__name__) @@ -59,24 +63,24 @@ class PipelineSync: def __init__( self, - pipeline_dir, - from_branch=None, - make_pr=False, - gh_repo=None, - gh_username=None, - template_yaml_path=None, - force_pr=False, + pipeline_dir: Union[str, Path], + from_branch: Optional[str] = None, + make_pr: bool = False, + gh_repo: Optional[str] = None, + gh_username: Optional[str] = None, + template_yaml_path: Optional[str] = None, + force_pr: bool = False, ): """Initialise syncing object""" - self.pipeline_dir = os.path.abspath(pipeline_dir) + self.pipeline_dir: Path = Path(pipeline_dir).resolve() self.from_branch = from_branch self.original_branch = None self.original_merge_branch = f"nf-core-template-merge-{nf_core.__version__}" self.merge_branch = self.original_merge_branch self.made_changes = False self.make_pr = make_pr - self.gh_pr_returned_data = {} + self.gh_pr_returned_data: Dict = {} self.required_config_vars = ["manifest.name", "manifest.description", "manifest.version", "manifest.author"] self.force_pr = force_pr @@ -85,23 +89,23 @@ def __init__( self.pr_url = "" self.config_yml_path, self.config_yml = nf_core.utils.load_tools_config(self.pipeline_dir) - + assert self.config_yml_path is not None and self.config_yml is not None # mypy # Throw deprecation warning if template_yaml_path is set if template_yaml_path is not None: log.warning( f"The `template_yaml_path` argument is deprecated. 
Saving pipeline creation settings in .nf-core.yml instead. Please remove the {template_yaml_path} file." ) - if "template" in self.config_yml: + if getattr(self.config_yml, "template", None) is not None: overwrite_template = questionary.confirm( f"A template section already exists in '{self.config_yml_path}'. Do you want to overwrite?", style=nf_core.utils.nfcore_question_style, default=False, ).unsafe_ask() - if overwrite_template or "template" not in self.config_yml: + if overwrite_template or getattr(self.config_yml, "template", None) is None: with open(template_yaml_path) as f: - self.config_yml["template"] = yaml.safe_load(f) + self.config_yml.template = yaml.safe_load(f) with open(self.config_yml_path, "w") as fh: - yaml.safe_dump(self.config_yml, fh) + yaml.safe_dump(self.config_yml.model_dump(), fh) log.info(f"Saved pipeline creation settings to '{self.config_yml_path}'") raise SystemExit( f"Please commit your changes and delete the {template_yaml_path} file. Then run the sync command again." ) @@ -185,7 +189,7 @@ def inspect_sync_dir(self): # Check to see if there are uncommitted changes on current branch if self.repo.is_dirty(untracked_files=True): raise SyncExceptionError( - "Uncommitted changes found in pipeline directory!\nPlease commit these before running nf-core sync" + "Uncommitted changes found in pipeline directory!\nPlease commit these before running nf-core pipelines sync" ) def get_wf_config(self): @@ -209,7 +213,7 @@ def get_wf_config(self): # Fetch workflow variables log.debug("Fetching workflow config variables") - self.wf_config = nf_core.utils.fetch_wf_config(self.pipeline_dir) + self.wf_config = nf_core.utils.fetch_wf_config(Path(self.pipeline_dir)) # Check that we have the required variables for rvar in self.required_config_vars: @@ -257,24 +261,38 @@ def make_template_pipeline(self): log.info("Making a new template pipeline using pipeline variables") # Only show error messages from pipeline creation - logging.getLogger("nf_core.create").setLevel(logging.ERROR) + logging.getLogger("nf_core.pipelines.create").setLevel(logging.ERROR) + assert self.config_yml_path is not None + assert self.config_yml is not None # Re-write the template yaml info from .nf-core.yml config - if "template" in self.config_yml: + if self.config_yml.template is not None: + # Set force true in config to overwrite existing files + + self.config_yml.template.force = True with open(self.config_yml_path, "w") as config_path: - yaml.safe_dump(self.config_yml, config_path) + yaml.safe_dump(self.config_yml.model_dump(), config_path) try: - nf_core.create.PipelineCreate( - name=self.wf_config["manifest.name"].strip('"').strip("'"), - description=self.wf_config["manifest.description"].strip('"').strip("'"), - version=self.wf_config["manifest.version"].strip('"').strip("'"), + pipeline_create_obj = nf_core.pipelines.create.create.PipelineCreate( + outdir=str(self.pipeline_dir), + from_config_file=True, no_git=True, force=True, - outdir=self.pipeline_dir, - author=self.wf_config["manifest.author"].strip('"').strip("'"), - plain=True, - ).init_pipeline() + ) + pipeline_create_obj.init_pipeline() + + # Set force to false to avoid overwriting files in the future + if self.config_yml.template is not None: + self.config_yml.template = pipeline_create_obj.config + # Do not overwrite existing files on future syncs + self.config_yml.template.force = False + # Set outdir as the current directory to avoid local info leaking + self.config_yml.template.outdir = "." 
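After this block the ``template`` section written back to ``.nf-core.yml`` carries ``force: false`` and the neutral ``outdir``; a plausible resulting shape (field values illustrative, ``org`` as used for the template config earlier in this diff):

.. code-block:: yaml

    template:
      name: testpipeline
      org: nf-core
      description: This pipeline is for testing
      force: false
      outdir: .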
+ # Update nf-core version + self.config_yml.nf_core_version = nf_core.__version__ + dump_yaml_with_prettier(self.config_yml_path, self.config_yml.model_dump()) + except Exception as err: # Reset to where you were to prevent git getting messed up. self.repo.git.reset("--hard") @@ -411,21 +429,24 @@ def close_open_template_merge_prs(self): return False for pr in list_prs_json: - log.debug(f"Looking at PR from '{pr['head']['ref']}': {pr['html_url']}") - # Ignore closed PRs - if pr["state"] != "open": - log.debug(f"Ignoring PR as state not open ({pr['state']}): {pr['html_url']}") - continue + if isinstance(pr, int): + log.debug(f"Incorrect PR format: {pr}") + else: + log.debug(f"Looking at PR from '{pr['head']['ref']}': {pr['html_url']}") + # Ignore closed PRs + if pr["state"] != "open": + log.debug(f"Ignoring PR as state not open ({pr['state']}): {pr['html_url']}") + continue - # Don't close the new PR that we just opened - if pr["head"]["ref"] == self.merge_branch: - continue + # Don't close the new PR that we just opened + if pr["head"]["ref"] == self.merge_branch: + continue - # PR is from an automated branch and goes to our target base - if pr["head"]["ref"].startswith("nf-core-template-merge-") and pr["base"]["ref"] == self.from_branch: - self.close_open_pr(pr) + # PR is from an automated branch and goes to our target base + if pr["head"]["ref"].startswith("nf-core-template-merge-") and pr["base"]["ref"] == self.from_branch: + self.close_open_pr(pr) - def close_open_pr(self, pr): + def close_open_pr(self, pr) -> bool: """Given a PR API response, add a comment and close.""" log.debug(f"Attempting to close PR: '{pr['html_url']}'") diff --git a/nf_core/subworkflow-template/tests/main.nf.test.j2 b/nf_core/subworkflow-template/tests/main.nf.test.j2 index c44e19a4e..c493e7a15 100644 --- a/nf_core/subworkflow-template/tests/main.nf.test.j2 +++ b/nf_core/subworkflow-template/tests/main.nf.test.j2 @@ -7,7 +7,7 @@ nextflow_workflow { workflow "{{ component_name_underscore|upper }}" tag "subworkflows" - tag "subworkflows_nfcore" + tag "subworkflows_{{ org_alphabet }}" tag "subworkflows/{{ component_name }}" // TODO nf-core: Add tags for all modules used within this subworkflow. Example: tag "samtools" @@ -22,12 +22,14 @@ nextflow_workflow { workflow { """ // TODO nf-core: define inputs of the workflow here. 
Example: - input[0] = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true) - ] - input[1] = [ [ id:'genome' ], - file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) - ] + input[0] = [ + [ id:'test', single_end:false ], // meta map + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.paired_end.sorted.bam', checkIfExists: true), + ] + input[1] = [ + [ id:'genome' ], + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.paired_end.sorted.bam', checkIfExists: true), + ] """ } } diff --git a/nf_core/subworkflow-template/tests/tags.yml b/nf_core/subworkflow-template/tests/tags.yml deleted file mode 100644 index 35cad3678..000000000 --- a/nf_core/subworkflow-template/tests/tags.yml +++ /dev/null @@ -1,2 +0,0 @@ -subworkflows/{{ component_name_underscore }}: - - subworkflows/{{ org }}/{{ component_dir }}/** diff --git a/nf_core/subworkflows/install.py b/nf_core/subworkflows/install.py index 6c5cfb12b..70a6b0afa 100644 --- a/nf_core/subworkflows/install.py +++ b/nf_core/subworkflows/install.py @@ -11,7 +11,7 @@ def __init__( remote_url=None, branch=None, no_pull=False, - installed_by=False, + installed_by=None, ): super().__init__( pipeline_dir, diff --git a/nf_core/subworkflows/lint/__init__.py b/nf_core/subworkflows/lint/__init__.py index cc79ed863..cedae62f1 100644 --- a/nf_core/subworkflows/lint/__init__.py +++ b/nf_core/subworkflows/lint/__init__.py @@ -11,14 +11,23 @@ import questionary import rich +import ruamel.yaml import nf_core.modules.modules_utils import nf_core.utils from nf_core.components.lint import ComponentLint, LintExceptionError, LintResult -from nf_core.lint_utils import console +from nf_core.pipelines.lint_utils import console, run_prettier_on_file log = logging.getLogger(__name__) +# Import lint functions +from .main_nf import main_nf # type: ignore[misc] +from .meta_yml import meta_yml # type: ignore[misc] +from .subworkflow_changes import subworkflow_changes # type: ignore[misc] +from .subworkflow_tests import subworkflow_tests # type: ignore[misc] +from .subworkflow_todos import subworkflow_todos # type: ignore[misc] +from .subworkflow_version import subworkflow_version # type: ignore[misc] + class SubworkflowLint(ComponentLint): """ @@ -26,18 +35,18 @@ class SubworkflowLint(ComponentLint): repository or in any nf-core pipeline directory """ - # Import lint functions - from .main_nf import main_nf # type: ignore[misc] - from .meta_yml import meta_yml # type: ignore[misc] - from .subworkflow_changes import subworkflow_changes # type: ignore[misc] - from .subworkflow_tests import subworkflow_tests # type: ignore[misc] - from .subworkflow_todos import subworkflow_todos # type: ignore[misc] - from .subworkflow_version import subworkflow_version # type: ignore[misc] + main_nf = main_nf + meta_yml = meta_yml + subworkflow_changes = subworkflow_changes + subworkflow_tests = subworkflow_tests + subworkflow_todos = subworkflow_todos + subworkflow_version = subworkflow_version def __init__( self, - dir, + directory, fail_warned=False, + fix=False, remote_url=None, branch=None, no_pull=False, @@ -46,8 +55,9 @@ def __init__( ): super().__init__( component_type="subworkflows", - dir=dir, + directory=directory, fail_warned=fail_warned, + fix=fix, remote_url=remote_url, branch=branch, no_pull=no_pull, @@ -122,9 +132,9 @@ def lint( remote_subworkflows = self.all_remote_components if self.repo_type == "modules": - 
log.info(f"Linting modules repo: [magenta]'{self.dir}'") + log.info(f"Linting modules repo: [magenta]'{self.directory}'") else: - log.info(f"Linting pipeline: [magenta]'{self.dir}'") + log.info(f"Linting pipeline: [magenta]'{self.directory}'") if subworkflow: log.info(f"Linting subworkflow: [magenta]'{subworkflow}'") @@ -207,6 +217,10 @@ def lint_subworkflow(self, swf, progress_bar, registry, local=False): # Otherwise run all the lint tests else: + # Update meta.yml file if requested + if self.fix: + self.update_meta_yml_file(swf) + if self.repo_type == "pipeline" and self.modules_json: # Set correct sha version = self.modules_json.get_subworkflow_version(swf.component_name, swf.repo_url, swf.org) @@ -223,3 +237,56 @@ def lint_subworkflow(self, swf, progress_bar, registry, local=False): self.failed += warned self.failed += [LintResult(swf, *s) for s in swf.failed] + + def update_meta_yml_file(self, swf): + """ + Update the meta.yml file with the correct inputs and outputs + """ + yaml = ruamel.yaml.YAML() + yaml.preserve_quotes = True + yaml.indent(mapping=2, sequence=2, offset=0) + + # Read meta.yml + with open(swf.meta_yml) as fh: + meta_yaml = yaml.load(fh) + meta_yaml_corrected = meta_yaml.copy() + # Obtain inputs and outputs from main.nf + swf.get_inputs_from_main_nf() + swf.get_outputs_from_main_nf() + + # Compare inputs and add them if missing + if "input" in meta_yaml: + # Delete inputs from meta.yml which are not present in main.nf + meta_yaml_corrected["input"] = [ + input for input in meta_yaml["input"] if list(input.keys())[0] in swf.inputs + ] + # Obtain inputs from main.nf missing in meta.yml + inputs_correct = [ + list(input.keys())[0] for input in meta_yaml_corrected["input"] if list(input.keys())[0] in swf.inputs + ] + inputs_missing = [input for input in swf.inputs if input not in inputs_correct] + # Add missing inputs to meta.yml + for missing_input in inputs_missing: + meta_yaml_corrected["input"].append({missing_input: {"description": ""}}) + + if "output" in meta_yaml: + # Delete outputs from meta.yml which are not present in main.nf + meta_yaml_corrected["output"] = [ + output for output in meta_yaml["output"] if list(output.keys())[0] in swf.outputs + ] + # Obtain output from main.nf missing in meta.yml + outputs_correct = [ + list(output.keys())[0] + for output in meta_yaml_corrected["output"] + if list(output.keys())[0] in swf.outputs + ] + outputs_missing = [output for output in swf.outputs if output not in outputs_correct] + # Add missing inputs to meta.yml + for missing_output in outputs_missing: + meta_yaml_corrected["output"].append({missing_output: {"description": ""}}) + + # Write corrected meta.yml to file + with open(swf.meta_yml, "w") as fh: + log.info(f"Updating {swf.meta_yml}") + yaml.dump(meta_yaml_corrected, fh) + run_prettier_on_file(fh.name) diff --git a/nf_core/subworkflows/lint/main_nf.py b/nf_core/subworkflows/lint/main_nf.py index c73559502..3ad3f3486 100644 --- a/nf_core/subworkflows/lint/main_nf.py +++ b/nf_core/subworkflows/lint/main_nf.py @@ -4,12 +4,14 @@ import logging import re -from typing import List +from typing import List, Tuple + +from nf_core.components.nfcore_component import NFCoreComponent log = logging.getLogger(__name__) -def main_nf(_, subworkflow): +def main_nf(_, subworkflow: NFCoreComponent) -> Tuple[List[str], List[str]]: """ Lint a ``main.nf`` subworkflow file @@ -25,12 +27,13 @@ def main_nf(_, subworkflow): * The subworkflow emits a software version """ - inputs = [] - outputs = [] + inputs: List[str] = [] + outputs: 
List[str] = [] # Read the lines directly from the subworkflow - lines = None - if lines is None: + lines: List[str] = [] + + if len(lines) == 0: try: # Check whether file exists and load it with open(subworkflow.main_nf) as fh: @@ -38,7 +41,7 @@ def main_nf(_, subworkflow): subworkflow.passed.append(("main_nf_exists", "Subworkflow file exists", subworkflow.main_nf)) except FileNotFoundError: subworkflow.failed.append(("main_nf_exists", "Subworkflow file does not exist", subworkflow.main_nf)) - return + return inputs, outputs # Go through subworkflow main.nf file and switch state according to current section # Perform section-specific linting @@ -199,7 +202,7 @@ def check_subworkflow_section(self, lines: List[str]) -> List[str]: return includes -def check_workflow_section(self, lines): +def check_workflow_section(self, lines: List[str]) -> None: """Lint the workflow definition of a subworkflow before Specifically checks that the name is all capital letters diff --git a/nf_core/subworkflows/lint/meta_yml.py b/nf_core/subworkflows/lint/meta_yml.py index 24e75eddb..be282bc45 100644 --- a/nf_core/subworkflows/lint/meta_yml.py +++ b/nf_core/subworkflows/lint/meta_yml.py @@ -1,4 +1,5 @@ import json +import logging from pathlib import Path import jsonschema.validators @@ -6,6 +7,8 @@ import nf_core.components.components_utils +log = logging.getLogger(__name__) + def meta_yml(subworkflow_lint_object, subworkflow): """ @@ -65,6 +68,8 @@ def meta_yml(subworkflow_lint_object, subworkflow): subworkflow.passed.append(("meta_input", f"`{input}` specified", subworkflow.meta_yml)) else: subworkflow.failed.append(("meta_input", f"`{input}` missing in `meta.yml`", subworkflow.meta_yml)) + else: + log.debug(f"No inputs specified in subworkflow `main.nf`: {subworkflow.component_name}") if "output" in meta_yaml: meta_output = [list(x.keys())[0] for x in meta_yaml["output"]] @@ -75,6 +80,8 @@ def meta_yml(subworkflow_lint_object, subworkflow): subworkflow.failed.append( ("meta_output", f"`{output}` missing in `meta.yml`", subworkflow.meta_yml) ) + else: + log.debug(f"No outputs specified in subworkflow `main.nf`: {subworkflow.component_name}") # confirm that the name matches the process name in main.nf if meta_yaml["name"].upper() == subworkflow.workflow_name: diff --git a/nf_core/subworkflows/lint/subworkflow_tests.py b/nf_core/subworkflows/lint/subworkflow_tests.py index cfae2d553..7ca825f04 100644 --- a/nf_core/subworkflows/lint/subworkflow_tests.py +++ b/nf_core/subworkflows/lint/subworkflow_tests.py @@ -4,6 +4,7 @@ import json import logging +import re from pathlib import Path import yaml @@ -18,7 +19,7 @@ def subworkflow_tests(_, subworkflow: NFCoreComponent): Lint the tests of a subworkflow in ``nf-core/modules`` It verifies that the test directory exists - and contains a ``main.nf.test`` a ``main.nf.test.snap`` and ``tags.yml``. 
+ and contains a ``main.nf.test`` and a ``main.nf.test.snap`` Additionally, checks that all included components in test ``main.nf`` are specified in ``test.yml`` """ @@ -26,30 +27,68 @@ def subworkflow_tests(_, subworkflow: NFCoreComponent): repo_dir = subworkflow.component_dir.parts[ : subworkflow.component_dir.parts.index(subworkflow.component_name.split("/")[0]) ][-1] - test_dir = Path(subworkflow.base_dir, "tests", "subworkflows", repo_dir, subworkflow.component_name) - pytest_main_nf = Path(test_dir, "main.nf") + pytest_dir = Path( + subworkflow.base_dir, + "tests", + "subworkflows", + repo_dir, + subworkflow.component_name, + ) + pytest_main_nf = Path(pytest_dir, "main.nf") is_pytest = pytest_main_nf.is_file() log.debug(f"{pytest_main_nf} is pytest: {is_pytest}") if subworkflow.nftest_testdir.is_dir(): - subworkflow.passed.append(("test_dir_exists", "nf-test test directory exists", subworkflow.nftest_testdir)) + subworkflow.passed.append( + ( + "test_dir_exists", + "nf-test test directory exists", + subworkflow.nftest_testdir, + ) + ) else: if is_pytest: - subworkflow.warned.append(("test_dir_exists", "nf-test directory is missing", subworkflow.nftest_testdir)) + subworkflow.warned.append( + ( + "test_dir_exists", + "nf-test directory is missing", + subworkflow.nftest_testdir, + ) + ) else: - subworkflow.failed.append(("test_dir_exists", "nf-test directory is missing", subworkflow.nftest_testdir)) + subworkflow.failed.append( + ( + "test_dir_exists", + "nf-test directory is missing", + subworkflow.nftest_testdir, + ) + ) return # Lint the test main.nf file if subworkflow.nftest_main_nf.is_file(): - subworkflow.passed.append(("test_main_nf_exists", "test `main.nf.test` exists", subworkflow.nftest_main_nf)) + subworkflow.passed.append( + ( + "test_main_nf_exists", + "test `main.nf.test` exists", + subworkflow.nftest_main_nf, + ) + ) else: if is_pytest: subworkflow.warned.append( - ("test_main_nf_exists", "test `main.nf.test` does not exist", subworkflow.nftest_main_nf) + ( + "test_main_nf_exists", + "test `main.nf.test` does not exist", + subworkflow.nftest_main_nf, + ) ) else: subworkflow.failed.append( - ("test_main_nf_exists", "test `main.nf.test` does not exist", subworkflow.nftest_main_nf) + ( + "test_main_nf_exists", + "test `main.nf.test` does not exist", + subworkflow.nftest_main_nf, + ) ) if subworkflow.nftest_main_nf.is_file(): @@ -58,7 +97,13 @@ def subworkflow_tests(_, subworkflow: NFCoreComponent): if "snapshot(" in fh.read(): snap_file = subworkflow.nftest_testdir / "main.nf.test.snap" if snap_file.is_file(): - subworkflow.passed.append(("test_snapshot_exists", "test `main.nf.test.snap` exists", snap_file)) + subworkflow.passed.append( + ( + "test_snapshot_exists", + "test `main.nf.test.snap` exists", + snap_file, + ) + ) # Validate no empty files with open(snap_file) as snap_fh: try: @@ -140,19 +185,29 @@ def subworkflow_tests(_, subworkflow: NFCoreComponent): ) else: subworkflow.failed.append( - ("test_snapshot_exists", "test `main.nf.test.snap` does not exist", snap_file) + ( + "test_snapshot_exists", + "test `main.nf.test.snap` does not exist", + snap_file, + ) ) # Verify that tags are correct. 
main_nf_tags = subworkflow._get_main_nf_tags(subworkflow.nftest_main_nf) + not_alphabet = re.compile(r"[^a-zA-Z]") + org_alp = not_alphabet.sub("", subworkflow.org) + org_alphabet = org_alp if org_alp != "" else "nfcore" required_tags = [ "subworkflows", f"subworkflows/{subworkflow.component_name}", - "subworkflows_nfcore", + f"subworkflows_{org_alphabet}", ] included_components = [] - if subworkflow.main_nf.is_file(): + if subworkflow.main_nf is not None and Path(subworkflow.main_nf).is_file(): included_components = subworkflow._get_included_components(subworkflow.main_nf) - chained_components_tags = subworkflow._get_included_components_in_chained_tests(subworkflow.nftest_main_nf) + if subworkflow.nftest_main_nf is not None and subworkflow.nftest_main_nf.is_file(): + chained_components_tags = subworkflow._get_included_components_in_chained_tests( + subworkflow.nftest_main_nf + ) log.debug(f"Included components: {included_components}") log.debug(f"Required tags: {required_tags}") log.debug(f"Included components for chained nf-tests: {chained_components_tags}") @@ -161,7 +216,13 @@ def subworkflow_tests(_, subworkflow: NFCoreComponent): if tag not in main_nf_tags: missing_tags.append(tag) if len(missing_tags) == 0: - subworkflow.passed.append(("test_main_tags", "Tags adhere to guidelines", subworkflow.nftest_main_nf)) + subworkflow.passed.append( + ( + "test_main_tags", + "Tags adhere to guidelines", + subworkflow.nftest_main_nf, + ) + ) else: subworkflow.failed.append( ( @@ -187,43 +248,24 @@ def subworkflow_tests(_, subworkflow: NFCoreComponent): ) else: subworkflow.passed.append( - ("test_pytest_yml", "subworkflow with nf-test not in pytest_modules.yml", pytest_yml_path) + ( + "test_pytest_yml", + "subworkflow with nf-test not in pytest_modules.yml", + pytest_yml_path, + ) ) except FileNotFoundError: - subworkflow.warned.append(("test_pytest_yml", "Could not open pytest_modules.yml file", pytest_yml_path)) - - if subworkflow.tags_yml.is_file(): - # Check tags.yml exists and it has the correct entry - subworkflow.passed.append(("test_tags_yml_exists", "file `tags.yml` exists", subworkflow.tags_yml)) - with open(subworkflow.tags_yml) as fh: - tags_yml = yaml.safe_load(fh) - if "subworkflows/" + subworkflow.component_name in tags_yml.keys(): - subworkflow.passed.append(("test_tags_yml", "correct entry in tags.yml", subworkflow.tags_yml)) - if ( - f"subworkflows/{subworkflow.org}/{subworkflow.component_name}/**" - in tags_yml["subworkflows/" + subworkflow.component_name] - ): - subworkflow.passed.append(("test_tags_yml", "correct path in tags.yml", subworkflow.tags_yml)) - else: - subworkflow.failed.append(("test_tags_yml", "incorrect path in tags.yml", subworkflow.tags_yml)) - else: - subworkflow.failed.append( - ( - "test_tags_yml", - "incorrect entry in tags.yml, should be 'subworkflows/'", - subworkflow.tags_yml, - ) + subworkflow.warned.append( + ( + "test_pytest_yml", + "Could not open pytest_modules.yml file", + pytest_yml_path, ) - else: - if is_pytest: - subworkflow.warned.append(("test_tags_yml_exists", "file `tags.yml` does not exist", subworkflow.tags_yml)) - else: - subworkflow.failed.append(("test_tags_yml_exists", "file `tags.yml` does not exist", subworkflow.tags_yml)) + ) # Check that the old test directory does not exist if not is_pytest: - old_test_dir = Path(subworkflow.base_dir, "tests", "subworkflows", subworkflow.component_name) - if old_test_dir.is_dir(): - subworkflow.failed.append(("test_old_test_dir", "old test directory exists", old_test_dir)) + if 
pytest_dir.is_dir(): + subworkflow.failed.append(("test_old_test_dir", "old test directory exists", pytest_dir)) else: - subworkflow.passed.append(("test_old_test_dir", "old test directory does not exist", old_test_dir)) + subworkflow.passed.append(("test_old_test_dir", "old test directory does not exist", pytest_dir)) diff --git a/nf_core/subworkflows/lint/subworkflow_todos.py b/nf_core/subworkflows/lint/subworkflow_todos.py index 91f9f55b0..3417215db 100644 --- a/nf_core/subworkflows/lint/subworkflow_todos.py +++ b/nf_core/subworkflows/lint/subworkflow_todos.py @@ -1,6 +1,6 @@ import logging -from nf_core.lint.pipeline_todos import pipeline_todos +from nf_core.pipelines.lint.pipeline_todos import pipeline_todos log = logging.getLogger(__name__) diff --git a/nf_core/subworkflows/lint/subworkflow_version.py b/nf_core/subworkflows/lint/subworkflow_version.py index 5801abd88..1acb95e77 100644 --- a/nf_core/subworkflows/lint/subworkflow_version.py +++ b/nf_core/subworkflows/lint/subworkflow_version.py @@ -21,7 +21,7 @@ def subworkflow_version(subworkflow_lint_object, subworkflow): newer version of the subworkflow available. """ - modules_json_path = Path(subworkflow_lint_object.dir, "modules.json") + modules_json_path = Path(subworkflow_lint_object.directory, "modules.json") # Verify that a git_sha exists in the `modules.json` file for this module version = subworkflow_lint_object.modules_json.get_subworkflow_version( subworkflow.component_name, subworkflow.repo_url, subworkflow.org diff --git a/nf_core/subworkflows/list.py b/nf_core/subworkflows/list.py index ddf144ee0..9e84d6cbe 100644 --- a/nf_core/subworkflows/list.py +++ b/nf_core/subworkflows/list.py @@ -1,4 +1,6 @@ import logging +from pathlib import Path +from typing import Optional, Union from nf_core.components.list import ComponentList @@ -6,5 +8,12 @@ class SubworkflowList(ComponentList): - def __init__(self, pipeline_dir, remote=True, remote_url=None, branch=None, no_pull=False): + def __init__( + self, + pipeline_dir: Union[str, Path] = ".", + remote: bool = True, + remote_url: Optional[str] = None, + branch: Optional[str] = None, + no_pull: bool = False, + ) -> None: super().__init__("subworkflows", pipeline_dir, remote, remote_url, branch, no_pull) diff --git a/nf_core/subworkflows/update.py b/nf_core/subworkflows/update.py index 3cd4ad59f..9b6bf1692 100644 --- a/nf_core/subworkflows/update.py +++ b/nf_core/subworkflows/update.py @@ -15,6 +15,7 @@ def __init__( remote_url=None, branch=None, no_pull=False, + limit_output=False, ): super().__init__( pipeline_dir, @@ -29,4 +30,5 @@ def __init__( remote_url, branch, no_pull, + limit_output, ) diff --git a/nf_core/synced_repo.py b/nf_core/synced_repo.py index 4d6a3f6a4..e2a76ccae 100644 --- a/nf_core/synced_repo.py +++ b/nf_core/synced_repo.py @@ -4,20 +4,19 @@ import shutil from configparser import NoOptionError, NoSectionError from pathlib import Path -from typing import Dict +from typing import Dict, Iterable, List, Optional, Union import git from git.exc import GitCommandError +from nf_core.components.components_utils import ( + NF_CORE_MODULES_NAME, + NF_CORE_MODULES_REMOTE, +) from nf_core.utils import load_tools_config log = logging.getLogger(__name__) -# Constants for the nf-core/modules repo used throughout the module files -NF_CORE_MODULES_NAME = "nf-core" -NF_CORE_MODULES_REMOTE = "https://github.com/nf-core/modules.git" -NF_CORE_MODULES_DEFAULT_BRANCH = "master" - class RemoteProgressbar(git.RemoteProgress): """ @@ -51,9 +50,14 @@ def update(self, op_code, cur_count, 
max_count=None, message=""): """ if not self.progress_bar.tasks[self.tid].started: self.progress_bar.start_task(self.tid) - self.progress_bar.update( - self.tid, total=max_count, completed=cur_count, state=f"{cur_count / max_count * 100:.1f}%" - ) + if cur_count is not None and max_count is not None: + cur_count = float(cur_count) + max_count = float(max_count) + state = f"{cur_count / max_count * 100:.1f}%" + else: + state = "Unknown" + + self.progress_bar.update(self.tid, total=max_count, completed=cur_count, state=state) class SyncedRepo: @@ -116,6 +120,8 @@ def __init__(self, remote_url=None, branch=None, no_pull=False, hide_progress=Fa remote_url = NF_CORE_MODULES_REMOTE self.remote_url = remote_url + self.fullname = None + self.local_repo_dir = None self.repo = None # TODO: SyncedRepo doesn't have this method and both the ModulesRepo and @@ -123,21 +129,31 @@ def __init__(self, remote_url=None, branch=None, no_pull=False, hide_progress=Fa # fixing. self.setup_local_repo(remote_url, branch, hide_progress) - config_fn, repo_config = load_tools_config(self.local_repo_dir) - try: - self.repo_path = repo_config["org_path"] - except KeyError: - raise UserWarning(f"'org_path' key not present in {config_fn.name}") + if self.local_repo_dir is None: + raise ValueError("Repository not initialized") + else: + config_fn, repo_config = load_tools_config(self.local_repo_dir) + if config_fn is not None and repo_config is not None: + try: + self.repo_path = repo_config.org_path + except KeyError: + raise UserWarning(f"'org_path' key not present in {config_fn.name}") + + # Verify that the repo seems to be correctly configured + if self.repo_path != NF_CORE_MODULES_NAME or self.branch: + self.verify_branch() + + # Convenience variable + self.modules_dir = Path(self.local_repo_dir, "modules", self.repo_path) + self.subworkflows_dir = Path(self.local_repo_dir, "subworkflows", self.repo_path) - # Verify that the repo seems to be correctly configured - if self.repo_path != NF_CORE_MODULES_NAME or self.branch: - self.verify_branch() + self.avail_module_names = None - # Convenience variable - self.modules_dir = os.path.join(self.local_repo_dir, "modules", self.repo_path) - self.subworkflows_dir = os.path.join(self.local_repo_dir, "subworkflows", self.repo_path) + def __repr__(self) -> str: + return f"SyncedRepo({self.remote_url}, {self.branch})" - self.avail_module_names = None + def setup_local_repo(self, remote_url, branch, hide_progress): + pass def verify_sha(self, prompt, sha): """ @@ -258,7 +274,7 @@ def component_exists(self, component_name, component_type, checkout=True, commit """ return component_name in self.get_avail_components(component_type, checkout=checkout, commit=commit) - def get_component_dir(self, component_name, component_type): + def get_component_dir(self, component_name: str, component_type: str) -> Path: """ Returns the file path of a module/subworkflow directory in the repo. Does not verify that the path exists. 
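
The `get_component_dir` changes around this point convert the path handling from `os.path.join` to `pathlib.Path` and reject unknown component types explicitly. A minimal sketch of the same pattern, with assumed parameter names (`repo_dir` and `org` stand in for the repo attributes used in the real method):

```python
from pathlib import Path


def get_component_dir(repo_dir: Path, org: str, component_name: str, component_type: str) -> Path:
    """Build the path to a module/subworkflow; existence is not checked."""
    if component_type not in ("modules", "subworkflows"):
        # Fail fast on typos rather than returning a bogus path
        raise ValueError(f"Invalid component type: {component_type}")
    return repo_dir / component_type / org / component_name
```

Returning `Path` objects keeps later calls (`.is_dir()`, `/` joins) consistent with the rest of the refactor.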
@@ -269,11 +285,15 @@ def get_component_dir(self, component_name, component_type): component_path (str): The path of the module/subworkflow in the local copy of the repository """ if component_type == "modules": - return os.path.join(self.modules_dir, component_name) + return Path(self.modules_dir, component_name) elif component_type == "subworkflows": - return os.path.join(self.subworkflows_dir, component_name) + return Path(self.subworkflows_dir, component_name) + else: + raise ValueError(f"Invalid component type: {component_type}") - def install_component(self, component_name, install_dir, commit, component_type): + def install_component( + self, component_name: str, install_dir: Union[str, Path], commit: str, component_type: str + ) -> bool: """ Install the module/subworkflow files into a pipeline at the given commit @@ -281,6 +301,7 @@ def install_component(self, component_name, install_dir, commit, component_type) component_name (str): The name of the module/subworkflow install_dir (str): The path where the module/subworkflow should be installed commit (str): The git SHA for the version of the module/subworkflow to be installed + component_type (str): Either 'modules' or 'subworkflows' Returns: (bool): Whether the operation was successful or not @@ -332,6 +353,8 @@ def component_files_identical(self, component_name, base_path, commit, component return files_identical def ensure_git_user_config(self, default_name: str, default_email: str) -> None: + if self.repo is None: + raise ValueError("Repository not initialized") try: with self.repo.config_reader() as git_config: user_name = git_config.get_value("user", "name", default=None) @@ -346,7 +369,9 @@ def ensure_git_user_config(self, default_name: str, default_email: str) -> None: if not user_email: git_config.set_value("user", "email", default_email) - def get_component_git_log(self, component_name, component_type, depth=None): + def get_component_git_log( + self, component_name: Union[str, Path], component_type: str, depth: Optional[int] = None + ) -> Iterable[Dict[str, str]]: """ Fetches the commit history the of requested module/subworkflow since a given date. 
The default value is not arbitrary - it is the last time the structure of the nf-core/modules repository was had an @@ -358,33 +383,43 @@ def get_component_git_log(self, component_name, component_type, depth=None): Returns: ( dict ): Iterator of commit SHAs and associated (truncated) message """ + if self.repo is None: + raise ValueError("Repository not initialized") self.checkout_branch() - component_path = os.path.join(component_type, self.repo_path, component_name) - commits_new = self.repo.iter_commits(max_count=depth, paths=component_path) - commits_new = [ - {"git_sha": commit.hexsha, "trunc_message": commit.message.partition("\n")[0]} for commit in commits_new - ] - commits_old = [] + component_path = Path(component_type, self.repo_path, component_name) + + commits_new_iter = self.repo.iter_commits(max_count=depth, paths=component_path) + commits_old_iter = [] if component_type == "modules": # Grab commits also from previous modules structure - component_path = os.path.join("modules", component_name) - commits_old = self.repo.iter_commits(max_count=depth, paths=component_path) - commits_old = [ - {"git_sha": commit.hexsha, "trunc_message": commit.message.partition("\n")[0]} for commit in commits_old - ] + old_component_path = Path("modules", component_name) + commits_old_iter = self.repo.iter_commits(max_count=depth, paths=old_component_path) + + commits_old = [{"git_sha": commit.hexsha, "trunc_message": commit.message} for commit in commits_old_iter] + commits_new = [{"git_sha": commit.hexsha, "trunc_message": commit.message} for commit in commits_new_iter] commits = iter(commits_new + commits_old) + return commits def get_latest_component_version(self, component_name, component_type): """ Returns the latest commit in the repository """ - return list(self.get_component_git_log(component_name, component_type, depth=1))[0]["git_sha"] + try: + git_logs = list(self.get_component_git_log(component_name, component_type, depth=1)) + if not git_logs: + return None + return git_logs[0]["git_sha"] + except Exception as e: + log.debug(f"Could not get latest version of {component_name}: {e}") + return None def sha_exists_on_branch(self, sha): """ Verifies that a given commit sha exists on the branch """ + if self.repo is None: + raise ValueError("Repository not initialized") self.checkout_branch() return sha in (commit.hexsha for commit in self.repo.iter_commits()) @@ -399,16 +434,20 @@ def get_commit_info(self, sha): Raises: LookupError: If the search for the commit fails """ + if self.repo is None: + raise ValueError("Repository not initialized") self.checkout_branch() for commit in self.repo.iter_commits(): if commit.hexsha == sha: - message = commit.message.partition("\n")[0] + message = commit.message.splitlines()[0] date_obj = commit.committed_datetime date = str(date_obj.date()) return message, date raise LookupError(f"Commit '{sha}' not found in the '{self.remote_url}'") - def get_avail_components(self, component_type, checkout=True, commit=None): + def get_avail_components( + self, component_type: str, checkout: bool = True, commit: Optional[str] = None + ) -> List[str]: """ Gets the names of the modules/subworkflows in the repository. 
They are detected by checking which directories have a 'main.nf' file @@ -427,9 +466,7 @@ def get_avail_components(self, component_type, checkout=True, commit=None): directory = self.subworkflows_dir # Module/Subworkflow directories are characterized by having a 'main.nf' file avail_component_names = [ - os.path.relpath(dirpath, start=directory) - for dirpath, _, file_names in os.walk(directory) - if "main.nf" in file_names + str(Path(dirpath).relative_to(directory)) for dirpath, _, files in os.walk(directory) if "main.nf" in files ] return avail_component_names diff --git a/nf_core/utils.py b/nf_core/utils.py index 5b31f48f4..068da22de 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -19,16 +19,18 @@ import time from contextlib import contextmanager from pathlib import Path -from typing import Generator, Tuple, Union +from typing import Any, Callable, Dict, Generator, List, Optional, Tuple, Union import git -import prompt_toolkit +import prompt_toolkit.styles import questionary -import requests +import requests.auth import requests_cache import rich +import rich.markup import yaml from packaging.version import Version +from pydantic import BaseModel, ValidationError, field_validator from rich.live import Live from rich.spinner import Spinner @@ -36,6 +38,15 @@ log = logging.getLogger(__name__) +# ASCII nf-core logo +nfcore_logo = [ + r"[green] ,--.[grey39]/[green],-.", + r"[blue] ___ __ __ __ ___ [green]/,-._.--~\ ", + r"[blue] |\ | |__ __ / ` / \ |__) |__ [yellow] } {", + r"[blue] | \| | \__, \__/ | \ |___ [green]\`-._,-`-,", + r"[green] `._,._,'", +] + # Custom style for questionary nfcore_question_style = prompt_toolkit.styles.Style( [ @@ -55,11 +66,11 @@ ] ) -NFCORE_CACHE_DIR = os.path.join( - os.environ.get("XDG_CACHE_HOME", os.path.join(os.getenv("HOME") or "", ".cache")), +NFCORE_CACHE_DIR = Path( + os.environ.get("XDG_CACHE_HOME", Path(os.getenv("HOME") or "", ".cache")), "nfcore", ) -NFCORE_DIR = os.path.join(os.environ.get("XDG_CONFIG_HOME", os.path.join(os.getenv("HOME") or "", ".config")), "nfcore") +NFCORE_DIR = Path(os.environ.get("XDG_CONFIG_HOME", os.path.join(os.getenv("HOME") or "", ".config")), "nfcore") def fetch_remote_version(source_url): @@ -124,59 +135,79 @@ class Pipeline: schema_obj (obj): A :class:`PipelineSchema` object """ - def __init__(self, wf_path): + def __init__(self, wf_path: Path) -> None: """Initialise pipeline object""" - self.conda_config = {} - self.conda_package_info = {} - self.nf_config = {} - self.files = [] - self.git_sha = None - self.minNextflowVersion = None - self.wf_path = wf_path - self.pipeline_name = None - self.pipeline_prefix = None - self.schema_obj = None + self.conda_config: Dict = {} + self.conda_package_info: Dict = {} + self.nf_config: Dict = {} + self.files: List[Path] = [] + self.git_sha: Optional[str] = None + self.minNextflowVersion: Optional[str] = None + self.wf_path = Path(wf_path) + self.pipeline_name: Optional[str] = None + self.pipeline_prefix: Optional[str] = None + self.schema_obj: Optional[Dict] = None try: repo = git.Repo(self.wf_path) self.git_sha = repo.head.object.hexsha - except Exception: - log.debug(f"Could not find git hash for pipeline: {self.wf_path}") + except Exception as e: + log.debug(f"Could not find git hash for pipeline: {self.wf_path}. 
{e}") # Overwrite if we have the last commit from the PR - otherwise we get a merge commit hash if os.environ.get("GITHUB_PR_COMMIT", "") != "": self.git_sha = os.environ["GITHUB_PR_COMMIT"] - def _load(self): + def __repr__(self) -> str: + return f"" + + def _load(self) -> bool: """Run core load functions""" - self._list_files() - self._load_pipeline_config() - self._load_conda_environment() - def _list_files(self): + return self.load_pipeline_config() and self._load_conda_environment() + + def _load_conda_environment(self) -> bool: + """Try to load the pipeline environment.yml file, if it exists""" + try: + with open(Path(self.wf_path, "environment.yml")) as fh: + self.conda_config = yaml.safe_load(fh) + return True + except FileNotFoundError: + log.debug("No conda `environment.yml` file found.") + return False + + def _fp(self, fn: Union[str, Path]) -> Path: + """Convenience function to get full path to a file in the pipeline""" + return Path(self.wf_path, fn) + + def list_files(self) -> List[Path]: """Get a list of all files in the pipeline""" + files = [] try: # First, try to get the list of files using git git_ls_files = subprocess.check_output(["git", "ls-files"], cwd=self.wf_path).splitlines() - self.files = [] for fn in git_ls_files: full_fn = Path(self.wf_path) / fn.decode("utf-8") if full_fn.is_file(): - self.files.append(full_fn) + files.append(full_fn) else: log.debug(f"`git ls-files` returned '{full_fn}' but could not open it!") - except subprocess.CalledProcessError as e: + except subprocess.CalledProcessError: # Failed, so probably not initialised as a git repository - just a list of all files - log.debug(f"Couldn't call 'git ls-files': {e}") - self.files = [] - for subdir, _, files in os.walk(self.wf_path): - for fn in files: - self.files.append(Path(subdir) / fn) + files = [] + for file_path in self.wf_path.rglob("*"): + if file_path.is_file(): + # Append the file path to the list + files.append(file_path) + if len(files) == 0: + log.debug(f"No files found in pipeline: {self.wf_path}") + + return files - def _load_pipeline_config(self): + def load_pipeline_config(self) -> bool: """Get the nextflow config for this pipeline - Once loaded, set a few convienence reference class attributes + Once loaded, set a few convenience reference class attributes """ self.nf_config = fetch_wf_config(self.wf_path) @@ -185,18 +216,8 @@ def _load_pipeline_config(self): nextflow_version_match = re.search(r"[0-9\.]+(-edge)?", self.nf_config.get("manifest.nextflowVersion", "")) if nextflow_version_match: self.minNextflowVersion = nextflow_version_match.group(0) - - def _load_conda_environment(self): - """Try to load the pipeline environment.yml file, if it exists""" - try: - with open(os.path.join(self.wf_path, "environment.yml")) as fh: - self.conda_config = yaml.safe_load(fh) - except FileNotFoundError: - log.debug("No conda `environment.yml` file found.") - - def _fp(self, fn): - """Convenience function to get full path to a file in the pipeline""" - return os.path.join(self.wf_path, fn) + return True + return False def is_pipeline_directory(wf_path): @@ -220,7 +241,7 @@ def is_pipeline_directory(wf_path): raise UserWarning(warning) -def fetch_wf_config(wf_path: str, cache_config: bool = True) -> dict: +def fetch_wf_config(wf_path: Path, cache_config: bool = True) -> dict: """Uses Nextflow to retrieve the the configuration variables from a Nextflow workflow. 
@@ -254,7 +275,7 @@ def fetch_wf_config(wf_path: str, cache_config: bool = True) -> dict: concat_hash = "" for fn in ["nextflow.config", "main.nf"]: try: - with open(Path(wf_path, fn), "rb") as fh: + with open(wf_path / fn, "rb") as fh: concat_hash += hashlib.sha256(fh.read()).hexdigest() except FileNotFoundError: pass @@ -337,17 +358,17 @@ def run_cmd(executable: str, cmd: str) -> Union[Tuple[bytes, bytes], None]: ) -def setup_nfcore_dir(): +def setup_nfcore_dir() -> bool: """Creates a directory for files that need to be kept between sessions Currently only used for keeping local copies of modules repos """ - if not os.path.exists(NFCORE_DIR): - os.makedirs(NFCORE_DIR) - return True + if not NFCORE_DIR.exists(): + NFCORE_DIR.mkdir(parents=True) + return True -def setup_requests_cachedir() -> dict: +def setup_requests_cachedir() -> Dict[str, Union[Path, datetime.timedelta, str]]: """Sets up local caching for faster remote HTTP requests. Caching directory will be set up in the user's home directory under @@ -356,10 +377,10 @@ def setup_requests_cachedir() -> dict: Uses requests_cache monkey patching. Also returns the config dict so that we can use the same setup with a Session. """ - pyversion = ".".join(str(v) for v in sys.version_info[0:3]) - cachedir = setup_nfcore_cachedir(f"cache_{pyversion}") - config = { - "cache_name": os.path.join(cachedir, "github_info"), + pyversion: str = ".".join(str(v) for v in sys.version_info[0:3]) + cachedir: Path = setup_nfcore_cachedir(f"cache_{pyversion}") + config: Dict[str, Union[Path, datetime.timedelta, str]] = { + "cache_name": Path(cachedir, "github_info"), "expire_after": datetime.timedelta(hours=1), "backend": "sqlite", } @@ -382,7 +403,7 @@ def setup_nfcore_cachedir(cache_fn: Union[str, Path]) -> Path: return cachedir -def wait_cli_function(poll_func, refresh_per_second=20): +def wait_cli_function(poll_func: Callable[[], bool], refresh_per_second: int = 20) -> None: """ Display a command-line spinner while calling a function repeatedly. @@ -406,7 +427,7 @@ def wait_cli_function(poll_func, refresh_per_second=20): raise AssertionError("Cancelled!") -def poll_nfcore_web_api(api_url, post_data=None): +def poll_nfcore_web_api(api_url: str, post_data: Optional[Dict] = None) -> Dict: """ Poll the nf-core website API @@ -427,7 +448,10 @@ def poll_nfcore_web_api(api_url, post_data=None): raise AssertionError(f"Could not connect to URL: {api_url}") else: if response.status_code != 200 and response.status_code != 301: - log.debug(f"Response content:\n{response.content}") + response_content = response.content + if isinstance(response_content, bytes): + response_content = response_content.decode() + log.debug(f"Response content:\n{response_content}") raise AssertionError( f"Could not access remote API results: {api_url} (HTML {response.status_code} Error)" ) @@ -439,7 +463,10 @@ def poll_nfcore_web_api(api_url, post_data=None): if "status" not in web_response: raise AssertionError() except (json.decoder.JSONDecodeError, AssertionError, TypeError): - log.debug(f"Response content:\n{response.content}") + response_content = response.content + if isinstance(response_content, bytes): + response_content = response_content.decode() + log.debug(f"Response content:\n{response_content}") raise AssertionError( f"nf-core website API results response not recognised: {api_url}\n " "See verbose log for full response" @@ -455,14 +482,14 @@ class GitHubAPISession(requests_cache.CachedSession): such as automatically setting up GitHub authentication if we can. 
""" - def __init__(self): # pylint: disable=super-init-not-called - self.auth_mode = None - self.return_ok = [200, 201] - self.return_retry = [403] - self.return_unauthorised = [401] - self.has_init = False + def __init__(self) -> None: + self.auth_mode: Optional[str] = None + self.return_ok: List[int] = [200, 201] + self.return_retry: List[int] = [403] + self.return_unauthorised: List[int] = [401] + self.has_init: bool = False - def lazy_init(self): + def lazy_init(self) -> None: """ Initialise the object. @@ -504,8 +531,9 @@ def __call__(self, r): self.auth_mode = f"gh CLI config: {gh_cli_config['github.com']['user']}" except Exception: ex_type, ex_value, _ = sys.exc_info() - output = rich.markup.escape(f"{ex_type.__name__}: {ex_value}") - log.debug(f"Couldn't auto-auth with GitHub CLI auth from '{gh_cli_config_fn}': [red]{output}") + if ex_type is not None: + output = rich.markup.escape(f"{ex_type.__name__}: {ex_value}") + log.debug(f"Couldn't auto-auth with GitHub CLI auth from '{gh_cli_config_fn}': [red]{output}") # Default auth if we have a GitHub Token (eg. GitHub Actions CI) if os.environ.get("GITHUB_TOKEN") is not None and self.auth is None: @@ -569,7 +597,7 @@ def request_retry(self, url, post_data=None): """ Try to fetch a URL, keep retrying if we get a certain return code. - Used in nf-core sync code because we get 403 errors: too many simultaneous requests + Used in nf-core pipelines sync code because we get 403 errors: too many simultaneous requests See https://github.com/nf-core/tools/issues/911 """ if not self.has_init: @@ -626,7 +654,7 @@ def anaconda_package(dep, dep_channels=None): """ if dep_channels is None: - dep_channels = ["conda-forge", "bioconda", "defaults"] + dep_channels = ["conda-forge", "bioconda"] # Check if each dependency is the latest available version if "=" in dep: @@ -784,6 +812,8 @@ def get_tag_date(tag_date): singularity_image = all_singularity[k]["image"] current_date = date docker_image_name = docker_image["image_name"].lstrip("quay.io/") + if singularity_image is None: + raise LookupError(f"Could not find singularity container for {package}") return docker_image_name, singularity_image["image_name"] except TypeError: raise LookupError(f"Could not find docker or singularity container for {package}") @@ -845,7 +875,7 @@ def prompt_remote_pipeline_name(wfs): """Prompt for the pipeline name with questionary Args: - wfs: A nf_core.list.Workflows() object, where get_remote_workflows() has been called. + wfs: A nf_core.pipelines.list.Workflows() object, where get_remote_workflows() has been called. Returns: pipeline (str): GitHub repo - username/repo @@ -879,7 +909,9 @@ def prompt_remote_pipeline_name(wfs): raise AssertionError(f"Not able to find pipeline '{pipeline}'") -def prompt_pipeline_release_branch(wf_releases, wf_branches, multiple=False): +def prompt_pipeline_release_branch( + wf_releases: List[Dict[str, Any]], wf_branches: Dict[str, Any], multiple: bool = False +) -> Tuple[Any, List[str]]: """Prompt for pipeline release / branch Args: @@ -888,18 +920,18 @@ def prompt_pipeline_release_branch(wf_releases, wf_branches, multiple=False): multiple (bool): Allow selection of multiple releases & branches (for Seqera Platform) Returns: - choice (str): Selected release / branch name + choice (questionary.Choice or bool): Selected release / branch or False if no releases / branches available """ # Prompt user for release tag, tag_set will contain all available. 
- choices = [] - tag_set = [] + choices: List[questionary.Choice] = [] + tag_set: List[str] = [] # Releases if len(wf_releases) > 0: for tag in map(lambda release: release.get("tag_name"), wf_releases): tag_display = [("fg:ansiblue", f"{tag} "), ("class:choice-default", "[release]")] choices.append(questionary.Choice(title=tag_display, value=tag)) - tag_set.append(tag) + tag_set.append(str(tag)) # Branches for branch in wf_branches.keys(): @@ -908,7 +940,7 @@ def prompt_pipeline_release_branch(wf_releases, wf_branches, multiple=False): tag_set.append(branch) if len(choices) == 0: - return False + return [], [] if multiple: return ( @@ -925,7 +957,7 @@ def prompt_pipeline_release_branch(wf_releases, wf_branches, multiple=False): class SingularityCacheFilePathValidator(questionary.Validator): """ - Validator for file path specified as --singularity-cache-index argument in nf-core download + Validator for file path specified as --singularity-cache-index argument in nf-core pipelines download """ def validate(self, value): @@ -945,7 +977,7 @@ def get_repo_releases_branches(pipeline, wfs): Args: pipeline (str): GitHub repo username/repo - wfs: A nf_core.list.Workflows() object, where get_remote_workflows() has been called. + wfs: A nf_core.pipelines.list.Workflows() object, where get_remote_workflows() has been called. Returns: wf_releases, wf_branches (tuple): Array of releases, Array of branches @@ -1017,7 +1049,74 @@ def get_repo_releases_branches(pipeline, wfs): DEPRECATED_CONFIG_PATHS = [".nf-core-lint.yml", ".nf-core-lint.yaml"] -def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Path, dict]: +class NFCoreTemplateConfig(BaseModel): + """Template configuration schema""" + + org: Optional[str] = None + """ Organisation name """ + name: Optional[str] = None + """ Pipeline name """ + description: Optional[str] = None + """ Pipeline description """ + author: Optional[str] = None + """ Pipeline author """ + version: Optional[str] = None + """ Pipeline version """ + force: Optional[bool] = True + """ Force overwrite of existing files """ + outdir: Optional[Union[str, Path]] = None + """ Output directory """ + skip_features: Optional[list] = None + """ Skip features. See https://nf-co.re/docs/nf-core-tools/pipelines/create for a list of features. """ + is_nfcore: Optional[bool] = None + """ Whether the pipeline is an nf-core pipeline. 
""" + + # convert outdir to str + @field_validator("outdir") + @classmethod + def outdir_to_str(cls, v: Optional[Union[str, Path]]) -> Optional[str]: + if v is not None: + v = str(v) + return v + + def __getitem__(self, item: str) -> Any: + if self is None: + return None + return getattr(self, item) + + def get(self, item: str, default: Any = None) -> Any: + return getattr(self, item, default) + + +LintConfigType = Optional[Dict[str, Union[List[str], List[Dict[str, List[str]]], bool]]] + + +class NFCoreYamlConfig(BaseModel): + """.nf-core.yml configuration file schema""" + + repository_type: str + """ Type of repository: pipeline or modules """ + nf_core_version: Optional[str] = None + """ Version of nf-core/tools used to create/update the pipeline""" + org_path: Optional[str] = None + """ Path to the organisation's modules repository (used for modules repo_type only) """ + lint: Optional[LintConfigType] = None + """ Pipeline linting configuration, see https://nf-co.re/docs/nf-core-tools/pipelines/lint#linting-config for examples and documentation """ + template: Optional[NFCoreTemplateConfig] = None + """ Pipeline template configuration """ + bump_version: Optional[Dict[str, bool]] = None + """ Disable bumping of the version for a module/subworkflow (when repository_type is modules). See https://nf-co.re/docs/nf-core-tools/modules/bump-versions for more information.""" + update: Optional[Dict[str, Union[str, bool, Dict[str, Union[str, Dict[str, Union[str, bool]]]]]]] = None + """ Disable updating specific modules/subworkflows (when repository_type is pipeline). See https://nf-co.re/docs/nf-core-tools/modules/update for more information.""" + + def __getitem__(self, item: str) -> Any: + return getattr(self, item) + + def get(self, item: str, default: Any = None) -> Any: + return getattr(self, item, default) + + +def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Optional[Path], Optional[NFCoreYamlConfig]]: """ Parse the nf-core.yml configuration file @@ -1031,28 +1130,66 @@ def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Path, dict]: tools_config = {} config_fn = get_first_available_path(directory, CONFIG_PATHS) - if config_fn is None: depr_path = get_first_available_path(directory, DEPRECATED_CONFIG_PATHS) if depr_path: - log.error( - f"Deprecated `{depr_path.name}` file found! The file will not be loaded. " - f"Please rename the file to `{CONFIG_PATHS[0]}`." + raise UserWarning( + f"Deprecated `{depr_path.name}` file found! Please rename the file to `{CONFIG_PATHS[0]}`." 
) else: - log.debug(f"No tools config file found: {CONFIG_PATHS[0]}") - return Path(directory, CONFIG_PATHS[0]), {} - + log.debug(f"Could not find a config file in the directory '{directory}'") + return Path(directory, CONFIG_PATHS[0]), None + if not Path(config_fn).is_file(): + raise FileNotFoundError(f"No `.nf-core.yml` file found in the directory '{directory}'") with open(config_fn) as fh: tools_config = yaml.safe_load(fh) + # If the file is empty - tools_config = tools_config or {} + if tools_config is None: + raise AssertionError(f"Config file '{config_fn}' is empty") + # Check for required fields + try: + nf_core_yaml_config = NFCoreYamlConfig(**tools_config) + except ValidationError as e: + error_message = f"Config file '{config_fn}' is invalid" + for error in e.errors(): + error_message += f"\n{error['loc'][0]}: {error['msg']}" + raise AssertionError(error_message) + + wf_config = fetch_wf_config(Path(directory)) + if nf_core_yaml_config["repository_type"] == "pipeline" and wf_config: + # Retrieve information if template from config file is empty + template = tools_config.get("template") + config_template_keys = template.keys() if template is not None else [] + if nf_core_yaml_config.template is None: + # The .nf-core.yml file did not contain template information + nf_core_yaml_config.template = NFCoreTemplateConfig( + org="nf-core", + name=wf_config["manifest.name"].strip("'\"").split("/")[-1], + description=wf_config["manifest.description"].strip("'\""), + author=wf_config["manifest.author"].strip("'\""), + version=wf_config["manifest.version"].strip("'\""), + outdir=str(directory), + is_nfcore=True, + ) + elif "prefix" in config_template_keys or "skip" in config_template_keys: + # The .nf-core.yml file contained the old prefix or skip keys + nf_core_yaml_config.template = NFCoreTemplateConfig( + org=tools_config["template"].get("prefix", tools_config["template"].get("org", "nf-core")), + name=tools_config["template"].get("name", wf_config["manifest.name"].strip("'\"").split("/")[-1]), + description=tools_config["template"].get("description", wf_config["manifest.description"].strip("'\"")), + author=tools_config["template"].get("author", wf_config["manifest.author"].strip("'\"")), + version=tools_config["template"].get("version", wf_config["manifest.version"].strip("'\"")), + outdir=tools_config["template"].get("outdir", str(directory)), + skip_features=tools_config["template"].get("skip", tools_config["template"].get("skip_features")), + is_nfcore=tools_config["template"].get("prefix", tools_config["template"].get("org")) == "nf-core", + ) log.debug("Using config file: %s", config_fn) - return config_fn, tools_config + return config_fn, nf_core_yaml_config -def determine_base_dir(directory="."): +def determine_base_dir(directory: Union[Path, str] = ".") -> Path: base_dir = start_dir = Path(directory).absolute() # Only iterate up the tree if the start dir doesn't have a config while not get_first_available_path(base_dir, CONFIG_PATHS) and base_dir != base_dir.parent: @@ -1060,10 +1197,10 @@ def determine_base_dir(directory="."): config_fn = get_first_available_path(base_dir, CONFIG_PATHS) if config_fn: break - return directory if base_dir == start_dir else base_dir + return Path(directory) if (base_dir == start_dir or str(base_dir) == base_dir.root) else base_dir -def get_first_available_path(directory, paths): +def get_first_available_path(directory: Union[Path, str], paths: List[str]) -> Union[Path, None]: for p in paths: if Path(directory, p).is_file(): return Path(directory, 
p)
diff --git a/pyproject.toml b/pyproject.toml
index 8168bd7c1..775f04c9a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,6 +5,7 @@ requires = ["setuptools>=40.6.0", "wheel"]
 [tool.pytest.ini_options]
 markers = ["datafiles: load datafiles"]
 testpaths = ["tests"]
+python_files = ["test_*.py"]
 norecursedirs = [
     ".*",
     "build",
diff --git a/pytest.ini b/pytest.ini
deleted file mode 100644
index cf3715947..000000000
--- a/pytest.ini
+++ /dev/null
@@ -1,4 +0,0 @@
-[pytest]
-testpaths =
-    tests
-python_files = test_*.py
diff --git a/requirements-dev.txt b/requirements-dev.txt
index 9fbb49c10..aab9b1e5d 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -6,8 +6,14 @@
 responses
 ruff
 Sphinx
 sphinx-rtd-theme
+textual-dev==1.6.1
 types-jsonschema
 types-Markdown
 types-PyYAML
 types-requests
 types-setuptools
+typing_extensions >=4.0.0
+pytest-asyncio
+pytest-textual-snapshot==1.0.0
+pytest-workflow>=2.0.0
+pytest>=8.0.0
diff --git a/requirements.txt b/requirements.txt
index 6b5b3ab57..f167a5580 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,22 +1,24 @@
 click
 filetype
 GitPython
+PyGithub
 jinja2
-jsonschema>=3.0
+jsonschema>=4.0
 markdown>=3.3
 packaging
 pillow
 pdiff
 pre-commit
-prompt_toolkit>=3.0.3
-pytest-workflow>=2.0.0
-pytest>=7.0.0
+prompt_toolkit<=3.0.36
+pydantic>=2.2.1
 pyyaml
-questionary>=1.8.0
+questionary>=2.0.1
 refgenie
 requests
 requests_cache
-rich-click>=1.6.1
+rich-click==1.8.*
 rich>=13.3.1
 tabulate
+textual==0.71.0
 trogon
+ruamel.yaml
diff --git a/setup.py b/setup.py
index 47137cde7..95a465530 100644
--- a/setup.py
+++ b/setup.py
@@ -2,7 +2,7 @@

 from setuptools import find_packages, setup

-version = "2.14.1"
+version = "3.0.0"

 with open("README.md") as f:
     readme = f.read()
@@ -33,7 +33,7 @@
     license="MIT",
     entry_points={
         "console_scripts": ["nf-core=nf_core.__main__:run_nf_core"],
-        "refgenie.hooks.post_update": ["nf-core-refgenie=nf_core.refgenie:update_config"],
+        "refgenie.hooks.post_update": ["nf-core-refgenie=nf_core.pipelines.refgenie:update_config"],
     },
     python_requires=">=3.8, <4",
     install_requires=required,
diff --git a/tests/components/generate_snapshot.py b/tests/components/generate_snapshot.py
index 3176569ec..a5a8eaba3 100644
--- a/tests/components/generate_snapshot.py
+++ b/tests/components/generate_snapshot.py
@@ -91,7 +91,7 @@ def test_update_snapshot_module(self):
             snap_content = json.load(fh)
             original_timestamp = snap_content["Single-End"]["timestamp"]
             # delete the timestamp in json
-            snap_content["Single-End"]["content"][0]["0"][0][1] = ""
+            snap_content["Single-End"]["timestamp"] = ""
         with open(snap_path, "w") as fh:
             json.dump(snap_content, fh)
         snap_generator = ComponentsTest(
diff --git a/tests/data/pipeline_create_template.yml b/tests/data/pipeline_create_template.yml
index 12e48e9c2..0ed534aa1 100644
--- a/tests/data/pipeline_create_template.yml
+++ b/tests/data/pipeline_create_template.yml
@@ -1 +1,6 @@
-prefix: testprefix
+name: test
+description: just for 4w3s0m3 tests
+author: Chuck Norris
+version: 1.0.0
+force: True
+org: testprefix
diff --git a/tests/data/pipeline_create_template_skip.yml b/tests/data/pipeline_create_template_skip.yml
index b69175e0b..3ab8f69ec 100644
--- a/tests/data/pipeline_create_template_skip.yml
+++ b/tests/data/pipeline_create_template_skip.yml
@@ -1,7 +1,8 @@
-prefix: testprefix
-skip:
-  - github
-  - ci
-  - github_badges
-  - igenomes
-  - nf_core_configs
+name: test
+description: just for 4w3s0m3 tests
+author: Chuck Norris
+version: 1.0.0
+force: True
+org:
diff --git a/tests/data/pipeline_create_template_skip.yml b/tests/data/pipeline_create_template_skip.yml
index b69175e0b..3ab8f69ec 100644
--- a/tests/data/pipeline_create_template_skip.yml
+++ b/tests/data/pipeline_create_template_skip.yml
@@ -1,7 +1,8 @@
-prefix: testprefix
-skip:
-  - github
-  - ci
-  - github_badges
-  - igenomes
-  - nf_core_configs
+name: test
+description: just for 4w3s0m3 tests
+author: Chuck Norris
+version: 1.0.0
+force: True
+org: testprefix
+is_nfcore: False
+skip_features: {{ all_features }}
diff --git a/tests/lint/actions_awsfulltest.py b/tests/lint/actions_awsfulltest.py
deleted file mode 100644
index bbda92a4d..000000000
--- a/tests/lint/actions_awsfulltest.py
+++ /dev/null
@@ -1,60 +0,0 @@
-import os
-
-import yaml
-
-import nf_core.lint
-
-
-def test_actions_awsfulltest_warn(self):
-    """Lint test: actions_awsfulltest - WARN"""
-    self.lint_obj._load()
-    results = self.lint_obj.actions_awsfulltest()
-    assert "`.github/workflows/awsfulltest.yml` is triggered correctly" in results["passed"]
-    assert len(results.get("failed", [])) == 0
-    assert len(results.get("ignored", [])) == 0
-
-
-def test_actions_awsfulltest_pass(self):
-    """Lint test: actions_awsfulltest - PASS"""
-
-    # Edit .github/workflows/awsfulltest.yml to use -profile test_full
-    new_pipeline = self._make_pipeline_copy()
-    with open(os.path.join(new_pipeline, ".github", "workflows", "awsfulltest.yml")) as fh:
-        awsfulltest_yml = fh.read()
-    awsfulltest_yml = awsfulltest_yml.replace("-profile test ", "-profile test_full ")
-    with open(os.path.join(new_pipeline, ".github", "workflows", "awsfulltest.yml"), "w") as fh:
-        fh.write(awsfulltest_yml)
-
-    # Make lint object
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
-    lint_obj._load()
-
-    results = lint_obj.actions_awsfulltest()
-    assert results["passed"] == [
-        "`.github/workflows/awsfulltest.yml` is triggered correctly",
-        "`.github/workflows/awsfulltest.yml` does not use `-profile test`",
-    ]
-    assert len(results.get("warned", [])) == 0
-    assert len(results.get("failed", [])) == 0
-    assert len(results.get("ignored", [])) == 0
-
-
-def test_actions_awsfulltest_fail(self):
-    """Lint test: actions_awsfulltest - FAIL"""
-
-    # Edit .github/workflows/awsfulltest.yml to use -profile test_full
-    new_pipeline = self._make_pipeline_copy()
-    with open(os.path.join(new_pipeline, ".github", "workflows", "awsfulltest.yml")) as fh:
-        awsfulltest_yml = yaml.safe_load(fh)
-    del awsfulltest_yml[True]["release"]
-    with open(os.path.join(new_pipeline, ".github", "workflows", "awsfulltest.yml"), "w") as fh:
-        yaml.dump(awsfulltest_yml, fh)
-
-    # Make lint object
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
-    lint_obj._load()
-
-    results = lint_obj.actions_awsfulltest()
-    assert results["failed"] == ["`.github/workflows/awsfulltest.yml` is not triggered correctly"]
-    assert "`.github/workflows/awsfulltest.yml` does not use `-profile test`" in results["passed"]
-    assert len(results.get("ignored", [])) == 0
diff --git a/tests/lint/actions_awstest.py b/tests/lint/actions_awstest.py
deleted file mode 100644
index 7bfa6052f..000000000
--- a/tests/lint/actions_awstest.py
+++ /dev/null
@@ -1,37 +0,0 @@
-import os
-
-import yaml
-
-import nf_core.lint
-
-
-def test_actions_awstest_pass(self):
-    """Lint test: actions_awstest - PASS"""
-    self.lint_obj._load()
-    results = self.lint_obj.actions_awstest()
-    assert results["passed"] == ["'.github/workflows/awstest.yml' is triggered correctly"]
-    assert len(results.get("warned", [])) == 0
-    assert len(results.get("failed", [])) == 0
-    assert len(results.get("ignored", [])) == 0
-
-
-def test_actions_awstest_fail(self):
-    """Lint test: actions_awsfulltest - FAIL"""
-
-    # Edit .github/workflows/awsfulltest.yml to use -profile test_full
-    new_pipeline = self._make_pipeline_copy()
-    with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml")) as fh:
-        awstest_yml = yaml.safe_load(fh)
-    awstest_yml[True]["push"] = ["master"]
-    with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml"), "w") as fh:
-        yaml.dump(awstest_yml, fh)
-
-    # Make lint object
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
-    lint_obj._load()
-
-    results = lint_obj.actions_awstest()
-    assert results["failed"] == ["'.github/workflows/awstest.yml' is not triggered correctly"]
-    assert len(results.get("warned", [])) == 0
-    assert len(results.get("passed", [])) == 0
-    assert len(results.get("ignored", [])) == 0
diff --git a/tests/lint/actions_ci.py b/tests/lint/actions_ci.py
deleted file mode 100644
index 8734b2f78..000000000
--- a/tests/lint/actions_ci.py
+++ /dev/null
@@ -1,49 +0,0 @@
-import os
-
-import yaml
-
-import nf_core.lint
-
-
-def test_actions_ci_pass(self):
-    """Lint test: actions_ci - PASS"""
-    self.lint_obj._load()
-    results = self.lint_obj.actions_ci()
-    assert results["passed"] == [
-        "'.github/workflows/ci.yml' is triggered on expected events",
-        "'.github/workflows/ci.yml' checks minimum NF version",
-    ]
-    assert len(results.get("warned", [])) == 0
-    assert len(results.get("failed", [])) == 0
-    assert len(results.get("ignored", [])) == 0
-
-
-def test_actions_ci_fail_wrong_nf(self):
-    """Lint test: actions_ci - FAIL - wrong minimum version of Nextflow tested"""
-    self.lint_obj._load()
-    self.lint_obj.minNextflowVersion = "1.2.3"
-    results = self.lint_obj.actions_ci()
-    assert results["failed"] == ["Minimum pipeline NF version '1.2.3' is not tested in '.github/workflows/ci.yml'"]
-
-
-def test_actions_ci_fail_wrong_trigger(self):
-    """Lint test: actions_actions_ci - FAIL - workflow triggered incorrectly, NF ver not checked at all"""
-
-    # Edit .github/workflows/actions_ci.yml to mess stuff up!
-    new_pipeline = self._make_pipeline_copy()
-    with open(os.path.join(new_pipeline, ".github", "workflows", "ci.yml")) as fh:
-        ci_yml = yaml.safe_load(fh)
-    ci_yml[True]["push"] = ["dev", "patch"]
-    ci_yml["jobs"]["test"]["strategy"]["matrix"] = {"nxf_versionnn": ["foo", ""]}
-    with open(os.path.join(new_pipeline, ".github", "workflows", "ci.yml"), "w") as fh:
-        yaml.dump(ci_yml, fh)
-
-    # Make lint object
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
-    lint_obj._load()
-
-    results = lint_obj.actions_ci()
-    assert results["failed"] == [
-        "'.github/workflows/ci.yml' is not triggered on expected events",
-        "'.github/workflows/ci.yml' does not check minimum NF version",
-    ]
diff --git a/tests/lint/actions_schema_validation.py b/tests/lint/actions_schema_validation.py
deleted file mode 100644
index e202b3b1c..000000000
--- a/tests/lint/actions_schema_validation.py
+++ /dev/null
@@ -1,66 +0,0 @@
-from pathlib import Path
-
-import yaml
-
-import nf_core.lint
-
-
-def test_actions_schema_validation_missing_jobs(self):
-    """Missing 'jobs' field should result in failure"""
-    new_pipeline = self._make_pipeline_copy()
-
-    awstest_yml_path = Path(new_pipeline) / ".github" / "workflows" / "awstest.yml"
-    with open(awstest_yml_path) as fh:
-        awstest_yml = yaml.safe_load(fh)
-    awstest_yml.pop("jobs")
-    with open(awstest_yml_path, "w") as fh:
-        yaml.dump(awstest_yml, fh)
-
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
-    lint_obj._load()
-
-    results = lint_obj.actions_schema_validation()
-
-    assert "Workflow validation failed for awstest.yml: 'jobs' is a required property" in results["failed"][0]
-
-
-def test_actions_schema_validation_missing_on(self):
-    """Missing 'on' field should result in failure"""
-    new_pipeline = self._make_pipeline_copy()
-
-    awstest_yml_path = Path(new_pipeline) / ".github" / "workflows" / "awstest.yml"
-    with open(awstest_yml_path) as fh:
-        awstest_yml = yaml.safe_load(fh)
-    awstest_yml.pop(True)
-    with open(awstest_yml_path, "w") as fh:
-        yaml.dump(awstest_yml, fh)
-
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
-    lint_obj._load()
-
-    results = lint_obj.actions_schema_validation()
-
-    assert results["failed"][0] == "Missing 'on' keyword in awstest.yml"
-    assert "Workflow validation failed for awstest.yml: 'on' is a required property" in results["failed"][1]
-
-
-def test_actions_schema_validation_fails_for_additional_property(self):
-    """Missing 'jobs' field should result in failure"""
-    new_pipeline = self._make_pipeline_copy()
-
-    awstest_yml_path = Path(new_pipeline) / ".github" / "workflows" / "awstest.yml"
-    with open(awstest_yml_path) as fh:
-        awstest_yml = yaml.safe_load(fh)
-    awstest_yml["not_jobs"] = awstest_yml["jobs"]
-    with open(awstest_yml_path, "w") as fh:
-        yaml.dump(awstest_yml, fh)
-
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
-    lint_obj._load()
-
-    results = lint_obj.actions_schema_validation()
-
-    assert (
-        "Workflow validation failed for awstest.yml: Additional properties are not allowed ('not_jobs' was unexpected)"
-        in results["failed"][0]
-    )
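The `awstest_yml[True]`, `ci_yml[True]` and `awstest_yml.pop(True)` indexing in the deleted tests above looks odd but is deliberate: PyYAML follows YAML 1.1, in which the unquoted GitHub Actions trigger key `on:` is resolved to the boolean `True`, not the string `"on"`. A small self-contained demonstration:

    # Why the tests index workflow YAML with the key True: PyYAML (YAML 1.1)
    # parses the bare `on:` key of a GitHub Actions workflow as a boolean.
    import yaml

    workflow = yaml.safe_load("on:\n  push:\n    branches: [master]\njobs: {}\n")
    assert True in workflow  # the `on:` block lands under the key True
    assert "on" not in workflow
    print(workflow[True]["push"]["branches"])  # ['master']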
diff --git a/tests/lint/configs.py b/tests/lint/configs.py
deleted file mode 100644
index b50a1393a..000000000
--- a/tests/lint/configs.py
+++ /dev/null
@@ -1,89 +0,0 @@
-from pathlib import Path
-
-import yaml
-
-import nf_core.create
-import nf_core.lint
-
-
-def test_withname_in_modules_config(self):
-    """Tests finding withName in modules.config passes linting."""
-
-    new_pipeline = self._make_pipeline_copy()
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
-    lint_obj._load()
-    result = lint_obj.modules_config()
-    assert len(result["failed"]) == 0
-    assert any(
-        ["`FASTQC` found in `conf/modules.config` and Nextflow scripts." in passed for passed in result["passed"]]
-    )
-
-
-def test_superfluous_withname_in_modules_config_fails(self):
-    """Tests finding withName in modules.config fails linting."""
-    new_pipeline = self._make_pipeline_copy()
-    # Add withName to modules.config
-    modules_config = Path(new_pipeline) / "conf" / "modules.config"
-    with open(modules_config, "a") as f:
-        f.write("\nwithName: 'BPIPE' {\n cache = false \n}")
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline, hide_progress=False)
-    lint_obj._load()
-    result = lint_obj.modules_config()
-    assert len(result["failed"]) == 1
-    assert result["failed"][0].startswith("`conf/modules.config` contains `withName:BPIPE`")
-
-
-def test_ignore_modules_config(self):
-    """Tests ignoring the modules.config passes linting."""
-    new_pipeline = self._make_pipeline_copy()
-    # ignore modules.config in linting
-    with open(Path(new_pipeline) / ".nf-core.yml") as f:
-        content = yaml.safe_load(f)
-    old_content = content.copy()
-    content["lint"] = {"modules_config": False}
-    with open(Path(new_pipeline) / ".nf-core.yml", "w") as f:
-        yaml.dump(content, f)
-    Path(new_pipeline, "conf", "modules.config").unlink()
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
-    lint_obj._load()
-    result = lint_obj.modules_config()
-    assert len(result["ignored"]) == 1
-    assert result["ignored"][0].startswith("`conf/modules.config` not found, but it is ignored.")
-    # cleanup
-    with open(Path(new_pipeline) / ".nf-core.yml", "w") as f:
-        yaml.dump(old_content, f)
-
-
-def test_superfluous_withname_in_base_config_fails(self):
-    """Tests finding withName in base.config fails linting."""
-    new_pipeline = self._make_pipeline_copy()
-    # Add withName to base.config
-    base_config = Path(new_pipeline) / "conf" / "base.config"
-    with open(base_config, "a") as f:
-        f.write("\nwithName:CUSTOM_DUMPSOFTWAREVERSIONS {\n cache = false \n}")
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
-    lint_obj._load()
-    result = lint_obj.base_config()
-    assert len(result["failed"]) == 1
-    assert result["failed"][0].startswith("`conf/base.config` contains `withName:CUSTOM_DUMPSOFTWAREVERSIONS`")
-
-
-def test_ignore_base_config(self):
-    """Tests ignoring the base.config passes linting."""
-    new_pipeline = self._make_pipeline_copy()
-    # ignore base.config in linting
-    with open(Path(new_pipeline) / ".nf-core.yml") as f:
-        content = yaml.safe_load(f)
-    old_content = content.copy()
-    content["lint"] = {"base_config": False}
-    with open(Path(new_pipeline) / ".nf-core.yml", "w") as f:
-        yaml.dump(content, f)
-    Path(new_pipeline, "conf", "base.config").unlink()
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
-    lint_obj._load()
-    result = lint_obj.base_config()
-    assert len(result["ignored"]) == 1
-    assert result["ignored"][0].startswith("`conf/base.config` not found, but it is ignored.")
-    # cleanup
-    with open(Path(new_pipeline) / ".nf-core.yml", "w") as f:
-        yaml.dump(old_content, f)
diff --git a/tests/lint/files_exist.py b/tests/lint/files_exist.py
deleted file mode 100644
index 508152289..000000000
--- a/tests/lint/files_exist.py
+++ /dev/null
@@ -1,81 +0,0 @@
-import os
-from pathlib import Path
-
-import nf_core.lint
-
-
-def test_files_exist_missing_config(self):
-    """Lint test: critical files missing FAIL"""
-    new_pipeline = self._make_pipeline_copy()
-
-    Path(new_pipeline, "CHANGELOG.md").unlink()
-
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
-    lint_obj._load()
-    lint_obj.nf_config["manifest.name"] = "nf-core/testpipeline"
-
-    results = lint_obj.files_exist()
-    assert results["failed"] == ["File not found: `CHANGELOG.md`"]
-
-
-def test_files_exist_missing_main(self):
-    """Check if missing main issues warning"""
-    new_pipeline = self._make_pipeline_copy()
-
-    Path(new_pipeline, "main.nf").unlink()
-
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
-    lint_obj._load()
-
-    results = lint_obj.files_exist()
-    assert "File not found: `main.nf`" in results["warned"]
-
-
-def test_files_exist_depreciated_file(self):
-    """Check whether depreciated file issues warning"""
-    new_pipeline = self._make_pipeline_copy()
-
-    nf = Path(new_pipeline, "parameters.settings.json")
-    os.system(f"touch {nf}")
-
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
-    lint_obj._load()
-
-    results = lint_obj.files_exist()
-    assert results["failed"] == ["File must be removed: `parameters.settings.json`"]
-
-
-def test_files_exist_pass(self):
-    """Lint check should pass if all files are there"""
-
-    new_pipeline = self._make_pipeline_copy()
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
-    lint_obj._load()
-
-    results = lint_obj.files_exist()
-    assert results["failed"] == []
-
-
-def test_files_exist_pass_conditional(self):
-    new_pipeline = self._make_pipeline_copy()
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
-    lint_obj._load()
-    lint_obj.nf_config["plugins"] = []
-    lib_dir = Path(new_pipeline, "lib")
-    lib_dir.mkdir()
-    (lib_dir / "nfcore_external_java_deps.jar").touch()
-    results = lint_obj.files_exist()
-    assert results["failed"] == []
-    assert results["ignored"] == []
-
-
-def test_files_exist_fail_conditional(self):
-    new_pipeline = self._make_pipeline_copy()
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
-    lint_obj._load()
-    lib_dir = Path(new_pipeline, "lib")
-    lib_dir.mkdir()
-    (lib_dir / "nfcore_external_java_deps.jar").touch()
-    results = lint_obj.files_exist()
-    assert results["failed"] == ["File must be removed: `lib/nfcore_external_java_deps.jar`"]
-    assert results["ignored"] == []
diff --git a/tests/lint/files_unchanged.py b/tests/lint/files_unchanged.py
deleted file mode 100644
index 601f09b9d..000000000
--- a/tests/lint/files_unchanged.py
+++ /dev/null
@@ -1,26 +0,0 @@
-from pathlib import Path
-
-import nf_core.lint
-
-
-def test_files_unchanged_pass(self):
-    self.lint_obj._load()
-    results = self.lint_obj.files_unchanged()
-    assert len(results.get("warned", [])) == 0
-    assert len(results.get("failed", [])) == 0
-    assert len(results.get("ignored", [])) == 0
-    assert not results.get("could_fix", True)
-
-
-def test_files_unchanged_fail(self):
-    failing_file = Path(".github", "CONTRIBUTING.md")
-    new_pipeline = self._make_pipeline_copy()
-    with open(Path(new_pipeline, failing_file), "a") as fh:
-        fh.write("THIS SHOULD NOT BE HERE")
-
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
-    lint_obj._load()
-    results = lint_obj.files_unchanged()
-    assert len(results["failed"]) > 0
-    assert str(failing_file) in results["failed"][0]
-    assert results["could_fix"]
diff --git a/tests/lint/merge_markers.py b/tests/lint/merge_markers.py
deleted file mode 100644
index 64a62e25c..000000000
--- a/tests/lint/merge_markers.py
+++ /dev/null
@@ -1,22 +0,0 @@
-import os
-
-import nf_core.lint
-
-
-def test_merge_markers_found(self):
-    """Missing 'jobs' field should result in failure"""
-    new_pipeline = self._make_pipeline_copy()
-
-    with open(os.path.join(new_pipeline, "main.nf")) as fh:
-        main_nf_content = fh.read()
-    main_nf_content = ">>>>>>>\n" + main_nf_content
-    with open(os.path.join(new_pipeline, "main.nf"), "w") as fh:
-        fh.write(main_nf_content)
-
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
-    lint_obj._load()
-
-    results = lint_obj.merge_markers()
-    assert len(results["failed"]) > 0
-    assert len(results["passed"]) == 0
-    assert "Merge marker '>>>>>>>' in " in results["failed"][0]
diff --git a/tests/lint/modules_json.py b/tests/lint/modules_json.py
deleted file mode 100644
index f025daa7f..000000000
--- a/tests/lint/modules_json.py
+++ /dev/null
@@ -1,6 +0,0 @@
-def test_modules_json_pass(self):
-    self.lint_obj._load()
-    results = self.lint_obj.modules_json()
-    assert len(results.get("warned", [])) == 0
-    assert len(results.get("failed", [])) == 0
-    assert len(results.get("passed", [])) > 0
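The merge_markers test above only exercises the lint check from the outside, by prepending a conflict marker to main.nf. A minimal sketch of the underlying idea, scanning a file for leftover git conflict markers; this assumes nothing about the real implementation in nf_core:

    # Sketch: find leftover git merge conflict markers in a file.
    # Illustrative helper, not the actual nf_core lint code.
    from pathlib import Path

    MARKERS = ("<<<<<<<", ">>>>>>>")


    def find_merge_markers(path: Path) -> list[int]:
        """Return 1-based line numbers that start with a conflict marker."""
        return [
            i
            for i, line in enumerate(path.read_text().splitlines(), start=1)
            if line.startswith(MARKERS)
        ]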
diff --git a/tests/lint/multiqc_config.py b/tests/lint/multiqc_config.py
deleted file mode 100644
index 463d5e765..000000000
--- a/tests/lint/multiqc_config.py
+++ /dev/null
@@ -1,129 +0,0 @@
-from pathlib import Path
-
-import yaml
-
-import nf_core.lint
-
-
-def test_multiqc_config_exists(self):
-    """Test that linting fails if the multiqc_config.yml file is missing"""
-    # Delete the file
-    new_pipeline = self._make_pipeline_copy()
-    Path(Path(new_pipeline, "assets", "multiqc_config.yml")).unlink()
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
-    lint_obj._load()
-    result = lint_obj.multiqc_config()
-    assert result["failed"] == ["`assets/multiqc_config.yml` not found."]
-
-
-def test_multiqc_config_ignore(self):
-    """Test that linting succeeds if the multiqc_config.yml file is missing but ignored"""
-    # Delete the file
-    new_pipeline = self._make_pipeline_copy()
-    Path(Path(new_pipeline, "assets", "multiqc_config.yml")).unlink()
-    with open(Path(new_pipeline, ".nf-core.yml")) as f:
-        content = yaml.safe_load(f)
-    old_content = content.copy()
-    content["lint"] = {"multiqc_config": False}
-    with open(Path(new_pipeline, ".nf-core.yml"), "w") as f:
-        yaml.dump(content, f)
-
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
-    lint_obj._load()
-    result = lint_obj.multiqc_config()
-    assert result["ignored"] == ["`assets/multiqc_config.yml` not found, but it is ignored."]
-
-    # cleanup
-    with open(Path(new_pipeline, ".nf-core.yml"), "w") as f:
-        yaml.dump(old_content, f)
-
-
-def test_multiqc_config_missing_report_section_order(self):
-    """Test that linting fails if the multiqc_config.yml file is missing the report_section_order"""
-    new_pipeline = self._make_pipeline_copy()
-    with open(Path(new_pipeline, "assets", "multiqc_config.yml")) as fh:
-        mqc_yml = yaml.safe_load(fh)
-    mqc_yml_tmp = mqc_yml.copy()
-    mqc_yml.pop("report_section_order")
-    with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh:
-        yaml.safe_dump(mqc_yml, fh)
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
-    lint_obj._load()
-    result = lint_obj.multiqc_config()
-    # Reset the file
-    with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh:
-        yaml.safe_dump(mqc_yml_tmp, fh)
-    assert result["failed"] == ["`assets/multiqc_config.yml` does not contain `report_section_order`"]
-
-
-def test_multiqc_incorrect_export_plots(self):
-    """Test that linting fails if the multiqc_config.yml file has an incorrect value for export_plots"""
-    new_pipeline = self._make_pipeline_copy()
-    with open(Path(new_pipeline, "assets", "multiqc_config.yml")) as fh:
-        mqc_yml = yaml.safe_load(fh)
-    mqc_yml_tmp = mqc_yml.copy()
-    mqc_yml["export_plots"] = False
-    with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh:
-        yaml.safe_dump(mqc_yml, fh)
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
-    lint_obj._load()
-    result = lint_obj.multiqc_config()
-    # Reset the file
-    with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh:
-        yaml.safe_dump(mqc_yml_tmp, fh)
-    assert result["failed"] == ["`assets/multiqc_config.yml` does not contain 'export_plots: true'."]
-
-
-def test_multiqc_config_report_comment_fail(self):
-    """Test that linting fails if the multiqc_config.yml file has an incorrect report_comment"""
-    new_pipeline = self._make_pipeline_copy()
-    with open(Path(new_pipeline, "assets", "multiqc_config.yml")) as fh:
-        mqc_yml = yaml.safe_load(fh)
-    mqc_yml_tmp = mqc_yml.copy()
-    mqc_yml["report_comment"] = "This is a test"
-    with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh:
-        yaml.safe_dump(mqc_yml, fh)
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
-    lint_obj._load()
-    result = lint_obj.multiqc_config()
-    # Reset the file
-    with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh:
-        yaml.safe_dump(mqc_yml_tmp, fh)
-    assert len(result["failed"]) == 1
-    assert result["failed"][0].startswith("`assets/multiqc_config.yml` does not contain a matching 'report_comment'.")
-
-
-def test_multiqc_config_report_comment_release_fail(self):
-    """Test that linting fails if the multiqc_config.yml file has an incorrect report_comment for a release version"""
-    new_pipeline = self._make_pipeline_copy()
-    with open(Path(new_pipeline, "assets", "multiqc_config.yml")) as fh:
-        mqc_yml = yaml.safe_load(fh)
-    mqc_yml_tmp = mqc_yml.copy()
-    with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh:
-        yaml.safe_dump(mqc_yml, fh)
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
-    lint_obj._load()
-    # bump version
-    lint_obj.nf_config["manifest.version"] = "1.0"
-    result = lint_obj.multiqc_config()
-    # Reset the file
-    with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh:
-        yaml.safe_dump(mqc_yml_tmp, fh)
-    assert len(result["failed"]) == 1
-    assert result["failed"][0].startswith("`assets/multiqc_config.yml` does not contain a matching 'report_comment'.")
-
-
-def test_multiqc_config_report_comment_release_succeed(self):
-    """Test that linting fails if the multiqc_config.yml file has a correct report_comment for a release version"""
-
-    import nf_core.bump_version
-
-    new_pipeline = self._make_pipeline_copy()
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
-    lint_obj._load()
-    # bump version using the bump_version function
-    nf_core.bump_version.bump_pipeline_version(lint_obj, "1.0")
-    # lint again
-    lint_obj._load()
-    result = lint_obj.multiqc_config()
-    assert "`assets/multiqc_config.yml` contains a matching 'report_comment'." in result["passed"]
diff --git a/tests/lint/nextflow_config.py b/tests/lint/nextflow_config.py
deleted file mode 100644
index 06af8c4fb..000000000
--- a/tests/lint/nextflow_config.py
+++ /dev/null
@@ -1,211 +0,0 @@
-import os
-import re
-from pathlib import Path
-
-import nf_core.create
-import nf_core.lint
-
-
-def test_nextflow_config_example_pass(self):
-    """Tests that config variable existence test works with good pipeline example"""
-    self.lint_obj._load_pipeline_config()
-    result = self.lint_obj.nextflow_config()
-    assert len(result["failed"]) == 0
-    assert len(result["warned"]) == 0
-
-
-def test_nextflow_config_bad_name_fail(self):
-    """Tests that config variable existence test fails with bad pipeline name"""
-    new_pipeline = self._make_pipeline_copy()
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
-    lint_obj._load_pipeline_config()
-
-    lint_obj.nf_config["manifest.name"] = "bad_name"
-    result = lint_obj.nextflow_config()
-    assert len(result["failed"]) > 0
-    assert len(result["warned"]) == 0
-
-
-def test_nextflow_config_dev_in_release_mode_failed(self):
-    """Tests that config variable existence test fails with dev version in release mode"""
-    new_pipeline = self._make_pipeline_copy()
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
-    lint_obj._load_pipeline_config()
-
-    lint_obj.release_mode = True
-    lint_obj.nf_config["manifest.version"] = "dev_is_bad_name"
-    result = lint_obj.nextflow_config()
-    assert len(result["failed"]) > 0
-    assert len(result["warned"]) == 0
-
-
-def test_nextflow_config_missing_test_profile_failed(self):
-    """Test failure if config file does not contain `test` profile."""
-    new_pipeline = self._make_pipeline_copy()
-    # Change the name of the test profile so there is no such profile
-    nf_conf_file = os.path.join(new_pipeline, "nextflow.config")
-    with open(nf_conf_file) as f:
-        content = f.read()
-    fail_content = re.sub(r"\btest\b", "testfail", content)
-    with open(nf_conf_file, "w") as f:
-        f.write(fail_content)
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
-    lint_obj._load_pipeline_config()
-    result = lint_obj.nextflow_config()
-    assert len(result["failed"]) > 0
-    assert len(result["warned"]) == 0
-
-
-def test_default_values_match(self):
-    """Test that the default values in nextflow.config match the default values defined in the nextflow_schema.json."""
-    new_pipeline = self._make_pipeline_copy()
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
-    lint_obj._load_pipeline_config()
-    result = lint_obj.nextflow_config()
-    assert len(result["failed"]) == 0
-    assert len(result["warned"]) == 0
-    assert "Config default value correct: params.max_cpus" in str(result["passed"])
-    assert "Config default value correct: params.validate_params" in str(result["passed"])
-
-
-def test_default_values_fail(self):
-    """Test linting fails if the default values in nextflow.config do not match the ones defined in the nextflow_schema.json."""
-    new_pipeline = self._make_pipeline_copy()
-    # Change the default value of max_cpus in nextflow.config
-    nf_conf_file = Path(new_pipeline) / "nextflow.config"
-    with open(nf_conf_file) as f:
-        content = f.read()
-    fail_content = re.sub(r"\bmax_cpus\s*=\s*16\b", "max_cpus = 0", content)
-    with open(nf_conf_file, "w") as f:
-        f.write(fail_content)
-    # Change the default value of max_memory in nextflow_schema.json
-    nf_schema_file = Path(new_pipeline) / "nextflow_schema.json"
-    with open(nf_schema_file) as f:
-        content = f.read()
-    fail_content = re.sub(r'"default": "128.GB"', '"default": "18.GB"', content)
-    with open(nf_schema_file, "w") as f:
-        f.write(fail_content)
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
-    lint_obj._load_pipeline_config()
-    result = lint_obj.nextflow_config()
-    assert len(result["failed"]) == 2
-    assert (
-        "Config default value incorrect: `params.max_cpus` is set as `16` in `nextflow_schema.json` but is `0` in `nextflow.config`."
-        in result["failed"]
-    )
-    assert (
-        "Config default value incorrect: `params.max_memory` is set as `18.GB` in `nextflow_schema.json` but is `128.GB` in `nextflow.config`."
-        in result["failed"]
-    )
-
-
-def test_catch_params_assignment_in_main_nf(self):
-    """Test linting fails if main.nf contains an assignment to a parameter from nextflow_schema.json."""
-    new_pipeline = self._make_pipeline_copy()
-    # Add parameter assignment in main.nf
-    main_nf_file = Path(new_pipeline) / "main.nf"
-    with open(main_nf_file, "a") as f:
-        f.write("params.max_time = 42")
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
-    lint_obj._load_pipeline_config()
-    result = lint_obj.nextflow_config()
-    assert len(result["failed"]) == 1
-    assert (
-        result["failed"][0]
-        == "Config default value incorrect: `params.max_time` is set as `240.h` in `nextflow_schema.json` but is `null` in `nextflow.config`."
-    )
-
-
-def test_allow_params_reference_in_main_nf(self):
-    """Test linting allows for references like `params.aligner == 'bwa'` in main.nf. The test will detect if the bug mentioned in GitHub-issue #2833 reemerges."""
-    new_pipeline = self._make_pipeline_copy()
-    # Add parameter reference in main.nf
-    main_nf_file = Path(new_pipeline) / "main.nf"
-    with open(main_nf_file, "a") as f:
-        f.write("params.max_time == 42")
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
-    lint_obj._load_pipeline_config()
-    result = lint_obj.nextflow_config()
-    assert len(result["failed"]) == 0
-
-
-def test_default_values_ignored(self):
-    """Test ignoring linting of default values."""
-    new_pipeline = self._make_pipeline_copy()
-    # Add max_cpus to the ignore list
-    nf_core_yml = Path(new_pipeline) / ".nf-core.yml"
-    with open(nf_core_yml, "w") as f:
-        f.write(
-            "repository_type: pipeline\nlint:\n nextflow_config:\n - config_defaults:\n - params.max_cpus\n"
-        )
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
-    lint_obj._load_pipeline_config()
-    lint_obj._load_lint_config()
-    result = lint_obj.nextflow_config()
-    assert len(result["failed"]) == 0
-    assert len(result["ignored"]) == 1
-    assert "Config default value correct: params.max_cpu" not in str(result["passed"])
-    assert "Config default ignored: params.max_cpus" in str(result["ignored"])
-
-
-def test_default_values_float(self):
-    """Test comparing two float values."""
-    new_pipeline = self._make_pipeline_copy()
-    # Add a float value `dummy=0.0001` to the nextflow.config below `validate_params`
-    nf_conf_file = Path(new_pipeline) / "nextflow.config"
-    with open(nf_conf_file) as f:
-        content = f.read()
-    fail_content = re.sub(
-        r"validate_params\s*=\s*true", "params.validate_params = true\ndummy = 0.000000001", content
-    )
-    with open(nf_conf_file, "w") as f:
-        f.write(fail_content)
-    # Add a float value `dummy` to the nextflow_schema.json
-    nf_schema_file = Path(new_pipeline) / "nextflow_schema.json"
-    with open(nf_schema_file) as f:
-        content = f.read()
-    fail_content = re.sub(
-        r'"validate_params": {',
-        ' "dummy": {"type": "number","default":0.000000001},\n"validate_params": {',
-        content,
-    )
-    with open(nf_schema_file, "w") as f:
-        f.write(fail_content)
-
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
-    lint_obj._load_pipeline_config()
-    result = lint_obj.nextflow_config()
-    assert len(result["failed"]) == 0
-    assert len(result["warned"]) == 0
-    assert "Config default value correct: params.dummy" in str(result["passed"])
-
-
-def test_default_values_float_fail(self):
-    """Test comparing two float values."""
-    new_pipeline = self._make_pipeline_copy()
-    # Add a float value `dummy=0.0001` to the nextflow.config below `validate_params`
-    nf_conf_file = Path(new_pipeline) / "nextflow.config"
-    with open(nf_conf_file) as f:
-        content = f.read()
-    fail_content = re.sub(
-        r"validate_params\s*=\s*true", "params.validate_params = true\ndummy = 0.000000001", content
-    )
-    with open(nf_conf_file, "w") as f:
-        f.write(fail_content)
-    # Add a float value `dummy` to the nextflow_schema.json
-    nf_schema_file = Path(new_pipeline) / "nextflow_schema.json"
-    with open(nf_schema_file) as f:
-        content = f.read()
-    fail_content = re.sub(
-        r'"validate_params": {', ' "dummy": {"type": "float","default":0.000001},\n"validate_params": {', content
-    )
-    with open(nf_schema_file, "w") as f:
-        f.write(fail_content)
-
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
-    lint_obj._load_pipeline_config()
-    result = lint_obj.nextflow_config()
-
-    assert len(result["failed"]) == 1
-    assert len(result["warned"]) == 0
-    assert "Config default value incorrect: `params.dummy" in str(result["failed"])
diff --git a/tests/lint/nfcore_yml.py b/tests/lint/nfcore_yml.py
deleted file mode 100644
index 474ccd48f..000000000
--- a/tests/lint/nfcore_yml.py
+++ /dev/null
@@ -1,53 +0,0 @@
-import re
-from pathlib import Path
-
-import nf_core.create
-import nf_core.lint
-
-
-def test_nfcore_yml_pass(self):
-    """Lint test: nfcore_yml - PASS"""
-    self.lint_obj._load()
-    results = self.lint_obj.nfcore_yml()
-
-    assert "Repository type in `.nf-core.yml` is valid" in str(results["passed"])
-    assert "nf-core version in `.nf-core.yml` is set to the latest version" in str(results["passed"])
-    assert len(results.get("warned", [])) == 0
-    assert len(results.get("failed", [])) == 0
-    assert len(results.get("ignored", [])) == 0
-
-
-def test_nfcore_yml_fail_repo_type(self):
-    """Lint test: nfcore_yml - FAIL - repository type not set"""
-    new_pipeline = self._make_pipeline_copy()
-    nf_core_yml = Path(new_pipeline) / ".nf-core.yml"
-    with open(nf_core_yml) as fh:
-        content = fh.read()
-    new_content = content.replace("repository_type: pipeline", "repository_type: foo")
-    with open(nf_core_yml, "w") as fh:
-        fh.write(new_content)
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
-    lint_obj._load()
-    results = lint_obj.nfcore_yml()
-    assert "Repository type in `.nf-core.yml` is not valid." in str(results["failed"])
-    assert len(results.get("warned", [])) == 0
-    assert len(results.get("passed", [])) >= 0
-    assert len(results.get("ignored", [])) == 0
-
-
-def test_nfcore_yml_fail_nfcore_version(self):
-    """Lint test: nfcore_yml - FAIL - nf-core version not set"""
-    new_pipeline = self._make_pipeline_copy()
-    nf_core_yml = Path(new_pipeline) / ".nf-core.yml"
-    with open(nf_core_yml) as fh:
-        content = fh.read()
-    new_content = re.sub(r"nf_core_version:.+", "nf_core_version: foo", content)
-    with open(nf_core_yml, "w") as fh:
-        fh.write(new_content)
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
-    lint_obj._load()
-    results = lint_obj.nfcore_yml()
-    assert "nf-core version in `.nf-core.yml` is not set to the latest version." in str(results["warned"])
-    assert len(results.get("failed", [])) == 0
-    assert len(results.get("passed", [])) >= 0
-    assert len(results.get("ignored", [])) == 0
diff --git a/tests/lint/template_strings.py b/tests/lint/template_strings.py
deleted file mode 100644
index ac0ae0168..000000000
--- a/tests/lint/template_strings.py
+++ /dev/null
@@ -1,54 +0,0 @@
-import subprocess
-from pathlib import Path
-
-import nf_core.create
-import nf_core.lint
-
-
-def test_template_strings(self):
-    """Tests finding a template string in a file fails linting."""
-    new_pipeline = self._make_pipeline_copy()
-    # Add template string to a file
-    txt_file = Path(new_pipeline) / "docs" / "test.txt"
-    with open(txt_file, "w") as f:
-        f.write("my {{ template_string }}")
-    subprocess.check_output(["git", "add", "docs"], cwd=new_pipeline)
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
-    lint_obj._load()
-    result = lint_obj.template_strings()
-    print(result["failed"])
-    assert len(result["failed"]) == 1
-    assert len(result["ignored"]) == 0
-
-
-def test_template_strings_ignored(self):
-    """Tests ignoring template_strings"""
-    new_pipeline = self._make_pipeline_copy()
-    # Ignore template_strings test
-    nf_core_yml = Path(new_pipeline) / ".nf-core.yml"
-    with open(nf_core_yml, "w") as f:
-        f.write("repository_type: pipeline\nlint:\n template_strings: False")
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
-    lint_obj._load()
-    lint_obj._lint_pipeline()
-    assert len(lint_obj.failed) == 0
-    assert len(lint_obj.ignored) == 1
-
-
-def test_template_strings_ignore_file(self):
-    """Tests ignoring template_strings file"""
-    new_pipeline = self._make_pipeline_copy()
-    # Add template string to a file
-    txt_file = Path(new_pipeline) / "docs" / "test.txt"
-    with open(txt_file, "w") as f:
-        f.write("my {{ template_string }}")
-    subprocess.check_output(["git", "add", "docs"], cwd=new_pipeline)
-    # Ignore template_strings test
-    nf_core_yml = Path(new_pipeline) / ".nf-core.yml"
-    with open(nf_core_yml, "w") as f:
-        f.write("repository_type: pipeline\nlint:\n template_strings:\n - docs/test.txt")
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
-    lint_obj._load()
-    result = lint_obj.template_strings()
-    assert len(result["failed"]) == 0
-    assert len(result["ignored"]) == 1
diff --git a/tests/lint/version_consistency.py b/tests/lint/version_consistency.py
deleted file mode 100644
index c68280064..000000000
--- a/tests/lint/version_consistency.py
+++ /dev/null
@@ -1,14 +0,0 @@
-import nf_core.create
-import nf_core.lint
-
-
-def test_version_consistency(self):
-    """Tests that config variable existence test fails with bad pipeline name"""
-    new_pipeline = self._make_pipeline_copy()
-    lint_obj = nf_core.lint.PipelineLint(new_pipeline)
-    lint_obj._load_pipeline_config()
-    lint_obj.nextflow_config()
-
-    result = lint_obj.version_consistency()
-    assert result["passed"] == ["Version tags are numeric and consistent between container, release tag and config."]
-    assert result["failed"] == ["manifest.version was not numeric: 1.0dev!"]
diff --git a/tests/modules/bump_versions.py b/tests/modules/bump_versions.py
deleted file mode 100644
index ce8c6dbe1..000000000
--- a/tests/modules/bump_versions.py
+++ /dev/null
@@ -1,50 +0,0 @@
-import os
-import re
-
-import pytest
-
-import nf_core.modules
-from nf_core.modules.modules_utils import ModuleExceptionError
-
-
-def test_modules_bump_versions_single_module(self):
-    """Test updating a single module"""
-    # Change the bpipe/test version to an older version
-    env_yml_path = os.path.join(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml")
-    with open(env_yml_path) as fh:
-        content = fh.read()
-    new_content = re.sub(r"bioconda::star=\d.\d.\d\D?", r"bioconda::star=2.6.1d", content)
-    with open(env_yml_path, "w") as fh:
-        fh.write(new_content)
-    version_bumper = nf_core.modules.ModuleVersionBumper(pipeline_dir=self.nfcore_modules)
-    version_bumper.bump_versions(module="bpipe/test")
-    assert len(version_bumper.failed) == 0
-
-
-def test_modules_bump_versions_all_modules(self):
-    """Test updating all modules"""
-    version_bumper = nf_core.modules.ModuleVersionBumper(pipeline_dir=self.nfcore_modules)
-    version_bumper.bump_versions(all_modules=True)
-    assert len(version_bumper.failed) == 0
-
-
-def test_modules_bump_versions_fail(self):
-    """Fail updating a module with wrong name"""
-    version_bumper = nf_core.modules.ModuleVersionBumper(pipeline_dir=self.nfcore_modules)
-    with pytest.raises(ModuleExceptionError) as excinfo:
-        version_bumper.bump_versions(module="no/module")
-    assert "Could not find the specified module:" in str(excinfo.value)
-
-
-def test_modules_bump_versions_fail_unknown_version(self):
-    """Fail because of an unknown version"""
-    # Change the bpipe/test version to an older version
-    env_yml_path = os.path.join(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml")
-    with open(env_yml_path) as fh:
-        content = fh.read()
-    new_content = re.sub(r"bioconda::bpipe=\d.\d.\d\D?", r"bioconda::bpipe=xxx", content)
-    with open(env_yml_path, "w") as fh:
-        fh.write(new_content)
-    version_bumper = nf_core.modules.ModuleVersionBumper(pipeline_dir=self.nfcore_modules)
-    version_bumper.bump_versions(module="bpipe/test")
-    assert "Conda package had unknown version" in version_bumper.failed[0][0]
diff --git a/tests/modules/create.py b/tests/modules/create.py
deleted file mode 100644
index cf39621f0..000000000
--- a/tests/modules/create.py
+++ /dev/null
@@ -1,165 +0,0 @@
-import os
-import shutil
-from pathlib import Path
-from unittest import mock
-
-import pytest
-import requests_cache
-import responses
-import yaml
-from git.repo import Repo
-
-import nf_core.modules
-from tests.utils import (
-    GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH,
-    GITLAB_URL,
-    mock_anaconda_api_calls,
-    mock_biocontainers_api_calls,
-)
-
-
-def test_modules_create_succeed(self):
-    """Succeed at creating the TrimGalore! module"""
-    with responses.RequestsMock() as rsps:
-        mock_anaconda_api_calls(rsps, "trim-galore", "0.6.7")
-        mock_biocontainers_api_calls(rsps, "trim-galore", "0.6.7")
-        module_create = nf_core.modules.ModuleCreate(
-            self.pipeline_dir, "trimgalore", "@author", "process_single", True, True, conda_name="trim-galore"
-        )
-        with requests_cache.disabled():
-            module_create.create()
-    assert os.path.exists(os.path.join(self.pipeline_dir, "modules", "local", "trimgalore.nf"))
-
-
-def test_modules_create_fail_exists(self):
-    """Fail at creating the same module twice"""
-    with responses.RequestsMock() as rsps:
-        mock_anaconda_api_calls(rsps, "trim-galore", "0.6.7")
-        mock_biocontainers_api_calls(rsps, "trim-galore", "0.6.7")
-        module_create = nf_core.modules.ModuleCreate(
-            self.pipeline_dir, "trimgalore", "@author", "process_single", False, False, conda_name="trim-galore"
-        )
-        with requests_cache.disabled():
-            module_create.create()
-        with pytest.raises(UserWarning) as excinfo:
-            with requests_cache.disabled():
-                module_create.create()
-    assert "Module file exists already" in str(excinfo.value)
-
-
-def test_modules_create_nfcore_modules(self):
-    """Create a module in nf-core/modules clone"""
-    with responses.RequestsMock() as rsps:
-        mock_anaconda_api_calls(rsps, "fastqc", "0.11.9")
-        mock_biocontainers_api_calls(rsps, "fastqc", "0.11.9")
-        module_create = nf_core.modules.ModuleCreate(
-            self.nfcore_modules, "fastqc", "@author", "process_low", False, False
-        )
-        with requests_cache.disabled():
-            module_create.create()
-    assert os.path.exists(os.path.join(self.nfcore_modules, "modules", "nf-core", "fastqc", "main.nf"))
-    assert os.path.exists(os.path.join(self.nfcore_modules, "modules", "nf-core", "fastqc", "tests", "main.nf.test"))
-
-
-def test_modules_create_nfcore_modules_subtool(self):
-    """Create a tool/subtool module in a nf-core/modules clone"""
-    with responses.RequestsMock() as rsps:
-        mock_anaconda_api_calls(rsps, "star", "2.8.10a")
-        mock_biocontainers_api_calls(rsps, "star", "2.8.10a")
-        module_create = nf_core.modules.ModuleCreate(
-            self.nfcore_modules, "star/index", "@author", "process_medium", False, False
-        )
-        with requests_cache.disabled():
-            module_create.create()
-    assert os.path.exists(os.path.join(self.nfcore_modules, "modules", "nf-core", "star", "index", "main.nf"))
-    assert os.path.exists(
-        os.path.join(self.nfcore_modules, "modules", "nf-core", "star", "index", "tests", "main.nf.test")
-    )
-
-
-@mock.patch("rich.prompt.Confirm.ask")
-def test_modules_migrate(self, mock_rich_ask):
-    """Create a module with the --migrate-pytest option to convert pytest to nf-test"""
-    pytest_dir = Path(self.nfcore_modules, "tests", "modules", "nf-core", "samtools", "sort")
-    module_dir = Path(self.nfcore_modules, "modules", "nf-core", "samtools", "sort")
-
-    # Clone modules repo with pytests
-    shutil.rmtree(self.nfcore_modules)
-    Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH)
-    with open(module_dir / "main.nf") as fh:
-        old_main_nf = fh.read()
-    with open(module_dir / "meta.yml") as fh:
-        old_meta_yml = fh.read()
-
-    # Create a module with --migrate-pytest
-    mock_rich_ask.return_value = True
-    module_create = nf_core.modules.ModuleCreate(self.nfcore_modules, "samtools/sort", migrate_pytest=True)
-    module_create.create()
-
-    with open(module_dir / "main.nf") as fh:
-        new_main_nf = fh.read()
-    with open(module_dir / "meta.yml") as fh:
-        new_meta_yml = fh.read()
-    nextflow_config = module_dir / "tests" / "nextflow.config"
-
-    # Check that old files have been copied to the new module
-    assert old_main_nf == new_main_nf
-    assert old_meta_yml == new_meta_yml
-    assert nextflow_config.is_file()
-
-    # Check that pytest folder is deleted
-    assert not pytest_dir.is_dir()
-
-    # Check that pytest_modules.yml is updated
-    with open(Path(self.nfcore_modules, "tests", "config", "pytest_modules.yml")) as fh:
-        modules_yml = yaml.safe_load(fh)
-    assert "samtools/sort" not in modules_yml.keys()
-
-
-@mock.patch("rich.prompt.Confirm.ask")
-def test_modules_migrate_no_delete(self, mock_rich_ask):
-    """Create a module with the --migrate-pytest option to convert pytest to nf-test.
-    Test that pytest directory is not deleted."""
-    pytest_dir = Path(self.nfcore_modules, "tests", "modules", "nf-core", "samtools", "sort")
-
-    # Clone modules repo with pytests
-    shutil.rmtree(self.nfcore_modules)
-    Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH)
-
-    # Create a module with --migrate-pytest
-    mock_rich_ask.return_value = False
-    module_create = nf_core.modules.ModuleCreate(self.nfcore_modules, "samtools/sort", migrate_pytest=True)
-    module_create.create()
-
-    # Check that pytest folder is not deleted
-    assert pytest_dir.is_dir()
-
-    # Check that pytest_modules.yml is updated
-    with open(Path(self.nfcore_modules, "tests", "config", "pytest_modules.yml")) as fh:
-        modules_yml = yaml.safe_load(fh)
-    assert "samtools/sort" not in modules_yml.keys()
-
-
-@mock.patch("rich.prompt.Confirm.ask")
-def test_modules_migrate_symlink(self, mock_rich_ask):
-    """Create a module with the --migrate-pytest option to convert pytest with symlinks to nf-test.
-    Test that the symlink is deleted and the file is copied."""
-
-    pytest_dir = Path(self.nfcore_modules, "tests", "modules", "nf-core", "samtools", "sort")
-    module_dir = Path(self.nfcore_modules, "modules", "nf-core", "samtools", "sort")
-
-    # Clone modules repo with pytests
-    shutil.rmtree(self.nfcore_modules)
-    Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH)
-
-    # Create a symlinked file in the pytest directory
-    symlink_file = pytest_dir / "symlink_file.txt"
-    symlink_file.symlink_to(module_dir / "main.nf")
-
-    # Create a module with --migrate-pytest
-    mock_rich_ask.return_value = True
-    module_create = nf_core.modules.ModuleCreate(self.nfcore_modules, "samtools/sort", migrate_pytest=True)
-    module_create.create()
-
-    # Check that symlink is deleted
-    assert not symlink_file.is_symlink()
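The deleted create.py tests above wrap module creation in responses.RequestsMock() so that Anaconda and Biocontainers lookups never hit the network. A rough sketch of that pattern; the URL and JSON shape are illustrative assumptions, not the exact helpers from tests/utils.py:

    # Sketch: intercept an Anaconda API lookup with the `responses` library,
    # roughly what mock_anaconda_api_calls() does for `nf-core modules create`.
    # Endpoint and payload are assumed for illustration only.
    import requests
    import responses


    @responses.activate
    def demo():
        responses.add(
            responses.GET,
            "https://api.anaconda.org/package/bioconda/trim-galore",
            json={"latest_version": "0.6.7"},
            status=200,
        )
        pkg = requests.get("https://api.anaconda.org/package/bioconda/trim-galore").json()
        assert pkg["latest_version"] == "0.6.7"


    demo()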
diff --git a/tests/modules/info.py b/tests/modules/info.py
deleted file mode 100644
index 2dbd48b24..000000000
--- a/tests/modules/info.py
+++ /dev/null
@@ -1,63 +0,0 @@
-from rich.console import Console
-
-import nf_core.modules
-
-from ..utils import GITLAB_DEFAULT_BRANCH, GITLAB_URL
-
-
-def test_modules_info_remote(self):
-    """Test getting info about a remote module"""
-    mods_info = nf_core.modules.ModuleInfo(self.pipeline_dir, "fastqc")
-    mods_info_output = mods_info.get_component_info()
-    console = Console(record=True)
-    console.print(mods_info_output)
-    output = console.export_text()
-
-    assert "Module: fastqc" in output
-    assert "Inputs" in output
-    assert "Outputs" in output
-
-
-def test_modules_info_remote_gitlab(self):
-    """Test getting info about a module in the remote gitlab repo"""
-    mods_info = nf_core.modules.ModuleInfo(
-        self.pipeline_dir, "fastqc", remote_url=GITLAB_URL, branch=GITLAB_DEFAULT_BRANCH
-    )
-    mods_info_output = mods_info.get_component_info()
-    console = Console(record=True)
-    console.print(mods_info_output)
-    output = console.export_text()
-
-    assert "Module: fastqc" in output
-    assert "Inputs" in output
-    assert "Outputs" in output
-    assert "--git-remote" in output
-
-
-def test_modules_info_local(self):
-    """Test getting info about a locally installed module"""
-    self.mods_install.install("trimgalore")
-    mods_info = nf_core.modules.ModuleInfo(self.pipeline_dir, "trimgalore")
-    mods_info_output = mods_info.get_component_info()
-    console = Console(record=True)
-    console.print(mods_info_output)
-    output = console.export_text()
-
-    assert "Module: trimgalore" in output
-    assert "Inputs" in output
-    assert "Outputs" in output
-    assert "Location" in output
-
-
-def test_modules_info_in_modules_repo(self):
-    """Test getting info about a module in the modules repo"""
-    mods_info = nf_core.modules.ModuleInfo(self.nfcore_modules, "fastqc")
-    mods_info.local = True
-    mods_info_output = mods_info.get_component_info()
-    console = Console(record=True)
-    console.print(mods_info_output)
-    output = console.export_text()
-
-    assert "Module: fastqc" in output
-    assert "Inputs" in output
-    assert "Outputs" in output
diff --git a/tests/modules/install.py b/tests/modules/install.py
deleted file mode 100644
index deca31204..000000000
--- a/tests/modules/install.py
+++ /dev/null
@@ -1,95 +0,0 @@
-import os
-
-import pytest
-
-from nf_core.modules.install import ModuleInstall
-from nf_core.modules.modules_json import ModulesJson
-
-from ..utils import (
-    GITLAB_BRANCH_ORG_PATH_BRANCH,
-    GITLAB_BRANCH_TEST_BRANCH,
-    GITLAB_REPO,
-    GITLAB_URL,
-    with_temporary_folder,
-)
-
-
-def test_modules_install_nopipeline(self):
-    """Test installing a module - no pipeline given"""
-    self.mods_install.dir = None
-    assert self.mods_install.install("foo") is False
-
-
-@with_temporary_folder
-def test_modules_install_emptypipeline(self, tmpdir):
-    """Test installing a module - empty dir given"""
-    os.mkdir(os.path.join(tmpdir, "nf-core-pipe"))
-    self.mods_install.dir = os.path.join(tmpdir, "nf-core-pipe")
-    with pytest.raises(UserWarning) as excinfo:
-        self.mods_install.install("foo")
-    assert "Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value)
-
-
-def test_modules_install_nomodule(self):
-    """Test installing a module - unrecognised module given"""
-    assert self.mods_install.install("foo") is False
-
-
-def test_modules_install_trimgalore(self):
-    """Test installing a module - TrimGalore!"""
-    assert self.mods_install.install("trimgalore") is not False
-    module_path = os.path.join(self.mods_install.dir, "modules", "nf-core", "trimgalore")
-    assert os.path.exists(module_path)
-
-
-def test_modules_install_trimgalore_twice(self):
-    """Test installing a module - TrimGalore! already there"""
-    self.mods_install.install("trimgalore")
-    assert self.mods_install.install("trimgalore") is True
-
-
-def test_modules_install_from_gitlab(self):
-    """Test installing a module from GitLab"""
-    assert self.mods_install_gitlab.install("fastqc") is True
-
-
-def test_modules_install_different_branch_fail(self):
-    """Test installing a module from a different branch"""
-    install_obj = ModuleInstall(self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH)
-    # The FastQC module does not exists in the branch-test branch
-    assert install_obj.install("fastqc") is False
-
-
-def test_modules_install_different_branch_succeed(self):
-    """Test installing a module from a different branch"""
-    install_obj = ModuleInstall(self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH)
-    # The fastp module does exists in the branch-test branch
-    assert install_obj.install("fastp") is True
-
-    # Verify that the branch entry was added correctly
-    modules_json = ModulesJson(self.pipeline_dir)
-    assert (
-        modules_json.get_component_branch(self.component_type, "fastp", GITLAB_URL, GITLAB_REPO)
-        == GITLAB_BRANCH_TEST_BRANCH
-    )
-
-
-def test_modules_install_tracking(self):
-    """Test installing a module and finding 'modules' in the installed_by section of modules.json"""
-    self.mods_install.install("trimgalore")
-
-    # Verify that the installed_by entry was added correctly
-    modules_json = ModulesJson(self.pipeline_dir)
-    mod_json = modules_json.get_modules_json()
-    assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["modules"]["nf-core"]["trimgalore"][
-        "installed_by"
-    ] == ["modules"]
-
-
-def test_modules_install_alternate_remote(self):
-    """Test installing a module from a different remote with the same organization path"""
-    install_obj = ModuleInstall(self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_ORG_PATH_BRANCH)
-    # Install fastqc from GitLab which is also installed from GitHub with the same org_path
-    with pytest.raises(Exception) as excinfo:
-        install_obj.install("fastqc")
-    assert "Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value)
diff --git a/tests/modules/lint.py b/tests/modules/lint.py
deleted file mode 100644
index 595509de4..000000000
--- a/tests/modules/lint.py
+++ /dev/null
@@ -1,710 +0,0 @@
-import json
-from pathlib import Path
-
-import pytest
-import yaml
-from git.repo import Repo
-
-import nf_core.modules
-from nf_core.modules.lint import main_nf
-from nf_core.utils import set_wd
-
-from ..utils import GITLAB_NFTEST_BRANCH, GITLAB_URL
-from .patch import BISMARK_ALIGN, CORRECT_SHA, PATCH_BRANCH, REPO_NAME, modify_main_nf
-
-
-def setup_patch(pipeline_dir: str, modify_module: bool):
-    install_obj = nf_core.modules.ModuleInstall(
-        pipeline_dir, prompt=False, force=False, remote_url=GITLAB_URL, branch=PATCH_BRANCH, sha=CORRECT_SHA
-    )
-
-    # Install the module
-    install_obj.install(BISMARK_ALIGN)
-
-    if modify_module:
-        # Modify the module
-        module_path = Path(pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN)
-        modify_main_nf(module_path / "main.nf")
-
-
-def test_modules_lint_trimgalore(self):
-    """Test linting the TrimGalore! module"""
-    self.mods_install.install("trimgalore")
-    module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir)
-    module_lint.lint(print_results=False, module="trimgalore")
-    assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}"
-    assert len(module_lint.passed) > 0
-    assert len(module_lint.warned) >= 0
-
-
-def test_modules_lint_empty(self):
-    """Test linting a pipeline with no modules installed"""
-    self.mods_remove.remove("fastqc", force=True)
-    self.mods_remove.remove("multiqc", force=True)
-    with pytest.raises(LookupError):
-        nf_core.modules.ModuleLint(dir=self.pipeline_dir)
-
-
-def test_modules_lint_new_modules(self):
-    """lint a new module"""
-    module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules)
-    module_lint.lint(print_results=False, all_modules=True)
-    assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}"
-    assert len(module_lint.passed) > 0
-    assert len(module_lint.warned) >= 0
-
-
-def test_modules_lint_no_gitlab(self):
-    """Test linting a pipeline with no modules installed"""
-    self.mods_remove.remove("fastqc", force=True)
-    self.mods_remove.remove("multiqc", force=True)
-    with pytest.raises(LookupError):
-        nf_core.modules.ModuleLint(dir=self.pipeline_dir, remote_url=GITLAB_URL)
-
-
-def test_modules_lint_gitlab_modules(self):
-    """Lint modules from a different remote"""
-    self.mods_install_gitlab.install("fastqc")
-    self.mods_install_gitlab.install("multiqc")
-    module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir, remote_url=GITLAB_URL)
-    module_lint.lint(print_results=False, all_modules=True)
-    assert len(module_lint.failed) == 2
-    assert len(module_lint.passed) > 0
-    assert len(module_lint.warned) >= 0
-
-
-def test_modules_lint_multiple_remotes(self):
-    """Lint modules from a different remote"""
-    self.mods_install_gitlab.install("multiqc")
-    module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir, remote_url=GITLAB_URL)
-    module_lint.lint(print_results=False, all_modules=True)
-    assert len(module_lint.failed) == 1
-    assert len(module_lint.passed) > 0
-    assert len(module_lint.warned) >= 0
-
-
-def test_modules_lint_registry(self):
-    """Test linting the samtools module and alternative registry"""
-    self.mods_install.install("samtools")
-    module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir, registry="public.ecr.aws")
-    module_lint.lint(print_results=False, module="samtools")
-    assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}"
-    assert len(module_lint.passed) > 0
-    assert len(module_lint.warned) >= 0
-    module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir)
-    module_lint.lint(print_results=False, module="samtools")
-    assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}"
-    assert len(module_lint.passed) > 0
-    assert len(module_lint.warned) >= 0
-
-
-def test_modules_lint_patched_modules(self):
-    """
-    Test creating a patch file and applying it to a new version of the the files
-    """
-    setup_patch(self.pipeline_dir, True)
-
-    # Create a patch file
-    patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH)
-    patch_obj.patch(BISMARK_ALIGN)
-
-    # change temporarily working directory to the pipeline directory
-    # to avoid error from try_apply_patch() during linting
-    with set_wd(self.pipeline_dir):
-        module_lint = nf_core.modules.ModuleLint(
-            dir=self.pipeline_dir, remote_url=GITLAB_URL, branch=PATCH_BRANCH, hide_progress=True
-        )
-        module_lint.lint(
-            print_results=False,
-            all_modules=True,
-        )
-
-    assert len(module_lint.failed) == 1
-    assert len(module_lint.passed) > 0
-    assert len(module_lint.warned) >= 0
-
-
-# A skeleton object with the passed/warned/failed list attrs
-# Use this in place of a ModuleLint object to test behaviour of
-# linting methods which don't need the full setup
-class MockModuleLint:
-    def __init__(self):
-        self.passed = []
-        self.warned = []
-        self.failed = []
-
-        self.main_nf = "main_nf"
-
-
-PROCESS_LABEL_GOOD = (
-    """
-    label 'process_high'
-    cpus 12
-    """,
-    1,
-    0,
-    0,
-)
-PROCESS_LABEL_NON_ALPHANUMERIC = (
-    """
-    label 'a:label:with:colons'
-    cpus 12
-    """,
-    0,
-    2,
-    0,
-)
-PROCESS_LABEL_GOOD_CONFLICTING = (
-    """
-    label 'process_high'
-    label 'process_low'
-    cpus 12
-    """,
-    0,
-    1,
-    0,
-)
-PROCESS_LABEL_GOOD_DUPLICATES = (
-    """
-    label 'process_high'
-    label 'process_high'
-    cpus 12
-    """,
-    0,
-    2,
-    0,
-)
-PROCESS_LABEL_GOOD_AND_NONSTANDARD = (
-    """
-    label 'process_high'
-    label 'process_extra_label'
-    cpus 12
-    """,
-    1,
-    1,
-    0,
-)
-PROCESS_LABEL_NONSTANDARD = (
-    """
-    label 'process_extra_label'
-    cpus 12
-    """,
-    0,
-    2,
-    0,
-)
-PROCESS_LABEL_NONSTANDARD_DUPLICATES = (
-    """
-    label process_extra_label
-    label process_extra_label
-    cpus 12
-    """,
-    0,
-    3,
-    0,
-)
-PROCESS_LABEL_NONE_FOUND = (
-    """
-    cpus 12
-    """,
-    0,
-    1,
-    0,
-)
-
-PROCESS_LABEL_TEST_CASES = [
-    PROCESS_LABEL_GOOD,
-    PROCESS_LABEL_NON_ALPHANUMERIC,
-    PROCESS_LABEL_GOOD_CONFLICTING,
-    PROCESS_LABEL_GOOD_DUPLICATES,
-    PROCESS_LABEL_GOOD_AND_NONSTANDARD,
-    PROCESS_LABEL_NONSTANDARD,
-    PROCESS_LABEL_NONSTANDARD_DUPLICATES,
-    PROCESS_LABEL_NONE_FOUND,
-]
-
-
-def test_modules_lint_check_process_labels(self):
-    for test_case in PROCESS_LABEL_TEST_CASES:
-        process, passed, warned, failed = test_case
-        mocked_ModuleLint = MockModuleLint()
-        main_nf.check_process_labels(mocked_ModuleLint, process.splitlines())
-        assert len(mocked_ModuleLint.passed) == passed
-        assert len(mocked_ModuleLint.warned) == warned
-        assert len(mocked_ModuleLint.failed) == failed
-
-
-# Test cases for linting the container definitions
-
-CONTAINER_SINGLE_GOOD = (
-    "Single-line container definition should pass",
-    """
-    container "quay.io/nf-core/gatk:4.4.0.0" //Biocontainers is missing a package
-    """,
-    2,  # passed
-    0,  # warned
-    0,  # failed
-)
-
-CONTAINER_TWO_LINKS_GOOD = (
-    "Multi-line container definition should pass",
-    """
-    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
-        'https://depot.galaxyproject.org/singularity/gatk4:4.4.0.0--py36hdfd78af_0':
-        'biocontainers/gatk4:4.4.0.0--py36hdfd78af_0' }"
-    """,
-    6,
-    0,
-    0,
-)
-
-CONTAINER_WITH_SPACE_BAD = (
-    "Space in container URL should fail",
-    """
-    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
-        'https://depot.galaxyproject.org/singularity/gatk4:4.4.0.0--py36hdfd78af_0 ':
-        'biocontainers/gatk4:4.4.0.0--py36hdfd78af_0' }"
-    """,
-    5,
-    0,
-    1,
-)
-
-CONTAINER_MULTIPLE_DBLQUOTES_BAD = (
-    "Incorrect quoting of container string should fail",
-    """
-    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
-        'https://depot.galaxyproject.org/singularity/gatk4:4.4.0.0--py36hdfd78af_0 ':
-        "biocontainers/gatk4:4.4.0.0--py36hdfd78af_0" }"
-    """,
-    4,
-    0,
-    1,
-)
-
-CONTAINER_TEST_CASES = [
-    CONTAINER_SINGLE_GOOD,
-    CONTAINER_TWO_LINKS_GOOD,
-    CONTAINER_WITH_SPACE_BAD,
-    CONTAINER_MULTIPLE_DBLQUOTES_BAD,
-]
-
-
-def test_modules_lint_check_url(self):
-    for test_case in CONTAINER_TEST_CASES:
-        test, process, passed, warned, failed = test_case
-        mocked_ModuleLint = MockModuleLint()
-        for line in process.splitlines():
-            if line.strip():
-                main_nf.check_container_link_line(mocked_ModuleLint, line, registry="quay.io")
-
-        assert (
-            len(mocked_ModuleLint.passed) == passed
-        ), f"{test}: Expected {passed} PASS, got {len(mocked_ModuleLint.passed)}."
-        assert (
-            len(mocked_ModuleLint.warned) == warned
-        ), f"{test}: Expected {warned} WARN, got {len(mocked_ModuleLint.warned)}."
-        assert (
-            len(mocked_ModuleLint.failed) == failed
-        ), f"{test}: Expected {failed} FAIL, got {len(mocked_ModuleLint.failed)}."
-
-
-def test_modules_lint_snapshot_file(self):
-    """Test linting a module with a snapshot file"""
-    module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules)
-    module_lint.lint(print_results=False, module="bpipe/test")
-    assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}"
-    assert len(module_lint.passed) > 0
-    assert len(module_lint.warned) >= 0
-
-
-def test_modules_lint_snapshot_file_missing_fail(self):
-    """Test linting a module with a snapshot file missing, which should fail"""
-    Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test.snap").unlink()
-    module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules)
-    module_lint.lint(print_results=False, module="bpipe/test")
-    Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test.snap").touch()
-    assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}"
-    assert len(module_lint.passed) > 0
-    assert len(module_lint.warned) >= 0
-    assert module_lint.failed[0].lint_test == "test_snapshot_exists"
-
-
-def test_modules_lint_snapshot_file_not_needed(self):
-    """Test linting a module which doesn't need a snapshot file by removing the snapshot keyword in the main.nf.test file"""
-    with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test")) as fh:
-        content = fh.read()
-    new_content = content.replace("snapshot(", "snap (")
-    with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test"), "w") as fh:
-        fh.write(new_content)
-    module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules)
-    module_lint.lint(print_results=False, module="bpipe/test")
-    assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}"
-    assert len(module_lint.passed) > 0
-    assert len(module_lint.warned) >= 0
-
-
-def test_modules_environment_yml_file_doesnt_exists(self):
-    """Test linting a module with an environment.yml file"""
-    Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml").rename(
-        Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml.bak")
-    )
-    module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules)
-    module_lint.lint(print_results=False, module="bpipe/test")
-    Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml.bak").rename(
-        Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml")
-    )
-    assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}"
-    assert len(module_lint.passed) > 0
-    assert len(module_lint.warned) >= 0
-    assert module_lint.failed[0].lint_test == "environment_yml_exists"
-
-
-def test_modules_environment_yml_file_sorted_correctly(self):
-    """Test linting a module with a correctly sorted environment.yml file"""
-    module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules)
-    module_lint.lint(print_results=False, module="bpipe/test")
-    assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}"
-    assert len(module_lint.passed) > 0
-    assert len(module_lint.warned) >= 0
-
-
-def test_modules_environment_yml_file_sorted_incorrectly(self):
-    """Test linting a module with an incorrectly sorted environment.yml file"""
-    with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml")) as fh:
-        yaml_content = yaml.safe_load(fh)
-    # Add a new dependency to the environment.yml file and reverse the order
-    yaml_content["dependencies"].append("z")
-    yaml_content["dependencies"].reverse()
-    yaml_content = yaml.dump(yaml_content)
-    with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml"), "w") as fh:
-        fh.write(yaml_content)
-    module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules)
-    module_lint.lint(print_results=False, module="bpipe/test")
-    # we fix the sorting on the fly, so this should pass
-    assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}"
-    assert len(module_lint.passed) > 0
-    assert len(module_lint.warned) >= 0
-
-
-def test_modules_environment_yml_file_not_array(self):
-    """Test linting a module with an incorrectly formatted environment.yml file"""
-    with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml")) as fh:
-        yaml_content = yaml.safe_load(fh)
-    yaml_content["dependencies"] = "z"
-    with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml"), "w") as fh:
-        fh.write(yaml.dump(yaml_content))
-    module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules)
-    module_lint.lint(print_results=False, module="bpipe/test")
-    assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}"
-    assert len(module_lint.passed) > 0
-    assert len(module_lint.warned) >= 0
-    assert module_lint.failed[0].lint_test == "environment_yml_valid"
-
-
-def test_modules_environment_yml_file_name_mismatch(self):
-    """Test linting a module with a different name in the environment.yml file"""
-    with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml")) as fh:
-        yaml_content = yaml.safe_load(fh)
-    yaml_content["name"] = "bpipe-test"
-    with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml"), "w") as fh:
-        fh.write(yaml.dump(yaml_content))
-    module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules)
-    module_lint.lint(print_results=False, module="bpipe/test")
-    # reset changes
-    yaml_content["name"] = "bpipe_test"
-    with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml"), "w") as fh:
-        fh.write(yaml.dump(yaml_content))
-
-    assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}"
-    assert len(module_lint.passed) > 0
-    assert len(module_lint.warned) >= 0
-    assert module_lint.failed[0].lint_test == "environment_yml_name"
-
-
-def test_modules_meta_yml_incorrect_licence_field(self):
-    """Test linting a module with an incorrect Licence field in meta.yml"""
-    with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml")) as fh:
-        meta_yml = yaml.safe_load(fh)
-    meta_yml["tools"][0]["bpipe"]["licence"] = "[MIT]"
-    with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml"), "w") as fh:
-        fh.write(yaml.dump(meta_yml))
-    module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules)
-    module_lint.lint(print_results=False, module="bpipe/test")
-
-    # reset changes
-    meta_yml["tools"][0]["bpipe"]["licence"] = ["MIT"]
-    with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml"), "w") as fh:
-        fh.write(yaml.dump(meta_yml))
-
-    assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}"
-    assert len(module_lint.passed) >= 0
-    assert len(module_lint.warned) >= 0
-    assert module_lint.failed[0].lint_test == "meta_yml_valid"
-
-
-def test_modules_meta_yml_input_mismatch(self):
-    """Test linting a module with an extra entry in input fields in meta.yml compared to module.input"""
-    with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf")) as fh:
-        main_nf = fh.read()
-    main_nf_new = main_nf.replace("path bam", "path bai")
-    with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh:
-        fh.write(main_nf_new)
-    module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules)
-    module_lint.lint(print_results=False, module="bpipe/test")
-    with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh:
-        fh.write(main_nf)
-    assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}"
-    assert len(module_lint.passed) >= 0
-    assert len(module_lint.warned) == 2
-    lint_tests = [x.lint_test for x in module_lint.warned]
-    # check that it is there twice:
-    assert lint_tests.count("meta_input_meta_only") == 1
-    assert lint_tests.count("meta_input_main_only") == 1
-
-
-def test_modules_meta_yml_output_mismatch(self):
-    """Test linting a module with an extra entry in output fields in meta.yml compared to module.output"""
-    with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf")) as fh:
-        main_nf = fh.read()
-    main_nf_new = main_nf.replace("emit: bam", "emit: bai")
-    with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh:
-        fh.write(main_nf_new)
-    module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules)
-    module_lint.lint(print_results=False, module="bpipe/test")
-    with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh:
-        fh.write(main_nf)
-    assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}"
-    assert len(module_lint.passed) >= 0
-    assert len(module_lint.warned) == 2
-    lint_tests = [x.lint_test for x in module_lint.warned]
-    # check that it is there twice:
-    assert lint_tests.count("meta_output_meta_only") == 1
-    assert lint_tests.count("meta_output_main_only") == 1
-
-
-def test_modules_meta_yml_incorrect_name(self):
-    """Test linting a module with an incorrect name in meta.yml"""
-    with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml")) as fh:
-        meta_yml = yaml.safe_load(fh)
-    meta_yml["name"] = "bpipe/test"
-
# need to make the same change to the environment.yml file - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml")) as fh: - environment_yml = yaml.safe_load(fh) - environment_yml["name"] = "bpipe/test" - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml"), "w") as fh: - fh.write(yaml.dump(meta_yml)) - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml"), "w") as fh: - fh.write(yaml.dump(environment_yml)) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - - # reset changes - meta_yml["name"] = "bpipe_test" - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml"), "w") as fh: - fh.write(yaml.dump(meta_yml)) - environment_yml["name"] = "bpipe_test" - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml"), "w") as fh: - fh.write(yaml.dump(environment_yml)) - - assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) >= 0 - assert len(module_lint.warned) >= 0 - assert module_lint.failed[0].lint_test == "meta_name" - - -def test_modules_missing_test_dir(self): - """Test linting a module with a missing test directory""" - Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests").rename( - Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests.bak") - ) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests.bak").rename( - Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests") - ) - assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) >= 0 - assert len(module_lint.warned) >= 0 - assert module_lint.failed[0].lint_test == "test_dir_exists" - - -def test_modules_missing_test_main_nf(self): - """Test linting a module with a missing test/main.nf file""" - Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test").rename( - Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test.bak") - ) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test.bak").rename( - Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test") - ) - assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) >= 0 - assert len(module_lint.warned) >= 0 - assert module_lint.failed[0].lint_test == "test_main_nf_exists" - - -def test_modules_missing_required_tag(self): - """Test linting a module with a missing required tag""" - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test")) as fh: - content = fh.read() - new_content = content.replace("modules_nfcore", "foo") - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test"), "w") as fh: - fh.write(new_content) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - with 
open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test"), "w") as fh: - fh.write(content) - assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) >= 0 - assert len(module_lint.warned) >= 0 - assert module_lint.failed[0].lint_test == "test_main_tags" - - -def test_modules_missing_tags_yml(self): - """Test linting a module with a missing tags.yml file""" - tags_path = Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml") - tags_path.rename(tags_path.parent / "tags.yml.bak") - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) >= 0 - assert len(module_lint.warned) >= 0 - assert module_lint.failed[0].lint_test == "test_tags_yml_exists" - # cleanup - Path(tags_path.parent / "tags.yml.bak").rename(tags_path.parent / "tags.yml") - - -def test_modules_incorrect_tags_yml_key(self): - """Test linting a module with an incorrect key in tags.yml file""" - tags_path = Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml") - with open(tags_path) as fh: - content = fh.read() - new_content = content.replace("bpipe/test:", "bpipe_test:") - with open(tags_path, "w") as fh: - fh.write(new_content) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=True, module="bpipe/test") - with open(tags_path, "w") as fh: - fh.write(content) - assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) >= 0 - assert len(module_lint.warned) >= 0 - assert module_lint.failed[0].lint_test == "test_tags_yml" - - -def test_modules_incorrect_tags_yml_values(self): - """Test linting a module with an incorrect path in tags.yml file""" - tags_path = Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml") - with open(tags_path) as fh: - content = fh.read() - new_content = content.replace("modules/nf-core/bpipe/test/**", "foo") - with open(tags_path, "w") as fh: - fh.write(new_content) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - with open(tags_path, "w") as fh: - fh.write(content) - assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) >= 0 - assert len(module_lint.warned) >= 0 - assert module_lint.failed[0].lint_test == "test_tags_yml" - - -def test_modules_unused_pytest_files(self): - """Test linting a nf-test module with files still present in `tests/modules/`""" - Path(self.nfcore_modules, "tests", "modules", "bpipe", "test").mkdir(parents=True, exist_ok=True) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - Path(self.nfcore_modules, "tests", "modules", "bpipe", "test").rmdir() - assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) >= 0 - assert len(module_lint.warned) >= 0 - assert module_lint.failed[0].lint_test == "test_old_test_dir" - - -def test_nftest_failing_linting(self): - """Test linting a module which includes other modules in nf-test tests. 
- Linting tests""" - # Clone modules repo with testing modules - tmp_dir = self.nfcore_modules.parent - self.nfcore_modules = Path(tmp_dir, "modules-test") - Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_NFTEST_BRANCH) - - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="kallisto/quant") - - assert len(module_lint.failed) == 4, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) >= 0 - assert len(module_lint.warned) >= 0 - assert module_lint.failed[0].lint_test == "environment_yml_valid" - assert module_lint.failed[1].lint_test == "meta_yml_valid" - assert module_lint.failed[2].lint_test == "test_main_tags" - assert "kallisto/index" in module_lint.failed[2].message - assert module_lint.failed[3].lint_test == "test_tags_yml" - - -def test_modules_absent_version(self): - """Test linting a nf-test module if the versions is absent in the snapshot file `""" - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test.snap")) as fh: - content = fh.read() - new_content = content.replace("versions", "foo") - with open( - Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test.snap"), "w" - ) as fh: - fh.write(new_content) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - with open( - Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test.snap"), "w" - ) as fh: - fh.write(content) - assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) >= 0 - assert len(module_lint.warned) >= 0 - assert module_lint.failed[0].lint_test == "test_snap_versions" - - -def test_modules_empty_file_in_snapshot(self): - """Test linting a nf-test module with an empty file sha sum in the test snapshot, which should make it fail (if it is not a stub)""" - snap_file = Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test.snap") - snap = json.load(snap_file.open()) - content = snap_file.read_text() - snap["my test"]["content"][0]["0"] = "test:md5,d41d8cd98f00b204e9800998ecf8427e" - - with open(snap_file, "w") as fh: - json.dump(snap, fh) - - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) > 0 - assert len(module_lint.warned) >= 0 - assert module_lint.failed[0].lint_test == "test_snap_md5sum" - - # reset the file - with open(snap_file, "w") as fh: - fh.write(content) - - -def test_modules_empty_file_in_stub_snapshot(self): - """Test linting a nf-test module with an empty file sha sum in the stub test snapshot, which should make it not fail""" - snap_file = Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test.snap") - snap = json.load(snap_file.open()) - content = snap_file.read_text() - snap["my_test_stub"] = {"content": [{"0": "test:md5,d41d8cd98f00b204e9800998ecf8427e", "versions": {}}]} - - with open(snap_file, "w") as fh: - json.dump(snap, fh) - - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in 
module_lint.failed]}" - assert len(module_lint.passed) > 0 - assert len(module_lint.warned) >= 0 - assert any(x.lint_test == "test_snap_md5sum" for x in module_lint.passed) - - # reset the file - with open(snap_file, "w") as fh: - fh.write(content) diff --git a/tests/modules/list.py b/tests/modules/list.py deleted file mode 100644 index 3cb00a84d..000000000 --- a/tests/modules/list.py +++ /dev/null @@ -1,134 +0,0 @@ -import json -from pathlib import Path - -import yaml -from rich.console import Console - -import nf_core.modules - -from ..utils import GITLAB_DEFAULT_BRANCH, GITLAB_URL - - -def test_modules_list_remote(self): - """Test listing available modules""" - mods_list = nf_core.modules.ModuleList(None, remote=True) - listed_mods = mods_list.list_components() - console = Console(record=True) - console.print(listed_mods) - output = console.export_text() - assert "fastqc" in output - - -def test_modules_list_remote_gitlab(self): - """Test listing the modules in the remote gitlab repo""" - mods_list = nf_core.modules.ModuleList(None, remote=True, remote_url=GITLAB_URL, branch=GITLAB_DEFAULT_BRANCH) - listed_mods = mods_list.list_components() - console = Console(record=True) - console.print(listed_mods) - output = console.export_text() - assert "fastqc" in output - - -def test_modules_list_pipeline(self): - """Test listing locally installed modules""" - mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False) - listed_mods = mods_list.list_components() - console = Console(record=True) - console.print(listed_mods) - output = console.export_text() - assert "fastqc" in output - assert "multiqc" in output - - -def test_modules_install_and_list_pipeline(self): - """Test listing locally installed modules""" - self.mods_install.install("trimgalore") - mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False) - listed_mods = mods_list.list_components() - console = Console(record=True) - console.print(listed_mods) - output = console.export_text() - assert "trimgalore" in output - - -def test_modules_install_gitlab_and_list_pipeline(self): - """Test listing locally installed modules""" - self.mods_install_gitlab.install("fastqc") - mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False) - listed_mods = mods_list.list_components() - console = Console(record=True) - console.print(listed_mods) - output = console.export_text() - assert "fastqc" in output - - -def test_modules_list_local_json(self): - """Test listing locally installed modules as JSON""" - mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False) - listed_mods = mods_list.list_components(print_json=True) - listed_mods = json.loads(listed_mods) - assert "fastqc" in listed_mods - assert "multiqc" in listed_mods - - -def test_modules_list_remote_json(self): - """Test listing available modules as JSON""" - mods_list = nf_core.modules.ModuleList(None, remote=True) - listed_mods = mods_list.list_components(print_json=True) - listed_mods = json.loads(listed_mods) - assert "fastqc" in listed_mods - assert "multiqc" in listed_mods - - -def test_modules_list_with_one_keyword(self): - """Test listing available modules with one keyword""" - mods_list = nf_core.modules.ModuleList(None, remote=True) - listed_mods = mods_list.list_components(keywords=["qc"]) - console = Console(record=True) - console.print(listed_mods) - output = console.export_text() - assert "multiqc" in output - - -def test_modules_list_with_keywords(self): - """Test listing available modules with multiple keywords""" - 
mods_list = nf_core.modules.ModuleList(None, remote=True) - listed_mods = mods_list.list_components(keywords=["fastq", "qc"]) - console = Console(record=True) - console.print(listed_mods) - output = console.export_text() - assert "fastqc" in output - - -def test_modules_list_with_unused_keyword(self): - """Test listing available modules with an unused keyword""" - mods_list = nf_core.modules.ModuleList(None, remote=True) - with self.assertLogs(level="INFO") as log: - listed_mods = mods_list.list_components(keywords=["you_will_never_find_me"]) - self.assertIn("No available", log.output[0]) - # expect empty list - assert listed_mods == "" - - -def test_modules_list_in_wrong_repo_fail(self): - """Test listing available modules in a non-pipeline repo""" - # modify repotype in .nf-core.yml - with open(Path(self.pipeline_dir, ".nf-core.yml")) as fh: - nf_core_yml = yaml.safe_load(fh) - nf_core_yml_orig = nf_core_yml.copy() - nf_core_yml["repository_type"] = "modules" - nf_core_yml["org_path"] = "nf-core" - - print(nf_core_yml) - with open(Path(self.pipeline_dir, ".nf-core.yml"), "w") as fh: - yaml.safe_dump(nf_core_yml, fh) - # expect error logged - with self.assertLogs(level="ERROR") as log: - mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False) - listed_mods = mods_list.list_components() - self.assertIn("must be run from a pipeline directory", log.output[0]) - # expect empty list - assert listed_mods == "" - # restore .nf-core.yml - with open(Path(self.pipeline_dir, ".nf-core.yml"), "w") as fh: - yaml.safe_dump(nf_core_yml_orig, fh) diff --git a/tests/modules/modules_json.py b/tests/modules/modules_json.py deleted file mode 100644 index e0100adfb..000000000 --- a/tests/modules/modules_json.py +++ /dev/null @@ -1,253 +0,0 @@ -import copy -import json -import shutil -from pathlib import Path - -from nf_core.modules.modules_json import ModulesJson -from nf_core.modules.modules_repo import ( - NF_CORE_MODULES_DEFAULT_BRANCH, - NF_CORE_MODULES_NAME, - NF_CORE_MODULES_REMOTE, - ModulesRepo, -) -from nf_core.modules.patch import ModulePatch - - -def test_get_modules_json(self): - """Checks that the get_modules_json function returns the correct result""" - mod_json_path = Path(self.pipeline_dir, "modules.json") - with open(mod_json_path) as fh: - try: - mod_json_sb = json.load(fh) - except json.JSONDecodeError as e: - raise UserWarning(f"Unable to load JSON file '{mod_json_path}' due to error {e}") - - mod_json_obj = ModulesJson(self.pipeline_dir) - mod_json = mod_json_obj.get_modules_json() - - # Check that the modules.json hasn't changed - assert mod_json == mod_json_sb - - -def test_mod_json_update(self): - """Checks whether the update function works properly""" - mod_json_obj = ModulesJson(self.pipeline_dir) - # Update the modules.json file - mod_repo_obj = ModulesRepo() - mod_json_obj.update("modules", mod_repo_obj, "MODULE_NAME", "GIT_SHA", "modules", write_file=False) - mod_json = mod_json_obj.get_modules_json() - assert "MODULE_NAME" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"] - assert "git_sha" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["MODULE_NAME"] - assert "GIT_SHA" == mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["MODULE_NAME"]["git_sha"] - assert ( - NF_CORE_MODULES_DEFAULT_BRANCH - == mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["MODULE_NAME"]["branch"] - ) - assert "modules" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["MODULE_NAME"]["installed_by"] - - -def 
test_mod_json_create(self): - """Test creating a modules.json file from scratch""" - mod_json_path = Path(self.pipeline_dir, "modules.json") - # Remove the existing modules.json file - mod_json_path.unlink() - - # Create the new modules.json file - # (There are no prompts as long as there are only nf-core modules) - ModulesJson(self.pipeline_dir).create() - - # Check that the file exists - assert (mod_json_path).exists() - - # Get the contents of the file - mod_json_obj = ModulesJson(self.pipeline_dir) - mod_json = mod_json_obj.get_modules_json() - - mods = ["fastqc", "multiqc"] - for mod in mods: - assert mod in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"] - assert "git_sha" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"][mod] - assert "branch" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"][mod] - - -def modify_main_nf(path): - """Modify a file to test patch creation""" - with open(path) as fh: - lines = fh.readlines() - # Modify $meta.id to $meta.single_end - lines[1] = ' tag "$meta.single_end"\n' - with open(path, "w") as fh: - fh.writelines(lines) - - -def test_mod_json_create_with_patch(self): - """Test creating a modules.json file from scratch when there are patched modules""" - mod_json_path = Path(self.pipeline_dir, "modules.json") - - # Modify the module - module_path = Path(self.pipeline_dir, "modules", "nf-core", "fastqc") - modify_main_nf(module_path / "main.nf") - - # Try creating a patch file - patch_obj = ModulePatch(self.pipeline_dir, NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_DEFAULT_BRANCH) - patch_obj.patch("fastqc") - - # Remove the existing modules.json file - mod_json_path.unlink() - - # Create the new modules.json file - ModulesJson(self.pipeline_dir).create() - - # Check that the file exists - assert mod_json_path.is_file() - - # Get the contents of the file - mod_json_obj = ModulesJson(self.pipeline_dir) - mod_json = mod_json_obj.get_modules_json() - - # Check that fastqc is in the file - assert "fastqc" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"] - assert "git_sha" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["fastqc"] - assert "branch" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["fastqc"] - - # Check that fastqc/main.nf maintains the changes - with open(module_path / "main.nf") as fh: - lines = fh.readlines() - assert lines[1] == ' tag "$meta.single_end"\n' - - -def test_mod_json_up_to_date(self): - """ - Checks if the modules.json file is up to date - when no changes have been made to the pipeline - """ - mod_json_obj = ModulesJson(self.pipeline_dir) - mod_json_before = mod_json_obj.get_modules_json() - mod_json_obj.check_up_to_date() - mod_json_after = mod_json_obj.get_modules_json() - - # Check that the modules.json hasn't changed - assert mod_json_before == mod_json_after - - -def test_mod_json_up_to_date_module_removed(self): - """ - Reinstall a module that has an entry in the modules.json - but is missing in the pipeline - """ - # Remove the fastqc module - fastqc_path = Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "fastqc") - shutil.rmtree(fastqc_path) - - # Check that the modules.json file is up to date, and reinstall the module - mod_json_obj = ModulesJson(self.pipeline_dir) - mod_json_obj.check_up_to_date() - - # Check that the module has been reinstalled - files = ["main.nf", "meta.yml"] - assert fastqc_path.exists() - for f in files: - assert Path(fastqc_path, f).exists() - - -def 
test_mod_json_up_to_date_reinstall_fails(self): - """ - Try reinstalling a module where the git_sha is invalid - """ - mod_json_obj = ModulesJson(self.pipeline_dir) - - # Update the fastqc module entry to an invalid git_sha - mod_json_obj.update("modules", ModulesRepo(), "fastqc", "INVALID_GIT_SHA", "modules", write_file=True) - - # Remove the fastqc module - fastqc_path = Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "fastqc") - shutil.rmtree(fastqc_path) - - # Check that the modules.json file is up to date, and remove the fastqc module entry - mod_json_obj.check_up_to_date() - mod_json = mod_json_obj.get_modules_json() - - # Check that the module has been removed from the modules.json - assert "fastqc" not in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"] - - -def test_mod_json_repo_present(self): - """Tests the repo_present function""" - mod_json_obj = ModulesJson(self.pipeline_dir) - - assert mod_json_obj.repo_present(NF_CORE_MODULES_REMOTE) is True - assert mod_json_obj.repo_present("INVALID_REPO") is False - - -def test_mod_json_module_present(self): - """Tests the module_present function""" - mod_json_obj = ModulesJson(self.pipeline_dir) - - assert mod_json_obj.module_present("fastqc", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME) is True - assert mod_json_obj.module_present("INVALID_MODULE", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME) is False - assert mod_json_obj.module_present("fastqc", "INVALID_REPO", "INVALID_DIR") is False - assert mod_json_obj.module_present("INVALID_MODULE", "INVALID_REPO", "INVALID_DIR") is False - - -def test_mod_json_get_module_version(self): - """Test the get_module_version function""" - mod_json_obj = ModulesJson(self.pipeline_dir) - mod_json = mod_json_obj.get_modules_json() - assert ( - mod_json_obj.get_module_version("fastqc", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME) - == mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["fastqc"]["git_sha"] - ) - assert mod_json_obj.get_module_version("INVALID_MODULE", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME) is None - - -def test_mod_json_dump(self): - """Tests the dump function""" - mod_json_obj = ModulesJson(self.pipeline_dir) - mod_json = mod_json_obj.get_modules_json() - # Remove the modules.json file - mod_json_path = Path(self.pipeline_dir, "modules.json") - mod_json_path.unlink() - - # Check that the dump function creates the file - mod_json_obj.dump() - assert mod_json_path.exists() - - # Check that the dump function writes the correct content - with open(mod_json_path) as f: - try: - mod_json_new = json.load(f) - except json.JSONDecodeError as e: - raise UserWarning(f"Unable to load JSON file '{mod_json_path}' due to error {e}") - assert mod_json == mod_json_new - - -def test_mod_json_with_empty_modules_value(self): - # Load module.json and remove the modules entry - mod_json_obj = ModulesJson(self.pipeline_dir) - mod_json_obj.create() # Create modules.json explicitly to get correct module sha - mod_json_orig = mod_json_obj.get_modules_json() - mod_json = copy.deepcopy(mod_json_orig) - mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"] = "" - # save the altered module.json and load it again to check if it will fix itself - mod_json_obj.modules_json = mod_json - mod_json_obj.dump() - mod_json_obj_new = ModulesJson(self.pipeline_dir) - mod_json_obj_new.check_up_to_date() - mod_json_new = mod_json_obj_new.get_modules_json() - assert mod_json_orig == mod_json_new - - -def test_mod_json_with_missing_modules_entry(self): - # Load module.json and remove the 
modules entry - mod_json_obj = ModulesJson(self.pipeline_dir) - mod_json_obj.create() # Create modules.json explicitly to get correct module sha - mod_json_orig = mod_json_obj.get_modules_json() - mod_json = copy.deepcopy(mod_json_orig) - mod_json["repos"][NF_CORE_MODULES_REMOTE].pop("modules") - # save the altered module.json and load it again to check if it will fix itself - mod_json_obj.modules_json = mod_json - mod_json_obj.dump() - mod_json_obj_new = ModulesJson(self.pipeline_dir) - mod_json_obj_new.check_up_to_date() - mod_json_new = mod_json_obj_new.get_modules_json() - assert mod_json_orig == mod_json_new diff --git a/tests/modules/patch.py b/tests/modules/patch.py deleted file mode 100644 index dc939c7ea..000000000 --- a/tests/modules/patch.py +++ /dev/null @@ -1,360 +0,0 @@ -import os -import tempfile -from pathlib import Path -from unittest import mock - -import pytest - -import nf_core.components.components_command -import nf_core.modules - -from ..utils import GITLAB_URL - -""" -Test the 'nf-core modules patch' command - -Uses a branch (patch-tester) in the GitLab nf-core/modules-test repo when -testing if the update commands works correctly with patch files -""" - -ORG_SHA = "002623ccc88a3b0cb302c7d8f13792a95354d9f2" -CORRECT_SHA = "1dff30bfca2d98eb7ac7b09269a15e822451d99f" -SUCCEED_SHA = "ba15c20c032c549d77c5773659f19c2927daf48e" -FAIL_SHA = "67b642d4471c4005220a342cad3818d5ba2b5a73" -BISMARK_ALIGN = "bismark/align" -REPO_NAME = "nf-core-test" -PATCH_BRANCH = "patch-tester" -REPO_URL = "https://gitlab.com/nf-core/modules-test.git" - - -def setup_patch(pipeline_dir, modify_module): - install_obj = nf_core.modules.ModuleInstall( - pipeline_dir, prompt=False, force=False, remote_url=GITLAB_URL, branch=PATCH_BRANCH, sha=ORG_SHA - ) - - # Install the module - install_obj.install(BISMARK_ALIGN) - - if modify_module: - # Modify the module - module_path = Path(pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) - modify_main_nf(module_path / "main.nf") - - -def modify_main_nf(path): - """Modify a file to test patch creation""" - with open(path) as fh: - lines = fh.readlines() - # We want a patch file that looks something like: - # - tuple val(meta), path(reads) - # - path index - # + tuple val(meta), path(reads), path(index) - for line_index in range(len(lines)): - if lines[line_index] == " tuple val(meta), path(reads)\n": - lines[line_index] = " tuple val(meta), path(reads), path(index)\n" - elif lines[line_index] == " path index\n": - to_pop = line_index - lines.pop(to_pop) - with open(path, "w") as fh: - fh.writelines(lines) - - -def test_create_patch_no_change(self): - """Test creating a patch when there is no change to the module""" - setup_patch(self.pipeline_dir, False) - - # Try creating a patch file - patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) - with pytest.raises(UserWarning): - patch_obj.patch(BISMARK_ALIGN) - - module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) - - # Check that no patch file has been added to the directory - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml"} - - # Check the 'modules.json' contains no patch file for the module - modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) is None - - -def test_create_patch_change(self): - """Test creating a patch when there is a change to the module""" - setup_patch(self.pipeline_dir, True) - - # Try creating a patch file - patch_obj = 
nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) - patch_obj.patch(BISMARK_ALIGN) - - module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) - - patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" - # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} - - # Check the 'modules.json' contains a patch file for the module - modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( - "modules", REPO_NAME, BISMARK_ALIGN, patch_fn - ) - - # Check that the correct lines are in the patch file - with open(module_path / patch_fn) as fh: - patch_lines = fh.readlines() - module_relpath = module_path.relative_to(self.pipeline_dir) - assert f"--- {module_relpath / 'main.nf'}\n" in patch_lines, module_relpath / "main.nf" - assert f"+++ {module_relpath / 'main.nf'}\n" in patch_lines - assert "- tuple val(meta), path(reads)\n" in patch_lines - assert "- path index\n" in patch_lines - assert "+ tuple val(meta), path(reads), path(index)\n" in patch_lines - - -def test_create_patch_try_apply_successful(self): - """ - Test creating a patch file and applying it to a new version of the the files - """ - - setup_patch(self.pipeline_dir, True) - module_relpath = Path("modules", REPO_NAME, BISMARK_ALIGN) - module_path = Path(self.pipeline_dir, module_relpath) - - # Try creating a patch file - patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) - patch_obj.patch(BISMARK_ALIGN) - - patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" - # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} - - # Check the 'modules.json' contains a patch file for the module - modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( - "modules", REPO_NAME, BISMARK_ALIGN, patch_fn - ) - - update_obj = nf_core.modules.ModuleUpdate( - self.pipeline_dir, sha=SUCCEED_SHA, remote_url=GITLAB_URL, branch=PATCH_BRANCH - ) - # Install the new files - install_dir = Path(tempfile.mkdtemp()) - update_obj.install_component_files(BISMARK_ALIGN, SUCCEED_SHA, update_obj.modules_repo, install_dir) - - # Try applying the patch - module_install_dir = install_dir / BISMARK_ALIGN - patch_relpath = module_relpath / patch_fn - assert update_obj.try_apply_patch(BISMARK_ALIGN, REPO_NAME, patch_relpath, module_path, module_install_dir) is True - - # Move the files from the temporary directory - update_obj.move_files_from_tmp_dir(BISMARK_ALIGN, install_dir, REPO_NAME, SUCCEED_SHA) - - # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} - - # Check the 'modules.json' contains a patch file for the module - modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( - "modules", REPO_NAME, BISMARK_ALIGN, patch_fn - ) - - # Check that the correct lines are in the patch file - with open(module_path / patch_fn) as fh: - patch_lines = fh.readlines() - module_relpath = module_path.relative_to(self.pipeline_dir) - assert f"--- {module_relpath / 'main.nf'}\n" in patch_lines - assert f"+++ {module_relpath / 'main.nf'}\n" in 
patch_lines - assert "- tuple val(meta), path(reads)\n" in patch_lines - assert "- path index\n" in patch_lines - assert "+ tuple val(meta), path(reads), path(index)\n" in patch_lines - - # Check that 'main.nf' is updated correctly - with open(module_path / "main.nf") as fh: - main_nf_lines = fh.readlines() - # These lines should have been removed by the patch - assert " tuple val(meta), path(reads)\n" not in main_nf_lines - assert " path index\n" not in main_nf_lines - # This line should have been added - assert " tuple val(meta), path(reads), path(index)\n" in main_nf_lines - - -def test_create_patch_try_apply_failed(self): - """ - Test creating a patch file and applying it to a new version of the the files - """ - - setup_patch(self.pipeline_dir, True) - module_relpath = Path("modules", REPO_NAME, BISMARK_ALIGN) - module_path = Path(self.pipeline_dir, module_relpath) - - # Try creating a patch file - patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) - patch_obj.patch(BISMARK_ALIGN) - - patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" - # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} - - # Check the 'modules.json' contains a patch file for the module - modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( - "modules", REPO_NAME, BISMARK_ALIGN, patch_fn - ) - - update_obj = nf_core.modules.ModuleUpdate( - self.pipeline_dir, sha=FAIL_SHA, remote_url=GITLAB_URL, branch=PATCH_BRANCH - ) - # Install the new files - install_dir = Path(tempfile.mkdtemp()) - update_obj.install_component_files(BISMARK_ALIGN, FAIL_SHA, update_obj.modules_repo, install_dir) - - # Try applying the patch - module_install_dir = install_dir / BISMARK_ALIGN - patch_path = module_relpath / patch_fn - assert update_obj.try_apply_patch(BISMARK_ALIGN, REPO_NAME, patch_path, module_path, module_install_dir) is False - - -def test_create_patch_update_success(self): - """ - Test creating a patch file and the updating the module - - Should have the same effect as 'test_create_patch_try_apply_successful' - but uses higher level api - """ - - setup_patch(self.pipeline_dir, True) - module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) - - # Try creating a patch file - patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) - patch_obj.patch(BISMARK_ALIGN) - - patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" - # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} - - # Check the 'modules.json' contains a patch file for the module - modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, GITLAB_URL, REPO_NAME) == Path( - "modules", REPO_NAME, BISMARK_ALIGN, patch_fn - ) - - # Update the module - update_obj = nf_core.modules.ModuleUpdate( - self.pipeline_dir, - sha=SUCCEED_SHA, - show_diff=False, - update_deps=True, - remote_url=GITLAB_URL, - branch=PATCH_BRANCH, - ) - assert update_obj.update(BISMARK_ALIGN) - - # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} - - # Check the 'modules.json' contains a patch file for the module - modules_json_obj = 
nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, GITLAB_URL, REPO_NAME) == Path( - "modules", REPO_NAME, BISMARK_ALIGN, patch_fn - ), modules_json_obj.get_patch_fn(BISMARK_ALIGN, GITLAB_URL, REPO_NAME) - - # Check that the correct lines are in the patch file - with open(module_path / patch_fn) as fh: - patch_lines = fh.readlines() - module_relpath = module_path.relative_to(self.pipeline_dir) - assert f"--- {module_relpath / 'main.nf'}\n" in patch_lines - assert f"+++ {module_relpath / 'main.nf'}\n" in patch_lines - assert "- tuple val(meta), path(reads)\n" in patch_lines - assert "- path index\n" in patch_lines - assert "+ tuple val(meta), path(reads), path(index)\n" in patch_lines - - # Check that 'main.nf' is updated correctly - with open(module_path / "main.nf") as fh: - main_nf_lines = fh.readlines() - # These lines should have been removed by the patch - assert " tuple val(meta), path(reads)\n" not in main_nf_lines - assert " path index\n" not in main_nf_lines - # This line should have been added - assert " tuple val(meta), path(reads), path(index)\n" in main_nf_lines - - -def test_create_patch_update_fail(self): - """ - Test creating a patch file and updating a module when there is a diff conflict - """ - - setup_patch(self.pipeline_dir, True) - module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) - - # Try creating a patch file - patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) - patch_obj.patch(BISMARK_ALIGN) - - patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" - # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} - - # Check the 'modules.json' contains a patch file for the module - modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( - "modules", REPO_NAME, BISMARK_ALIGN, patch_fn - ) - - # Save the file contents for downstream comparison - with open(module_path / patch_fn) as fh: - patch_contents = fh.read() - - update_obj = nf_core.modules.ModuleUpdate( - self.pipeline_dir, sha=FAIL_SHA, show_diff=False, update_deps=True, remote_url=GITLAB_URL, branch=PATCH_BRANCH - ) - update_obj.update(BISMARK_ALIGN) - - # Check that the installed files have not been affected by the attempted patch - temp_dir = Path(tempfile.mkdtemp()) - nf_core.components.components_command.ComponentCommand( - "modules", self.pipeline_dir, GITLAB_URL, PATCH_BRANCH - ).install_component_files(BISMARK_ALIGN, FAIL_SHA, update_obj.modules_repo, temp_dir) - - temp_module_dir = temp_dir / BISMARK_ALIGN - for file in os.listdir(temp_module_dir): - assert file in os.listdir(module_path) - with open(module_path / file) as fh: - installed = fh.read() - with open(temp_module_dir / file) as fh: - shouldbe = fh.read() - assert installed == shouldbe - - # Check that the patch file is unaffected - with open(module_path / patch_fn) as fh: - new_patch_contents = fh.read() - assert patch_contents == new_patch_contents - - -def test_remove_patch(self): - """Test creating a patch when there is no change to the module""" - setup_patch(self.pipeline_dir, True) - - # Try creating a patch file - patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) - patch_obj.patch(BISMARK_ALIGN) - - module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) - - # Check that a 
patch file with the correct name has been created
-    patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff"
-    assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn}
-
-    # Check the 'modules.json' contains a patch file for the module
-    modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir)
-    assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path(
-        "modules", REPO_NAME, BISMARK_ALIGN, patch_fn
-    )
-
-    with mock.patch.object(nf_core.create.questionary, "confirm") as mock_questionary:
-        mock_questionary.unsafe_ask.return_value = True
-        patch_obj.remove(BISMARK_ALIGN)
-    # Check that the diff file has been removed
-    assert set(os.listdir(module_path)) == {"main.nf", "meta.yml"}
-
-    # Check that the 'modules.json' entry has been removed
-    modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir)
-    assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) is None
diff --git a/tests/modules/remove.py b/tests/modules/remove.py
deleted file mode 100644
index e412fd35a..000000000
--- a/tests/modules/remove.py
+++ /dev/null
@@ -1,22 +0,0 @@
-import os
-
-
-def test_modules_remove_trimgalore(self):
-    """Test removing TrimGalore! module after installing it"""
-    self.mods_install.install("trimgalore")
-    module_path = os.path.join(self.mods_install.dir, "modules", "nf-core", "modules", "trimgalore")
-    assert self.mods_remove.remove("trimgalore")
-    assert os.path.exists(module_path) is False
-
-
-def test_modules_remove_trimgalore_uninstalled(self):
-    """Test removing TrimGalore! module without installing it"""
-    assert self.mods_remove.remove("trimgalore") is False
-
-
-def test_modules_remove_multiqc_from_gitlab(self):
-    """Test removing multiqc module after installing it from an alternative source"""
-    self.mods_install_gitlab.install("multiqc")
-    module_path = os.path.join(self.mods_install_gitlab.dir, "modules", "nf-core-test", "multiqc")
-    assert self.mods_remove_gitlab.remove("multiqc", force=True)
-    assert os.path.exists(module_path) is False
diff --git a/tests/modules/test_bump_versions.py b/tests/modules/test_bump_versions.py
new file mode 100644
index 000000000..d46b8747c
--- /dev/null
+++ b/tests/modules/test_bump_versions.py
@@ -0,0 +1,50 @@
+import os
+import re
+
+import pytest
+
+import nf_core.modules.bump_versions
+from nf_core.modules.modules_utils import ModuleExceptionError
+
+from ..test_modules import TestModules
+
+
+class TestModulesBumpVersions(TestModules):
+    def test_modules_bump_versions_single_module(self):
+        """Test updating a single module"""
+        # Pin an older star version in the bpipe/test module's environment.yml
+        env_yml_path = os.path.join(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml")
+        with open(env_yml_path) as fh:
+            content = fh.read()
+        new_content = re.sub(r"bioconda::star=\d.\d.\d\D?", r"bioconda::star=2.6.1d", content)
+        with open(env_yml_path, "w") as fh:
+            fh.write(new_content)
+        version_bumper = nf_core.modules.bump_versions.ModuleVersionBumper(pipeline_dir=self.nfcore_modules)
+        version_bumper.bump_versions(module="bpipe/test")
+        assert len(version_bumper.failed) == 0
+
+    def test_modules_bump_versions_all_modules(self):
+        """Test updating all modules"""
+        version_bumper = nf_core.modules.bump_versions.ModuleVersionBumper(pipeline_dir=self.nfcore_modules)
+        version_bumper.bump_versions(all_modules=True)
+        assert len(version_bumper.failed) == 0
+
+    def test_modules_bump_versions_fail(self):
+        """Fail updating a module with a wrong name"""
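+        # Ask for a module path that does not exist; ModuleVersionBumper should raise ModuleExceptionError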
+        version_bumper = nf_core.modules.bump_versions.ModuleVersionBumper(pipeline_dir=self.nfcore_modules)
+        with pytest.raises(ModuleExceptionError) as excinfo:
+            version_bumper.bump_versions(module="no/module")
+        assert "Could not find the specified module:" in str(excinfo.value)
+
+    def test_modules_bump_versions_fail_unknown_version(self):
+        """Fail because of an unknown version"""
+        # Set the bpipe version in the bpipe/test environment.yml to an unparsable string
+        env_yml_path = os.path.join(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml")
+        with open(env_yml_path) as fh:
+            content = fh.read()
+        new_content = re.sub(r"bioconda::bpipe=\d.\d.\d\D?", r"bioconda::bpipe=xxx", content)
+        with open(env_yml_path, "w") as fh:
+            fh.write(new_content)
+        version_bumper = nf_core.modules.bump_versions.ModuleVersionBumper(pipeline_dir=self.nfcore_modules)
+        version_bumper.bump_versions(module="bpipe/test")
+        assert "Conda package had unknown version" in version_bumper.failed[0][0]
diff --git a/tests/modules/test_create.py b/tests/modules/test_create.py
new file mode 100644
index 000000000..219f86999
--- /dev/null
+++ b/tests/modules/test_create.py
@@ -0,0 +1,164 @@
+import os
+import shutil
+from pathlib import Path
+from unittest import mock
+
+import pytest
+import requests_cache
+import responses
+import yaml
+from git.repo import Repo
+
+import nf_core.modules.create
+from tests.utils import (
+    GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH,
+    GITLAB_URL,
+    mock_anaconda_api_calls,
+    mock_biocontainers_api_calls,
+)
+
+from ..test_modules import TestModules
+
+
+class TestModulesCreate(TestModules):
+    def test_modules_create_succeed(self):
+        """Succeed at creating the TrimGalore! module"""
+        with responses.RequestsMock() as rsps:
+            mock_anaconda_api_calls(rsps, "trim-galore", "0.6.7")
+            mock_biocontainers_api_calls(rsps, "trim-galore", "0.6.7")
+            module_create = nf_core.modules.create.ModuleCreate(
+                self.pipeline_dir, "trimgalore", "@author", "process_single", True, True, conda_name="trim-galore"
+            )
+            with requests_cache.disabled():
+                module_create.create()
+        assert os.path.exists(os.path.join(self.pipeline_dir, "modules", "local", "trimgalore.nf"))
+
+    def test_modules_create_fail_exists(self):
+        """Fail at creating the same module twice"""
+        with responses.RequestsMock() as rsps:
+            mock_anaconda_api_calls(rsps, "trim-galore", "0.6.7")
+            mock_biocontainers_api_calls(rsps, "trim-galore", "0.6.7")
+            module_create = nf_core.modules.create.ModuleCreate(
+                self.pipeline_dir, "trimgalore", "@author", "process_single", False, False, conda_name="trim-galore"
+            )
+            with requests_cache.disabled():
+                module_create.create()
+            with pytest.raises(UserWarning) as excinfo:
+                with requests_cache.disabled():
+                    module_create.create()
+            assert "Module file exists already" in str(excinfo.value)
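+    # When run inside a clone of nf-core/modules (rather than a pipeline), files land under modules/nf-core/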
+    def test_modules_create_nfcore_modules(self):
+        """Create a module in an nf-core/modules clone"""
+        with responses.RequestsMock() as rsps:
+            mock_anaconda_api_calls(rsps, "fastqc", "0.11.9")
+            mock_biocontainers_api_calls(rsps, "fastqc", "0.11.9")
+            module_create = nf_core.modules.create.ModuleCreate(
+                self.nfcore_modules, "fastqc", "@author", "process_low", False, False
+            )
+            with requests_cache.disabled():
+                module_create.create()
+        assert os.path.exists(os.path.join(self.nfcore_modules, "modules", "nf-core", "fastqc", "main.nf"))
+        assert os.path.exists(
+            os.path.join(self.nfcore_modules, "modules", "nf-core", "fastqc", "tests", "main.nf.test")
+        )
+
+    def test_modules_create_nfcore_modules_subtool(self):
+        """Create a tool/subtool module in an nf-core/modules clone"""
+        with responses.RequestsMock() as rsps:
+            mock_anaconda_api_calls(rsps, "star", "2.8.10a")
+            mock_biocontainers_api_calls(rsps, "star", "2.8.10a")
+            module_create = nf_core.modules.create.ModuleCreate(
+                self.nfcore_modules, "star/index", "@author", "process_medium", False, False
+            )
+            with requests_cache.disabled():
+                module_create.create()
+        assert os.path.exists(os.path.join(self.nfcore_modules, "modules", "nf-core", "star", "index", "main.nf"))
+        assert os.path.exists(
+            os.path.join(self.nfcore_modules, "modules", "nf-core", "star", "index", "tests", "main.nf.test")
+        )
+
+    @mock.patch("rich.prompt.Confirm.ask")
+    def test_modules_migrate(self, mock_rich_ask):
+        """Create a module with the --migrate-pytest option to convert pytest to nf-test"""
+        pytest_dir = Path(self.nfcore_modules, "tests", "modules", "nf-core", "samtools", "sort")
+        module_dir = Path(self.nfcore_modules, "modules", "nf-core", "samtools", "sort")
+
+        # Clone modules repo with pytests
+        shutil.rmtree(self.nfcore_modules)
+        Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH)
+        with open(module_dir / "main.nf") as fh:
+            old_main_nf = fh.read()
+        with open(module_dir / "meta.yml") as fh:
+            old_meta_yml = fh.read()
+
+        # Create a module with --migrate-pytest
+        mock_rich_ask.return_value = True
+        module_create = nf_core.modules.create.ModuleCreate(self.nfcore_modules, "samtools/sort", migrate_pytest=True)
+        module_create.create()
+
+        with open(module_dir / "main.nf") as fh:
+            new_main_nf = fh.read()
+        with open(module_dir / "meta.yml") as fh:
+            new_meta_yml = fh.read()
+        nextflow_config = module_dir / "tests" / "nextflow.config"
+
+        # Check that old files have been copied to the new module
+        assert old_main_nf == new_main_nf
+        assert old_meta_yml == new_meta_yml
+        assert nextflow_config.is_file()
+
+        # Check that pytest folder is deleted
+        assert not pytest_dir.is_dir()
+
+        # Check that pytest_modules.yml is updated
+        with open(Path(self.nfcore_modules, "tests", "config", "pytest_modules.yml")) as fh:
+            modules_yml = yaml.safe_load(fh)
+        assert "samtools/sort" not in modules_yml.keys()
+
+    @mock.patch("rich.prompt.Confirm.ask")
+    def test_modules_migrate_no_delete(self, mock_rich_ask):
+        """Create a module with the --migrate-pytest option to convert pytest to nf-test.
+        Test that pytest directory is not deleted."""
+        pytest_dir = Path(self.nfcore_modules, "tests", "modules", "nf-core", "samtools", "sort")
+
+        # Clone modules repo with pytests
+        shutil.rmtree(self.nfcore_modules)
+        Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH)
+
+        # Create a module with --migrate-pytest
+        mock_rich_ask.return_value = False
+        module_create = nf_core.modules.create.ModuleCreate(self.nfcore_modules, "samtools/sort", migrate_pytest=True)
+        module_create.create()
+
+        # Check that pytest folder is not deleted
+        assert pytest_dir.is_dir()
+
+        # Check that pytest_modules.yml is updated
+        with open(Path(self.nfcore_modules, "tests", "config", "pytest_modules.yml")) as fh:
+            modules_yml = yaml.safe_load(fh)
+        assert "samtools/sort" not in modules_yml.keys()
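+    # Symlinked pytest files need special handling: migration should replace them with regular file copies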
+    @mock.patch("rich.prompt.Confirm.ask")
+    def test_modules_migrate_symlink(self, mock_rich_ask):
+        """Create a module with the --migrate-pytest option to convert pytest with symlinks to nf-test.
+        Test that the symlink is deleted and the file is copied."""
+
+        pytest_dir = Path(self.nfcore_modules, "tests", "modules", "nf-core", "samtools", "sort")
+        module_dir = Path(self.nfcore_modules, "modules", "nf-core", "samtools", "sort")
+
+        # Clone modules repo with pytests
+        shutil.rmtree(self.nfcore_modules)
+        Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH)
+
+        # Create a symlinked file in the pytest directory
+        symlink_file = pytest_dir / "symlink_file.txt"
+        symlink_file.symlink_to(module_dir / "main.nf")
+
+        # Create a module with --migrate-pytest
+        mock_rich_ask.return_value = True
+        module_create = nf_core.modules.create.ModuleCreate(self.nfcore_modules, "samtools/sort", migrate_pytest=True)
+        module_create.create()
+
+        # Check that symlink is deleted
+        assert not symlink_file.is_symlink()
diff --git a/tests/modules/test_info.py b/tests/modules/test_info.py
new file mode 100644
index 000000000..8e60bed31
--- /dev/null
+++ b/tests/modules/test_info.py
@@ -0,0 +1,62 @@
+from rich.console import Console
+
+import nf_core.modules.info
+
+from ..test_modules import TestModules
+from ..utils import GITLAB_DEFAULT_BRANCH, GITLAB_URL
+
+
+class TestModulesInfo(TestModules):
+    def test_modules_info_remote(self):
+        """Test getting info about a remote module"""
+        mods_info = nf_core.modules.info.ModuleInfo(self.pipeline_dir, "fastqc")
+        mods_info_output = mods_info.get_component_info()
+        console = Console(record=True)
+        console.print(mods_info_output)
+        output = console.export_text()
+
+        assert "Module: fastqc" in output
+        assert "Inputs" in output
+        assert "Outputs" in output
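+    # Each test renders the ModuleInfo output with a recording rich Console and asserts on the exported text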
GITLAB_BRANCH_ORG_PATH_BRANCH,
+    GITLAB_BRANCH_TEST_BRANCH,
+    GITLAB_REPO,
+    GITLAB_URL,
+    with_temporary_folder,
+)
+
+
+class TestModulesInstall(TestModules):
+    @with_temporary_folder
+    def test_modules_install_emptypipeline(self, tmpdir):
+        """Test installing a module - empty dir given"""
+        Path(tmpdir, "nf-core-pipe").mkdir()
+        self.mods_install.directory = Path(tmpdir, "nf-core-pipe")
+        with pytest.raises(UserWarning) as excinfo:
+            self.mods_install.install("fastp")
+        assert "Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value)
+
+    def test_modules_install_nomodule(self):
+        """Test installing a module - unrecognised module given"""
+        with pytest.raises(ValueError) as excinfo:
+            self.mods_install.install("foo")
+        assert excinfo.typename == "ValueError"
+        assert "Module 'foo' not found in available modules" in self.caplog.text
+
+    def test_modules_install_trimgalore(self):
+        """Test installing a module - TrimGalore!"""
+        assert self.mods_install.install("trimgalore") is not False
+        assert self.mods_install.directory is not None
+        module_path = Path(self.mods_install.directory, "modules", "nf-core", "trimgalore")
+        assert module_path.exists()
+
+    def test_modules_install_trimgalore_twice(self):
+        """Test installing a module - TrimGalore! already there"""
+        self.mods_install.install("trimgalore")
+        assert self.mods_install.install("trimgalore") is True
+
+    def test_modules_install_from_gitlab(self):
+        """Test installing a module from GitLab"""
+        assert self.mods_install_gitlab.install("fastqc") is True
+
+    def test_modules_install_different_branch_fail(self):
+        """Test installing a module from a different branch - should fail"""
+        install_obj = ModuleInstall(self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH)
+        # The FastQC module does not exist in the branch-test branch
+        with pytest.raises(Exception) as excinfo:
+            install_obj.install("fastqc")
+        assert "Module 'fastqc' not found in available module" in str(excinfo.value)
+
+    def test_modules_install_different_branch_succeed(self):
+        """Test installing a module from a different branch - should succeed"""
+        install_obj = ModuleInstall(self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH)
+        # The fastp module does exist in the branch-test branch
+        assert install_obj.install("fastp") is True
+
+        # Verify that the branch entry was added correctly
+        modules_json = ModulesJson(self.pipeline_dir)
+        assert (
+            modules_json.get_component_branch(self.component_type, "fastp", GITLAB_URL, GITLAB_REPO)
+            == GITLAB_BRANCH_TEST_BRANCH
+        )
+
+    def test_modules_install_tracking(self):
+        """Test installing a module and finding 'modules' in the installed_by section of modules.json"""
+        self.mods_install.install("trimgalore")
+
+        # Verify that the installed_by entry was added correctly
+        modules_json = ModulesJson(self.pipeline_dir)
+        mod_json = modules_json.get_modules_json()
+        assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["modules"]["nf-core"]["trimgalore"][
+            "installed_by"
+        ] == ["modules"]
+
+    def test_modules_install_alternate_remote(self):
+        """Test installing a module from a different remote with the same organization path"""
+        install_obj = ModuleInstall(self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_ORG_PATH_BRANCH)
+        # Install fastqc from GitLab which is also installed from GitHub with the same org_path
+        with pytest.raises(Exception) as excinfo:
+            install_obj.install("fastqc")
+        assert "Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value)
diff
--git a/tests/modules/test_lint.py b/tests/modules/test_lint.py new file mode 100644 index 000000000..537280798 --- /dev/null +++ b/tests/modules/test_lint.py @@ -0,0 +1,773 @@ +import json +from pathlib import Path +from typing import Union + +import yaml +from git.repo import Repo + +import nf_core.modules.lint +import nf_core.modules.patch +from nf_core.modules.lint.main_nf import check_container_link_line, check_process_labels +from nf_core.utils import set_wd + +from ..test_modules import TestModules +from ..utils import GITLAB_NFTEST_BRANCH, GITLAB_URL +from .test_patch import BISMARK_ALIGN, CORRECT_SHA, PATCH_BRANCH, REPO_NAME, modify_main_nf + +PROCESS_LABEL_GOOD = ( + """ + label 'process_high' + cpus 12 + """, + 1, + 0, + 0, +) +PROCESS_LABEL_NON_ALPHANUMERIC = ( + """ + label 'a:label:with:colons' + cpus 12 + """, + 0, + 2, + 0, +) +PROCESS_LABEL_GOOD_CONFLICTING = ( + """ + label 'process_high' + label 'process_low' + cpus 12 + """, + 0, + 1, + 0, +) +PROCESS_LABEL_GOOD_DUPLICATES = ( + """ + label 'process_high' + label 'process_high' + cpus 12 + """, + 0, + 2, + 0, +) +PROCESS_LABEL_GOOD_AND_NONSTANDARD = ( + """ + label 'process_high' + label 'process_extra_label' + cpus 12 + """, + 1, + 1, + 0, +) +PROCESS_LABEL_NONSTANDARD = ( + """ + label 'process_extra_label' + cpus 12 + """, + 0, + 2, + 0, +) +PROCESS_LABEL_NONSTANDARD_DUPLICATES = ( + """ + label process_extra_label + label process_extra_label + cpus 12 + """, + 0, + 3, + 0, +) +PROCESS_LABEL_NONE_FOUND = ( + """ + cpus 12 + """, + 0, + 1, + 0, +) + +PROCESS_LABEL_TEST_CASES = [ + PROCESS_LABEL_GOOD, + PROCESS_LABEL_NON_ALPHANUMERIC, + PROCESS_LABEL_GOOD_CONFLICTING, + PROCESS_LABEL_GOOD_DUPLICATES, + PROCESS_LABEL_GOOD_AND_NONSTANDARD, + PROCESS_LABEL_NONSTANDARD, + PROCESS_LABEL_NONSTANDARD_DUPLICATES, + PROCESS_LABEL_NONE_FOUND, +] + + +# Test cases for linting the container definitions + +CONTAINER_SINGLE_GOOD = ( + "Single-line container definition should pass", + """ + container "quay.io/nf-core/gatk:4.4.0.0" //Biocontainers is missing a package + """, + 2, # passed + 0, # warned + 0, # failed +) + +CONTAINER_TWO_LINKS_GOOD = ( + "Multi-line container definition should pass", + """ + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.4.0.0--py36hdfd78af_0': + 'biocontainers/gatk4:4.4.0.0--py36hdfd78af_0' }" + """, + 6, + 0, + 0, +) + +CONTAINER_WITH_SPACE_BAD = ( + "Space in container URL should fail", + """ + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.4.0.0--py36hdfd78af_0 ': + 'biocontainers/gatk4:4.4.0.0--py36hdfd78af_0' }" + """, + 5, + 0, + 1, +) + +CONTAINER_MULTIPLE_DBLQUOTES_BAD = ( + "Incorrect quoting of container string should fail", + """ + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/gatk4:4.4.0.0--py36hdfd78af_0 ': + "biocontainers/gatk4:4.4.0.0--py36hdfd78af_0" }" + """, + 4, + 0, + 1, +) + +CONTAINER_TEST_CASES = [ + CONTAINER_SINGLE_GOOD, + CONTAINER_TWO_LINKS_GOOD, + CONTAINER_WITH_SPACE_BAD, + CONTAINER_MULTIPLE_DBLQUOTES_BAD, +] + + +class TestModulesCreate(TestModules): + def _setup_patch(self, pipeline_dir: Union[str, Path], modify_module: bool): + install_obj = nf_core.modules.install.ModuleInstall( + pipeline_dir, + prompt=False, + force=False, + remote_url=GITLAB_URL, + branch=PATCH_BRANCH, + sha=CORRECT_SHA, + ) + + # Install the module + install_obj.install(BISMARK_ALIGN) + + if modify_module: + # Modify the module + module_path = Path(pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) + modify_main_nf(module_path / "main.nf") + + def test_modules_lint_trimgalore(self): + """Test linting the TrimGalore! module""" + self.mods_install.install("trimgalore") + module_lint = nf_core.modules.lint.ModuleLint(directory=self.pipeline_dir) + module_lint.lint(print_results=False, module="trimgalore") + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + + def test_modules_lint_tabix_tabix(self): + """Test linting the tabix/tabix module""" + self.mods_install.install("tabix/tabix") + module_lint = nf_core.modules.lint.ModuleLint(directory=self.pipeline_dir) + module_lint.lint(print_results=False, module="tabix/tabix") + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + + def test_modules_lint_empty(self): + """Test linting a pipeline with no modules installed""" + self.mods_remove.remove("fastqc", force=True) + self.mods_remove.remove("multiqc", force=True) + nf_core.modules.lint.ModuleLint(directory=self.pipeline_dir) + assert "No modules from https://github.com/nf-core/modules.git installed in pipeline" in self.caplog.text + + def test_modules_lint_new_modules(self): + """lint a new module""" + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) + module_lint.lint(print_results=False, all_modules=True) + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + + def test_modules_lint_no_gitlab(self): + """Test linting a pipeline with no modules installed""" + self.mods_remove.remove("fastqc", force=True) + self.mods_remove.remove("multiqc", force=True) + nf_core.modules.lint.ModuleLint(directory=self.pipeline_dir, remote_url=GITLAB_URL) + assert f"No modules from {GITLAB_URL} installed in pipeline" in self.caplog.text + + def test_modules_lint_gitlab_modules(self): + """Lint modules from a different remote""" + self.mods_install_gitlab.install("fastqc") + self.mods_install_gitlab.install("multiqc") + module_lint = nf_core.modules.lint.ModuleLint(directory=self.pipeline_dir, remote_url=GITLAB_URL) + module_lint.lint(print_results=False, all_modules=True) + assert len(module_lint.failed) == 2 + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + + def test_modules_lint_multiple_remotes(self): + """Lint modules from a different remote""" + self.mods_install_gitlab.install("multiqc") + module_lint = nf_core.modules.lint.ModuleLint(directory=self.pipeline_dir, remote_url=GITLAB_URL) + 
module_lint.lint(print_results=False, all_modules=True)
+        assert len(module_lint.failed) == 1
+        assert len(module_lint.passed) > 0
+        assert len(module_lint.warned) >= 0
+
+    def test_modules_lint_registry(self):
+        """Test linting the samtools module with an alternative registry"""
+        assert self.mods_install.install("samtools/sort")
+        module_lint = nf_core.modules.lint.ModuleLint(directory=self.pipeline_dir, registry="public.ecr.aws")
+        module_lint.lint(print_results=False, module="samtools/sort")
+        assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}"
+        assert len(module_lint.passed) > 0
+        assert len(module_lint.warned) >= 0
+        module_lint = nf_core.modules.lint.ModuleLint(directory=self.pipeline_dir)
+        module_lint.lint(print_results=False, module="samtools/sort")
+        assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}"
+        assert len(module_lint.passed) > 0
+        assert len(module_lint.warned) >= 0
+
+    def test_modules_lint_patched_modules(self):
+        """
+        Test creating a patch file and applying it to a new version of the files
+        """
+        self._setup_patch(str(self.pipeline_dir), True)
+
+        # Create a patch file
+        patch_obj = nf_core.modules.patch.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH)
+        patch_obj.patch(BISMARK_ALIGN)
+
+        # temporarily change the working directory to the pipeline directory
+        # to avoid an error from try_apply_patch() during linting
+        with set_wd(self.pipeline_dir):
+            module_lint = nf_core.modules.lint.ModuleLint(
+                directory=self.pipeline_dir,
+                remote_url=GITLAB_URL,
+                branch=PATCH_BRANCH,
+                hide_progress=True,
+            )
+            module_lint.lint(
+                print_results=False,
+                all_modules=True,
+            )
+
+        assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}"
+        assert len(module_lint.passed) > 0
+        assert len(module_lint.warned) >= 0
+
+    def test_modules_lint_check_process_labels(self):
+        for test_case in PROCESS_LABEL_TEST_CASES:
+            process, passed, warned, failed = test_case
+            mocked_ModuleLint = MockModuleLint()
+            check_process_labels(mocked_ModuleLint, process.splitlines())
+            assert len(mocked_ModuleLint.passed) == passed
+            assert len(mocked_ModuleLint.warned) == warned
+            assert len(mocked_ModuleLint.failed) == failed
+
+    def test_modules_lint_check_url(self):
+        for test_case in CONTAINER_TEST_CASES:
+            test, process, passed, warned, failed = test_case
+            mocked_ModuleLint = MockModuleLint()
+            for line in process.splitlines():
+                if line.strip():
+                    check_container_link_line(mocked_ModuleLint, line, registry="quay.io")
+
+            assert (
+                len(mocked_ModuleLint.passed) == passed
+            ), f"{test}: Expected {passed} PASS, got {len(mocked_ModuleLint.passed)}."
+            assert (
+                len(mocked_ModuleLint.warned) == warned
+            ), f"{test}: Expected {warned} WARN, got {len(mocked_ModuleLint.warned)}."
+            assert (
+                len(mocked_ModuleLint.failed) == failed
+            ), f"{test}: Expected {failed} FAIL, got {len(mocked_ModuleLint.failed)}."
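+
+    # For reference, the line-level check above can also be exercised standalone
+    # (an illustrative sketch only, mirroring the CONTAINER_SINGLE_GOOD case;
+    # MockModuleLint is the stub class defined at the bottom of this file):
+    #
+    #     mock_lint = MockModuleLint()
+    #     check_container_link_line(mock_lint, 'container "quay.io/nf-core/gatk:4.4.0.0"', registry="quay.io")
+    #     assert len(mock_lint.failed) == 0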
+ + def test_modules_lint_update_meta_yml(self): + """update the meta.yml of a module""" + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules, fix=True) + module_lint.lint(print_results=False, module="bpipe/test") + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + + def test_modules_lint_snapshot_file(self): + """Test linting a module with a snapshot file""" + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + + def test_modules_lint_snapshot_file_missing_fail(self): + """Test linting a module with a snapshot file missing, which should fail""" + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test.snap", + ).unlink() + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test.snap", + ).touch() + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "test_snapshot_exists" + + def test_modules_lint_snapshot_file_not_needed(self): + """Test linting a module which doesn't need a snapshot file by removing the snapshot keyword in the main.nf.test file""" + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test", + ) + ) as fh: + content = fh.read() + new_content = content.replace("snapshot(", "snap (") + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test", + ), + "w", + ) as fh: + fh.write(new_content) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + + def test_modules_environment_yml_file_doesnt_exists(self): + """Test linting a module with an environment.yml file""" + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml").rename( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml.bak", + ) + ) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml.bak", + ).rename( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml", + ) + ) + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "environment_yml_exists" + + def test_modules_environment_yml_file_sorted_correctly(self): + """Test linting a module with a correctly sorted 
environment.yml file""" + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + + def test_modules_environment_yml_file_sorted_incorrectly(self): + """Test linting a module with an incorrectly sorted environment.yml file""" + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml", + ) + ) as fh: + yaml_content = yaml.safe_load(fh) + # Add a new dependency to the environment.yml file and reverse the order + yaml_content["dependencies"].append("z=0.0.0") + yaml_content["dependencies"].reverse() + yaml_content = yaml.dump(yaml_content) + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml", + ), + "w", + ) as fh: + fh.write(yaml_content) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + # we fix the sorting on the fly, so this should pass + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + + def test_modules_environment_yml_file_not_array(self): + """Test linting a module with an incorrectly formatted environment.yml file""" + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml", + ) + ) as fh: + yaml_content = yaml.safe_load(fh) + yaml_content["dependencies"] = "z" + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml", + ), + "w", + ) as fh: + fh.write(yaml.dump(yaml_content)) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "environment_yml_valid" + + def test_modules_meta_yml_incorrect_licence_field(self): + """Test linting a module with an incorrect Licence field in meta.yml""" + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml")) as fh: + meta_yml = yaml.safe_load(fh) + meta_yml["tools"][0]["bpipe"]["licence"] = "[MIT]" + with open( + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml"), + "w", + ) as fh: + fh.write(yaml.dump(meta_yml)) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + + # reset changes + meta_yml["tools"][0]["bpipe"]["licence"] = ["MIT"] + with open( + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml"), + "w", + ) as fh: + fh.write(yaml.dump(meta_yml)) + + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) >= 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "meta_yml_valid" + + def test_modules_meta_yml_output_mismatch(self): + """Test linting a module with an extra entry in output fields in meta.yml compared to module.output""" + with open(Path(self.nfcore_modules, 
"modules", "nf-core", "bpipe", "test", "main.nf")) as fh: + main_nf = fh.read() + main_nf_new = main_nf.replace("emit: bam", "emit: bai") + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: + fh.write(main_nf_new) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: + fh.write(main_nf) + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) >= 0 + assert "Module `meta.yml` does not match `main.nf`" in module_lint.failed[0].message + + def test_modules_meta_yml_incorrect_name(self): + """Test linting a module with an incorrect name in meta.yml""" + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml")) as fh: + meta_yml = yaml.safe_load(fh) + meta_yml["name"] = "bpipe/test" + with open( + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml"), + "w", + ) as fh: + fh.write(yaml.dump(meta_yml)) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + + # reset changes + meta_yml["name"] = "bpipe_test" + with open( + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml"), + "w", + ) as fh: + fh.write(yaml.dump(meta_yml)) + + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) >= 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "meta_name" + + def test_modules_missing_test_dir(self): + """Test linting a module with a missing test directory""" + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests").rename( + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests.bak") + ) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests.bak").rename( + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests") + ) + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) >= 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "test_dir_exists" + + def test_modules_missing_test_main_nf(self): + """Test linting a module with a missing test/main.nf file""" + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test", + ).rename( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test.bak", + ) + ) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test.bak", + ).rename( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test", + ) + ) + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) >= 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "test_main_nf_exists" + 
+
+    def test_modules_unused_pytest_files(self):
+        """Test linting an nf-test module with files still present in `tests/modules/`"""
+        Path(self.nfcore_modules, "tests", "modules", "bpipe", "test").mkdir(parents=True, exist_ok=True)
+        module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules)
+        module_lint.lint(print_results=False, module="bpipe/test")
+        Path(self.nfcore_modules, "tests", "modules", "bpipe", "test").rmdir()
+        assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}"
+        assert len(module_lint.passed) >= 0
+        assert len(module_lint.warned) >= 0
+        assert module_lint.failed[0].lint_test == "test_old_test_dir"
+
+    def test_nftest_failing_linting(self):
+        """Test linting a module which includes other modules in its nf-test tests.
+        Linting is expected to fail."""
+        # Clone modules repo with testing modules
+        tmp_dir = self.nfcore_modules.parent
+        self.nfcore_modules = Path(tmp_dir, "modules-test")
+        Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_NFTEST_BRANCH)
+
+        module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules)
+        module_lint.lint(print_results=False, module="kallisto/quant")
+
+        assert len(module_lint.failed) == 3, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}"
+        assert len(module_lint.passed) >= 0
+        assert len(module_lint.warned) >= 0
+        assert module_lint.failed[0].lint_test == "environment_yml_valid"
+        assert module_lint.failed[1].lint_test == "meta_yml_valid"
+        assert module_lint.failed[2].lint_test == "test_main_tags"
+        assert "kallisto/index" in module_lint.failed[2].message
+
+    def test_modules_absent_version(self):
+        """Test linting an nf-test module when the `versions` entry is absent from the snapshot file"""
+        with open(
+            Path(
+                self.nfcore_modules,
+                "modules",
+                "nf-core",
+                "bpipe",
+                "test",
+                "tests",
+                "main.nf.test.snap",
+            )
+        ) as fh:
+            content = fh.read()
+            new_content = content.replace("versions", "foo")
+        with open(
+            Path(
+                self.nfcore_modules,
+                "modules",
+                "nf-core",
+                "bpipe",
+                "test",
+                "tests",
+                "main.nf.test.snap",
+            ),
+            "w",
+        ) as fh:
+            fh.write(new_content)
+        module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules)
+        module_lint.lint(print_results=False, module="bpipe/test")
+        with open(
+            Path(
+                self.nfcore_modules,
+                "modules",
+                "nf-core",
+                "bpipe",
+                "test",
+                "tests",
+                "main.nf.test.snap",
+            ),
+            "w",
+        ) as fh:
+            fh.write(content)
+        assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}"
+        assert len(module_lint.passed) >= 0
+        assert len(module_lint.warned) >= 0
+        assert module_lint.failed[0].lint_test == "test_snap_versions"
+
+    def test_modules_empty_file_in_snapshot(self):
+        """Test linting an nf-test module with an empty file sha sum in the test snapshot, which should make it fail (if it is not a stub)"""
+        snap_file = Path(
+            self.nfcore_modules,
+            "modules",
+            "nf-core",
+            "bpipe",
+            "test",
+            "tests",
+            "main.nf.test.snap",
+        )
+        snap = json.load(snap_file.open())
+        content = snap_file.read_text()
+        snap["my test"]["content"][0]["0"] = "test:md5,d41d8cd98f00b204e9800998ecf8427e"
+
+        with open(snap_file, "w") as fh:
+            json.dump(snap, fh)
+
+        module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules)
+        module_lint.lint(print_results=False, module="bpipe/test")
+        assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}"
+        assert len(module_lint.passed) > 0
+        assert len(module_lint.warned) >= 0
+        assert
module_lint.failed[0].lint_test == "test_snap_md5sum" + + # reset the file + with open(snap_file, "w") as fh: + fh.write(content) + + def test_modules_empty_file_in_stub_snapshot(self): + """Test linting a nf-test module with an empty file sha sum in the stub test snapshot, which should make it not fail""" + snap_file = Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test.snap", + ) + snap = json.load(snap_file.open()) + content = snap_file.read_text() + snap["my_test_stub"] = {"content": [{"0": "test:md5,d41d8cd98f00b204e9800998ecf8427e", "versions": {}}]} + + with open(snap_file, "w") as fh: + json.dump(snap, fh) + + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + assert any(x.lint_test == "test_snap_md5sum" for x in module_lint.passed) + + # reset the file + with open(snap_file, "w") as fh: + fh.write(content) + + +# A skeleton object with the passed/warned/failed list attrs +# Use this in place of a ModuleLint object to test behaviour of +# linting methods which don't need the full setup +class MockModuleLint: + def __init__(self): + self.passed = [] + self.warned = [] + self.failed = [] + + self.main_nf = "main_nf" diff --git a/tests/modules/test_list.py b/tests/modules/test_list.py new file mode 100644 index 000000000..3e92a33ab --- /dev/null +++ b/tests/modules/test_list.py @@ -0,0 +1,126 @@ +import json +from pathlib import Path + +import yaml +from rich.console import Console + +import nf_core.modules.list + +from ..test_modules import TestModules +from ..utils import GITLAB_DEFAULT_BRANCH, GITLAB_URL + + +class TestModulesList(TestModules): + def test_modules_list_remote(self): + """Test listing available modules""" + mods_list = nf_core.modules.list.ModuleList() + listed_mods = mods_list.list_components() + console = Console(record=True) + console.print(listed_mods) + output = console.export_text() + assert "fastqc" in output + + def test_modules_list_remote_gitlab(self): + """Test listing the modules in the remote gitlab repo""" + mods_list = nf_core.modules.list.ModuleList(remote_url=GITLAB_URL, branch=GITLAB_DEFAULT_BRANCH) + listed_mods = mods_list.list_components() + console = Console(record=True) + console.print(listed_mods) + output = console.export_text() + assert "fastqc" in output + + def test_modules_list_pipeline(self): + """Test listing locally installed modules""" + mods_list = nf_core.modules.list.ModuleList(self.pipeline_dir, remote=False) + listed_mods = mods_list.list_components() + console = Console(record=True) + console.print(listed_mods) + output = console.export_text() + assert "fastqc" in output + assert "multiqc" in output + + def test_modules_install_and_list_pipeline(self): + """Test listing locally installed modules""" + self.mods_install.install("trimgalore") + mods_list = nf_core.modules.list.ModuleList(self.pipeline_dir, remote=False) + listed_mods = mods_list.list_components() + console = Console(record=True) + console.print(listed_mods) + output = console.export_text() + assert "trimgalore" in output + + def test_modules_install_gitlab_and_list_pipeline(self): + """Test listing locally installed modules""" + self.mods_install_gitlab.install("fastqc") + mods_list = nf_core.modules.list.ModuleList(self.pipeline_dir, remote=False) + 
listed_mods = mods_list.list_components()
+        console = Console(record=True)
+        console.print(listed_mods)
+        output = console.export_text()
+        assert "fastqc" in output
+
+    def test_modules_list_local_json(self):
+        """Test listing locally installed modules as JSON"""
+        mods_list = nf_core.modules.list.ModuleList(self.pipeline_dir, remote=False)
+        listed_mods = str(mods_list.list_components(print_json=True))
+        listed_mods = json.loads(listed_mods)
+        assert "fastqc" in listed_mods
+        assert "multiqc" in listed_mods
+
+    def test_modules_list_remote_json(self) -> None:
+        """Test listing available modules as JSON"""
+        mods_list = nf_core.modules.list.ModuleList(remote=True)
+        listed_mods: str = str(mods_list.list_components(print_json=True))
+        listed_mods = json.loads(listed_mods)
+        assert "fastqc" in listed_mods
+        assert "multiqc" in listed_mods
+
+    def test_modules_list_with_one_keyword(self):
+        """Test listing available modules with one keyword"""
+        mods_list = nf_core.modules.list.ModuleList(remote=True)
+        listed_mods = mods_list.list_components(keywords=["qc"])
+        console = Console(record=True)
+        console.print(listed_mods)
+        output = console.export_text()
+        assert "multiqc" in output
+
+    def test_modules_list_with_keywords(self):
+        """Test listing available modules with multiple keywords"""
+        mods_list = nf_core.modules.list.ModuleList(remote=True)
+        listed_mods = mods_list.list_components(keywords=["fastq", "qc"])
+        console = Console(record=True)
+        console.print(listed_mods)
+        output = console.export_text()
+        assert "fastqc" in output
+
+    def test_modules_list_with_unused_keyword(self):
+        """Test listing available modules with an unused keyword"""
+        mods_list = nf_core.modules.list.ModuleList(remote=True)
+        with self.assertLogs(level="INFO") as log:
+            listed_mods = mods_list.list_components(keywords=["you_will_never_find_me"])
+            self.assertIn("No available", log.output[0])
+        # expect empty list
+        assert listed_mods == ""
+
+    def test_modules_list_in_wrong_repo_fail(self):
+        """Test listing available modules in a non-pipeline repo"""
+        # modify repotype in .nf-core.yml
+        with open(Path(self.pipeline_dir, ".nf-core.yml")) as fh:
+            nf_core_yml = yaml.safe_load(fh)
+        nf_core_yml_orig = nf_core_yml.copy()
+        nf_core_yml["repository_type"] = "modules"
+        nf_core_yml["org_path"] = "nf-core"
+
+        with open(Path(self.pipeline_dir, ".nf-core.yml"), "w") as fh:
+            yaml.safe_dump(nf_core_yml, fh)
+        # expect error logged
+        with self.assertLogs(level="ERROR") as log:
+            mods_list = nf_core.modules.list.ModuleList(self.pipeline_dir, remote=False)
+            listed_mods = mods_list.list_components()
+            self.assertIn("must be run from a pipeline directory", log.output[0])
+        # expect empty list
+        assert listed_mods == ""
+        # restore .nf-core.yml
+        with open(Path(self.pipeline_dir, ".nf-core.yml"), "w") as fh:
+            yaml.safe_dump(nf_core_yml_orig, fh)
diff --git a/tests/modules/test_modules_json.py b/tests/modules/test_modules_json.py
new file mode 100644
index 000000000..0368c146c
--- /dev/null
+++ b/tests/modules/test_modules_json.py
@@ -0,0 +1,245 @@
+import copy
+import json
+import shutil
+from pathlib import Path
+
+from nf_core.components.components_utils import (
+    NF_CORE_MODULES_DEFAULT_BRANCH,
+    NF_CORE_MODULES_NAME,
+    NF_CORE_MODULES_REMOTE,
+)
+from nf_core.modules.modules_json import ModulesJson
+from nf_core.modules.modules_repo import ModulesRepo
+from nf_core.modules.patch import ModulePatch
+
+from ..test_modules import TestModules
+
+
+class TestModulesJson(TestModules):
+    def
test_get_modules_json(self): + """Checks that the get_modules_json function returns the correct result""" + mod_json_path = Path(self.pipeline_dir, "modules.json") + with open(mod_json_path) as fh: + try: + mod_json_sb = json.load(fh) + except json.JSONDecodeError as e: + raise UserWarning(f"Unable to load JSON file '{mod_json_path}' due to error {e}") + + mod_json_obj = ModulesJson(self.pipeline_dir) + mod_json = mod_json_obj.get_modules_json() + + # Check that the modules.json hasn't changed + assert mod_json == mod_json_sb + + def test_mod_json_update(self): + """Checks whether the update function works properly""" + mod_json_obj = ModulesJson(self.pipeline_dir) + # Update the modules.json file + mod_repo_obj = ModulesRepo() + mod_json_obj.update("modules", mod_repo_obj, "MODULE_NAME", "GIT_SHA", ["modules"], write_file=False) + mod_json = mod_json_obj.get_modules_json() + assert "MODULE_NAME" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"] + assert "git_sha" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["MODULE_NAME"] + assert "GIT_SHA" == mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["MODULE_NAME"]["git_sha"] + assert ( + NF_CORE_MODULES_DEFAULT_BRANCH + == mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["MODULE_NAME"]["branch"] + ) + assert ( + "modules" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["MODULE_NAME"]["installed_by"] + ) + + def test_mod_json_create(self): + """Test creating a modules.json file from scratch""" + mod_json_path = Path(self.pipeline_dir, "modules.json") + # Remove the existing modules.json file + mod_json_path.unlink() + + # Create the new modules.json file + # (There are no prompts as long as there are only nf-core modules) + ModulesJson(self.pipeline_dir).create() + + # Check that the file exists + assert (mod_json_path).exists() + + # Get the contents of the file + mod_json_obj = ModulesJson(self.pipeline_dir) + mod_json = mod_json_obj.get_modules_json() + + mods = ["fastqc", "multiqc"] + for mod in mods: + assert mod in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"] + assert "git_sha" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"][mod] + assert "branch" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"][mod] + + def _modify_main_nf(self, path): + """Modify a file to test patch creation""" + with open(path) as fh: + lines = fh.readlines() + # Modify $meta.id to $meta.single_end + lines[1] = ' tag "$meta.single_end"\n' + with open(path, "w") as fh: + fh.writelines(lines) + + def test_mod_json_create_with_patch(self): + """Test creating a modules.json file from scratch when there are patched modules""" + mod_json_path = Path(self.pipeline_dir, "modules.json") + + # Modify the module + module_path = Path(self.pipeline_dir, "modules", "nf-core", "fastqc") + self._modify_main_nf(module_path / "main.nf") + + # Try creating a patch file + patch_obj = ModulePatch(self.pipeline_dir, NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_DEFAULT_BRANCH) + patch_obj.patch("fastqc") + + # Remove the existing modules.json file + mod_json_path.unlink() + + # Create the new modules.json file + ModulesJson(self.pipeline_dir).create() + + # Check that the file exists + assert mod_json_path.is_file() + + # Get the contents of the file + mod_json_obj = ModulesJson(self.pipeline_dir) + mod_json = mod_json_obj.get_modules_json() + + # Check that fastqc is in the file + assert "fastqc" in 
mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"] + assert "git_sha" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["fastqc"] + assert "branch" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["fastqc"] + + # Check that fastqc/main.nf maintains the changes + with open(module_path / "main.nf") as fh: + lines = fh.readlines() + assert lines[1] == ' tag "$meta.single_end"\n' + + def test_mod_json_up_to_date(self): + """ + Checks if the modules.json file is up to date + when no changes have been made to the pipeline + """ + mod_json_obj = ModulesJson(self.pipeline_dir) + mod_json_before = mod_json_obj.get_modules_json() + mod_json_obj.check_up_to_date() + mod_json_after = mod_json_obj.get_modules_json() + + # Check that the modules.json hasn't changed + assert mod_json_before == mod_json_after + + def test_mod_json_up_to_date_module_removed(self): + """ + Reinstall a module that has an entry in the modules.json + but is missing in the pipeline + """ + # Remove the fastqc module + fastqc_path = Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "fastqc") + shutil.rmtree(fastqc_path) + + # Check that the modules.json file is up to date, and reinstall the module + mod_json_obj = ModulesJson(self.pipeline_dir) + mod_json_obj.check_up_to_date() + + # Check that the module has been reinstalled + files = ["main.nf", "meta.yml"] + assert fastqc_path.exists() + for f in files: + assert Path(fastqc_path, f).exists() + + def test_mod_json_up_to_date_reinstall_fails(self): + """ + Try reinstalling a module where the git_sha is invalid + """ + mod_json_obj = ModulesJson(self.pipeline_dir) + + # Update the fastqc module entry to an invalid git_sha + mod_json_obj.update("modules", ModulesRepo(), "fastqc", "INVALID_GIT_SHA", ["modules"], write_file=True) + + # Remove the fastqc module + fastqc_path = Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "fastqc") + shutil.rmtree(fastqc_path) + + # Check that the modules.json file is up to date, and remove the fastqc module entry + mod_json_obj.check_up_to_date() + mod_json = mod_json_obj.get_modules_json() + + # Check that the module has been removed from the modules.json + assert "fastqc" not in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"] + + def test_mod_json_repo_present(self): + """Tests the repo_present function""" + mod_json_obj = ModulesJson(self.pipeline_dir) + + assert mod_json_obj.repo_present(NF_CORE_MODULES_REMOTE) is True + assert mod_json_obj.repo_present("INVALID_REPO") is False + + def test_mod_json_module_present(self): + """Tests the module_present function""" + mod_json_obj = ModulesJson(self.pipeline_dir) + + assert mod_json_obj.module_present("fastqc", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME) is True + assert mod_json_obj.module_present("INVALID_MODULE", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME) is False + assert mod_json_obj.module_present("fastqc", "INVALID_REPO", "INVALID_DIR") is False + assert mod_json_obj.module_present("INVALID_MODULE", "INVALID_REPO", "INVALID_DIR") is False + + def test_mod_json_get_module_version(self): + """Test the get_module_version function""" + mod_json_obj = ModulesJson(self.pipeline_dir) + mod_json = mod_json_obj.get_modules_json() + assert ( + mod_json_obj.get_module_version("fastqc", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME) + == mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["fastqc"]["git_sha"] + ) + assert mod_json_obj.get_module_version("INVALID_MODULE", NF_CORE_MODULES_REMOTE, 
NF_CORE_MODULES_NAME) is None
+
+    def test_mod_json_dump(self):
+        """Tests the dump function"""
+        mod_json_obj = ModulesJson(self.pipeline_dir)
+        mod_json = mod_json_obj.get_modules_json()
+        # Remove the modules.json file
+        mod_json_path = Path(self.pipeline_dir, "modules.json")
+        mod_json_path.unlink()
+
+        # Check that the dump function creates the file
+        mod_json_obj.dump()
+        assert mod_json_path.exists()
+
+        # Check that the dump function writes the correct content
+        with open(mod_json_path) as f:
+            try:
+                mod_json_new = json.load(f)
+            except json.JSONDecodeError as e:
+                raise UserWarning(f"Unable to load JSON file '{mod_json_path}' due to error {e}")
+        assert mod_json == mod_json_new
+
+    def test_mod_json_with_empty_modules_value(self):
+        # Load modules.json and empty out the modules entry
+        mod_json_obj = ModulesJson(self.pipeline_dir)
+        mod_json_obj.create()  # Create modules.json explicitly to get correct module sha
+        mod_json_orig = mod_json_obj.get_modules_json()
+        mod_json = copy.deepcopy(mod_json_orig)
+        mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"] = {}
+        # save the altered modules.json and load it again to check if it will fix itself
+        mod_json_obj.modules_json = mod_json
+        mod_json_obj.dump()
+        mod_json_obj_new = ModulesJson(self.pipeline_dir)
+        mod_json_obj_new.check_up_to_date()
+        mod_json_new = mod_json_obj_new.get_modules_json()
+        assert mod_json_orig == mod_json_new
+
+    def test_mod_json_with_missing_modules_entry(self):
+        # Load modules.json and remove the modules entry
+        mod_json_obj = ModulesJson(self.pipeline_dir)
+        mod_json_obj.create()  # Create modules.json explicitly to get correct module sha
+        mod_json_orig = mod_json_obj.get_modules_json()
+        mod_json = copy.deepcopy(mod_json_orig)
+        mod_json["repos"][NF_CORE_MODULES_REMOTE].pop("modules")
+        # save the altered modules.json and load it again to check if it will fix itself
+        mod_json_obj.modules_json = mod_json
+        mod_json_obj.dump()
+        mod_json_obj_new = ModulesJson(self.pipeline_dir)
+        mod_json_obj_new.check_up_to_date()
+        mod_json_new = mod_json_obj_new.get_modules_json()
+        assert mod_json_orig == mod_json_new
diff --git a/tests/modules/test_patch.py b/tests/modules/test_patch.py
new file mode 100644
index 000000000..2f60cd4a2
--- /dev/null
+++ b/tests/modules/test_patch.py
@@ -0,0 +1,368 @@
+import os
+import tempfile
+from pathlib import Path
+from unittest import mock
+
+import pytest
+
+import nf_core.components.components_command
+import nf_core.components.patch
+import nf_core.modules.modules_json
+import nf_core.modules.patch
+import nf_core.modules.update
+
+from ..test_modules import TestModules
+from ..utils import GITLAB_URL
+
+"""
+Test the 'nf-core modules patch' command
+
+Uses a branch (patch-tester) in the GitLab nf-core/modules-test repo when
+testing that the update command works correctly with patch files
+"""
+
+# Commits on the patch-tester branch used below: the module is installed at
+# ORG_SHA (CORRECT_SHA in the lint tests); updating to SUCCEED_SHA applies the
+# patch cleanly, while updating to FAIL_SHA produces a patch conflict.
+ORG_SHA = "3dc7c14d29af40f1a0871a675364e437559d97a8"
+CORRECT_SHA = "63e780200600e340365b669f9c673b670764c569"
+SUCCEED_SHA = "0d0515c3f11266e1314e129bec3e308f804c8dc7"
+FAIL_SHA = "cb64a5c1ef85619b89ab99dec2e9097fe84e1dc8"
+BISMARK_ALIGN = "bismark/align"
+REPO_NAME = "nf-core-test"
+PATCH_BRANCH = "patch-tester"
+REPO_URL = "https://gitlab.com/nf-core/modules-test.git"
+
+
+def setup_patch(pipeline_dir, modify_module):
+    install_obj = nf_core.modules.install.ModuleInstall(
+        pipeline_dir, prompt=False, force=False, remote_url=GITLAB_URL, branch=PATCH_BRANCH, sha=ORG_SHA
+    )
+
+    # Install the module
+    install_obj.install(BISMARK_ALIGN)
+
+    if modify_module:
+        # Modify the module
+
module_path = Path(pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) + modify_main_nf(module_path / "main.nf") + + +def modify_main_nf(path): + """Modify a file to test patch creation""" + with open(path) as fh: + lines = fh.readlines() + # We want a patch file that looks something like: + # - tuple val(meta), path(reads) + # - path index + # + tuple val(meta), path(reads), path(index) + for line_index in range(len(lines)): + if lines[line_index] == " tuple val(meta), path(reads)\n": + lines[line_index] = " tuple val(meta), path(reads), path(index)\n" + elif lines[line_index] == " path index\n": + to_pop = line_index + lines.pop(to_pop) + with open(path, "w") as fh: + fh.writelines(lines) + + +class TestModulesCreate(TestModules): + def test_create_patch_no_change(self): + """Test creating a patch when there is no change to the module""" + setup_patch(self.pipeline_dir, False) + + # Try creating a patch file + patch_obj = nf_core.modules.patch.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) + with pytest.raises(UserWarning): + patch_obj.patch(BISMARK_ALIGN) + + module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) + + # Check that no patch file has been added to the directory + assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml"} + + # Check the 'modules.json' contains no patch file for the module + modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) + assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) is None + + def test_create_patch_change(self): + """Test creating a patch when there is a change to the module""" + setup_patch(self.pipeline_dir, True) + + # Try creating a patch file + patch_obj = nf_core.modules.patch.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) + patch_obj.patch(BISMARK_ALIGN) + + module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) + + patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" + # Check that a patch file with the correct name has been created + assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml", patch_fn} + + # Check the 'modules.json' contains a patch file for the module + modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) + assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + "modules", REPO_NAME, BISMARK_ALIGN, patch_fn + ) + + # Check that the correct lines are in the patch file + with open(module_path / patch_fn) as fh: + patch_lines = fh.readlines() + module_relpath = module_path.relative_to(self.pipeline_dir) + assert f"--- {module_relpath / 'main.nf'}\n" in patch_lines, module_relpath / "main.nf" + assert f"+++ {module_relpath / 'main.nf'}\n" in patch_lines + assert "- tuple val(meta), path(reads)\n" in patch_lines + assert "- path index\n" in patch_lines + assert "+ tuple val(meta), path(reads), path(index)\n" in patch_lines + + def test_create_patch_try_apply_successful(self): + """ + Test creating a patch file and applying it to a new version of the the files + """ + + setup_patch(self.pipeline_dir, True) + module_relpath = Path("modules", REPO_NAME, BISMARK_ALIGN) + module_path = Path(self.pipeline_dir, module_relpath) + + # Try creating a patch file + patch_obj = nf_core.modules.patch.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) + patch_obj.patch(BISMARK_ALIGN) + + patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" + # Check that a patch file with the correct name has been created + assert 
set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml", patch_fn} + + # Check the 'modules.json' contains a patch file for the module + modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) + assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + "modules", REPO_NAME, BISMARK_ALIGN, patch_fn + ) + + update_obj = nf_core.modules.update.ModuleUpdate( + self.pipeline_dir, sha=SUCCEED_SHA, remote_url=GITLAB_URL, branch=PATCH_BRANCH + ) + # Install the new files + install_dir = Path(tempfile.mkdtemp()) + update_obj.install_component_files(BISMARK_ALIGN, SUCCEED_SHA, update_obj.modules_repo, install_dir) + + # Try applying the patch + module_install_dir = install_dir / BISMARK_ALIGN + patch_relpath = module_relpath / patch_fn + assert ( + update_obj.try_apply_patch(BISMARK_ALIGN, REPO_NAME, patch_relpath, module_path, module_install_dir) is True + ) + + # Move the files from the temporary directory + update_obj.move_files_from_tmp_dir(BISMARK_ALIGN, install_dir, REPO_NAME, SUCCEED_SHA) + + # Check that a patch file with the correct name has been created + assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml", patch_fn} + + # Check the 'modules.json' contains a patch file for the module + modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) + assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + "modules", REPO_NAME, BISMARK_ALIGN, patch_fn + ) + + # Check that the correct lines are in the patch file + with open(module_path / patch_fn) as fh: + patch_lines = fh.readlines() + module_relpath = module_path.relative_to(self.pipeline_dir) + assert f"--- {module_relpath / 'main.nf'}\n" in patch_lines + assert f"+++ {module_relpath / 'main.nf'}\n" in patch_lines + assert "- tuple val(meta), path(reads)\n" in patch_lines + assert "- path index\n" in patch_lines + assert "+ tuple val(meta), path(reads), path(index)\n" in patch_lines + + # Check that 'main.nf' is updated correctly + with open(module_path / "main.nf") as fh: + main_nf_lines = fh.readlines() + # These lines should have been removed by the patch + assert " tuple val(meta), path(reads)\n" not in main_nf_lines + assert " path index\n" not in main_nf_lines + # This line should have been added + assert " tuple val(meta), path(reads), path(index)\n" in main_nf_lines + + def test_create_patch_try_apply_failed(self): + """ + Test creating a patch file and applying it to a new version of the the files + """ + + setup_patch(self.pipeline_dir, True) + module_relpath = Path("modules", REPO_NAME, BISMARK_ALIGN) + module_path = Path(self.pipeline_dir, module_relpath) + + # Try creating a patch file + patch_obj = nf_core.modules.patch.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) + patch_obj.patch(BISMARK_ALIGN) + + patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" + # Check that a patch file with the correct name has been created + assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml", patch_fn} + + # Check the 'modules.json' contains a patch file for the module + modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) + assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + "modules", REPO_NAME, BISMARK_ALIGN, patch_fn + ) + + update_obj = nf_core.modules.update.ModuleUpdate( + self.pipeline_dir, sha=FAIL_SHA, remote_url=GITLAB_URL, branch=PATCH_BRANCH + ) + # Install the new files + install_dir = 
Path(tempfile.mkdtemp())
+        update_obj.install_component_files(BISMARK_ALIGN, FAIL_SHA, update_obj.modules_repo, install_dir)
+
+        # Try applying the patch
+        module_install_dir = install_dir / BISMARK_ALIGN
+        patch_path = module_relpath / patch_fn
+        assert (
+            update_obj.try_apply_patch(BISMARK_ALIGN, REPO_NAME, patch_path, module_path, module_install_dir) is False
+        )
+
+    def test_create_patch_update_success(self):
+        """
+        Test creating a patch file and then updating the module
+
+        Should have the same effect as 'test_create_patch_try_apply_successful'
+        but uses the higher-level API
+        """
+
+        setup_patch(self.pipeline_dir, True)
+        module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN)
+
+        # Try creating a patch file
+        patch_obj = nf_core.modules.patch.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH)
+        patch_obj.patch(BISMARK_ALIGN)
+
+        patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff"
+        # Check that a patch file with the correct name has been created
+        assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml", patch_fn}
+
+        # Check the 'modules.json' contains a patch file for the module
+        modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir)
+        assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, GITLAB_URL, REPO_NAME) == Path(
+            "modules", REPO_NAME, BISMARK_ALIGN, patch_fn
+        )
+
+        # Update the module
+        update_obj = nf_core.modules.update.ModuleUpdate(
+            self.pipeline_dir,
+            sha=SUCCEED_SHA,
+            show_diff=False,
+            update_deps=True,
+            remote_url=GITLAB_URL,
+            branch=PATCH_BRANCH,
+        )
+        assert update_obj.update(BISMARK_ALIGN)
+
+        # Check that a patch file with the correct name has been created
+        assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml", patch_fn}
+
+        # Check the 'modules.json' contains a patch file for the module
+        modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir)
+        assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, GITLAB_URL, REPO_NAME) == Path(
+            "modules", REPO_NAME, BISMARK_ALIGN, patch_fn
+        ), modules_json_obj.get_patch_fn(BISMARK_ALIGN, GITLAB_URL, REPO_NAME)
+
+        # Check that the correct lines are in the patch file
+        with open(module_path / patch_fn) as fh:
+            patch_lines = fh.readlines()
+        module_relpath = module_path.relative_to(self.pipeline_dir)
+        assert f"--- {module_relpath / 'main.nf'}\n" in patch_lines
+        assert f"+++ {module_relpath / 'main.nf'}\n" in patch_lines
+        assert "-    tuple val(meta), path(reads)\n" in patch_lines
+        assert "-    path index\n" in patch_lines
+        assert "+    tuple val(meta), path(reads), path(index)\n" in patch_lines
+
+        # Check that 'main.nf' is updated correctly
+        with open(module_path / "main.nf") as fh:
+            main_nf_lines = fh.readlines()
+        # These lines should have been removed by the patch
+        assert "    tuple val(meta), path(reads)\n" not in main_nf_lines
+        assert "    path index\n" not in main_nf_lines
+        # This line should have been added
+        assert "    tuple val(meta), path(reads), path(index)\n" in main_nf_lines
+
+    def test_create_patch_update_fail(self):
+        """
+        Test creating a patch file and updating a module when there is a diff conflict
+        """
+
+        setup_patch(self.pipeline_dir, True)
+        module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN)
+
+        # Try creating a patch file
+        patch_obj = nf_core.modules.patch.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH)
+        patch_obj.patch(BISMARK_ALIGN)
+
+        patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff"
+        # Check that a patch file with the correct name has been created
+        assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml", patch_fn}
+
+        # Check the 'modules.json' contains a patch file for the module
+        modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir)
+        assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path(
+            "modules", REPO_NAME, BISMARK_ALIGN, patch_fn
+        )
+
+        # Save the file contents for downstream comparison
+        with open(module_path / patch_fn) as fh:
+            patch_contents = fh.read()
+
+        update_obj = nf_core.modules.update.ModuleUpdate(
+            self.pipeline_dir,
+            sha=FAIL_SHA,
+            show_diff=False,
+            update_deps=True,
+            remote_url=GITLAB_URL,
+            branch=PATCH_BRANCH,
+        )
+        update_obj.update(BISMARK_ALIGN)
+
+        # Check that the installed files have not been affected by the attempted patch
+        temp_dir = Path(tempfile.mkdtemp())
+        nf_core.components.components_command.ComponentCommand(
+            "modules", self.pipeline_dir, GITLAB_URL, PATCH_BRANCH
+        ).install_component_files(BISMARK_ALIGN, FAIL_SHA, update_obj.modules_repo, temp_dir)
+
+        temp_module_dir = temp_dir / BISMARK_ALIGN
+        for file in os.listdir(temp_module_dir):
+            assert file in os.listdir(module_path)
+            with open(module_path / file) as fh:
+                installed = fh.read()
+            with open(temp_module_dir / file) as fh:
+                shouldbe = fh.read()
+            assert installed == shouldbe
+
+        # Check that the patch file is unaffected
+        with open(module_path / patch_fn) as fh:
+            new_patch_contents = fh.read()
+        assert patch_contents == new_patch_contents
+
+    def test_remove_patch(self):
+        """Test removing a patch file from a patched module"""
+        setup_patch(self.pipeline_dir, True)
+
+        # Try creating a patch file
+        patch_obj = nf_core.modules.patch.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH)
+        patch_obj.patch(BISMARK_ALIGN)
+
+        module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN)
+
+        # Check that a patch file with the correct name has been created
+        patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff"
+        assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml", patch_fn}
+
+        # Check the 'modules.json' contains a patch file for the module
+        modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir)
+        assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path(
+            "modules", REPO_NAME, BISMARK_ALIGN, patch_fn
+        )
+
+        with mock.patch.object(nf_core.components.patch.questionary, "confirm") as mock_questionary:
+            mock_questionary.unsafe_ask.return_value = True
+            patch_obj.remove(BISMARK_ALIGN)
+        # Check that the diff file has been removed
+        assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml"}
+
+        # Check that the 'modules.json' entry has been removed
+        modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir)
+        assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) is None
diff --git a/tests/modules/test_remove.py b/tests/modules/test_remove.py
new file mode 100644
index 000000000..2caece7ce
--- /dev/null
+++ b/tests/modules/test_remove.py
@@ -0,0 +1,26 @@
+import os
+from pathlib import Path
+
+from ..test_modules import TestModules
+
+
+class TestModulesRemove(TestModules):
+    def test_modules_remove_trimgalore(self):
+        """Test removing TrimGalore!
diff --git a/tests/modules/test_remove.py b/tests/modules/test_remove.py
new file mode 100644
index 000000000..2caece7ce
--- /dev/null
+++ b/tests/modules/test_remove.py
@@ -0,0 +1,26 @@
+import os
+from pathlib import Path
+
+from ..test_modules import TestModules
+
+
+class TestModulesRemove(TestModules):
+    def test_modules_remove_trimgalore(self):
+        """Test removing TrimGalore! module after installing it"""
+        self.mods_install.install("trimgalore")
+        assert self.mods_install.directory is not None
+        module_path = Path(self.mods_install.directory, "modules", "nf-core", "modules", "trimgalore")
+        assert self.mods_remove.remove("trimgalore")
+        assert os.path.exists(module_path) is False
+
+    def test_modules_remove_trimgalore_uninstalled(self):
+        """Test removing TrimGalore! module without installing it"""
+        assert self.mods_remove.remove("trimgalore") is False
+
+    def test_modules_remove_multiqc_from_gitlab(self):
+        """Test removing multiqc module after installing it from an alternative source"""
+        self.mods_install_gitlab.install("multiqc")
+        assert self.mods_install_gitlab.directory is not None
+        module_path = Path(self.mods_install_gitlab.directory, "modules", "nf-core-test", "multiqc")
+        assert self.mods_remove_gitlab.remove("multiqc", force=True)
+        assert os.path.exists(module_path) is False
diff --git a/tests/modules/test_update.py b/tests/modules/test_update.py
new file mode 100644
index 000000000..6c8eacc66
--- /dev/null
+++ b/tests/modules/test_update.py
@@ -0,0 +1,439 @@
+import logging
+import shutil
+import tempfile
+from pathlib import Path
+from unittest import mock
+
+import questionary
+import yaml
+
+import nf_core.utils
+from nf_core.components.components_utils import NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE
+from nf_core.modules.install import ModuleInstall
+from nf_core.modules.modules_json import ModulesJson
+from nf_core.modules.patch import ModulePatch
+from nf_core.modules.update import ModuleUpdate
+
+from ..test_modules import TestModules
+from ..utils import (
+    GITLAB_BRANCH_TEST_BRANCH,
+    GITLAB_BRANCH_TEST_NEW_SHA,
+    GITLAB_BRANCH_TEST_OLD_SHA,
+    GITLAB_DEFAULT_BRANCH,
+    GITLAB_REPO,
+    GITLAB_URL,
+    OLD_TRIMGALORE_BRANCH,
+    OLD_TRIMGALORE_SHA,
+    cmp_component,
+)
+
+
+class TestModulesInstall(TestModules):
+    def test_install_and_update(self):
+        """Installs a module in the pipeline and updates it (no change)"""
+        self.mods_install.install("trimgalore")
+        update_obj = ModuleUpdate(self.pipeline_dir, show_diff=False)
+
+        # Copy the module files and check that they are unaffected by the update
+        tmpdir = Path(tempfile.TemporaryDirectory().name)
+        trimgalore_tmpdir = tmpdir / "trimgalore"
+        trimgalore_path = Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "trimgalore")
+        shutil.copytree(trimgalore_path, trimgalore_tmpdir)
+
+        assert update_obj.update("trimgalore") is True
+        assert cmp_component(trimgalore_tmpdir, trimgalore_path) is True
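Note that `cmp_component` is now imported from the shared `tests/utils.py`; its assumed implementation mirrors the local `cmp_module()` helper deleted further down in this diff:

```python
import filecmp
import os

# Assumed shape of the shared helper (sketch, not taken from this diff):
# compare two versions of the same component by their main files.
def cmp_component(dir1, dir2):
    files = ["main.nf", "meta.yml"]
    return all(filecmp.cmp(os.path.join(dir1, f), os.path.join(dir2, f), shallow=False) for f in files)
```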
+    def test_install_at_hash_and_update(self):
+        """Installs an old version of a module in the pipeline and updates it"""
+        assert self.mods_install_old.install("trimgalore")
+        update_obj = ModuleUpdate(
+            self.pipeline_dir, show_diff=False, update_deps=True, remote_url=GITLAB_URL, branch=OLD_TRIMGALORE_BRANCH
+        )
+
+        # Copy the module files and check that they are affected by the update
+        tmpdir = Path(tempfile.TemporaryDirectory().name)
+        trimgalore_tmpdir = tmpdir / "trimgalore"
+        trimgalore_path = Path(self.pipeline_dir, "modules", GITLAB_REPO, "trimgalore")
+        shutil.copytree(trimgalore_path, trimgalore_tmpdir)
+
+        assert update_obj.update("trimgalore") is True
+        assert cmp_component(trimgalore_tmpdir, trimgalore_path) is False
+
+        # Check that the modules.json is correctly updated
+        mod_json_obj = ModulesJson(self.pipeline_dir)
+        mod_json = mod_json_obj.get_modules_json()
+        # Get the up-to-date git_sha for the module from the ModulesRepo object
+        correct_git_sha = update_obj.modules_repo.get_latest_component_version("trimgalore", "modules")
+        current_git_sha = mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"]["git_sha"]
+        assert correct_git_sha == current_git_sha
+
+    # Mock questionary answer: confirm the module update
+    @mock.patch.object(questionary.Question, "unsafe_ask", return_value=True)
+    def test_install_at_hash_and_update_limit_output(self, mock_prompt):
+        """Installs an old version of a module in the pipeline and updates it with limited output reporting"""
+        self.caplog.set_level(logging.INFO)
+        assert self.mods_install_old.install("trimgalore")
+
+        update_obj = ModuleUpdate(
+            self.pipeline_dir,
+            show_diff=True,
+            update_deps=True,
+            remote_url=GITLAB_URL,
+            branch=OLD_TRIMGALORE_BRANCH,
+            limit_output=True,
+        )
+        assert update_obj.update("trimgalore")
+
+        # Check changes not shown for non-.nf files
+        assert "Changes in 'trimgalore/meta.yml' but not shown" in self.caplog.text
+        # Check changes shown for .nf files
+        assert "Changes in 'trimgalore/main.nf'" in self.caplog.text
+        for line in self.caplog.text.split("\n"):
+            if line.startswith("---"):
+                assert line.endswith("main.nf")
+
+    def test_install_at_hash_and_update_and_save_diff_to_file(self):
+        """Installs an old version of a module in the pipeline, updates it and saves the diff to a file"""
+        self.mods_install_old.install("trimgalore")
+        patch_path = Path(self.pipeline_dir, "trimgalore.patch")
+        update_obj = ModuleUpdate(
+            self.pipeline_dir,
+            save_diff_fn=patch_path,
+            sha=OLD_TRIMGALORE_SHA,
+            remote_url=GITLAB_URL,
+            branch=OLD_TRIMGALORE_BRANCH,
+        )
+
+        # Copy the module files and check that they are affected by the update
+        tmpdir = Path(tempfile.TemporaryDirectory().name)
+        trimgalore_tmpdir = tmpdir / "trimgalore"
+        trimgalore_path = Path(self.pipeline_dir, "modules", GITLAB_REPO, "trimgalore")
+        shutil.copytree(trimgalore_path, trimgalore_tmpdir)
+
+        assert update_obj.update("trimgalore") is True
+        assert cmp_component(trimgalore_tmpdir, trimgalore_path) is True
+
+        # TODO: Apply the patch to the module
+
+    def test_install_at_hash_and_update_and_save_diff_to_file_limit_output(self):
+        """Installs an old version of a module, updates it and saves the diff to a file with limited output"""
+        # Install old version of trimgalore
+        self.mods_install_old.install("trimgalore")
+        patch_path = Path(self.pipeline_dir, "trimgalore.patch")
+        # Update saving the differences to a patch file and with `limit_output`
+        update_obj = ModuleUpdate(
+            self.pipeline_dir,
+            save_diff_fn=patch_path,
+            remote_url=GITLAB_URL,
+            branch=OLD_TRIMGALORE_BRANCH,
+            limit_output=True,
+        )
+        assert update_obj.update("trimgalore")
+
+        # Check that the patch file was created
+        assert patch_path.exists(), f"Patch file was not created at {patch_path}"
+
+        # Read the contents of the patch file
+        with open(patch_path) as fh:
+            patch_content = fh.read()
+        # Check changes not shown for non-.nf files
+        assert "Changes in 'trimgalore/meta.yml' but not shown" in patch_content
+        # Check changes only shown for main.nf
+        # (iterate over lines, not characters, so startswith() can match)
+        for line in patch_content.splitlines():
+            if line.startswith("---"):
+                assert line.endswith("main.nf")
+        assert "Changes in 'trimgalore/main.nf'" in patch_content
+
+    def test_update_all(self):
+        """Updates all modules present in the pipeline"""
+        update_obj = ModuleUpdate(self.pipeline_dir, update_all=True, show_diff=False)
+        # Get the current modules.json
+        assert update_obj.update() is True
+
+        # We must reload the modules.json to get the updated version
+        mod_json_obj = ModulesJson(self.pipeline_dir)
+        mod_json =
mod_json_obj.get_modules_json() + # Loop through all modules and check that they are updated (according to the modules.json file) + for mod in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME]: + correct_git_sha = list(update_obj.modules_repo.get_component_git_log(mod, "modules", depth=1))[0]["git_sha"] + current_git_sha = mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"] + assert correct_git_sha == current_git_sha + + def test_update_with_config_fixed_version(self): + """Try updating when there are entries in the .nf-core.yml""" + # Install trimgalore at the latest version + assert self.mods_install_trimgalore.install("trimgalore") + + # Fix the trimgalore version in the .nf-core.yml to an old version + update_config = {GITLAB_URL: {GITLAB_REPO: {"trimgalore": OLD_TRIMGALORE_SHA}}} + config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) + setattr(tools_config, "update", update_config) + assert config_fn is not None and tools_config is not None # mypy + with open(Path(self.pipeline_dir, config_fn), "w") as f: + yaml.dump(tools_config.model_dump(), f) + + # Update all modules in the pipeline + update_obj = ModuleUpdate( + self.pipeline_dir, update_all=True, show_diff=False, remote_url=GITLAB_URL, branch=OLD_TRIMGALORE_BRANCH + ) + assert update_obj.update() is True + + # Check that the git sha for trimgalore is correctly downgraded + mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + assert "trimgalore" in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO] + assert "git_sha" in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"] + assert mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"]["git_sha"] == OLD_TRIMGALORE_SHA + + def test_update_with_config_dont_update(self): + """Try updating when module is to be ignored""" + # Install an old version of trimgalore + self.mods_install_old.install("trimgalore") + + # Set the trimgalore field to no update in the .nf-core.yml + update_config = {GITLAB_URL: {GITLAB_REPO: {"trimgalore": False}}} + config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) + setattr(tools_config, "update", update_config) + assert config_fn is not None and tools_config is not None # mypy + with open(Path(self.pipeline_dir, config_fn), "w") as f: + yaml.dump(tools_config.model_dump(), f) + + # Update all modules in the pipeline + update_obj = ModuleUpdate( + self.pipeline_dir, + update_all=True, + show_diff=False, + sha=OLD_TRIMGALORE_SHA, + remote_url=GITLAB_URL, + branch=OLD_TRIMGALORE_BRANCH, + ) + assert update_obj.update() is True + + # Check that the git sha for trimgalore is correctly downgraded + mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + assert "trimgalore" in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO] + assert "git_sha" in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"] + assert mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"]["git_sha"] == OLD_TRIMGALORE_SHA + + def test_update_with_config_fix_all(self): + """Fix the version of all nf-core modules""" + self.mods_install_trimgalore.install("trimgalore") + + # Fix the version of all nf-core modules in the .nf-core.yml to an old version + update_config = {GITLAB_URL: OLD_TRIMGALORE_SHA} + config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) + setattr(tools_config, "update", update_config) + assert config_fn is not None and tools_config is not None # mypy + with 
open(Path(self.pipeline_dir, config_fn), "w") as f: + yaml.dump(tools_config.model_dump(), f) + + # Update all modules in the pipeline + update_obj = ModuleUpdate( + self.pipeline_dir, update_all=True, show_diff=False, remote_url=GITLAB_URL, branch=OLD_TRIMGALORE_BRANCH + ) + assert update_obj.update() is True + + # Check that the git sha for trimgalore is correctly downgraded + mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + assert "git_sha" in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"] + assert mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"]["git_sha"] == OLD_TRIMGALORE_SHA + + def test_update_with_config_no_updates(self): + """Don't update any nf-core modules""" + assert self.mods_install_old.install("trimgalore") + old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + + # Fix the version of all nf-core modules in the .nf-core.yml to an old version + update_config = {GITLAB_URL: False} + config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) + setattr(tools_config, "update", update_config) + assert config_fn is not None and tools_config is not None # mypy + with open(Path(self.pipeline_dir, config_fn), "w") as f: + yaml.dump(tools_config.model_dump(), f) + + # Update all modules in the pipeline + update_obj = ModuleUpdate( + self.pipeline_dir, + update_all=True, + show_diff=False, + sha=OLD_TRIMGALORE_SHA, + remote_url=GITLAB_URL, + branch=OLD_TRIMGALORE_BRANCH, + ) + assert update_obj.update() is True + + # Check that the git sha for trimgalore is correctly downgraded and none of the modules has changed + mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + for module in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]: + assert "git_sha" in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO][module] + assert ( + mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO][module]["git_sha"] + == old_mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO][module]["git_sha"] + ) + + def test_update_different_branch_single_module(self): + """Try updating a module in a specific branch""" + install_obj = ModuleInstall( + self.pipeline_dir, + prompt=False, + force=False, + remote_url=GITLAB_URL, + branch=GITLAB_BRANCH_TEST_BRANCH, + sha=GITLAB_BRANCH_TEST_OLD_SHA, + ) + assert install_obj.install("fastp") + + update_obj = ModuleUpdate( + self.pipeline_dir, + update_deps=True, + remote_url=GITLAB_URL, + branch=GITLAB_BRANCH_TEST_BRANCH, + show_diff=False, + ) + update_obj.update("fastp") + + # Verify that the branch entry was updated correctly + modules_json = ModulesJson(self.pipeline_dir) + assert ( + modules_json.get_component_branch(self.component_type, "fastp", GITLAB_URL, GITLAB_REPO) + == GITLAB_BRANCH_TEST_BRANCH + ) + assert modules_json.get_module_version("fastp", GITLAB_URL, GITLAB_REPO) == GITLAB_BRANCH_TEST_NEW_SHA + + def test_update_different_branch_mixed_modules_main(self): + """Try updating all modules where MultiQC is installed from main branch""" + # Install fastp + assert self.mods_install_gitlab_old.install("fastp") + + # Install MultiQC from gitlab default branch + assert self.mods_install_gitlab.install("multiqc") + + # Try updating + update_obj = ModuleUpdate(self.pipeline_dir, update_all=True, show_diff=False) + assert update_obj.update() is True + + modules_json = ModulesJson(self.pipeline_dir) + # Verify that the branch entry was updated correctly + assert ( + modules_json.get_component_branch(self.component_type, "fastp", GITLAB_URL, GITLAB_REPO) + == 
GITLAB_BRANCH_TEST_BRANCH + ) + assert modules_json.get_module_version("fastp", GITLAB_URL, GITLAB_REPO) == GITLAB_BRANCH_TEST_NEW_SHA + # MultiQC is present in both branches but should've been updated using the 'main' branch + assert ( + modules_json.get_component_branch(self.component_type, "multiqc", GITLAB_URL, GITLAB_REPO) + == GITLAB_DEFAULT_BRANCH + ) + + def test_update_different_branch_mix_modules_branch_test(self): + """Try updating all modules where MultiQC is installed from branch-test branch""" + # Install multiqc from the branch-test branch + assert self.mods_install_gitlab_old.install( + "multiqc" + ) # Force as the same module is installed from github nf-core modules repo + modules_json = ModulesJson(self.pipeline_dir) + update_obj = ModuleUpdate( + self.pipeline_dir, + update_all=True, + show_diff=False, + remote_url=GITLAB_URL, + branch=GITLAB_BRANCH_TEST_BRANCH, + sha=GITLAB_BRANCH_TEST_NEW_SHA, + ) + assert update_obj.update() + + assert ( + modules_json.get_component_branch(self.component_type, "multiqc", GITLAB_URL, GITLAB_REPO) + == GITLAB_BRANCH_TEST_BRANCH + ) + assert modules_json.get_module_version("multiqc", GITLAB_URL, GITLAB_REPO) == GITLAB_BRANCH_TEST_NEW_SHA + + # Mock questionary answer: do not update module, only show diffs + @mock.patch.object(questionary.Question, "unsafe_ask", return_value=False) + def test_update_only_show_differences(self, mock_prompt): + """Try updating all modules showing differences. + Only show diffs, don't actually save any updated files. + Check that the sha in modules.json is not changed.""" + + # Update modules to a fixed old SHA + update_old = ModuleUpdate( + self.pipeline_dir, update_all=True, show_diff=False, sha="5e34754d42cd2d5d248ca8673c0a53cdf5624905" + ) + update_old.update() + + tmpdir = Path(tempfile.TemporaryDirectory().name) + shutil.copytree(Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME), tmpdir) + + update_obj = ModuleUpdate(self.pipeline_dir, update_all=True, show_diff=True) + assert ModuleUpdate(self.pipeline_dir, update_all=True, show_diff=True).update() + + mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + # Loop through all modules and check that they are NOT updated (according to the modules.json file) + # A module that can be updated but shouldn't is fastqc + # Module multiqc is already up to date so don't check + mod = "fastqc" + non_updated_git_sha = list(update_obj.modules_repo.get_component_git_log(mod, "modules", depth=1))[0]["git_sha"] + current_git_sha = mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"] + assert non_updated_git_sha != current_git_sha + assert cmp_component(Path(tmpdir, mod), Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, mod)) is True + + # Mock questionary answer: do not update module, only show diffs + @mock.patch.object(questionary.Question, "unsafe_ask", return_value=False) + def test_update_only_show_differences_when_patch(self, mock_prompt): + """Try updating all modules showing differences when there's a patched module. + Don't update some of them. 
+ Check that the sha in modules.json is not changed.""" + modules_json = ModulesJson(self.pipeline_dir) + update_obj = ModuleUpdate(self.pipeline_dir, update_all=True, show_diff=True) + + # Update modules to a fixed old SHA + update_old = ModuleUpdate( + self.pipeline_dir, update_all=True, show_diff=False, sha="5e34754d42cd2d5d248ca8673c0a53cdf5624905" + ) + assert update_old.update() + + # Modify fastqc module, it will have a patch which will be applied during update + # We modify fastqc because it's one of the modules that can be updated and there's another one before it (custom/dumpsoftwareversions) + module_path = Path(self.pipeline_dir, "modules", "nf-core", "fastqc") + main_path = Path(module_path, "main.nf") + with open(main_path) as fh: + lines = fh.readlines() + for line_index in range(len(lines)): + if lines[line_index] == " label 'process_medium'\n": + lines[line_index] = " label 'process_low'\n" + break + with open(main_path, "w") as fh: + fh.writelines(lines) + # Create a patch file + patch_obj = ModulePatch(self.pipeline_dir) + patch_obj.patch("fastqc") + # Check that a patch file with the correct name has been created + assert "fastqc.diff" in [f.name for f in module_path.glob("*.diff")] + + # Update all modules + assert update_obj.update() is True + + mod_json = modules_json.get_modules_json() + # Loop through all modules and check that they are NOT updated (according to the modules.json file) + # A module that can be updated but shouldn't is fastqc + # Module multiqc is already up to date so don't check + mod = "fastqc" + correct_git_sha = list(update_obj.modules_repo.get_component_git_log(mod, "modules", depth=1))[0]["git_sha"] + current_git_sha = mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"] + assert correct_git_sha != current_git_sha + + def test_update_module_with_extra_config_file(self): + """Try updating a module with a config file""" + # Install the module + assert self.mods_install.install("trimgalore") + # Add a nextflow_test.config file to the module + trimgalore_path = Path(self.pipeline_dir, "modules", "nf-core", "trimgalore") + Path(trimgalore_path, "nextflow_test.config").touch() + with open(Path(trimgalore_path, "nextflow_test.config"), "w") as fh: + fh.write("params.my_param = 'my_value'\n") + # Update the module + update_obj = ModuleUpdate(self.pipeline_dir, show_diff=False) + assert update_obj.update("trimgalore") + # Check that the nextflow_test.config file is still there + assert Path(trimgalore_path, "nextflow_test.config").exists() + with open(Path(trimgalore_path, "nextflow_test.config")) as fh: + assert "params.my_param = 'my_value'" in fh.read() diff --git a/tests/modules/update.py b/tests/modules/update.py deleted file mode 100644 index 81eb85716..000000000 --- a/tests/modules/update.py +++ /dev/null @@ -1,396 +0,0 @@ -import filecmp -import os -import shutil -import tempfile -from pathlib import Path -from unittest import mock - -import questionary -import yaml - -import nf_core.utils -from nf_core.modules.install import ModuleInstall -from nf_core.modules.modules_json import ModulesJson -from nf_core.modules.modules_repo import NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE -from nf_core.modules.patch import ModulePatch -from nf_core.modules.update import ModuleUpdate - -from ..utils import ( - GITLAB_BRANCH_TEST_BRANCH, - GITLAB_BRANCH_TEST_NEW_SHA, - GITLAB_BRANCH_TEST_OLD_SHA, - GITLAB_DEFAULT_BRANCH, - GITLAB_REPO, - GITLAB_URL, - OLD_TRIMGALORE_BRANCH, - OLD_TRIMGALORE_SHA, -) - - -def 
test_install_and_update(self): - """Installs a module in the pipeline and updates it (no change)""" - self.mods_install.install("trimgalore") - update_obj = ModuleUpdate(self.pipeline_dir, show_diff=False) - - # Copy the module files and check that they are unaffected by the update - tmpdir = tempfile.mkdtemp() - trimgalore_tmpdir = os.path.join(tmpdir, "trimgalore") - trimgalore_path = os.path.join(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "trimgalore") - shutil.copytree(trimgalore_path, trimgalore_tmpdir) - - assert update_obj.update("trimgalore") is True - assert cmp_module(trimgalore_tmpdir, trimgalore_path) is True - - -def test_install_at_hash_and_update(self): - """Installs an old version of a module in the pipeline and updates it""" - assert self.mods_install_old.install("trimgalore") - update_obj = ModuleUpdate( - self.pipeline_dir, show_diff=False, update_deps=True, remote_url=GITLAB_URL, branch=OLD_TRIMGALORE_BRANCH - ) - - # Copy the module files and check that they are affected by the update - tmpdir = tempfile.mkdtemp() - trimgalore_tmpdir = os.path.join(tmpdir, "trimgalore") - trimgalore_path = os.path.join(self.pipeline_dir, "modules", GITLAB_REPO, "trimgalore") - shutil.copytree(trimgalore_path, trimgalore_tmpdir) - - assert update_obj.update("trimgalore") is True - assert cmp_module(trimgalore_tmpdir, trimgalore_path) is False - - # Check that the modules.json is correctly updated - mod_json_obj = ModulesJson(self.pipeline_dir) - mod_json = mod_json_obj.get_modules_json() - # Get the up-to-date git_sha for the module from the ModulesRepo object - correct_git_sha = update_obj.modules_repo.get_latest_component_version("trimgalore", "modules") - current_git_sha = mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"]["git_sha"] - assert correct_git_sha == current_git_sha - - -def test_install_at_hash_and_update_and_save_diff_to_file(self): - """Installs an old version of a module in the pipeline and updates it""" - self.mods_install_old.install("trimgalore") - patch_path = os.path.join(self.pipeline_dir, "trimgalore.patch") - update_obj = ModuleUpdate( - self.pipeline_dir, - save_diff_fn=patch_path, - sha=OLD_TRIMGALORE_SHA, - remote_url=GITLAB_URL, - branch=OLD_TRIMGALORE_BRANCH, - ) - - # Copy the module files and check that they are affected by the update - tmpdir = tempfile.mkdtemp() - trimgalore_tmpdir = os.path.join(tmpdir, "trimgalore") - trimgalore_path = os.path.join(self.pipeline_dir, "modules", GITLAB_REPO, "trimgalore") - shutil.copytree(trimgalore_path, trimgalore_tmpdir) - - assert update_obj.update("trimgalore") is True - assert cmp_module(trimgalore_tmpdir, trimgalore_path) is True - - # TODO: Apply the patch to the module - - -def test_update_all(self): - """Updates all modules present in the pipeline""" - update_obj = ModuleUpdate(self.pipeline_dir, update_all=True, show_diff=False) - # Get the current modules.json - assert update_obj.update() is True - - # We must reload the modules.json to get the updated version - mod_json_obj = ModulesJson(self.pipeline_dir) - mod_json = mod_json_obj.get_modules_json() - # Loop through all modules and check that they are updated (according to the modules.json file) - for mod in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME]: - correct_git_sha = list(update_obj.modules_repo.get_component_git_log(mod, "modules", depth=1))[0]["git_sha"] - current_git_sha = mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"] - assert correct_git_sha == 
current_git_sha - - -def test_update_with_config_fixed_version(self): - """Try updating when there are entries in the .nf-core.yml""" - # Install trimgalore at the latest version - assert self.mods_install_trimgalore.install("trimgalore") - - # Fix the trimgalore version in the .nf-core.yml to an old version - update_config = {GITLAB_URL: {GITLAB_REPO: {"trimgalore": OLD_TRIMGALORE_SHA}}} - config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - tools_config["update"] = update_config - with open(os.path.join(self.pipeline_dir, config_fn), "w") as f: - yaml.dump(tools_config, f) - - # Update all modules in the pipeline - update_obj = ModuleUpdate( - self.pipeline_dir, update_all=True, show_diff=False, remote_url=GITLAB_URL, branch=OLD_TRIMGALORE_BRANCH - ) - assert update_obj.update() is True - - # Check that the git sha for trimgalore is correctly downgraded - mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - assert "trimgalore" in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO] - assert "git_sha" in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"] - assert mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"]["git_sha"] == OLD_TRIMGALORE_SHA - - -def test_update_with_config_dont_update(self): - """Try updating when module is to be ignored""" - # Install an old version of trimgalore - self.mods_install_old.install("trimgalore") - - # Set the trimgalore field to no update in the .nf-core.yml - update_config = {GITLAB_URL: {GITLAB_REPO: {"trimgalore": False}}} - config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - tools_config["update"] = update_config - with open(os.path.join(self.pipeline_dir, config_fn), "w") as f: - yaml.dump(tools_config, f) - - # Update all modules in the pipeline - update_obj = ModuleUpdate( - self.pipeline_dir, - update_all=True, - show_diff=False, - sha=OLD_TRIMGALORE_SHA, - remote_url=GITLAB_URL, - branch=OLD_TRIMGALORE_BRANCH, - ) - assert update_obj.update() is True - - # Check that the git sha for trimgalore is correctly downgraded - mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - assert "trimgalore" in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO] - assert "git_sha" in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"] - assert mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"]["git_sha"] == OLD_TRIMGALORE_SHA - - -def test_update_with_config_fix_all(self): - """Fix the version of all nf-core modules""" - self.mods_install_trimgalore.install("trimgalore") - - # Fix the version of all nf-core modules in the .nf-core.yml to an old version - update_config = {GITLAB_URL: OLD_TRIMGALORE_SHA} - config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - tools_config["update"] = update_config - with open(os.path.join(self.pipeline_dir, config_fn), "w") as f: - yaml.dump(tools_config, f) - - # Update all modules in the pipeline - update_obj = ModuleUpdate( - self.pipeline_dir, update_all=True, show_diff=False, remote_url=GITLAB_URL, branch=OLD_TRIMGALORE_BRANCH - ) - assert update_obj.update() is True - - # Check that the git sha for trimgalore is correctly downgraded - mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - assert "git_sha" in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"] - assert mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"]["git_sha"] == OLD_TRIMGALORE_SHA - - -def test_update_with_config_no_updates(self): - """Don't update 
any nf-core modules""" - assert self.mods_install_old.install("trimgalore") - old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - - # Fix the version of all nf-core modules in the .nf-core.yml to an old version - update_config = {GITLAB_URL: False} - config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - tools_config["update"] = update_config - with open(os.path.join(self.pipeline_dir, config_fn), "w") as f: - yaml.dump(tools_config, f) - - # Update all modules in the pipeline - update_obj = ModuleUpdate( - self.pipeline_dir, - update_all=True, - show_diff=False, - sha=OLD_TRIMGALORE_SHA, - remote_url=GITLAB_URL, - branch=OLD_TRIMGALORE_BRANCH, - ) - assert update_obj.update() is True - - # Check that the git sha for trimgalore is correctly downgraded and none of the modules has changed - mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - for module in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]: - assert "git_sha" in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO][module] - assert ( - mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO][module]["git_sha"] - == old_mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO][module]["git_sha"] - ) - - -def test_update_different_branch_single_module(self): - """Try updating a module in a specific branch""" - install_obj = ModuleInstall( - self.pipeline_dir, - prompt=False, - force=False, - remote_url=GITLAB_URL, - branch=GITLAB_BRANCH_TEST_BRANCH, - sha=GITLAB_BRANCH_TEST_OLD_SHA, - ) - assert install_obj.install("fastp") - - update_obj = ModuleUpdate( - self.pipeline_dir, update_deps=True, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH, show_diff=False - ) - update_obj.update("fastp") - - # Verify that the branch entry was updated correctly - modules_json = ModulesJson(self.pipeline_dir) - assert ( - modules_json.get_component_branch(self.component_type, "fastp", GITLAB_URL, GITLAB_REPO) - == GITLAB_BRANCH_TEST_BRANCH - ) - assert modules_json.get_module_version("fastp", GITLAB_URL, GITLAB_REPO) == GITLAB_BRANCH_TEST_NEW_SHA - - -def test_update_different_branch_mixed_modules_main(self): - """Try updating all modules where MultiQC is installed from main branch""" - # Install fastp - assert self.mods_install_gitlab_old.install("fastp") - - # Install MultiQC from gitlab default branch - assert self.mods_install_gitlab.install("multiqc") - - # Try updating - update_obj = ModuleUpdate(self.pipeline_dir, update_all=True, show_diff=False) - assert update_obj.update() is True - - modules_json = ModulesJson(self.pipeline_dir) - # Verify that the branch entry was updated correctly - assert ( - modules_json.get_component_branch(self.component_type, "fastp", GITLAB_URL, GITLAB_REPO) - == GITLAB_BRANCH_TEST_BRANCH - ) - assert modules_json.get_module_version("fastp", GITLAB_URL, GITLAB_REPO) == GITLAB_BRANCH_TEST_NEW_SHA - # MultiQC is present in both branches but should've been updated using the 'main' branch - assert ( - modules_json.get_component_branch(self.component_type, "multiqc", GITLAB_URL, GITLAB_REPO) - == GITLAB_DEFAULT_BRANCH - ) - - -def test_update_different_branch_mix_modules_branch_test(self): - """Try updating all modules where MultiQC is installed from branch-test branch""" - # Install multiqc from the branch-test branch - assert self.mods_install_gitlab_old.install( - "multiqc" - ) # Force as the same module is installed from github nf-core modules repo - modules_json = ModulesJson(self.pipeline_dir) - update_obj = ModuleUpdate( - self.pipeline_dir, - 
update_all=True, - show_diff=False, - remote_url=GITLAB_URL, - branch=GITLAB_BRANCH_TEST_BRANCH, - sha=GITLAB_BRANCH_TEST_NEW_SHA, - ) - assert update_obj.update() - - assert ( - modules_json.get_component_branch(self.component_type, "multiqc", GITLAB_URL, GITLAB_REPO) - == GITLAB_BRANCH_TEST_BRANCH - ) - assert modules_json.get_module_version("multiqc", GITLAB_URL, GITLAB_REPO) == GITLAB_BRANCH_TEST_NEW_SHA - - -# Mock questionary answer: do not update module, only show diffs -@mock.patch.object(questionary.Question, "unsafe_ask", return_value=False) -def test_update_only_show_differences(self, mock_prompt): - """Try updating all modules showing differences. - Don't update some of them. - Check that the sha in modules.json is not changed.""" - modules_json = ModulesJson(self.pipeline_dir) - update_obj = ModuleUpdate(self.pipeline_dir, update_all=True, show_diff=True) - - # Update modules to a fixed old SHA - update_old = ModuleUpdate( - self.pipeline_dir, update_all=True, show_diff=False, sha="5e34754d42cd2d5d248ca8673c0a53cdf5624905" - ) - update_old.update() - - tmpdir = tempfile.mkdtemp() - shutil.rmtree(tmpdir) - shutil.copytree(Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME), tmpdir) - - assert update_obj.update() is True - - mod_json = modules_json.get_modules_json() - # Loop through all modules and check that they are NOT updated (according to the modules.json file) - # A module that can be updated but shouldn't is fastqc - # Module multiqc is already up to date so don't check - mod = "fastqc" - correct_git_sha = list(update_obj.modules_repo.get_component_git_log(mod, "modules", depth=1))[0]["git_sha"] - current_git_sha = mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"] - assert correct_git_sha != current_git_sha - assert cmp_module(Path(tmpdir, mod), Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, mod)) is True - - -# Mock questionary answer: do not update module, only show diffs -@mock.patch.object(questionary.Question, "unsafe_ask", return_value=False) -def test_update_only_show_differences_when_patch(self, mock_prompt): - """Try updating all modules showing differences when there's a patched module. - Don't update some of them. 
-    Check that the sha in modules.json is not changed."""
-    modules_json = ModulesJson(self.pipeline_dir)
-    update_obj = ModuleUpdate(self.pipeline_dir, update_all=True, show_diff=True)
-
-    # Update modules to a fixed old SHA
-    update_old = ModuleUpdate(
-        self.pipeline_dir, update_all=True, show_diff=False, sha="5e34754d42cd2d5d248ca8673c0a53cdf5624905"
-    )
-    update_old.update()
-
-    # Modify fastqc module, it will have a patch which will be applied during update
-    # We modify fastqc because it's one of the modules that can be updated and there's another one before it (custom/dumpsoftwareversions)
-    module_path = Path(self.pipeline_dir, "modules", "nf-core", "fastqc")
-    main_path = Path(module_path, "main.nf")
-    with open(main_path) as fh:
-        lines = fh.readlines()
-    for line_index in range(len(lines)):
-        if lines[line_index] == "    label 'process_medium'\n":
-            lines[line_index] = "    label 'process_low'\n"
-            break
-    with open(main_path, "w") as fh:
-        fh.writelines(lines)
-    # Create a patch file
-    patch_obj = ModulePatch(self.pipeline_dir)
-    patch_obj.patch("fastqc")
-    # Check that a patch file with the correct name has been created
-    assert "fastqc.diff" in set(os.listdir(module_path))
-
-    # Update all modules
-    assert update_obj.update() is True
-
-    mod_json = modules_json.get_modules_json()
-    # Loop through all modules and check that they are NOT updated (according to the modules.json file)
-    # A module that can be updated but shouldn't is fastqc
-    # Module multiqc is already up to date so don't check
-    mod = "fastqc"
-    correct_git_sha = list(update_obj.modules_repo.get_component_git_log(mod, "modules", depth=1))[0]["git_sha"]
-    current_git_sha = mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"]
-    assert correct_git_sha != current_git_sha
-
-
-def cmp_module(dir1, dir2):
-    """Compare two versions of the same module"""
-    files = ["main.nf", "meta.yml"]
-    return all(filecmp.cmp(os.path.join(dir1, f), os.path.join(dir2, f), shallow=False) for f in files)
-
-
-def test_update_module_with_extra_config_file(self):
-    """Try updating a module with a config file"""
-    # Install the module
-    assert self.mods_install.install("trimgalore")
-    # Add a nextflow_test.config file to the module
-    trimgalore_path = Path(self.pipeline_dir, "modules", "nf-core", "trimgalore")
-    Path(trimgalore_path, "nextflow_test.config").touch()
-    with open(Path(trimgalore_path, "nextflow_test.config"), "w") as fh:
-        fh.write("params.my_param = 'my_value'\n")
-    # Update the module
-    update_obj = ModuleUpdate(self.pipeline_dir, show_diff=False)
-    assert update_obj.update("trimgalore")
-    # Check that the nextflow_test.config file is still there
-    assert Path(trimgalore_path, "nextflow_test.config").exists()
-    with open(Path(trimgalore_path, "nextflow_test.config")) as fh:
-        assert "params.my_param = 'my_value'" in fh.read()
diff --git a/tests/lint/__init__.py b/tests/pipelines/__init__.py
similarity index 100%
rename from tests/lint/__init__.py
rename to tests/pipelines/__init__.py
diff --git a/tests/pipelines/__snapshots__/test_create_app/test_basic_details_custom.svg b/tests/pipelines/__snapshots__/test_create_app/test_basic_details_custom.svg
new file mode 100644
index 000000000..5c4a15831
--- /dev/null
+++ b/tests/pipelines/__snapshots__/test_create_app/test_basic_details_custom.svg
@@ -0,0 +1,271 @@
[271-line SVG snapshot of the "nf-core create" TUI, "Basic details" screen (custom organisation): GitHub organisation / Workflow name, Description and Author(s) inputs, Back/Next buttons; SVG markup omitted]
diff --git a/tests/pipelines/__snapshots__/test_create_app/test_basic_details_nfcore.svg b/tests/pipelines/__snapshots__/test_create_app/test_basic_details_nfcore.svg
new file mode 100644
index 000000000..4445dc68a
--- /dev/null
+++ b/tests/pipelines/__snapshots__/test_create_app/test_basic_details_nfcore.svg
@@ -0,0 +1,274 @@
[274-line SVG snapshot of the "Basic details" screen with the nf-core organisation pre-filled; SVG markup omitted]
diff --git a/tests/pipelines/__snapshots__/test_create_app/test_choose_type.svg b/tests/pipelines/__snapshots__/test_create_app/test_choose_type.svg
new file mode 100644
index 000000000..f32d5271e
--- /dev/null
+++ b/tests/pipelines/__snapshots__/test_create_app/test_choose_type.svg
@@ -0,0 +1,269 @@
[269-line SVG snapshot of the "Choose pipeline type" screen: nf-core vs Custom buttons plus a "What's the difference?" explainer; SVG markup omitted]
diff --git a/tests/pipelines/__snapshots__/test_create_app/test_customisation_help.svg b/tests/pipelines/__snapshots__/test_create_app/test_customisation_help.svg
new file mode 100644
index 000000000..8969694cb
--- /dev/null
+++ b/tests/pipelines/__snapshots__/test_create_app/test_customisation_help.svg
@@ -0,0 +1,275 @@
[275-line SVG snapshot of the "Template features" screen with the "Use reference genomes" help text expanded; SVG markup omitted]
diff --git a/tests/pipelines/__snapshots__/test_create_app/test_final_details.svg b/tests/pipelines/__snapshots__/test_create_app/test_final_details.svg
new file mode 100644
index 000000000..ddd0ff57e
--- /dev/null
+++ b/tests/pipelines/__snapshots__/test_create_app/test_final_details.svg
@@ -0,0 +1,269 @@
[269-line SVG snapshot of the "Final details" screen: first version (1.0.0dev) and output directory inputs, Back/Finish buttons; SVG markup omitted]
diff --git a/tests/pipelines/__snapshots__/test_create_app/test_github_details.svg b/tests/pipelines/__snapshots__/test_create_app/test_github_details.svg
new file mode 100644
index 000000000..3013b9961
--- /dev/null
+++ b/tests/pipelines/__snapshots__/test_create_app/test_github_details.svg
@@ -0,0 +1,276 @@
[276-line SVG snapshot of the "Create GitHub repository" screen: username, personal access token, organisation and repo name inputs, private-repo toggle; SVG markup omitted]
diff --git a/tests/pipelines/__snapshots__/test_create_app/test_github_exit_message.svg b/tests/pipelines/__snapshots__/test_create_app/test_github_exit_message.svg
new file mode 100644
index 000000000..3612a062c
--- /dev/null
+++ b/tests/pipelines/__snapshots__/test_create_app/test_github_exit_message.svg
@@ -0,0 +1,272 @@
[272-line SVG snapshot of the "HowTo create a GitHub repository" exit message: manual steps using git remote add and git push --all origin; SVG markup omitted]
diff --git a/tests/pipelines/__snapshots__/test_create_app/test_github_question.svg b/tests/pipelines/__snapshots__/test_create_app/test_github_question.svg
new file mode 100644
index 000000000..a0ca3d70d
--- /dev/null
+++ b/tests/pipelines/__snapshots__/test_create_app/test_github_question.svg
@@ -0,0 +1,265 @@
[265-line SVG snapshot of the "Create GitHub repository?" question screen: "Create GitHub repo" and "Finish without creating a repo" buttons; SVG markup omitted]
diff --git a/tests/pipelines/__snapshots__/test_create_app/test_type_custom.svg b/tests/pipelines/__snapshots__/test_create_app/test_type_custom.svg
new file mode 100644
index 000000000..7ff071efa
--- /dev/null
+++ b/tests/pipelines/__snapshots__/test_create_app/test_type_custom.svg
@@ -0,0 +1,273 @@
[273-line SVG snapshot of the "Template features" screen for custom pipelines: GitHub repository, CI tests, reference genomes, badges, configuration profiles and code-linter toggles; SVG markup omitted]
diff --git a/tests/pipelines/__snapshots__/test_create_app/test_type_nfcore.svg b/tests/pipelines/__snapshots__/test_create_app/test_type_nfcore.svg
new file mode 100644
index 000000000..48b9b91e9
--- /dev/null
+++ b/tests/pipelines/__snapshots__/test_create_app/test_type_nfcore.svg
@@ -0,0 +1,272 @@
[272-line SVG snapshot of the "Template features" screen for nf-core pipelines: reference genomes, MultiQC, FastQC and nf-schema toggles; SVG markup omitted]
diff --git a/tests/pipelines/__snapshots__/test_create_app/test_type_nfcore_validation.svg b/tests/pipelines/__snapshots__/test_create_app/test_type_nfcore_validation.svg
new file mode 100644
index 000000000..7e55b2b0f
--- /dev/null
+++ b/tests/pipelines/__snapshots__/test_create_app/test_type_nfcore_validation.svg
@@ -0,0 +1,273 @@
[273-line SVG snapshot of the "Basic details" screen showing validation errors: "Must be lowercase without punctuation." and "Cannot be left empty."; SVG markup omitted]
diff --git a/tests/pipelines/__snapshots__/test_create_app/test_welcome.svg b/tests/pipelines/__snapshots__/test_create_app/test_welcome.svg
new file mode 100644
index 000000000..2670307c2
--- /dev/null
+++ b/tests/pipelines/__snapshots__/test_create_app/test_welcome.svg
@@ -0,0 +1,271 @@
[271-line SVG snapshot of the welcome screen: nf-core ASCII logo, "Welcome to the nf-core pipeline creation wizard" text and a "Let's go!" button; SVG markup omitted]
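These snapshot files are generated and compared by the pytest-textual-snapshot plugin. A minimal sketch of how such a test is typically written; the `snap_compare` fixture is the plugin's real entry point, but the app path and terminal size here are assumptions:

```python
# Sketch of a Textual snapshot test, assuming pytest-textual-snapshot is installed.
def test_welcome(snap_compare):
    # Renders the app, captures the screen as SVG, and compares it against the
    # stored file under __snapshots__/ (refresh with pytest --snapshot-update).
    assert snap_compare("nf_core/pipelines/create/__init__.py", terminal_size=(100, 50))
```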
diff --git a/docs/api/_src/_static/js/custom.js b/tests/pipelines/lint/__init__.py
similarity index 100%
rename from docs/api/_src/_static/js/custom.js
rename to tests/pipelines/lint/__init__.py
diff --git a/tests/pipelines/lint/test_actions_awsfulltest.py b/tests/pipelines/lint/test_actions_awsfulltest.py
new file mode 100644
index 000000000..5c070fd5c
--- /dev/null
+++ b/tests/pipelines/lint/test_actions_awsfulltest.py
@@ -0,0 +1,61 @@
+from pathlib import Path
+
+import yaml
+
+import nf_core.pipelines.lint
+
+from ..test_lint import TestLint
+
+
+class TestLintActionsAwsfulltest(TestLint):
+    def test_actions_awsfulltest_warn(self):
+        """Lint test: actions_awsfulltest - WARN"""
+        self.lint_obj._load()
+        results = self.lint_obj.actions_awsfulltest()
+        assert "`.github/workflows/awsfulltest.yml` is triggered correctly" in results["passed"]
+        assert len(results.get("failed", [])) == 0
+        assert len(results.get("ignored", [])) == 0
+
+    def test_actions_awsfulltest_pass(self):
+        """Lint test: actions_awsfulltest - PASS"""
+
+        # Edit .github/workflows/awsfulltest.yml to use -profile test_full
+        new_pipeline = self._make_pipeline_copy()
+        with open(Path(new_pipeline, ".github", "workflows", "awsfulltest.yml")) as fh:
+            awsfulltest_yml = fh.read()
+        awsfulltest_yml = awsfulltest_yml.replace("-profile test ", "-profile test_full ")
+        with open(Path(new_pipeline, ".github", "workflows", "awsfulltest.yml"), "w") as fh:
+            fh.write(awsfulltest_yml)
+
+        # Make lint object
+        lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline)
+        lint_obj._load()
+
+        results = lint_obj.actions_awsfulltest()
+        assert results["passed"] == [
+            "`.github/workflows/awsfulltest.yml` is triggered correctly",
+            "`.github/workflows/awsfulltest.yml` does not use `-profile test`",
+        ]
+        assert len(results.get("warned", [])) == 0
+        assert len(results.get("failed", [])) == 0
+        assert len(results.get("ignored", [])) == 0
+
+    def test_actions_awsfulltest_fail(self):
+        """Lint test: actions_awsfulltest - FAIL"""
+
+        # Remove the pull_request_review trigger from .github/workflows/awsfulltest.yml
+        new_pipeline = self._make_pipeline_copy()
+        with open(Path(new_pipeline, ".github", "workflows", "awsfulltest.yml")) as fh:
+            awsfulltest_yml = yaml.safe_load(fh)
+        # yaml.safe_load parses the unquoted "on:" trigger key as boolean True
+        del awsfulltest_yml[True]["pull_request_review"]
+        with open(Path(new_pipeline, ".github", "workflows", "awsfulltest.yml"), "w") as fh:
+            yaml.dump(awsfulltest_yml, fh)
+
+        # Make lint object
+        lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline)
+        lint_obj._load()
+
+        results = lint_obj.actions_awsfulltest()
+        assert results["failed"] == ["`.github/workflows/awsfulltest.yml` is not triggered correctly"]
+        assert "`.github/workflows/awsfulltest.yml` does not use `-profile test`" in results["passed"]
+        assert len(results.get("ignored", [])) == 0
diff --git a/tests/pipelines/lint/test_actions_awstest.py b/tests/pipelines/lint/test_actions_awstest.py
new file mode 100644
index 000000000..51b55cb86
--- /dev/null
+++ b/tests/pipelines/lint/test_actions_awstest.py
@@ -0,0 +1,39 @@
+from pathlib import Path
+
+import yaml
+
+import nf_core.pipelines.lint
+
+from ..test_lint import TestLint
+
+
+class TestLintActionsAws(TestLint):
+    def test_actions_awstest_pass(self):
+        """Lint test: actions_awstest - PASS"""
+        self.lint_obj._load()
+        results = self.lint_obj.actions_awstest()
+        assert results["passed"] == ["'.github/workflows/awstest.yml' is triggered correctly"]
+        assert len(results.get("warned", [])) == 0
+        assert len(results.get("failed", [])) == 0
+        assert len(results.get("ignored", [])) == 0
+
+    def test_actions_awstest_fail(self):
+        """Lint test: actions_awstest - FAIL"""
+
+        # Edit .github/workflows/awstest.yml to add a push trigger
+        new_pipeline = self._make_pipeline_copy()
+        with open(Path(new_pipeline, ".github", "workflows", "awstest.yml")) as fh:
+            awstest_yml = yaml.safe_load(fh)
+        awstest_yml[True]["push"] = ["master"]
+        with open(Path(new_pipeline, ".github", "workflows", "awstest.yml"), "w") as fh:
+            yaml.dump(awstest_yml, fh)
+
+        # Make lint object
+        lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline)
+        lint_obj._load()
+
+        results = lint_obj.actions_awstest()
+        assert results["failed"] == ["'.github/workflows/awstest.yml' is not triggered correctly"]
+        assert len(results.get("warned", [])) == 0
+        assert len(results.get("passed", [])) == 0
+        assert len(results.get("ignored", [])) == 0
diff --git a/tests/pipelines/lint/test_actions_ci.py b/tests/pipelines/lint/test_actions_ci.py
new file mode 100644
index 000000000..7319ce4b0
--- /dev/null
+++ b/tests/pipelines/lint/test_actions_ci.py
@@ -0,0 +1,50 @@
+from pathlib import Path
+
+import yaml
+
+import nf_core.pipelines.lint
+
+from ..test_lint import TestLint
+
+
+class TestLintActionsCi(TestLint):
+    def test_actions_ci_pass(self):
+        """Lint test: actions_ci - PASS"""
+        self.lint_obj._load()
+        results = self.lint_obj.actions_ci()
+        assert results["passed"] == [
+            "'.github/workflows/ci.yml' is triggered on expected events",
+            "'.github/workflows/ci.yml' checks minimum NF version",
+        ]
+        assert len(results.get("warned", [])) == 0
+        assert len(results.get("failed", [])) == 0
+        assert len(results.get("ignored", [])) == 0
+
+    def test_actions_ci_fail_wrong_nf(self):
+        """Lint test: actions_ci - FAIL - wrong minimum version of Nextflow tested"""
+        self.lint_obj._load()
+        self.lint_obj.minNextflowVersion = "1.2.3"
+        results = self.lint_obj.actions_ci()
+        assert results["failed"] == ["Minimum pipeline NF version '1.2.3' is not tested in '.github/workflows/ci.yml'"]
+
+    def test_actions_ci_fail_wrong_trigger(self):
+        """Lint test: actions_ci - FAIL - workflow triggered incorrectly, NF ver not checked at all"""
+
+        # Edit .github/workflows/ci.yml to mess stuff up!
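+        # yaml.safe_load parses the workflow's unquoted "on:" trigger key as boolean True, hence ci_yml[True] below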
+        new_pipeline = self._make_pipeline_copy()
+        with open(Path(new_pipeline, ".github", "workflows", "ci.yml")) as fh:
+            ci_yml = yaml.safe_load(fh)
+        ci_yml[True]["push"] = ["dev", "patch"]
+        ci_yml["jobs"]["test"]["strategy"]["matrix"] = {"nxf_versionnn": ["foo", ""]}
+        with open(Path(new_pipeline, ".github", "workflows", "ci.yml"), "w") as fh:
+            yaml.dump(ci_yml, fh)
+
+        # Make lint object
+        lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline)
+        lint_obj._load()
+
+        results = lint_obj.actions_ci()
+        assert results["failed"] == [
+            "'.github/workflows/ci.yml' is not triggered on expected events",
+            "'.github/workflows/ci.yml' does not check minimum NF version",
+        ]
diff --git a/tests/pipelines/lint/test_actions_schema_validation.py b/tests/pipelines/lint/test_actions_schema_validation.py
new file mode 100644
index 000000000..34f6b5fcb
--- /dev/null
+++ b/tests/pipelines/lint/test_actions_schema_validation.py
@@ -0,0 +1,62 @@
+from pathlib import Path
+
+import yaml
+
+import nf_core.pipelines.lint
+
+from ..test_lint import TestLint
+
+
+class TestLintActionsSchemaValidation(TestLint):
+    def setUp(self) -> None:
+        super().setUp()
+        self.new_pipeline = self._make_pipeline_copy()
+        self.awstest_yml_path = Path(self.new_pipeline) / ".github" / "workflows" / "awstest.yml"
+        with open(self.awstest_yml_path) as fh:
+            self.awstest_yml = yaml.safe_load(fh)
+
+    def test_actions_schema_validation_missing_jobs(self):
+        """Missing 'jobs' field should result in failure"""
+
+        self.awstest_yml.pop("jobs")
+        with open(self.awstest_yml_path, "w") as fh:
+            yaml.dump(self.awstest_yml, fh)
+
+        lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)
+        lint_obj._load()
+
+        results = lint_obj.actions_schema_validation()
+
+        assert "Workflow validation failed for awstest.yml: 'jobs' is a required property" in results["failed"][0]
+
+    def test_actions_schema_validation_missing_on(self):
+        """Missing 'on' field should result in failure"""
+
+        # the unquoted "on:" key is parsed as boolean True by yaml.safe_load
+        self.awstest_yml.pop(True)
+        with open(self.awstest_yml_path, "w") as fh:
+            yaml.dump(self.awstest_yml, fh)
+
+        lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)
+        lint_obj._load()
+
+        results = lint_obj.actions_schema_validation()
+
+        assert results["failed"][0] == "Missing 'on' keyword in awstest.yml"
+        assert "Workflow validation failed for awstest.yml: 'on' is a required property" in results["failed"][1]
+
+    def test_actions_schema_validation_fails_for_additional_property(self):
+        """An unexpected additional property should result in failure"""
+
+        self.awstest_yml["not_jobs"] = self.awstest_yml["jobs"]
+        with open(self.awstest_yml_path, "w") as fh:
+            yaml.dump(self.awstest_yml, fh)
+
+        lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)
+        lint_obj._load()
+
+        results = lint_obj.actions_schema_validation()
+
+        assert (
+            "Workflow validation failed for awstest.yml: Additional properties are not allowed ('not_jobs' was unexpected)"
+            in results["failed"][0]
+        )
diff --git a/tests/pipelines/lint/test_configs.py b/tests/pipelines/lint/test_configs.py
new file mode 100644
index 000000000..7bb6329b5
--- /dev/null
+++ b/tests/pipelines/lint/test_configs.py
@@ -0,0 +1,91 @@
+from pathlib import Path
+
+import yaml
+
+import nf_core.pipelines.create
+import nf_core.pipelines.lint
+
+from ..test_lint import TestLint
+
+
+class TestLintConfigs(TestLint):
+    def setUp(self) -> None:
+        super().setUp()
+        self.new_pipeline = self._make_pipeline_copy()
+
+    def test_withname_in_modules_config(self):
+        """Tests finding withName in modules.config passes
linting.""" + + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + result = lint_obj.modules_config() + assert len(result["failed"]) == 0 + assert any( + ["`FASTQC` found in `conf/modules.config` and Nextflow scripts." in passed for passed in result["passed"]] + ) + + def test_superfluous_withname_in_modules_config_fails(self): + """Tests finding withName in modules.config fails linting.""" + + # Add withName to modules.config + modules_config = Path(self.new_pipeline) / "conf" / "modules.config" + with open(modules_config, "a") as f: + f.write("\nwithName: 'BPIPE' {\n cache = false \n}") + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline, hide_progress=False) + lint_obj._load() + result = lint_obj.modules_config() + assert len(result["failed"]) == 1 + assert result["failed"][0].startswith("`conf/modules.config` contains `withName:BPIPE`") + + def test_ignore_modules_config(self): + """Tests ignoring the modules.config passes linting.""" + + # ignore modules.config in linting + with open(Path(self.new_pipeline) / ".nf-core.yml") as f: + content = yaml.safe_load(f) + old_content = content.copy() + content["lint"] = {"modules_config": False} + with open(Path(self.new_pipeline) / ".nf-core.yml", "w") as f: + yaml.dump(content, f) + Path(self.new_pipeline, "conf", "modules.config").unlink() + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + result = lint_obj.modules_config() + assert len(result["ignored"]) == 1 + assert result["ignored"][0].startswith("`conf/modules.config` not found, but it is ignored.") + # cleanup + with open(Path(self.new_pipeline) / ".nf-core.yml", "w") as f: + yaml.dump(old_content, f) + + def test_superfluous_withname_in_base_config_fails(self): + """Tests finding withName in base.config fails linting.""" + + # Add withName to base.config + base_config = Path(self.new_pipeline) / "conf" / "base.config" + with open(base_config, "a") as f: + f.write("\nwithName:CUSTOM_DUMPSOFTWAREVERSIONS {\n cache = false \n}") + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + result = lint_obj.base_config() + assert len(result["failed"]) == 1 + assert result["failed"][0].startswith("`conf/base.config` contains `withName:CUSTOM_DUMPSOFTWAREVERSIONS`") + + def test_ignore_base_config(self): + """Tests ignoring the base.config passes linting.""" + + # ignore base.config in linting + with open(Path(self.new_pipeline) / ".nf-core.yml") as f: + content = yaml.safe_load(f) + old_content = content.copy() + content["lint"] = {"base_config": False} + with open(Path(self.new_pipeline) / ".nf-core.yml", "w") as f: + yaml.dump(content, f) + Path(self.new_pipeline, "conf", "base.config").unlink() + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + result = lint_obj.base_config() + assert len(result["ignored"]) == 1 + assert result["ignored"][0].startswith("`conf/base.config` not found, but it is ignored.") + # cleanup + with open(Path(self.new_pipeline) / ".nf-core.yml", "w") as f: + yaml.dump(old_content, f) diff --git a/tests/pipelines/lint/test_files_exist.py b/tests/pipelines/lint/test_files_exist.py new file mode 100644 index 000000000..97dd346cd --- /dev/null +++ b/tests/pipelines/lint/test_files_exist.py @@ -0,0 +1,70 @@ +from pathlib import Path + +import nf_core.pipelines.lint + +from ..test_lint import TestLint + + +class TestLintFilesExist(TestLint): + def setUp(self) -> None: + super().setUp() + self.new_pipeline = 
self._make_pipeline_copy()
+
+    def test_files_exist_missing_config(self):
+        """Lint test: critical files missing FAIL"""
+
+        Path(self.new_pipeline, "CHANGELOG.md").unlink()
+
+        lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)
+        lint_obj._load()
+        lint_obj.nf_config["manifest.name"] = "nf-core/testpipeline"
+
+        results = lint_obj.files_exist()
+        assert "File not found: `CHANGELOG.md`" in results["failed"]
+
+    def test_files_exist_missing_main(self):
+        """Check that a missing main.nf issues a warning"""
+
+        Path(self.new_pipeline, "main.nf").unlink()
+
+        lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)
+        lint_obj._load()
+
+        results = lint_obj.files_exist()
+        assert "File not found: `main.nf`" in results["warned"]
+
+    def test_files_exist_deprecated_file(self):
+        """Check that a deprecated file issues a failure"""
+
+        nf = Path(self.new_pipeline, "parameters.settings.json")
+        nf.touch()
+
+        lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)
+        lint_obj._load()
+
+        results = lint_obj.files_exist()
+        assert results["failed"] == ["File must be removed: `parameters.settings.json`"]
+
+    def test_files_exist_pass(self):
+        """Lint check should pass if all files are there"""
+
+        lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)
+        lint_obj._load()
+
+        results = lint_obj.files_exist()
+        assert results["failed"] == []
+
+    def test_files_exist_pass_conditional_nfschema(self):
+        # replace nf-validation with nf-schema in nextflow.config
+        with open(Path(self.new_pipeline, "nextflow.config")) as f:
+            config = f.read()
+        config = config.replace("nf-validation", "nf-schema")
+        with open(Path(self.new_pipeline, "nextflow.config"), "w") as f:
+            f.write(config)
+
+        lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)
+        lint_obj._load()
+        lint_obj.nf_config["manifest.schema"] = "nf-core"
+        results = lint_obj.files_exist()
+        assert results["failed"] == []
+        assert results["ignored"] == []
diff --git a/tests/pipelines/lint/test_files_unchanged.py b/tests/pipelines/lint/test_files_unchanged.py
new file mode 100644
index 000000000..4282b4995
--- /dev/null
+++ b/tests/pipelines/lint/test_files_unchanged.py
@@ -0,0 +1,28 @@
+from pathlib import Path
+
+import nf_core.pipelines.lint
+
+from ..test_lint import TestLint
+
+
+class TestLintFilesUnchanged(TestLint):
+    def test_files_unchanged_pass(self):
+        self.lint_obj._load()
+        results = self.lint_obj.files_unchanged()
+        assert len(results.get("warned", [])) == 0
+        assert len(results.get("failed", [])) == 0
+        assert len(results.get("ignored", [])) == 0
+        assert not results.get("could_fix", True)
+
+    def test_files_unchanged_fail(self):
+        failing_file = Path(".github", "CONTRIBUTING.md")
+        new_pipeline = self._make_pipeline_copy()
+        with open(Path(new_pipeline, failing_file), "a") as fh:
+            fh.write("THIS SHOULD NOT BE HERE")
+
+        lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline)
+        lint_obj._load()
+        results = lint_obj.files_unchanged()
+        assert len(results["failed"]) > 0
+        assert str(failing_file) in results["failed"][0]
+        assert results["could_fix"]
diff --git a/tests/pipelines/lint/test_merge_markers.py b/tests/pipelines/lint/test_merge_markers.py
new file mode 100644
index 000000000..3094d8f8d
--- /dev/null
+++ b/tests/pipelines/lint/test_merge_markers.py
@@ -0,0 +1,25 @@
+import os
+
+import nf_core.pipelines.lint
+
+from ..test_lint import TestLint
+
+
+class TestLintMergeMarkers(TestLint):
+    def test_merge_markers_found(self):
+        """A leftover git merge marker should result in failure"""
+
new_pipeline = self._make_pipeline_copy() + + with open(os.path.join(new_pipeline, "main.nf")) as fh: + main_nf_content = fh.read() + main_nf_content = ">>>>>>>\n" + main_nf_content + with open(os.path.join(new_pipeline, "main.nf"), "w") as fh: + fh.write(main_nf_content) + + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) + lint_obj._load() + + results = lint_obj.merge_markers() + assert len(results["failed"]) > 0 + assert len(results["passed"]) == 0 + assert "Merge marker '>>>>>>>' in " in results["failed"][0] diff --git a/tests/pipelines/lint/test_modules_json.py b/tests/pipelines/lint/test_modules_json.py new file mode 100644 index 000000000..0d8333d9a --- /dev/null +++ b/tests/pipelines/lint/test_modules_json.py @@ -0,0 +1,10 @@ +from ..test_lint import TestLint + + +class TestLintModulesJson(TestLint): + def test_modules_json_pass(self): + self.lint_obj._load() + results = self.lint_obj.modules_json() + assert len(results.get("warned", [])) == 0 + assert len(results.get("failed", [])) == 0 + assert len(results.get("passed", [])) > 0 diff --git a/tests/pipelines/lint/test_multiqc_config.py b/tests/pipelines/lint/test_multiqc_config.py new file mode 100644 index 000000000..5da6e567e --- /dev/null +++ b/tests/pipelines/lint/test_multiqc_config.py @@ -0,0 +1,127 @@ +from pathlib import Path + +import yaml + +import nf_core.pipelines.bump_version +import nf_core.pipelines.lint + +from ..test_lint import TestLint + + +class TestLintMultiqcConfig(TestLint): + def setUp(self) -> None: + super().setUp() + self.new_pipeline = self._make_pipeline_copy() + self.multiqc_config_yml = Path(self.new_pipeline, "assets", "multiqc_config.yml") + + def test_multiqc_config_exists(self): + """Test that linting fails if the multiqc_config.yml file is missing""" + # Delete the file + self.multiqc_config_yml.unlink() + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + result = lint_obj.multiqc_config() + assert result["failed"] == ["`assets/multiqc_config.yml` not found."] + + def test_multiqc_config_ignore(self): + """Test that linting succeeds if the multiqc_config.yml file is missing but ignored""" + # Delete the file + self.multiqc_config_yml.unlink() + with open(Path(self.new_pipeline, ".nf-core.yml")) as f: + content = yaml.safe_load(f) + old_content = content.copy() + content["lint"] = {"multiqc_config": False} + with open(Path(self.new_pipeline, ".nf-core.yml"), "w") as f: + yaml.dump(content, f) + + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + result = lint_obj.multiqc_config() + assert result["ignored"] == ["`assets/multiqc_config.yml` not found, but it is ignored."] + + # cleanup + with open(Path(self.new_pipeline, ".nf-core.yml"), "w") as f: + yaml.dump(old_content, f) + + def test_multiqc_config_missing_report_section_order(self): + """Test that linting fails if the multiqc_config.yml file is missing the report_section_order""" + with open(self.multiqc_config_yml) as fh: + mqc_yml = yaml.safe_load(fh) + mqc_yml_tmp = mqc_yml.copy() + mqc_yml.pop("report_section_order") + with open(self.multiqc_config_yml, "w") as fh: + yaml.safe_dump(mqc_yml, fh) + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + result = lint_obj.multiqc_config() + # Reset the file + with open(self.multiqc_config_yml, "w") as fh: + yaml.safe_dump(mqc_yml_tmp, fh) + assert result["failed"] == ["`assets/multiqc_config.yml` does not contain `report_section_order`"] + + def 
test_multiqc_incorrect_export_plots(self):
+        """Test that linting fails if the multiqc_config.yml file has an incorrect value for export_plots"""
+        with open(self.multiqc_config_yml) as fh:
+            mqc_yml = yaml.safe_load(fh)
+        mqc_yml_tmp = mqc_yml.copy()
+        mqc_yml["export_plots"] = False
+        with open(self.multiqc_config_yml, "w") as fh:
+            yaml.safe_dump(mqc_yml, fh)
+        lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)
+        lint_obj._load()
+        result = lint_obj.multiqc_config()
+        # Reset the file
+        with open(self.multiqc_config_yml, "w") as fh:
+            yaml.safe_dump(mqc_yml_tmp, fh)
+        assert result["failed"] == ["`assets/multiqc_config.yml` does not contain 'export_plots: true'."]
+
+    def test_multiqc_config_report_comment_fail(self):
+        """Test that linting fails if the multiqc_config.yml file has an incorrect report_comment"""
+        with open(self.multiqc_config_yml) as fh:
+            mqc_yml = yaml.safe_load(fh)
+        mqc_yml_tmp = mqc_yml.copy()
+        mqc_yml["report_comment"] = "This is a test"
+        with open(self.multiqc_config_yml, "w") as fh:
+            yaml.safe_dump(mqc_yml, fh)
+        lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)
+        lint_obj._load()
+        result = lint_obj.multiqc_config()
+        # Reset the file
+        with open(self.multiqc_config_yml, "w") as fh:
+            yaml.safe_dump(mqc_yml_tmp, fh)
+        assert len(result["failed"]) == 1
+        assert result["failed"][0].startswith(
+            "`assets/multiqc_config.yml` does not contain a matching 'report_comment'."
+        )
+
+    def test_multiqc_config_report_comment_release_fail(self):
+        """Test that linting fails if the multiqc_config.yml file has an incorrect report_comment for a release version"""
+        with open(self.multiqc_config_yml) as fh:
+            mqc_yml = yaml.safe_load(fh)
+        mqc_yml_tmp = mqc_yml.copy()
+        with open(self.multiqc_config_yml, "w") as fh:
+            yaml.safe_dump(mqc_yml, fh)
+        lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)
+        lint_obj._load()
+        # bump version to a release version; the template report_comment still references the dev version
+        lint_obj.nf_config["manifest.version"] = "1.0"
+        result = lint_obj.multiqc_config()
+        # Reset the file
+        with open(self.multiqc_config_yml, "w") as fh:
+            yaml.safe_dump(mqc_yml_tmp, fh)
+        assert len(result["failed"]) == 1
+        assert result["failed"][0].startswith(
+            "`assets/multiqc_config.yml` does not contain a matching 'report_comment'."
+        )
+
+    def test_multiqc_config_report_comment_release_succeed(self):
+        """Test that linting succeeds if the multiqc_config.yml file has a correct report_comment for a release version"""
+
+        lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)
+        lint_obj._load()
+        # bump version using the bump_version function
+        nf_core.pipelines.bump_version.bump_pipeline_version(lint_obj, "1.0")
+        # lint again
+        lint_obj._load()
+        result = lint_obj.multiqc_config()
+        assert "`assets/multiqc_config.yml` contains a matching 'report_comment'."
in result["passed"] diff --git a/tests/pipelines/lint/test_nextflow_config.py b/tests/pipelines/lint/test_nextflow_config.py new file mode 100644 index 000000000..a655fb8ac --- /dev/null +++ b/tests/pipelines/lint/test_nextflow_config.py @@ -0,0 +1,209 @@ +import os +import re +from pathlib import Path + +import yaml + +import nf_core.pipelines.create.create +import nf_core.pipelines.lint +from nf_core.utils import NFCoreYamlConfig + +from ..test_lint import TestLint + + +class TestLintNextflowConfig(TestLint): + def setUp(self) -> None: + super().setUp() + self.new_pipeline = self._make_pipeline_copy() + + def test_nextflow_config_example_pass(self): + """Tests that config variable existence test works with good pipeline example""" + self.lint_obj.load_pipeline_config() + result = self.lint_obj.nextflow_config() + assert len(result["failed"]) == 0 + assert len(result["warned"]) == 0 + + def test_default_values_match(self): + """Test that the default values in nextflow.config match the default values defined in the nextflow_schema.json.""" + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj.load_pipeline_config() + result = lint_obj.nextflow_config() + assert len(result["failed"]) == 0 + assert len(result["warned"]) == 0 + assert "Config default value correct: params.validate_params" in str(result["passed"]) + + def test_nextflow_config_bad_name_fail(self): + """Tests that config variable existence test fails with bad pipeline name""" + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj.load_pipeline_config() + + lint_obj.nf_config["manifest.name"] = "bad_name" + result = lint_obj.nextflow_config() + assert len(result["failed"]) > 0 + assert len(result["warned"]) == 0 + + def test_nextflow_config_dev_in_release_mode_failed(self): + """Tests that config variable existence test fails with dev version in release mode""" + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj.load_pipeline_config() + + lint_obj.release_mode = True + lint_obj.nf_config["manifest.version"] = "dev_is_bad_name" + result = lint_obj.nextflow_config() + assert len(result["failed"]) > 0 + assert len(result["warned"]) == 0 + + def test_nextflow_config_missing_test_profile_failed(self): + """Test failure if config file does not contain `test` profile.""" + # Change the name of the test profile so there is no such profile + nf_conf_file = os.path.join(self.new_pipeline, "nextflow.config") + with open(nf_conf_file) as f: + content = f.read() + fail_content = re.sub(r"\btest\b", "testfail", content) + with open(nf_conf_file, "w") as f: + f.write(fail_content) + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj.load_pipeline_config() + result = lint_obj.nextflow_config() + assert len(result["failed"]) > 0 + assert len(result["warned"]) == 0 + + def test_default_values_fail(self): + """Test linting fails if the default values in nextflow.config do not match the ones defined in the nextflow_schema.json.""" + # Change the default value of max_multiqc_email_size in nextflow.config + nf_conf_file = Path(self.new_pipeline) / "nextflow.config" + with open(nf_conf_file) as f: + content = f.read() + fail_content = re.sub(r"\bmax_multiqc_email_size\s*=\s*'25.MB'", "max_multiqc_email_size = '0'", content) + with open(nf_conf_file, "w") as f: + f.write(fail_content) + # Change the default value of custom_config_version in nextflow_schema.json + nf_schema_file = Path(self.new_pipeline) / "nextflow_schema.json" + with 
open(nf_schema_file) as f:
+            content = f.read()
+        fail_content = re.sub(r'"default": "master"', '"default": "main"', content)
+        with open(nf_schema_file, "w") as f:
+            f.write(fail_content)
+        lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)
+        lint_obj.load_pipeline_config()
+        result = lint_obj.nextflow_config()
+        assert len(result["failed"]) == 2
+        assert (
+            "Config default value incorrect: `params.max_multiqc_email_size` is set as `25.MB` in `nextflow_schema.json` but is `0` in `nextflow.config`."
+            in result["failed"]
+        )
+        assert (
+            "Config default value incorrect: `params.custom_config_version` is set as `main` in `nextflow_schema.json` but is `master` in `nextflow.config`."
+            in result["failed"]
+        )
+
+    def test_catch_params_assignment_in_main_nf(self):
+        """Test linting fails if main.nf contains an assignment to a parameter from nextflow_schema.json."""
+        # Add parameter assignment in main.nf
+        main_nf_file = Path(self.new_pipeline) / "main.nf"
+        with open(main_nf_file, "a") as f:
+            f.write("params.custom_config_base = 'test'")
+        lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)
+        lint_obj.load_pipeline_config()
+        result = lint_obj.nextflow_config()
+        assert len(result["failed"]) == 2
+        assert (
+            result["failed"][1]
+            == "Config default value incorrect: `params.custom_config_base` is set as `https://raw.githubusercontent.com/nf-core/configs/master` in `nextflow_schema.json` but is `null` in `nextflow.config`."
+        )
+
+    def test_allow_params_reference_in_main_nf(self):
+        """Test linting allows for references like `params.aligner == 'bwa'` in main.nf. The test will detect if the bug mentioned in GitHub issue #2833 reemerges."""
+        # Add parameter reference in main.nf
+        main_nf_file = Path(self.new_pipeline) / "main.nf"
+        with open(main_nf_file, "a") as f:
+            f.write("params.custom_config_version == 'main'")
+        lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)
+        lint_obj.load_pipeline_config()
+        result = lint_obj.nextflow_config()
+        assert len(result["failed"]) == 0
+
+    def test_default_values_ignored(self):
+        """Test ignoring linting of default values."""
+        # Add custom_config_version to the ignore list
+        nf_core_yml_path = Path(self.new_pipeline) / ".nf-core.yml"
+        nf_core_yml = NFCoreYamlConfig(
+            repository_type="pipeline",
+            lint={"nextflow_config": [{"config_defaults": ["params.custom_config_version"]}]},
+        )
+        with open(nf_core_yml_path, "w") as f:
+            yaml.dump(nf_core_yml.model_dump(), f)
+
+        lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)
+        lint_obj.load_pipeline_config()
+        lint_obj._load_lint_config()
+        result = lint_obj.nextflow_config()
+        assert len(result["failed"]) == 0
+        assert len(result["ignored"]) == 1
+        assert "Config default value correct: params.custom_config_version" not in str(result["passed"])
+        assert "Config default ignored: params.custom_config_version" in str(result["ignored"])
+
+    def test_default_values_float(self):
+        """Test comparing two matching float values."""
+        # Add a float value `dummy = 0.000000001` to nextflow.config below `validate_params`
+        nf_conf_file = Path(self.new_pipeline) / "nextflow.config"
+        with open(nf_conf_file) as f:
+            content = f.read()
+        fail_content = re.sub(
+            r"validate_params\s*=\s*true",
+            "params.validate_params = true\ndummy = 0.000000001",
+            content,
+        )
+        with open(nf_conf_file, "w") as f:
+            f.write(fail_content)
+        # Add a matching float value `dummy` to the nextflow_schema.json
+        nf_schema_file = Path(self.new_pipeline) / "nextflow_schema.json"
+        with open(nf_schema_file) as f:
+            content = f.read()
+        fail_content = re.sub(
+            r'"validate_params": {',
+            ' "dummy": {"type": "number","default":0.000000001},\n"validate_params": {',
+            content,
+        )
+        with open(nf_schema_file, "w") as f:
+            f.write(fail_content)
+
+        lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)
+        lint_obj.load_pipeline_config()
+        result = lint_obj.nextflow_config()
+        assert len(result["failed"]) == 0
+        assert len(result["warned"]) == 0
+        assert "Config default value correct: params.dummy" in str(result["passed"])
+
+    def test_default_values_float_fail(self):
+        """Test that comparing two mismatching float values fails."""
+        # Add a float value `dummy = 0.000000001` to nextflow.config below `validate_params`
+        nf_conf_file = Path(self.new_pipeline) / "nextflow.config"
+        with open(nf_conf_file) as f:
+            content = f.read()
+        fail_content = re.sub(
+            r"validate_params\s*=\s*true",
+            "params.validate_params = true\ndummy = 0.000000001",
+            content,
+        )
+        with open(nf_conf_file, "w") as f:
+            f.write(fail_content)
+        # Add a different float value `dummy` to the nextflow_schema.json
+        nf_schema_file = Path(self.new_pipeline) / "nextflow_schema.json"
+        with open(nf_schema_file) as f:
+            content = f.read()
+        fail_content = re.sub(
+            r'"validate_params": {',
+            ' "dummy": {"type": "float","default":0.000001},\n"validate_params": {',
+            content,
+        )
+        with open(nf_schema_file, "w") as f:
+            f.write(fail_content)
+
+        lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)
+        lint_obj.load_pipeline_config()
+        result = lint_obj.nextflow_config()
+
+        assert len(result["failed"]) == 1
+        assert len(result["warned"]) == 0
+        assert "Config default value incorrect: `params.dummy" in str(result["failed"])
diff --git a/tests/pipelines/lint/test_nfcore_yml.py b/tests/pipelines/lint/test_nfcore_yml.py
new file mode 100644
index 000000000..955c00da8
--- /dev/null
+++ b/tests/pipelines/lint/test_nfcore_yml.py
@@ -0,0 +1,57 @@
+import re
+from pathlib import Path
+
+import nf_core.pipelines.create
+import nf_core.pipelines.lint
+
+from ..test_lint import TestLint
+
+
+class TestLintNfCoreYml(TestLint):
+    def setUp(self) -> None:
+        super().setUp()
+        self.new_pipeline = self._make_pipeline_copy()
+        self.nf_core_yml = Path(self.new_pipeline) / ".nf-core.yml"
+
+    def test_nfcore_yml_pass(self):
+        """Lint test: nfcore_yml - PASS"""
+        self.lint_obj._load()
+        results = self.lint_obj.nfcore_yml()
+
+        assert "Repository type in `.nf-core.yml` is valid" in str(results["passed"])
+        assert "nf-core version in `.nf-core.yml` is set to the latest version" in str(results["passed"])
+        assert len(results.get("warned", [])) == 0
+        assert len(results.get("failed", [])) == 0
+        assert len(results.get("ignored", [])) == 0
+
+    def test_nfcore_yml_fail_repo_type(self):
+        """Lint test: nfcore_yml - FAIL - invalid repository type"""
+
+        with open(self.nf_core_yml) as fh:
+            content = fh.read()
+        new_content = content.replace("repository_type: pipeline", "repository_type: foo")
+        with open(self.nf_core_yml, "w") as fh:
+            fh.write(new_content)
+        lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)
+        lint_obj._load()
+        results = lint_obj.nfcore_yml()
+        assert "Repository type in `.nf-core.yml` is not valid."
in str(results["failed"]) + assert len(results.get("warned", [])) == 0 + assert len(results.get("passed", [])) >= 0 + assert len(results.get("ignored", [])) == 0 + + def test_nfcore_yml_fail_nfcore_version(self): + """Lint test: nfcore_yml - FAIL - nf-core version not set""" + + with open(self.nf_core_yml) as fh: + content = fh.read() + new_content = re.sub(r"nf_core_version:.+", "nf_core_version: foo", content) + with open(self.nf_core_yml, "w") as fh: + fh.write(new_content) + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + results = lint_obj.nfcore_yml() + assert "nf-core version in `.nf-core.yml` is not set to the latest version." in str(results["warned"]) + assert len(results.get("failed", [])) == 0 + assert len(results.get("passed", [])) >= 0 + assert len(results.get("ignored", [])) == 0 diff --git a/tests/pipelines/lint/test_plugin_includes.py b/tests/pipelines/lint/test_plugin_includes.py new file mode 100644 index 000000000..8eb31e267 --- /dev/null +++ b/tests/pipelines/lint/test_plugin_includes.py @@ -0,0 +1,24 @@ +import nf_core.pipelines.lint + +from ..test_lint import TestLint + + +class TestLintPluginIncludes(TestLint): + def setUp(self) -> None: + super().setUp() + self.new_pipeline = self._make_pipeline_copy() + + def test_default_values_match(self): + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + result = lint_obj.plugin_includes() + assert len(result["failed"]) == 0 + assert len(result["warned"]) == 0 + + def test_wrong_include(self): + test_path = self.new_pipeline / "test.nf" + with open(test_path, "w") as of: + of.write("include { paramsSummary } from 'plugin/nf-validation'\n") + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + result = lint_obj.plugin_includes() + assert len(result["failed"]) == 1 + assert len(result["warned"]) == 0 diff --git a/tests/pipelines/lint/test_template_strings.py b/tests/pipelines/lint/test_template_strings.py new file mode 100644 index 000000000..406ba63e0 --- /dev/null +++ b/tests/pipelines/lint/test_template_strings.py @@ -0,0 +1,55 @@ +import subprocess +from pathlib import Path + +import nf_core.pipelines.create +import nf_core.pipelines.lint + +from ..test_lint import TestLint + + +class TestLintTemplateStrings(TestLint): + def setUp(self) -> None: + super().setUp() + self.new_pipeline = self._make_pipeline_copy() + + def test_template_strings(self): + """Tests finding a template string in a file fails linting.""" + # Add template string to a file + txt_file = Path(self.new_pipeline) / "docs" / "test.txt" + with open(txt_file, "w") as f: + f.write("my {{ template_string }}") + subprocess.check_output(["git", "add", "docs"], cwd=self.new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + result = lint_obj.template_strings() + assert len(result["failed"]) == 1 + assert len(result["ignored"]) == 0 + + def test_template_strings_ignored(self): + """Tests ignoring template_strings""" + # Ignore template_strings test + nf_core_yml = Path(self.new_pipeline) / ".nf-core.yml" + with open(nf_core_yml, "w") as f: + f.write("repository_type: pipeline\nlint:\n template_strings: False") + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + lint_obj._lint_pipeline() + assert len(lint_obj.failed) == 0 + assert len(lint_obj.ignored) == 1 + + def test_template_strings_ignore_file(self): + """Tests ignoring template_strings file""" + # Add template string to a file + txt_file = 
Path(self.new_pipeline) / "docs" / "test.txt"
+        with open(txt_file, "w") as f:
+            f.write("my {{ template_string }}")
+        subprocess.check_output(["git", "add", "docs"], cwd=self.new_pipeline)
+        # Ignore template_strings test
+        nf_core_yml = Path(self.new_pipeline) / ".nf-core.yml"
+        with open(nf_core_yml, "w") as f:
+            f.write("repository_type: pipeline\nlint:\n  template_strings:\n    - docs/test.txt")
+        lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)
+        lint_obj._load()
+        result = lint_obj.template_strings()
+        assert len(result["failed"]) == 0
+        assert len(result["ignored"]) == 1
diff --git a/tests/pipelines/lint/test_version_consistency.py b/tests/pipelines/lint/test_version_consistency.py
new file mode 100644
index 000000000..c5a2cc74f
--- /dev/null
+++ b/tests/pipelines/lint/test_version_consistency.py
@@ -0,0 +1,19 @@
+import nf_core.pipelines.create.create
+import nf_core.pipelines.lint
+
+from ..test_lint import TestLint
+
+
+class TestLintVersionConsistency(TestLint):
+    def test_version_consistency(self):
+        """Tests that a dev manifest version fails the version consistency check"""
+        new_pipeline = self._make_pipeline_copy()
+        lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline)
+        lint_obj.load_pipeline_config()
+        lint_obj.nextflow_config()
+
+        result = lint_obj.version_consistency()
+        assert result["passed"] == [
+            "Version tags are numeric and consistent between container, release tag and config."
+        ]
+        assert result["failed"] == ["manifest.version was not numeric: 1.0.0dev!"]
diff --git a/tests/pipelines/test_bump_version.py b/tests/pipelines/test_bump_version.py
new file mode 100644
index 000000000..709e82427
--- /dev/null
+++ b/tests/pipelines/test_bump_version.py
@@ -0,0 +1,55 @@
+"""Some tests covering the bump_version code."""
+
+import yaml
+
+import nf_core.pipelines.bump_version
+import nf_core.utils
+
+from ..test_pipelines import TestPipelines
+
+
+class TestBumpVersion(TestPipelines):
+    def test_bump_pipeline_version(self):
+        """Test that making a release with the working example files works"""
+
+        # Bump the version number
+        nf_core.pipelines.bump_version.bump_pipeline_version(self.pipeline_obj, "1.1")
+        new_pipeline_obj = nf_core.utils.Pipeline(self.pipeline_dir)
+
+        # Check nextflow.config
+        new_pipeline_obj.load_pipeline_config()
+        assert new_pipeline_obj.nf_config["manifest.version"].strip("'\"") == "1.1"
+
+    def test_dev_bump_pipeline_version(self):
+        """Test that making a release works with a dev name and a leading v"""
+        # Bump the version number
+        nf_core.pipelines.bump_version.bump_pipeline_version(self.pipeline_obj, "v1.2dev")
+        new_pipeline_obj = nf_core.utils.Pipeline(self.pipeline_dir)
+
+        # Check the pipeline config
+        new_pipeline_obj.load_pipeline_config()
+        assert new_pipeline_obj.nf_config["manifest.version"].strip("'\"") == "1.2dev"
+
+    def test_bump_nextflow_version(self):
+        # Bump the version number to a specific version, preferably one
+        # we're not already on
+        version = "22.04.3"
+        nf_core.pipelines.bump_version.bump_nextflow_version(self.pipeline_obj, version)
+        new_pipeline_obj = nf_core.utils.Pipeline(self.pipeline_dir)
+        new_pipeline_obj._load()
+
+        # Check nextflow.config
+        assert new_pipeline_obj.nf_config["manifest.nextflowVersion"].strip("'\"") == f"!>={version}"
+
+        # Check .github/workflows/ci.yml
+        with open(new_pipeline_obj._fp(".github/workflows/ci.yml")) as fh:
+            ci_yaml = yaml.safe_load(fh)
+        assert ci_yaml["jobs"]["test"]["strategy"]["matrix"]["NXF_VER"][0] == version
+
+        # Check README.md
+        with
open(new_pipeline_obj._fp("README.md")) as fh: + readme = fh.read().splitlines() + assert ( + f"[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A5{version}-23aa62.svg)]" + "(https://www.nextflow.io/)" in readme + ) diff --git a/tests/pipelines/test_create.py b/tests/pipelines/test_create.py new file mode 100644 index 000000000..f83cc274f --- /dev/null +++ b/tests/pipelines/test_create.py @@ -0,0 +1,175 @@ +"""Some tests covering the pipeline creation sub command.""" + +import os +import unittest +from pathlib import Path + +import git +import jinja2 +import yaml + +import nf_core.pipelines.create.create +from nf_core.pipelines.create.utils import load_features_yaml + +from ..utils import TEST_DATA_DIR, with_temporary_folder + +PIPELINE_TEMPLATE_YML = TEST_DATA_DIR / "pipeline_create_template.yml" +PIPELINE_TEMPLATE_YML_SKIP = TEST_DATA_DIR / "pipeline_create_template_skip.yml" +PIPELINE_TEMPLATE = Path(nf_core.__file__).parent / "pipeline-template" + + +class NfcoreCreateTest(unittest.TestCase): + def setUp(self): + self.pipeline_name = "nf-core/test" + self.pipeline_description = "just for 4w3s0m3 tests" + self.pipeline_author = "Chuck Norris" + self.pipeline_version = "1.0.0" + self.default_branch = "default" + + @with_temporary_folder + def test_pipeline_creation(self, tmp_path): + pipeline = nf_core.pipelines.create.create.PipelineCreate( + name=self.pipeline_name, + description=self.pipeline_description, + author=self.pipeline_author, + version=self.pipeline_version, + no_git=False, + force=True, + outdir=tmp_path, + default_branch=self.default_branch, + ) + + assert pipeline.config.name == self.pipeline_name + assert pipeline.config.description == self.pipeline_description + assert pipeline.config.author == self.pipeline_author + assert pipeline.config.version == self.pipeline_version + + @with_temporary_folder + def test_pipeline_creation_initiation(self, tmp_path): + pipeline = nf_core.pipelines.create.create.PipelineCreate( + name=self.pipeline_name, + description=self.pipeline_description, + author=self.pipeline_author, + version=self.pipeline_version, + no_git=False, + force=True, + outdir=tmp_path, + default_branch=self.default_branch, + ) + pipeline.init_pipeline() + assert Path(pipeline.outdir, ".git").is_dir() + assert f" {self.default_branch}\n" in git.Repo.init(pipeline.outdir).git.branch() + assert not Path(pipeline.outdir, "pipeline_template.yml").exists() + with open(Path(pipeline.outdir, ".nf-core.yml")) as fh: + assert "template" in fh.read() + + @with_temporary_folder + def test_pipeline_creation_initiation_with_yml(self, tmp_path): + pipeline = nf_core.pipelines.create.create.PipelineCreate( + no_git=False, + outdir=tmp_path, + template_config=PIPELINE_TEMPLATE_YML, + default_branch=self.default_branch, + ) + pipeline.init_pipeline() + assert os.path.isdir(os.path.join(pipeline.outdir, ".git")) + assert f" {self.default_branch}\n" in git.Repo.init(pipeline.outdir).git.branch() + + # Check pipeline template yml has been dumped to `.nf-core.yml` and matches input + assert not os.path.exists(os.path.join(pipeline.outdir, "pipeline_template.yml")) + assert os.path.exists(os.path.join(pipeline.outdir, ".nf-core.yml")) + with open(os.path.join(pipeline.outdir, ".nf-core.yml")) as fh: + nfcore_yml = yaml.safe_load(fh) + assert "template" in nfcore_yml + assert yaml.safe_load(PIPELINE_TEMPLATE_YML.read_text()).items() <= nfcore_yml["template"].items() + + @with_temporary_folder + def test_pipeline_creation_initiation_customize_template(self, tmp_path): + 
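+        """Create a pipeline from a template customisation YAML and check it is dumped to `.nf-core.yml`."""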
pipeline = nf_core.pipelines.create.create.PipelineCreate( + outdir=tmp_path, template_config=PIPELINE_TEMPLATE_YML, default_branch=self.default_branch + ) + pipeline.init_pipeline() + assert os.path.isdir(os.path.join(pipeline.outdir, ".git")) + assert f" {self.default_branch}\n" in git.Repo.init(pipeline.outdir).git.branch() + + # Check pipeline template yml has been dumped to `.nf-core.yml` and matches input + assert not os.path.exists(os.path.join(pipeline.outdir, "pipeline_template.yml")) + assert os.path.exists(os.path.join(pipeline.outdir, ".nf-core.yml")) + with open(os.path.join(pipeline.outdir, ".nf-core.yml")) as fh: + nfcore_yml = yaml.safe_load(fh) + assert "template" in nfcore_yml + assert yaml.safe_load(PIPELINE_TEMPLATE_YML.read_text()).items() <= nfcore_yml["template"].items() + + @with_temporary_folder + def test_pipeline_creation_with_yml_skip(self, tmp_path): + # Update pipeline_create_template_skip.yml file + template_features_yml = load_features_yaml() + all_features = list(template_features_yml.keys()) + all_features.remove("is_nfcore") + env = jinja2.Environment(loader=jinja2.PackageLoader("tests", "data"), keep_trailing_newline=True) + skip_template = env.get_template( + str(PIPELINE_TEMPLATE_YML_SKIP.relative_to(Path(nf_core.__file__).parent.parent / "tests" / "data")) + ) + rendered_content = skip_template.render({"all_features": all_features}) + rendered_yaml = Path(tmp_path) / "pipeline_create_template_skip.yml" + with open(rendered_yaml, "w") as fh: + fh.write(rendered_content) + + pipeline = nf_core.pipelines.create.create.PipelineCreate( + outdir=tmp_path, + template_config=rendered_yaml, + default_branch=self.default_branch, + ) + pipeline.init_pipeline() + + # Check pipeline template yml has been dumped to `.nf-core.yml` and matches input + assert not (pipeline.outdir / "pipeline_template.yml").exists() + assert (pipeline.outdir / ".nf-core.yml").exists() + with open(pipeline.outdir / ".nf-core.yml") as fh: + nfcore_yml = yaml.safe_load(fh) + assert "template" in nfcore_yml + assert yaml.safe_load(PIPELINE_TEMPLATE_YML.read_text()).items() <= nfcore_yml["template"].items() + + # Check that some of the skipped files are not present + assert not (pipeline.outdir / "CODE_OF_CONDUCT.md").exists() + assert not (pipeline.outdir / ".github").exists() + assert not (pipeline.outdir / "conf" / "igenomes.config").exists() + assert not (pipeline.outdir / ".editorconfig").exists() + + def test_template_customisation_all_files_grouping(self): + """Test that all pipeline template files are included in a pipeline customisation group.""" + template_features_yml = load_features_yaml() + base_required_files = [ + ".gitignore", + ".nf-core.yml", + "README.md", + "nextflow.config", + "CITATIONS.md", + "main.nf", + "workflows/pipeline.nf", + ] + all_skipped_files = [] + for feature in template_features_yml.keys(): + if template_features_yml[feature]["skippable_paths"]: + all_skipped_files.extend(template_features_yml[feature]["skippable_paths"]) + + for root, _, files in os.walk(PIPELINE_TEMPLATE): + for file in files: + str_path = str((Path(root) / file).relative_to(PIPELINE_TEMPLATE)) + if str_path not in base_required_files: + try: + assert ( + str_path in all_skipped_files + ), f"Template file `{str_path}` not present in a group for pipeline customisation in `template_features.yml`." 
+ except AssertionError: + if "/" in str_path: + # Check if the parent directory is in the skipped files + upper_dir_present = False + for i in range(1, len(str_path.split("/"))): + upper_dir = "/".join(str_path.split("/")[:i]) + if upper_dir in all_skipped_files: + upper_dir_present = True + break + assert upper_dir_present, f"Template file `{str_path}` not present in a group for pipeline customisation in `template_features.yml`." + else: + raise diff --git a/tests/pipelines/test_create_app.py b/tests/pipelines/test_create_app.py new file mode 100644 index 000000000..9a02f04f0 --- /dev/null +++ b/tests/pipelines/test_create_app.py @@ -0,0 +1,296 @@ +"""Test Pipeline Create App""" + +from unittest import mock + +from nf_core.pipelines.create import PipelineCreateApp + +INIT_FILE = "../../nf_core/pipelines/create/__init__.py" + + +async def test_app_bindings(): + """Test that the app bindings work.""" + app = PipelineCreateApp() + async with app.run_test() as pilot: + # Test pressing the D key + assert app.dark + await pilot.press("d") + assert not app.dark + await pilot.press("d") + assert app.dark + + # Test pressing the Q key + await pilot.press("q") + assert app.return_code == 0 + + +def test_welcome(snap_compare): + """Test snapshot for the first screen in the app. The welcome screen.""" + assert snap_compare(INIT_FILE, terminal_size=(100, 50)) + + +def test_choose_type(snap_compare): + """Test snapshot for the choose_type screen. + Steps to get to this screen: + screen welcome > press start > + screen choose_type + """ + + async def run_before(pilot) -> None: + await pilot.click("#start") + + assert snap_compare(INIT_FILE, terminal_size=(100, 50), run_before=run_before) + + +def test_basic_details_nfcore(snap_compare): + """Test snapshot for the basic_details screen of an nf-core pipeline. + Steps to get to this screen: + screen welcome > press start > + screen choose_type > press nf-core > + screen basic_details + """ + + async def run_before(pilot) -> None: + await pilot.click("#start") + await pilot.click("#type_nfcore") + + assert snap_compare(INIT_FILE, terminal_size=(100, 50), run_before=run_before) + + +def test_basic_details_custom(snap_compare): + """Test snapshot for the basic_details screen of a custom pipeline. + Steps to get to this screen: + screen welcome > press start > + screen choose_type > press custom > + screen basic_details + """ + + async def run_before(pilot) -> None: + await pilot.click("#start") + await pilot.click("#type_custom") + + assert snap_compare(INIT_FILE, terminal_size=(100, 50), run_before=run_before) + + +def test_type_nfcore(snap_compare): + """Test snapshot for the type_nfcore screen. + Steps to get to this screen: + screen welcome > press start > + screen choose_type > press nf-core > + screen basic_details > enter pipeline details > press next > + screen type_nfcore + """ + + async def run_before(pilot) -> None: + await pilot.click("#start") + await pilot.click("#type_nfcore") + await pilot.click("#name") + await pilot.press("m", "y", "p", "i", "p", "e", "l", "i", "n", "e") + await pilot.press("tab") + await pilot.press("A", " ", "c", "o", "o", "l", " ", "d", "e", "s", "c", "r", "i", "p", "t", "i", "o", "n") + await pilot.press("tab") + await pilot.press("M", "e") + await pilot.click("#next") + + assert snap_compare(INIT_FILE, terminal_size=(100, 50), run_before=run_before) + + +def test_type_nfcore_validation(snap_compare): + """Test snapshot for the type_nfcore screen. + Validation errors should appear when input fields are empty. 
+    Steps to get to this screen:
+    screen welcome > press start >
+    screen choose_type > press nf-core >
+    screen basic_details > press next >
+    ERRORS
+    """
+
+    async def run_before(pilot) -> None:
+        await pilot.click("#start")
+        await pilot.click("#type_nfcore")
+        await pilot.click("#next")
+        await pilot.pause()
+
+    assert snap_compare(INIT_FILE, terminal_size=(100, 50), run_before=run_before)
+
+
+def test_type_custom(snap_compare):
+    """Test snapshot for the type_custom screen.
+    Steps to get to this screen:
+    screen welcome > press start >
+    screen choose_type > press custom >
+    screen basic_details > enter pipeline details > press next >
+    screen type_custom
+    """
+
+    async def run_before(pilot) -> None:
+        await pilot.click("#start")
+        await pilot.click("#type_custom")
+        await pilot.click("#name")
+        await pilot.press("tab")
+        await pilot.press("m", "y", "p", "i", "p", "e", "l", "i", "n", "e")
+        await pilot.press("tab")
+        await pilot.press("A", " ", "c", "o", "o", "l", " ", "d", "e", "s", "c", "r", "i", "p", "t", "i", "o", "n")
+        await pilot.press("tab")
+        await pilot.press("M", "e")
+        await pilot.click("#next")
+
+    assert snap_compare(INIT_FILE, terminal_size=(100, 50), run_before=run_before)
+
+
+def test_final_details(snap_compare):
+    """Test snapshot for the final_details screen.
+    Steps to get to this screen:
+    screen welcome > press start >
+    screen choose_type > press nf-core >
+    screen basic_details > enter pipeline details > press next >
+    screen type_nfcore > press continue >
+    screen final_details
+    """
+
+    async def run_before(pilot) -> None:
+        await pilot.click("#start")
+        await pilot.click("#type_nfcore")
+        await pilot.click("#name")
+        await pilot.press("m", "y", "p", "i", "p", "e", "l", "i", "n", "e")
+        await pilot.press("tab")
+        await pilot.press("A", " ", "c", "o", "o", "l", " ", "d", "e", "s", "c", "r", "i", "p", "t", "i", "o", "n")
+        await pilot.press("tab")
+        await pilot.press("M", "e")
+        await pilot.click("#next")
+        await pilot.click("#continue")
+
+    assert snap_compare(INIT_FILE, terminal_size=(100, 50), run_before=run_before)
+
+
+def test_customisation_help(snap_compare):
+    """Test snapshot for the type_custom screen - showing help messages.
+    Steps to get to this screen:
+    screen welcome > press start >
+    screen choose_type > press custom >
+    screen basic_details > enter pipeline details > press next >
+    screen type_custom > press Show help
+    """
+
+    async def run_before(pilot) -> None:
+        await pilot.click("#start")
+        await pilot.click("#type_custom")
+        await pilot.click("#name")
+        await pilot.press("tab")
+        await pilot.press("m", "y", "p", "i", "p", "e", "l", "i", "n", "e")
+        await pilot.press("tab")
+        await pilot.press("A", " ", "c", "o", "o", "l", " ", "d", "e", "s", "c", "r", "i", "p", "t", "i", "o", "n")
+        await pilot.press("tab")
+        await pilot.press("M", "e")
+        await pilot.click("#next")
+        await pilot.click("#igenomes")
+        await pilot.press("tab")
+        await pilot.press("enter")
+
+    assert snap_compare(INIT_FILE, terminal_size=(100, 50), run_before=run_before)
+
+
+def test_github_question(tmpdir, snap_compare):
+    """Test snapshot for the github_repo_question screen.
+ Steps to get to this screen: + screen welcome > press start > + screen choose_type > press nf-core > + screen basic_details > enter pipeline details > press next > + screen type_nfcore > press continue > + screen final_details > press finish > close logging screen > + screen github_repo_question + """ + + async def run_before(pilot) -> None: + await pilot.click("#start") + await pilot.click("#type_nfcore") + await pilot.click("#name") + await pilot.press("m", "y", "p", "i", "p", "e", "l", "i", "n", "e") + await pilot.press("tab") + await pilot.press("A", " ", "c", "o", "o", "l", " ", "d", "e", "s", "c", "r", "i", "p", "t", "i", "o", "n") + await pilot.press("tab") + await pilot.press("M", "e") + await pilot.click("#next") + await pilot.click("#continue") + await pilot.press("backspace") + await pilot.press("tab") + await pilot.press(*str(tmpdir)) + await pilot.click("#finish") + await pilot.app.workers.wait_for_complete() + await pilot.click("#close_screen") + + assert snap_compare(INIT_FILE, terminal_size=(100, 50), run_before=run_before) + + +@mock.patch("nf_core.pipelines.create.githubrepo.GithubRepo._get_github_credentials") +def test_github_details(mock_get_github_credentials, tmpdir, snap_compare): + """Test snapshot for the github_repo screen. + Steps to get to this screen: + screen welcome > press start > + screen choose_type > press nf-core > + screen basic_details > enter pipeline details > press next > + screen type_nfcore > press continue > + screen final_details > press finish > close logging screen > + screen github_repo_question > press create repo > + screen github_repo + """ + + async def run_before(pilot) -> None: + mock_get_github_credentials.return_value = ( + None, + None, + ) # mock the github credentials to have consistent snapshots + await pilot.click("#start") + await pilot.click("#type_nfcore") + await pilot.click("#name") + await pilot.press("m", "y", "p", "i", "p", "e", "l", "i", "n", "e") + await pilot.press("tab") + await pilot.press("A", " ", "c", "o", "o", "l", " ", "d", "e", "s", "c", "r", "i", "p", "t", "i", "o", "n") + await pilot.press("tab") + await pilot.press("M", "e") + await pilot.click("#next") + await pilot.click("#continue") + await pilot.press("backspace") + await pilot.press("tab") + await pilot.press(*str(tmpdir)) + await pilot.click("#finish") + await pilot.app.workers.wait_for_complete() + await pilot.click("#close_screen") + await pilot.click("#github_repo") + + assert snap_compare(INIT_FILE, terminal_size=(100, 50), run_before=run_before) + + +def test_github_exit_message(tmpdir, snap_compare): + """Test snapshot for the github_exit screen. 
+ Steps to get to this screen: + screen welcome > press start > + screen choose_type > press nf-core > + screen basic_details > enter pipeline details > press next > + screen type_nfcore > press continue > + screen final_details > press finish > close logging screen > + screen github_repo_question > press create repo > + screen github_repo > press exit (close without creating a repo) > + screen github_exit + """ + + async def run_before(pilot) -> None: + await pilot.click("#start") + await pilot.click("#type_nfcore") + await pilot.click("#name") + await pilot.press("m", "y", "p", "i", "p", "e", "l", "i", "n", "e") + await pilot.press("tab") + await pilot.press("A", " ", "c", "o", "o", "l", " ", "d", "e", "s", "c", "r", "i", "p", "t", "i", "o", "n") + await pilot.press("tab") + await pilot.press("M", "e") + await pilot.click("#next") + await pilot.click("#continue") + await pilot.press("backspace") + await pilot.press("tab") + await pilot.press(*str(tmpdir)) + await pilot.click("#finish") + await pilot.app.workers.wait_for_complete() + await pilot.click("#close_screen") + await pilot.click("#github_repo") + await pilot.click("#exit") + + assert snap_compare(INIT_FILE, terminal_size=(100, 50), run_before=run_before) diff --git a/tests/test_create_logo.py b/tests/pipelines/test_create_logo.py similarity index 72% rename from tests/test_create_logo.py rename to tests/pipelines/test_create_logo.py index b3c01638e..9ff9fce56 100644 --- a/tests/test_create_logo.py +++ b/tests/pipelines/test_create_logo.py @@ -1,35 +1,24 @@ """Test covering the create-logo command.""" -import tempfile -import unittest from pathlib import Path -import nf_core.create_logo +import nf_core.pipelines.create_logo +from ..test_pipelines import TestPipelines -class TestCreateLogo(unittest.TestCase): - """Class for create-logo tests""" - - # create tempdir in setup step - def setUp(self): - self.tempdir = tempfile.TemporaryDirectory() - self.tempdir_path = Path(self.tempdir.name) - - # delete tempdir in teardown step - def tearDown(self): - self.tempdir.cleanup() +class TestCreateLogo(TestPipelines): def test_create_logo_png(self): """Test that the create-logo command works for PNGs""" # Create a logo - logo_fn = nf_core.create_logo.create_logo("pipes", self.tempdir_path) + logo_fn = nf_core.pipelines.create_logo.create_logo("pipes", self.pipeline_dir) # Check that the file exists self.assertTrue(logo_fn.is_file()) # Check that the file is a PNG self.assertTrue(logo_fn.suffix == ".png") # Check that the file is the right size - fixture_fn = Path(__file__).parent / "fixtures" / "create_logo.png" + fixture_fn = Path(__file__).parent.parent / "fixtures" / "create_logo.png" # allow some flexibility in the file size self.assertTrue(int(logo_fn.stat().st_size / 1000) == int(fixture_fn.stat().st_size / 1000)) @@ -37,13 +26,13 @@ def test_create_logo_png_dark(self): """Test that the create-logo command works for dark PNGs""" # Create a logo - logo_fn = nf_core.create_logo.create_logo("pipes", self.tempdir_path, theme="dark") + logo_fn = nf_core.pipelines.create_logo.create_logo("pipes", self.pipeline_dir, theme="dark") # Check that the file exists self.assertTrue(logo_fn.is_file()) # Check that the file is a PNG self.assertTrue(logo_fn.suffix == ".png") # Check that the file is the right size - fixture_fn = Path(__file__).parent / "fixtures" / "create_logo_dark.png" + fixture_fn = Path(__file__).parent.parent / "fixtures" / "create_logo_dark.png" # allow some flexibility in the file size self.assertTrue(int(logo_fn.stat().st_size 
/ 1000) == int(fixture_fn.stat().st_size / 1000)) @@ -51,13 +40,13 @@ def test_create_log_png_width(self): """Test that the create-logo command works for PNGs with a custom width""" # Create a logo - logo_fn = nf_core.create_logo.create_logo("pipes", self.tempdir_path, width=100) + logo_fn = nf_core.pipelines.create_logo.create_logo("pipes", self.pipeline_dir, width=100) # Check that the file exists self.assertTrue(logo_fn.is_file()) # Check that the file is a PNG self.assertTrue(logo_fn.suffix == ".png") # Check that the file is the right size - fixture_fn = Path(__file__).parent / "fixtures" / "create_logo_width100.png" + fixture_fn = Path(__file__).parent.parent / "fixtures" / "create_logo_width100.png" # allow some flexibility in the file size self.assertTrue(int(logo_fn.stat().st_size / 100) == int(fixture_fn.stat().st_size / 100)) @@ -65,12 +54,12 @@ def test_create_logo_twice(self): """Test that the create-logo command returns an info message when run twice""" # Create a logo - logo_fn = nf_core.create_logo.create_logo("pipes", self.tempdir_path) + logo_fn = nf_core.pipelines.create_logo.create_logo("pipes", self.pipeline_dir) # Check that the file exists self.assertTrue(logo_fn.is_file()) # Create the logo again and capture the log output with self.assertLogs(level="INFO") as log: - nf_core.create_logo.create_logo("pipes", self.tempdir_path) + nf_core.pipelines.create_logo.create_logo("pipes", self.pipeline_dir) # Check that the log message is correct self.assertIn("Logo already exists", log.output[0]) @@ -79,13 +68,15 @@ def test_create_logo_without_text_fail(self): # Create a logo with self.assertRaises(UserWarning): - nf_core.create_logo.create_logo("", self.tempdir_path) + nf_core.pipelines.create_logo.create_logo("", self.pipeline_dir) def test_create_logo_with_filename(self): """Test that the create-logo command works with a custom filename""" # Create a logo - logo_fn = nf_core.create_logo.create_logo("pipes", Path(self.tempdir_path / "custom_dir"), filename="custom") + logo_fn = nf_core.pipelines.create_logo.create_logo( + "pipes", Path(self.pipeline_dir / "custom_dir"), filename="custom" + ) # Check that the file exists self.assertTrue(logo_fn.is_file()) # Check that the parent directory name @@ -97,7 +88,7 @@ def test_create_logo_svg(self): """Test that the create-logo command works for SVGs""" # Create a logo - logo_fn = nf_core.create_logo.create_logo("pipes", self.tempdir_path, format="svg") + logo_fn = nf_core.pipelines.create_logo.create_logo("pipes", self.pipeline_dir, format="svg") # Check that the file exists self.assertTrue(logo_fn.is_file()) # Check that the file is a SVG @@ -113,7 +104,7 @@ def test_create_logo_svg_dark(self): """Test that the create-logo command works for svgs and dark theme""" # Create a logo - logo_fn = nf_core.create_logo.create_logo("pipes", self.tempdir_path, format="svg", theme="dark") + logo_fn = nf_core.pipelines.create_logo.create_logo("pipes", self.pipeline_dir, format="svg", theme="dark") # Check that the file exists self.assertTrue(logo_fn.is_file()) # Check that the file is a SVG diff --git a/tests/test_download.py b/tests/pipelines/test_download.py similarity index 94% rename from tests/test_download.py rename to tests/pipelines/test_download.py index 3e0f11d57..a898d37b7 100644 --- a/tests/test_download.py +++ b/tests/pipelines/test_download.py @@ -12,13 +12,14 @@ import pytest -import nf_core.create +import nf_core.pipelines.create.create +import nf_core.pipelines.list import nf_core.utils -from nf_core.download import 
ContainerError, DownloadWorkflow, WorkflowRepo
+from nf_core.pipelines.download import ContainerError, DownloadWorkflow, WorkflowRepo
 from nf_core.synced_repo import SyncedRepo
 from nf_core.utils import run_cmd

-from .utils import with_temporary_folder
+from ..utils import TEST_DATA_DIR, with_temporary_folder


 class DownloadTest(unittest.TestCase):
@@ -44,7 +45,7 @@ def __contains__(self, item: str) -> bool:
     # Tests for 'get_release_hash'
     #
     def test_get_release_hash_release(self):
-        wfs = nf_core.list.Workflows()
+        wfs = nf_core.pipelines.list.Workflows()
         wfs.get_remote_workflows()
         pipeline = "methylseq"
         download_obj = DownloadWorkflow(pipeline=pipeline, revision="1.6")
@@ -62,7 +63,7 @@ def test_get_release_hash_release(self):
         )

     def test_get_release_hash_branch(self):
-        wfs = nf_core.list.Workflows()
+        wfs = nf_core.pipelines.list.Workflows()
         wfs.get_remote_workflows()
         # Exoseq pipeline is archived, so `dev` branch should be stable
         pipeline = "exoseq"
@@ -81,7 +82,7 @@ def test_get_release_hash_branch(self):
         )

     def test_get_release_hash_non_existent_release(self):
-        wfs = nf_core.list.Workflows()
+        wfs = nf_core.pipelines.list.Workflows()
         wfs.get_remote_workflows()
         pipeline = "methylseq"
         download_obj = DownloadWorkflow(pipeline=pipeline, revision="thisisfake")
@@ -128,24 +129,23 @@ def test_download_configs(self, outdir):
     def test_wf_use_local_configs(self, tmp_path):
         # Get a workflow and configs
         test_pipeline_dir = os.path.join(tmp_path, "nf-core-testpipeline")
-        create_obj = nf_core.create.PipelineCreate(
+        create_obj = nf_core.pipelines.create.create.PipelineCreate(
             "testpipeline",
             "This is a test pipeline",
             "Test McTestFace",
             no_git=True,
             outdir=test_pipeline_dir,
-            plain=True,
         )
         create_obj.init_pipeline()

         with tempfile.TemporaryDirectory() as test_outdir:
             download_obj = DownloadWorkflow(pipeline="dummy", revision="1.2.0", outdir=test_outdir)
-            shutil.copytree(test_pipeline_dir, os.path.join(test_outdir, "workflow"))
+            shutil.copytree(test_pipeline_dir, Path(test_outdir, "workflow"))

             download_obj.download_configs()

             # Test the function
             download_obj.wf_use_local_configs("workflow")

-            wf_config = nf_core.utils.fetch_wf_config(os.path.join(test_outdir, "workflow"), cache_config=False)
+            wf_config = nf_core.utils.fetch_wf_config(Path(test_outdir, "workflow"), cache_config=False)
             assert wf_config["params.custom_config_base"] == f"{test_outdir}/workflow/../configs/"

     #
@@ -174,7 +174,7 @@ def test_find_container_images_config_basic(self, tmp_path, mock_fetch_wf_config
     @mock.patch("nf_core.utils.fetch_wf_config")
     def test__find_container_images_config_nextflow(self, tmp_path, mock_fetch_wf_config):
         download_obj = DownloadWorkflow(pipeline="dummy", outdir=tmp_path)
-        result = run_cmd("nextflow", f"config -flat {Path(__file__).resolve().parent / 'data/mock_config_containers'}")
+        result = run_cmd("nextflow", f"config -flat {TEST_DATA_DIR / 'mock_config_containers'}")
         if result is not None:
             nfconfig_raw, _ = result
             config = {}
@@ -204,7 +204,7 @@ def test__find_container_images_config_nextflow(self, tmp_path, mock_fetch_wf_co
     def test_find_container_images_modules(self, tmp_path, mock_fetch_wf_config):
         download_obj = DownloadWorkflow(pipeline="dummy", outdir=tmp_path)
         mock_fetch_wf_config.return_value = {}
-        download_obj.find_container_images(Path(__file__).resolve().parent / "data/mock_module_containers")
+        download_obj.find_container_images(str(Path(TEST_DATA_DIR, "mock_module_containers")))

         # mock_docker_single_quay_io.nf
         assert "quay.io/biocontainers/singlequay:1.9--pyh9f0ad1d_0" in
download_obj.containers @@ -547,7 +547,7 @@ def test_remote_container_functionality(self, tmp_dir): outdir=os.path.join(tmp_dir, "new"), revision="3.9", compress_type="none", - container_cache_index=Path(__file__).resolve().parent / "data/testdata_remote_containers.txt", + container_cache_index=str(Path(TEST_DATA_DIR, "testdata_remote_containers.txt")), ) download_obj.include_configs = False # suppress prompt, because stderr.is_interactive doesn't. @@ -565,7 +565,7 @@ def test_remote_container_functionality(self, tmp_dir): # Tests for the main entry method 'download_workflow' # @with_temporary_folder - @mock.patch("nf_core.download.DownloadWorkflow.singularity_pull_image") + @mock.patch("nf_core.pipelines.download.DownloadWorkflow.singularity_pull_image") @mock.patch("shutil.which") def test_download_workflow_with_success(self, tmp_dir, mock_download_image, mock_singularity_installed): os.environ["NXF_SINGULARITY_CACHEDIR"] = "foo" @@ -586,7 +586,7 @@ def test_download_workflow_with_success(self, tmp_dir, mock_download_image, mock # Test Download for Seqera Platform # @with_temporary_folder - @mock.patch("nf_core.download.DownloadWorkflow.get_singularity_images") + @mock.patch("nf_core.pipelines.download.DownloadWorkflow.get_singularity_images") def test_download_workflow_for_platform(self, tmp_dir, _): download_obj = DownloadWorkflow( pipeline="nf-core/rnaseq", @@ -602,7 +602,7 @@ def test_download_workflow_for_platform(self, tmp_dir, _): assert isinstance(download_obj.wf_sha, dict) and len(download_obj.wf_sha) == 0 assert isinstance(download_obj.wf_download_url, dict) and len(download_obj.wf_download_url) == 0 - wfs = nf_core.list.Workflows() + wfs = nf_core.pipelines.list.Workflows() wfs.get_remote_workflows() ( download_obj.pipeline, @@ -644,10 +644,15 @@ def test_download_workflow_for_platform(self, tmp_dir, _): in download_obj.containers ) # indirect definition via $container variable. 
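# The Seqera Platform download tests below each append the same "clean-up" loop
# to remove "nf-core-rnaseq*" directories from the working directory. A minimal
# sketch of an equivalent approach, assuming a unittest.TestCase subclass
# (`_remove_rnaseq_dirs` is an illustrative name, not part of nf-core/tools):
#
#     def setUp(self):
#         super().setUp()
#         # registered once; runs even when the test body fails
#         self.addCleanup(self._remove_rnaseq_dirs)
#
#     def _remove_rnaseq_dirs(self):
#         for path in Path.cwd().glob("nf-core-rnaseq*"):
#             shutil.rmtree(path)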
+            # clean-up
+            # remove "nf-core-rnaseq*" directories
+            for path in Path.cwd().glob("nf-core-rnaseq*"):
+                shutil.rmtree(path)
+
     #
     # Brief test adding a single custom tag to Seqera Platform download
     #
-    @mock.patch("nf_core.download.DownloadWorkflow.get_singularity_images")
+    @mock.patch("nf_core.pipelines.download.DownloadWorkflow.get_singularity_images")
     @with_temporary_folder
     def test_download_workflow_for_platform_with_one_custom_tag(self, _, tmp_dir):
         download_obj = DownloadWorkflow(
@@ -660,10 +665,15 @@ def test_download_workflow_for_platform_with_one_custom_tag(self, _, tmp_dir):
         )
         assert isinstance(download_obj.additional_tags, list) and len(download_obj.additional_tags) == 1

+        # clean-up
+        # remove "nf-core-rnaseq*" directories
+        for path in Path.cwd().glob("nf-core-rnaseq*"):
+            shutil.rmtree(path)
+
     #
     # Test adding custom tags to Seqera Platform download (full test)
     #
-    @mock.patch("nf_core.download.DownloadWorkflow.get_singularity_images")
+    @mock.patch("nf_core.pipelines.download.DownloadWorkflow.get_singularity_images")
     @with_temporary_folder
     def test_download_workflow_for_platform_with_custom_tags(self, _, tmp_dir):
         with self._caplog.at_level(logging.INFO):
@@ -691,7 +701,7 @@ def test_download_workflow_for_platform_with_custom_tags(self, _, tmp_dir):
         assert isinstance(download_obj.wf_download_url, dict) and len(download_obj.wf_download_url) == 0
         assert isinstance(download_obj.additional_tags, list) and len(download_obj.additional_tags) == 5

-        wfs = nf_core.list.Workflows()
+        wfs = nf_core.pipelines.list.Workflows()
         wfs.get_remote_workflows()
         (
             download_obj.pipeline,
@@ -728,3 +738,8 @@ def test_download_workflow_for_platform_with_custom_tags(self, _, tmp_dir):
                 "[red]Could not apply invalid `--tag` specification[/]: 'What is this?'",
             }
         )
+
+        # clean-up
+        # remove "nf-core-rnaseq*" directories
+        for path in Path.cwd().glob("nf-core-rnaseq*"):
+            shutil.rmtree(path)
diff --git a/tests/test_launch.py b/tests/pipelines/test_launch.py
similarity index 81%
rename from tests/test_launch.py
rename to tests/pipelines/test_launch.py
index 79dbe3fb9..5e230528a 100644
--- a/tests/test_launch.py
+++ b/tests/pipelines/test_launch.py
@@ -1,54 +1,42 @@
 """Tests covering the pipeline launch code."""

 import json
-import os
-import shutil
 from pathlib import Path
-from unittest import TestCase, mock
+from unittest import mock

 import pytest

-import nf_core.create
-import nf_core.launch
+import nf_core.pipelines.create.create
+import nf_core.pipelines.launch

-from .utils import create_tmp_pipeline, with_temporary_file, with_temporary_folder
+from ..test_pipelines import TestPipelines
+from ..utils import with_temporary_file, with_temporary_folder


-class TestLaunch(TestCase):
-    """Class for launch tests"""
+class TestLaunch(TestPipelines):
+    def setUp(self) -> None:
+        super().setUp()
+        self.nf_params_fn = Path(self.pipeline_dir, "nf-params.json")
+        self.launcher = nf_core.pipelines.launch.Launch(self.pipeline_dir, params_out=self.nf_params_fn)

-    def setUp(self):
-        """Create a new PipelineSchema and Launch objects"""
-        self.tmp_dir, self.template_dir, self.pipeline_name, self.pipeline_dir = create_tmp_pipeline()
-        self.nf_params_fn = os.path.join(self.tmp_dir, "nf-params.json")
-        self.launcher = nf_core.launch.Launch(self.pipeline_dir, params_out=self.nf_params_fn)
-
-    def tearDown(self):
-        """Clean up temporary files and folders"""
-
-        if Path(self.nf_params_fn).exists():
-            Path(self.nf_params_fn).unlink()
-
-        if Path(self.tmp_dir).exists():
-            shutil.rmtree(self.tmp_dir)
-
-    @mock.patch.object(nf_core.launch.Launch, "prompt_web_gui", side_effect=[True])
-    @mock.patch.object(nf_core.launch.Launch, "launch_web_gui")
+    @mock.patch.object(nf_core.pipelines.launch.Launch, "prompt_web_gui", side_effect=[True])
+    @mock.patch.object(nf_core.pipelines.launch.Launch, "launch_web_gui")
     def test_launch_pipeline(self, mock_webbrowser, mock_lauch_web_gui):
         """Test the main launch function"""
         self.launcher.launch_pipeline()

-    @mock.patch.object(nf_core.launch.Confirm, "ask", side_effect=[False])
+    @mock.patch.object(nf_core.pipelines.launch.Confirm, "ask", side_effect=[False])
     def test_launch_file_exists(self, mock_confirm):
         """Test that we detect an existing params file and return"""
         # Make an empty params file to be overwritten
         open(self.nf_params_fn, "a").close()
+        # Try to launch; it should return with an error
         assert self.launcher.launch_pipeline() is False

-    @mock.patch.object(nf_core.launch.Launch, "prompt_web_gui", side_effect=[True])
-    @mock.patch.object(nf_core.launch.Launch, "launch_web_gui")
-    @mock.patch.object(nf_core.launch.Confirm, "ask", side_effect=[False])
+    @mock.patch.object(nf_core.pipelines.launch.Launch, "prompt_web_gui", side_effect=[True])
+    @mock.patch.object(nf_core.pipelines.launch.Launch, "launch_web_gui")
+    @mock.patch.object(nf_core.pipelines.launch.Confirm, "ask", side_effect=[False])
     def test_launch_file_exists_overwrite(self, mock_webbrowser, mock_lauch_web_gui, mock_confirm):
         """Test that we detect an existing params file and we overwrite it"""
         # Make an empty params file to be overwritten
@@ -59,21 +47,21 @@ def test_launch_file_exists_overwrite(self, mock_webbrowser, mock_lauch_web_gui,
     def test_get_pipeline_schema(self):
         """Test loading the params schema from a pipeline"""
         self.launcher.get_pipeline_schema()
-        assert len(self.launcher.schema_obj.schema["definitions"]["input_output_options"]["properties"]) > 2
+        assert len(self.launcher.schema_obj.schema["$defs"]["input_output_options"]["properties"]) > 2

     @with_temporary_folder
     def test_make_pipeline_schema(self, tmp_path):
         """Create a workflow, but delete the schema file, then try to load it"""
-        test_pipeline_dir = os.path.join(tmp_path, "wf")
-        create_obj = nf_core.create.PipelineCreate(
-            "testpipeline", "", "", outdir=test_pipeline_dir, no_git=True, plain=True
+        test_pipeline_dir = Path(tmp_path, "wf")
+        create_obj = nf_core.pipelines.create.create.PipelineCreate(
+            "testpipeline", "a description", "Me", outdir=test_pipeline_dir, no_git=True
         )
         create_obj.init_pipeline()
-        os.remove(os.path.join(test_pipeline_dir, "nextflow_schema.json"))
-        self.launcher = nf_core.launch.Launch(test_pipeline_dir, params_out=self.nf_params_fn)
+        Path(test_pipeline_dir, "nextflow_schema.json").unlink()
+        self.launcher = nf_core.pipelines.launch.Launch(test_pipeline_dir, params_out=self.nf_params_fn)
         self.launcher.get_pipeline_schema()
-        assert len(self.launcher.schema_obj.schema["definitions"]["input_output_options"]["properties"]) > 2
-        assert self.launcher.schema_obj.schema["definitions"]["input_output_options"]["properties"]["outdir"] == {
+        assert len(self.launcher.schema_obj.schema["$defs"]["input_output_options"]["properties"]) >= 2
+        assert self.launcher.schema_obj.schema["$defs"]["input_output_options"]["properties"]["outdir"] == {
             "type": "string",
             "format": "directory-path",
             "description": "The output directory where the results will be saved.
You have to use absolute paths to storage on Cloud infrastructure.", @@ -103,8 +91,8 @@ def test_nf_merge_schema(self): self.launcher.get_pipeline_schema() self.launcher.set_schema_inputs() self.launcher.merge_nxf_flag_schema() - assert self.launcher.schema_obj.schema["allOf"][0] == {"$ref": "#/definitions/coreNextflow"} - assert "-resume" in self.launcher.schema_obj.schema["definitions"]["coreNextflow"]["properties"] + assert self.launcher.schema_obj.schema["allOf"][0] == {"$ref": "#/$defs/coreNextflow"} + assert "-resume" in self.launcher.schema_obj.schema["$defs"]["coreNextflow"]["properties"] def test_ob_to_questionary_string(self): """Check converting a python dict to a pyenquirer format - simple strings""" @@ -113,7 +101,12 @@ def test_ob_to_questionary_string(self): "default": "data/*{1,2}.fastq.gz", } result = self.launcher.single_param_to_questionary("input", sc_obj) - assert result == {"type": "input", "name": "input", "message": "", "default": "data/*{1,2}.fastq.gz"} + assert result == { + "type": "input", + "name": "input", + "message": "", + "default": "data/*{1,2}.fastq.gz", + } @mock.patch("questionary.unsafe_prompt", side_effect=[{"use_web_gui": "Web based"}]) def test_prompt_web_gui_true(self, mock_prompt): @@ -135,7 +128,8 @@ def test_launch_web_gui_missing_keys(self, mock_poll_nfcore_web_api): assert exc_info.value.args[0].startswith("Web launch response not recognised:") @mock.patch( - "nf_core.utils.poll_nfcore_web_api", side_effect=[{"api_url": "foo", "web_url": "bar", "status": "recieved"}] + "nf_core.utils.poll_nfcore_web_api", + side_effect=[{"api_url": "foo", "web_url": "bar", "status": "recieved"}], ) @mock.patch("webbrowser.open") @mock.patch("nf_core.utils.wait_cli_function") @@ -145,7 +139,10 @@ def test_launch_web_gui(self, mock_poll_nfcore_web_api, mock_webbrowser, mock_wa self.launcher.merge_nxf_flag_schema() assert self.launcher.launch_web_gui() is None - @mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{"status": "error", "message": "foo"}]) + @mock.patch( + "nf_core.utils.poll_nfcore_web_api", + side_effect=[{"status": "error", "message": "foo"}], + ) def test_get_web_launch_response_error(self, mock_poll_nfcore_web_api): """Test polling the website for a launch response - status error""" with pytest.raises(AssertionError) as exc_info: @@ -159,12 +156,18 @@ def test_get_web_launch_response_unexpected(self, mock_poll_nfcore_web_api): self.launcher.get_web_launch_response() assert exc_info.value.args[0].startswith("Web launch GUI returned unexpected status (foo): ") - @mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{"status": "waiting_for_user"}]) + @mock.patch( + "nf_core.utils.poll_nfcore_web_api", + side_effect=[{"status": "waiting_for_user"}], + ) def test_get_web_launch_response_waiting(self, mock_poll_nfcore_web_api): """Test polling the website for a launch response - status waiting_for_user""" assert self.launcher.get_web_launch_response() is False - @mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{"status": "launch_params_complete"}]) + @mock.patch( + "nf_core.utils.poll_nfcore_web_api", + side_effect=[{"status": "launch_params_complete"}], + ) def test_get_web_launch_response_missing_keys(self, mock_poll_nfcore_web_api): """Test polling the website for a launch response - complete, but missing keys""" with pytest.raises(AssertionError) as exc_info: @@ -186,7 +189,7 @@ def test_get_web_launch_response_missing_keys(self, mock_poll_nfcore_web_api): } ], ) - @mock.patch.object(nf_core.launch.Launch, 
"sanitise_web_response") + @mock.patch.object(nf_core.pipelines.launch.Launch, "sanitise_web_response") def test_get_web_launch_response_valid(self, mock_poll_nfcore_web_api, mock_sanitise): """Test polling the website for a launch response - complete, valid response""" self.launcher.get_pipeline_schema() @@ -197,11 +200,9 @@ def test_sanitise_web_response(self): self.launcher.get_pipeline_schema() self.launcher.nxf_flags["-name"] = "" self.launcher.schema_obj.input_params["igenomes_ignore"] = "true" - self.launcher.schema_obj.input_params["max_cpus"] = "12" self.launcher.sanitise_web_response() assert "-name" not in self.launcher.nxf_flags assert self.launcher.schema_obj.input_params["igenomes_ignore"] is True - assert self.launcher.schema_obj.input_params["max_cpus"] == 12 def test_ob_to_questionary_bool(self): """Check converting a python dict to a pyenquirer format - booleans""" @@ -274,7 +275,10 @@ def test_ob_to_questionary_enum(self): def test_ob_to_questionary_pattern(self): """Check converting a python dict to a questionary format - with pattern""" - sc_obj = {"type": "string", "pattern": "^([a-zA-Z0-9_\\-\\.]+)@([a-zA-Z0-9_\\-\\.]+)\\.([a-zA-Z]{2,5})$"} + sc_obj = { + "type": "string", + "pattern": "^([a-zA-Z0-9_\\-\\.]+)@([a-zA-Z0-9_\\-\\.]+)\\.([a-zA-Z]{2,5})$", + } result = self.launcher.single_param_to_questionary("email", sc_obj) assert result["type"] == "input" assert result["validate"]("test@email.com") is True @@ -315,10 +319,7 @@ def test_build_command_params(self): self.launcher.schema_obj.input_params.update({"input": "custom_input"}) self.launcher.build_command() # Check command - assert ( - self.launcher.nextflow_cmd - == f'nextflow run {self.pipeline_dir} -params-file "{os.path.relpath(self.nf_params_fn)}"' - ) + assert self.launcher.nextflow_cmd == f'nextflow run {self.pipeline_dir} -params-file "{self.nf_params_fn}"' # Check saved parameters file with open(self.nf_params_fn) as fh: try: diff --git a/tests/pipelines/test_lint.py b/tests/pipelines/test_lint.py new file mode 100644 index 000000000..9ca29d249 --- /dev/null +++ b/tests/pipelines/test_lint.py @@ -0,0 +1,150 @@ +"""Some tests covering the linting code.""" + +import json +from pathlib import Path + +import yaml + +import nf_core.pipelines.create.create +import nf_core.pipelines.lint + +from ..test_pipelines import TestPipelines +from ..utils import with_temporary_folder + + +class TestLint(TestPipelines): + """Class for lint tests""" + + def setUp(self) -> None: + super().setUp() + self.lint_obj = nf_core.pipelines.lint.PipelineLint(self.pipeline_dir) + + +########################## +# CORE lint.py FUNCTIONS # +########################## +class TestPipelinesLint(TestLint): + def test_run_linting_function(self): + """Run the master run_linting() function in lint.py + + We don't really check any of this code as it's just a series of function calls + and we're testing each of those individually. This is mostly to check for syntax errors.""" + nf_core.pipelines.lint.run_linting(self.pipeline_dir, False) + + def test_init_pipeline_lint(self): + """Simply create a PipelineLint object. 
+ + This checks that all of the lint test imports are working properly, + we also check that the git sha was found and that the release flag works properly + """ + lint_obj = nf_core.pipelines.lint.PipelineLint(self.pipeline_dir, True) + + # Tests that extra test is added for release mode + assert "version_consistency" in lint_obj.lint_tests + assert lint_obj.git_sha + # Tests that parent nf_core.utils.Pipeline class __init__() is working to find git hash + assert len(lint_obj.git_sha) > 0 + + def test_load_lint_config_not_found(self): + """Try to load a linting config file that doesn't exist""" + assert self.lint_obj._load_lint_config() + assert self.lint_obj.lint_config == {} + + def test_load_lint_config_ignore_all_tests(self): + """Try to load a linting config file that ignores all tests""" + + # Make a copy of the test pipeline and create a lint object + new_pipeline = self._make_pipeline_copy() + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) + + # Make a config file listing all test names + config_dict = {"repository_type": "pipeline", "lint": {test_name: False for test_name in lint_obj.lint_tests}} + with open(Path(new_pipeline, ".nf-core.yml"), "w") as fh: + yaml.dump(config_dict, fh) + + # Load the new lint config file and check + lint_obj._load_lint_config() + assert sorted(list(lint_obj.lint_config.keys())) == sorted(lint_obj.lint_tests) + + # Try running linting and make sure that all tests are ignored + lint_obj._lint_pipeline() + assert len(lint_obj.passed) == 0 + assert len(lint_obj.warned) == 0 + assert len(lint_obj.failed) == 0 + assert len(lint_obj.ignored) == len(lint_obj.lint_tests) + + @with_temporary_folder + def test_json_output(self, tmp_dir): + """ + Test creation of a JSON file with lint results + + Expected JSON output: + { + "nf_core_tools_version": "1.10.dev0", + "date_run": "2020-06-05 10:56:42", + "tests_pass": [ + [ 1, "This test passed"], + [ 2, "This test also passed"] + ], + "tests_warned": [ + [ 2, "This test gave a warning"] + ], + "tests_failed": [], + "num_tests_pass": 2, + "num_tests_warned": 1, + "num_tests_failed": 0, + "has_tests_pass": true, + "has_tests_warned": true, + "has_tests_failed": false + } + """ + self.lint_obj.passed.append(("test_one", "This test passed")) + self.lint_obj.passed.append(("test_two", "This test also passed")) + self.lint_obj.warned.append(("test_three", "This test gave a warning")) + + # Make a temp dir for the JSON output + json_fn = Path(tmp_dir, "lint_results.json") + self.lint_obj._save_json_results(json_fn) + + # Load created JSON file and check its contents + with open(json_fn) as fh: + try: + saved_json = json.load(fh) + except json.JSONDecodeError as e: + raise UserWarning(f"Unable to load JSON file '{json_fn}' due to error {e}") + assert saved_json["num_tests_pass"] > 0 + assert saved_json["num_tests_warned"] > 0 + assert saved_json["num_tests_ignored"] == 0 + assert saved_json["num_tests_failed"] == 0 + assert saved_json["has_tests_pass"] + assert saved_json["has_tests_warned"] + assert not saved_json["has_tests_ignored"] + assert not saved_json["has_tests_failed"] + + def test_wrap_quotes(self): + md = self.lint_obj._wrap_quotes(["one", "two", "three"]) + assert md == "`one` or `two` or `three`" + + def test_sphinx_md_files(self): + """Check that we have .md files for all lint module code, + and that there are no unexpected files (eg. 
deleted lint tests)"""
+
+        docs_basedir = Path(Path(__file__).parent.parent.parent, "docs", "api", "_src", "pipeline_lint_tests")
+
+        # Get list of existing .md files
+        existing_docs = [
+            str(Path(docs_basedir, fn))
+            for fn in Path(docs_basedir).iterdir()
+            if fn.match("*.md") and not fn.match("index.md")
+        ]
+
+        # Check .md files against each test name
+        lint_obj = nf_core.pipelines.lint.PipelineLint("", True)
+        for test_name in lint_obj.lint_tests:
+            fn = Path(docs_basedir, f"{test_name}.md")
+            assert fn.exists(), f"Could not find lint docs .md file: {fn}"
+            existing_docs.remove(str(fn))
+
+        # Check that we have no remaining .md files that we didn't expect
+        assert len(existing_docs) == 0, f"Unexpected lint docs .md files found: {', '.join(existing_docs)}"
diff --git a/tests/test_list.py b/tests/pipelines/test_list.py
similarity index 72%
rename from tests/test_list.py
rename to tests/pipelines/test_list.py
index c78276b41..aacc3805e 100644
--- a/tests/test_list.py
+++ b/tests/pipelines/test_list.py
@@ -4,29 +4,30 @@
 import os
 import tempfile
 import time
-import unittest
 from datetime import datetime
 from pathlib import Path
-from unittest import mock
+from unittest import TestCase, mock

 import pytest
 from rich.console import Console

-import nf_core.list
+import nf_core.pipelines.list

-# create a temporary directory that can be used by the tests in this file
-tmp = Path(tempfile.mkdtemp())
-tmp_nxf = tmp / "nxf"
-tmp_nxf_str = str(tmp_nxf)
-

-class TestList(unittest.TestCase):
+class TestList(TestCase):
     """Class for list tests"""

+    def setUp(self) -> None:
+        # create a temporary directory that can be used by the tests in this file
+        tmp = Path(tempfile.TemporaryDirectory().name)
+        self.tmp_nxf = tmp / "nxf"
+        self.tmp_nxf_str = str(self.tmp_nxf)
+        os.environ["NXF_ASSETS"] = self.tmp_nxf_str
+
     @mock.patch("subprocess.check_output")
     def test_working_listcall(self, mock_subprocess):
         """Test that listing pipelines works"""
-        wf_table = nf_core.list.list_workflows()
+        wf_table = nf_core.pipelines.list.list_workflows()
         console = Console(record=True)
         console.print(wf_table)
         output = console.export_text()
@@ -36,7 +37,7 @@ def test_working_listcall(self, mock_subprocess):
     @mock.patch("subprocess.check_output")
     def test_working_listcall_archived(self, mock_subprocess):
         """Test that listing pipelines works, showing archived pipelines"""
-        wf_table = nf_core.list.list_workflows(show_archived=True)
+        wf_table = nf_core.pipelines.list.list_workflows(show_archived=True)
         console = Console(record=True)
         console.print(wf_table)
         output = console.export_text()
@@ -45,7 +46,7 @@ def test_working_listcall_archived(self, mock_subprocess):
     @mock.patch("subprocess.check_output")
     def test_working_listcall_json(self, mock_subprocess):
         """Test that listing pipelines with JSON works"""
-        wf_json_str = nf_core.list.list_workflows(as_json=True)
+        wf_json_str = nf_core.pipelines.list.list_workflows(as_json=True)
         wf_json = json.loads(wf_json_str)
         for wf in wf_json["remote_workflows"]:
             if wf["name"] == "ampliseq":
@@ -56,22 +57,22 @@ def test_working_listcall_json(self, mock_subprocess):
     def test_pretty_datetime(self):
         """Test that the pretty datetime function works"""
         now = datetime.now()
-        nf_core.list.pretty_date(now)
+        nf_core.pipelines.list.pretty_date(now)
         now_ts = time.mktime(now.timetuple())
-        nf_core.list.pretty_date(now_ts)
+        nf_core.pipelines.list.pretty_date(now_ts)

     def test_local_workflows_and_fail(self):
         """Test the local workflow class and try to get local Nextflow workflow
information""" - loc_wf = nf_core.list.LocalWorkflow("myWF") + loc_wf = nf_core.pipelines.list.LocalWorkflow("myWF") with pytest.raises(RuntimeError): loc_wf.get_local_nf_workflow_details() def test_local_workflows_compare_and_fail_silently(self): """Test the workflow class and try to compare local and remote workflows""" - wfs = nf_core.list.Workflows() - lwf_ex = nf_core.list.LocalWorkflow("myWF") + wfs = nf_core.pipelines.list.Workflows() + lwf_ex = nf_core.pipelines.list.LocalWorkflow("myWF") lwf_ex.full_name = "my Workflow" lwf_ex.commit_sha = "aw3s0meh1sh" @@ -86,7 +87,7 @@ def test_local_workflows_compare_and_fail_silently(self): "releases": [], } - rwf_ex = nf_core.list.RemoteWorkflow(remote) + rwf_ex = nf_core.pipelines.list.RemoteWorkflow(remote) rwf_ex.commit_sha = "aw3s0meh1sh" rwf_ex.releases = [{"tag_sha": "aw3s0meh1sh"}] @@ -105,43 +106,41 @@ def test_local_workflows_compare_and_fail_silently(self): rwf_ex.releases = None - @mock.patch.dict(os.environ, {"NXF_ASSETS": tmp_nxf_str}) - @mock.patch("nf_core.list.LocalWorkflow") + @mock.patch("nf_core.pipelines.list.LocalWorkflow") def test_parse_local_workflow_and_succeed(self, mock_local_wf): - test_path = tmp_nxf / "nf-core" + test_path = self.tmp_nxf / "nf-core" if not os.path.isdir(test_path): os.makedirs(test_path) - assert os.environ["NXF_ASSETS"] == tmp_nxf_str - with open(tmp_nxf / "nf-core/dummy-wf", "w") as f: + assert os.environ["NXF_ASSETS"] == self.tmp_nxf_str + with open(self.tmp_nxf / "nf-core/dummy-wf", "w") as f: f.write("dummy") - workflows_obj = nf_core.list.Workflows() + workflows_obj = nf_core.pipelines.list.Workflows() workflows_obj.get_local_nf_workflows() assert len(workflows_obj.local_workflows) == 1 - @mock.patch.dict(os.environ, {"NXF_ASSETS": tmp_nxf_str}) - @mock.patch("nf_core.list.LocalWorkflow") + @mock.patch("nf_core.pipelines.list.LocalWorkflow") @mock.patch("subprocess.check_output") def test_parse_local_workflow_home(self, mock_local_wf, mock_subprocess): - test_path = tmp_nxf / "nf-core" + test_path = self.tmp_nxf / "nf-core" if not os.path.isdir(test_path): os.makedirs(test_path) - assert os.environ["NXF_ASSETS"] == tmp_nxf_str - with open(tmp_nxf / "nf-core/dummy-wf", "w") as f: + assert os.environ["NXF_ASSETS"] == self.tmp_nxf_str + with open(self.tmp_nxf / "nf-core/dummy-wf", "w") as f: f.write("dummy") - workflows_obj = nf_core.list.Workflows() + workflows_obj = nf_core.pipelines.list.Workflows() workflows_obj.get_local_nf_workflows() @mock.patch("os.stat") @mock.patch("git.Repo") def test_local_workflow_investigation(self, mock_repo, mock_stat): - local_wf = nf_core.list.LocalWorkflow("dummy") - local_wf.local_path = tmp + local_wf = nf_core.pipelines.list.LocalWorkflow("dummy") + local_wf.local_path = self.tmp_nxf.parent mock_repo.head.commit.hexsha = "h00r4y" mock_stat.st_mode = 1 local_wf.get_local_nf_workflow_details() def test_worflow_filter(self): - workflows_obj = nf_core.list.Workflows(["rna", "myWF"]) + workflows_obj = nf_core.pipelines.list.Workflows(["rna", "myWF"]) remote = { "name": "myWF", @@ -154,7 +153,7 @@ def test_worflow_filter(self): "releases": [], } - rwf_ex = nf_core.list.RemoteWorkflow(remote) + rwf_ex = nf_core.pipelines.list.RemoteWorkflow(remote) rwf_ex.commit_sha = "aw3s0meh1sh" rwf_ex.releases = [{"tag_sha": "aw3s0meh1sh"}] @@ -169,7 +168,7 @@ def test_worflow_filter(self): "releases": [], } - rwf_ex2 = nf_core.list.RemoteWorkflow(remote2) + rwf_ex2 = nf_core.pipelines.list.RemoteWorkflow(remote2) rwf_ex2.commit_sha = "aw3s0meh1sh" rwf_ex2.releases = 
[{"tag_sha": "aw3s0meh1sh"}] @@ -182,11 +181,11 @@ def test_filter_archived_workflows(self): """ Test that archived workflows are not shown by default """ - workflows_obj = nf_core.list.Workflows() + workflows_obj = nf_core.pipelines.list.Workflows() remote1 = {"name": "myWF", "full_name": "my Workflow", "archived": True, "releases": []} - rwf_ex1 = nf_core.list.RemoteWorkflow(remote1) + rwf_ex1 = nf_core.pipelines.list.RemoteWorkflow(remote1) remote2 = {"name": "myWF", "full_name": "my Workflow", "archived": False, "releases": []} - rwf_ex2 = nf_core.list.RemoteWorkflow(remote2) + rwf_ex2 = nf_core.pipelines.list.RemoteWorkflow(remote2) workflows_obj.remote_workflows.append(rwf_ex1) workflows_obj.remote_workflows.append(rwf_ex2) @@ -200,11 +199,11 @@ def test_show_archived_workflows(self): """ Test that archived workflows can be shown optionally """ - workflows_obj = nf_core.list.Workflows(show_archived=True) + workflows_obj = nf_core.pipelines.list.Workflows(show_archived=True) remote1 = {"name": "myWF", "full_name": "my Workflow", "archived": True, "releases": []} - rwf_ex1 = nf_core.list.RemoteWorkflow(remote1) + rwf_ex1 = nf_core.pipelines.list.RemoteWorkflow(remote1) remote2 = {"name": "myWF", "full_name": "my Workflow", "archived": False, "releases": []} - rwf_ex2 = nf_core.list.RemoteWorkflow(remote2) + rwf_ex2 = nf_core.pipelines.list.RemoteWorkflow(remote2) workflows_obj.remote_workflows.append(rwf_ex1) workflows_obj.remote_workflows.append(rwf_ex2) diff --git a/tests/test_params_file.py b/tests/pipelines/test_params_file.py similarity index 65% rename from tests/test_params_file.py rename to tests/pipelines/test_params_file.py index 13c82f518..22a6182ac 100644 --- a/tests/test_params_file.py +++ b/tests/pipelines/test_params_file.py @@ -4,9 +4,9 @@ import tempfile from pathlib import Path -import nf_core.create -import nf_core.schema -from nf_core.params_file import ParamsFileBuilder +import nf_core.pipelines.create.create +import nf_core.pipelines.schema +from nf_core.pipelines.params_file import ParamsFileBuilder class TestParamsFileBuilder: @@ -15,20 +15,20 @@ class TestParamsFileBuilder: @classmethod def setup_class(cls): """Create a new PipelineSchema object""" - cls.schema_obj = nf_core.schema.PipelineSchema() + cls.schema_obj = nf_core.pipelines.schema.PipelineSchema() cls.root_repo_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) # Create a test pipeline in temp directory cls.tmp_dir = tempfile.mkdtemp() - cls.template_dir = os.path.join(cls.tmp_dir, "wf") - create_obj = nf_core.create.PipelineCreate( - "testpipeline", "", "", outdir=cls.template_dir, no_git=True, plain=True + cls.template_dir = Path(cls.tmp_dir, "wf") + create_obj = nf_core.pipelines.create.create.PipelineCreate( + "testpipeline", "a description", "Me", outdir=cls.template_dir, no_git=True ) create_obj.init_pipeline() - cls.template_schema = os.path.join(cls.template_dir, "nextflow_schema.json") + cls.template_schema = Path(cls.template_dir, "nextflow_schema.json") cls.params_template_builder = ParamsFileBuilder(cls.template_dir) - cls.invalid_template_schema = os.path.join(cls.template_dir, "nextflow_schema_invalid.json") + cls.invalid_template_schema = Path(cls.template_dir, "nextflow_schema_invalid.json") # Remove the allOf section to make the schema invalid with open(cls.template_schema) as fh: @@ -40,14 +40,14 @@ def setup_class(cls): @classmethod def teardown_class(cls): - if os.path.exists(cls.tmp_dir): + if Path(cls.tmp_dir).exists(): shutil.rmtree(cls.tmp_dir) def 
test_build_template(self): - outfile = os.path.join(self.tmp_dir, "params-file.yml") - self.params_template_builder.write_params_file(outfile) + outfile = Path(self.tmp_dir, "params-file.yml") + self.params_template_builder.write_params_file(str(outfile)) - assert os.path.exists(outfile) + assert outfile.exists() with open(outfile) as fh: out = fh.read() @@ -56,9 +56,9 @@ def test_build_template(self): def test_build_template_invalid_schema(self, caplog): """Build a schema from a template""" - outfile = os.path.join(self.tmp_dir, "params-file-invalid.yml") + outfile = Path(self.tmp_dir, "params-file-invalid.yml") builder = ParamsFileBuilder(self.invalid_template_schema) - res = builder.write_params_file(outfile) + res = builder.write_params_file(str(outfile)) assert res is False assert "Pipeline schema file is invalid" in caplog.text diff --git a/tests/test_refgenie.py b/tests/pipelines/test_refgenie.py similarity index 76% rename from tests/test_refgenie.py rename to tests/pipelines/test_refgenie.py index 23cc0dd14..734a2368b 100644 --- a/tests/test_refgenie.py +++ b/tests/pipelines/test_refgenie.py @@ -5,6 +5,7 @@ import subprocess import tempfile import unittest +from pathlib import Path class TestRefgenie(unittest.TestCase): @@ -14,36 +15,35 @@ def setUp(self): """ Prepare a refgenie config file """ - self.tmp_dir = tempfile.mkdtemp() - self.NXF_HOME = os.path.join(self.tmp_dir, ".nextflow") - self.NXF_REFGENIE_PATH = os.path.join(self.NXF_HOME, "nf-core", "refgenie_genomes.config") - self.REFGENIE = os.path.join(self.tmp_dir, "genomes_config.yaml") - self.translation_file = os.path.join(self.tmp_dir, "alias_translations.yaml") + self.tmp_dir = Path(tempfile.TemporaryDirectory().name) + self.NXF_HOME = self.tmp_dir / ".nextflow" + self.NXF_REFGENIE_PATH = self.NXF_HOME / "nf-core" / "refgenie_genomes.config" + self.REFGENIE = self.tmp_dir / "genomes_config.yaml" + self.translation_file = self.tmp_dir / "alias_translations.yaml" # Set NXF_HOME environment variable # avoids adding includeConfig statement to config file outside the current tmpdir try: self.NXF_HOME_ORIGINAL = os.environ["NXF_HOME"] except Exception: self.NXF_HOME_ORIGINAL = None - os.environ["NXF_HOME"] = self.NXF_HOME + os.environ["NXF_HOME"] = str(self.NXF_HOME) # create NXF_HOME and nf-core directories - os.makedirs(os.path.join(self.NXF_HOME, "nf-core"), exist_ok=True) + nf_core_dir = self.NXF_HOME / "nf-core" + nf_core_dir.mkdir(parents=True, exist_ok=True) # Initialize a refgenie config os.system(f"refgenie init -c {self.REFGENIE}") # Add NXF_REFGENIE_PATH to refgenie config with open(self.REFGENIE, "a") as fh: - fh.write(f"nextflow_config: {os.path.join(self.NXF_REFGENIE_PATH)}\n") + fh.write(f"nextflow_config: {self.NXF_REFGENIE_PATH}\n") # Add an alias translation to YAML file with open(self.translation_file, "a") as fh: fh.write("ensembl_gtf: gtf\n") def tearDown(self) -> None: - # Remove the tempdir again - os.system(f"rm -rf {self.tmp_dir}") # Reset NXF_HOME environment variable if self.NXF_HOME_ORIGINAL is None: del os.environ["NXF_HOME"] diff --git a/tests/test_schema.py b/tests/pipelines/test_schema.py similarity index 91% rename from tests/test_schema.py rename to tests/pipelines/test_schema.py index e0921908d..2abaf07bd 100644 --- a/tests/test_schema.py +++ b/tests/pipelines/test_schema.py @@ -12,10 +12,10 @@ import requests import yaml -import nf_core.create -import nf_core.schema +import nf_core.pipelines.create.create +import nf_core.pipelines.schema -from .utils import with_temporary_file, 
with_temporary_folder
+from ..utils import with_temporary_file, with_temporary_folder


 class TestSchema(unittest.TestCase):
@@ -23,14 +23,17 @@ class TestSchema(unittest.TestCase):

     def setUp(self):
         """Create a new PipelineSchema object"""
-        self.schema_obj = nf_core.schema.PipelineSchema()
+        self.schema_obj = nf_core.pipelines.schema.PipelineSchema()
+        self.schema_obj.schema_draft = "https://json-schema.org/draft/2020-12/schema"
+        self.schema_obj.defs_notation = "$defs"
+        self.schema_obj.validation_plugin = "nf-schema"
         self.root_repo_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))

         # Create a test pipeline in temp directory
         self.tmp_dir = tempfile.mkdtemp()
         self.template_dir = os.path.join(self.tmp_dir, "wf")
-        create_obj = nf_core.create.PipelineCreate(
-            "testpipeline", "", "", outdir=self.template_dir, no_git=True, plain=True
+        create_obj = nf_core.pipelines.create.create.PipelineCreate(
+            "testpipeline", "a description", "Me", outdir=self.template_dir, no_git=True
         )
         create_obj.init_pipeline()

@@ -105,7 +108,7 @@ def test_schema_docs(self):
         docs = self.schema_obj.print_documentation()
         assert self.schema_obj.schema["title"] in docs
         assert self.schema_obj.schema["description"] in docs
-        for definition in self.schema_obj.schema.get("definitions", {}).values():
+        for definition in self.schema_obj.schema.get("$defs", {}).values():
             assert definition["title"] in docs
             assert definition["description"] in docs

@@ -175,40 +178,43 @@ def test_validate_schema_fail_duplicate_ids(self):
         Check that the schema validation fails when we have duplicate IDs in definition subschema
         """
         self.schema_obj.schema = {
-            "definitions": {"groupOne": {"properties": {"foo": {}}}, "groupTwo": {"properties": {"foo": {}}}},
-            "allOf": [{"$ref": "#/definitions/groupOne"}, {"$ref": "#/definitions/groupTwo"}],
+            "$schema": "https://json-schema.org/draft/2020-12/schema",
+            "$defs": {"groupOne": {"properties": {"foo": {}}}, "groupTwo": {"properties": {"foo": {}}}},
+            "allOf": [{"$ref": "#/$defs/groupOne"}, {"$ref": "#/$defs/groupTwo"}],
         }
         with pytest.raises(AssertionError) as exc_info:
             self.schema_obj.validate_schema(self.schema_obj.schema)
-        assert exc_info.value.args[0] == "Duplicate parameter found in schema `definitions`: `foo`"
+        assert exc_info.value.args[0] == "Duplicate parameter found in schema `$defs`: `foo`"

     def test_validate_schema_fail_missing_def(self):
         """
-        Check that the schema validation fails when we a definition in allOf is not in definitions
+        Check that the schema validation fails when a definition in allOf is not in $defs
         """
         self.schema_obj.schema = {
-            "definitions": {"groupOne": {"properties": {"foo": {}}}, "groupTwo": {"properties": {"bar": {}}}},
-            "allOf": [{"$ref": "#/definitions/groupOne"}],
+            "$schema": "https://json-schema.org/draft/2020-12/schema",
+            "$defs": {"groupOne": {"properties": {"foo": {}}}, "groupTwo": {"properties": {"bar": {}}}},
+            "allOf": [{"$ref": "#/$defs/groupOne"}],
        }
         with pytest.raises(AssertionError) as exc_info:
             self.schema_obj.validate_schema(self.schema_obj.schema)
-        assert exc_info.value.args[0] == "Definition subschema `groupTwo` not included in schema `allOf`"
+        assert exc_info.value.args[0] == "Definition subschema `#/$defs/groupTwo` not included in schema `allOf`"
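# For reference, the shape these validation tests exercise: under JSON Schema
# draft 2020-12, parameter groups live in `$defs` and every group must also be
# referenced from `allOf`. A minimal sketch (not a real pipeline schema):
#
#     schema = {
#         "$schema": "https://json-schema.org/draft/2020-12/schema",
#         "$defs": {"groupOne": {"properties": {"foo": {}}}},
#         "allOf": [{"$ref": "#/$defs/groupOne"}],
#     }
#     # validate_schema(schema) should pass; dropping the allOf entry should
#     # trigger the "not included in schema `allOf`" error asserted above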
"$schema": "https://json-schema.org/draft/2020-12/schema", + "$defs": {"groupOne": {"properties": {"foo": {}}}, "groupTwo": {"properties": {"bar": {}}}}, "allOf": [ - {"$ref": "#/definitions/groupOne"}, - {"$ref": "#/definitions/groupTwo"}, - {"$ref": "#/definitions/groupThree"}, + {"$ref": "#/$defs/groupOne"}, + {"$ref": "#/$defs/groupTwo"}, + {"$ref": "#/$defs/groupThree"}, ], } with pytest.raises(AssertionError) as exc_info: self.schema_obj.validate_schema(self.schema_obj.schema) - assert exc_info.value.args[0] == "Subschema `groupThree` found in `allOf` but not `definitions`" + assert exc_info.value.args[0] == "Subschema `groupThree` found in `allOf` but not `$defs`" def test_make_skeleton_schema(self): """Test making a new schema skeleton""" @@ -264,7 +270,7 @@ def test_remove_schema_notfound_configs_childschema(self): even when they're in a group """ self.schema_obj.schema = { - "definitions": { + "$defs": { "subSchemaId": { "properties": {"foo": {"type": "string"}, "bar": {"type": "string"}}, "required": ["foo"], @@ -274,8 +280,8 @@ def test_remove_schema_notfound_configs_childschema(self): self.schema_obj.pipeline_params = {"bar": True} self.schema_obj.no_prompts = True params_removed = self.schema_obj.remove_schema_notfound_configs() - assert len(self.schema_obj.schema["definitions"]["subSchemaId"]["properties"]) == 1 - assert "required" not in self.schema_obj.schema["definitions"]["subSchemaId"] + assert len(self.schema_obj.schema["$defs"]["subSchemaId"]["properties"]) == 1 + assert "required" not in self.schema_obj.schema["$defs"]["subSchemaId"] assert len(params_removed) == 1 assert "foo" in params_removed diff --git a/tests/test_sync.py b/tests/pipelines/test_sync.py similarity index 64% rename from tests/test_sync.py rename to tests/pipelines/test_sync.py index b94968cd4..ffbe75510 100644 --- a/tests/test_sync.py +++ b/tests/pipelines/test_sync.py @@ -2,53 +2,106 @@ import json import os -import shutil -import tempfile -import unittest from pathlib import Path +from typing import Dict, List, Union from unittest import mock import git import pytest +import yaml -import nf_core.create -import nf_core.sync +import nf_core.pipelines.create.create +import nf_core.pipelines.sync +from nf_core.utils import NFCoreYamlConfig -from .utils import with_temporary_folder +from ..test_pipelines import TestPipelines +from ..utils import with_temporary_folder -class TestModules(unittest.TestCase): +class MockResponse: + def __init__(self, data: Union[Dict, List[Dict]], status_code: int, url: str): + self.url: str = url + self.status_code: int = status_code + self.from_cache: bool = False + self.reason: str = "Mocked response" + self.data: Union[Dict, List[Dict]] = data + self.content: str = json.dumps(data) + self.headers: Dict[str, str] = {"content-encoding": "test", "connection": "fake"} + + def json(self): + return self.data + + +def mocked_requests_get(url) -> MockResponse: + """Helper function to emulate POST requests responses from the web""" + + url_template = "https://api.github.com/repos/{}/response/" + if url == Path(url_template.format("no_existing_pr"), "pulls?head=TEMPLATE&base=None"): + return MockResponse([], 200, url) + if url == Path(url_template.format("list_prs"), "pulls"): + response_data = [ + { + "state": "closed", + "head": {"ref": "nf-core-template-merge-2"}, + "base": {"ref": "master"}, + "html_url": "pr_url", + } + ] + [ + { + "state": "open", + "head": {"ref": f"nf-core-template-merge-{branch_no}"}, + "base": {"ref": "master"}, + "html_url": "pr_url", + } + for 
+
+
+def mocked_requests_patch(url: str, data: str, **kwargs) -> MockResponse:
+    """Helper function to emulate PATCH requests responses from the web"""
+
+    if url == "url_to_update_pr":
+        return MockResponse({"html_url": "great_success"}, 200, url)
+    # convert data to dict
+    response = json.loads(data)
+    response["patch_url"] = url
+    return MockResponse(response, 404, url)
+
+
+def mocked_requests_post(url, **kwargs):
+    """Helper function to emulate POST requests responses from the web"""
+
+    if url == "https://api.github.com/repos/no_existing_pr/response/pulls":
+        return MockResponse({"html_url": "great_success"}, 201, url)
+
+    return MockResponse({}, 404, url)
+
+
+class TestModules(TestPipelines):
     """Class for modules tests"""

     def setUp(self):
-        """Create a new pipeline to test"""
-        self.tmp_dir = tempfile.mkdtemp()
-        self.pipeline_dir = os.path.join(self.tmp_dir, "testpipeline")
-        default_branch = "master"
-        self.create_obj = nf_core.create.PipelineCreate(
-            "testing",
-            "test pipeline",
-            "tester",
-            outdir=self.pipeline_dir,
-            plain=True,
-            default_branch=default_branch,
-        )
-        self.create_obj.init_pipeline()
-        self.remote_path = os.path.join(self.tmp_dir, "remote_repo")
+        super().setUp()
+        self.remote_path = Path(self.tmp_dir, "remote_repo")
         self.remote_repo = git.Repo.init(self.remote_path, bare=True)
         if self.remote_repo.active_branch.name != "master":
-            self.remote_repo.active_branch.rename(default_branch)
-
-    def tearDown(self):
-        if os.path.exists(self.tmp_dir):
-            shutil.rmtree(self.tmp_dir)
+            self.remote_repo.active_branch.rename("master")

     @with_temporary_folder
-    def test_inspect_sync_dir_notgit(self, tmp_dir):
+    def test_inspect_sync_dir_notgit(self, tmp_dir: str):
         """Try syncing an empty directory"""
-        psync = nf_core.sync.PipelineSync(tmp_dir)
-        with pytest.raises(nf_core.sync.SyncExceptionError) as exc_info:
+        nf_core_yml_path = Path(tmp_dir, ".nf-core.yml")
+        nf_core_yml = NFCoreYamlConfig(repository_type="pipeline")
+
+        with open(nf_core_yml_path, "w") as fh:
+            yaml.dump(nf_core_yml.model_dump(), fh)
+
+        psync = nf_core.pipelines.sync.PipelineSync(tmp_dir)
+        with pytest.raises(nf_core.pipelines.sync.SyncExceptionError) as exc_info:
             psync.inspect_sync_dir()
         assert "does not appear to be a git repository" in exc_info.value.args[0]

@@ -58,9 +111,9 @@ def test_inspect_sync_dir_dirty(self):
         test_fn = Path(self.pipeline_dir) / "uncommitted"
         test_fn.touch()
         # Try to sync, check we halt with the right error
-        psync = nf_core.sync.PipelineSync(self.pipeline_dir)
+        psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir)
         try:
-            with pytest.raises(nf_core.sync.SyncExceptionError) as exc_info:
+            with pytest.raises(nf_core.pipelines.sync.SyncExceptionError) as exc_info:
                 psync.inspect_sync_dir()
             assert exc_info.value.args[0].startswith("Uncommitted changes found in pipeline directory!")
         finally:
@@ -69,8 +122,8 @@ def test_inspect_sync_dir_dirty(self):
     def test_get_wf_config_no_branch(self):
         """Try getting a workflow config when the branch doesn't exist"""
         # Try to sync, check we halt with the right error
-        psync = nf_core.sync.PipelineSync(self.pipeline_dir, from_branch="foo")
-        with pytest.raises(nf_core.sync.SyncExceptionError) as exc_info:
+        psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir, from_branch="foo")
+        with pytest.raises(nf_core.pipelines.sync.SyncExceptionError) as exc_info:
             psync.inspect_sync_dir()
             psync.get_wf_config()
assert exc_info.value.args[0] == "Branch `foo` not found!" @@ -78,9 +131,9 @@ def test_get_wf_config_no_branch(self): def test_get_wf_config_missing_required_config(self): """Try getting a workflow config, then make it miss a required config option""" # Try to sync, check we halt with the right error - psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) psync.required_config_vars = ["fakethisdoesnotexist"] - with pytest.raises(nf_core.sync.SyncExceptionError) as exc_info: + with pytest.raises(nf_core.pipelines.sync.SyncExceptionError) as exc_info: psync.inspect_sync_dir() psync.get_wf_config() # Check that we did actually get some config back @@ -90,26 +143,26 @@ def test_get_wf_config_missing_required_config(self): def test_checkout_template_branch(self): """Try checking out the TEMPLATE branch of the pipeline""" - psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() psync.checkout_template_branch() def test_checkout_template_branch_no_template(self): """Try checking out the TEMPLATE branch of the pipeline when it does not exist""" - psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() psync.repo.delete_head("TEMPLATE") - with pytest.raises(nf_core.sync.SyncExceptionError) as exc_info: + with pytest.raises(nf_core.pipelines.sync.SyncExceptionError) as exc_info: psync.checkout_template_branch() assert exc_info.value.args[0] == "Could not check out branch 'origin/TEMPLATE' or 'TEMPLATE'" def test_delete_template_branch_files(self): """Confirm that we can delete all files in the TEMPLATE branch""" - psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() psync.checkout_template_branch() @@ -119,7 +172,7 @@ def test_delete_template_branch_files(self): def test_create_template_pipeline(self): """Confirm that we can delete all files in the TEMPLATE branch""" # First, delete all the files - psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() psync.checkout_template_branch() @@ -133,7 +186,7 @@ def test_create_template_pipeline(self): def test_commit_template_changes_nochanges(self): """Try to commit the TEMPLATE branch, but no changes were made""" # Check out the TEMPLATE branch but skip making the new template etc. - psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() psync.checkout_template_branch() @@ -143,7 +196,7 @@ def test_commit_template_changes_nochanges(self): def test_commit_template_changes_changes(self): """Try to commit the TEMPLATE branch, but no changes were made""" # Check out the TEMPLATE branch but skip making the new template etc. 
- psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() psync.checkout_template_branch() @@ -160,7 +213,7 @@ def test_commit_template_changes_changes(self): def test_push_template_branch_error(self): """Try pushing the changes, but without a remote (should fail)""" # Check out the TEMPLATE branch but skip making the new template etc. - psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() psync.checkout_template_branch() @@ -169,13 +222,13 @@ def test_push_template_branch_error(self): test_fn.touch() psync.commit_template_changes() # Try to push changes - with pytest.raises(nf_core.sync.PullRequestExceptionError) as exc_info: + with pytest.raises(nf_core.pipelines.sync.PullRequestExceptionError) as exc_info: psync.push_template_branch() assert exc_info.value.args[0].startswith("Could not push TEMPLATE branch") def test_create_merge_base_branch(self): """Try creating a merge base branch""" - psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() @@ -194,7 +247,7 @@ def test_create_merge_base_branch_thrice(self): end, so it is needed to call it a third time to make sure this is picked up. """ - psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() @@ -207,7 +260,7 @@ def test_create_merge_base_branch_thrice(self): def test_push_merge_branch(self): """Try pushing merge branch""" - psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() psync.repo.create_remote("origin", self.remote_path) @@ -219,102 +272,20 @@ def test_push_merge_branch(self): def test_push_merge_branch_without_create_branch(self): """Try pushing merge branch without creating first""" - psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() psync.repo.create_remote("origin", self.remote_path) - with pytest.raises(nf_core.sync.PullRequestExceptionError) as exc_info: + with pytest.raises(nf_core.pipelines.sync.PullRequestExceptionError) as exc_info: psync.push_merge_branch() assert exc_info.value.args[0].startswith(f"Could not push branch '{psync.merge_branch}'") - def mocked_requests_get(url, **kwargs): - """Helper function to emulate POST requests responses from the web""" - - class MockResponse: - def __init__(self, data, status_code): - self.url = kwargs.get("url") - self.status_code = status_code - self.from_cache = False - self.reason = "Mocked response" - self.data = data - self.content = json.dumps(data) - self.headers = {"content-encoding": "test", "connection": "fake"} - - def json(self): - return self.data - - url_template = "https://api.github.com/repos/{}/response/" - if url == os.path.join(url_template.format("no_existing_pr"), "pulls?head=TEMPLATE&base=None"): - response_data = [] - return MockResponse(response_data, 200) - if url == os.path.join(url_template.format("list_prs"), "pulls"): - response_data = [ - { - "state": "closed", - "head": {"ref": "nf-core-template-merge-2"}, - "base": {"ref": "master"}, - "html_url": "pr_url", - } - ] + [ - { - "state": 
"open", - "head": {"ref": f"nf-core-template-merge-{branch_no}"}, - "base": {"ref": "master"}, - "html_url": "pr_url", - } - for branch_no in range(3, 7) - ] - return MockResponse(response_data, 200) - - return MockResponse({"html_url": url}, 404) - - def mocked_requests_patch(url, **kwargs): - """Helper function to emulate POST requests responses from the web""" - - class MockResponse: - def __init__(self, data, status_code): - self.url = kwargs.get("url") - self.status_code = status_code - self.from_cache = False - self.reason = "Mocked" - self.content = json.dumps(data) - self.headers = {"content-encoding": "test", "connection": "fake"} - - if url == "url_to_update_pr": - response_data = {"html_url": "great_success"} - return MockResponse(response_data, 200) - - return MockResponse({"patch_url": url}, 404) - - def mocked_requests_post(url, **kwargs): - """Helper function to emulate POST requests responses from the web""" - - class MockResponse: - def __init__(self, data, status_code): - self.url = kwargs.get("url") - self.status_code = status_code - self.from_cache = False - self.reason = "Mocked" - self.data = data - self.content = json.dumps(data) - self.headers = {"content-encoding": "test", "connection": "fake"} - - def json(self): - return self.data - - if url == "https://api.github.com/repos/no_existing_pr/response/pulls": - response_data = {"html_url": "great_success"} - return MockResponse(response_data, 201) - - response_data = {} - return MockResponse(response_data, 404) - @mock.patch("nf_core.utils.gh_api.get", side_effect=mocked_requests_get) @mock.patch("nf_core.utils.gh_api.post", side_effect=mocked_requests_post) def test_make_pull_request_success(self, mock_post, mock_get): """Try making a PR - successful response""" - psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) psync.gh_api.get = mock_get psync.gh_api.post = mock_post psync.gh_username = "no_existing_pr" @@ -327,13 +298,13 @@ def test_make_pull_request_success(self, mock_post, mock_get): @mock.patch("nf_core.utils.gh_api.post", side_effect=mocked_requests_post) def test_make_pull_request_bad_response(self, mock_post, mock_get): """Try making a PR and getting a 404 error""" - psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) psync.gh_api.get = mock_get psync.gh_api.post = mock_post psync.gh_username = "bad_url" psync.gh_repo = "bad_url/response" os.environ["GITHUB_AUTH_TOKEN"] = "test" - with pytest.raises(nf_core.sync.PullRequestExceptionError) as exc_info: + with pytest.raises(nf_core.pipelines.sync.PullRequestExceptionError) as exc_info: psync.make_pull_request() assert exc_info.value.args[0].startswith( "Something went badly wrong - GitHub API PR failed - got return code 404" @@ -342,7 +313,7 @@ def test_make_pull_request_bad_response(self, mock_post, mock_get): @mock.patch("nf_core.utils.gh_api.get", side_effect=mocked_requests_get) def test_close_open_template_merge_prs(self, mock_get): """Try closing all open prs""" - psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() psync.gh_api.get = mock_get @@ -350,18 +321,18 @@ def test_close_open_template_merge_prs(self, mock_get): psync.gh_repo = "list_prs/response" os.environ["GITHUB_AUTH_TOKEN"] = "test" - with mock.patch("nf_core.sync.PipelineSync.close_open_pr") as mock_close_open_pr: + with 
mock.patch("nf_core.pipelines.sync.PipelineSync.close_open_pr") as mock_close_open_pr: psync.close_open_template_merge_prs() prs = mock_get(f"https://api.github.com/repos/{psync.gh_repo}/pulls").data for pr in prs: - if pr["state"] == "open": + if pr.get("state", None) == "open": mock_close_open_pr.assert_any_call(pr) @mock.patch("nf_core.utils.gh_api.post", side_effect=mocked_requests_post) @mock.patch("nf_core.utils.gh_api.patch", side_effect=mocked_requests_patch) - def test_close_open_pr(self, mock_patch, mock_post): - psync = nf_core.sync.PipelineSync(self.pipeline_dir) + def test_close_open_pr(self, mock_patch, mock_post) -> None: + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() psync.gh_api.post = mock_post @@ -369,7 +340,7 @@ def test_close_open_pr(self, mock_patch, mock_post): psync.gh_username = "bad_url" psync.gh_repo = "bad_url/response" os.environ["GITHUB_AUTH_TOKEN"] = "test" - pr = { + pr: Dict[str, Union[str, Dict[str, str]]] = { "state": "open", "head": {"ref": "nf-core-template-merge-3"}, "base": {"ref": "master"}, @@ -384,7 +355,7 @@ def test_close_open_pr(self, mock_patch, mock_post): @mock.patch("nf_core.utils.gh_api.post", side_effect=mocked_requests_post) @mock.patch("nf_core.utils.gh_api.patch", side_effect=mocked_requests_patch) def test_close_open_pr_fail(self, mock_patch, mock_post): - psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() psync.gh_api.post = mock_post @@ -406,7 +377,7 @@ def test_close_open_pr_fail(self, mock_patch, mock_post): def test_reset_target_dir(self): """Try resetting target pipeline directory""" - psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() @@ -418,12 +389,12 @@ def test_reset_target_dir(self): def test_reset_target_dir_fake_branch(self): """Try resetting target pipeline directory but original branch does not exist""" - psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() psync.original_branch = "fake_branch" - with pytest.raises(nf_core.sync.SyncExceptionError) as exc_info: + with pytest.raises(nf_core.pipelines.sync.SyncExceptionError) as exc_info: psync.reset_target_dir() assert exc_info.value.args[0].startswith("Could not reset to original branch `fake_branch`") diff --git a/tests/subworkflows/create.py b/tests/subworkflows/create.py deleted file mode 100644 index 002b88967..000000000 --- a/tests/subworkflows/create.py +++ /dev/null @@ -1,110 +0,0 @@ -import os -import shutil -from pathlib import Path -from unittest import mock - -import pytest -import yaml -from git.repo import Repo - -import nf_core.subworkflows -from tests.utils import GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH, GITLAB_URL - - -def test_subworkflows_create_succeed(self): - """Succeed at creating a subworkflow from the template inside a pipeline""" - subworkflow_create = nf_core.subworkflows.SubworkflowCreate( - self.pipeline_dir, "test_subworkflow_local", "@author", True - ) - subworkflow_create.create() - assert os.path.exists(os.path.join(self.pipeline_dir, "subworkflows", "local", "test_subworkflow_local.nf")) - - -def test_subworkflows_create_fail_exists(self): - """Fail at creating the same subworkflow twice""" - subworkflow_create = 
nf_core.subworkflows.SubworkflowCreate( - self.pipeline_dir, "test_subworkflow2", "@author", False - ) - subworkflow_create.create() - with pytest.raises(UserWarning) as excinfo: - subworkflow_create.create() - assert "Subworkflow file exists already" in str(excinfo.value) - - -def test_subworkflows_create_nfcore_modules(self): - """Create a subworkflow in nf-core/modules clone""" - subworkflow_create = nf_core.subworkflows.SubworkflowCreate( - self.nfcore_modules, "test_subworkflow", "@author", force=True - ) - subworkflow_create.create() - assert os.path.exists(os.path.join(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "main.nf")) - assert os.path.exists( - os.path.join(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test") - ) - - -@mock.patch("rich.prompt.Confirm.ask") -def test_subworkflows_migrate(self, mock_rich_ask): - """Create a subworkflow with the --migrate-pytest option to convert pytest to nf-test""" - pytest_dir = Path(self.nfcore_modules, "tests", "subworkflows", "nf-core", "bam_stats_samtools") - subworkflow_dir = Path(self.nfcore_modules, "subworkflows", "nf-core", "bam_stats_samtools") - - # Clone modules repo with pytests - shutil.rmtree(self.nfcore_modules) - Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH) - with open(subworkflow_dir / "main.nf") as fh: - old_main_nf = fh.read() - with open(subworkflow_dir / "meta.yml") as fh: - old_meta_yml = fh.read() - - # Create a subworkflow with --migrate-pytest - mock_rich_ask.return_value = True - subworkflow_create = nf_core.subworkflows.SubworkflowCreate( - self.nfcore_modules, "bam_stats_samtools", migrate_pytest=True - ) - subworkflow_create.create() - - with open(subworkflow_dir / "main.nf") as fh: - new_main_nf = fh.read() - with open(subworkflow_dir / "meta.yml") as fh: - new_meta_yml = fh.read() - nextflow_config = subworkflow_dir / "tests" / "nextflow.config" - - # Check that old files have been copied to the new module - assert old_main_nf == new_main_nf - assert old_meta_yml == new_meta_yml - assert nextflow_config.is_file() - - # Check that pytest folder is deleted - assert not pytest_dir.is_dir() - - # Check that pytest_modules.yml is updated - with open(Path(self.nfcore_modules, "tests", "config", "pytest_modules.yml")) as fh: - modules_yml = yaml.safe_load(fh) - assert "subworkflows/bam_stats_samtools" not in modules_yml.keys() - - -@mock.patch("rich.prompt.Confirm.ask") -def test_subworkflows_migrate_no_delete(self, mock_rich_ask): - """Create a subworkflow with the --migrate-pytest option to convert pytest to nf-test. 
- Test that pytest directory is not deleted.""" - pytest_dir = Path(self.nfcore_modules, "tests", "subworkflows", "nf-core", "bam_stats_samtools") - - # Clone modules repo with pytests - shutil.rmtree(self.nfcore_modules) - Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH) - - # Create a module with --migrate-pytest - mock_rich_ask.return_value = False - module_create = nf_core.subworkflows.SubworkflowCreate( - self.nfcore_modules, "bam_stats_samtools", migrate_pytest=True - ) - module_create.create() - - # Check that pytest folder is not deleted - assert pytest_dir.is_dir() - - # Check that pytest_modules.yml is updated - with open(Path(self.nfcore_modules, "tests", "config", "pytest_modules.yml")) as fh: - modules_yml = yaml.safe_load(fh) - assert "subworkflows/bam_stats_samtools" not in modules_yml.keys() diff --git a/tests/subworkflows/info.py b/tests/subworkflows/info.py deleted file mode 100644 index 688120ac0..000000000 --- a/tests/subworkflows/info.py +++ /dev/null @@ -1,64 +0,0 @@ -from rich.console import Console - -import nf_core.subworkflows - -from ..utils import GITLAB_SUBWORKFLOWS_BRANCH, GITLAB_URL - - -def test_subworkflows_info_remote(self): - """Test getting info about a remote subworkflow""" - mods_info = nf_core.subworkflows.SubworkflowInfo(self.pipeline_dir, "bam_sort_stats_samtools") - mods_info_output = mods_info.get_component_info() - console = Console(record=True) - console.print(mods_info_output) - output = console.export_text() - - assert "Subworkflow: bam_sort_stats_samtools" in output - assert "Inputs" in output - assert "Outputs" in output - - -def test_subworkflows_info_remote_gitlab(self): - """Test getting info about a subworkflow in the remote gitlab repo""" - mods_info = nf_core.subworkflows.SubworkflowInfo( - self.pipeline_dir, "bam_sort_stats_samtools", remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH - ) - mods_info_output = mods_info.get_component_info() - console = Console(record=True) - console.print(mods_info_output) - output = console.export_text() - - assert "Subworkflow: bam_sort_stats_samtools" in output - assert "Inputs" in output - assert "Outputs" in output - assert "--git-remote" in output - - -def test_subworkflows_info_local(self): - """Test getting info about a locally installed subworkflow""" - self.subworkflow_install.install("bam_sort_stats_samtools") - mods_info = nf_core.subworkflows.SubworkflowInfo(self.pipeline_dir, "bam_sort_stats_samtools") - mods_info.local = True - mods_info_output = mods_info.get_component_info() - console = Console(record=True) - console.print(mods_info_output) - output = console.export_text() - - assert "Subworkflow: bam_sort_stats_samtools" in output - assert "Inputs" in output - assert "Outputs" in output - - -def test_subworkflows_info_in_modules_repo(self): - """Test getting info about a locally subworkflow in the modules repo""" - self.subworkflow_install.install("bam_sort_stats_samtools") - mods_info = nf_core.subworkflows.SubworkflowInfo(self.nfcore_modules, "bam_sort_stats_samtools") - mods_info.local = True - mods_info_output = mods_info.get_component_info() - console = Console(record=True) - console.print(mods_info_output) - output = console.export_text() - - assert "Subworkflow: bam_sort_stats_samtools" in output - assert "Inputs" in output - assert "Outputs" in output diff --git a/tests/subworkflows/install.py b/tests/subworkflows/install.py deleted file mode 100644 index dfe71686f..000000000 --- a/tests/subworkflows/install.py +++ /dev/null @@ 
-1,154 +0,0 @@ -import os - -import pytest - -from nf_core.modules.modules_json import ModulesJson -from nf_core.subworkflows.install import SubworkflowInstall - -from ..utils import ( - GITLAB_BRANCH_TEST_BRANCH, - GITLAB_REPO, - GITLAB_SUBWORKFLOWS_BRANCH, - GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH, - GITLAB_URL, - with_temporary_folder, -) - - -def test_subworkflow_install_nopipeline(self): - """Test installing a subworkflow - no pipeline given""" - self.subworkflow_install.dir = None - assert self.subworkflow_install.install("foo") is False - - -@with_temporary_folder -def test_subworkflows_install_emptypipeline(self, tmpdir): - """Test installing a subworkflow - empty dir given""" - os.mkdir(os.path.join(tmpdir, "nf-core-pipe")) - self.subworkflow_install.dir = os.path.join(tmpdir, "nf-core-pipe") - with pytest.raises(UserWarning) as excinfo: - self.subworkflow_install.install("foo") - assert "Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value) - - -def test_subworkflows_install_nosubworkflow(self): - """Test installing a subworkflow - unrecognised subworkflow given""" - assert self.subworkflow_install.install("foo") is False - - -def test_subworkflows_install_bam_sort_stats_samtools(self): - """Test installing a subworkflow - bam_sort_stats_samtools""" - assert self.subworkflow_install.install("bam_sort_stats_samtools") is not False - subworkflow_path = os.path.join(self.subworkflow_install.dir, "subworkflows", "nf-core", "bam_sort_stats_samtools") - sub_subworkflow_path = os.path.join(self.subworkflow_install.dir, "subworkflows", "nf-core", "bam_stats_samtools") - samtools_index_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index") - samtools_sort_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "sort") - samtools_stats_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "stats") - samtools_idxstats_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "idxstats") - samtools_flagstat_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "flagstat") - assert os.path.exists(subworkflow_path) - assert os.path.exists(sub_subworkflow_path) - assert os.path.exists(samtools_index_path) - assert os.path.exists(samtools_sort_path) - assert os.path.exists(samtools_stats_path) - assert os.path.exists(samtools_idxstats_path) - assert os.path.exists(samtools_flagstat_path) - - -def test_subworkflows_install_bam_sort_stats_samtools_twice(self): - """Test installing a subworkflow - bam_sort_stats_samtools already there""" - self.subworkflow_install.install("bam_sort_stats_samtools") - assert self.subworkflow_install.install("bam_sort_stats_samtools") is False - - -def test_subworkflows_install_from_gitlab(self): - """Test installing a subworkflow from GitLab""" - assert self.subworkflow_install_gitlab.install("bam_stats_samtools") is True - # Verify that the branch entry was added correctly - modules_json = ModulesJson(self.pipeline_dir) - assert ( - modules_json.get_component_branch(self.component_type, "bam_stats_samtools", GITLAB_URL, GITLAB_REPO) - == GITLAB_SUBWORKFLOWS_BRANCH - ) - - -def test_subworkflows_install_different_branch_fail(self): - """Test installing a subworkflow from a different branch""" - install_obj = SubworkflowInstall(self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH) - # The bam_stats_samtools subworkflow does not exists in the branch-test branch - assert 
install_obj.install("bam_stats_samtools") is False - - -def test_subworkflows_install_tracking(self): - """Test installing a subworkflow and finding the correct entries in installed_by section of modules.json""" - self.subworkflow_install.install("bam_sort_stats_samtools") - - # Verify that the installed_by entry was added correctly - modules_json = ModulesJson(self.pipeline_dir) - mod_json = modules_json.get_modules_json() - assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"][ - "bam_sort_stats_samtools" - ]["installed_by"] == ["subworkflows"] - assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"]["bam_stats_samtools"][ - "installed_by" - ] == ["bam_sort_stats_samtools"] - assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["modules"]["nf-core"]["samtools/stats"][ - "installed_by" - ] == ["bam_stats_samtools"] - assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["modules"]["nf-core"]["samtools/sort"][ - "installed_by" - ] == ["bam_sort_stats_samtools"] - - # Clean directory - self.subworkflow_remove.remove("bam_sort_stats_samtools") - - -def test_subworkflows_install_tracking_added_already_installed(self): - """Test installing a subworkflow and finding the correct entries in installed_by section of modules.json""" - self.subworkflow_install.install("bam_sort_stats_samtools") - self.subworkflow_install.install("bam_stats_samtools") - - # Verify that the installed_by entry was added correctly - modules_json = ModulesJson(self.pipeline_dir) - mod_json = modules_json.get_modules_json() - assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"][ - "bam_sort_stats_samtools" - ]["installed_by"] == ["subworkflows"] - assert sorted( - mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"]["bam_stats_samtools"][ - "installed_by" - ] - ) == sorted(["bam_sort_stats_samtools", "subworkflows"]) - - # Clean directory - self.subworkflow_remove.remove("bam_sort_stats_samtools") - self.subworkflow_remove.remove("bam_stats_samtools") - - -def test_subworkflows_install_tracking_added_super_subworkflow(self): - """Test installing a subworkflow and finding the correct entries in installed_by section of modules.json""" - self.subworkflow_install.install("bam_stats_samtools") - self.subworkflow_install.install("bam_sort_stats_samtools") - - # Verify that the installed_by entry was added correctly - modules_json = ModulesJson(self.pipeline_dir) - mod_json = modules_json.get_modules_json() - assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"][ - "bam_sort_stats_samtools" - ]["installed_by"] == ["subworkflows"] - assert sorted( - mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"]["bam_stats_samtools"][ - "installed_by" - ] - ) == sorted(["subworkflows", "bam_sort_stats_samtools"]) - - -def test_subworkflows_install_alternate_remote(self): - """Test installing a module from a different remote with the same organization path""" - install_obj = SubworkflowInstall( - self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH - ) - # Install a subworkflow from GitLab which is also installed from GitHub with the same org_path - with pytest.raises(Exception) as excinfo: - install_obj.install("fastqc") - assert "Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value) diff --git a/tests/subworkflows/lint.py 
b/tests/subworkflows/lint.py deleted file mode 100644 index 73d2452b3..000000000 --- a/tests/subworkflows/lint.py +++ /dev/null @@ -1,342 +0,0 @@ -import json -import shutil -from pathlib import Path - -import pytest - -import nf_core.subworkflows - -from ..utils import GITLAB_SUBWORKFLOWS_BRANCH, GITLAB_URL - - -def test_subworkflows_lint(self): - """Test linting the fastq_align_bowtie2 subworkflow""" - self.subworkflow_install.install("fastq_align_bowtie2") - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir) - subworkflow_lint.lint(print_results=False, subworkflow="fastq_align_bowtie2") - assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) >= 0 - - -def test_subworkflows_lint_empty(self): - """Test linting a pipeline with no subworkflows installed""" - self.subworkflow_remove.remove("utils_nextflow_pipeline", force=True) - self.subworkflow_remove.remove("utils_nfcore_pipeline", force=True) - self.subworkflow_remove.remove("utils_nfvalidation_plugin", force=True) - with pytest.raises(LookupError): - nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir) - - -def test_subworkflows_lint_new_subworkflow(self): - """lint a new subworkflow""" - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) - subworkflow_lint.lint(print_results=True, all_subworkflows=True) - assert len(subworkflow_lint.failed) == 0 - - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) >= 0 - - -def test_subworkflows_lint_no_gitlab(self): - """Test linting a pipeline with no subworkflows installed""" - with pytest.raises(LookupError): - nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir, remote_url=GITLAB_URL) - - -def test_subworkflows_lint_gitlab_subworkflows(self): - """Lint subworkflows from a different remote""" - self.subworkflow_install_gitlab.install("bam_stats_samtools") - subworkflow_lint = nf_core.subworkflows.SubworkflowLint( - dir=self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH - ) - subworkflow_lint.lint(print_results=False, all_subworkflows=True) - assert len(subworkflow_lint.failed) == 0 - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) >= 0 - - -def test_subworkflows_lint_multiple_remotes(self): - """Lint subworkflows from a different remote""" - self.subworkflow_install_gitlab.install("bam_stats_samtools") - self.subworkflow_install.install("fastq_align_bowtie2") - subworkflow_lint = nf_core.subworkflows.SubworkflowLint( - dir=self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH - ) - subworkflow_lint.lint(print_results=False, all_subworkflows=True) - assert len(subworkflow_lint.failed) == 0 - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) >= 0 - - -def test_subworkflows_lint_snapshot_file(self): - """Test linting a subworkflow with a snapshot file""" - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) - subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") - assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) >= 0 - - -def test_subworkflows_lint_snapshot_file_missing_fail(self): - """Test linting a subworkflow with a snapshot file missing, which should fail""" - 
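These lint tests, and the class-based replacements added further down, all end with the same three assertions on the linter's failed/passed/warned lists. A minimal sketch of that pattern as a hypothetical helper (the directory= keyword follows the new test files; the deleted ones still used dir=):

    import nf_core.subworkflows


    def assert_subworkflow_lints_clean(directory, subworkflow):
        """Hypothetical helper: lint one subworkflow and fail loudly with the collected messages."""
        subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=directory)
        subworkflow_lint.lint(print_results=False, subworkflow=subworkflow)
        assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}"
        assert len(subworkflow_lint.passed) > 0
        assert len(subworkflow_lint.warned) >= 0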
Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test.snap").unlink() - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) - subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") - Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test.snap").touch() - assert len(subworkflow_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) >= 0 - - -def test_subworkflows_lint_snapshot_file_not_needed(self): - """Test linting a subworkflow which doesn't need a snapshot file by removing the snapshot keyword in the main.nf.test file""" - with open(Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test")) as fh: - content = fh.read() - new_content = content.replace("snapshot(", "snap (") - with open( - Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test"), "w" - ) as fh: - fh.write(new_content) - - Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test.snap").unlink() - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) - subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") - Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test.snap").touch() - assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) >= 0 - - -def test_subworkflows_lint_less_than_two_modules_warning(self): - """Test linting a subworkflow with less than two modules""" - self.subworkflow_install.install("bam_stats_samtools") - # Remove two modules - with open(Path(self.pipeline_dir, "subworkflows", "nf-core", "bam_stats_samtools", "main.nf")) as fh: - content = fh.read() - new_content = content.replace( - "include { SAMTOOLS_IDXSTATS } from '../../../modules/nf-core/samtools/idxstats/main'", "" - ) - new_content = new_content.replace( - "include { SAMTOOLS_FLAGSTAT } from '../../../modules/nf-core/samtools/flagstat/main'", "" - ) - with open(Path(self.pipeline_dir, "subworkflows", "nf-core", "bam_stats_samtools", "main.nf"), "w") as fh: - fh.write(new_content) - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir) - subworkflow_lint.lint(print_results=False, subworkflow="bam_stats_samtools") - assert len(subworkflow_lint.failed) >= 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) > 0 - assert subworkflow_lint.warned[0].lint_test == "main_nf_include" - # cleanup - self.subworkflow_remove.remove("bam_stats_samtools", force=True) - - -def test_subworkflows_lint_include_multiple_alias(self): - """Test linting a subworkflow with multiple include methods""" - self.subworkflow_install.install("bam_stats_samtools") - with open(Path(self.pipeline_dir, "subworkflows", "nf-core", "bam_stats_samtools", "main.nf")) as fh: - content = fh.read() - new_content = content.replace("SAMTOOLS_STATS", "SAMTOOLS_STATS_1") - new_content = new_content.replace( - "include { SAMTOOLS_STATS_1 ", - "include { SAMTOOLS_STATS as SAMTOOLS_STATS_1; SAMTOOLS_STATS as SAMTOOLS_STATS_2 ", - ) - with open(Path(self.pipeline_dir, "subworkflows", 
"nf-core", "bam_stats_samtools", "main.nf"), "w") as fh: - fh.write(new_content) - - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir) - subworkflow_lint.lint(print_results=False, subworkflow="bam_stats_samtools") - assert len(subworkflow_lint.failed) >= 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) == 2 - assert any( - [ - x.message == "Included component 'SAMTOOLS_STATS_1' versions are added in main.nf" - for x in subworkflow_lint.passed - ] - ) - assert any([x.message == "Included component 'SAMTOOLS_STATS_1' used in main.nf" for x in subworkflow_lint.passed]) - assert any( - [x.message == "Included component 'SAMTOOLS_STATS_2' not used in main.nf" for x in subworkflow_lint.warned] - ) - - # cleanup - self.subworkflow_remove.remove("bam_stats_samtools", force=True) - - -def test_subworkflows_lint_capitalization_fail(self): - """Test linting a subworkflow with a capitalization fail""" - self.subworkflow_install.install("bam_stats_samtools") - # change workflow name to lowercase - with open(Path(self.pipeline_dir, "subworkflows", "nf-core", "bam_stats_samtools", "main.nf")) as fh: - content = fh.read() - new_content = content.replace("workflow BAM_STATS_SAMTOOLS {", "workflow bam_stats_samtools {") - with open(Path(self.pipeline_dir, "subworkflows", "nf-core", "bam_stats_samtools", "main.nf"), "w") as fh: - fh.write(new_content) - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir) - subworkflow_lint.lint(print_results=False, subworkflow="bam_stats_samtools") - assert len(subworkflow_lint.failed) >= 1, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) >= 0 - assert any([x.lint_test == "workflow_capitals" for x in subworkflow_lint.failed]) - - # cleanup - self.subworkflow_remove.remove("bam_stats_samtools", force=True) - - -def test_subworkflows_absent_version(self): - """Test linting a nf-test module if the versions is absent in the snapshot file `""" - snap_file = Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test.snap") - with open(snap_file) as fh: - content = fh.read() - new_content = content.replace("versions", "foo") - with open(snap_file, "w") as fh: - fh.write(new_content) - - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) - subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") - assert len(subworkflow_lint.failed) == 0 - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) >= 0, f"Linting warned with {[x.__dict__ for x in subworkflow_lint.warned]}" - assert any([x.lint_test == "test_snap_versions" for x in subworkflow_lint.warned]) - - # cleanup - with open(snap_file, "w") as fh: - fh.write(content) - - -def test_subworkflows_missing_test_dir(self): - """Test linting a nf-test subworkflow if the tests directory is missing""" - test_dir = Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests") - test_dir_copy = shutil.copytree(test_dir, test_dir.parent / "tests_copy") - shutil.rmtree(test_dir) - - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) - subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") - assert len(subworkflow_lint.failed) == 0 - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) 
>= 0, f"Linting warned with {[x.__dict__ for x in subworkflow_lint.warned]}" - assert any([x.lint_test == "test_dir_versions" for x in subworkflow_lint.warned]) - - # cleanup - shutil.copytree(test_dir_copy, test_dir) - - -def test_subworkflows_missing_main_nf(self): - """Test linting a nf-test subworkflow if the main.nf file is missing""" - main_nf = Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "main.nf") - main_nf_copy = shutil.copy(main_nf, main_nf.parent / "main_nf_copy") - main_nf.unlink() - - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) - subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") - assert len(subworkflow_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) >= 0 - assert subworkflow_lint.failed[0].lint_test == "main_nf_exists" - - # cleanup - shutil.copy(main_nf_copy, main_nf) - - -def test_subworkflows_empty_file_in_snapshot(self): - """Test linting a nf-test subworkflow with an empty file sha sum in the test snapshot, which should make it fail (if it is not a stub)""" - snap_file = Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test.snap") - snap = json.load(snap_file.open()) - content = snap_file.read_text() - snap["my test"]["content"][0]["0"] = "test:md5,d41d8cd98f00b204e9800998ecf8427e" - - with open(snap_file, "w") as fh: - json.dump(snap, fh) - - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) - subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") - assert len(subworkflow_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) >= 0 - assert subworkflow_lint.failed[0].lint_test == "test_snap_md5sum" - - # reset the file - with open(snap_file, "w") as fh: - fh.write(content) - - -def test_subworkflows_empty_file_in_stub_snapshot(self): - """Test linting a nf-test subworkflow with an empty file sha sum in the stub test snapshot, which should make it not fail""" - snap_file = Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test.snap") - snap = json.load(snap_file.open()) - content = snap_file.read_text() - snap["my_test_stub"] = {"content": [{"0": "test:md5,d41d8cd98f00b204e9800998ecf8427e", "versions": {}}]} - - with open(snap_file, "w") as fh: - json.dump(snap, fh) - - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) - subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") - assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) >= 0 - assert any(x.lint_test == "test_snap_md5sum" for x in subworkflow_lint.passed) - - # reset the file - with open(snap_file, "w") as fh: - fh.write(content) - - -def test_subworkflows_missing_tags_yml(self): - """Test linting a subworkflow with a missing tags.yml file""" - tags_path = Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "tags.yml") - tags_path.rename(tags_path.parent / "tags.yml.bak") - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) - subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") - - assert 
len(subworkflow_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" - assert len(subworkflow_lint.passed) >= 0 - assert len(subworkflow_lint.warned) >= 0 - assert subworkflow_lint.failed[0].lint_test == "test_tags_yml_exists" - - # cleanup - Path(tags_path.parent / "tags.yml.bak").rename(tags_path.parent / "tags.yml") - - -def test_subworkflows_incorrect_tags_yml_key(self): - """Test linting a subworkflow with an incorrect key in tags.yml file""" - tags_path = Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "tags.yml") - with open(tags_path) as fh: - content = fh.read() - new_content = content.replace("test_subworkflow:", "subworkflow:") - with open(tags_path, "w") as fh: - fh.write(new_content) - module_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) - module_lint.lint(print_results=True, subworkflow="test_subworkflow") - with open(tags_path, "w") as fh: - fh.write(content) - assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) >= 0 - assert len(module_lint.warned) >= 0 - assert module_lint.failed[0].lint_test == "test_tags_yml" - # cleanup - with open(tags_path, "w") as fh: - fh.write(content) - - -def test_subworkflows_incorrect_tags_yml_values(self): - """Test linting a subworkflow with an incorrect path in tags.yml file""" - tags_path = Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "tags.yml") - with open(tags_path) as fh: - content = fh.read() - new_content = content.replace("subworkflows/nf-core/test_subworkflow/**", "foo") - with open(tags_path, "w") as fh: - fh.write(new_content) - module_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, subworkflow="test_subworkflow") - with open(tags_path, "w") as fh: - fh.write(content) - assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) >= 0 - assert len(module_lint.warned) >= 0 - assert module_lint.failed[0].lint_test == "test_tags_yml" - # cleanup - with open(tags_path, "w") as fh: - fh.write(content) diff --git a/tests/subworkflows/list.py b/tests/subworkflows/list.py deleted file mode 100644 index c65999d42..000000000 --- a/tests/subworkflows/list.py +++ /dev/null @@ -1,49 +0,0 @@ -from rich.console import Console - -import nf_core.subworkflows - -from ..utils import GITLAB_SUBWORKFLOWS_BRANCH, GITLAB_URL - - -def test_subworkflows_list_remote(self): - """Test listing available subworkflows""" - subworkflows_list = nf_core.subworkflows.SubworkflowList(None, remote=True) - listed_subworkflows = subworkflows_list.list_components() - console = Console(record=True) - console.print(listed_subworkflows) - output = console.export_text() - assert "bam_stats" in output - - -def test_subworkflows_list_remote_gitlab(self): - """Test listing the subworkflows in the remote gitlab repo""" - subworkflows_list = nf_core.subworkflows.SubworkflowList( - None, remote=True, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH - ) - listed_subworkflows = subworkflows_list.list_components() - console = Console(record=True) - console.print(listed_subworkflows) - output = console.export_text() - assert "bam_stats" in output - - -def test_subworkflows_install_and_list_subworkflows(self): - """Test listing locally installed subworkflows""" - self.subworkflow_install.install("bam_sort_stats_samtools") - 
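The list and info tests capture rich output the same way throughout: print to a recording Console, then assert on the exported text. A minimal sketch of that idiom as a hypothetical helper:

    from rich.console import Console


    def render_to_text(renderable) -> str:
        """Hypothetical helper: render any rich object and return it as plain text."""
        console = Console(record=True)
        console.print(renderable)
        return console.export_text()

The assertions then run against the returned string, for example: assert "bam_stats" in render_to_text(listed_subworkflows).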
subworkflows_list = nf_core.subworkflows.SubworkflowList(self.pipeline_dir, remote=False) - listed_subworkflows = subworkflows_list.list_components() - console = Console(record=True) - console.print(listed_subworkflows) - output = console.export_text() - assert "bam_stats" in output - - -def test_subworkflows_install_gitlab_and_list_subworkflows(self): - """Test listing locally installed subworkflows""" - self.subworkflow_install_gitlab.install("bam_sort_stats_samtools") - subworkflows_list = nf_core.subworkflows.SubworkflowList(self.pipeline_dir, remote=False) - listed_subworkflows = subworkflows_list.list_components() - console = Console(record=True) - console.print(listed_subworkflows) - output = console.export_text() - assert "bam_stats" in output diff --git a/tests/subworkflows/remove.py b/tests/subworkflows/remove.py deleted file mode 100644 index c6a3b9845..000000000 --- a/tests/subworkflows/remove.py +++ /dev/null @@ -1,100 +0,0 @@ -from pathlib import Path - -from nf_core.modules.modules_json import ModulesJson - - -def test_subworkflows_remove_uninstalled_subworkflow(self): - """Test removing subworkflow without installing it""" - assert self.subworkflow_remove.remove("bam_sort_stats_samtools") is False - - -def test_subworkflows_remove_subworkflow(self): - """Test removing subworkflow and all it's dependencies after installing it""" - self.subworkflow_install.install("bam_sort_stats_samtools") - - subworkflow_path = Path(self.subworkflow_install.dir, "subworkflows", "nf-core") - bam_sort_stats_samtools_path = Path(subworkflow_path, "bam_sort_stats_samtools") - bam_stats_samtools_path = Path(subworkflow_path, "bam_stats_samtools") - samtools_index_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index") - ModulesJson(self.pipeline_dir) - mod_json_before = ModulesJson(self.pipeline_dir).get_modules_json() - assert self.subworkflow_remove.remove("bam_sort_stats_samtools") - mod_json_after = ModulesJson(self.pipeline_dir).get_modules_json() - assert Path.exists(bam_sort_stats_samtools_path) is False - assert Path.exists(bam_stats_samtools_path) is False - assert Path.exists(samtools_index_path) is False - assert mod_json_before != mod_json_after - # assert subworkflows key is removed from modules.json - assert ( - "bam_sort_stats_samtools" - not in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"].keys() - ) - assert "samtools/index" not in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["modules"].keys() - - -def test_subworkflows_remove_subworkflow_keep_installed_module(self): - """Test removing subworkflow and all it's dependencies after installing it, except for a separately installed module""" - self.subworkflow_install.install("bam_sort_stats_samtools") - self.mods_install.install("samtools/index") - - subworkflow_path = Path(self.subworkflow_install.dir, "subworkflows", "nf-core") - bam_sort_stats_samtools_path = Path(subworkflow_path, "bam_sort_stats_samtools") - bam_stats_samtools_path = Path(subworkflow_path, "bam_stats_samtools") - samtools_index_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index") - - mod_json_before = ModulesJson(self.pipeline_dir).get_modules_json() - assert self.subworkflow_remove.remove("bam_sort_stats_samtools") - mod_json_after = ModulesJson(self.pipeline_dir).get_modules_json() - - assert Path.exists(bam_sort_stats_samtools_path) is False - assert Path.exists(bam_stats_samtools_path) is False - assert Path.exists(samtools_index_path) is 
True - assert mod_json_before != mod_json_after - # assert subworkflows key is removed from modules.json - assert ( - "bam_sort_stats_samtools" - not in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"].keys() - ) - assert ( - "samtools/index" - in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["modules"]["nf-core"].keys() - ) - - -def test_subworkflows_remove_one_of_two_subworkflow(self): - """Test removing subworkflow and all it's dependencies after installing it""" - self.subworkflow_install.install("bam_sort_stats_samtools") - self.subworkflow_install.install("bam_stats_samtools") - subworkflow_path = Path(self.subworkflow_install.dir, "subworkflows", "nf-core") - bam_sort_stats_samtools_path = Path(subworkflow_path, "bam_sort_stats_samtools") - bam_stats_samtools_path = Path(subworkflow_path, "bam_stats_samtools") - samtools_index_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index") - samtools_stats_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "stats") - - assert self.subworkflow_remove.remove("bam_sort_stats_samtools") - - assert Path.exists(subworkflow_path) is True - assert Path.exists(bam_sort_stats_samtools_path) is False - assert Path.exists(bam_stats_samtools_path) is True - assert Path.exists(samtools_index_path) is False - assert Path.exists(samtools_stats_path) is True - self.subworkflow_remove.remove("bam_stats_samtools") - - -def test_subworkflows_remove_included_subworkflow(self): - """Test removing subworkflow which is installed by another subworkflow and all it's dependencies.""" - self.subworkflow_install.install("bam_sort_stats_samtools") - subworkflow_path = Path(self.subworkflow_install.dir, "subworkflows", "nf-core") - bam_sort_stats_samtools_path = Path(subworkflow_path, "bam_sort_stats_samtools") - bam_stats_samtools_path = Path(subworkflow_path, "bam_stats_samtools") - samtools_index_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index") - samtools_stats_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "stats") - - assert self.subworkflow_remove.remove("bam_stats_samtools") is False - - assert Path.exists(subworkflow_path) is True - assert Path.exists(bam_sort_stats_samtools_path) is True - assert Path.exists(bam_stats_samtools_path) is True - assert Path.exists(samtools_index_path) is True - assert Path.exists(samtools_stats_path) is True - self.subworkflow_remove.remove("bam_sort_stats_samtools") diff --git a/tests/subworkflows/test_create.py b/tests/subworkflows/test_create.py new file mode 100644 index 000000000..48cb48226 --- /dev/null +++ b/tests/subworkflows/test_create.py @@ -0,0 +1,109 @@ +import shutil +from pathlib import Path +from unittest import mock + +import pytest +import yaml +from git.repo import Repo + +import nf_core.subworkflows +from tests.utils import GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH, GITLAB_URL + +from ..test_subworkflows import TestSubworkflows + + +class TestSubworkflowsCreate(TestSubworkflows): + def test_subworkflows_create_succeed(self): + """Succeed at creating a subworkflow from the template inside a pipeline""" + subworkflow_create = nf_core.subworkflows.SubworkflowCreate( + self.pipeline_dir, "test_subworkflow_local", "@author", True + ) + subworkflow_create.create() + assert Path(self.pipeline_dir, "subworkflows", "local", "test_subworkflow_local.nf").exists() + + def test_subworkflows_create_fail_exists(self): + """Fail at creating the same subworkflow 
twice""" + subworkflow_create = nf_core.subworkflows.SubworkflowCreate( + self.pipeline_dir, "test_subworkflow2", "@author", False + ) + subworkflow_create.create() + with pytest.raises(UserWarning) as excinfo: + subworkflow_create.create() + assert "Subworkflow file exists already" in str(excinfo.value) + + def test_subworkflows_create_nfcore_modules(self): + """Create a subworkflow in nf-core/modules clone""" + subworkflow_create = nf_core.subworkflows.SubworkflowCreate( + self.nfcore_modules, "test_subworkflow", "@author", force=True + ) + subworkflow_create.create() + assert Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "main.nf").exists() + + assert Path( + self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test" + ).exists() + + @mock.patch("rich.prompt.Confirm.ask") + def test_subworkflows_migrate(self, mock_rich_ask): + """Create a subworkflow with the --migrate-pytest option to convert pytest to nf-test""" + pytest_dir = Path(self.nfcore_modules, "tests", "subworkflows", "nf-core", "bam_stats_samtools") + subworkflow_dir = Path(self.nfcore_modules, "subworkflows", "nf-core", "bam_stats_samtools") + + # Clone modules repo with pytests + shutil.rmtree(self.nfcore_modules) + Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH) + with open(subworkflow_dir / "main.nf") as fh: + old_main_nf = fh.read() + with open(subworkflow_dir / "meta.yml") as fh: + old_meta_yml = fh.read() + + # Create a subworkflow with --migrate-pytest + mock_rich_ask.return_value = True + subworkflow_create = nf_core.subworkflows.SubworkflowCreate( + self.nfcore_modules, "bam_stats_samtools", migrate_pytest=True + ) + subworkflow_create.create() + + with open(subworkflow_dir / "main.nf") as fh: + new_main_nf = fh.read() + with open(subworkflow_dir / "meta.yml") as fh: + new_meta_yml = fh.read() + nextflow_config = subworkflow_dir / "tests" / "nextflow.config" + + # Check that old files have been copied to the new module + assert old_main_nf == new_main_nf + assert old_meta_yml == new_meta_yml + assert nextflow_config.is_file() + + # Check that pytest folder is deleted + assert not pytest_dir.is_dir() + + # Check that pytest_modules.yml is updated + with open(Path(self.nfcore_modules, "tests", "config", "pytest_modules.yml")) as fh: + modules_yml = yaml.safe_load(fh) + assert "subworkflows/bam_stats_samtools" not in modules_yml.keys() + + @mock.patch("rich.prompt.Confirm.ask") + def test_subworkflows_migrate_no_delete(self, mock_rich_ask): + """Create a subworkflow with the --migrate-pytest option to convert pytest to nf-test. 
+        Test that pytest directory is not deleted."""
+        pytest_dir = Path(self.nfcore_modules, "tests", "subworkflows", "nf-core", "bam_stats_samtools")
+
+        # Clone modules repo with pytests
+        shutil.rmtree(self.nfcore_modules)
+        Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH)
+
+        # Create a subworkflow with --migrate-pytest
+        mock_rich_ask.return_value = False
+        module_create = nf_core.subworkflows.SubworkflowCreate(
+            self.nfcore_modules, "bam_stats_samtools", migrate_pytest=True
+        )
+        module_create.create()
+
+        # Check that pytest folder is not deleted
+        assert pytest_dir.is_dir()
+
+        # Check that pytest_modules.yml is updated
+        with open(Path(self.nfcore_modules, "tests", "config", "pytest_modules.yml")) as fh:
+            modules_yml = yaml.safe_load(fh)
+        assert "subworkflows/bam_stats_samtools" not in modules_yml.keys()
diff --git a/tests/subworkflows/test_info.py b/tests/subworkflows/test_info.py
new file mode 100644
index 000000000..cf0f49271
--- /dev/null
+++ b/tests/subworkflows/test_info.py
@@ -0,0 +1,63 @@
+from rich.console import Console
+
+import nf_core.subworkflows
+
+from ..test_subworkflows import TestSubworkflows
+from ..utils import GITLAB_SUBWORKFLOWS_BRANCH, GITLAB_URL
+
+
+class TestSubworkflowsInfo(TestSubworkflows):
+    def test_subworkflows_info_remote(self):
+        """Test getting info about a remote subworkflow"""
+        mods_info = nf_core.subworkflows.SubworkflowInfo(self.pipeline_dir, "bam_sort_stats_samtools")
+        mods_info_output = mods_info.get_component_info()
+        console = Console(record=True)
+        console.print(mods_info_output)
+        output = console.export_text()
+
+        assert "Subworkflow: bam_sort_stats_samtools" in output
+        assert "Inputs" in output
+        assert "Outputs" in output
+
+    def test_subworkflows_info_remote_gitlab(self):
+        """Test getting info about a subworkflow in the remote gitlab repo"""
+        mods_info = nf_core.subworkflows.SubworkflowInfo(
+            self.pipeline_dir, "bam_sort_stats_samtools", remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH
+        )
+        mods_info_output = mods_info.get_component_info()
+        console = Console(record=True)
+        console.print(mods_info_output)
+        output = console.export_text()
+
+        assert "Subworkflow: bam_sort_stats_samtools" in output
+        assert "Inputs" in output
+        assert "Outputs" in output
+        assert "--git-remote" in output
+
+    def test_subworkflows_info_local(self):
+        """Test getting info about a locally installed subworkflow"""
+        self.subworkflow_install.install("bam_sort_stats_samtools")
+        mods_info = nf_core.subworkflows.SubworkflowInfo(self.pipeline_dir, "bam_sort_stats_samtools")
+        mods_info.local = True
+        mods_info_output = mods_info.get_component_info()
+        console = Console(record=True)
+        console.print(mods_info_output)
+        output = console.export_text()
+
+        assert "Subworkflow: bam_sort_stats_samtools" in output
+        assert "Inputs" in output
+        assert "Outputs" in output
+
+    def test_subworkflows_info_in_modules_repo(self):
+        """Test getting info about a locally installed subworkflow in the modules repo"""
+        self.subworkflow_install.install("bam_sort_stats_samtools")
+        mods_info = nf_core.subworkflows.SubworkflowInfo(self.nfcore_modules, "bam_sort_stats_samtools")
+        mods_info.local = True
+        mods_info_output = mods_info.get_component_info()
+        console = Console(record=True)
+        console.print(mods_info_output)
+        output = console.export_text()
+
+        assert "Subworkflow: bam_sort_stats_samtools" in output
+        assert "Inputs" in output
+        assert "Outputs" in output
diff --git a/tests/subworkflows/test_install.py
b/tests/subworkflows/test_install.py new file mode 100644 index 000000000..00ba88841 --- /dev/null +++ b/tests/subworkflows/test_install.py @@ -0,0 +1,155 @@ +from pathlib import Path + +import pytest + +from nf_core.modules.modules_json import ModulesJson +from nf_core.subworkflows.install import SubworkflowInstall + +from ..test_subworkflows import TestSubworkflows +from ..utils import ( + GITLAB_BRANCH_TEST_BRANCH, + GITLAB_REPO, + GITLAB_SUBWORKFLOWS_BRANCH, + GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH, + GITLAB_URL, + with_temporary_folder, +) + + +class TestSubworkflowsInstall(TestSubworkflows): + def test_subworkflows_install_bam_sort_stats_samtools(self): + """Test installing a subworkflow - bam_sort_stats_samtools""" + assert self.subworkflow_install.install("bam_sort_stats_samtools") is not False + subworkflow_path = Path( + self.subworkflow_install.directory, "subworkflows", "nf-core", "bam_sort_stats_samtools" + ) + sub_subworkflow_path = Path(self.subworkflow_install.directory, "subworkflows", "nf-core", "bam_stats_samtools") + samtools_index_path = Path(self.subworkflow_install.directory, "modules", "nf-core", "samtools", "index") + samtools_sort_path = Path(self.subworkflow_install.directory, "modules", "nf-core", "samtools", "sort") + samtools_stats_path = Path(self.subworkflow_install.directory, "modules", "nf-core", "samtools", "stats") + samtools_idxstats_path = Path(self.subworkflow_install.directory, "modules", "nf-core", "samtools", "idxstats") + samtools_flagstat_path = Path(self.subworkflow_install.directory, "modules", "nf-core", "samtools", "flagstat") + assert subworkflow_path.exists() + assert sub_subworkflow_path.exists() + assert samtools_index_path.exists() + assert samtools_sort_path.exists() + assert samtools_stats_path.exists() + assert samtools_idxstats_path.exists() + assert samtools_flagstat_path.exists() + + def test_subworkflow_install_nopipeline(self): + """Test installing a subworkflow - no pipeline given""" + assert self.subworkflow_install.directory is not None + self.subworkflow_install.directory = Path("non_existent_dir") + assert self.subworkflow_install.install("bam_stats_samtools") is False + + @with_temporary_folder + def test_subworkflows_install_emptypipeline(self, tmpdir): + """Test installing a subworkflow - empty dir given""" + + Path(tmpdir, "nf-core-pipe").mkdir(exist_ok=True) + self.subworkflow_install.directory = Path(tmpdir, "nf-core-pipe") + with pytest.raises(UserWarning) as excinfo: + self.subworkflow_install.install("bam_stats_samtools") + assert "Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value) + + def test_subworkflows_install_nosubworkflow(self): + """Test installing a subworkflow - unrecognised subworkflow given""" + with pytest.raises(ValueError) as excinfo: + self.subworkflow_install.install("foo") + assert excinfo.typename == "ValueError" + assert "Subworkflow 'foo' not found in available subworkflows" in self.caplog.text + + def test_subworkflows_install_bam_sort_stats_samtools_twice(self): + """Test installing a subworkflow - bam_sort_stats_samtools already there""" + self.subworkflow_install.install("bam_sort_stats_samtools") + assert self.subworkflow_install.install("bam_sort_stats_samtools") is False + + def test_subworkflows_install_from_gitlab(self): + """Test installing a subworkflow from GitLab""" + assert self.subworkflow_install_gitlab.install("bam_stats_samtools") is True + # Verify that the branch entry was added correctly + modules_json = ModulesJson(self.pipeline_dir) + assert ( + 
modules_json.get_component_branch(self.component_type, "bam_stats_samtools", GITLAB_URL, GITLAB_REPO)
+            == GITLAB_SUBWORKFLOWS_BRANCH
+        )
+
+    def test_subworkflows_install_different_branch_fail(self):
+        """Test installing a subworkflow from a different branch"""
+        install_obj = SubworkflowInstall(self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH)
+        # The bam_stats_samtools subworkflow does not exist in the branch-test branch
+        with pytest.raises(Exception) as excinfo:
+            install_obj.install("bam_stats_samtools")
+        assert "Subworkflow 'bam_stats_samtools' not found in available subworkflows" in str(excinfo.value)
+
+    def test_subworkflows_install_tracking(self):
+        """Test installing a subworkflow and finding the correct entries in installed_by section of modules.json"""
+        assert self.subworkflow_install.install("bam_sort_stats_samtools")
+
+        # Verify that the installed_by entry was added correctly
+        modules_json = ModulesJson(self.pipeline_dir)
+        mod_json = modules_json.get_modules_json()
+        assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"][
+            "bam_sort_stats_samtools"
+        ]["installed_by"] == ["subworkflows"]
+        assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"][
+            "bam_stats_samtools"
+        ]["installed_by"] == ["bam_sort_stats_samtools"]
+        assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["modules"]["nf-core"]["samtools/stats"][
+            "installed_by"
+        ] == ["bam_stats_samtools"]
+        assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["modules"]["nf-core"]["samtools/sort"][
+            "installed_by"
+        ] == ["bam_sort_stats_samtools"]
+
+        # Clean directory
+        self.subworkflow_remove.remove("bam_sort_stats_samtools")
+
+    def test_subworkflows_install_tracking_added_already_installed(self):
+        """Test installing a subworkflow and finding the correct entries in installed_by section of modules.json"""
+        self.subworkflow_install.install("bam_sort_stats_samtools")
+        self.subworkflow_install.install("bam_stats_samtools")
+
+        # Verify that the installed_by entry was added correctly
+        modules_json = ModulesJson(self.pipeline_dir)
+        mod_json = modules_json.get_modules_json()
+        assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"][
+            "bam_sort_stats_samtools"
+        ]["installed_by"] == ["subworkflows"]
+        assert sorted(
+            mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"][
+                "bam_stats_samtools"
+            ]["installed_by"]
+        ) == sorted(["bam_sort_stats_samtools", "subworkflows"])
+
+        # Clean directory
+        self.subworkflow_remove.remove("bam_sort_stats_samtools")
+        self.subworkflow_remove.remove("bam_stats_samtools")
+
+    def test_subworkflows_install_tracking_added_super_subworkflow(self):
+        """Test installing a subworkflow and finding the correct entries in installed_by section of modules.json"""
+        self.subworkflow_install.install("bam_stats_samtools")
+        self.subworkflow_install.install("bam_sort_stats_samtools")
+
+        # Verify that the installed_by entry was added correctly
+        modules_json = ModulesJson(self.pipeline_dir)
+        mod_json = modules_json.get_modules_json()
+        assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"][
+            "bam_sort_stats_samtools"
+        ]["installed_by"] == ["subworkflows"]
+        assert sorted(
+            mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"][
+                "bam_stats_samtools"
+            ]["installed_by"]
+        ) == sorted(["subworkflows", "bam_sort_stats_samtools"])
+
+    def test_subworkflows_install_alternate_remote(self):
+        """Test installing a subworkflow from a different remote with the same organization path"""
+        install_obj = SubworkflowInstall(
+            self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH
+        )
+        # Install a subworkflow from GitLab which is also installed from GitHub with the same org_path
+        with pytest.raises(Exception) as excinfo:
+            install_obj.install("fastqc")
+        assert "Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value)
diff --git a/tests/subworkflows/test_lint.py b/tests/subworkflows/test_lint.py
new file mode 100644
index 000000000..d94b55b3d
--- /dev/null
+++ b/tests/subworkflows/test_lint.py
@@ -0,0 +1,399 @@
+import json
+import shutil
+from pathlib import Path
+
+import nf_core.subworkflows
+
+from ..test_subworkflows import TestSubworkflows
+from ..utils import GITLAB_SUBWORKFLOWS_BRANCH, GITLAB_URL
+
+
+class TestSubworkflowsLint(TestSubworkflows):
+    def test_subworkflows_lint(self):
+        """Test linting the fastq_align_bowtie2 subworkflow"""
+        self.subworkflow_install.install("fastq_align_bowtie2")
+        subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.pipeline_dir)
+        subworkflow_lint.lint(print_results=False, subworkflow="fastq_align_bowtie2")
+        assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}"
+        assert len(subworkflow_lint.passed) > 0
+        assert len(subworkflow_lint.warned) >= 0
+
+    def test_subworkflows_lint_empty(self):
+        """Test linting a pipeline with no subworkflows installed"""
+        self.subworkflow_remove.remove("utils_nextflow_pipeline", force=True)
+        self.subworkflow_remove.remove("utils_nfcore_pipeline", force=True)
+        self.subworkflow_remove.remove("utils_nfschema_plugin", force=True)
+        nf_core.subworkflows.SubworkflowLint(directory=self.pipeline_dir)
+        assert "No subworkflows from https://github.com/nf-core/modules.git installed in pipeline" in self.caplog.text
+
+    def test_subworkflows_lint_new_subworkflow(self):
+        """Lint a new subworkflow"""
+        subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.nfcore_modules)
+        subworkflow_lint.lint(print_results=True, all_subworkflows=True)
+        assert len(subworkflow_lint.failed) == 0
+
+        assert len(subworkflow_lint.passed) > 0
+        assert len(subworkflow_lint.warned) >= 0
+
+    def test_subworkflows_lint_no_gitlab(self):
+        """Test linting a pipeline with no subworkflows installed from a different remote"""
+        nf_core.subworkflows.SubworkflowLint(directory=self.pipeline_dir, remote_url=GITLAB_URL)
+        assert f"No subworkflows from {GITLAB_URL} installed in pipeline" in self.caplog.text
+
+    def test_subworkflows_lint_gitlab_subworkflows(self):
+        """Lint subworkflows from a different remote"""
+        self.subworkflow_install_gitlab.install("bam_stats_samtools")
+        subworkflow_lint = nf_core.subworkflows.SubworkflowLint(
+            directory=self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH
+        )
+        subworkflow_lint.lint(print_results=False, all_subworkflows=True)
+        assert len(subworkflow_lint.failed) == 0
+        assert len(subworkflow_lint.passed) > 0
+        assert len(subworkflow_lint.warned) >= 0
+
+    def test_subworkflows_lint_multiple_remotes(self):
+        """Lint subworkflows from two different remotes"""
+        self.subworkflow_install_gitlab.install("bam_stats_samtools")
+        self.subworkflow_install.install("fastq_align_bowtie2")
+        subworkflow_lint = nf_core.subworkflows.SubworkflowLint(
+            directory=self.pipeline_dir, remote_url=GITLAB_URL,
branch=GITLAB_SUBWORKFLOWS_BRANCH + ) + subworkflow_lint.lint(print_results=False, all_subworkflows=True) + assert len(subworkflow_lint.failed) == 0 + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) >= 0 + + def test_subworkflows_lint_update_meta_yml(self): + """update the meta.yml of a subworkflow""" + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.nfcore_modules, fix=True) + subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") + assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) >= 0 + + def test_subworkflows_lint_snapshot_file(self): + """Test linting a subworkflow with a snapshot file""" + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.nfcore_modules) + subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") + assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) >= 0 + + def test_subworkflows_lint_snapshot_file_missing_fail(self): + """Test linting a subworkflow with a snapshot file missing, which should fail""" + Path( + self.nfcore_modules, + "subworkflows", + "nf-core", + "test_subworkflow", + "tests", + "main.nf.test.snap", + ).unlink() + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.nfcore_modules) + subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") + Path( + self.nfcore_modules, + "subworkflows", + "nf-core", + "test_subworkflow", + "tests", + "main.nf.test.snap", + ).touch() + assert len(subworkflow_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) >= 0 + + def test_subworkflows_lint_snapshot_file_not_needed(self): + """Test linting a subworkflow which doesn't need a snapshot file by removing the snapshot keyword in the main.nf.test file""" + with open( + Path( + self.nfcore_modules, + "subworkflows", + "nf-core", + "test_subworkflow", + "tests", + "main.nf.test", + ) + ) as fh: + content = fh.read() + new_content = content.replace("snapshot(", "snap (") + with open( + Path( + self.nfcore_modules, + "subworkflows", + "nf-core", + "test_subworkflow", + "tests", + "main.nf.test", + ), + "w", + ) as fh: + fh.write(new_content) + + Path( + self.nfcore_modules, + "subworkflows", + "nf-core", + "test_subworkflow", + "tests", + "main.nf.test.snap", + ).unlink() + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.nfcore_modules) + subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") + Path( + self.nfcore_modules, + "subworkflows", + "nf-core", + "test_subworkflow", + "tests", + "main.nf.test.snap", + ).touch() + assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) >= 0 + + def test_subworkflows_lint_less_than_two_modules_warning(self): + """Test linting a subworkflow with less than two modules""" + self.subworkflow_install.install("bam_stats_samtools") + # Remove two modules + with open( + Path( + self.pipeline_dir, + "subworkflows", + "nf-core", + "bam_stats_samtools", + "main.nf", + ) + ) as fh: + content = fh.read() + new_content = 
content.replace( + "include { SAMTOOLS_IDXSTATS } from '../../../modules/nf-core/samtools/idxstats/main'", + "", + ) + new_content = new_content.replace( + "include { SAMTOOLS_FLAGSTAT } from '../../../modules/nf-core/samtools/flagstat/main'", + "", + ) + with open( + Path( + self.pipeline_dir, + "subworkflows", + "nf-core", + "bam_stats_samtools", + "main.nf", + ), + "w", + ) as fh: + fh.write(new_content) + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.pipeline_dir) + subworkflow_lint.lint(print_results=False, subworkflow="bam_stats_samtools") + assert len(subworkflow_lint.failed) >= 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) > 0 + assert subworkflow_lint.warned[0].lint_test == "main_nf_include" + # cleanup + self.subworkflow_remove.remove("bam_stats_samtools", force=True) + + def test_subworkflows_lint_include_multiple_alias(self): + """Test linting a subworkflow which includes the same module twice under different aliases""" + self.subworkflow_install.install("bam_stats_samtools") + with open( + Path( + self.pipeline_dir, + "subworkflows", + "nf-core", + "bam_stats_samtools", + "main.nf", + ) + ) as fh: + content = fh.read() + new_content = content.replace("SAMTOOLS_STATS", "SAMTOOLS_STATS_1") + new_content = new_content.replace( + "include { SAMTOOLS_STATS_1 ", + "include { SAMTOOLS_STATS as SAMTOOLS_STATS_1; SAMTOOLS_STATS as SAMTOOLS_STATS_2 ", + ) + with open( + Path( + self.pipeline_dir, + "subworkflows", + "nf-core", + "bam_stats_samtools", + "main.nf", + ), + "w", + ) as fh: + fh.write(new_content) + + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.pipeline_dir) + subworkflow_lint.lint(print_results=False, subworkflow="bam_stats_samtools") + assert len(subworkflow_lint.failed) >= 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) == 2 + assert any( + [ + x.message == "Included component 'SAMTOOLS_STATS_1' versions are added in main.nf" + for x in subworkflow_lint.passed + ] + ) + assert any( + [x.message == "Included component 'SAMTOOLS_STATS_1' used in main.nf" for x in subworkflow_lint.passed] + ) + assert any( + [x.message == "Included component 'SAMTOOLS_STATS_2' not used in main.nf" for x in subworkflow_lint.warned] + ) + + # cleanup + self.subworkflow_remove.remove("bam_stats_samtools", force=True) + + def test_subworkflows_lint_capitalization_fail(self): + """Test linting a subworkflow with a capitalization fail""" + self.subworkflow_install.install("bam_stats_samtools") + # change workflow name to lowercase + with open( + Path( + self.pipeline_dir, + "subworkflows", + "nf-core", + "bam_stats_samtools", + "main.nf", + ) + ) as fh: + content = fh.read() + new_content = content.replace("workflow BAM_STATS_SAMTOOLS {", "workflow bam_stats_samtools {") + with open( + Path( + self.pipeline_dir, + "subworkflows", + "nf-core", + "bam_stats_samtools", + "main.nf", + ), + "w", + ) as fh: + fh.write(new_content) + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.pipeline_dir) + subworkflow_lint.lint(print_results=False, subworkflow="bam_stats_samtools") + assert len(subworkflow_lint.failed) >= 1, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) >= 0 + assert any([x.lint_test == "workflow_capitals" for x in subworkflow_lint.failed])
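+ # main.nf was edited above, so the cleanup below removes the subworkflow with force=True (presumably to bypass the local-changes check)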
+ + # cleanup + self.subworkflow_remove.remove("bam_stats_samtools", force=True) + + def test_subworkflows_absent_version(self): + """Test linting an nf-test subworkflow if the versions entry is absent in the snapshot file""" + snap_file = Path( + self.nfcore_modules, + "subworkflows", + "nf-core", + "test_subworkflow", + "tests", + "main.nf.test.snap", + ) + with open(snap_file) as fh: + content = fh.read() + new_content = content.replace("versions", "foo") + with open(snap_file, "w") as fh: + fh.write(new_content) + + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.nfcore_modules) + subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") + assert len(subworkflow_lint.failed) == 0 + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) >= 0, f"Linting warned with {[x.__dict__ for x in subworkflow_lint.warned]}" + assert any([x.lint_test == "test_snap_versions" for x in subworkflow_lint.warned]) + + # cleanup + with open(snap_file, "w") as fh: + fh.write(content) + + def test_subworkflows_missing_test_dir(self): + """Test linting an nf-test subworkflow if the tests directory is missing""" + test_dir = Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests") + test_dir_copy = shutil.copytree(test_dir, test_dir.parent / "tests_copy") + shutil.rmtree(test_dir) + + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(self.nfcore_modules) + subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") + assert len(subworkflow_lint.failed) == 1 + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) >= 0, f"Linting warned with {[x.__dict__ for x in subworkflow_lint.warned]}" + assert any([x.lint_test == "test_dir_exists" for x in subworkflow_lint.failed]) + + # cleanup + shutil.copytree(test_dir_copy, test_dir) + + # There are many steps before the actual main_nf linting where we rely on the main_nf file to exist, so this test is not possible for now + # def test_subworkflows_missing_main_nf(self): + # """Test linting an nf-test subworkflow if the main.nf file is missing""" + + # subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.nfcore_modules) + # main_nf = Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "main.nf") + # main_nf_copy = shutil.copy(main_nf, main_nf.parent / "main_nf_copy") + # main_nf.unlink() + # subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") + # assert len(subworkflow_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" + # assert len(subworkflow_lint.passed) > 0 + # assert len(subworkflow_lint.warned) >= 0 + # assert subworkflow_lint.failed[0].lint_test == "main_nf_exists" + + # # cleanup + # shutil.copy(main_nf_copy, main_nf) + # shutil.rmtree(Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow_backup")) + + def test_subworkflows_empty_file_in_snapshot(self): + """Test linting an nf-test subworkflow with an empty file sha sum in the test snapshot, which should make it fail (if it is not a stub)""" + snap_file = Path( + self.nfcore_modules, + "subworkflows", + "nf-core", + "test_subworkflow", + "tests", + "main.nf.test.snap", + ) + snap = json.load(snap_file.open()) + content = snap_file.read_text() + snap["my test"]["content"][0]["0"] = "test:md5,d41d8cd98f00b204e9800998ecf8427e" + + with open(snap_file, "w") as fh: + json.dump(snap, fh) + + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.nfcore_modules) +
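+ # d41d8cd98f00b204e9800998ecf8427e is the md5 sum of an empty file, so the non-stub snapshot entry written above should trigger the test_snap_md5sum failure asserted below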
subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") + assert len(subworkflow_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) >= 0 + assert subworkflow_lint.failed[0].lint_test == "test_snap_md5sum" + + # reset the file + with open(snap_file, "w") as fh: + fh.write(content) + + def test_subworkflows_empty_file_in_stub_snapshot(self): + """Test linting an nf-test subworkflow with an empty file sha sum in the stub test snapshot, which should not make it fail""" + snap_file = Path( + self.nfcore_modules, + "subworkflows", + "nf-core", + "test_subworkflow", + "tests", + "main.nf.test.snap", + ) + snap = json.load(snap_file.open()) + content = snap_file.read_text() + snap["my_test_stub"] = {"content": [{"0": "test:md5,d41d8cd98f00b204e9800998ecf8427e", "versions": {}}]} + + with open(snap_file, "w") as fh: + json.dump(snap, fh) + + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.nfcore_modules) + subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") + assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) >= 0 + assert any(x.lint_test == "test_snap_md5sum" for x in subworkflow_lint.passed) + + # reset the file + with open(snap_file, "w") as fh: + fh.write(content) diff --git a/tests/subworkflows/test_list.py b/tests/subworkflows/test_list.py new file mode 100644 index 000000000..1ae8f5fff --- /dev/null +++ b/tests/subworkflows/test_list.py @@ -0,0 +1,48 @@ +from rich.console import Console + +import nf_core.subworkflows + +from ..test_subworkflows import TestSubworkflows +from ..utils import GITLAB_SUBWORKFLOWS_BRANCH, GITLAB_URL + + +class TestSubworkflowsList(TestSubworkflows): + def test_subworkflows_list_remote(self): + """Test listing available subworkflows""" + subworkflows_list = nf_core.subworkflows.SubworkflowList(remote=True) + listed_subworkflows = subworkflows_list.list_components() + console = Console(record=True) + console.print(listed_subworkflows) + output = console.export_text() + assert "bam_stats" in output + + def test_subworkflows_list_remote_gitlab(self): + """Test listing the subworkflows in the remote GitLab repo""" + subworkflows_list = nf_core.subworkflows.SubworkflowList( + remote=True, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH + ) + listed_subworkflows = subworkflows_list.list_components() + console = Console(record=True) + console.print(listed_subworkflows) + output = console.export_text() + assert "bam_stats" in output + + def test_subworkflows_install_and_list_subworkflows(self): + """Test listing locally installed subworkflows""" + self.subworkflow_install.install("bam_sort_stats_samtools") + subworkflows_list = nf_core.subworkflows.SubworkflowList(self.pipeline_dir, remote=False) + listed_subworkflows = subworkflows_list.list_components() + console = Console(record=True) + console.print(listed_subworkflows) + output = console.export_text() + assert "bam_stats" in output + + def test_subworkflows_install_gitlab_and_list_subworkflows(self): + """Test listing subworkflows installed locally from GitLab""" + self.subworkflow_install_gitlab.install("bam_sort_stats_samtools") + subworkflows_list = nf_core.subworkflows.SubworkflowList(self.pipeline_dir, remote=False) + listed_subworkflows = subworkflows_list.list_components() + console =
Console(record=True) + console.print(listed_subworkflows) + output = console.export_text() + assert "bam_stats" in output diff --git a/tests/subworkflows/test_remove.py b/tests/subworkflows/test_remove.py new file mode 100644 index 000000000..bad5a2ddb --- /dev/null +++ b/tests/subworkflows/test_remove.py @@ -0,0 +1,101 @@ +from pathlib import Path + +from nf_core.modules.modules_json import ModulesJson + +from ..test_subworkflows import TestSubworkflows + + +class TestSubworkflowsRemove(TestSubworkflows): + def test_subworkflows_remove_uninstalled_subworkflow(self): + """Test removing a subworkflow without installing it""" + assert self.subworkflow_remove.remove("bam_sort_stats_samtools") is False + + def test_subworkflows_remove_subworkflow(self): + """Test removing a subworkflow and all its dependencies after installing it""" + self.subworkflow_install.install("bam_sort_stats_samtools") + + subworkflow_path = Path(self.subworkflow_install.directory, "subworkflows", "nf-core") + bam_sort_stats_samtools_path = Path(subworkflow_path, "bam_sort_stats_samtools") + bam_stats_samtools_path = Path(subworkflow_path, "bam_stats_samtools") + samtools_index_path = Path(self.subworkflow_install.directory, "modules", "nf-core", "samtools", "index") + ModulesJson(self.pipeline_dir) + mod_json_before = ModulesJson(self.pipeline_dir).get_modules_json() + assert self.subworkflow_remove.remove("bam_sort_stats_samtools") + mod_json_after = ModulesJson(self.pipeline_dir).get_modules_json() + assert Path.exists(bam_sort_stats_samtools_path) is False + assert Path.exists(bam_stats_samtools_path) is False + assert Path.exists(samtools_index_path) is False + assert mod_json_before != mod_json_after + # assert subworkflows key is removed from modules.json + assert ( + "bam_sort_stats_samtools" + not in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"].keys() + ) + assert ( + "samtools/index" not in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["modules"].keys() + ) + + def test_subworkflows_remove_subworkflow_keep_installed_module(self): + """Test removing a subworkflow and all its dependencies after installing it, except for a separately installed module""" + self.subworkflow_install.install("bam_sort_stats_samtools") + self.mods_install.install("samtools/index") + + subworkflow_path = Path(self.subworkflow_install.directory, "subworkflows", "nf-core") + bam_sort_stats_samtools_path = Path(subworkflow_path, "bam_sort_stats_samtools") + bam_stats_samtools_path = Path(subworkflow_path, "bam_stats_samtools") + samtools_index_path = Path(self.subworkflow_install.directory, "modules", "nf-core", "samtools", "index") + + mod_json_before = ModulesJson(self.pipeline_dir).get_modules_json() + assert self.subworkflow_remove.remove("bam_sort_stats_samtools") + mod_json_after = ModulesJson(self.pipeline_dir).get_modules_json() + + assert Path.exists(bam_sort_stats_samtools_path) is False + assert Path.exists(bam_stats_samtools_path) is False + assert Path.exists(samtools_index_path) is True + assert mod_json_before != mod_json_after + # assert subworkflows key is removed from modules.json + assert ( + "bam_sort_stats_samtools" + not in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"].keys() + ) + assert ( + "samtools/index" + in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["modules"]["nf-core"].keys() + ) + + def test_subworkflows_remove_one_of_two_subworkflow(self): + """Test removing one of two installed subworkflows, keeping the dependencies
after installing it""" + self.subworkflow_install.install("bam_sort_stats_samtools") + self.subworkflow_install.install("bam_stats_samtools") + subworkflow_path = Path(self.subworkflow_install.directory, "subworkflows", "nf-core") + bam_sort_stats_samtools_path = Path(subworkflow_path, "bam_sort_stats_samtools") + bam_stats_samtools_path = Path(subworkflow_path, "bam_stats_samtools") + samtools_index_path = Path(self.subworkflow_install.directory, "modules", "nf-core", "samtools", "index") + samtools_stats_path = Path(self.subworkflow_install.directory, "modules", "nf-core", "samtools", "stats") + + assert self.subworkflow_remove.remove("bam_sort_stats_samtools") + + assert Path.exists(subworkflow_path) is True + assert Path.exists(bam_sort_stats_samtools_path) is False + assert Path.exists(bam_stats_samtools_path) is True + assert Path.exists(samtools_index_path) is False + assert Path.exists(samtools_stats_path) is True + self.subworkflow_remove.remove("bam_stats_samtools") + + def test_subworkflows_remove_included_subworkflow(self): + """Test removing subworkflow which is installed by another subworkflow and all it's dependencies.""" + self.subworkflow_install.install("bam_sort_stats_samtools") + subworkflow_path = Path(self.subworkflow_install.directory, "subworkflows", "nf-core") + bam_sort_stats_samtools_path = Path(subworkflow_path, "bam_sort_stats_samtools") + bam_stats_samtools_path = Path(subworkflow_path, "bam_stats_samtools") + samtools_index_path = Path(self.subworkflow_install.directory, "modules", "nf-core", "samtools", "index") + samtools_stats_path = Path(self.subworkflow_install.directory, "modules", "nf-core", "samtools", "stats") + + assert self.subworkflow_remove.remove("bam_stats_samtools") is False + + assert Path.exists(subworkflow_path) is True + assert Path.exists(bam_sort_stats_samtools_path) is True + assert Path.exists(bam_stats_samtools_path) is True + assert Path.exists(samtools_index_path) is True + assert Path.exists(samtools_stats_path) is True + self.subworkflow_remove.remove("bam_sort_stats_samtools") diff --git a/tests/subworkflows/test_update.py b/tests/subworkflows/test_update.py new file mode 100644 index 000000000..153038cd1 --- /dev/null +++ b/tests/subworkflows/test_update.py @@ -0,0 +1,374 @@ +import logging +import shutil +import tempfile +from pathlib import Path +from unittest import mock + +import questionary +import yaml + +import nf_core.utils +from nf_core.components.components_utils import NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE +from nf_core.modules.modules_json import ModulesJson +from nf_core.modules.update import ModuleUpdate +from nf_core.subworkflows.update import SubworkflowUpdate + +from ..test_subworkflows import TestSubworkflows +from ..utils import OLD_SUBWORKFLOWS_SHA, cmp_component + + +class TestSubworkflowsUpdate(TestSubworkflows): + def test_install_and_update(self): + """Installs a subworkflow in the pipeline and updates it (no change)""" + self.subworkflow_install.install("bam_stats_samtools") + update_obj = SubworkflowUpdate(self.pipeline_dir, show_diff=False) + + # Copy the sw files and check that they are unaffected by the update + tmpdir = Path(tempfile.TemporaryDirectory().name) + sw_path = Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME, "bam_stats_samtools") + shutil.copytree(sw_path, tmpdir) + + assert update_obj.update("bam_stats_samtools") is True + assert cmp_component(tmpdir, sw_path) is True + + def test_install_at_hash_and_update(self): + """Installs an old version of a subworkflow in the 
pipeline and updates it""" + assert self.subworkflow_install_old.install("fastq_align_bowtie2") + update_obj = SubworkflowUpdate(self.pipeline_dir, show_diff=False, update_deps=True) + old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + + # Copy the sw files and check that they are affected by the update + tmpdir = Path(tempfile.TemporaryDirectory().name) + + sw_path = Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME, "fastq_align_bowtie2") + shutil.copytree(sw_path, tmpdir) + + assert update_obj.update("fastq_align_bowtie2") is True + assert cmp_component(tmpdir, sw_path) is False + + # Check that the modules.json is correctly updated + mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + # Get the up-to-date git_sha for the sw from the ModulesRepo object + assert ( + old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ + "git_sha" + ] + != mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ + "git_sha" + ] + ) + + # Mock questionary answer: update components + @mock.patch.object(questionary.Question, "unsafe_ask", return_value=True) + def test_install_at_hash_and_update_limit_output(self, mock_prompt): + """Installs an old version of a subworkflow in the pipeline and updates it with limit_output=True""" + self.caplog.set_level(logging.INFO) + assert self.subworkflow_install_old.install("fastq_align_bowtie2") + + update_obj = SubworkflowUpdate(self.pipeline_dir, show_diff=True, update_deps=True, limit_output=True) + + assert update_obj.update("fastq_align_bowtie2") + + # Check changes not shown for non-.nf files + assert "Changes in 'fastq_align_bowtie2/meta.yml' but not shown" in self.caplog.text + assert "Changes in 'bam_sort_stats_samtools/meta.yml' but not shown" in self.caplog.text + assert "Changes in 'bam_stats_samtools/meta.yml' but not shown" in self.caplog.text + assert "Changes in 'samtools/flagstat/meta.yml' but not shown" in self.caplog.text + # Check changes only shown for main.nf files + assert "Changes in 'fastq_align_bowtie2/main.nf'" in self.caplog.text + for line in self.caplog.text.split("\n"): + if line.startswith("---"): + assert line.endswith("main.nf") + + def test_install_at_hash_and_update_and_save_diff_to_file(self): + """Installs an old version of a sw in the pipeline and updates it. Save differences to a file.""" + assert self.subworkflow_install_old.install("fastq_align_bowtie2") + patch_path = Path(self.pipeline_dir, "fastq_align_bowtie2.patch") + update_obj = SubworkflowUpdate(self.pipeline_dir, save_diff_fn=patch_path, update_deps=True) + + # Copy the sw files and check that they are affected by the update + tmpdir = Path(tempfile.TemporaryDirectory().name) + + sw_path = Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME, "fastq_align_bowtie2") + shutil.copytree(sw_path, tmpdir) + + assert update_obj.update("fastq_align_bowtie2") is True + assert cmp_component(tmpdir, sw_path) is True + + with open(patch_path) as fh: + line = fh.readline() + assert line.startswith( + "Changes in module 'nf-core/fastq_align_bowtie2' between (f3c078809a2513f1c95de14f6633fe1f03572fdb) and" + ) + + def test_install_at_hash_and_update_and_save_diff_limit_output(self): + """Installs an old version of a sw in the pipeline and updates it. 
Save differences to a file.""" + # Install old version of fastq_align_bowtie2 + self.subworkflow_install_old.install("fastq_align_bowtie2") + patch_path = Path(self.pipeline_dir, "fastq_align_bowtie2.patch") + # Update saving the differences to a patch file and with `limit_output` + update_obj = SubworkflowUpdate(self.pipeline_dir, save_diff_fn=patch_path, update_deps=True, limit_output=True) + assert update_obj.update("fastq_align_bowtie2") + + # Check that the patch file was created + assert patch_path.exists(), f"Patch file was not created at {patch_path}" + + # Read the contents of the patch file + with open(patch_path) as fh: + content = fh.read() + # Check changes not shown for non-.nf files + assert "Changes in 'fastq_align_bowtie2/meta.yml' but not shown" in content + assert "Changes in 'bam_sort_stats_samtools/meta.yml' but not shown" in content + assert "Changes in 'bam_stats_samtools/meta.yml' but not shown" in content + assert "Changes in 'samtools/flagstat/meta.yml' but not shown" in content + # Check changes only shown for main.nf files + assert "Changes in 'fastq_align_bowtie2/main.nf'" in content + for line in content: + if line.startswith("---"): + assert line.endswith("main.nf") + + def test_update_all(self): + """Updates all subworkflows present in the pipeline""" + # Install subworkflows fastq_align_bowtie2, bam_sort_stats_samtools, bam_stats_samtools + self.subworkflow_install.install("fastq_align_bowtie2") + # Update all subworkflows + update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=True, show_diff=False) + assert update_obj.update() is True + + # We must reload the modules.json to get the updated version + mod_json_obj = ModulesJson(self.pipeline_dir) + mod_json = mod_json_obj.get_modules_json() + # Loop through all subworkflows and check that they are updated (according to the modules.json file) + for sw in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]: + correct_git_sha = list(update_obj.modules_repo.get_component_git_log(sw, "subworkflows", depth=1))[0][ + "git_sha" + ] + current_git_sha = mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw][ + "git_sha" + ] + assert correct_git_sha == current_git_sha + + def test_update_with_config_fixed_version(self): + """Try updating when there are entries in the .nf-core.yml""" + # Install subworkflow at the latest version + assert self.subworkflow_install.install("fastq_align_bowtie2") + + # Fix the subworkflow version in the .nf-core.yml to an old version + update_config = {NF_CORE_MODULES_REMOTE: {NF_CORE_MODULES_NAME: {"fastq_align_bowtie2": OLD_SUBWORKFLOWS_SHA}}} + config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) + setattr(tools_config, "update", update_config) + assert config_fn is not None and tools_config is not None # mypy + with open(Path(self.pipeline_dir, config_fn), "w") as f: + yaml.dump(tools_config.model_dump(), f) + + # Update all subworkflows in the pipeline + update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=True, show_diff=False) + assert update_obj.update() is True + + # Check that the git sha for fastq_align_bowtie2 is correctly downgraded + mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + assert "fastq_align_bowtie2" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME] + assert ( + "git_sha" + in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"] + ) + assert ( + 
mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ + "git_sha" + ] + == OLD_SUBWORKFLOWS_SHA + ) + + def test_update_with_config_dont_update(self): + """Try updating when sw is to be ignored""" + # Install an old version of fastq_align_bowtie2 + self.subworkflow_install_old.install("fastq_align_bowtie2") + + # Set the fastq_align_bowtie2 field to no update in the .nf-core.yml + update_config = {NF_CORE_MODULES_REMOTE: {NF_CORE_MODULES_NAME: {"fastq_align_bowtie2": False}}} + config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) + setattr(tools_config, "update", update_config) + assert config_fn is not None and tools_config is not None # mypy + with open(Path(self.pipeline_dir, config_fn), "w") as f: + yaml.dump(tools_config.model_dump(), f) + + # Update all modules in the pipeline + update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=True, show_diff=False) + assert update_obj.update() is True + + # Check that the git sha for fastq_align_bowtie2 is correctly downgraded + mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + assert "fastq_align_bowtie2" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME] + assert ( + "git_sha" + in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"] + ) + assert ( + mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ + "git_sha" + ] + == OLD_SUBWORKFLOWS_SHA + ) + + def test_update_with_config_fix_all(self): + """Fix the version of all nf-core subworkflows""" + # Install subworkflow at the latest version + assert self.subworkflow_install.install("fastq_align_bowtie2") + + # Fix the version of all nf-core subworkflows in the .nf-core.yml to an old version + update_config = {NF_CORE_MODULES_REMOTE: OLD_SUBWORKFLOWS_SHA} + config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) + setattr(tools_config, "update", update_config) + assert config_fn is not None and tools_config is not None # mypy + with open(Path(self.pipeline_dir, config_fn), "w") as f: + yaml.dump(tools_config.model_dump(), f) + + # Update fastq_align_bowtie2 + update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=False, update_deps=True, show_diff=False) + assert update_obj.update("fastq_align_bowtie2") is True + + # Check that the git sha for fastq_align_bowtie2 is correctly downgraded + mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + assert ( + "git_sha" + in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"] + ) + assert ( + mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ + "git_sha" + ] + == OLD_SUBWORKFLOWS_SHA + ) + + def test_update_with_config_no_updates(self): + """Don't update any nf-core subworkflows""" + # Install an old version of fastq_align_bowtie2 + self.subworkflow_install_old.install("fastq_align_bowtie2") + old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + + # Set all repository updates to False + update_config = {NF_CORE_MODULES_REMOTE: False} + config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) + setattr(tools_config, "update", update_config) + assert config_fn is not None and tools_config is not None # mypy + with open(Path(self.pipeline_dir, config_fn), "w") as f: + yaml.dump(tools_config.model_dump(), f) + + # Update all subworkflows in the pipeline + 
update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=True, show_diff=False) + assert update_obj.update() is True + + # Check that none of the subworkflows have been updated (all git shas are unchanged) + mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + for sw in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]: + assert "git_sha" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw] + assert ( + mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw]["git_sha"] + == old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw]["git_sha"] + ) + + def test_update_all_linked_components_from_subworkflow(self): + """Update a subworkflow and all modules and subworkflows used in it""" + # Install an old version of fastq_align_bowtie2 + self.subworkflow_install_old.install("fastq_align_bowtie2") + old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + + # Copy the sw files and check that they are affected by the update + tmpdir = Path(tempfile.TemporaryDirectory().name) + subworkflows_path = Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME) + modules_path = Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME) + shutil.copytree(subworkflows_path, Path(tmpdir, "subworkflows")) + shutil.copytree(modules_path, Path(tmpdir, "modules")) + + # Update fastq_align_bowtie2 and all modules and subworkflows used by it + update_obj = SubworkflowUpdate(self.pipeline_dir, update_deps=True, show_diff=False) + assert update_obj.update("fastq_align_bowtie2") is True + + mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + # Loop through all modules and subworkflows used in fastq_align_bowtie2 + # check that they are updated (according to the modules.json file) + for sw in ["fastq_align_bowtie2", "bam_sort_stats_samtools", "bam_stats_samtools"]: + assert ( + old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw]["git_sha"] + != mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw]["git_sha"] + ) + for mod in [ + "bowtie2/align", + "samtools/index", + "samtools/sort", + "samtools/flagstat", + "samtools/idxstats", + "samtools/stats", + ]: + assert ( + old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"] + != mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"] + ) + # Check that the subworkflow files are updated + assert ( + cmp_component( + Path(tmpdir, "subworkflows", "fastq_align_bowtie2"), Path(subworkflows_path, "fastq_align_bowtie2") + ) + is False + ) + + def test_update_all_subworkflows_from_module(self): + """Update a module and all subworkflows that use this module""" + # Install an old version of fastq_align_bowtie2 and thus all modules used by it (bowtie2/align) + self.subworkflow_install_old.install("fastq_align_bowtie2") + old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + + # Copy the sw files and check that they are affected by the update + tmpdir = Path(tempfile.TemporaryDirectory().name) + sw_path = Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME, "fastq_align_bowtie2") + shutil.copytree(sw_path, Path(tmpdir, "fastq_align_bowtie2")) + + # Update bowtie2/align and all subworkflows using it + update_obj = ModuleUpdate(self.pipeline_dir, update_deps=True, show_diff=False) + assert update_obj.update("bowtie2/align") is True + +
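+ # Reload modules.json: the git_sha entries for bowtie2/align and for every subworkflow using it should now differ from the old ones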
mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + # Check that bowtie2/align and fastq_align_bowtie2 are updated + assert ( + old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ + "git_sha" + ] + != mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ + "git_sha" + ] + ) + assert cmp_component(Path(tmpdir, "fastq_align_bowtie2"), sw_path) is False + assert ( + old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME]["bowtie2/align"]["git_sha"] + != mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME]["bowtie2/align"]["git_sha"] + ) + + def test_update_change_of_included_modules(self): + """Update a subworkflow which has a module change in the new version.""" + # Install an old version of vcf_annotate_ensemblvep with tabix/bgziptabix and without tabix/tabix + self.subworkflow_install_module_change.install("vcf_annotate_ensemblvep") + old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + + # Check that tabix/bgziptabix is there + assert "tabix/bgziptabix" in old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] + assert Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "tabix/bgziptabix").is_dir() + # Check that tabix/tabix is not there + assert "tabix/tabix" not in old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] + assert not Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "tabix/tabix").is_dir() + + # Update vcf_annotate_ensemblvep without tabix/bgziptabix and with tabix/tabix + update_obj = SubworkflowUpdate(self.pipeline_dir, update_deps=True, show_diff=False) + assert update_obj.update("vcf_annotate_ensemblvep") is True + + mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + + # Check that tabix/bgziptabix is not there + assert "tabix/bgziptabix" not in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] + assert not Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "tabix/bgziptabix").is_dir() + # Check that tabix/tabix is there + assert "tabix/tabix" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] + assert Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "tabix/tabix").is_dir() + # Check that ensemblvep is not there but instead we have ensemblvep/vep (due to a file re-naming) + assert "ensemblvep" not in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] + assert "ensemblvep/vep" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] + assert Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "ensemblvep/vep").is_dir() diff --git a/tests/subworkflows/update.py b/tests/subworkflows/update.py deleted file mode 100644 index 9ddc9bec0..000000000 --- a/tests/subworkflows/update.py +++ /dev/null @@ -1,333 +0,0 @@ -import filecmp -import shutil -import tempfile -from pathlib import Path - -import yaml - -import nf_core.utils -from nf_core.modules.modules_json import ModulesJson -from nf_core.modules.modules_repo import NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE -from nf_core.modules.update import ModuleUpdate -from nf_core.subworkflows.update import SubworkflowUpdate - -from ..utils import OLD_SUBWORKFLOWS_SHA - - -def test_install_and_update(self): - """Installs a subworkflow in the pipeline and updates it (no change)""" - self.subworkflow_install.install("bam_stats_samtools") - update_obj =
SubworkflowUpdate(self.pipeline_dir, show_diff=False) - - # Copy the sw files and check that they are unaffected by the update - tmpdir = tempfile.mkdtemp() - shutil.rmtree(tmpdir) - sw_path = Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME, "bam_stats_samtools") - shutil.copytree(sw_path, tmpdir) - - assert update_obj.update("bam_stats_samtools") is True - assert cmp_component(tmpdir, sw_path) is True - - -def test_install_at_hash_and_update(self): - """Installs an old version of a subworkflow in the pipeline and updates it""" - assert self.subworkflow_install_old.install("fastq_align_bowtie2") - update_obj = SubworkflowUpdate(self.pipeline_dir, show_diff=False, update_deps=True) - old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - - # Copy the sw files and check that they are affected by the update - tmpdir = tempfile.mkdtemp() - shutil.rmtree(tmpdir) - sw_path = Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME, "fastq_align_bowtie2") - shutil.copytree(sw_path, tmpdir) - - assert update_obj.update("fastq_align_bowtie2") is True - assert cmp_component(tmpdir, sw_path) is False - - # Check that the modules.json is correctly updated - mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - # Get the up-to-date git_sha for the sw from the ModulesRepo object - assert ( - old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ - "git_sha" - ] - != mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ - "git_sha" - ] - ) - - -def test_install_at_hash_and_update_and_save_diff_to_file(self): - """Installs an old version of a sw in the pipeline and updates it. Save differences to a file.""" - assert self.subworkflow_install_old.install("fastq_align_bowtie2") - patch_path = Path(self.pipeline_dir, "fastq_align_bowtie2.patch") - update_obj = SubworkflowUpdate(self.pipeline_dir, save_diff_fn=patch_path, update_deps=True) - - # Copy the sw files and check that they are affected by the update - tmpdir = tempfile.mkdtemp() - shutil.rmtree(tmpdir) - sw_path = Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME, "fastq_align_bowtie2") - shutil.copytree(sw_path, tmpdir) - - assert update_obj.update("fastq_align_bowtie2") is True - assert cmp_component(tmpdir, sw_path) is True - - with open(patch_path) as fh: - line = fh.readline() - assert line.startswith( - "Changes in module 'nf-core/fastq_align_bowtie2' between (f3c078809a2513f1c95de14f6633fe1f03572fdb) and" - ) - - -def test_update_all(self): - """Updates all subworkflows present in the pipeline""" - # Install subworkflows fastq_align_bowtie2, bam_sort_stats_samtools, bam_stats_samtools - self.subworkflow_install.install("fastq_align_bowtie2") - # Update all subworkflows - update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=True, show_diff=False) - assert update_obj.update() is True - - # We must reload the modules.json to get the updated version - mod_json_obj = ModulesJson(self.pipeline_dir) - mod_json = mod_json_obj.get_modules_json() - # Loop through all subworkflows and check that they are updated (according to the modules.json file) - for sw in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]: - correct_git_sha = list(update_obj.modules_repo.get_component_git_log(sw, "subworkflows", depth=1))[0]["git_sha"] - current_git_sha = mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw]["git_sha"] - assert correct_git_sha == 
current_git_sha - - -def test_update_with_config_fixed_version(self): - """Try updating when there are entries in the .nf-core.yml""" - # Install subworkflow at the latest version - assert self.subworkflow_install.install("fastq_align_bowtie2") - - # Fix the subworkflow version in the .nf-core.yml to an old version - update_config = {NF_CORE_MODULES_REMOTE: {NF_CORE_MODULES_NAME: {"fastq_align_bowtie2": OLD_SUBWORKFLOWS_SHA}}} - config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - tools_config["update"] = update_config - with open(Path(self.pipeline_dir, config_fn), "w") as f: - yaml.dump(tools_config, f) - - # Update all subworkflows in the pipeline - update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=True, show_diff=False) - assert update_obj.update() is True - - # Check that the git sha for fastq_align_bowtie2 is correctly downgraded - mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - assert "fastq_align_bowtie2" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME] - assert ( - "git_sha" - in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"] - ) - assert ( - mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ - "git_sha" - ] - == OLD_SUBWORKFLOWS_SHA - ) - - -def test_update_with_config_dont_update(self): - """Try updating when sw is to be ignored""" - # Install an old version of fastq_align_bowtie2 - self.subworkflow_install_old.install("fastq_align_bowtie2") - - # Set the fastq_align_bowtie2 field to no update in the .nf-core.yml - update_config = {NF_CORE_MODULES_REMOTE: {NF_CORE_MODULES_NAME: {"fastq_align_bowtie2": False}}} - config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - tools_config["update"] = update_config - with open(Path(self.pipeline_dir, config_fn), "w") as f: - yaml.dump(tools_config, f) - - # Update all modules in the pipeline - update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=True, show_diff=False) - assert update_obj.update() is True - - # Check that the git sha for fastq_align_bowtie2 is correctly downgraded - mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - assert "fastq_align_bowtie2" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME] - assert ( - "git_sha" - in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"] - ) - assert ( - mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ - "git_sha" - ] - == OLD_SUBWORKFLOWS_SHA - ) - - -def test_update_with_config_fix_all(self): - """Fix the version of all nf-core subworkflows""" - # Install subworkflow at the latest version - assert self.subworkflow_install.install("fastq_align_bowtie2") - - # Fix the version of all nf-core subworkflows in the .nf-core.yml to an old version - update_config = {NF_CORE_MODULES_REMOTE: OLD_SUBWORKFLOWS_SHA} - config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - tools_config["update"] = update_config - with open(Path(self.pipeline_dir, config_fn), "w") as f: - yaml.dump(tools_config, f) - - # Update fastq_align_bowtie2 - update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=False, update_deps=True, show_diff=False) - assert update_obj.update("fastq_align_bowtie2") is True - - # Check that the git sha for fastq_align_bowtie2 is correctly downgraded - mod_json = 
ModulesJson(self.pipeline_dir).get_modules_json() - assert ( - "git_sha" - in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"] - ) - assert ( - mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ - "git_sha" - ] - == OLD_SUBWORKFLOWS_SHA - ) - - -def test_update_with_config_no_updates(self): - """Don't update any nf-core subworkflows""" - # Install an old version of fastq_align_bowtie2 - self.subworkflow_install_old.install("fastq_align_bowtie2") - old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - - # Set all repository updates to False - update_config = {NF_CORE_MODULES_REMOTE: False} - config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - tools_config["update"] = update_config - with open(Path(self.pipeline_dir, config_fn), "w") as f: - yaml.dump(tools_config, f) - - # Update all subworkflows in the pipeline - update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=True, show_diff=False) - assert update_obj.update() is True - - # Check that the git sha for fastq_align_bowtie2 is correctly downgraded and none of the subworkflows has changed - mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - for sw in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]: - assert "git_sha" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw] - assert ( - mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw]["git_sha"] - == old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw]["git_sha"] - ) - - -def test_update_all_linked_components_from_subworkflow(self): - """Update a subworkflow and all modules and subworkflows used on it""" - # Install an old version of fastq_align_bowtie2 - self.subworkflow_install_old.install("fastq_align_bowtie2") - old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - - # Copy the sw files and check that they are affected by the update - tmpdir = tempfile.mkdtemp() - shutil.rmtree(tmpdir) - subworkflows_path = Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME) - modules_path = Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME) - shutil.copytree(subworkflows_path, Path(tmpdir, "subworkflows")) - shutil.copytree(modules_path, Path(tmpdir, "modules")) - - # Update fastq_align_bowtie2 and all modules and subworkflows used by that - update_obj = SubworkflowUpdate(self.pipeline_dir, update_deps=True, show_diff=False) - assert update_obj.update("fastq_align_bowtie2") is True - - mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - # Loop through all modules and subworkflows used in fastq_align_bowtie2 - # check that they are updated (according to the modules.json file) - for sw in ["fastq_align_bowtie2", "bam_sort_stats_samtools", "bam_stats_samtools"]: - assert ( - old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw]["git_sha"] - != mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw]["git_sha"] - ) - for mod in [ - "bowtie2/align", - "samtools/index", - "samtools/sort", - "samtools/flagstat", - "samtools/idxstats", - "samtools/stats", - ]: - assert ( - old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"] - != mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"] - ) - # Check that the subworkflow files are updated - 
assert ( - cmp_component( - Path(tmpdir, "subworkflows", "fastq_align_bowtie2"), Path(subworkflows_path, "fastq_align_bowtie2") - ) - is False - ) - - -def test_update_all_subworkflows_from_module(self): - """Update a module and all subworkflows that use this module""" - # Install an old version of fastq_align_bowtie2 and thus all modules used by it (bowtie2/align) - self.subworkflow_install_old.install("fastq_align_bowtie2") - old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - - # Copy the sw files and check that they are affected by the update - tmpdir = tempfile.mkdtemp() - shutil.rmtree(tmpdir) - sw_path = Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME, "fastq_align_bowtie2") - shutil.copytree(sw_path, Path(tmpdir, "fastq_align_bowtie2")) - - # Update bowtie2/align and all subworkflows using it - update_obj = ModuleUpdate(self.pipeline_dir, update_deps=True, show_diff=False) - assert update_obj.update("bowtie2/align") is True - - mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - # Check that bowtie2/align and fastq_align_bowtie2 are updated - assert ( - old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ - "git_sha" - ] - != mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ - "git_sha" - ] - ) - assert cmp_component(Path(tmpdir, "fastq_align_bowtie2"), sw_path) is False - assert ( - old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME]["bowtie2/align"]["git_sha"] - != mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME]["bowtie2/align"]["git_sha"] - ) - - -def test_update_change_of_included_modules(self): - """Update a subworkflow which has a module change in the new version.""" - # Install an old version of vcf_annotate_ensemblvep with tabix/bgziptabix and without tabix/tabix - self.subworkflow_install_module_change.install("vcf_annotate_ensemblvep") - old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - - # Check that tabix/bgziptabix is there - assert "tabix/bgziptabix" in old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] - assert Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "tabix/bgziptabix").is_dir() - # Check that tabix/tabix is not there - assert "tabix/tabix" not in old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] - assert not Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "tabix/tabix").is_dir() - - # Update vcf_annotate_ensemblvep without tabix/bgziptabix and with tabix/tabix - update_obj = SubworkflowUpdate(self.pipeline_dir, update_deps=True, show_diff=False) - assert update_obj.update("vcf_annotate_ensemblvep") is True - - mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - - # Check that tabix/bgziptabix is not there - assert "tabix/bgziptabix" not in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] - assert not Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "tabix/bgziptabix").is_dir() - # Check that tabix/tabix is there - assert "tabix/tabix" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] - assert Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "tabix/tabix").is_dir() - # Check that ensemblevep is not there but instead we have ensemblevep/vep (due to a file re-naming) - assert "ensemblvep" not in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] - assert 
"ensemblvep/vep" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] - assert Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "ensemblvep/vep").is_dir() - - -def cmp_component(dir1, dir2): - """Compare two versions of the same component""" - files = ["main.nf", "meta.yml"] - return all(filecmp.cmp(Path(dir1, f), Path(dir2, f), shallow=False) for f in files) diff --git a/tests/test_bump_version.py b/tests/test_bump_version.py deleted file mode 100644 index c697d3428..000000000 --- a/tests/test_bump_version.py +++ /dev/null @@ -1,86 +0,0 @@ -"""Some tests covering the bump_version code.""" - -import os - -import yaml - -import nf_core.bump_version -import nf_core.create -import nf_core.utils - - -# pass tmp_path as argument, which is a pytest feature -# see: https://docs.pytest.org/en/latest/how-to/tmp_path.html#the-tmp-path-fixture -def test_bump_pipeline_version(datafiles, tmp_path): - """Test that making a release with the working example files works""" - - # Get a workflow and configs - test_pipeline_dir = os.path.join(tmp_path, "nf-core-testpipeline") - create_obj = nf_core.create.PipelineCreate( - "testpipeline", "This is a test pipeline", "Test McTestFace", no_git=True, outdir=test_pipeline_dir, plain=True - ) - create_obj.init_pipeline() - pipeline_obj = nf_core.utils.Pipeline(test_pipeline_dir) - pipeline_obj._load() - - # Bump the version number - nf_core.bump_version.bump_pipeline_version(pipeline_obj, "1.1") - new_pipeline_obj = nf_core.utils.Pipeline(test_pipeline_dir) - - # Check nextflow.config - new_pipeline_obj._load_pipeline_config() - assert new_pipeline_obj.nf_config["manifest.version"].strip("'\"") == "1.1" - - -def test_dev_bump_pipeline_version(datafiles, tmp_path): - """Test that making a release works with a dev name and a leading v""" - # Get a workflow and configs - test_pipeline_dir = os.path.join(tmp_path, "nf-core-testpipeline") - create_obj = nf_core.create.PipelineCreate( - "testpipeline", "This is a test pipeline", "Test McTestFace", no_git=True, outdir=test_pipeline_dir, plain=True - ) - create_obj.init_pipeline() - pipeline_obj = nf_core.utils.Pipeline(test_pipeline_dir) - pipeline_obj._load() - - # Bump the version number - nf_core.bump_version.bump_pipeline_version(pipeline_obj, "v1.2dev") - new_pipeline_obj = nf_core.utils.Pipeline(test_pipeline_dir) - - # Check the pipeline config - new_pipeline_obj._load_pipeline_config() - assert new_pipeline_obj.nf_config["manifest.version"].strip("'\"") == "1.2dev" - - -def test_bump_nextflow_version(datafiles, tmp_path): - # Get a workflow and configs - test_pipeline_dir = os.path.join(tmp_path, "nf-core-testpipeline") - create_obj = nf_core.create.PipelineCreate( - "testpipeline", "This is a test pipeline", "Test McTestFace", no_git=True, outdir=test_pipeline_dir, plain=True - ) - create_obj.init_pipeline() - pipeline_obj = nf_core.utils.Pipeline(test_pipeline_dir) - pipeline_obj._load() - - # Bump the version number to a specific version, preferably one - # we're not already on - version = "22.04.3" - nf_core.bump_version.bump_nextflow_version(pipeline_obj, version) - new_pipeline_obj = nf_core.utils.Pipeline(test_pipeline_dir) - - # Check nextflow.config - new_pipeline_obj._load_pipeline_config() - assert new_pipeline_obj.nf_config["manifest.nextflowVersion"].strip("'\"") == f"!>={version}" - - # Check .github/workflows/ci.yml - with open(new_pipeline_obj._fp(".github/workflows/ci.yml")) as fh: - ci_yaml = yaml.safe_load(fh) - assert 
ci_yaml["jobs"]["test"]["strategy"]["matrix"]["NXF_VER"][0] == version - - # Check README.md - with open(new_pipeline_obj._fp("README.md")) as fh: - readme = fh.read().splitlines() - assert ( - f"[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A5{version}-23aa62.svg)]" - "(https://www.nextflow.io/)" in readme - ) diff --git a/tests/test_cli.py b/tests/test_cli.py index 5df0a3241..bea0223f0 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -79,7 +79,7 @@ def test_cli_verbose(self): # Checks that -v was considered valid assert "No such option: -v" not in nf_core.utils.strip_ansi_codes(result.output) - @mock.patch("nf_core.list.list_workflows", return_value="pipeline test list") + @mock.patch("nf_core.pipelines.list.list_workflows", return_value="pipeline test list") def test_cli_list(self, mock_list_workflows): """Test nf-core pipelines are listed and cli parameters are passed on.""" params = { @@ -87,7 +87,7 @@ def test_cli_list(self, mock_list_workflows): "json": None, "show-archived": None, } - cmd = ["list"] + self.assemble_params(params) + ["kw1", "kw2"] + cmd = ["pipelines", "list"] + self.assemble_params(params) + ["kw1", "kw2"] result = self.invoke_cli(cmd) mock_list_workflows.assert_called_once_with( @@ -96,7 +96,7 @@ def test_cli_list(self, mock_list_workflows): assert result.exit_code == 0 assert "pipeline test list" in result.output - @mock.patch("nf_core.launch.Launch") + @mock.patch("nf_core.pipelines.launch.Launch") def test_cli_launch(self, mock_launcher): """Test nf-core pipeline is launched and cli parameters are passed on.""" mock_launcher.return_value.launch_pipeline.return_value = True @@ -112,7 +112,7 @@ def test_cli_launch(self, mock_launcher): "show-hidden": None, "url": "builder_url", } - cmd = ["launch"] + self.assemble_params(params) + ["pipeline_name"] + cmd = ["pipelines", "launch"] + self.assemble_params(params) + ["pipeline_name"] result = self.invoke_cli(cmd) assert result.exit_code == 0 @@ -131,7 +131,7 @@ def test_cli_launch(self, mock_launcher): mock_launcher.return_value.launch_pipeline.assert_called_once() - @mock.patch("nf_core.launch.Launch") + @mock.patch("nf_core.pipelines.launch.Launch") def test_cli_launch_no_params_in(self, mock_launcher): """Test nf-core pipeline fails when params-in does not exist""" mock_launcher.return_value.launch_pipeline.return_value = True @@ -139,7 +139,7 @@ def test_cli_launch_no_params_in(self, mock_launcher): params = { "params-in": "/fake/path", } - cmd = ["launch"] + self.assemble_params(params) + ["pipeline_name"] + cmd = ["pipelines", "launch"] + self.assemble_params(params) + ["pipeline_name"] result = self.invoke_cli(cmd) assert result.exit_code == 2 @@ -150,15 +150,15 @@ def test_cli_launch_no_params_in(self, mock_launcher): mock_launcher.assert_not_called() - @mock.patch("nf_core.launch.Launch") + @mock.patch("nf_core.pipelines.launch.Launch") def test_cli_launch_fail(self, mock_launcher): """Test nf-core pipeline fails with exit code 1 when pipeline fails.""" mock_launcher.return_value.launch_pipeline.return_value = False - cmd = ["launch", "pipeline_name"] + cmd = ["pipelines", "launch", "pipeline_name"] result = self.invoke_cli(cmd) assert result.exit_code == 1 - @mock.patch("nf_core.download.DownloadWorkflow") + @mock.patch("nf_core.pipelines.download.DownloadWorkflow") def test_cli_download(self, mock_dl): """Test nf-core pipeline is downloaded and cli parameters are passed on.""" params = { @@ -167,7 +167,7 @@ def test_cli_download(self, mock_dl): "compress": "tar.gz", "force": 
None, "platform": None, - "download-configuration": None, + "download-configuration": "yes", "tag": "3.12=testing", "container-system": "singularity", "container-library": "quay.io", @@ -176,7 +176,7 @@ def test_cli_download(self, mock_dl): "parallel-downloads": 2, } - cmd = ["download"] + self.assemble_params(params) + ["pipeline_name"] + cmd = ["pipelines", "download"] + self.assemble_params(params) + ["pipeline_name"] result = self.invoke_cli(cmd) assert result.exit_code == 0 @@ -188,7 +188,7 @@ def test_cli_download(self, mock_dl): params["compress"], "force" in params, "platform" in params, - "download-configuration" in params, + params["download-configuration"], (params["tag"],), params["container-system"], (params["container-library"],), @@ -199,53 +199,17 @@ def test_cli_download(self, mock_dl): mock_dl.return_value.download_workflow.assert_called_once() - @mock.patch("nf_core.licences.WorkflowLicences") - def test_licences(self, mock_lic): - """Test nf-core pipeline licence is printed out and cli parameters are passed on.""" - licence_text = "dummy licence text" - mock_lic.return_value.run_licences.return_value = licence_text - - params = { - "json": None, - } - - cmd = ["licences"] + self.assemble_params(params) + ["pipeline_name"] - result = self.invoke_cli(cmd) - - assert result.exit_code == 0 - assert licence_text in result.output - - mock_lic.assert_called_once_with(cmd[-1]) - - @mock.patch("nf_core.licences.WorkflowLicences") - def test_licences_log_error(self, mock_lic): - """Test LookupError is logged""" - error_txt = "LookupError has been raised" - mock_lic.return_value.run_licences.side_effect = LookupError(error_txt) - - cmd = ["licences", "pipeline_name"] - with self.assertLogs() as captured_logs: - result = self.invoke_cli(cmd) - - assert result.exit_code == 1 - assert error_txt in captured_logs.output[-1] - assert captured_logs.records[-1].levelname == "ERROR" - - @mock.patch("nf_core.create.PipelineCreate") + @mock.patch("nf_core.pipelines.create.create.PipelineCreate") def test_create(self, mock_create): """Test nf-core pipeline is created and cli parameters are passed on.""" params = { - "name": "pipeline name", + "name": "pipelinename", "description": "pipeline description", "author": "Kalle Anka", - "version": "1.2.3", - "force": None, "outdir": "/path/outdir", - "template-yaml": "file.yaml", - "plain": None, } - cmd = ["create"] + self.assemble_params(params) + cmd = ["pipelines", "create"] + self.assemble_params(params) result = self.invoke_cli(cmd) assert result.exit_code == 0 @@ -253,18 +217,43 @@ def test_create(self, mock_create): params["name"], params["description"], params["author"], - version=params["version"], force="force" in params, + version="1.0.0dev", outdir=params["outdir"], - template_yaml_path=params["template-yaml"], - plain="plain" in params, + template_config=None, + organisation="nf-core", ) mock_create.return_value.init_pipeline.assert_called_once() + @mock.patch("nf_core.pipelines.create.create.PipelineCreate") + def test_create_error(self, mock_create): + """Test `nf-core pipelines create` run without providing all the arguments thorws an error.""" + params = { + "name": "pipelinename", + } + + cmd = ["pipelines", "create"] + self.assemble_params(params) + result = self.invoke_cli(cmd) + + assert result.exit_code == 1 + assert "Partial arguments supplied." 
+
+    @mock.patch("nf_core.pipelines.create.PipelineCreateApp")
+    def test_create_app(self, mock_create):
+        """Test `nf-core pipelines create` without arguments runs the interactive app."""
+        cmd = ["pipelines", "create"]
+        result = self.invoke_cli(cmd)
+
+        assert result.return_value in (0, None)  # a clean run returns 0 or simply None
+        assert "Launching interactive nf-core pipeline creation tool." in result.output
+
+        mock_create.assert_called_once_with()
+        mock_create.return_value.run.assert_called_once()
+
    @mock.patch("nf_core.utils.is_pipeline_directory")
-    @mock.patch("nf_core.lint.run_linting")
+    @mock.patch("nf_core.pipelines.lint.run_linting")
    def test_lint(self, mock_lint, mock_is_pipeline):
-        """Test nf-core lint"""
+        """Test nf-core pipelines lint"""
        mock_lint_results = (mock.MagicMock, mock.MagicMock, mock.MagicMock)
        mock_lint_results[0].failed = []
        mock_lint_results[1].failed = []
@@ -284,7 +273,7 @@ def test_lint(self, mock_lint, mock_is_pipeline):
            "json": "output_file.json",
        }

-        cmd = ["lint"] + self.assemble_params(params)
+        cmd = ["pipelines", "lint"] + self.assemble_params(params)
        result = self.invoke_cli(cmd)

        assert result.exit_code == 0
@@ -303,12 +292,12 @@
        )

    def test_lint_no_dir(self):
-        """Test nf-core lint fails if --dir does not exist"""
+        """Test nf-core pipelines lint fails if --dir does not exist"""
        params = {
            "dir": "/bad/path",
        }

-        cmd = ["lint"] + self.assemble_params(params)
+        cmd = ["pipelines", "lint"] + self.assemble_params(params)
        result = self.invoke_cli(cmd)

        assert result.exit_code == 2
@@ -319,11 +308,11 @@

    @mock.patch("nf_core.utils.is_pipeline_directory")
    def test_lint_dir_is_not_pipeline(self, mock_is_pipeline):
-        """Test nf-core lint logs an error if not called from a pipeline directory."""
+        """Test nf-core pipelines lint logs an error if not called from a pipeline directory."""
        error_txt = "UserWarning has been raised"
        mock_is_pipeline.side_effect = UserWarning(error_txt)

-        cmd = ["lint"]
+        cmd = ["pipelines", "lint"]
        with self.assertLogs() as captured_logs:
            result = self.invoke_cli(cmd)
@@ -332,13 +321,13 @@
        assert captured_logs.records[-1].levelname == "ERROR"

    @mock.patch("nf_core.utils.is_pipeline_directory")
-    @mock.patch("nf_core.lint.run_linting")
+    @mock.patch("nf_core.pipelines.lint.run_linting")
    def test_lint_log_assert_error(self, mock_lint, mock_is_pipeline):
-        """Test nf-core lint logs assertion errors"""
+        """Test nf-core pipelines lint logs assertion errors"""
        error_txt = "AssertionError has been raised"
        mock_lint.side_effect = AssertionError(error_txt)

-        cmd = ["lint"]
+        cmd = ["pipelines", "lint"]
        with self.assertLogs() as captured_logs:
            result = self.invoke_cli(cmd)
@@ -347,13 +336,13 @@
        assert captured_logs.records[-1].levelname == "CRITICAL"

    @mock.patch("nf_core.utils.is_pipeline_directory")
-    @mock.patch("nf_core.lint.run_linting")
+    @mock.patch("nf_core.pipelines.lint.run_linting")
    def test_lint_log_user_warning(self, mock_lint, mock_is_pipeline):
-        """Test nf-core lint logs assertion errors"""
+        """Test nf-core pipelines lint logs user warnings"""
        error_txt = "AssertionError has been raised"
        mock_lint.side_effect = UserWarning(error_txt)

-        cmd = ["lint"]
+        cmd = ["pipelines", "lint"]
        with self.assertLogs() as captured_logs:
            result = self.invoke_cli(cmd)
@@ -361,31 +350,31 @@
        assert error_txt in
captured_logs.output[-1] assert captured_logs.records[-1].levelname == "ERROR" - @mock.patch("nf_core.schema.PipelineSchema.get_schema_path") + @mock.patch("nf_core.pipelines.schema.PipelineSchema.get_schema_path") def test_schema_lint(self, mock_get_schema_path): - """Test nf-core schema lint defaults to nextflow_schema.json""" - cmd = ["schema", "lint"] + """Test nf-core pipelines schema lint defaults to nextflow_schema.json""" + cmd = ["pipelines", "schema", "lint"] with self.runner.isolated_filesystem(): with open("nextflow_schema.json", "w") as f: f.write("{}") self.invoke_cli(cmd) mock_get_schema_path.assert_called_with("nextflow_schema.json") - @mock.patch("nf_core.schema.PipelineSchema.get_schema_path") + @mock.patch("nf_core.pipelines.schema.PipelineSchema.get_schema_path") def test_schema_lint_filename(self, mock_get_schema_path): - """Test nf-core schema lint accepts a filename""" - cmd = ["schema", "lint", "some_other_filename"] + """Test nf-core pipelines schema lint accepts a filename""" + cmd = ["pipelines", "schema", "lint", "some_other_filename"] with self.runner.isolated_filesystem(): with open("some_other_filename", "w") as f: f.write("{}") self.invoke_cli(cmd) mock_get_schema_path.assert_called_with("some_other_filename") - @mock.patch("nf_core.create_logo.create_logo") + @mock.patch("nf_core.pipelines.create_logo.create_logo") def test_create_logo(self, mock_create_logo): # Set up the mock to return a specific value - cmd = ["create-logo", "test"] + cmd = ["pipelines", "create-logo", "test"] result = self.invoke_cli(cmd) mock_create_logo.assert_called_with("test", Path.cwd(), None, "light", 2300, "png", False) diff --git a/tests/test_create.py b/tests/test_create.py deleted file mode 100644 index e2672499c..000000000 --- a/tests/test_create.py +++ /dev/null @@ -1,147 +0,0 @@ -"""Some tests covering the pipeline creation sub command.""" - -import os -import unittest -from pathlib import Path -from unittest import mock - -import git -import yaml - -import nf_core.create - -from .utils import with_temporary_folder - -TEST_DATA_DIR = Path(__file__).parent / "data" -PIPELINE_TEMPLATE_YML = TEST_DATA_DIR / "pipeline_create_template.yml" -PIPELINE_TEMPLATE_YML_SKIP = TEST_DATA_DIR / "pipeline_create_template_skip.yml" - - -class NfcoreCreateTest(unittest.TestCase): - def setUp(self): - self.pipeline_name = "nf-core/test" - self.pipeline_description = "just for 4w3s0m3 tests" - self.pipeline_author = "Chuck Norris" - self.pipeline_version = "1.0.0" - self.default_branch = "default" - - def test_pipeline_creation(self): - pipeline = nf_core.create.PipelineCreate( - name=self.pipeline_name, - description=self.pipeline_description, - author=self.pipeline_author, - version=self.pipeline_version, - no_git=False, - force=True, - plain=True, - default_branch=self.default_branch, - ) - - assert pipeline.template_params["name"] == self.pipeline_name - assert pipeline.template_params["description"] == self.pipeline_description - assert pipeline.template_params["author"] == self.pipeline_author - assert pipeline.template_params["version"] == self.pipeline_version - - @with_temporary_folder - def test_pipeline_creation_initiation(self, tmp_path): - pipeline = nf_core.create.PipelineCreate( - name=self.pipeline_name, - description=self.pipeline_description, - author=self.pipeline_author, - version=self.pipeline_version, - no_git=False, - force=True, - outdir=tmp_path, - plain=True, - default_branch=self.default_branch, - ) - pipeline.init_pipeline() - assert 
os.path.isdir(os.path.join(pipeline.outdir, ".git")) - assert f" {self.default_branch}\n" in git.Repo.init(pipeline.outdir).git.branch() - assert not os.path.exists(os.path.join(pipeline.outdir, "pipeline_template.yml")) - with open(os.path.join(pipeline.outdir, ".nf-core.yml")) as fh: - assert "template" not in fh.read() - - @with_temporary_folder - def test_pipeline_creation_initiation_with_yml(self, tmp_path): - pipeline = nf_core.create.PipelineCreate( - name=self.pipeline_name, - description=self.pipeline_description, - author=self.pipeline_author, - version=self.pipeline_version, - no_git=False, - force=True, - outdir=tmp_path, - template_yaml_path=PIPELINE_TEMPLATE_YML, - plain=True, - default_branch=self.default_branch, - ) - pipeline.init_pipeline() - assert os.path.isdir(os.path.join(pipeline.outdir, ".git")) - assert f" {self.default_branch}\n" in git.Repo.init(pipeline.outdir).git.branch() - - # Check pipeline template yml has been dumped to `.nf-core.yml` and matches input - assert not os.path.exists(os.path.join(pipeline.outdir, "pipeline_template.yml")) - assert os.path.exists(os.path.join(pipeline.outdir, ".nf-core.yml")) - with open(os.path.join(pipeline.outdir, ".nf-core.yml")) as fh: - nfcore_yml = yaml.safe_load(fh) - assert "template" in nfcore_yml - assert nfcore_yml["template"] == yaml.safe_load(PIPELINE_TEMPLATE_YML.read_text()) - - @mock.patch.object(nf_core.create.PipelineCreate, "customize_template") - @mock.patch.object(nf_core.create.questionary, "confirm") - @with_temporary_folder - def test_pipeline_creation_initiation_customize_template(self, mock_questionary, mock_customize, tmp_path): - mock_questionary.unsafe_ask.return_value = True - mock_customize.return_value = {"prefix": "testprefix"} - pipeline = nf_core.create.PipelineCreate( - name=self.pipeline_name, - description=self.pipeline_description, - author=self.pipeline_author, - version=self.pipeline_version, - no_git=False, - force=True, - outdir=tmp_path, - default_branch=self.default_branch, - ) - pipeline.init_pipeline() - assert os.path.isdir(os.path.join(pipeline.outdir, ".git")) - assert f" {self.default_branch}\n" in git.Repo.init(pipeline.outdir).git.branch() - - # Check pipeline template yml has been dumped to `.nf-core.yml` and matches input - assert not os.path.exists(os.path.join(pipeline.outdir, "pipeline_template.yml")) - assert os.path.exists(os.path.join(pipeline.outdir, ".nf-core.yml")) - with open(os.path.join(pipeline.outdir, ".nf-core.yml")) as fh: - nfcore_yml = yaml.safe_load(fh) - assert "template" in nfcore_yml - assert nfcore_yml["template"] == yaml.safe_load(PIPELINE_TEMPLATE_YML.read_text()) - - @with_temporary_folder - def test_pipeline_creation_with_yml_skip(self, tmp_path): - pipeline = nf_core.create.PipelineCreate( - name=self.pipeline_name, - description=self.pipeline_description, - author=self.pipeline_author, - version=self.pipeline_version, - no_git=False, - force=True, - outdir=tmp_path, - template_yaml_path=PIPELINE_TEMPLATE_YML_SKIP, - plain=True, - default_branch=self.default_branch, - ) - pipeline.init_pipeline() - assert not os.path.isdir(os.path.join(pipeline.outdir, ".git")) - - # Check pipeline template yml has been dumped to `.nf-core.yml` and matches input - assert not os.path.exists(os.path.join(pipeline.outdir, "pipeline_template.yml")) - assert os.path.exists(os.path.join(pipeline.outdir, ".nf-core.yml")) - with open(os.path.join(pipeline.outdir, ".nf-core.yml")) as fh: - nfcore_yml = yaml.safe_load(fh) - assert "template" in nfcore_yml - assert 
nfcore_yml["template"] == yaml.safe_load(PIPELINE_TEMPLATE_YML_SKIP.read_text()) - - # Check that some of the skipped files are not present - assert not os.path.exists(os.path.join(pipeline.outdir, "CODE_OF_CONDUCT.md")) - assert not os.path.exists(os.path.join(pipeline.outdir, ".github")) - assert not os.path.exists(os.path.join(pipeline.outdir, "conf", "igenomes.config")) diff --git a/tests/test_licenses.py b/tests/test_licenses.py deleted file mode 100644 index 8023c9e89..000000000 --- a/tests/test_licenses.py +++ /dev/null @@ -1,57 +0,0 @@ -"""Some tests covering the pipeline creation sub command.""" -# import json -# import os -# import tempfile -# import unittest -# -# import pytest -# from rich.console import Console -# -# import nf_core.create -# import nf_core.licences - -# TODO nf-core: Assess and strip out if no longer required for DSL2 - -# class WorkflowLicensesTest(unittest.TestCase): -# """A class that performs tests on the workflow license -# retrieval functionality of nf-core tools.""" - -# def setUp(self): -# """ Create a new pipeline, then make a Licence object """ -# # Set up the schema -# self.pipeline_dir = os.path.join(tempfile.mkdtemp(), "test_pipeline") -# self.create_obj = nf_core.create.PipelineCreate("testing", "test pipeline", "tester", outdir=self.pipeline_dir) -# self.create_obj.init_pipeline() -# self.license_obj = nf_core.licences.WorkflowLicences(self.pipeline_dir) - -# def test_run_licences_successful(self): -# console = Console(record=True) -# console.print(self.license_obj.run_licences()) -# output = console.export_text() -# assert "GPL v3" in output - -# def test_run_licences_successful_json(self): -# self.license_obj.as_json = True -# console = Console(record=True) -# console.print(self.license_obj.run_licences()) -# output = json.loads(console.export_text()) -# for package in output: -# if "multiqc" in package: -# assert output[package][0] == "GPL v3" -# break -# else: -# raise LookupError("Could not find MultiQC") - -# def test_get_environment_file_local(self): -# self.license_obj.get_environment_file() -# assert any(["multiqc" in k for k in self.license_obj.conda_config["dependencies"]]) - -# def test_get_environment_file_remote(self): -# self.license_obj = nf_core.licences.WorkflowLicences("methylseq") -# self.license_obj.get_environment_file() -# assert any(["multiqc" in k for k in self.license_obj.conda_config["dependencies"]]) - -# @pytest.mark.xfail(raises=LookupError, strict=True) -# def test_get_environment_file_nonexistent(self): -# self.license_obj = nf_core.licences.WorkflowLicences("fubarnotreal") -# self.license_obj.get_environment_file() diff --git a/tests/test_lint.py b/tests/test_lint.py deleted file mode 100644 index b72a6bfdf..000000000 --- a/tests/test_lint.py +++ /dev/null @@ -1,591 +0,0 @@ -"""Some tests covering the linting code.""" - -import fnmatch -import json -import os -import shutil -import tempfile -import unittest - -import yaml - -import nf_core.create -import nf_core.lint - -from .utils import with_temporary_folder - - -class TestLint(unittest.TestCase): - """Class for lint tests""" - - def setUp(self): - """Function that runs at start of tests for common resources - - Use nf_core.create() to make a pipeline that we can use for testing - """ - - self.tmp_dir = tempfile.mkdtemp() - self.test_pipeline_dir = os.path.join(self.tmp_dir, "nf-core-testpipeline") - self.create_obj = nf_core.create.PipelineCreate( - "testpipeline", "This is a test pipeline", "Test McTestFace", outdir=self.test_pipeline_dir, plain=True - ) - 
self.create_obj.init_pipeline() - - # Base lint object on this directory - self.lint_obj = nf_core.lint.PipelineLint(self.test_pipeline_dir) - - def tearDown(self): - """Clean up temporary files and folders""" - - if os.path.exists(self.tmp_dir): - shutil.rmtree(self.tmp_dir) - - def _make_pipeline_copy(self): - """Make a copy of the test pipeline that can be edited - - Returns: Path to new temp directory with pipeline""" - new_pipeline = os.path.join(self.tmp_dir, "nf-core-testpipeline-copy") - shutil.copytree(self.test_pipeline_dir, new_pipeline) - return new_pipeline - - ########################## - # CORE lint.py FUNCTIONS # - ########################## - def test_run_linting_function(self): - """Run the master run_linting() function in lint.py - - We don't really check any of this code as it's just a series of function calls - and we're testing each of those individually. This is mostly to check for syntax errors.""" - nf_core.lint.run_linting(self.test_pipeline_dir, False) - - def test_init_pipeline_lint(self): - """Simply create a PipelineLint object. - - This checks that all of the lint test imports are working properly, - we also check that the git sha was found and that the release flag works properly - """ - lint_obj = nf_core.lint.PipelineLint(self.test_pipeline_dir, True) - - # Tests that extra test is added for release mode - assert "version_consistency" in lint_obj.lint_tests - - # Tests that parent nf_core.utils.Pipeline class __init__() is working to find git hash - assert len(lint_obj.git_sha) > 0 - - def test_load_lint_config_not_found(self): - """Try to load a linting config file that doesn't exist""" - self.lint_obj._load_lint_config() - assert self.lint_obj.lint_config == {} - - def test_load_lint_config_ignore_all_tests(self): - """Try to load a linting config file that ignores all tests""" - - # Make a copy of the test pipeline and create a lint object - new_pipeline = self._make_pipeline_copy() - lint_obj = nf_core.lint.PipelineLint(new_pipeline) - - # Make a config file listing all test names - config_dict = {"lint": {test_name: False for test_name in lint_obj.lint_tests}} - with open(os.path.join(new_pipeline, ".nf-core.yml"), "w") as fh: - yaml.dump(config_dict, fh) - - # Load the new lint config file and check - lint_obj._load_lint_config() - assert sorted(list(lint_obj.lint_config.keys())) == sorted(lint_obj.lint_tests) - - # Try running linting and make sure that all tests are ignored - lint_obj._lint_pipeline() - assert len(lint_obj.passed) == 0 - assert len(lint_obj.warned) == 0 - assert len(lint_obj.failed) == 0 - assert len(lint_obj.ignored) == len(lint_obj.lint_tests) - - @with_temporary_folder - def test_json_output(self, tmp_dir): - """ - Test creation of a JSON file with lint results - - Expected JSON output: - { - "nf_core_tools_version": "1.10.dev0", - "date_run": "2020-06-05 10:56:42", - "tests_pass": [ - [ 1, "This test passed"], - [ 2, "This test also passed"] - ], - "tests_warned": [ - [ 2, "This test gave a warning"] - ], - "tests_failed": [], - "num_tests_pass": 2, - "num_tests_warned": 1, - "num_tests_failed": 0, - "has_tests_pass": true, - "has_tests_warned": true, - "has_tests_failed": false - } - """ - self.lint_obj.passed.append(("test_one", "This test passed")) - self.lint_obj.passed.append(("test_two", "This test also passed")) - self.lint_obj.warned.append(("test_three", "This test gave a warning")) - - # Make a temp dir for the JSON output - json_fn = os.path.join(tmp_dir, "lint_results.json") - 
self.lint_obj._save_json_results(json_fn) - - # Load created JSON file and check its contents - with open(json_fn) as fh: - try: - saved_json = json.load(fh) - except json.JSONDecodeError as e: - raise UserWarning(f"Unable to load JSON file '{json_fn}' due to error {e}") - assert saved_json["num_tests_pass"] > 0 - assert saved_json["num_tests_warned"] > 0 - assert saved_json["num_tests_ignored"] == 0 - assert saved_json["num_tests_failed"] == 0 - assert saved_json["has_tests_pass"] - assert saved_json["has_tests_warned"] - assert not saved_json["has_tests_ignored"] - assert not saved_json["has_tests_failed"] - - def test_wrap_quotes(self): - md = self.lint_obj._wrap_quotes(["one", "two", "three"]) - assert md == "`one` or `two` or `three`" - - def test_sphinx_md_files(self): - """Check that we have .md files for all lint module code, - and that there are no unexpected files (eg. deleted lint tests)""" - - docs_basedir = os.path.join( - os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "docs", "api", "_src", "pipeline_lint_tests" - ) - - # Get list of existing .md files - existing_docs = [] - for fn in os.listdir(docs_basedir): - if fnmatch.fnmatch(fn, "*.md") and not fnmatch.fnmatch(fn, "index.md"): - existing_docs.append(os.path.join(docs_basedir, fn)) - - # Check .md files against each test name - lint_obj = nf_core.lint.PipelineLint("", True) - for test_name in lint_obj.lint_tests: - fn = os.path.join(docs_basedir, f"{test_name}.md") - assert os.path.exists(fn), f"Could not find lint docs .md file: {fn}" - existing_docs.remove(fn) - - # Check that we have no remaining .md files that we didn't expect - assert len(existing_docs) == 0, f"Unexpected lint docs .md files found: {', '.join(existing_docs)}" - - ####################### - # SPECIFIC LINT TESTS # - ####################### - from .lint.actions_awsfulltest import ( # type: ignore[misc] - test_actions_awsfulltest_fail, - test_actions_awsfulltest_pass, - test_actions_awsfulltest_warn, - ) - from .lint.actions_awstest import ( # type: ignore[misc] - test_actions_awstest_fail, - test_actions_awstest_pass, - ) - from .lint.actions_ci import ( # type: ignore[misc] - test_actions_ci_fail_wrong_nf, - test_actions_ci_fail_wrong_trigger, - test_actions_ci_pass, - ) - from .lint.actions_schema_validation import ( # type: ignore[misc] - test_actions_schema_validation_fails_for_additional_property, - test_actions_schema_validation_missing_jobs, - test_actions_schema_validation_missing_on, - ) - from .lint.configs import ( # type: ignore[misc] - test_ignore_base_config, - test_ignore_modules_config, - test_superfluous_withname_in_base_config_fails, - test_superfluous_withname_in_modules_config_fails, - test_withname_in_modules_config, - ) - from .lint.files_exist import ( # type: ignore[misc] - test_files_exist_depreciated_file, - test_files_exist_fail_conditional, - test_files_exist_missing_config, - test_files_exist_missing_main, - test_files_exist_pass, - test_files_exist_pass_conditional, - ) - from .lint.files_unchanged import ( # type: ignore[misc] - test_files_unchanged_fail, - test_files_unchanged_pass, - ) - from .lint.merge_markers import test_merge_markers_found # type: ignore[misc] - from .lint.modules_json import test_modules_json_pass # type: ignore[misc] - from .lint.multiqc_config import ( # type: ignore[misc] - test_multiqc_config_exists, - test_multiqc_config_ignore, - test_multiqc_config_missing_report_section_order, - test_multiqc_config_report_comment_fail, - test_multiqc_config_report_comment_release_fail, - 
test_multiqc_config_report_comment_release_succeed, - test_multiqc_incorrect_export_plots, - ) - from .lint.nextflow_config import ( # type: ignore[misc] - test_allow_params_reference_in_main_nf, - test_catch_params_assignment_in_main_nf, - test_default_values_fail, - test_default_values_float, - test_default_values_float_fail, - test_default_values_ignored, - test_default_values_match, - test_nextflow_config_bad_name_fail, - test_nextflow_config_dev_in_release_mode_failed, - test_nextflow_config_example_pass, - test_nextflow_config_missing_test_profile_failed, - ) - from .lint.nfcore_yml import ( # type: ignore[misc] - test_nfcore_yml_fail_nfcore_version, - test_nfcore_yml_fail_repo_type, - test_nfcore_yml_pass, - ) - from .lint.template_strings import ( # type: ignore[misc] - test_template_strings, - test_template_strings_ignore_file, - test_template_strings_ignored, - ) - from .lint.version_consistency import test_version_consistency # type: ignore[misc] - - -# TODO nf-core: Assess and strip out if no longer required for DSL2 - -# def test_critical_missingfiles_example(self): -# """Tests for missing nextflow config and main.nf files""" -# lint_obj = nf_core.lint.run_linting(PATH_CRITICAL_EXAMPLE, False) -# assert len(lint_obj.failed) > 0 -# -# def test_failing_missingfiles_example(self): -# """Tests for missing files like Dockerfile or LICENSE""" -# lint_obj = nf_core.lint.PipelineLint(PATH_FAILING_EXAMPLE) -# lint_obj.check_files_exist() -# expectations = {"failed": 6, "warned": 2, "passed": 14} -# self.assess_lint_status(lint_obj, **expectations) -# -# def test_mit_licence_example_pass(self): -# """Tests that MIT test works with good MIT licences""" -# good_lint_obj = nf_core.lint.PipelineLint(PATH_CRITICAL_EXAMPLE) -# good_lint_obj.check_licence() -# expectations = {"failed": 0, "warned": 0, "passed": 1} -# self.assess_lint_status(good_lint_obj, **expectations) -# -# def test_mit_license_example_with_failed(self): -# """Tests that MIT test works with bad MIT licences""" -# bad_lint_obj = nf_core.lint.PipelineLint(PATH_FAILING_EXAMPLE) -# bad_lint_obj.check_licence() -# expectations = {"failed": 1, "warned": 0, "passed": 0} -# self.assess_lint_status(bad_lint_obj, **expectations) -# -# def test_config_variable_example_pass(self): -# """Tests that config variable existence test works with good pipeline example""" -# good_lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# good_lint_obj.check_nextflow_config() -# expectations = {"failed": 0, "warned": 1, "passed": 34} -# self.assess_lint_status(good_lint_obj, **expectations) -# -# def test_config_variable_example_with_failed(self): -# """Tests that config variable existence test fails with bad pipeline example""" -# bad_lint_obj = nf_core.lint.PipelineLint(PATH_FAILING_EXAMPLE) -# bad_lint_obj.check_nextflow_config() -# expectations = {"failed": 19, "warned": 6, "passed": 10} -# self.assess_lint_status(bad_lint_obj, **expectations) -# -# @pytest.mark.xfail(raises=AssertionError, strict=True) -# def test_config_variable_error(self): -# """Tests that config variable existence test falls over nicely with nextflow can't run""" -# bad_lint_obj = nf_core.lint.PipelineLint("/non/existant/path") -# bad_lint_obj.check_nextflow_config() -# -# -# def test_wrong_license_examples_with_failed(self): -# """Tests for checking the license test behavior""" -# for example in PATHS_WRONG_LICENSE_EXAMPLE: -# lint_obj = nf_core.lint.PipelineLint(example) -# lint_obj.check_licence() -# expectations = {"failed": 1, "warned": 0, "passed": 0} -# 
self.assess_lint_status(lint_obj, **expectations) -# -# def test_missing_license_example(self): -# """Tests for missing license behavior""" -# lint_obj = nf_core.lint.PipelineLint(PATH_MISSING_LICENSE_EXAMPLE) -# lint_obj.check_licence() -# expectations = {"failed": 1, "warned": 0, "passed": 0} -# self.assess_lint_status(lint_obj, **expectations) -# -# def test_readme_pass(self): -# """Tests that the pipeline README file checks work with a good example""" -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# lint_obj.minNextflowVersion = "20.04.0" -# lint_obj.files = ["environment.yml"] -# lint_obj.check_readme() -# expectations = {"failed": 0, "warned": 0, "passed": 2} -# self.assess_lint_status(lint_obj, **expectations) -# -# def test_readme_warn(self): -# """Tests that the pipeline README file checks fail """ -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# lint_obj.minNextflowVersion = "0.28.0" -# lint_obj.check_readme() -# expectations = {"failed": 1, "warned": 0, "passed": 0} -# self.assess_lint_status(lint_obj, **expectations) -# -# def test_readme_fail(self): -# """Tests that the pipeline README file checks give warnings with a bad example""" -# lint_obj = nf_core.lint.PipelineLint(PATH_FAILING_EXAMPLE) -# lint_obj.files = ["environment.yml"] -# lint_obj.check_readme() -# expectations = {"failed": 0, "warned": 2, "passed": 0} -# self.assess_lint_status(lint_obj, **expectations) -# -# def test_dockerfile_pass(self): -# """Tests if a valid Dockerfile passes the lint checks""" -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# lint_obj.files = ["Dockerfile"] -# lint_obj.check_docker() -# expectations = {"failed": 0, "warned": 0, "passed": 1} -# self.assess_lint_status(lint_obj, **expectations) -# -# def test_version_consistency_pass(self): -# """Tests the workflow version and container version sucessfully""" -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# lint_obj.config["manifest.version"] = "0.4" -# lint_obj.config["process.container"] = "nfcore/tools:0.4" -# lint_obj.check_version_consistency() -# expectations = {"failed": 0, "warned": 0, "passed": 1} -# self.assess_lint_status(lint_obj, **expectations) -# -# def test_version_consistency_with_env_fail(self): -# """Tests the behaviour, when a git activity is a release -# and simulate wrong release tag""" -# os.environ["GITHUB_REF"] = "refs/tags/0.5" -# os.environ["GITHUB_REPOSITORY"] = "nf-core/testpipeline" -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# lint_obj.config["manifest.version"] = "0.4" -# lint_obj.config["process.container"] = "nfcore/tools:0.4" -# lint_obj.check_version_consistency() -# expectations = {"failed": 1, "warned": 0, "passed": 0} -# self.assess_lint_status(lint_obj, **expectations) -# -# def test_version_consistency_with_numeric_fail(self): -# """Tests the behaviour, when a git activity is a release -# and simulate wrong release tag""" -# os.environ["GITHUB_REF"] = "refs/tags/0.5dev" -# os.environ["GITHUB_REPOSITORY"] = "nf-core/testpipeline" -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# lint_obj.config["manifest.version"] = "0.4" -# lint_obj.config["process.container"] = "nfcore/tools:0.4" -# lint_obj.check_version_consistency() -# expectations = {"failed": 1, "warned": 0, "passed": 0} -# self.assess_lint_status(lint_obj, **expectations) -# -# def test_version_consistency_with_no_docker_version_fail(self): -# """Tests the behaviour, when a git activity is a release -# and simulate wrong missing docker version 
tag""" -# os.environ["GITHUB_REF"] = "refs/tags/0.4" -# os.environ["GITHUB_REPOSITORY"] = "nf-core/testpipeline" -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# lint_obj.config["manifest.version"] = "0.4" -# lint_obj.config["process.container"] = "nfcore/tools" -# lint_obj.check_version_consistency() -# expectations = {"failed": 1, "warned": 0, "passed": 0} -# self.assess_lint_status(lint_obj, **expectations) -# -# def test_version_consistency_with_env_pass(self): -# """Tests the behaviour, when a git activity is a release -# and simulate correct release tag""" -# os.environ["GITHUB_REF"] = "refs/tags/0.4" -# os.environ["GITHUB_REPOSITORY"] = "nf-core/testpipeline" -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# lint_obj.config["manifest.version"] = "0.4" -# lint_obj.config["process.container"] = "nfcore/tools:0.4" -# lint_obj.check_version_consistency() -# expectations = {"failed": 0, "warned": 0, "passed": 1} -# self.assess_lint_status(lint_obj, **expectations) -# -# def test_conda_env_pass(self): -# """ Tests the conda environment config checks with a working example """ -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# lint_obj.files = ["environment.yml"] -# with open(os.path.join(PATH_WORKING_EXAMPLE, "environment.yml"), "r") as fh: -# lint_obj.conda_config = yaml.safe_load(fh) -# lint_obj.pipeline_name = "tools" -# lint_obj.config["manifest.version"] = "0.4" -# lint_obj.check_conda_env_yaml() -# expectations = {"failed": 0, "warned": 4, "passed": 5} -# self.assess_lint_status(lint_obj, **expectations) -# -# def test_conda_env_fail(self): -# """ Tests the conda environment config fails with a bad example """ -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# lint_obj.files = ["environment.yml"] -# with open(os.path.join(PATH_WORKING_EXAMPLE, "environment.yml"), "r") as fh: -# lint_obj.conda_config = yaml.safe_load(fh) -# lint_obj.conda_config["dependencies"] = ["fastqc", "multiqc=0.9", "notapackaage=0.4"] -# lint_obj.pipeline_name = "not_tools" -# lint_obj.config["manifest.version"] = "0.23" -# lint_obj.check_conda_env_yaml() -# expectations = {"failed": 3, "warned": 1, "passed": 2} -# self.assess_lint_status(lint_obj, **expectations) -# -# @mock.patch("requests.get") -# @pytest.mark.xfail(raises=ValueError, strict=True) -# def test_conda_env_timeout(self, mock_get): -# """ Tests the conda environment handles API timeouts """ -# # Define the behaviour of the request get mock -# mock_get.side_effect = requests.exceptions.Timeout() -# # Now do the test -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# lint_obj.conda_config["channels"] = ["bioconda"] -# lint_obj.check_anaconda_package("multiqc=1.6") -# -# def test_conda_env_skip(self): -# """ Tests the conda environment config is skipped when not needed """ -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# lint_obj.check_conda_env_yaml() -# expectations = {"failed": 0, "warned": 0, "passed": 0} -# self.assess_lint_status(lint_obj, **expectations) -# -# def test_conda_dockerfile_pass(self): -# """ Tests the conda Dockerfile test works with a working example """ -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# lint_obj.version = "1.11" -# lint_obj.files = ["environment.yml", "Dockerfile"] -# with open(os.path.join(PATH_WORKING_EXAMPLE, "Dockerfile"), "r") as fh: -# lint_obj.dockerfile = fh.read().splitlines() -# lint_obj.conda_config["name"] = "nf-core-tools-0.4" -# lint_obj.check_conda_dockerfile() -# expectations = {"failed": 0, 
"warned": 0, "passed": 1} -# self.assess_lint_status(lint_obj, **expectations) -# -# def test_conda_dockerfile_fail(self): -# """ Tests the conda Dockerfile test fails with a bad example """ -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# lint_obj.version = "1.11" -# lint_obj.files = ["environment.yml", "Dockerfile"] -# lint_obj.conda_config["name"] = "nf-core-tools-0.4" -# lint_obj.dockerfile = ["fubar"] -# lint_obj.check_conda_dockerfile() -# expectations = {"failed": 5, "warned": 0, "passed": 0} -# self.assess_lint_status(lint_obj, **expectations) -# -# def test_conda_dockerfile_skip(self): -# """ Tests the conda Dockerfile test is skipped when not needed """ -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# lint_obj.check_conda_dockerfile() -# expectations = {"failed": 0, "warned": 0, "passed": 0} -# self.assess_lint_status(lint_obj, **expectations) -# -# def test_pip_no_version_fail(self): -# """ Tests the pip dependency version definition is present """ -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# lint_obj.files = ["environment.yml"] -# lint_obj.pipeline_name = "tools" -# lint_obj.config["manifest.version"] = "0.4" -# lint_obj.conda_config = {"name": "nf-core-tools-0.4", "dependencies": [{"pip": ["multiqc"]}]} -# lint_obj.check_conda_env_yaml() -# expectations = {"failed": 1, "warned": 0, "passed": 1} -# self.assess_lint_status(lint_obj, **expectations) -# -# def test_pip_package_not_latest_warn(self): -# """ Tests the pip dependency version definition is present """ -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# lint_obj.files = ["environment.yml"] -# lint_obj.pipeline_name = "tools" -# lint_obj.config["manifest.version"] = "0.4" -# lint_obj.conda_config = {"name": "nf-core-tools-0.4", "dependencies": [{"pip": ["multiqc==1.4"]}]} -# lint_obj.check_conda_env_yaml() -# expectations = {"failed": 0, "warned": 1, "passed": 2} -# self.assess_lint_status(lint_obj, **expectations) -# -# @mock.patch("requests.get") -# def test_pypi_timeout_warn(self, mock_get): -# """Tests the PyPi connection and simulates a request timeout, which should -# return in an addiional warning in the linting""" -# # Define the behaviour of the request get mock -# mock_get.side_effect = requests.exceptions.Timeout() -# # Now do the test -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# lint_obj.files = ["environment.yml"] -# lint_obj.pipeline_name = "tools" -# lint_obj.config["manifest.version"] = "0.4" -# lint_obj.conda_config = {"name": "nf-core-tools-0.4", "dependencies": [{"pip": ["multiqc==1.5"]}]} -# lint_obj.check_conda_env_yaml() -# expectations = {"failed": 0, "warned": 1, "passed": 2} -# self.assess_lint_status(lint_obj, **expectations) -# -# @mock.patch("requests.get") -# def test_pypi_connection_error_warn(self, mock_get): -# """Tests the PyPi connection and simulates a connection error, which should -# result in an additional warning, as we cannot test if dependent module is latest""" -# # Define the behaviour of the request get mock -# mock_get.side_effect = requests.exceptions.ConnectionError() -# # Now do the test -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# lint_obj.files = ["environment.yml"] -# lint_obj.pipeline_name = "tools" -# lint_obj.config["manifest.version"] = "0.4" -# lint_obj.conda_config = {"name": "nf-core-tools-0.4", "dependencies": [{"pip": ["multiqc==1.5"]}]} -# lint_obj.check_conda_env_yaml() -# expectations = {"failed": 0, "warned": 1, "passed": 2} -# 
self.assess_lint_status(lint_obj, **expectations) -# -# def test_pip_dependency_fail(self): -# """ Tests the PyPi API package information query """ -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# lint_obj.files = ["environment.yml"] -# lint_obj.pipeline_name = "tools" -# lint_obj.config["manifest.version"] = "0.4" -# lint_obj.conda_config = {"name": "nf-core-tools-0.4", "dependencies": [{"pip": ["notpresent==1.5"]}]} -# lint_obj.check_conda_env_yaml() -# expectations = {"failed": 1, "warned": 0, "passed": 2} -# self.assess_lint_status(lint_obj, **expectations) -# -# def test_conda_dependency_fails(self): -# """Tests that linting fails, if conda dependency -# package version is not available on Anaconda. -# """ -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# lint_obj.files = ["environment.yml"] -# lint_obj.pipeline_name = "tools" -# lint_obj.config["manifest.version"] = "0.4" -# lint_obj.conda_config = {"name": "nf-core-tools-0.4", "dependencies": ["openjdk=0.0.0"]} -# lint_obj.check_conda_env_yaml() -# expectations = {"failed": 1, "warned": 0, "passed": 2} -# self.assess_lint_status(lint_obj, **expectations) -# -# def test_pip_dependency_fails(self): -# """Tests that linting fails, if conda dependency -# package version is not available on Anaconda. -# """ -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# lint_obj.files = ["environment.yml"] -# lint_obj.pipeline_name = "tools" -# lint_obj.config["manifest.version"] = "0.4" -# lint_obj.conda_config = {"name": "nf-core-tools-0.4", "dependencies": [{"pip": ["multiqc==0.0"]}]} -# lint_obj.check_conda_env_yaml() -# expectations = {"failed": 1, "warned": 0, "passed": 2} -# self.assess_lint_status(lint_obj, **expectations) -# -# def test_pipeline_name_pass(self): -# """Tests pipeline name good pipeline example: lower case, no punctuation""" -# # good_lint_obj = nf_core.lint.run_linting(PATH_WORKING_EXAMPLE) -# good_lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# good_lint_obj.pipeline_name = "tools" -# good_lint_obj.check_pipeline_name() -# expectations = {"failed": 0, "warned": 0, "passed": 1} -# self.assess_lint_status(good_lint_obj, **expectations) -# -# def test_pipeline_name_critical(self): -# """Tests that warning is returned for pipeline not adhering to naming convention""" -# critical_lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# critical_lint_obj.pipeline_name = "Tools123" -# critical_lint_obj.check_pipeline_name() -# expectations = {"failed": 0, "warned": 1, "passed": 0} -# self.assess_lint_status(critical_lint_obj, **expectations) -# diff --git a/tests/test_lint_utils.py b/tests/test_lint_utils.py index 2b624a3ec..a4b7caf30 100644 --- a/tests/test_lint_utils.py +++ b/tests/test_lint_utils.py @@ -3,7 +3,7 @@ import git import pytest -import nf_core.lint_utils +import nf_core.pipelines.lint_utils JSON_WITH_SYNTAX_ERROR = "{'a':1, 1}" JSON_MALFORMED = "{'a':1}" @@ -50,16 +50,16 @@ def git_dir_with_json_syntax_error(temp_git_repo): def test_run_prettier_on_formatted_file(formatted_json): - nf_core.lint_utils.run_prettier_on_file(formatted_json) + nf_core.pipelines.lint_utils.run_prettier_on_file(formatted_json) assert formatted_json.read_text() == JSON_FORMATTED def test_run_prettier_on_malformed_file(malformed_json): - nf_core.lint_utils.run_prettier_on_file(malformed_json) + nf_core.pipelines.lint_utils.run_prettier_on_file(malformed_json) assert malformed_json.read_text() == JSON_FORMATTED def test_run_prettier_on_syntax_error_file(syntax_error_json, caplog): 
- nf_core.lint_utils.run_prettier_on_file(syntax_error_json) + nf_core.pipelines.lint_utils.run_prettier_on_file(syntax_error_json) expected_critical_log = "SyntaxError: Unexpected token (1:10)" assert expected_critical_log in caplog.text diff --git a/tests/test_modules.py b/tests/test_modules.py index d3d99abad..d0692236e 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -1,17 +1,23 @@ """Tests covering the modules commands""" import json -import os -import shutil import unittest from pathlib import Path +import pytest import requests_cache import responses -import yaml +import ruamel.yaml -import nf_core.create import nf_core.modules +import nf_core.modules.create +import nf_core.modules.install +import nf_core.modules.modules_repo +import nf_core.modules.remove +import nf_core.pipelines.create.create +from nf_core import __version__ +from nf_core.pipelines.lint_utils import run_prettier_on_file +from nf_core.utils import NFCoreYamlConfig from .utils import ( GITLAB_BRANCH_TEST_BRANCH, @@ -23,35 +29,45 @@ create_tmp_pipeline, mock_anaconda_api_calls, mock_biocontainers_api_calls, + mock_biotools_api_calls, ) def create_modules_repo_dummy(tmp_dir): """Create a dummy copy of the nf-core/modules repo""" + yaml = ruamel.yaml.YAML() + yaml.preserve_quotes = True + yaml.indent(mapping=2, sequence=2, offset=0) root_dir = Path(tmp_dir, "modules") Path(root_dir, "modules", "nf-core").mkdir(parents=True) Path(root_dir, "tests", "modules", "nf-core").mkdir(parents=True) Path(root_dir, "tests", "config").mkdir(parents=True) + + nf_core_yml = NFCoreYamlConfig(nf_core_version=__version__, repository_type="modules", org_path="nf-core") with open(Path(root_dir, ".nf-core.yml"), "w") as fh: - fh.writelines(["repository_type: modules", "\n", "org_path: nf-core", "\n"]) - # mock biocontainers and anaconda response + yaml.dump(nf_core_yml.model_dump(), fh) + # mock biocontainers and anaconda response and biotools response with responses.RequestsMock() as rsps: - mock_anaconda_api_calls(rsps, "bpipe", "0.9.11--hdfd78af_0") - mock_biocontainers_api_calls(rsps, "bpipe", "0.9.11--hdfd78af_0") + mock_anaconda_api_calls(rsps, "bpipe", "0.9.13--hdfd78af_0") + mock_biocontainers_api_calls(rsps, "bpipe", "0.9.13--hdfd78af_0") + mock_biotools_api_calls(rsps, "bpipe") # bpipe is a valid package on bioconda that is very unlikely to ever be added to nf-core/modules - module_create = nf_core.modules.ModuleCreate(root_dir, "bpipe/test", "@author", "process_single", False, False) + module_create = nf_core.modules.create.ModuleCreate( + root_dir, "bpipe/test", "@author", "process_single", True, False + ) with requests_cache.disabled(): - module_create.create() + assert module_create.create() # Remove doi from meta.yml which makes lint fail meta_yml_path = Path(root_dir, "modules", "nf-core", "bpipe", "test", "meta.yml") - with open(meta_yml_path) as fh: - meta_yml = yaml.safe_load(fh) + with open(str(meta_yml_path)) as fh: + meta_yml = yaml.load(fh) del meta_yml["tools"][0]["bpipe"]["doi"] - with open(meta_yml_path, "w") as fh: + with open(str(meta_yml_path), "w") as fh: yaml.dump(meta_yml, fh) + run_prettier_on_file(fh.name) # Add dummy content to main.nf.test.snap test_snap_path = Path(root_dir, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test.snap") @@ -99,9 +115,10 @@ def setUp(self): # Set up the schema self.tmp_dir, self.template_dir, self.pipeline_name, self.pipeline_dir = create_tmp_pipeline() + # Set up install objects - self.mods_install = 
nf_core.modules.ModuleInstall(self.pipeline_dir, prompt=False, force=True) - self.mods_install_old = nf_core.modules.ModuleInstall( + self.mods_install = nf_core.modules.install.ModuleInstall(self.pipeline_dir, prompt=False, force=True) + self.mods_install_old = nf_core.modules.install.ModuleInstall( self.pipeline_dir, prompt=False, force=False, @@ -109,21 +126,21 @@ def setUp(self): remote_url=GITLAB_URL, branch=OLD_TRIMGALORE_BRANCH, ) - self.mods_install_trimgalore = nf_core.modules.ModuleInstall( + self.mods_install_trimgalore = nf_core.modules.install.ModuleInstall( self.pipeline_dir, prompt=False, force=False, remote_url=GITLAB_URL, branch=OLD_TRIMGALORE_BRANCH, ) - self.mods_install_gitlab = nf_core.modules.ModuleInstall( + self.mods_install_gitlab = nf_core.modules.install.ModuleInstall( self.pipeline_dir, prompt=False, force=False, remote_url=GITLAB_URL, branch=GITLAB_DEFAULT_BRANCH, ) - self.mods_install_gitlab_old = nf_core.modules.ModuleInstall( + self.mods_install_gitlab_old = nf_core.modules.install.ModuleInstall( self.pipeline_dir, prompt=False, force=False, @@ -133,8 +150,8 @@ def setUp(self): ) # Set up remove objects - self.mods_remove = nf_core.modules.ModuleRemove(self.pipeline_dir) - self.mods_remove_gitlab = nf_core.modules.ModuleRemove( + self.mods_remove = nf_core.modules.remove.ModuleRemove(self.pipeline_dir) + self.mods_remove_gitlab = nf_core.modules.remove.ModuleRemove( self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_DEFAULT_BRANCH, @@ -143,144 +160,12 @@ def setUp(self): # Set up the nf-core/modules repo dummy self.nfcore_modules = create_modules_repo_dummy(self.tmp_dir) - def tearDown(self): - """Clean up temporary files and folders""" - - if os.path.exists(self.tmp_dir): - shutil.rmtree(self.tmp_dir) - def test_modulesrepo_class(self): """Initialise a modules repo object""" - modrepo = nf_core.modules.ModulesRepo() + modrepo = nf_core.modules.modules_repo.ModulesRepo() assert modrepo.repo_path == "nf-core" assert modrepo.branch == "master" - ############################################ - # Test of the individual modules commands. 
# - ############################################ - - from .modules.bump_versions import ( # type: ignore[misc] - test_modules_bump_versions_all_modules, - test_modules_bump_versions_fail, - test_modules_bump_versions_fail_unknown_version, - test_modules_bump_versions_single_module, - ) - from .modules.create import ( # type: ignore[misc] - test_modules_create_fail_exists, - test_modules_create_nfcore_modules, - test_modules_create_nfcore_modules_subtool, - test_modules_create_succeed, - test_modules_migrate, - test_modules_migrate_no_delete, - test_modules_migrate_symlink, - ) - from .modules.info import ( # type: ignore[misc] - test_modules_info_in_modules_repo, - test_modules_info_local, - test_modules_info_remote, - test_modules_info_remote_gitlab, - ) - from .modules.install import ( # type: ignore[misc] - test_modules_install_alternate_remote, - test_modules_install_different_branch_fail, - test_modules_install_different_branch_succeed, - test_modules_install_emptypipeline, - test_modules_install_from_gitlab, - test_modules_install_nomodule, - test_modules_install_nopipeline, - test_modules_install_tracking, - test_modules_install_trimgalore, - test_modules_install_trimgalore_twice, - ) - from .modules.lint import ( # type: ignore[misc] - test_modules_absent_version, - test_modules_empty_file_in_snapshot, - test_modules_empty_file_in_stub_snapshot, - test_modules_environment_yml_file_doesnt_exists, - test_modules_environment_yml_file_name_mismatch, - test_modules_environment_yml_file_not_array, - test_modules_environment_yml_file_sorted_correctly, - test_modules_environment_yml_file_sorted_incorrectly, - test_modules_incorrect_tags_yml_key, - test_modules_incorrect_tags_yml_values, - test_modules_lint_check_process_labels, - test_modules_lint_check_url, - test_modules_lint_empty, - test_modules_lint_gitlab_modules, - test_modules_lint_multiple_remotes, - test_modules_lint_new_modules, - test_modules_lint_no_gitlab, - test_modules_lint_patched_modules, - test_modules_lint_snapshot_file, - test_modules_lint_snapshot_file_missing_fail, - test_modules_lint_snapshot_file_not_needed, - test_modules_lint_trimgalore, - test_modules_meta_yml_incorrect_licence_field, - test_modules_meta_yml_incorrect_name, - test_modules_meta_yml_input_mismatch, - test_modules_meta_yml_output_mismatch, - test_modules_missing_required_tag, - test_modules_missing_tags_yml, - test_modules_missing_test_dir, - test_modules_missing_test_main_nf, - test_modules_unused_pytest_files, - test_nftest_failing_linting, - ) - from .modules.list import ( # type: ignore[misc] - test_modules_install_and_list_pipeline, - test_modules_install_gitlab_and_list_pipeline, - test_modules_list_in_wrong_repo_fail, - test_modules_list_local_json, - test_modules_list_pipeline, - test_modules_list_remote, - test_modules_list_remote_gitlab, - test_modules_list_remote_json, - test_modules_list_with_keywords, - test_modules_list_with_one_keyword, - test_modules_list_with_unused_keyword, - ) - from .modules.modules_json import ( # type: ignore[misc] - test_get_modules_json, - test_mod_json_create, - test_mod_json_create_with_patch, - test_mod_json_dump, - test_mod_json_get_module_version, - test_mod_json_module_present, - test_mod_json_repo_present, - test_mod_json_up_to_date, - test_mod_json_up_to_date_module_removed, - test_mod_json_up_to_date_reinstall_fails, - test_mod_json_update, - test_mod_json_with_empty_modules_value, - test_mod_json_with_missing_modules_entry, - ) - from .modules.patch import ( # type: ignore[misc] - 
test_create_patch_change, - test_create_patch_no_change, - test_create_patch_try_apply_failed, - test_create_patch_try_apply_successful, - test_create_patch_update_fail, - test_create_patch_update_success, - test_remove_patch, - ) - from .modules.remove import ( # type: ignore[misc] - test_modules_remove_multiqc_from_gitlab, - test_modules_remove_trimgalore, - test_modules_remove_trimgalore_uninstalled, - ) - from .modules.update import ( # type: ignore[misc] - test_install_and_update, - test_install_at_hash_and_update, - test_install_at_hash_and_update_and_save_diff_to_file, - test_update_all, - test_update_different_branch_mix_modules_branch_test, - test_update_different_branch_mixed_modules_main, - test_update_different_branch_single_module, - test_update_module_with_extra_config_file, - test_update_only_show_differences, - test_update_only_show_differences_when_patch, - test_update_with_config_dont_update, - test_update_with_config_fix_all, - test_update_with_config_fixed_version, - test_update_with_config_no_updates, - ) + @pytest.fixture(autouse=True) + def _use_caplog(self, caplog): + self.caplog = caplog diff --git a/tests/test_pipelines.py b/tests/test_pipelines.py new file mode 100644 index 000000000..656ccbef5 --- /dev/null +++ b/tests/test_pipelines.py @@ -0,0 +1,26 @@ +import shutil +from unittest import TestCase + +from nf_core.utils import Pipeline + +from .utils import create_tmp_pipeline + + +class TestPipelines(TestCase): + def setUp(self) -> None: + """Create a new Pipeline for testing""" + self.tmp_dir, self.template_dir, self.pipeline_name, self.pipeline_dir = create_tmp_pipeline() + self.pipeline_obj = Pipeline(self.pipeline_dir) + self.pipeline_obj._load() + + def tearDown(self) -> None: + """Remove the test pipeline directory""" + shutil.rmtree(self.tmp_dir) + + def _make_pipeline_copy(self): + """Make a copy of the test pipeline that can be edited + + Returns: Path to new temp directory with pipeline""" + new_pipeline = self.tmp_dir / "nf-core-testpipeline-copy" + shutil.copytree(self.pipeline_dir, new_pipeline) + return new_pipeline diff --git a/tests/test_subworkflows.py b/tests/test_subworkflows.py index 0a9224002..7c18ab0a2 100644 --- a/tests/test_subworkflows.py +++ b/tests/test_subworkflows.py @@ -1,13 +1,14 @@ """Tests covering the subworkflows commands""" import json -import os -import shutil import unittest from pathlib import Path -import nf_core.create +import pytest + import nf_core.modules +import nf_core.modules.install +import nf_core.pipelines.create.create import nf_core.subworkflows from .utils import ( @@ -33,7 +34,14 @@ def create_modules_repo_dummy(tmp_dir): subworkflow_create.create() # Add dummy content to main.nf.test.snap - test_snap_path = Path(root_dir, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test.snap") + test_snap_path = Path( + root_dir, + "subworkflows", + "nf-core", + "test_subworkflow", + "tests", + "main.nf.test.snap", + ) test_snap_path.parent.mkdir(parents=True, exist_ok=True) with open(test_snap_path, "w") as fh: json.dump( @@ -63,13 +71,18 @@ def setUp(self): # Set up the pipeline structure self.tmp_dir, self.template_dir, self.pipeline_name, self.pipeline_dir = create_tmp_pipeline() + # Set up the nf-core/modules repo dummy self.nfcore_modules = create_modules_repo_dummy(self.tmp_dir) # Set up install objects self.subworkflow_install = nf_core.subworkflows.SubworkflowInstall(self.pipeline_dir, prompt=False, force=False) self.subworkflow_install_gitlab = nf_core.subworkflows.SubworkflowInstall( - 
diff --git a/tests/test_subworkflows.py b/tests/test_subworkflows.py
index 0a9224002..7c18ab0a2 100644
--- a/tests/test_subworkflows.py
+++ b/tests/test_subworkflows.py
@@ -1,13 +1,14 @@
 """Tests covering the subworkflows commands"""
 
 import json
-import os
-import shutil
 import unittest
 from pathlib import Path
 
-import nf_core.create
+import pytest
+
 import nf_core.modules
+import nf_core.modules.install
+import nf_core.pipelines.create.create
 import nf_core.subworkflows
 
 from .utils import (
@@ -33,7 +34,14 @@ def create_modules_repo_dummy(tmp_dir):
     subworkflow_create.create()
 
     # Add dummy content to main.nf.test.snap
-    test_snap_path = Path(root_dir, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test.snap")
+    test_snap_path = Path(
+        root_dir,
+        "subworkflows",
+        "nf-core",
+        "test_subworkflow",
+        "tests",
+        "main.nf.test.snap",
+    )
     test_snap_path.parent.mkdir(parents=True, exist_ok=True)
     with open(test_snap_path, "w") as fh:
         json.dump(
@@ -63,13 +71,18 @@ def setUp(self):
 
         # Set up the pipeline structure
         self.tmp_dir, self.template_dir, self.pipeline_name, self.pipeline_dir = create_tmp_pipeline()
+
         # Set up the nf-core/modules repo dummy
         self.nfcore_modules = create_modules_repo_dummy(self.tmp_dir)
 
         # Set up install objects
         self.subworkflow_install = nf_core.subworkflows.SubworkflowInstall(self.pipeline_dir, prompt=False, force=False)
         self.subworkflow_install_gitlab = nf_core.subworkflows.SubworkflowInstall(
-            self.pipeline_dir, prompt=False, force=False, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH
+            self.pipeline_dir,
+            prompt=False,
+            force=False,
+            remote_url=GITLAB_URL,
+            branch=GITLAB_SUBWORKFLOWS_BRANCH,
         )
         self.subworkflow_install_gitlab_same_org_path = nf_core.subworkflows.SubworkflowInstall(
             self.pipeline_dir,
@@ -90,89 +103,11 @@ def setUp(self):
             force=False,
             sha="8c343b3c8a0925949783dc547666007c245c235b",
         )
-        self.mods_install = nf_core.modules.ModuleInstall(self.pipeline_dir, prompt=False, force=True)
+        self.mods_install = nf_core.modules.install.ModuleInstall(self.pipeline_dir, prompt=False, force=True)
 
         # Set up remove objects
         self.subworkflow_remove = nf_core.subworkflows.SubworkflowRemove(self.pipeline_dir)
 
-    def tearDown(self):
-        """Clean up temporary files and folders"""
-
-        if os.path.exists(self.tmp_dir):
-            shutil.rmtree(self.tmp_dir)
-
-    ################################################
-    # Test of the individual subworkflow commands. #
-    ################################################
-
-    from .subworkflows.create import (  # type: ignore[misc]
-        test_subworkflows_create_fail_exists,
-        test_subworkflows_create_nfcore_modules,
-        test_subworkflows_create_succeed,
-        test_subworkflows_migrate,
-        test_subworkflows_migrate_no_delete,
-    )
-    from .subworkflows.info import (  # type: ignore[misc]
-        test_subworkflows_info_in_modules_repo,
-        test_subworkflows_info_local,
-        test_subworkflows_info_remote,
-        test_subworkflows_info_remote_gitlab,
-    )
-    from .subworkflows.install import (  # type: ignore[misc]
-        test_subworkflow_install_nopipeline,
-        test_subworkflows_install_alternate_remote,
-        test_subworkflows_install_bam_sort_stats_samtools,
-        test_subworkflows_install_bam_sort_stats_samtools_twice,
-        test_subworkflows_install_different_branch_fail,
-        test_subworkflows_install_emptypipeline,
-        test_subworkflows_install_from_gitlab,
-        test_subworkflows_install_nosubworkflow,
-        test_subworkflows_install_tracking,
-        test_subworkflows_install_tracking_added_already_installed,
-        test_subworkflows_install_tracking_added_super_subworkflow,
-    )
-    from .subworkflows.lint import (  # type: ignore[misc]
-        test_subworkflows_absent_version,
-        test_subworkflows_empty_file_in_snapshot,
-        test_subworkflows_empty_file_in_stub_snapshot,
-        test_subworkflows_incorrect_tags_yml_key,
-        test_subworkflows_incorrect_tags_yml_values,
-        test_subworkflows_lint,
-        test_subworkflows_lint_capitalization_fail,
-        test_subworkflows_lint_empty,
-        test_subworkflows_lint_gitlab_subworkflows,
-        test_subworkflows_lint_include_multiple_alias,
-        test_subworkflows_lint_less_than_two_modules_warning,
-        test_subworkflows_lint_multiple_remotes,
-        test_subworkflows_lint_new_subworkflow,
-        test_subworkflows_lint_no_gitlab,
-        test_subworkflows_lint_snapshot_file,
-        test_subworkflows_lint_snapshot_file_missing_fail,
-        test_subworkflows_lint_snapshot_file_not_needed,
-        test_subworkflows_missing_tags_yml,
-    )
-    from .subworkflows.list import (  # type: ignore[misc]
-        test_subworkflows_install_and_list_subworkflows,
-        test_subworkflows_install_gitlab_and_list_subworkflows,
-        test_subworkflows_list_remote,
-        test_subworkflows_list_remote_gitlab,
-    )
-    from .subworkflows.remove import (  # type: ignore[misc]
-        test_subworkflows_remove_included_subworkflow,
-        test_subworkflows_remove_one_of_two_subworkflow,
-        test_subworkflows_remove_subworkflow,
-        test_subworkflows_remove_subworkflow_keep_installed_module,
-    )
-    from .subworkflows.update import (  # type: ignore[misc]
-        test_install_and_update,
-        test_install_at_hash_and_update,
-        test_install_at_hash_and_update_and_save_diff_to_file,
-        test_update_all,
-        test_update_all_linked_components_from_subworkflow,
-        test_update_all_subworkflows_from_module,
-        test_update_change_of_included_modules,
-        test_update_with_config_dont_update,
-        test_update_with_config_fix_all,
-        test_update_with_config_fixed_version,
-        test_update_with_config_no_updates,
-    )
+    @pytest.fixture(autouse=True)
+    def _use_caplog(self, caplog):
+        self.caplog = caplog
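Both refactored suites replace their long test-function import lists with the same three-line autouse fixture. The point of the pattern, sketched standalone below (the test class and log message are illustrative; only the fixture itself comes from this diff): unittest.TestCase methods cannot request pytest fixtures such as caplog directly, but an autouse fixture can, and it can stash the fixture on self for the tests to use.

import logging
import unittest

import pytest


class TestWithCapturedLogs(unittest.TestCase):
    @pytest.fixture(autouse=True)
    def _use_caplog(self, caplog):
        # bridge pytest's caplog fixture into unittest-style test methods
        self.caplog = caplog

    def test_warning_is_captured(self):
        logging.getLogger(__name__).warning("something to check")
        assert "something to check" in self.caplog.text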
@mock.patch("os.makedirs") @@ -161,7 +136,7 @@ def test_pip_erroneous_package(self): nf_core.utils.pip_package("not_a_package=1.0") def test_get_repo_releases_branches_nf_core(self): - wfs = nf_core.list.Workflows() + wfs = nf_core.pipelines.list.Workflows() wfs.get_remote_workflows() pipeline, wf_releases, wf_branches = nf_core.utils.get_repo_releases_branches("methylseq", wfs) for r in wf_releases: @@ -172,7 +147,7 @@ def test_get_repo_releases_branches_nf_core(self): assert "dev" in wf_branches.keys() def test_get_repo_releases_branches_not_nf_core(self): - wfs = nf_core.list.Workflows() + wfs = nf_core.pipelines.list.Workflows() wfs.get_remote_workflows() pipeline, wf_releases, wf_branches = nf_core.utils.get_repo_releases_branches("MultiQC/MultiQC", wfs) for r in wf_releases: @@ -183,57 +158,50 @@ def test_get_repo_releases_branches_not_nf_core(self): assert "main" in wf_branches.keys() def test_get_repo_releases_branches_not_exists(self): - wfs = nf_core.list.Workflows() + wfs = nf_core.pipelines.list.Workflows() wfs.get_remote_workflows() with pytest.raises(AssertionError): nf_core.utils.get_repo_releases_branches("made_up_pipeline", wfs) def test_get_repo_releases_branches_not_exists_slash(self): - wfs = nf_core.list.Workflows() + wfs = nf_core.pipelines.list.Workflows() wfs.get_remote_workflows() with pytest.raises(AssertionError): nf_core.utils.get_repo_releases_branches("made-up/pipeline", wfs) - -def test_validate_file_md5(): - # MD5(test) = d8e8fca2dc0f896fd7cb4cb0031ba249 - test_file = TEST_DATA_DIR / "test.txt" - test_file_md5 = "d8e8fca2dc0f896fd7cb4cb0031ba249" - different_md5 = "9e7b964750cf0bb08ee960fce356b6d6" - non_hex_string = "s" - assert nf_core.utils.validate_file_md5(test_file, test_file_md5) - with pytest.raises(IOError): - nf_core.utils.validate_file_md5(test_file, different_md5) - with pytest.raises(ValueError): - nf_core.utils.validate_file_md5(test_file, non_hex_string) - - -def test_nested_setitem(): - d = {"a": {"b": {"c": "value"}}} - nf_core.utils.nested_setitem(d, ["a", "b", "c"], "value new") - assert d["a"]["b"]["c"] == "value new" - assert d == {"a": {"b": {"c": "value new"}}} - - -def test_nested_delitem(): - d = {"a": {"b": {"c": "value"}}} - nf_core.utils.nested_delitem(d, ["a", "b", "c"]) - assert "c" not in d["a"]["b"] - assert d == {"a": {"b": {}}} - - -def test_set_wd(): - with tempfile.TemporaryDirectory() as tmpdirname: - with nf_core.utils.set_wd(tmpdirname): + def test_validate_file_md5(self): + # MD5(test) = d8e8fca2dc0f896fd7cb4cb0031ba249 + test_file = TEST_DATA_DIR / "test.txt" + test_file_md5 = "d8e8fca2dc0f896fd7cb4cb0031ba249" + different_md5 = "9e7b964750cf0bb08ee960fce356b6d6" + non_hex_string = "s" + assert nf_core.utils.validate_file_md5(test_file, test_file_md5) + with pytest.raises(IOError): + nf_core.utils.validate_file_md5(test_file, different_md5) + with pytest.raises(ValueError): + nf_core.utils.validate_file_md5(test_file, non_hex_string) + + def test_nested_setitem(self): + d = {"a": {"b": {"c": "value"}}} + nf_core.utils.nested_setitem(d, ["a", "b", "c"], "value new") + assert d["a"]["b"]["c"] == "value new" + assert d == {"a": {"b": {"c": "value new"}}} + + def test_nested_delitem(self): + d = {"a": {"b": {"c": "value"}}} + nf_core.utils.nested_delitem(d, ["a", "b", "c"]) + assert "c" not in d["a"]["b"] + assert d == {"a": {"b": {}}} + + def test_set_wd(self): + with nf_core.utils.set_wd(self.tmp_dir): context_wd = Path().resolve() - assert context_wd == Path(tmpdirname).resolve() + assert context_wd == 
+            assert context_wd == Path(self.tmp_dir).resolve()
         assert context_wd != Path().resolve()
 
-
-def test_set_wd_revert_on_raise():
-    wd_before_context = Path().resolve()
-    with tempfile.TemporaryDirectory() as tmpdirname:
+    def test_set_wd_revert_on_raise(self):
+        wd_before_context = Path().resolve()
         with pytest.raises(Exception):
-            with nf_core.utils.set_wd(tmpdirname):
+            with nf_core.utils.set_wd(self.tmp_dir):
                 raise Exception
-    assert wd_before_context == Path().resolve()
+        assert wd_before_context == Path().resolve()
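The two rewritten set_wd tests above exercise the context-manager contract of nf_core.utils.set_wd: change into the given directory for the duration of the block, and restore the previous working directory afterwards, even when the block raises. A standalone sketch of that contract (not from this diff; the temporary directory is illustrative):

import tempfile
from pathlib import Path

import nf_core.utils

before = Path().resolve()
with tempfile.TemporaryDirectory() as tmp:
    with nf_core.utils.set_wd(tmp):
        # inside the block, the working directory is the given path
        assert Path().resolve() == Path(tmp).resolve()
# on exit, the previous working directory is restored
assert Path().resolve() == before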
diff --git a/tests/utils.py b/tests/utils.py
index 89c132881..022b91227 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -2,16 +2,21 @@
 Helper functions for tests
 """
 
+import filecmp
 import functools
-import os
 import tempfile
+from pathlib import Path
 from typing import Any, Callable, Tuple
 
 import responses
+import yaml
 
-import nf_core.create
 import nf_core.modules
+import nf_core.pipelines.create.create
+from nf_core import __version__
+from nf_core.utils import NFCoreTemplateConfig, NFCoreYamlConfig
 
+TEST_DATA_DIR = Path(__file__).parent / "data"
 OLD_TRIMGALORE_SHA = "9b7a3bdefeaad5d42324aa7dd50f87bea1b04386"
 OLD_TRIMGALORE_BRANCH = "mimic-old-trimgalore"
 GITLAB_URL = "https://gitlab.com/nf-core/modules-test.git"
@@ -93,18 +98,55 @@ def mock_biocontainers_api_calls(rsps: responses.RequestsMock, module: str, vers
     rsps.get(biocontainers_api_url, json=biocontainers_mock, status=200)
 
 
-def create_tmp_pipeline() -> Tuple[str, str, str, str]:
+def mock_biotools_api_calls(rsps: responses.RequestsMock, module: str) -> None:
+    """Mock biotools api calls for module"""
+    biotools_api_url = f"https://bio.tools/api/t/?q={module}&format=json"
+    biotools_mock = {
+        "list": [{"name": "Bpipe", "biotoolsCURIE": "biotools:bpipe"}],
+    }
+    rsps.get(biotools_api_url, json=biotools_mock, status=200)
+
+
+def create_tmp_pipeline(no_git: bool = False) -> Tuple[Path, Path, str, Path]:
     """Create a new Pipeline for testing"""
 
-    tmp_dir = tempfile.mkdtemp()
-    root_repo_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
-    template_dir = os.path.join(root_repo_dir, "nf_core", "pipeline-template")
-    pipeline_name = "mypipeline"
-    pipeline_dir = os.path.join(tmp_dir, pipeline_name)
+    tmp_dir = Path(tempfile.TemporaryDirectory().name)
+    root_repo_dir = Path(__file__).resolve().parent.parent
+    template_dir = root_repo_dir / "nf_core" / "pipeline-template"
+    pipeline_name = "testpipeline"
+    pipeline_dir = tmp_dir / pipeline_name
+    pipeline_dir.mkdir(parents=True)
+
+    nf_core_yml = NFCoreYamlConfig(
+        nf_core_version=__version__,
+        repository_type="modules",
+        org_path="nf-core",
+        lint=None,
+        template=NFCoreTemplateConfig(
+            name="testpipeline",
+            author="me",
+            description="it is mine",
+            org="nf-core",
+            version=None,
+            force=True,
+            is_nfcore=None,
+            skip_features=None,
+            outdir=None,
+        ),
+        bump_version=None,
+    )
+    with open(str(Path(pipeline_dir, ".nf-core.yml")), "w") as fh:
+        yaml.dump(nf_core_yml.model_dump(), fh)
 
-    nf_core.create.PipelineCreate(
-        pipeline_name, "it is mine", "me", no_git=True, outdir=pipeline_dir, plain=True
+    nf_core.pipelines.create.create.PipelineCreate(
+        pipeline_name, "it is mine", "me", no_git=no_git, outdir=pipeline_dir, force=True
     ).init_pipeline()
 
     # return values to instance variables for later use in test methods
     return tmp_dir, template_dir, pipeline_name, pipeline_dir
+
+
+def cmp_component(dir1: Path, dir2: Path) -> bool:
+    """Compare two versions of the same component"""
+    files = ["main.nf", "meta.yml"]
+    return all(filecmp.cmp(dir1 / f, dir2 / f, shallow=False) for f in files)
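The reworked helpers in tests/utils.py are meant to compose inside tests. A minimal sketch of how the two new ones behave, assuming it is run from a checkout of this repo so tests.utils is importable; the directory contents and the query are made up for illustration:

import tempfile
from pathlib import Path

import requests
import responses

from tests.utils import cmp_component, mock_biotools_api_calls

# cmp_component(): two directories count as the same component version
# when their main.nf and meta.yml are byte-for-byte identical
dir_a, dir_b = Path(tempfile.mkdtemp()), Path(tempfile.mkdtemp())
for d in (dir_a, dir_b):
    (d / "main.nf").write_text("process FOO {}\n")
    (d / "meta.yml").write_text("name: foo\n")
assert cmp_component(dir_a, dir_b)

# mock_biotools_api_calls(): the mocked bio.tools endpoint answers only
# while the RequestsMock context is active
with responses.RequestsMock() as rsps:
    mock_biotools_api_calls(rsps, "bpipe")
    hits = requests.get("https://bio.tools/api/t/?q=bpipe&format=json").json()
    assert hits["list"][0]["name"] == "Bpipe"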