From 513de9528235a5d90ed4c7f09427c9d399c384e5 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Mon, 21 Aug 2023 00:15:13 +0200 Subject: [PATCH 001/737] WIP: First stab at writing some Textual --- nf_core/__main__.py | 43 ++++++ nf_core/pipelines/create.py | 273 ++++++++++++++++++++++++++++++++++ nf_core/pipelines/create.tcss | 32 ++++ requirements-dev.txt | 1 + requirements.txt | 4 +- 5 files changed, 352 insertions(+), 1 deletion(-) create mode 100644 nf_core/pipelines/create.py create mode 100644 nf_core/pipelines/create.tcss diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 72762ff02..f3f2d2bf5 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -488,6 +488,49 @@ def lint(ctx, dir, release, fix, key, show_passed, fail_ignored, fail_warned, ma sys.exit(1) +# nf-core pipelines subcommands +@nf_core_cli.group() +@click.pass_context +def pipelines(ctx): + """ + Commands to manage nf-core pipelines. + """ + # ensure that ctx.obj exists and is a dict (in case `cli()` is called + # by means other than the `if` block below) + ctx.ensure_object(dict) + + +# nf-core pipeline install +@pipelines.command("create") +@click.pass_context +@click.option( + "-n", + "--name", + type=str, + help="The name of your new pipeline", +) +@click.option("-d", "--description", type=str, help="A short description of your pipeline") +@click.option("-a", "--author", type=str, help="Name of the main author(s)") +@click.option("--version", type=str, default="1.0dev", help="The initial version number to use") +@click.option("-f", "--force", is_flag=True, default=False, help="Overwrite output directory if it already exists") +@click.option("-o", "--outdir", help="Output directory for new pipeline (default: pipeline name)") +@click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") +@click.option("--plain", is_flag=True, help="Use the standard nf-core template") +def create_pipeline(ctx, name, description, author, version, force, outdir, 
template_yaml, plain): + """ + Create a new pipeline using the nf-core template. + + Uses the nf-core template to make a skeleton Nextflow pipeline with all required + files, boilerplate code and best-practices. + \n\n + Run without any command line arguments to use an interactive interface. + """ + from nf_core.pipelines.create import PipelineCreateApp + + app = PipelineCreateApp() + app.run() + + # nf-core modules subcommands @nf_core_cli.group() @click.option( diff --git a/nf_core/pipelines/create.py b/nf_core/pipelines/create.py new file mode 100644 index 000000000..970d2dd8c --- /dev/null +++ b/nf_core/pipelines/create.py @@ -0,0 +1,273 @@ +"""A Textual app to create a pipeline.""" +from pydantic import BaseModel, field_validator, Field +import re +from typing import Optional +from textual import on +from textual.app import App, ComposeResult +from textual.screen import Screen +from textual.containers import Horizontal, Center +from textual.validation import Function, Validator, ValidationResult +from textual.widgets import Button, Footer, Header, Static, Markdown, Input, Pretty +from textwrap import dedent + + +class CreateConfig(BaseModel): + """Pydantic model for the nf-core create config.""" + + org: Optional[str] = None + name: Optional[str] = None + description: Optional[str] = None + author: Optional[str] = None + version: Optional[str] = None + force: Optional[bool] = None + outdir: Optional[str] = None + template_yaml: Optional[str] = None + is_nfcore: Optional[bool] = None + + @field_validator("name") + @classmethod + def name_nospecialchars(cls, v: str) -> str: + """Check that the pipeline name is simple.""" + if not re.match(r"^[a-z]+$", v): + raise ValueError("Must be lowercase without punctuation.") + return v + + @field_validator("org", "description", "author") + @classmethod + def notempty(cls, v: str) -> str: + """Check that string values are not empty.""" + if v.strip() == "": + raise ValueError("Cannot be left empty.") + return v + + +# 
Initialise as empty +TEMPLATE_CONFIG = CreateConfig() + + +class TextInput(Static): + """Widget for text inputs. + + Provides standard interface for a text input with help text + and validation messages. + """ + + def __init__(self, field_id, placeholder, description, default=None, **kwargs) -> None: + """Initialise the widget with our values. + + Pass on kwargs upstream for standard usage.""" + super().__init__(**kwargs) + self.field_id: str = field_id + self.placeholder: str = placeholder + self.description: str = description + self.default: str = default + + def compose(self) -> ComposeResult: + yield Static(self.description, classes="field_help") + yield Input( + placeholder=self.placeholder, + validators=[ValidateConfig(self.field_id)], + value=self.default, + ) + yield Static(classes="validation_msg") + + @on(Input.Changed) + def show_invalid_reasons(self, event: Input.Changed) -> None: + """Validate the text input and show errors if invalid.""" + if not event.validation_result.is_valid: + self.query_one(".validation_msg").update("\n".join(event.validation_result.failure_descriptions)) + else: + self.query_one(".validation_msg").update("") + + +class ValidateConfig(Validator): + """Validate any config value, using Pydantic.""" + + def __init__(self, key) -> None: + """Initialise the validator with the model key to validate.""" + super().__init__() + self.key = key + + def validate(self, value: str) -> ValidationResult: + """Try creating a Pydantic object with this key set to this value. + + If it fails, return the error messages.""" + try: + CreateConfig(**{f"{self.key}": value}) + return self.success() + except ValueError as e: + return self.failure(", ".join([err["msg"] for err in e.errors()])) + + +class WelcomeScreen(Screen): + """A welcome screen for the app.""" + + def compose(self) -> ComposeResult: + yield Header() + yield Footer() + yield Static( + f"\n[green]{' ' * 40},--.[grey39]/[green],-." 
+ + "\n[blue] ___ __ __ __ ___ [green]/,-._.--~\\" + + "\n[blue]|\ | |__ __ / ` / \ |__) |__ [yellow] } {" + + "\n[blue] | \| | \__, \__/ | \ |___ [green]\`-._,-`-," + + "\n[green] `._,._,'\n", + id="logo", + ) + yield Markdown( + dedent( + """ + # nf-core create + + This app will help you create a new nf-core pipeline. + It uses the nf-core pipeline template, which is kept at + within the [nf-core/tools repository](https://github.com/nf-core/tools). + + Using this tool is mandatory when making a pipeline that may + be part of the nf-core community collection at some point. + However, this tool can also be used to create pipelines that will + never be part of nf-core. You can still benefit from the community + best practices for your own workflow. + """ + ) + ) + yield Center(Button("Let's go!", id="start", variant="success"), classes="cta") + + +class BasicDetails(Screen): + """Name, description, author, etc.""" + + def compose(self) -> ComposeResult: + yield Header() + yield Footer() + yield Markdown( + dedent( + """ + # Basic details + """ + ) + ) + with Horizontal(): + yield TextInput( + "org", + "Organisation", + "GitHub organisation", + "nf-core", + classes="column", + ) + yield TextInput( + "name", + "Pipeline Name", + "Workflow name", + classes="column", + ) + + yield TextInput( + "description", + "Description", + "A short description of your pipeline.", + ) + yield TextInput( + "author", + "Author(s)", + "Name of the main author / authors", + ) + yield Center( + Button("Next", variant="success"), + classes="cta", + ) + + @on(Button.Pressed) + def on_button_pressed(self, event: Button.Pressed) -> None: + """Save fields to the config.""" + config = {} + for text_input in self.query("TextInput"): + this_input = text_input.query_one(Input) + this_input.validate(this_input.value) + config[text_input.field_id] = this_input.value + try: + TEMPLATE_CONFIG = CreateConfig(**config) + self.parent.switch_screen("choose_type") + except ValueError as e: + pass + + 
+class ChoosePipelineType(Screen): + """Choose whether this will be an nf-core pipeline or not.""" + + def compose(self) -> ComposeResult: + yield Header() + yield Footer() + yield Markdown( + dedent( + """ + # To nf-core or not to nf-core? + + Next, we need to know what kind of pipeline this will be. + + Choose _"nf-core"_ if: + + * You want your pipeline to be part of the nf-core community + * You think that there's an outside chance that it ever _could_ be part of nf-core + + Choose _"Custom"_ if: + + * Your pipeline will _never_ be part of nf-core + * You want full control over *all* features that are included from the template + (including those that are mandatory for nf-core). + """ + ) + ) + yield Center( + Button("nf-core", id="type_nfcore", variant="success"), + Button("Custom", id="type_custom", variant="primary"), + classes="cta", + ) + yield Markdown( + dedent( + """ + ## Not sure? What's the difference? + + Choosing _"nf-core"_ effectively pre-selects the following template features: + + * GitHub Actions Continuous Integration (CI) configuration for the following: + * Small-scale (GitHub) and large-scale (AWS) tests + * Code format linting with prettier + * Auto-fix functionality using @nf-core-bot + * Marking old issues as stale + * Inclusion of shared nf-core config profiles + """ + ) + ) + + +class PipelineCreateApp(App): + """A Textual app to manage stopwatches.""" + + CSS_PATH = "create.tcss" + TITLE = "nf-core create" + SUB_TITLE = "Create a new pipeline with the nf-core pipeline template" + BINDINGS = [ + ("d", "toggle_dark", "Toggle dark mode"), + ("q", "quit", "Quit"), + ] + SCREENS = { + "welcome": WelcomeScreen(), + "basic_details": BasicDetails(), + "choose_type": ChoosePipelineType(), + } + + def on_mount(self) -> None: + self.push_screen("welcome") + + def on_button_pressed(self, event: Button.Pressed) -> None: + """Handle all button pressed events.""" + if event.button.id == "start": + self.switch_screen("basic_details") + elif 
event.button.id == "type_nfcore": + self.switch_screen("type_nfcore") + elif event.button.id == "type_custom": + self.switch_screen("type_custom") + + def action_toggle_dark(self) -> None: + """An action to toggle dark mode.""" + self.dark = not self.dark diff --git a/nf_core/pipelines/create.tcss b/nf_core/pipelines/create.tcss new file mode 100644 index 000000000..079d51659 --- /dev/null +++ b/nf_core/pipelines/create.tcss @@ -0,0 +1,32 @@ +#logo { + text-align:center; +} +.cta { + layout: horizontal; + margin-bottom: 1; +} +.cta Button { + margin-left: 3; + margin-right: 3; +} + +.field_help { + padding: 1 1 0 1; + color: $text-muted; + text-style: italic; +} +.validation_msg { + padding: 0 1; + color: $error; +} +.-valid { + border: tall $success-darken-3; +} + +Horizontal{ + width: 100%; + height: auto; +} +.column { + width: 1fr; +} diff --git a/requirements-dev.txt b/requirements-dev.txt index 360f6ff87..23d540ca8 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -6,3 +6,4 @@ pytest-datafiles responses Sphinx sphinx-rtd-theme +textual-dev>=1.1.0 diff --git a/requirements.txt b/requirements.txt index 9cc7fc6be..b5fc54259 100644 --- a/requirements.txt +++ b/requirements.txt @@ -7,8 +7,9 @@ markdown>=3.3 packaging pre-commit prompt_toolkit>=3.0.3 -pytest>=7.0.0 +pydantic>=2.2.1 pytest-workflow>=1.6.0 +pytest>=7.0.0 pyyaml questionary>=1.8.0 refgenie @@ -17,3 +18,4 @@ requests_cache rich-click>=1.6.1 rich>=13.3.1 tabulate +textual>=0.33.0 From 0e8204e45358acf20e15c5e2ce304737ced0066e Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Mon, 21 Aug 2023 14:15:38 +0200 Subject: [PATCH 002/737] Refactor: Break into multiple files --- nf_core/pipelines/create.py | 273 --------------------- nf_core/pipelines/create/__init__.py | 52 ++++ nf_core/pipelines/create/basicdetails.py | 67 +++++ nf_core/pipelines/{ => create}/create.tcss | 0 nf_core/pipelines/create/pipelinetype.py | 49 ++++ nf_core/pipelines/create/utils.py | 91 +++++++ 
nf_core/pipelines/create/welcome.py | 36 +++ 7 files changed, 295 insertions(+), 273 deletions(-) delete mode 100644 nf_core/pipelines/create.py create mode 100644 nf_core/pipelines/create/__init__.py create mode 100644 nf_core/pipelines/create/basicdetails.py rename nf_core/pipelines/{ => create}/create.tcss (100%) create mode 100644 nf_core/pipelines/create/pipelinetype.py create mode 100644 nf_core/pipelines/create/utils.py create mode 100644 nf_core/pipelines/create/welcome.py diff --git a/nf_core/pipelines/create.py b/nf_core/pipelines/create.py deleted file mode 100644 index 970d2dd8c..000000000 --- a/nf_core/pipelines/create.py +++ /dev/null @@ -1,273 +0,0 @@ -"""A Textual app to create a pipeline.""" -from pydantic import BaseModel, field_validator, Field -import re -from typing import Optional -from textual import on -from textual.app import App, ComposeResult -from textual.screen import Screen -from textual.containers import Horizontal, Center -from textual.validation import Function, Validator, ValidationResult -from textual.widgets import Button, Footer, Header, Static, Markdown, Input, Pretty -from textwrap import dedent - - -class CreateConfig(BaseModel): - """Pydantic model for the nf-core create config.""" - - org: Optional[str] = None - name: Optional[str] = None - description: Optional[str] = None - author: Optional[str] = None - version: Optional[str] = None - force: Optional[bool] = None - outdir: Optional[str] = None - template_yaml: Optional[str] = None - is_nfcore: Optional[bool] = None - - @field_validator("name") - @classmethod - def name_nospecialchars(cls, v: str) -> str: - """Check that the pipeline name is simple.""" - if not re.match(r"^[a-z]+$", v): - raise ValueError("Must be lowercase without punctuation.") - return v - - @field_validator("org", "description", "author") - @classmethod - def notempty(cls, v: str) -> str: - """Check that string values are not empty.""" - if v.strip() == "": - raise ValueError("Cannot be left empty.") 
- return v - - -# Initialise as empty -TEMPLATE_CONFIG = CreateConfig() - - -class TextInput(Static): - """Widget for text inputs. - - Provides standard interface for a text input with help text - and validation messages. - """ - - def __init__(self, field_id, placeholder, description, default=None, **kwargs) -> None: - """Initialise the widget with our values. - - Pass on kwargs upstream for standard usage.""" - super().__init__(**kwargs) - self.field_id: str = field_id - self.placeholder: str = placeholder - self.description: str = description - self.default: str = default - - def compose(self) -> ComposeResult: - yield Static(self.description, classes="field_help") - yield Input( - placeholder=self.placeholder, - validators=[ValidateConfig(self.field_id)], - value=self.default, - ) - yield Static(classes="validation_msg") - - @on(Input.Changed) - def show_invalid_reasons(self, event: Input.Changed) -> None: - """Validate the text input and show errors if invalid.""" - if not event.validation_result.is_valid: - self.query_one(".validation_msg").update("\n".join(event.validation_result.failure_descriptions)) - else: - self.query_one(".validation_msg").update("") - - -class ValidateConfig(Validator): - """Validate any config value, using Pydantic.""" - - def __init__(self, key) -> None: - """Initialise the validator with the model key to validate.""" - super().__init__() - self.key = key - - def validate(self, value: str) -> ValidationResult: - """Try creating a Pydantic object with this key set to this value. - - If it fails, return the error messages.""" - try: - CreateConfig(**{f"{self.key}": value}) - return self.success() - except ValueError as e: - return self.failure(", ".join([err["msg"] for err in e.errors()])) - - -class WelcomeScreen(Screen): - """A welcome screen for the app.""" - - def compose(self) -> ComposeResult: - yield Header() - yield Footer() - yield Static( - f"\n[green]{' ' * 40},--.[grey39]/[green],-." 
- + "\n[blue] ___ __ __ __ ___ [green]/,-._.--~\\" - + "\n[blue]|\ | |__ __ / ` / \ |__) |__ [yellow] } {" - + "\n[blue] | \| | \__, \__/ | \ |___ [green]\`-._,-`-," - + "\n[green] `._,._,'\n", - id="logo", - ) - yield Markdown( - dedent( - """ - # nf-core create - - This app will help you create a new nf-core pipeline. - It uses the nf-core pipeline template, which is kept at - within the [nf-core/tools repository](https://github.com/nf-core/tools). - - Using this tool is mandatory when making a pipeline that may - be part of the nf-core community collection at some point. - However, this tool can also be used to create pipelines that will - never be part of nf-core. You can still benefit from the community - best practices for your own workflow. - """ - ) - ) - yield Center(Button("Let's go!", id="start", variant="success"), classes="cta") - - -class BasicDetails(Screen): - """Name, description, author, etc.""" - - def compose(self) -> ComposeResult: - yield Header() - yield Footer() - yield Markdown( - dedent( - """ - # Basic details - """ - ) - ) - with Horizontal(): - yield TextInput( - "org", - "Organisation", - "GitHub organisation", - "nf-core", - classes="column", - ) - yield TextInput( - "name", - "Pipeline Name", - "Workflow name", - classes="column", - ) - - yield TextInput( - "description", - "Description", - "A short description of your pipeline.", - ) - yield TextInput( - "author", - "Author(s)", - "Name of the main author / authors", - ) - yield Center( - Button("Next", variant="success"), - classes="cta", - ) - - @on(Button.Pressed) - def on_button_pressed(self, event: Button.Pressed) -> None: - """Save fields to the config.""" - config = {} - for text_input in self.query("TextInput"): - this_input = text_input.query_one(Input) - this_input.validate(this_input.value) - config[text_input.field_id] = this_input.value - try: - TEMPLATE_CONFIG = CreateConfig(**config) - self.parent.switch_screen("choose_type") - except ValueError as e: - pass - - 
-class ChoosePipelineType(Screen): - """Choose whether this will be an nf-core pipeline or not.""" - - def compose(self) -> ComposeResult: - yield Header() - yield Footer() - yield Markdown( - dedent( - """ - # To nf-core or not to nf-core? - - Next, we need to know what kind of pipeline this will be. - - Choose _"nf-core"_ if: - - * You want your pipeline to be part of the nf-core community - * You think that there's an outside chance that it ever _could_ be part of nf-core - - Choose _"Custom"_ if: - - * Your pipeline will _never_ be part of nf-core - * You want full control over *all* features that are included from the template - (including those that are mandatory for nf-core). - """ - ) - ) - yield Center( - Button("nf-core", id="type_nfcore", variant="success"), - Button("Custom", id="type_custom", variant="primary"), - classes="cta", - ) - yield Markdown( - dedent( - """ - ## Not sure? What's the difference? - - Choosing _"nf-core"_ effectively pre-selects the following template features: - - * GitHub Actions Continuous Integration (CI) configuration for the following: - * Small-scale (GitHub) and large-scale (AWS) tests - * Code format linting with prettier - * Auto-fix functionality using @nf-core-bot - * Marking old issues as stale - * Inclusion of shared nf-core config profiles - """ - ) - ) - - -class PipelineCreateApp(App): - """A Textual app to manage stopwatches.""" - - CSS_PATH = "create.tcss" - TITLE = "nf-core create" - SUB_TITLE = "Create a new pipeline with the nf-core pipeline template" - BINDINGS = [ - ("d", "toggle_dark", "Toggle dark mode"), - ("q", "quit", "Quit"), - ] - SCREENS = { - "welcome": WelcomeScreen(), - "basic_details": BasicDetails(), - "choose_type": ChoosePipelineType(), - } - - def on_mount(self) -> None: - self.push_screen("welcome") - - def on_button_pressed(self, event: Button.Pressed) -> None: - """Handle all button pressed events.""" - if event.button.id == "start": - self.switch_screen("basic_details") - elif 
event.button.id == "type_nfcore": - self.switch_screen("type_nfcore") - elif event.button.id == "type_custom": - self.switch_screen("type_custom") - - def action_toggle_dark(self) -> None: - """An action to toggle dark mode.""" - self.dark = not self.dark diff --git a/nf_core/pipelines/create/__init__.py b/nf_core/pipelines/create/__init__.py new file mode 100644 index 000000000..4de661675 --- /dev/null +++ b/nf_core/pipelines/create/__init__.py @@ -0,0 +1,52 @@ +"""A Textual app to create a pipeline.""" +from pydantic import BaseModel, field_validator, Field +import re +from typing import Optional +from textual import on +from textual.app import App, ComposeResult +from textual.screen import Screen +from textual.containers import Horizontal, Center +from textual.validation import Function, Validator, ValidationResult +from textual.widgets import Button, Footer, Header, Static, Markdown, Input, Pretty +from textwrap import dedent + +from nf_core.pipelines.create.utils import CreateConfig +from nf_core.pipelines.create.welcome import WelcomeScreen +from nf_core.pipelines.create.basicdetails import BasicDetails +from nf_core.pipelines.create.pipelinetype import ChoosePipelineType + + +class PipelineCreateApp(App): + """A Textual app to manage stopwatches.""" + + CSS_PATH = "create.tcss" + TITLE = "nf-core create" + SUB_TITLE = "Create a new pipeline with the nf-core pipeline template" + BINDINGS = [ + ("d", "toggle_dark", "Toggle dark mode"), + ("q", "quit", "Quit"), + ] + SCREENS = { + "welcome": WelcomeScreen(), + "basic_details": BasicDetails(), + "choose_type": ChoosePipelineType(), + } + + # Initialise config as empty + TEMPLATE_CONFIG = CreateConfig() + + def on_mount(self) -> None: + self.push_screen("welcome") + + def on_button_pressed(self, event: Button.Pressed) -> None: + """Handle all button pressed events.""" + if event.button.id == "start": + self.switch_screen("basic_details") + elif event.button.id == "type_nfcore": + self.switch_screen("type_nfcore") 
+ elif event.button.id == "type_custom": + self.switch_screen("type_custom") + + def action_toggle_dark(self) -> None: + """An action to toggle dark mode.""" + self.dark = not self.dark diff --git a/nf_core/pipelines/create/basicdetails.py b/nf_core/pipelines/create/basicdetails.py new file mode 100644 index 000000000..63b99ed40 --- /dev/null +++ b/nf_core/pipelines/create/basicdetails.py @@ -0,0 +1,67 @@ +"""A Textual app to create a pipeline.""" +from textual import on +from textual.app import ComposeResult +from textual.screen import Screen +from textual.containers import Horizontal, Center +from textual.widgets import Button, Footer, Header, Markdown, Input +from textwrap import dedent + +from nf_core.pipelines.create.utils import CreateConfig, TextInput + + +class BasicDetails(Screen): + """Name, description, author, etc.""" + + def compose(self) -> ComposeResult: + yield Header() + yield Footer() + yield Markdown( + dedent( + """ + # Basic details + """ + ) + ) + with Horizontal(): + yield TextInput( + "org", + "Organisation", + "GitHub organisation", + "nf-core", + classes="column", + ) + yield TextInput( + "name", + "Pipeline Name", + "Workflow name", + classes="column", + ) + + yield TextInput( + "description", + "Description", + "A short description of your pipeline.", + ) + yield TextInput( + "author", + "Author(s)", + "Name of the main author / authors", + ) + yield Center( + Button("Next", variant="success"), + classes="cta", + ) + + @on(Button.Pressed) + def on_button_pressed(self, event: Button.Pressed) -> None: + """Save fields to the config.""" + config = {} + for text_input in self.query("TextInput"): + this_input = text_input.query_one(Input) + this_input.validate(this_input.value) + config[text_input.field_id] = this_input.value + try: + self.parent.TEMPLATE_CONFIG = CreateConfig(**config) + self.parent.switch_screen("choose_type") + except ValueError: + pass diff --git a/nf_core/pipelines/create.tcss b/nf_core/pipelines/create/create.tcss 
similarity index 100% rename from nf_core/pipelines/create.tcss rename to nf_core/pipelines/create/create.tcss diff --git a/nf_core/pipelines/create/pipelinetype.py b/nf_core/pipelines/create/pipelinetype.py new file mode 100644 index 000000000..72624c5f8 --- /dev/null +++ b/nf_core/pipelines/create/pipelinetype.py @@ -0,0 +1,49 @@ +from textual.app import ComposeResult +from textual.screen import Screen +from textual.containers import Center +from textual.widgets import Button, Footer, Header, Markdown + +markdown_intro = """ +# To nf-core or not to nf-core? + +Next, we need to know what kind of pipeline this will be. + +Choose _"nf-core"_ if: + +* You want your pipeline to be part of the nf-core community +* You think that there's an outside chance that it ever _could_ be part of nf-core + +Choose _"Custom"_ if: + +* Your pipeline will _never_ be part of nf-core +* You want full control over *all* features that are included from the template + (including those that are mandatory for nf-core). +""" + +markdown_details = """ +## Not sure? What's the difference? 
+ +Choosing _"nf-core"_ effectively pre-selects the following template features: + +* GitHub Actions Continuous Integration (CI) configuration for the following: + * Small-scale (GitHub) and large-scale (AWS) tests + * Code format linting with prettier + * Auto-fix functionality using @nf-core-bot + * Marking old issues as stale +* Inclusion of shared nf-core config profiles +""" + + +class ChoosePipelineType(Screen): + """Choose whether this will be an nf-core pipeline or not.""" + + def compose(self) -> ComposeResult: + yield Header() + yield Footer() + yield Markdown(markdown_intro) + yield Center( + Button("nf-core", id="type_nfcore", variant="success"), + Button("Custom", id="type_custom", variant="primary"), + classes="cta", + ) + yield Markdown(markdown_details) diff --git a/nf_core/pipelines/create/utils.py b/nf_core/pipelines/create/utils.py new file mode 100644 index 000000000..017040d7d --- /dev/null +++ b/nf_core/pipelines/create/utils.py @@ -0,0 +1,91 @@ +from pydantic import BaseModel, field_validator +import re +from typing import Optional +from textual import on +from textual.app import ComposeResult +from textual.validation import Validator, ValidationResult +from textual.widgets import Static, Input + + +class CreateConfig(BaseModel): + """Pydantic model for the nf-core create config.""" + + org: Optional[str] = None + name: Optional[str] = None + description: Optional[str] = None + author: Optional[str] = None + version: Optional[str] = None + force: Optional[bool] = None + outdir: Optional[str] = None + template_yaml: Optional[str] = None + is_nfcore: Optional[bool] = None + + @field_validator("name") + @classmethod + def name_nospecialchars(cls, v: str) -> str: + """Check that the pipeline name is simple.""" + if not re.match(r"^[a-z]+$", v): + raise ValueError("Must be lowercase without punctuation.") + return v + + @field_validator("org", "description", "author") + @classmethod + def notempty(cls, v: str) -> str: + """Check that string values 
are not empty.""" + if v.strip() == "": + raise ValueError("Cannot be left empty.") + return v + + +class TextInput(Static): + """Widget for text inputs. + + Provides standard interface for a text input with help text + and validation messages. + """ + + def __init__(self, field_id, placeholder, description, default=None, **kwargs) -> None: + """Initialise the widget with our values. + + Pass on kwargs upstream for standard usage.""" + super().__init__(**kwargs) + self.field_id: str = field_id + self.placeholder: str = placeholder + self.description: str = description + self.default: str = default + + def compose(self) -> ComposeResult: + yield Static(self.description, classes="field_help") + yield Input( + placeholder=self.placeholder, + validators=[ValidateConfig(self.field_id)], + value=self.default, + ) + yield Static(classes="validation_msg") + + @on(Input.Changed) + def show_invalid_reasons(self, event: Input.Changed) -> None: + """Validate the text input and show errors if invalid.""" + if not event.validation_result.is_valid: + self.query_one(".validation_msg").update("\n".join(event.validation_result.failure_descriptions)) + else: + self.query_one(".validation_msg").update("") + + +class ValidateConfig(Validator): + """Validate any config value, using Pydantic.""" + + def __init__(self, key) -> None: + """Initialise the validator with the model key to validate.""" + super().__init__() + self.key = key + + def validate(self, value: str) -> ValidationResult: + """Try creating a Pydantic object with this key set to this value. 
+ + If it fails, return the error messages.""" + try: + CreateConfig(**{f"{self.key}": value}) + return self.success() + except ValueError as e: + return self.failure(", ".join([err["msg"] for err in e.errors()])) diff --git a/nf_core/pipelines/create/welcome.py b/nf_core/pipelines/create/welcome.py new file mode 100644 index 000000000..2e75ec597 --- /dev/null +++ b/nf_core/pipelines/create/welcome.py @@ -0,0 +1,36 @@ +from textual.app import ComposeResult +from textual.screen import Screen +from textual.containers import Center +from textual.widgets import Button, Footer, Header, Static, Markdown + +markdown = """ +# nf-core create + +This app will help you create a new nf-core pipeline. +It uses the nf-core pipeline template, which is kept at +within the [nf-core/tools repository](https://github.com/nf-core/tools). + +Using this tool is mandatory when making a pipeline that may +be part of the nf-core community collection at some point. +However, this tool can also be used to create pipelines that will +never be part of nf-core. You can still benefit from the community +best practices for your own workflow. +""" + + +class WelcomeScreen(Screen): + """A welcome screen for the app.""" + + def compose(self) -> ComposeResult: + yield Header() + yield Footer() + yield Static( + f"\n[green]{' ' * 40},--.[grey39]/[green],-." 
+ + "\n[blue] ___ __ __ __ ___ [green]/,-._.--~\\" + + "\n[blue]|\ | |__ __ / ` / \ |__) |__ [yellow] } {" + + "\n[blue] | \| | \__, \__/ | \ |___ [green]\`-._,-`-," + + "\n[green] `._,._,'\n", + id="logo", + ) + yield Markdown(markdown) + yield Center(Button("Let's go!", id="start", variant="success"), classes="cta") From e37d2e522cda72e59fe259eb4a394708cf428bc7 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Mon, 21 Aug 2023 14:16:16 +0200 Subject: [PATCH 003/737] Remove unusued imports --- nf_core/pipelines/create/__init__.py | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/nf_core/pipelines/create/__init__.py b/nf_core/pipelines/create/__init__.py index 4de661675..7d339a1aa 100644 --- a/nf_core/pipelines/create/__init__.py +++ b/nf_core/pipelines/create/__init__.py @@ -1,14 +1,6 @@ """A Textual app to create a pipeline.""" -from pydantic import BaseModel, field_validator, Field -import re -from typing import Optional -from textual import on -from textual.app import App, ComposeResult -from textual.screen import Screen -from textual.containers import Horizontal, Center -from textual.validation import Function, Validator, ValidationResult -from textual.widgets import Button, Footer, Header, Static, Markdown, Input, Pretty -from textwrap import dedent +from textual.app import App +from textual.widgets import Button from nf_core.pipelines.create.utils import CreateConfig from nf_core.pipelines.create.welcome import WelcomeScreen From ee12eb5df27ae198a0b9f57bb96a8592da52bee6 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 22 Aug 2023 13:43:47 +0200 Subject: [PATCH 004/737] show error message when pressing enter --- nf_core/pipelines/create/utils.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/nf_core/pipelines/create/utils.py b/nf_core/pipelines/create/utils.py index 017040d7d..c5e85aeff 100644 --- a/nf_core/pipelines/create/utils.py +++ b/nf_core/pipelines/create/utils.py @@ -1,10 +1,11 @@ -from 
pydantic import BaseModel, field_validator import re from typing import Optional + +from pydantic import BaseModel, field_validator from textual import on from textual.app import ComposeResult -from textual.validation import Validator, ValidationResult -from textual.widgets import Static, Input +from textual.validation import ValidationResult, Validator +from textual.widgets import Input, Static class CreateConfig(BaseModel): @@ -64,7 +65,8 @@ def compose(self) -> ComposeResult: yield Static(classes="validation_msg") @on(Input.Changed) - def show_invalid_reasons(self, event: Input.Changed) -> None: + @on(Input.Submitted) + def show_invalid_reasons(self, event: Input.Changed | Input.Submitted) -> None: """Validate the text input and show errors if invalid.""" if not event.validation_result.is_valid: self.query_one(".validation_msg").update("\n".join(event.validation_result.failure_descriptions)) From dd18bcc90b520cc010a296812c5a93efa9507745 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 22 Aug 2023 15:32:04 +0200 Subject: [PATCH 005/737] show failure messages when button is pressed --- nf_core/pipelines/create/basicdetails.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/nf_core/pipelines/create/basicdetails.py b/nf_core/pipelines/create/basicdetails.py index 63b99ed40..5ffae135c 100644 --- a/nf_core/pipelines/create/basicdetails.py +++ b/nf_core/pipelines/create/basicdetails.py @@ -1,10 +1,11 @@ """A Textual app to create a pipeline.""" +from textwrap import dedent + from textual import on from textual.app import ComposeResult +from textual.containers import Center, Horizontal from textual.screen import Screen -from textual.containers import Horizontal, Center -from textual.widgets import Button, Footer, Header, Markdown, Input -from textwrap import dedent +from textual.widgets import Button, Footer, Header, Input, Markdown from nf_core.pipelines.create.utils import CreateConfig, TextInput @@ -58,8 +59,12 @@ def 
on_button_pressed(self, event: Button.Pressed) -> None: config = {} for text_input in self.query("TextInput"): this_input = text_input.query_one(Input) - this_input.validate(this_input.value) + validation_result = this_input.validate(this_input.value) config[text_input.field_id] = this_input.value + if not validation_result.is_valid: + text_input.query_one(".validation_msg").update("\n".join(validation_result.failure_descriptions)) + else: + text_input.query_one(".validation_msg").update("") try: self.parent.TEMPLATE_CONFIG = CreateConfig(**config) self.parent.switch_screen("choose_type") From 486f180f876c38a906687daa1b494db0b9239bcb Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 22 Aug 2023 17:20:59 +0200 Subject: [PATCH 006/737] add usegenomicdata screen (TODO on_button_pressed) --- nf_core/pipelines/create/__init__.py | 8 ++-- nf_core/pipelines/create/usegenomicdata.py | 47 ++++++++++++++++++++++ nf_core/pipelines/create/utils.py | 2 +- 3 files changed, 53 insertions(+), 4 deletions(-) create mode 100644 nf_core/pipelines/create/usegenomicdata.py diff --git a/nf_core/pipelines/create/__init__.py b/nf_core/pipelines/create/__init__.py index 7d339a1aa..5d8eee2bc 100644 --- a/nf_core/pipelines/create/__init__.py +++ b/nf_core/pipelines/create/__init__.py @@ -2,10 +2,11 @@ from textual.app import App from textual.widgets import Button -from nf_core.pipelines.create.utils import CreateConfig -from nf_core.pipelines.create.welcome import WelcomeScreen from nf_core.pipelines.create.basicdetails import BasicDetails from nf_core.pipelines.create.pipelinetype import ChoosePipelineType +from nf_core.pipelines.create.usegenomicdata import UseGenomicData +from nf_core.pipelines.create.utils import CreateConfig +from nf_core.pipelines.create.welcome import WelcomeScreen class PipelineCreateApp(App): @@ -22,6 +23,7 @@ class PipelineCreateApp(App): "welcome": WelcomeScreen(), "basic_details": BasicDetails(), "choose_type": ChoosePipelineType(), + "genomic_data": 
UseGenomicData(), } # Initialise config as empty @@ -37,7 +39,7 @@ def on_button_pressed(self, event: Button.Pressed) -> None: elif event.button.id == "type_nfcore": self.switch_screen("type_nfcore") elif event.button.id == "type_custom": - self.switch_screen("type_custom") + self.switch_screen("genomic_data") def action_toggle_dark(self) -> None: """An action to toggle dark mode.""" diff --git a/nf_core/pipelines/create/usegenomicdata.py b/nf_core/pipelines/create/usegenomicdata.py new file mode 100644 index 000000000..591e7893a --- /dev/null +++ b/nf_core/pipelines/create/usegenomicdata.py @@ -0,0 +1,47 @@ +from textual import on +from textual.app import ComposeResult +from textual.containers import Center +from textual.screen import Screen +from textual.widgets import Button, Footer, Header, Markdown + +markdown_intro = """ +## You are now creating a custom pipeline + +# Will your pipeline use genomic data? + +Nf-core pipelines are configured to use a copy of the most common reference genome files. + +By selecting this option, your pipeline will include a configuration file specifying the paths to these files. + +The required code to use these files will also be included in the template. +When the pipeline user provides an appropriate genome key, +the pipeline will automatically download the required reference files. + +For more information about reference genomes in nf-core pipelines, +see the [nf-core docs](https://nf-co.re/docs/usage/reference_genomes). 
+""" + + +class UseGenomicData(Screen): + """Select if the pipeline will use genomic data.""" + + def compose(self) -> ComposeResult: + yield Header() + yield Footer() + yield Markdown(markdown_intro) + yield Center( + Button("Use genomic data", id="true", variant="success"), + Button("Skip genomic data", id="false", variant="primary"), + classes="cta", + ) + + @on(Button.Pressed) + def on_button_pressed(self, event: Button.Pressed) -> None: + """Save answer to the config.""" + try: + # TODO + # self.parent.TEMPLATE_CONFIG.template_yaml["skip"] = [True if event.button.id == "true" else False] + # self.parent.switch_screen("continuous_integration") + pass + except ValueError: + pass diff --git a/nf_core/pipelines/create/utils.py b/nf_core/pipelines/create/utils.py index c5e85aeff..3823011a8 100644 --- a/nf_core/pipelines/create/utils.py +++ b/nf_core/pipelines/create/utils.py @@ -18,7 +18,7 @@ class CreateConfig(BaseModel): version: Optional[str] = None force: Optional[bool] = None outdir: Optional[str] = None - template_yaml: Optional[str] = None + template_yaml: Optional[dict] = None is_nfcore: Optional[bool] = None @field_validator("name") From 5b8cfd5eb9fe7d72b8fd5b1ad11071662f8adb5c Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 23 Aug 2023 17:04:49 +0200 Subject: [PATCH 007/737] add custom pipeline switch table to select pipeline features --- nf_core/pipelines/create/__init__.py | 6 +- nf_core/pipelines/create/create.tcss | 39 +++++++++++ nf_core/pipelines/create/custompipeline.py | 80 ++++++++++++++++++++++ nf_core/pipelines/create/usegenomicdata.py | 47 ------------- 4 files changed, 122 insertions(+), 50 deletions(-) create mode 100644 nf_core/pipelines/create/custompipeline.py delete mode 100644 nf_core/pipelines/create/usegenomicdata.py diff --git a/nf_core/pipelines/create/__init__.py b/nf_core/pipelines/create/__init__.py index 5d8eee2bc..f8d5efd28 100644 --- a/nf_core/pipelines/create/__init__.py +++ b/nf_core/pipelines/create/__init__.py @@ -3,8 
+3,8 @@ from textual.widgets import Button from nf_core.pipelines.create.basicdetails import BasicDetails +from nf_core.pipelines.create.custompipeline import CustomPipeline from nf_core.pipelines.create.pipelinetype import ChoosePipelineType -from nf_core.pipelines.create.usegenomicdata import UseGenomicData from nf_core.pipelines.create.utils import CreateConfig from nf_core.pipelines.create.welcome import WelcomeScreen @@ -23,7 +23,7 @@ class PipelineCreateApp(App): "welcome": WelcomeScreen(), "basic_details": BasicDetails(), "choose_type": ChoosePipelineType(), - "genomic_data": UseGenomicData(), + "type_custom": CustomPipeline(), } # Initialise config as empty @@ -39,7 +39,7 @@ def on_button_pressed(self, event: Button.Pressed) -> None: elif event.button.id == "type_nfcore": self.switch_screen("type_nfcore") elif event.button.id == "type_custom": - self.switch_screen("genomic_data") + self.switch_screen("type_custom") def action_toggle_dark(self) -> None: """An action to toggle dark mode.""" diff --git a/nf_core/pipelines/create/create.tcss b/nf_core/pipelines/create/create.tcss index 079d51659..ee00a1e91 100644 --- a/nf_core/pipelines/create/create.tcss +++ b/nf_core/pipelines/create/create.tcss @@ -10,6 +10,20 @@ margin-right: 3; } +.custom_grid { + height: auto; +} +.custom_grid Switch { + width: auto; +} +.custom_grid Static { + width: 1fr; + margin: 1 8; +} +.custom_grid Button { + width: auto; +} + .field_help { padding: 1 1 0 1; color: $text-muted; @@ -30,3 +44,28 @@ Horizontal{ .column { width: 1fr; } + +/* Display help messages */ + +.help_box { + background: white; + margin-left: 15; + margin-right: 25; + margin-bottom: 1; + display: none; +} +.displayed .help_box { + display: block; +} +#show_help { + display: block; +} +#hide_help { + display: none; +} +.displayed #show_help { + display: none; +} +.displayed #hide_help { + display: block; +} diff --git a/nf_core/pipelines/create/custompipeline.py b/nf_core/pipelines/create/custompipeline.py new 
file mode 100644 index 000000000..a857eb545 --- /dev/null +++ b/nf_core/pipelines/create/custompipeline.py @@ -0,0 +1,80 @@ +from textual import on +from textual.app import ComposeResult +from textual.containers import Center, Horizontal +from textual.screen import Screen +from textual.widgets import Button, Footer, Header, Markdown, Static, Switch + +markdown_genomes = """ +Nf-core pipelines are configured to use a copy of the most common reference genome files. + +By selecting this option, your pipeline will include a configuration file specifying the paths to these files. + +The required code to use these files will also be included in the template. +When the pipeline user provides an appropriate genome key, +the pipeline will automatically download the required reference files. + +For more information about reference genomes in nf-core pipelines, +see the [nf-core docs](https://nf-co.re/docs/usage/reference_genomes). +""" + +markdown_ci = """ +Add Github Continuous Integration tests +""" + + +class HelpText(Markdown): + """A class to show a text box with help text.""" + + def __init__(self, markdown: str, classes: str, id: str) -> None: + super().__init__(markdown=markdown, classes=classes, id=id) + + def show(self) -> None: + """Method to show the help text box.""" + self.add_class("displayed") + + def hide(self) -> None: + """Method to hide the help text box.""" + self.remove_class("displayed") + + +class CustomPipeline(Screen): + """Select if the pipeline will use genomic data.""" + + def compose(self) -> ComposeResult: + yield Header() + yield Footer() + yield Horizontal( + Switch(value=True), + Static("Use reference genomes"), + Button("Show help", id="show_help", name="genomes", variant="primary"), + Button("Hide help", id="hide_help", name="genomes"), + classes="custom_grid", + ) + yield HelpText(markdown_genomes, classes="help_box", id="genomes") + + yield Horizontal( + Switch(value=True), + Static("Include GitHub Continuous Integration (CI) tests"), + 
Button("Show help", id="show_help", name="ci", variant="primary"), + Button("Hide help", id="hide_help", name="ci"), + classes="custom_grid", + ) + yield HelpText(markdown_ci, classes="help_box", id="ci") + + yield Center( + Button("Done", id="done", variant="success"), + classes="cta", + ) + + @on(Button.Pressed) + def on_button_pressed(self, event: Button.Pressed) -> None: + """Save answer to the config.""" + help_text = self.query_one(f"#{event.button.name}", HelpText) + if event.button.id == "show_help": + help_text.show() + self.add_class("displayed") + elif event.button.id == "hide_help": + help_text.hide() + self.remove_class("displayed") + elif event.button.id == "done": + pass diff --git a/nf_core/pipelines/create/usegenomicdata.py b/nf_core/pipelines/create/usegenomicdata.py deleted file mode 100644 index 591e7893a..000000000 --- a/nf_core/pipelines/create/usegenomicdata.py +++ /dev/null @@ -1,47 +0,0 @@ -from textual import on -from textual.app import ComposeResult -from textual.containers import Center -from textual.screen import Screen -from textual.widgets import Button, Footer, Header, Markdown - -markdown_intro = """ -## You are now creating a custom pipeline - -# Will your pipeline use genomic data? - -Nf-core pipelines are configured to use a copy of the most common reference genome files. - -By selecting this option, your pipeline will include a configuration file specifying the paths to these files. - -The required code to use these files will also be included in the template. -When the pipeline user provides an appropriate genome key, -the pipeline will automatically download the required reference files. - -For more information about reference genomes in nf-core pipelines, -see the [nf-core docs](https://nf-co.re/docs/usage/reference_genomes). 
-""" - - -class UseGenomicData(Screen): - """Select if the pipeline will use genomic data.""" - - def compose(self) -> ComposeResult: - yield Header() - yield Footer() - yield Markdown(markdown_intro) - yield Center( - Button("Use genomic data", id="true", variant="success"), - Button("Skip genomic data", id="false", variant="primary"), - classes="cta", - ) - - @on(Button.Pressed) - def on_button_pressed(self, event: Button.Pressed) -> None: - """Save answer to the config.""" - try: - # TODO - # self.parent.TEMPLATE_CONFIG.template_yaml["skip"] = [True if event.button.id == "true" else False] - # self.parent.switch_screen("continuous_integration") - pass - except ValueError: - pass From 0a40dab34c6e11b8cc878460f8eb86930ed8778b Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Thu, 24 Aug 2023 13:42:44 +0200 Subject: [PATCH 008/737] create new widget PipelineFeature to display feature buttons and help text --- nf_core/pipelines/create/__init__.py | 2 + nf_core/pipelines/create/create.tcss | 7 ++ nf_core/pipelines/create/custompipeline.py | 76 ++++++++++++---------- 3 files changed, 50 insertions(+), 35 deletions(-) diff --git a/nf_core/pipelines/create/__init__.py b/nf_core/pipelines/create/__init__.py index f8d5efd28..a207ec748 100644 --- a/nf_core/pipelines/create/__init__.py +++ b/nf_core/pipelines/create/__init__.py @@ -40,6 +40,8 @@ def on_button_pressed(self, event: Button.Pressed) -> None: self.switch_screen("type_nfcore") elif event.button.id == "type_custom": self.switch_screen("type_custom") + elif event.button.id == "custom_done": + pass def action_toggle_dark(self) -> None: """An action to toggle dark mode.""" diff --git a/nf_core/pipelines/create/create.tcss b/nf_core/pipelines/create/create.tcss index ee00a1e91..7a86dec6b 100644 --- a/nf_core/pipelines/create/create.tcss +++ b/nf_core/pipelines/create/create.tcss @@ -45,6 +45,13 @@ Horizontal{ width: 1fr; } +HorizontalScroll { + width: 100%; +} +.feature_subtitle { + color: grey; +} + /* Display help 
messages */ .help_box { diff --git a/nf_core/pipelines/create/custompipeline.py b/nf_core/pipelines/create/custompipeline.py index a857eb545..1a0fc9bba 100644 --- a/nf_core/pipelines/create/custompipeline.py +++ b/nf_core/pipelines/create/custompipeline.py @@ -1,6 +1,5 @@ -from textual import on from textual.app import ComposeResult -from textual.containers import Center, Horizontal +from textual.containers import Center, HorizontalScroll, ScrollableContainer from textual.screen import Screen from textual.widgets import Button, Footer, Header, Markdown, Static, Switch @@ -25,8 +24,8 @@ class HelpText(Markdown): """A class to show a text box with help text.""" - def __init__(self, markdown: str, classes: str, id: str) -> None: - super().__init__(markdown=markdown, classes=classes, id=id) + def __init__(self, markdown: str, classes: str) -> None: + super().__init__(markdown=markdown, classes=classes) def show(self) -> None: """Method to show the help text box.""" @@ -37,44 +36,51 @@ def hide(self) -> None: self.remove_class("displayed") -class CustomPipeline(Screen): - """Select if the pipeline will use genomic data.""" +class PipelineFeature(Static): + """Widget for the selection of pipeline features.""" + + def __init__(self, markdown: str, title: str, subtitle: str) -> None: + self.markdown = markdown + self.title = title + self.subtitle = subtitle + super().__init__() + + def on_button_pressed(self, event: Button.Pressed) -> None: + """When the button is pressed, change the type of the button.""" + if event.button.id == "show_help": + self.add_class("displayed") + elif event.button.id == "hide_help": + self.remove_class("displayed") def compose(self) -> ComposeResult: - yield Header() - yield Footer() - yield Horizontal( - Switch(value=True), - Static("Use reference genomes"), - Button("Show help", id="show_help", name="genomes", variant="primary"), - Button("Hide help", id="hide_help", name="genomes"), - classes="custom_grid", - ) - yield 
HelpText(markdown_genomes, classes="help_box", id="genomes") + """ + Create child widgets. - yield Horizontal( + Displayed row with a switch, a short text description and a help button. + Hidden row with a help text box. + """ + yield HorizontalScroll( Switch(value=True), - Static("Include GitHub Continuous Integration (CI) tests"), - Button("Show help", id="show_help", name="ci", variant="primary"), - Button("Hide help", id="hide_help", name="ci"), + Static(self.title, classes="feature_title"), + Static(self.subtitle, classes="feature_subtitle"), + Button("Show help", id="show_help", variant="primary"), + Button("Hide help", id="hide_help"), classes="custom_grid", ) - yield HelpText(markdown_ci, classes="help_box", id="ci") + yield HelpText(self.markdown, classes="help_box") + +class CustomPipeline(Screen): + """Select if the pipeline will use genomic data.""" + + def compose(self) -> ComposeResult: + yield Header() + yield Footer() + yield ScrollableContainer( + PipelineFeature(markdown_genomes, "Use reference genomes", "Include reference genome files"), + PipelineFeature(markdown_ci, "Add Github CI tests", "Include GitHub Continuous Integration (CI) tests"), + ) yield Center( - Button("Done", id="done", variant="success"), + Button("Done", id="custom_done", variant="success"), classes="cta", ) - - @on(Button.Pressed) - def on_button_pressed(self, event: Button.Pressed) -> None: - """Save answer to the config.""" - help_text = self.query_one(f"#{event.button.name}", HelpText) - if event.button.id == "show_help": - help_text.show() - self.add_class("displayed") - elif event.button.id == "hide_help": - help_text.hide() - self.remove_class("displayed") - elif event.button.id == "done": - pass From 39b9e3c206bbf61ace886e909721b3d306473922 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Thu, 24 Aug 2023 16:02:51 +0200 Subject: [PATCH 009/737] add more custom features --- nf_core/pipelines/create/custompipeline.py | 53 ++++++++++++++++++++-- 1 file changed, 50 
insertions(+), 3 deletions(-) diff --git a/nf_core/pipelines/create/custompipeline.py b/nf_core/pipelines/create/custompipeline.py index 1a0fc9bba..47663f5bb 100644 --- a/nf_core/pipelines/create/custompipeline.py +++ b/nf_core/pipelines/create/custompipeline.py @@ -17,7 +17,36 @@ """ markdown_ci = """ -Add Github Continuous Integration tests +Nf-core provides a set of Continuous Integration (CI) tests for Github. +When you open a pull request (PR) on your pipeline repository, these tests will run automatically. + +There are different types of tests: +* Linting tests check that your code is formatted correctly and that it adheres to nf-core standards + For code linting they will use [prettier](https://prettier.io/). +* Pipeline tests run your pipeline on a small dataset to check that it works + These tests are run with a small test dataset on GitHub and a larger test dataset on AWS +* Marking old issues as stale +""" + +markdown_badges = """ +The pipeline `README.md` will include badges for: +* AWS CI Tests +* Zenodo DOI +* Nextflow +* Conda +* Docker +* Singularity +* Launching on Nextflow Tower +""" + +markdown_configuration = """ +Nf-core has a repository with a collection of configuration profiles. + +Those config files define a set of parameters which are specific to compute environments at different Institutions. +They can be used within all nf-core pipelines. +If you are likely to be running nf-core pipelines regularly it is a good idea to use or create a custom config file for your organisation. 
+
+For more information about nf-core configuration profiles, see the [nf-core/configs repository](https://github.com/nf-core/configs)
 """
@@ -77,8 +106,26 @@ def compose(self) -> ComposeResult:
         yield Header()
         yield Footer()
         yield ScrollableContainer(
-            PipelineFeature(markdown_genomes, "Use reference genomes", "Include reference genome files"),
-            PipelineFeature(markdown_ci, "Add Github CI tests", "Include GitHub Continuous Integration (CI) tests"),
+            PipelineFeature(
+                markdown_genomes,
+                "Use reference genomes",
+                "The pipeline will be configured to use a copy of the most common reference genome files from iGenomes",
+            ),
+            PipelineFeature(
+                markdown_ci,
+                "Add Github CI tests",
+                "The pipeline will include several GitHub actions for Continuous Integration (CI) testing",
+            ),
+            PipelineFeature(
+                markdown_badges,
+                "Add Github badges",
+                "The README.md file of the pipeline will include GitHub badges",
+            ),
+            PipelineFeature(
+                markdown_configuration,
+                "Add configuration files",
+                "The pipeline will include configuration profiles containing custom parameters required to run nf-core pipelines at different institutions",
+            ),
         )
         yield Center(
             Button("Done", id="custom_done", variant="success"),

From 25c9fa1c92b593a7970a1366c87d01429204f0c0 Mon Sep 17 00:00:00 2001
From: mirpedrol
Date: Fri, 25 Aug 2023 11:07:04 +0200
Subject: [PATCH 010/737] add template features to skip to TEMPLATE_CONFIG

---
 nf_core/pipelines/create/custompipeline.py | 22 ++++++++++++++++++++--
 1 file changed, 20 insertions(+), 2 deletions(-)

diff --git a/nf_core/pipelines/create/custompipeline.py b/nf_core/pipelines/create/custompipeline.py
index 47663f5bb..409c26041 100644
--- a/nf_core/pipelines/create/custompipeline.py
+++ b/nf_core/pipelines/create/custompipeline.py
@@ -1,5 +1,7 @@
+from textual import on
 from textual.app import ComposeResult
 from textual.containers import Center, HorizontalScroll, ScrollableContainer
+from textual.reactive import reactive
 from textual.screen import Screen
from textual.widgets import Button, Footer, Header, Markdown, Static, Switch @@ -68,10 +70,11 @@ def hide(self) -> None: class PipelineFeature(Static): """Widget for the selection of pipeline features.""" - def __init__(self, markdown: str, title: str, subtitle: str) -> None: + def __init__(self, markdown: str, title: str, subtitle: str, field_id: str) -> None: self.markdown = markdown self.title = title self.subtitle = subtitle + self.field_id = field_id super().__init__() def on_button_pressed(self, event: Button.Pressed) -> None: @@ -89,7 +92,7 @@ def compose(self) -> ComposeResult: Hidden row with a help text box. """ yield HorizontalScroll( - Switch(value=True), + Switch(value=True, id=self.field_id), Static(self.title, classes="feature_title"), Static(self.subtitle, classes="feature_subtitle"), Button("Show help", id="show_help", variant="primary"), @@ -110,24 +113,39 @@ def compose(self) -> ComposeResult: markdown_genomes, "Use reference genomes", "The pipeline will be configured to use a copy of the most common reference genome files from iGenomes", + "igenomes", ), PipelineFeature( markdown_ci, "Add Github CI tests", "The pipeline will include several GitHub actions for Continuous Integration (CI) testing", + "ci", ), PipelineFeature( markdown_badges, "Add Github badges", "The README.md file of the pipeline will include GitHub badges", + "github_badges", ), PipelineFeature( markdown_configuration, "Add configuration files", "The pipeline will include configuration profiles containing custom parameters requried to run nf-core pipelines at different institutions", + "nf_core_configs", ), ) yield Center( Button("Done", id="custom_done", variant="success"), classes="cta", ) + + @on(Button.Pressed) + def on_button_pressed(self, event: Button.Pressed) -> None: + """Save fields to the config.""" + skip = [] + for feature_input in self.query("PipelineFeature"): + this_switch = feature_input.query_one(Switch) + if not this_switch.value: + 
skip.append(this_switch.id) + self.parent.TEMPLATE_CONFIG.template_yaml = {"skip": skip} + self.parent.switch_screen("custom_done") From 610cbd901e1d9fd9ebb76f5ef754c1b3f13b6964 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 28 Aug 2023 10:14:24 +0200 Subject: [PATCH 011/737] animate help box --- nf_core/pipelines/create/__init__.py | 2 -- nf_core/pipelines/create/create.tcss | 6 ++++++ nf_core/pipelines/create/custompipeline.py | 15 ++++++++------- 3 files changed, 14 insertions(+), 9 deletions(-) diff --git a/nf_core/pipelines/create/__init__.py b/nf_core/pipelines/create/__init__.py index a207ec748..f8d5efd28 100644 --- a/nf_core/pipelines/create/__init__.py +++ b/nf_core/pipelines/create/__init__.py @@ -40,8 +40,6 @@ def on_button_pressed(self, event: Button.Pressed) -> None: self.switch_screen("type_nfcore") elif event.button.id == "type_custom": self.switch_screen("type_custom") - elif event.button.id == "custom_done": - pass def action_toggle_dark(self) -> None: """An action to toggle dark mode.""" diff --git a/nf_core/pipelines/create/create.tcss b/nf_core/pipelines/create/create.tcss index 7a86dec6b..39bc8d907 100644 --- a/nf_core/pipelines/create/create.tcss +++ b/nf_core/pipelines/create/create.tcss @@ -59,10 +59,16 @@ HorizontalScroll { margin-left: 15; margin-right: 25; margin-bottom: 1; + overflow-y: scroll; display: none; + + height: 0; } .displayed .help_box { display: block; + + transition: height 50ms; + height: 10; } #show_help { display: block; diff --git a/nf_core/pipelines/create/custompipeline.py b/nf_core/pipelines/create/custompipeline.py index 409c26041..259e5bd29 100644 --- a/nf_core/pipelines/create/custompipeline.py +++ b/nf_core/pipelines/create/custompipeline.py @@ -142,10 +142,11 @@ def compose(self) -> ComposeResult: @on(Button.Pressed) def on_button_pressed(self, event: Button.Pressed) -> None: """Save fields to the config.""" - skip = [] - for feature_input in self.query("PipelineFeature"): - this_switch = 
feature_input.query_one(Switch) - if not this_switch.value: - skip.append(this_switch.id) - self.parent.TEMPLATE_CONFIG.template_yaml = {"skip": skip} - self.parent.switch_screen("custom_done") + if event.button.id == "custom_done": + skip = [] + for feature_input in self.query("PipelineFeature"): + this_switch = feature_input.query_one(Switch) + if not this_switch.value: + skip.append(this_switch.id) + self.parent.TEMPLATE_CONFIG.template_yaml = {"skip": skip} + self.parent.switch_screen("custom_done") From 6677144045ddd92e73fd1db74b221af8e9d6ef08 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 29 Aug 2023 13:48:51 +0200 Subject: [PATCH 012/737] add type_nfcore screen and use PipelineCreate to create a pipeline --- nf_core/__main__.py | 15 ++++++++++- nf_core/pipelines/create/__init__.py | 6 ++++- nf_core/pipelines/create/custompipeline.py | 19 +++++++------- nf_core/pipelines/create/nfcorepipeline.py | 29 ++++++++++++++++++++++ 4 files changed, 57 insertions(+), 12 deletions(-) create mode 100644 nf_core/pipelines/create/nfcorepipeline.py diff --git a/nf_core/__main__.py b/nf_core/__main__.py index f3f2d2bf5..4b926931a 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -525,10 +525,23 @@ def create_pipeline(ctx, name, description, author, version, force, outdir, temp \n\n Run without any command line arguments to use an interactive interface. 
""" + from nf_core.create import PipelineCreate from nf_core.pipelines.create import PipelineCreateApp app = PipelineCreateApp() - app.run() + config = app.run() + + create_obj = PipelineCreate( + config.name, + config.description, + config.author, + version=config.version, + force=config.force, + outdir=config.outdir, + template_yaml_path=config.template_yaml, + plain=config.is_nfcore, + ) + create_obj.init_pipeline() # nf-core modules subcommands diff --git a/nf_core/pipelines/create/__init__.py b/nf_core/pipelines/create/__init__.py index f8d5efd28..10182c6fe 100644 --- a/nf_core/pipelines/create/__init__.py +++ b/nf_core/pipelines/create/__init__.py @@ -4,12 +4,13 @@ from nf_core.pipelines.create.basicdetails import BasicDetails from nf_core.pipelines.create.custompipeline import CustomPipeline +from nf_core.pipelines.create.nfcorepipeline import NfcorePipeline from nf_core.pipelines.create.pipelinetype import ChoosePipelineType from nf_core.pipelines.create.utils import CreateConfig from nf_core.pipelines.create.welcome import WelcomeScreen -class PipelineCreateApp(App): +class PipelineCreateApp(App[CreateConfig]): """A Textual app to manage stopwatches.""" CSS_PATH = "create.tcss" @@ -24,6 +25,7 @@ class PipelineCreateApp(App): "basic_details": BasicDetails(), "choose_type": ChoosePipelineType(), "type_custom": CustomPipeline(), + "type_nfcore": NfcorePipeline(), } # Initialise config as empty @@ -40,6 +42,8 @@ def on_button_pressed(self, event: Button.Pressed) -> None: self.switch_screen("type_nfcore") elif event.button.id == "type_custom": self.switch_screen("type_custom") + elif event.button.id == "done": + self.exit(self.TEMPLATE_CONFIG) def action_toggle_dark(self) -> None: """An action to toggle dark mode.""" diff --git a/nf_core/pipelines/create/custompipeline.py b/nf_core/pipelines/create/custompipeline.py index 259e5bd29..6e62095ac 100644 --- a/nf_core/pipelines/create/custompipeline.py +++ b/nf_core/pipelines/create/custompipeline.py @@ -135,18 
+135,17 @@ def compose(self) -> ComposeResult: ), ) yield Center( - Button("Done", id="custom_done", variant="success"), + Button("Done", id="done", variant="success"), classes="cta", ) - @on(Button.Pressed) + @on(Button.Pressed, "#done") def on_button_pressed(self, event: Button.Pressed) -> None: """Save fields to the config.""" - if event.button.id == "custom_done": - skip = [] - for feature_input in self.query("PipelineFeature"): - this_switch = feature_input.query_one(Switch) - if not this_switch.value: - skip.append(this_switch.id) - self.parent.TEMPLATE_CONFIG.template_yaml = {"skip": skip} - self.parent.switch_screen("custom_done") + skip = [] + for feature_input in self.query("PipelineFeature"): + this_switch = feature_input.query_one(Switch) + if not this_switch.value: + skip.append(this_switch.id) + self.parent.TEMPLATE_CONFIG.template_yaml = {"skip": skip} + self.parent.TEMPLATE_CONFIG.is_nfcore = False diff --git a/nf_core/pipelines/create/nfcorepipeline.py b/nf_core/pipelines/create/nfcorepipeline.py new file mode 100644 index 000000000..817e09e78 --- /dev/null +++ b/nf_core/pipelines/create/nfcorepipeline.py @@ -0,0 +1,29 @@ +from textual import on +from textual.app import ComposeResult +from textual.containers import Center, HorizontalScroll, ScrollableContainer +from textual.screen import Screen +from textual.widgets import Button, Footer, Header, Markdown, Static, Switch + + +class NfcorePipeline(Screen): + """Select if the pipeline will use genomic data.""" + + def compose(self) -> ComposeResult: + yield Header() + yield Footer() + # TODO: add features to customise the pipeline template + yield Center( + Button("Done", id="done", variant="success"), + classes="cta", + ) + + @on(Button.Pressed, "#done") + def on_button_pressed(self, event: Button.Pressed) -> None: + """Save fields to the config.""" + skip = [] + for feature_input in self.query("PipelineFeature"): + this_switch = feature_input.query_one(Switch) + if not this_switch.value: + 
skip.append(this_switch.id) + self.parent.TEMPLATE_CONFIG.template_yaml = {"skip": skip} + self.parent.TEMPLATE_CONFIG.is_nfcore = True From 8bac0bed341e96756ca854dd367e916d12671631 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 29 Aug 2023 14:06:43 +0200 Subject: [PATCH 013/737] modify PipelineCreate to accept a template dictionary --- nf_core/__main__.py | 1 + nf_core/create.py | 21 ++++++++++++++++----- 2 files changed, 17 insertions(+), 5 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 4b926931a..dafeb7757 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -539,6 +539,7 @@ def create_pipeline(ctx, name, description, author, version, force, outdir, temp force=config.force, outdir=config.outdir, template_yaml_path=config.template_yaml, + organisation=config.org, plain=config.is_nfcore, ) create_obj.init_pipeline() diff --git a/nf_core/create.py b/nf_core/create.py index 470623f55..6fd4c4d25 100644 --- a/nf_core/create.py +++ b/nf_core/create.py @@ -40,6 +40,7 @@ class PipelineCreate: outdir (str): Path to the local output directory. template_yaml_path (str): Path to template.yml file for pipeline creation settings. plain (bool): If true the Git repository will be initialized plain. + organisation (str): Name of the GitHub organisation to create the pipeline. Will be the prefix of the pipeline. default_branch (str): Specifies the --initial-branch name. """ @@ -54,10 +55,11 @@ def __init__( outdir=None, template_yaml_path=None, plain=False, + organisation="nf-core", default_branch=None, ): self.template_params, skip_paths_keys, self.template_yaml = self.create_param_dict( - name, description, author, version, template_yaml_path, plain, outdir if outdir else "." + name, description, author, organisation, version, template_yaml_path, plain, outdir if outdir else "." 
) skippable_paths = { @@ -90,13 +92,16 @@ def __init__( outdir = os.path.join(os.getcwd(), self.template_params["name_noslash"]) self.outdir = Path(outdir) - def create_param_dict(self, name, description, author, version, template_yaml_path, plain, pipeline_dir): + def create_param_dict( + self, name, description, author, organisation, version, template_yaml_path, plain, pipeline_dir + ): """Creates a dictionary of parameters for the new pipeline. Args: name (str): Name for the pipeline. description (str): Description for the pipeline. author (str): Authors name of the pipeline. + organisation (str): Name of the GitHub organisation to create the pipeline. version (str): Version flag. template_yaml_path (str): Path to YAML file containing template parameters. plain (bool): If true the pipeline template will be initialized plain, without customisation. @@ -108,8 +113,11 @@ def create_param_dict(self, name, description, author, version, template_yaml_pa # Obtain template customization info from template yaml file or `.nf-core.yml` config file try: if template_yaml_path is not None: - with open(template_yaml_path, "r") as f: - template_yaml = yaml.safe_load(f) + if isinstance(template_yaml_path, str): + with open(template_yaml_path, "r") as f: + template_yaml = yaml.safe_load(f) + else: + template_yaml = template_yaml_path elif "template" in config_yml: template_yaml = config_yml["template"] else: @@ -150,7 +158,10 @@ def create_param_dict(self, name, description, author, version, template_yaml_pa template_yaml.update(self.customize_template(template_areas)) # Now look in the template for more options, otherwise default to nf-core defaults - param_dict["prefix"] = template_yaml.get("prefix", "nf-core") + if "prefix" in template_yaml: + log.info(f"Using organisation name found in {template_yaml_path}") + organisation = template_yaml.get("prefix") + param_dict["prefix"] = organisation param_dict["branded"] = param_dict["prefix"] == "nf-core" skip_paths = [] if 
param_dict["branded"] else ["branded"] From 43db7f62cb588d1c9b9ec05c9c60a45e6c12d6f3 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 30 Aug 2023 18:21:39 +0200 Subject: [PATCH 014/737] start modifying nf-core create command to use TUI and not CLI --- nf_core/__main__.py | 93 +++---- nf_core/create.py | 290 ++++++++++----------- nf_core/pipelines/create/custompipeline.py | 2 +- nf_core/pipelines/create/nfcorepipeline.py | 2 +- nf_core/pipelines/create/utils.py | 6 +- 5 files changed, 174 insertions(+), 219 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index dafeb7757..63d43f8cf 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -367,47 +367,6 @@ def licences(pipeline, json): sys.exit(1) -# nf-core create -@nf_core_cli.command() -@click.option( - "-n", - "--name", - type=str, - help="The name of your new pipeline", -) -@click.option("-d", "--description", type=str, help="A short description of your pipeline") -@click.option("-a", "--author", type=str, help="Name of the main author(s)") -@click.option("--version", type=str, default="1.0dev", help="The initial version number to use") -@click.option("-f", "--force", is_flag=True, default=False, help="Overwrite output directory if it already exists") -@click.option("-o", "--outdir", help="Output directory for new pipeline (default: pipeline name)") -@click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") -@click.option("--plain", is_flag=True, help="Use the standard nf-core template") -def create(name, description, author, version, force, outdir, template_yaml, plain): - """ - Create a new pipeline using the nf-core template. - - Uses the nf-core template to make a skeleton Nextflow pipeline with all required - files, boilerplate code and best-practices. 
- """ - from nf_core.create import PipelineCreate - - try: - create_obj = PipelineCreate( - name, - description, - author, - version=version, - force=force, - outdir=outdir, - template_yaml_path=template_yaml, - plain=plain, - ) - create_obj.init_pipeline() - except UserWarning as e: - log.error(e) - sys.exit(1) - - # nf-core lint @nf_core_cli.command() @click.option( @@ -500,7 +459,7 @@ def pipelines(ctx): ctx.ensure_object(dict) -# nf-core pipeline install +# nf-core pipeline create @pipelines.command("create") @click.pass_context @click.option( @@ -515,8 +474,12 @@ def pipelines(ctx): @click.option("-f", "--force", is_flag=True, default=False, help="Overwrite output directory if it already exists") @click.option("-o", "--outdir", help="Output directory for new pipeline (default: pipeline name)") @click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") -@click.option("--plain", is_flag=True, help="Use the standard nf-core template") -def create_pipeline(ctx, name, description, author, version, force, outdir, template_yaml, plain): +@click.option( + "--organisation", + type=str, + help="The name of the GitHub organisation where the pipeline will be hosted (default: nf-core", +) +def create_pipeline(ctx, name, description, author, version, force, outdir, template_yaml, organisation): """ Create a new pipeline using the nf-core template. 
@@ -528,21 +491,33 @@ def create_pipeline(ctx, name, description, author, version, force, outdir, temp from nf_core.create import PipelineCreate from nf_core.pipelines.create import PipelineCreateApp - app = PipelineCreateApp() - config = app.run() - - create_obj = PipelineCreate( - config.name, - config.description, - config.author, - version=config.version, - force=config.force, - outdir=config.outdir, - template_yaml_path=config.template_yaml, - organisation=config.org, - plain=config.is_nfcore, - ) - create_obj.init_pipeline() + if (name and description and author) or (template_yaml): + # If all command arguments are used, run without the interactive interface + config = organisation if template_yaml else None + else: + log.info( + "Ignoring provided arguments. Launching interactive nf-core pipeline creation tool." + "\nRun with all command line arguments to avoid using an interactive interface." + ) + app = PipelineCreateApp() + config = app.run() + print(config) + + try: + create_obj = PipelineCreate( + name, + description, + author, + version=version, + force=force, + outdir=outdir, + template_config=config, + organisation=organisation, + ) + create_obj.init_pipeline() + except UserWarning as e: + log.error(e) + sys.exit(1) # nf-core modules subcommands diff --git a/nf_core/create.py b/nf_core/create.py index 6fd4c4d25..a60bfac75 100644 --- a/nf_core/create.py +++ b/nf_core/create.py @@ -22,6 +22,7 @@ import nf_core.schema import nf_core.utils from nf_core.lint_utils import run_prettier_on_file +from nf_core.pipelines.create.utils import CreateConfig log = logging.getLogger(__name__) @@ -38,9 +39,9 @@ class PipelineCreate: force (bool): Overwrites a given workflow directory with the same name. Defaults to False. May the force be with you. outdir (str): Path to the local output directory. - template_yaml_path (str): Path to template.yml file for pipeline creation settings. - plain (bool): If true the Git repository will be initialized plain. 
+ template_config (str|CreateConfig): Path to template.yml file for pipeline creation settings. or pydantic model with the customisation for pipeline creation settings. organisation (str): Name of the GitHub organisation to create the pipeline. Will be the prefix of the pipeline. + from_config_file (bool): If true the pipeline will be created from the `.nf-core.yml` config file. default_branch (str): Specifies the --initial-branch name. """ @@ -53,13 +54,26 @@ def __init__( no_git=False, force=False, outdir=None, - template_yaml_path=None, - plain=False, + template_config=None, organisation="nf-core", + from_config_file=False, default_branch=None, ): - self.template_params, skip_paths_keys, self.template_yaml = self.create_param_dict( - name, description, author, organisation, version, template_yaml_path, plain, outdir if outdir else "." + if template_config is not None and isinstance(template_config, str): + # Obtain a CreateConfig object from the template yaml file + self.config = self.check_template_yaml_info(template_config, name, description, author) + self.update_config(self, organisation, version, force, outdir if outdir else ".") + elif isinstance(template_config, CreateConfig): + self.config = template_config + elif from_config_file: + # Try reading config file + _, config_yml = nf_core.utils.load_tools_config(outdir if outdir else ".") + # Obtain a CreateConfig object from `.nf-core.yml` config file + if "template" in config_yml: + self.config = CreateConfig(**config_yml["template"]) + + self.skip_areas, skip_paths = self.obtain_skipped_areas_dict( + self.config.skip_features, outdir if outdir else "." 
) skippable_paths = { @@ -77,65 +91,96 @@ def __init__( ], } # Get list of files we're skipping with the supplied skip keys - self.skip_paths = set(sp for k in skip_paths_keys for sp in skippable_paths[k]) + self.skip_paths = set(sp for k in skip_paths for sp in skippable_paths[k]) # Set convenience variables - self.name = self.template_params["name"] + self.name = self.config.name # Set fields used by the class methods - self.no_git = ( - no_git if self.template_params["github"] else True - ) # Set to True if template was configured without github hosting + self.no_git = no_git self.default_branch = default_branch self.force = force if outdir is None: - outdir = os.path.join(os.getcwd(), self.template_params["name_noslash"]) + outdir = os.path.join(os.getcwd(), self.config.name_noslash) self.outdir = Path(outdir) - def create_param_dict( - self, name, description, author, organisation, version, template_yaml_path, plain, pipeline_dir - ): - """Creates a dictionary of parameters for the new pipeline. + def check_template_yaml_info(self, template_yaml, name, description, author): + """Ensure that the provided template yaml file contains the necessary information. Args: + template_yaml (str): Template yaml file. name (str): Name for the pipeline. description (str): Description for the pipeline. author (str): Authors name of the pipeline. - organisation (str): Name of the GitHub organisation to create the pipeline. - version (str): Version flag. - template_yaml_path (str): Path to YAML file containing template parameters. - plain (bool): If true the pipeline template will be initialized plain, without customisation. - pipeline_dir (str): Path to the pipeline directory. - """ - # Try reading config file - _, config_yml = nf_core.utils.load_tools_config(pipeline_dir) + Returns: + CreateConfig: Pydantic model for the nf-core create config. + + Raises: + UserWarning: if template yaml file does not contain all the necessary information. 
+ UserWarning: if template yaml file does not exist. + """ # Obtain template customization info from template yaml file or `.nf-core.yml` config file try: - if template_yaml_path is not None: - if isinstance(template_yaml_path, str): - with open(template_yaml_path, "r") as f: - template_yaml = yaml.safe_load(f) - else: - template_yaml = template_yaml_path - elif "template" in config_yml: - template_yaml = config_yml["template"] - else: - template_yaml = {} + with open(template_yaml, "r") as f: + template_yaml = yaml.safe_load(f) + config = CreateConfig(**template_yaml) except FileNotFoundError: - raise UserWarning(f"Template YAML file '{template_yaml_path}' not found.") + raise UserWarning(f"Template YAML file '{template_yaml}' not found.") + + missing_fields = [] + if config.name is None and name is None: + missing_fields.append("name") + elif config.name is None: + config.name = name + if config.description is None and description is None: + missing_fields.append("description") + elif config.description is None: + config.description = description + if config.author is None and author is None: + missing_fields.append("author") + elif config.author is None: + config.author = author + if len(missing_fields) > 0: + raise UserWarning( + f"Template YAML file does not contain the following required fields: {', '.join(missing_fields)}" + ) + + return config - param_dict = {} - # Get the necessary parameters either from the template or command line arguments - param_dict["name"] = self.get_param("name", name, template_yaml, template_yaml_path) - param_dict["description"] = self.get_param("description", description, template_yaml, template_yaml_path) - param_dict["author"] = self.get_param("author", author, template_yaml, template_yaml_path) + def update_config(self, organisation, version, force, pipeline_dir): + """Updates the config file with arguments provided through command line. 
- if "version" in template_yaml: - if version is not None: - log.info(f"Overriding --version with version found in {template_yaml_path}") - version = template_yaml["version"] - param_dict["version"] = version + Args: + organisation (str): Name of the GitHub organisation to create the pipeline. + version (str): Version of the pipeline. + force (bool): Overwrites a given workflow directory with the same name. + pipeline_dir (str): Path to the local output directory. + """ + if self.config.org is None: + self.config.org = organisation + if self.config.version is None: + self.config.version = version + if self.config.force is None: + self.config.force = force + if self.config.outdir is None: + self.config.outdir = pipeline_dir + if self.config.is_nfcore is None: + self.config.is_nfcore = True if organisation == "nf-core" else False + + def obtain_skipped_areas_dict(self, features_to_skip, pipeline_dir): + """Creates a dictionary of parameters for the new pipeline. + + Args: + features_to_skip (list): List of template features/areas to skip. + pipeline_dir (str): Path to the pipeline directory. + + Returns: + skip_areas (dict): Dictionary of template areas to skip with values true/false. + skip_paths (list): List of template areas which contain paths to skip. 
+ """ + # Try reading config file + _, config_yml = nf_core.utils.load_tools_config(pipeline_dir) # Define the different template areas, and what actions to take for each # if they are skipped @@ -147,115 +192,48 @@ def create_param_dict( "nf_core_configs": {"name": "nf-core/configs", "file": False, "content": True}, } - # Once all necessary parameters are set, check if the user wants to customize the template more - if template_yaml_path is None and not plain: - customize_template = questionary.confirm( - "Do you want to customize which parts of the template are used?", - style=nf_core.utils.nfcore_question_style, - default=False, - ).unsafe_ask() - if customize_template: - template_yaml.update(self.customize_template(template_areas)) - - # Now look in the template for more options, otherwise default to nf-core defaults - if "prefix" in template_yaml: - log.info(f"Using organisation name found in {template_yaml_path}") - organisation = template_yaml.get("prefix") - param_dict["prefix"] = organisation - param_dict["branded"] = param_dict["prefix"] == "nf-core" - - skip_paths = [] if param_dict["branded"] else ["branded"] - + skip_paths = [] + skip_areas = {} for t_area in template_areas: - areas_to_skip = template_yaml.get("skip", []) - if isinstance(areas_to_skip, str): - areas_to_skip = [areas_to_skip] - if t_area in areas_to_skip: + if t_area in features_to_skip: if template_areas[t_area]["file"]: skip_paths.append(t_area) - param_dict[t_area] = False + skip_areas[t_area] = False else: - param_dict[t_area] = True + skip_areas[t_area] = True + # If github is selected, exclude also github_badges - if not param_dict["github"]: - param_dict["github_badges"] = False + # if not param_dict["github"]: + # param_dict["github_badges"] = False # Set the last parameters based on the ones provided - param_dict["short_name"] = ( - param_dict["name"].lower().replace(r"/\s+/", "-").replace(f"{param_dict['prefix']}/", "").replace("/", "-") + self.config.short_name = ( + 
self.config.name.lower().replace(r"/\s+/", "-").replace(f"{self.config.org}/", "").replace("/", "-") ) - param_dict["name"] = f"{param_dict['prefix']}/{param_dict['short_name']}" - param_dict["name_noslash"] = param_dict["name"].replace("/", "-") - param_dict["prefix_nodash"] = param_dict["prefix"].replace("-", "") - param_dict["name_docker"] = param_dict["name"].replace(param_dict["prefix"], param_dict["prefix_nodash"]) - param_dict["logo_light"] = f"{param_dict['name_noslash']}_logo_light.png" - param_dict["logo_dark"] = f"{param_dict['name_noslash']}_logo_dark.png" - param_dict["version"] = version + self.config.name = f"{self.config.org}/{self.config.short_name}" + self.config.name_noslash = self.config.name.replace("/", "-") + self.config.prefix_nodash = self.config.org.replace("-", "") + self.config.name_docker = self.config.name.replace(self.config.org, self.config.prefix_nodash) + self.config.logo_light = f"{self.config.name_noslash}_logo_light.png" + self.config.logo_dark = f"{self.config.name_noslash}_logo_dark.png" if ( "lint" in config_yml and "nextflow_config" in config_yml["lint"] and "manifest.name" in config_yml["lint"]["nextflow_config"] ): - return param_dict, skip_paths, template_yaml + return skip_areas, skip_paths # Check that the pipeline name matches the requirements - if not re.match(r"^[a-z]+$", param_dict["short_name"]): - if param_dict["prefix"] == "nf-core": + if not re.match(r"^[a-z]+$", self.config.short_name): + if self.config.is_nfcore: raise UserWarning("[red]Invalid workflow name: must be lowercase without punctuation.") else: log.warning( "Your workflow name is not lowercase without punctuation. This may cause Nextflow errors.\nConsider changing the name to avoid special characters." ) - return param_dict, skip_paths, template_yaml - - def customize_template(self, template_areas): - """Customizes the template parameters. - - Args: - template_areas (list): List of available template areas to skip. 
- """ - template_yaml = {} - prefix = questionary.text("Pipeline prefix", style=nf_core.utils.nfcore_question_style).unsafe_ask() - while not re.match(r"^[a-zA-Z_][a-zA-Z0-9-_]*$", prefix): - log.error("[red]Pipeline prefix cannot start with digit or hyphen and cannot contain punctuation.[/red]") - prefix = questionary.text( - "Please provide a new pipeline prefix", style=nf_core.utils.nfcore_question_style - ).unsafe_ask() - template_yaml["prefix"] = prefix - - choices = [{"name": template_areas[area]["name"], "value": area} for area in template_areas] - template_yaml["skip"] = questionary.checkbox( - "Skip template areas?", choices=choices, style=nf_core.utils.nfcore_question_style - ).unsafe_ask() - return template_yaml - - def get_param(self, param_name, passed_value, template_yaml, template_yaml_path): - if param_name in template_yaml: - if passed_value is not None: - log.info(f"overriding --{param_name} with name found in {template_yaml_path}") - passed_value = template_yaml[param_name] - if passed_value is None: - passed_value = getattr(self, f"prompt_wf_{param_name}")() - return passed_value - - def prompt_wf_name(self): - wf_name = questionary.text("Workflow name", style=nf_core.utils.nfcore_question_style).unsafe_ask() - while not re.match(r"^[a-z]+$", wf_name): - log.error("[red]Invalid workflow name: must be lowercase without punctuation.") - wf_name = questionary.text( - "Please provide a new workflow name", style=nf_core.utils.nfcore_question_style - ).unsafe_ask() - return wf_name - - def prompt_wf_description(self): - wf_description = questionary.text("Description", style=nf_core.utils.nfcore_question_style).unsafe_ask() - return wf_description - - def prompt_wf_author(self): - wf_author = questionary.text("Author", style=nf_core.utils.nfcore_question_style).unsafe_ask() - return wf_author + return skip_areas, skip_paths def init_pipeline(self): """Creates the nf-core pipeline.""" @@ -267,7 +245,7 @@ def init_pipeline(self): if not self.no_git: 
self.git_init_pipeline() - if self.template_params["branded"]: + if self.config.is_nfcore: log.info( "[green bold]!!!!!! IMPORTANT !!!!!!\n\n" "[green not bold]If you are interested in adding your pipeline to the nf-core community,\n" @@ -283,30 +261,32 @@ def render_template(self): # Check if the output directory exists if self.outdir.exists(): if self.force: - log.warning(f"Output directory '{self.outdir}' exists - continuing as --force specified") + log.warning( + f"Output directory '{self.outdir}' exists - removing the existing directory as --force specified" + ) + shutil.rmtree(self.outdir) else: log.error(f"Output directory '{self.outdir}' exists!") log.info("Use -f / --force to overwrite existing files") sys.exit(1) - else: - os.makedirs(self.outdir) + os.makedirs(self.outdir) # Run jinja2 for each file in the template folder env = jinja2.Environment( loader=jinja2.PackageLoader("nf_core", "pipeline-template"), keep_trailing_newline=True ) template_dir = os.path.join(os.path.dirname(__file__), "pipeline-template") - object_attrs = self.template_params + object_attrs = self.config.model_dump() object_attrs["nf_core_version"] = nf_core.__version__ # Can't use glob.glob() as need recursive hidden dotfiles - https://stackoverflow.com/a/58126417/713980 template_files = list(Path(template_dir).glob("**/*")) template_files += list(Path(template_dir).glob("*")) ignore_strs = [".pyc", "__pycache__", ".pyo", ".pyd", ".DS_Store", ".egg"] - short_name = self.template_params["short_name"] + short_name = self.config.short_name rename_files = { "workflows/pipeline.nf": f"workflows/{short_name}.nf", - "lib/WorkflowPipeline.groovy": f"lib/Workflow{short_name[0].upper()}{short_name[1:]}.groovy", + "lib/WorkflowPipeline.groovy": f"lib/Workflow{short_name.title()}.groovy", } # Set the paths to skip according to customization @@ -361,14 +341,14 @@ def render_template(self): os.chmod(output_path, template_stat.st_mode) # Remove all unused parameters in the nextflow schema - if 
not self.template_params["igenomes"] or not self.template_params["nf_core_configs"]: + if not self.skip_areas["igenomes"] or not self.skip_areas["nf_core_configs"]: self.update_nextflow_schema() - if self.template_params["branded"]: + if self.config.is_nfcore: # Make a logo and save it, if it is a nf-core pipeline self.make_pipeline_logo() else: - if self.template_params["github"]: + if self.skip_areas["github"]: # Remove field mentioning nf-core docs # in the github bug report template self.remove_nf_core_in_bug_report_template() @@ -376,10 +356,10 @@ def render_template(self): # Update the .nf-core.yml with linting configurations self.fix_linting() - if self.template_yaml: + if self.config: config_fn, config_yml = nf_core.utils.load_tools_config(self.outdir) with open(self.outdir / config_fn, "w") as fh: - config_yml.update(template=self.template_yaml) + config_yml.update(template=self.config.model_dump()) yaml.safe_dump(config_yml, fh) log.debug(f"Dumping pipeline template yml to pipeline config file '{config_fn.name}'") run_prettier_on_file(self.outdir / config_fn) @@ -423,7 +403,7 @@ def fix_linting(self): for a customized pipeline. 
""" # Create a lint config - short_name = self.template_params["short_name"] + short_name = self.skip_areas["short_name"] lint_config = { "files_exist": [ "CODE_OF_CONDUCT.md", @@ -448,7 +428,7 @@ def fix_linting(self): } # Add GitHub hosting specific configurations - if not self.template_params["github"]: + if not self.skip_areas["github"]: lint_config["files_exist"].extend( [ ".github/ISSUE_TEMPLATE/bug_report.yml", @@ -474,7 +454,7 @@ def fix_linting(self): ) # Add CI specific configurations - if not self.template_params["ci"]: + if not self.skip_areas["ci"]: lint_config["files_exist"].extend( [ ".github/workflows/branch.yml", @@ -485,7 +465,7 @@ def fix_linting(self): ) # Add custom config specific configurations - if not self.template_params["nf_core_configs"]: + if not self.skip_areas["nf_core_configs"]: lint_config["files_exist"].extend(["conf/igenomes.config"]) lint_config["nextflow_config"].extend( [ @@ -497,15 +477,15 @@ def fix_linting(self): ) # Add igenomes specific configurations - if not self.template_params["igenomes"]: + if not self.skip_areas["igenomes"]: lint_config["files_exist"].extend(["conf/igenomes.config"]) # Add github badges specific configurations - if not self.template_params["github_badges"] or not self.template_params["github"]: + if not self.skip_areas["github_badges"] or not self.skip_areas["github"]: lint_config["readme"] = ["nextflow_badge"] # If the pipeline is unbranded - if not self.template_params["branded"]: + if not self.skip_areas["branded"]: lint_config["files_unchanged"].extend([".github/ISSUE_TEMPLATE/bug_report.yml"]) # Add the lint content to the preexisting nf-core config @@ -519,16 +499,14 @@ def fix_linting(self): def make_pipeline_logo(self): """Fetch a logo for the new pipeline from the nf-core website""" - logo_url = f"https://nf-co.re/logo/{self.template_params['short_name']}?theme=light" + logo_url = f"https://nf-co.re/logo/{self.config.short_name}?theme=light" log.debug(f"Fetching logo from {logo_url}") - 
email_logo_path = self.outdir / "assets" / f"{self.template_params['name_noslash']}_logo_light.png" + email_logo_path = self.outdir / "assets" / f"{self.config.name_noslash}_logo_light.png" self.download_pipeline_logo(f"{logo_url}?w=600&theme=light", email_logo_path) for theme in ["dark", "light"]: readme_logo_url = f"{logo_url}?w=600&theme={theme}" - readme_logo_path = ( - self.outdir / "docs" / "images" / f"{self.template_params['name_noslash']}_logo_{theme}.png" - ) + readme_logo_path = self.outdir / "docs" / "images" / f"{self.config.name_noslash}_logo_{theme}.png" self.download_pipeline_logo(readme_logo_url, readme_logo_path) def download_pipeline_logo(self, url, img_fn): diff --git a/nf_core/pipelines/create/custompipeline.py b/nf_core/pipelines/create/custompipeline.py index 6e62095ac..ad5d4fbe0 100644 --- a/nf_core/pipelines/create/custompipeline.py +++ b/nf_core/pipelines/create/custompipeline.py @@ -147,5 +147,5 @@ def on_button_pressed(self, event: Button.Pressed) -> None: this_switch = feature_input.query_one(Switch) if not this_switch.value: skip.append(this_switch.id) - self.parent.TEMPLATE_CONFIG.template_yaml = {"skip": skip} + self.parent.TEMPLATE_CONFIG.skip_features = skip self.parent.TEMPLATE_CONFIG.is_nfcore = False diff --git a/nf_core/pipelines/create/nfcorepipeline.py b/nf_core/pipelines/create/nfcorepipeline.py index 817e09e78..b1b5af816 100644 --- a/nf_core/pipelines/create/nfcorepipeline.py +++ b/nf_core/pipelines/create/nfcorepipeline.py @@ -25,5 +25,5 @@ def on_button_pressed(self, event: Button.Pressed) -> None: this_switch = feature_input.query_one(Switch) if not this_switch.value: skip.append(this_switch.id) - self.parent.TEMPLATE_CONFIG.template_yaml = {"skip": skip} + self.parent.TEMPLATE_CONFIG.skip_features = skip self.parent.TEMPLATE_CONFIG.is_nfcore = True diff --git a/nf_core/pipelines/create/utils.py b/nf_core/pipelines/create/utils.py index 3823011a8..0fb743299 100644 --- a/nf_core/pipelines/create/utils.py +++ 
b/nf_core/pipelines/create/utils.py @@ -1,7 +1,7 @@ import re from typing import Optional -from pydantic import BaseModel, field_validator +from pydantic import BaseModel, ConfigDict, field_validator from textual import on from textual.app import ComposeResult from textual.validation import ValidationResult, Validator @@ -18,9 +18,11 @@ class CreateConfig(BaseModel): version: Optional[str] = None force: Optional[bool] = None outdir: Optional[str] = None - template_yaml: Optional[dict] = None + skip_features: Optional[dict] = None is_nfcore: Optional[bool] = None + model_config = ConfigDict(extra="allow") + @field_validator("name") @classmethod def name_nospecialchars(cls, v: str) -> str: From 99f426af80a483107b8a5f41cdf950a7c38332f6 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Thu, 31 Aug 2023 09:31:40 +0200 Subject: [PATCH 015/737] small bug fixes and add a deprecation message for nf-core create command --- nf_core/__main__.py | 59 ++++++++++++++++++++++++++-- nf_core/create.py | 11 ++++-- nf_core/pipelines/create/create.tcss | 2 +- nf_core/pipelines/create/utils.py | 2 +- nf_core/pipelines/create/welcome.py | 6 +-- 5 files changed, 67 insertions(+), 13 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 63d43f8cf..beeb68737 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -459,7 +459,7 @@ def pipelines(ctx): ctx.ensure_object(dict) -# nf-core pipeline create +# nf-core pipelines create @pipelines.command("create") @click.pass_context @click.option( @@ -493,15 +493,14 @@ def create_pipeline(ctx, name, description, author, version, force, outdir, temp if (name and description and author) or (template_yaml): # If all command arguments are used, run without the interactive interface - config = organisation if template_yaml else None + config = None else: log.info( - "Ignoring provided arguments. Launching interactive nf-core pipeline creation tool." + "Launching interactive nf-core pipeline creation tool." 
"\nRun with all command line arguments to avoid using an interactive interface." ) app = PipelineCreateApp() config = app.run() - print(config) try: create_obj = PipelineCreate( @@ -520,6 +519,58 @@ def create_pipeline(ctx, name, description, author, version, force, outdir, temp sys.exit(1) +# nf-core create (deprecated) +@nf_core_cli.command(hidden=True, deprecated=True) +@click.option( + "-n", + "--name", + type=str, + help="The name of your new pipeline", +) +@click.option("-d", "--description", type=str, help="A short description of your pipeline") +@click.option("-a", "--author", type=str, help="Name of the main author(s)") +@click.option("--version", type=str, default="1.0dev", help="The initial version number to use") +@click.option("-f", "--force", is_flag=True, default=False, help="Overwrite output directory if it already exists") +@click.option("-o", "--outdir", help="Output directory for new pipeline (default: pipeline name)") +@click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") +@click.option("--plain", is_flag=True, help="Use the standard nf-core template") +def create(name, description, author, version, force, outdir, template_yaml, plain): + """ + Create a new pipeline using the nf-core template. + + Uses the nf-core template to make a skeleton Nextflow pipeline with all required + files, boilerplate code and best-practices. + """ + from nf_core.create import PipelineCreate + from nf_core.pipelines.create import PipelineCreateApp + + if (name and description and author) or (template_yaml): + # If all command arguments are used, run without the interactive interface + config = None + else: + log.info( + "Launching interactive nf-core pipeline creation tool." + "\nRun with all command line arguments to avoid using an interactive interface." 
+ ) + app = PipelineCreateApp() + config = app.run() + + try: + create_obj = PipelineCreate( + name, + description, + author, + version=version, + force=force, + outdir=outdir, + template_config=config, + ) + create_obj.init_pipeline() + except UserWarning as e: + log.error(e) + sys.exit(1) + + # nf-core modules subcommands @nf_core_cli.group() @click.option( diff --git a/nf_core/create.py b/nf_core/create.py index a60bfac75..3e639962b 100644 --- a/nf_core/create.py +++ b/nf_core/create.py @@ -41,7 +41,7 @@ class PipelineCreate: outdir (str): Path to the local output directory. template_config (str|CreateConfig): Path to template.yml file for pipeline creation settings. or pydantic model with the customisation for pipeline creation settings. organisation (str): Name of the GitHub organisation to create the pipeline. Will be the prefix of the pipeline. - from_config_file (bool): If true the pipeline will be created from the `.nf-core.yml` config file. + from_config_file (bool): If true the pipeline will be created from the `.nf-core.yml` config file. Used for tests and sync command. default_branch (str): Specifies the --initial-branch name. 
""" @@ -62,15 +62,18 @@ def __init__( if template_config is not None and isinstance(template_config, str): # Obtain a CreateConfig object from the template yaml file self.config = self.check_template_yaml_info(template_config, name, description, author) - self.update_config(self, organisation, version, force, outdir if outdir else ".") + self.update_config(organisation, version, force, outdir if outdir else ".") elif isinstance(template_config, CreateConfig): self.config = template_config + self.update_config(organisation, version, force, outdir if outdir else ".") elif from_config_file: # Try reading config file _, config_yml = nf_core.utils.load_tools_config(outdir if outdir else ".") # Obtain a CreateConfig object from `.nf-core.yml` config file if "template" in config_yml: self.config = CreateConfig(**config_yml["template"]) + else: + raise UserWarning("The template configuration was not provided.") self.skip_areas, skip_paths = self.obtain_skipped_areas_dict( self.config.skip_features, outdir if outdir else "." 
@@ -160,9 +163,9 @@ def update_config(self, organisation, version, force, pipeline_dir): if self.config.org is None: self.config.org = organisation if self.config.version is None: - self.config.version = version + self.config.version = version if version else "1.0dev" if self.config.force is None: - self.config.force = force + self.config.force = force if force else False if self.config.outdir is None: self.config.outdir = pipeline_dir if self.config.is_nfcore is None: diff --git a/nf_core/pipelines/create/create.tcss b/nf_core/pipelines/create/create.tcss index 39bc8d907..4ebc1936f 100644 --- a/nf_core/pipelines/create/create.tcss +++ b/nf_core/pipelines/create/create.tcss @@ -59,13 +59,13 @@ HorizontalScroll { margin-left: 15; margin-right: 25; margin-bottom: 1; - overflow-y: scroll; display: none; height: 0; } .displayed .help_box { display: block; + overflow-y: scroll; transition: height 50ms; height: 10; diff --git a/nf_core/pipelines/create/utils.py b/nf_core/pipelines/create/utils.py index 0fb743299..ae8b09a26 100644 --- a/nf_core/pipelines/create/utils.py +++ b/nf_core/pipelines/create/utils.py @@ -18,7 +18,7 @@ class CreateConfig(BaseModel): version: Optional[str] = None force: Optional[bool] = None outdir: Optional[str] = None - skip_features: Optional[dict] = None + skip_features: Optional[list] = None is_nfcore: Optional[bool] = None model_config = ConfigDict(extra="allow") diff --git a/nf_core/pipelines/create/welcome.py b/nf_core/pipelines/create/welcome.py index 2e75ec597..0be70cc4c 100644 --- a/nf_core/pipelines/create/welcome.py +++ b/nf_core/pipelines/create/welcome.py @@ -1,13 +1,13 @@ from textual.app import ComposeResult -from textual.screen import Screen from textual.containers import Center -from textual.widgets import Button, Footer, Header, Static, Markdown +from textual.screen import Screen +from textual.widgets import Button, Footer, Header, Markdown, Static markdown = """ # nf-core create This app will help you create a new nf-core 
pipeline. -It uses the nf-core pipeline template, which is kept at +It uses the nf-core pipeline template, which is kept within the [nf-core/tools repository](https://github.com/nf-core/tools). Using this tool is mandatory when making a pipeline that may From 09bd5dbdf8ca3aa353c33d54eaea128225de536d Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Thu, 31 Aug 2023 11:18:42 +0200 Subject: [PATCH 016/737] create command fails if one but not all arguments are provided --- nf_core/__main__.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index beeb68737..d08b489c8 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -494,6 +494,13 @@ def create_pipeline(ctx, name, description, author, version, force, outdir, temp if (name and description and author) or (template_yaml): # If all command arguments are used, run without the interactive interface config = None + elif name or description or author or version or force or outdir or organisation: + log.error( + "Command arguments are not accepted in interactive mode.\n" + "Run with all command line arguments to avoid using an interactive interface" + "or run without any command line arguments to use an interactive interface." + ) + sys.exit(1) else: log.info( "Launching interactive nf-core pipeline creation tool." @@ -547,6 +554,13 @@ def create(name, description, author, version, force, outdir, template_yaml, pla if (name and description and author) or (template_yaml): # If all command arguments are used, run without the interactive interface config = None + elif name or description or author or version or force or outdir or plain: + log.error( + "Command arguments are not accepted in interactive mode.\n" + "Run with all command line arguments to avoid using an interactive interface" + "or run without any command line arguments to use an interactive interface." + ) + sys.exit(1) else: log.info( "Launching interactive nf-core pipeline creation tool." 
From 9f121f70874d5dded896c517c4d3a6b15537b2dd Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Thu, 31 Aug 2023 12:05:06 +0200 Subject: [PATCH 017/737] ask if launching TUI when using deprecated nf-core create command --- nf_core/__main__.py | 20 +++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index d08b489c8..84ddc3e6b 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -536,7 +536,7 @@ def create_pipeline(ctx, name, description, author, version, force, outdir, temp ) @click.option("-d", "--description", type=str, help="A short description of your pipeline") @click.option("-a", "--author", type=str, help="Name of the main author(s)") -@click.option("--version", type=str, default="1.0dev", help="The initial version number to use") +@click.option("--version", type=str, help="The initial version number to use") @click.option("-f", "--force", is_flag=True, default=False, help="Overwrite output directory if it already exists") @click.option("-o", "--outdir", help="Output directory for new pipeline (default: pipeline name)") @click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") @@ -562,12 +562,18 @@ def create(name, description, author, version, force, outdir, template_yaml, pla ) sys.exit(1) else: - log.info( - "Launching interactive nf-core pipeline creation tool." - "\nRun with all command line arguments to avoid using an interactive interface." - ) - app = PipelineCreateApp() - config = app.run() + if rich.prompt.Confirm.ask( + "[blue bold]?[/] [bold] [green]nf-core create[/] command is deprecated in favor of [green]nf-core pipelines create[/].[/]\n" + "[bold]Will launch an interactive interface. Do you want to continue?[/]" + ): + log.info( + "Launching interactive nf-core pipeline creation tool." + "\nRun with all command line arguments to avoid using an interactive interface." 
+ ) + app = PipelineCreateApp() + config = app.run() + else: + sys.exit(0) try: create_obj = PipelineCreate( From 0c77029c7ca037c51a3d5985faf66d9410e8c54a Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Thu, 31 Aug 2023 13:48:13 +0200 Subject: [PATCH 018/737] add final_details screen for version and force --- nf_core/__main__.py | 2 +- nf_core/create.py | 7 +-- nf_core/pipelines/create/__init__.py | 6 +- nf_core/pipelines/create/custompipeline.py | 4 +- nf_core/pipelines/create/finaldetails.py | 64 ++++++++++++++++++++++ nf_core/pipelines/create/nfcorepipeline.py | 4 +- nf_core/pipelines/create/utils.py | 12 +++- 7 files changed, 87 insertions(+), 12 deletions(-) create mode 100644 nf_core/pipelines/create/finaldetails.py diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 84ddc3e6b..09ecf1de6 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -470,7 +470,7 @@ def pipelines(ctx): ) @click.option("-d", "--description", type=str, help="A short description of your pipeline") @click.option("-a", "--author", type=str, help="Name of the main author(s)") -@click.option("--version", type=str, default="1.0dev", help="The initial version number to use") +@click.option("--version", type=str, help="The initial version number to use") @click.option("-f", "--force", is_flag=True, default=False, help="Overwrite output directory if it already exists") @click.option("-o", "--outdir", help="Output directory for new pipeline (default: pipeline name)") @click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") diff --git a/nf_core/create.py b/nf_core/create.py index 3e639962b..42f635d00 100644 --- a/nf_core/create.py +++ b/nf_core/create.py @@ -65,7 +65,6 @@ def __init__( self.update_config(organisation, version, force, outdir if outdir else ".") elif isinstance(template_config, CreateConfig): self.config = template_config - self.update_config(organisation, version, force, outdir if outdir else ".") elif from_config_file: # Try reading 
config file _, config_yml = nf_core.utils.load_tools_config(outdir if outdir else ".") @@ -102,7 +101,7 @@ def __init__( # Set fields used by the class methods self.no_git = no_git self.default_branch = default_branch - self.force = force + self.force = self.config.force if outdir is None: outdir = os.path.join(os.getcwd(), self.config.name_noslash) self.outdir = Path(outdir) @@ -406,7 +405,7 @@ def fix_linting(self): for a customized pipeline. """ # Create a lint config - short_name = self.skip_areas["short_name"] + short_name = self.config.short_name lint_config = { "files_exist": [ "CODE_OF_CONDUCT.md", @@ -488,7 +487,7 @@ def fix_linting(self): lint_config["readme"] = ["nextflow_badge"] # If the pipeline is unbranded - if not self.skip_areas["branded"]: + if not self.config.is_nfcore: lint_config["files_unchanged"].extend([".github/ISSUE_TEMPLATE/bug_report.yml"]) # Add the lint content to the preexisting nf-core config diff --git a/nf_core/pipelines/create/__init__.py b/nf_core/pipelines/create/__init__.py index 10182c6fe..5cae6b9b5 100644 --- a/nf_core/pipelines/create/__init__.py +++ b/nf_core/pipelines/create/__init__.py @@ -4,6 +4,7 @@ from nf_core.pipelines.create.basicdetails import BasicDetails from nf_core.pipelines.create.custompipeline import CustomPipeline +from nf_core.pipelines.create.finaldetails import FinalDetails from nf_core.pipelines.create.nfcorepipeline import NfcorePipeline from nf_core.pipelines.create.pipelinetype import ChoosePipelineType from nf_core.pipelines.create.utils import CreateConfig @@ -26,6 +27,7 @@ class PipelineCreateApp(App[CreateConfig]): "choose_type": ChoosePipelineType(), "type_custom": CustomPipeline(), "type_nfcore": NfcorePipeline(), + "final_details": FinalDetails(), } # Initialise config as empty @@ -42,8 +44,8 @@ def on_button_pressed(self, event: Button.Pressed) -> None: self.switch_screen("type_nfcore") elif event.button.id == "type_custom": self.switch_screen("type_custom") - elif event.button.id == "done": 
- self.exit(self.TEMPLATE_CONFIG) + elif event.button.id == "continue": + self.switch_screen("final_details") def action_toggle_dark(self) -> None: """An action to toggle dark mode.""" diff --git a/nf_core/pipelines/create/custompipeline.py b/nf_core/pipelines/create/custompipeline.py index ad5d4fbe0..0100943c1 100644 --- a/nf_core/pipelines/create/custompipeline.py +++ b/nf_core/pipelines/create/custompipeline.py @@ -135,11 +135,11 @@ def compose(self) -> ComposeResult: ), ) yield Center( - Button("Done", id="done", variant="success"), + Button("Continue", id="continue", variant="success"), classes="cta", ) - @on(Button.Pressed, "#done") + @on(Button.Pressed, "#continue") def on_button_pressed(self, event: Button.Pressed) -> None: """Save fields to the config.""" skip = [] diff --git a/nf_core/pipelines/create/finaldetails.py b/nf_core/pipelines/create/finaldetails.py new file mode 100644 index 000000000..c7a979d9f --- /dev/null +++ b/nf_core/pipelines/create/finaldetails.py @@ -0,0 +1,64 @@ +"""A Textual app to create a pipeline.""" +from textwrap import dedent + +from textual import on +from textual.app import ComposeResult +from textual.containers import Center, Horizontal +from textual.screen import Screen +from textual.widgets import Button, Footer, Header, Input, Markdown, Static, Switch + +from nf_core.pipelines.create.utils import CreateConfig, TextInput + + +class FinalDetails(Screen): + """Name, description, author, etc.""" + + def compose(self) -> ComposeResult: + yield Header() + yield Footer() + yield Markdown( + dedent( + """ + # Final details + """ + ) + ) + + yield TextInput( + "version", + "Version", + "First version of the pipeline", + "1.0dev", + ) + with Horizontal(): + yield Switch(value=False, id="force") + yield Static("If the pipeline output directory exists, remove it and continue.", classes="custom_grid") + + yield Center( + Button("Finish", id="finish", variant="success"), + classes="cta", + ) + + @on(Button.Pressed, "#finish") + def 
on_button_pressed(self, event: Button.Pressed) -> None: + """Save fields to the config.""" + for text_input in self.query("TextInput"): + this_input = self.query_one(Input) + validation_result = this_input.validate(this_input.value) + version = this_input.value + if not validation_result.is_valid: + text_input.query_one(".validation_msg").update("\n".join(validation_result.failure_descriptions)) + else: + text_input.query_one(".validation_msg").update("") + try: + self.parent.TEMPLATE_CONFIG.version = version + except ValueError: + pass + + this_switch = self.query_one(Switch) + try: + self.parent.TEMPLATE_CONFIG.force = this_switch.value + except ValueError: + pass + + self.parent.exit(self.parent.TEMPLATE_CONFIG) diff --git a/nf_core/pipelines/create/nfcorepipeline.py b/nf_core/pipelines/create/nfcorepipeline.py index b1b5af816..7e2078429 100644 --- a/nf_core/pipelines/create/nfcorepipeline.py +++ b/nf_core/pipelines/create/nfcorepipeline.py @@ -13,11 +13,11 @@ def compose(self) -> ComposeResult: yield Footer() # TODO: add features to customise the pipeline template yield Center( - Button("Done", id="done", variant="success"), + Button("Continue", id="continue", variant="success"), classes="cta", ) - @on(Button.Pressed, "#done") + @on(Button.Pressed, "#continue") def on_button_pressed(self, event: Button.Pressed) -> None: """Save fields to the config.""" skip = [] diff --git a/nf_core/pipelines/create/utils.py b/nf_core/pipelines/create/utils.py index ae8b09a26..19f2df6d6 100644 --- a/nf_core/pipelines/create/utils.py +++ b/nf_core/pipelines/create/utils.py @@ -31,7 +31,7 @@ def name_nospecialchars(cls, v: str) -> str: raise ValueError("Must be lowercase without punctuation.") return v - @field_validator("org", "description", "author") + @field_validator("org", "description", "author", "version") @classmethod def notempty(cls, v: str) -> str: """Check that string values are not empty.""" @@ -39,6 +39,16 @@ def notempty(cls, v: str) -> str: raise 
ValueError("Cannot be left empty.") return v + @field_validator("version") + @classmethod + def version_nospecialchars(cls, v: str) -> str: + """Check that the pipeline version is simple.""" + if not re.match(r"^([0-9]+)(\.?([0-9]+))*(dev)?$", v): + raise ValueError( + "Must contain at least one number, and can be prefixed by 'dev'. Do not use a 'v' prefix or spaces." + ) + return v + class TextInput(Static): """Widget for text inputs. From bf9a541c43011ea325ba563697421a3df342f132 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Thu, 31 Aug 2023 15:06:37 +0200 Subject: [PATCH 019/737] add nfcorepipeline grid with features to select --- nf_core/create.py | 2 +- nf_core/pipelines/create/custompipeline.py | 68 +--------------------- nf_core/pipelines/create/nfcorepipeline.py | 13 ++++- nf_core/pipelines/create/utils.py | 68 +++++++++++++++++++++- 4 files changed, 82 insertions(+), 69 deletions(-) diff --git a/nf_core/create.py b/nf_core/create.py index 42f635d00..844f69754 100644 --- a/nf_core/create.py +++ b/nf_core/create.py @@ -36,7 +36,7 @@ class PipelineCreate: author (str): Authors name of the pipeline. version (str): Version flag. Semantic versioning only. Defaults to `1.0dev`. no_git (bool): Prevents the creation of a local Git repository for the pipeline. Defaults to False. - force (bool): Overwrites a given workflow directory with the same name. Defaults to False. + force (bool): Overwrites a given workflow directory with the same name. Defaults to False. Used for tests and sync command. May the force be with you. outdir (str): Path to the local output directory. template_config (str|CreateConfig): Path to template.yml file for pipeline creation settings. or pydantic model with the customisation for pipeline creation settings. 
diff --git a/nf_core/pipelines/create/custompipeline.py b/nf_core/pipelines/create/custompipeline.py index 0100943c1..3693dc989 100644 --- a/nf_core/pipelines/create/custompipeline.py +++ b/nf_core/pipelines/create/custompipeline.py @@ -1,22 +1,10 @@ from textual import on from textual.app import ComposeResult -from textual.containers import Center, HorizontalScroll, ScrollableContainer -from textual.reactive import reactive +from textual.containers import Center, ScrollableContainer from textual.screen import Screen -from textual.widgets import Button, Footer, Header, Markdown, Static, Switch +from textual.widgets import Button, Footer, Header, Switch -markdown_genomes = """ -Nf-core pipelines are configured to use a copy of the most common reference genome files. - -By selecting this option, your pipeline will include a configuration file specifying the paths to these files. - -The required code to use these files will also be included in the template. -When the pipeline user provides an appropriate genome key, -the pipeline will automatically download the required reference files. - -For more information about reference genomes in nf-core pipelines, -see the [nf-core docs](https://nf-co.re/docs/usage/reference_genomes). -""" +from nf_core.pipelines.create.utils import PipelineFeature, markdown_genomes markdown_ci = """ Nf-core provides a set of Continuous Integration (CI) tests for Github. 
@@ -52,56 +40,6 @@ """ -class HelpText(Markdown): - """A class to show a text box with help text.""" - - def __init__(self, markdown: str, classes: str) -> None: - super().__init__(markdown=markdown, classes=classes) - - def show(self) -> None: - """Method to show the help text box.""" - self.add_class("displayed") - - def hide(self) -> None: - """Method to hide the help text box.""" - self.remove_class("displayed") - - -class PipelineFeature(Static): - """Widget for the selection of pipeline features.""" - - def __init__(self, markdown: str, title: str, subtitle: str, field_id: str) -> None: - self.markdown = markdown - self.title = title - self.subtitle = subtitle - self.field_id = field_id - super().__init__() - - def on_button_pressed(self, event: Button.Pressed) -> None: - """When the button is pressed, change the type of the button.""" - if event.button.id == "show_help": - self.add_class("displayed") - elif event.button.id == "hide_help": - self.remove_class("displayed") - - def compose(self) -> ComposeResult: - """ - Create child widgets. - - Displayed row with a switch, a short text description and a help button. - Hidden row with a help text box. 
- """ - yield HorizontalScroll( - Switch(value=True, id=self.field_id), - Static(self.title, classes="feature_title"), - Static(self.subtitle, classes="feature_subtitle"), - Button("Show help", id="show_help", variant="primary"), - Button("Hide help", id="hide_help"), - classes="custom_grid", - ) - yield HelpText(self.markdown, classes="help_box") - - class CustomPipeline(Screen): """Select if the pipeline will use genomic data.""" diff --git a/nf_core/pipelines/create/nfcorepipeline.py b/nf_core/pipelines/create/nfcorepipeline.py index 7e2078429..2af99af5f 100644 --- a/nf_core/pipelines/create/nfcorepipeline.py +++ b/nf_core/pipelines/create/nfcorepipeline.py @@ -2,7 +2,9 @@ from textual.app import ComposeResult from textual.containers import Center, HorizontalScroll, ScrollableContainer from textual.screen import Screen -from textual.widgets import Button, Footer, Header, Markdown, Static, Switch +from textual.widgets import Button, Footer, Header, Switch + +from nf_core.pipelines.create.utils import PipelineFeature, markdown_genomes class NfcorePipeline(Screen): @@ -11,7 +13,14 @@ class NfcorePipeline(Screen): def compose(self) -> ComposeResult: yield Header() yield Footer() - # TODO: add features to customise the pipeline template + yield ScrollableContainer( + PipelineFeature( + markdown_genomes, + "Use reference genomes", + "The pipeline will be configured to use a copy of the most common reference genome files from iGenomes", + "igenomes", + ), + ) yield Center( Button("Continue", id="continue", variant="success"), classes="cta", diff --git a/nf_core/pipelines/create/utils.py b/nf_core/pipelines/create/utils.py index 19f2df6d6..1fbd646d3 100644 --- a/nf_core/pipelines/create/utils.py +++ b/nf_core/pipelines/create/utils.py @@ -4,8 +4,9 @@ from pydantic import BaseModel, ConfigDict, field_validator from textual import on from textual.app import ComposeResult +from textual.containers import HorizontalScroll from textual.validation import ValidationResult, 
Validator -from textual.widgets import Input, Static +from textual.widgets import Button, Input, Markdown, Static, Switch class CreateConfig(BaseModel): @@ -103,3 +104,68 @@ def validate(self, value: str) -> ValidationResult: return self.success() except ValueError as e: return self.failure(", ".join([err["msg"] for err in e.errors()])) + + +class HelpText(Markdown): + """A class to show a text box with help text.""" + + def __init__(self, markdown: str, classes: str) -> None: + super().__init__(markdown=markdown, classes=classes) + + def show(self) -> None: + """Method to show the help text box.""" + self.add_class("displayed") + + def hide(self) -> None: + """Method to hide the help text box.""" + self.remove_class("displayed") + + +class PipelineFeature(Static): + """Widget for the selection of pipeline features.""" + + def __init__(self, markdown: str, title: str, subtitle: str, field_id: str) -> None: + self.markdown = markdown + self.title = title + self.subtitle = subtitle + self.field_id = field_id + super().__init__() + + def on_button_pressed(self, event: Button.Pressed) -> None: + """When the button is pressed, change the type of the button.""" + if event.button.id == "show_help": + self.add_class("displayed") + elif event.button.id == "hide_help": + self.remove_class("displayed") + + def compose(self) -> ComposeResult: + """ + Create child widgets. + + Displayed row with a switch, a short text description and a help button. + Hidden row with a help text box. 
+ """ + yield HorizontalScroll( + Switch(value=True, id=self.field_id), + Static(self.title, classes="feature_title"), + Static(self.subtitle, classes="feature_subtitle"), + Button("Show help", id="show_help", variant="primary"), + Button("Hide help", id="hide_help"), + classes="custom_grid", + ) + yield HelpText(self.markdown, classes="help_box") + + +## Markdown text to reuse in different screens +markdown_genomes = """ +Nf-core pipelines are configured to use a copy of the most common reference genome files. + +By selecting this option, your pipeline will include a configuration file specifying the paths to these files. + +The required code to use these files will also be included in the template. +When the pipeline user provides an appropriate genome key, +the pipeline will automatically download the required reference files. + +For more information about reference genomes in nf-core pipelines, +see the [nf-core docs](https://nf-co.re/docs/usage/reference_genomes). +""" From fec5e3eb6481109ee49850d2582e8bb95e11ae97 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Thu, 31 Aug 2023 15:21:48 +0200 Subject: [PATCH 020/737] first ask if the pipeline will be nf-core or custom before basicdetails --- nf_core/pipelines/create/__init__.py | 11 ++++++++--- nf_core/pipelines/create/basicdetails.py | 6 +++++- 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/nf_core/pipelines/create/__init__.py b/nf_core/pipelines/create/__init__.py index 5cae6b9b5..0fe143b3b 100644 --- a/nf_core/pipelines/create/__init__.py +++ b/nf_core/pipelines/create/__init__.py @@ -33,17 +33,22 @@ class PipelineCreateApp(App[CreateConfig]): # Initialise config as empty TEMPLATE_CONFIG = CreateConfig() + # Initialise pipeline type + PIPELINE_TYPE = None + def on_mount(self) -> None: self.push_screen("welcome") def on_button_pressed(self, event: Button.Pressed) -> None: """Handle all button pressed events.""" if event.button.id == "start": - self.switch_screen("basic_details") + 
self.switch_screen("choose_type") elif event.button.id == "type_nfcore": - self.switch_screen("type_nfcore") + self.PIPELINE_TYPE = "nfcore" + self.switch_screen("basic_details") elif event.button.id == "type_custom": - self.switch_screen("type_custom") + self.PIPELINE_TYPE = "custom" + self.switch_screen("basic_details") elif event.button.id == "continue": self.switch_screen("final_details") diff --git a/nf_core/pipelines/create/basicdetails.py b/nf_core/pipelines/create/basicdetails.py index 5ffae135c..dc7248d97 100644 --- a/nf_core/pipelines/create/basicdetails.py +++ b/nf_core/pipelines/create/basicdetails.py @@ -30,6 +30,7 @@ def compose(self) -> ComposeResult: "GitHub organisation", "nf-core", classes="column", + disabled=self.parent.PIPELINE_TYPE == "nfcore", ) yield TextInput( "name", @@ -67,6 +68,9 @@ def on_button_pressed(self, event: Button.Pressed) -> None: text_input.query_one(".validation_msg").update("") try: self.parent.TEMPLATE_CONFIG = CreateConfig(**config) - self.parent.switch_screen("choose_type") + if self.parent.PIPELINE_TYPE == "nfcore": + self.parent.switch_screen("type_nfcore") + elif self.parent.PIPELINE_TYPE == "custom": + self.parent.switch_screen("type_custom") except ValueError: pass From 0ef16b57d664acf4b46fedbd7ddf238c32455d4f Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Thu, 31 Aug 2023 15:27:33 +0200 Subject: [PATCH 021/737] use kwargs --- nf_core/pipelines/create/utils.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/nf_core/pipelines/create/utils.py b/nf_core/pipelines/create/utils.py index 1fbd646d3..cd81e41ab 100644 --- a/nf_core/pipelines/create/utils.py +++ b/nf_core/pipelines/create/utils.py @@ -109,8 +109,8 @@ def validate(self, value: str) -> ValidationResult: class HelpText(Markdown): """A class to show a text box with help text.""" - def __init__(self, markdown: str, classes: str) -> None: - super().__init__(markdown=markdown, classes=classes) + def __init__(self, **kwargs) -> None: + 
super().__init__(**kwargs) def show(self) -> None: """Method to show the help text box.""" @@ -124,12 +124,12 @@ def hide(self) -> None: class PipelineFeature(Static): """Widget for the selection of pipeline features.""" - def __init__(self, markdown: str, title: str, subtitle: str, field_id: str) -> None: + def __init__(self, markdown: str, title: str, subtitle: str, field_id: str, **kwargs) -> None: + super().__init__(**kwargs) self.markdown = markdown self.title = title self.subtitle = subtitle self.field_id = field_id - super().__init__() def on_button_pressed(self, event: Button.Pressed) -> None: """When the button is pressed, change the type of the button.""" @@ -153,7 +153,7 @@ def compose(self) -> ComposeResult: Button("Hide help", id="hide_help"), classes="custom_grid", ) - yield HelpText(self.markdown, classes="help_box") + yield HelpText(markdown=self.markdown, classes="help_box") ## Markdown text to reuse in different screens From 238767f73785e5bb4f6a5c28e4c57906e09e906e Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 21 Aug 2023 10:12:16 +0200 Subject: [PATCH 022/737] remove inexisten variable version from send-tweet github workflow --- .../.github/workflows/release-announcments.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/nf_core/pipeline-template/.github/workflows/release-announcments.yml b/nf_core/pipeline-template/.github/workflows/release-announcments.yml index f2a45d714..a6e976855 100644 --- a/nf_core/pipeline-template/.github/workflows/release-announcments.yml +++ b/nf_core/pipeline-template/.github/workflows/release-announcments.yml @@ -41,8 +41,7 @@ jobs: consumer_key=os.getenv("TWITTER_CONSUMER_KEY"), consumer_secret=os.getenv("TWITTER_CONSUMER_SECRET"), ) - version = os.getenv("VERSION").strip('"') - tweet = os.getenv("TWEET").format(version=version) + tweet = os.getenv("TWEET") client.create_tweet(text=tweet) env: TWEET: | From 7b4a342d577466ad50171f3a975534e8ad7c34f3 Mon Sep 17 00:00:00 2001 From: mirpedrol 
Date: Thu, 24 Aug 2023 16:14:01 +0200 Subject: [PATCH 023/737] fix typo in gh_badges jinja variable to github_badges --- nf_core/pipeline-template/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipeline-template/README.md b/nf_core/pipeline-template/README.md index 27e96de7f..cddf8d13c 100644 --- a/nf_core/pipeline-template/README.md +++ b/nf_core/pipeline-template/README.md @@ -3,7 +3,7 @@ # ![{{ name }}](docs/images/{{ logo_light }}#gh-light-mode-only) ![{{ name }}](docs/images/{{ logo_dark }}#gh-dark-mode-only) {% endif -%} -{% if gh_badges -%} +{% if github_badges -%} [![GitHub Actions CI Status](https://github.com/{{ name }}/workflows/nf-core%20CI/badge.svg)](https://github.com/{{ name }}/actions?query=workflow%3A%22nf-core+CI%22) [![GitHub Actions Linting Status](https://github.com/{{ name }}/workflows/nf-core%20linting/badge.svg)](https://github.com/{{ name }}/actions?query=workflow%3A%22nf-core+linting%22){% endif -%} {% if branded -%}[![AWS CI](https://img.shields.io/badge/CI%20tests-full%20size-FF9900?labelColor=000000&logo=Amazon%20AWS)](https://nf-co.re/{{ short_name }}/results){% endif -%} From b3fa25e60c87b7d82dedd1c58c78e6e5aade3fe9 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 30 Aug 2023 08:44:02 +0200 Subject: [PATCH 024/737] bump version of nf-test snap files --- nf_core/bump_version.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/nf_core/bump_version.py b/nf_core/bump_version.py index 5f4616edf..ada7f0b99 100644 --- a/nf_core/bump_version.py +++ b/nf_core/bump_version.py @@ -60,6 +60,20 @@ def bump_pipeline_version(pipeline_obj, new_version): ), ], ) + # nf-test snap files + pipeline_name = pipeline_obj.nf_config.get("manifest.name", "").strip(" '\"") + snap_files = [f for f in Path().glob("tests/pipeline/*.snap")] + for snap_file in snap_files: + update_file_version( + snap_file, + pipeline_obj, + [ + ( + f"{pipeline_name}={current_version}", + f"{pipeline_name}={new_version}", + ) 
+ ], + ) def bump_nextflow_version(pipeline_obj, new_version): From 6ff69b2bc94ddc650f791241382c7ca0b6f35167 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 30 Aug 2023 08:56:48 +0200 Subject: [PATCH 025/737] update changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index b7222da10..4c6cfc1fa 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -33,6 +33,7 @@ - Update the Code of Conduct ([#2381](https://github.com/nf-core/tools/pull/2381)) - Remove `--no-git` option from `nf-core create` ([#2394](https://github.com/nf-core/tools/pull/2394)) - Throw warning when custom workflow name contains special characters ([#2401](https://github.com/nf-core/tools/pull/2401)) +- Bump version of nf-test snapshot files with `nf-core bump-version` ([#2410](https://github.com/nf-core/tools/pull/2410)) # [v2.9 - Chromium Falcon](https://github.com/nf-core/tools/releases/tag/2.9) + [2023-06-29] From 17ba9711bcab71402a714287dd85c55759c41682 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 1 Sep 2023 08:49:22 +0200 Subject: [PATCH 026/737] add outdir textinput and update config --- nf_core/pipelines/create/custompipeline.py | 3 +-- nf_core/pipelines/create/finaldetails.py | 30 ++++++++++++++-------- nf_core/pipelines/create/nfcorepipeline.py | 3 +-- nf_core/pipelines/create/utils.py | 11 +++++++- 4 files changed, 32 insertions(+), 15 deletions(-) diff --git a/nf_core/pipelines/create/custompipeline.py b/nf_core/pipelines/create/custompipeline.py index 3693dc989..5cc2f87d9 100644 --- a/nf_core/pipelines/create/custompipeline.py +++ b/nf_core/pipelines/create/custompipeline.py @@ -85,5 +85,4 @@ def on_button_pressed(self, event: Button.Pressed) -> None: this_switch = feature_input.query_one(Switch) if not this_switch.value: skip.append(this_switch.id) - self.parent.TEMPLATE_CONFIG.skip_features = skip - self.parent.TEMPLATE_CONFIG.is_nfcore = False + self.parent.TEMPLATE_CONFIG.__dict__.update({"skip_features": skip, "is_nfcore": 
False}) diff --git a/nf_core/pipelines/create/finaldetails.py b/nf_core/pipelines/create/finaldetails.py index c7a979d9f..4f65f9762 100644 --- a/nf_core/pipelines/create/finaldetails.py +++ b/nf_core/pipelines/create/finaldetails.py @@ -24,12 +24,21 @@ def compose(self) -> ComposeResult: ) ) - yield TextInput( - "version", - "Version", - "First version of the pipeline", - "1.0dev", - ) + with Horizontal(): + yield TextInput( + "version", + "Version", + "First version of the pipeline", + "1.0dev", + classes="column", + ) + yield TextInput( + "outdir", + "Output directory", + "Path to the output directory where the pipeline will be created", + ".", + classes="column", + ) with Horizontal(): yield Switch(value=False, id="force") yield Static("If the pipeline output directory exists, remove it and continue.", classes="custom_grid") @@ -42,22 +51,23 @@ def compose(self) -> ComposeResult: @on(Button.Pressed, "#finish") def on_button_pressed(self, event: Button.Pressed) -> None: """Save fields to the config.""" + new_config = {} for text_input in self.query("TextInput"): - this_input = self.query_one(Input) + this_input = text_input.query_one(Input) validation_result = this_input.validate(this_input.value) - version = this_input.value + new_config[text_input.field_id] = this_input.value if not validation_result.is_valid: text_input.query_one(".validation_msg").update("\n".join(validation_result.failure_descriptions)) else: text_input.query_one(".validation_msg").update("") try: - self.parent.TEMPLATE_CONFIG.version = version + self.parent.TEMPLATE_CONFIG.__dict__.update(new_config) except ValueError: pass this_switch = self.query_one(Switch) try: - self.parent.TEMPLATE_CONFIG.force = this_switch.value + self.parent.TEMPLATE_CONFIG.__dict__.update({"force": this_switch.value}) except ValueError: pass diff --git a/nf_core/pipelines/create/nfcorepipeline.py b/nf_core/pipelines/create/nfcorepipeline.py index 2af99af5f..a8902daf3 100644 --- 
a/nf_core/pipelines/create/nfcorepipeline.py +++ b/nf_core/pipelines/create/nfcorepipeline.py @@ -34,5 +34,4 @@ def on_button_pressed(self, event: Button.Pressed) -> None: this_switch = feature_input.query_one(Switch) if not this_switch.value: skip.append(this_switch.id) - self.parent.TEMPLATE_CONFIG.skip_features = skip - self.parent.TEMPLATE_CONFIG.is_nfcore = True + self.parent.TEMPLATE_CONFIG.__dict__.update({"skip_features": skip, "is_nfcore": True}) diff --git a/nf_core/pipelines/create/utils.py b/nf_core/pipelines/create/utils.py index cd81e41ab..5566c17c8 100644 --- a/nf_core/pipelines/create/utils.py +++ b/nf_core/pipelines/create/utils.py @@ -1,4 +1,5 @@ import re +from pathlib import Path from typing import Optional from pydantic import BaseModel, ConfigDict, field_validator @@ -32,7 +33,7 @@ def name_nospecialchars(cls, v: str) -> str: raise ValueError("Must be lowercase without punctuation.") return v - @field_validator("org", "description", "author", "version") + @field_validator("org", "description", "author", "version", "outdir") @classmethod def notempty(cls, v: str) -> str: """Check that string values are not empty.""" @@ -50,6 +51,14 @@ def version_nospecialchars(cls, v: str) -> str: ) return v + @field_validator("outdir") + @classmethod + def path_valid(cls, v: str) -> str: + """Check that a path is valid.""" + if not Path(v).is_dir(): + raise ValueError("Must be a valid path.") + return v + class TextInput(Static): """Widget for text inputs. 
From 0c64de879e97cfa7bcd88e6c7fd2dce472c276f6 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 1 Sep 2023 09:13:11 +0200 Subject: [PATCH 027/737] refactor jinja template 'branded' to 'is_nfcore' --- nf_core/create.py | 14 +++++++------- .../pipeline-template/.github/CONTRIBUTING.md | 4 ++-- .../.github/PULL_REQUEST_TEMPLATE.md | 2 +- nf_core/pipeline-template/README.md | 18 +++++++++--------- .../assets/email_template.txt | 2 +- .../assets/multiqc_config.yml | 4 ++-- nf_core/pipeline-template/docs/README.md | 2 +- nf_core/pipeline-template/docs/usage.md | 2 +- .../lib/NfcoreTemplate.groovy | 2 +- nf_core/pipeline-template/main.nf | 2 +- nf_core/pipeline-template/nextflow_schema.json | 2 +- 11 files changed, 27 insertions(+), 27 deletions(-) diff --git a/nf_core/create.py b/nf_core/create.py index 844f69754..cf0d450a1 100644 --- a/nf_core/create.py +++ b/nf_core/create.py @@ -85,7 +85,7 @@ def __init__( ], "ci": [".github/workflows/"], "igenomes": ["conf/igenomes.config"], - "branded": [ + "is_nfcore": [ ".github/ISSUE_TEMPLATE/config", "CODE_OF_CONDUCT.md", ".github/workflows/awsfulltest.yml", @@ -187,11 +187,11 @@ def obtain_skipped_areas_dict(self, features_to_skip, pipeline_dir): # Define the different template areas, and what actions to take for each # if they are skipped template_areas = { - "github": {"name": "GitHub hosting", "file": True, "content": False}, - "ci": {"name": "GitHub CI", "file": True, "content": False}, - "github_badges": {"name": "GitHub badges", "file": False, "content": True}, - "igenomes": {"name": "iGenomes config", "file": True, "content": True}, - "nf_core_configs": {"name": "nf-core/configs", "file": False, "content": True}, + "github": {"file": True, "content": False}, + "ci": {"file": True, "content": False}, + "github_badges": {"file": False, "content": True}, + "igenomes": {"file": True, "content": True}, + "nf_core_configs": {"file": False, "content": True}, } skip_paths = [] @@ -486,7 +486,7 @@ def fix_linting(self): if not 
self.skip_areas["github_badges"] or not self.skip_areas["github"]: lint_config["readme"] = ["nextflow_badge"] - # If the pipeline is unbranded + # If the pipeline is not nf-core if not self.config.is_nfcore: lint_config["files_unchanged"].extend([".github/ISSUE_TEMPLATE/bug_report.yml"]) diff --git a/nf_core/pipeline-template/.github/CONTRIBUTING.md b/nf_core/pipeline-template/.github/CONTRIBUTING.md index 6a1b9a917..3a9c0fbff 100644 --- a/nf_core/pipeline-template/.github/CONTRIBUTING.md +++ b/nf_core/pipeline-template/.github/CONTRIBUTING.md @@ -9,7 +9,7 @@ Please use the pre-filled template to save time. However, don't be put off by this template - other more general issues and suggestions are welcome! Contributions to the code are even more welcome ;) -{% if branded -%} +{% if is_nfcore -%} :::info If you need help using or modifying {{ name }} then the best place to ask is on the nf-core Slack [#{{ short_name }}](https://nfcore.slack.com/channels/{{ short_name }}) channel ([join our Slack here](https://nf-co.re/join/slack)). @@ -58,7 +58,7 @@ These tests are run both with the latest available version of `Nextflow` and als - Fix the bug, and bump version (X.Y.Z+1). - A PR should be made on `master` from patch to directly this particular bug. -{% if branded -%} +{% if is_nfcore -%} ## Getting help diff --git a/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md b/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md index 0f81ebaa4..03c700bac 100644 --- a/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md +++ b/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md @@ -16,7 +16,7 @@ Learn more about contributing: [CONTRIBUTING.md](https://github.com/{{ name }}/t - [ ] This comment contains a description of changes (with reason). - [ ] If you've fixed a bug or added code that should be tested, add tests! 
- [ ] If you've added a new tool - have you followed the pipeline conventions in the [contribution docs](https://github.com/{{ name }}/tree/master/.github/CONTRIBUTING.md) - {%- if branded %} + {%- if is_nfcore %} - [ ] If necessary, also make a PR on the {{ name }} _branch_ on the [nf-core/test-datasets](https://github.com/nf-core/test-datasets) repository. {%- endif %} - [ ] Make sure your code lints (`nf-core lint`). diff --git a/nf_core/pipeline-template/README.md b/nf_core/pipeline-template/README.md index cddf8d13c..906461145 100644 --- a/nf_core/pipeline-template/README.md +++ b/nf_core/pipeline-template/README.md @@ -1,4 +1,4 @@ -{% if branded -%} +{% if is_nfcore -%} # ![{{ name }}](docs/images/{{ logo_light }}#gh-light-mode-only) ![{{ name }}](docs/images/{{ logo_dark }}#gh-dark-mode-only) @@ -6,7 +6,7 @@ {% if github_badges -%} [![GitHub Actions CI Status](https://github.com/{{ name }}/workflows/nf-core%20CI/badge.svg)](https://github.com/{{ name }}/actions?query=workflow%3A%22nf-core+CI%22) [![GitHub Actions Linting Status](https://github.com/{{ name }}/workflows/nf-core%20linting/badge.svg)](https://github.com/{{ name }}/actions?query=workflow%3A%22nf-core+linting%22){% endif -%} -{% if branded -%}[![AWS CI](https://img.shields.io/badge/CI%20tests-full%20size-FF9900?labelColor=000000&logo=Amazon%20AWS)](https://nf-co.re/{{ short_name }}/results){% endif -%} +{% if is_nfcore -%}[![AWS CI](https://img.shields.io/badge/CI%20tests-full%20size-FF9900?labelColor=000000&logo=Amazon%20AWS)](https://nf-co.re/{{ short_name }}/results){% endif -%} {%- if github_badges -%} [![Cite with Zenodo](http://img.shields.io/badge/DOI-10.5281/zenodo.XXXXXXX-1073c8?labelColor=000000)](https://doi.org/10.5281/zenodo.XXXXXXX) @@ -17,10 +17,10 @@ [![Launch on Nextflow Tower](https://img.shields.io/badge/Launch%20%F0%9F%9A%80-Nextflow%20Tower-%234256e7)](https://tower.nf/launch?pipeline=https://github.com/{{ name }}) {% endif -%} -{%- if branded -%}[![Get help on 
Slack](http://img.shields.io/badge/slack-nf--core%20%23{{ short_name }}-4A154B?labelColor=000000&logo=slack)](https://nfcore.slack.com/channels/{{ short_name }}){% endif -%} -{%- if branded -%}[![Follow on Twitter](http://img.shields.io/badge/twitter-%40nf__core-1DA1F2?labelColor=000000&logo=twitter)](https://twitter.com/nf_core){% endif -%} -{%- if branded -%}[![Follow on Mastodon](https://img.shields.io/badge/mastodon-nf__core-6364ff?labelColor=FFFFFF&logo=mastodon)](https://mstdn.science/@nf_core){% endif -%} -{%- if branded -%}[![Watch on YouTube](http://img.shields.io/badge/youtube-nf--core-FF0000?labelColor=000000&logo=youtube)](https://www.youtube.com/c/nf-core) +{%- if is_nfcore -%}[![Get help on Slack](http://img.shields.io/badge/slack-nf--core%20%23{{ short_name }}-4A154B?labelColor=000000&logo=slack)](https://nfcore.slack.com/channels/{{ short_name }}){% endif -%} +{%- if is_nfcore -%}[![Follow on Twitter](http://img.shields.io/badge/twitter-%40nf__core-1DA1F2?labelColor=000000&logo=twitter)](https://twitter.com/nf_core){% endif -%} +{%- if is_nfcore -%}[![Follow on Mastodon](https://img.shields.io/badge/mastodon-nf__core-6364ff?labelColor=FFFFFF&logo=mastodon)](https://mstdn.science/@nf_core){% endif -%} +{%- if is_nfcore -%}[![Watch on YouTube](http://img.shields.io/badge/youtube-nf--core-FF0000?labelColor=000000&logo=youtube)](https://www.youtube.com/c/nf-core) {% endif -%} @@ -82,7 +82,7 @@ provided by the `-c` Nextflow option can be used to provide any configuration _* see [docs](https://nf-co.re/usage/configuration#custom-configuration-files). ::: -{% if branded -%} +{% if is_nfcore -%} For more details and further functionality, please refer to the [usage documentation](https://nf-co.re/{{ short_name }}/usage) and the [parameter documentation](https://nf-co.re/{{ short_name }}/parameters). 
@@ -106,7 +106,7 @@ We thank the following people for their extensive assistance in the development If you would like to contribute to this pipeline, please see the [contributing guidelines](.github/CONTRIBUTING.md). -{% if branded -%} +{% if is_nfcore -%} For further information or help, don't hesitate to get in touch on the [Slack `#{{ short_name }}` channel](https://nfcore.slack.com/channels/{{ short_name }}) (you can join with [this invite](https://nf-co.re/join/slack)). {% endif -%} @@ -120,7 +120,7 @@ For further information or help, don't hesitate to get in touch on the [Slack `# An extensive list of references for the tools used by the pipeline can be found in the [`CITATIONS.md`](CITATIONS.md) file. -{% if branded -%} +{% if is_nfcore -%} You can cite the `nf-core` publication as follows: {% else -%} diff --git a/nf_core/pipeline-template/assets/email_template.txt b/nf_core/pipeline-template/assets/email_template.txt index edc8f7101..8c40733fd 100644 --- a/nf_core/pipeline-template/assets/email_template.txt +++ b/nf_core/pipeline-template/assets/email_template.txt @@ -1,4 +1,4 @@ -{% if branded -%} +{% if is_nfcore -%} ---------------------------------------------------- ,--./,-. 
___ __ __ __ ___ /,-._.--~\\ diff --git a/nf_core/pipeline-template/assets/multiqc_config.yml b/nf_core/pipeline-template/assets/multiqc_config.yml index 9423ee53f..4d9762106 100644 --- a/nf_core/pipeline-template/assets/multiqc_config.yml +++ b/nf_core/pipeline-template/assets/multiqc_config.yml @@ -1,11 +1,11 @@ report_comment: > {% if 'dev' in version -%} This report has been generated by the {{ name }} - analysis pipeline.{% if branded %} For information about how to interpret these results, please see the + analysis pipeline.{% if is_nfcore %} For information about how to interpret these results, please see the documentation.{% endif %} {%- else %} This report has been generated by the {{ name }} - analysis pipeline.{% if branded %} For information about how to interpret these results, please see the + analysis pipeline.{% if is_nfcore %} For information about how to interpret these results, please see the documentation.{% endif %} {% endif %} report_section_order: diff --git a/nf_core/pipeline-template/docs/README.md b/nf_core/pipeline-template/docs/README.md index e94889c53..9a237c1ad 100644 --- a/nf_core/pipeline-template/docs/README.md +++ b/nf_core/pipeline-template/docs/README.md @@ -6,7 +6,7 @@ The {{ name }} documentation is split into the following pages: - An overview of how the pipeline works, how to run it and a description of all of the different command-line flags. - [Output](output.md) - An overview of the different results produced by the pipeline and how to interpret them. 
- {%- if branded %} + {%- if is_nfcore %} You can find a lot more documentation about installing, configuring and running nf-core pipelines on the website: [https://nf-co.re](https://nf-co.re) {% else %} diff --git a/nf_core/pipeline-template/docs/usage.md b/nf_core/pipeline-template/docs/usage.md index 6dba3032a..ca06bff9c 100644 --- a/nf_core/pipeline-template/docs/usage.md +++ b/nf_core/pipeline-template/docs/usage.md @@ -1,6 +1,6 @@ # {{ name }}: Usage -{% if branded -%} +{% if is_nfcore -%} ## :warning: Please read this documentation on the nf-core website: [https://nf-co.re/{{ short_name }}/usage](https://nf-co.re/{{ short_name }}/usage) diff --git a/nf_core/pipeline-template/lib/NfcoreTemplate.groovy b/nf_core/pipeline-template/lib/NfcoreTemplate.groovy index a1a726d69..e69de4f6b 100755 --- a/nf_core/pipeline-template/lib/NfcoreTemplate.groovy +++ b/nf_core/pipeline-template/lib/NfcoreTemplate.groovy @@ -322,7 +322,7 @@ class NfcoreTemplate { String workflow_version = NfcoreTemplate.version(workflow) String.format( """\n - ${dashedLine(monochrome_logs)}{% if branded %} + ${dashedLine(monochrome_logs)}{% if is_nfcore %} ${colors.green},--.${colors.black}/${colors.green},-.${colors.reset} ${colors.blue} ___ __ __ __ ___ ${colors.green}/,-._.--~\'${colors.reset} ${colors.blue} |\\ | |__ __ / ` / \\ |__) |__ ${colors.yellow}} {${colors.reset} diff --git a/nf_core/pipeline-template/main.nf b/nf_core/pipeline-template/main.nf index 3d632eb8c..210cff616 100644 --- a/nf_core/pipeline-template/main.nf +++ b/nf_core/pipeline-template/main.nf @@ -4,7 +4,7 @@ {{ name }} ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Github : https://github.com/{{ name }} -{%- if branded %} +{%- if is_nfcore %} Website: https://nf-co.re/{{ short_name }} Slack : https://nfcore.slack.com/channels/{{ short_name }} {%- endif %} diff --git a/nf_core/pipeline-template/nextflow_schema.json b/nf_core/pipeline-template/nextflow_schema.json index 
49549d046..7bef33d1b 100644 --- a/nf_core/pipeline-template/nextflow_schema.json +++ b/nf_core/pipeline-template/nextflow_schema.json @@ -19,7 +19,7 @@ "mimetype": "text/csv", "pattern": "^\\S+\\.csv$", "description": "Path to comma-separated file containing information about the samples in the experiment.", - "help_text": "You will need to create a design file with information about the samples in your experiment before running the pipeline. Use this parameter to specify its location. It has to be a comma-separated file with 3 columns, and a header row.{% if branded %} See [usage docs](https://nf-co.re/{{ short_name }}/usage#samplesheet-input).{% endif %}", + "help_text": "You will need to create a design file with information about the samples in your experiment before running the pipeline. Use this parameter to specify its location. It has to be a comma-separated file with 3 columns, and a header row.{% if is_nfcore %} See [usage docs](https://nf-co.re/{{ short_name }}/usage#samplesheet-input).{% endif %}", "fa_icon": "fas fa-file-csv" }, "outdir": { From 772011718798047c5ec394da91beb888b5c9770f Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 1 Sep 2023 09:56:20 +0200 Subject: [PATCH 028/737] refactor params_dict to jinja_params and use that instead of the config --- nf_core/create.py | 65 +++++++++++++++++++++++++---------------------- 1 file changed, 34 insertions(+), 31 deletions(-) diff --git a/nf_core/create.py b/nf_core/create.py index cf0d450a1..b607c78b8 100644 --- a/nf_core/create.py +++ b/nf_core/create.py @@ -74,7 +74,7 @@ def __init__( else: raise UserWarning("The template configuration was not provided.") - self.skip_areas, skip_paths = self.obtain_skipped_areas_dict( + self.jinja_params, skip_paths = self.obtain_skipped_areas_dict( self.config.skip_features, outdir if outdir else "." 
) @@ -103,7 +103,7 @@ def __init__( self.default_branch = default_branch self.force = self.config.force if outdir is None: - outdir = os.path.join(os.getcwd(), self.config.name_noslash) + outdir = os.path.join(os.getcwd(), self.jinja_params["name_noslash"]) self.outdir = Path(outdir) def check_template_yaml_info(self, template_yaml, name, description, author): @@ -178,7 +178,7 @@ def obtain_skipped_areas_dict(self, features_to_skip, pipeline_dir): pipeline_dir (str): Path to the pipeline directory. Returns: - skip_areas (dict): Dictionary of template areas to skip with values true/false. + jinja_params (dict): Dictionary of template areas to skip with values true/false. skip_paths (list): List of template areas which contain paths to skip. """ # Try reading config file @@ -194,48 +194,51 @@ def obtain_skipped_areas_dict(self, features_to_skip, pipeline_dir): "nf_core_configs": {"file": False, "content": True}, } + # Set the parameters for the jinja template + jinja_params = self.config.model_dump() + + # Add template areas to jinja params and create list of areas with paths to skip skip_paths = [] - skip_areas = {} for t_area in template_areas: if t_area in features_to_skip: if template_areas[t_area]["file"]: skip_paths.append(t_area) - skip_areas[t_area] = False + jinja_params[t_area] = False else: - skip_areas[t_area] = True + jinja_params[t_area] = True # If github is selected, exclude also github_badges # if not param_dict["github"]: # param_dict["github_badges"] = False # Set the last parameters based on the ones provided - self.config.short_name = ( - self.config.name.lower().replace(r"/\s+/", "-").replace(f"{self.config.org}/", "").replace("/", "-") + jinja_params["short_name"] = ( + jinja_params["name"].lower().replace(r"/\s+/", "-").replace(f"{jinja_params['org']}/", "").replace("/", "-") ) - self.config.name = f"{self.config.org}/{self.config.short_name}" - self.config.name_noslash = self.config.name.replace("/", "-") - self.config.prefix_nodash = 
self.config.org.replace("-", "") - self.config.name_docker = self.config.name.replace(self.config.org, self.config.prefix_nodash) - self.config.logo_light = f"{self.config.name_noslash}_logo_light.png" - self.config.logo_dark = f"{self.config.name_noslash}_logo_dark.png" + jinja_params["name"] = f"{jinja_params['org']}/{jinja_params['short_name']}" + jinja_params["name_noslash"] = jinja_params["name"].replace("/", "-") + jinja_params["prefix_nodash"] = jinja_params["org"].replace("-", "") + jinja_params["name_docker"] = jinja_params["name"].replace(jinja_params["org"], jinja_params["prefix_nodash"]) + jinja_params["logo_light"] = f"{jinja_params['name_noslash']}_logo_light.png" + jinja_params["logo_dark"] = f"{jinja_params['name_noslash']}_logo_dark.png" if ( "lint" in config_yml and "nextflow_config" in config_yml["lint"] and "manifest.name" in config_yml["lint"]["nextflow_config"] ): - return skip_areas, skip_paths + return jinja_params, skip_paths # Check that the pipeline name matches the requirements - if not re.match(r"^[a-z]+$", self.config.short_name): - if self.config.is_nfcore: + if not re.match(r"^[a-z]+$", jinja_params["short_name"]): + if jinja_params["is_nfcore"]: raise UserWarning("[red]Invalid workflow name: must be lowercase without punctuation.") else: log.warning( "Your workflow name is not lowercase without punctuation. This may cause Nextflow errors.\nConsider changing the name to avoid special characters." 
) - return skip_areas, skip_paths + return jinja_params, skip_paths def init_pipeline(self): """Creates the nf-core pipeline.""" @@ -278,14 +281,14 @@ def render_template(self): loader=jinja2.PackageLoader("nf_core", "pipeline-template"), keep_trailing_newline=True ) template_dir = os.path.join(os.path.dirname(__file__), "pipeline-template") - object_attrs = self.config.model_dump() + object_attrs = self.jinja_params object_attrs["nf_core_version"] = nf_core.__version__ # Can't use glob.glob() as need recursive hidden dotfiles - https://stackoverflow.com/a/58126417/713980 template_files = list(Path(template_dir).glob("**/*")) template_files += list(Path(template_dir).glob("*")) ignore_strs = [".pyc", "__pycache__", ".pyo", ".pyd", ".DS_Store", ".egg"] - short_name = self.config.short_name + short_name = self.jinja_params["short_name"] rename_files = { "workflows/pipeline.nf": f"workflows/{short_name}.nf", "lib/WorkflowPipeline.groovy": f"lib/Workflow{short_name.title()}.groovy", @@ -343,14 +346,14 @@ def render_template(self): os.chmod(output_path, template_stat.st_mode) # Remove all unused parameters in the nextflow schema - if not self.skip_areas["igenomes"] or not self.skip_areas["nf_core_configs"]: + if not self.jinja_params["igenomes"] or not self.jinja_params["nf_core_configs"]: self.update_nextflow_schema() if self.config.is_nfcore: # Make a logo and save it, if it is a nf-core pipeline self.make_pipeline_logo() else: - if self.skip_areas["github"]: + if self.jinja_params["github"]: # Remove field mentioning nf-core docs # in the github bug report template self.remove_nf_core_in_bug_report_template() @@ -405,7 +408,7 @@ def fix_linting(self): for a customized pipeline. 
""" # Create a lint config - short_name = self.config.short_name + short_name = self.jinja_params["short_name"] lint_config = { "files_exist": [ "CODE_OF_CONDUCT.md", @@ -430,7 +433,7 @@ def fix_linting(self): } # Add GitHub hosting specific configurations - if not self.skip_areas["github"]: + if not self.jinja_params["github"]: lint_config["files_exist"].extend( [ ".github/ISSUE_TEMPLATE/bug_report.yml", @@ -456,7 +459,7 @@ def fix_linting(self): ) # Add CI specific configurations - if not self.skip_areas["ci"]: + if not self.jinja_params["ci"]: lint_config["files_exist"].extend( [ ".github/workflows/branch.yml", @@ -467,7 +470,7 @@ def fix_linting(self): ) # Add custom config specific configurations - if not self.skip_areas["nf_core_configs"]: + if not self.jinja_params["nf_core_configs"]: lint_config["files_exist"].extend(["conf/igenomes.config"]) lint_config["nextflow_config"].extend( [ @@ -479,11 +482,11 @@ def fix_linting(self): ) # Add igenomes specific configurations - if not self.skip_areas["igenomes"]: + if not self.jinja_params["igenomes"]: lint_config["files_exist"].extend(["conf/igenomes.config"]) # Add github badges specific configurations - if not self.skip_areas["github_badges"] or not self.skip_areas["github"]: + if not self.jinja_params["github_badges"] or not self.jinja_params["github"]: lint_config["readme"] = ["nextflow_badge"] # If the pipeline is not nf-core @@ -501,14 +504,14 @@ def fix_linting(self): def make_pipeline_logo(self): """Fetch a logo for the new pipeline from the nf-core website""" - logo_url = f"https://nf-co.re/logo/{self.config.short_name}?theme=light" + logo_url = f"https://nf-co.re/logo/{self.jinja_params['short_name']}?theme=light" log.debug(f"Fetching logo from {logo_url}") - email_logo_path = self.outdir / "assets" / f"{self.config.name_noslash}_logo_light.png" + email_logo_path = self.outdir / "assets" / f"{self.jinja_params['name_noslash']}_logo_light.png" self.download_pipeline_logo(f"{logo_url}?w=600&theme=light", 
email_logo_path) for theme in ["dark", "light"]: readme_logo_url = f"{logo_url}?w=600&theme={theme}" - readme_logo_path = self.outdir / "docs" / "images" / f"{self.config.name_noslash}_logo_{theme}.png" + readme_logo_path = self.outdir / "docs" / "images" / f"{self.jinja_params['name_noslash']}_logo_{theme}.png" self.download_pipeline_logo(readme_logo_url, readme_logo_path) def download_pipeline_logo(self, url, img_fn): From cb62be330d26c2a4a38fd64a072884b144b66b19 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 1 Sep 2023 11:14:47 +0200 Subject: [PATCH 029/737] refactor function obtain_jinja_params_dict --- nf_core/create.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/create.py b/nf_core/create.py index b607c78b8..fe8f0fee9 100644 --- a/nf_core/create.py +++ b/nf_core/create.py @@ -74,7 +74,7 @@ def __init__( else: raise UserWarning("The template configuration was not provided.") - self.jinja_params, skip_paths = self.obtain_skipped_areas_dict( + self.jinja_params, skip_paths = self.obtain_jinja_params_dict( self.config.skip_features, outdir if outdir else "." ) @@ -170,7 +170,7 @@ def update_config(self, organisation, version, force, pipeline_dir): if self.config.is_nfcore is None: self.config.is_nfcore = True if organisation == "nf-core" else False - def obtain_skipped_areas_dict(self, features_to_skip, pipeline_dir): + def obtain_jinja_params_dict(self, features_to_skip, pipeline_dir): """Creates a dictionary of parameters for the new pipeline. 
Args: From da1c68f4307f2a8b6eeb2248a642d950a43bcd24 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 1 Sep 2023 12:40:06 +0200 Subject: [PATCH 030/737] run create command from the App, mv nf_core/create.py to nf_core/pipelines/create/create.py --- nf_core/__main__.py | 68 +++++++++++------------- nf_core/lint/files_unchanged.py | 6 +-- nf_core/{ => pipelines/create}/create.py | 46 ++++++++-------- nf_core/pipelines/create/finaldetails.py | 10 +++- nf_core/sync.py | 6 +-- tests/lint/nextflow_config.py | 2 +- tests/lint/version_consistency.py | 2 +- tests/modules/patch.py | 2 +- tests/test_bump_version.py | 8 +-- tests/test_cli.py | 2 +- tests/test_create.py | 16 +++--- tests/test_download.py | 4 +- tests/test_launch.py | 4 +- tests/test_lint.py | 6 +-- tests/test_modules.py | 4 +- tests/test_params_file.py | 4 +- tests/test_schema.py | 4 +- tests/test_subworkflows.py | 4 +- tests/test_sync.py | 4 +- tests/test_utils.py | 6 +-- 20 files changed, 105 insertions(+), 103 deletions(-) rename nf_core/{ => pipelines/create}/create.py (95%) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 09ecf1de6..97f166f28 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -488,12 +488,25 @@ def create_pipeline(ctx, name, description, author, version, force, outdir, temp \n\n Run without any command line arguments to use an interactive interface. 
""" - from nf_core.create import PipelineCreate from nf_core.pipelines.create import PipelineCreateApp + from nf_core.pipelines.create.create import PipelineCreate if (name and description and author) or (template_yaml): # If all command arguments are used, run without the interactive interface - config = None + try: + create_obj = PipelineCreate( + name, + description, + author, + version=version, + force=force, + outdir=outdir, + organisation=organisation, + ) + create_obj.init_pipeline() + except UserWarning as e: + log.error(e) + sys.exit(1) elif name or description or author or version or force or outdir or organisation: log.error( "Command arguments are not accepted in interactive mode.\n" @@ -507,23 +520,7 @@ def create_pipeline(ctx, name, description, author, version, force, outdir, temp "\nRun with all command line arguments to avoid using an interactive interface." ) app = PipelineCreateApp() - config = app.run() - - try: - create_obj = PipelineCreate( - name, - description, - author, - version=version, - force=force, - outdir=outdir, - template_config=config, - organisation=organisation, - ) - create_obj.init_pipeline() - except UserWarning as e: - log.error(e) - sys.exit(1) + app.run() # nf-core create (deprecated) @@ -548,12 +545,24 @@ def create(name, description, author, version, force, outdir, template_yaml, pla Uses the nf-core template to make a skeleton Nextflow pipeline with all required files, boilerplate code and best-practices. 
""" - from nf_core.create import PipelineCreate from nf_core.pipelines.create import PipelineCreateApp + from nf_core.pipelines.create.create import PipelineCreate if (name and description and author) or (template_yaml): # If all command arguments are used, run without the interactive interface - config = None + try: + create_obj = PipelineCreate( + name, + description, + author, + version=version, + force=force, + outdir=outdir, + ) + create_obj.init_pipeline() + except UserWarning as e: + log.error(e) + sys.exit(1) elif name or description or author or version or force or outdir or plain: log.error( "Command arguments are not accepted in interactive mode.\n" @@ -571,25 +580,10 @@ def create(name, description, author, version, force, outdir, template_yaml, pla "\nRun with all command line arguments to avoid using an interactive interface." ) app = PipelineCreateApp() - config = app.run() + app.run() else: sys.exit(0) - try: - create_obj = PipelineCreate( - name, - description, - author, - version=version, - force=force, - outdir=outdir, - template_config=config, - ) - create_obj.init_pipeline() - except UserWarning as e: - log.error(e) - sys.exit(1) - # nf-core modules subcommands @nf_core_cli.group() diff --git a/nf_core/lint/files_unchanged.py b/nf_core/lint/files_unchanged.py index 2b64d6263..9a0175571 100644 --- a/nf_core/lint/files_unchanged.py +++ b/nf_core/lint/files_unchanged.py @@ -6,7 +6,7 @@ import yaml -import nf_core.create +import nf_core.pipelines.create.create log = logging.getLogger(__name__) @@ -111,7 +111,7 @@ def files_unchanged(self): ] # Only show error messages from pipeline creation - logging.getLogger("nf_core.create").setLevel(logging.ERROR) + logging.getLogger("nf_core.pipelines.create").setLevel(logging.ERROR) # Generate a new pipeline with nf-core create that we can compare to tmp_dir = tempfile.mkdtemp() @@ -129,7 +129,7 @@ def files_unchanged(self): yaml.dump(template_yaml, fh, default_flow_style=False) test_pipeline_dir = 
os.path.join(tmp_dir, f"{prefix}-{short_name}") - create_obj = nf_core.create.PipelineCreate( + create_obj = nf_core.pipelines.create.create.PipelineCreate( None, None, None, no_git=True, outdir=test_pipeline_dir, template_yaml_path=template_yaml_path ) create_obj.init_pipeline() diff --git a/nf_core/create.py b/nf_core/pipelines/create/create.py similarity index 95% rename from nf_core/create.py rename to nf_core/pipelines/create/create.py index fe8f0fee9..4bdf64a62 100644 --- a/nf_core/create.py +++ b/nf_core/pipelines/create/create.py @@ -47,9 +47,9 @@ class PipelineCreate: def __init__( self, - name, - description, - author, + name=None, + description=None, + author=None, version="1.0dev", no_git=False, force=False, @@ -59,11 +59,7 @@ def __init__( from_config_file=False, default_branch=None, ): - if template_config is not None and isinstance(template_config, str): - # Obtain a CreateConfig object from the template yaml file - self.config = self.check_template_yaml_info(template_config, name, description, author) - self.update_config(organisation, version, force, outdir if outdir else ".") - elif isinstance(template_config, CreateConfig): + if isinstance(template_config, CreateConfig): self.config = template_config elif from_config_file: # Try reading config file @@ -71,12 +67,16 @@ def __init__( # Obtain a CreateConfig object from `.nf-core.yml` config file if "template" in config_yml: self.config = CreateConfig(**config_yml["template"]) + else: + raise UserWarning("The template configuration was not provided in '.nf-core.yml'.") + elif (name and description and author) or (template_config and isinstance(template_config, str)): + # Obtain a CreateConfig object from the template yaml file + self.config = self.check_template_yaml_info(template_config, name, description, author) + self.update_config(organisation, version, force, outdir) else: raise UserWarning("The template configuration was not provided.") - self.jinja_params, skip_paths = 
self.obtain_jinja_params_dict( - self.config.skip_features, outdir if outdir else "." - ) + self.jinja_params, skip_paths = self.obtain_jinja_params_dict(self.config.skip_features, self.config.outdir) skippable_paths = { "github": [ @@ -123,12 +123,14 @@ def check_template_yaml_info(self, template_yaml, name, description, author): UserWarning: if template yaml file does not exist. """ # Obtain template customization info from template yaml file or `.nf-core.yml` config file - try: - with open(template_yaml, "r") as f: - template_yaml = yaml.safe_load(f) - config = CreateConfig(**template_yaml) - except FileNotFoundError: - raise UserWarning(f"Template YAML file '{template_yaml}' not found.") + config = CreateConfig() + if template_yaml: + try: + with open(template_yaml, "r") as f: + template_yaml = yaml.safe_load(f) + config = CreateConfig(**template_yaml) + except FileNotFoundError: + raise UserWarning(f"Template YAML file '{template_yaml}' not found.") missing_fields = [] if config.name is None and name is None: @@ -150,14 +152,14 @@ def check_template_yaml_info(self, template_yaml, name, description, author): return config - def update_config(self, organisation, version, force, pipeline_dir): + def update_config(self, organisation, version, force, outdir): """Updates the config file with arguments provided through command line. Args: organisation (str): Name of the GitHub organisation to create the pipeline. version (str): Version of the pipeline. force (bool): Overwrites a given workflow directory with the same name. - pipeline_dir (str): Path to the local output directory. + outdir (str): Path to the local output directory. 
""" if self.config.org is None: self.config.org = organisation @@ -166,9 +168,9 @@ def update_config(self, organisation, version, force, pipeline_dir): if self.config.force is None: self.config.force = force if force else False if self.config.outdir is None: - self.config.outdir = pipeline_dir + self.config.outdir = outdir if outdir else "." if self.config.is_nfcore is None: - self.config.is_nfcore = True if organisation == "nf-core" else False + self.config.is_nfcore = organisation == "nf-core" def obtain_jinja_params_dict(self, features_to_skip, pipeline_dir): """Creates a dictionary of parameters for the new pipeline. @@ -280,7 +282,7 @@ def render_template(self): env = jinja2.Environment( loader=jinja2.PackageLoader("nf_core", "pipeline-template"), keep_trailing_newline=True ) - template_dir = os.path.join(os.path.dirname(__file__), "pipeline-template") + template_dir = os.path.join(os.path.dirname(nf_core.__file__), "pipeline-template") object_attrs = self.jinja_params object_attrs["nf_core_version"] = nf_core.__version__ diff --git a/nf_core/pipelines/create/finaldetails.py b/nf_core/pipelines/create/finaldetails.py index 4f65f9762..c73038ee9 100644 --- a/nf_core/pipelines/create/finaldetails.py +++ b/nf_core/pipelines/create/finaldetails.py @@ -7,7 +7,8 @@ from textual.screen import Screen from textual.widgets import Button, Footer, Header, Input, Markdown, Static, Switch -from nf_core.pipelines.create.utils import CreateConfig, TextInput +from nf_core.pipelines.create.create import PipelineCreate +from nf_core.pipelines.create.utils import TextInput class FinalDetails(Screen): @@ -71,4 +72,9 @@ def on_button_pressed(self, event: Button.Pressed) -> None: except ValueError: pass - self.parent.exit(self.parent.TEMPLATE_CONFIG) + # self.parent.exit(self.parent.TEMPLATE_CONFIG) + # Create the new pipeline + create_obj = PipelineCreate(template_config=self.parent.TEMPLATE_CONFIG) + create_obj.init_pipeline() + self.parent.exit() + # 
self.parent.switch_screen("github_repo") diff --git a/nf_core/sync.py b/nf_core/sync.py index 5402a6121..6175ebff0 100644 --- a/nf_core/sync.py +++ b/nf_core/sync.py @@ -16,8 +16,8 @@ from git import GitCommandError, InvalidGitRepositoryError import nf_core -import nf_core.create import nf_core.list +import nf_core.pipelines.create.create import nf_core.utils log = logging.getLogger(__name__) @@ -251,7 +251,7 @@ def make_template_pipeline(self): log.info("Making a new template pipeline using pipeline variables") # Only show error messages from pipeline creation - logging.getLogger("nf_core.create").setLevel(logging.ERROR) + logging.getLogger("nf_core.pipelines.create").setLevel(logging.ERROR) # Re-write the template yaml info from .nf-core.yml config if "template" in self.config_yml: @@ -259,7 +259,7 @@ def make_template_pipeline(self): yaml.safe_dump(self.config_yml, config_path) try: - nf_core.create.PipelineCreate( + nf_core.pipelines.create.create.PipelineCreate( name=self.wf_config["manifest.name"].strip('"').strip("'"), description=self.wf_config["manifest.description"].strip('"').strip("'"), version=self.wf_config["manifest.version"].strip('"').strip("'"), diff --git a/tests/lint/nextflow_config.py b/tests/lint/nextflow_config.py index f53765dce..1d7ca36ab 100644 --- a/tests/lint/nextflow_config.py +++ b/tests/lint/nextflow_config.py @@ -1,5 +1,5 @@ -import nf_core.create import nf_core.lint +import nf_core.pipelines.create.create def test_nextflow_config_example_pass(self): diff --git a/tests/lint/version_consistency.py b/tests/lint/version_consistency.py index c68280064..6f70d67c4 100644 --- a/tests/lint/version_consistency.py +++ b/tests/lint/version_consistency.py @@ -1,5 +1,5 @@ -import nf_core.create import nf_core.lint +import nf_core.pipelines.create.create def test_version_consistency(self): diff --git a/tests/modules/patch.py b/tests/modules/patch.py index 338d890f2..7263d3026 100644 --- a/tests/modules/patch.py +++ b/tests/modules/patch.py @@ 
-349,7 +349,7 @@ def test_remove_patch(self): "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) - with mock.patch.object(nf_core.create.questionary, "confirm") as mock_questionary: + with mock.patch.object(nf_core.pipelines.create.questionary, "confirm") as mock_questionary: mock_questionary.unsafe_ask.return_value = True patch_obj.remove(BISMARK_ALIGN) # Check that the diff file has been removed diff --git a/tests/test_bump_version.py b/tests/test_bump_version.py index 658a2339d..07de9687f 100644 --- a/tests/test_bump_version.py +++ b/tests/test_bump_version.py @@ -5,7 +5,7 @@ import yaml import nf_core.bump_version -import nf_core.create +import nf_core.pipelines.create.create import nf_core.utils @@ -16,7 +16,7 @@ def test_bump_pipeline_version(datafiles, tmp_path): # Get a workflow and configs test_pipeline_dir = os.path.join(tmp_path, "nf-core-testpipeline") - create_obj = nf_core.create.PipelineCreate( + create_obj = nf_core.pipelines.create.create.PipelineCreate( "testpipeline", "This is a test pipeline", "Test McTestFace", no_git=True, outdir=test_pipeline_dir, plain=True ) create_obj.init_pipeline() @@ -36,7 +36,7 @@ def test_dev_bump_pipeline_version(datafiles, tmp_path): """Test that making a release works with a dev name and a leading v""" # Get a workflow and configs test_pipeline_dir = os.path.join(tmp_path, "nf-core-testpipeline") - create_obj = nf_core.create.PipelineCreate( + create_obj = nf_core.pipelines.create.create.PipelineCreate( "testpipeline", "This is a test pipeline", "Test McTestFace", no_git=True, outdir=test_pipeline_dir, plain=True ) create_obj.init_pipeline() @@ -55,7 +55,7 @@ def test_dev_bump_pipeline_version(datafiles, tmp_path): def test_bump_nextflow_version(datafiles, tmp_path): # Get a workflow and configs test_pipeline_dir = os.path.join(tmp_path, "nf-core-testpipeline") - create_obj = nf_core.create.PipelineCreate( + create_obj = nf_core.pipelines.create.create.PipelineCreate( "testpipeline", "This is a test pipeline", "Test 
McTestFace", no_git=True, outdir=test_pipeline_dir, plain=True ) create_obj.init_pipeline() diff --git a/tests/test_cli.py b/tests/test_cli.py index fc172deba..d488e6493 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -228,7 +228,7 @@ def test_licences_log_error(self, mock_lic): assert error_txt in captured_logs.output[-1] assert captured_logs.records[-1].levelname == "ERROR" - @mock.patch("nf_core.create.PipelineCreate") + @mock.patch("nf_core.pipelines.create.PipelineCreate") def test_create(self, mock_create): """Test nf-core pipeline is created and cli parameters are passed on.""" params = { diff --git a/tests/test_create.py b/tests/test_create.py index 1cc073cb5..3154539bb 100644 --- a/tests/test_create.py +++ b/tests/test_create.py @@ -8,7 +8,7 @@ import git import yaml -import nf_core.create +import nf_core.pipelines.create.create from .utils import with_temporary_folder @@ -26,7 +26,7 @@ def setUp(self): self.default_branch = "default" def test_pipeline_creation(self): - pipeline = nf_core.create.PipelineCreate( + pipeline = nf_core.pipelines.create.create.PipelineCreate( name=self.pipeline_name, description=self.pipeline_description, author=self.pipeline_author, @@ -44,7 +44,7 @@ def test_pipeline_creation(self): @with_temporary_folder def test_pipeline_creation_initiation(self, tmp_path): - pipeline = nf_core.create.PipelineCreate( + pipeline = nf_core.pipelines.create.create.PipelineCreate( name=self.pipeline_name, description=self.pipeline_description, author=self.pipeline_author, @@ -64,7 +64,7 @@ def test_pipeline_creation_initiation(self, tmp_path): @with_temporary_folder def test_pipeline_creation_initiation_with_yml(self, tmp_path): - pipeline = nf_core.create.PipelineCreate( + pipeline = nf_core.pipelines.create.create.PipelineCreate( name=self.pipeline_name, description=self.pipeline_description, author=self.pipeline_author, @@ -88,13 +88,13 @@ def test_pipeline_creation_initiation_with_yml(self, tmp_path): assert "template" in 
nfcore_yml assert nfcore_yml["template"] == yaml.safe_load(PIPELINE_TEMPLATE_YML.read_text()) - @mock.patch.object(nf_core.create.PipelineCreate, "customize_template") - @mock.patch.object(nf_core.create.questionary, "confirm") + @mock.patch.object(nf_core.pipelines.create.create.PipelineCreate, "customize_template") + @mock.patch.object(nf_core.pipelines.create.create.questionary, "confirm") @with_temporary_folder def test_pipeline_creation_initiation_customize_template(self, mock_questionary, mock_customize, tmp_path): mock_questionary.unsafe_ask.return_value = True mock_customize.return_value = {"prefix": "testprefix"} - pipeline = nf_core.create.PipelineCreate( + pipeline = nf_core.pipelines.create.create.PipelineCreate( name=self.pipeline_name, description=self.pipeline_description, author=self.pipeline_author, @@ -118,7 +118,7 @@ def test_pipeline_creation_initiation_customize_template(self, mock_questionary, @with_temporary_folder def test_pipeline_creation_with_yml_skip(self, tmp_path): - pipeline = nf_core.create.PipelineCreate( + pipeline = nf_core.pipelines.create.create.PipelineCreate( name=self.pipeline_name, description=self.pipeline_description, author=self.pipeline_author, diff --git a/tests/test_download.py b/tests/test_download.py index dfd78adcf..ea59c7bd6 100644 --- a/tests/test_download.py +++ b/tests/test_download.py @@ -12,7 +12,7 @@ import pytest -import nf_core.create +import nf_core.pipelines.create.create import nf_core.utils from nf_core.download import ContainerError, DownloadWorkflow, WorkflowRepo from nf_core.synced_repo import SyncedRepo @@ -110,7 +110,7 @@ def test_download_configs(self, outdir): def test_wf_use_local_configs(self, tmp_path): # Get a workflow and configs test_pipeline_dir = os.path.join(tmp_path, "nf-core-testpipeline") - create_obj = nf_core.create.PipelineCreate( + create_obj = nf_core.pipelines.create.create.PipelineCreate( "testpipeline", "This is a test pipeline", "Test McTestFace", diff --git 
a/tests/test_launch.py b/tests/test_launch.py index d830311ba..d3ddde6c8 100644 --- a/tests/test_launch.py +++ b/tests/test_launch.py @@ -9,8 +9,8 @@ import pytest -import nf_core.create import nf_core.launch +import nf_core.pipelines.create.create from .utils import with_temporary_file, with_temporary_folder @@ -71,7 +71,7 @@ def test_get_pipeline_schema(self): def test_make_pipeline_schema(self, tmp_path): """Create a workflow, but delete the schema file, then try to load it""" test_pipeline_dir = os.path.join(tmp_path, "wf") - create_obj = nf_core.create.PipelineCreate( + create_obj = nf_core.pipelines.create.create.PipelineCreate( "testpipeline", "", "", outdir=test_pipeline_dir, no_git=True, plain=True ) create_obj.init_pipeline() diff --git a/tests/test_lint.py b/tests/test_lint.py index e4e93bd1f..99e0506cf 100644 --- a/tests/test_lint.py +++ b/tests/test_lint.py @@ -9,8 +9,8 @@ import yaml -import nf_core.create import nf_core.lint +import nf_core.pipelines.create.create from .utils import with_temporary_folder @@ -21,12 +21,12 @@ class TestLint(unittest.TestCase): def setUp(self): """Function that runs at start of tests for common resources - Use nf_core.create() to make a pipeline that we can use for testing + Use nf_core.pipelines.create() to make a pipeline that we can use for testing """ self.tmp_dir = tempfile.mkdtemp() self.test_pipeline_dir = os.path.join(self.tmp_dir, "nf-core-testpipeline") - self.create_obj = nf_core.create.PipelineCreate( + self.create_obj = nf_core.pipelines.create.create.PipelineCreate( "testpipeline", "This is a test pipeline", "Test McTestFace", outdir=self.test_pipeline_dir, plain=True ) self.create_obj.init_pipeline() diff --git a/tests/test_modules.py b/tests/test_modules.py index 047369b7c..0d784c74e 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -9,8 +9,8 @@ import requests_cache import responses -import nf_core.create import nf_core.modules +import nf_core.pipelines.create.create from .utils import ( 
GITLAB_BRANCH_TEST_BRANCH, @@ -72,7 +72,7 @@ def setUp(self): self.template_dir = os.path.join(root_repo_dir, "nf_core", "pipeline-template") self.pipeline_name = "mypipeline" self.pipeline_dir = os.path.join(self.tmp_dir, self.pipeline_name) - nf_core.create.PipelineCreate( + nf_core.pipelines.create.create.PipelineCreate( self.pipeline_name, "it is mine", "me", no_git=True, outdir=self.pipeline_dir, plain=True ).init_pipeline() # Set up install objects diff --git a/tests/test_params_file.py b/tests/test_params_file.py index 824e8fe34..882b57512 100644 --- a/tests/test_params_file.py +++ b/tests/test_params_file.py @@ -4,7 +4,7 @@ import tempfile from pathlib import Path -import nf_core.create +import nf_core.pipelines.create.create import nf_core.schema from nf_core.params_file import ParamsFileBuilder @@ -21,7 +21,7 @@ def setup_class(cls): # Create a test pipeline in temp directory cls.tmp_dir = tempfile.mkdtemp() cls.template_dir = os.path.join(cls.tmp_dir, "wf") - create_obj = nf_core.create.PipelineCreate( + create_obj = nf_core.pipelines.create.create.PipelineCreate( "testpipeline", "", "", outdir=cls.template_dir, no_git=True, plain=True ) create_obj.init_pipeline() diff --git a/tests/test_schema.py b/tests/test_schema.py index d3b4fda81..4d8f2e0ef 100644 --- a/tests/test_schema.py +++ b/tests/test_schema.py @@ -12,7 +12,7 @@ import requests import yaml -import nf_core.create +import nf_core.pipelines.create.create import nf_core.schema from .utils import with_temporary_file, with_temporary_folder @@ -29,7 +29,7 @@ def setUp(self): # Create a test pipeline in temp directory self.tmp_dir = tempfile.mkdtemp() self.template_dir = os.path.join(self.tmp_dir, "wf") - create_obj = nf_core.create.PipelineCreate( + create_obj = nf_core.pipelines.create.create.PipelineCreate( "testpipeline", "", "", outdir=self.template_dir, no_git=True, plain=True ) create_obj.init_pipeline() diff --git a/tests/test_subworkflows.py b/tests/test_subworkflows.py index 
1c290cb88..fb1521131 100644 --- a/tests/test_subworkflows.py +++ b/tests/test_subworkflows.py @@ -8,8 +8,8 @@ import responses -import nf_core.create import nf_core.modules +import nf_core.pipelines.create.create import nf_core.subworkflows from .utils import ( @@ -55,7 +55,7 @@ def setUp(self): self.template_dir = os.path.join(root_repo_dir, "nf_core", "pipeline-template") self.pipeline_name = "mypipeline" self.pipeline_dir = os.path.join(self.tmp_dir, self.pipeline_name) - nf_core.create.PipelineCreate( + nf_core.pipelines.create.create.PipelineCreate( self.pipeline_name, "it is mine", "me", no_git=True, outdir=self.pipeline_dir, plain=True ).init_pipeline() diff --git a/tests/test_sync.py b/tests/test_sync.py index 597e4375d..228a6682e 100644 --- a/tests/test_sync.py +++ b/tests/test_sync.py @@ -12,7 +12,7 @@ import git import pytest -import nf_core.create +import nf_core.pipelines.create.create import nf_core.sync from .utils import with_temporary_folder @@ -26,7 +26,7 @@ def setUp(self): self.tmp_dir = tempfile.mkdtemp() self.pipeline_dir = os.path.join(self.tmp_dir, "testpipeline") default_branch = "master" - self.create_obj = nf_core.create.PipelineCreate( + self.create_obj = nf_core.pipelines.create.create.PipelineCreate( "testing", "test pipeline", "tester", outdir=self.pipeline_dir, plain=True, default_branch=default_branch ) self.create_obj.init_pipeline() diff --git a/tests/test_utils.py b/tests/test_utils.py index 2ab5b64bf..22b5a2332 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -11,8 +11,8 @@ import pytest import requests -import nf_core.create import nf_core.list +import nf_core.pipelines.create.create import nf_core.utils from .utils import with_temporary_folder @@ -35,11 +35,11 @@ class TestUtils(unittest.TestCase): def setUp(self): """Function that runs at start of tests for common resources - Use nf_core.create() to make a pipeline that we can use for testing + Use nf_core.pipelines.create() to make a pipeline that we can use for 
testing """ self.tmp_dir = tempfile.mkdtemp() self.test_pipeline_dir = os.path.join(self.tmp_dir, "nf-core-testpipeline") - self.create_obj = nf_core.create.PipelineCreate( + self.create_obj = nf_core.pipelines.create.create.PipelineCreate( "testpipeline", "This is a test pipeline", "Test McTestFace", From 3c14dd7f0ab2953e81bf23c6a1cfbd4454095224 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 3 Oct 2023 11:26:44 +0200 Subject: [PATCH 031/737] add new screen to create a github repo and push the new pipeline (not working from the app) --- nf_core/pipelines/create/__init__.py | 2 + nf_core/pipelines/create/finaldetails.py | 4 +- nf_core/pipelines/create/githubrepo.py | 176 +++++++++++++++++++++++ requirements.txt | 1 + 4 files changed, 180 insertions(+), 3 deletions(-) create mode 100644 nf_core/pipelines/create/githubrepo.py diff --git a/nf_core/pipelines/create/__init__.py b/nf_core/pipelines/create/__init__.py index 0fe143b3b..4832a7346 100644 --- a/nf_core/pipelines/create/__init__.py +++ b/nf_core/pipelines/create/__init__.py @@ -5,6 +5,7 @@ from nf_core.pipelines.create.basicdetails import BasicDetails from nf_core.pipelines.create.custompipeline import CustomPipeline from nf_core.pipelines.create.finaldetails import FinalDetails +from nf_core.pipelines.create.githubrepo import GithubRepo from nf_core.pipelines.create.nfcorepipeline import NfcorePipeline from nf_core.pipelines.create.pipelinetype import ChoosePipelineType from nf_core.pipelines.create.utils import CreateConfig @@ -28,6 +29,7 @@ class PipelineCreateApp(App[CreateConfig]): "type_custom": CustomPipeline(), "type_nfcore": NfcorePipeline(), "final_details": FinalDetails(), + "github_repo": GithubRepo(), } # Initialise config as empty diff --git a/nf_core/pipelines/create/finaldetails.py b/nf_core/pipelines/create/finaldetails.py index c73038ee9..203f2f6bb 100644 --- a/nf_core/pipelines/create/finaldetails.py +++ b/nf_core/pipelines/create/finaldetails.py @@ -72,9 +72,7 @@ def 
on_button_pressed(self, event: Button.Pressed) -> None: except ValueError: pass - # self.parent.exit(self.parent.TEMPLATE_CONFIG) # Create the new pipeline create_obj = PipelineCreate(template_config=self.parent.TEMPLATE_CONFIG) create_obj.init_pipeline() - self.parent.exit() - # self.parent.switch_screen("github_repo") + self.parent.switch_screen("github_repo") diff --git a/nf_core/pipelines/create/githubrepo.py b/nf_core/pipelines/create/githubrepo.py new file mode 100644 index 000000000..6660be622 --- /dev/null +++ b/nf_core/pipelines/create/githubrepo.py @@ -0,0 +1,176 @@ +import logging +import os +from textwrap import dedent + +import git +from github import Github, GithubException +from textual import on +from textual.app import ComposeResult +from textual.containers import Center, Horizontal +from textual.screen import Screen +from textual.widgets import Button, Footer, Header, Input, Markdown, Static, Switch + +from nf_core.pipelines.create.utils import TextInput + +log = logging.getLogger(__name__) + +github_text_markdown = """ +# Create a GitHub repo + +After creating the pipeline template locally, we can create a GitHub repository and push the code to it. +""" +repo_config_markdown = """ +Please select the GitHub repository settings: +""" +exit_help_text_markdown = f""" +If you would like to create the GitHub repository later, you can do it manually by following these steps: +1. Create a new GitHub repository +2. Add the remote to your local repository +```bash +cd 
git remote add origin git@github.com:/.git +``` +3. 
Push the code to the remote +```bash +git push --all origin +``` +""" + + +class GithubRepo(Screen): + """Create a GitHub repository and push all branches.""" + + def compose(self) -> ComposeResult: + yield Header() + yield Footer() + yield Markdown(dedent(github_text_markdown)) + with Horizontal(): + yield TextInput( + "gh_username", + "GitHub username", + "Your GitHub username", + classes="column", + ) + token = "GITHUB_AUTH_TOKEN" in os.environ + yield TextInput( + "token", + "GitHub token", + "Your GitHub personal access token for login. Will use the environment variable GITHUB_AUTH_TOKEN if set.", + classes="column", + disabled=token, + ) + yield Markdown(dedent(repo_config_markdown)) + with Horizontal(): + yield Switch(value=False, id="private") + yield Static("Select if the new GitHub repo must be private.", classes="custom_grid") + with Horizontal(): + yield Switch(value=True, id="push") + yield Static( + "Select if you would like to push all the pipeline template files to your GitHub repo\nand all the branches required to keep the pipeline up to date with new releases of nf-core.", + classes="custom_grid", + ) + yield Center( + Button("Create GitHub repo", id="create", variant="success"), + Button("Finish without creating a repo", id="finish", variant="primary"), + classes="cta", + ) + + @on(Button.Pressed, "#create") + def on_button_pressed(self, event: Button.Pressed) -> None: + """Create a GitHub repo""" + # Save GitHub username and token + print("button pressed") + github_variables = {} + for text_input in self.query("TextInput"): + this_input = text_input.query_one(Input) + github_variables[text_input.field_id] = this_input.value + # Save GitHub repo config + for switch_input in self.query("Switch"): + this_switch = switch_input.query_one(Switch) + github_variables[switch_input.field_id] = this_switch.value + + # Pipeline git repo + pipeline_repo = git.Repo.init(self.parent.TEMPLATE_CONFIG.outdir) + + # GitHub authentication + if "GITHUB_AUTH_TOKEN" 
in os.environ: + github_auth = self._github_authentication(github_variables["gh_username"], os.environ["GITHUB_AUTH_TOKEN"]) + elif github_variables["token"]: + github_auth = self._github_authentication(github_variables["gh_username"], github_variables["token"]) + else: + raise UserWarning( + f"Could not authenticate to GitHub with user name '{github_variables['gh_username']}'." + "Please provide an authentication token or set the environment variable 'GITHUB_AUTH_TOKEN'." + f"\n{exit_help_text_markdown}" + ) + + user = github_auth.get_user() + org = None + # Make sure that the authentication was successful + try: + user.login + except GithubException.GithubException as e: + raise UserWarning( + f"Could not authenticate to GitHub with user name '{github_variables['gh_username']}'." + "Please make sure that the provided user name and token are correct." + f"\n{exit_help_text_markdown}" + ) + + # Check if organisation exists + # If the organisation is nf-core or it doesn't exist, the repo will be created in the user account + if self.parent.TEMPLATE_CONFIG.org != "nf-core": + try: + org = github_auth.get_organization(self.parent.TEMPLATE_CONFIG.org) + except GithubException.UnknownObjectException: + pass + + # Create the repo + try: + if org: + self._create_repo_and_push(org, pipeline_repo, github_variables["private"], github_variables["push"]) + else: + # Create the repo in the user's account + self._create_repo_and_push(user, pipeline_repo, github_variables["private"], github_variables["push"]) + except UserWarning as e: + log.info(f"There was an error with message: {e}" f"\n{exit_help_text_markdown}") + + self.parent.exit() + + @on(Button.Pressed, "#finish") + def on_button_pressed(self, event: Button.Pressed) -> None: + """Show help message and exit""" + log.info(exit_help_text_markdown) + self.parent.exit() + + def _create_repo_and_push(self, org, pipeline_repo, private, push): + """Create a GitHub repository and push all branches.""" + # Check if repo already 
exists + try: + repo = org.get_repo(self.parent.TEMPLATE_CONFIG.name) + # Check if it has a commit history + try: + repo.get_commits().totalCount + raise UserWarning(f"GitHub repository '{self.parent.TEMPLATE_CONFIG.name}' already exists") + except GithubException.GithubException: + # Repo is empty + repo_exists = True + except GithubException.UnknownObjectException: + # Repo doesn't exist + repo_exists = False + + # Create the repo + if not repo_exists: + repo = org.create_repo( + self.parent.TEMPLATE_CONFIG.name, description=self.parent.TEMPLATE_CONFIG.description, private=private + ) + + # Add the remote and push + pipeline_repo.create_remote("origin", repo.clone_url) + if push: + pipeline_repo.remotes.origin.push(all=True).raise_if_error() + + def _github_authentication(self, gh_username, gh_token): + """Authenticate to GitHub""" + log.debug(f"Authenticating GitHub as {gh_username}") + github_auth = Github(gh_username, gh_token) + return github_auth diff --git a/requirements.txt b/requirements.txt index b5fc54259..15c4f9e18 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,6 +1,7 @@ click filetype GitPython +PyGithub jinja2 jsonschema>=3.0 markdown>=3.3 From e401a6a692cb785c2afb258bb91051753123b3b6 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Thu, 2 Nov 2023 11:26:28 +0100 Subject: [PATCH 032/737] Some minor CSS tweaks --- nf_core/pipelines/create/create.tcss | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/nf_core/pipelines/create/create.tcss b/nf_core/pipelines/create/create.tcss index 4ebc1936f..42f144c7d 100644 --- a/nf_core/pipelines/create/create.tcss +++ b/nf_core/pipelines/create/create.tcss @@ -55,20 +55,17 @@ HorizontalScroll { /* Display help messages */ .help_box { - background: white; - margin-left: 15; - margin-right: 25; - margin-bottom: 1; + background: #333333; + padding: 1 5; + margin: 1 10; + overflow-y: auto; + transition: height 50ms; display: none; - height: 0; } .displayed .help_box { display: 
block; - overflow-y: scroll; - - transition: height 50ms; - height: 10; + height: 12; } #show_help { display: block; From 8b9ad3517d0b44efb809c48b4a0e1c3284ee0275 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 3 Nov 2023 13:13:25 +0100 Subject: [PATCH 033/737] Add logging handler and final screen to visualise logs --- nf_core/pipelines/create/__init__.py | 21 +++++- nf_core/pipelines/create/basicdetails.py | 75 +++++++++++----------- nf_core/pipelines/create/bye.py | 35 ++++++++++ nf_core/pipelines/create/create.tcss | 3 + nf_core/pipelines/create/custompipeline.py | 65 ++++++++++--------- nf_core/pipelines/create/finaldetails.py | 65 ++++++++++--------- nf_core/pipelines/create/githubrepo.py | 72 +++++++++++---------- nf_core/pipelines/create/nfcorepipeline.py | 29 +++++---- nf_core/pipelines/create/pipelinetype.py | 19 +++--- nf_core/pipelines/create/utils.py | 27 +++++++- nf_core/pipelines/create/welcome.py | 25 ++++---- 11 files changed, 270 insertions(+), 166 deletions(-) create mode 100644 nf_core/pipelines/create/bye.py diff --git a/nf_core/pipelines/create/__init__.py b/nf_core/pipelines/create/__init__.py index 4832a7346..900a0b778 100644 --- a/nf_core/pipelines/create/__init__.py +++ b/nf_core/pipelines/create/__init__.py @@ -1,16 +1,31 @@ """A Textual app to create a pipeline.""" +import logging + from textual.app import App from textual.widgets import Button from nf_core.pipelines.create.basicdetails import BasicDetails +from nf_core.pipelines.create.bye import ByeScreen from nf_core.pipelines.create.custompipeline import CustomPipeline from nf_core.pipelines.create.finaldetails import FinalDetails from nf_core.pipelines.create.githubrepo import GithubRepo from nf_core.pipelines.create.nfcorepipeline import NfcorePipeline from nf_core.pipelines.create.pipelinetype import ChoosePipelineType -from nf_core.pipelines.create.utils import CreateConfig +from nf_core.pipelines.create.utils import ( + CreateConfig, + CustomLogHandler, + LoggingConsole, +) 
from nf_core.pipelines.create.welcome import WelcomeScreen +log_handler = CustomLogHandler(console=LoggingConsole(), rich_tracebacks=True) +logging.basicConfig( + level="INFO", + handlers=[log_handler], + format="%(message)s", +) +log_handler.setLevel("INFO") + class PipelineCreateApp(App[CreateConfig]): """A Textual app to manage stopwatches.""" @@ -30,6 +45,7 @@ class PipelineCreateApp(App[CreateConfig]): "type_nfcore": NfcorePipeline(), "final_details": FinalDetails(), "github_repo": GithubRepo(), + "bye": ByeScreen(), } # Initialise config as empty @@ -38,6 +54,9 @@ class PipelineCreateApp(App[CreateConfig]): # Initialise pipeline type PIPELINE_TYPE = None + # Log handler + LOG_HANDLER = log_handler + def on_mount(self) -> None: self.push_screen("welcome") diff --git a/nf_core/pipelines/create/basicdetails.py b/nf_core/pipelines/create/basicdetails.py index dc7248d97..072892f39 100644 --- a/nf_core/pipelines/create/basicdetails.py +++ b/nf_core/pipelines/create/basicdetails.py @@ -3,7 +3,7 @@ from textual import on from textual.app import ComposeResult -from textual.containers import Center, Horizontal +from textual.containers import Center, Horizontal, VerticalScroll from textual.screen import Screen from textual.widgets import Button, Footer, Header, Input, Markdown @@ -16,43 +16,46 @@ class BasicDetails(Screen): def compose(self) -> ComposeResult: yield Header() yield Footer() - yield Markdown( - dedent( - """ - # Basic details - """ - ) - ) with Horizontal(): - yield TextInput( - "org", - "Organisation", - "GitHub organisation", - "nf-core", - classes="column", - disabled=self.parent.PIPELINE_TYPE == "nfcore", - ) - yield TextInput( - "name", - "Pipeline Name", - "Workflow name", - classes="column", - ) + with VerticalScroll(): + yield Markdown( + dedent( + """ + # Basic details + """ + ) + ) + with Horizontal(): + yield TextInput( + "org", + "Organisation", + "GitHub organisation", + "nf-core", + classes="column", + disabled=self.parent.PIPELINE_TYPE == 
"nfcore", + ) + yield TextInput( + "name", + "Pipeline Name", + "Workflow name", + classes="column", + ) - yield TextInput( - "description", - "Description", - "A short description of your pipeline.", - ) - yield TextInput( - "author", - "Author(s)", - "Name of the main author / authors", - ) - yield Center( - Button("Next", variant="success"), - classes="cta", - ) + yield TextInput( + "description", + "Description", + "A short description of your pipeline.", + ) + yield TextInput( + "author", + "Author(s)", + "Name of the main author / authors", + ) + yield Center( + Button("Next", variant="success"), + classes="cta", + ) + yield Center(self.parent.LOG_HANDLER.console, classes="cta log") @on(Button.Pressed) def on_button_pressed(self, event: Button.Pressed) -> None: diff --git a/nf_core/pipelines/create/bye.py b/nf_core/pipelines/create/bye.py new file mode 100644 index 000000000..89d973778 --- /dev/null +++ b/nf_core/pipelines/create/bye.py @@ -0,0 +1,35 @@ +from textual import on +from textual.app import ComposeResult +from textual.containers import Center +from textual.screen import Screen +from textual.widgets import Button, Footer, Header, Markdown, Static + +markdown = """ +# nf-core create + +Bye! +""" + + +class ByeScreen(Screen): + """A screen to show the final logs.""" + + def compose(self) -> ComposeResult: + yield Header() + yield Footer() + yield Static( + f"\n[green]{' ' * 40},--.[grey39]/[green],-." 
+ + "\n[blue] ___ __ __ __ ___ [green]/,-._.--~\\" + + "\n[blue]|\ | |__ __ / ` / \ |__) |__ [yellow] } {" + + "\n[blue] | \| | \__, \__/ | \ |___ [green]\`-._,-`-," + + "\n[green] `._,._,'\n", + id="logo", + ) + yield Markdown(markdown) + yield Center(self.parent.LOG_HANDLER.console, classes="cta") + yield Center(Button("Close", id="close", variant="success"), classes="cta") + + @on(Button.Pressed, "#close") + def on_button_pressed(self, event: Button.Pressed) -> None: + """Close app""" + self.parent.exit() diff --git a/nf_core/pipelines/create/create.tcss b/nf_core/pipelines/create/create.tcss index 42f144c7d..a718b940d 100644 --- a/nf_core/pipelines/create/create.tcss +++ b/nf_core/pipelines/create/create.tcss @@ -9,6 +9,9 @@ margin-left: 3; margin-right: 3; } +.log { + width: 30%; +} .custom_grid { height: auto; diff --git a/nf_core/pipelines/create/custompipeline.py b/nf_core/pipelines/create/custompipeline.py index 5cc2f87d9..9ac76d80d 100644 --- a/nf_core/pipelines/create/custompipeline.py +++ b/nf_core/pipelines/create/custompipeline.py @@ -1,6 +1,6 @@ from textual import on from textual.app import ComposeResult -from textual.containers import Center, ScrollableContainer +from textual.containers import Center, Horizontal, ScrollableContainer, VerticalScroll from textual.screen import Screen from textual.widgets import Button, Footer, Header, Switch @@ -46,36 +46,39 @@ class CustomPipeline(Screen): def compose(self) -> ComposeResult: yield Header() yield Footer() - yield ScrollableContainer( - PipelineFeature( - markdown_genomes, - "Use reference genomes", - "The pipeline will be configured to use a copy of the most common reference genome files from iGenomes", - "igenomes", - ), - PipelineFeature( - markdown_ci, - "Add Github CI tests", - "The pipeline will include several GitHub actions for Continuous Integration (CI) testing", - "ci", - ), - PipelineFeature( - markdown_badges, - "Add Github badges", - "The README.md file of the pipeline will include 
GitHub badges", - "github_badges", - ), - PipelineFeature( - markdown_configuration, - "Add configuration files", - "The pipeline will include configuration profiles containing custom parameters requried to run nf-core pipelines at different institutions", - "nf_core_configs", - ), - ) - yield Center( - Button("Continue", id="continue", variant="success"), - classes="cta", - ) + with Horizontal(): + with VerticalScroll(): + yield ScrollableContainer( + PipelineFeature( + markdown_genomes, + "Use reference genomes", + "The pipeline will be configured to use a copy of the most common reference genome files from iGenomes", + "igenomes", + ), + PipelineFeature( + markdown_ci, + "Add Github CI tests", + "The pipeline will include several GitHub actions for Continuous Integration (CI) testing", + "ci", + ), + PipelineFeature( + markdown_badges, + "Add Github badges", + "The README.md file of the pipeline will include GitHub badges", + "github_badges", + ), + PipelineFeature( + markdown_configuration, + "Add configuration files", + "The pipeline will include configuration profiles containing custom parameters requried to run nf-core pipelines at different institutions", + "nf_core_configs", + ), + ) + yield Center( + Button("Continue", id="continue", variant="success"), + classes="cta", + ) + yield Center(self.parent.LOG_HANDLER.console, classes="cta log") @on(Button.Pressed, "#continue") def on_button_pressed(self, event: Button.Pressed) -> None: diff --git a/nf_core/pipelines/create/finaldetails.py b/nf_core/pipelines/create/finaldetails.py index 203f2f6bb..904d0e362 100644 --- a/nf_core/pipelines/create/finaldetails.py +++ b/nf_core/pipelines/create/finaldetails.py @@ -3,7 +3,7 @@ from textual import on from textual.app import ComposeResult -from textual.containers import Center, Horizontal +from textual.containers import Center, Horizontal, VerticalScroll from textual.screen import Screen from textual.widgets import Button, Footer, Header, Input, Markdown, Static, 
Switch @@ -17,37 +17,42 @@ class FinalDetails(Screen): def compose(self) -> ComposeResult: yield Header() yield Footer() - yield Markdown( - dedent( - """ - # Final details - """ - ) - ) - - with Horizontal(): - yield TextInput( - "version", - "Version", - "First version of the pipeline", - "1.0dev", - classes="column", - ) - yield TextInput( - "outdir", - "Output directory", - "Path to the output directory where the pipeline will be created", - ".", - classes="column", - ) with Horizontal(): - yield Switch(value=False, id="force") - yield Static("If the pipeline output directory exists, remove it and continue.", classes="custom_grid") + with VerticalScroll(): + yield Markdown( + dedent( + """ + # Final details + """ + ) + ) + + with Horizontal(): + yield TextInput( + "version", + "Version", + "First version of the pipeline", + "1.0dev", + classes="column", + ) + yield TextInput( + "outdir", + "Output directory", + "Path to the output directory where the pipeline will be created", + ".", + classes="column", + ) + with Horizontal(): + yield Switch(value=False, id="force") + yield Static( + "If the pipeline output directory exists, remove it and continue.", classes="custom_grid" + ) - yield Center( - Button("Finish", id="finish", variant="success"), - classes="cta", - ) + yield Center( + Button("Finish", id="finish", variant="success"), + classes="cta", + ) + yield Center(self.parent.LOG_HANDLER.console, classes="cta log") @on(Button.Pressed, "#finish") def on_button_pressed(self, event: Button.Pressed) -> None: diff --git a/nf_core/pipelines/create/githubrepo.py b/nf_core/pipelines/create/githubrepo.py index 6660be622..79db4a422 100644 --- a/nf_core/pipelines/create/githubrepo.py +++ b/nf_core/pipelines/create/githubrepo.py @@ -6,7 +6,7 @@ from github import Github, GithubException from textual import on from textual.app import ComposeResult -from textual.containers import Center, Horizontal +from textual.containers import Center, Horizontal, VerticalScroll from 
textual.screen import Screen from textual.widgets import Button, Footer, Header, Input, Markdown, Static, Switch @@ -43,43 +43,45 @@ class GithubRepo(Screen): def compose(self) -> ComposeResult: yield Header() yield Footer() - yield Markdown(dedent(github_text_markdown)) with Horizontal(): - yield TextInput( - "gh_username", - "GitHub username", - "Your GitHub username", - classes="column", - ) - token = "GITHUB_AUTH_TOKEN" in os.environ - yield TextInput( - "token", - "GitHub token", - "Your GitHub personal access token for login. Will use the environment variable GITHUB_AUTH_TOKEN if set.", - classes="column", - disabled=token, - ) - yield Markdown(dedent(repo_config_markdown)) - with Horizontal(): - yield Switch(value=False, id="private") - yield Static("Select if the new GitHub repo must be private.", classes="custom_grid") - with Horizontal(): - yield Switch(value=True, id="push") - yield Static( - "Select if you would like to push all the pipeline template files to your GitHub repo\nand all the branches required to keep the pipeline up to date with new releases of nf-core.", - classes="custom_grid", - ) - yield Center( - Button("Create GitHub repo", id="create", variant="success"), - Button("Finish without creating a repo", id="finish", variant="primary"), - classes="cta", - ) + with VerticalScroll(): + yield Markdown(dedent(github_text_markdown)) + with Horizontal(): + yield TextInput( + "gh_username", + "GitHub username", + "Your GitHub username", + classes="column", + ) + token = "GITHUB_AUTH_TOKEN" in os.environ + yield TextInput( + "token", + "GitHub token", + "Your GitHub personal access token for login. 
Will use the environment variable GITHUB_AUTH_TOKEN if set.", + classes="column", + disabled=token, + ) + yield Markdown(dedent(repo_config_markdown)) + with Horizontal(): + yield Switch(value=False, id="private") + yield Static("Select if the new GitHub repo must be private.", classes="custom_grid") + with Horizontal(): + yield Switch(value=True, id="push") + yield Static( + "Select if you would like to push all the pipeline template files to your GitHub repo\nand all the branches required to keep the pipeline up to date with new releases of nf-core.", + classes="custom_grid", + ) + yield Center( + Button("Create GitHub repo", id="create", variant="success"), + Button("Finish without creating a repo", id="exit", variant="primary"), + classes="cta", + ) + yield Center(self.parent.LOG_HANDLER.console, classes="cta log") @on(Button.Pressed, "#create") def on_button_pressed(self, event: Button.Pressed) -> None: """Create a GitHub repo""" # Save GitHub username and token - print("button pressed") github_variables = {} for text_input in self.query("TextInput"): this_input = text_input.query_one(Input) @@ -134,13 +136,13 @@ def on_button_pressed(self, event: Button.Pressed) -> None: except UserWarning as e: log.info(f"There was an error with message: {e}" f"\n{exit_help_text_markdown}") - self.parent.exit() + self.parent.switch_screen("bye") - @on(Button.Pressed, "#finish") + @on(Button.Pressed, "#exit") def on_button_pressed(self, event: Button.Pressed) -> None: """Show help message and exit""" log.info(exit_help_text_markdown) - self.parent.exit() + self.parent.switch_screen("bye") def _create_repo_and_push(self, org, pipeline_repo, private, push): """Create a GitHub repository and push all branches.""" diff --git a/nf_core/pipelines/create/nfcorepipeline.py b/nf_core/pipelines/create/nfcorepipeline.py index a8902daf3..746fbf40b 100644 --- a/nf_core/pipelines/create/nfcorepipeline.py +++ b/nf_core/pipelines/create/nfcorepipeline.py @@ -1,6 +1,6 @@ from textual import 
on from textual.app import ComposeResult -from textual.containers import Center, HorizontalScroll, ScrollableContainer +from textual.containers import Center, Horizontal, ScrollableContainer, VerticalScroll from textual.screen import Screen from textual.widgets import Button, Footer, Header, Switch @@ -13,18 +13,21 @@ class NfcorePipeline(Screen): def compose(self) -> ComposeResult: yield Header() yield Footer() - yield ScrollableContainer( - PipelineFeature( - markdown_genomes, - "Use reference genomes", - "The pipeline will be configured to use a copy of the most common reference genome files from iGenomes", - "igenomes", - ), - ) - yield Center( - Button("Continue", id="continue", variant="success"), - classes="cta", - ) + with Horizontal(): + with VerticalScroll(): + yield ScrollableContainer( + PipelineFeature( + markdown_genomes, + "Use reference genomes", + "The pipeline will be configured to use a copy of the most common reference genome files from iGenomes", + "igenomes", + ), + ) + yield Center( + Button("Continue", id="continue", variant="success"), + classes="cta", + ) + yield Center(self.parent.LOG_HANDLER.console, classes="cta log") @on(Button.Pressed, "#continue") def on_button_pressed(self, event: Button.Pressed) -> None: diff --git a/nf_core/pipelines/create/pipelinetype.py b/nf_core/pipelines/create/pipelinetype.py index 72624c5f8..979e4408c 100644 --- a/nf_core/pipelines/create/pipelinetype.py +++ b/nf_core/pipelines/create/pipelinetype.py @@ -1,6 +1,6 @@ from textual.app import ComposeResult +from textual.containers import Center, Horizontal, VerticalScroll from textual.screen import Screen -from textual.containers import Center from textual.widgets import Button, Footer, Header, Markdown markdown_intro = """ @@ -40,10 +40,13 @@ class ChoosePipelineType(Screen): def compose(self) -> ComposeResult: yield Header() yield Footer() - yield Markdown(markdown_intro) - yield Center( - Button("nf-core", id="type_nfcore", variant="success"), - 
Button("Custom", id="type_custom", variant="primary"), - classes="cta", - ) - yield Markdown(markdown_details) + with Horizontal(): + with VerticalScroll(): + yield Markdown(markdown_intro) + yield Center( + Button("nf-core", id="type_nfcore", variant="success"), + Button("Custom", id="type_custom", variant="primary"), + classes="cta", + ) + yield Markdown(markdown_details) + yield Center(self.parent.LOG_HANDLER.console, classes="cta log") diff --git a/nf_core/pipelines/create/utils.py b/nf_core/pipelines/create/utils.py index 5566c17c8..ae7ac097d 100644 --- a/nf_core/pipelines/create/utils.py +++ b/nf_core/pipelines/create/utils.py @@ -1,13 +1,17 @@ import re +from logging import LogRecord from pathlib import Path from typing import Optional from pydantic import BaseModel, ConfigDict, field_validator +from rich.logging import RichHandler from textual import on +from textual._context import active_app from textual.app import ComposeResult from textual.containers import HorizontalScroll from textual.validation import ValidationResult, Validator -from textual.widgets import Button, Input, Markdown, Static, Switch +from textual.widget import Widget +from textual.widgets import Button, Input, Markdown, RichLog, Static, Switch class CreateConfig(BaseModel): @@ -165,6 +169,27 @@ def compose(self) -> ComposeResult: yield HelpText(markdown=self.markdown, classes="help_box") +class LoggingConsole(RichLog): + file = False + console: Widget + + def print(self, content): + self.write(content) + + +class CustomLogHandler(RichHandler): + """A Logging handler which extends RichHandler to write to a Widget and handle a Textual App.""" + + def emit(self, record: LogRecord) -> None: + """Invoked by logging.""" + try: + app = active_app.get() + except LookupError: + pass + else: + super().emit(record) + + ## Markdown text to reuse in different screens markdown_genomes = """ Nf-core pipelines are configured to use a copy of the most common reference genome files. 
diff --git a/nf_core/pipelines/create/welcome.py b/nf_core/pipelines/create/welcome.py index 0be70cc4c..d572f3149 100644 --- a/nf_core/pipelines/create/welcome.py +++ b/nf_core/pipelines/create/welcome.py @@ -1,5 +1,5 @@ from textual.app import ComposeResult -from textual.containers import Center +from textual.containers import Center, Horizontal, VerticalScroll from textual.screen import Screen from textual.widgets import Button, Footer, Header, Markdown, Static @@ -24,13 +24,16 @@ class WelcomeScreen(Screen): def compose(self) -> ComposeResult: yield Header() yield Footer() - yield Static( - f"\n[green]{' ' * 40},--.[grey39]/[green],-." - + "\n[blue] ___ __ __ __ ___ [green]/,-._.--~\\" - + "\n[blue]|\ | |__ __ / ` / \ |__) |__ [yellow] } {" - + "\n[blue] | \| | \__, \__/ | \ |___ [green]\`-._,-`-," - + "\n[green] `._,._,'\n", - id="logo", - ) - yield Markdown(markdown) - yield Center(Button("Let's go!", id="start", variant="success"), classes="cta") + with Horizontal(): + with VerticalScroll(): + yield Static( + f"\n[green]{' ' * 40},--.[grey39]/[green],-." 
+ + "\n[blue] ___ __ __ __ ___ [green]/,-._.--~\\" + + "\n[blue]|\ | |__ __ / ` / \ |__) |__ [yellow] } {" + + "\n[blue] | \| | \__, \__/ | \ |___ [green]\`-._,-`-," + + "\n[green] `._,._,'\n", + id="logo", + ) + yield Markdown(markdown) + yield Center(Button("Let's go!", id="start", variant="success"), classes="cta") + yield Center(self.parent.LOG_HANDLER.console, classes="cta log") From 4c237e29ba61e6b36571601bdc24f2c7f3dbbc4c Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 3 Nov 2023 14:16:54 +0100 Subject: [PATCH 034/737] fix githubrepo buttons handled by the same function --- nf_core/__main__.py | 6 +- nf_core/pipelines/create/githubrepo.py | 147 ++++++++++++++----------- 2 files changed, 87 insertions(+), 66 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 97f166f28..13e2e97a5 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -580,7 +580,11 @@ def create(name, description, author, version, force, outdir, template_yaml, pla "\nRun with all command line arguments to avoid using an interactive interface." ) app = PipelineCreateApp() - app.run() + try: + app.run() + except UserWarning as e: + log.error(e) + sys.exit(1) else: sys.exit(0) diff --git a/nf_core/pipelines/create/githubrepo.py b/nf_core/pipelines/create/githubrepo.py index 79db4a422..a99f28feb 100644 --- a/nf_core/pipelines/create/githubrepo.py +++ b/nf_core/pipelines/create/githubrepo.py @@ -3,7 +3,7 @@ from textwrap import dedent import git -from github import Github, GithubException +from github import Github, GithubException, UnknownObjectException from textual import on from textual.app import ComposeResult from textual.containers import Center, Horizontal, VerticalScroll @@ -56,8 +56,8 @@ def compose(self) -> ComposeResult: token = "GITHUB_AUTH_TOKEN" in os.environ yield TextInput( "token", - "GitHub token", - "Your GitHub personal access token for login. 
Will use the environment variable GITHUB_AUTH_TOKEN if set.", + "Using the environment variable GITHUB_AUTH_TOKEN" if token else "GitHub token", + "Your GitHub personal access token for login.", classes="column", disabled=token, ) @@ -72,76 +72,89 @@ def compose(self) -> ComposeResult: classes="custom_grid", ) yield Center( - Button("Create GitHub repo", id="create", variant="success"), + Button("Create GitHub repo", id="create_github", variant="success"), Button("Finish without creating a repo", id="exit", variant="primary"), classes="cta", ) yield Center(self.parent.LOG_HANDLER.console, classes="cta log") - @on(Button.Pressed, "#create") def on_button_pressed(self, event: Button.Pressed) -> None: - """Create a GitHub repo""" - # Save GitHub username and token - github_variables = {} - for text_input in self.query("TextInput"): - this_input = text_input.query_one(Input) - github_variables[text_input.field_id] = this_input.value - # Save GitHub repo config - for switch_input in self.query("Switch"): - this_switch = switch_input.query_one(Switch) - github_variables[switch_input.field_id] = this_switch.value - - # Pipeline git repo - pipeline_repo = git.Repo.init(self.parent.TEMPLATE_CONFIG.outdir) - - # GitHub authentication - if "GITHUB_AUTH_TOKEN" in os.environ: - github_auth = self._github_authentication(github_variables["gh_username"], os.environ["GITHUB_AUTH_TOKEN"]) - elif github_variables["token"]: - github_auth = self._github_authentication(github_variables["gh_username"], github_variables["token"]) - else: - raise UserWarning( - f"Could not authenticate to GitHub with user name '{github_variables['gh_username']}'." - "Please provide an authentication token or set the environment variable 'GITHUB_AUTH_TOKEN'." 
- f"\n{exit_help_text_markdown}" - ) - - user = github_auth.get_user() - org = None - # Make sure that the authentication was successful - try: - user.login - except GithubException.GithubException as e: - raise UserWarning( - f"Could not authenticate to GitHub with user name '{github_variables['gh_username']}'." - "Please make sure that the provided user name and token are correct." - f"\n{exit_help_text_markdown}" - ) + """Create a GitHub repo or show help message and exit""" + if event.button.id == "create_github": + # Create a GitHub repo + + # Save GitHub username and token + github_variables = {} + for text_input in self.query("TextInput"): + this_input = text_input.query_one(Input) + github_variables[text_input.field_id] = this_input.value + # Save GitHub repo config + for switch_input in self.query("Switch"): + github_variables[switch_input.id] = switch_input.value + + # Pipeline git repo + pipeline_repo = git.Repo.init(self.parent.TEMPLATE_CONFIG.outdir) + + # GitHub authentication + if "GITHUB_AUTH_TOKEN" in os.environ: + github_auth = self._github_authentication( + github_variables["gh_username"], os.environ["GITHUB_AUTH_TOKEN"] + ) + log.debug("Using GITHUB_AUTH_TOKEN environment variable") + elif github_variables["token"]: + github_auth = self._github_authentication(github_variables["gh_username"], github_variables["token"]) + else: + raise UserWarning( + f"Could not authenticate to GitHub with user name '{github_variables['gh_username']}'." + "Please provide an authentication token or set the environment variable 'GITHUB_AUTH_TOKEN'." 
+ f"\n{exit_help_text_markdown}" + ) - # Check if organisation exists - # If the organisation is nf-core or it doesn¡t exist, the repo will be created in the user account - if self.parent.TEMPLATE_CONFIG.org != "nf-core": + user = github_auth.get_user() + org = None + # Make sure that the authentication was successful try: - org = github_auth.get_organization(self.parent.TEMPLATE_CONFIG.org) - except GithubException.UnknownObjectException: - pass + user.login + log.debug("GitHub authentication successful") + except GithubException as e: + raise UserWarning( + f"Could not authenticate to GitHub with user name '{github_variables['gh_username']}'." + "Please make sure that the provided user name and token are correct." + f"\n{exit_help_text_markdown}" + ) - # Create the repo - try: - if org: - self._create_repo_and_push(org, pipeline_repo, github_variables["private"], github_variables["push"]) - else: - # Create the repo in the user's account - self._create_repo_and_push(user, pipeline_repo, github_variables["private"], github_variables["push"]) - except UserWarning as e: - log.info(f"There was an error with message: {e}" f"\n{exit_help_text_markdown}") + # Check if organisation exists + # If the organisation is nf-core or it doesn't exist, the repo will be created in the user account + if self.parent.TEMPLATE_CONFIG.org != "nf-core": + try: + org = github_auth.get_organization(self.parent.TEMPLATE_CONFIG.org) + log.info( + f"Repo will be created in the GitHub organisation account '{self.parent.TEMPLATE_CONFIG.org}'" + ) + except UnknownObjectException: + pass - self.parent.switch_screen("bye") + # Create the repo + try: + if org: + self._create_repo_and_push( + org, pipeline_repo, github_variables["private"], github_variables["push"] + ) + else: + # Create the repo in the user's account + log.info( + f"Repo will be created in the GitHub organisation account '{github_variables['gh_username']}'" + ) + self._create_repo_and_push( + user, pipeline_repo, 
github_variables["private"], github_variables["push"] + ) + log.info(f"GitHub repository '{self.parent.TEMPLATE_CONFIG.name}' created successfully") + except UserWarning as e: + log.info(f"There was an error with message: {e}" f"\n{exit_help_text_markdown}") + elif event.button.id == "exit": + # Show help message and exit + log.info(exit_help_text_markdown) - @on(Button.Pressed, "#exit") - def on_button_pressed(self, event: Button.Pressed) -> None: - """Show help message and exit""" - log.info(exit_help_text_markdown) self.parent.switch_screen("bye") def _create_repo_and_push(self, org, pipeline_repo, private, push): @@ -153,10 +166,10 @@ def _create_repo_and_push(self, org, pipeline_repo, private, push): try: repo.get_commits().totalCount raise UserWarning(f"GitHub repository '{self.parent.TEMPLATE_CONFIG.name}' already exists") - except GithubException.GithubException: + except GithubException: # Repo is empty repo_exists = True - except GithubException.UnknownObjectException: + except UnknownObjectException: # Repo doesn't exist repo_exists = False @@ -167,7 +180,11 @@ def _create_repo_and_push(self, org, pipeline_repo, private, push): ) # Add the remote and push - pipeline_repo.create_remote("origin", repo.clone_url) + try: + pipeline_repo.create_remote("origin", repo.clone_url) + except git.exc.GitCommandError: + # Remote already exists + pass if push: pipeline_repo.remotes.origin.push(all=True).raise_if_error() From 8318e0c9f8fb7ff163dd2b33654caa3681a029ae Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 6 Nov 2023 13:29:26 +0100 Subject: [PATCH 035/737] fix pytests --- nf_core/lint/files_unchanged.py | 2 +- nf_core/pipelines/create/create.py | 38 ++++++++------- nf_core/sync.py | 1 - tests/data/pipeline_create_template.yml | 7 ++- tests/data/pipeline_create_template_skip.yml | 10 +++- tests/modules/patch.py | 2 +- tests/test_bump_version.py | 6 +-- tests/test_cli.py | 40 +++++++++++---- tests/test_create.py | 51 +++++--------------- tests/test_download.py 
| 1 - tests/test_launch.py | 2 +- tests/test_lint.py | 2 +- tests/test_modules.py | 2 +- tests/test_params_file.py | 2 +- tests/test_schema.py | 2 +- tests/test_subworkflows.py | 2 +- tests/test_sync.py | 2 +- tests/test_utils.py | 1 - 18 files changed, 89 insertions(+), 84 deletions(-) diff --git a/nf_core/lint/files_unchanged.py b/nf_core/lint/files_unchanged.py index 9a0175571..8fc5160b4 100644 --- a/nf_core/lint/files_unchanged.py +++ b/nf_core/lint/files_unchanged.py @@ -130,7 +130,7 @@ def files_unchanged(self): test_pipeline_dir = os.path.join(tmp_dir, f"{prefix}-{short_name}") create_obj = nf_core.pipelines.create.create.PipelineCreate( - None, None, None, no_git=True, outdir=test_pipeline_dir, template_yaml_path=template_yaml_path + None, None, None, no_git=True, outdir=test_pipeline_dir, template_config=template_yaml_path ) create_obj.init_pipeline() diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 4bdf64a62..7660b3850 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -10,11 +10,11 @@ import sys import time from pathlib import Path +from typing import Optional, Union import filetype import git import jinja2 -import questionary import requests import yaml @@ -47,17 +47,17 @@ class PipelineCreate: def __init__( self, - name=None, - description=None, - author=None, - version="1.0dev", - no_git=False, - force=False, - outdir=None, - template_config=None, - organisation="nf-core", - from_config_file=False, - default_branch=None, + name: Optional[str] = None, + description: Optional[str] = None, + author: Optional[str] = None, + version: str = "1.0dev", + no_git: bool = False, + force: bool = False, + outdir: Optional[str] = None, + template_config: Optional[Union[str, CreateConfig, Path]] = None, + organisation: str = "nf-core", + from_config_file: bool = False, + default_branch: Optional[str] = None, ): if isinstance(template_config, CreateConfig): self.config = 
template_config @@ -69,14 +69,18 @@ def __init__( self.config = CreateConfig(**config_yml["template"]) else: raise UserWarning("The template configuration was not provided in '.nf-core.yml'.") - elif (name and description and author) or (template_config and isinstance(template_config, str)): + elif (name and description and author) or ( + template_config and (isinstance(template_config, str) or isinstance(template_config, Path)) + ): # Obtain a CreateConfig object from the template yaml file self.config = self.check_template_yaml_info(template_config, name, description, author) self.update_config(organisation, version, force, outdir) else: raise UserWarning("The template configuration was not provided.") - self.jinja_params, skip_paths = self.obtain_jinja_params_dict(self.config.skip_features, self.config.outdir) + self.jinja_params, skip_paths = self.obtain_jinja_params_dict( + self.config.skip_features or [], self.config.outdir + ) skippable_paths = { "github": [ @@ -209,9 +213,9 @@ def obtain_jinja_params_dict(self, features_to_skip, pipeline_dir): else: jinja_params[t_area] = True - # If github is selected, exclude also github_badges - # if not param_dict["github"]: - # param_dict["github_badges"] = False + # Add is_nfcore as an area to skip for non-nf-core pipelines, to skip all nf-core files + if not jinja_params["is_nfcore"]: + skip_paths.append("is_nfcore") # Set the last parameters based on the ones provided jinja_params["short_name"] = ( diff --git a/nf_core/sync.py b/nf_core/sync.py index 6175ebff0..45abe2b79 100644 --- a/nf_core/sync.py +++ b/nf_core/sync.py @@ -267,7 +267,6 @@ def make_template_pipeline(self): force=True, outdir=self.pipeline_dir, author=self.wf_config["manifest.author"].strip('"').strip("'"), - plain=True, ).init_pipeline() except Exception as err: # Reset to where you were to prevent git getting messed up. 
diff --git a/tests/data/pipeline_create_template.yml b/tests/data/pipeline_create_template.yml index 12e48e9c2..0ed534aa1 100644 --- a/tests/data/pipeline_create_template.yml +++ b/tests/data/pipeline_create_template.yml @@ -1 +1,6 @@ -prefix: testprefix +name: test +description: just for 4w3s0m3 tests +author: Chuck Norris +version: 1.0.0 +force: True +org: testprefix diff --git a/tests/data/pipeline_create_template_skip.yml b/tests/data/pipeline_create_template_skip.yml index b69175e0b..ed498cb73 100644 --- a/tests/data/pipeline_create_template_skip.yml +++ b/tests/data/pipeline_create_template_skip.yml @@ -1,5 +1,11 @@ -prefix: testprefix -skip: +name: test +description: just for 4w3s0m3 tests +author: Chuck Norris +version: 1.0.0 +force: True +org: testprefix +is_nfcore: False +skip_features: - github - ci - github_badges diff --git a/tests/modules/patch.py b/tests/modules/patch.py index 7263d3026..c34e1f740 100644 --- a/tests/modules/patch.py +++ b/tests/modules/patch.py @@ -349,7 +349,7 @@ def test_remove_patch(self): "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) - with mock.patch.object(nf_core.pipelines.create.questionary, "confirm") as mock_questionary: + with mock.patch.object(nf_core.modules.patch.questionary, "confirm") as mock_questionary: mock_questionary.unsafe_ask.return_value = True patch_obj.remove(BISMARK_ALIGN) # Check that the diff file has been removed diff --git a/tests/test_bump_version.py b/tests/test_bump_version.py index 07de9687f..77edd4bfd 100644 --- a/tests/test_bump_version.py +++ b/tests/test_bump_version.py @@ -17,7 +17,7 @@ def test_bump_pipeline_version(datafiles, tmp_path): # Get a workflow and configs test_pipeline_dir = os.path.join(tmp_path, "nf-core-testpipeline") create_obj = nf_core.pipelines.create.create.PipelineCreate( - "testpipeline", "This is a test pipeline", "Test McTestFace", no_git=True, outdir=test_pipeline_dir, plain=True + "testpipeline", "This is a test pipeline", "Test McTestFace", no_git=True, 
outdir=test_pipeline_dir ) create_obj.init_pipeline() pipeline_obj = nf_core.utils.Pipeline(test_pipeline_dir) @@ -37,7 +37,7 @@ def test_dev_bump_pipeline_version(datafiles, tmp_path): # Get a workflow and configs test_pipeline_dir = os.path.join(tmp_path, "nf-core-testpipeline") create_obj = nf_core.pipelines.create.create.PipelineCreate( - "testpipeline", "This is a test pipeline", "Test McTestFace", no_git=True, outdir=test_pipeline_dir, plain=True + "testpipeline", "This is a test pipeline", "Test McTestFace", no_git=True, outdir=test_pipeline_dir ) create_obj.init_pipeline() pipeline_obj = nf_core.utils.Pipeline(test_pipeline_dir) @@ -56,7 +56,7 @@ def test_bump_nextflow_version(datafiles, tmp_path): # Get a workflow and configs test_pipeline_dir = os.path.join(tmp_path, "nf-core-testpipeline") create_obj = nf_core.pipelines.create.create.PipelineCreate( - "testpipeline", "This is a test pipeline", "Test McTestFace", no_git=True, outdir=test_pipeline_dir, plain=True + "testpipeline", "This is a test pipeline", "Test McTestFace", no_git=True, outdir=test_pipeline_dir ) create_obj.init_pipeline() pipeline_obj = nf_core.utils.Pipeline(test_pipeline_dir) diff --git a/tests/test_cli.py b/tests/test_cli.py index d488e6493..1c110cd6e 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -228,21 +228,17 @@ def test_licences_log_error(self, mock_lic): assert error_txt in captured_logs.output[-1] assert captured_logs.records[-1].levelname == "ERROR" - @mock.patch("nf_core.pipelines.create.PipelineCreate") + @mock.patch("nf_core.pipelines.create.create.PipelineCreate") def test_create(self, mock_create): """Test nf-core pipeline is created and cli parameters are passed on.""" params = { - "name": "pipeline name", + "name": "pipelinename", "description": "pipeline description", "author": "Kalle Anka", - "version": "1.2.3", - "force": None, "outdir": "/path/outdir", - "template-yaml": "file.yaml", - "plain": None, } - cmd = ["create"] + self.assemble_params(params) + 
cmd = ["pipelines", "create"] + self.assemble_params(params) result = self.invoke_cli(cmd) assert result.exit_code == 0 @@ -250,14 +246,38 @@ def test_create(self, mock_create): params["name"], params["description"], params["author"], - version=params["version"], force="force" in params, + version=None, outdir=params["outdir"], - template_yaml_path=params["template-yaml"], - plain="plain" in params, + organisation=None, ) mock_create.return_value.init_pipeline.assert_called_once() + @mock.patch("nf_core.pipelines.create.create.PipelineCreate") + def test_create_error(self, mock_create): + """Test `nf-core pipelines create` run without providing all the arguments thorws an error.""" + params = { + "name": "pipelinename", + } + + cmd = ["pipelines", "create"] + self.assemble_params(params) + result = self.invoke_cli(cmd) + + assert result.exit_code == 1 + assert "Command arguments are not accepted in interactive mode." in result.output + + @mock.patch("nf_core.pipelines.create.PipelineCreateApp") + def test_create_app(self, mock_create): + """Test `nf-core pipelines create` runs an App.""" + cmd = ["pipelines", "create"] + result = self.invoke_cli(cmd) + + assert result.exit_code == 0 + assert "Launching interactive nf-core pipeline creation tool." 
in result.output + + mock_create.assert_called_once_with() + mock_create.return_value.run.assert_called_once() + @mock.patch("nf_core.utils.is_pipeline_directory") @mock.patch("nf_core.lint.run_linting") def test_lint(self, mock_lint, mock_is_pipeline): diff --git a/tests/test_create.py b/tests/test_create.py index 3154539bb..c3b7f931f 100644 --- a/tests/test_create.py +++ b/tests/test_create.py @@ -33,14 +33,13 @@ def test_pipeline_creation(self): version=self.pipeline_version, no_git=False, force=True, - plain=True, default_branch=self.default_branch, ) - assert pipeline.template_params["name"] == self.pipeline_name - assert pipeline.template_params["description"] == self.pipeline_description - assert pipeline.template_params["author"] == self.pipeline_author - assert pipeline.template_params["version"] == self.pipeline_version + assert pipeline.config.name == self.pipeline_name + assert pipeline.config.description == self.pipeline_description + assert pipeline.config.author == self.pipeline_author + assert pipeline.config.version == self.pipeline_version @with_temporary_folder def test_pipeline_creation_initiation(self, tmp_path): @@ -52,7 +51,6 @@ def test_pipeline_creation_initiation(self, tmp_path): no_git=False, force=True, outdir=tmp_path, - plain=True, default_branch=self.default_branch, ) pipeline.init_pipeline() @@ -60,20 +58,14 @@ def test_pipeline_creation_initiation(self, tmp_path): assert f" {self.default_branch}\n" in git.Repo.init(pipeline.outdir).git.branch() assert not os.path.exists(os.path.join(pipeline.outdir, "pipeline_template.yml")) with open(os.path.join(pipeline.outdir, ".nf-core.yml")) as fh: - assert "template" not in fh.read() + assert "template" in fh.read() @with_temporary_folder def test_pipeline_creation_initiation_with_yml(self, tmp_path): pipeline = nf_core.pipelines.create.create.PipelineCreate( - name=self.pipeline_name, - description=self.pipeline_description, - author=self.pipeline_author, - version=self.pipeline_version, 
no_git=False, - force=True, outdir=tmp_path, - template_yaml_path=PIPELINE_TEMPLATE_YML, - plain=True, + template_config=PIPELINE_TEMPLATE_YML, default_branch=self.default_branch, ) pipeline.init_pipeline() @@ -86,23 +78,12 @@ def test_pipeline_creation_initiation_with_yml(self, tmp_path): with open(os.path.join(pipeline.outdir, ".nf-core.yml")) as fh: nfcore_yml = yaml.safe_load(fh) assert "template" in nfcore_yml - assert nfcore_yml["template"] == yaml.safe_load(PIPELINE_TEMPLATE_YML.read_text()) + assert yaml.safe_load(PIPELINE_TEMPLATE_YML.read_text()).items() <= nfcore_yml["template"].items() - @mock.patch.object(nf_core.pipelines.create.create.PipelineCreate, "customize_template") - @mock.patch.object(nf_core.pipelines.create.create.questionary, "confirm") @with_temporary_folder - def test_pipeline_creation_initiation_customize_template(self, mock_questionary, mock_customize, tmp_path): - mock_questionary.unsafe_ask.return_value = True - mock_customize.return_value = {"prefix": "testprefix"} + def test_pipeline_creation_initiation_customize_template(self, tmp_path): pipeline = nf_core.pipelines.create.create.PipelineCreate( - name=self.pipeline_name, - description=self.pipeline_description, - author=self.pipeline_author, - version=self.pipeline_version, - no_git=False, - force=True, - outdir=tmp_path, - default_branch=self.default_branch, + outdir=tmp_path, template_config=PIPELINE_TEMPLATE_YML, default_branch=self.default_branch ) pipeline.init_pipeline() assert os.path.isdir(os.path.join(pipeline.outdir, ".git")) @@ -114,24 +95,16 @@ def test_pipeline_creation_initiation_customize_template(self, mock_questionary, with open(os.path.join(pipeline.outdir, ".nf-core.yml")) as fh: nfcore_yml = yaml.safe_load(fh) assert "template" in nfcore_yml - assert nfcore_yml["template"] == yaml.safe_load(PIPELINE_TEMPLATE_YML.read_text()) + assert yaml.safe_load(PIPELINE_TEMPLATE_YML.read_text()).items() <= nfcore_yml["template"].items() @with_temporary_folder def 
test_pipeline_creation_with_yml_skip(self, tmp_path): pipeline = nf_core.pipelines.create.create.PipelineCreate( - name=self.pipeline_name, - description=self.pipeline_description, - author=self.pipeline_author, - version=self.pipeline_version, - no_git=False, - force=True, outdir=tmp_path, - template_yaml_path=PIPELINE_TEMPLATE_YML_SKIP, - plain=True, + template_config=PIPELINE_TEMPLATE_YML_SKIP, default_branch=self.default_branch, ) pipeline.init_pipeline() - assert not os.path.isdir(os.path.join(pipeline.outdir, ".git")) # Check pipeline template yml has been dumped to `.nf-core.yml` and matches input assert not os.path.exists(os.path.join(pipeline.outdir, "pipeline_template.yml")) @@ -139,7 +112,7 @@ def test_pipeline_creation_with_yml_skip(self, tmp_path): with open(os.path.join(pipeline.outdir, ".nf-core.yml")) as fh: nfcore_yml = yaml.safe_load(fh) assert "template" in nfcore_yml - assert nfcore_yml["template"] == yaml.safe_load(PIPELINE_TEMPLATE_YML_SKIP.read_text()) + assert yaml.safe_load(PIPELINE_TEMPLATE_YML.read_text()).items() <= nfcore_yml["template"].items() # Check that some of the skipped files are not present assert not os.path.exists(os.path.join(pipeline.outdir, "CODE_OF_CONDUCT.md")) diff --git a/tests/test_download.py b/tests/test_download.py index ea59c7bd6..c9240e2ce 100644 --- a/tests/test_download.py +++ b/tests/test_download.py @@ -116,7 +116,6 @@ def test_wf_use_local_configs(self, tmp_path): "Test McTestFace", no_git=True, outdir=test_pipeline_dir, - plain=True, ) create_obj.init_pipeline() diff --git a/tests/test_launch.py b/tests/test_launch.py index d3ddde6c8..5de841e9e 100644 --- a/tests/test_launch.py +++ b/tests/test_launch.py @@ -72,7 +72,7 @@ def test_make_pipeline_schema(self, tmp_path): """Create a workflow, but delete the schema file, then try to load it""" test_pipeline_dir = os.path.join(tmp_path, "wf") create_obj = nf_core.pipelines.create.create.PipelineCreate( - "testpipeline", "", "", outdir=test_pipeline_dir, 
no_git=True, plain=True + "testpipeline", "a description", "Me", outdir=test_pipeline_dir, no_git=True ) create_obj.init_pipeline() os.remove(os.path.join(test_pipeline_dir, "nextflow_schema.json")) diff --git a/tests/test_lint.py b/tests/test_lint.py index 99e0506cf..a967618a9 100644 --- a/tests/test_lint.py +++ b/tests/test_lint.py @@ -27,7 +27,7 @@ def setUp(self): self.tmp_dir = tempfile.mkdtemp() self.test_pipeline_dir = os.path.join(self.tmp_dir, "nf-core-testpipeline") self.create_obj = nf_core.pipelines.create.create.PipelineCreate( - "testpipeline", "This is a test pipeline", "Test McTestFace", outdir=self.test_pipeline_dir, plain=True + "testpipeline", "This is a test pipeline", "Test McTestFace", outdir=self.test_pipeline_dir ) self.create_obj.init_pipeline() # Base lint object on this directory diff --git a/tests/test_modules.py b/tests/test_modules.py index 0d784c74e..76dff9da3 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -73,7 +73,7 @@ def setUp(self): self.pipeline_name = "mypipeline" self.pipeline_dir = os.path.join(self.tmp_dir, self.pipeline_name) nf_core.pipelines.create.create.PipelineCreate( - self.pipeline_name, "it is mine", "me", no_git=True, outdir=self.pipeline_dir, plain=True + self.pipeline_name, "it is mine", "me", no_git=True, outdir=self.pipeline_dir ).init_pipeline() # Set up install objects self.mods_install = nf_core.modules.ModuleInstall(self.pipeline_dir, prompt=False, force=True) diff --git a/tests/test_params_file.py b/tests/test_params_file.py index 882b57512..e692ad687 100644 --- a/tests/test_params_file.py +++ b/tests/test_params_file.py @@ -22,7 +22,7 @@ def setup_class(cls): cls.tmp_dir = tempfile.mkdtemp() cls.template_dir = os.path.join(cls.tmp_dir, "wf") create_obj = nf_core.pipelines.create.create.PipelineCreate( - "testpipeline", "", "", outdir=cls.template_dir, no_git=True, plain=True + "testpipeline", "a description", "Me", outdir=cls.template_dir, no_git=True ) create_obj.init_pipeline() diff 
--git a/tests/test_schema.py b/tests/test_schema.py index 4d8f2e0ef..1f30c9e0f 100644 --- a/tests/test_schema.py +++ b/tests/test_schema.py @@ -30,7 +30,7 @@ def setUp(self): self.tmp_dir = tempfile.mkdtemp() self.template_dir = os.path.join(self.tmp_dir, "wf") create_obj = nf_core.pipelines.create.create.PipelineCreate( - "testpipeline", "", "", outdir=self.template_dir, no_git=True, plain=True + "testpipeline", "a description", "Me", outdir=self.template_dir, no_git=True ) create_obj.init_pipeline() diff --git a/tests/test_subworkflows.py b/tests/test_subworkflows.py index fb1521131..c5af1ff42 100644 --- a/tests/test_subworkflows.py +++ b/tests/test_subworkflows.py @@ -56,7 +56,7 @@ def setUp(self): self.pipeline_name = "mypipeline" self.pipeline_dir = os.path.join(self.tmp_dir, self.pipeline_name) nf_core.pipelines.create.create.PipelineCreate( - self.pipeline_name, "it is mine", "me", no_git=True, outdir=self.pipeline_dir, plain=True + self.pipeline_name, "it is mine", "me", no_git=True, outdir=self.pipeline_dir ).init_pipeline() # Set up the nf-core/modules repo dummy diff --git a/tests/test_sync.py b/tests/test_sync.py index 228a6682e..fabdf122e 100644 --- a/tests/test_sync.py +++ b/tests/test_sync.py @@ -27,7 +27,7 @@ def setUp(self): self.pipeline_dir = os.path.join(self.tmp_dir, "testpipeline") default_branch = "master" self.create_obj = nf_core.pipelines.create.create.PipelineCreate( - "testing", "test pipeline", "tester", outdir=self.pipeline_dir, plain=True, default_branch=default_branch + "testing", "test pipeline", "tester", outdir=self.pipeline_dir, default_branch=default_branch ) self.create_obj.init_pipeline() self.remote_path = os.path.join(self.tmp_dir, "remote_repo") diff --git a/tests/test_utils.py b/tests/test_utils.py index 22b5a2332..035b0d97d 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -45,7 +45,6 @@ def setUp(self): "Test McTestFace", no_git=True, outdir=self.test_pipeline_dir, - plain=True, ) 
self.create_obj.init_pipeline() # Base Pipeline object on this directory From c2c0561de95410cc3a8a62f9eb99906411e74da8 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 6 Nov 2023 14:06:23 +0100 Subject: [PATCH 036/737] fix test_remove_patch --- tests/modules/patch.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/modules/patch.py b/tests/modules/patch.py index c34e1f740..bd6b5accd 100644 --- a/tests/modules/patch.py +++ b/tests/modules/patch.py @@ -349,7 +349,7 @@ def test_remove_patch(self): "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) - with mock.patch.object(nf_core.modules.patch.questionary, "confirm") as mock_questionary: + with mock.patch.object(nf_core.components.patch.questionary, "confirm") as mock_questionary: mock_questionary.unsafe_ask.return_value = True patch_obj.remove(BISMARK_ALIGN) # Check that the diff file has been removed From ef9ea81467c05fb9f0c9bd396b0071b62d9e2106 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 6 Nov 2023 15:17:16 +0100 Subject: [PATCH 037/737] add types-requests to requirements-dev --- requirements-dev.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements-dev.txt b/requirements-dev.txt index 4cc78d25f..1b92a49c6 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -9,3 +9,4 @@ sphinx-rtd-theme textual-dev>=1.1.0 mypy types-PyYAML +types-requests From 798dad9796ec6d5ecf3735d0d6b306f6bdea79f0 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 10 Nov 2023 17:21:20 +0100 Subject: [PATCH 038/737] add full logging screen to show logging messages --- nf_core/pipelines/create/__init__.py | 4 +- nf_core/pipelines/create/basicdetails.py | 75 +++++++++---------- nf_core/pipelines/create/create.tcss | 3 - nf_core/pipelines/create/custompipeline.py | 65 ++++++++-------- nf_core/pipelines/create/finaldetails.py | 69 ++++++++--------- nf_core/pipelines/create/githubrepo.py | 69 +++++++++-------- .../create/{bye.py => loggingscreen.py} | 23 ++++-- 
nf_core/pipelines/create/nfcorepipeline.py | 29 ++++--- nf_core/pipelines/create/pipelinetype.py | 19 ++--- nf_core/pipelines/create/welcome.py | 25 +++---- 10 files changed, 185 insertions(+), 196 deletions(-) rename nf_core/pipelines/create/{bye.py => loggingscreen.py} (59%) diff --git a/nf_core/pipelines/create/__init__.py b/nf_core/pipelines/create/__init__.py index 900a0b778..a1f83df2b 100644 --- a/nf_core/pipelines/create/__init__.py +++ b/nf_core/pipelines/create/__init__.py @@ -5,7 +5,6 @@ from textual.widgets import Button from nf_core.pipelines.create.basicdetails import BasicDetails -from nf_core.pipelines.create.bye import ByeScreen from nf_core.pipelines.create.custompipeline import CustomPipeline from nf_core.pipelines.create.finaldetails import FinalDetails from nf_core.pipelines.create.githubrepo import GithubRepo @@ -45,7 +44,6 @@ class PipelineCreateApp(App[CreateConfig]): "type_nfcore": NfcorePipeline(), "final_details": FinalDetails(), "github_repo": GithubRepo(), - "bye": ByeScreen(), } # Initialise config as empty @@ -56,6 +54,8 @@ class PipelineCreateApp(App[CreateConfig]): # Log handler LOG_HANDLER = log_handler + # Logging state + LOGGING_STATE = None def on_mount(self) -> None: self.push_screen("welcome") diff --git a/nf_core/pipelines/create/basicdetails.py b/nf_core/pipelines/create/basicdetails.py index 072892f39..dc7248d97 100644 --- a/nf_core/pipelines/create/basicdetails.py +++ b/nf_core/pipelines/create/basicdetails.py @@ -3,7 +3,7 @@ from textual import on from textual.app import ComposeResult -from textual.containers import Center, Horizontal, VerticalScroll +from textual.containers import Center, Horizontal from textual.screen import Screen from textual.widgets import Button, Footer, Header, Input, Markdown @@ -16,46 +16,43 @@ class BasicDetails(Screen): def compose(self) -> ComposeResult: yield Header() yield Footer() + yield Markdown( + dedent( + """ + # Basic details + """ + ) + ) with Horizontal(): - with VerticalScroll(): - 
yield Markdown( - dedent( - """ - # Basic details - """ - ) - ) - with Horizontal(): - yield TextInput( - "org", - "Organisation", - "GitHub organisation", - "nf-core", - classes="column", - disabled=self.parent.PIPELINE_TYPE == "nfcore", - ) - yield TextInput( - "name", - "Pipeline Name", - "Workflow name", - classes="column", - ) + yield TextInput( + "org", + "Organisation", + "GitHub organisation", + "nf-core", + classes="column", + disabled=self.parent.PIPELINE_TYPE == "nfcore", + ) + yield TextInput( + "name", + "Pipeline Name", + "Workflow name", + classes="column", + ) - yield TextInput( - "description", - "Description", - "A short description of your pipeline.", - ) - yield TextInput( - "author", - "Author(s)", - "Name of the main author / authors", - ) - yield Center( - Button("Next", variant="success"), - classes="cta", - ) - yield Center(self.parent.LOG_HANDLER.console, classes="cta log") + yield TextInput( + "description", + "Description", + "A short description of your pipeline.", + ) + yield TextInput( + "author", + "Author(s)", + "Name of the main author / authors", + ) + yield Center( + Button("Next", variant="success"), + classes="cta", + ) @on(Button.Pressed) def on_button_pressed(self, event: Button.Pressed) -> None: diff --git a/nf_core/pipelines/create/create.tcss b/nf_core/pipelines/create/create.tcss index a718b940d..42f144c7d 100644 --- a/nf_core/pipelines/create/create.tcss +++ b/nf_core/pipelines/create/create.tcss @@ -9,9 +9,6 @@ margin-left: 3; margin-right: 3; } -.log { - width: 30%; -} .custom_grid { height: auto; diff --git a/nf_core/pipelines/create/custompipeline.py b/nf_core/pipelines/create/custompipeline.py index 9ac76d80d..5cc2f87d9 100644 --- a/nf_core/pipelines/create/custompipeline.py +++ b/nf_core/pipelines/create/custompipeline.py @@ -1,6 +1,6 @@ from textual import on from textual.app import ComposeResult -from textual.containers import Center, Horizontal, ScrollableContainer, VerticalScroll +from textual.containers import 
Center, ScrollableContainer from textual.screen import Screen from textual.widgets import Button, Footer, Header, Switch @@ -46,39 +46,36 @@ class CustomPipeline(Screen): def compose(self) -> ComposeResult: yield Header() yield Footer() - with Horizontal(): - with VerticalScroll(): - yield ScrollableContainer( - PipelineFeature( - markdown_genomes, - "Use reference genomes", - "The pipeline will be configured to use a copy of the most common reference genome files from iGenomes", - "igenomes", - ), - PipelineFeature( - markdown_ci, - "Add Github CI tests", - "The pipeline will include several GitHub actions for Continuous Integration (CI) testing", - "ci", - ), - PipelineFeature( - markdown_badges, - "Add Github badges", - "The README.md file of the pipeline will include GitHub badges", - "github_badges", - ), - PipelineFeature( - markdown_configuration, - "Add configuration files", - "The pipeline will include configuration profiles containing custom parameters requried to run nf-core pipelines at different institutions", - "nf_core_configs", - ), - ) - yield Center( - Button("Continue", id="continue", variant="success"), - classes="cta", - ) - yield Center(self.parent.LOG_HANDLER.console, classes="cta log") + yield ScrollableContainer( + PipelineFeature( + markdown_genomes, + "Use reference genomes", + "The pipeline will be configured to use a copy of the most common reference genome files from iGenomes", + "igenomes", + ), + PipelineFeature( + markdown_ci, + "Add Github CI tests", + "The pipeline will include several GitHub actions for Continuous Integration (CI) testing", + "ci", + ), + PipelineFeature( + markdown_badges, + "Add Github badges", + "The README.md file of the pipeline will include GitHub badges", + "github_badges", + ), + PipelineFeature( + markdown_configuration, + "Add configuration files", + "The pipeline will include configuration profiles containing custom parameters requried to run nf-core pipelines at different institutions", + 
"nf_core_configs", + ), + ) + yield Center( + Button("Continue", id="continue", variant="success"), + classes="cta", + ) @on(Button.Pressed, "#continue") def on_button_pressed(self, event: Button.Pressed) -> None: diff --git a/nf_core/pipelines/create/finaldetails.py b/nf_core/pipelines/create/finaldetails.py index 904d0e362..23be7db89 100644 --- a/nf_core/pipelines/create/finaldetails.py +++ b/nf_core/pipelines/create/finaldetails.py @@ -3,11 +3,12 @@ from textual import on from textual.app import ComposeResult -from textual.containers import Center, Horizontal, VerticalScroll +from textual.containers import Center, Horizontal from textual.screen import Screen from textual.widgets import Button, Footer, Header, Input, Markdown, Static, Switch from nf_core.pipelines.create.create import PipelineCreate +from nf_core.pipelines.create.loggingscreen import LoggingScreen from nf_core.pipelines.create.utils import TextInput @@ -17,42 +18,37 @@ class FinalDetails(Screen): def compose(self) -> ComposeResult: yield Header() yield Footer() - with Horizontal(): - with VerticalScroll(): - yield Markdown( - dedent( - """ - # Final details - """ - ) - ) + yield Markdown( + dedent( + """ + # Final details + """ + ) + ) - with Horizontal(): - yield TextInput( - "version", - "Version", - "First version of the pipeline", - "1.0dev", - classes="column", - ) - yield TextInput( - "outdir", - "Output directory", - "Path to the output directory where the pipeline will be created", - ".", - classes="column", - ) - with Horizontal(): - yield Switch(value=False, id="force") - yield Static( - "If the pipeline output directory exists, remove it and continue.", classes="custom_grid" - ) + with Horizontal(): + yield TextInput( + "version", + "Version", + "First version of the pipeline", + "1.0dev", + classes="column", + ) + yield TextInput( + "outdir", + "Output directory", + "Path to the output directory where the pipeline will be created", + ".", + classes="column", + ) + with Horizontal(): + 
yield Switch(value=False, id="force") + yield Static("If the pipeline output directory exists, remove it and continue.", classes="custom_grid") - yield Center( - Button("Finish", id="finish", variant="success"), - classes="cta", - ) - yield Center(self.parent.LOG_HANDLER.console, classes="cta log") + yield Center( + Button("Finish", id="finish", variant="success"), + classes="cta", + ) @on(Button.Pressed, "#finish") def on_button_pressed(self, event: Button.Pressed) -> None: @@ -80,4 +76,5 @@ def on_button_pressed(self, event: Button.Pressed) -> None: # Create the new pipeline create_obj = PipelineCreate(template_config=self.parent.TEMPLATE_CONFIG) create_obj.init_pipeline() - self.parent.switch_screen("github_repo") + self.parent.LOGGING_STATE = "pipeline created" + self.parent.switch_screen(LoggingScreen()) diff --git a/nf_core/pipelines/create/githubrepo.py b/nf_core/pipelines/create/githubrepo.py index a99f28feb..56d595319 100644 --- a/nf_core/pipelines/create/githubrepo.py +++ b/nf_core/pipelines/create/githubrepo.py @@ -6,10 +6,11 @@ from github import Github, GithubException, UnknownObjectException from textual import on from textual.app import ComposeResult -from textual.containers import Center, Horizontal, VerticalScroll +from textual.containers import Center, Horizontal from textual.screen import Screen from textual.widgets import Button, Footer, Header, Input, Markdown, Static, Switch +from nf_core.pipelines.create.loggingscreen import LoggingScreen from nf_core.pipelines.create.utils import TextInput log = logging.getLogger(__name__) @@ -43,40 +44,37 @@ class GithubRepo(Screen): def compose(self) -> ComposeResult: yield Header() yield Footer() + yield Markdown(dedent(github_text_markdown)) with Horizontal(): - with VerticalScroll(): - yield Markdown(dedent(github_text_markdown)) - with Horizontal(): - yield TextInput( - "gh_username", - "GitHub username", - "Your GitHub username", - classes="column", - ) - token = "GITHUB_AUTH_TOKEN" in os.environ - 
yield TextInput( - "token", - "Using the environment variable GITHUB_AUTH_TOKEN" if token else "GitHub token", - "Your GitHub personal access token for login.", - classes="column", - disabled=token, - ) - yield Markdown(dedent(repo_config_markdown)) - with Horizontal(): - yield Switch(value=False, id="private") - yield Static("Select if the new GitHub repo must be private.", classes="custom_grid") - with Horizontal(): - yield Switch(value=True, id="push") - yield Static( - "Select if you would like to push all the pipeline template files to your GitHub repo\nand all the branches required to keep the pipeline up to date with new releases of nf-core.", - classes="custom_grid", - ) - yield Center( - Button("Create GitHub repo", id="create_github", variant="success"), - Button("Finish without creating a repo", id="exit", variant="primary"), - classes="cta", - ) - yield Center(self.parent.LOG_HANDLER.console, classes="cta log") + yield TextInput( + "gh_username", + "GitHub username", + "Your GitHub username", + classes="column", + ) + token = "GITHUB_AUTH_TOKEN" in os.environ + yield TextInput( + "token", + "Using the environment variable GITHUB_AUTH_TOKEN" if token else "GitHub token", + "Your GitHub personal access token for login.", + classes="column", + disabled=token, + ) + yield Markdown(dedent(repo_config_markdown)) + with Horizontal(): + yield Switch(value=False, id="private") + yield Static("Select if the new GitHub repo must be private.", classes="custom_grid") + with Horizontal(): + yield Switch(value=True, id="push") + yield Static( + "Select if you would like to push all the pipeline template files to your GitHub repo\nand all the branches required to keep the pipeline up to date with new releases of nf-core.", + classes="custom_grid", + ) + yield Center( + Button("Create GitHub repo", id="create_github", variant="success"), + Button("Finish without creating a repo", id="exit", variant="primary"), + classes="cta", + ) def on_button_pressed(self, event: 
Button.Pressed) -> None: """Create a GitHub repo or show help message and exit""" @@ -155,7 +153,8 @@ def on_button_pressed(self, event: Button.Pressed) -> None: # Show help message and exit log.info(exit_help_text_markdown) - self.parent.switch_screen("bye") + self.parent.LOGGING_STATE = "repo created" + self.parent.switch_screen(LoggingScreen()) def _create_repo_and_push(self, org, pipeline_repo, private, push): """Create a GitHub repository and push all branches.""" diff --git a/nf_core/pipelines/create/bye.py b/nf_core/pipelines/create/loggingscreen.py similarity index 59% rename from nf_core/pipelines/create/bye.py rename to nf_core/pipelines/create/loggingscreen.py index 89d973778..4ef332bbc 100644 --- a/nf_core/pipelines/create/bye.py +++ b/nf_core/pipelines/create/loggingscreen.py @@ -7,11 +7,11 @@ markdown = """ # nf-core create -Bye! +Visualising logging output. """ -class ByeScreen(Screen): +class LoggingScreen(Screen): """A screen to show the final logs.""" def compose(self) -> ComposeResult: @@ -26,10 +26,21 @@ def compose(self) -> ComposeResult: id="logo", ) yield Markdown(markdown) + if self.parent.LOGGING_STATE == "repo created": + yield Center( + Button("Close App", id="close_app", variant="success"), + classes="cta", + ) + else: + yield Center( + Button("Close logging screen", id="close_screen", variant="success"), + classes="cta", + ) yield Center(self.parent.LOG_HANDLER.console, classes="cta") - yield Center(Button("Close", id="close", variant="success"), classes="cta") - @on(Button.Pressed, "#close") def on_button_pressed(self, event: Button.Pressed) -> None: - """Close app""" - self.parent.exit() + """Close the logging screen or the whole app.""" + if event.button.id == "close_app": + self.parent.exit() + if event.button.id == "close_screen": + self.parent.switch_screen("github_repo") diff --git a/nf_core/pipelines/create/nfcorepipeline.py b/nf_core/pipelines/create/nfcorepipeline.py index 746fbf40b..95c173a40 100644 --- 
a/nf_core/pipelines/create/nfcorepipeline.py +++ b/nf_core/pipelines/create/nfcorepipeline.py @@ -1,6 +1,6 @@ from textual import on from textual.app import ComposeResult -from textual.containers import Center, Horizontal, ScrollableContainer, VerticalScroll +from textual.containers import Center, Horizontal, ScrollableContainer from textual.screen import Screen from textual.widgets import Button, Footer, Header, Switch @@ -13,21 +13,18 @@ class NfcorePipeline(Screen): def compose(self) -> ComposeResult: yield Header() yield Footer() - with Horizontal(): - with VerticalScroll(): - yield ScrollableContainer( - PipelineFeature( - markdown_genomes, - "Use reference genomes", - "The pipeline will be configured to use a copy of the most common reference genome files from iGenomes", - "igenomes", - ), - ) - yield Center( - Button("Continue", id="continue", variant="success"), - classes="cta", - ) - yield Center(self.parent.LOG_HANDLER.console, classes="cta log") + yield ScrollableContainer( + PipelineFeature( + markdown_genomes, + "Use reference genomes", + "The pipeline will be configured to use a copy of the most common reference genome files from iGenomes", + "igenomes", + ), + ) + yield Center( + Button("Continue", id="continue", variant="success"), + classes="cta", + ) @on(Button.Pressed, "#continue") def on_button_pressed(self, event: Button.Pressed) -> None: diff --git a/nf_core/pipelines/create/pipelinetype.py b/nf_core/pipelines/create/pipelinetype.py index 979e4408c..98d5acc97 100644 --- a/nf_core/pipelines/create/pipelinetype.py +++ b/nf_core/pipelines/create/pipelinetype.py @@ -1,5 +1,5 @@ from textual.app import ComposeResult -from textual.containers import Center, Horizontal, VerticalScroll +from textual.containers import Center from textual.screen import Screen from textual.widgets import Button, Footer, Header, Markdown @@ -40,13 +40,10 @@ class ChoosePipelineType(Screen): def compose(self) -> ComposeResult: yield Header() yield Footer() - with 
Horizontal(): - with VerticalScroll(): - yield Markdown(markdown_intro) - yield Center( - Button("nf-core", id="type_nfcore", variant="success"), - Button("Custom", id="type_custom", variant="primary"), - classes="cta", - ) - yield Markdown(markdown_details) - yield Center(self.parent.LOG_HANDLER.console, classes="cta log") + yield Markdown(markdown_intro) + yield Center( + Button("nf-core", id="type_nfcore", variant="success"), + Button("Custom", id="type_custom", variant="primary"), + classes="cta", + ) + yield Markdown(markdown_details) diff --git a/nf_core/pipelines/create/welcome.py b/nf_core/pipelines/create/welcome.py index d572f3149..0be70cc4c 100644 --- a/nf_core/pipelines/create/welcome.py +++ b/nf_core/pipelines/create/welcome.py @@ -1,5 +1,5 @@ from textual.app import ComposeResult -from textual.containers import Center, Horizontal, VerticalScroll +from textual.containers import Center from textual.screen import Screen from textual.widgets import Button, Footer, Header, Markdown, Static @@ -24,16 +24,13 @@ class WelcomeScreen(Screen): def compose(self) -> ComposeResult: yield Header() yield Footer() - with Horizontal(): - with VerticalScroll(): - yield Static( - f"\n[green]{' ' * 40},--.[grey39]/[green],-." - + "\n[blue] ___ __ __ __ ___ [green]/,-._.--~\\" - + "\n[blue]|\ | |__ __ / ` / \ |__) |__ [yellow] } {" - + "\n[blue] | \| | \__, \__/ | \ |___ [green]\`-._,-`-," - + "\n[green] `._,._,'\n", - id="logo", - ) - yield Markdown(markdown) - yield Center(Button("Let's go!", id="start", variant="success"), classes="cta") - yield Center(self.parent.LOG_HANDLER.console, classes="cta log") + yield Static( + f"\n[green]{' ' * 40},--.[grey39]/[green],-." 
+ + "\n[blue] ___ __ __ __ ___ [green]/,-._.--~\\" + + "\n[blue]|\ | |__ __ / ` / \ |__) |__ [yellow] } {" + + "\n[blue] | \| | \__, \__/ | \ |___ [green]\`-._,-`-," + + "\n[green] `._,._,'\n", + id="logo", + ) + yield Markdown(markdown) + yield Center(Button("Let's go!", id="start", variant="success"), classes="cta") From 4758c7d64a786c84d8ca22f61f80a0f11593ea64 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 13 Nov 2023 10:47:58 +0100 Subject: [PATCH 039/737] autopopulate github credentials and hide password --- nf_core/pipelines/create/__init__.py | 4 +++ nf_core/pipelines/create/githubrepo.py | 33 ++++++++++++++++------- nf_core/pipelines/create/loggingscreen.py | 7 ----- nf_core/pipelines/create/utils.py | 4 ++- 4 files changed, 30 insertions(+), 18 deletions(-) diff --git a/nf_core/pipelines/create/__init__.py b/nf_core/pipelines/create/__init__.py index a1f83df2b..68833877c 100644 --- a/nf_core/pipelines/create/__init__.py +++ b/nf_core/pipelines/create/__init__.py @@ -72,6 +72,10 @@ def on_button_pressed(self, event: Button.Pressed) -> None: self.switch_screen("basic_details") elif event.button.id == "continue": self.switch_screen("final_details") + elif event.button.id == "close_screen": + self.switch_screen("github_repo") + if event.button.id == "close_app": + self.exit() def action_toggle_dark(self) -> None: """An action to toggle dark mode.""" diff --git a/nf_core/pipelines/create/githubrepo.py b/nf_core/pipelines/create/githubrepo.py index 56d595319..539958aaa 100644 --- a/nf_core/pipelines/create/githubrepo.py +++ b/nf_core/pipelines/create/githubrepo.py @@ -3,8 +3,8 @@ from textwrap import dedent import git +import yaml from github import Github, GithubException, UnknownObjectException -from textual import on from textual.app import ComposeResult from textual.containers import Center, Horizontal from textual.screen import Screen @@ -46,19 +46,21 @@ def compose(self) -> ComposeResult: yield Footer() yield Markdown(dedent(github_text_markdown)) 
with Horizontal(): + gh_user, gh_token = self._get_github_credentials() yield TextInput( "gh_username", "GitHub username", "Your GitHub username", + default=gh_user[0] if gh_user is not None else "GitHub username", classes="column", ) - token = "GITHUB_AUTH_TOKEN" in os.environ yield TextInput( "token", - "Using the environment variable GITHUB_AUTH_TOKEN" if token else "GitHub token", + "GitHub token", "Your GitHub personal access token for login.", + default=gh_token if gh_token is not None else "GitHub token", + password=True, classes="column", - disabled=token, ) yield Markdown(dedent(repo_config_markdown)) with Horizontal(): @@ -94,12 +96,7 @@ def on_button_pressed(self, event: Button.Pressed) -> None: pipeline_repo = git.Repo.init(self.parent.TEMPLATE_CONFIG.outdir) # GitHub authentication - if "GITHUB_AUTH_TOKEN" in os.environ: - github_auth = self._github_authentication( - github_variables["gh_username"], os.environ["GITHUB_AUTH_TOKEN"] - ) - log.debug("Using GITHUB_AUTH_TOKEN environment variable") - elif github_variables["token"]: + if github_variables["token"]: github_auth = self._github_authentication(github_variables["gh_username"], github_variables["token"]) else: raise UserWarning( @@ -192,3 +189,19 @@ def _github_authentication(self, gh_username, gh_token): log.debug(f"Authenticating GitHub as {gh_username}") github_auth = Github(gh_username, gh_token) return github_auth + + def _get_github_credentials(self): + """Get GitHub credentials""" + gh_user = None + gh_token = None + # Use gh CLI config if installed + gh_cli_config_fn = os.path.expanduser("~/.config/gh/hosts.yml") + if os.path.exists(gh_cli_config_fn): + with open(gh_cli_config_fn, "r") as fh: + gh_cli_config = yaml.safe_load(fh) + gh_user = (gh_cli_config["github.com"]["user"],) + gh_token = gh_cli_config["github.com"]["oauth_token"] + # If gh CLI not installed, try to get credentials from environment variables + elif os.environ.get("GITHUB_TOKEN") is not None: + gh_token = self.auth = 
os.environ["GITHUB_TOKEN"] + return (gh_user, gh_token) diff --git a/nf_core/pipelines/create/loggingscreen.py b/nf_core/pipelines/create/loggingscreen.py index 4ef332bbc..5f4004798 100644 --- a/nf_core/pipelines/create/loggingscreen.py +++ b/nf_core/pipelines/create/loggingscreen.py @@ -37,10 +37,3 @@ def compose(self) -> ComposeResult: classes="cta", ) yield Center(self.parent.LOG_HANDLER.console, classes="cta") - - def on_button_pressed(self, event: Button.Pressed) -> None: - """Close the logging screen or the whole app.""" - if event.button.id == "close_app": - self.parent.exit() - if event.button.id == "close_screen": - self.parent.switch_screen("github_repo") diff --git a/nf_core/pipelines/create/utils.py b/nf_core/pipelines/create/utils.py index ae7ac097d..7b5cb8f95 100644 --- a/nf_core/pipelines/create/utils.py +++ b/nf_core/pipelines/create/utils.py @@ -71,7 +71,7 @@ class TextInput(Static): and validation messages. """ - def __init__(self, field_id, placeholder, description, default=None, **kwargs) -> None: + def __init__(self, field_id, placeholder, description, default=None, password=None, **kwargs) -> None: """Initialise the widget with our values. 
Pass on kwargs upstream for standard usage.""" @@ -80,6 +80,7 @@ def __init__(self, field_id, placeholder, description, default=None, **kwargs) - self.placeholder: str = placeholder self.description: str = description self.default: str = default + self.password: bool = password def compose(self) -> ComposeResult: yield Static(self.description, classes="field_help") @@ -87,6 +88,7 @@ def compose(self) -> ComposeResult: placeholder=self.placeholder, validators=[ValidateConfig(self.field_id)], value=self.default, + password=self.password, ) yield Static(classes="validation_msg") From 665d37825304b78c8a2fee580206eec1ade7fa28 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 13 Nov 2023 15:13:24 +0100 Subject: [PATCH 040/737] add button to show or hide password --- nf_core/pipelines/create/create.tcss | 15 +++++++++++++++ nf_core/pipelines/create/githubrepo.py | 19 ++++++++++++++++--- nf_core/pipelines/create/utils.py | 1 + 3 files changed, 32 insertions(+), 3 deletions(-) diff --git a/nf_core/pipelines/create/create.tcss b/nf_core/pipelines/create/create.tcss index 42f144c7d..df37e50ed 100644 --- a/nf_core/pipelines/create/create.tcss +++ b/nf_core/pipelines/create/create.tcss @@ -79,3 +79,18 @@ HorizontalScroll { .displayed #hide_help { display: block; } + +/* Show password */ + +#show_password { + display: block; +} +#hide_password { + display: none; +} +.displayed #show_password { + display: none; +} +.displayed #hide_password { + display: block; +} diff --git a/nf_core/pipelines/create/githubrepo.py b/nf_core/pipelines/create/githubrepo.py index 539958aaa..270107c68 100644 --- a/nf_core/pipelines/create/githubrepo.py +++ b/nf_core/pipelines/create/githubrepo.py @@ -62,6 +62,8 @@ def compose(self) -> ComposeResult: password=True, classes="column", ) + yield Button("Show", id="show_password") + yield Button("Hide", id="hide_password") yield Markdown(dedent(repo_config_markdown)) with Horizontal(): yield Switch(value=False, id="private") @@ -80,7 +82,15 @@ def 
compose(self) -> ComposeResult: def on_button_pressed(self, event: Button.Pressed) -> None: """Create a GitHub repo or show help message and exit""" - if event.button.id == "create_github": + if event.button.id == "show_password": + self.add_class("displayed") + text_input = self.query_one("#token", TextInput) + text_input.query_one(Input).password = False + elif event.button.id == "hide_password": + self.remove_class("displayed") + text_input = self.query_one("#token", TextInput) + text_input.query_one(Input).password = True + elif event.button.id == "create_github": # Create a GitHub repo # Save GitHub username and token @@ -146,12 +156,15 @@ def on_button_pressed(self, event: Button.Pressed) -> None: log.info(f"GitHub repository '{self.parent.TEMPLATE_CONFIG.name}' created successfully") except UserWarning as e: log.info(f"There was an error with message: {e}" f"\n{exit_help_text_markdown}") + + self.parent.LOGGING_STATE = "repo created" + self.parent.switch_screen(LoggingScreen()) elif event.button.id == "exit": # Show help message and exit log.info(exit_help_text_markdown) - self.parent.LOGGING_STATE = "repo created" - self.parent.switch_screen(LoggingScreen()) + self.parent.LOGGING_STATE = "repo created" + self.parent.switch_screen(LoggingScreen()) def _create_repo_and_push(self, org, pipeline_repo, private, push): """Create a GitHub repository and push all branches.""" diff --git a/nf_core/pipelines/create/utils.py b/nf_core/pipelines/create/utils.py index 7b5cb8f95..c01c947b4 100644 --- a/nf_core/pipelines/create/utils.py +++ b/nf_core/pipelines/create/utils.py @@ -77,6 +77,7 @@ def __init__(self, field_id, placeholder, description, default=None, password=No Pass on kwargs upstream for standard usage.""" super().__init__(**kwargs) self.field_id: str = field_id + self.id: str = field_id self.placeholder: str = placeholder self.description: str = description self.default: str = default From 19f60bcf04e8e8e7a381995653439162adbd7764 Mon Sep 17 00:00:00 2001 
From: mirpedrol Date: Tue, 14 Nov 2023 10:13:38 +0100 Subject: [PATCH 041/737] update textual and textual-dev versions --- requirements-dev.txt | 2 +- requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 1b92a49c6..07da7914a 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -6,7 +6,7 @@ pytest-datafiles responses Sphinx sphinx-rtd-theme -textual-dev>=1.1.0 +textual-dev>=1.2.1 mypy types-PyYAML types-requests diff --git a/requirements.txt b/requirements.txt index 15c4f9e18..7c7cde7a8 100644 --- a/requirements.txt +++ b/requirements.txt @@ -19,4 +19,4 @@ requests_cache rich-click>=1.6.1 rich>=13.3.1 tabulate -textual>=0.33.0 +textual>=0.41.0 From 887f372433954481f3afe2c5ec0e79d203b1e15c Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 14 Nov 2023 16:30:32 +0100 Subject: [PATCH 042/737] add first snapshot tests --- nf_core/__main__.py | 2 + nf_core/pipelines/create/__init__.py | 2 +- requirements-dev.txt | 1 + tests/__snapshots__/test_create_app.ambr | 442 +++++++++++++++++++++++ tests/test_create_app.py | 37 ++ 5 files changed, 483 insertions(+), 1 deletion(-) create mode 100644 tests/__snapshots__/test_create_app.ambr create mode 100644 tests/test_create_app.py diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 13e2e97a5..1f200112f 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -521,6 +521,7 @@ def create_pipeline(ctx, name, description, author, version, force, outdir, temp ) app = PipelineCreateApp() app.run() + sys.exit(app.return_code or 0) # nf-core create (deprecated) @@ -582,6 +583,7 @@ def create(name, description, author, version, force, outdir, template_yaml, pla app = PipelineCreateApp() try: app.run() + sys.exit(app.return_code or 0) except UserWarning as e: log.error(e) sys.exit(1) diff --git a/nf_core/pipelines/create/__init__.py b/nf_core/pipelines/create/__init__.py index 68833877c..52a6b3961 100644 --- 
a/nf_core/pipelines/create/__init__.py +++ b/nf_core/pipelines/create/__init__.py @@ -75,7 +75,7 @@ def on_button_pressed(self, event: Button.Pressed) -> None: elif event.button.id == "close_screen": self.switch_screen("github_repo") if event.button.id == "close_app": - self.exit() + self.exit(return_code=0) def action_toggle_dark(self) -> None: """An action to toggle dark mode.""" diff --git a/requirements-dev.txt b/requirements-dev.txt index 07da7914a..96b6ab77b 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -10,3 +10,4 @@ textual-dev>=1.2.1 mypy types-PyYAML types-requests +pytest-textual-snapshot diff --git a/tests/__snapshots__/test_create_app.ambr b/tests/__snapshots__/test_create_app.ambr new file mode 100644 index 000000000..ae80f7d84 --- /dev/null +++ b/tests/__snapshots__/test_create_app.ambr @@ -0,0 +1,442 @@ +# serializer version: 1 +# name: test_choose_type + ''' + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + nf-core create + + + + + + + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + To nf-core or not to nf-core? + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Next, we need to know what kind of pipeline this will be. + + Choose "nf-core" if: + + ● You want your pipeline to be part of the nf-core community + ● You think that there's an outside chance that it ever could be part of nf-core + + Choose "Custom" if: + + ● Your pipeline will never be part of nf-core + ● You want full control over all features that are included from the template(including  + those that are mandatory for nf-core). 
+ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + nf-coreCustom + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Not sure? What's the difference? + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Choosing "nf-core" effectively pre-selects the following template features: + + ● GitHub Actions Continuous Integration (CI) configuration for the following: + ▪ Small-scale (GitHub) and large-scale (AWS) tests + ▪ Code format linting with prettier + ▪ Auto-fix functionality using @nf-core-bot + ▪ Marking old issues as stale + ● Inclusion of shared nf-core config profiles + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  + + + + + ''' +# --- +# name: test_welcome + ''' + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + nf-core create + + + + + + + + + + nf-core create — Create a new pipeline with the nf-core pip… + +                                         ,--./,-. +         ___     __   __   __   ___     /,-._.--~\ + |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                        `._,._,' + + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + nf-core create + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + This app will help you create a new nf-core pipeline.It uses the  + nf-core pipeline template, which is keptwithin the nf-core/tools  + repository. + + Using this tool is mandatory when making a pipeline that maybe part  + of the nf-core community collection at some point.However, this tool  + can also be used to create pipelines that willnever be part of ▁▁ + nf-core. You can still benefit from the communitybest practices for  + your own workflow. 
+ +  D  Toggle dark mode  Q  Quit  + + + + + ''' +# --- diff --git a/tests/test_create_app.py b/tests/test_create_app.py new file mode 100644 index 000000000..eb0f96a0c --- /dev/null +++ b/tests/test_create_app.py @@ -0,0 +1,37 @@ +""" Test Pipeline Create App """ +import pytest + +from nf_core.pipelines.create import PipelineCreateApp + + +@pytest.mark.asyncio +async def test_app_bindings(): + """Test that the app bindings work.""" + app = PipelineCreateApp() + async with app.run_test() as pilot: + # Test pressing the D key + assert app.dark == True + await pilot.press("d") + assert app.dark == False + await pilot.press("d") + assert app.dark == True + + # Test pressing the Q key + await pilot.press("q") + assert app.return_code == 0 + + +def test_welcome(snap_compare): + """Test snapshot for the first screen in the app. The welcome screen.""" + assert snap_compare("../nf_core/pipelines/create/__init__.py") + + +def test_choose_type(snap_compare): + """Test snapshot for the choose_type screen. + screen welcome > press start > screen choose_type + """ + + async def run_before(pilot) -> None: + await pilot.click("#start") + + assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) From 77586984ce452359ea1abbd22d0e76ad1b66b5a4 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 5 Dec 2023 11:15:00 +0100 Subject: [PATCH 043/737] add snapshots for all screens --- .gitignore | 3 + nf_core/pipelines/create/basicdetails.py | 2 +- tests/__snapshots__/test_create_app.ambr | 2989 +++++++++++++++++++++- tests/test_create_app.py | 243 +- 4 files changed, 3112 insertions(+), 125 deletions(-) diff --git a/.gitignore b/.gitignore index 271fdb14e..a3721da86 100644 --- a/.gitignore +++ b/.gitignore @@ -115,3 +115,6 @@ ENV/ pip-wheel-metadata .vscode .*.sw? 
+ +# Textual +snapshot_report.html diff --git a/nf_core/pipelines/create/basicdetails.py b/nf_core/pipelines/create/basicdetails.py index dc7248d97..da1b2bf45 100644 --- a/nf_core/pipelines/create/basicdetails.py +++ b/nf_core/pipelines/create/basicdetails.py @@ -50,7 +50,7 @@ def compose(self) -> ComposeResult: "Name of the main author / authors", ) yield Center( - Button("Next", variant="success"), + Button("Next", id="next", variant="success"), classes="cta", ) diff --git a/tests/__snapshots__/test_create_app.ambr b/tests/__snapshots__/test_create_app.ambr index ae80f7d84..9a2c11d85 100644 --- a/tests/__snapshots__/test_create_app.ambr +++ b/tests/__snapshots__/test_create_app.ambr @@ -1,4 +1,553 @@ # serializer version: 1 +# name: test_basic_details_custom + ''' + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + nf-core create + + + + + + + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Basic details + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + + GitHub organisationWorkflow name + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + nf-corePipeline Name + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + A short description of your pipeline. 
+ ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Description + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + Name of the main author / authors + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Author(s) + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Next + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  + + + + + ''' +# --- +# name: test_basic_details_nfcore + ''' + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + nf-core create + + + + + + + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Basic details + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + + GitHub organisationWorkflow name + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + nf-corePipeline Name + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + A short description of your pipeline. 
+ ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Description + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + Name of the main author / authors + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Author(s) + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Next + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  + + + + + ''' +# --- # name: test_choose_type ''' @@ -22,251 +571,2445 @@ font-weight: 700; } - .terminal-2103023275-matrix { + .terminal-2103023275-matrix { + font-family: Fira Code, monospace; + font-size: 20px; + line-height: 24.4px; + font-variant-east-asian: full-width; + } + + .terminal-2103023275-title { + font-size: 18px; + font-weight: bold; + font-family: arial; + } + + .terminal-2103023275-r1 { fill: #c5c8c6 } + .terminal-2103023275-r2 { fill: #e3e3e3 } + .terminal-2103023275-r3 { fill: #989898 } + .terminal-2103023275-r4 { fill: #e1e1e1 } + .terminal-2103023275-r5 { fill: #121212 } + .terminal-2103023275-r6 { fill: #0053aa } + .terminal-2103023275-r7 { fill: #dde8f3;font-weight: bold } + .terminal-2103023275-r8 { fill: #e1e1e1;font-style: italic; } + .terminal-2103023275-r9 { fill: #4ebf71;font-weight: bold } + .terminal-2103023275-r10 { fill: #7ae998 } + .terminal-2103023275-r11 { fill: #507bb3 } + .terminal-2103023275-r12 { fill: #dde6ed;font-weight: bold } + .terminal-2103023275-r13 { fill: #008139 } + .terminal-2103023275-r14 { fill: #001541 } + .terminal-2103023275-r15 { fill: #24292f } + .terminal-2103023275-r16 { fill: #e2e3e3;font-weight: bold } + .terminal-2103023275-r17 { fill: #ddedf9 } + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + nf-core create + + + + + + + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + To nf-core or not to nf-core? + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Next, we need to know what kind of pipeline this will be. + + Choose "nf-core" if: + + ● You want your pipeline to be part of the nf-core community + ● You think that there's an outside chance that it ever could be part of nf-core + + Choose "Custom" if: + + ● Your pipeline will never be part of nf-core + ● You want full control over all features that are included from the template(including  + those that are mandatory for nf-core). + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + nf-coreCustom + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Not sure? What's the difference? 
+ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Choosing "nf-core" effectively pre-selects the following template features: + + ● GitHub Actions Continuous Integration (CI) configuration for the following: + ▪ Small-scale (GitHub) and large-scale (AWS) tests + ▪ Code format linting with prettier + ▪ Auto-fix functionality using @nf-core-bot + ▪ Marking old issues as stale + ● Inclusion of shared nf-core config profiles + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  + + + + + ''' +# --- +# name: test_customisation_help + ''' + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + nf-core create + + + + + + + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▔▔▔▔▔▔▔▔ + Use reference The pipeline will be▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + ▁▁▁▁▁▁▁▁genomesconfigured to use a Hide help + copy of the most ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + common reference  + genome files from  + iGenomes + + + + Nf-core pipelines are configured to use a copy of the most common  + reference genome files. + + By selecting this option, your pipeline will include a  + configuration file specifying the paths to these files. + + The required code to use these files will also be included in the  + template.When the pipeline user provides an appropriate genome ▆▆ + key,the pipeline will automatically download the required  + reference files. 
+ + + ▔▔▔▔▔▔▔▔ + Add Github CI testsThe pipeline will ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + ▁▁▁▁▁▁▁▁include several Show help + GitHub actions for ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + Continuous  + Integration (CI)  + testing + + ▔▔▔▔▔▔▔▔ + Add Github badgesThe README.md file ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + ▁▁▁▁▁▁▁▁of the pipeline willShow help + include GitHub ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + badges + + ▔▔▔▔▔▔▔▔ + Add configuration The pipeline will ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + ▁▁▁▁▁▁▁▁filesinclude Show help + configuration ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + profiles containing  + custom parameters ▆▆ + requried to run  + nf-core pipelines at + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Continue + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  D  Toggle dark mode  Q  Quit  + + + + + ''' +# --- +# name: test_final_details + ''' + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + nf-core create + + + + + + + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Final details + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + + First version of the pipelinePath to the output directory where the pipeline  + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔will be created + 1.0dev▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁. + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + ▔▔▔▔▔▔▔▔If the pipeline output directory exists, remove it and continue. 
+ + ▁▁▁▁▁▁▁▁ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Finish + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  + + + + + ''' +# --- +# name: test_github_details + ''' + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + nf-core create + + + + + + + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Create a GitHub repo + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + After creating the pipeline template locally, we can create a GitHub repository and push the + code to it. + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Your GitHub usernameYour GitHub personal access token for Show + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔login.▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + mirpedrol▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁•••••••••••••••••••••••••••••••••••• + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Please select the the GitHub repository settings: + + + ▔▔▔▔▔▔▔▔Select if the new GitHub repo must be private. 
+ + ▁▁▁▁▁▁▁▁ + ▔▔▔▔▔▔▔▔Select if you would like to push all the pipeline template files to your GitHub repo + and all the branches required to keep the pipeline up to date with new releases of nf-core + ▁▁▁▁▁▁▁▁ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Create GitHub repoFinish without creating a repo + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  + + + + + ''' +# --- +# name: test_logging_after_github + ''' + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + nf-core create + + + + + + + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + +                                         ,--./,-. +         ___     __   __   __   ___     /,-._.--~\ + |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                        `._,._,' + + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + nf-core create + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Visualising logging output. 
+ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Close App + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + ▂▂ + + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  + + + + + ''' +# --- +# name: test_logging_pipeline_created + ''' + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + nf-core create + + + + + + + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + +                                         ,--./,-. +         ___     __   __   __   ___     /,-._.--~\ + |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                        `._,._,' + + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + nf-core create + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Visualising logging output. 
+ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Close logging screen + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + ▂▂ + + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  + + + + + ''' +# --- +# name: test_type_custom + ''' + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + nf-core create + + + + + + + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▔▔▔▔▔▔▔▔ + Use reference genomesThe pipeline will be ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + ▁▁▁▁▁▁▁▁configured to use a Show help + copy of the most ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + common reference  + genome files from  + iGenomes + + ▔▔▔▔▔▔▔▔ + Add Github CI testsThe pipeline will ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + ▁▁▁▁▁▁▁▁include several Show help + GitHub actions for ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + Continuous  + Integration (CI)  + testing + + ▔▔▔▔▔▔▔▔ + Add Github badgesThe README.md file of▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + ▁▁▁▁▁▁▁▁the pipeline will Show help + include GitHub badges▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + ▔▔▔▔▔▔▔▔ + Add configuration The pipeline will ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + ▁▁▁▁▁▁▁▁filesinclude configurationShow help + profiles containing ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + custom parameters  + requried to run  + nf-core pipelines at  + different  + institutions + + + + + + + + + + + + + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Continue + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  D  Toggle dark mode  Q  Quit  + + + + + ''' +# --- +# name: test_type_nfcore + ''' + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + nf-core create + + + + + + + + + + nf-core create — Create a new pipeline with the 
nf-core pipeline template + ▔▔▔▔▔▔▔▔ + Use reference genomesThe pipeline will be ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + ▁▁▁▁▁▁▁▁configured to use a Show help + copy of the most ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + common reference  + genome files from  + iGenomes + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Continue + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  D  Toggle dark mode  Q  Quit  + + + + + ''' +# --- +# name: test_type_nfcore_validation + ''' + + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - To nf-core or not to nf-core? - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Next, we need to know what kind of pipeline this will be. - - Choose "nf-core" if: - - ● You want your pipeline to be part of the nf-core community - ● You think that there's an outside chance that it ever could be part of nf-core - - Choose "Custom" if: - - ● Your pipeline will never be part of nf-core - ● You want full control over all features that are included from the template(including  - those that are mandatory for nf-core). - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - nf-coreCustom - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Not sure? What's the difference? 
- - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Choosing "nf-core" effectively pre-selects the following template features: - - ● GitHub Actions Continuous Integration (CI) configuration for the following: - ▪ Small-scale (GitHub) and large-scale (AWS) tests - ▪ Code format linting with prettier - ▪ Auto-fix functionality using @nf-core-bot - ▪ Marking old issues as stale - ● Inclusion of shared nf-core config profiles - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Basic details + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + + GitHub organisationWorkflow name + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + nf-corePipeline Name + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + Value error, Must be lowercase without  + punctuation. + + A short description of your pipeline. + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Description + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + Value error, Cannot be left empty. + + Name of the main author / authors + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Author(s) + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + Value error, Cannot be left empty. 
+ ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Next + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  diff --git a/tests/test_create_app.py b/tests/test_create_app.py index eb0f96a0c..e91d0a0fa 100644 --- a/tests/test_create_app.py +++ b/tests/test_create_app.py @@ -1,4 +1,6 @@ """ Test Pipeline Create App """ +from unittest import mock + import pytest from nf_core.pipelines.create import PipelineCreateApp @@ -28,10 +30,249 @@ def test_welcome(snap_compare): def test_choose_type(snap_compare): """Test snapshot for the choose_type screen. - screen welcome > press start > screen choose_type + Steps to get to this screen: + screen welcome > press start > + screen choose_type + """ + + async def run_before(pilot) -> None: + await pilot.click("#start") + + assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) + + +def test_basic_details_nfcore(snap_compare): + """Test snapshot for the basic_details screen of an nf-core pipeline. + Steps to get to this screen: + screen welcome > press start > + screen choose_type > press nf-core > + screen basic_details + """ + + async def run_before(pilot) -> None: + await pilot.click("#start") + await pilot.click("#type_nfcore") + + assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) + + +def test_basic_details_custom(snap_compare): + """Test snapshot for the basic_details screen of a custom pipeline. + Steps to get to this screen: + screen welcome > press start > + screen choose_type > press custom > + screen basic_details + """ + + async def run_before(pilot) -> None: + await pilot.click("#start") + await pilot.click("#type_custom") + + assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) + + +def test_type_nfcore(snap_compare): + """Test snapshot for the type_nfcore screen. 
+ Steps to get to this screen: + screen welcome > press start > + screen choose_type > press nf-core > + screen basic_details > enter pipeline details > press next > + screen type_nfcore + """ + + async def run_before(pilot) -> None: + await pilot.click("#start") + await pilot.click("#type_nfcore") + await pilot.click("#name") + await pilot.press("m", "y", "p", "i", "p", "e", "l", "i", "n", "e") + await pilot.press("tab") + await pilot.press("A", " ", "c", "o", "o", "l", " ", "d", "e", "s", "c", "r", "i", "p", "t", "i", "o", "n") + await pilot.press("tab") + await pilot.press("M", "e") + await pilot.click("#next") + + assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) + + +def test_type_nfcore_validation(snap_compare): + """Test snapshot for the type_nfcore screen. + Validation errors should appear when input fields are empty. + Steps to get to this screen: + screen welcome > press start > + screen choose_type > press nf-core > + screen basic_details > press next > + ERRORS + """ + + async def run_before(pilot) -> None: + await pilot.click("#start") + await pilot.click("#type_nfcore") + await pilot.click("#next") + + assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) + + +def test_type_custom(snap_compare): + """Test snapshot for the type_custom screen. 
+ Steps to get to this screen: + screen welcome > press start > + screen choose_type > press custom > + screen basic_details > enter pipeline details > press next > + screen type_custom + """ + + async def run_before(pilot) -> None: + await pilot.click("#start") + await pilot.click("#type_custom") + await pilot.click("#name") + await pilot.press("tab") + await pilot.press("m", "y", "p", "i", "p", "e", "l", "i", "n", "e") + await pilot.press("tab") + await pilot.press("A", " ", "c", "o", "o", "l", " ", "d", "e", "s", "c", "r", "i", "p", "t", "i", "o", "n") + await pilot.press("tab") + await pilot.press("M", "e") + await pilot.click("#next") + + assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) + + +def test_final_details(snap_compare): + """Test snapshot for the final_details screen. + Steps to get to this screen: + screen welcome > press start > + screen choose_type > press nf-core > + screen basic_details > enter pipeline details > press next > + screen type_nfcore > press continue > + screen final_details + """ + + async def run_before(pilot) -> None: + await pilot.click("#start") + await pilot.click("#type_nfcore") + await pilot.click("#name") + await pilot.press("m", "y", "p", "i", "p", "e", "l", "i", "n", "e") + await pilot.press("tab") + await pilot.press("A", " ", "c", "o", "o", "l", " ", "d", "e", "s", "c", "r", "i", "p", "t", "i", "o", "n") + await pilot.press("tab") + await pilot.press("M", "e") + await pilot.click("#next") + await pilot.click("#continue") + + assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) + + +def test_customisation_help(snap_compare): + """Test snapshot for the type_custom screen - showing help messages. 
+ Steps to get to this screen: + screen welcome > press start > + screen choose_type > press nf-core > + screen basic_details > enter pipeline details > press next > + screen type_custom > press Show more + """ + + async def run_before(pilot) -> None: + await pilot.click("#start") + await pilot.click("#type_custom") + await pilot.click("#name") + await pilot.press("tab") + await pilot.press("m", "y", "p", "i", "p", "e", "l", "i", "n", "e") + await pilot.press("tab") + await pilot.press("A", " ", "c", "o", "o", "l", " ", "d", "e", "s", "c", "r", "i", "p", "t", "i", "o", "n") + await pilot.press("tab") + await pilot.press("M", "e") + await pilot.click("#next") + await pilot.click("#igenomes") + await pilot.press("tab") + await pilot.press("enter") + + assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) + + +@mock.patch("nf_core.pipelines.create.create.PipelineCreate.init_pipeline", return_value=None) +def test_logging_pipeline_created(mock_init_pipeline, snap_compare): + """Test snapshot for the final_details screen. 
+ Steps to get to this screen: + screen welcome > press start > + screen choose_type > press nf-core > + screen basic_details > enter pipeline details > press next > + screen type_nfcore > press continue > + screen final_details > press finish > + screen logging_screen + """ + + async def run_before(pilot) -> None: + await pilot.click("#start") + await pilot.click("#type_nfcore") + await pilot.click("#name") + await pilot.press("m", "y", "p", "i", "p", "e", "l", "i", "n", "e") + await pilot.press("tab") + await pilot.press("A", " ", "c", "o", "o", "l", " ", "d", "e", "s", "c", "r", "i", "p", "t", "i", "o", "n") + await pilot.press("tab") + await pilot.press("M", "e") + await pilot.click("#next") + await pilot.click("#continue") + await pilot.click("#finish") + + assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) + + +@mock.patch("nf_core.pipelines.create.create.PipelineCreate.init_pipeline", return_value=None) +def test_github_details(mock_init_pipeline, snap_compare): + """Test snapshot for the final_details screen. 
+ Steps to get to this screen: + screen welcome > press start > + screen choose_type > press nf-core > + screen basic_details > enter pipeline details > press next > + screen type_nfcore > press continue > + screen final_details > press finish > + screen logging_screen > press close_screen > + screen github_repo + """ + + async def run_before(pilot) -> None: + await pilot.click("#start") + await pilot.click("#type_nfcore") + await pilot.click("#name") + await pilot.press("m", "y", "p", "i", "p", "e", "l", "i", "n", "e") + await pilot.press("tab") + await pilot.press("A", " ", "c", "o", "o", "l", " ", "d", "e", "s", "c", "r", "i", "p", "t", "i", "o", "n") + await pilot.press("tab") + await pilot.press("M", "e") + await pilot.click("#next") + await pilot.click("#continue") + await pilot.click("#finish") + await pilot.click("#close_screen") + + assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) + + +@mock.patch("nf_core.pipelines.create.create.PipelineCreate.init_pipeline", return_value=None) +def test_logging_after_github(mock_init_pipeline, snap_compare): + """Test snapshot for the final_details screen. 
+ Steps to get to this screen: + screen welcome > press start > + screen choose_type > press nf-core > + screen basic_details > enter pipeline details > press next > + screen type_nfcore > press continue > + screen final_details > press finish > + screen logging_screen > press close_screen > + screen github_repo > press exit (close without creating a repo) > + screen logging_screen """ async def run_before(pilot) -> None: await pilot.click("#start") + await pilot.click("#type_nfcore") + await pilot.click("#name") + await pilot.press("m", "y", "p", "i", "p", "e", "l", "i", "n", "e") + await pilot.press("tab") + await pilot.press("A", " ", "c", "o", "o", "l", " ", "d", "e", "s", "c", "r", "i", "p", "t", "i", "o", "n") + await pilot.press("tab") + await pilot.press("M", "e") + await pilot.click("#next") + await pilot.click("#continue") + await pilot.click("#finish") + await pilot.click("#close_screen") + await pilot.click("#exit") assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) From 390816310783dc0c067e0d8be0f3324459ac417a Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 5 Jan 2024 09:59:41 +0100 Subject: [PATCH 044/737] ruff code modifications --- nf_core/pipelines/create/create.py | 4 ++-- nf_core/pipelines/create/githubrepo.py | 6 +++--- nf_core/pipelines/create/loggingscreen.py | 1 - nf_core/pipelines/create/nfcorepipeline.py | 2 +- nf_core/pipelines/create/utils.py | 2 +- tests/test_create.py | 1 - tests/test_create_app.py | 6 +++--- tests/test_modules.py | 1 - tests/test_subworkflows.py | 1 - 9 files changed, 10 insertions(+), 14 deletions(-) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 7838e39a4..51c115e2c 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -130,7 +130,7 @@ def check_template_yaml_info(self, template_yaml, name, description, author): config = CreateConfig() if template_yaml: try: - with 
open(template_yaml, "r") as f: + with open(template_yaml) as f: template_yaml = yaml.safe_load(f) config = CreateConfig(**template_yaml) except FileNotFoundError: @@ -397,7 +397,7 @@ def remove_nf_core_in_bug_report_template(self): """ bug_report_path = self.outdir / ".github" / "ISSUE_TEMPLATE" / "bug_report.yml" - with open(bug_report_path, "r") as fh: + with open(bug_report_path) as fh: contents = yaml.load(fh, Loader=yaml.FullLoader) # Remove the first item in the body, which is the information about the docs diff --git a/nf_core/pipelines/create/githubrepo.py b/nf_core/pipelines/create/githubrepo.py index 270107c68..2a98d8498 100644 --- a/nf_core/pipelines/create/githubrepo.py +++ b/nf_core/pipelines/create/githubrepo.py @@ -23,7 +23,7 @@ repo_config_markdown = """ Please select the the GitHub repository settings: """ -exit_help_text_markdown = f""" +exit_help_text_markdown = """ If you would like to create the GitHub repository later, you can do it manually by following these steps: 1. Create a new GitHub repository 2. Add the remote to your local repository @@ -121,7 +121,7 @@ def on_button_pressed(self, event: Button.Pressed) -> None: try: user.login log.debug("GitHub authentication successful") - except GithubException as e: + except GithubException: raise UserWarning( f"Could not authenticate to GitHub with user name '{github_variables['gh_username']}'." "Please make sure that the provided user name and token are correct." 
@@ -210,7 +210,7 @@ def _get_github_credentials(self): # Use gh CLI config if installed gh_cli_config_fn = os.path.expanduser("~/.config/gh/hosts.yml") if os.path.exists(gh_cli_config_fn): - with open(gh_cli_config_fn, "r") as fh: + with open(gh_cli_config_fn) as fh: gh_cli_config = yaml.safe_load(fh) gh_user = (gh_cli_config["github.com"]["user"],) gh_token = gh_cli_config["github.com"]["oauth_token"] diff --git a/nf_core/pipelines/create/loggingscreen.py b/nf_core/pipelines/create/loggingscreen.py index 5f4004798..2a59e2bcc 100644 --- a/nf_core/pipelines/create/loggingscreen.py +++ b/nf_core/pipelines/create/loggingscreen.py @@ -1,4 +1,3 @@ -from textual import on from textual.app import ComposeResult from textual.containers import Center from textual.screen import Screen diff --git a/nf_core/pipelines/create/nfcorepipeline.py b/nf_core/pipelines/create/nfcorepipeline.py index 95c173a40..10541ced0 100644 --- a/nf_core/pipelines/create/nfcorepipeline.py +++ b/nf_core/pipelines/create/nfcorepipeline.py @@ -1,6 +1,6 @@ from textual import on from textual.app import ComposeResult -from textual.containers import Center, Horizontal, ScrollableContainer +from textual.containers import Center, ScrollableContainer from textual.screen import Screen from textual.widgets import Button, Footer, Header, Switch diff --git a/nf_core/pipelines/create/utils.py b/nf_core/pipelines/create/utils.py index f11ee1753..7842888f0 100644 --- a/nf_core/pipelines/create/utils.py +++ b/nf_core/pipelines/create/utils.py @@ -186,7 +186,7 @@ class CustomLogHandler(RichHandler): def emit(self, record: LogRecord) -> None: """Invoked by logging.""" try: - app = active_app.get() + _app = active_app.get() except LookupError: pass else: diff --git a/tests/test_create.py b/tests/test_create.py index c3b7f931f..5dfd9244c 100644 --- a/tests/test_create.py +++ b/tests/test_create.py @@ -3,7 +3,6 @@ import os import unittest from pathlib import Path -from unittest import mock import git import yaml diff 
--git a/tests/test_create_app.py b/tests/test_create_app.py index e91d0a0fa..8b22eabad 100644 --- a/tests/test_create_app.py +++ b/tests/test_create_app.py @@ -12,11 +12,11 @@ async def test_app_bindings(): app = PipelineCreateApp() async with app.run_test() as pilot: # Test pressing the D key - assert app.dark == True + assert app.dark await pilot.press("d") - assert app.dark == False + assert not app.dark await pilot.press("d") - assert app.dark == True + assert app.dark # Test pressing the Q key await pilot.press("q") diff --git a/tests/test_modules.py b/tests/test_modules.py index 05b0c4111..9c045f9d1 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -20,7 +20,6 @@ GITLAB_URL, OLD_TRIMGALORE_BRANCH, OLD_TRIMGALORE_SHA, - create_tmp_pipeline, mock_anaconda_api_calls, mock_biocontainers_api_calls, ) diff --git a/tests/test_subworkflows.py b/tests/test_subworkflows.py index 930b92942..a7f387661 100644 --- a/tests/test_subworkflows.py +++ b/tests/test_subworkflows.py @@ -15,7 +15,6 @@ GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH, GITLAB_URL, OLD_SUBWORKFLOWS_SHA, - create_tmp_pipeline, ) From 208d832994a34f1cfce22e6612c1471ca375c55e Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 5 Jan 2024 10:18:27 +0100 Subject: [PATCH 045/737] update snapshots --- tests/__snapshots__/test_create_app.ambr | 2849 +++++++++++----------- 1 file changed, 1425 insertions(+), 1424 deletions(-) diff --git a/tests/__snapshots__/test_create_app.ambr b/tests/__snapshots__/test_create_app.ambr index 9a2c11d85..3c5a1b91c 100644 --- a/tests/__snapshots__/test_create_app.ambr +++ b/tests/__snapshots__/test_create_app.ambr @@ -22,250 +22,250 @@ font-weight: 700; } - .terminal-3398870890-matrix { + .terminal-2900179749-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3398870890-title { + .terminal-2900179749-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3398870890-r1 { 
fill: #c5c8c6 } - .terminal-3398870890-r2 { fill: #e3e3e3 } - .terminal-3398870890-r3 { fill: #989898 } - .terminal-3398870890-r4 { fill: #e1e1e1 } - .terminal-3398870890-r5 { fill: #121212 } - .terminal-3398870890-r6 { fill: #0053aa } - .terminal-3398870890-r7 { fill: #dde8f3;font-weight: bold } - .terminal-3398870890-r8 { fill: #a5a5a5;font-style: italic; } - .terminal-3398870890-r9 { fill: #1e1e1e } - .terminal-3398870890-r10 { fill: #008139 } - .terminal-3398870890-r11 { fill: #e2e2e2 } - .terminal-3398870890-r12 { fill: #787878 } - .terminal-3398870890-r13 { fill: #b93c5b } - .terminal-3398870890-r14 { fill: #7ae998 } - .terminal-3398870890-r15 { fill: #0a180e;font-weight: bold } - .terminal-3398870890-r16 { fill: #ddedf9 } + .terminal-2900179749-r1 { fill: #c5c8c6 } + .terminal-2900179749-r2 { fill: #e3e3e3 } + .terminal-2900179749-r3 { fill: #989898 } + .terminal-2900179749-r4 { fill: #e1e1e1 } + .terminal-2900179749-r5 { fill: #121212 } + .terminal-2900179749-r6 { fill: #0053aa } + .terminal-2900179749-r7 { fill: #dde8f3;font-weight: bold } + .terminal-2900179749-r8 { fill: #a5a5a5;font-style: italic; } + .terminal-2900179749-r9 { fill: #1e1e1e } + .terminal-2900179749-r10 { fill: #008139 } + .terminal-2900179749-r11 { fill: #e2e2e2 } + .terminal-2900179749-r12 { fill: #787878 } + .terminal-2900179749-r13 { fill: #b93c5b } + .terminal-2900179749-r14 { fill: #7ae998 } + .terminal-2900179749-r15 { fill: #0a180e;font-weight: bold } + .terminal-2900179749-r16 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Basic details - - 
▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - - GitHub organisationWorkflow name - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - nf-corePipeline Name - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - A short description of your pipeline. - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Description - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - Name of the main author / authors - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Author(s) - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Next - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Basic details + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + + GitHub organisationWorkflow name + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + nf-corePipeline Name + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + A short description of your pipeline. 
+ ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Description + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + Name of the main author / authors + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Author(s) + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Next + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  @@ -295,253 +295,253 @@ font-weight: 700; } - .terminal-3340021344-matrix { + .terminal-1441415707-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3340021344-title { + .terminal-1441415707-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3340021344-r1 { fill: #c5c8c6 } - .terminal-3340021344-r2 { fill: #e3e3e3 } - .terminal-3340021344-r3 { fill: #989898 } - .terminal-3340021344-r4 { fill: #e1e1e1 } - .terminal-3340021344-r5 { fill: #121212 } - .terminal-3340021344-r6 { fill: #0053aa } - .terminal-3340021344-r7 { fill: #dde8f3;font-weight: bold } - .terminal-3340021344-r8 { fill: #a5a5a5;font-style: italic; } - .terminal-3340021344-r9 { fill: #1e1e1e } - .terminal-3340021344-r10 { fill: #0f4e2a } - .terminal-3340021344-r11 { fill: #0178d4 } - .terminal-3340021344-r12 { fill: #a7a7a7 } - .terminal-3340021344-r13 { fill: #787878 } - .terminal-3340021344-r14 { fill: #e2e2e2 } - .terminal-3340021344-r15 { fill: #b93c5b } - .terminal-3340021344-r16 { fill: #7ae998 } - .terminal-3340021344-r17 { fill: #0a180e;font-weight: bold } - .terminal-3340021344-r18 { fill: #008139 } - .terminal-3340021344-r19 { fill: #ddedf9 } + .terminal-1441415707-r1 { fill: #c5c8c6 } + .terminal-1441415707-r2 { fill: #e3e3e3 } + .terminal-1441415707-r3 { fill: #989898 } + .terminal-1441415707-r4 { fill: 
#e1e1e1 } + .terminal-1441415707-r5 { fill: #121212 } + .terminal-1441415707-r6 { fill: #0053aa } + .terminal-1441415707-r7 { fill: #dde8f3;font-weight: bold } + .terminal-1441415707-r8 { fill: #a5a5a5;font-style: italic; } + .terminal-1441415707-r9 { fill: #1e1e1e } + .terminal-1441415707-r10 { fill: #0f4e2a } + .terminal-1441415707-r11 { fill: #0178d4 } + .terminal-1441415707-r12 { fill: #a7a7a7 } + .terminal-1441415707-r13 { fill: #787878 } + .terminal-1441415707-r14 { fill: #e2e2e2 } + .terminal-1441415707-r15 { fill: #b93c5b } + .terminal-1441415707-r16 { fill: #7ae998 } + .terminal-1441415707-r17 { fill: #0a180e;font-weight: bold } + .terminal-1441415707-r18 { fill: #008139 } + .terminal-1441415707-r19 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Basic details - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - - GitHub organisationWorkflow name - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - nf-corePipeline Name - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - A short description of your pipeline. 
- ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Description - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - Name of the main author / authors - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Author(s) - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Next - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Basic details + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + + GitHub organisationWorkflow name + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + nf-corePipeline Name + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + A short description of your pipeline. 
+ ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Description + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + Name of the main author / authors + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Author(s) + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Next + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  @@ -571,251 +571,251 @@ font-weight: 700; } - .terminal-2103023275-matrix { + .terminal-828318910-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2103023275-title { + .terminal-828318910-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2103023275-r1 { fill: #c5c8c6 } - .terminal-2103023275-r2 { fill: #e3e3e3 } - .terminal-2103023275-r3 { fill: #989898 } - .terminal-2103023275-r4 { fill: #e1e1e1 } - .terminal-2103023275-r5 { fill: #121212 } - .terminal-2103023275-r6 { fill: #0053aa } - .terminal-2103023275-r7 { fill: #dde8f3;font-weight: bold } - .terminal-2103023275-r8 { fill: #e1e1e1;font-style: italic; } - .terminal-2103023275-r9 { fill: #4ebf71;font-weight: bold } - .terminal-2103023275-r10 { fill: #7ae998 } - .terminal-2103023275-r11 { fill: #507bb3 } - .terminal-2103023275-r12 { fill: #dde6ed;font-weight: bold } - .terminal-2103023275-r13 { fill: #008139 } - .terminal-2103023275-r14 { fill: #001541 } - .terminal-2103023275-r15 { fill: #24292f } - .terminal-2103023275-r16 { fill: #e2e3e3;font-weight: bold } - .terminal-2103023275-r17 { fill: #ddedf9 } + .terminal-828318910-r1 { fill: #c5c8c6 } + .terminal-828318910-r2 { fill: #e3e3e3 } + .terminal-828318910-r3 { fill: #989898 } + .terminal-828318910-r4 { fill: #e1e1e1 } + .terminal-828318910-r5 { fill: #121212 } + 
.terminal-828318910-r6 { fill: #0053aa } + .terminal-828318910-r7 { fill: #dde8f3;font-weight: bold } + .terminal-828318910-r8 { fill: #e1e1e1;font-style: italic; } + .terminal-828318910-r9 { fill: #4ebf71;font-weight: bold } + .terminal-828318910-r10 { fill: #7ae998 } + .terminal-828318910-r11 { fill: #507bb3 } + .terminal-828318910-r12 { fill: #dde6ed;font-weight: bold } + .terminal-828318910-r13 { fill: #008139 } + .terminal-828318910-r14 { fill: #001541 } + .terminal-828318910-r15 { fill: #24292f } + .terminal-828318910-r16 { fill: #e2e3e3;font-weight: bold } + .terminal-828318910-r17 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - To nf-core or not to nf-core? - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Next, we need to know what kind of pipeline this will be. - - Choose "nf-core" if: - - ● You want your pipeline to be part of the nf-core community - ● You think that there's an outside chance that it ever could be part of nf-core - - Choose "Custom" if: - - ● Your pipeline will never be part of nf-core - ● You want full control over all features that are included from the template(including  - those that are mandatory for nf-core). - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - nf-coreCustom - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Not sure? What's the difference? 
- - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Choosing "nf-core" effectively pre-selects the following template features: - - ● GitHub Actions Continuous Integration (CI) configuration for the following: - ▪ Small-scale (GitHub) and large-scale (AWS) tests - ▪ Code format linting with prettier - ▪ Auto-fix functionality using @nf-core-bot - ▪ Marking old issues as stale - ● Inclusion of shared nf-core config profiles - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + To nf-core or not to nf-core? + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Next, we need to know what kind of pipeline this will be. + + Choose "nf-core" if: + + ● You want your pipeline to be part of the nf-core community + ● You think that there's an outside chance that it ever could be part of nf-core + + Choose "Custom" if: + + ● Your pipeline will never be part of nf-core + ● You want full control over all features that are included from the template (including  + those that are mandatory for nf-core). + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + nf-coreCustom + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +                             Not sure? What's the difference?                             
+ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Choosing "nf-core" effectively pre-selects the following template features: + + ● GitHub Actions Continuous Integration (CI) configuration for the following: + ▪ Small-scale (GitHub) and large-scale (AWS) tests + ▪ Code format linting with prettier + ▪ Auto-fix functionality using @nf-core-bot + ▪ Marking old issues as stale + ● Inclusion of shared nf-core config profiles + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  @@ -845,255 +845,255 @@ font-weight: 700; } - .terminal-3470600551-matrix { + .terminal-3545740190-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3470600551-title { + .terminal-3545740190-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3470600551-r1 { fill: #c5c8c6 } - .terminal-3470600551-r2 { fill: #e3e3e3 } - .terminal-3470600551-r3 { fill: #989898 } - .terminal-3470600551-r4 { fill: #1e1e1e } - .terminal-3470600551-r5 { fill: #0178d4 } - .terminal-3470600551-r6 { fill: #e1e1e1 } - .terminal-3470600551-r7 { fill: #e2e2e2 } - .terminal-3470600551-r8 { fill: #808080 } - .terminal-3470600551-r9 { fill: #454a50 } - .terminal-3470600551-r10 { fill: #e2e3e3;font-weight: bold } - .terminal-3470600551-r11 { fill: #000000 } - .terminal-3470600551-r12 { fill: #e4e4e4 } - .terminal-3470600551-r13 { fill: #14191f } - .terminal-3470600551-r14 { fill: #507bb3 } - .terminal-3470600551-r15 { fill: #dde6ed;font-weight: bold } - .terminal-3470600551-r16 { fill: #001541 } - .terminal-3470600551-r17 { fill: #7ae998 } - .terminal-3470600551-r18 { fill: #0a180e;font-weight: bold } - .terminal-3470600551-r19 { fill: #008139 } - .terminal-3470600551-r20 { fill: #dde8f3;font-weight: bold } - .terminal-3470600551-r21 { fill: #ddedf9 } + .terminal-3545740190-r1 { fill: #c5c8c6 } + .terminal-3545740190-r2 { fill: #e3e3e3 } + .terminal-3545740190-r3 { fill: 
#989898 } + .terminal-3545740190-r4 { fill: #1e1e1e } + .terminal-3545740190-r5 { fill: #0178d4 } + .terminal-3545740190-r6 { fill: #e1e1e1 } + .terminal-3545740190-r7 { fill: #454a50 } + .terminal-3545740190-r8 { fill: #e2e2e2 } + .terminal-3545740190-r9 { fill: #808080 } + .terminal-3545740190-r10 { fill: #e2e3e3;font-weight: bold } + .terminal-3545740190-r11 { fill: #000000 } + .terminal-3545740190-r12 { fill: #e4e4e4 } + .terminal-3545740190-r13 { fill: #14191f } + .terminal-3545740190-r14 { fill: #507bb3 } + .terminal-3545740190-r15 { fill: #dde6ed;font-weight: bold } + .terminal-3545740190-r16 { fill: #001541 } + .terminal-3545740190-r17 { fill: #7ae998 } + .terminal-3545740190-r18 { fill: #0a180e;font-weight: bold } + .terminal-3545740190-r19 { fill: #008139 } + .terminal-3545740190-r20 { fill: #dde8f3;font-weight: bold } + .terminal-3545740190-r21 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▔▔▔▔▔▔▔▔ - Use reference The pipeline will be▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - ▁▁▁▁▁▁▁▁genomesconfigured to use a Hide help - copy of the most ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - common reference  - genome files from  - iGenomes - - - - Nf-core pipelines are configured to use a copy of the most common  - reference genome files. - - By selecting this option, your pipeline will include a  - configuration file specifying the paths to these files. - - The required code to use these files will also be included in the  - template.When the pipeline user provides an appropriate genome ▆▆ - key,the pipeline will automatically download the required  - reference files. 
- - - ▔▔▔▔▔▔▔▔ - Add Github CI testsThe pipeline will ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - ▁▁▁▁▁▁▁▁include several Show help - GitHub actions for ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - Continuous  - Integration (CI)  - testing - - ▔▔▔▔▔▔▔▔ - Add Github badgesThe README.md file ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - ▁▁▁▁▁▁▁▁of the pipeline willShow help - include GitHub ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - badges - - ▔▔▔▔▔▔▔▔ - Add configuration The pipeline will ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - ▁▁▁▁▁▁▁▁filesinclude Show help - configuration ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - profiles containing  - custom parameters ▆▆ - requried to run  - nf-core pipelines at - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Continue - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Use reference genomesThe pipeline will be Hide help + ▁▁▁▁▁▁▁▁configured to use a copy ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + of the most common  + reference genome files  + from iGenomes + + + + Nf-core pipelines are configured to use a copy of the most common  + reference genome files. + + By selecting this option, your pipeline will include a configuration + file specifying the paths to these files. + + The required code to use these files will also be included in the  + template. When the pipeline user provides an appropriate genome key,▆▆ + the pipeline will automatically download the required reference  + files. 
+ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Add Github CI testsThe pipeline will includeShow help + ▁▁▁▁▁▁▁▁several GitHub actions ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + for Continuous  + Integration (CI) testing + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Add Github badgesThe README.md file of theShow help + ▁▁▁▁▁▁▁▁pipeline will include ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + GitHub badges + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Add configuration filesThe pipeline will includeShow help + ▁▁▁▁▁▁▁▁configuration profiles ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + containing custom  + parameters requried to  + run nf-core pipelines at  + different institutions + + + + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Continue + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  D  Toggle dark mode  Q  Quit  @@ -1123,249 +1123,249 @@ font-weight: 700; } - .terminal-680074798-matrix { + .terminal-1778650725-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-680074798-title { + .terminal-1778650725-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-680074798-r1 { fill: #c5c8c6 } - .terminal-680074798-r2 { fill: #e3e3e3 } - .terminal-680074798-r3 { fill: #989898 } - .terminal-680074798-r4 { fill: #e1e1e1 } - .terminal-680074798-r5 { fill: #121212 } - .terminal-680074798-r6 { fill: #0053aa } - .terminal-680074798-r7 { fill: #dde8f3;font-weight: bold } - .terminal-680074798-r8 { fill: #a5a5a5;font-style: italic; } - .terminal-680074798-r9 { fill: #1e1e1e } - .terminal-680074798-r10 { fill: #008139 } - .terminal-680074798-r11 { fill: #e2e2e2 } - .terminal-680074798-r12 { fill: #b93c5b } - .terminal-680074798-r13 { fill: #7ae998 } - .terminal-680074798-r14 { fill: #0a180e;font-weight: bold } - .terminal-680074798-r15 { fill: #ddedf9 } + .terminal-1778650725-r1 { fill: #c5c8c6 } + .terminal-1778650725-r2 { fill: #e3e3e3 } + .terminal-1778650725-r3 { fill: #989898 } + .terminal-1778650725-r4 { fill: #e1e1e1 } + .terminal-1778650725-r5 { fill: #121212 } + .terminal-1778650725-r6 { fill: #0053aa } + .terminal-1778650725-r7 
{ fill: #dde8f3;font-weight: bold } + .terminal-1778650725-r8 { fill: #a5a5a5;font-style: italic; } + .terminal-1778650725-r9 { fill: #1e1e1e } + .terminal-1778650725-r10 { fill: #008139 } + .terminal-1778650725-r11 { fill: #e2e2e2 } + .terminal-1778650725-r12 { fill: #b93c5b } + .terminal-1778650725-r13 { fill: #7ae998 } + .terminal-1778650725-r14 { fill: #0a180e;font-weight: bold } + .terminal-1778650725-r15 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Final details - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - - First version of the pipelinePath to the output directory where the pipeline  - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔will be created - 1.0dev▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁. - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ▔▔▔▔▔▔▔▔If the pipeline output directory exists, remove it and continue. 
- - ▁▁▁▁▁▁▁▁ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Finish - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Final details + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + + First version of the pipelinePath to the output directory where the pipeline  + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔will be created + 1.0dev▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁. + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + ▔▔▔▔▔▔▔▔If the pipeline output directory exists, remove it and continue. + + ▁▁▁▁▁▁▁▁ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Finish + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  @@ -1395,255 +1395,255 @@ font-weight: 700; } - .terminal-2034340412-matrix { + .terminal-4207832566-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2034340412-title { + .terminal-4207832566-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2034340412-r1 { fill: #c5c8c6 } - .terminal-2034340412-r2 { fill: #e3e3e3 } - .terminal-2034340412-r3 { fill: #989898 } - .terminal-2034340412-r4 { fill: #e1e1e1 } - .terminal-2034340412-r5 { fill: #121212 } - .terminal-2034340412-r6 { fill: #0053aa } - .terminal-2034340412-r7 { fill: #dde8f3;font-weight: bold } - .terminal-2034340412-r8 { fill: #454a50 } - .terminal-2034340412-r9 { fill: #a5a5a5;font-style: italic; } - .terminal-2034340412-r10 { fill: #e2e3e3;font-weight: bold } - .terminal-2034340412-r11 { fill: #1e1e1e } - .terminal-2034340412-r12 { fill: #008139 } - .terminal-2034340412-r13 { fill: #000000 } - .terminal-2034340412-r14 { fill: #e2e2e2 } - 
.terminal-2034340412-r15 { fill: #b93c5b } - .terminal-2034340412-r16 { fill: #7ae998 } - .terminal-2034340412-r17 { fill: #507bb3 } - .terminal-2034340412-r18 { fill: #0a180e;font-weight: bold } - .terminal-2034340412-r19 { fill: #dde6ed;font-weight: bold } - .terminal-2034340412-r20 { fill: #001541 } - .terminal-2034340412-r21 { fill: #ddedf9 } + .terminal-4207832566-r1 { fill: #c5c8c6 } + .terminal-4207832566-r2 { fill: #e3e3e3 } + .terminal-4207832566-r3 { fill: #989898 } + .terminal-4207832566-r4 { fill: #e1e1e1 } + .terminal-4207832566-r5 { fill: #121212 } + .terminal-4207832566-r6 { fill: #0053aa } + .terminal-4207832566-r7 { fill: #dde8f3;font-weight: bold } + .terminal-4207832566-r8 { fill: #454a50 } + .terminal-4207832566-r9 { fill: #a5a5a5;font-style: italic; } + .terminal-4207832566-r10 { fill: #e2e3e3;font-weight: bold } + .terminal-4207832566-r11 { fill: #1e1e1e } + .terminal-4207832566-r12 { fill: #008139 } + .terminal-4207832566-r13 { fill: #000000 } + .terminal-4207832566-r14 { fill: #e2e2e2 } + .terminal-4207832566-r15 { fill: #b93c5b } + .terminal-4207832566-r16 { fill: #7ae998 } + .terminal-4207832566-r17 { fill: #507bb3 } + .terminal-4207832566-r18 { fill: #0a180e;font-weight: bold } + .terminal-4207832566-r19 { fill: #dde6ed;font-weight: bold } + .terminal-4207832566-r20 { fill: #001541 } + .terminal-4207832566-r21 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Create a GitHub repo - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - After creating the pipeline template locally, we can create a GitHub repository and push 
the - code to it. - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Your GitHub usernameYour GitHub personal access token for Show - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔login.▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - mirpedrol▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁•••••••••••••••••••••••••••••••••••• - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Please select the the GitHub repository settings: - - - ▔▔▔▔▔▔▔▔Select if the new GitHub repo must be private. - - ▁▁▁▁▁▁▁▁ - ▔▔▔▔▔▔▔▔Select if you would like to push all the pipeline template files to your GitHub repo - and all the branches required to keep the pipeline up to date with new releases of nf-core - ▁▁▁▁▁▁▁▁ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Create GitHub repoFinish without creating a repo - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Create a GitHub repo + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + After creating the pipeline template locally, we can create a GitHub repository and push the + code to it. + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Your GitHub usernameYour GitHub personal access token for Show + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔login.▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + mirpedrol▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁•••••••••••••••••••••••••••••••••••• + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Please select the the GitHub repository settings: + + + ▔▔▔▔▔▔▔▔Select if the new GitHub repo must be private. 
+ + ▁▁▁▁▁▁▁▁ + ▔▔▔▔▔▔▔▔Select if you would like to push all the pipeline template files to your GitHub repo + and all the branches required to keep the pipeline up to date with new releases of nf-core + ▁▁▁▁▁▁▁▁ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Create GitHub repoFinish without creating a repo + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  @@ -1673,250 +1673,250 @@ font-weight: 700; } - .terminal-1522437605-matrix { + .terminal-3944300660-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1522437605-title { + .terminal-3944300660-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1522437605-r1 { fill: #c5c8c6 } - .terminal-1522437605-r2 { fill: #e3e3e3 } - .terminal-1522437605-r3 { fill: #989898 } - .terminal-1522437605-r4 { fill: #e1e1e1 } - .terminal-1522437605-r5 { fill: #98a84b } - .terminal-1522437605-r6 { fill: #626262 } - .terminal-1522437605-r7 { fill: #608ab1 } - .terminal-1522437605-r8 { fill: #d0b344 } - .terminal-1522437605-r9 { fill: #121212 } - .terminal-1522437605-r10 { fill: #0053aa } - .terminal-1522437605-r11 { fill: #dde8f3;font-weight: bold } - .terminal-1522437605-r12 { fill: #7ae998 } - .terminal-1522437605-r13 { fill: #4ebf71;font-weight: bold } - .terminal-1522437605-r14 { fill: #008139 } - .terminal-1522437605-r15 { fill: #14191f } - .terminal-1522437605-r16 { fill: #ddedf9 } + .terminal-3944300660-r1 { fill: #c5c8c6 } + .terminal-3944300660-r2 { fill: #e3e3e3 } + .terminal-3944300660-r3 { fill: #989898 } + .terminal-3944300660-r4 { fill: #e1e1e1 } + .terminal-3944300660-r5 { fill: #98a84b } + .terminal-3944300660-r6 { fill: #626262 } + .terminal-3944300660-r7 { fill: #608ab1 } + .terminal-3944300660-r8 { fill: #d0b344 } + .terminal-3944300660-r9 { fill: #121212 } + .terminal-3944300660-r10 { fill: #0053aa } + 
.terminal-3944300660-r11 { fill: #dde8f3;font-weight: bold } + .terminal-3944300660-r12 { fill: #7ae998 } + .terminal-3944300660-r13 { fill: #4ebf71;font-weight: bold } + .terminal-3944300660-r14 { fill: #008139 } + .terminal-3944300660-r15 { fill: #14191f } + .terminal-3944300660-r16 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - -                                         ,--./,-. -         ___     __   __   __   ___     /,-._.--~\ - |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                        `._,._,' - - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - nf-core create - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Visualising logging output. - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Close App - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - ▂▂ - - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + +                                         ,--./,-. +         ___     __   __   __   ___     /,-._.--~\ + |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                        `._,._,' + + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + nf-core create + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Visualising logging output. 
+ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Close App + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + ▂▂ + + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  @@ -1946,250 +1946,250 @@ font-weight: 700; } - .terminal-3351663353-matrix { + .terminal-139986312-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3351663353-title { + .terminal-139986312-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3351663353-r1 { fill: #c5c8c6 } - .terminal-3351663353-r2 { fill: #e3e3e3 } - .terminal-3351663353-r3 { fill: #989898 } - .terminal-3351663353-r4 { fill: #e1e1e1 } - .terminal-3351663353-r5 { fill: #98a84b } - .terminal-3351663353-r6 { fill: #626262 } - .terminal-3351663353-r7 { fill: #608ab1 } - .terminal-3351663353-r8 { fill: #d0b344 } - .terminal-3351663353-r9 { fill: #121212 } - .terminal-3351663353-r10 { fill: #0053aa } - .terminal-3351663353-r11 { fill: #dde8f3;font-weight: bold } - .terminal-3351663353-r12 { fill: #7ae998 } - .terminal-3351663353-r13 { fill: #4ebf71;font-weight: bold } - .terminal-3351663353-r14 { fill: #008139 } - .terminal-3351663353-r15 { fill: #14191f } - .terminal-3351663353-r16 { fill: #ddedf9 } + .terminal-139986312-r1 { fill: #c5c8c6 } + .terminal-139986312-r2 { fill: #e3e3e3 } + .terminal-139986312-r3 { fill: #989898 } + .terminal-139986312-r4 { fill: #e1e1e1 } + .terminal-139986312-r5 { fill: #98a84b } + .terminal-139986312-r6 { fill: #626262 } + .terminal-139986312-r7 { fill: #608ab1 } + .terminal-139986312-r8 { fill: #d0b344 } + .terminal-139986312-r9 { fill: #121212 } + .terminal-139986312-r10 { fill: #0053aa } + .terminal-139986312-r11 { fill: #dde8f3;font-weight: bold } + .terminal-139986312-r12 { fill: #7ae998 } + .terminal-139986312-r13 { fill: #4ebf71;font-weight: bold } + .terminal-139986312-r14 { fill: #008139 } + .terminal-139986312-r15 { fill: #14191f } + .terminal-139986312-r16 { fill: #ddedf9 } - + - + - + - + - + - + - + - + 
- + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - -                                         ,--./,-. -         ___     __   __   __   ___     /,-._.--~\ - |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                        `._,._,' - - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - nf-core create - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Visualising logging output. - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Close logging screen - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - ▂▂ - - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + +                                         ,--./,-. +         ___     __   __   __   ___     /,-._.--~\ + |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                        `._,._,' + + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + nf-core create + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Visualising logging output. 
+ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Close logging screen + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + ▂▂ + + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  @@ -2219,249 +2219,249 @@ font-weight: 700; } - .terminal-3882061156-matrix { + .terminal-2624233686-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3882061156-title { + .terminal-2624233686-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3882061156-r1 { fill: #c5c8c6 } - .terminal-3882061156-r2 { fill: #e3e3e3 } - .terminal-3882061156-r3 { fill: #989898 } - .terminal-3882061156-r4 { fill: #1e1e1e } - .terminal-3882061156-r5 { fill: #e2e2e2 } - .terminal-3882061156-r6 { fill: #e1e1e1 } - .terminal-3882061156-r7 { fill: #808080 } - .terminal-3882061156-r8 { fill: #507bb3 } - .terminal-3882061156-r9 { fill: #dde6ed;font-weight: bold } - .terminal-3882061156-r10 { fill: #001541 } - .terminal-3882061156-r11 { fill: #7ae998 } - .terminal-3882061156-r12 { fill: #0a180e;font-weight: bold } - .terminal-3882061156-r13 { fill: #008139 } - .terminal-3882061156-r14 { fill: #dde8f3;font-weight: bold } - .terminal-3882061156-r15 { fill: #ddedf9 } + .terminal-2624233686-r1 { fill: #c5c8c6 } + .terminal-2624233686-r2 { fill: #e3e3e3 } + .terminal-2624233686-r3 { fill: #989898 } + .terminal-2624233686-r4 { fill: #1e1e1e } + .terminal-2624233686-r5 { fill: #e1e1e1 } + .terminal-2624233686-r6 { fill: #507bb3 } + .terminal-2624233686-r7 { fill: #e2e2e2 } + .terminal-2624233686-r8 { fill: #808080 } + .terminal-2624233686-r9 { fill: #dde6ed;font-weight: bold } + .terminal-2624233686-r10 { fill: #001541 } + .terminal-2624233686-r11 { fill: #7ae998 } + .terminal-2624233686-r12 { fill: #0a180e;font-weight: bold } + .terminal-2624233686-r13 { fill: #008139 } + .terminal-2624233686-r14 { fill: #dde8f3;font-weight: bold } + .terminal-2624233686-r15 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + 
- + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▔▔▔▔▔▔▔▔ - Use reference genomesThe pipeline will be ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - ▁▁▁▁▁▁▁▁configured to use a Show help - copy of the most ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - common reference  - genome files from  - iGenomes - - ▔▔▔▔▔▔▔▔ - Add Github CI testsThe pipeline will ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - ▁▁▁▁▁▁▁▁include several Show help - GitHub actions for ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - Continuous  - Integration (CI)  - testing - - ▔▔▔▔▔▔▔▔ - Add Github badgesThe README.md file of▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - ▁▁▁▁▁▁▁▁the pipeline will Show help - include GitHub badges▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ▔▔▔▔▔▔▔▔ - Add configuration The pipeline will ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - ▁▁▁▁▁▁▁▁filesinclude configurationShow help - profiles containing ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - custom parameters  - requried to run  - nf-core pipelines at  - different  - institutions - - - - - - - - - - - - - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Continue - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Use reference genomesThe pipeline will be Show help + ▁▁▁▁▁▁▁▁configured to use a copy ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + of the most common  + reference genome files  + from iGenomes + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Add Github CI testsThe pipeline will includeShow help + ▁▁▁▁▁▁▁▁several GitHub actions ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + for Continuous  + Integration (CI) testing + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Add Github badgesThe README.md file of theShow help + ▁▁▁▁▁▁▁▁pipeline will include ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + GitHub badges + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Add configuration filesThe pipeline will includeShow help + ▁▁▁▁▁▁▁▁configuration profiles ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + containing custom  + parameters requried to  + run nf-core pipelines at  + different 
institutions + + + + + + + + + + + + + + + + + + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Continue + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  D  Toggle dark mode  Q  Quit  @@ -2491,249 +2491,249 @@ font-weight: 700; } - .terminal-339122229-matrix { + .terminal-1728001786-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-339122229-title { + .terminal-1728001786-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-339122229-r1 { fill: #c5c8c6 } - .terminal-339122229-r2 { fill: #e3e3e3 } - .terminal-339122229-r3 { fill: #989898 } - .terminal-339122229-r4 { fill: #1e1e1e } - .terminal-339122229-r5 { fill: #e2e2e2 } - .terminal-339122229-r6 { fill: #e1e1e1 } - .terminal-339122229-r7 { fill: #808080 } - .terminal-339122229-r8 { fill: #507bb3 } - .terminal-339122229-r9 { fill: #dde6ed;font-weight: bold } - .terminal-339122229-r10 { fill: #001541 } - .terminal-339122229-r11 { fill: #7ae998 } - .terminal-339122229-r12 { fill: #0a180e;font-weight: bold } - .terminal-339122229-r13 { fill: #008139 } - .terminal-339122229-r14 { fill: #dde8f3;font-weight: bold } - .terminal-339122229-r15 { fill: #ddedf9 } + .terminal-1728001786-r1 { fill: #c5c8c6 } + .terminal-1728001786-r2 { fill: #e3e3e3 } + .terminal-1728001786-r3 { fill: #989898 } + .terminal-1728001786-r4 { fill: #1e1e1e } + .terminal-1728001786-r5 { fill: #e1e1e1 } + .terminal-1728001786-r6 { fill: #507bb3 } + .terminal-1728001786-r7 { fill: #e2e2e2 } + .terminal-1728001786-r8 { fill: #808080 } + .terminal-1728001786-r9 { fill: #dde6ed;font-weight: bold } + .terminal-1728001786-r10 { fill: #001541 } + .terminal-1728001786-r11 { fill: #7ae998 } + .terminal-1728001786-r12 { fill: #0a180e;font-weight: bold } + .terminal-1728001786-r13 { fill: #008139 } + .terminal-1728001786-r14 { fill: #dde8f3;font-weight: bold } + .terminal-1728001786-r15 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + 
- + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▔▔▔▔▔▔▔▔ - Use reference genomesThe pipeline will be ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - ▁▁▁▁▁▁▁▁configured to use a Show help - copy of the most ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - common reference  - genome files from  - iGenomes - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Continue - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Use reference genomesThe pipeline will be Show help + ▁▁▁▁▁▁▁▁configured to use a copy ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + of the most common  + reference genome files  + from iGenomes + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Continue + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  D  Toggle dark mode  Q  Quit  @@ -2763,253 +2763,253 @@ font-weight: 700; } - .terminal-3185846603-matrix { + .terminal-2559761451-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3185846603-title { + .terminal-2559761451-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3185846603-r1 { fill: #c5c8c6 } - .terminal-3185846603-r2 { fill: #e3e3e3 } - .terminal-3185846603-r3 { fill: #989898 } - .terminal-3185846603-r4 { fill: #e1e1e1 } - .terminal-3185846603-r5 { fill: #121212 } - .terminal-3185846603-r6 { fill: #0053aa } - .terminal-3185846603-r7 { fill: #dde8f3;font-weight: bold } - .terminal-3185846603-r8 { fill: #a5a5a5;font-style: italic; } - .terminal-3185846603-r9 { fill: #1e1e1e } - .terminal-3185846603-r10 { fill: #0f4e2a } - .terminal-3185846603-r11 { fill: #7b3042 } - .terminal-3185846603-r12 { fill: #a7a7a7 } - .terminal-3185846603-r13 { fill: #787878 } - .terminal-3185846603-r14 { 
fill: #e2e2e2 } - .terminal-3185846603-r15 { fill: #b93c5b } - .terminal-3185846603-r16 { fill: #166d39 } - .terminal-3185846603-r17 { fill: #3c8b54;font-weight: bold } - .terminal-3185846603-r18 { fill: #5aa86f } - .terminal-3185846603-r19 { fill: #ddedf9 } + .terminal-2559761451-r1 { fill: #c5c8c6 } + .terminal-2559761451-r2 { fill: #e3e3e3 } + .terminal-2559761451-r3 { fill: #989898 } + .terminal-2559761451-r4 { fill: #e1e1e1 } + .terminal-2559761451-r5 { fill: #121212 } + .terminal-2559761451-r6 { fill: #0053aa } + .terminal-2559761451-r7 { fill: #dde8f3;font-weight: bold } + .terminal-2559761451-r8 { fill: #a5a5a5;font-style: italic; } + .terminal-2559761451-r9 { fill: #1e1e1e } + .terminal-2559761451-r10 { fill: #0f4e2a } + .terminal-2559761451-r11 { fill: #7b3042 } + .terminal-2559761451-r12 { fill: #a7a7a7 } + .terminal-2559761451-r13 { fill: #787878 } + .terminal-2559761451-r14 { fill: #e2e2e2 } + .terminal-2559761451-r15 { fill: #b93c5b } + .terminal-2559761451-r16 { fill: #166d39 } + .terminal-2559761451-r17 { fill: #3c8b54;font-weight: bold } + .terminal-2559761451-r18 { fill: #5aa86f } + .terminal-2559761451-r19 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Basic details - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - - GitHub organisationWorkflow name - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - nf-corePipeline Name - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - Value error, Must be lowercase without  - punctuation. 
- - A short description of your pipeline. - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Description - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - Value error, Cannot be left empty. - - Name of the main author / authors - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Author(s) - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - Value error, Cannot be left empty. - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Next - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Basic details + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + + GitHub organisationWorkflow name + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + nf-corePipeline Name + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + Value error, Must be lowercase without  + punctuation. + + A short description of your pipeline. + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Description + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + Value error, Cannot be left empty. + + Name of the main author / authors + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Author(s) + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + Value error, Cannot be left empty. 
+ ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Next + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  @@ -3039,144 +3039,145 @@ font-weight: 700; } - .terminal-1736361706-matrix { + .terminal-2319623653-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1736361706-title { + .terminal-2319623653-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1736361706-r1 { fill: #c5c8c6 } - .terminal-1736361706-r2 { fill: #e3e3e3 } - .terminal-1736361706-r3 { fill: #989898 } - .terminal-1736361706-r4 { fill: #e1e1e1 } - .terminal-1736361706-r5 { fill: #98a84b } - .terminal-1736361706-r6 { fill: #626262 } - .terminal-1736361706-r7 { fill: #608ab1 } - .terminal-1736361706-r8 { fill: #d0b344 } - .terminal-1736361706-r9 { fill: #121212 } - .terminal-1736361706-r10 { fill: #0053aa } - .terminal-1736361706-r11 { fill: #dde8f3;font-weight: bold } - .terminal-1736361706-r12 { fill: #e1e1e1;text-decoration: underline; } - .terminal-1736361706-r13 { fill: #14191f } - .terminal-1736361706-r14 { fill: #ddedf9 } + .terminal-2319623653-r1 { fill: #c5c8c6 } + .terminal-2319623653-r2 { fill: #e3e3e3 } + .terminal-2319623653-r3 { fill: #989898 } + .terminal-2319623653-r4 { fill: #1e1e1e } + .terminal-2319623653-r5 { fill: #e1e1e1 } + .terminal-2319623653-r6 { fill: #98a84b } + .terminal-2319623653-r7 { fill: #626262 } + .terminal-2319623653-r8 { fill: #608ab1 } + .terminal-2319623653-r9 { fill: #d0b344 } + .terminal-2319623653-r10 { fill: #121212 } + .terminal-2319623653-r11 { fill: #0053aa } + .terminal-2319623653-r12 { fill: #dde8f3;font-weight: bold } + .terminal-2319623653-r13 { fill: #e1e1e1;text-decoration: underline; } + .terminal-2319623653-r14 { fill: #14191f } + .terminal-2319623653-r15 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — 
Create a new pipeline with the nf-core pip… - -                                         ,--./,-. -         ___     __   __   __   ___     /,-._.--~\ - |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                        `._,._,' - - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - nf-core create - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - This app will help you create a new nf-core pipeline.It uses the  - nf-core pipeline template, which is keptwithin the nf-core/tools  - repository. - - Using this tool is mandatory when making a pipeline that maybe part  - of the nf-core community collection at some point.However, this tool  - can also be used to create pipelines that willnever be part of ▁▁ - nf-core. You can still benefit from the communitybest practices for  - your own workflow. - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pip… + +                                         ,--./,-. +         ___     __   __   __   ___     /,-._.--~\ + |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                        `._,._,' + + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + nf-core create + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + This app will help you create a new nf-core pipeline. It uses the  + nf-core pipeline template, which is kept within the nf-core/tools  + repository. + + Using this tool is mandatory when making a pipeline that may be part  + of the nf-core community collection at some point. However, this tool  + can also be used to create pipelines that will never be part of ▁▁ + nf-core. You can still benefit from the community best practices for  + your own workflow. 
+ +  D  Toggle dark mode  Q  Quit  From bebf067648cdf2df743e031467a85228184939f7 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 5 Jan 2024 11:14:35 +0100 Subject: [PATCH 046/737] ask if we have to create a github repo in a different screen --- nf_core/pipelines/create/__init__.py | 8 ++++ nf_core/pipelines/create/githubexit.py | 40 +++++++++++++++++++ nf_core/pipelines/create/githubrepo.py | 24 +---------- .../pipelines/create/githubrepoquestion.py | 31 ++++++++++++++ 4 files changed, 81 insertions(+), 22 deletions(-) create mode 100644 nf_core/pipelines/create/githubexit.py create mode 100644 nf_core/pipelines/create/githubrepoquestion.py diff --git a/nf_core/pipelines/create/__init__.py b/nf_core/pipelines/create/__init__.py index af615a1f0..062b3ba85 100644 --- a/nf_core/pipelines/create/__init__.py +++ b/nf_core/pipelines/create/__init__.py @@ -7,7 +7,9 @@ from nf_core.pipelines.create.basicdetails import BasicDetails from nf_core.pipelines.create.custompipeline import CustomPipeline from nf_core.pipelines.create.finaldetails import FinalDetails +from nf_core.pipelines.create.githubexit import GithubExit from nf_core.pipelines.create.githubrepo import GithubRepo +from nf_core.pipelines.create.githubrepoquestion import GithubRepoQuestion from nf_core.pipelines.create.nfcorepipeline import NfcorePipeline from nf_core.pipelines.create.pipelinetype import ChoosePipelineType from nf_core.pipelines.create.utils import ( @@ -43,7 +45,9 @@ class PipelineCreateApp(App[CreateConfig]): "type_custom": CustomPipeline(), "type_nfcore": NfcorePipeline(), "final_details": FinalDetails(), + "github_repo_question": GithubRepoQuestion(), "github_repo": GithubRepo(), + "github_exit": GithubExit(), } # Initialise config as empty @@ -73,7 +77,11 @@ def on_button_pressed(self, event: Button.Pressed) -> None: elif event.button.id == "continue": self.switch_screen("final_details") elif event.button.id == "close_screen": + self.switch_screen("github_repo_question") + elif 
event.button.id == "github_repo": self.switch_screen("github_repo") + elif event.button.id == "exit": + self.switch_screen("github_exit") if event.button.id == "close_app": self.exit(return_code=0) diff --git a/nf_core/pipelines/create/githubexit.py b/nf_core/pipelines/create/githubexit.py new file mode 100644 index 000000000..04fc7cb0b --- /dev/null +++ b/nf_core/pipelines/create/githubexit.py @@ -0,0 +1,40 @@ +from textual.app import ComposeResult +from textual.containers import Center +from textual.screen import Screen +from textual.widgets import Button, Footer, Header, Markdown, Static + +exit_help_text_markdown = """ +If you would like to create the GitHub repository later, you can do it manually by following these steps: + +1. Create a new GitHub repository +2. Add the remote to your local repository +```bash +cd +git remote add origin git@github.com:/.git +``` +3. Push the code to the remote +```bash +git push --all origin +``` +""" + + +class GithubExit(Screen): + """A screen to show a help text when a GitHub repo is NOT created.""" + + def compose(self) -> ComposeResult: + yield Header() + yield Footer() + yield Static( + f"\n[green]{' ' * 40},--.[grey39]/[green],-." 
+ + "\n[blue] ___ __ __ __ ___ [green]/,-._.--~\\" + + "\n[blue]|\ | |__ __ / ` / \ |__) |__ [yellow] } {" + + "\n[blue] | \| | \__, \__/ | \ |___ [green]\`-._,-`-," + + "\n[green] `._,._,'\n", + id="logo", + ) + yield Markdown(exit_help_text_markdown) + yield Center( + Button("Close App", id="close_app", variant="success"), + classes="cta", + ) diff --git a/nf_core/pipelines/create/githubrepo.py b/nf_core/pipelines/create/githubrepo.py index 2a98d8498..28f6521cf 100644 --- a/nf_core/pipelines/create/githubrepo.py +++ b/nf_core/pipelines/create/githubrepo.py @@ -23,19 +23,6 @@ repo_config_markdown = """ Please select the the GitHub repository settings: """ -exit_help_text_markdown = """ -If you would like to create the GitHub repository later, you can do it manually by following these steps: -1. Create a new GitHub repository -2. Add the remote to your local repository -```bash -cd -git remote add origin git@github.com:/.git -``` -3. Push the code to the remote -```bash -git push --all origin -``` -""" class GithubRepo(Screen): @@ -112,7 +99,6 @@ def on_button_pressed(self, event: Button.Pressed) -> None: raise UserWarning( f"Could not authenticate to GitHub with user name '{github_variables['gh_username']}'." "Please provide an authentication token or set the environment variable 'GITHUB_AUTH_TOKEN'." - f"\n{exit_help_text_markdown}" ) user = github_auth.get_user() @@ -125,7 +111,6 @@ def on_button_pressed(self, event: Button.Pressed) -> None: raise UserWarning( f"Could not authenticate to GitHub with user name '{github_variables['gh_username']}'." "Please make sure that the provided user name and token are correct." 
- f"\n{exit_help_text_markdown}" ) # Check if organisation exists @@ -155,13 +140,8 @@ def on_button_pressed(self, event: Button.Pressed) -> None: ) log.info(f"GitHub repository '{self.parent.TEMPLATE_CONFIG.name}' created successfully") except UserWarning as e: - log.info(f"There was an error with message: {e}" f"\n{exit_help_text_markdown}") - - self.parent.LOGGING_STATE = "repo created" - self.parent.switch_screen(LoggingScreen()) - elif event.button.id == "exit": - # Show help message and exit - log.info(exit_help_text_markdown) + log.info(f"There was an error with message: {e}") + self.parent.switch_screen("github_exit") self.parent.LOGGING_STATE = "repo created" self.parent.switch_screen(LoggingScreen()) diff --git a/nf_core/pipelines/create/githubrepoquestion.py b/nf_core/pipelines/create/githubrepoquestion.py new file mode 100644 index 000000000..ea3259710 --- /dev/null +++ b/nf_core/pipelines/create/githubrepoquestion.py @@ -0,0 +1,31 @@ +import logging +from textwrap import dedent + +from textual.app import ComposeResult +from textual.containers import Center +from textual.screen import Screen +from textual.widgets import Button, Footer, Header, Markdown + +log = logging.getLogger(__name__) + +github_text_markdown = """ +# Create a GitHub repo + +After creating the pipeline template locally, we can create a GitHub repository and push the code to it. + +Do you want to create a GitHub repository? 
+""" + + +class GithubRepoQuestion(Screen): + """Ask if the user wants to create a GitHub repository.""" + + def compose(self) -> ComposeResult: + yield Header() + yield Footer() + yield Markdown(dedent(github_text_markdown)) + yield Center( + Button("Create GitHub repo", id="github_repo", variant="success"), + Button("Finish without creating a repo", id="exit", variant="primary"), + classes="cta", + ) From 7ade926c426a1eb85eb53dba992dab2150640f9a Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 5 Jan 2024 11:22:14 +0100 Subject: [PATCH 047/737] add snapshot for github repo question and exit message --- tests/__snapshots__/test_create_app.ambr | 517 +++++++++++++++++------ tests/test_create_app.py | 42 +- 2 files changed, 432 insertions(+), 127 deletions(-) diff --git a/tests/__snapshots__/test_create_app.ambr b/tests/__snapshots__/test_create_app.ambr index 3c5a1b91c..b9673d488 100644 --- a/tests/__snapshots__/test_create_app.ambr +++ b/tests/__snapshots__/test_create_app.ambr @@ -1650,7 +1650,7 @@ ''' # --- -# name: test_logging_after_github +# name: test_github_exit_message ''' @@ -1673,250 +1673,521 @@ font-weight: 700; } - .terminal-3944300660-matrix { + .terminal-2007955284-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3944300660-title { + .terminal-2007955284-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3944300660-r1 { fill: #c5c8c6 } - .terminal-3944300660-r2 { fill: #e3e3e3 } - .terminal-3944300660-r3 { fill: #989898 } - .terminal-3944300660-r4 { fill: #e1e1e1 } - .terminal-3944300660-r5 { fill: #98a84b } - .terminal-3944300660-r6 { fill: #626262 } - .terminal-3944300660-r7 { fill: #608ab1 } - .terminal-3944300660-r8 { fill: #d0b344 } - .terminal-3944300660-r9 { fill: #121212 } - .terminal-3944300660-r10 { fill: #0053aa } - .terminal-3944300660-r11 { fill: #dde8f3;font-weight: bold } - .terminal-3944300660-r12 { fill: #7ae998 } - 
.terminal-3944300660-r13 { fill: #4ebf71;font-weight: bold } - .terminal-3944300660-r14 { fill: #008139 } - .terminal-3944300660-r15 { fill: #14191f } - .terminal-3944300660-r16 { fill: #ddedf9 } + .terminal-2007955284-r1 { fill: #c5c8c6 } + .terminal-2007955284-r2 { fill: #e3e3e3 } + .terminal-2007955284-r3 { fill: #989898 } + .terminal-2007955284-r4 { fill: #e1e1e1 } + .terminal-2007955284-r5 { fill: #98a84b } + .terminal-2007955284-r6 { fill: #626262 } + .terminal-2007955284-r7 { fill: #608ab1 } + .terminal-2007955284-r8 { fill: #d0b344 } + .terminal-2007955284-r9 { fill: #4ebf71;font-weight: bold } + .terminal-2007955284-r10 { fill: #d2d2d2 } + .terminal-2007955284-r11 { fill: #82aaff } + .terminal-2007955284-r12 { fill: #eeffff } + .terminal-2007955284-r13 { fill: #7ae998 } + .terminal-2007955284-r14 { fill: #008139 } + .terminal-2007955284-r15 { fill: #dde8f3;font-weight: bold } + .terminal-2007955284-r16 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - -                                         ,--./,-. -         ___     __   __   __   ___     /,-._.--~\ - |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                        `._,._,' - - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - nf-core create - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Visualising logging output. 
- - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Close App - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - ▂▂ - - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + +                                         ,--./,-. +         ___     __   __   __   ___     /,-._.--~\ + |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                        `._,._,' + + If you would like to create the GitHub repository later, you can do it manually by following + these steps: + +  1. Create a new GitHub repository +  2. Add the remote to your local repository + + + cd<pipeline_directory> + gitremoteaddorigingit@github.com:<username>/<repo_name>.git + + +  3. Push the code to the remote + + + gitpush--allorigin + + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Close App + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  + + + + + ''' +# --- +# name: test_github_question + ''' + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + nf-core create + + + + + + + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Create a GitHub repo + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + After creating the pipeline template locally, we can create a GitHub repository and push the + code to it. + + Do you want to create a GitHub repository? 
+ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Create GitHub repoFinish without creating a repo + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  diff --git a/tests/test_create_app.py b/tests/test_create_app.py index 8b22eabad..a244c8983 100644 --- a/tests/test_create_app.py +++ b/tests/test_create_app.py @@ -216,9 +216,39 @@ async def run_before(pilot) -> None: assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) +@mock.patch("nf_core.pipelines.create.create.PipelineCreate.init_pipeline", return_value=None) +def test_github_question(mock_init_pipeline, snap_compare): + """Test snapshot for the github_repo_question screen. + Steps to get to this screen: + screen welcome > press start > + screen choose_type > press nf-core > + screen basic_details > enter pipeline details > press next > + screen type_nfcore > press continue > + screen final_details > press finish > + screen logging_screen > press close_screen > + screen github_repo_question + """ + + async def run_before(pilot) -> None: + await pilot.click("#start") + await pilot.click("#type_nfcore") + await pilot.click("#name") + await pilot.press("m", "y", "p", "i", "p", "e", "l", "i", "n", "e") + await pilot.press("tab") + await pilot.press("A", " ", "c", "o", "o", "l", " ", "d", "e", "s", "c", "r", "i", "p", "t", "i", "o", "n") + await pilot.press("tab") + await pilot.press("M", "e") + await pilot.click("#next") + await pilot.click("#continue") + await pilot.click("#finish") + await pilot.click("#close_screen") + + assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) + + @mock.patch("nf_core.pipelines.create.create.PipelineCreate.init_pipeline", return_value=None) def test_github_details(mock_init_pipeline, snap_compare): - """Test snapshot for the final_details screen. 
+ """Test snapshot for the github_repo screen. Steps to get to this screen: screen welcome > press start > screen choose_type > press nf-core > @@ -226,6 +256,7 @@ def test_github_details(mock_init_pipeline, snap_compare): screen type_nfcore > press continue > screen final_details > press finish > screen logging_screen > press close_screen > + screen github_repo_question > press create repo > screen github_repo """ @@ -242,13 +273,14 @@ async def run_before(pilot) -> None: await pilot.click("#continue") await pilot.click("#finish") await pilot.click("#close_screen") + await pilot.click("#github_repo") assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) @mock.patch("nf_core.pipelines.create.create.PipelineCreate.init_pipeline", return_value=None) -def test_logging_after_github(mock_init_pipeline, snap_compare): - """Test snapshot for the final_details screen. +def test_github_exit_message(mock_init_pipeline, snap_compare): + """Test snapshot for the github_exit screen. 
Steps to get to this screen: screen welcome > press start > screen choose_type > press nf-core > @@ -256,8 +288,9 @@ def test_logging_after_github(mock_init_pipeline, snap_compare): screen type_nfcore > press continue > screen final_details > press finish > screen logging_screen > press close_screen > + screen github_repo_question > press create repo > screen github_repo > press exit (close without creating a repo) > - screen logging_screen + screen github_exit """ async def run_before(pilot) -> None: @@ -273,6 +306,7 @@ async def run_before(pilot) -> None: await pilot.click("#continue") await pilot.click("#finish") await pilot.click("#close_screen") + await pilot.click("#github_repo") await pilot.click("#exit") assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) From 70492774383466fca6d3ac8592d58a7af310befb Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 5 Jan 2024 14:25:16 +0100 Subject: [PATCH 048/737] add work threads for creating a pipeline and a github repo --- nf_core/pipelines/create/finaldetails.py | 11 ++++++++--- nf_core/pipelines/create/githubrepo.py | 8 +++++++- 2 files changed, 15 insertions(+), 4 deletions(-) diff --git a/nf_core/pipelines/create/finaldetails.py b/nf_core/pipelines/create/finaldetails.py index 23be7db89..70b5fa8bb 100644 --- a/nf_core/pipelines/create/finaldetails.py +++ b/nf_core/pipelines/create/finaldetails.py @@ -1,7 +1,7 @@ """A Textual app to create a pipeline.""" from textwrap import dedent -from textual import on +from textual import on, work from textual.app import ComposeResult from textual.containers import Center, Horizontal from textual.screen import Screen @@ -74,7 +74,12 @@ def on_button_pressed(self, event: Button.Pressed) -> None: pass # Create the new pipeline - create_obj = PipelineCreate(template_config=self.parent.TEMPLATE_CONFIG) - create_obj.init_pipeline() + self.create_pipeline() self.parent.LOGGING_STATE = "pipeline created" 
self.parent.switch_screen(LoggingScreen()) + + @work(thread=True) + def create_pipeline(self) -> None: + """Create the pipeline.""" + create_obj = PipelineCreate(template_config=self.parent.TEMPLATE_CONFIG) + create_obj.init_pipeline() diff --git a/nf_core/pipelines/create/githubrepo.py b/nf_core/pipelines/create/githubrepo.py index 28f6521cf..16465ab9b 100644 --- a/nf_core/pipelines/create/githubrepo.py +++ b/nf_core/pipelines/create/githubrepo.py @@ -1,10 +1,12 @@ import logging import os +from pathlib import Path from textwrap import dedent import git import yaml from github import Github, GithubException, UnknownObjectException +from textual import work from textual.app import ComposeResult from textual.containers import Center, Horizontal from textual.screen import Screen @@ -90,7 +92,10 @@ def on_button_pressed(self, event: Button.Pressed) -> None: github_variables[switch_input.id] = switch_input.value # Pipeline git repo - pipeline_repo = git.Repo.init(self.parent.TEMPLATE_CONFIG.outdir) + pipeline_repo = git.Repo.init( + Path(self.parent.TEMPLATE_CONFIG.outdir) + / Path(self.parent.TEMPLATE_CONFIG.org + "-" + self.parent.TEMPLATE_CONFIG.name) + ) # GitHub authentication if github_variables["token"]: @@ -146,6 +151,7 @@ def on_button_pressed(self, event: Button.Pressed) -> None: self.parent.LOGGING_STATE = "repo created" self.parent.switch_screen(LoggingScreen()) + @work(thread=True) def _create_repo_and_push(self, org, pipeline_repo, private, push): """Create a GitHub repository and push all branches.""" # Check if repo already exists From 41fcc27878517cd65085d2b8adf99e0c9a9b83bf Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 5 Jan 2024 15:31:24 +0100 Subject: [PATCH 049/737] add loading screen when creating a pipeline --- nf_core/pipelines/create/finaldetails.py | 12 ++++++++++++ requirements.txt | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/nf_core/pipelines/create/finaldetails.py b/nf_core/pipelines/create/finaldetails.py 
index 70b5fa8bb..9def58aed 100644 --- a/nf_core/pipelines/create/finaldetails.py +++ b/nf_core/pipelines/create/finaldetails.py @@ -4,6 +4,7 @@ from textual import on, work from textual.app import ComposeResult from textual.containers import Center, Horizontal +from textual.message import Message from textual.screen import Screen from textual.widgets import Button, Footer, Header, Input, Markdown, Static, Switch @@ -75,6 +76,16 @@ def on_button_pressed(self, event: Button.Pressed) -> None: # Create the new pipeline self.create_pipeline() + self.screen.loading = True + + class PipelineCreated(Message): + """Custom message to indicate that the pipeline has been created.""" + + pass + + @on(PipelineCreated) + def stop_loading(self) -> None: + self.screen.loading = False self.parent.LOGGING_STATE = "pipeline created" self.parent.switch_screen(LoggingScreen()) @@ -83,3 +94,4 @@ def create_pipeline(self) -> None: """Create the pipeline.""" create_obj = PipelineCreate(template_config=self.parent.TEMPLATE_CONFIG) create_obj.init_pipeline() + self.post_message(self.PipelineCreated()) diff --git a/requirements.txt b/requirements.txt index e953aa84a..d2fe53500 100644 --- a/requirements.txt +++ b/requirements.txt @@ -19,5 +19,5 @@ requests_cache rich-click>=1.6.1 rich>=13.3.1 tabulate -textual>=0.41.0 +textual>=0.47.1 pdiff From cfa0804e1010a924bc8ae71edbd7952194f64773 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 5 Jan 2024 15:49:46 +0100 Subject: [PATCH 050/737] show logging only at the end --- nf_core/pipelines/create/__init__.py | 7 +++---- nf_core/pipelines/create/finaldetails.py | 4 +--- nf_core/pipelines/create/githubexit.py | 1 + nf_core/pipelines/create/githubrepo.py | 4 +++- nf_core/pipelines/create/loggingscreen.py | 14 ++++---------- 5 files changed, 12 insertions(+), 18 deletions(-) diff --git a/nf_core/pipelines/create/__init__.py b/nf_core/pipelines/create/__init__.py index 062b3ba85..9261b78f5 100644 --- a/nf_core/pipelines/create/__init__.py +++ 
b/nf_core/pipelines/create/__init__.py @@ -10,6 +10,7 @@ from nf_core.pipelines.create.githubexit import GithubExit from nf_core.pipelines.create.githubrepo import GithubRepo from nf_core.pipelines.create.githubrepoquestion import GithubRepoQuestion +from nf_core.pipelines.create.loggingscreen import LoggingScreen from nf_core.pipelines.create.nfcorepipeline import NfcorePipeline from nf_core.pipelines.create.pipelinetype import ChoosePipelineType from nf_core.pipelines.create.utils import ( @@ -58,8 +59,6 @@ class PipelineCreateApp(App[CreateConfig]): # Log handler LOG_HANDLER = log_handler - # Logging state - LOGGING_STATE = None def on_mount(self) -> None: self.push_screen("welcome") @@ -76,12 +75,12 @@ def on_button_pressed(self, event: Button.Pressed) -> None: self.switch_screen("basic_details") elif event.button.id == "continue": self.switch_screen("final_details") - elif event.button.id == "close_screen": - self.switch_screen("github_repo_question") elif event.button.id == "github_repo": self.switch_screen("github_repo") elif event.button.id == "exit": self.switch_screen("github_exit") + elif event.button.id == "show_logging": + self.switch_screen(LoggingScreen()) if event.button.id == "close_app": self.exit(return_code=0) diff --git a/nf_core/pipelines/create/finaldetails.py b/nf_core/pipelines/create/finaldetails.py index 9def58aed..98df05ef0 100644 --- a/nf_core/pipelines/create/finaldetails.py +++ b/nf_core/pipelines/create/finaldetails.py @@ -9,7 +9,6 @@ from textual.widgets import Button, Footer, Header, Input, Markdown, Static, Switch from nf_core.pipelines.create.create import PipelineCreate -from nf_core.pipelines.create.loggingscreen import LoggingScreen from nf_core.pipelines.create.utils import TextInput @@ -86,8 +85,7 @@ class PipelineCreated(Message): @on(PipelineCreated) def stop_loading(self) -> None: self.screen.loading = False - self.parent.LOGGING_STATE = "pipeline created" - self.parent.switch_screen(LoggingScreen()) + 
self.parent.switch_screen("github_repo_question") @work(thread=True) def create_pipeline(self) -> None: diff --git a/nf_core/pipelines/create/githubexit.py b/nf_core/pipelines/create/githubexit.py index 04fc7cb0b..96178fbcd 100644 --- a/nf_core/pipelines/create/githubexit.py +++ b/nf_core/pipelines/create/githubexit.py @@ -36,5 +36,6 @@ def compose(self) -> ComposeResult: yield Markdown(exit_help_text_markdown) yield Center( Button("Close App", id="close_app", variant="success"), + Button("Show Logging", id="show_logging", variant="primary"), classes="cta", ) diff --git a/nf_core/pipelines/create/githubrepo.py b/nf_core/pipelines/create/githubrepo.py index 16465ab9b..0ef1b493b 100644 --- a/nf_core/pipelines/create/githubrepo.py +++ b/nf_core/pipelines/create/githubrepo.py @@ -148,7 +148,6 @@ def on_button_pressed(self, event: Button.Pressed) -> None: log.info(f"There was an error with message: {e}") self.parent.switch_screen("github_exit") - self.parent.LOGGING_STATE = "repo created" self.parent.switch_screen(LoggingScreen()) @work(thread=True) @@ -164,6 +163,9 @@ def _create_repo_and_push(self, org, pipeline_repo, private, push): except GithubException: # Repo is empty repo_exists = True + except UserWarning: + # Repo already exists + self.parent.switch_screen(LoggingScreen()) except UnknownObjectException: # Repo doesn't exist repo_exists = False diff --git a/nf_core/pipelines/create/loggingscreen.py b/nf_core/pipelines/create/loggingscreen.py index 2a59e2bcc..bed955e1b 100644 --- a/nf_core/pipelines/create/loggingscreen.py +++ b/nf_core/pipelines/create/loggingscreen.py @@ -25,14 +25,8 @@ def compose(self) -> ComposeResult: id="logo", ) yield Markdown(markdown) - if self.parent.LOGGING_STATE == "repo created": - yield Center( - Button("Close App", id="close_app", variant="success"), - classes="cta", - ) - else: - yield Center( - Button("Close logging screen", id="close_screen", variant="success"), - classes="cta", - ) + yield Center( + Button("Close App", 
id="close_app", variant="success"), + classes="cta", + ) yield Center(self.parent.LOG_HANDLER.console, classes="cta") From a85f1dd0f561c53895ac4fe0011276ae6b880da3 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 5 Jan 2024 16:05:05 +0100 Subject: [PATCH 051/737] add loading screen for repo creation --- nf_core/pipelines/create/githubrepo.py | 23 +++++++++++++++++++---- 1 file changed, 19 insertions(+), 4 deletions(-) diff --git a/nf_core/pipelines/create/githubrepo.py b/nf_core/pipelines/create/githubrepo.py index 0ef1b493b..005637abd 100644 --- a/nf_core/pipelines/create/githubrepo.py +++ b/nf_core/pipelines/create/githubrepo.py @@ -6,9 +6,10 @@ import git import yaml from github import Github, GithubException, UnknownObjectException -from textual import work +from textual import on, work from textual.app import ComposeResult from textual.containers import Center, Horizontal +from textual.message import Message from textual.screen import Screen from textual.widgets import Button, Footer, Header, Input, Markdown, Static, Switch @@ -135,6 +136,7 @@ def on_button_pressed(self, event: Button.Pressed) -> None: self._create_repo_and_push( org, pipeline_repo, github_variables["private"], github_variables["push"] ) + self.screen.loading = True else: # Create the repo in the user's account log.info( @@ -143,12 +145,21 @@ def on_button_pressed(self, event: Button.Pressed) -> None: self._create_repo_and_push( user, pipeline_repo, github_variables["private"], github_variables["push"] ) + self.screen.loading = True log.info(f"GitHub repository '{self.parent.TEMPLATE_CONFIG.name}' created successfully") except UserWarning as e: log.info(f"There was an error with message: {e}") self.parent.switch_screen("github_exit") - self.parent.switch_screen(LoggingScreen()) + class RepoCreated(Message): + """Custom message to indicate that the GitHub repo has been created.""" + + pass + + @on(RepoCreated) + def stop_loading(self) -> None: + self.screen.loading = False + 
self.parent.switch_screen(LoggingScreen()) @work(thread=True) def _create_repo_and_push(self, org, pipeline_repo, private, push): @@ -163,9 +174,11 @@ def _create_repo_and_push(self, org, pipeline_repo, private, push): except GithubException: # Repo is empty repo_exists = True - except UserWarning: + except UserWarning as e: # Repo already exists - self.parent.switch_screen(LoggingScreen()) + self.post_message(self.RepoCreated()) + log.info(e) + return except UnknownObjectException: # Repo doesn't exist repo_exists = False @@ -185,6 +198,8 @@ def _create_repo_and_push(self, org, pipeline_repo, private, push): if push: pipeline_repo.remotes.origin.push(all=True).raise_if_error() + self.post_message(self.RepoCreated()) + def _github_authentication(self, gh_username, gh_token): """Authenticate to GitHub""" log.debug(f"Authenticating GitHub as {gh_username}") From 456a4be22b7ad0a733725db3da7c78059ee6dca7 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 5 Jan 2024 16:12:19 +0100 Subject: [PATCH 052/737] update app snapshots --- tests/__snapshots__/test_create_app.ambr | 520 ++++++----------------- tests/test_create_app.py | 34 -- 2 files changed, 125 insertions(+), 429 deletions(-) diff --git a/tests/__snapshots__/test_create_app.ambr b/tests/__snapshots__/test_create_app.ambr index b9673d488..ceb7d8204 100644 --- a/tests/__snapshots__/test_create_app.ambr +++ b/tests/__snapshots__/test_create_app.ambr @@ -1673,250 +1673,253 @@ font-weight: 700; } - .terminal-2007955284-matrix { + .terminal-1481614550-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2007955284-title { + .terminal-1481614550-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2007955284-r1 { fill: #c5c8c6 } - .terminal-2007955284-r2 { fill: #e3e3e3 } - .terminal-2007955284-r3 { fill: #989898 } - .terminal-2007955284-r4 { fill: #e1e1e1 } - .terminal-2007955284-r5 { fill: #98a84b } - 
.terminal-2007955284-r6 { fill: #626262 } - .terminal-2007955284-r7 { fill: #608ab1 } - .terminal-2007955284-r8 { fill: #d0b344 } - .terminal-2007955284-r9 { fill: #4ebf71;font-weight: bold } - .terminal-2007955284-r10 { fill: #d2d2d2 } - .terminal-2007955284-r11 { fill: #82aaff } - .terminal-2007955284-r12 { fill: #eeffff } - .terminal-2007955284-r13 { fill: #7ae998 } - .terminal-2007955284-r14 { fill: #008139 } - .terminal-2007955284-r15 { fill: #dde8f3;font-weight: bold } - .terminal-2007955284-r16 { fill: #ddedf9 } + .terminal-1481614550-r1 { fill: #c5c8c6 } + .terminal-1481614550-r2 { fill: #e3e3e3 } + .terminal-1481614550-r3 { fill: #989898 } + .terminal-1481614550-r4 { fill: #e1e1e1 } + .terminal-1481614550-r5 { fill: #98a84b } + .terminal-1481614550-r6 { fill: #626262 } + .terminal-1481614550-r7 { fill: #608ab1 } + .terminal-1481614550-r8 { fill: #d0b344 } + .terminal-1481614550-r9 { fill: #4ebf71;font-weight: bold } + .terminal-1481614550-r10 { fill: #d2d2d2 } + .terminal-1481614550-r11 { fill: #82aaff } + .terminal-1481614550-r12 { fill: #eeffff } + .terminal-1481614550-r13 { fill: #7ae998 } + .terminal-1481614550-r14 { fill: #507bb3 } + .terminal-1481614550-r15 { fill: #dde6ed;font-weight: bold } + .terminal-1481614550-r16 { fill: #008139 } + .terminal-1481614550-r17 { fill: #001541 } + .terminal-1481614550-r18 { fill: #dde8f3;font-weight: bold } + .terminal-1481614550-r19 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - -                                         ,--./,-. 
-         ___     __   __   __   ___     /,-._.--~\ - |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                        `._,._,' - - If you would like to create the GitHub repository later, you can do it manually by following - these steps: - -  1. Create a new GitHub repository -  2. Add the remote to your local repository - - - cd<pipeline_directory> - gitremoteaddorigingit@github.com:<username>/<repo_name>.git - - -  3. Push the code to the remote - - - gitpush--allorigin - - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Close App - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + +                                         ,--./,-. +         ___     __   __   __   ___     /,-._.--~\ + |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                        `._,._,' + + If you would like to create the GitHub repository later, you can do it manually by following + these steps: + +  1. Create a new GitHub repository +  2. Add the remote to your local repository + + + cd<pipeline_directory> + gitremoteaddorigingit@github.com:<username>/<repo_name>.git + + +  3. 
Push the code to the remote + + + gitpush--allorigin + + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Close AppShow Logging + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  @@ -2194,279 +2197,6 @@ ''' # --- -# name: test_logging_pipeline_created - ''' - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - nf-core create - - - - - - - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - -                                         ,--./,-. -         ___     __   __   __   ___     /,-._.--~\ - |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                        `._,._,' - - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - nf-core create - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Visualising logging output. - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Close logging screen - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - ▂▂ - - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  - - - - - ''' -# --- # name: test_type_custom ''' diff --git a/tests/test_create_app.py b/tests/test_create_app.py index a244c8983..d226ea2da 100644 --- a/tests/test_create_app.py +++ b/tests/test_create_app.py @@ -188,34 +188,6 @@ async def run_before(pilot) -> None: assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) -@mock.patch("nf_core.pipelines.create.create.PipelineCreate.init_pipeline", return_value=None) -def test_logging_pipeline_created(mock_init_pipeline, snap_compare): - """Test snapshot for the final_details screen. 
- Steps to get to this screen: - screen welcome > press start > - screen choose_type > press nf-core > - screen basic_details > enter pipeline details > press next > - screen type_nfcore > press continue > - screen final_details > press finish > - screen logging_screen - """ - - async def run_before(pilot) -> None: - await pilot.click("#start") - await pilot.click("#type_nfcore") - await pilot.click("#name") - await pilot.press("m", "y", "p", "i", "p", "e", "l", "i", "n", "e") - await pilot.press("tab") - await pilot.press("A", " ", "c", "o", "o", "l", " ", "d", "e", "s", "c", "r", "i", "p", "t", "i", "o", "n") - await pilot.press("tab") - await pilot.press("M", "e") - await pilot.click("#next") - await pilot.click("#continue") - await pilot.click("#finish") - - assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) - - @mock.patch("nf_core.pipelines.create.create.PipelineCreate.init_pipeline", return_value=None) def test_github_question(mock_init_pipeline, snap_compare): """Test snapshot for the github_repo_question screen. 
@@ -225,7 +197,6 @@ def test_github_question(mock_init_pipeline, snap_compare): screen basic_details > enter pipeline details > press next > screen type_nfcore > press continue > screen final_details > press finish > - screen logging_screen > press close_screen > screen github_repo_question """ @@ -241,7 +212,6 @@ async def run_before(pilot) -> None: await pilot.click("#next") await pilot.click("#continue") await pilot.click("#finish") - await pilot.click("#close_screen") assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) @@ -255,7 +225,6 @@ def test_github_details(mock_init_pipeline, snap_compare): screen basic_details > enter pipeline details > press next > screen type_nfcore > press continue > screen final_details > press finish > - screen logging_screen > press close_screen > screen github_repo_question > press create repo > screen github_repo """ @@ -272,7 +241,6 @@ async def run_before(pilot) -> None: await pilot.click("#next") await pilot.click("#continue") await pilot.click("#finish") - await pilot.click("#close_screen") await pilot.click("#github_repo") assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) @@ -287,7 +255,6 @@ def test_github_exit_message(mock_init_pipeline, snap_compare): screen basic_details > enter pipeline details > press next > screen type_nfcore > press continue > screen final_details > press finish > - screen logging_screen > press close_screen > screen github_repo_question > press create repo > screen github_repo > press exit (close without creating a repo) > screen github_exit @@ -305,7 +272,6 @@ async def run_before(pilot) -> None: await pilot.click("#next") await pilot.click("#continue") await pilot.click("#finish") - await pilot.click("#close_screen") await pilot.click("#github_repo") await pilot.click("#exit") From bb6cc970c43a83d0551c0d3bedfe3daef2c29353 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 8 Jan 
2024 09:48:06 +0100 Subject: [PATCH 053/737] fix typing error --- nf_core/pipelines/create/utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/pipelines/create/utils.py b/nf_core/pipelines/create/utils.py index 7842888f0..afb561741 100644 --- a/nf_core/pipelines/create/utils.py +++ b/nf_core/pipelines/create/utils.py @@ -1,7 +1,7 @@ import re from logging import LogRecord from pathlib import Path -from typing import Optional +from typing import Optional, Union from pydantic import BaseModel, ConfigDict, ValidationError, field_validator from rich.logging import RichHandler @@ -95,7 +95,7 @@ def compose(self) -> ComposeResult: @on(Input.Changed) @on(Input.Submitted) - def show_invalid_reasons(self, event: Input.Changed | Input.Submitted) -> None: + def show_invalid_reasons(self, event: Union[Input.Changed, Input.Submitted]) -> None: """Validate the text input and show errors if invalid.""" if not event.validation_result.is_valid: self.query_one(".validation_msg").update("\n".join(event.validation_result.failure_descriptions)) From abdffef1ceb21eb6492a7ec6bae2da8921092b80 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 8 Jan 2024 09:58:02 +0100 Subject: [PATCH 054/737] ignroe snapshot files with editorconfig --- .editorconfig | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/.editorconfig b/.editorconfig index 014c2383b..c60811206 100644 --- a/.editorconfig +++ b/.editorconfig @@ -10,3 +10,11 @@ indent_style = space [*.{md,yml,yaml,html,css,scss,js,cff}] indent_size = 2 + +[tests/__snapshots__/*] +charset = unset +end_of_line = unset +insert_final_newline = unset +trim_trailing_whitespace = unset +indent_style = unset +indent_size = unset From 2e396d9569553fb349a6d5ee9106a802481345d3 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 8 Jan 2024 13:12:29 +0100 Subject: [PATCH 055/737] add exclusive=True to work threads --- nf_core/pipelines/create/create.tcss | 2 ++ nf_core/pipelines/create/finaldetails.py | 6 +++--- 
nf_core/pipelines/create/githubrepo.py | 2 +- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/nf_core/pipelines/create/create.tcss b/nf_core/pipelines/create/create.tcss index df37e50ed..bb3690d81 100644 --- a/nf_core/pipelines/create/create.tcss +++ b/nf_core/pipelines/create/create.tcss @@ -94,3 +94,5 @@ HorizontalScroll { .displayed #hide_password { display: block; } + + diff --git a/nf_core/pipelines/create/finaldetails.py b/nf_core/pipelines/create/finaldetails.py index 98df05ef0..2fa25bec0 100644 --- a/nf_core/pipelines/create/finaldetails.py +++ b/nf_core/pipelines/create/finaldetails.py @@ -74,7 +74,7 @@ def on_button_pressed(self, event: Button.Pressed) -> None: pass # Create the new pipeline - self.create_pipeline() + self._create_pipeline() self.screen.loading = True class PipelineCreated(Message): @@ -87,8 +87,8 @@ def stop_loading(self) -> None: self.screen.loading = False self.parent.switch_screen("github_repo_question") - @work(thread=True) - def create_pipeline(self) -> None: + @work(thread=True, exclusive=True) + def _create_pipeline(self) -> None: """Create the pipeline.""" create_obj = PipelineCreate(template_config=self.parent.TEMPLATE_CONFIG) create_obj.init_pipeline() diff --git a/nf_core/pipelines/create/githubrepo.py b/nf_core/pipelines/create/githubrepo.py index 005637abd..f614d7d27 100644 --- a/nf_core/pipelines/create/githubrepo.py +++ b/nf_core/pipelines/create/githubrepo.py @@ -161,7 +161,7 @@ def stop_loading(self) -> None: self.screen.loading = False self.parent.switch_screen(LoggingScreen()) - @work(thread=True) + @work(thread=True, exclusive=True) def _create_repo_and_push(self, org, pipeline_repo, private, push): """Create a GitHub repository and push all branches.""" # Check if repo already exists From 33294113722c1d7c245030e89ca5046e1a370235 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 8 Jan 2024 14:05:21 +0100 Subject: [PATCH 056/737] remove logging and add completed screen instead --- 
nf_core/pipelines/create/__init__.py | 24 +- nf_core/pipelines/create/completed.py | 40 +++ nf_core/pipelines/create/githubexit.py | 1 - nf_core/pipelines/create/githubrepo.py | 3 +- .../pipelines/create/githubrepoquestion.py | 3 - nf_core/pipelines/create/loggingscreen.py | 32 --- nf_core/pipelines/create/utils.py | 27 +- tests/__snapshots__/test_create_app.ambr | 247 +++++++++--------- 8 files changed, 167 insertions(+), 210 deletions(-) create mode 100644 nf_core/pipelines/create/completed.py delete mode 100644 nf_core/pipelines/create/loggingscreen.py diff --git a/nf_core/pipelines/create/__init__.py b/nf_core/pipelines/create/__init__.py index 9261b78f5..f7b76ebae 100644 --- a/nf_core/pipelines/create/__init__.py +++ b/nf_core/pipelines/create/__init__.py @@ -1,33 +1,19 @@ """A Textual app to create a pipeline.""" -import logging - from textual.app import App from textual.widgets import Button from nf_core.pipelines.create.basicdetails import BasicDetails +from nf_core.pipelines.create.completed import Completed from nf_core.pipelines.create.custompipeline import CustomPipeline from nf_core.pipelines.create.finaldetails import FinalDetails from nf_core.pipelines.create.githubexit import GithubExit from nf_core.pipelines.create.githubrepo import GithubRepo from nf_core.pipelines.create.githubrepoquestion import GithubRepoQuestion -from nf_core.pipelines.create.loggingscreen import LoggingScreen from nf_core.pipelines.create.nfcorepipeline import NfcorePipeline from nf_core.pipelines.create.pipelinetype import ChoosePipelineType -from nf_core.pipelines.create.utils import ( - CreateConfig, - CustomLogHandler, - LoggingConsole, -) +from nf_core.pipelines.create.utils import CreateConfig from nf_core.pipelines.create.welcome import WelcomeScreen -log_handler = CustomLogHandler(console=LoggingConsole(), rich_tracebacks=True) -logging.basicConfig( - level="INFO", - handlers=[log_handler], - format="%(message)s", -) -log_handler.setLevel("INFO") - class 
PipelineCreateApp(App[CreateConfig]): """A Textual app to manage stopwatches.""" @@ -49,6 +35,7 @@ class PipelineCreateApp(App[CreateConfig]): "github_repo_question": GithubRepoQuestion(), "github_repo": GithubRepo(), "github_exit": GithubExit(), + "completed_screen": Completed(), } # Initialise config as empty @@ -57,9 +44,6 @@ class PipelineCreateApp(App[CreateConfig]): # Initialise pipeline type PIPELINE_TYPE = None - # Log handler - LOG_HANDLER = log_handler - def on_mount(self) -> None: self.push_screen("welcome") @@ -79,8 +63,6 @@ def on_button_pressed(self, event: Button.Pressed) -> None: self.switch_screen("github_repo") elif event.button.id == "exit": self.switch_screen("github_exit") - elif event.button.id == "show_logging": - self.switch_screen(LoggingScreen()) if event.button.id == "close_app": self.exit(return_code=0) diff --git a/nf_core/pipelines/create/completed.py b/nf_core/pipelines/create/completed.py new file mode 100644 index 000000000..282dd7688 --- /dev/null +++ b/nf_core/pipelines/create/completed.py @@ -0,0 +1,40 @@ +from textwrap import dedent + +from textual.app import ComposeResult +from textual.containers import Center +from textual.screen import Screen +from textual.widgets import Button, Footer, Header, Markdown, Static + + +class Completed(Screen): + """A screen to show the final text and exit the app.""" + + def compose(self) -> ComposeResult: + yield Header() + yield Footer() + yield Static( + f"\n[green]{' ' * 40},--.[grey39]/[green],-." + + "\n[blue] ___ __ __ __ ___ [green]/,-._.--~\\" + + "\n[blue]|\ | |__ __ / ` / \ |__) |__ [yellow] } {" + + "\n[blue] | \| | \__, \__/ | \ |___ [green]\`-._,-`-," + + "\n[green] `._,._,'\n", + id="logo", + ) + + completed_text_markdown = f""" + - A pipeline has been created at '`{self.parent.TEMPLATE_CONFIG.outdir + "/" + self.parent.TEMPLATE_CONFIG.org + "-" + self.parent.TEMPLATE_CONFIG.name}`'. 
+ - A GitHub repository '`{self.parent.TEMPLATE_CONFIG.name}`' has been created in the {"user's" if self.parent.TEMPLATE_CONFIG.org == "nf-core" else ""} GitHub organisation account{ " `" + self.parent.TEMPLATE_CONFIG.org + "`" if self.parent.TEMPLATE_CONFIG.org != "nf-core" else ""}. + + !!!!!! IMPORTANT !!!!!! + + If you are interested in adding your pipeline to the nf-core community, + PLEASE COME AND TALK TO US IN THE NF-CORE SLACK BEFORE WRITING ANY CODE! + + - Please read: [https://nf-co.re/developers/adding_pipelines#join-the-community](https://nf-co.re/developers/adding_pipelines#join-the-community) + """ + + yield Markdown(dedent(completed_text_markdown)) + yield Center( + Button("Close App", id="close_app", variant="success"), + classes="cta", + ) diff --git a/nf_core/pipelines/create/githubexit.py b/nf_core/pipelines/create/githubexit.py index 96178fbcd..04fc7cb0b 100644 --- a/nf_core/pipelines/create/githubexit.py +++ b/nf_core/pipelines/create/githubexit.py @@ -36,6 +36,5 @@ def compose(self) -> ComposeResult: yield Markdown(exit_help_text_markdown) yield Center( Button("Close App", id="close_app", variant="success"), - Button("Show Logging", id="show_logging", variant="primary"), classes="cta", ) diff --git a/nf_core/pipelines/create/githubrepo.py b/nf_core/pipelines/create/githubrepo.py index f614d7d27..74e96a742 100644 --- a/nf_core/pipelines/create/githubrepo.py +++ b/nf_core/pipelines/create/githubrepo.py @@ -13,7 +13,6 @@ from textual.screen import Screen from textual.widgets import Button, Footer, Header, Input, Markdown, Static, Switch -from nf_core.pipelines.create.loggingscreen import LoggingScreen from nf_core.pipelines.create.utils import TextInput log = logging.getLogger(__name__) @@ -159,7 +158,7 @@ class RepoCreated(Message): @on(RepoCreated) def stop_loading(self) -> None: self.screen.loading = False - self.parent.switch_screen(LoggingScreen()) + self.parent.switch_screen("completed_screen") @work(thread=True, exclusive=True) def 
_create_repo_and_push(self, org, pipeline_repo, private, push): diff --git a/nf_core/pipelines/create/githubrepoquestion.py b/nf_core/pipelines/create/githubrepoquestion.py index ea3259710..72c5c4a81 100644 --- a/nf_core/pipelines/create/githubrepoquestion.py +++ b/nf_core/pipelines/create/githubrepoquestion.py @@ -1,4 +1,3 @@ -import logging from textwrap import dedent from textual.app import ComposeResult @@ -6,8 +5,6 @@ from textual.screen import Screen from textual.widgets import Button, Footer, Header, Markdown -log = logging.getLogger(__name__) - github_text_markdown = """ # Create a GitHub repo diff --git a/nf_core/pipelines/create/loggingscreen.py b/nf_core/pipelines/create/loggingscreen.py deleted file mode 100644 index bed955e1b..000000000 --- a/nf_core/pipelines/create/loggingscreen.py +++ /dev/null @@ -1,32 +0,0 @@ -from textual.app import ComposeResult -from textual.containers import Center -from textual.screen import Screen -from textual.widgets import Button, Footer, Header, Markdown, Static - -markdown = """ -# nf-core create - -Visualising logging output. -""" - - -class LoggingScreen(Screen): - """A screen to show the final logs.""" - - def compose(self) -> ComposeResult: - yield Header() - yield Footer() - yield Static( - f"\n[green]{' ' * 40},--.[grey39]/[green],-." 
- + "\n[blue] ___ __ __ __ ___ [green]/,-._.--~\\" - + "\n[blue]|\ | |__ __ / ` / \ |__) |__ [yellow] } {" - + "\n[blue] | \| | \__, \__/ | \ |___ [green]\`-._,-`-," - + "\n[green] `._,._,'\n", - id="logo", - ) - yield Markdown(markdown) - yield Center( - Button("Close App", id="close_app", variant="success"), - classes="cta", - ) - yield Center(self.parent.LOG_HANDLER.console, classes="cta") diff --git a/nf_core/pipelines/create/utils.py b/nf_core/pipelines/create/utils.py index afb561741..5c3b995b1 100644 --- a/nf_core/pipelines/create/utils.py +++ b/nf_core/pipelines/create/utils.py @@ -1,17 +1,13 @@ import re -from logging import LogRecord from pathlib import Path from typing import Optional, Union from pydantic import BaseModel, ConfigDict, ValidationError, field_validator -from rich.logging import RichHandler from textual import on -from textual._context import active_app from textual.app import ComposeResult from textual.containers import HorizontalScroll from textual.validation import ValidationResult, Validator -from textual.widget import Widget -from textual.widgets import Button, Input, Markdown, RichLog, Static, Switch +from textual.widgets import Button, Input, Markdown, Static, Switch class CreateConfig(BaseModel): @@ -172,27 +168,6 @@ def compose(self) -> ComposeResult: yield HelpText(markdown=self.markdown, classes="help_box") -class LoggingConsole(RichLog): - file = False - console: Widget - - def print(self, content): - self.write(content) - - -class CustomLogHandler(RichHandler): - """A Logging handler which extends RichHandler to write to a Widget and handle a Textual App.""" - - def emit(self, record: LogRecord) -> None: - """Invoked by logging.""" - try: - _app = active_app.get() - except LookupError: - pass - else: - super().emit(record) - - ## Markdown text to reuse in different screens markdown_genomes = """ Nf-core pipelines are configured to use a copy of the most common reference genome files. 
diff --git a/tests/__snapshots__/test_create_app.ambr b/tests/__snapshots__/test_create_app.ambr index ceb7d8204..98bed6fc7 100644 --- a/tests/__snapshots__/test_create_app.ambr +++ b/tests/__snapshots__/test_create_app.ambr @@ -1673,253 +1673,250 @@ font-weight: 700; } - .terminal-1481614550-matrix { + .terminal-2007955284-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1481614550-title { + .terminal-2007955284-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1481614550-r1 { fill: #c5c8c6 } - .terminal-1481614550-r2 { fill: #e3e3e3 } - .terminal-1481614550-r3 { fill: #989898 } - .terminal-1481614550-r4 { fill: #e1e1e1 } - .terminal-1481614550-r5 { fill: #98a84b } - .terminal-1481614550-r6 { fill: #626262 } - .terminal-1481614550-r7 { fill: #608ab1 } - .terminal-1481614550-r8 { fill: #d0b344 } - .terminal-1481614550-r9 { fill: #4ebf71;font-weight: bold } - .terminal-1481614550-r10 { fill: #d2d2d2 } - .terminal-1481614550-r11 { fill: #82aaff } - .terminal-1481614550-r12 { fill: #eeffff } - .terminal-1481614550-r13 { fill: #7ae998 } - .terminal-1481614550-r14 { fill: #507bb3 } - .terminal-1481614550-r15 { fill: #dde6ed;font-weight: bold } - .terminal-1481614550-r16 { fill: #008139 } - .terminal-1481614550-r17 { fill: #001541 } - .terminal-1481614550-r18 { fill: #dde8f3;font-weight: bold } - .terminal-1481614550-r19 { fill: #ddedf9 } + .terminal-2007955284-r1 { fill: #c5c8c6 } + .terminal-2007955284-r2 { fill: #e3e3e3 } + .terminal-2007955284-r3 { fill: #989898 } + .terminal-2007955284-r4 { fill: #e1e1e1 } + .terminal-2007955284-r5 { fill: #98a84b } + .terminal-2007955284-r6 { fill: #626262 } + .terminal-2007955284-r7 { fill: #608ab1 } + .terminal-2007955284-r8 { fill: #d0b344 } + .terminal-2007955284-r9 { fill: #4ebf71;font-weight: bold } + .terminal-2007955284-r10 { fill: #d2d2d2 } + .terminal-2007955284-r11 { fill: #82aaff } + .terminal-2007955284-r12 { 
fill: #eeffff } + .terminal-2007955284-r13 { fill: #7ae998 } + .terminal-2007955284-r14 { fill: #008139 } + .terminal-2007955284-r15 { fill: #dde8f3;font-weight: bold } + .terminal-2007955284-r16 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - -                                         ,--./,-. -         ___     __   __   __   ___     /,-._.--~\ - |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                        `._,._,' - - If you would like to create the GitHub repository later, you can do it manually by following - these steps: - -  1. Create a new GitHub repository -  2. Add the remote to your local repository - - - cd<pipeline_directory> - gitremoteaddorigingit@github.com:<username>/<repo_name>.git - - -  3. Push the code to the remote - - - gitpush--allorigin - - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Close AppShow Logging - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + +                                         ,--./,-. +         ___     __   __   __   ___     /,-._.--~\ + |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                        `._,._,' + + If you would like to create the GitHub repository later, you can do it manually by following + these steps: + +  1. Create a new GitHub repository +  2. Add the remote to your local repository + + + cd<pipeline_directory> + gitremoteaddorigingit@github.com:<username>/<repo_name>.git + + +  3. 
Push the code to the remote + + + gitpush--allorigin + + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Close App + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  From cbce046a32f6a4c69de508f0ad42e2372672c386 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 8 Jan 2024 15:24:06 +0100 Subject: [PATCH 057/737] show an error message if the pipeline already exists --- nf_core/pipelines/create/__init__.py | 2 ++ nf_core/pipelines/create/create.py | 3 +-- nf_core/pipelines/create/error.py | 33 ++++++++++++++++++++++++ nf_core/pipelines/create/finaldetails.py | 18 +++++++++++-- 4 files changed, 52 insertions(+), 4 deletions(-) create mode 100644 nf_core/pipelines/create/error.py diff --git a/nf_core/pipelines/create/__init__.py b/nf_core/pipelines/create/__init__.py index f7b76ebae..2f5b7b043 100644 --- a/nf_core/pipelines/create/__init__.py +++ b/nf_core/pipelines/create/__init__.py @@ -5,6 +5,7 @@ from nf_core.pipelines.create.basicdetails import BasicDetails from nf_core.pipelines.create.completed import Completed from nf_core.pipelines.create.custompipeline import CustomPipeline +from nf_core.pipelines.create.error import ExistError from nf_core.pipelines.create.finaldetails import FinalDetails from nf_core.pipelines.create.githubexit import GithubExit from nf_core.pipelines.create.githubrepo import GithubRepo @@ -36,6 +37,7 @@ class PipelineCreateApp(App[CreateConfig]): "github_repo": GithubRepo(), "github_exit": GithubExit(), "completed_screen": Completed(), + "error_screen": ExistError(), } # Initialise config as empty diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 51c115e2c..e9761bcc4 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -7,7 +7,6 @@ import random import re import shutil -import sys import time from pathlib import Path from typing import Optional, Union @@ -279,7 +278,7 @@ def render_template(self): else: log.error(f"Output directory 
'{self.outdir}' exists!") log.info("Use -f / --force to overwrite existing files") - sys.exit(1) + raise UserWarning(f"Output directory '{self.outdir}' exists!") os.makedirs(self.outdir) # Run jinja2 for each file in the template folder diff --git a/nf_core/pipelines/create/error.py b/nf_core/pipelines/create/error.py new file mode 100644 index 000000000..a1d94e62f --- /dev/null +++ b/nf_core/pipelines/create/error.py @@ -0,0 +1,33 @@ +from textwrap import dedent + +from textual.app import ComposeResult +from textual.containers import Center +from textual.screen import Screen +from textual.widgets import Button, Footer, Header, Markdown, Static + + +class ExistError(Screen): + """A screen to show the final text and exit the app - when an error ocurred.""" + + def compose(self) -> ComposeResult: + yield Header() + yield Footer() + yield Static( + f"\n[green]{' ' * 40},--.[grey39]/[green],-." + + "\n[blue] ___ __ __ __ ___ [green]/,-._.--~\\" + + "\n[blue]|\ | |__ __ / ` / \ |__) |__ [yellow] } {" + + "\n[blue] | \| | \__, \__/ | \ |___ [green]\`-._,-`-," + + "\n[green] `._,._,'\n", + id="logo", + ) + + completed_text_markdown = f""" + A pipeline '`{self.parent.TEMPLATE_CONFIG.outdir + "/" + self.parent.TEMPLATE_CONFIG.org + "-" + self.parent.TEMPLATE_CONFIG.name}`' already exists. + Please select a different name or `force` the creation of the pipeline to override the existing one. 
+ """ + + yield Markdown(dedent(completed_text_markdown)) + yield Center( + Button("Close App", id="close_app", variant="success"), + classes="cta", + ) diff --git a/nf_core/pipelines/create/finaldetails.py b/nf_core/pipelines/create/finaldetails.py index 2fa25bec0..4e16be097 100644 --- a/nf_core/pipelines/create/finaldetails.py +++ b/nf_core/pipelines/create/finaldetails.py @@ -82,14 +82,28 @@ class PipelineCreated(Message): pass + class PipelineExists(Message): + """Custom message to indicate that the pipeline already exists.""" + + pass + @on(PipelineCreated) def stop_loading(self) -> None: self.screen.loading = False self.parent.switch_screen("github_repo_question") + @on(PipelineExists) + def stop_loading_error(self) -> None: + self.screen.loading = False + self.parent.switch_screen("error_screen") + @work(thread=True, exclusive=True) def _create_pipeline(self) -> None: """Create the pipeline.""" create_obj = PipelineCreate(template_config=self.parent.TEMPLATE_CONFIG) - create_obj.init_pipeline() - self.post_message(self.PipelineCreated()) + try: + create_obj.init_pipeline() + except UserWarning: + self.post_message(self.PipelineExists()) + else: + self.post_message(self.PipelineCreated()) From 62c4b3ec6df0d0e4d3d8613864fc58852181ed48 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 8 Jan 2024 15:29:25 +0100 Subject: [PATCH 058/737] fix nf-core.pipelines.create import --- tests/utils.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/utils.py b/tests/utils.py index 89c132881..9a0fd0896 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -9,8 +9,8 @@ import responses -import nf_core.create import nf_core.modules +import nf_core.pipelines.create.create OLD_TRIMGALORE_SHA = "9b7a3bdefeaad5d42324aa7dd50f87bea1b04386" OLD_TRIMGALORE_BRANCH = "mimic-old-trimgalore" @@ -102,8 +102,8 @@ def create_tmp_pipeline() -> Tuple[str, str, str, str]: pipeline_name = "mypipeline" pipeline_dir = os.path.join(tmp_dir, pipeline_name) - 
nf_core.create.PipelineCreate( - pipeline_name, "it is mine", "me", no_git=True, outdir=pipeline_dir, plain=True + nf_core.pipelines.create.create.PipelineCreate( + pipeline_name, "it is mine", "me", no_git=True, outdir=pipeline_dir ).init_pipeline() # return values to instance variables for later use in test methods From f04ad5ea2a0526e5ab66d516bdeb62a371284088 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 8 Jan 2024 15:44:46 +0100 Subject: [PATCH 059/737] GHA use new nf-core pipelines create command and fixed default org --- .github/workflows/create-lint-wf.yml | 2 +- .github/workflows/create-test-lint-wf-template.yml | 2 +- .github/workflows/create-test-wf.yml | 2 +- nf_core/__main__.py | 5 +++-- nf_core/pipelines/create/create.py | 2 +- 5 files changed, 7 insertions(+), 6 deletions(-) diff --git a/.github/workflows/create-lint-wf.yml b/.github/workflows/create-lint-wf.yml index 0119efcd4..ed1b0c4c6 100644 --- a/.github/workflows/create-lint-wf.yml +++ b/.github/workflows/create-lint-wf.yml @@ -59,7 +59,7 @@ jobs: run: | mkdir create-lint-wf && cd create-lint-wf export NXF_WORK=$(pwd) - nf-core --log-file log.txt create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" --plain + nf-core --log-file log.txt pipelines create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" # Try syncing it before we change anything - name: nf-core sync diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index 3805c1a24..758291be3 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -91,7 +91,7 @@ jobs: - name: create a pipeline from the template ${{ matrix.TEMPLATE }} run: | cd create-test-lint-wf - nf-core --log-file log.txt create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" --template-yaml ${{ matrix.TEMPLATE }} + nf-core --log-file log.txt pipelines create -n 
testpipeline -d "This pipeline is for testing" -a "Testing McTestface" --template-yaml ${{ matrix.TEMPLATE }} - name: run the pipeline run: | diff --git a/.github/workflows/create-test-wf.yml b/.github/workflows/create-test-wf.yml index e128e16a3..0d03decd1 100644 --- a/.github/workflows/create-test-wf.yml +++ b/.github/workflows/create-test-wf.yml @@ -54,7 +54,7 @@ jobs: run: | mkdir create-test-wf && cd create-test-wf export NXF_WORK=$(pwd) - nf-core --log-file log.txt create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" --plain + nf-core --log-file log.txt pipelines create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" nextflow run nf-core-testpipeline -profile test,self_hosted_runner --outdir ./results - name: Upload log file artifact diff --git a/nf_core/__main__.py b/nf_core/__main__.py index dfe1616ea..013a7eb33 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -470,14 +470,15 @@ def pipelines(ctx): ) @click.option("-d", "--description", type=str, help="A short description of your pipeline") @click.option("-a", "--author", type=str, help="Name of the main author(s)") -@click.option("--version", type=str, help="The initial version number to use") +@click.option("--version", type=str, default="v1.0.0dev", help="The initial version number to use") @click.option("-f", "--force", is_flag=True, default=False, help="Overwrite output directory if it already exists") @click.option("-o", "--outdir", help="Output directory for new pipeline (default: pipeline name)") @click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") @click.option( "--organisation", type=str, - help="The name of the GitHub organisation where the pipeline will be hosted (default: nf-core", + default="nf-core", + help="The name of the GitHub organisation where the pipeline will be hosted (default: nf-core)", ) def create_pipeline(ctx, name, description, author, version, force, outdir, template_yaml, 
organisation): """ diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index e9761bcc4..e5efe9451 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -49,7 +49,7 @@ def __init__( name: Optional[str] = None, description: Optional[str] = None, author: Optional[str] = None, - version: str = "1.0dev", + version: str = "1.0.0dev", no_git: bool = False, force: bool = False, outdir: Optional[str] = None, From baa3412c3d53099f3357a72824b1e39a552b1ae3 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 8 Jan 2024 16:37:28 +0100 Subject: [PATCH 060/737] fix pytests --- nf_core/__main__.py | 4 ++-- nf_core/pipelines/create/create.py | 2 +- tests/lint/multiqc_config.py | 5 +++-- tests/test_cli.py | 6 +++--- tests/test_modules.py | 10 +++------- tests/test_subworkflows.py | 9 ++------- 6 files changed, 14 insertions(+), 22 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 013a7eb33..7089cb3a4 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -470,7 +470,7 @@ def pipelines(ctx): ) @click.option("-d", "--description", type=str, help="A short description of your pipeline") @click.option("-a", "--author", type=str, help="Name of the main author(s)") -@click.option("--version", type=str, default="v1.0.0dev", help="The initial version number to use") +@click.option("--version", type=str, default="1.0dev", help="The initial version number to use") @click.option("-f", "--force", is_flag=True, default=False, help="Overwrite output directory if it already exists") @click.option("-o", "--outdir", help="Output directory for new pipeline (default: pipeline name)") @click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") @@ -508,7 +508,7 @@ def create_pipeline(ctx, name, description, author, version, force, outdir, temp except UserWarning as e: log.error(e) sys.exit(1) - elif name or description or author or version or force or outdir or organisation: 
+ elif name or description or author or version != "1.0dev" or force or outdir or organisation != "nf-core": log.error( "Command arguments are not accepted in interactive mode.\n" "Run with all command line arguments to avoid using an interactive interface" diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index e5efe9451..e9761bcc4 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -49,7 +49,7 @@ def __init__( name: Optional[str] = None, description: Optional[str] = None, author: Optional[str] = None, - version: str = "1.0.0dev", + version: str = "1.0dev", no_git: bool = False, force: bool = False, outdir: Optional[str] = None, diff --git a/tests/lint/multiqc_config.py b/tests/lint/multiqc_config.py index 721560ce8..84eba1594 100644 --- a/tests/lint/multiqc_config.py +++ b/tests/lint/multiqc_config.py @@ -48,7 +48,7 @@ def test_multiqc_incorrect_export_plots(self): # Reset the file with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh: yaml.safe_dump(mqc_yml_tmp, fh) - assert result["failed"] == ["'assets/multiqc_config.yml' does not contain 'export_plots: true'."] + assert "'assets/multiqc_config.yml' does not contain 'export_plots: true'." in result["failed"] def test_multiqc_config_report_comment_fail(self): @@ -103,4 +103,5 @@ def test_multiqc_config_report_comment_release_succeed(self): # lint again lint_obj._load() result = lint_obj.multiqc_config() - assert "'assets/multiqc_config.yml' contains a matching 'report_comment'." 
in result["passed"] + print(result["passed"]) + assert "'assets/multiqc_config.yml' contains `report_comment`" in result["passed"] diff --git a/tests/test_cli.py b/tests/test_cli.py index 1c110cd6e..c75a0ebc1 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -247,9 +247,9 @@ def test_create(self, mock_create): params["description"], params["author"], force="force" in params, - version=None, + version="1.0dev", outdir=params["outdir"], - organisation=None, + organisation="nf-core", ) mock_create.return_value.init_pipeline.assert_called_once() @@ -272,7 +272,7 @@ def test_create_app(self, mock_create): cmd = ["pipelines", "create"] result = self.invoke_cli(cmd) - assert result.exit_code == 0 + assert result.return_value == (0 or None) assert "Launching interactive nf-core pipeline creation tool." in result.output mock_create.assert_called_once_with() diff --git a/tests/test_modules.py b/tests/test_modules.py index 9c045f9d1..faa5499ca 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -20,6 +20,7 @@ GITLAB_URL, OLD_TRIMGALORE_BRANCH, OLD_TRIMGALORE_SHA, + create_tmp_pipeline, mock_anaconda_api_calls, mock_biocontainers_api_calls, ) @@ -84,13 +85,8 @@ def setUp(self): self.component_type = "modules" # Set up the schema - root_repo_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) - self.template_dir = os.path.join(root_repo_dir, "nf_core", "pipeline-template") - self.pipeline_name = "mypipeline" - self.pipeline_dir = os.path.join(self.tmp_dir, self.pipeline_name) - nf_core.pipelines.create.create.PipelineCreate( - self.pipeline_name, "it is mine", "me", no_git=True, outdir=self.pipeline_dir - ).init_pipeline() + self.tmp_dir, self.template_dir, self.pipeline_name, self.pipeline_dir = create_tmp_pipeline() + # Set up install objects self.mods_install = nf_core.modules.ModuleInstall(self.pipeline_dir, prompt=False, force=True) self.mods_install_old = nf_core.modules.ModuleInstall( diff --git a/tests/test_subworkflows.py 
b/tests/test_subworkflows.py index a7f387661..dd9af04d9 100644 --- a/tests/test_subworkflows.py +++ b/tests/test_subworkflows.py @@ -15,6 +15,7 @@ GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH, GITLAB_URL, OLD_SUBWORKFLOWS_SHA, + create_tmp_pipeline, ) @@ -49,13 +50,7 @@ def setUp(self): self.component_type = "subworkflows" # Set up the pipeline structure - root_repo_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) - self.template_dir = os.path.join(root_repo_dir, "nf_core", "pipeline-template") - self.pipeline_name = "mypipeline" - self.pipeline_dir = os.path.join(self.tmp_dir, self.pipeline_name) - nf_core.pipelines.create.create.PipelineCreate( - self.pipeline_name, "it is mine", "me", no_git=True, outdir=self.pipeline_dir - ).init_pipeline() + self.tmp_dir, self.template_dir, self.pipeline_name, self.pipeline_dir = create_tmp_pipeline() # Set up the nf-core/modules repo dummy self.nfcore_modules = create_modules_repo_dummy(self.tmp_dir) From acd72010d65d4c784357e93fb72d1a4b49bedc34 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 9 Jan 2024 12:52:06 +0100 Subject: [PATCH 061/737] add loading messages --- nf_core/pipelines/create/create.tcss | 5 ++++- nf_core/pipelines/create/finaldetails.py | 12 +++++++++++- nf_core/pipelines/create/githubrepo.py | 12 +++++++++++- 3 files changed, 26 insertions(+), 3 deletions(-) diff --git a/nf_core/pipelines/create/create.tcss b/nf_core/pipelines/create/create.tcss index bb3690d81..c1c1974aa 100644 --- a/nf_core/pipelines/create/create.tcss +++ b/nf_core/pipelines/create/create.tcss @@ -95,4 +95,7 @@ HorizontalScroll { display: block; } - +/* Loading message */ +LoadingIndicator { + border: solid white; +} diff --git a/nf_core/pipelines/create/finaldetails.py b/nf_core/pipelines/create/finaldetails.py index 4e16be097..9df7d23c2 100644 --- a/nf_core/pipelines/create/finaldetails.py +++ b/nf_core/pipelines/create/finaldetails.py @@ -6,7 +6,16 @@ from textual.containers import Center, Horizontal from textual.message 
import Message from textual.screen import Screen -from textual.widgets import Button, Footer, Header, Input, Markdown, Static, Switch +from textual.widgets import ( + Button, + Footer, + Header, + Input, + LoadingIndicator, + Markdown, + Static, + Switch, +) from nf_core.pipelines.create.create import PipelineCreate from nf_core.pipelines.create.utils import TextInput @@ -102,6 +111,7 @@ def _create_pipeline(self) -> None: """Create the pipeline.""" create_obj = PipelineCreate(template_config=self.parent.TEMPLATE_CONFIG) try: + self.query_one(LoadingIndicator).border_title = "Creating pipeline..." create_obj.init_pipeline() except UserWarning: self.post_message(self.PipelineExists()) diff --git a/nf_core/pipelines/create/githubrepo.py b/nf_core/pipelines/create/githubrepo.py index 74e96a742..fb8e84cc0 100644 --- a/nf_core/pipelines/create/githubrepo.py +++ b/nf_core/pipelines/create/githubrepo.py @@ -11,7 +11,16 @@ from textual.containers import Center, Horizontal from textual.message import Message from textual.screen import Screen -from textual.widgets import Button, Footer, Header, Input, Markdown, Static, Switch +from textual.widgets import ( + Button, + Footer, + Header, + Input, + LoadingIndicator, + Markdown, + Static, + Switch, +) from nf_core.pipelines.create.utils import TextInput @@ -163,6 +172,7 @@ def stop_loading(self) -> None: @work(thread=True, exclusive=True) def _create_repo_and_push(self, org, pipeline_repo, private, push): """Create a GitHub repository and push all branches.""" + self.query_one(LoadingIndicator).border_title = "Creating GitHub repo..." 
# Check if repo already exists try: repo = org.get_repo(self.parent.TEMPLATE_CONFIG.name) From 6b4ed9c09373a5f0675717c04a6496173f469b98 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 9 Jan 2024 12:59:34 +0100 Subject: [PATCH 062/737] default version to 1.0.0dev --- nf_core/__main__.py | 4 +- nf_core/pipelines/create/create.py | 6 +- nf_core/pipelines/create/finaldetails.py | 2 +- tests/__snapshots__/test_create_app.ambr | 242 +++++++++++------------ tests/lint/version_consistency.py | 2 +- tests/test_cli.py | 2 +- 6 files changed, 129 insertions(+), 129 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 7089cb3a4..ca4003d6f 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -470,7 +470,7 @@ def pipelines(ctx): ) @click.option("-d", "--description", type=str, help="A short description of your pipeline") @click.option("-a", "--author", type=str, help="Name of the main author(s)") -@click.option("--version", type=str, default="1.0dev", help="The initial version number to use") +@click.option("--version", type=str, default="1.0.0dev", help="The initial version number to use") @click.option("-f", "--force", is_flag=True, default=False, help="Overwrite output directory if it already exists") @click.option("-o", "--outdir", help="Output directory for new pipeline (default: pipeline name)") @click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") @@ -508,7 +508,7 @@ def create_pipeline(ctx, name, description, author, version, force, outdir, temp except UserWarning as e: log.error(e) sys.exit(1) - elif name or description or author or version != "1.0dev" or force or outdir or organisation != "nf-core": + elif name or description or author or version != "1.0.0dev" or force or outdir or organisation != "nf-core": log.error( "Command arguments are not accepted in interactive mode.\n" "Run with all command line arguments to avoid using an interactive interface" diff --git 
a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index e9761bcc4..dc0c6c796 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -33,7 +33,7 @@ class PipelineCreate: name (str): Name for the pipeline. description (str): Description for the pipeline. author (str): Authors name of the pipeline. - version (str): Version flag. Semantic versioning only. Defaults to `1.0dev`. + version (str): Version flag. Semantic versioning only. Defaults to `1.0.0dev`. no_git (bool): Prevents the creation of a local Git repository for the pipeline. Defaults to False. force (bool): Overwrites a given workflow directory with the same name. Defaults to False. Used for tests and sync command. May the force be with you. @@ -49,7 +49,7 @@ def __init__( name: Optional[str] = None, description: Optional[str] = None, author: Optional[str] = None, - version: str = "1.0dev", + version: str = "1.0.0dev", no_git: bool = False, force: bool = False, outdir: Optional[str] = None, @@ -167,7 +167,7 @@ def update_config(self, organisation, version, force, outdir): if self.config.org is None: self.config.org = organisation if self.config.version is None: - self.config.version = version if version else "1.0dev" + self.config.version = version if version else "1.0.0dev" if self.config.force is None: self.config.force = force if force else False if self.config.outdir is None: diff --git a/nf_core/pipelines/create/finaldetails.py b/nf_core/pipelines/create/finaldetails.py index 9df7d23c2..5eb3122e6 100644 --- a/nf_core/pipelines/create/finaldetails.py +++ b/nf_core/pipelines/create/finaldetails.py @@ -40,7 +40,7 @@ def compose(self) -> ComposeResult: "version", "Version", "First version of the pipeline", - "1.0dev", + "1.0.0dev", classes="column", ) yield TextInput( diff --git a/tests/__snapshots__/test_create_app.ambr b/tests/__snapshots__/test_create_app.ambr index 98bed6fc7..f72b73537 100644 --- a/tests/__snapshots__/test_create_app.ambr +++ 
b/tests/__snapshots__/test_create_app.ambr @@ -1123,249 +1123,249 @@ font-weight: 700; } - .terminal-1778650725-matrix { + .terminal-3890482819-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1778650725-title { + .terminal-3890482819-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1778650725-r1 { fill: #c5c8c6 } - .terminal-1778650725-r2 { fill: #e3e3e3 } - .terminal-1778650725-r3 { fill: #989898 } - .terminal-1778650725-r4 { fill: #e1e1e1 } - .terminal-1778650725-r5 { fill: #121212 } - .terminal-1778650725-r6 { fill: #0053aa } - .terminal-1778650725-r7 { fill: #dde8f3;font-weight: bold } - .terminal-1778650725-r8 { fill: #a5a5a5;font-style: italic; } - .terminal-1778650725-r9 { fill: #1e1e1e } - .terminal-1778650725-r10 { fill: #008139 } - .terminal-1778650725-r11 { fill: #e2e2e2 } - .terminal-1778650725-r12 { fill: #b93c5b } - .terminal-1778650725-r13 { fill: #7ae998 } - .terminal-1778650725-r14 { fill: #0a180e;font-weight: bold } - .terminal-1778650725-r15 { fill: #ddedf9 } + .terminal-3890482819-r1 { fill: #c5c8c6 } + .terminal-3890482819-r2 { fill: #e3e3e3 } + .terminal-3890482819-r3 { fill: #989898 } + .terminal-3890482819-r4 { fill: #e1e1e1 } + .terminal-3890482819-r5 { fill: #121212 } + .terminal-3890482819-r6 { fill: #0053aa } + .terminal-3890482819-r7 { fill: #dde8f3;font-weight: bold } + .terminal-3890482819-r8 { fill: #a5a5a5;font-style: italic; } + .terminal-3890482819-r9 { fill: #1e1e1e } + .terminal-3890482819-r10 { fill: #008139 } + .terminal-3890482819-r11 { fill: #e2e2e2 } + .terminal-3890482819-r12 { fill: #b93c5b } + .terminal-3890482819-r13 { fill: #7ae998 } + .terminal-3890482819-r14 { fill: #0a180e;font-weight: bold } + .terminal-3890482819-r15 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - 
+ - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Final details - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - - First version of the pipelinePath to the output directory where the pipeline  - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔will be created - 1.0dev▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁. - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ▔▔▔▔▔▔▔▔If the pipeline output directory exists, remove it and continue. - - ▁▁▁▁▁▁▁▁ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Finish - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Final details + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + + First version of the pipelinePath to the output directory where the pipeline  + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔will be created + 1.0.0dev▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁. + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + ▔▔▔▔▔▔▔▔If the pipeline output directory exists, remove it and continue. 
+ + ▁▁▁▁▁▁▁▁ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Finish + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  diff --git a/tests/lint/version_consistency.py b/tests/lint/version_consistency.py index 6f70d67c4..4763020fb 100644 --- a/tests/lint/version_consistency.py +++ b/tests/lint/version_consistency.py @@ -11,4 +11,4 @@ def test_version_consistency(self): result = lint_obj.version_consistency() assert result["passed"] == ["Version tags are numeric and consistent between container, release tag and config."] - assert result["failed"] == ["manifest.version was not numeric: 1.0dev!"] + assert result["failed"] == ["manifest.version was not numeric: 1.0.0dev!"] diff --git a/tests/test_cli.py b/tests/test_cli.py index c75a0ebc1..f76b5dcd9 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -247,7 +247,7 @@ def test_create(self, mock_create): params["description"], params["author"], force="force" in params, - version="1.0dev", + version="1.0.0dev", outdir=params["outdir"], organisation="nf-core", ) From 97bca8b8804d85d409d5d451f1d259a28acefaf0 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 19 Jan 2024 09:04:02 +0100 Subject: [PATCH 063/737] add logging instead of loading screen and see logging messages on real time --- nf_core/pipelines/create/__init__.py | 26 +- nf_core/pipelines/create/create.tcss | 5 - nf_core/pipelines/create/finaldetails.py | 34 +-- nf_core/pipelines/create/githubexit.py | 1 + nf_core/pipelines/create/githubrepo.py | 37 +-- .../pipelines/create/githubrepoquestion.py | 3 + nf_core/pipelines/create/loggingscreen.py | 38 +++ nf_core/pipelines/create/utils.py | 34 ++- tests/__snapshots__/test_create_app.ambr | 247 +++++++++--------- tests/test_create_app.py | 9 +- 10 files changed, 246 insertions(+), 188 deletions(-) create mode 100644 nf_core/pipelines/create/loggingscreen.py diff --git a/nf_core/pipelines/create/__init__.py b/nf_core/pipelines/create/__init__.py index 2f5b7b043..259fd9e33 100644 --- 
a/nf_core/pipelines/create/__init__.py +++ b/nf_core/pipelines/create/__init__.py @@ -1,4 +1,6 @@ """A Textual app to create a pipeline.""" +import logging + from textual.app import App from textual.widgets import Button @@ -10,11 +12,24 @@ from nf_core.pipelines.create.githubexit import GithubExit from nf_core.pipelines.create.githubrepo import GithubRepo from nf_core.pipelines.create.githubrepoquestion import GithubRepoQuestion +from nf_core.pipelines.create.loggingscreen import LoggingScreen from nf_core.pipelines.create.nfcorepipeline import NfcorePipeline from nf_core.pipelines.create.pipelinetype import ChoosePipelineType -from nf_core.pipelines.create.utils import CreateConfig +from nf_core.pipelines.create.utils import ( + CreateConfig, + CustomLogHandler, + LoggingConsole, +) from nf_core.pipelines.create.welcome import WelcomeScreen +log_handler = CustomLogHandler(console=LoggingConsole(), rich_tracebacks=True) +logging.basicConfig( + level="INFO", + handlers=[log_handler], + format="%(message)s", +) +log_handler.setLevel("INFO") + class PipelineCreateApp(App[CreateConfig]): """A Textual app to manage stopwatches.""" @@ -46,6 +61,11 @@ class PipelineCreateApp(App[CreateConfig]): # Initialise pipeline type PIPELINE_TYPE = None + # Log handler + LOG_HANDLER = log_handler + # Logging state + LOGGING_STATE = None + def on_mount(self) -> None: self.push_screen("welcome") @@ -63,8 +83,12 @@ def on_button_pressed(self, event: Button.Pressed) -> None: self.switch_screen("final_details") elif event.button.id == "github_repo": self.switch_screen("github_repo") + elif event.button.id == "close_screen": + self.switch_screen("github_repo_question") elif event.button.id == "exit": self.switch_screen("github_exit") + elif event.button.id == "show_logging": + self.switch_screen(LoggingScreen()) if event.button.id == "close_app": self.exit(return_code=0) diff --git a/nf_core/pipelines/create/create.tcss b/nf_core/pipelines/create/create.tcss index c1c1974aa..df37e50ed 
100644 --- a/nf_core/pipelines/create/create.tcss +++ b/nf_core/pipelines/create/create.tcss @@ -94,8 +94,3 @@ HorizontalScroll { .displayed #hide_password { display: block; } - -/* Loading message */ -LoadingIndicator { - border: solid white; -} diff --git a/nf_core/pipelines/create/finaldetails.py b/nf_core/pipelines/create/finaldetails.py index 5eb3122e6..7186fd2b6 100644 --- a/nf_core/pipelines/create/finaldetails.py +++ b/nf_core/pipelines/create/finaldetails.py @@ -6,19 +6,11 @@ from textual.containers import Center, Horizontal from textual.message import Message from textual.screen import Screen -from textual.widgets import ( - Button, - Footer, - Header, - Input, - LoadingIndicator, - Markdown, - Static, - Switch, -) +from textual.widgets import Button, Footer, Header, Input, Markdown, Static, Switch from nf_core.pipelines.create.create import PipelineCreate -from nf_core.pipelines.create.utils import TextInput +from nf_core.pipelines.create.loggingscreen import LoggingScreen +from nf_core.pipelines.create.utils import ShowLogs, TextInput class FinalDetails(Screen): @@ -84,36 +76,24 @@ def on_button_pressed(self, event: Button.Pressed) -> None: # Create the new pipeline self._create_pipeline() - self.screen.loading = True - - class PipelineCreated(Message): - """Custom message to indicate that the pipeline has been created.""" - - pass + self.parent.LOGGING_STATE = "pipeline created" + self.parent.switch_screen(LoggingScreen()) class PipelineExists(Message): """Custom message to indicate that the pipeline already exists.""" pass - @on(PipelineCreated) - def stop_loading(self) -> None: - self.screen.loading = False - self.parent.switch_screen("github_repo_question") - @on(PipelineExists) - def stop_loading_error(self) -> None: - self.screen.loading = False + def show_pipeline_error(self) -> None: self.parent.switch_screen("error_screen") @work(thread=True, exclusive=True) def _create_pipeline(self) -> None: """Create the pipeline.""" + 
self.post_message(ShowLogs()) create_obj = PipelineCreate(template_config=self.parent.TEMPLATE_CONFIG) try: - self.query_one(LoadingIndicator).border_title = "Creating pipeline..." create_obj.init_pipeline() except UserWarning: self.post_message(self.PipelineExists()) - else: - self.post_message(self.PipelineCreated()) diff --git a/nf_core/pipelines/create/githubexit.py b/nf_core/pipelines/create/githubexit.py index 04fc7cb0b..96178fbcd 100644 --- a/nf_core/pipelines/create/githubexit.py +++ b/nf_core/pipelines/create/githubexit.py @@ -36,5 +36,6 @@ def compose(self) -> ComposeResult: yield Markdown(exit_help_text_markdown) yield Center( Button("Close App", id="close_app", variant="success"), + Button("Show Logging", id="show_logging", variant="primary"), classes="cta", ) diff --git a/nf_core/pipelines/create/githubrepo.py b/nf_core/pipelines/create/githubrepo.py index fb8e84cc0..22d52f451 100644 --- a/nf_core/pipelines/create/githubrepo.py +++ b/nf_core/pipelines/create/githubrepo.py @@ -6,23 +6,14 @@ import git import yaml from github import Github, GithubException, UnknownObjectException -from textual import on, work +from textual import work from textual.app import ComposeResult from textual.containers import Center, Horizontal -from textual.message import Message from textual.screen import Screen -from textual.widgets import ( - Button, - Footer, - Header, - Input, - LoadingIndicator, - Markdown, - Static, - Switch, -) - -from nf_core.pipelines.create.utils import TextInput +from textual.widgets import Button, Footer, Header, Input, Markdown, Static, Switch + +from nf_core.pipelines.create.loggingscreen import LoggingScreen +from nf_core.pipelines.create.utils import ShowLogs, TextInput log = logging.getLogger(__name__) @@ -144,7 +135,6 @@ def on_button_pressed(self, event: Button.Pressed) -> None: self._create_repo_and_push( org, pipeline_repo, github_variables["private"], github_variables["push"] ) - self.screen.loading = True else: # Create the repo in the 
user's account log.info( @@ -153,26 +143,18 @@ def on_button_pressed(self, event: Button.Pressed) -> None: self._create_repo_and_push( user, pipeline_repo, github_variables["private"], github_variables["push"] ) - self.screen.loading = True log.info(f"GitHub repository '{self.parent.TEMPLATE_CONFIG.name}' created successfully") except UserWarning as e: log.info(f"There was an error with message: {e}") self.parent.switch_screen("github_exit") - class RepoCreated(Message): - """Custom message to indicate that the GitHub repo has been created.""" - - pass - - @on(RepoCreated) - def stop_loading(self) -> None: - self.screen.loading = False - self.parent.switch_screen("completed_screen") + self.parent.LOGGING_STATE = "repo created" + self.parent.switch_screen(LoggingScreen()) @work(thread=True, exclusive=True) def _create_repo_and_push(self, org, pipeline_repo, private, push): """Create a GitHub repository and push all branches.""" - self.query_one(LoadingIndicator).border_title = "Creating GitHub repo..." 
+ self.post_message(ShowLogs()) # Check if repo already exists try: repo = org.get_repo(self.parent.TEMPLATE_CONFIG.name) @@ -185,7 +167,6 @@ def _create_repo_and_push(self, org, pipeline_repo, private, push): repo_exists = True except UserWarning as e: # Repo already exists - self.post_message(self.RepoCreated()) log.info(e) return except UnknownObjectException: @@ -207,8 +188,6 @@ def _create_repo_and_push(self, org, pipeline_repo, private, push): if push: pipeline_repo.remotes.origin.push(all=True).raise_if_error() - self.post_message(self.RepoCreated()) - def _github_authentication(self, gh_username, gh_token): """Authenticate to GitHub""" log.debug(f"Authenticating GitHub as {gh_username}") diff --git a/nf_core/pipelines/create/githubrepoquestion.py b/nf_core/pipelines/create/githubrepoquestion.py index 72c5c4a81..ea3259710 100644 --- a/nf_core/pipelines/create/githubrepoquestion.py +++ b/nf_core/pipelines/create/githubrepoquestion.py @@ -1,3 +1,4 @@ +import logging from textwrap import dedent from textual.app import ComposeResult @@ -5,6 +6,8 @@ from textual.screen import Screen from textual.widgets import Button, Footer, Header, Markdown +log = logging.getLogger(__name__) + github_text_markdown = """ # Create a GitHub repo diff --git a/nf_core/pipelines/create/loggingscreen.py b/nf_core/pipelines/create/loggingscreen.py new file mode 100644 index 000000000..2a59e2bcc --- /dev/null +++ b/nf_core/pipelines/create/loggingscreen.py @@ -0,0 +1,38 @@ +from textual.app import ComposeResult +from textual.containers import Center +from textual.screen import Screen +from textual.widgets import Button, Footer, Header, Markdown, Static + +markdown = """ +# nf-core create + +Visualising logging output. +""" + + +class LoggingScreen(Screen): + """A screen to show the final logs.""" + + def compose(self) -> ComposeResult: + yield Header() + yield Footer() + yield Static( + f"\n[green]{' ' * 40},--.[grey39]/[green],-." 
+ + "\n[blue] ___ __ __ __ ___ [green]/,-._.--~\\" + + "\n[blue]|\ | |__ __ / ` / \ |__) |__ [yellow] } {" + + "\n[blue] | \| | \__, \__/ | \ |___ [green]\`-._,-`-," + + "\n[green] `._,._,'\n", + id="logo", + ) + yield Markdown(markdown) + if self.parent.LOGGING_STATE == "repo created": + yield Center( + Button("Close App", id="close_app", variant="success"), + classes="cta", + ) + else: + yield Center( + Button("Close logging screen", id="close_screen", variant="success"), + classes="cta", + ) + yield Center(self.parent.LOG_HANDLER.console, classes="cta") diff --git a/nf_core/pipelines/create/utils.py b/nf_core/pipelines/create/utils.py index 5c3b995b1..f3474e01c 100644 --- a/nf_core/pipelines/create/utils.py +++ b/nf_core/pipelines/create/utils.py @@ -1,13 +1,18 @@ import re +from logging import LogRecord from pathlib import Path from typing import Optional, Union from pydantic import BaseModel, ConfigDict, ValidationError, field_validator +from rich.logging import RichHandler from textual import on +from textual._context import active_app from textual.app import ComposeResult from textual.containers import HorizontalScroll +from textual.message import Message from textual.validation import ValidationResult, Validator -from textual.widgets import Button, Input, Markdown, Static, Switch +from textual.widget import Widget +from textual.widgets import Button, Input, Markdown, RichLog, Static, Switch class CreateConfig(BaseModel): @@ -168,6 +173,33 @@ def compose(self) -> ComposeResult: yield HelpText(markdown=self.markdown, classes="help_box") +class LoggingConsole(RichLog): + file = False + console: Widget + + def print(self, content): + self.write(content) + + +class CustomLogHandler(RichHandler): + """A Logging handler which extends RichHandler to write to a Widget and handle a Textual App.""" + + def emit(self, record: LogRecord) -> None: + """Invoked by logging.""" + try: + _app = active_app.get() + except LookupError: + pass + else: + super().emit(record) + + 
+class ShowLogs(Message): + """Custom message to show the logging messages.""" + + pass + + ## Markdown text to reuse in different screens markdown_genomes = """ Nf-core pipelines are configured to use a copy of the most common reference genome files. diff --git a/tests/__snapshots__/test_create_app.ambr b/tests/__snapshots__/test_create_app.ambr index f72b73537..6ee9c9b9b 100644 --- a/tests/__snapshots__/test_create_app.ambr +++ b/tests/__snapshots__/test_create_app.ambr @@ -1673,250 +1673,253 @@ font-weight: 700; } - .terminal-2007955284-matrix { + .terminal-1481614550-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2007955284-title { + .terminal-1481614550-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2007955284-r1 { fill: #c5c8c6 } - .terminal-2007955284-r2 { fill: #e3e3e3 } - .terminal-2007955284-r3 { fill: #989898 } - .terminal-2007955284-r4 { fill: #e1e1e1 } - .terminal-2007955284-r5 { fill: #98a84b } - .terminal-2007955284-r6 { fill: #626262 } - .terminal-2007955284-r7 { fill: #608ab1 } - .terminal-2007955284-r8 { fill: #d0b344 } - .terminal-2007955284-r9 { fill: #4ebf71;font-weight: bold } - .terminal-2007955284-r10 { fill: #d2d2d2 } - .terminal-2007955284-r11 { fill: #82aaff } - .terminal-2007955284-r12 { fill: #eeffff } - .terminal-2007955284-r13 { fill: #7ae998 } - .terminal-2007955284-r14 { fill: #008139 } - .terminal-2007955284-r15 { fill: #dde8f3;font-weight: bold } - .terminal-2007955284-r16 { fill: #ddedf9 } + .terminal-1481614550-r1 { fill: #c5c8c6 } + .terminal-1481614550-r2 { fill: #e3e3e3 } + .terminal-1481614550-r3 { fill: #989898 } + .terminal-1481614550-r4 { fill: #e1e1e1 } + .terminal-1481614550-r5 { fill: #98a84b } + .terminal-1481614550-r6 { fill: #626262 } + .terminal-1481614550-r7 { fill: #608ab1 } + .terminal-1481614550-r8 { fill: #d0b344 } + .terminal-1481614550-r9 { fill: #4ebf71;font-weight: bold } + 
.terminal-1481614550-r10 { fill: #d2d2d2 } + .terminal-1481614550-r11 { fill: #82aaff } + .terminal-1481614550-r12 { fill: #eeffff } + .terminal-1481614550-r13 { fill: #7ae998 } + .terminal-1481614550-r14 { fill: #507bb3 } + .terminal-1481614550-r15 { fill: #dde6ed;font-weight: bold } + .terminal-1481614550-r16 { fill: #008139 } + .terminal-1481614550-r17 { fill: #001541 } + .terminal-1481614550-r18 { fill: #dde8f3;font-weight: bold } + .terminal-1481614550-r19 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - -                                         ,--./,-. -         ___     __   __   __   ___     /,-._.--~\ - |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                        `._,._,' - - If you would like to create the GitHub repository later, you can do it manually by following - these steps: - -  1. Create a new GitHub repository -  2. Add the remote to your local repository - - - cd<pipeline_directory> - gitremoteaddorigingit@github.com:<username>/<repo_name>.git - - -  3. Push the code to the remote - - - gitpush--allorigin - - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Close App - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + +                                         ,--./,-. +         ___     __   __   __   ___     /,-._.--~\ + |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                        `._,._,' + + If you would like to create the GitHub repository later, you can do it manually by following + these steps: + +  1. 
Create a new GitHub repository +  2. Add the remote to your local repository + + + cd<pipeline_directory> + gitremoteaddorigingit@github.com:<username>/<repo_name>.git + + +  3. Push the code to the remote + + + gitpush--allorigin + + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Close AppShow Logging + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  diff --git a/tests/test_create_app.py b/tests/test_create_app.py index d226ea2da..710359e94 100644 --- a/tests/test_create_app.py +++ b/tests/test_create_app.py @@ -196,7 +196,7 @@ def test_github_question(mock_init_pipeline, snap_compare): screen choose_type > press nf-core > screen basic_details > enter pipeline details > press next > screen type_nfcore > press continue > - screen final_details > press finish > + screen final_details > press finish > close logging screen > screen github_repo_question """ @@ -212,6 +212,7 @@ async def run_before(pilot) -> None: await pilot.click("#next") await pilot.click("#continue") await pilot.click("#finish") + await pilot.click("#close_screen") assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) @@ -224,7 +225,7 @@ def test_github_details(mock_init_pipeline, snap_compare): screen choose_type > press nf-core > screen basic_details > enter pipeline details > press next > screen type_nfcore > press continue > - screen final_details > press finish > + screen final_details > press finish > close logging screen > screen github_repo_question > press create repo > screen github_repo """ @@ -241,6 +242,7 @@ async def run_before(pilot) -> None: await pilot.click("#next") await pilot.click("#continue") await pilot.click("#finish") + await pilot.click("#close_screen") await pilot.click("#github_repo") assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) @@ -254,7 +256,7 @@ def test_github_exit_message(mock_init_pipeline, 
snap_compare): screen choose_type > press nf-core > screen basic_details > enter pipeline details > press next > screen type_nfcore > press continue > - screen final_details > press finish > + screen final_details > press finish > close logging screen > screen github_repo_question > press create repo > screen github_repo > press exit (close without creating a repo) > screen github_exit @@ -272,6 +274,7 @@ async def run_before(pilot) -> None: await pilot.click("#next") await pilot.click("#continue") await pilot.click("#finish") + await pilot.click("#close_screen") await pilot.click("#github_repo") await pilot.click("#exit") From 0525d9c1ffc2c9c091ead1d8ff1db908d23c84dd Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 19 Feb 2024 10:38:41 +0100 Subject: [PATCH 064/737] deprecation error if 'nf-core create' is used --- nf_core/__main__.py | 46 +++------------------------------------------ 1 file changed, 3 insertions(+), 43 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index ca4003d6f..51eff26f7 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -542,54 +542,14 @@ def create_pipeline(ctx, name, description, author, version, force, outdir, temp @click.option("--plain", is_flag=True, help="Use the standard nf-core template") def create(name, description, author, version, force, outdir, template_yaml, plain): """ + DEPRECATED Create a new pipeline using the nf-core template. Uses the nf-core template to make a skeleton Nextflow pipeline with all required files, boilerplate code and best-practices. 
""" - from nf_core.pipelines.create import PipelineCreateApp - from nf_core.pipelines.create.create import PipelineCreate - - if (name and description and author) or (template_yaml): - # If all command arguments are used, run without the interactive interface - try: - create_obj = PipelineCreate( - name, - description, - author, - version=version, - force=force, - outdir=outdir, - ) - create_obj.init_pipeline() - except UserWarning as e: - log.error(e) - sys.exit(1) - elif name or description or author or version or force or outdir or plain: - log.error( - "Command arguments are not accepted in interactive mode.\n" - "Run with all command line arguments to avoid using an interactive interface" - "or run without any command line arguments to use an interactive interface." - ) - sys.exit(1) - else: - if rich.prompt.Confirm.ask( - "[blue bold]?[/] [bold] [green]nf-core create[/] command is deprecated in favor of [green]nf-core pipelines create[/].[/]\n" - "[bold]Will launch an interactive interface. Do you want to continue?[/]" - ): - log.info( - "Launching interactive nf-core pipeline creation tool." - "\nRun with all command line arguments to avoid using an interactive interface." - ) - app = PipelineCreateApp() - try: - app.run() - sys.exit(app.return_code or 0) - except UserWarning as e: - log.error(e) - sys.exit(1) - else: - sys.exit(0) + log.error("[bold][green]nf-core create[/] command is deprecated. 
Use [green]nf-core pipelines create[/].[/]") + sys.exit(0) # nf-core modules subcommands From 29afad0a5a23bc5c5942500a21bf155a561caa52 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 19 Feb 2024 11:22:08 +0100 Subject: [PATCH 065/737] more formatting and docs --- nf_core/pipelines/create/__init__.py | 2 +- nf_core/pipelines/create/completed.py | 40 ------------------- nf_core/pipelines/create/create.tcss | 4 ++ nf_core/pipelines/create/custompipeline.py | 11 ++++- nf_core/pipelines/create/error.py | 7 ++++ nf_core/pipelines/create/finaldetails.py | 9 ++++- nf_core/pipelines/create/githubexit.py | 9 +++++ nf_core/pipelines/create/githubrepo.py | 23 ++++++++--- .../pipelines/create/githubrepoquestion.py | 7 ++++ nf_core/pipelines/create/loggingscreen.py | 11 ++++- nf_core/pipelines/create/nfcorepipeline.py | 11 ++++- nf_core/pipelines/create/welcome.py | 17 ++++++++ 12 files changed, 98 insertions(+), 53 deletions(-) delete mode 100644 nf_core/pipelines/create/completed.py diff --git a/nf_core/pipelines/create/__init__.py b/nf_core/pipelines/create/__init__.py index 259fd9e33..d1a5484f8 100644 --- a/nf_core/pipelines/create/__init__.py +++ b/nf_core/pipelines/create/__init__.py @@ -22,7 +22,7 @@ ) from nf_core.pipelines.create.welcome import WelcomeScreen -log_handler = CustomLogHandler(console=LoggingConsole(), rich_tracebacks=True) +log_handler = CustomLogHandler(console=LoggingConsole(highlight=True, markup=True), rich_tracebacks=True) logging.basicConfig( level="INFO", handlers=[log_handler], diff --git a/nf_core/pipelines/create/completed.py b/nf_core/pipelines/create/completed.py deleted file mode 100644 index 282dd7688..000000000 --- a/nf_core/pipelines/create/completed.py +++ /dev/null @@ -1,40 +0,0 @@ -from textwrap import dedent - -from textual.app import ComposeResult -from textual.containers import Center -from textual.screen import Screen -from textual.widgets import Button, Footer, Header, Markdown, Static - - -class Completed(Screen): - """A screen 
to show the final text and exit the app.""" - - def compose(self) -> ComposeResult: - yield Header() - yield Footer() - yield Static( - f"\n[green]{' ' * 40},--.[grey39]/[green],-." - + "\n[blue] ___ __ __ __ ___ [green]/,-._.--~\\" - + "\n[blue]|\ | |__ __ / ` / \ |__) |__ [yellow] } {" - + "\n[blue] | \| | \__, \__/ | \ |___ [green]\`-._,-`-," - + "\n[green] `._,._,'\n", - id="logo", - ) - - completed_text_markdown = f""" - - A pipeline has been created at '`{self.parent.TEMPLATE_CONFIG.outdir + "/" + self.parent.TEMPLATE_CONFIG.org + "-" + self.parent.TEMPLATE_CONFIG.name}`'. - - A GitHub repository '`{self.parent.TEMPLATE_CONFIG.name}`' has been created in the {"user's" if self.parent.TEMPLATE_CONFIG.org == "nf-core" else ""} GitHub organisation account{ " `" + self.parent.TEMPLATE_CONFIG.org + "`" if self.parent.TEMPLATE_CONFIG.org != "nf-core" else ""}. - - !!!!!! IMPORTANT !!!!!! - - If you are interested in adding your pipeline to the nf-core community, - PLEASE COME AND TALK TO US IN THE NF-CORE SLACK BEFORE WRITING ANY CODE! 
- - - Please read: [https://nf-co.re/developers/adding_pipelines#join-the-community](https://nf-co.re/developers/adding_pipelines#join-the-community) - """ - - yield Markdown(dedent(completed_text_markdown)) - yield Center( - Button("Close App", id="close_app", variant="success"), - classes="cta", - ) diff --git a/nf_core/pipelines/create/create.tcss b/nf_core/pipelines/create/create.tcss index df37e50ed..51ed5745f 100644 --- a/nf_core/pipelines/create/create.tcss +++ b/nf_core/pipelines/create/create.tcss @@ -52,6 +52,10 @@ HorizontalScroll { color: grey; } +Vertical{ + height: auto; +} + /* Display help messages */ .help_box { diff --git a/nf_core/pipelines/create/custompipeline.py b/nf_core/pipelines/create/custompipeline.py index 5cc2f87d9..440b900a3 100644 --- a/nf_core/pipelines/create/custompipeline.py +++ b/nf_core/pipelines/create/custompipeline.py @@ -1,8 +1,10 @@ +from textwrap import dedent + from textual import on from textual.app import ComposeResult from textual.containers import Center, ScrollableContainer from textual.screen import Screen -from textual.widgets import Button, Footer, Header, Switch +from textual.widgets import Button, Footer, Header, Markdown, Switch from nf_core.pipelines.create.utils import PipelineFeature, markdown_genomes @@ -46,6 +48,13 @@ class CustomPipeline(Screen): def compose(self) -> ComposeResult: yield Header() yield Footer() + yield Markdown( + dedent( + """ + # Template features + """ + ) + ) yield ScrollableContainer( PipelineFeature( markdown_genomes, diff --git a/nf_core/pipelines/create/error.py b/nf_core/pipelines/create/error.py index a1d94e62f..922b5ed54 100644 --- a/nf_core/pipelines/create/error.py +++ b/nf_core/pipelines/create/error.py @@ -12,6 +12,13 @@ class ExistError(Screen): def compose(self) -> ComposeResult: yield Header() yield Footer() + yield Markdown( + dedent( + """ + # Pipeline exists + """ + ) + ) yield Static( f"\n[green]{' ' * 40},--.[grey39]/[green],-." 
+ "\n[blue] ___ __ __ __ ___ [green]/,-._.--~\\" diff --git a/nf_core/pipelines/create/finaldetails.py b/nf_core/pipelines/create/finaldetails.py index 7186fd2b6..88008894c 100644 --- a/nf_core/pipelines/create/finaldetails.py +++ b/nf_core/pipelines/create/finaldetails.py @@ -3,7 +3,7 @@ from textual import on, work from textual.app import ComposeResult -from textual.containers import Center, Horizontal +from textual.containers import Center, Horizontal, Vertical from textual.message import Message from textual.screen import Screen from textual.widgets import Button, Footer, Header, Input, Markdown, Static, Switch @@ -44,7 +44,12 @@ def compose(self) -> ComposeResult: ) with Horizontal(): yield Switch(value=False, id="force") - yield Static("If the pipeline output directory exists, remove it and continue.", classes="custom_grid") + with Vertical(): + yield Static("Force", classes="custom_grid") + yield Static( + "If the pipeline output directory exists, remove it and continue.", + classes="feature_subtitle", + ) yield Center( Button("Finish", id="finish", variant="success"), diff --git a/nf_core/pipelines/create/githubexit.py b/nf_core/pipelines/create/githubexit.py index 96178fbcd..9b2c54912 100644 --- a/nf_core/pipelines/create/githubexit.py +++ b/nf_core/pipelines/create/githubexit.py @@ -1,3 +1,5 @@ +from textwrap import dedent + from textual.app import ComposeResult from textual.containers import Center from textual.screen import Screen @@ -25,6 +27,13 @@ class GithubExit(Screen): def compose(self) -> ComposeResult: yield Header() yield Footer() + yield Markdown( + dedent( + """ + # HowTo create a GitHub repository + """ + ) + ) yield Static( f"\n[green]{' ' * 40},--.[grey39]/[green],-." 
+ "\n[blue] ___ __ __ __ ___ [green]/,-._.--~\\" diff --git a/nf_core/pipelines/create/githubrepo.py b/nf_core/pipelines/create/githubrepo.py index 22d52f451..54581ebb1 100644 --- a/nf_core/pipelines/create/githubrepo.py +++ b/nf_core/pipelines/create/githubrepo.py @@ -8,7 +8,7 @@ from github import Github, GithubException, UnknownObjectException from textual import work from textual.app import ComposeResult -from textual.containers import Center, Horizontal +from textual.containers import Center, Horizontal, Vertical from textual.screen import Screen from textual.widgets import Button, Footer, Header, Input, Markdown, Static, Switch @@ -33,6 +33,13 @@ class GithubRepo(Screen): def compose(self) -> ComposeResult: yield Header() yield Footer() + yield Markdown( + dedent( + """ + # Create GitHub repository + """ + ) + ) yield Markdown(dedent(github_text_markdown)) with Horizontal(): gh_user, gh_token = self._get_github_credentials() @@ -56,13 +63,17 @@ def compose(self) -> ComposeResult: yield Markdown(dedent(repo_config_markdown)) with Horizontal(): yield Switch(value=False, id="private") - yield Static("Select if the new GitHub repo must be private.", classes="custom_grid") + with Vertical(): + yield Static("Private", classes="") + yield Static("Select if the new GitHub repo must be private.", classes="feature_subtitle") with Horizontal(): yield Switch(value=True, id="push") - yield Static( - "Select if you would like to push all the pipeline template files to your GitHub repo\nand all the branches required to keep the pipeline up to date with new releases of nf-core.", - classes="custom_grid", - ) + with Vertical(): + yield Static("Push files", classes="custom_grid") + yield Static( + "Select if you would like to push all the pipeline template files to your GitHub repo\nand all the branches required to keep the pipeline up to date with new releases of nf-core.", + classes="feature_subtitle", + ) yield Center( Button("Create GitHub repo", id="create_github", 
variant="success"), Button("Finish without creating a repo", id="exit", variant="primary"), diff --git a/nf_core/pipelines/create/githubrepoquestion.py b/nf_core/pipelines/create/githubrepoquestion.py index ea3259710..c866f859a 100644 --- a/nf_core/pipelines/create/githubrepoquestion.py +++ b/nf_core/pipelines/create/githubrepoquestion.py @@ -23,6 +23,13 @@ class GithubRepoQuestion(Screen): def compose(self) -> ComposeResult: yield Header() yield Footer() + yield Markdown( + dedent( + """ + # Create GitHub repository + """ + ) + ) yield Markdown(dedent(github_text_markdown)) yield Center( Button("Create GitHub repo", id="github_repo", variant="success"), diff --git a/nf_core/pipelines/create/loggingscreen.py b/nf_core/pipelines/create/loggingscreen.py index 2a59e2bcc..cb7b93291 100644 --- a/nf_core/pipelines/create/loggingscreen.py +++ b/nf_core/pipelines/create/loggingscreen.py @@ -1,11 +1,11 @@ +from textwrap import dedent + from textual.app import ComposeResult from textual.containers import Center from textual.screen import Screen from textual.widgets import Button, Footer, Header, Markdown, Static markdown = """ -# nf-core create - Visualising logging output. """ @@ -16,6 +16,13 @@ class LoggingScreen(Screen): def compose(self) -> ComposeResult: yield Header() yield Footer() + yield Markdown( + dedent( + """ + # Logging + """ + ) + ) yield Static( f"\n[green]{' ' * 40},--.[grey39]/[green],-." 
+ "\n[blue] ___ __ __ __ ___ [green]/,-._.--~\\" diff --git a/nf_core/pipelines/create/nfcorepipeline.py b/nf_core/pipelines/create/nfcorepipeline.py index 10541ced0..2444b3515 100644 --- a/nf_core/pipelines/create/nfcorepipeline.py +++ b/nf_core/pipelines/create/nfcorepipeline.py @@ -1,8 +1,10 @@ +from textwrap import dedent + from textual import on from textual.app import ComposeResult from textual.containers import Center, ScrollableContainer from textual.screen import Screen -from textual.widgets import Button, Footer, Header, Switch +from textual.widgets import Button, Footer, Header, Markdown, Switch from nf_core.pipelines.create.utils import PipelineFeature, markdown_genomes @@ -13,6 +15,13 @@ class NfcorePipeline(Screen): def compose(self) -> ComposeResult: yield Header() yield Footer() + yield Markdown( + dedent( + """ + # Pipeline features + """ + ) + ) yield ScrollableContainer( PipelineFeature( markdown_genomes, diff --git a/nf_core/pipelines/create/welcome.py b/nf_core/pipelines/create/welcome.py index 0be70cc4c..cb0d7468c 100644 --- a/nf_core/pipelines/create/welcome.py +++ b/nf_core/pipelines/create/welcome.py @@ -1,3 +1,5 @@ +from textwrap import dedent + from textual.app import ComposeResult from textual.containers import Center from textual.screen import Screen @@ -15,6 +17,14 @@ However, this tool can also be used to create pipelines that will never be part of nf-core. You can still benefit from the community best practices for your own workflow. + +If you are planning to add a pipeline to the nf-core community, you need to be part of that community! +Please join us on Slack [https://nf-co.re/join](https://nf-co.re/join), +and ask to be added to the GitHub association through the #github-invitations channel. + +Come and discuss your plans with the nf-core community as early as possible. +Ideally before you make a start on your pipeline! 
+These topics are specifically discussed in the [#new-pipelines](https://nfcore.slack.com/channels/new-pipelines) channel. """ @@ -24,6 +34,13 @@ class WelcomeScreen(Screen): def compose(self) -> ComposeResult: yield Header() yield Footer() + yield Markdown( + dedent( + """ + # Create a pipeline from the nf-core template + """ + ) + ) yield Static( f"\n[green]{' ' * 40},--.[grey39]/[green],-." + "\n[blue] ___ __ __ __ ___ [green]/,-._.--~\\" From 2dcb0f1744bf2eec8852d92956d34e6594c1fe1f Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 19 Feb 2024 12:30:46 +0100 Subject: [PATCH 066/737] add button to go back --- nf_core/pipelines/create/__init__.py | 18 +++++++++--------- nf_core/pipelines/create/basicdetails.py | 5 +++-- nf_core/pipelines/create/custompipeline.py | 1 + nf_core/pipelines/create/finaldetails.py | 1 + nf_core/pipelines/create/githubrepo.py | 1 + nf_core/pipelines/create/githubrepoquestion.py | 1 + nf_core/pipelines/create/nfcorepipeline.py | 1 + 7 files changed, 17 insertions(+), 11 deletions(-) diff --git a/nf_core/pipelines/create/__init__.py b/nf_core/pipelines/create/__init__.py index d1a5484f8..b62f09397 100644 --- a/nf_core/pipelines/create/__init__.py +++ b/nf_core/pipelines/create/__init__.py @@ -5,7 +5,6 @@ from textual.widgets import Button from nf_core.pipelines.create.basicdetails import BasicDetails -from nf_core.pipelines.create.completed import Completed from nf_core.pipelines.create.custompipeline import CustomPipeline from nf_core.pipelines.create.error import ExistError from nf_core.pipelines.create.finaldetails import FinalDetails @@ -51,7 +50,6 @@ class PipelineCreateApp(App[CreateConfig]): "github_repo_question": GithubRepoQuestion(), "github_repo": GithubRepo(), "github_exit": GithubExit(), - "completed_screen": Completed(), "error_screen": ExistError(), } @@ -72,25 +70,27 @@ def on_mount(self) -> None: def on_button_pressed(self, event: Button.Pressed) -> None: """Handle all button pressed events.""" if event.button.id == 
"start": - self.switch_screen("choose_type") + self.push_screen("choose_type") elif event.button.id == "type_nfcore": self.PIPELINE_TYPE = "nfcore" - self.switch_screen("basic_details") + self.push_screen("basic_details") elif event.button.id == "type_custom": self.PIPELINE_TYPE = "custom" - self.switch_screen("basic_details") + self.push_screen("basic_details") elif event.button.id == "continue": - self.switch_screen("final_details") + self.push_screen("final_details") elif event.button.id == "github_repo": - self.switch_screen("github_repo") + self.push_screen("github_repo") elif event.button.id == "close_screen": - self.switch_screen("github_repo_question") + self.push_screen("github_repo_question") elif event.button.id == "exit": - self.switch_screen("github_exit") + self.push_screen("github_exit") elif event.button.id == "show_logging": self.switch_screen(LoggingScreen()) if event.button.id == "close_app": self.exit(return_code=0) + if event.button.id == "back": + self.pop_screen() def action_toggle_dark(self) -> None: """An action to toggle dark mode.""" diff --git a/nf_core/pipelines/create/basicdetails.py b/nf_core/pipelines/create/basicdetails.py index da1b2bf45..aa2886c1b 100644 --- a/nf_core/pipelines/create/basicdetails.py +++ b/nf_core/pipelines/create/basicdetails.py @@ -50,6 +50,7 @@ def compose(self) -> ComposeResult: "Name of the main author / authors", ) yield Center( + Button("Back", id="back", variant="default"), Button("Next", id="next", variant="success"), classes="cta", ) @@ -69,8 +70,8 @@ def on_button_pressed(self, event: Button.Pressed) -> None: try: self.parent.TEMPLATE_CONFIG = CreateConfig(**config) if self.parent.PIPELINE_TYPE == "nfcore": - self.parent.switch_screen("type_nfcore") + self.parent.push_screen("type_nfcore") elif self.parent.PIPELINE_TYPE == "custom": - self.parent.switch_screen("type_custom") + self.parent.push_screen("type_custom") except ValueError: pass diff --git a/nf_core/pipelines/create/custompipeline.py 
b/nf_core/pipelines/create/custompipeline.py index 440b900a3..6fe878469 100644 --- a/nf_core/pipelines/create/custompipeline.py +++ b/nf_core/pipelines/create/custompipeline.py @@ -82,6 +82,7 @@ def compose(self) -> ComposeResult: ), ) yield Center( + Button("Back", id="back", variant="default"), Button("Continue", id="continue", variant="success"), classes="cta", ) diff --git a/nf_core/pipelines/create/finaldetails.py b/nf_core/pipelines/create/finaldetails.py index 88008894c..9ffaaa9cf 100644 --- a/nf_core/pipelines/create/finaldetails.py +++ b/nf_core/pipelines/create/finaldetails.py @@ -52,6 +52,7 @@ def compose(self) -> ComposeResult: ) yield Center( + Button("Back", id="back", variant="default"), Button("Finish", id="finish", variant="success"), classes="cta", ) diff --git a/nf_core/pipelines/create/githubrepo.py b/nf_core/pipelines/create/githubrepo.py index 54581ebb1..c8a02e609 100644 --- a/nf_core/pipelines/create/githubrepo.py +++ b/nf_core/pipelines/create/githubrepo.py @@ -75,6 +75,7 @@ def compose(self) -> ComposeResult: classes="feature_subtitle", ) yield Center( + Button("Back", id="back", variant="default"), Button("Create GitHub repo", id="create_github", variant="success"), Button("Finish without creating a repo", id="exit", variant="primary"), classes="cta", diff --git a/nf_core/pipelines/create/githubrepoquestion.py b/nf_core/pipelines/create/githubrepoquestion.py index c866f859a..127942485 100644 --- a/nf_core/pipelines/create/githubrepoquestion.py +++ b/nf_core/pipelines/create/githubrepoquestion.py @@ -32,6 +32,7 @@ def compose(self) -> ComposeResult: ) yield Markdown(dedent(github_text_markdown)) yield Center( + Button("Back", id="back", variant="default"), Button("Create GitHub repo", id="github_repo", variant="success"), Button("Finish without creating a repo", id="exit", variant="primary"), classes="cta", diff --git a/nf_core/pipelines/create/nfcorepipeline.py b/nf_core/pipelines/create/nfcorepipeline.py index 2444b3515..f9bd45733 100644 
--- a/nf_core/pipelines/create/nfcorepipeline.py +++ b/nf_core/pipelines/create/nfcorepipeline.py @@ -31,6 +31,7 @@ def compose(self) -> ComposeResult: ), ) yield Center( + Button("Back", id="back", variant="default"), Button("Continue", id="continue", variant="success"), classes="cta", ) From 17bb9e9a6a4e4e585cc673a4a5ad1d2f64cf47dd Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 19 Feb 2024 15:39:11 +0100 Subject: [PATCH 067/737] disable button to close logs while pipeline is not created --- nf_core/pipelines/create/__init__.py | 2 +- nf_core/pipelines/create/finaldetails.py | 3 ++- nf_core/pipelines/create/loggingscreen.py | 2 +- nf_core/pipelines/create/utils.py | 6 ++++++ 4 files changed, 10 insertions(+), 3 deletions(-) diff --git a/nf_core/pipelines/create/__init__.py b/nf_core/pipelines/create/__init__.py index b62f09397..86941c124 100644 --- a/nf_core/pipelines/create/__init__.py +++ b/nf_core/pipelines/create/__init__.py @@ -21,7 +21,7 @@ ) from nf_core.pipelines.create.welcome import WelcomeScreen -log_handler = CustomLogHandler(console=LoggingConsole(highlight=True, markup=True), rich_tracebacks=True) +log_handler = CustomLogHandler(console=LoggingConsole(), rich_tracebacks=True, markup=True) logging.basicConfig( level="INFO", handlers=[log_handler], diff --git a/nf_core/pipelines/create/finaldetails.py b/nf_core/pipelines/create/finaldetails.py index 9ffaaa9cf..5af28cffa 100644 --- a/nf_core/pipelines/create/finaldetails.py +++ b/nf_core/pipelines/create/finaldetails.py @@ -10,7 +10,7 @@ from nf_core.pipelines.create.create import PipelineCreate from nf_core.pipelines.create.loggingscreen import LoggingScreen -from nf_core.pipelines.create.utils import ShowLogs, TextInput +from nf_core.pipelines.create.utils import ShowLogs, TextInput, change_select_disabled class FinalDetails(Screen): @@ -101,5 +101,6 @@ def _create_pipeline(self) -> None: create_obj = PipelineCreate(template_config=self.parent.TEMPLATE_CONFIG) try: create_obj.init_pipeline() + 
self.parent.call_from_thread(change_select_disabled, self.parent, "close_screen", False) except UserWarning: self.post_message(self.PipelineExists()) diff --git a/nf_core/pipelines/create/loggingscreen.py b/nf_core/pipelines/create/loggingscreen.py index cb7b93291..3cfe3f8d8 100644 --- a/nf_core/pipelines/create/loggingscreen.py +++ b/nf_core/pipelines/create/loggingscreen.py @@ -39,7 +39,7 @@ def compose(self) -> ComposeResult: ) else: yield Center( - Button("Close logging screen", id="close_screen", variant="success"), + Button("Close logging screen", id="close_screen", variant="success", disabled=True), classes="cta", ) yield Center(self.parent.LOG_HANDLER.console, classes="cta") diff --git a/nf_core/pipelines/create/utils.py b/nf_core/pipelines/create/utils.py index f3474e01c..a1c908952 100644 --- a/nf_core/pipelines/create/utils.py +++ b/nf_core/pipelines/create/utils.py @@ -200,6 +200,12 @@ class ShowLogs(Message): pass +## Functions +def change_select_disabled(app, widget_id: str, disabled: bool) -> None: + """Change the disabled state of a widget.""" + app.get_widget_by_id(widget_id).disabled = disabled + + ## Markdown text to reuse in different screens markdown_genomes = """ Nf-core pipelines are configured to use a copy of the most common reference genome files. 
From 5445b740515f00de6a4e809b1d9b41b9bb46bf51 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 20 Feb 2024 14:09:50 +0100 Subject: [PATCH 068/737] more tweaking of logging buttons and screen --- nf_core/pipelines/create/__init__.py | 5 ++++- nf_core/pipelines/create/githubrepoquestion.py | 3 --- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/nf_core/pipelines/create/__init__.py b/nf_core/pipelines/create/__init__.py index 86941c124..126b3db62 100644 --- a/nf_core/pipelines/create/__init__.py +++ b/nf_core/pipelines/create/__init__.py @@ -82,10 +82,13 @@ def on_button_pressed(self, event: Button.Pressed) -> None: elif event.button.id == "github_repo": self.push_screen("github_repo") elif event.button.id == "close_screen": - self.push_screen("github_repo_question") + # Switch screen (not push) to allow viewing old logging messages + self.switch_screen("github_repo_question") elif event.button.id == "exit": self.push_screen("github_exit") elif event.button.id == "show_logging": + # Set logging state to repo created to see the button for closing the logging screen + self.LOGGING_STATE = "repo created" self.switch_screen(LoggingScreen()) if event.button.id == "close_app": self.exit(return_code=0) diff --git a/nf_core/pipelines/create/githubrepoquestion.py b/nf_core/pipelines/create/githubrepoquestion.py index 127942485..ded33d188 100644 --- a/nf_core/pipelines/create/githubrepoquestion.py +++ b/nf_core/pipelines/create/githubrepoquestion.py @@ -9,8 +9,6 @@ log = logging.getLogger(__name__) github_text_markdown = """ -# Create a GitHub repo - After creating the pipeline template locally, we can create a GitHub repository and push the code to it. Do you want to create a GitHub repository? 
@@ -32,7 +30,6 @@ def compose(self) -> ComposeResult: ) yield Markdown(dedent(github_text_markdown)) yield Center( - Button("Back", id="back", variant="default"), Button("Create GitHub repo", id="github_repo", variant="success"), Button("Finish without creating a repo", id="exit", variant="primary"), classes="cta", From 536e3be0a8ceab419785c6b901a1884f9ff6bc8d Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 21 Feb 2024 15:24:52 +0100 Subject: [PATCH 069/737] update create app tests according to last changes --- nf-core-a | 1 + nf_core/pipelines/create/nfcorepipeline.py | 2 +- tests/__snapshots__/test_create_app.ambr | 2635 ++++++++++---------- tests/test_create_app.py | 5 + 4 files changed, 1339 insertions(+), 1304 deletions(-) create mode 160000 nf-core-a diff --git a/nf-core-a b/nf-core-a new file mode 160000 index 000000000..6a887ed6e --- /dev/null +++ b/nf-core-a @@ -0,0 +1 @@ +Subproject commit 6a887ed6ebd510b597a45c4acc505d830313950b diff --git a/nf_core/pipelines/create/nfcorepipeline.py b/nf_core/pipelines/create/nfcorepipeline.py index f9bd45733..8306e9326 100644 --- a/nf_core/pipelines/create/nfcorepipeline.py +++ b/nf_core/pipelines/create/nfcorepipeline.py @@ -18,7 +18,7 @@ def compose(self) -> ComposeResult: yield Markdown( dedent( """ - # Pipeline features + # Template features """ ) ) diff --git a/tests/__snapshots__/test_create_app.ambr b/tests/__snapshots__/test_create_app.ambr index 6ee9c9b9b..b0a306adc 100644 --- a/tests/__snapshots__/test_create_app.ambr +++ b/tests/__snapshots__/test_create_app.ambr @@ -22,250 +22,253 @@ font-weight: 700; } - .terminal-2900179749-matrix { + .terminal-1527309810-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2900179749-title { + .terminal-1527309810-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2900179749-r1 { fill: #c5c8c6 } - .terminal-2900179749-r2 { fill: #e3e3e3 } - 
.terminal-2900179749-r3 { fill: #989898 } - .terminal-2900179749-r4 { fill: #e1e1e1 } - .terminal-2900179749-r5 { fill: #121212 } - .terminal-2900179749-r6 { fill: #0053aa } - .terminal-2900179749-r7 { fill: #dde8f3;font-weight: bold } - .terminal-2900179749-r8 { fill: #a5a5a5;font-style: italic; } - .terminal-2900179749-r9 { fill: #1e1e1e } - .terminal-2900179749-r10 { fill: #008139 } - .terminal-2900179749-r11 { fill: #e2e2e2 } - .terminal-2900179749-r12 { fill: #787878 } - .terminal-2900179749-r13 { fill: #b93c5b } - .terminal-2900179749-r14 { fill: #7ae998 } - .terminal-2900179749-r15 { fill: #0a180e;font-weight: bold } - .terminal-2900179749-r16 { fill: #ddedf9 } + .terminal-1527309810-r1 { fill: #c5c8c6 } + .terminal-1527309810-r2 { fill: #e3e3e3 } + .terminal-1527309810-r3 { fill: #989898 } + .terminal-1527309810-r4 { fill: #e1e1e1 } + .terminal-1527309810-r5 { fill: #121212 } + .terminal-1527309810-r6 { fill: #0053aa } + .terminal-1527309810-r7 { fill: #dde8f3;font-weight: bold } + .terminal-1527309810-r8 { fill: #a5a5a5;font-style: italic; } + .terminal-1527309810-r9 { fill: #1e1e1e } + .terminal-1527309810-r10 { fill: #008139 } + .terminal-1527309810-r11 { fill: #e2e2e2 } + .terminal-1527309810-r12 { fill: #787878 } + .terminal-1527309810-r13 { fill: #b93c5b } + .terminal-1527309810-r14 { fill: #454a50 } + .terminal-1527309810-r15 { fill: #7ae998 } + .terminal-1527309810-r16 { fill: #e2e3e3;font-weight: bold } + .terminal-1527309810-r17 { fill: #0a180e;font-weight: bold } + .terminal-1527309810-r18 { fill: #000000 } + .terminal-1527309810-r19 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - 
▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Basic details - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - - GitHub organisationWorkflow name - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - nf-corePipeline Name - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - A short description of your pipeline. - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Description - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - Name of the main author / authors - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Author(s) - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Next - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Basic details + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + + GitHub organisationWorkflow name + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + nf-corePipeline Name + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + A short description of your pipeline. 
+ ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Description + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + Name of the main author / authors + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Author(s) + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackNext + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  @@ -295,253 +298,256 @@ font-weight: 700; } - .terminal-1441415707-matrix { + .terminal-2230840552-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1441415707-title { + .terminal-2230840552-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1441415707-r1 { fill: #c5c8c6 } - .terminal-1441415707-r2 { fill: #e3e3e3 } - .terminal-1441415707-r3 { fill: #989898 } - .terminal-1441415707-r4 { fill: #e1e1e1 } - .terminal-1441415707-r5 { fill: #121212 } - .terminal-1441415707-r6 { fill: #0053aa } - .terminal-1441415707-r7 { fill: #dde8f3;font-weight: bold } - .terminal-1441415707-r8 { fill: #a5a5a5;font-style: italic; } - .terminal-1441415707-r9 { fill: #1e1e1e } - .terminal-1441415707-r10 { fill: #0f4e2a } - .terminal-1441415707-r11 { fill: #0178d4 } - .terminal-1441415707-r12 { fill: #a7a7a7 } - .terminal-1441415707-r13 { fill: #787878 } - .terminal-1441415707-r14 { fill: #e2e2e2 } - .terminal-1441415707-r15 { fill: #b93c5b } - .terminal-1441415707-r16 { fill: #7ae998 } - .terminal-1441415707-r17 { fill: #0a180e;font-weight: bold } - .terminal-1441415707-r18 { fill: #008139 } - .terminal-1441415707-r19 { fill: #ddedf9 } + .terminal-2230840552-r1 { fill: #c5c8c6 } + .terminal-2230840552-r2 { fill: #e3e3e3 } + .terminal-2230840552-r3 { fill: #989898 
} + .terminal-2230840552-r4 { fill: #e1e1e1 } + .terminal-2230840552-r5 { fill: #121212 } + .terminal-2230840552-r6 { fill: #0053aa } + .terminal-2230840552-r7 { fill: #dde8f3;font-weight: bold } + .terminal-2230840552-r8 { fill: #a5a5a5;font-style: italic; } + .terminal-2230840552-r9 { fill: #1e1e1e } + .terminal-2230840552-r10 { fill: #0f4e2a } + .terminal-2230840552-r11 { fill: #0178d4 } + .terminal-2230840552-r12 { fill: #a7a7a7 } + .terminal-2230840552-r13 { fill: #787878 } + .terminal-2230840552-r14 { fill: #e2e2e2 } + .terminal-2230840552-r15 { fill: #b93c5b } + .terminal-2230840552-r16 { fill: #454a50 } + .terminal-2230840552-r17 { fill: #7ae998 } + .terminal-2230840552-r18 { fill: #e2e3e3;font-weight: bold } + .terminal-2230840552-r19 { fill: #0a180e;font-weight: bold } + .terminal-2230840552-r20 { fill: #000000 } + .terminal-2230840552-r21 { fill: #008139 } + .terminal-2230840552-r22 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Basic details - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - - GitHub organisationWorkflow name - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - nf-corePipeline Name - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - A short description of your pipeline. 
- ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Description - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - Name of the main author / authors - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Author(s) - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Next - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Basic details + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + + GitHub organisationWorkflow name + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + nf-corePipeline Name + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + A short description of your pipeline. 
+ ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Description + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + Name of the main author / authors + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Author(s) + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackNext + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  @@ -845,255 +851,257 @@ font-weight: 700; } - .terminal-3545740190-matrix { + .terminal-2112272033-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3545740190-title { + .terminal-2112272033-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3545740190-r1 { fill: #c5c8c6 } - .terminal-3545740190-r2 { fill: #e3e3e3 } - .terminal-3545740190-r3 { fill: #989898 } - .terminal-3545740190-r4 { fill: #1e1e1e } - .terminal-3545740190-r5 { fill: #0178d4 } - .terminal-3545740190-r6 { fill: #e1e1e1 } - .terminal-3545740190-r7 { fill: #454a50 } - .terminal-3545740190-r8 { fill: #e2e2e2 } - .terminal-3545740190-r9 { fill: #808080 } - .terminal-3545740190-r10 { fill: #e2e3e3;font-weight: bold } - .terminal-3545740190-r11 { fill: #000000 } - .terminal-3545740190-r12 { fill: #e4e4e4 } - .terminal-3545740190-r13 { fill: #14191f } - .terminal-3545740190-r14 { fill: #507bb3 } - .terminal-3545740190-r15 { fill: #dde6ed;font-weight: bold } - .terminal-3545740190-r16 { fill: #001541 } - .terminal-3545740190-r17 { fill: #7ae998 } - .terminal-3545740190-r18 { fill: #0a180e;font-weight: bold } - .terminal-3545740190-r19 { fill: #008139 } - .terminal-3545740190-r20 { fill: #dde8f3;font-weight: bold } - .terminal-3545740190-r21 { fill: #ddedf9 } + 
.terminal-2112272033-r1 { fill: #c5c8c6 } + .terminal-2112272033-r2 { fill: #e3e3e3 } + .terminal-2112272033-r3 { fill: #989898 } + .terminal-2112272033-r4 { fill: #e1e1e1 } + .terminal-2112272033-r5 { fill: #121212 } + .terminal-2112272033-r6 { fill: #0053aa } + .terminal-2112272033-r7 { fill: #dde8f3;font-weight: bold } + .terminal-2112272033-r8 { fill: #1e1e1e } + .terminal-2112272033-r9 { fill: #0178d4 } + .terminal-2112272033-r10 { fill: #454a50 } + .terminal-2112272033-r11 { fill: #e2e2e2 } + .terminal-2112272033-r12 { fill: #808080 } + .terminal-2112272033-r13 { fill: #e2e3e3;font-weight: bold } + .terminal-2112272033-r14 { fill: #000000 } + .terminal-2112272033-r15 { fill: #e4e4e4 } + .terminal-2112272033-r16 { fill: #14191f } + .terminal-2112272033-r17 { fill: #507bb3 } + .terminal-2112272033-r18 { fill: #dde6ed;font-weight: bold } + .terminal-2112272033-r19 { fill: #001541 } + .terminal-2112272033-r20 { fill: #7ae998 } + .terminal-2112272033-r21 { fill: #0a180e;font-weight: bold } + .terminal-2112272033-r22 { fill: #008139 } + .terminal-2112272033-r23 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Use reference genomesThe pipeline will be Hide help - ▁▁▁▁▁▁▁▁configured to use a copy ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - of the most common  - reference genome files  - from iGenomes - - - - Nf-core pipelines are configured to use a copy of the most common  - reference genome files. - - By selecting this option, your pipeline will include a configuration - file specifying the paths to these files. - - The required code to use these files will also be included in the  - template. 
When the pipeline user provides an appropriate genome key,▆▆ - the pipeline will automatically download the required reference  - files. - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Add Github CI testsThe pipeline will includeShow help - ▁▁▁▁▁▁▁▁several GitHub actions ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - for Continuous  - Integration (CI) testing - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Add Github badgesThe README.md file of theShow help - ▁▁▁▁▁▁▁▁pipeline will include ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - GitHub badges - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Add configuration filesThe pipeline will includeShow help - ▁▁▁▁▁▁▁▁configuration profiles ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - containing custom  - parameters requried to  - run nf-core pipelines at  - different institutions - - - - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Continue - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Template features + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Use reference The pipeline will beHide help + ▁▁▁▁▁▁▁▁genomesconfigured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + copy of the most  + common reference  + genome files from  + iGenomes + + + + Nf-core pipelines are configured to use a copy of the most + common reference genome files. + + By selecting this option, your pipeline will include a  + configuration file specifying the paths to these files. + + The required code to use these files will also be included + in the template. When the pipeline user provides an ▆▆ + appropriate genome key, the pipeline will automatically  + download the required reference files. 
+ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Add Github CI testsThe pipeline will Show help + ▁▁▁▁▁▁▁▁include several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + GitHub actions for  + Continuous  + Integration (CI)  + testing + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▂▂ + Add Github badgesThe README.md file Show help + ▁▁▁▁▁▁▁▁of the pipeline will▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + include GitHub  + badges + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Add configuration The pipeline will Show help + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackContinue + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  D  Toggle dark mode  Q  Quit  @@ -1123,249 +1131,253 @@ font-weight: 700; } - .terminal-3890482819-matrix { + .terminal-2426593002-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3890482819-title { + .terminal-2426593002-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3890482819-r1 { fill: #c5c8c6 } - .terminal-3890482819-r2 { fill: #e3e3e3 } - .terminal-3890482819-r3 { fill: #989898 } - .terminal-3890482819-r4 { fill: #e1e1e1 } - .terminal-3890482819-r5 { fill: #121212 } - .terminal-3890482819-r6 { fill: #0053aa } - .terminal-3890482819-r7 { fill: #dde8f3;font-weight: bold } - .terminal-3890482819-r8 { fill: #a5a5a5;font-style: italic; } - .terminal-3890482819-r9 { fill: #1e1e1e } - .terminal-3890482819-r10 { fill: #008139 } - .terminal-3890482819-r11 { fill: #e2e2e2 } - .terminal-3890482819-r12 { fill: #b93c5b } - .terminal-3890482819-r13 { fill: #7ae998 } - .terminal-3890482819-r14 { fill: #0a180e;font-weight: bold } - .terminal-3890482819-r15 { fill: #ddedf9 } + .terminal-2426593002-r1 { fill: #c5c8c6 } + .terminal-2426593002-r2 { fill: #e3e3e3 } + .terminal-2426593002-r3 { fill: #989898 } + .terminal-2426593002-r4 { fill: #e1e1e1 } + .terminal-2426593002-r5 { fill: #121212 } + .terminal-2426593002-r6 { fill: #0053aa } + .terminal-2426593002-r7 { fill: #dde8f3;font-weight: bold } + .terminal-2426593002-r8 { fill: #a5a5a5;font-style: italic; } + 
.terminal-2426593002-r9 { fill: #1e1e1e } + .terminal-2426593002-r10 { fill: #008139 } + .terminal-2426593002-r11 { fill: #e2e2e2 } + .terminal-2426593002-r12 { fill: #b93c5b } + .terminal-2426593002-r13 { fill: #808080 } + .terminal-2426593002-r14 { fill: #454a50 } + .terminal-2426593002-r15 { fill: #7ae998 } + .terminal-2426593002-r16 { fill: #e2e3e3;font-weight: bold } + .terminal-2426593002-r17 { fill: #0a180e;font-weight: bold } + .terminal-2426593002-r18 { fill: #000000 } + .terminal-2426593002-r19 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Final details - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - - First version of the pipelinePath to the output directory where the pipeline  - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔will be created - 1.0.0dev▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁. - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ▔▔▔▔▔▔▔▔If the pipeline output directory exists, remove it and continue. 
- - ▁▁▁▁▁▁▁▁ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Finish - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Final details + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + + First version of the pipelinePath to the output directory where the pipeline  + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔will be created + 1.0.0dev▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁. + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + ▔▔▔▔▔▔▔▔Force + If the pipeline output directory exists, remove it and continue. + ▁▁▁▁▁▁▁▁ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackFinish + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  @@ -1395,255 +1407,257 @@ font-weight: 700; } - .terminal-4207832566-matrix { + .terminal-368636757-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-4207832566-title { + .terminal-368636757-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-4207832566-r1 { fill: #c5c8c6 } - .terminal-4207832566-r2 { fill: #e3e3e3 } - .terminal-4207832566-r3 { fill: #989898 } - .terminal-4207832566-r4 { fill: #e1e1e1 } - .terminal-4207832566-r5 { fill: #121212 } - .terminal-4207832566-r6 { fill: #0053aa } - .terminal-4207832566-r7 { fill: #dde8f3;font-weight: bold } - .terminal-4207832566-r8 { fill: #454a50 } - .terminal-4207832566-r9 { fill: #a5a5a5;font-style: italic; } - .terminal-4207832566-r10 { fill: #e2e3e3;font-weight: bold } - .terminal-4207832566-r11 { fill: #1e1e1e } - .terminal-4207832566-r12 { fill: #008139 } - .terminal-4207832566-r13 { fill: #000000 } - 
.terminal-4207832566-r14 { fill: #e2e2e2 } - .terminal-4207832566-r15 { fill: #b93c5b } - .terminal-4207832566-r16 { fill: #7ae998 } - .terminal-4207832566-r17 { fill: #507bb3 } - .terminal-4207832566-r18 { fill: #0a180e;font-weight: bold } - .terminal-4207832566-r19 { fill: #dde6ed;font-weight: bold } - .terminal-4207832566-r20 { fill: #001541 } - .terminal-4207832566-r21 { fill: #ddedf9 } + .terminal-368636757-r1 { fill: #c5c8c6 } + .terminal-368636757-r2 { fill: #e3e3e3 } + .terminal-368636757-r3 { fill: #989898 } + .terminal-368636757-r4 { fill: #e1e1e1 } + .terminal-368636757-r5 { fill: #121212 } + .terminal-368636757-r6 { fill: #0053aa } + .terminal-368636757-r7 { fill: #dde8f3;font-weight: bold } + .terminal-368636757-r8 { fill: #454a50 } + .terminal-368636757-r9 { fill: #a5a5a5;font-style: italic; } + .terminal-368636757-r10 { fill: #e2e3e3;font-weight: bold } + .terminal-368636757-r11 { fill: #1e1e1e } + .terminal-368636757-r12 { fill: #008139 } + .terminal-368636757-r13 { fill: #000000 } + .terminal-368636757-r14 { fill: #787878 } + .terminal-368636757-r15 { fill: #e2e2e2 } + .terminal-368636757-r16 { fill: #b93c5b } + .terminal-368636757-r17 { fill: #808080 } + .terminal-368636757-r18 { fill: #7ae998 } + .terminal-368636757-r19 { fill: #507bb3 } + .terminal-368636757-r20 { fill: #0a180e;font-weight: bold } + .terminal-368636757-r21 { fill: #dde6ed;font-weight: bold } + .terminal-368636757-r22 { fill: #001541 } + .terminal-368636757-r23 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Create a GitHub repo - - 
▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - After creating the pipeline template locally, we can create a GitHub repository and push the - code to it. - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Your GitHub usernameYour GitHub personal access token for Show - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔login.▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - mirpedrol▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁•••••••••••••••••••••••••••••••••••• - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Please select the the GitHub repository settings: - - - ▔▔▔▔▔▔▔▔Select if the new GitHub repo must be private. - - ▁▁▁▁▁▁▁▁ - ▔▔▔▔▔▔▔▔Select if you would like to push all the pipeline template files to your GitHub repo - and all the branches required to keep the pipeline up to date with new releases of nf-core - ▁▁▁▁▁▁▁▁ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Create GitHub repoFinish without creating a repo - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Create GitHub repository + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Create a GitHub repo + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + After creating the pipeline template locally, we can create a GitHub repository and push the + code to it. 
+ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Your GitHub usernameYour GitHub personal access token for Show + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔login.▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + GitHub username▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁GitHub token + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Please select the the GitHub repository settings: + + + ▔▔▔▔▔▔▔▔Private + Select if the new GitHub repo must be private. + ▁▁▁▁▁▁▁▁ + ▔▔▔▔▔▔▔▔Push files + Select if you would like to push all the pipeline template files to your GitHub repo + ▁▁▁▁▁▁▁▁and all the branches required to keep the pipeline up to date with new releases of  + nf-core. + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackCreate GitHub repoFinish without creating a repo + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  @@ -1673,253 +1687,255 @@ font-weight: 700; } - .terminal-1481614550-matrix { + .terminal-1480303962-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1481614550-title { + .terminal-1480303962-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1481614550-r1 { fill: #c5c8c6 } - .terminal-1481614550-r2 { fill: #e3e3e3 } - .terminal-1481614550-r3 { fill: #989898 } - .terminal-1481614550-r4 { fill: #e1e1e1 } - .terminal-1481614550-r5 { fill: #98a84b } - .terminal-1481614550-r6 { fill: #626262 } - .terminal-1481614550-r7 { fill: #608ab1 } - .terminal-1481614550-r8 { fill: #d0b344 } - .terminal-1481614550-r9 { fill: #4ebf71;font-weight: bold } - .terminal-1481614550-r10 { fill: #d2d2d2 } - .terminal-1481614550-r11 { fill: #82aaff } - .terminal-1481614550-r12 { fill: #eeffff } - .terminal-1481614550-r13 { fill: #7ae998 } - .terminal-1481614550-r14 { fill: #507bb3 } - .terminal-1481614550-r15 { fill: #dde6ed;font-weight: bold } - .terminal-1481614550-r16 { fill: 
#008139 } - .terminal-1481614550-r17 { fill: #001541 } - .terminal-1481614550-r18 { fill: #dde8f3;font-weight: bold } - .terminal-1481614550-r19 { fill: #ddedf9 } + .terminal-1480303962-r1 { fill: #c5c8c6 } + .terminal-1480303962-r2 { fill: #e3e3e3 } + .terminal-1480303962-r3 { fill: #989898 } + .terminal-1480303962-r4 { fill: #e1e1e1 } + .terminal-1480303962-r5 { fill: #121212 } + .terminal-1480303962-r6 { fill: #0053aa } + .terminal-1480303962-r7 { fill: #dde8f3;font-weight: bold } + .terminal-1480303962-r8 { fill: #98a84b } + .terminal-1480303962-r9 { fill: #626262 } + .terminal-1480303962-r10 { fill: #608ab1 } + .terminal-1480303962-r11 { fill: #d0b344 } + .terminal-1480303962-r12 { fill: #4ebf71;font-weight: bold } + .terminal-1480303962-r13 { fill: #d2d2d2 } + .terminal-1480303962-r14 { fill: #82aaff } + .terminal-1480303962-r15 { fill: #eeffff } + .terminal-1480303962-r16 { fill: #7ae998 } + .terminal-1480303962-r17 { fill: #507bb3 } + .terminal-1480303962-r18 { fill: #dde6ed;font-weight: bold } + .terminal-1480303962-r19 { fill: #008139 } + .terminal-1480303962-r20 { fill: #001541 } + .terminal-1480303962-r21 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - -                                         ,--./,-. -         ___     __   __   __   ___     /,-._.--~\ - |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                        `._,._,' - - If you would like to create the GitHub repository later, you can do it manually by following - these steps: - -  1. Create a new GitHub repository -  2. 
Add the remote to your local repository - - - cd<pipeline_directory> - gitremoteaddorigingit@github.com:<username>/<repo_name>.git - - -  3. Push the code to the remote - - - gitpush--allorigin - - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Close AppShow Logging - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + HowTo create a GitHub repository + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + +                                         ,--./,-. +         ___     __   __   __   ___     /,-._.--~\ + |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                        `._,._,' + + If you would like to create the GitHub repository later, you can do it manually by following + these steps: + +  1. Create a new GitHub repository +  2. Add the remote to your local repository + + + cd<pipeline_directory> + gitremoteaddorigingit@github.com:<username>/<repo_name>.git + + +  3. 
Push the code to the remote + + + gitpush--allorigin + + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Close AppShow Logging + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  @@ -1949,248 +1965,248 @@ font-weight: 700; } - .terminal-3406096395-matrix { + .terminal-4165331380-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3406096395-title { + .terminal-4165331380-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3406096395-r1 { fill: #c5c8c6 } - .terminal-3406096395-r2 { fill: #e3e3e3 } - .terminal-3406096395-r3 { fill: #989898 } - .terminal-3406096395-r4 { fill: #e1e1e1 } - .terminal-3406096395-r5 { fill: #121212 } - .terminal-3406096395-r6 { fill: #0053aa } - .terminal-3406096395-r7 { fill: #dde8f3;font-weight: bold } - .terminal-3406096395-r8 { fill: #7ae998 } - .terminal-3406096395-r9 { fill: #507bb3 } - .terminal-3406096395-r10 { fill: #4ebf71;font-weight: bold } - .terminal-3406096395-r11 { fill: #dde6ed;font-weight: bold } - .terminal-3406096395-r12 { fill: #008139 } - .terminal-3406096395-r13 { fill: #001541 } - .terminal-3406096395-r14 { fill: #ddedf9 } + .terminal-4165331380-r1 { fill: #c5c8c6 } + .terminal-4165331380-r2 { fill: #e3e3e3 } + .terminal-4165331380-r3 { fill: #989898 } + .terminal-4165331380-r4 { fill: #e1e1e1 } + .terminal-4165331380-r5 { fill: #121212 } + .terminal-4165331380-r6 { fill: #0053aa } + .terminal-4165331380-r7 { fill: #dde8f3;font-weight: bold } + .terminal-4165331380-r8 { fill: #7ae998 } + .terminal-4165331380-r9 { fill: #507bb3 } + .terminal-4165331380-r10 { fill: #4ebf71;font-weight: bold } + .terminal-4165331380-r11 { fill: #dde6ed;font-weight: bold } + .terminal-4165331380-r12 { fill: #008139 } + .terminal-4165331380-r13 { fill: #001541 } + .terminal-4165331380-r14 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - 
+ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Create a GitHub repo - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - After creating the pipeline template locally, we can create a GitHub repository and push the - code to it. - - Do you want to create a GitHub repository? - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Create GitHub repoFinish without creating a repo - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Create GitHub repository + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + After creating the pipeline template locally, we can create a GitHub repository and push the + code to it. + + Do you want to create a GitHub repository? 
+ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Create GitHub repoFinish without creating a repo + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  @@ -2220,249 +2236,254 @@ font-weight: 700; } - .terminal-2624233686-matrix { + .terminal-3762159600-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2624233686-title { + .terminal-3762159600-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2624233686-r1 { fill: #c5c8c6 } - .terminal-2624233686-r2 { fill: #e3e3e3 } - .terminal-2624233686-r3 { fill: #989898 } - .terminal-2624233686-r4 { fill: #1e1e1e } - .terminal-2624233686-r5 { fill: #e1e1e1 } - .terminal-2624233686-r6 { fill: #507bb3 } - .terminal-2624233686-r7 { fill: #e2e2e2 } - .terminal-2624233686-r8 { fill: #808080 } - .terminal-2624233686-r9 { fill: #dde6ed;font-weight: bold } - .terminal-2624233686-r10 { fill: #001541 } - .terminal-2624233686-r11 { fill: #7ae998 } - .terminal-2624233686-r12 { fill: #0a180e;font-weight: bold } - .terminal-2624233686-r13 { fill: #008139 } - .terminal-2624233686-r14 { fill: #dde8f3;font-weight: bold } - .terminal-2624233686-r15 { fill: #ddedf9 } + .terminal-3762159600-r1 { fill: #c5c8c6 } + .terminal-3762159600-r2 { fill: #e3e3e3 } + .terminal-3762159600-r3 { fill: #989898 } + .terminal-3762159600-r4 { fill: #e1e1e1 } + .terminal-3762159600-r5 { fill: #121212 } + .terminal-3762159600-r6 { fill: #0053aa } + .terminal-3762159600-r7 { fill: #dde8f3;font-weight: bold } + .terminal-3762159600-r8 { fill: #1e1e1e } + .terminal-3762159600-r9 { fill: #507bb3 } + .terminal-3762159600-r10 { fill: #e2e2e2 } + .terminal-3762159600-r11 { fill: #808080 } + .terminal-3762159600-r12 { fill: #dde6ed;font-weight: bold } + .terminal-3762159600-r13 { fill: #001541 } + .terminal-3762159600-r14 { fill: #454a50 } + 
.terminal-3762159600-r15 { fill: #7ae998 } + .terminal-3762159600-r16 { fill: #e2e3e3;font-weight: bold } + .terminal-3762159600-r17 { fill: #0a180e;font-weight: bold } + .terminal-3762159600-r18 { fill: #000000 } + .terminal-3762159600-r19 { fill: #008139 } + .terminal-3762159600-r20 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Use reference genomesThe pipeline will be Show help - ▁▁▁▁▁▁▁▁configured to use a copy ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - of the most common  - reference genome files  - from iGenomes - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Add Github CI testsThe pipeline will includeShow help - ▁▁▁▁▁▁▁▁several GitHub actions ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - for Continuous  - Integration (CI) testing - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Add Github badgesThe README.md file of theShow help - ▁▁▁▁▁▁▁▁pipeline will include ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - GitHub badges - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Add configuration filesThe pipeline will includeShow help - ▁▁▁▁▁▁▁▁configuration profiles ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - containing custom  - parameters requried to  - run nf-core pipelines at  - different institutions - - - - - - - - - - - - - - - - - - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Continue - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Template features + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Use reference genomesThe pipeline will be Show help + ▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + copy of the most  + common reference  + genome files from  + iGenomes 
+ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Add Github CI testsThe pipeline will Show help + ▁▁▁▁▁▁▁▁include several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + GitHub actions for  + Continuous  + Integration (CI)  + testing + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Add Github badgesThe README.md file ofShow help + ▁▁▁▁▁▁▁▁the pipeline will ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + include GitHub badges + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Add configuration The pipeline will Show help + ▁▁▁▁▁▁▁▁filesinclude configuration▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + profiles containing  + custom parameters  + requried to run  + nf-core pipelines at  + different  + institutions + + + + + + + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackContinue + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  D  Toggle dark mode  Q  Quit  @@ -2492,249 +2513,254 @@ font-weight: 700; } - .terminal-1728001786-matrix { + .terminal-1488796558-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1728001786-title { + .terminal-1488796558-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1728001786-r1 { fill: #c5c8c6 } - .terminal-1728001786-r2 { fill: #e3e3e3 } - .terminal-1728001786-r3 { fill: #989898 } - .terminal-1728001786-r4 { fill: #1e1e1e } - .terminal-1728001786-r5 { fill: #e1e1e1 } - .terminal-1728001786-r6 { fill: #507bb3 } - .terminal-1728001786-r7 { fill: #e2e2e2 } - .terminal-1728001786-r8 { fill: #808080 } - .terminal-1728001786-r9 { fill: #dde6ed;font-weight: bold } - .terminal-1728001786-r10 { fill: #001541 } - .terminal-1728001786-r11 { fill: #7ae998 } - .terminal-1728001786-r12 { fill: #0a180e;font-weight: bold } - .terminal-1728001786-r13 { fill: #008139 } - .terminal-1728001786-r14 { fill: #dde8f3;font-weight: bold } - .terminal-1728001786-r15 { fill: #ddedf9 } + .terminal-1488796558-r1 { fill: #c5c8c6 } + .terminal-1488796558-r2 { fill: #e3e3e3 } + .terminal-1488796558-r3 { fill: #989898 } + .terminal-1488796558-r4 { fill: #e1e1e1 } + .terminal-1488796558-r5 { fill: #121212 } + 
.terminal-1488796558-r6 { fill: #0053aa } + .terminal-1488796558-r7 { fill: #dde8f3;font-weight: bold } + .terminal-1488796558-r8 { fill: #1e1e1e } + .terminal-1488796558-r9 { fill: #507bb3 } + .terminal-1488796558-r10 { fill: #e2e2e2 } + .terminal-1488796558-r11 { fill: #808080 } + .terminal-1488796558-r12 { fill: #dde6ed;font-weight: bold } + .terminal-1488796558-r13 { fill: #001541 } + .terminal-1488796558-r14 { fill: #454a50 } + .terminal-1488796558-r15 { fill: #7ae998 } + .terminal-1488796558-r16 { fill: #e2e3e3;font-weight: bold } + .terminal-1488796558-r17 { fill: #0a180e;font-weight: bold } + .terminal-1488796558-r18 { fill: #000000 } + .terminal-1488796558-r19 { fill: #008139 } + .terminal-1488796558-r20 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Use reference genomesThe pipeline will be Show help - ▁▁▁▁▁▁▁▁configured to use a copy ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - of the most common  - reference genome files  - from iGenomes - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Continue - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Template features + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Use reference genomesThe pipeline will be Show help + ▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + copy of the most  + common reference  + genome files from  + iGenomes + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackContinue + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  D  Toggle dark mode  Q  Quit  @@ -2764,253 +2790,256 @@ font-weight: 700; } - .terminal-2559761451-matrix { + .terminal-2179958535-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2559761451-title { + .terminal-2179958535-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2559761451-r1 { fill: #c5c8c6 } - .terminal-2559761451-r2 { fill: #e3e3e3 } - .terminal-2559761451-r3 { fill: #989898 } - .terminal-2559761451-r4 { fill: #e1e1e1 } - .terminal-2559761451-r5 { fill: #121212 } - .terminal-2559761451-r6 { fill: #0053aa } - .terminal-2559761451-r7 { fill: #dde8f3;font-weight: bold } - .terminal-2559761451-r8 { fill: #a5a5a5;font-style: italic; } - .terminal-2559761451-r9 { fill: #1e1e1e } - .terminal-2559761451-r10 { fill: #0f4e2a } - .terminal-2559761451-r11 { fill: #7b3042 } - .terminal-2559761451-r12 { fill: #a7a7a7 } - .terminal-2559761451-r13 { fill: #787878 } - .terminal-2559761451-r14 { fill: #e2e2e2 } - .terminal-2559761451-r15 { fill: #b93c5b } - .terminal-2559761451-r16 { fill: #166d39 } - .terminal-2559761451-r17 { fill: #3c8b54;font-weight: bold } - .terminal-2559761451-r18 { fill: #5aa86f } - .terminal-2559761451-r19 { fill: #ddedf9 } + .terminal-2179958535-r1 { fill: #c5c8c6 } + .terminal-2179958535-r2 { fill: #e3e3e3 } + .terminal-2179958535-r3 { fill: #989898 } + .terminal-2179958535-r4 { fill: #e1e1e1 } + .terminal-2179958535-r5 { fill: #121212 } + .terminal-2179958535-r6 { fill: #0053aa } + .terminal-2179958535-r7 { fill: #dde8f3;font-weight: bold } + .terminal-2179958535-r8 { fill: #a5a5a5;font-style: italic; } + .terminal-2179958535-r9 { fill: #1e1e1e } + .terminal-2179958535-r10 { fill: #0f4e2a } + .terminal-2179958535-r11 { fill: #7b3042 } + .terminal-2179958535-r12 { fill: #a7a7a7 } + .terminal-2179958535-r13 { fill: #787878 } + 
.terminal-2179958535-r14 { fill: #e2e2e2 } + .terminal-2179958535-r15 { fill: #b93c5b } + .terminal-2179958535-r16 { fill: #454a50 } + .terminal-2179958535-r17 { fill: #166d39 } + .terminal-2179958535-r18 { fill: #e2e3e3;font-weight: bold } + .terminal-2179958535-r19 { fill: #3c8b54;font-weight: bold } + .terminal-2179958535-r20 { fill: #000000 } + .terminal-2179958535-r21 { fill: #5aa86f } + .terminal-2179958535-r22 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Basic details - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - - GitHub organisationWorkflow name - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - nf-corePipeline Name - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - Value error, Must be lowercase without  - punctuation. - - A short description of your pipeline. - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Description - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - Value error, Cannot be left empty. - - Name of the main author / authors - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Author(s) - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - Value error, Cannot be left empty. 
- ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Next - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Basic details + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + + GitHub organisationWorkflow name + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + nf-corePipeline Name + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + Value error, Must be lowercase without  + punctuation. + + A short description of your pipeline. + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Description + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + Value error, Cannot be left empty. + + Name of the main author / authors + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Author(s) + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + Value error, Cannot be left empty. 
+ ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackNext + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  @@ -3040,145 +3069,145 @@ font-weight: 700; } - .terminal-2319623653-matrix { + .terminal-2481518089-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2319623653-title { + .terminal-2481518089-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2319623653-r1 { fill: #c5c8c6 } - .terminal-2319623653-r2 { fill: #e3e3e3 } - .terminal-2319623653-r3 { fill: #989898 } - .terminal-2319623653-r4 { fill: #1e1e1e } - .terminal-2319623653-r5 { fill: #e1e1e1 } - .terminal-2319623653-r6 { fill: #98a84b } - .terminal-2319623653-r7 { fill: #626262 } - .terminal-2319623653-r8 { fill: #608ab1 } - .terminal-2319623653-r9 { fill: #d0b344 } - .terminal-2319623653-r10 { fill: #121212 } - .terminal-2319623653-r11 { fill: #0053aa } - .terminal-2319623653-r12 { fill: #dde8f3;font-weight: bold } - .terminal-2319623653-r13 { fill: #e1e1e1;text-decoration: underline; } - .terminal-2319623653-r14 { fill: #14191f } - .terminal-2319623653-r15 { fill: #ddedf9 } + .terminal-2481518089-r1 { fill: #c5c8c6 } + .terminal-2481518089-r2 { fill: #e3e3e3 } + .terminal-2481518089-r3 { fill: #989898 } + .terminal-2481518089-r4 { fill: #1e1e1e } + .terminal-2481518089-r5 { fill: #e1e1e1 } + .terminal-2481518089-r6 { fill: #121212 } + .terminal-2481518089-r7 { fill: #0053aa } + .terminal-2481518089-r8 { fill: #dde8f3;font-weight: bold } + .terminal-2481518089-r9 { fill: #98a84b } + .terminal-2481518089-r10 { fill: #626262 } + .terminal-2481518089-r11 { fill: #608ab1 } + .terminal-2481518089-r12 { fill: #d0b344 } + .terminal-2481518089-r13 { fill: #14191f } + .terminal-2481518089-r14 { fill: #e1e1e1;text-decoration: underline; } + .terminal-2481518089-r15 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + 
- + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pip… - -                                         ,--./,-. -         ___     __   __   __   ___     /,-._.--~\ - |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                        `._,._,' - - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - nf-core create - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - This app will help you create a new nf-core pipeline. It uses the  - nf-core pipeline template, which is kept within the nf-core/tools  - repository. - - Using this tool is mandatory when making a pipeline that may be part  - of the nf-core community collection at some point. However, this tool  - can also be used to create pipelines that will never be part of ▁▁ - nf-core. You can still benefit from the community best practices for  - your own workflow. - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pip… + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Create a pipeline from the nf-core template + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + +                                         ,--./,-. +         ___     __   __   __   ___     /,-._.--~\ + |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                        `._,._,' + ▇▇ + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + nf-core create + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + This app will help you create a new nf-core pipeline. It uses the  + nf-core pipeline template, which is kept within the nf-core/tools  + repository. 
+ +  D Toggle dark mode  Q  Quit  diff --git a/tests/test_create_app.py b/tests/test_create_app.py index 710359e94..b6b05ab58 100644 --- a/tests/test_create_app.py +++ b/tests/test_create_app.py @@ -231,6 +231,7 @@ def test_github_details(mock_init_pipeline, snap_compare): """ async def run_before(pilot) -> None: + delete = ["backspace"] * 50 await pilot.click("#start") await pilot.click("#type_nfcore") await pilot.click("#name") @@ -244,6 +245,10 @@ async def run_before(pilot) -> None: await pilot.click("#finish") await pilot.click("#close_screen") await pilot.click("#github_repo") + await pilot.click("#gh_username") + await pilot.press(*delete) # delete field automatically filled using github CLI + await pilot.press("tab") + await pilot.press(*delete) assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) From 2943098ce9d74e3a363dcd12832b209793b2894d Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 6 Mar 2024 15:48:23 +0100 Subject: [PATCH 070/737] some fixes after merging dev to textual-create branch --- .github/actions/create-lint-wf/action.yml | 2 +- tests/lint/template_strings.py | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/.github/actions/create-lint-wf/action.yml b/.github/actions/create-lint-wf/action.yml index fba0ff343..5b7dbfffe 100644 --- a/.github/actions/create-lint-wf/action.yml +++ b/.github/actions/create-lint-wf/action.yml @@ -27,7 +27,7 @@ runs: run: | mkdir -p create-lint-wf && cd create-lint-wf export NXF_WORK=$(pwd) - nf-core --log-file log.txt pipelines create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" --plain + nf-core --log-file log.txt pipelines create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" # Try syncing it before we change anything - name: nf-core sync diff --git a/tests/lint/template_strings.py b/tests/lint/template_strings.py index ac0ae0168..50c956b21 100644 --- 
a/tests/lint/template_strings.py +++ b/tests/lint/template_strings.py @@ -1,8 +1,8 @@ import subprocess from pathlib import Path -import nf_core.create import nf_core.lint +import nf_core.pipelines.create def test_template_strings(self): @@ -16,7 +16,6 @@ def test_template_strings(self): lint_obj = nf_core.lint.PipelineLint(new_pipeline) lint_obj._load() result = lint_obj.template_strings() - print(result["failed"]) assert len(result["failed"]) == 1 assert len(result["ignored"]) == 0 From 3cfdc783fe4f6ec9eb811825f04294b198fa69f9 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 6 Mar 2024 16:35:39 +0100 Subject: [PATCH 071/737] fix packaging. Thanks @mashehu for the debugging help --- MANIFEST.in | 1 + nf_core/__main__.py | 8 +++++++- nf_core/pipelines/__init__.py | 1 + 3 files changed, 9 insertions(+), 1 deletion(-) create mode 100644 nf_core/pipelines/__init__.py diff --git a/MANIFEST.in b/MANIFEST.in index 5ec177b78..68f115d97 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -9,3 +9,4 @@ include nf_core/assets/logo/nf-core-repo-logo-base-lightbg.png include nf_core/assets/logo/nf-core-repo-logo-base-darkbg.png include nf_core/assets/logo/placeholder_logo.svg include nf_core/assets/logo/MavenPro-Bold.ttf +include nf_core/pipelines/create/create.tcss diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 937f55ef8..ea3a24c3d 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -44,7 +44,7 @@ { "name": "Commands for developers", "commands": [ - "create", + "pipelines", "lint", "modules", "subworkflows", @@ -55,6 +55,12 @@ ], }, ], + "nf-core pipelines": [ + { + "name": "Pipeline commands", + "commands": ["create"], + }, + ], "nf-core modules": [ { "name": "For pipelines", diff --git a/nf_core/pipelines/__init__.py b/nf_core/pipelines/__init__.py new file mode 100644 index 000000000..bc981c449 --- /dev/null +++ b/nf_core/pipelines/__init__.py @@ -0,0 +1 @@ +from .create import PipelineCreateApp From e7687e94464ee52aa4190c8c12572dfd4e4bf920 Mon Sep 17 
00:00:00 2001 From: mirpedrol Date: Thu, 7 Mar 2024 13:46:50 +0100 Subject: [PATCH 072/737] add changed from dev branch to create.py script --- .../assets/multiqc_config.yml | 4 +- nf_core/pipelines/create/create.py | 90 ++++++------------- 2 files changed, 30 insertions(+), 64 deletions(-) diff --git a/nf_core/pipeline-template/assets/multiqc_config.yml b/nf_core/pipeline-template/assets/multiqc_config.yml index b13b7ae07..cd4e539b3 100644 --- a/nf_core/pipeline-template/assets/multiqc_config.yml +++ b/nf_core/pipeline-template/assets/multiqc_config.yml @@ -1,11 +1,11 @@ report_comment: > {% if 'dev' in version -%} This report has been generated by the {{ name }} - analysis pipeline.{% if branded %} For information about how to interpret these results, please see the + analysis pipeline.{% if is_nfcore %} For information about how to interpret these results, please see the documentation.{% endif %} {%- else %} This report has been generated by the {{ name }} - analysis pipeline.{% if branded %} For information about how to interpret these results, please see the + analysis pipeline.{% if is_nfcore %} For information about how to interpret these results, please see the documentation.{% endif %} {% endif %} report_section_order: diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index dc0c6c796..51e7da6ba 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -4,22 +4,19 @@ import configparser import logging import os -import random import re import shutil -import time from pathlib import Path from typing import Optional, Union -import filetype # type: ignore import git import jinja2 -import requests import yaml import nf_core import nf_core.schema import nf_core.utils +from nf_core.create_logo import create_logo from nf_core.lint_utils import run_prettier_on_file from nf_core.pipelines.create.utils import CreateConfig @@ -271,15 +268,13 @@ def render_template(self): # Check if the output 
directory exists if self.outdir.exists(): if self.force: - log.warning( - f"Output directory '{self.outdir}' exists - removing the existing directory as --force specified" - ) - shutil.rmtree(self.outdir) + log.warning(f"Output directory '{self.outdir}' exists - continuing as --force specified") else: log.error(f"Output directory '{self.outdir}' exists!") log.info("Use -f / --force to overwrite existing files") raise UserWarning(f"Output directory '{self.outdir}' exists!") - os.makedirs(self.outdir) + else: + os.makedirs(self.outdir) # Run jinja2 for each file in the template folder env = jinja2.Environment( @@ -296,7 +291,7 @@ def render_template(self): short_name = self.jinja_params["short_name"] rename_files = { "workflows/pipeline.nf": f"workflows/{short_name}.nf", - "lib/WorkflowPipeline.groovy": f"lib/Workflow{short_name.title()}.groovy", + "subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf": f"subworkflows/local/utils_nfcore_{short_name}_pipeline/main.nf", } # Set the paths to skip according to customization @@ -508,57 +503,13 @@ def fix_linting(self): def make_pipeline_logo(self): """Fetch a logo for the new pipeline from the nf-core website""" - - logo_url = f"https://nf-co.re/logo/{self.jinja_params['short_name']}?theme=light" - log.debug(f"Fetching logo from {logo_url}") - - email_logo_path = self.outdir / "assets" / f"{self.jinja_params['name_noslash']}_logo_light.png" - self.download_pipeline_logo(f"{logo_url}?w=600&theme=light", email_logo_path) + email_logo_path = Path(self.outdir) / "assets" + create_logo(text=self.jinja_params["short_name"], dir=email_logo_path, theme="light", force=self.force) for theme in ["dark", "light"]: - readme_logo_url = f"{logo_url}?w=600&theme={theme}" - readme_logo_path = self.outdir / "docs" / "images" / f"{self.jinja_params['name_noslash']}_logo_{theme}.png" - self.download_pipeline_logo(readme_logo_url, readme_logo_path) - - def download_pipeline_logo(self, url, img_fn): - """Attempt to download a logo from 
the website. Retry if it fails.""" - os.makedirs(os.path.dirname(img_fn), exist_ok=True) - attempt = 0 - max_attempts = 10 - retry_delay = 0 # x up to 10 each time, so first delay will be 1-100 seconds - while attempt < max_attempts: - # If retrying, wait a while - if retry_delay > 0: - log.info(f"Waiting {retry_delay} seconds before next image fetch attempt") - time.sleep(retry_delay) - - attempt += 1 - # Use a random number to avoid the template sync hitting the website simultaneously for all pipelines - retry_delay = random.randint(1, 100) * attempt - log.debug(f"Fetching logo '{img_fn}' (attempt {attempt})") - try: - # Try to fetch the logo from the website - r = requests.get(url, timeout=180) - if r.status_code != 200: - raise UserWarning(f"Got status code {r.status_code}") - # Check that the returned image looks right - - except (ConnectionError, UserWarning) as e: - # Something went wrong - try again - log.warning(e) - log.error("Connection error - retrying") - continue - - # Write the new logo to the file - with open(img_fn, "wb") as fh: - fh.write(r.content) - # Check that the file looks valid - image_type = filetype.guess(img_fn).extension - if image_type != "png": - log.error(f"Logo from the website didn't look like an image: '{image_type}'") - continue - - # Got this far, presumably it's good - break the retry loop - break + readme_logo_path = Path(self.outdir) / "docs" / "images" + create_logo( + text=self.jinja_params["short_name"], dir=readme_logo_path, width=600, theme=theme, force=self.force + ) def git_init_pipeline(self): """Initialises the new pipeline as a Git repository and submits first commit. 
@@ -587,8 +538,23 @@ def git_init_pipeline(self): repo.index.commit(f"initial template build from nf-core/tools, version {nf_core.__version__}") if default_branch: repo.active_branch.rename(default_branch) - repo.git.branch("TEMPLATE") - repo.git.branch("dev") + try: + repo.git.branch("TEMPLATE") + repo.git.branch("dev") + + except git.GitCommandError as e: + if "already exists" in e.stderr: + log.debug("Branches 'TEMPLATE' and 'dev' already exist") + if self.force: + log.debug("Force option set - deleting branches") + repo.git.branch("-D", "TEMPLATE") + repo.git.branch("-D", "dev") + repo.git.branch("TEMPLATE") + repo.git.branch("dev") + else: + raise UserWarning( + "Branches 'TEMPLATE' and 'dev' already exist. Use --force to overwrite existing branches." + ) log.info( "Done. Remember to add a remote and push to GitHub:\n" f"[white on grey23] cd {self.outdir} \n" From 1debc4b4b1759e1f9562bb9516e3b5cff1e6817b Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Thu, 7 Mar 2024 13:53:02 +0100 Subject: [PATCH 073/737] fix back button after validation of basic details --- nf-core-a | 1 - nf_core/pipelines/create/basicdetails.py | 9 +++++---- 2 files changed, 5 insertions(+), 5 deletions(-) delete mode 160000 nf-core-a diff --git a/nf-core-a b/nf-core-a deleted file mode 160000 index 6a887ed6e..000000000 --- a/nf-core-a +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 6a887ed6ebd510b597a45c4acc505d830313950b diff --git a/nf_core/pipelines/create/basicdetails.py b/nf_core/pipelines/create/basicdetails.py index aa2886c1b..e4f36e403 100644 --- a/nf_core/pipelines/create/basicdetails.py +++ b/nf_core/pipelines/create/basicdetails.py @@ -69,9 +69,10 @@ def on_button_pressed(self, event: Button.Pressed) -> None: text_input.query_one(".validation_msg").update("") try: self.parent.TEMPLATE_CONFIG = CreateConfig(**config) - if self.parent.PIPELINE_TYPE == "nfcore": - self.parent.push_screen("type_nfcore") - elif self.parent.PIPELINE_TYPE == "custom": - 
self.parent.push_screen("type_custom") + if event.button.id == "next": + if self.parent.PIPELINE_TYPE == "nfcore": + self.parent.push_screen("type_nfcore") + elif self.parent.PIPELINE_TYPE == "custom": + self.parent.push_screen("type_custom") except ValueError: pass From 41e99a16b5a9721041474daf6ec6f084aaa8ff84 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Thu, 7 Mar 2024 14:10:51 +0100 Subject: [PATCH 074/737] move button to close logging screen to the bottom --- nf_core/pipelines/create/__init__.py | 2 +- nf_core/pipelines/create/create.tcss | 6 ++++++ nf_core/pipelines/create/loggingscreen.py | 4 ++-- 3 files changed, 9 insertions(+), 3 deletions(-) diff --git a/nf_core/pipelines/create/__init__.py b/nf_core/pipelines/create/__init__.py index 126b3db62..feff20659 100644 --- a/nf_core/pipelines/create/__init__.py +++ b/nf_core/pipelines/create/__init__.py @@ -21,7 +21,7 @@ ) from nf_core.pipelines.create.welcome import WelcomeScreen -log_handler = CustomLogHandler(console=LoggingConsole(), rich_tracebacks=True, markup=True) +log_handler = CustomLogHandler(console=LoggingConsole(classes="log_console"), rich_tracebacks=True, markup=True) logging.basicConfig( level="INFO", handlers=[log_handler], diff --git a/nf_core/pipelines/create/create.tcss b/nf_core/pipelines/create/create.tcss index 51ed5745f..46b398901 100644 --- a/nf_core/pipelines/create/create.tcss +++ b/nf_core/pipelines/create/create.tcss @@ -98,3 +98,9 @@ Vertical{ .displayed #hide_password { display: block; } + +/* Logging console */ + +.log_console { + height: auto; +} diff --git a/nf_core/pipelines/create/loggingscreen.py b/nf_core/pipelines/create/loggingscreen.py index 3cfe3f8d8..68b65619c 100644 --- a/nf_core/pipelines/create/loggingscreen.py +++ b/nf_core/pipelines/create/loggingscreen.py @@ -32,6 +32,7 @@ def compose(self) -> ComposeResult: id="logo", ) yield Markdown(markdown) + yield Center(self.parent.LOG_HANDLER.console) if self.parent.LOGGING_STATE == "repo created": yield Center( 
Button("Close App", id="close_app", variant="success"), @@ -39,7 +40,6 @@ def compose(self) -> ComposeResult: ) else: yield Center( - Button("Close logging screen", id="close_screen", variant="success", disabled=True), + Button("Continue", id="close_screen", variant="success", disabled=True), classes="cta", ) - yield Center(self.parent.LOG_HANDLER.console, classes="cta") From 67db794a5540307889fed9f04f1d4c0c965c3797 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Thu, 7 Mar 2024 14:24:15 +0100 Subject: [PATCH 075/737] Merge branch 'dev' of https://github.com/nf-core/tools into textual-create --- .pre-commit-config.yaml | 4 +- CHANGELOG.md | 3 + nf_core/__init__.py | 2 +- nf_core/__main__.py | 3 +- nf_core/components/components_test.py | 7 +- nf_core/components/create.py | 1 - nf_core/components/lint/__init__.py | 1 - nf_core/components/nfcore_component.py | 1 + nf_core/launch.py | 3 +- nf_core/licences.py | 1 - nf_core/lint/nextflow_config.py | 5 +- nf_core/list.py | 1 - nf_core/modules/bump_versions.py | 1 - nf_core/modules/lint/__init__.py | 1 - nf_core/modules/lint/module_changes.py | 1 + nf_core/modules/lint/module_tests.py | 1 + nf_core/params_file.py | 3 +- .../.github/workflows/ci.yml | 2 +- .../.github/workflows/download_pipeline.yml | 2 +- .../.github/workflows/linting.yml | 2 +- .../pipeline-template/.pre-commit-config.yaml | 3 + nf_core/pipelines/create/create.py | 1 + nf_core/schema.py | 66 ++++++++----------- nf_core/subworkflows/lint/__init__.py | 1 - .../subworkflows/lint/subworkflow_changes.py | 1 + .../subworkflows/lint/subworkflow_tests.py | 1 + nf_core/sync.py | 3 +- nf_core/utils.py | 1 + tests/components/generate_snapshot.py | 1 + tests/components/snapshot_test.py | 1 + tests/test_bump_version.py | 4 +- tests/test_cli.py | 2 +- tests/test_components.py | 3 +- tests/test_create.py | 4 +- tests/test_download.py | 3 +- tests/test_launch.py | 3 +- tests/test_licenses.py | 3 +- tests/test_lint.py | 4 +- tests/test_list.py | 3 +- tests/test_modules.py 
| 3 +- tests/test_refgenie.py | 3 +- tests/test_schema.py | 3 +- tests/test_subworkflows.py | 3 +- tests/test_sync.py | 3 +- tests/test_utils.py | 3 +- 45 files changed, 79 insertions(+), 92 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 03fbb7bed..b618e0083 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.2.2 + rev: v0.3.1 hooks: - id: ruff # linter args: [--fix, --exit-non-zero-on-fix] # sort imports and fix @@ -9,6 +9,8 @@ repos: rev: "v3.1.0" hooks: - id: prettier + additional_dependencies: + - prettier@3.2.5 - repo: https://github.com/editorconfig-checker/editorconfig-checker.python rev: "2.7.3" diff --git a/CHANGELOG.md b/CHANGELOG.md index 14ff84e93..2b7ff2ef6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,8 @@ ### Template +- Update templates to use nf-core/setup-nextflow v2 + ### Linting ### Components @@ -12,6 +14,7 @@ - Update CI to use nf-core/setup-nextflow v2 - Changelog bot: handle also patch version before dev suffix ([#2820](https://github.com/nf-core/tools/pull/2820)) +- update prettier to 3.2.5 ([#2830](https://github.com/nf-core/tools/pull/2830)) - Update GitHub Actions ([#2827](https://github.com/nf-core/tools/pull/2827)) ## [v2.13.1 - Tin Puppy Patch](https://github.com/nf-core/tools/releases/tag/2.13) - [2024-02-29] diff --git a/nf_core/__init__.py b/nf_core/__init__.py index d96be73f3..2d4fe45a0 100644 --- a/nf_core/__init__.py +++ b/nf_core/__init__.py @@ -1,4 +1,4 @@ -""" Main nf_core module file. +"""Main nf_core module file. Shouldn't do much, as everything is under subcommands. """ diff --git a/nf_core/__main__.py b/nf_core/__main__.py index ea3a24c3d..ed256d8fb 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -1,5 +1,6 @@ #!/usr/bin/env python -""" nf-core: Helper tools for use with nf-core Nextflow pipelines. 
""" +"""nf-core: Helper tools for use with nf-core Nextflow pipelines.""" + import logging import os import sys diff --git a/nf_core/components/components_test.py b/nf_core/components/components_test.py index f1a9e7c40..9b81f54f0 100644 --- a/nf_core/components/components_test.py +++ b/nf_core/components/components_test.py @@ -2,7 +2,6 @@ The ComponentsTest class handles the generation and testing of nf-test snapshots. """ - import logging import os import re @@ -91,9 +90,9 @@ def run(self) -> None: """Run build steps""" self.check_inputs() os.environ["NFT_DIFF"] = "pdiff" # set nf-test differ to pdiff to get a better diff output - os.environ[ - "NFT_DIFF_ARGS" - ] = "--line-numbers --expand-tabs=2" # taken from https://code.askimed.com/nf-test/docs/assertions/snapshots/#snapshot-differences + os.environ["NFT_DIFF_ARGS"] = ( + "--line-numbers --expand-tabs=2" # taken from https://code.askimed.com/nf-test/docs/assertions/snapshots/#snapshot-differences + ) with nf_core.utils.set_wd(Path(self.dir)): self.check_snapshot_stability() if len(self.errors) > 0: diff --git a/nf_core/components/create.py b/nf_core/components/create.py index c4b477a0a..d2169e3a7 100644 --- a/nf_core/components/create.py +++ b/nf_core/components/create.py @@ -2,7 +2,6 @@ The ComponentCreate class handles generating of module and subworkflow templates """ - import glob import json import logging diff --git a/nf_core/components/lint/__init__.py b/nf_core/components/lint/__init__.py index 3c2fb9dde..c99934bca 100644 --- a/nf_core/components/lint/__init__.py +++ b/nf_core/components/lint/__init__.py @@ -3,7 +3,6 @@ in nf-core pipelines """ - import logging import operator import os diff --git a/nf_core/components/nfcore_component.py b/nf_core/components/nfcore_component.py index 2f73afe9d..d9731ba7c 100644 --- a/nf_core/components/nfcore_component.py +++ b/nf_core/components/nfcore_component.py @@ -1,6 +1,7 @@ """ The NFCoreComponent class holds information and utility functions for a single 
module or subworkflow """ + import logging import re from pathlib import Path diff --git a/nf_core/launch.py b/nf_core/launch.py index 25bb4c150..bc0cd58ae 100644 --- a/nf_core/launch.py +++ b/nf_core/launch.py @@ -1,5 +1,4 @@ -""" Launch a pipeline, interactively collecting params """ - +"""Launch a pipeline, interactively collecting params""" import copy import json diff --git a/nf_core/licences.py b/nf_core/licences.py index a8a35334d..be737280f 100644 --- a/nf_core/licences.py +++ b/nf_core/licences.py @@ -1,6 +1,5 @@ """Lists software licences for a given workflow.""" - import json import logging import os diff --git a/nf_core/lint/nextflow_config.py b/nf_core/lint/nextflow_config.py index d3e29d236..2e142cde6 100644 --- a/nf_core/lint/nextflow_config.py +++ b/nf_core/lint/nextflow_config.py @@ -245,10 +245,9 @@ def nextflow_config(self): raise AssertionError() except (AssertionError, IndexError): failed.append( - "Config variable ``manifest.homePage`` did not begin with https://github.com/nf-core/:\n {}".format( - manifest_homepage - ) + f"Config variable ``manifest.homePage`` did not begin with https://github.com/nf-core/:\n {manifest_homepage}" ) + else: passed.append("Config variable ``manifest.homePage`` began with https://github.com/nf-core/") diff --git a/nf_core/list.py b/nf_core/list.py index 67d1a7687..658f4dc6d 100644 --- a/nf_core/list.py +++ b/nf_core/list.py @@ -1,6 +1,5 @@ """Lists available nf-core pipelines and versions.""" - import json import logging import os diff --git a/nf_core/modules/bump_versions.py b/nf_core/modules/bump_versions.py index b9003be97..9b54174d5 100644 --- a/nf_core/modules/bump_versions.py +++ b/nf_core/modules/bump_versions.py @@ -3,7 +3,6 @@ or for a single module """ - import logging import os import re diff --git a/nf_core/modules/lint/__init__.py b/nf_core/modules/lint/__init__.py index 866e6312a..b2816dab6 100644 --- a/nf_core/modules/lint/__init__.py +++ b/nf_core/modules/lint/__init__.py @@ -6,7 +6,6 @@ nf-core 
modules lint """ - import logging import os diff --git a/nf_core/modules/lint/module_changes.py b/nf_core/modules/lint/module_changes.py index ee8cabebe..eb76f4b88 100644 --- a/nf_core/modules/lint/module_changes.py +++ b/nf_core/modules/lint/module_changes.py @@ -1,6 +1,7 @@ """ Check whether the content of a module has changed compared to the original repository """ + import shutil import tempfile from pathlib import Path diff --git a/nf_core/modules/lint/module_tests.py b/nf_core/modules/lint/module_tests.py index 520f8cf0a..b1a611d70 100644 --- a/nf_core/modules/lint/module_tests.py +++ b/nf_core/modules/lint/module_tests.py @@ -1,6 +1,7 @@ """ Lint the tests of a module in nf-core/modules """ + import json import logging from pathlib import Path diff --git a/nf_core/params_file.py b/nf_core/params_file.py index 267fe7086..78798b065 100644 --- a/nf_core/params_file.py +++ b/nf_core/params_file.py @@ -1,5 +1,4 @@ -""" Create a YAML parameter file """ - +"""Create a YAML parameter file""" import json import logging diff --git a/nf_core/pipeline-template/.github/workflows/ci.yml b/nf_core/pipeline-template/.github/workflows/ci.yml index 84c727f60..3880b2c4d 100644 --- a/nf_core/pipeline-template/.github/workflows/ci.yml +++ b/nf_core/pipeline-template/.github/workflows/ci.yml @@ -31,7 +31,7 @@ jobs: uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 - name: Install Nextflow - uses: nf-core/setup-nextflow@v1 + uses: nf-core/setup-nextflow@v2 with: version: "{% raw %}${{ matrix.NXF_VER }}{% endraw %}" diff --git a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml index dcd7caabf..4fdec4e24 100644 --- a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml +++ b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml @@ -28,7 +28,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Install Nextflow - uses: nf-core/setup-nextflow@v1 + uses: 
nf-core/setup-nextflow@v2 - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5 with: diff --git a/nf_core/pipeline-template/.github/workflows/linting.yml b/nf_core/pipeline-template/.github/workflows/linting.yml index 59b85f95f..612467ff6 100644 --- a/nf_core/pipeline-template/.github/workflows/linting.yml +++ b/nf_core/pipeline-template/.github/workflows/linting.yml @@ -35,7 +35,7 @@ jobs: uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 - name: Install Nextflow - uses: nf-core/setup-nextflow@v1 + uses: nf-core/setup-nextflow@v2 - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5 with: diff --git a/nf_core/pipeline-template/.pre-commit-config.yaml b/nf_core/pipeline-template/.pre-commit-config.yaml index af57081f6..4dc0f1dcd 100644 --- a/nf_core/pipeline-template/.pre-commit-config.yaml +++ b/nf_core/pipeline-template/.pre-commit-config.yaml @@ -3,6 +3,9 @@ repos: rev: "v3.1.0" hooks: - id: prettier + additional_dependencies: + - prettier@3.2.5 + - repo: https://github.com/editorconfig-checker/editorconfig-checker.python rev: "2.7.3" hooks: diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 51e7da6ba..9347b7e67 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -1,6 +1,7 @@ """Creates a nf-core pipeline matching the current organization's specification based on a template. 
""" + import configparser import logging import os diff --git a/nf_core/schema.py b/nf_core/schema.py index df04dc5a1..373f8bbaa 100644 --- a/nf_core/schema.py +++ b/nf_core/schema.py @@ -1,5 +1,4 @@ -""" Code to deal with pipeline JSON Schema """ - +"""Code to deal with pipeline JSON Schema""" import copy import json @@ -84,16 +83,13 @@ def load_lint_schema(self): self.get_schema_defaults() self.validate_default_params() if len(self.invalid_nextflow_config_default_parameters) > 0: + params = "\n --".join( + [f"{param}: {msg}" for param, msg in self.invalid_nextflow_config_default_parameters.items()] + ) log.info( - "[red][✗] Invalid default parameters found:\n --{}\n\nNOTE: Use null in config for no default.".format( - "\n --".join( - [ - f"{param}: {msg}" - for param, msg in self.invalid_nextflow_config_default_parameters.items() - ] - ) - ) + f"[red][✗] Invalid default parameters found:\n {params} \n\nNOTE: Use null in config for no default." ) + else: log.info(f"[green][✓] Pipeline schema looks valid[/] [dim](found {num_params} params)") except json.decoder.JSONDecodeError as e: @@ -282,9 +278,9 @@ def validate_default_params(self): if param in self.pipeline_params: self.validate_config_default_parameter(param, group_properties[param], self.pipeline_params[param]) else: - self.invalid_nextflow_config_default_parameters[ - param - ] = "Not in pipeline parameters. Check `nextflow.config`." + self.invalid_nextflow_config_default_parameters[param] = ( + "Not in pipeline parameters. Check `nextflow.config`." + ) # Go over ungrouped params if any exist ungrouped_properties = self.schema.get("properties") @@ -297,9 +293,9 @@ def validate_default_params(self): param, ungrouped_properties[param], self.pipeline_params[param] ) else: - self.invalid_nextflow_config_default_parameters[ - param - ] = "Not in pipeline parameters. Check `nextflow.config`." + self.invalid_nextflow_config_default_parameters[param] = ( + "Not in pipeline parameters. Check `nextflow.config`." 
+ ) def validate_config_default_parameter(self, param, schema_param, config_default): """ @@ -314,9 +310,9 @@ def validate_config_default_parameter(self, param, schema_param, config_default) ): # Check that we are not deferring the execution of this parameter in the schema default with squiggly brakcets if schema_param["type"] != "string" or "{" not in schema_param["default"]: - self.invalid_nextflow_config_default_parameters[ - param - ] = f"Schema default (`{schema_param['default']}`) does not match the config default (`{config_default}`)" + self.invalid_nextflow_config_default_parameters[param] = ( + f"Schema default (`{schema_param['default']}`) does not match the config default (`{config_default}`)" + ) return # if default is null, we're good @@ -326,28 +322,28 @@ def validate_config_default_parameter(self, param, schema_param, config_default) # Check variable types in nextflow.config if schema_param["type"] == "string": if str(config_default) in ["false", "true", "''"]: - self.invalid_nextflow_config_default_parameters[ - param - ] = f"String should not be set to `{config_default}`" + self.invalid_nextflow_config_default_parameters[param] = ( + f"String should not be set to `{config_default}`" + ) if schema_param["type"] == "boolean": if str(config_default) not in ["false", "true"]: - self.invalid_nextflow_config_default_parameters[ - param - ] = f"Booleans should only be true or false, not `{config_default}`" + self.invalid_nextflow_config_default_parameters[param] = ( + f"Booleans should only be true or false, not `{config_default}`" + ) if schema_param["type"] == "integer": try: int(config_default) except ValueError: - self.invalid_nextflow_config_default_parameters[ - param - ] = f"Does not look like an integer: `{config_default}`" + self.invalid_nextflow_config_default_parameters[param] = ( + f"Does not look like an integer: `{config_default}`" + ) if schema_param["type"] == "number": try: float(config_default) except ValueError: - 
self.invalid_nextflow_config_default_parameters[ - param - ] = f"Does not look like a number (float): `{config_default}`" + self.invalid_nextflow_config_default_parameters[param] = ( + f"Does not look like a number (float): `{config_default}`" + ) def validate_schema(self, schema=None): """ @@ -647,17 +643,13 @@ def build_schema(self, pipeline_dir, no_prompts, web_only, url): # Extra help for people running offline if "Could not connect" in e.args[0]: log.info( - "If you're working offline, now copy your schema ({}) and paste at https://nf-co.re/pipeline_schema_builder".format( - self.schema_filename - ) + f"If you're working offline, now copy your schema ({self.schema_filename}) and paste at https://nf-co.re/pipeline_schema_builder" ) log.info("When you're finished, you can paste the edited schema back into the same file") if self.web_schema_build_web_url: log.info( "To save your work, open {}\n" - "Click the blue 'Finished' button, copy the schema and paste into this file: {}".format( - self.web_schema_build_web_url, self.schema_filename - ) + f"Click the blue 'Finished' button, copy the schema and paste into this file: { self.web_schema_build_web_url, self.schema_filename}" ) return False diff --git a/nf_core/subworkflows/lint/__init__.py b/nf_core/subworkflows/lint/__init__.py index 3a8719042..96d2e0ab9 100644 --- a/nf_core/subworkflows/lint/__init__.py +++ b/nf_core/subworkflows/lint/__init__.py @@ -6,7 +6,6 @@ nf-core subworkflows lint """ - import logging import os diff --git a/nf_core/subworkflows/lint/subworkflow_changes.py b/nf_core/subworkflows/lint/subworkflow_changes.py index b7fa13d93..a9c9616a2 100644 --- a/nf_core/subworkflows/lint/subworkflow_changes.py +++ b/nf_core/subworkflows/lint/subworkflow_changes.py @@ -1,6 +1,7 @@ """ Check whether the content of a subworkflow has changed compared to the original repository """ + from pathlib import Path import nf_core.modules.modules_repo diff --git a/nf_core/subworkflows/lint/subworkflow_tests.py 
b/nf_core/subworkflows/lint/subworkflow_tests.py index f7284320e..796a56d01 100644 --- a/nf_core/subworkflows/lint/subworkflow_tests.py +++ b/nf_core/subworkflows/lint/subworkflow_tests.py @@ -1,6 +1,7 @@ """ Lint the tests of a subworkflow in nf-core/modules """ + import json import logging from pathlib import Path diff --git a/nf_core/sync.py b/nf_core/sync.py index 4057461bc..d1dee9a54 100644 --- a/nf_core/sync.py +++ b/nf_core/sync.py @@ -1,5 +1,4 @@ -"""Synchronise a pipeline TEMPLATE branch with the template. -""" +"""Synchronise a pipeline TEMPLATE branch with the template.""" import json import logging diff --git a/nf_core/utils.py b/nf_core/utils.py index e1778b55b..8ea6f418a 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1,6 +1,7 @@ """ Common utility functions for the nf-core python package. """ + import concurrent.futures import datetime import errno diff --git a/tests/components/generate_snapshot.py b/tests/components/generate_snapshot.py index c5067d721..50024a8eb 100644 --- a/tests/components/generate_snapshot.py +++ b/tests/components/generate_snapshot.py @@ -1,4 +1,5 @@ """Test generate a snapshot""" + import json from pathlib import Path from unittest.mock import MagicMock diff --git a/tests/components/snapshot_test.py b/tests/components/snapshot_test.py index d77461847..b3fc25977 100644 --- a/tests/components/snapshot_test.py +++ b/tests/components/snapshot_test.py @@ -1,4 +1,5 @@ """Test the 'modules test' or 'subworkflows test' command which runs nf-test test.""" + import shutil from pathlib import Path diff --git a/tests/test_bump_version.py b/tests/test_bump_version.py index 77edd4bfd..059e18e92 100644 --- a/tests/test_bump_version.py +++ b/tests/test_bump_version.py @@ -1,5 +1,5 @@ -"""Some tests covering the bump_version code. 
-""" +"""Some tests covering the bump_version code.""" + import os import yaml diff --git a/tests/test_cli.py b/tests/test_cli.py index 64c024025..28be5c2a7 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -1,4 +1,4 @@ -""" Tests covering the command-line code. +"""Tests covering the command-line code. Most tests check the cli arguments are passed along and that some action is taken. diff --git a/tests/test_components.py b/tests/test_components.py index b7f67eb51..eaf999c3c 100644 --- a/tests/test_components.py +++ b/tests/test_components.py @@ -1,5 +1,4 @@ -""" Tests covering the modules commands -""" +"""Tests covering the modules commands""" import os import shutil diff --git a/tests/test_create.py b/tests/test_create.py index 5dfd9244c..313b6f535 100644 --- a/tests/test_create.py +++ b/tests/test_create.py @@ -1,5 +1,5 @@ -"""Some tests covering the pipeline creation sub command. -""" +"""Some tests covering the pipeline creation sub command.""" + import os import unittest from pathlib import Path diff --git a/tests/test_download.py b/tests/test_download.py index 9d3c285dd..48ba88f3c 100644 --- a/tests/test_download.py +++ b/tests/test_download.py @@ -1,5 +1,4 @@ -"""Tests for the download subcommand of nf-core tools -""" +"""Tests for the download subcommand of nf-core tools""" import os import re diff --git a/tests/test_launch.py b/tests/test_launch.py index 7b3ff7d9d..043055a2d 100644 --- a/tests/test_launch.py +++ b/tests/test_launch.py @@ -1,5 +1,4 @@ -""" Tests covering the pipeline launch code. -""" +"""Tests covering the pipeline launch code.""" import json import os diff --git a/tests/test_licenses.py b/tests/test_licenses.py index 4fb58a107..8023c9e89 100644 --- a/tests/test_licenses.py +++ b/tests/test_licenses.py @@ -1,5 +1,4 @@ -"""Some tests covering the pipeline creation sub command. 
-""" +"""Some tests covering the pipeline creation sub command.""" # import json # import os # import tempfile diff --git a/tests/test_lint.py b/tests/test_lint.py index 558dac845..31923ea88 100644 --- a/tests/test_lint.py +++ b/tests/test_lint.py @@ -1,5 +1,5 @@ -"""Some tests covering the linting code. -""" +"""Some tests covering the linting code.""" + import fnmatch import json import os diff --git a/tests/test_list.py b/tests/test_list.py index c1f51e03e..c78276b41 100644 --- a/tests/test_list.py +++ b/tests/test_list.py @@ -1,5 +1,4 @@ -""" Tests covering the workflow listing code. -""" +"""Tests covering the workflow listing code.""" import json import os diff --git a/tests/test_modules.py b/tests/test_modules.py index 2a3d8795f..7955d8efa 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -1,5 +1,4 @@ -""" Tests covering the modules commands -""" +"""Tests covering the modules commands""" import os import shutil diff --git a/tests/test_refgenie.py b/tests/test_refgenie.py index 5440c1c47..23cc0dd14 100644 --- a/tests/test_refgenie.py +++ b/tests/test_refgenie.py @@ -1,5 +1,4 @@ -""" Tests covering the refgenie integration code -""" +"""Tests covering the refgenie integration code""" import os import shlex diff --git a/tests/test_schema.py b/tests/test_schema.py index aab5b5bbb..d7f4f4d64 100644 --- a/tests/test_schema.py +++ b/tests/test_schema.py @@ -1,5 +1,4 @@ -""" Tests covering the pipeline schema code. 
-""" +"""Tests covering the pipeline schema code.""" import json import os diff --git a/tests/test_subworkflows.py b/tests/test_subworkflows.py index f9419ea7a..23ec98db4 100644 --- a/tests/test_subworkflows.py +++ b/tests/test_subworkflows.py @@ -1,5 +1,4 @@ -""" Tests covering the subworkflows commands -""" +"""Tests covering the subworkflows commands""" import os import shutil diff --git a/tests/test_sync.py b/tests/test_sync.py index 0b9be7353..f15676f08 100644 --- a/tests/test_sync.py +++ b/tests/test_sync.py @@ -1,5 +1,4 @@ -""" Tests covering the sync command -""" +"""Tests covering the sync command""" import json import os diff --git a/tests/test_utils.py b/tests/test_utils.py index d0af75bfe..85f4e3c54 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,5 +1,4 @@ -""" Tests covering for utility functions. -""" +"""Tests covering for utility functions.""" import os import shutil From 0e9640f65e822f1b2e1a3cc0744011d64778058c Mon Sep 17 00:00:00 2001 From: Adam Talbot <12817534+adamrtalbot@users.noreply.github.com> Date: Tue, 19 Mar 2024 11:44:14 +0000 Subject: [PATCH 076/737] Module test data uses paths rather than config map This makes tests reusable in pipeline repos Closes #2858 --- CHANGELOG.md | 1 + nf_core/module-template/tests/main.nf.test | 12 ++++++------ 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a47fda727..51cdfa947 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,7 @@ - Add fallback to `download_pipeline.yml` in case the pipeline does not support stub runs ([#2846](https://github.com/nf-core/tools/pull/2846)) - Set topic variable correctly in the mastodon announcement ([#2848](https://github.com/nf-core/tools/pull/2848)) - Add a cleanup action to `download_pipeline.yml` to fix failures caused by inadequate storage space on the runner ([#2849](https://github.com/nf-core/tools/pull/2849)) +- Module test data uses paths rather than config map ### Linting diff --git 
a/nf_core/module-template/tests/main.nf.test b/nf_core/module-template/tests/main.nf.test index e1b1dadf1..a53131ca2 100644 --- a/nf_core/module-template/tests/main.nf.test +++ b/nf_core/module-template/tests/main.nf.test @@ -28,10 +28,10 @@ nextflow_process { {% if has_meta %} input[0] = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) - ] + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.paired_end.sorted.bam', checkIfExists: true), + ] {%- else %} - input[0] = file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true) + input[0] = file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.paired_end.sorted.bam', checkIfExists: true), {%- endif %} """ } @@ -60,10 +60,10 @@ nextflow_process { {% if has_meta %} input[0] = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) - ] + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.paired_end.sorted.bam', checkIfExists: true), + ] {%- else %} - input[0] = file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true) + input[0] = file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.paired_end.sorted.bam', checkIfExists: true), {%- endif %} """ } From bb53c9dc8e5db0e3258839dfc0bb1d54ae12c3d2 Mon Sep 17 00:00:00 2001 From: Adam Talbot <12817534+adamrtalbot@users.noreply.github.com> Date: Tue, 19 Mar 2024 11:46:45 +0000 Subject: [PATCH 077/737] Update subworkflow test data --- CHANGELOG.md | 2 +- nf_core/subworkflow-template/tests/main.nf.test | 14 ++++++++------ 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 51cdfa947..33b9827bd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,7 +10,7 @@ - Add fallback to `download_pipeline.yml` in 
case the pipeline does not support stub runs ([#2846](https://github.com/nf-core/tools/pull/2846)) - Set topic variable correctly in the mastodon announcement ([#2848](https://github.com/nf-core/tools/pull/2848)) - Add a cleanup action to `download_pipeline.yml` to fix failures caused by inadequate storage space on the runner ([#2849](https://github.com/nf-core/tools/pull/2849)) -- Module test data uses paths rather than config map +- Module and subworkflow test data uses paths rather than config map ([#]()) ### Linting diff --git a/nf_core/subworkflow-template/tests/main.nf.test b/nf_core/subworkflow-template/tests/main.nf.test index c44e19a4e..897888369 100644 --- a/nf_core/subworkflow-template/tests/main.nf.test +++ b/nf_core/subworkflow-template/tests/main.nf.test @@ -22,12 +22,14 @@ nextflow_workflow { workflow { """ // TODO nf-core: define inputs of the workflow here. Example: - input[0] = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true) - ] - input[1] = [ [ id:'genome' ], - file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) - ] + input[0] = [ + [ id:'test', single_end:false ], // meta map + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.paired_end.sorted.bam', checkIfExists: true), + ] + input[1] = [ + [ id:'genome' ], + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.paired_end.sorted.bam', checkIfExists: true), + ] """ } } From f6bd633a55c3bd586ac8cfe607c19ad4abd30859 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Tue, 19 Mar 2024 11:47:51 +0000 Subject: [PATCH 078/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 33b9827bd..25c854beb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -38,6 +38,7 @@ - Strip out mention of "Nextflow Tower" and replace with "Seqera Platform" wherever possible 
- Update pre-commit hook astral-sh/ruff-pre-commit to v0.3.3 ([#2850](https://github.com/nf-core/tools/pull/2850)) - Fix issue with config resolution that was causing nested configs to behave unexpectedly ([#2862](https://github.com/nf-core/tools/pull/2862)) +- 2858 Test data uses paths instead of config map ([#2877](https://github.com/nf-core/tools/pull/2877)) ## [v2.13.1 - Tin Puppy Patch](https://github.com/nf-core/tools/releases/tag/2.13) - [2024-02-29] From 065d1c34dffe9bbc45b9eae66e4090ca3426742e Mon Sep 17 00:00:00 2001 From: Adam Talbot <12817534+adamrtalbot@users.noreply.github.com> Date: Tue, 19 Mar 2024 11:48:12 +0000 Subject: [PATCH 079/737] CHANGELOG --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 25c854beb..10d2b4279 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,7 +10,7 @@ - Add fallback to `download_pipeline.yml` in case the pipeline does not support stub runs ([#2846](https://github.com/nf-core/tools/pull/2846)) - Set topic variable correctly in the mastodon announcement ([#2848](https://github.com/nf-core/tools/pull/2848)) - Add a cleanup action to `download_pipeline.yml` to fix failures caused by inadequate storage space on the runner ([#2849](https://github.com/nf-core/tools/pull/2849)) -- Module and subworkflow test data uses paths rather than config map ([#]()) +- Module and subworkflow test data uses paths rather than config map ([#2877](https://github.com/nf-core/tools/pull/2877)) ### Linting From 6bf5389f6c20c55e071d6255e8960146393fb244 Mon Sep 17 00:00:00 2001 From: Adam Talbot <12817534+adamrtalbot@users.noreply.github.com> Date: Tue, 19 Mar 2024 11:49:08 +0000 Subject: [PATCH 080/737] Auto changelog did it all for me --- CHANGELOG.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 10d2b4279..bd3e253be 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,7 +10,6 @@ - Add fallback to `download_pipeline.yml` in case 
the pipeline does not support stub runs ([#2846](https://github.com/nf-core/tools/pull/2846)) - Set topic variable correctly in the mastodon announcement ([#2848](https://github.com/nf-core/tools/pull/2848)) - Add a cleanup action to `download_pipeline.yml` to fix failures caused by inadequate storage space on the runner ([#2849](https://github.com/nf-core/tools/pull/2849)) -- Module and subworkflow test data uses paths rather than config map ([#2877](https://github.com/nf-core/tools/pull/2877)) ### Linting @@ -38,7 +37,7 @@ - Strip out mention of "Nextflow Tower" and replace with "Seqera Platform" wherever possible - Update pre-commit hook astral-sh/ruff-pre-commit to v0.3.3 ([#2850](https://github.com/nf-core/tools/pull/2850)) - Fix issue with config resolution that was causing nested configs to behave unexpectedly ([#2862](https://github.com/nf-core/tools/pull/2862)) -- 2858 Test data uses paths instead of config map ([#2877](https://github.com/nf-core/tools/pull/2877)) +- Test data uses paths instead of config map ([#2877](https://github.com/nf-core/tools/pull/2877)) ## [v2.13.1 - Tin Puppy Patch](https://github.com/nf-core/tools/releases/tag/2.13) - [2024-02-29] From ed70990a254359325035aeb22dad4b348a5a0590 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Tue, 2 Apr 2024 22:37:12 +0200 Subject: [PATCH 081/737] Remove duplicate lines in changelog from merge conflict resolution --- CHANGELOG.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3679af823..cbfef3bcc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,7 +12,6 @@ - Add a cleanup action to `download_pipeline.yml` to fix failures caused by inadequate storage space on the runner ([#2849](https://github.com/nf-core/tools/pull/2849)) - Update python to 3.12 ([#2805](https://github.com/nf-core/tools/pull/2805)) - Remove `pyproject.toml` from template root -- Update templates to use nf-core/setup-nextflow v2 ### Linting @@ -31,7 +30,6 @@ - Changelog bot: handle also patch 
version before dev suffix ([#2820](https://github.com/nf-core/tools/pull/2820)) - Fix path in component update script ([#2823](https://github.com/nf-core/tools/pull/2823)) - Update prettier to 3.2.5 ([#2830](https://github.com/nf-core/tools/pull/2830)) -- update prettier to 3.2.5 ([#2830](https://github.com/nf-core/tools/pull/2830)) - Update GitHub Actions ([#2827](https://github.com/nf-core/tools/pull/2827)) - Switch to setup-nf-test ([#2834](https://github.com/nf-core/tools/pull/2834)) - Update pre-commit hook astral-sh/ruff-pre-commit to v0.3.2 ([#2836](https://github.com/nf-core/tools/pull/2836)) From 3a540235c74cd4934427ded891ea20261264672c Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Tue, 2 Apr 2024 22:42:38 +0200 Subject: [PATCH 082/737] Raw strings for nf-core ascii artwork Avoids warnings about invalid escape sequences --- nf_core/pipelines/create/error.py | 10 +++++----- nf_core/pipelines/create/githubexit.py | 10 +++++----- nf_core/pipelines/create/loggingscreen.py | 10 +++++----- nf_core/pipelines/create/welcome.py | 10 +++++----- 4 files changed, 20 insertions(+), 20 deletions(-) diff --git a/nf_core/pipelines/create/error.py b/nf_core/pipelines/create/error.py index 922b5ed54..b738b2a9d 100644 --- a/nf_core/pipelines/create/error.py +++ b/nf_core/pipelines/create/error.py @@ -20,11 +20,11 @@ def compose(self) -> ComposeResult: ) ) yield Static( - f"\n[green]{' ' * 40},--.[grey39]/[green],-." - + "\n[blue] ___ __ __ __ ___ [green]/,-._.--~\\" - + "\n[blue]|\ | |__ __ / ` / \ |__) |__ [yellow] } {" - + "\n[blue] | \| | \__, \__/ | \ |___ [green]\`-._,-`-," - + "\n[green] `._,._,'\n", + rf"\n[green]{' ' * 40},--.[grey39]/[green],-." 
+ + r"\n[blue] ___ __ __ __ ___ [green]/,-._.--~\\" + + r"\n[blue]|\ | |__ __ / ` / \ |__) |__ [yellow] } {" + + r"\n[blue] | \| | \__, \__/ | \ |___ [green]\`-._,-`-," + + r"\n[green] `._,._,'\n", id="logo", ) diff --git a/nf_core/pipelines/create/githubexit.py b/nf_core/pipelines/create/githubexit.py index 9b2c54912..102edabe3 100644 --- a/nf_core/pipelines/create/githubexit.py +++ b/nf_core/pipelines/create/githubexit.py @@ -35,11 +35,11 @@ def compose(self) -> ComposeResult: ) ) yield Static( - f"\n[green]{' ' * 40},--.[grey39]/[green],-." - + "\n[blue] ___ __ __ __ ___ [green]/,-._.--~\\" - + "\n[blue]|\ | |__ __ / ` / \ |__) |__ [yellow] } {" - + "\n[blue] | \| | \__, \__/ | \ |___ [green]\`-._,-`-," - + "\n[green] `._,._,'\n", + rf"\n[green]{' ' * 40},--.[grey39]/[green],-." + + r"\n[blue] ___ __ __ __ ___ [green]/,-._.--~\\" + + r"\n[blue]|\ | |__ __ / ` / \ |__) |__ [yellow] } {" + + r"\n[blue] | \| | \__, \__/ | \ |___ [green]\`-._,-`-," + + r"\n[green] `._,._,'\n", id="logo", ) yield Markdown(exit_help_text_markdown) diff --git a/nf_core/pipelines/create/loggingscreen.py b/nf_core/pipelines/create/loggingscreen.py index 68b65619c..6c8f77406 100644 --- a/nf_core/pipelines/create/loggingscreen.py +++ b/nf_core/pipelines/create/loggingscreen.py @@ -24,11 +24,11 @@ def compose(self) -> ComposeResult: ) ) yield Static( - f"\n[green]{' ' * 40},--.[grey39]/[green],-." - + "\n[blue] ___ __ __ __ ___ [green]/,-._.--~\\" - + "\n[blue]|\ | |__ __ / ` / \ |__) |__ [yellow] } {" - + "\n[blue] | \| | \__, \__/ | \ |___ [green]\`-._,-`-," - + "\n[green] `._,._,'\n", + rf"\n[green]{' ' * 40},--.[grey39]/[green],-." 
+ + r"\n[blue] ___ __ __ __ ___ [green]/,-._.--~\\" + + r"\n[blue]|\ | |__ __ / ` / \ |__) |__ [yellow] } {" + + r"\n[blue] | \| | \__, \__/ | \ |___ [green]\`-._,-`-," + + r"\n[green] `._,._,'\n", id="logo", ) yield Markdown(markdown) diff --git a/nf_core/pipelines/create/welcome.py b/nf_core/pipelines/create/welcome.py index cb0d7468c..a5839b741 100644 --- a/nf_core/pipelines/create/welcome.py +++ b/nf_core/pipelines/create/welcome.py @@ -42,11 +42,11 @@ def compose(self) -> ComposeResult: ) ) yield Static( - f"\n[green]{' ' * 40},--.[grey39]/[green],-." - + "\n[blue] ___ __ __ __ ___ [green]/,-._.--~\\" - + "\n[blue]|\ | |__ __ / ` / \ |__) |__ [yellow] } {" - + "\n[blue] | \| | \__, \__/ | \ |___ [green]\`-._,-`-," - + "\n[green] `._,._,'\n", + rf"\n[green]{' ' * 40},--.[grey39]/[green],-." + + r"\n[blue] ___ __ __ __ ___ [green]/,-._.--~\\" + + r"\n[blue]|\ | |__ __ / ` / \ |__) |__ [yellow] } {" + + r"\n[blue] | \| | \__, \__/ | \ |___ [green]\`-._,-`-," + + r"\n[green] `._,._,'\n", id="logo", ) yield Markdown(markdown) From bfaf3b10dfa9234caca0ed63425214f147d34fe5 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Tue, 2 Apr 2024 22:47:15 +0200 Subject: [PATCH 083/737] Simplify log messages about CLI flags --- nf_core/__main__.py | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 4fbbf3586..c1bdab222 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -670,16 +670,13 @@ def create_pipeline(ctx, name, description, author, version, force, outdir, temp sys.exit(1) elif name or description or author or version != "1.0.0dev" or force or outdir or organisation != "nf-core": log.error( - "Command arguments are not accepted in interactive mode.\n" - "Run with all command line arguments to avoid using an interactive interface" - "or run without any command line arguments to use an interactive interface." 
+ "[red]Partial arguments supplied.[/] " + "Run without [i]any[/] arguments for an interactive interface, " + "or with at least name + description + author to use non-interactively." ) sys.exit(1) else: - log.info( - "Launching interactive nf-core pipeline creation tool." - "\nRun with all command line arguments to avoid using an interactive interface." - ) + log.info("Launching interactive nf-core pipeline creation tool.") app = PipelineCreateApp() app.run() sys.exit(app.return_code or 0) From ea99d690688775728d2fd340e0f67b7f0dff2331 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Tue, 2 Apr 2024 22:59:40 +0200 Subject: [PATCH 084/737] Move logo to utils, fix alignment --- nf_core/__main__.py | 24 +++++------------------ nf_core/pipelines/create/create.tcss | 4 +++- nf_core/pipelines/create/error.py | 8 +++----- nf_core/pipelines/create/githubexit.py | 8 +++----- nf_core/pipelines/create/loggingscreen.py | 8 +++----- nf_core/pipelines/create/welcome.py | 8 +++----- nf_core/utils.py | 9 +++++++++ 7 files changed, 29 insertions(+), 40 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index c1bdab222..6435865ad 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -17,7 +17,7 @@ from nf_core.download import DownloadError from nf_core.modules.modules_repo import NF_CORE_MODULES_REMOTE from nf_core.params_file import ParamsFileBuilder -from nf_core.utils import check_if_outdated, rich_force_colors, setup_nfcore_dir +from nf_core.utils import check_if_outdated, nfcore_logo, rich_force_colors, setup_nfcore_dir # Set up logging as the root logger # Submodules should all traverse back to this @@ -121,25 +121,11 @@ def run_nf_core(): # print nf-core header if environment variable is not set if os.environ.get("_NF_CORE_COMPLETE") is None: # Print nf-core header - stderr.print(f"\n[green]{' ' * 42},--.[grey39]/[green],-.", highlight=False) - stderr.print( - "[blue] ___ __ __ __ ___ [green]/,-._.--~\\", - highlight=False, - ) - stderr.print( - 
r"[blue] |\ | |__ __ / ` / \ |__) |__ [yellow] } {", - highlight=False, - ) - stderr.print( - r"[blue] | \| | \__, \__/ | \ |___ [green]\`-._,-`-,", - highlight=False, - ) - stderr.print( - "[green] `._,._,'\n", - highlight=False, - ) + stderr.print("\n") + for line in nfcore_logo: + stderr.print(line, highlight=False) stderr.print( - f"[grey39] nf-core/tools version {__version__} - [link=https://nf-co.re]https://nf-co.re[/]", + f"\n[grey39] nf-core/tools version {__version__} - [link=https://nf-co.re]https://nf-co.re[/]", highlight=False, ) try: diff --git a/nf_core/pipelines/create/create.tcss b/nf_core/pipelines/create/create.tcss index 46b398901..c3a52a4be 100644 --- a/nf_core/pipelines/create/create.tcss +++ b/nf_core/pipelines/create/create.tcss @@ -1,5 +1,7 @@ #logo { - text-align:center; + width: 100%; + content-align-horizontal: center; + content-align-vertical: middle; } .cta { layout: horizontal; diff --git a/nf_core/pipelines/create/error.py b/nf_core/pipelines/create/error.py index b738b2a9d..67c67aa1c 100644 --- a/nf_core/pipelines/create/error.py +++ b/nf_core/pipelines/create/error.py @@ -5,6 +5,8 @@ from textual.screen import Screen from textual.widgets import Button, Footer, Header, Markdown, Static +from nf_core.utils import nfcore_logo + class ExistError(Screen): """A screen to show the final text and exit the app - when an error ocurred.""" @@ -20,11 +22,7 @@ def compose(self) -> ComposeResult: ) ) yield Static( - rf"\n[green]{' ' * 40},--.[grey39]/[green],-." 
- + r"\n[blue] ___ __ __ __ ___ [green]/,-._.--~\\" - + r"\n[blue]|\ | |__ __ / ` / \ |__) |__ [yellow] } {" - + r"\n[blue] | \| | \__, \__/ | \ |___ [green]\`-._,-`-," - + r"\n[green] `._,._,'\n", + "\n" + "\n".join(nfcore_logo) + "\n", id="logo", ) diff --git a/nf_core/pipelines/create/githubexit.py b/nf_core/pipelines/create/githubexit.py index 102edabe3..3294c40f3 100644 --- a/nf_core/pipelines/create/githubexit.py +++ b/nf_core/pipelines/create/githubexit.py @@ -5,6 +5,8 @@ from textual.screen import Screen from textual.widgets import Button, Footer, Header, Markdown, Static +from nf_core.utils import nfcore_logo + exit_help_text_markdown = """ If you would like to create the GitHub repository later, you can do it manually by following these steps: @@ -35,11 +37,7 @@ def compose(self) -> ComposeResult: ) ) yield Static( - rf"\n[green]{' ' * 40},--.[grey39]/[green],-." - + r"\n[blue] ___ __ __ __ ___ [green]/,-._.--~\\" - + r"\n[blue]|\ | |__ __ / ` / \ |__) |__ [yellow] } {" - + r"\n[blue] | \| | \__, \__/ | \ |___ [green]\`-._,-`-," - + r"\n[green] `._,._,'\n", + "\n" + "\n".join(nfcore_logo) + "\n", id="logo", ) yield Markdown(exit_help_text_markdown) diff --git a/nf_core/pipelines/create/loggingscreen.py b/nf_core/pipelines/create/loggingscreen.py index 6c8f77406..89a848f83 100644 --- a/nf_core/pipelines/create/loggingscreen.py +++ b/nf_core/pipelines/create/loggingscreen.py @@ -5,6 +5,8 @@ from textual.screen import Screen from textual.widgets import Button, Footer, Header, Markdown, Static +from nf_core.utils import nfcore_logo + markdown = """ Visualising logging output. """ @@ -24,11 +26,7 @@ def compose(self) -> ComposeResult: ) ) yield Static( - rf"\n[green]{' ' * 40},--.[grey39]/[green],-." 
- + r"\n[blue] ___ __ __ __ ___ [green]/,-._.--~\\" - + r"\n[blue]|\ | |__ __ / ` / \ |__) |__ [yellow] } {" - + r"\n[blue] | \| | \__, \__/ | \ |___ [green]\`-._,-`-," - + r"\n[green] `._,._,'\n", + "\n" + "\n".join(nfcore_logo) + "\n", id="logo", ) yield Markdown(markdown) diff --git a/nf_core/pipelines/create/welcome.py b/nf_core/pipelines/create/welcome.py index a5839b741..c23b65953 100644 --- a/nf_core/pipelines/create/welcome.py +++ b/nf_core/pipelines/create/welcome.py @@ -5,6 +5,8 @@ from textual.screen import Screen from textual.widgets import Button, Footer, Header, Markdown, Static +from nf_core.utils import nfcore_logo + markdown = """ # nf-core create @@ -42,11 +44,7 @@ def compose(self) -> ComposeResult: ) ) yield Static( - rf"\n[green]{' ' * 40},--.[grey39]/[green],-." - + r"\n[blue] ___ __ __ __ ___ [green]/,-._.--~\\" - + r"\n[blue]|\ | |__ __ / ` / \ |__) |__ [yellow] } {" - + r"\n[blue] | \| | \__, \__/ | \ |___ [green]\`-._,-`-," - + r"\n[green] `._,._,'\n", + "\n" + "\n".join(nfcore_logo) + "\n", id="logo", ) yield Markdown(markdown) diff --git a/nf_core/utils.py b/nf_core/utils.py index 4271e971a..ec8eac700 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -36,6 +36,15 @@ log = logging.getLogger(__name__) +# ASCII nf-core logo +nfcore_logo = [ + r"[green] ,--.[grey39]/[green],-.", + r"[blue] ___ __ __ __ ___ [green]/,-._.--~\ ", + r"[blue] |\ | |__ __ / ` / \ |__) |__ [yellow] } {", + r"[blue] | \| | \__, \__/ | \ |___ [green]\`-._,-`-,", + r"[green] `._,._,'", +] + # Custom style for questionary nfcore_question_style = prompt_toolkit.styles.Style( [ From d89218f8742b70673bf0ec7c0c7635987da0d25f Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Tue, 2 Apr 2024 23:42:38 +0200 Subject: [PATCH 085/737] Tweaks for welcome and pipeline type pages --- nf_core/pipelines/create/create.tcss | 18 ++++++---- nf_core/pipelines/create/pipelinetype.py | 43 ++++++++++++++---------- nf_core/pipelines/create/welcome.py | 37 +++++++------------- 3 files 
changed, 49 insertions(+), 49 deletions(-) diff --git a/nf_core/pipelines/create/create.tcss b/nf_core/pipelines/create/create.tcss index c3a52a4be..836437a2c 100644 --- a/nf_core/pipelines/create/create.tcss +++ b/nf_core/pipelines/create/create.tcss @@ -3,13 +3,14 @@ content-align-horizontal: center; content-align-vertical: middle; } -.cta { - layout: horizontal; - margin-bottom: 1; + +.mb-0 { + margin: 0 2; } -.cta Button { - margin-left: 3; - margin-right: 3; + +.pipeline-type-grid { + height: auto; + margin-bottom: 2; } .custom_grid { @@ -106,3 +107,8 @@ Vertical{ .log_console { height: auto; } + +/* Layouts */ +.col-2 { + grid-size: 2 1; +} diff --git a/nf_core/pipelines/create/pipelinetype.py b/nf_core/pipelines/create/pipelinetype.py index 98d5acc97..48914e855 100644 --- a/nf_core/pipelines/create/pipelinetype.py +++ b/nf_core/pipelines/create/pipelinetype.py @@ -1,36 +1,37 @@ from textual.app import ComposeResult -from textual.containers import Center +from textual.containers import Center, Grid from textual.screen import Screen from textual.widgets import Button, Footer, Header, Markdown markdown_intro = """ -# To nf-core or not to nf-core? - -Next, we need to know what kind of pipeline this will be. +# Choose pipeline type +""" -Choose _"nf-core"_ if: +markdown_type_nfcore = """ +## Choose _"nf-core"_ if: * You want your pipeline to be part of the nf-core community * You think that there's an outside chance that it ever _could_ be part of nf-core - -Choose _"Custom"_ if: +""" +markdown_type_custom = """ +## Choose _"Custom"_ if: * Your pipeline will _never_ be part of nf-core * You want full control over *all* features that are included from the template - (including those that are mandatory for nf-core). + (including those that are mandatory for nf-core). """ markdown_details = """ -## Not sure? What's the difference? +## What's the difference? 
Choosing _"nf-core"_ effectively pre-selects the following template features: -* GitHub Actions Continuous Integration (CI) configuration for the following: - * Small-scale (GitHub) and large-scale (AWS) tests - * Code format linting with prettier - * Auto-fix functionality using @nf-core-bot +* GitHub Actions continuous-integration configuration files: + * Pipeline test runs: Small-scale (GitHub) and large-scale (AWS) + * Code formatting checks with [Prettier](https://prettier.io/) + * Auto-fix linting functionality using [@nf-core-bot](https://github.com/nf-core-bot) * Marking old issues as stale -* Inclusion of shared nf-core config profiles +* Inclusion of [shared nf-core configuration profiles](https://nf-co.re/configs) """ @@ -41,9 +42,15 @@ def compose(self) -> ComposeResult: yield Header() yield Footer() yield Markdown(markdown_intro) - yield Center( - Button("nf-core", id="type_nfcore", variant="success"), - Button("Custom", id="type_custom", variant="primary"), - classes="cta", + yield Grid( + Center( + Markdown(markdown_type_nfcore), + Center(Button("nf-core", id="type_nfcore", variant="success")), + ), + Center( + Markdown(markdown_type_custom), + Center(Button("Custom", id="type_custom", variant="primary")), + ), + classes="col-2 pipeline-type-grid", ) yield Markdown(markdown_details) diff --git a/nf_core/pipelines/create/welcome.py b/nf_core/pipelines/create/welcome.py index c23b65953..38f29b041 100644 --- a/nf_core/pipelines/create/welcome.py +++ b/nf_core/pipelines/create/welcome.py @@ -1,5 +1,3 @@ -from textwrap import dedent - from textual.app import ComposeResult from textual.containers import Center from textual.screen import Screen @@ -8,25 +6,21 @@ from nf_core.utils import nfcore_logo markdown = """ -# nf-core create - -This app will help you create a new nf-core pipeline. -It uses the nf-core pipeline template, which is kept -within the [nf-core/tools repository](https://github.com/nf-core/tools). 
+# Welcome to the nf-core pipeline creation wizard -Using this tool is mandatory when making a pipeline that may -be part of the nf-core community collection at some point. -However, this tool can also be used to create pipelines that will -never be part of nf-core. You can still benefit from the community -best practices for your own workflow. +This app will help you create a new Nextflow pipeline +from the nf-core pipeline template, part of the +[nf-core/tools repository](https://github.com/nf-core/tools). -If you are planning to add a pipeline to the nf-core community, you need to be part of that community! -Please join us on Slack [https://nf-co.re/join](https://nf-co.re/join), -and ask to be added to the GitHub association through the #github-invitations channel. +The template _must_ be used for nf-core pipelines, but hopefully +helps all Nextflow developers benefit from nf-core best practices. -Come and discuss your plans with the nf-core community as early as possible. -Ideally before you make a start on your pipeline! -These topics are specifically discussed in the [#new-pipelines](https://nfcore.slack.com/channels/new-pipelines) channel. +If you want to add a pipeline to nf-core, please +[join on Slack](https://nf-co.re/join) and discuss your plans with the +community as early as possible; _**ideally before you start on your pipeline!**_ +See the [nf-core guidelines](https://nf-co.re/docs/contributing/guidelines) +and the [#new-pipelines](https://nfcore.slack.com/channels/new-pipelines) +Slack channel for more information. 
""" @@ -36,13 +30,6 @@ class WelcomeScreen(Screen): def compose(self) -> ComposeResult: yield Header() yield Footer() - yield Markdown( - dedent( - """ - # Create a pipeline from the nf-core template - """ - ) - ) yield Static( "\n" + "\n".join(nfcore_logo) + "\n", id="logo", From 19a614483bfc63f0f1a12074826d0dce5c97fe7f Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Wed, 3 Apr 2024 00:08:18 +0200 Subject: [PATCH 086/737] Display tweaks --- nf_core/pipelines/create/__init__.py | 9 ++++++++- nf_core/pipelines/create/create.py | 21 +++++++++++++-------- nf_core/pipelines/create/create.tcss | 20 +++++++++++++++----- nf_core/pipelines/create/custompipeline.py | 1 + nf_core/pipelines/create/finaldetails.py | 10 +++++++--- nf_core/pipelines/create/loggingscreen.py | 6 +----- nf_core/pipelines/create/nfcorepipeline.py | 1 + 7 files changed, 46 insertions(+), 22 deletions(-) diff --git a/nf_core/pipelines/create/__init__.py b/nf_core/pipelines/create/__init__.py index feff20659..15670ac5d 100644 --- a/nf_core/pipelines/create/__init__.py +++ b/nf_core/pipelines/create/__init__.py @@ -1,4 +1,5 @@ """A Textual app to create a pipeline.""" + import logging from textual.app import App @@ -21,7 +22,13 @@ ) from nf_core.pipelines.create.welcome import WelcomeScreen -log_handler = CustomLogHandler(console=LoggingConsole(classes="log_console"), rich_tracebacks=True, markup=True) +log_handler = CustomLogHandler( + console=LoggingConsole(classes="log_console"), + rich_tracebacks=True, + show_time=False, + show_path=False, + markup=True, +) logging.basicConfig( level="INFO", handlers=[log_handler], diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 9347b7e67..ff1d488b1 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -55,6 +55,7 @@ def __init__( organisation: str = "nf-core", from_config_file: bool = False, default_branch: Optional[str] = None, + is_interactive: bool = False, ): if 
isinstance(template_config, CreateConfig): self.config = template_config @@ -102,6 +103,7 @@ def __init__( # Set fields used by the class methods self.no_git = no_git self.default_branch = default_branch + self.is_interactive = is_interactive self.force = self.config.force if outdir is None: outdir = os.path.join(os.getcwd(), self.jinja_params["name_noslash"]) @@ -253,7 +255,7 @@ def init_pipeline(self): if not self.no_git: self.git_init_pipeline() - if self.config.is_nfcore: + if self.config.is_nfcore and not self.is_interactive: log.info( "[green bold]!!!!!! IMPORTANT !!!!!!\n\n" "[green not bold]If you are interested in adding your pipeline to the nf-core community,\n" @@ -556,10 +558,13 @@ def git_init_pipeline(self): raise UserWarning( "Branches 'TEMPLATE' and 'dev' already exist. Use --force to overwrite existing branches." ) - log.info( - "Done. Remember to add a remote and push to GitHub:\n" - f"[white on grey23] cd {self.outdir} \n" - " git remote add origin git@github.com:USERNAME/REPO_NAME.git \n" - " git push --all origin " - ) - log.info("This will also push your newly created dev branch and the TEMPLATE branch for syncing.") + if self.is_interactive: + log.info(f"Pipeline created: ./{self.outdir.relative_to(Path.cwd())}") + else: + log.info( + "Done. 
Remember to add a remote and push to GitHub:\n" + f"[white on grey23] cd {self.outdir} \n" + " git remote add origin git@github.com:USERNAME/REPO_NAME.git \n" + " git push --all origin " + ) + log.info("This will also push your newly created dev branch and the TEMPLATE branch for syncing.") diff --git a/nf_core/pipelines/create/create.tcss b/nf_core/pipelines/create/create.tcss index 836437a2c..ad1b99d58 100644 --- a/nf_core/pipelines/create/create.tcss +++ b/nf_core/pipelines/create/create.tcss @@ -3,9 +3,12 @@ content-align-horizontal: center; content-align-vertical: middle; } - -.mb-0 { - margin: 0 2; +.cta { + layout: horizontal; + margin-bottom: 1; +} +.cta Button { + margin: 0 3; } .pipeline-type-grid { @@ -59,12 +62,16 @@ Vertical{ height: auto; } +.features-container { + padding: 0 4 1 4; +} + /* Display help messages */ .help_box { background: #333333; - padding: 1 5; - margin: 1 10; + padding: 1 3 0 3; + margin: 0 5 2 5; overflow-y: auto; transition: height 50ms; display: none; @@ -106,6 +113,9 @@ Vertical{ .log_console { height: auto; + background: #333333; + padding: 1 3; + margin: 0 4 2 4; } /* Layouts */ diff --git a/nf_core/pipelines/create/custompipeline.py b/nf_core/pipelines/create/custompipeline.py index 6fe878469..7d460db65 100644 --- a/nf_core/pipelines/create/custompipeline.py +++ b/nf_core/pipelines/create/custompipeline.py @@ -80,6 +80,7 @@ def compose(self) -> ComposeResult: "The pipeline will include configuration profiles containing custom parameters requried to run nf-core pipelines at different institutions", "nf_core_configs", ), + classes="features-container", ) yield Center( Button("Back", id="back", variant="default"), diff --git a/nf_core/pipelines/create/finaldetails.py b/nf_core/pipelines/create/finaldetails.py index 5af28cffa..c621a425a 100644 --- a/nf_core/pipelines/create/finaldetails.py +++ b/nf_core/pipelines/create/finaldetails.py @@ -1,4 +1,5 @@ """A Textual app to create a pipeline.""" + from textwrap import dedent from 
textual import on, work @@ -45,9 +46,9 @@ def compose(self) -> ComposeResult: with Horizontal(): yield Switch(value=False, id="force") with Vertical(): - yield Static("Force", classes="custom_grid") + yield Static("Force creation", classes="custom_grid") yield Static( - "If the pipeline output directory exists, remove it and continue.", + "Overwrite any existing pipeline output directories.", classes="feature_subtitle", ) @@ -98,7 +99,10 @@ def show_pipeline_error(self) -> None: def _create_pipeline(self) -> None: """Create the pipeline.""" self.post_message(ShowLogs()) - create_obj = PipelineCreate(template_config=self.parent.TEMPLATE_CONFIG) + create_obj = PipelineCreate( + template_config=self.parent.TEMPLATE_CONFIG, + is_interactive=True, + ) try: create_obj.init_pipeline() self.parent.call_from_thread(change_select_disabled, self.parent, "close_screen", False) diff --git a/nf_core/pipelines/create/loggingscreen.py b/nf_core/pipelines/create/loggingscreen.py index 89a848f83..4c863b164 100644 --- a/nf_core/pipelines/create/loggingscreen.py +++ b/nf_core/pipelines/create/loggingscreen.py @@ -7,10 +7,6 @@ from nf_core.utils import nfcore_logo -markdown = """ -Visualising logging output. 
-""" - class LoggingScreen(Screen): """A screen to show the final logs.""" @@ -29,7 +25,7 @@ def compose(self) -> ComposeResult: "\n" + "\n".join(nfcore_logo) + "\n", id="logo", ) - yield Markdown(markdown) + yield Markdown("Creating pipeline..") yield Center(self.parent.LOG_HANDLER.console) if self.parent.LOGGING_STATE == "repo created": yield Center( diff --git a/nf_core/pipelines/create/nfcorepipeline.py b/nf_core/pipelines/create/nfcorepipeline.py index 8306e9326..49cc1f8f8 100644 --- a/nf_core/pipelines/create/nfcorepipeline.py +++ b/nf_core/pipelines/create/nfcorepipeline.py @@ -29,6 +29,7 @@ def compose(self) -> ComposeResult: "The pipeline will be configured to use a copy of the most common reference genome files from iGenomes", "igenomes", ), + classes="features-container", ) yield Center( Button("Back", id="back", variant="default"), From 0bfae29aed2b5e89021530b9705e20f67789c000 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Wed, 3 Apr 2024 00:22:56 +0200 Subject: [PATCH 087/737] Tidy up design for GitHub repo creation page --- nf_core/__main__.py | 4 +++- nf_core/pipelines/create/create.tcss | 7 +++++++ nf_core/pipelines/create/githubexit.py | 23 ++++++++++++----------- nf_core/pipelines/create/githubrepo.py | 21 ++++++++------------- 4 files changed, 30 insertions(+), 25 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 6435865ad..c12378731 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -691,7 +691,9 @@ def create(name, description, author, version, force, outdir, template_yaml, pla Uses the nf-core template to make a skeleton Nextflow pipeline with all required files, boilerplate code and best-practices. """ - log.error("[bold][green]nf-core create[/] command is deprecated. Use [green]nf-core pipelines create[/].[/]") + log.error( + "The `[magenta]nf-core create[/]` command is deprecated. Use `[magenta]nf-core pipelines create[/]` instead." 
+ ) sys.exit(0) diff --git a/nf_core/pipelines/create/create.tcss b/nf_core/pipelines/create/create.tcss index ad1b99d58..b2355e133 100644 --- a/nf_core/pipelines/create/create.tcss +++ b/nf_core/pipelines/create/create.tcss @@ -122,3 +122,10 @@ Vertical{ .col-2 { grid-size: 2 1; } + +.ghrepo-cols { + margin: 0 4; +} +.ghrepo-cols Button { + margin-top: 2; +} diff --git a/nf_core/pipelines/create/githubexit.py b/nf_core/pipelines/create/githubexit.py index 3294c40f3..10346d030 100644 --- a/nf_core/pipelines/create/githubexit.py +++ b/nf_core/pipelines/create/githubexit.py @@ -11,15 +11,16 @@ If you would like to create the GitHub repository later, you can do it manually by following these steps: 1. Create a new GitHub repository -2. Add the remote to your local repository -```bash -cd -git remote add origin git@github.com:/.git -``` -3. Push the code to the remote -```bash -git push --all origin -``` +2. Add the remote to your local repository: + ```bash + cd + git remote add origin git@github.com:/.git + ``` +3. Push the code to the remote: + ```bash + git push --all origin + ``` + * Note the `--all` flag: this is needed to push all branches to the remote. """ @@ -42,7 +43,7 @@ def compose(self) -> ComposeResult: ) yield Markdown(exit_help_text_markdown) yield Center( - Button("Close App", id="close_app", variant="success"), - Button("Show Logging", id="show_logging", variant="primary"), + Button("Close", id="close_app", variant="success"), + Button("Show pipeline creation log", id="show_logging", variant="primary"), classes="cta", ) diff --git a/nf_core/pipelines/create/githubrepo.py b/nf_core/pipelines/create/githubrepo.py index c8a02e609..aee4b39dc 100644 --- a/nf_core/pipelines/create/githubrepo.py +++ b/nf_core/pipelines/create/githubrepo.py @@ -18,12 +18,8 @@ log = logging.getLogger(__name__) github_text_markdown = """ -# Create a GitHub repo - -After creating the pipeline template locally, we can create a GitHub repository and push the code to it. 
-""" -repo_config_markdown = """ -Please select the the GitHub repository settings: +Now that we have created a new pipeline locally, we can create a new +GitHub repository using the GitHub API and push the code to it. """ @@ -41,7 +37,7 @@ def compose(self) -> ComposeResult: ) ) yield Markdown(dedent(github_text_markdown)) - with Horizontal(): + with Horizontal(classes="ghrepo-cols"): gh_user, gh_token = self._get_github_credentials() yield TextInput( "gh_username", @@ -53,25 +49,24 @@ def compose(self) -> ComposeResult: yield TextInput( "token", "GitHub token", - "Your GitHub personal access token for login.", + "Your GitHub [link=https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens]personal access token[/link] for login.", default=gh_token if gh_token is not None else "GitHub token", password=True, classes="column", ) yield Button("Show", id="show_password") yield Button("Hide", id="hide_password") - yield Markdown(dedent(repo_config_markdown)) - with Horizontal(): + with Horizontal(classes="ghrepo-cols"): yield Switch(value=False, id="private") with Vertical(): yield Static("Private", classes="") - yield Static("Select if the new GitHub repo must be private.", classes="feature_subtitle") - with Horizontal(): + yield Static("Select to make the new GitHub repo private.", classes="feature_subtitle") + with Horizontal(classes="ghrepo-cols"): yield Switch(value=True, id="push") with Vertical(): yield Static("Push files", classes="custom_grid") yield Static( - "Select if you would like to push all the pipeline template files to your GitHub repo\nand all the branches required to keep the pipeline up to date with new releases of nf-core.", + "Select to push pipeline files and branches to your GitHub repo.", classes="feature_subtitle", ) yield Center( From a1d8916e434e6b07057ce05a279224617675c17c Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Wed, 3 Apr 2024 08:27:57 +0000 Subject: [PATCH 088/737] 
[automated] Fix code linting --- nf_core/pipelines/create/basicdetails.py | 1 + tests/test_create_app.py | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/nf_core/pipelines/create/basicdetails.py b/nf_core/pipelines/create/basicdetails.py index e4f36e403..3f319b58f 100644 --- a/nf_core/pipelines/create/basicdetails.py +++ b/nf_core/pipelines/create/basicdetails.py @@ -1,4 +1,5 @@ """A Textual app to create a pipeline.""" + from textwrap import dedent from textual import on diff --git a/tests/test_create_app.py b/tests/test_create_app.py index b6b05ab58..124078cec 100644 --- a/tests/test_create_app.py +++ b/tests/test_create_app.py @@ -1,4 +1,5 @@ -""" Test Pipeline Create App """ +"""Test Pipeline Create App""" + from unittest import mock import pytest From 7a96212dadd504b4be5c80856850fb6a97f68388 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Wed, 3 Apr 2024 09:41:19 +0000 Subject: [PATCH 089/737] update create app snapshots --- tests/__snapshots__/test_create_app.ambr | 2915 +++++++++++----------- tests/lint/configs.py | 2 +- tests/lint/nfcore_yml.py | 2 +- 3 files changed, 1462 insertions(+), 1457 deletions(-) diff --git a/tests/__snapshots__/test_create_app.ambr b/tests/__snapshots__/test_create_app.ambr index b0a306adc..50beceecc 100644 --- a/tests/__snapshots__/test_create_app.ambr +++ b/tests/__snapshots__/test_create_app.ambr @@ -22,253 +22,253 @@ font-weight: 700; } - .terminal-1527309810-matrix { + .terminal-3000245001-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1527309810-title { + .terminal-3000245001-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1527309810-r1 { fill: #c5c8c6 } - .terminal-1527309810-r2 { fill: #e3e3e3 } - .terminal-1527309810-r3 { fill: #989898 } - .terminal-1527309810-r4 { fill: #e1e1e1 } - .terminal-1527309810-r5 { fill: #121212 } - .terminal-1527309810-r6 { fill: 
#0053aa } - .terminal-1527309810-r7 { fill: #dde8f3;font-weight: bold } - .terminal-1527309810-r8 { fill: #a5a5a5;font-style: italic; } - .terminal-1527309810-r9 { fill: #1e1e1e } - .terminal-1527309810-r10 { fill: #008139 } - .terminal-1527309810-r11 { fill: #e2e2e2 } - .terminal-1527309810-r12 { fill: #787878 } - .terminal-1527309810-r13 { fill: #b93c5b } - .terminal-1527309810-r14 { fill: #454a50 } - .terminal-1527309810-r15 { fill: #7ae998 } - .terminal-1527309810-r16 { fill: #e2e3e3;font-weight: bold } - .terminal-1527309810-r17 { fill: #0a180e;font-weight: bold } - .terminal-1527309810-r18 { fill: #000000 } - .terminal-1527309810-r19 { fill: #ddedf9 } + .terminal-3000245001-r1 { fill: #c5c8c6 } + .terminal-3000245001-r2 { fill: #e3e3e3 } + .terminal-3000245001-r3 { fill: #989898 } + .terminal-3000245001-r4 { fill: #e1e1e1 } + .terminal-3000245001-r5 { fill: #121212 } + .terminal-3000245001-r6 { fill: #0053aa } + .terminal-3000245001-r7 { fill: #dde8f3;font-weight: bold } + .terminal-3000245001-r8 { fill: #a5a5a5;font-style: italic; } + .terminal-3000245001-r9 { fill: #1e1e1e } + .terminal-3000245001-r10 { fill: #008139 } + .terminal-3000245001-r11 { fill: #e2e2e2 } + .terminal-3000245001-r12 { fill: #787878 } + .terminal-3000245001-r13 { fill: #b93c5b } + .terminal-3000245001-r14 { fill: #454a50 } + .terminal-3000245001-r15 { fill: #7ae998 } + .terminal-3000245001-r16 { fill: #e2e3e3;font-weight: bold } + .terminal-3000245001-r17 { fill: #0a180e;font-weight: bold } + .terminal-3000245001-r18 { fill: #000000 } + .terminal-3000245001-r19 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - 
Basic details - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - - GitHub organisationWorkflow name - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - nf-corePipeline Name - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - A short description of your pipeline. - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Description - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - Name of the main author / authors - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Author(s) - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackNext - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Basic details + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + + GitHub organisationWorkflow name + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + nf-corePipeline Name + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + A short description of your pipeline. 
+ ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Description + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + Name of the main author / authors + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Author(s) + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackNext + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  @@ -298,256 +298,256 @@ font-weight: 700; } - .terminal-2230840552-matrix { + .terminal-2776506879-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2230840552-title { + .terminal-2776506879-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2230840552-r1 { fill: #c5c8c6 } - .terminal-2230840552-r2 { fill: #e3e3e3 } - .terminal-2230840552-r3 { fill: #989898 } - .terminal-2230840552-r4 { fill: #e1e1e1 } - .terminal-2230840552-r5 { fill: #121212 } - .terminal-2230840552-r6 { fill: #0053aa } - .terminal-2230840552-r7 { fill: #dde8f3;font-weight: bold } - .terminal-2230840552-r8 { fill: #a5a5a5;font-style: italic; } - .terminal-2230840552-r9 { fill: #1e1e1e } - .terminal-2230840552-r10 { fill: #0f4e2a } - .terminal-2230840552-r11 { fill: #0178d4 } - .terminal-2230840552-r12 { fill: #a7a7a7 } - .terminal-2230840552-r13 { fill: #787878 } - .terminal-2230840552-r14 { fill: #e2e2e2 } - .terminal-2230840552-r15 { fill: #b93c5b } - .terminal-2230840552-r16 { fill: #454a50 } - .terminal-2230840552-r17 { fill: #7ae998 } - .terminal-2230840552-r18 { fill: #e2e3e3;font-weight: bold } - .terminal-2230840552-r19 { fill: #0a180e;font-weight: bold } - .terminal-2230840552-r20 { fill: #000000 } - .terminal-2230840552-r21 { fill: #008139 } - 
.terminal-2230840552-r22 { fill: #ddedf9 } + .terminal-2776506879-r1 { fill: #c5c8c6 } + .terminal-2776506879-r2 { fill: #e3e3e3 } + .terminal-2776506879-r3 { fill: #989898 } + .terminal-2776506879-r4 { fill: #e1e1e1 } + .terminal-2776506879-r5 { fill: #121212 } + .terminal-2776506879-r6 { fill: #0053aa } + .terminal-2776506879-r7 { fill: #dde8f3;font-weight: bold } + .terminal-2776506879-r8 { fill: #a5a5a5;font-style: italic; } + .terminal-2776506879-r9 { fill: #1e1e1e } + .terminal-2776506879-r10 { fill: #0f4e2a } + .terminal-2776506879-r11 { fill: #0178d4 } + .terminal-2776506879-r12 { fill: #a7a7a7 } + .terminal-2776506879-r13 { fill: #787878 } + .terminal-2776506879-r14 { fill: #e2e2e2 } + .terminal-2776506879-r15 { fill: #b93c5b } + .terminal-2776506879-r16 { fill: #454a50 } + .terminal-2776506879-r17 { fill: #7ae998 } + .terminal-2776506879-r18 { fill: #e2e3e3;font-weight: bold } + .terminal-2776506879-r19 { fill: #0a180e;font-weight: bold } + .terminal-2776506879-r20 { fill: #000000 } + .terminal-2776506879-r21 { fill: #008139 } + .terminal-2776506879-r22 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Basic details - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - - GitHub organisationWorkflow name - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - nf-corePipeline Name - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - A short description of your pipeline. 
- ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Description - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - Name of the main author / authors - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Author(s) - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackNext - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Basic details + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + + GitHub organisationWorkflow name + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + nf-corePipeline Name + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + A short description of your pipeline. 
+ ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Description + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + Name of the main author / authors + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Author(s) + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackNext + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  @@ -577,251 +577,253 @@ font-weight: 700; } - .terminal-828318910-matrix { + .terminal-1170633481-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-828318910-title { + .terminal-1170633481-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-828318910-r1 { fill: #c5c8c6 } - .terminal-828318910-r2 { fill: #e3e3e3 } - .terminal-828318910-r3 { fill: #989898 } - .terminal-828318910-r4 { fill: #e1e1e1 } - .terminal-828318910-r5 { fill: #121212 } - .terminal-828318910-r6 { fill: #0053aa } - .terminal-828318910-r7 { fill: #dde8f3;font-weight: bold } - .terminal-828318910-r8 { fill: #e1e1e1;font-style: italic; } - .terminal-828318910-r9 { fill: #4ebf71;font-weight: bold } - .terminal-828318910-r10 { fill: #7ae998 } - .terminal-828318910-r11 { fill: #507bb3 } - .terminal-828318910-r12 { fill: #dde6ed;font-weight: bold } - .terminal-828318910-r13 { fill: #008139 } - .terminal-828318910-r14 { fill: #001541 } - .terminal-828318910-r15 { fill: #24292f } - .terminal-828318910-r16 { fill: #e2e3e3;font-weight: bold } - .terminal-828318910-r17 { fill: #ddedf9 } + .terminal-1170633481-r1 { fill: #c5c8c6 } + .terminal-1170633481-r2 { fill: #e3e3e3 } + .terminal-1170633481-r3 { fill: #989898 } + .terminal-1170633481-r4 { fill: #e1e1e1 } + .terminal-1170633481-r5 
{ fill: #121212 } + .terminal-1170633481-r6 { fill: #0053aa } + .terminal-1170633481-r7 { fill: #dde8f3;font-weight: bold } + .terminal-1170633481-r8 { fill: #24292f } + .terminal-1170633481-r9 { fill: #e2e3e3;font-weight: bold } + .terminal-1170633481-r10 { fill: #e2e3e3;font-weight: bold;font-style: italic; } + .terminal-1170633481-r11 { fill: #4ebf71;font-weight: bold } + .terminal-1170633481-r12 { fill: #e1e1e1;font-style: italic; } + .terminal-1170633481-r13 { fill: #7ae998 } + .terminal-1170633481-r14 { fill: #008139 } + .terminal-1170633481-r15 { fill: #507bb3 } + .terminal-1170633481-r16 { fill: #dde6ed;font-weight: bold } + .terminal-1170633481-r17 { fill: #001541 } + .terminal-1170633481-r18 { fill: #e1e1e1;text-decoration: underline; } + .terminal-1170633481-r19 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - To nf-core or not to nf-core? - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Next, we need to know what kind of pipeline this will be. - - Choose "nf-core" if: - - ● You want your pipeline to be part of the nf-core community - ● You think that there's an outside chance that it ever could be part of nf-core - - Choose "Custom" if: - - ● Your pipeline will never be part of nf-core - ● You want full control over all features that are included from the template (including  - those that are mandatory for nf-core). 
- - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - nf-coreCustom - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -                             Not sure? What's the difference?                             - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Choosing "nf-core" effectively pre-selects the following template features: - - ● GitHub Actions Continuous Integration (CI) configuration for the following: - ▪ Small-scale (GitHub) and large-scale (AWS) tests - ▪ Code format linting with prettier - ▪ Auto-fix functionality using @nf-core-bot - ▪ Marking old issues as stale - ● Inclusion of shared nf-core config profiles - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Choose pipeline type + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +          Choose "nf-core" if:                  Choose "Custom" if:           + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + ● You want your pipeline to be part of the● Your pipeline will never be part of  + nf-core communitynf-core + ● You think that there's an outside chance● You want full control over all features  + that it ever could be part of nf-corethat are included from the template  + (including those that are mandatory for  + nf-core). + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + nf-core + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Custom + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +                                  What's the difference?                                  
+ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Choosing "nf-core" effectively pre-selects the following template features: + + ● GitHub Actions continuous-integration configuration files: + ▪ Pipeline test runs: Small-scale (GitHub) and large-scale (AWS) + ▪ Code formatting checks with Prettier + ▪ Auto-fix linting functionality using @nf-core-bot + ▪ Marking old issues as stale + ● Inclusion of shared nf-core configuration profiles + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  @@ -851,257 +853,257 @@ font-weight: 700; } - .terminal-2112272033-matrix { + .terminal-3272111277-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2112272033-title { + .terminal-3272111277-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2112272033-r1 { fill: #c5c8c6 } - .terminal-2112272033-r2 { fill: #e3e3e3 } - .terminal-2112272033-r3 { fill: #989898 } - .terminal-2112272033-r4 { fill: #e1e1e1 } - .terminal-2112272033-r5 { fill: #121212 } - .terminal-2112272033-r6 { fill: #0053aa } - .terminal-2112272033-r7 { fill: #dde8f3;font-weight: bold } - .terminal-2112272033-r8 { fill: #1e1e1e } - .terminal-2112272033-r9 { fill: #0178d4 } - .terminal-2112272033-r10 { fill: #454a50 } - .terminal-2112272033-r11 { fill: #e2e2e2 } - .terminal-2112272033-r12 { fill: #808080 } - .terminal-2112272033-r13 { fill: #e2e3e3;font-weight: bold } - .terminal-2112272033-r14 { fill: #000000 } - .terminal-2112272033-r15 { fill: #e4e4e4 } - .terminal-2112272033-r16 { fill: #14191f } - .terminal-2112272033-r17 { fill: #507bb3 } - .terminal-2112272033-r18 { fill: #dde6ed;font-weight: bold } - .terminal-2112272033-r19 { fill: #001541 } - .terminal-2112272033-r20 { fill: #7ae998 } - .terminal-2112272033-r21 { fill: #0a180e;font-weight: bold } - .terminal-2112272033-r22 { fill: #008139 } - .terminal-2112272033-r23 { fill: #ddedf9 } + 
.terminal-3272111277-r1 { fill: #c5c8c6 } + .terminal-3272111277-r2 { fill: #e3e3e3 } + .terminal-3272111277-r3 { fill: #989898 } + .terminal-3272111277-r4 { fill: #e1e1e1 } + .terminal-3272111277-r5 { fill: #121212 } + .terminal-3272111277-r6 { fill: #0053aa } + .terminal-3272111277-r7 { fill: #dde8f3;font-weight: bold } + .terminal-3272111277-r8 { fill: #1e1e1e } + .terminal-3272111277-r9 { fill: #0178d4 } + .terminal-3272111277-r10 { fill: #454a50 } + .terminal-3272111277-r11 { fill: #e2e2e2 } + .terminal-3272111277-r12 { fill: #808080 } + .terminal-3272111277-r13 { fill: #e2e3e3;font-weight: bold } + .terminal-3272111277-r14 { fill: #000000 } + .terminal-3272111277-r15 { fill: #e4e4e4 } + .terminal-3272111277-r16 { fill: #14191f } + .terminal-3272111277-r17 { fill: #507bb3 } + .terminal-3272111277-r18 { fill: #dde6ed;font-weight: bold } + .terminal-3272111277-r19 { fill: #001541 } + .terminal-3272111277-r20 { fill: #7ae998 } + .terminal-3272111277-r21 { fill: #0a180e;font-weight: bold } + .terminal-3272111277-r22 { fill: #008139 } + .terminal-3272111277-r23 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Template features - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Use reference The pipeline will beHide help - ▁▁▁▁▁▁▁▁genomesconfigured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - copy of the most  - common reference  - genome files from  - iGenomes - - - - Nf-core pipelines are configured to use a copy of the most - common reference genome files. 
- - By selecting this option, your pipeline will include a  - configuration file specifying the paths to these files. - - The required code to use these files will also be included - in the template. When the pipeline user provides an ▆▆ - appropriate genome key, the pipeline will automatically  - download the required reference files. - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Add Github CI testsThe pipeline will Show help - ▁▁▁▁▁▁▁▁include several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - GitHub actions for  - Continuous  - Integration (CI)  - testing - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▂▂ - Add Github badgesThe README.md file Show help - ▁▁▁▁▁▁▁▁of the pipeline will▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - include GitHub  - badges - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Add configuration The pipeline will Show help - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackContinue - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Template features + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Use reference The pipeline will beHide help + ▁▁▁▁▁▁▁▁genomesconfigured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + copy of the most  + common reference  + genome files from  + iGenomes + + + Nf-core pipelines are configured to use a copy of the most common  + reference genome files. + + By selecting this option, your pipeline will include a configuration  + file specifying the paths to these files. + + The required code to use these files will also be included in the  + template. When the pipeline user provides an appropriate genome key, the + pipeline will automatically download the required reference files. 
+ ▅▅ + For more information about reference genomes in nf-core pipelines, see  + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Add Github CI testsThe pipeline will Show help + ▁▁▁▁▁▁▁▁include several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + GitHub actions for  + Continuous  + Integration (CI)  + testing + ▆▆ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Add Github badgesThe README.md file Show help + ▁▁▁▁▁▁▁▁of the pipeline will▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + include GitHub  + badges + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackContinue + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  D  Toggle dark mode  Q  Quit  @@ -1131,253 +1133,253 @@ font-weight: 700; } - .terminal-2426593002-matrix { + .terminal-3970307065-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2426593002-title { + .terminal-3970307065-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2426593002-r1 { fill: #c5c8c6 } - .terminal-2426593002-r2 { fill: #e3e3e3 } - .terminal-2426593002-r3 { fill: #989898 } - .terminal-2426593002-r4 { fill: #e1e1e1 } - .terminal-2426593002-r5 { fill: #121212 } - .terminal-2426593002-r6 { fill: #0053aa } - .terminal-2426593002-r7 { fill: #dde8f3;font-weight: bold } - .terminal-2426593002-r8 { fill: #a5a5a5;font-style: italic; } - .terminal-2426593002-r9 { fill: #1e1e1e } - .terminal-2426593002-r10 { fill: #008139 } - .terminal-2426593002-r11 { fill: #e2e2e2 } - .terminal-2426593002-r12 { fill: #b93c5b } - .terminal-2426593002-r13 { fill: #808080 } - .terminal-2426593002-r14 { fill: #454a50 } - .terminal-2426593002-r15 { fill: #7ae998 } - .terminal-2426593002-r16 { fill: #e2e3e3;font-weight: bold } - .terminal-2426593002-r17 { fill: #0a180e;font-weight: bold } - .terminal-2426593002-r18 { fill: #000000 } - .terminal-2426593002-r19 { fill: #ddedf9 } + .terminal-3970307065-r1 { fill: #c5c8c6 } + .terminal-3970307065-r2 { fill: #e3e3e3 } + .terminal-3970307065-r3 { fill: #989898 } + .terminal-3970307065-r4 { 
fill: #e1e1e1 } + .terminal-3970307065-r5 { fill: #121212 } + .terminal-3970307065-r6 { fill: #0053aa } + .terminal-3970307065-r7 { fill: #dde8f3;font-weight: bold } + .terminal-3970307065-r8 { fill: #a5a5a5;font-style: italic; } + .terminal-3970307065-r9 { fill: #1e1e1e } + .terminal-3970307065-r10 { fill: #008139 } + .terminal-3970307065-r11 { fill: #e2e2e2 } + .terminal-3970307065-r12 { fill: #b93c5b } + .terminal-3970307065-r13 { fill: #808080 } + .terminal-3970307065-r14 { fill: #454a50 } + .terminal-3970307065-r15 { fill: #7ae998 } + .terminal-3970307065-r16 { fill: #e2e3e3;font-weight: bold } + .terminal-3970307065-r17 { fill: #0a180e;font-weight: bold } + .terminal-3970307065-r18 { fill: #000000 } + .terminal-3970307065-r19 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Final details - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - - First version of the pipelinePath to the output directory where the pipeline  - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔will be created - 1.0.0dev▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁. - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ▔▔▔▔▔▔▔▔Force - If the pipeline output directory exists, remove it and continue. 
- ▁▁▁▁▁▁▁▁ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackFinish - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Final details + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + + First version of the pipelinePath to the output directory where the pipeline  + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔will be created + 1.0.0dev▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁. + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + ▔▔▔▔▔▔▔▔Force creation + Overwrite any existing pipeline output directories. + ▁▁▁▁▁▁▁▁ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackFinish + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  @@ -1407,257 +1409,257 @@ font-weight: 700; } - .terminal-368636757-matrix { + .terminal-3041904966-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-368636757-title { + .terminal-3041904966-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-368636757-r1 { fill: #c5c8c6 } - .terminal-368636757-r2 { fill: #e3e3e3 } - .terminal-368636757-r3 { fill: #989898 } - .terminal-368636757-r4 { fill: #e1e1e1 } - .terminal-368636757-r5 { fill: #121212 } - .terminal-368636757-r6 { fill: #0053aa } - .terminal-368636757-r7 { fill: #dde8f3;font-weight: bold } - .terminal-368636757-r8 { fill: #454a50 } - .terminal-368636757-r9 { fill: #a5a5a5;font-style: italic; } - .terminal-368636757-r10 { fill: #e2e3e3;font-weight: bold } - .terminal-368636757-r11 { fill: #1e1e1e } - .terminal-368636757-r12 { fill: #008139 } - .terminal-368636757-r13 { fill: 
#000000 } - .terminal-368636757-r14 { fill: #787878 } - .terminal-368636757-r15 { fill: #e2e2e2 } - .terminal-368636757-r16 { fill: #b93c5b } - .terminal-368636757-r17 { fill: #808080 } - .terminal-368636757-r18 { fill: #7ae998 } - .terminal-368636757-r19 { fill: #507bb3 } - .terminal-368636757-r20 { fill: #0a180e;font-weight: bold } - .terminal-368636757-r21 { fill: #dde6ed;font-weight: bold } - .terminal-368636757-r22 { fill: #001541 } - .terminal-368636757-r23 { fill: #ddedf9 } + .terminal-3041904966-r1 { fill: #c5c8c6 } + .terminal-3041904966-r2 { fill: #e3e3e3 } + .terminal-3041904966-r3 { fill: #989898 } + .terminal-3041904966-r4 { fill: #e1e1e1 } + .terminal-3041904966-r5 { fill: #121212 } + .terminal-3041904966-r6 { fill: #0053aa } + .terminal-3041904966-r7 { fill: #dde8f3;font-weight: bold } + .terminal-3041904966-r8 { fill: #a5a5a5;font-style: italic; } + .terminal-3041904966-r9 { fill: #1e1e1e } + .terminal-3041904966-r10 { fill: #008139 } + .terminal-3041904966-r11 { fill: #454a50 } + .terminal-3041904966-r12 { fill: #787878 } + .terminal-3041904966-r13 { fill: #e2e2e2 } + .terminal-3041904966-r14 { fill: #e2e3e3;font-weight: bold } + .terminal-3041904966-r15 { fill: #000000 } + .terminal-3041904966-r16 { fill: #b93c5b } + .terminal-3041904966-r17 { fill: #808080 } + .terminal-3041904966-r18 { fill: #7ae998 } + .terminal-3041904966-r19 { fill: #507bb3 } + .terminal-3041904966-r20 { fill: #0a180e;font-weight: bold } + .terminal-3041904966-r21 { fill: #dde6ed;font-weight: bold } + .terminal-3041904966-r22 { fill: #001541 } + .terminal-3041904966-r23 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - 
▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Create GitHub repository - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Create a GitHub repo - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - After creating the pipeline template locally, we can create a GitHub repository and push the - code to it. - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Your GitHub usernameYour GitHub personal access token for Show - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔login.▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - GitHub username▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁GitHub token - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Please select the the GitHub repository settings: - - - ▔▔▔▔▔▔▔▔Private - Select if the new GitHub repo must be private. - ▁▁▁▁▁▁▁▁ - ▔▔▔▔▔▔▔▔Push files - Select if you would like to push all the pipeline template files to your GitHub repo - ▁▁▁▁▁▁▁▁and all the branches required to keep the pipeline up to date with new releases of  - nf-core. - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackCreate GitHub repoFinish without creating a repo - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Create GitHub repository + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + Now that we have created a new pipeline locally, we can create a new GitHub repository using + the GitHub API and push the code to it. 
+ + + + Your GitHub usernameYour GitHub personal access token + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔for login.▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + GitHub username▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔Show + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁GitHub token▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + ▔▔▔▔▔▔▔▔Private + Select to make the new GitHub repo private. + ▁▁▁▁▁▁▁▁ + ▔▔▔▔▔▔▔▔Push files + Select to push pipeline files and branches to your GitHub repo. + ▁▁▁▁▁▁▁▁ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackCreate GitHub repoFinish without creating a repo + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  @@ -1687,255 +1689,256 @@ font-weight: 700; } - .terminal-1480303962-matrix { + .terminal-4130924772-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1480303962-title { + .terminal-4130924772-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1480303962-r1 { fill: #c5c8c6 } - .terminal-1480303962-r2 { fill: #e3e3e3 } - .terminal-1480303962-r3 { fill: #989898 } - .terminal-1480303962-r4 { fill: #e1e1e1 } - .terminal-1480303962-r5 { fill: #121212 } - .terminal-1480303962-r6 { fill: #0053aa } - .terminal-1480303962-r7 { fill: #dde8f3;font-weight: bold } - .terminal-1480303962-r8 { fill: #98a84b } - .terminal-1480303962-r9 { fill: #626262 } - .terminal-1480303962-r10 { fill: #608ab1 } - .terminal-1480303962-r11 { fill: #d0b344 } - .terminal-1480303962-r12 { fill: #4ebf71;font-weight: bold } - .terminal-1480303962-r13 { fill: #d2d2d2 } - .terminal-1480303962-r14 { fill: #82aaff } - .terminal-1480303962-r15 { fill: #eeffff } - .terminal-1480303962-r16 { fill: #7ae998 } - .terminal-1480303962-r17 { fill: #507bb3 } - .terminal-1480303962-r18 { fill: #dde6ed;font-weight: bold } - .terminal-1480303962-r19 { fill: #008139 } - 
.terminal-1480303962-r20 { fill: #001541 } - .terminal-1480303962-r21 { fill: #ddedf9 } + .terminal-4130924772-r1 { fill: #c5c8c6 } + .terminal-4130924772-r2 { fill: #e3e3e3 } + .terminal-4130924772-r3 { fill: #989898 } + .terminal-4130924772-r4 { fill: #e1e1e1 } + .terminal-4130924772-r5 { fill: #121212 } + .terminal-4130924772-r6 { fill: #0053aa } + .terminal-4130924772-r7 { fill: #dde8f3;font-weight: bold } + .terminal-4130924772-r8 { fill: #98e024 } + .terminal-4130924772-r9 { fill: #626262 } + .terminal-4130924772-r10 { fill: #9d65ff } + .terminal-4130924772-r11 { fill: #fd971f } + .terminal-4130924772-r12 { fill: #4ebf71;font-weight: bold } + .terminal-4130924772-r13 { fill: #d2d2d2 } + .terminal-4130924772-r14 { fill: #82aaff } + .terminal-4130924772-r15 { fill: #eeffff } + .terminal-4130924772-r16 { fill: #939393;font-weight: bold } + .terminal-4130924772-r17 { fill: #7ae998 } + .terminal-4130924772-r18 { fill: #507bb3 } + .terminal-4130924772-r19 { fill: #dde6ed;font-weight: bold } + .terminal-4130924772-r20 { fill: #008139 } + .terminal-4130924772-r21 { fill: #001541 } + .terminal-4130924772-r22 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - HowTo create a GitHub repository - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - -                                         ,--./,-. 
-         ___     __   __   __   ___     /,-._.--~\ - |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                        `._,._,' - - If you would like to create the GitHub repository later, you can do it manually by following - these steps: - -  1. Create a new GitHub repository -  2. Add the remote to your local repository - - - cd<pipeline_directory> - gitremoteaddorigingit@github.com:<username>/<repo_name>.git - - -  3. Push the code to the remote - - - gitpush--allorigin - - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Close AppShow Logging - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + HowTo create a GitHub repository + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + +                                           ,--./,-. +           ___     __   __   __   ___     /,-._.--~\  +     |\ | |__  __ /  ` /  \ |__) |__         }  { +     | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                           `._,._,' + + If you would like to create the GitHub repository later, you can do it manually by following + these steps: + +  1. Create a new GitHub repository +  2. Add the remote to your local repository: + + + cd<pipeline_directory> + gitremoteaddorigingit@github.com:<username>/<repo_name>.git + + +  3. Push the code to the remote: + + + gitpush--allorigin + + + ● Note the --all flag: this is needed to push all branches to the remote. 
+ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + CloseShow pipeline creation log + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  @@ -1965,248 +1968,248 @@ font-weight: 700; } - .terminal-4165331380-matrix { + .terminal-3308461771-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-4165331380-title { + .terminal-3308461771-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-4165331380-r1 { fill: #c5c8c6 } - .terminal-4165331380-r2 { fill: #e3e3e3 } - .terminal-4165331380-r3 { fill: #989898 } - .terminal-4165331380-r4 { fill: #e1e1e1 } - .terminal-4165331380-r5 { fill: #121212 } - .terminal-4165331380-r6 { fill: #0053aa } - .terminal-4165331380-r7 { fill: #dde8f3;font-weight: bold } - .terminal-4165331380-r8 { fill: #7ae998 } - .terminal-4165331380-r9 { fill: #507bb3 } - .terminal-4165331380-r10 { fill: #4ebf71;font-weight: bold } - .terminal-4165331380-r11 { fill: #dde6ed;font-weight: bold } - .terminal-4165331380-r12 { fill: #008139 } - .terminal-4165331380-r13 { fill: #001541 } - .terminal-4165331380-r14 { fill: #ddedf9 } + .terminal-3308461771-r1 { fill: #c5c8c6 } + .terminal-3308461771-r2 { fill: #e3e3e3 } + .terminal-3308461771-r3 { fill: #989898 } + .terminal-3308461771-r4 { fill: #e1e1e1 } + .terminal-3308461771-r5 { fill: #121212 } + .terminal-3308461771-r6 { fill: #0053aa } + .terminal-3308461771-r7 { fill: #dde8f3;font-weight: bold } + .terminal-3308461771-r8 { fill: #7ae998 } + .terminal-3308461771-r9 { fill: #507bb3 } + .terminal-3308461771-r10 { fill: #4ebf71;font-weight: bold } + .terminal-3308461771-r11 { fill: #dde6ed;font-weight: bold } + .terminal-3308461771-r12 { fill: #008139 } + .terminal-3308461771-r13 { fill: #001541 } + .terminal-3308461771-r14 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - 
+ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Create GitHub repository - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - After creating the pipeline template locally, we can create a GitHub repository and push the - code to it. - - Do you want to create a GitHub repository? - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Create GitHub repoFinish without creating a repo - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Create GitHub repository + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + After creating the pipeline template locally, we can create a GitHub repository and push the + code to it. + + Do you want to create a GitHub repository? 
+ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Create GitHub repoFinish without creating a repo + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  @@ -2236,254 +2239,254 @@ font-weight: 700; } - .terminal-3762159600-matrix { + .terminal-1734914007-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3762159600-title { + .terminal-1734914007-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3762159600-r1 { fill: #c5c8c6 } - .terminal-3762159600-r2 { fill: #e3e3e3 } - .terminal-3762159600-r3 { fill: #989898 } - .terminal-3762159600-r4 { fill: #e1e1e1 } - .terminal-3762159600-r5 { fill: #121212 } - .terminal-3762159600-r6 { fill: #0053aa } - .terminal-3762159600-r7 { fill: #dde8f3;font-weight: bold } - .terminal-3762159600-r8 { fill: #1e1e1e } - .terminal-3762159600-r9 { fill: #507bb3 } - .terminal-3762159600-r10 { fill: #e2e2e2 } - .terminal-3762159600-r11 { fill: #808080 } - .terminal-3762159600-r12 { fill: #dde6ed;font-weight: bold } - .terminal-3762159600-r13 { fill: #001541 } - .terminal-3762159600-r14 { fill: #454a50 } - .terminal-3762159600-r15 { fill: #7ae998 } - .terminal-3762159600-r16 { fill: #e2e3e3;font-weight: bold } - .terminal-3762159600-r17 { fill: #0a180e;font-weight: bold } - .terminal-3762159600-r18 { fill: #000000 } - .terminal-3762159600-r19 { fill: #008139 } - .terminal-3762159600-r20 { fill: #ddedf9 } + .terminal-1734914007-r1 { fill: #c5c8c6 } + .terminal-1734914007-r2 { fill: #e3e3e3 } + .terminal-1734914007-r3 { fill: #989898 } + .terminal-1734914007-r4 { fill: #e1e1e1 } + .terminal-1734914007-r5 { fill: #121212 } + .terminal-1734914007-r6 { fill: #0053aa } + .terminal-1734914007-r7 { fill: #dde8f3;font-weight: bold } + .terminal-1734914007-r8 { fill: #1e1e1e } + .terminal-1734914007-r9 { fill: #507bb3 } + 
.terminal-1734914007-r10 { fill: #e2e2e2 } + .terminal-1734914007-r11 { fill: #808080 } + .terminal-1734914007-r12 { fill: #dde6ed;font-weight: bold } + .terminal-1734914007-r13 { fill: #001541 } + .terminal-1734914007-r14 { fill: #454a50 } + .terminal-1734914007-r15 { fill: #7ae998 } + .terminal-1734914007-r16 { fill: #e2e3e3;font-weight: bold } + .terminal-1734914007-r17 { fill: #0a180e;font-weight: bold } + .terminal-1734914007-r18 { fill: #000000 } + .terminal-1734914007-r19 { fill: #008139 } + .terminal-1734914007-r20 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Template features - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Use reference genomesThe pipeline will be Show help - ▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - copy of the most  - common reference  - genome files from  - iGenomes - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Add Github CI testsThe pipeline will Show help - ▁▁▁▁▁▁▁▁include several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - GitHub actions for  - Continuous  - Integration (CI)  - testing - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Add Github badgesThe README.md file ofShow help - ▁▁▁▁▁▁▁▁the pipeline will ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - include GitHub badges - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Add configuration The pipeline will Show help - ▁▁▁▁▁▁▁▁filesinclude configuration▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - profiles containing  - custom parameters  - requried to run  - nf-core pipelines at  - different  - institutions - - - - - - - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackContinue - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  D  Toggle dark mode  Q  Quit  + + + + 
nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Template features + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Use reference genomesThe pipeline will be Show help + ▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + copy of the most  + common reference  + genome files from  + iGenomes + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Add Github CI testsThe pipeline will Show help + ▁▁▁▁▁▁▁▁include several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + GitHub actions for  + Continuous  + Integration (CI)  + testing + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Add Github badgesThe README.md file ofShow help + ▁▁▁▁▁▁▁▁the pipeline will ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + include GitHub badges + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Add configuration The pipeline will Show help + ▁▁▁▁▁▁▁▁filesinclude configuration▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + profiles containing  + custom parameters  + requried to run  + nf-core pipelines at  + different  + institutions + + + + + + + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackContinue + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  D  Toggle dark mode  Q  Quit  @@ -2513,254 +2516,254 @@ font-weight: 700; } - .terminal-1488796558-matrix { + .terminal-182709094-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1488796558-title { + .terminal-182709094-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1488796558-r1 { fill: #c5c8c6 } - .terminal-1488796558-r2 { fill: #e3e3e3 } - .terminal-1488796558-r3 { fill: #989898 } - .terminal-1488796558-r4 { fill: #e1e1e1 } - .terminal-1488796558-r5 { fill: #121212 } - .terminal-1488796558-r6 { fill: #0053aa } - .terminal-1488796558-r7 { fill: #dde8f3;font-weight: bold } - .terminal-1488796558-r8 { fill: #1e1e1e } - .terminal-1488796558-r9 { fill: #507bb3 } - .terminal-1488796558-r10 { fill: #e2e2e2 
} - .terminal-1488796558-r11 { fill: #808080 } - .terminal-1488796558-r12 { fill: #dde6ed;font-weight: bold } - .terminal-1488796558-r13 { fill: #001541 } - .terminal-1488796558-r14 { fill: #454a50 } - .terminal-1488796558-r15 { fill: #7ae998 } - .terminal-1488796558-r16 { fill: #e2e3e3;font-weight: bold } - .terminal-1488796558-r17 { fill: #0a180e;font-weight: bold } - .terminal-1488796558-r18 { fill: #000000 } - .terminal-1488796558-r19 { fill: #008139 } - .terminal-1488796558-r20 { fill: #ddedf9 } + .terminal-182709094-r1 { fill: #c5c8c6 } + .terminal-182709094-r2 { fill: #e3e3e3 } + .terminal-182709094-r3 { fill: #989898 } + .terminal-182709094-r4 { fill: #e1e1e1 } + .terminal-182709094-r5 { fill: #121212 } + .terminal-182709094-r6 { fill: #0053aa } + .terminal-182709094-r7 { fill: #dde8f3;font-weight: bold } + .terminal-182709094-r8 { fill: #1e1e1e } + .terminal-182709094-r9 { fill: #507bb3 } + .terminal-182709094-r10 { fill: #e2e2e2 } + .terminal-182709094-r11 { fill: #808080 } + .terminal-182709094-r12 { fill: #dde6ed;font-weight: bold } + .terminal-182709094-r13 { fill: #001541 } + .terminal-182709094-r14 { fill: #454a50 } + .terminal-182709094-r15 { fill: #7ae998 } + .terminal-182709094-r16 { fill: #e2e3e3;font-weight: bold } + .terminal-182709094-r17 { fill: #0a180e;font-weight: bold } + .terminal-182709094-r18 { fill: #000000 } + .terminal-182709094-r19 { fill: #008139 } + .terminal-182709094-r20 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Template features - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - 
▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Use reference genomesThe pipeline will be Show help - ▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - copy of the most  - common reference  - genome files from  - iGenomes - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackContinue - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Template features + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Use reference genomesThe pipeline will be Show help + ▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + copy of the most  + common reference  + genome files from  + iGenomes + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackContinue + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  D  Toggle dark mode  Q  Quit  @@ -2790,256 +2793,256 @@ font-weight: 700; } - .terminal-2179958535-matrix { + .terminal-2320153615-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2179958535-title { + .terminal-2320153615-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2179958535-r1 { fill: #c5c8c6 } - .terminal-2179958535-r2 { fill: #e3e3e3 } - .terminal-2179958535-r3 { fill: #989898 } - .terminal-2179958535-r4 { fill: #e1e1e1 } - .terminal-2179958535-r5 { fill: #121212 } - .terminal-2179958535-r6 { fill: #0053aa } - .terminal-2179958535-r7 { fill: #dde8f3;font-weight: bold } - .terminal-2179958535-r8 { fill: #a5a5a5;font-style: italic; } - .terminal-2179958535-r9 { fill: #1e1e1e } - .terminal-2179958535-r10 { fill: #0f4e2a } - .terminal-2179958535-r11 { fill: #7b3042 } - .terminal-2179958535-r12 { fill: #a7a7a7 } - .terminal-2179958535-r13 { 
fill: #787878 } - .terminal-2179958535-r14 { fill: #e2e2e2 } - .terminal-2179958535-r15 { fill: #b93c5b } - .terminal-2179958535-r16 { fill: #454a50 } - .terminal-2179958535-r17 { fill: #166d39 } - .terminal-2179958535-r18 { fill: #e2e3e3;font-weight: bold } - .terminal-2179958535-r19 { fill: #3c8b54;font-weight: bold } - .terminal-2179958535-r20 { fill: #000000 } - .terminal-2179958535-r21 { fill: #5aa86f } - .terminal-2179958535-r22 { fill: #ddedf9 } + .terminal-2320153615-r1 { fill: #c5c8c6 } + .terminal-2320153615-r2 { fill: #e3e3e3 } + .terminal-2320153615-r3 { fill: #989898 } + .terminal-2320153615-r4 { fill: #e1e1e1 } + .terminal-2320153615-r5 { fill: #121212 } + .terminal-2320153615-r6 { fill: #0053aa } + .terminal-2320153615-r7 { fill: #dde8f3;font-weight: bold } + .terminal-2320153615-r8 { fill: #a5a5a5;font-style: italic; } + .terminal-2320153615-r9 { fill: #1e1e1e } + .terminal-2320153615-r10 { fill: #0f4e2a } + .terminal-2320153615-r11 { fill: #7b3042 } + .terminal-2320153615-r12 { fill: #a7a7a7 } + .terminal-2320153615-r13 { fill: #787878 } + .terminal-2320153615-r14 { fill: #e2e2e2 } + .terminal-2320153615-r15 { fill: #b93c5b } + .terminal-2320153615-r16 { fill: #454a50 } + .terminal-2320153615-r17 { fill: #166d39 } + .terminal-2320153615-r18 { fill: #e2e3e3;font-weight: bold } + .terminal-2320153615-r19 { fill: #3c8b54;font-weight: bold } + .terminal-2320153615-r20 { fill: #000000 } + .terminal-2320153615-r21 { fill: #5aa86f } + .terminal-2320153615-r22 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Basic details - - 
▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - - GitHub organisationWorkflow name - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - nf-corePipeline Name - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - Value error, Must be lowercase without  - punctuation. - - A short description of your pipeline. - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Description - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - Value error, Cannot be left empty. - - Name of the main author / authors - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Author(s) - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - Value error, Cannot be left empty. - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackNext - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Basic details + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + + GitHub organisationWorkflow name + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + nf-corePipeline Name + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + Value error, Must be lowercase without  + punctuation. + + A short description of your pipeline. 
+ ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Description + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + Value error, Cannot be left empty. + + Name of the main author / authors + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Author(s) + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + Value error, Cannot be left empty. + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackNext + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  @@ -3069,145 +3072,147 @@ font-weight: 700; } - .terminal-2481518089-matrix { + .terminal-2790734285-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2481518089-title { + .terminal-2790734285-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2481518089-r1 { fill: #c5c8c6 } - .terminal-2481518089-r2 { fill: #e3e3e3 } - .terminal-2481518089-r3 { fill: #989898 } - .terminal-2481518089-r4 { fill: #1e1e1e } - .terminal-2481518089-r5 { fill: #e1e1e1 } - .terminal-2481518089-r6 { fill: #121212 } - .terminal-2481518089-r7 { fill: #0053aa } - .terminal-2481518089-r8 { fill: #dde8f3;font-weight: bold } - .terminal-2481518089-r9 { fill: #98a84b } - .terminal-2481518089-r10 { fill: #626262 } - .terminal-2481518089-r11 { fill: #608ab1 } - .terminal-2481518089-r12 { fill: #d0b344 } - .terminal-2481518089-r13 { fill: #14191f } - .terminal-2481518089-r14 { fill: #e1e1e1;text-decoration: underline; } - .terminal-2481518089-r15 { fill: #ddedf9 } + .terminal-2790734285-r1 { fill: #c5c8c6 } + .terminal-2790734285-r2 { fill: #e3e3e3 } + .terminal-2790734285-r3 { fill: #989898 } + .terminal-2790734285-r4 { fill: #1e1e1e } + .terminal-2790734285-r5 { fill: #98e024 } + .terminal-2790734285-r6 { 
fill: #626262 } + .terminal-2790734285-r7 { fill: #9d65ff } + .terminal-2790734285-r8 { fill: #fd971f } + .terminal-2790734285-r9 { fill: #e1e1e1 } + .terminal-2790734285-r10 { fill: #121212 } + .terminal-2790734285-r11 { fill: #0053aa } + .terminal-2790734285-r12 { fill: #dde8f3;font-weight: bold } + .terminal-2790734285-r13 { fill: #e1e1e1;text-decoration: underline; } + .terminal-2790734285-r14 { fill: #e1e1e1;font-style: italic; } + .terminal-2790734285-r15 { fill: #14191f } + .terminal-2790734285-r16 { fill: #e1e1e1;font-weight: bold;font-style: italic; } + .terminal-2790734285-r17 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pip… - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Create a pipeline from the nf-core template - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - -                                         ,--./,-. -         ___     __   __   __   ___     /,-._.--~\ - |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                        `._,._,' - ▇▇ - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - nf-core create - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - This app will help you create a new nf-core pipeline. It uses the  - nf-core pipeline template, which is kept within the nf-core/tools  - repository. - -  D Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pip… + +                                           ,--./,-. 
+           ___     __   __   __   ___     /,-._.--~\  +     |\ | |__  __ /  ` /  \ |__) |__         }  { +     | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                           `._,._,' + + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Welcome to the nf-core pipeline creation wizard + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + This app will help you create a new Nextflow pipeline from the nf-core + pipeline template, part of the nf-core/tools repository. + + The template must be used for nf-core pipelines, but hopefully helps  + all Nextflow developers benefit from nf-core best practices. + + If you want to add a pipeline to nf-core, please join on Slack and ▆▆ + discuss your plans with the community as early as possible; ideally  + before you start on your pipeline! See the nf-core guidelines and the  + #new-pipelines Slack channel for more information. +  D Toggle dark mode  Q  Quit  diff --git a/tests/lint/configs.py b/tests/lint/configs.py index b50a1393a..8610910cd 100644 --- a/tests/lint/configs.py +++ b/tests/lint/configs.py @@ -2,8 +2,8 @@ import yaml -import nf_core.create import nf_core.lint +import nf_core.pipelines.create def test_withname_in_modules_config(self): diff --git a/tests/lint/nfcore_yml.py b/tests/lint/nfcore_yml.py index 474ccd48f..9d745a634 100644 --- a/tests/lint/nfcore_yml.py +++ b/tests/lint/nfcore_yml.py @@ -1,8 +1,8 @@ import re from pathlib import Path -import nf_core.create import nf_core.lint +import nf_core.pipelines.create def test_nfcore_yml_pass(self): From eaedeb75c780451b427201bec8a556816b2ee7ab Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Wed, 3 Apr 2024 10:06:10 +0000 Subject: [PATCH 090/737] fix providing a prefix twitha template yaml --- nf_core/__main__.py | 1 + nf_core/pipelines/create/create.py | 4 ++++ 2 files changed, 5 insertions(+) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 
c12378731..8d066b0ec 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -648,6 +648,7 @@ def create_pipeline(ctx, name, description, author, version, force, outdir, temp version=version, force=force, outdir=outdir, + template_config=template_yaml, organisation=organisation, ) create_obj.init_pipeline() diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index ff1d488b1..b5136cf8b 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -132,9 +132,13 @@ def check_template_yaml_info(self, template_yaml, name, description, author): with open(template_yaml) as f: template_yaml = yaml.safe_load(f) config = CreateConfig(**template_yaml) + # Allow giving a prefix through a template + if template_yaml["prefix"] is not None and config.org is None: + config.org = template_yaml["prefix"] except FileNotFoundError: raise UserWarning(f"Template YAML file '{template_yaml}' not found.") + # Check required fields missing_fields = [] if config.name is None and name is None: missing_fields.append("name") From 682d9b35f603c162946bd4eb9891e14f02419d0a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Wed, 3 Apr 2024 10:07:29 +0000 Subject: [PATCH 091/737] update error message in create pipeline test --- tests/test_cli.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_cli.py b/tests/test_cli.py index 29fb703f2..f3e111a58 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -265,7 +265,7 @@ def test_create_error(self, mock_create): result = self.invoke_cli(cmd) assert result.exit_code == 1 - assert "Command arguments are not accepted in interactive mode." in result.output + assert "Partial arguments supplied." 
in result.output @mock.patch("nf_core.pipelines.create.PipelineCreateApp") def test_create_app(self, mock_create): From 3aa383c5c8bbbf35d15dd00ac2f07129bfc15ffd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Wed, 3 Apr 2024 11:45:24 +0000 Subject: [PATCH 092/737] not show pipeline logging when creating a repo --- nf_core/pipelines/create/__init__.py | 4 - nf_core/pipelines/create/githubexit.py | 1 - nf_core/pipelines/create/githubrepo.py | 4 +- nf_core/pipelines/create/loggingscreen.py | 6 +- tests/__snapshots__/test_create_app.ambr | 253 +++++++++++----------- 5 files changed, 132 insertions(+), 136 deletions(-) diff --git a/nf_core/pipelines/create/__init__.py b/nf_core/pipelines/create/__init__.py index 15670ac5d..05589d5d2 100644 --- a/nf_core/pipelines/create/__init__.py +++ b/nf_core/pipelines/create/__init__.py @@ -93,10 +93,6 @@ def on_button_pressed(self, event: Button.Pressed) -> None: self.switch_screen("github_repo_question") elif event.button.id == "exit": self.push_screen("github_exit") - elif event.button.id == "show_logging": - # Set logging state to repo created to see the button for closing the logging screen - self.LOGGING_STATE = "repo created" - self.switch_screen(LoggingScreen()) if event.button.id == "close_app": self.exit(return_code=0) if event.button.id == "back": diff --git a/nf_core/pipelines/create/githubexit.py b/nf_core/pipelines/create/githubexit.py index 10346d030..79421813f 100644 --- a/nf_core/pipelines/create/githubexit.py +++ b/nf_core/pipelines/create/githubexit.py @@ -44,6 +44,5 @@ def compose(self) -> ComposeResult: yield Markdown(exit_help_text_markdown) yield Center( Button("Close", id="close_app", variant="success"), - Button("Show pipeline creation log", id="show_logging", variant="primary"), classes="cta", ) diff --git a/nf_core/pipelines/create/githubrepo.py b/nf_core/pipelines/create/githubrepo.py index aee4b39dc..656f8c51e 100644 --- a/nf_core/pipelines/create/githubrepo.py +++ 
b/nf_core/pipelines/create/githubrepo.py @@ -150,7 +150,6 @@ def on_button_pressed(self, event: Button.Pressed) -> None: self._create_repo_and_push( user, pipeline_repo, github_variables["private"], github_variables["push"] ) - log.info(f"GitHub repository '{self.parent.TEMPLATE_CONFIG.name}' created successfully") except UserWarning as e: log.info(f"There was an error with message: {e}") self.parent.switch_screen("github_exit") @@ -174,7 +173,7 @@ def _create_repo_and_push(self, org, pipeline_repo, private, push): repo_exists = True except UserWarning as e: # Repo already exists - log.info(e) + log.error(e) return except UnknownObjectException: # Repo doesn't exist @@ -185,6 +184,7 @@ def _create_repo_and_push(self, org, pipeline_repo, private, push): repo = org.create_repo( self.parent.TEMPLATE_CONFIG.name, description=self.parent.TEMPLATE_CONFIG.description, private=private ) + log.info(f"GitHub repository '{self.parent.TEMPLATE_CONFIG.name}' created successfully") # Add the remote and push try: diff --git a/nf_core/pipelines/create/loggingscreen.py b/nf_core/pipelines/create/loggingscreen.py index 4c863b164..89a9b595c 100644 --- a/nf_core/pipelines/create/loggingscreen.py +++ b/nf_core/pipelines/create/loggingscreen.py @@ -25,7 +25,11 @@ def compose(self) -> ComposeResult: "\n" + "\n".join(nfcore_logo) + "\n", id="logo", ) - yield Markdown("Creating pipeline..") + if self.parent.LOGGING_STATE == "repo created": + yield Markdown("Creating GitHub repository..") + else: + yield Markdown("Creating pipeline..") + self.parent.LOG_HANDLER.console.clear() yield Center(self.parent.LOG_HANDLER.console) if self.parent.LOGGING_STATE == "repo created": yield Center( diff --git a/tests/__snapshots__/test_create_app.ambr b/tests/__snapshots__/test_create_app.ambr index 50beceecc..7d3cf9d12 100644 --- a/tests/__snapshots__/test_create_app.ambr +++ b/tests/__snapshots__/test_create_app.ambr @@ -1689,256 +1689,253 @@ font-weight: 700; } - .terminal-4130924772-matrix { + 
.terminal-2633126699-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-4130924772-title { + .terminal-2633126699-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-4130924772-r1 { fill: #c5c8c6 } - .terminal-4130924772-r2 { fill: #e3e3e3 } - .terminal-4130924772-r3 { fill: #989898 } - .terminal-4130924772-r4 { fill: #e1e1e1 } - .terminal-4130924772-r5 { fill: #121212 } - .terminal-4130924772-r6 { fill: #0053aa } - .terminal-4130924772-r7 { fill: #dde8f3;font-weight: bold } - .terminal-4130924772-r8 { fill: #98e024 } - .terminal-4130924772-r9 { fill: #626262 } - .terminal-4130924772-r10 { fill: #9d65ff } - .terminal-4130924772-r11 { fill: #fd971f } - .terminal-4130924772-r12 { fill: #4ebf71;font-weight: bold } - .terminal-4130924772-r13 { fill: #d2d2d2 } - .terminal-4130924772-r14 { fill: #82aaff } - .terminal-4130924772-r15 { fill: #eeffff } - .terminal-4130924772-r16 { fill: #939393;font-weight: bold } - .terminal-4130924772-r17 { fill: #7ae998 } - .terminal-4130924772-r18 { fill: #507bb3 } - .terminal-4130924772-r19 { fill: #dde6ed;font-weight: bold } - .terminal-4130924772-r20 { fill: #008139 } - .terminal-4130924772-r21 { fill: #001541 } - .terminal-4130924772-r22 { fill: #ddedf9 } + .terminal-2633126699-r1 { fill: #c5c8c6 } + .terminal-2633126699-r2 { fill: #e3e3e3 } + .terminal-2633126699-r3 { fill: #989898 } + .terminal-2633126699-r4 { fill: #e1e1e1 } + .terminal-2633126699-r5 { fill: #121212 } + .terminal-2633126699-r6 { fill: #0053aa } + .terminal-2633126699-r7 { fill: #dde8f3;font-weight: bold } + .terminal-2633126699-r8 { fill: #98e024 } + .terminal-2633126699-r9 { fill: #626262 } + .terminal-2633126699-r10 { fill: #9d65ff } + .terminal-2633126699-r11 { fill: #fd971f } + .terminal-2633126699-r12 { fill: #4ebf71;font-weight: bold } + .terminal-2633126699-r13 { fill: #d2d2d2 } + .terminal-2633126699-r14 { fill: #82aaff } + .terminal-2633126699-r15 
{ fill: #eeffff } + .terminal-2633126699-r16 { fill: #939393;font-weight: bold } + .terminal-2633126699-r17 { fill: #7ae998 } + .terminal-2633126699-r18 { fill: #008139 } + .terminal-2633126699-r19 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - HowTo create a GitHub repository - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - -                                           ,--./,-. -           ___     __   __   __   ___     /,-._.--~\  -     |\ | |__  __ /  ` /  \ |__) |__         }  { -     | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                           `._,._,' - - If you would like to create the GitHub repository later, you can do it manually by following - these steps: - -  1. Create a new GitHub repository -  2. Add the remote to your local repository: - - - cd<pipeline_directory> - gitremoteaddorigingit@github.com:<username>/<repo_name>.git - - -  3. Push the code to the remote: - - - gitpush--allorigin - - - ● Note the --all flag: this is needed to push all branches to the remote. 
- - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - CloseShow pipeline creation log - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + HowTo create a GitHub repository + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + +                                           ,--./,-. +           ___     __   __   __   ___     /,-._.--~\  +     |\ | |__  __ /  ` /  \ |__) |__         }  { +     | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                           `._,._,' + + If you would like to create the GitHub repository later, you can do it manually by following + these steps: + +  1. Create a new GitHub repository +  2. Add the remote to your local repository: + + + cd<pipeline_directory> + gitremoteaddorigingit@github.com:<username>/<repo_name>.git + + +  3. Push the code to the remote: + + + gitpush--allorigin + + + ● Note the --all flag: this is needed to push all branches to the remote. 
+ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Close + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  From d8283bb91556e5a1556c103003f3768b275fe0a4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Wed, 3 Apr 2024 12:19:11 +0000 Subject: [PATCH 093/737] fix pipeline create tests --- nf_core/pipelines/create/create.py | 2 +- tests/test_cli.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index b5136cf8b..be3ef6849 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -133,7 +133,7 @@ def check_template_yaml_info(self, template_yaml, name, description, author): template_yaml = yaml.safe_load(f) config = CreateConfig(**template_yaml) # Allow giving a prefix through a template - if template_yaml["prefix"] is not None and config.org is None: + if "prefix" in template_yaml and config.org is None: config.org = template_yaml["prefix"] except FileNotFoundError: raise UserWarning(f"Template YAML file '{template_yaml}' not found.") diff --git a/tests/test_cli.py b/tests/test_cli.py index f3e111a58..4f1e1d564 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -250,6 +250,7 @@ def test_create(self, mock_create): force="force" in params, version="1.0.0dev", outdir=params["outdir"], + template_config=None, organisation="nf-core", ) mock_create.return_value.init_pipeline.assert_called_once() From 241bc207c8285650847e5acf3f6e1bb3f1f57637 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Wed, 3 Apr 2024 12:48:02 +0000 Subject: [PATCH 094/737] use org instead of prefix in pipeline template --- .github/workflows/create-test-lint-wf-template.yml | 10 +++++----- nf_core/lint/files_unchanged.py | 2 +- nf_core/pipelines/create/create.py | 5 +---- 3 files changed, 7 insertions(+), 10 deletions(-) diff --git a/.github/workflows/create-test-lint-wf-template.yml 
b/.github/workflows/create-test-lint-wf-template.yml index 035ae86e0..83559113a 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -84,23 +84,23 @@ jobs: run: | mkdir create-test-lint-wf export NXF_WORK=$(pwd) - printf "prefix: my-prefix\nskip: ['ci', 'github_badges', 'igenomes', 'nf_core_configs']" > create-test-lint-wf/template_skip_all.yml + printf "org: my-prefix\nskip: ['ci', 'github_badges', 'igenomes', 'nf_core_configs']" > create-test-lint-wf/template_skip_all.yml - name: Create template skip github_badges run: | - printf "prefix: my-prefix\nskip: github_badges" > create-test-lint-wf/template_skip_github_badges.yml + printf "org: my-prefix\nskip: github_badges" > create-test-lint-wf/template_skip_github_badges.yml - name: Create template skip igenomes run: | - printf "prefix: my-prefix\nskip: igenomes" > create-test-lint-wf/template_skip_igenomes.yml + printf "org: my-prefix\nskip: igenomes" > create-test-lint-wf/template_skip_igenomes.yml - name: Create template skip ci run: | - printf "prefix: my-prefix\nskip: ci" > create-test-lint-wf/template_skip_ci.yml + printf "org: my-prefix\nskip: ci" > create-test-lint-wf/template_skip_ci.yml - name: Create template skip nf_core_configs run: | - printf "prefix: my-prefix\nskip: nf_core_configs" > create-test-lint-wf/template_skip_nf_core_configs.yml + printf "org: my-prefix\nskip: nf_core_configs" > create-test-lint-wf/template_skip_nf_core_configs.yml # Create a pipeline from the template - name: create a pipeline from the template ${{ matrix.TEMPLATE }} diff --git a/nf_core/lint/files_unchanged.py b/nf_core/lint/files_unchanged.py index e78ae7787..50332e925 100644 --- a/nf_core/lint/files_unchanged.py +++ b/nf_core/lint/files_unchanged.py @@ -119,7 +119,7 @@ def files_unchanged(self) -> Dict[str, Union[List[str], bool]]: "name": short_name, "description": self.nf_config["manifest.description"].strip("\"'"), "author": 
self.nf_config["manifest.author"].strip("\"'"), - "prefix": prefix, + "org": prefix, } template_yaml_path = Path(tmp_dir, "template.yaml") diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index be3ef6849..1fd4601ee 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -132,9 +132,6 @@ def check_template_yaml_info(self, template_yaml, name, description, author): with open(template_yaml) as f: template_yaml = yaml.safe_load(f) config = CreateConfig(**template_yaml) - # Allow giving a prefix through a template - if "prefix" in template_yaml and config.org is None: - config.org = template_yaml["prefix"] except FileNotFoundError: raise UserWarning(f"Template YAML file '{template_yaml}' not found.") @@ -177,7 +174,7 @@ def update_config(self, organisation, version, force, outdir): if self.config.outdir is None: self.config.outdir = outdir if outdir else "." if self.config.is_nfcore is None: - self.config.is_nfcore = organisation == "nf-core" + self.config.is_nfcore = self.config.org == "nf-core" def obtain_jinja_params_dict(self, features_to_skip, pipeline_dir): """Creates a dictionary of parameters for the new pipeline. 
From 6f2a59b4a9e33ff7725196271e503186ada130d4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Fri, 5 Apr 2024 09:23:39 +0000 Subject: [PATCH 095/737] add 'back' button if the pipeline exists --- nf_core/pipelines/create/__init__.py | 2 -- nf_core/pipelines/create/error.py | 38 ----------------------- nf_core/pipelines/create/finaldetails.py | 5 +-- nf_core/pipelines/create/loggingscreen.py | 1 + 4 files changed, 2 insertions(+), 44 deletions(-) delete mode 100644 nf_core/pipelines/create/error.py diff --git a/nf_core/pipelines/create/__init__.py b/nf_core/pipelines/create/__init__.py index 05589d5d2..96f027e9f 100644 --- a/nf_core/pipelines/create/__init__.py +++ b/nf_core/pipelines/create/__init__.py @@ -7,7 +7,6 @@ from nf_core.pipelines.create.basicdetails import BasicDetails from nf_core.pipelines.create.custompipeline import CustomPipeline -from nf_core.pipelines.create.error import ExistError from nf_core.pipelines.create.finaldetails import FinalDetails from nf_core.pipelines.create.githubexit import GithubExit from nf_core.pipelines.create.githubrepo import GithubRepo @@ -57,7 +56,6 @@ class PipelineCreateApp(App[CreateConfig]): "github_repo_question": GithubRepoQuestion(), "github_repo": GithubRepo(), "github_exit": GithubExit(), - "error_screen": ExistError(), } # Initialise config as empty diff --git a/nf_core/pipelines/create/error.py b/nf_core/pipelines/create/error.py deleted file mode 100644 index 67c67aa1c..000000000 --- a/nf_core/pipelines/create/error.py +++ /dev/null @@ -1,38 +0,0 @@ -from textwrap import dedent - -from textual.app import ComposeResult -from textual.containers import Center -from textual.screen import Screen -from textual.widgets import Button, Footer, Header, Markdown, Static - -from nf_core.utils import nfcore_logo - - -class ExistError(Screen): - """A screen to show the final text and exit the app - when an error ocurred.""" - - def compose(self) -> ComposeResult: - yield Header() - yield Footer() - 
yield Markdown( - dedent( - """ - # Pipeline exists - """ - ) - ) - yield Static( - "\n" + "\n".join(nfcore_logo) + "\n", - id="logo", - ) - - completed_text_markdown = f""" - A pipeline '`{self.parent.TEMPLATE_CONFIG.outdir + "/" + self.parent.TEMPLATE_CONFIG.org + "-" + self.parent.TEMPLATE_CONFIG.name}`' already exists. - Please select a different name or `force` the creation of the pipeline to override the existing one. - """ - - yield Markdown(dedent(completed_text_markdown)) - yield Center( - Button("Close App", id="close_app", variant="success"), - classes="cta", - ) diff --git a/nf_core/pipelines/create/finaldetails.py b/nf_core/pipelines/create/finaldetails.py index c621a425a..7f0f5d918 100644 --- a/nf_core/pipelines/create/finaldetails.py +++ b/nf_core/pipelines/create/finaldetails.py @@ -91,10 +91,6 @@ class PipelineExists(Message): pass - @on(PipelineExists) - def show_pipeline_error(self) -> None: - self.parent.switch_screen("error_screen") - @work(thread=True, exclusive=True) def _create_pipeline(self) -> None: """Create the pipeline.""" @@ -108,3 +104,4 @@ def _create_pipeline(self) -> None: self.parent.call_from_thread(change_select_disabled, self.parent, "close_screen", False) except UserWarning: self.post_message(self.PipelineExists()) + self.parent.call_from_thread(change_select_disabled, self.parent, "back", False) diff --git a/nf_core/pipelines/create/loggingscreen.py b/nf_core/pipelines/create/loggingscreen.py index 89a9b595c..dcb9fce7e 100644 --- a/nf_core/pipelines/create/loggingscreen.py +++ b/nf_core/pipelines/create/loggingscreen.py @@ -38,6 +38,7 @@ def compose(self) -> ComposeResult: ) else: yield Center( + Button("Back", id="back", variant="default", disabled=True), Button("Continue", id="close_screen", variant="success", disabled=True), classes="cta", ) From 24704a7ee856546f929ba254920bb52ba375da3f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Fri, 5 Apr 2024 09:48:30 +0000 Subject: [PATCH 096/737] add 
option to set repo name --- nf_core/pipelines/create/githubrepo.py | 36 ++++++++++++++++++-------- 1 file changed, 25 insertions(+), 11 deletions(-) diff --git a/nf_core/pipelines/create/githubrepo.py b/nf_core/pipelines/create/githubrepo.py index 656f8c51e..461f41272 100644 --- a/nf_core/pipelines/create/githubrepo.py +++ b/nf_core/pipelines/create/githubrepo.py @@ -56,6 +56,14 @@ def compose(self) -> ComposeResult: ) yield Button("Show", id="show_password") yield Button("Hide", id="hide_password") + with Horizontal(classes="ghrepo-cols"): + yield TextInput( + "repo_name", + "Repository name", + "The name of the new GitHub repository", + default=self.parent.TEMPLATE_CONFIG.name, + classes="column", + ) with Horizontal(classes="ghrepo-cols"): yield Switch(value=False, id="private") with Vertical(): @@ -89,7 +97,7 @@ def on_button_pressed(self, event: Button.Pressed) -> None: elif event.button.id == "create_github": # Create a GitHub repo - # Save GitHub username and token + # Save GitHub username, token and repo name github_variables = {} for text_input in self.query("TextInput"): this_input = text_input.query_one(Input) @@ -101,7 +109,7 @@ def on_button_pressed(self, event: Button.Pressed) -> None: # Pipeline git repo pipeline_repo = git.Repo.init( Path(self.parent.TEMPLATE_CONFIG.outdir) - / Path(self.parent.TEMPLATE_CONFIG.org + "-" + self.parent.TEMPLATE_CONFIG.name) + / Path(self.parent.TEMPLATE_CONFIG.org + "-" + github_variables["repo_name"]) ) # GitHub authentication @@ -140,7 +148,11 @@ def on_button_pressed(self, event: Button.Pressed) -> None: try: if org: self._create_repo_and_push( - org, pipeline_repo, github_variables["private"], github_variables["push"] + org, + github_variables["repo_name"], + pipeline_repo, + github_variables["private"], + github_variables["push"], ) else: # Create the repo in the user's account @@ -148,7 +160,11 @@ def on_button_pressed(self, event: Button.Pressed) -> None: f"Repo will be created in the GitHub organisation 
account '{github_variables['gh_username']}'" ) self._create_repo_and_push( - user, pipeline_repo, github_variables["private"], github_variables["push"] + user, + github_variables["repo_name"], + pipeline_repo, + github_variables["private"], + github_variables["push"], ) except UserWarning as e: log.info(f"There was an error with message: {e}") @@ -158,16 +174,16 @@ def on_button_pressed(self, event: Button.Pressed) -> None: self.parent.switch_screen(LoggingScreen()) @work(thread=True, exclusive=True) - def _create_repo_and_push(self, org, pipeline_repo, private, push): + def _create_repo_and_push(self, org, repo_name, pipeline_repo, private, push): """Create a GitHub repository and push all branches.""" self.post_message(ShowLogs()) # Check if repo already exists try: - repo = org.get_repo(self.parent.TEMPLATE_CONFIG.name) + repo = org.get_repo(repo_name) # Check if it has a commit history try: repo.get_commits().totalCount - raise UserWarning(f"GitHub repository '{self.parent.TEMPLATE_CONFIG.name}' already exists") + raise UserWarning(f"GitHub repository '{repo_name}' already exists") except GithubException: # Repo is empty repo_exists = True @@ -181,10 +197,8 @@ def _create_repo_and_push(self, org, pipeline_repo, private, push): # Create the repo if not repo_exists: - repo = org.create_repo( - self.parent.TEMPLATE_CONFIG.name, description=self.parent.TEMPLATE_CONFIG.description, private=private - ) - log.info(f"GitHub repository '{self.parent.TEMPLATE_CONFIG.name}' created successfully") + repo = org.create_repo(repo_name, description=self.parent.TEMPLATE_CONFIG.description, private=private) + log.info(f"GitHub repository '{repo_name}' created successfully") # Add the remote and push try: From 5add94be701033401e0c2087adc0d55021d57198 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Fri, 5 Apr 2024 11:45:19 +0000 Subject: [PATCH 097/737] show help if repo already exists --- nf_core/pipelines/create/githubrepo.py | 13 +++++++++++-- 
nf_core/pipelines/create/loggingscreen.py | 3 ++- 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/nf_core/pipelines/create/githubrepo.py b/nf_core/pipelines/create/githubrepo.py index 461f41272..02545e018 100644 --- a/nf_core/pipelines/create/githubrepo.py +++ b/nf_core/pipelines/create/githubrepo.py @@ -9,11 +9,12 @@ from textual import work from textual.app import ComposeResult from textual.containers import Center, Horizontal, Vertical +from textual.message import Message from textual.screen import Screen from textual.widgets import Button, Footer, Header, Input, Markdown, Static, Switch from nf_core.pipelines.create.loggingscreen import LoggingScreen -from nf_core.pipelines.create.utils import ShowLogs, TextInput +from nf_core.pipelines.create.utils import ShowLogs, TextInput, change_select_disabled log = logging.getLogger(__name__) @@ -167,12 +168,17 @@ def on_button_pressed(self, event: Button.Pressed) -> None: github_variables["push"], ) except UserWarning as e: - log.info(f"There was an error with message: {e}") + log.error(f"There was an error with message: {e}") self.parent.switch_screen("github_exit") self.parent.LOGGING_STATE = "repo created" self.parent.switch_screen(LoggingScreen()) + class RepoExists(Message): + """Custom message to indicate that the GitHub repo already exists.""" + + pass + @work(thread=True, exclusive=True) def _create_repo_and_push(self, org, repo_name, pipeline_repo, private, push): """Create a GitHub repository and push all branches.""" @@ -190,6 +196,8 @@ def _create_repo_and_push(self, org, repo_name, pipeline_repo, private, push): except UserWarning as e: # Repo already exists log.error(e) + self.parent.call_from_thread(self.post_message, self.RepoExists()) + self.parent.call_from_thread(change_select_disabled, self.parent, "exit", False) return except UnknownObjectException: # Repo doesn't exist @@ -199,6 +207,7 @@ def _create_repo_and_push(self, org, repo_name, pipeline_repo, private, push): if not 
repo_exists: repo = org.create_repo(repo_name, description=self.parent.TEMPLATE_CONFIG.description, private=private) log.info(f"GitHub repository '{repo_name}' created successfully") + self.parent.call_from_thread(change_select_disabled, self.parent, "close_app", False) # Add the remote and push try: diff --git a/nf_core/pipelines/create/loggingscreen.py b/nf_core/pipelines/create/loggingscreen.py index dcb9fce7e..a862852d7 100644 --- a/nf_core/pipelines/create/loggingscreen.py +++ b/nf_core/pipelines/create/loggingscreen.py @@ -33,7 +33,8 @@ def compose(self) -> ComposeResult: yield Center(self.parent.LOG_HANDLER.console) if self.parent.LOGGING_STATE == "repo created": yield Center( - Button("Close App", id="close_app", variant="success"), + Button("Continue", id="exit", variant="success", disabled=True), + Button("Close App", id="close_app", variant="success", disabled=True), classes="cta", ) else: From 5d9ebbd3d349d3d7c8455787900cd50ab17be6c4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Fri, 5 Apr 2024 12:45:29 +0000 Subject: [PATCH 098/737] hide buttons not used in logging screen --- nf_core/pipelines/create/create.tcss | 4 ++++ nf_core/pipelines/create/finaldetails.py | 4 +++- nf_core/pipelines/create/githubrepo.py | 4 +++- nf_core/pipelines/create/utils.py | 5 +++++ 4 files changed, 15 insertions(+), 2 deletions(-) diff --git a/nf_core/pipelines/create/create.tcss b/nf_core/pipelines/create/create.tcss index b2355e133..67394a9de 100644 --- a/nf_core/pipelines/create/create.tcss +++ b/nf_core/pipelines/create/create.tcss @@ -118,6 +118,10 @@ Vertical{ margin: 0 4 2 4; } +.hide { + display: none; +} + /* Layouts */ .col-2 { grid-size: 2 1; diff --git a/nf_core/pipelines/create/finaldetails.py b/nf_core/pipelines/create/finaldetails.py index 7f0f5d918..b822c09f8 100644 --- a/nf_core/pipelines/create/finaldetails.py +++ b/nf_core/pipelines/create/finaldetails.py @@ -11,7 +11,7 @@ from nf_core.pipelines.create.create import 
PipelineCreate from nf_core.pipelines.create.loggingscreen import LoggingScreen -from nf_core.pipelines.create.utils import ShowLogs, TextInput, change_select_disabled +from nf_core.pipelines.create.utils import ShowLogs, TextInput, add_hide_class, change_select_disabled class FinalDetails(Screen): @@ -102,6 +102,8 @@ def _create_pipeline(self) -> None: try: create_obj.init_pipeline() self.parent.call_from_thread(change_select_disabled, self.parent, "close_screen", False) + add_hide_class(self.parent, "back") except UserWarning: self.post_message(self.PipelineExists()) self.parent.call_from_thread(change_select_disabled, self.parent, "back", False) + add_hide_class(self.parent, "close_screen") diff --git a/nf_core/pipelines/create/githubrepo.py b/nf_core/pipelines/create/githubrepo.py index 02545e018..d32380de0 100644 --- a/nf_core/pipelines/create/githubrepo.py +++ b/nf_core/pipelines/create/githubrepo.py @@ -14,7 +14,7 @@ from textual.widgets import Button, Footer, Header, Input, Markdown, Static, Switch from nf_core.pipelines.create.loggingscreen import LoggingScreen -from nf_core.pipelines.create.utils import ShowLogs, TextInput, change_select_disabled +from nf_core.pipelines.create.utils import ShowLogs, TextInput, add_hide_class, change_select_disabled log = logging.getLogger(__name__) @@ -198,6 +198,7 @@ def _create_repo_and_push(self, org, repo_name, pipeline_repo, private, push): log.error(e) self.parent.call_from_thread(self.post_message, self.RepoExists()) self.parent.call_from_thread(change_select_disabled, self.parent, "exit", False) + add_hide_class(self.parent, "close_app") return except UnknownObjectException: # Repo doesn't exist @@ -208,6 +209,7 @@ def _create_repo_and_push(self, org, repo_name, pipeline_repo, private, push): repo = org.create_repo(repo_name, description=self.parent.TEMPLATE_CONFIG.description, private=private) log.info(f"GitHub repository '{repo_name}' created successfully") self.parent.call_from_thread(change_select_disabled, 
self.parent, "close_app", False) + add_hide_class(self.parent, "exit") # Add the remote and push try: diff --git a/nf_core/pipelines/create/utils.py b/nf_core/pipelines/create/utils.py index a1c908952..7b332615f 100644 --- a/nf_core/pipelines/create/utils.py +++ b/nf_core/pipelines/create/utils.py @@ -206,6 +206,11 @@ def change_select_disabled(app, widget_id: str, disabled: bool) -> None: app.get_widget_by_id(widget_id).disabled = disabled +def add_hide_class(app, widget_id: str) -> None: + """Add class 'hide' to a widget. Not display widget.""" + app.get_widget_by_id(widget_id).add_class("hide") + + ## Markdown text to reuse in different screens markdown_genomes = """ Nf-core pipelines are configured to use a copy of the most common reference genome files. From 5a0aa4629e5d531f9904aa7b12b34d3fc2d00916 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Fri, 5 Apr 2024 13:12:20 +0000 Subject: [PATCH 099/737] fix threads and messages --- nf_core/pipelines/create/finaldetails.py | 7 +++++-- nf_core/pipelines/create/githubrepo.py | 11 +++++++---- 2 files changed, 12 insertions(+), 6 deletions(-) diff --git a/nf_core/pipelines/create/finaldetails.py b/nf_core/pipelines/create/finaldetails.py index b822c09f8..86b93423f 100644 --- a/nf_core/pipelines/create/finaldetails.py +++ b/nf_core/pipelines/create/finaldetails.py @@ -91,6 +91,11 @@ class PipelineExists(Message): pass + @on(PipelineExists) + def show_back_button(self) -> None: + change_select_disabled(self.parent, "back", False) + add_hide_class(self.parent, "close_screen") + @work(thread=True, exclusive=True) def _create_pipeline(self) -> None: """Create the pipeline.""" @@ -105,5 +110,3 @@ def _create_pipeline(self) -> None: add_hide_class(self.parent, "back") except UserWarning: self.post_message(self.PipelineExists()) - self.parent.call_from_thread(change_select_disabled, self.parent, "back", False) - add_hide_class(self.parent, "close_screen") diff --git 
a/nf_core/pipelines/create/githubrepo.py b/nf_core/pipelines/create/githubrepo.py index d32380de0..768f77f7a 100644 --- a/nf_core/pipelines/create/githubrepo.py +++ b/nf_core/pipelines/create/githubrepo.py @@ -6,7 +6,7 @@ import git import yaml from github import Github, GithubException, UnknownObjectException -from textual import work +from textual import on, work from textual.app import ComposeResult from textual.containers import Center, Horizontal, Vertical from textual.message import Message @@ -179,6 +179,11 @@ class RepoExists(Message): pass + @on(RepoExists) + def show_github_info_button(self) -> None: + change_select_disabled(self.parent, "exit", False) + add_hide_class(self.parent, "close_app") + @work(thread=True, exclusive=True) def _create_repo_and_push(self, org, repo_name, pipeline_repo, private, push): """Create a GitHub repository and push all branches.""" @@ -196,9 +201,7 @@ def _create_repo_and_push(self, org, repo_name, pipeline_repo, private, push): except UserWarning as e: # Repo already exists log.error(e) - self.parent.call_from_thread(self.post_message, self.RepoExists()) - self.parent.call_from_thread(change_select_disabled, self.parent, "exit", False) - add_hide_class(self.parent, "close_app") + self.post_message(self.RepoExists()) return except UnknownObjectException: # Repo doesn't exist From 48d6ea17c3216ba9950d8bd4888075f6788f2419 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Fri, 5 Apr 2024 13:18:37 +0000 Subject: [PATCH 100/737] update test snapshots --- tests/__snapshots__/test_create_app.ambr | 258 +++++++++++------------ 1 file changed, 129 insertions(+), 129 deletions(-) diff --git a/tests/__snapshots__/test_create_app.ambr b/tests/__snapshots__/test_create_app.ambr index 7d3cf9d12..3f4b2f35c 100644 --- a/tests/__snapshots__/test_create_app.ambr +++ b/tests/__snapshots__/test_create_app.ambr @@ -1409,257 +1409,257 @@ font-weight: 700; } - .terminal-3041904966-matrix { + .terminal-2065381799-matrix { 
font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3041904966-title { + .terminal-2065381799-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3041904966-r1 { fill: #c5c8c6 } - .terminal-3041904966-r2 { fill: #e3e3e3 } - .terminal-3041904966-r3 { fill: #989898 } - .terminal-3041904966-r4 { fill: #e1e1e1 } - .terminal-3041904966-r5 { fill: #121212 } - .terminal-3041904966-r6 { fill: #0053aa } - .terminal-3041904966-r7 { fill: #dde8f3;font-weight: bold } - .terminal-3041904966-r8 { fill: #a5a5a5;font-style: italic; } - .terminal-3041904966-r9 { fill: #1e1e1e } - .terminal-3041904966-r10 { fill: #008139 } - .terminal-3041904966-r11 { fill: #454a50 } - .terminal-3041904966-r12 { fill: #787878 } - .terminal-3041904966-r13 { fill: #e2e2e2 } - .terminal-3041904966-r14 { fill: #e2e3e3;font-weight: bold } - .terminal-3041904966-r15 { fill: #000000 } - .terminal-3041904966-r16 { fill: #b93c5b } - .terminal-3041904966-r17 { fill: #808080 } - .terminal-3041904966-r18 { fill: #7ae998 } - .terminal-3041904966-r19 { fill: #507bb3 } - .terminal-3041904966-r20 { fill: #0a180e;font-weight: bold } - .terminal-3041904966-r21 { fill: #dde6ed;font-weight: bold } - .terminal-3041904966-r22 { fill: #001541 } - .terminal-3041904966-r23 { fill: #ddedf9 } + .terminal-2065381799-r1 { fill: #c5c8c6 } + .terminal-2065381799-r2 { fill: #e3e3e3 } + .terminal-2065381799-r3 { fill: #989898 } + .terminal-2065381799-r4 { fill: #e1e1e1 } + .terminal-2065381799-r5 { fill: #121212 } + .terminal-2065381799-r6 { fill: #0053aa } + .terminal-2065381799-r7 { fill: #dde8f3;font-weight: bold } + .terminal-2065381799-r8 { fill: #a5a5a5;font-style: italic; } + .terminal-2065381799-r9 { fill: #1e1e1e } + .terminal-2065381799-r10 { fill: #008139 } + .terminal-2065381799-r11 { fill: #454a50 } + .terminal-2065381799-r12 { fill: #787878 } + .terminal-2065381799-r13 { fill: #e2e2e2 } + .terminal-2065381799-r14 { fill: 
#e2e3e3;font-weight: bold } + .terminal-2065381799-r15 { fill: #000000 } + .terminal-2065381799-r16 { fill: #b93c5b } + .terminal-2065381799-r17 { fill: #808080 } + .terminal-2065381799-r18 { fill: #7ae998 } + .terminal-2065381799-r19 { fill: #507bb3 } + .terminal-2065381799-r20 { fill: #0a180e;font-weight: bold } + .terminal-2065381799-r21 { fill: #dde6ed;font-weight: bold } + .terminal-2065381799-r22 { fill: #001541 } + .terminal-2065381799-r23 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Create GitHub repository - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - Now that we have created a new pipeline locally, we can create a new GitHub repository using - the GitHub API and push the code to it. - - - - Your GitHub usernameYour GitHub personal access token - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔for login.▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - GitHub username▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔Show - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁GitHub token▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ▔▔▔▔▔▔▔▔Private - Select to make the new GitHub repo private. - ▁▁▁▁▁▁▁▁ - ▔▔▔▔▔▔▔▔Push files - Select to push pipeline files and branches to your GitHub repo. 
- ▁▁▁▁▁▁▁▁ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackCreate GitHub repoFinish without creating a repo - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Create GitHub repository + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + Now that we have created a new pipeline locally, we can create a new GitHub repository using + the GitHub API and push the code to it. + + + + Your GitHub usernameYour GitHub personal access token + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔for login.▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + GitHub username▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔Show + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁GitHub token▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + The name of the new GitHub repository + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + mypipeline + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + ▔▔▔▔▔▔▔▔Private + Select to make the new GitHub repo private. + ▁▁▁▁▁▁▁▁ + ▔▔▔▔▔▔▔▔Push files + Select to push pipeline files and branches to your GitHub repo. 
+ ▁▁▁▁▁▁▁▁ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackCreate GitHub repoFinish without creating a repo + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  From 01481db2bb23ee682a68180ff1668d9dd1564091 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Fri, 5 Apr 2024 14:21:44 +0000 Subject: [PATCH 101/737] allow selecting a github organisation --- nf_core/pipelines/create/githubrepo.py | 23 +- tests/__snapshots__/test_create_app.ambr | 258 +++++++++++------------ 2 files changed, 148 insertions(+), 133 deletions(-) diff --git a/nf_core/pipelines/create/githubrepo.py b/nf_core/pipelines/create/githubrepo.py index 768f77f7a..481bd4437 100644 --- a/nf_core/pipelines/create/githubrepo.py +++ b/nf_core/pipelines/create/githubrepo.py @@ -22,6 +22,12 @@ Now that we have created a new pipeline locally, we can create a new GitHub repository using the GitHub API and push the code to it. """ +github_org_help = """ +You can't create a repository to the nf-core organisation. +Please create the pipeline repo to an organisation where you have access or use your user account. +A core-team member will be able to transfer the repo to nf-core once the development has started. +You user account will be used by default if 'nf-core' is provided. 
+""" class GithubRepo(Screen): @@ -58,6 +64,13 @@ def compose(self) -> ComposeResult: yield Button("Show", id="show_password") yield Button("Hide", id="hide_password") with Horizontal(classes="ghrepo-cols"): + yield TextInput( + "repo_org", + "Organisation name", + "The name of the organisation where the GitHub repo will be cretaed", + default=self.parent.TEMPLATE_CONFIG.org, + classes="column", + ) yield TextInput( "repo_name", "Repository name", @@ -65,6 +78,8 @@ def compose(self) -> ComposeResult: default=self.parent.TEMPLATE_CONFIG.name, classes="column", ) + if self.parent.TEMPLATE_CONFIG.is_nfcore: + yield Markdown(dedent(github_org_help)) with Horizontal(classes="ghrepo-cols"): yield Switch(value=False, id="private") with Vertical(): @@ -110,7 +125,7 @@ def on_button_pressed(self, event: Button.Pressed) -> None: # Pipeline git repo pipeline_repo = git.Repo.init( Path(self.parent.TEMPLATE_CONFIG.outdir) - / Path(self.parent.TEMPLATE_CONFIG.org + "-" + github_variables["repo_name"]) + / Path(self.parent.TEMPLATE_CONFIG.org + "-" + self.parent.TEMPLATE_CONFIG.name) ) # GitHub authentication @@ -136,11 +151,11 @@ def on_button_pressed(self, event: Button.Pressed) -> None: # Check if organisation exists # If the organisation is nf-core or it doesn't exist, the repo will be created in the user account - if self.parent.TEMPLATE_CONFIG.org != "nf-core": + if github_variables["repo_org"] != "nf-core": try: - org = github_auth.get_organization(self.parent.TEMPLATE_CONFIG.org) + org = github_auth.get_organization(github_variables["repo_org"]) log.info( - f"Repo will be created in the GitHub organisation account '{self.parent.TEMPLATE_CONFIG.org}'" + f"Repo will be created in the GitHub organisation account '{github_variables['repo_org']}'" ) except UnknownObjectException: pass diff --git a/tests/__snapshots__/test_create_app.ambr b/tests/__snapshots__/test_create_app.ambr index 3f4b2f35c..3477a381a 100644 --- a/tests/__snapshots__/test_create_app.ambr +++ 
b/tests/__snapshots__/test_create_app.ambr @@ -1409,257 +1409,257 @@ font-weight: 700; } - .terminal-2065381799-matrix { + .terminal-3368911015-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2065381799-title { + .terminal-3368911015-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2065381799-r1 { fill: #c5c8c6 } - .terminal-2065381799-r2 { fill: #e3e3e3 } - .terminal-2065381799-r3 { fill: #989898 } - .terminal-2065381799-r4 { fill: #e1e1e1 } - .terminal-2065381799-r5 { fill: #121212 } - .terminal-2065381799-r6 { fill: #0053aa } - .terminal-2065381799-r7 { fill: #dde8f3;font-weight: bold } - .terminal-2065381799-r8 { fill: #a5a5a5;font-style: italic; } - .terminal-2065381799-r9 { fill: #1e1e1e } - .terminal-2065381799-r10 { fill: #008139 } - .terminal-2065381799-r11 { fill: #454a50 } - .terminal-2065381799-r12 { fill: #787878 } - .terminal-2065381799-r13 { fill: #e2e2e2 } - .terminal-2065381799-r14 { fill: #e2e3e3;font-weight: bold } - .terminal-2065381799-r15 { fill: #000000 } - .terminal-2065381799-r16 { fill: #b93c5b } - .terminal-2065381799-r17 { fill: #808080 } - .terminal-2065381799-r18 { fill: #7ae998 } - .terminal-2065381799-r19 { fill: #507bb3 } - .terminal-2065381799-r20 { fill: #0a180e;font-weight: bold } - .terminal-2065381799-r21 { fill: #dde6ed;font-weight: bold } - .terminal-2065381799-r22 { fill: #001541 } - .terminal-2065381799-r23 { fill: #ddedf9 } + .terminal-3368911015-r1 { fill: #c5c8c6 } + .terminal-3368911015-r2 { fill: #e3e3e3 } + .terminal-3368911015-r3 { fill: #989898 } + .terminal-3368911015-r4 { fill: #e1e1e1 } + .terminal-3368911015-r5 { fill: #121212 } + .terminal-3368911015-r6 { fill: #0053aa } + .terminal-3368911015-r7 { fill: #dde8f3;font-weight: bold } + .terminal-3368911015-r8 { fill: #a5a5a5;font-style: italic; } + .terminal-3368911015-r9 { fill: #1e1e1e } + .terminal-3368911015-r10 { fill: #008139 } + 
.terminal-3368911015-r11 { fill: #454a50 } + .terminal-3368911015-r12 { fill: #787878 } + .terminal-3368911015-r13 { fill: #e2e2e2 } + .terminal-3368911015-r14 { fill: #e2e3e3;font-weight: bold } + .terminal-3368911015-r15 { fill: #000000 } + .terminal-3368911015-r16 { fill: #b93c5b } + .terminal-3368911015-r17 { fill: #808080 } + .terminal-3368911015-r18 { fill: #7ae998 } + .terminal-3368911015-r19 { fill: #507bb3 } + .terminal-3368911015-r20 { fill: #0a180e;font-weight: bold } + .terminal-3368911015-r21 { fill: #dde6ed;font-weight: bold } + .terminal-3368911015-r22 { fill: #001541 } + .terminal-3368911015-r23 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Create GitHub repository - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - Now that we have created a new pipeline locally, we can create a new GitHub repository using - the GitHub API and push the code to it. - - - - Your GitHub usernameYour GitHub personal access token - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔for login.▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - GitHub username▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔Show - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁GitHub token▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - The name of the new GitHub repository - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - mypipeline - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ▔▔▔▔▔▔▔▔Private - Select to make the new GitHub repo private. 
- ▁▁▁▁▁▁▁▁ - ▔▔▔▔▔▔▔▔Push files - Select to push pipeline files and branches to your GitHub repo. - ▁▁▁▁▁▁▁▁ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackCreate GitHub repoFinish without creating a repo - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Create GitHub repository + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + Now that we have created a new pipeline locally, we can create a new GitHub repository using + the GitHub API and push the code to it. + + + + Your GitHub usernameYour GitHub personal access token + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔for login.▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + GitHub username▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔Show + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁GitHub token▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + The name of the organisation where the The name of the new GitHub repository + GitHub repo will be cretaed▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔mypipeline + nf-core▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + You can't create a repository to the nf-core organisation. Please create the pipeline repo  + to an organisation where you have access or use your user account. A core-team member will  + be able to transfer the repo to nf-core once the development has started. You user account  + will be used by default if 'nf-core' is provided. + + + ▔▔▔▔▔▔▔▔Private + Select to make the new GitHub repo private. + ▁▁▁▁▁▁▁▁ + ▔▔▔▔▔▔▔▔Push files + Select to push pipeline files and branches to your GitHub repo. 
+ ▁▁▁▁▁▁▁▁ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackCreate GitHub repoFinish without creating a repo + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + +  D  Toggle dark mode  Q  Quit  From 6f7d0d16930598500f4a3a3014ab8c3bb740ed56 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Fri, 5 Apr 2024 16:42:47 +0200 Subject: [PATCH 102/737] Rip out the docs to put in the website repo --- README.md | 1227 +---------------- docs/images/nf-core-bump-version.svg | 187 --- docs/images/nf-core-create-logo.svg | 107 -- docs/images/nf-core-create.svg | 162 --- docs/images/nf-core-download-tree.svg | 190 --- docs/images/nf-core-download.svg | 139 -- docs/images/nf-core-launch-rnaseq.svg | 120 -- docs/images/nf-core-licences.svg | 107 -- docs/images/nf-core-lint.svg | 208 --- docs/images/nf-core-list-rna.svg | 172 --- docs/images/nf-core-list-stars.svg | 141 -- docs/images/nf-core-list.svg | 145 -- docs/images/nf-core-logo.png | Bin 17930 -> 0 bytes docs/images/nf-core-modules-bump-version.svg | 144 -- docs/images/nf-core-modules-create.svg | 123 -- docs/images/nf-core-modules-info.svg | 240 ---- docs/images/nf-core-modules-install.svg | 126 -- docs/images/nf-core-modules-lint.svg | 114 -- docs/images/nf-core-modules-list-local.svg | 155 --- docs/images/nf-core-modules-list-remote.svg | 169 --- docs/images/nf-core-modules-patch.svg | 193 --- docs/images/nf-core-modules-remove.svg | 110 -- docs/images/nf-core-modules-test.svg | 110 -- docs/images/nf-core-modules-update.svg | 122 -- docs/images/nf-core-schema-build.svg | 119 -- docs/images/nf-core-schema-lint.svg | 114 -- docs/images/nf-core-schema-validate.svg | 118 -- docs/images/nf-core-subworkflows-create.svg | 143 -- docs/images/nf-core-subworkflows-info.svg | 172 --- docs/images/nf-core-subworkflows-install.svg | 126 -- docs/images/nf-core-subworkflows-lint.svg | 341 ----- .../nf-core-subworkflows-list-local.svg | 168 --- 
.../nf-core-subworkflows-list-remote.svg | 169 --- docs/images/nf-core-subworkflows-remove.svg | 158 --- docs/images/nf-core-subworkflows-test.svg | 110 -- docs/images/nf-core-subworkflows-update.svg | 126 -- docs/images/nf-core-sync.svg | 145 -- docs/images/nfcore-tools_logo_dark.png | Bin 64937 -> 0 bytes docs/images/nfcore-tools_logo_light.png | Bin 64856 -> 0 bytes 39 files changed, 9 insertions(+), 6511 deletions(-) delete mode 100644 docs/images/nf-core-bump-version.svg delete mode 100644 docs/images/nf-core-create-logo.svg delete mode 100644 docs/images/nf-core-create.svg delete mode 100644 docs/images/nf-core-download-tree.svg delete mode 100644 docs/images/nf-core-download.svg delete mode 100644 docs/images/nf-core-launch-rnaseq.svg delete mode 100644 docs/images/nf-core-licences.svg delete mode 100644 docs/images/nf-core-lint.svg delete mode 100644 docs/images/nf-core-list-rna.svg delete mode 100644 docs/images/nf-core-list-stars.svg delete mode 100644 docs/images/nf-core-list.svg delete mode 100644 docs/images/nf-core-logo.png delete mode 100644 docs/images/nf-core-modules-bump-version.svg delete mode 100644 docs/images/nf-core-modules-create.svg delete mode 100644 docs/images/nf-core-modules-info.svg delete mode 100644 docs/images/nf-core-modules-install.svg delete mode 100644 docs/images/nf-core-modules-lint.svg delete mode 100644 docs/images/nf-core-modules-list-local.svg delete mode 100644 docs/images/nf-core-modules-list-remote.svg delete mode 100644 docs/images/nf-core-modules-patch.svg delete mode 100644 docs/images/nf-core-modules-remove.svg delete mode 100644 docs/images/nf-core-modules-test.svg delete mode 100644 docs/images/nf-core-modules-update.svg delete mode 100644 docs/images/nf-core-schema-build.svg delete mode 100644 docs/images/nf-core-schema-lint.svg delete mode 100644 docs/images/nf-core-schema-validate.svg delete mode 100644 docs/images/nf-core-subworkflows-create.svg delete mode 100644 docs/images/nf-core-subworkflows-info.svg 
delete mode 100644 docs/images/nf-core-subworkflows-install.svg delete mode 100644 docs/images/nf-core-subworkflows-lint.svg delete mode 100644 docs/images/nf-core-subworkflows-list-local.svg delete mode 100644 docs/images/nf-core-subworkflows-list-remote.svg delete mode 100644 docs/images/nf-core-subworkflows-remove.svg delete mode 100644 docs/images/nf-core-subworkflows-test.svg delete mode 100644 docs/images/nf-core-subworkflows-update.svg delete mode 100644 docs/images/nf-core-sync.svg delete mode 100644 docs/images/nfcore-tools_logo_dark.png delete mode 100644 docs/images/nfcore-tools_logo_light.png diff --git a/README.md b/README.md index a5e679986..4ed5e6373 100644 --- a/README.md +++ b/README.md @@ -16,64 +16,17 @@ A python package with helper tools for the nf-core community. -> **Read this documentation on the nf-core website: [https://nf-co.re/tools](https://nf-co.re/tools)** - -## Table of contents - -- [`nf-core` tools installation](#installation) -- [`nf-core` tools update](#update-tools) -- [`nf-core list` - List available pipelines](#listing-pipelines) -- [`nf-core launch` - Run a pipeline with interactive parameter prompts](#launch-a-pipeline) -- [`nf-core create-params-file` - Create a parameter file](#create-a-parameter-file) -- [`nf-core download` - Download a pipeline for offline use](#downloading-pipelines-for-offline-use) -- [`nf-core licences` - List software licences in a pipeline](#pipeline-software-licences) -- [`nf-core create` - Create a new pipeline with the nf-core template](#creating-a-new-pipeline) -- [`nf-core lint` - Check pipeline code against nf-core guidelines](#linting-a-workflow) -- [`nf-core schema` - Work with pipeline schema files](#pipeline-schema) -- [`nf-core bump-version` - Update nf-core pipeline version number](#bumping-a-pipeline-version-number) -- [`nf-core sync` - Synchronise pipeline TEMPLATE branches](#sync-a-pipeline-with-the-template) -- [`nf-core create-logo` - Create an nf-core pipeline 
logo](#create-an-nf-core-pipeline-logo) -- [`nf-core tui` - Explore the nf-core command line graphically](#tools-cli-tui) -- [`nf-core modules` - commands for dealing with DSL2 modules](#modules) - - - [`modules list` - List available modules](#list-modules) - - [`modules list remote` - List remote modules](#list-remote-modules) - - [`modules list local` - List installed modules](#list-installed-modules) - - [`modules info` - Show information about a module](#show-information-about-a-module) - - [`modules install` - Install modules in a pipeline](#install-modules-in-a-pipeline) - - [`modules update` - Update modules in a pipeline](#update-modules-in-a-pipeline) - - [`modules remove` - Remove a module from a pipeline](#remove-a-module-from-a-pipeline) - - [`modules patch` - Create a patch file for a module](#create-a-patch-file-for-a-module) - - [`modules create` - Create a module from the template](#create-a-new-module) - - [`modules lint` - Check a module against nf-core guidelines](#check-a-module-against-nf-core-guidelines) - - [`modules test` - Run the tests for a module](#run-the-tests-for-a-module-using-pytest) - - [`modules bump-versions` - Bump software versions of modules](#bump-bioconda-and-container-versions-of-modules-in) - -- [`nf-core subworkflows` - commands for dealing with subworkflows](#subworkflows) - - [`subworkflows list` - List available subworkflows](#list-subworkflows) - - [`subworkflows list remote` - List remote subworkflows](#list-remote-subworkflows) - - [`subworkflows list local` - List installed subworkflows](#list-installed-subworkflows) - - [`subworkflows info` - Show information about a subworkflow](#show-information-about-a-subworkflow) - - [`subworkflows install` - Install subworkflows in a pipeline](#install-subworkflows-in-a-pipeline) - - [`subworkflows update` - Update subworkflows in a pipeline](#update-subworkflows-in-a-pipeline) - - [`subworkflows remove` - Remove a subworkflow from a 
pipeline](#remove-a-subworkflow-from-a-pipeline) - - [`subworkflows create` - Create a subworkflow from the template](#create-a-new-subworkflow) - - [`subworkflows lint` - Check a subworkflow against nf-core guidelines](#check-a-subworkflow-against-nf-core-guidelines) - - [`subworkflows test` - Run the tests for a subworkflow](#run-the-tests-for-a-subworkflow-using-pytest) -- [Citation](#citation) - The nf-core tools package is written in Python and can be imported and used within other packages. For documentation of the internal Python functions, please refer to the [Tools Python API docs](https://nf-co.re/tools/docs/). ## Installation -### Bioconda +For full installation instructions, please see the [nf-core documentation](https://nf-co.re/docs/usage/tools). +Below is a quick-start for those who know what they're doing: -You can install `nf-core/tools` from [bioconda](https://bioconda.github.io/recipes/nf-core/README.html). +### Bioconda -First, install conda and configure the channels to use bioconda -(see the [bioconda documentation](https://bioconda.github.io/index.html#usage)). -Then, just run the conda installation command: +Install [from Bioconda](https://bioconda.github.io/recipes/nf-core/README.html): ```bash conda install nf-core @@ -88,1191 +41,29 @@ conda activate nf-core ### Python Package Index -`nf-core/tools` can also be installed from [PyPI](https://pypi.python.org/pypi/nf-core/) using pip as follows: +Install [from PyPI](https://pypi.python.org/pypi/nf-core/): ```bash pip install nf-core ``` -### Docker image - -There is a docker image that you can use to run `nf-core/tools` that has all of the requirements packaged (including Nextflow) and so should work out of the box. 
It is called [`nfcore/tools`](https://hub.docker.com/r/nfcore/tools) _**(NB: no hyphen!)**_ - -You can use this container on the command line as follows: - -```bash -docker run -itv `pwd`:`pwd` -w `pwd` -u $(id -u):$(id -g) nfcore/tools -``` - -- `-i` and `-t` are needed for the interactive cli prompts to work (this tells Docker to use a pseudo-tty with stdin attached) -- The `-v` argument tells Docker to bind your current working directory (`pwd`) to the same path inside the container, so that files created there will be saved to your local file system outside of the container. -- `-w` sets the working directory in the container to this path, so that it's the same as your working directory outside of the container. -- `-u` sets your local user account as the user inside the container, so that any files created have the correct ownership permissions - -After the above base command, you can use the regular command line flags that you would use with other types of installation. -For example, to launch the `viralrecon` pipeline: - -```bash -docker run -itv `pwd`:`pwd` -w `pwd` -u $(id -u):$(id -g) nfcore/tools launch viralrecon -r 1.1.0 -``` - -If you use `$NXF_SINGULARITY_CACHEDIR` for downloads, you'll also need to make this folder and environment variable available to the continer: - -```bash -docker run -itv `pwd`:`pwd` -w `pwd` -u $(id -u):$(id -g) -v $NXF_SINGULARITY_CACHEDIR:$NXF_SINGULARITY_CACHEDIR -e NXF_SINGULARITY_CACHEDIR nfcore/tools launch viralrecon -r 1.1.0 -``` - -#### Docker bash alias - -The above base command is a bit of a mouthful to type, to say the least. 
-To make it easier to use, we highly recommend adding the following bash alias to your `~/.bashrc` file: - -```bash -alias nf-core="docker run -itv `pwd`:`pwd` -w `pwd` -u $(id -u):$(id -g) nfcore/tools" -``` - -Once applied (you may need to reload your shell) you can just use the `nf-core` command instead: - -```bash -nf-core list -``` - -#### Docker versions - -You can use docker image tags to specify the version you would like to use. For example, `nfcore/tools:dev` for the latest development version of the code, or `nfcore/tools:1.14` for version `1.14` of tools. -If you omit this, it will default to `:latest`, which should be the latest stable release. - -If you need a specific version of Nextflow inside the container, you can build an image yourself. -Clone the repo locally and check out whatever version of nf-core/tools that you need. -Then build using the `--build-arg NXF_VER` flag as follows: - -```bash -docker build -t nfcore/tools:dev . --build-arg NXF_VER=20.04.0 -``` - ### Development version -If you would like the latest development version of tools, the command is: - ```bash pip install --upgrade --force-reinstall git+https://github.com/nf-core/tools.git@dev ``` -If you intend to make edits to the code, first make a fork of the repository and then clone it locally. -Go to the cloned directory and install with pip (also installs development requirements): +If editing, fork and clone the repo, then install as follows: ```bash pip install --upgrade -r requirements-dev.txt -e . ``` -### Using a specific Python interpreter - -If you prefer, you can also run tools with a specific Python interpreter. -The command line usage and flags are then exactly the same as if you ran with the `nf-core` command. -Note that the module is `nf_core` with an underscore, not a hyphen like the console command. 
- -For example: - -```bash -python -m nf_core --help -python3 -m nf_core list -~/my_env/bin/python -m nf_core create --name mypipeline --description "This is a new skeleton pipeline" -``` - -### Using with your own Python scripts - -The tools functionality is written in such a way that you can import it into your own scripts. -For example, if you would like to get a list of all available nf-core pipelines: - -```python -import nf_core.list -wfs = nf_core.list.Workflows() -wfs.get_remote_workflows() -for wf in wfs.remote_workflows: - print(wf.full_name) -``` - -Please see [https://nf-co.re/tools/docs/](https://nf-co.re/tools/docs/) for the function documentation. - -### Automatic version check - -nf-core/tools automatically checks the web to see if there is a new version of nf-core/tools available. -If you would prefer to skip this check, set the environment variable `NFCORE_NO_VERSION_CHECK`. For example: - -```bash -export NFCORE_NO_VERSION_CHECK=1 -``` - -### Update tools - -It is advisable to keep nf-core/tools updated to the most recent version. The command to update depends on the system used to install it, for example if you have installed it with conda you can use: - -```bash -conda update nf-core -``` - -if you used pip: - -```bash -pip install --upgrade nf-core -``` - -Please refer to the respective documentation for further details to manage packages, as for example [conda](https://docs.conda.io/projects/conda/en/latest/user-guide/tasks/manage-pkgs.html#updating-packages) or [pip](https://packaging.python.org/en/latest/tutorials/installing-packages/#upgrading-packages). - -### Activate shell completions for nf-core/tools - -Auto-completion for the `nf-core` command is available for bash, zsh and fish. To activate it, add the following lines to the respective shell config files. 
- -| shell | shell config file | command | -| ----- | ----------------------------------------- | -------------------------------------------------- | -| bash | `~/.bashrc` | `eval "$(_NF_CORE_COMPLETE=bash_source nf-core)"` | -| zsh | `~/.zshrc` | `eval "$(_NF_CORE_COMPLETE=zsh_source nf-core)"` | -| fish | `~/.config/fish/completions/nf-core.fish` | `eval (env _NF_CORE_COMPLETE=fish_source nf-core)` | - -After a restart of the shell session you should have auto-completion for the `nf-core` command and all its sub-commands and options. - -> [!NOTE] -> The added line will run the command `nf-core` (which will also slow down startup time of your shell). You should therefore either have the nf-core/tools installed globally. -> You can also wrap it inside `if type nf-core > /dev/null; then ` \ `fi` for bash and zsh or `if command -v nf-core &> /dev/null eval (env _NF_CORE_COMPLETE=fish_source nf-core) end` for fish. You need to then source the config in your environment for the completions to be activated. - -> [!TIP] -> If you see the error `command not found compdef` , be sure that your config file contains the line `autoload -Uz compinit && compinit` before the eval line. - -## Listing pipelines - -The command `nf-core list` shows all available nf-core pipelines along with their latest version, when that was published and how recently the pipeline code was pulled to your local system (if at all). - -An example of the output from the command is as follows: - - - -![`nf-core list`](docs/images/nf-core-list.svg) - -To narrow down the list, supply one or more additional keywords to filter the pipelines based on matches in titles, descriptions and topics: - -![`nf-core list rna rna-seq`](docs/images/nf-core-list-rna.svg) - -You can sort the results by latest release (`-s release`, default), -when you last pulled a local copy (`-s pulled`), -alphabetically (`-s name`), -or number of GitHub stars (`-s stars`). 
- - - -![`nf-core list -s stars`](docs/images/nf-core-list-stars.svg) - -To return results as JSON output for downstream use, use the `--json` flag. - -Archived pipelines are not returned by default. To include them, use the `--show_archived` flag. - -## Launch a pipeline - -Some nextflow pipelines have a considerable number of command line flags that can be used. -To help with this, you can use the `nf-core launch` command. -You can choose between a web-based graphical interface or an interactive command-line wizard tool to enter the pipeline parameters for your run. -Both interfaces show documentation alongside each parameter and validate your inputs. - -The tool uses the `nextflow_schema.json` file from a pipeline to give parameter descriptions, defaults and grouping. -If no file for the pipeline is found, one will be automatically generated at runtime. - -Nextflow `params` variables are saved in to a JSON file called `nf-params.json` and used by nextflow with the `-params-file` flag. -This makes it easier to reuse these in the future. - -The command takes one argument - either the name of an nf-core pipeline which will be pulled automatically, -or the path to a directory containing a Nextflow pipeline _(can be any pipeline, doesn't have to be nf-core)_. - - - -![`nf-core launch rnaseq -r 3.8.1`](docs/images/nf-core-launch-rnaseq.svg) - -Once complete, the wizard will ask you if you want to launch the Nextflow run. -If not, you can copy and paste the Nextflow command with the `nf-params.json` file of your inputs. - -```console -INFO [✓] Input parameters look valid -INFO Nextflow command: - nextflow run nf-core/rnaseq -params-file "nf-params.json" - - -Do you want to run this command now? [y/n]: -``` - -### Launch tool options - -- `-r`, `--revision` - - Specify a pipeline release (or branch / git commit sha) of the project to run -- `-i`, `--id` - - You can use the web GUI for nf-core pipelines by clicking _"Launch"_ on the website. 
Once filled in you will be given an ID to use with this command which is used to retrieve your inputs. -- `-c`, `--command-only` - - If you prefer not to save your inputs in a JSON file and use `-params-file`, this option will specify all entered params directly in the nextflow command. -- `-p`, `--params-in PATH` - - To use values entered in a previous pipeline run, you can supply the `nf-params.json` file previously generated. - - This will overwrite the pipeline schema defaults before the wizard is launched. -- `-o`, `--params-out PATH` - - Path to save parameters JSON file to. (Default: `nf-params.json`) -- `-a`, `--save-all` - - Without this option the pipeline will ignore any values that match the pipeline schema defaults. - - This option saves _all_ parameters found to the JSON file. -- `-h`, `--show-hidden` - - A pipeline JSON schema can define some parameters as 'hidden' if they are rarely used or for internal pipeline use only. - - This option forces the wizard to show all parameters, including those labelled as 'hidden'. -- `--url` - - Change the URL used for the graphical interface, useful for development work on the website. - -## Create a parameter file - -Sometimes it is easier to manually edit a parameter file than to use the web interface or interactive commandline wizard -provided by `nf-core launch`, for example when running a pipeline with many options on a remote server without a graphical interface. - -You can create a parameter file with all parameters of a pipeline with the `nf-core create-params-file` command. -This file can then be passed to `nextflow` with the `-params-file` flag. - -This command takes one argument - either the name of a nf-core pipeline which will be pulled automatically, -or the path to a directory containing a Nextflow pipeline _(can be any pipeline, doesn't have to be nf-core)_. - -The generated YAML file contains all parameters set to the pipeline default value along with their description in comments. 
-This template can then be used by uncommenting and modifying the value of parameters you want to pass to a pipeline run. - -Hidden options are not included by default, but can be included using the `-x`/`--show-hidden` flag. - -## Downloading pipelines for offline use - -Sometimes you may need to run an nf-core pipeline on a server or HPC system that has no internet connection. -In this case you will need to fetch the pipeline files first, then manually transfer them to your system. - -To make this process easier and ensure accurate retrieval of correctly versioned code and software containers, we have written a download helper tool. - -The `nf-core download` command will download both the pipeline code and the [institutional nf-core/configs](https://github.com/nf-core/configs) files. It can also optionally download any singularity image files that are required. - -If run without any arguments, the download tool will interactively prompt you for the required information. -Each option has a flag, if all are supplied then it will run without any user input needed. - - - -![`nf-core download rnaseq -r 3.8 --outdir nf-core-rnaseq -x none -s none -d`](docs/images/nf-core-download.svg) - -Once downloaded, you will see something like the following file structure for the downloaded pipeline: - - - -![`tree -L 2 nf-core-rnaseq/`](docs/images/nf-core-download-tree.svg) - -You can run the pipeline by simply providing the directory path for the `workflow` folder to your `nextflow run` command: - -```bash -nextflow run /path/to/download/nf-core-rnaseq-dev/workflow/ --input mydata.csv --outdir results # usual parameters here -``` - -> [!NOTE] -> If you downloaded Singularity container images, you will need to use `-profile singularity` or have it enabled in your config file. 
- -### Downloaded nf-core configs - -The pipeline files are automatically updated (`params.custom_config_base` is set to `../configs`), so that the local copy of institutional configs are available when running the pipeline. -So using `-profile ` should work if available within [nf-core/configs](https://github.com/nf-core/configs). - -> [!WARNING] -> This option is not available when downloading a pipeline for use with [Seqera Platform](#adapting-downloads-to-seqera-platform) because the application manages all configurations separately. - -### Downloading Apptainer containers - -If you're using [Singularity](https://apptainer.org) (Apptainer), the `nf-core download` command can also fetch the required container images for you. -To do this, select `singularity` in the prompt or specify `--container-system singularity` in the command. -Your archive / target output directory will then also include a separate folder `singularity-containers`. - -The downloaded workflow files are again edited to add the following line to the end of the pipeline's `nextflow.config` file: - -```nextflow -singularity.cacheDir = "${projectDir}/../singularity-images/" -``` - -This tells Nextflow to use the `singularity-containers` directory relative to the workflow for the singularity image cache directory. -All images should be downloaded there, so Nextflow will use them instead of trying to pull from the internet. - -#### Singularity cache directory - -We highly recommend setting the `$NXF_SINGULARITY_CACHEDIR` environment variable on your system, even if that is a different system to where you will be running Nextflow. - -If found, the tool will fetch the Singularity images to this directory first before copying to the target output archive / directory. -Any images previously fetched will be found there and copied directly - this includes images that may be shared with other pipelines or previous pipeline version downloads or download attempts. 
- -If you are running the download on the same system where you will be running the pipeline (eg. a shared filesystem where Nextflow won't have an internet connection at a later date), you can choose to _only_ use the cache via a prompt or cli options `--container-cache-utilisation amend`. This instructs `nf-core download` to fetch all Singularity images to the `$NXF_SINGULARITY_CACHEDIR` directory but does _not_ copy them to the workflow archive / directory. The workflow config file is _not_ edited. This means that when you later run the workflow, Nextflow will just use the cache folder directly. - -If you are downloading a workflow for a different system, you can provide information about the contents of its image cache to `nf-core download`. To avoid unnecessary container image downloads, choose `--container-cache-utilisation remote` and provide a list of already available images as plain text file to `--container-cache-index my_list_of_remotely_available_images.txt`. To generate this list on the remote system, run `find $NXF_SINGULARITY_CACHEDIR -name "*.img" > my_list_of_remotely_available_images.txt`. The tool will then only download and copy images into your output directory, which are missing on the remote system. - -#### How the Singularity image downloads work - -The Singularity image download finds containers using two methods: - -1. It runs `nextflow config` on the downloaded workflow to look for a `process.container` statement for the whole pipeline. - This is the typical method used for DSL1 pipelines. -2. It scrapes any files it finds with a `.nf` file extension in the workflow `modules` directory for lines - that look like `container = "xxx"`. This is the typical method for DSL2 pipelines, which have one container per process. - -Some DSL2 modules have container addresses for docker (eg. `biocontainers/fastqc:0.11.9--0`) and also URLs for direct downloads of a Singularity container (eg. 
`https://depot.galaxyproject.org/singularity/fastqc:0.11.9--0`). -Where both are found, the download URL is preferred. - -Once a full list of containers is found, they are processed in the following order: - -1. If the target image already exists, nothing is done (eg. with `$NXF_SINGULARITY_CACHEDIR` and `--container-cache-utilisation amend` specified) -2. If found in `$NXF_SINGULARITY_CACHEDIR` and `--container-cache-utilisation copy` is specified, they are copied to the output directory -3. If they start with `http` they are downloaded directly within Python (default 4 at a time, you can customise this with `--parallel-downloads`) -4. If they look like a Docker image name, they are fetched using a `singularity pull` command. Choose the container libraries (registries) queried by providing one or multiple `--container-library` parameter(s). For example, if you call `nf-core download` with `-l quay.io -l ghcr.io -l docker.io`, every image will be pulled from `quay.io` unless an error is encountered. Subsequently, `ghcr.io` and then `docker.io` will be queried for any image that has failed before. - - This requires Singularity/Apptainer to be installed on the system and is substantially slower - -Note that compressing many GBs of binary files can be slow, so specifying `--compress none` is recommended when downloading Singularity images that are copied to the output directory. - -If the download speeds are much slower than your internet connection is capable of, you can set `--parallel-downloads` to a large number to download loads of images at once. - -### Adapting downloads to Seqera Platform - -[Seqera Platform](https://seqera.io/platform/) (formerly _"Nextflow Tower"_) provides a graphical user interface to oversee pipeline runs, gather statistics and configure compute resources. 
While pipelines added to _Seqera Platform_ are preferably hosted at a Git service, providing them as disconnected, self-reliant repositories is also possible for premises with restricted network access. Choosing the `--platform` flag will download the pipeline in an appropriate form. - -Subsequently, the `*.git` folder can be moved to its final destination and linked with a pipeline in _Seqera Platform_ using the `file:/` prefix. - -> [!TIP] -> Also without access to Seqera Platform, pipelines downloaded with the `--platform` flag can be run if the _absolute_ path is specified: `nextflow run -r 2.5 file:/path/to/pipelinedownload.git`. Downloads in this format allow you to include multiple revisions of a pipeline in a single file, but require that the revision (e.g. `-r 2.5`) is always explicitly specified. - -## Pipeline software licences - -Sometimes it's useful to see the software licences of the tools used in a pipeline. -You can use the `licences` subcommand to fetch and print the software licence from each conda / PyPI package used in an nf-core pipeline. - -> [!WARNING] -> This command does not currently work for newer DSL2 pipelines. This will hopefully be addressed [soon](https://github.com/nf-core/tools/issues/1155). - - - -![`nf-core licences deepvariant`](docs/images/nf-core-licences.svg) - -## Creating a new pipeline - -The `create` subcommand makes a new pipeline using the nf-core base template. -With a given pipeline name, description and author, it makes a starter pipeline which follows nf-core best practices. - -After creating the files, the command initialises the folder as a git repository and makes an initial commit. -This first "vanilla" commit which is identical to the output from the templating tool is important, as it allows us to keep your pipeline in sync with the base template in the future. -See the [nf-core syncing docs](https://nf-co.re/developers/sync) for more information. 
- - - -![` nf-core create -n nextbigthing -d "This pipeline analyses data from the next big omics technique" -a "Big Steve" --plain`](docs/images/nf-core-create.svg) - -Once you have run the command, create a new empty repository on GitHub under your username (not the `nf-core` organisation, yet) and push the commits from your computer using the example commands in the above log. -You can then continue to edit, commit and push normally as you build your pipeline. - -Please see the [nf-core documentation](https://nf-co.re/developers/adding_pipelines) for a full walkthrough of how to create a new nf-core workflow. - -> [!TIP] -> As the log output says, remember to come and discuss your idea for a pipeline as early as possible! -> See the [documentation](https://nf-co.re/developers/adding_pipelines#join-the-community) for instructions. - -Note that if the required arguments for `nf-core create` are not given, it will interactively prompt for them. If you prefer, you can supply them as command line arguments. See `nf-core create --help` for more information. - -### Customizing the creation of a pipeline - -The `nf-core create` command comes with a number of options that allow you to customize the creation of a pipeline if you intend to not publish it as an -nf-core pipeline. This can be done in two ways: by using interactive prompts, or by supplying a `template.yml` file using the `--template-yaml ` option. -Both options allow you to specify a custom pipeline prefix to use instead of the common `nf-core`, as well as selecting parts of the template to be excluded during pipeline creation. -The interactive prompts will guide you through the pipeline creation process. An example of a `template.yml` file is shown below. 
- -```yaml -name: coolpipe -description: A cool pipeline -author: me -prefix: myorg -skip: - - github - - ci - - github_badges - - igenomes - - nf_core_configs -``` - -This will create a pipeline called `coolpipe` in the directory `myorg-coolpipe` (`-`) with `me` as the author. It will exclude all possible parts of the template: - -- `github`: removed all files required for GitHub hosting of the pipeline. Specifically, the `.github` folder and `.gitignore` file. -- `ci`: removes the GitHub continuous integration tests from the pipeline. Specifically, the `.github/workflows/` folder. -- `github_badges`: removes GitHub badges from the `README.md` file. -- `igenomes`: removes pipeline options related to iGenomes. Including the `conf/igenomes.config` file and all references to it. -- `nf_core_configs`: excludes `nf_core/configs` repository options, which make multiple config profiles for various institutional clusters available. - -To run the pipeline creation silently (i.e. without any prompts) with the nf-core template, you can use the `--plain` option. - -## Linting a workflow - -The `lint` subcommand checks a given pipeline for all nf-core community guidelines. -This is the same test that is used on the automated continuous integration tests. - -For example, the current version looks something like this: - - - -![`nf-core lint`](docs/images/nf-core-lint.svg) - -You can use the `-k` / `--key` flag to run only named tests for faster debugging, eg: `nf-core lint -k files_exist -k files_unchanged`. The `nf-core lint` command lints the current working directory by default, to specify another directory you can use `--dir `. - -### Linting documentation - -Each test result name on the left is a terminal hyperlink. -In most terminals you can ctrl + click ( cmd + click) these -links to open documentation specific to this test in your browser. - -Alternatively visit and find your test to read more. 
- -### Linting config - -It's sometimes desirable to disable certain lint tests, especially if you're using nf-core/tools with your -own pipeline that is outside of nf-core. - -To help with this, you can add a tools config file to your pipeline called `.nf-core.yml` in the pipeline root directory (previously: `.nf-core-lint.yml`). -Here you can list the names of any tests that you would like to disable and set them to `False`, for example: - -```yaml -lint: - actions_awsfulltest: False - pipeline_todos: False -``` - -Some lint tests allow greater granularity, for example skipping a test only for a specific file. -This is documented in the test-specific docs but generally involves passing a list, for example: - -```yaml -lint: - files_exist: - - CODE_OF_CONDUCT.md - files_unchanged: - - assets/email_template.html - - CODE_OF_CONDUCT.md -``` - -Note that you have to list all configurations for the `nf-core lint` command under the `lint:` field in the `.nf-core.yml` file, as this file is also used for configuration of other commands. - -### Automatically fix errors - -Some lint tests can try to automatically fix any issues they find. To enable this functionality, use the `--fix` flag. -The pipeline must be a `git` repository with no uncommitted changes for this to work. -This is so that any automated changes can then be reviewed and undone (`git checkout .`) if you disagree. - -### Lint results output - -The output from `nf-core lint` is designed to be viewed on the command line and is deliberately succinct. -You can view all passed tests with `--show-passed` or generate JSON / markdown results with the `--json` and `--markdown` flags. - -## Pipeline schema - -nf-core pipelines have a `nextflow_schema.json` file in their root which describes the different parameters used by the workflow. -These files allow automated validation of inputs when running the pipeline, are used to generate command line help and can be used to build interfaces to launch pipelines. 
-Pipeline schema files are built according to the [JSONSchema specification](https://json-schema.org/) (Draft 7). - -To help developers working with pipeline schema, nf-core tools has four `schema` sub-commands: - -- `nf-core schema validate` -- `nf-core schema build` -- `nf-core schema docs` -- `nf-core schema lint` - -### Validate pipeline parameters - -Nextflow can take input parameters in a JSON or YAML file when running a pipeline using the `-params-file` option. -This command validates such a file against the pipeline schema. - -Usage is `nf-core schema validate `. eg with the pipeline downloaded [above](#download-pipeline), you can run: - - - -![`nf-core schema validate nf-core-rnaseq/3_8 nf-params.json`](docs/images/nf-core-schema-validate.svg) - -The `pipeline` option can be a directory containing a pipeline, a path to a schema file or the name of an nf-core pipeline (which will be downloaded using `nextflow pull`). - -### Build a pipeline schema - -Manually building JSONSchema documents is not trivial and can be very error prone. -Instead, the `nf-core schema build` command collects your pipeline parameters and gives interactive prompts about any missing or unexpected params. -If no existing schema is found it will create one for you. - -Once built, the tool can send the schema to the nf-core website so that you can use a graphical interface to organise and fill in the schema. -The tool checks the status of your schema on the website and once complete, saves your changes locally. - -Usage is `nf-core schema build -d `, eg: - - - -![`nf-core schema build --no-prompts`](docs/images/nf-core-schema-build.svg) - -There are four flags that you can use with this command: - -- `--dir `: Specify a pipeline directory other than the current working directory -- `--no-prompts`: Make changes without prompting for confirmation each time. Does not launch web tool. 
-- `--web-only`: Skips comparison of the schema against the pipeline parameters and only launches the web tool. -- `--url `: Supply a custom URL for the online tool. Useful when testing locally. - -### Display the documentation for a pipeline schema - -To get an impression about the current pipeline schema you can display the content of the `nextflow_schema.json` with `nf-core schema docs `. This will print the content of your schema in Markdown format to the standard output. - -There are four flags that you can use with this command: - -- `--output `: Output filename. Defaults to standard out. -- `--format [markdown|html]`: Format to output docs in. -- `--force`: Overwrite existing files -- `--columns `: CSV list of columns to include in the parameter tables - -### Add new parameters to the pipeline schema - -If you want to add a parameter to the schema, you first have to add the parameter and its default value to the `nextflow.config` file with the `params` scope. Afterwards, you run the command `nf-core schema build` to add the parameters to your schema and open the graphical interface to easily modify the schema. - -The graphical interface is organised in groups and within the groups the single parameters are stored. For a better overview you can collapse all groups with the `Collapse groups` button, then your new parameters will be the only remaining one at the bottom of the page. Now you can either create a new group with the `Add group` button or drag and drop the parameters in an existing group. Therefore the group has to be expanded. The group title will be displayed, if you run your pipeline with the `--help` flag and its description appears on the parameter page of your pipeline. - -Now you can start to change the parameter itself. The `ID` of a new parameter should be defined in small letters without whitespaces. The description is a short free text explanation about the parameter, that appears if you run your pipeline with the `--help` flag. 
By clicking on the dictionary icon you can add a longer explanation for the parameter page of your pipeline. Usually, they contain a small paragraph about the parameter settings or a used datasource, like databases or references. If you want to specify some conditions for your parameter, like the file extension, you can use the nut icon to open the settings. This menu depends on the `type` you assigned to your parameter. For integers you can define a min and max value, and for strings the file extension can be specified. - -The `type` field is one of the most important points in your pipeline schema, since it defines the datatype of your input and how it will be interpreted. This allows extensive testing prior to starting the pipeline. - -The basic datatypes for a pipeline schema are: - -- `string` -- `number` -- `integer` -- `boolean` - -For the `string` type you have three different options in the settings (nut icon): `enumerated values`, `pattern` and `format`. The first option, `enumerated values`, allows you to specify a list of specific input values. The list has to be separated with a pipe. The `pattern` and `format` settings can depend on each other. The `format` has to be either a directory or a file path. Depending on the `format` setting selected, specifying the `pattern` setting can be the most efficient and time saving option, especially for `file paths`. The `number` and `integer` types share the same settings. Similarly to `string`, there is an `enumerated values` option with the possibility of specifying a `min` and `max` value. For the `boolean` there are no further settings and the default value is usually `false`. The `boolean` value can be switched to `true` by adding the flag to the command. This parameter type is often used to skip specific sections of a pipeline. - -After filling the schema, click on the `Finished` button in the top right corner, this will automatically update your `nextflow_schema.json`. 
If this is not working, the schema can be copied from the graphical interface and pasted in your `nextflow_schema.json` file. - -### Update existing pipeline schema - -It's important to change the default value of a parameter in the `nextflow.config` file first and then in the pipeline schema, because the value in the config file overwrites the value in the pipeline schema. To change any other parameter use `nf-core schema build --web-only` to open the graphical interface without rebuilding the pipeline schema. Now, the parameters can be changed as mentioned above but keep in mind that changing the parameter datatype depends on the default value specified in the `nextflow.config` file. - -### Linting a pipeline schema - -The pipeline schema is linted as part of the main pipeline `nf-core lint` command, -however sometimes it can be useful to quickly check the syntax of the JSONSchema without running a full lint run. - -Usage is `nf-core schema lint ` (defaulting to `nextflow_schema.json`), eg: - - - -![`nf-core schema lint`](docs/images/nf-core-schema-lint.svg) - -## Bumping a pipeline version number - -When releasing a new version of a nf-core pipeline, version numbers have to be updated in several different places. The helper command `nf-core bump-version` automates this for you to avoid manual errors (and frustration!). - -The command uses results from the linting process, so will only work with workflows that pass these tests. - -Usage is `nf-core bump-version `, eg: - - - -![`nf-core bump-version 1.1`](docs/images/nf-core-bump-version.svg) - -You can change the directory from the current working directory by specifying `--dir `. To change the required version of Nextflow instead of the pipeline version number, use the flag `--nextflow`. - -## Sync a pipeline with the template - -Over time, the main nf-core pipeline template is updated. 
To keep all nf-core pipelines up to date, -we synchronise these updates automatically when new versions of nf-core/tools are released. -This is done by maintaining a special `TEMPLATE` branch, containing a vanilla copy of the nf-core template -with only the variables used when it first ran (name, description etc.). This branch is updated and a -pull-request can be made with just the updates from the main template code. - -Note that pipeline synchronisation happens automatically each time nf-core/tools is released, creating an automated pull-request on each pipeline. -**As such, you do not normally need to run this command yourself!** - -This command takes a pipeline directory and attempts to run this synchronisation. -Usage is `nf-core sync`, eg: - - - -![`nf-core sync`](docs/images/nf-core-sync.svg) - -The sync command tries to check out the `TEMPLATE` branch from the `origin` remote or an existing local branch called `TEMPLATE`. -It will fail if it cannot do either of these things. -The `nf-core create` command should make this template automatically when you first start your pipeline. -Please see the [nf-core website sync documentation](https://nf-co.re/developers/sync) if you have difficulties. - -To specify a directory to sync other than the current working directory, use the `--dir `. - -By default, the tool will collect workflow variables from the current branch in your pipeline directory. -You can supply the `--from-branch` flag to specify a different branch. - -Finally, if you give the `--pull-request` flag, the command will push any changes to the remote and attempt to create a pull request using the GitHub API. -The GitHub username and repository name will be fetched from the remote url (see `git remote -v | grep origin`), or can be supplied with `--username` and `--github-repository`. - -To create the pull request, a personal access token is required for API authentication. 
-These can be created at [https://github.com/settings/tokens](https://github.com/settings/tokens). -Supply this using the `--auth-token` flag. - -## Create an nf-core pipeline logo - -The `nf-core create-logo` command creates a logo for your pipeline based on the nf-core template and the pipeline name. You can specify the width of the logo in pixels with the `--width` flag. Additionally, you can specify the output format to be either `png` or `svg` with the `--format` flag. The default format is `png`. - -Usage is `nf-core create-logo `, eg: - - - -![`nf-core create-logo nextbigthing`](docs/images/nf-core-create-logo.svg) - -## Tools CLI TUI - -_CLI:_ Command line interface -_TUI:_ Terminal user interface - -The `nf-core` command line interface is fairly large, with a lot of commands and options. -To make it easier to explore and use, run `nf-core tui` to launch a graphical terminal interface. - -This functionality works using [Textualize/trogon](https://github.com/Textualize/trogon) -and is based on the underlying CLI implementation that uses [Click](https://click.palletsprojects.com/). - -## Modules - -With the advent of [Nextflow DSL2](https://www.nextflow.io/docs/latest/dsl2.html), we are creating a centralised repository of modules. -These are software tool process definitions that can be imported into any pipeline. -This allows multiple pipelines to use the same code for shared tools and gives a greater degree of granularity and unit testing. - -The nf-core DSL2 modules repository is at - -### Custom remote modules - -The modules supercommand comes with two flags for specifying a custom remote: - -- `--git-remote `: Specify the repository from which the modules should be fetched as a git URL. Defaults to the github repository of `nf-core/modules`. -- `--branch `: Specify the branch from which the modules should be fetched. Defaults to the default branch of your repository. 
- -For example, if you want to install the `fastqc` module from the repository `nf-core/modules-test` hosted at `gitlab.com`, you can use the following command: - -```terminal -nf-core modules --git-remote git@gitlab.com:nf-core/modules-test.git install fastqc -``` - -Note that a custom remote must follow a similar directory structure to that of `nf-core/modules` for the `nf-core modules` commands to work properly. - -The directory where modules are installed will be prompted or obtained from `org_path` in the `.nf-core.yml` file if available. If your modules are located at `modules/my-folder/TOOL/SUBTOOL` your `.nf-core.yml` should have: - -```yaml -org_path: my-folder -``` - -Please avoid installing the same tools from two different remotes, as this can lead to further errors. - -The modules commands will during initialisation try to pull changes from the remote repositories. If you want to disable this, for example -due to performance reasons or if you want to run the commands offline, you can use the flag `--no-pull`. Note however that the commands will -still need to clone repositories that have previously not been used. - -### Private remote repositories - -You can use the modules command with private remote repositories. Make sure that your local `git` is correctly configured with your private remote -and then specify the remote the same way you would do with a public remote repository. - -### List modules - -The `nf-core modules list` command provides the subcommands `remote` and `local` for listing modules installed in a remote repository and in the local pipeline respectively. Both subcommands allow you to use a pattern for filtering the modules by keywords eg: `nf-core modules list `. - -#### List remote modules - -To list all modules available on [nf-core/modules](https://github.com/nf-core/modules), you can use -`nf-core modules list remote`, which will print all available modules to the terminal. 
- - - -![`nf-core modules list remote`](docs/images/nf-core-modules-list-remote.svg) - -#### List installed modules - -To list modules installed in a local pipeline directory you can use `nf-core modules list local`. This will list the modules installed in the current working directory by default. If you want to specify another directory, use the `--dir ` flag. - - - -![`nf-core modules list local`](docs/images/nf-core-modules-list-local.svg) - -## Show information about a module - -For quick help about how a module works, use `nf-core modules info `. -This shows documentation about the module on the command line, similar to what's available on the -[nf-core website](https://nf-co.re/modules). - - - -![`nf-core modules info abacas`](docs/images/nf-core-modules-info.svg) - -### Install modules in a pipeline - -You can install modules from [nf-core/modules](https://github.com/nf-core/modules) in your pipeline using `nf-core modules install`. -A module installed this way will be installed to the `./modules/nf-core/modules` directory. - - - -![`nf-core modules install abacas`](docs/images/nf-core-modules-install.svg) - -You can pass the module name as an optional argument to `nf-core modules install` instead of using the cli prompt, eg: `nf-core modules install fastqc`. You can specify a pipeline directory other than the current working directory by using the `--dir `. - -There are three additional flags that you can use when installing a module: - -- `--force`: Overwrite a previously installed version of the module. -- `--prompt`: Select the module version using a cli prompt. -- `--sha `: Install the module at a specific commit. - -### Update modules in a pipeline - -You can update modules installed from a remote repository in your pipeline using `nf-core modules update`. 
- - - -![`nf-core modules update --all --no-preview`](docs/images/nf-core-modules-update.svg) - -You can pass the module name as an optional argument to `nf-core modules update` instead of using the cli prompt, eg: `nf-core modules update fastqc`. You can specify a pipeline directory other than the current working directory by using the `--dir `. - -There are six additional flags that you can use with this command: - -- `--force`: Reinstall module even if it appears to be up to date -- `--prompt`: Select the module version using a cli prompt. -- `--sha `: Install the module at a specific commit from the `nf-core/modules` repository. -- `--preview/--no-preview`: Show the diff between the installed files and the new version before installing. -- `--save-diff `: Save diffs to a file instead of updating in place. The diffs can then be applied with `git apply `. -- `--all`: Use this flag to run the command on all modules in the pipeline. - -If you don't want to update certain modules or want to update them to specific versions, you can make use of the `.nf-core.yml` configuration file. For example, you can prevent the `star/align` module installed from `nf-core/modules` from being updated by adding the following to the `.nf-core.yml` file: - -```yaml -update: - https://github.com/nf-core/modules.git: - nf-core: - star/align: False -``` - -If you want this module to be updated only to a specific version (or downgraded), you could instead specify the version: - -```yaml -update: - https://github.com/nf-core/modules.git: - nf-core: - star/align: "e937c7950af70930d1f34bb961403d9d2aa81c7" -``` - -This also works at the repository level. 
For example, if you want to exclude all modules installed from `nf-core/modules` from being updated you could add: - -```yaml -update: - https://github.com/nf-core/modules.git: - nf-core: False -``` - -or if you want all modules in `nf-core/modules` at a specific version: - -```yaml -update: - https://github.com/nf-core/modules.git: - nf-core: "e937c7950af70930d1f34bb961403d9d2aa81c7" -``` - -Note that the module versions specified in the `.nf-core.yml` file have higher precedence than versions specified with the command line flags, thus aiding you in writing reproducible pipelines. - -### Remove a module from a pipeline - -To delete a module from your pipeline, run `nf-core modules remove`. - - - -![`nf-core modules remove abacas`](docs/images/nf-core-modules-remove.svg) - -You can pass the module name as an optional argument to `nf-core modules remove` instead of using the cli prompt, eg: `nf-core modules remove fastqc`. To specify the pipeline directory, use `--dir `. - -### Create a patch file for a module - -If you want to make a minor change to a locally installed module but still keep it up to date with the remote version, you can create a patch file using `nf-core modules patch`. - - - -![`nf-core modules patch fastqc`](docs/images/nf-core-modules-patch.svg) - -The generated patches work with `nf-core modules update`: when you install a new version of the module, the command tries to apply -the patch automatically. The patch application fails if the new version of the module modifies the same lines as the patch. In this case, -the new version is installed but the old patch file is preserved. - -When linting a patched module, the linting command will check the validity of the patch. When running other lint tests the patch is applied in reverse, and the original files are linted. - -### Create a new module - -This command creates a new nf-core module from the nf-core module template. -This ensures that your module follows the nf-core guidelines. 
-The template contains extensive `TODO` messages to walk you through the changes you need to make to the template. - -You can create a new module using `nf-core modules create`. - -This command can be used both when writing a module for the shared [nf-core/modules](https://github.com/nf-core/modules) repository, -and also when creating local modules for a pipeline. - -Which type of repository you are working in is detected by the `repository_type` flag in a `.nf-core.yml` file in the root directory, -set to either `pipeline` or `modules`. -The command will automatically look through parent directories for this file to set the root path, so that you can run the command in a subdirectory. -It will start in the current working directory, or whatever is specified with `--dir `. - -The `nf-core modules create` command will prompt you with the relevant questions in order to create all of the necessary module files. - - - -![`cd modules && nf-core modules create fastqc --author @nf-core-bot --label process_low --meta --force`](docs/images/nf-core-modules-create.svg) - -### Check a module against nf-core guidelines - -Run the `nf-core modules lint` command to check modules in the current working directory (pipeline or nf-core/modules clone) against nf-core guidelines. - -Use the `--all` flag to run linting on all modules found. Use `--dir ` to specify another directory than the current working directory. - - - -![`nf-core modules lint multiqc`](docs/images/nf-core-modules-lint.svg) - -### Create a test for a module - -All modules on [nf-core/modules](https://github.com/nf-core/modules) have a strict requirement of being unit tested using minimal test data. We use [nf-test](https://code.askimed.com/nf-test/) as our testing framework. -Each module comes already with a template for the test file in `test/main.nf.test`. Replace the placeholder code in that file with your specific input, output and process. 
In order to generate the corresponding snapshot after writing your test, you can use the `nf-core modules test` command. This command will run `nf-test test` twice, to also check for snapshot stability, i.e. that the same snapshot is generated on multiple runs. - -You can specify the module name in the form TOOL/SUBTOOL in the command or provide it later through interactive prompts. - - - -![`nf-core modules test fastqc --no-prompts`](docs/images/nf-core-modules-test.svg) - -In case you changed something in the test and want to update the snapshot, run - -```bash -nf-core modules test --update -``` - -If you want to run the test only once without checking for snapshot stability, you can use the `--once` flag. - -### Bump bioconda and container versions of modules in - -If you are contributing to the `nf-core/modules` repository and want to bump bioconda and container versions of certain modules, you can use the `nf-core modules bump-versions` helper tool. This will bump the bioconda version of a single or all modules to the latest version and also fetch the correct Docker and Singularity container tags. - - - -![`nf-core modules bump-versions fastqc`](docs/images/nf-core-modules-bump-version.svg) - -If you don't want to update certain modules or want to update them to specific versions, you can make use of the `.nf-core.yml` configuration file. For example, you can prevent the `star/align` module from being updated by adding the following to the `.nf-core.yml` file: - -```yaml -bump-versions: - star/align: False -``` - -If you want this module to be updated only to a specific version (or downgraded), you could instead specify the version: - -```yaml -bump-versions: - star/align: "2.6.1d" -``` - -## Subworkflows - -After the launch of nf-core modules, we can now also provide nf-core subworkflows to fully utilize the power of DSL2 modularization. -Subworkflows are chains of multiple module definitions that can be imported into any pipeline. 
-This allows multiple pipelines to use the same code for the same tasks, and gives a greater degree of reusability and unit testing. - -To allow us to test modules and subworkflows together we put the nf-core DSL2 subworkflows into the `subworkflows` directory of the modules repository. - -### Custom remote subworkflows - -The subworkflows supercommand released in nf-core/tools version 2.7 comes with two flags for specifying a custom remote repository: - -- `--git-remote `: Specify the repository from which the subworkflows should be fetched as a git URL. Defaults to the github repository of `nf-core/modules`. -- `--branch `: Specify the branch from which the subworkflows should be fetched. Defaults to the default branch of your repository. - -For example, if you want to install the `bam_stats_samtools` subworkflow from the repository `nf-core/modules-test` hosted at `gitlab.com` in the branch `subworkflows`, you can use the following command: - -```bash -nf-core subworkflows --git-remote git@gitlab.com:nf-core/modules-test.git --branch subworkflows install bam_stats_samtools -``` - -Note that a custom remote must follow a similar directory structure to that of `nf-core/modules` for the `nf-core subworkflows` commands to work properly. - -The directory where subworkflows are installed will be prompted or obtained from `org_path` in the `.nf-core.yml` file if available. If your subworkflows are located at `subworkflows/my-folder/SUBWORKFLOW_NAME` your `.nf-core.yml` file should have: - -```yaml -org_path: my-folder -``` - -Please avoid installing the same tools from two different remotes, as this can lead to further errors. - -The subworkflows commands will during initialisation try to pull changes from the remote repositories. If you want to disable this, for example due to performance reasons or if you want to run the commands offline, you can use the flag `--no-pull`. 
Note however that the commands will still need to clone repositories that have previously not been used. - -### Private remote repositories - -You can use the subworkflows command with private remote repositories. Make sure that your local `git` is correctly configured with your private remote -and then specify the remote the same way you would do with a public remote repository. - -### List subworkflows - -The `nf-core subworkflows list` command provides the subcommands `remote` and `local` for listing subworkflows installed in a remote repository and in the local pipeline respectively. Both subcommands allow you to use a pattern for filtering the subworkflows by keywords eg: `nf-core subworkflows list `. - -#### List remote subworkflows - -To list all subworkflows available on [nf-core/modules](https://github.com/nf-core/modules), you can use -`nf-core subworkflows list remote`, which will print all available subworkflows to the terminal. - - - -![`nf-core subworkflows list remote`](docs/images/nf-core-subworkflows-list-remote.svg) - -#### List installed subworkflows - -To list subworkflows installed in a local pipeline directory you can use `nf-core subworkflows list local`. This will list the subworkflows installed in the current working directory by default. If you want to specify another directory, use the `--dir ` flag. - - - -![`nf-core subworkflows list local`](docs/images/nf-core-subworkflows-list-local.svg) - -## Show information about a subworkflow - -For quick help about how a subworkflow works, use `nf-core subworkflows info `. -This shows documentation about the subworkflow on the command line, similar to what's available on the -[nf-core website](https://nf-co.re/subworkflows). - - - -![`nf-core subworkflows info bam_rseqc`](docs/images/nf-core-subworkflows-info.svg) - -### Install subworkflows in a pipeline - -You can install subworkflows from [nf-core/modules](https://github.com/nf-core/modules) in your pipeline using `nf-core subworkflows install`. 
-A subworkflow installed this way will be installed to the `./subworkflows/nf-core` directory. - - - -![`nf-core subworkflows install bam_rseqc`](docs/images/nf-core-subworkflows-install.svg) - -You can pass the subworkflow name as an optional argument to `nf-core subworkflows install` like above or select it from a list of available subworkflows by only running `nf-core subworkflows install`. - -There are four additional flags that you can use when installing a subworkflow: - -- `--dir`: Pipeline directory, the default is the current working directory. -- `--force`: Overwrite a previously installed version of the subworkflow. -- `--prompt`: Select the subworkflow version using a cli prompt. -- `--sha `: Install the subworkflow at a specific commit. - -### Update subworkflows in a pipeline - -You can update subworkflows installed from a remote repository in your pipeline using `nf-core subworkflows update`. - - - -![`nf-core subworkflows update --all --no-preview`](docs/images/nf-core-subworkflows-update.svg) - -You can pass the subworkflow name as an optional argument to `nf-core subworkflows update` like above or select it from the list of available subworkflows by only running `nf-core subworkflows update`. - -There are six additional flags that you can use with this command: - -- `--dir`: Pipeline directory, the default is the current working directory. -- `--force`: Reinstall subworkflow even if it appears to be up to date -- `--prompt`: Select the subworkflow version using a cli prompt. -- `--sha `: Install the subworkflow at a specific commit from the `nf-core/modules` repository. -- `--preview/--no-preview`: Show the diff between the installed files and the new version before installing. -- `--save-diff `: Save diffs to a file instead of updating in place. The diffs can then be applied with `git apply `. -- `--all`: Use this flag to run the command on all subworkflows in the pipeline. 
-- `--update-deps`: Use this flag to automatically update all dependencies of a subworkflow. - -If you don't want to update certain subworkflows or want to update them to specific versions, you can make use of the `.nf-core.yml` configuration file. For example, you can prevent the `bam_rseqc` subworkflow installed from `nf-core/modules` from being updated by adding the following to the `.nf-core.yml` file: - -```yaml -update: - https://github.com/nf-core/modules.git: - nf-core: - bam_rseqc: False -``` - -If you want this subworkflow to be updated only to a specific version (or downgraded), you could instead specify the version: - -```yaml -update: - https://github.com/nf-core/modules.git: - nf-core: - bam_rseqc: "36a77f7c6decf2d1fb9f639ae982bc148d6828aa" -``` - -This also works at the repository level. For example, if you want to exclude all modules and subworkflows installed from `nf-core/modules` from being updated you could add: - -```yaml -update: - https://github.com/nf-core/modules.git: - nf-core: False -``` - -or if you want all subworkflows in `nf-core/modules` at a specific version: - -```yaml -update: - https://github.com/nf-core/modules.git: - nf-core: "e937c7950af70930d1f34bb961403d9d2aa81c7" -``` - -Note that the subworkflow versions specified in the `.nf-core.yml` file have higher precedence than versions specified with the command line flags, thus aiding you in writing reproducible pipelines. - -### Remove a subworkflow from a pipeline - -To delete a subworkflow from your pipeline, run `nf-core subworkflows remove`. - - - -![`nf-core subworkflows remove bam_rseqc`](docs/images/nf-core-subworkflows-remove.svg) - -You can pass the subworkflow name as an optional argument to `nf-core subworkflows remove` like above or select it from the list of available subworkflows by only running `nf-core subworkflows remove`. To specify the pipeline directory, use `--dir `. 
- -### Create a new subworkflow - -This command creates a new nf-core subworkflow from the nf-core subworkflow template. -This ensures that your subworkflow follows the nf-core guidelines. -The template contains extensive `TODO` messages to walk you through the changes you need to make to the template. -See the [subworkflow documentation](https://nf-co.re/docs/contributing/subworkflows) for more details around creating a new subworkflow, including rules about nomenclature and a step-by-step guide. - -You can create a new subworkflow using `nf-core subworkflows create`. - -This command can be used both when writing a subworkflow for the shared [nf-core/modules](https://github.com/nf-core/modules) repository, -and also when creating local subworkflows for a pipeline. - -Which type of repository you are working in is detected by the `repository_type` flag in a `.nf-core.yml` file in the root directory, -set to either `pipeline` or `modules`. -The command will automatically look through parent directories for this file to set the root path, so that you can run the command in a subdirectory. -It will start in the current working directory, or whatever is specified with `--dir `. - -The `nf-core subworkflows create` command will prompt you with the relevant questions in order to create all of the necessary subworkflow files. - - - -![`nf-core subworkflows create bam_stats_samtools --author @nf-core-bot --force`](docs/images/nf-core-subworkflows-create.svg) - -### Create a test for a subworkflow - -All subworkflows on [nf-core/modules](https://github.com/nf-core/modules) have a strict requirement of being unit tested using minimal test data. We use [nf-test](https://code.askimed.com/nf-test/) as our testing framework. -Each subworkflow comes already with a template for the test file in `test/main.nf.test`. Replace the placeholder code in that file with your specific input, output and process. 
In order to generate the corresponding snapshot after writing your test, you can use the `nf-core subworkflows test` command. This command will run `nf-test test` twice, to also check for snapshot stability, i.e. that the same snapshot is generated on multiple runs. - -You can specify the subworkflow name in the command or provide it later through interactive prompts. - - - -![`nf-core subworkflows test bam_rseqc --no-prompts`](docs/images/nf-core-subworkflows-test.svg) - -In case you changed something in the test and want to update the snapshot, run - -```bash -nf-core subworkflows test --update -``` - -If you want to run the test only once without checking for snapshot stability, you can use the `--once` flag. - -### Check a subworkflow against nf-core guidelines - -Run the `nf-core subworkflows lint` command to check subworkflows in the current working directory (a pipeline or a clone of nf-core/modules) against nf-core guidelines. - -Use the `--all` flag to run linting on all subworkflows found. Use `--dir ` to specify a different directory than the current working directory. +## Contributions and Support - +If you would like to contribute to this pipeline, please see the [contributing guidelines](.github/CONTRIBUTING.md). -![`nf-core subworkflows lint bam_stats_samtools`](docs/images/nf-core-subworkflows-lint.svg) +For further information or help, don't hesitate to get in touch on the [Slack `#tools` channel](https://nfcore.slack.com/channels/tools) (you can join with [this invite](https://nf-co.re/join/slack)). 
## Citation diff --git a/docs/images/nf-core-bump-version.svg b/docs/images/nf-core-bump-version.svg deleted file mode 100644 index 70171475d..000000000 --- a/docs/images/nf-core-bump-version.svg +++ /dev/null @@ -1,187 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core bump-version 1.1 - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.13.1 - https://nf-co.re - - -INFO     Changing version number from '1.0dev' to '1.1' -INFO     Updated version in 'nextflow.config' - - version         = '1.0dev' - + version         = '1.1' - - -INFO     Updated version in 'assets/multiqc_config.yml' - - This report has been generated by the <a  -href="https://github.com/nf-core/nextbigthing/tree/dev" target="_blank">nf-core/nextbigthing</a> - + This report has been generated by the <a  -href="https://github.com/nf-core/nextbigthing/releases/tag/1.1"  -target="_blank">nf-core/nextbigthing</a> - - -INFO     Updated version in 'assets/multiqc_config.yml' - - <a href="https://nf-co.re/nextbigthing/dev/docs/output"  -target="_blank">documentation</a>. - + <a href="https://nf-co.re/nextbigthing/1.1/docs/output"  -target="_blank">documentation</a>. 
- - - - - - diff --git a/docs/images/nf-core-create-logo.svg b/docs/images/nf-core-create-logo.svg deleted file mode 100644 index c6e726936..000000000 --- a/docs/images/nf-core-create-logo.svg +++ /dev/null @@ -1,107 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core create-logo nextbigthing - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.13.1 - https://nf-co.re - - -INFO     Created logo: nf-core-nextbigthing_logo_light.png - - - - diff --git a/docs/images/nf-core-create.svg b/docs/images/nf-core-create.svg deleted file mode 100644 index 422e6734c..000000000 --- a/docs/images/nf-core-create.svg +++ /dev/null @@ -1,162 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core create -n nextbigthing -d "This pipeline analyses data from the next big omics technique"  --a "Big Steve" --plain - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.13.1 - https://nf-co.re - - -INFO     Creating new nf-core pipeline: 'nf-core/nextbigthing' -INFO     Initialising pipeline git repository                                                        -INFO     Done. 
Remember to add a remote and push to GitHub:                                          - cd /home/runner/work/tools/tools/tmp/nf-core-nextbigthing - git remote add origin git@github.com:USERNAME/REPO_NAME.git  - git push --all origin                                        -INFO     This will also push your newly created dev branch and the TEMPLATE branch for syncing.      -INFO    !!!!!! IMPORTANT !!!!!! - -If you are interested in adding your pipeline to the nf-core community, -PLEASE COME AND TALK TO US IN THE NF-CORE SLACK BEFORE WRITING ANY CODE! - -Please read: https://nf-co.re/developers/adding_pipelines#join-the-community - - - - diff --git a/docs/images/nf-core-download-tree.svg b/docs/images/nf-core-download-tree.svg deleted file mode 100644 index fc9585c8c..000000000 --- a/docs/images/nf-core-download-tree.svg +++ /dev/null @@ -1,190 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ tree -L 2 nf-core-rnaseq/ -nf-core-rnaseq/ -├── 3_8 -│   ├── CHANGELOG.md -│   ├── CITATIONS.md -│   ├── CODE_OF_CONDUCT.md -│   ├── LICENSE -│   ├── README.md -│   ├── assets -│   ├── bin -│   ├── conf -│   ├── docs -│   ├── lib -│   ├── main.nf -│   ├── modules -│   ├── modules.json -│   ├── nextflow.config -│   ├── nextflow_schema.json -│   ├── subworkflows -│   ├── tower.yml -│   └── workflows -└── configs -    ├── CITATION.cff -    ├── LICENSE -    ├── README.md -    ├── bin -    ├── conf -    ├── configtest.nf -    ├── docs -    ├── nextflow.config -    ├── nfcore_custom.config -    └── pipeline - -14 directories, 16 files - - - - diff --git a/docs/images/nf-core-download.svg b/docs/images/nf-core-download.svg deleted file mode 100644 index 5594930fa..000000000 --- a/docs/images/nf-core-download.svg +++ /dev/null @@ -1,139 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core download rnaseq -r 3.8 --outdir nf-core-rnaseq -x none -s none -d - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.13.1 - https://nf-co.re - - -WARNING  Could not find GitHub authentication token. Some API requests may fail.                     -INFO     Saving 'nf-core/rnaseq' -          Pipeline revision: '3.8' -          Use containers: 'none' -          Container library: 'quay.io' -          Output directory: 'nf-core-rnaseq' -          Include default institutional configuration: 'True' -INFO     Downloading centralised configs from GitHub                                                 -INFO     Downloading workflow files from GitHub                                                      - - - - diff --git a/docs/images/nf-core-launch-rnaseq.svg b/docs/images/nf-core-launch-rnaseq.svg deleted file mode 100644 index f2608fe76..000000000 --- a/docs/images/nf-core-launch-rnaseq.svg +++ /dev/null @@ -1,120 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core launch rnaseq -r 3.8.1 - -                                          ,--./,-. 
-          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.13.1 - https://nf-co.re - - -INFO     NOTE: This tool ignores any pipeline parameter defaults overwritten by Nextflow config      -         files or profiles                                                                           - -INFO     Downloading workflow: nf-core/rnaseq (3.8.1) - - - - diff --git a/docs/images/nf-core-licences.svg b/docs/images/nf-core-licences.svg deleted file mode 100644 index 8cc00c351..000000000 --- a/docs/images/nf-core-licences.svg +++ /dev/null @@ -1,107 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core licences deepvariant - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.13.1 - https://nf-co.re - - -INFO     Fetching licence information for 8 tools                                                    - - - - diff --git a/docs/images/nf-core-lint.svg b/docs/images/nf-core-lint.svg deleted file mode 100644 index 2e55a7e11..000000000 --- a/docs/images/nf-core-lint.svg +++ /dev/null @@ -1,208 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core lint - -                                          ,--./,-. 
-          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.13.1 - https://nf-co.re - - - -INFO     Testing pipeline: . - - -╭─[?] 1 Pipeline Test Ignored────────────────────────────────────────────────────────────────────╮ - -pipeline_todos: pipeline_todos                                                                   - -╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─[!] 1 Pipeline Test Warning────────────────────────────────────────────────────────────────────╮ - -readme: README contains the placeholder zenodo.XXXXXXX. This should be replaced with the zenodo  -doi (after the first release).                                                                   - -╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ - - -╭──────────────────────╮ -LINT RESULTS SUMMARY -├──────────────────────┤ -[✔] 188 Tests Passed -[?]   1 Test Ignored -[!]   1 Test Warning -[✗]   0 Tests Failed -╰──────────────────────╯ - - - - diff --git a/docs/images/nf-core-list-rna.svg b/docs/images/nf-core-list-rna.svg deleted file mode 100644 index 643545c6f..000000000 --- a/docs/images/nf-core-list-rna.svg +++ /dev/null @@ -1,172 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core list rna rna-seq - -                                          ,--./,-. 
-          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.13.1 - https://nf-co.re - - -┏━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┓ -Have latest         -Pipeline Name       StarsLatest Release    ReleasedLast Pulledrelease?            -┡━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━╇━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━┩ -│ rnaseq               │   742 │         3.14.0 │ 2 months ago │           - │ -                   │ -│ differentialabundan… │    38 │          1.4.0 │ 3 months ago │           - │ -                   │ -│ smrnaseq             │    64 │          2.3.0 │   6 days ago │           - │ -                   │ -│ rnasplice            │    25 │          1.0.3 │   6 days ago │           - │ -                   │ -│ circrna              │    34 │            dev │  1 weeks ago │           - │ -                   │ -│ scrnaseq             │   125 │          2.5.1 │ 1 months ago │           - │ -                   │ -│ rnafusion            │   126 │          3.0.1 │ 3 months ago │           - │ -                   │ -│ spatialtranscriptom… │    36 │            dev │  4 weeks ago │           - │ -                   │ -│ dualrnaseq           │    16 │          1.0.0 │  3 years ago │           - │ -                   │ -│ marsseq              │     5 │          1.0.3 │ 8 months ago │           - │ -                   │ -│ lncpipe              │    28 │            dev │  1 years ago │           - │ -                   │ -│ scflow               │    24 │            dev │  3 years ago │           - │ -                   │ -└──────────────────────┴───────┴────────────────┴──────────────┴─────────────┴─────────────────────┘ - - - - diff --git a/docs/images/nf-core-list-stars.svg b/docs/images/nf-core-list-stars.svg 
deleted file mode 100644 index 8ea120599..000000000 --- a/docs/images/nf-core-list-stars.svg +++ /dev/null @@ -1,141 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core list -s stars - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.13.1 - https://nf-co.re - - -┏━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┓ -Have latest         -Pipeline Name      StarsLatest Release     ReleasedLast Pulledrelease?            -┡━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━╇━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━┩ -│ rnaseq              │   742 │         3.14.0 │  2 months ago │           - │ -                   │ -│ sarek               │   320 │          3.4.0 │  4 months ago │           - │ -                   │ -│ mag                 │   175 │          2.5.4 │   2 weeks ago │           - │ -                   │ -│ chipseq             │   161 │          2.0.0 │   1 years ago │           - │ -                   │ -[..truncated..] - - - - diff --git a/docs/images/nf-core-list.svg b/docs/images/nf-core-list.svg deleted file mode 100644 index 5e4939f74..000000000 --- a/docs/images/nf-core-list.svg +++ /dev/null @@ -1,145 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core list - -                                          ,--./,-. 
-          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.13.1 - https://nf-co.re - - -┏━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┓ -Have latest         -Pipeline Name      StarsLatest Release     ReleasedLast Pulledrelease?            -┡━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━╇━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━┩ -│ raredisease         │    65 │          1.1.1 │  7 months ago │           - │ -                   │ -│ fetchngs            │   101 │         1.11.0 │  4 months ago │           - │ -                   │ -│ sarek               │   320 │          3.4.0 │  4 months ago │           - │ -                   │ -│ methylseq           │   126 │          2.6.0 │  2 months ago │           - │ -                   │ -│ rnaseq              │   742 │         3.14.0 │  2 months ago │           - │ -                   │ -[..truncated..] - - - - diff --git a/docs/images/nf-core-logo.png b/docs/images/nf-core-logo.png deleted file mode 100644 index 95a519194b5d59677304317ce84d9a2d72abf4ea..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 17930 zcmbSxWmp_b*C6ihZiDOK?(QB2cMI+i9D-YL2o_v}1`9g4Yj9_92u^U=-1oiv?e5?0 z=jraM>QhJRSans!XsF4fqmZCLK|!G_D#&O-K|z1{Ted?&`1{;KE<^kKg7(momx8LD zB0K(DL3UNp_ke<82mEtG`<9A({{_&u*8zEgR8@p5U7Xm>tz0au*?pZ{|3X7SiTDcr zEjn3ynp62YIXZg?`HIr~2SVs?`JZME8mj+*cshvEfK)Z8q+Q&tsd(9W*nu=+C{$Eb zBJNf;LRvC%|91ad6Q!~9^mG;C;PCPBVfW!?cX7An;1m=TS^!dO!W_5a|;(QPf;40e+d20^mL&75IBJW3O7N%JX5dOZ4y7%MC%uLvmn${yRa2M?MWU8~ zq0QUeG`M?xZ$0s>>e$k%TUy%5f6J@=)OzlI{=VrywQ-VRZa0Ty2gd_L0-X>dCjt0| zJ@fxj4NV4^C5lvFRQ}r=5*5r@}of#`k}u*2Ur#TTpSxWCDy@tQlcl6 z*wI$i;;qJtlkn&651AMF;Wul!Kb!Nj4X6$7cw9^+=p!Y)wYUrap%ae1T~XZCB`Up? 
z!95)iVct$^MzWYyCb-0>3cC~vReMT8JiPNuw|wWP>Dzwo##V2aZx%3|1{njX%;n!< zF|)wNg1B8}+fFsB+~)v^>XMfE9K#(1wsRZt<}B3;==fKwm=38h7h6zN*Q&C)tfL*@Op6CZjB%$9<@mGG}t|{I<3XYP>w$5)YHhLKd{4lgRo#bIZDE6 zJfZvJe|6vcnZ``)?w58uDtne5uIeyOgPoNcU*?)+`@vU#pWi=y-sk00(;`t%eZ-&7 zAgDHZY2aTAvfCSKQ;(iLP8;d(&ok0VZhKe1U!Og2ncLM={4#kw zy}bQeV?Q~)@s;uEZ-(Q25ewhHBM3hjl^PSLy$)Npw zAahMq26K(q@ozms9ixMH(i4A7IgAa`IiCH{qPUnJU)0iaOk@feOT2{sV>P|<-#R)B znSvQIS3b+$d}PfKpDmbjK7jn3nsOBTcDX26Uir&zVC!7ie9h}-RGQyO0)@JGgZCd7 z=oO&qO@>34aM0F9GptzjOVvI|-179MfTX3lFrt)Bu>DMR7LaLE^oKWr6QHhLCvuZs z=c$!uNSMV~p|TULifw=*rVon#QUXZT4%2}xkw-t!VN%JARy z@ngD~(nsI?{!Gc6?5P`TGN@;-poqdC8~PVMifCdFPu9=UnyPaHldAI%L%3 zKjnW-IZcl0yoU{gw|;^jKPnIc<~>Fhjx706QW|Int3?9n?>*{kD;#x78s;9q`1y&z zWcQIO>@^R&`EX4ayMKSJP%sN?!n#ku_Z9IEfVFAr$9L`CYPa{HiR53euCK;72M#p9 z1!AdG=2nybx}d8R^*1@9Iy(~)(Hx$GdP3xL8w|s%jieU7;8p)vD}B;0TJW-vhHmt= z!_|0eVZN^M(S0$8r6nGL(fFORCT9%9#R?O+>WKTsB%i~ou`Gl$K=ApTqwF^V10)~ zxPde3^gF)PY6u=nG&IDVXuQScA%8*R&ZeBfY42yMJy8OZG6lH!B)HIbAouctTlF6+ zMTLbXfjOU`HOBnoW6MWr|6{do<*e;)5v`bCPZ~B%zgp;%gCcG2=M%@q{*Oe+FX#-x zpJ6tBadnL9$d!7rw798GrM2-<^(!kFsT4P>>OYkV^EtPYIxJZiTtM8$c*r=Xwf*G4 zlS2FOSca7tY{3K^}whyuLFpyZit_4&@3`WO-shjCk5wsdB!~ykGnHiW0dwfJ2wed>I;STB-owMgX7o?O(#pJpok$8}E;HUV9HAyObaT^RO=@5x z@re6+{dk=PV5C2VNSuUt^=Gve;&y2gB0cn}sd*T+WNj zL-)Vj{4oyl5RTWz>j!m3=Og<6F2T;CLCTuJ^`5a~eVO%Fi?Pz;7}7kO)OcZO5b1{U z%SPYeB=2#!%yG$wNNdz;F68lir1r4;aRgr;8=Q4z3TQ zj-O9zVCa6~68Y>s`#0a0;lYG5(J-FzlMIEDPTDu@8})f>`$h<4d&SeH34e!+A$3~z z+3PT^;yxBMTp=@d3yZP0Dbrm!eoFn_?Z?pGtUkqAbx-Cx&;4;j#rV7ZZd`cybAA;- z3HE^e7-Mqxwsgh3xyHUhId{`@cf{C?n0}#?p9rA!92q908I6 zDL&@+$uP{1Y4B#*+!qPj_}p-HhUFG1l30jrTQmB(xofM#J<&VK)npMj2cbQhzyneW zv585nSc6Zo6`*;H(P$r2%-GZS4I+9@t3qs5^uY=;xjHudA4ki&PFr43(yeVQl^%JMvYM@#9s+|CuT>qeAmL(@pqh$nC|{!uHgYR&z{=ZcSG@y~!rE{^1(@ zu%UaWzJf~Hw10d&uhP_wu%WsP;N3g7C8vqYSNg;HxuMT^m(8kR z&sf44r1(#H{h<0kvn`-5R!DMmHO5Y;C1Q(g*^*JN@Z7X<_F0HW_~bIyZzQzHuPuGX z`a|J38W=>}M9Fw%O%|ZfJrPrlANrpg=$#}YX+C~(@w4!42a&7x(J-m8mzM)oUWd9_ 
zy8VuRb=209?~zA-?x+=7+WO?XM9L>#dLYGeq&g?rctZJ4ga*O^(PgA}*xNlIQNJTk zmJWVCJ&J*N+tQqg2=o}E+~vrn0k`q`@$(3Q%>#x2j?Kq zl<0#MpB7PR4+tWBI4=a79L8ocqb0(T>s$l$a~w4@i-n2ySF2 zQSNXBah>Tt#`7@QI28lQn5*czyzQ2X246b2SnC*fsO#r5-Resp29+;5Pr{~Hoc$#7 z8mxVXOXAaOATAOWA%64|V;I`r0(5Oty_GbK3o`l%nkF0kXw5*L)&4;$LuHz&vF}H& zhEWUVKT3ZSC1Q<`=$_<${q_|%$-Oks_HBE1FS+yE>(HCRco$n60JBU+IXA=RBTp{4 zDC$wRL}oUYM)1wXIg3a{ya+1>*RLjx*BZdB##|3M}p&{h>~boQ;{;bt*Q8=uL^sp_HT;IBDU>|E>XvD&nYH_LO0S;Ta8D;LK341}DruiDpa+W$wH|dtvAuH!`z0K*HhW2tH1ICzfXD+~qLb3QKV_{&wnV zi}4jKqEqNM(_5aED2R$Gk6xiI!1Wn7botLmp)SzhK-)YKp5gE^$deb?v9#gts$nFe z^zG5`VU^OyGK$v4DtYO>h@A9rxz-I!HDX@vQrk>xjvHYt1mt~tD_Zw_kV>b`k>WgB zpMD(b6+ITB6gDPdC*+(FmKmasIE~;zNPMQ;LJZln%-BT{5@Y=r_(PO3}(4WDG)~`!F^*cU4r#xY?DTNsf#kpK#>Fknk?e= zxOFe8rU~BfxVBm1eY%3dOd{N| zT1=c8cTsaqoCETa=GLhU;7ZCSi@O*l^&;8Ll9LjE9@L`D+nHd?zv@3kExEb!g$QFZ zK5J-)zzo1>T9K&n-P|h&)w_wSn6Tw~R3#DG*~G886>qKC2HjxEnJoIM_v3%@t;wqt zQJj0IR53ve!hawG;v0a9`hv(*-~+`U#DMq-19^4c+isc`mYJ72j%P8+(}?rY@t;3o zMZ0gncf-CAWzkLk0NoE&g*G~IP1A@wC$w*8hMJz6+Kaf#$k|yK1w#ds2CxVqW5hXN zjawt#w{ClF>zY*}wxP?O@x1Uq5M0)Q(^!bagFawLns$9q>`%Ef3;-Qhz_qU?>cQx0 zP(U^yOJBH(QF8X&f@>XweTT7LPw>z1#BDZTCO`%0yaWgX!nlzq0f8b>>|eEV#)I0f zQ~;+YFxH;9HymZQmtrfxi*6|hud9o z9=SSyUd)<0r&#SLx*-ih=D%;D&+>-n(&dB zLtZoU;f2ZQ9rcA6Sk+B)(BsK4bf0_Jcd4Sj`)#LKlb)V1jI>u0CxDpVlp?ILX~Irq zG#IwbBF%LdQ!PAtD0Dh{ALsIJyETNV{0q^PlmJ)f!1M!%UhnY**XSKH%bIO}s4?3p zY*(q`Ud(_7@n?LdMUL1$)40YW6gtJmD`FLYK$Q+Q+NjbIN!Tzos2k=+1mj&D>bO7( zi*a*4ffE<#muN_R>*xk2H{e|F-d^e&U{h1x5SA{1!*+i%7HTnm(fN_rCV%tDV9{Z{ zrh`NYUqkW&G8uHP)3?Z#7KU0~IKjz@M2s7&GFZ=pQ3u448VTFrFYQ|hWQ%`-{EMe&X z^~_-{7tx6Ha$!~%b+8|i!4I15-yM0H_NG$G)22GxNHN9ehMVwJyfuu@%4giV-+pT{ zV-CczrdK4jG8v;QV?${|xp%qCKAzFI(w}MDwPm!}WK0@Sqf>trv_K&d6Jt=j9S`um zBjBfwZY=LT`}@EBV7vMVM7MZUzm%39?Mwzu(MT_IeSp47x<*y2>l4kE+q*HtxuLz2 z5LCGpuT7wj_x*DP7$-2y==efzvd6bv6c_v1-28lp6|>)cl7B&Gg(L4nf{U@?)TY(y zlaxAT|JTVQFvZ{aQ>mmtg(Q@Cq>KKI(wqnu+6qu8T;Oq1Lii+4f}AS>B8uMm93SLE z9rPl!a^&=3H?`rLJL;S7SDmyFT-2fZ8D;&^ttkC(%0Rtnj=LD}$hJcukjU{tXXa#( 
z)XK#bBWalUL~RGQT{q9Cp}sT?#w52LO6g^K+0E7d89#@T(OJgeixx~7tF3;(7JVwD z0d}8fARy%vh+r&7C_J_Q3zKTiDXaQ*`u3#RqVbzeAbn)bJb>Dw|56JhK2<4pOl|#| zNl=7>>ZzElIBFjW65p4e3D(n84(-H=9Jnx=uhhG}_5oi`u&s zUa1q2#jp#0N$8yXlpr@2rR_EteGD|t5G+4g9#Uk*9izUQ`EF#9i#9F7Ew&kq(FxOu ztwL5(yW_nZvvp@y@B9{#TJy(&bJxi=~Hfcc!`Hl!%+>s6#QsLd(^RWG*6P+t-{a zKoiM?y;42MJnhnlL{%=z)DiwimOS&Z!dtua!_CrEW7 zmm=;b-<0`;fLgjCd4g!ZSaz|Tr|+(oAJ?-Q}cnYUS3W^*HjYm*>N;$%VK!z;{jZNgTGP%IT)jD*$1 zC^mcQr|6@8czJ$Mg@B(%X2)f)+{`v?zm|vMZZDXrdE5}+NmB26Fqbv;T8NbBP1)kA z4y0UvIY*fHjpxspU^vCTnf!T&^sv?Yi>}5NSM8G2gQA7y zuA4X6@}JXZfqh-g;@0`$qi>e+(AW))xu3hQE(BJ4g|3Eef3??%x6gv0V8djgbqmZ~ z)*~ZNsw_7~9e?p%*i}`%UAeCX?InsLW)#nvMonw5PP>uwA71MWjc|=S`{G=AD|EYx z6YzY54a3$aKvc#N(}A*}a2&U!wiprUEHM{JvG1sTDpmGz{N!->Gsl@jEFGWgeAXAT zXY6Wh#p9-G0$PBVzAk`i3T-NEI#-DdlWy#fZmg&7A?>7?*yO_h)IQKM{#C|8mf!;h z_1mnO<_bG>Fyae>D=6hh_L~gRzLdM7elRq}8;Vil)weS;MWo0!U82T-K1PHm0O#+5 z@OQX;Jiy#$^b#gsJNH8%X7!?({3BIa$Hfr%%>S}|CDqM13geW^6O;~aKCN~t7xsg6 zkGBL&@U-}XyIT6l{e8%TZn@V;PpX$@qf5vb4B|>-Fek+aH@ffLT2Y0@$$-U)BYTiMnc zcKSgMZ)v*Zm80e-4$i->PkvpCf*XXsl{P5Q1$>Nyj|z zQ8oM0+L(#!_XYt9MtXM#t96?bzBehy2B1RC(5ywbKYL@OW#1rIon%Q-Zn461;KDdd z17CP+;x$Ca-hsaf`?Pn0a$~Ggc%pTi>qqXRVSWM}<(MSI4JGU5h?W@)OxN5?5$bCr z%9!vUqP~tyPmue3?Yi_Ur?;lx935uf@~8Q{#rlAn6{|OXydT}Zvh>&|wtSD-+^Z3(8IFb{2=~-%xyzmxnu-**PxJ`8JK*R^Pw0T4E)8n1ui{e^VJ;^Iehc zqVw!P^?7mQ<*_Ss<926i$;Bl3R5cKX<8>Mi$C(D`*C5^n&EoR%_g9aMfLnAPiu3TTSek^951n#g4DKRVH=2h-vSATahR^Tx zbD;53+ziYL*;mTyV2i6^=5FBCjH|0hV*)j`4czR9hL##R&LF!_+i8G%pe52!Q)!80 zR5Yc3L8@cVxpKzaQdybWO&u&OS5-><<;3{rIZ<{v6mawG?2D8PwxB`Uic@`)kG$ z_Oge@3H@v0*EFc&M7jXDAHM@^X~x<`8J_^lyB&x&cbWml@Dbaz(1@gGWbrE>zEyQ0 z9Zt(U<|9a$mon=-hT~j%-I9sj*d2bFBixl7p84*8Oc4sxNY=#f!{bN9W&VV13I$#G zr5!)BYef4|nD)vzgJFko>zgMw*i9tosh|Gk7|bn2_bp@lxv29f;4@{{_VH==6z_^dgF`d3m+=7 zJ~ZccTgj-KmvvgT2kUF&w{iV5?boSx%|k3+ExO z|3*C|;Sk@aO%zZG==?y8DF>}FH($Oqkm!B4#p{37Yzm2nok~0={RtUmoOUk1O4pfR zTm%h2weXbvVlVJIq+gWnZydkPp+QGNjhJySU%clGelVVo5g7r@pzgzTRA%(~ zcA_>})dcW)%P0&o~Ym@g?pcqMgzsJBk4LJJoOvt`0F3wd>u}dU( 
z-y{#1?tE?89P}~qN+yq6PpSlxh*aUeP0O#-L}hx_YgqrxJU47D^;q!Pc@h&OK=hJ7SJ%8kggSo^Q6$(p-Zg~XB2-{4MOj@L}Q!dh=!<6fb zoEqvInr?>2+33^Iczh9uYeRoAboEK@oxh6Tzx#N3+G`uykY}PJRZG?T44pCnYV~RdyQ~uJC z-^{t~kxvi(sy_>yY;$=GxIZKIA}Fsm6Hfbhr9H3`Du;%8aF!or)ZRSJ%X$4p=)j*e zpzsO^%(K}xq@Ol%o2Mupz9wx2Lyte`RyCw!>nhgipP5Df96iGo$c#vE3$bhOIL)la z_46a)Q2((%9-}jIc}q={QegS{T3XHDEqd6kPhuN}|L6|Z!-JZV!Jt+?Wu4LNd+kJ}SY&Gfl>bjey2psHygUP4E)*B&Acb#;t)!lClgTb^Y z;#1EGU~af1ux7Noc~aU~cB}!(AG3n+;Fv4BDDL#b{?K7KZ9KFKWMFn|sOZsiKsUw` zD%b_y6xQZRI%cO7Tub^?KjKn0_~~8pG7}sh^=M&Kq~cb+9s|8!`WzfmfUpHiz%Rxj zGuN`%+mCZ8R&Qp&s3LftcAP~SBA3vUP1uOlKzGILXZgA=Jq*PuKJoszof&eR=|%u} zDlc#V*VL2E)}O^(Xjr@qKswAaPCI%_PjDd?FD_8(iO`aLk5gC0H&oZcCE@GWFBO%7 zy6NaBkXkFQXOpzFuL6gS0^WMr)eWFTCN|x$a2_(Djb%4$^506%!hWM(!JZr6dp){- zy^1iw=URQ10@x=0R_s)fW%i>QbnCcd=6gD=4!Ul!$LzXp=`L$ow39zwRDp>KlAOd; zTNXbFe1ChzCc+7T_&}bo8n^Xc$uUd!+(Bn(Pf5G*xXsBdUya`@o|N7{Jk6PUxZ3o#G(`Qo-Bh-i}d zeGi>z)rt)?xovq~R)1AF-YuWc*>SPqv@$;{Dep4aOZr`ojfg{*q#+u+GKSb2jk|T5 zXt|=#AOX=eXNR;n|rMS(n)*tqAwey%T``1bEK6+xiP70 zH^Fs<6x)BW1x&dI!oAd)4Jo1#+lmzh7f$aUFay_6H)0gUaI#Gjq6*+}mCLjl(M75p zPmYc>1O_Hfw*K5P6eP(0)?djL8dCgSyE77wOVW=NNk3U!pmInW(Bg5Z)={Zw&OhNG z8TCc3*6aZWQ;B6423r0Et<+h}R6I_as}+DDtGu6|^lcf((Xl)1RK+LI!~Wtnc!1G{ z(}cikkb}O1#KCL?EO(*%^VE$H5mE6Gr+VGS)6q#6-fqhpvm*C) zZn_zs2% zTc@fhKIsc0h)`n5KeOoFUP9od2;OVMzk+Diq+|yVJ$8}HbFQl5T`@RXky59qc%3JN z|5c6K*A1Jq#Wbqe$_u@Y9R2nU7faLA7 zjxv(kK1eas6*JZk@7xFB_i;nLIgO(BYbUZ_Vg_t^vdGPKSHLMwsDsFKLy#XBtOJGS zgyz-jvf>^6xjwZG-%kr1eTWsDl>Ir~QPFtk5@pN?J6{Eh8WeU2oYo#TuZ?Q86vNxc z(D_plhm^!7&`uJ-Mf%%tL2&8y1D9C#YaqQ0`|jgMZ)AH4&%vtB@& zn!wK*E0(fN4H=gDSyY1~{X$R0QEaXJ);qF_xfn^tmYi$e)%B%hqImHXkZiJB9=?u?_8jNF=qr>CMhcj@sQEI<9xoOLROB#eDBTB~hZu zVgGaOFuUmpZ7tbg+OF7E&-U=9Ss~Xqv}+Z#=(Ix0NxV*l@=13eWt+XP{QRJM)v`te zgA4Vp!-OzJlMCG6BQQ0-Ct}?`?W8Cf17m|77WjE@WfYw9Cbcuuy-G+^OxrPF`HLW; za^mt!IaZ9|y~2;VEq$o6AC*wFbvM)S&!lNBRs{?Zh9+h8Mgn7`ZVAfe3|upAQZ;9P{yLx3=U5)4vU9Ihl&jiK=Jkd()K 
z06NlJTh})vZ2b>feCK|$FQWiC`n#jnZitMIyQ1`>xJZ-hIoJ5fZvJjE>|Jh+8m_8wKhdB|X0}T+eMYd{C0}3$jD|EF38y@Rce(Oefz1PYOq{(=_$DpfNgt zj5>mD6t=%{-4A_ySKQIymLHKd4^Ocp_c9&Q`dxL@LhyrW#ukyLQ+Y2H+$6sCY=FlO z$L$eYFJ>BH9(|sSzRFK3YgAx!wdUIyUw<|LlPF|;R*cYrqSk};*kW(T0y&p7wgDei z{!B;K)w5JJn0-+&;GpZb1tEbPJB8&QSWh3{V^7m&p*o7E`4OIcj~OLe8&{`>*6k)J zVJkQ>rEf$5xuiBpPHvRYFo+)jACq%meHpx~A0rc?h68)}it^sGo&ib>z8arGD<3Np z@jHM;{fzPF0^?V1XpZ?KWQsMw@tNrYtOT~q>KS$aBFC$LeCipF^rO{sbu|9(&UQkv zcz{5dkBN$dLHhBo4o^IdO29Hg`S(E;*ezHfxMjD(UQc!sX}NX?0o@3{pX1e%1Z~{a z2o?-SP}ca%)~CTmnNB&JH@76k9hJvt;WHhxuD&obsm+f!0WTfrY0n>V&tdBAS80D! z0UH4Tonw`yh8m+xnpJKL?2EF2y2wLeAu;e>BP-V1#mPzc+D%(Dhi)78?XjsOzYj5y z=SoO4Cht*H%_6b%O0@_{$X@c>>(@VLW0VEY{PO2(rl~LUkoMI~R!;7$A(CZY>%umm zUlqYj1I4JS?n9pl!PHI(!SF0)?*VIf&BdIYYpcg1Ul9pwiUU zeyKRH*8R$DQ9_g-9(OWKYj}gP--K<7R>1tx#8UYs$Nw{F#MeAeJL0 z+5$O;kF@R9qI4$;U05qvXLmpG(-^VteyXx-vJc7i{1(VbP05kYhi1U`+Z&ijH6L{JXDx zBRWF>NempV9WlEs1;Zzjz0OGX5`w$nqaGP$kqPtI+@eBvc3t_BhNhHh1fMX5*kx{r zFcK|uwgqjh;v$XAxrKS8m?hCrfAWX+t6Q{$8kitNP8M6G#K;__I-Lz7Cjz>l?ldJa zXuH}fAK#mMo}3P*xL=040xyS8Uf&1Tz`BZ53edVKDjatDs|b1esYHqY@G*9gyOsVH z_k|Ik9p^q=R+9jw?*o=s7GCg*sVUpk9()W$rULYV4I!j579z4g#3w6rQG&T&i3U8ob*!z$ zqj^Hs@S?1^WqB$YKejw1RM;?@vv-c-g{7H1MNnQx<~5LC*#c^9sp$Znak3nLoIYT2ac$}DNI!1GSaf7>aVZqfHpfCU6{a^r$IW;vhBvEIlBiygB+rl4^LERC`Ck!b+IGWd{p?FX!vw+=}{`mBVj?D z5mGyqpvo_qA*QJGn%VLAF}oN;46!b)7b8E7RMf8CEgm2i;_2H8r|+f!M(`g}u(&iYv6^tLxgJlfx2fRXtKhsYo!pYt(z&ET;gjb=plup}A!zyBWiq)LC%1B@&%ya08GETZMG5>D(J zTE~UPGCo98{Gpt2X;>T_Qr;J(>_kvuE`Q?|ktYjaF^H#~XO**HeLM1}p%}DcC0Ozm z#U+0K4A+Ly*T_LvoOY^vIUbG?GY(>P=XSC%7>fpv zJj(e()?oNU!#E!?<@NL%n-#x4{V|$qi z5JHL5CfzR{(tfO-re0)V0&}Yl_mq&l3OkSGcD>ntxR_1QLdnc|6&Pu1L z_O1AIgV06iqzQ%bO&IGEpA^=jC|P#{#>~`hr~g`rG+Z7Q$7+ZQ(u6h_w*5m+;PJjs z!^8~cDIWg5c!;2vhhWw`siR2l;%@p-bkIm=rmdsyP47}jF#H>@FI$!pNFiQEY>*)+ z)|ig6wRirsSLah&xKhFx^N44$3J-;m@FRAXOtx|}`4jak%nKp#mAh7nHaR`=>qWIj zdsK4z8_1h5)F*Tcj$Toa8-t1C=S{v!XX%7nwOrQH&KUL#?Dlx?Jh((lz`vXMC75Ps zA9D0I&EbybqstX@#?ql-^@9Ci07qLA%4XE@}aBV@2lA7Ym8Ln6 
zLKL$85Ya@l8+b4dxD|Z<>!U++{GHq4jT@bl#1Q@_6S<&63nL;kJhLIQ#!o`+8Q&U( zKCNW-+sEH0ipRl@wB~b#yOu)oy{%#5a5fjMX}iTLjz$yH&)^G$ro(5f2du1Vo6wwI z!mub39szv1G4*ykUwa8f$YxfTmE22#Z>n81e~3eGS~e)1<{;hAri;pEFDX>eJ%D0j z3I<})&J#ikmX3)8fDX)0mS+T*A6nlD+K;=#w6&HK;{Yn_z^A3_ZqnWSpj(at``m`@ zRlsJ$`PEAvHt$Jv#|xbA{31HH@S+R#8$`}g;wukK1eRHG8B9BBKu54TJb6Y1r~;`% zq{57d@T?9i#Ky#tr?Mubi zl8@&e3Lh@4vH9x3U0*q}`5gaApi~{45Jo5>4qb;ek6YivFNqF*J&K45VSH8{c@hJr zOS!5R;nC;o-xA@2 z^)TEFqKDBWCHWz;z1MJSRVPz7v)u<@PmcQDXfZTo$V1Jq5}BDuvR4mIRr(S4=M*pS zBJ$q6x6SHHeR}Gcchp_jR$6w(y)AzyNO&ytIq6MlynXOan=cE~B<#XmV~mw^mlFTX zD_(e`C*)D|t^Lc(M^}zEBY>7r`s|~bO9uMhshL`{rWo^eJ#FM?6z&lnynP;#vQcB> z4lpUWB>7iC^5-ip(v9E@UUZ7%gza*0F_VLZJbAxBMBH(%d%av5u0+su7aewMrt{Wq z@}OZ|{$}Eaj?Qvc6bjwNRGu(?83-*{9$icpy}wY5-WsB?Ns>D-W@5v;WbrcYripO;Mi#4#ICFh3O-k$#8B}f#r}xg#ZPI^8tD%lYCIpN zn`@-4@SHyLv$Igojcvvapl%~&jV)D}jH;VWHltJl_;NJNtDancTKD)J8k^g781S|u zQZ~Rd;Ok0zivpZ4gHEB~ca~1w6ZQ<@yd6n~*%w1Ifjx(blcaEA9TpC#3x;oLD?ut{B zg$vyjU9~U%OmHzOn`!=gpFg!gVdfAWhud1+sZeHh6bpoy!pXvw_Om9oJcV*_y<48W zwU54618;kN8VRpKv+m2couW?c#AHBJvZcbrtJUHw=R|BQ%A@4YQ+ko>9!I<;O1b%8 zzsuDN6%_3s)WRufV2{BmUjt#K6GPBGFmE0@*xV`;FZ;6pT~R(rWKUE(DrihXoNd#h zCt)R7Rt6*Xsw7RzNu_RlDe13A>??qMBhC_CxfS)b~h`5}8vPB`hfYa&h+I^wLw>aprau{IU%Spr6 zHk+!qaV5Sy6(yt0TN5Bbos@QoQtndp>JZwdqg2HLy#A3E&n#-iY$_E0bvn-aQwj0+ zW!ECmAL+h@ZI00P%ZXX!n1l!~CG?eJpj6Q{m2#&emek-_T0(4W4xl76#0AByq;ujo+e=NM6t%#+S`Y;*PR_J%1C+}tM*YqM2 zGcv{hC|WflPts=n$DBK~SM5j3>WDR|nFyo#L{;6BOrCvbhxzl6;kS#{Qm#8;%$j$t z9s`CeTtnqznAMKcRMWsk_oWx#>D0v}-f|Q_XBrZyNWh!h`GeC}d)DshnwTI_|I0YzuIuf8X&ijb&E@AogA=4Cm3oA;f?U`R#nlFesU!i zQa;f6CWTR|9Y@*xrxRlEBIan@K*0sinbiK!C)LMI%EEI@W04dabXc2b}6^}2F%+szXT7$ANTmyTSIy3n;H7;$;;F@^rWV9Yv zBz*)oD7933ziRbIOD%iJdrI}w$QO#6_5&4e+fommkV=|Ia}7c|WiBOAFKTab_S6%1pZ3)|;sZ7?CYGLcC{4YryRBl%X+$R^r#CZ1_7UQZ z#Tld+evFKHz4!cxU&|lL1UQY|%d}7WT)}9cORUFaVLsEA~pD_qIUG<^FUOPdjA!<_-J?hlYLLIb#G zlkLZuqu5i&k4yb=P@I_Ic@a*TB;LAE6tZ2-Sb#ECi=YoI0A)7#U1T5|I^X}(He0Rt6|{d091t^q$JGCB11QwkE^40`Guznj}t?g`Ma*T 
z-`bD!{AE^9Vy?rXdK9*jfm*OoSMY0V0Y*;rgTv}7iSp3NK^Er(JHbfO!+!d)*TK(x z6b#ewY4uD04<>x9e7-nQGr3|v;R4uwnS3#_@(>d>mYqR=bX0QB1rdT!I!RS5J;fi? z&8VL!6KDzpRFv%molP*hkpc)Fcz~@!1Sgf8rpDV0U}13D#i1FeW1B8|R%}VG(4?zm zQEoXLxzai}+oDsMF!li72!=G4a-xh0vI(p+%ryM?FI?v+>{H}&Ld@`Ym`!*az1!9| zQFhn?u1q1`ZGjzrBdTGkyI69)g+n6;LeD%LtR zTH1KoIB+p0cj_4uk7GIVNV}hywu|1Oiz<#`P+5*qVTl)6opyr*@i`K99%$R?%*=zE z%{T=d)z8Q^1?Rt@(J`|uV7;8q9PYoA896%6&4ioV?hKD^XPa@l2Tbv)KPHA0KvTjX z#yrw#XRGLtB9)2j-$&G2`OQK#Hq9W}=;L}UM$u-0DB#60gD`UI@)fd`+xypOaa*sh z_eHQwrk=xf$pf)%BpHRR(PbylBvGYzGkcRuHIb=m9QSYr?ywNx+Yh~K*RG?Kc&QwP zdg&<$DEr%>{jO1E_Um$kRpM+rfAC>X&+%0K&~O{0p0BprhZB^1ZDd#2z|`Nu$8r2J zkek3;;I5fJv|aFHGPwl6m62NKF2i0of?jOcFt<2#+^U*DX|?iew!YWl#ggBf^VuWE85+` z`vg{y64S8XaheIe8~d^&x9ddKOA$k)4MaPYnt{^L)KElH&M29>6vDA)ze@8*XJ#3r zzs8UIUrS$<5@LHdsVU^f=8FlQKFyaCMW>m@%e z;%ukWksCYadZ+I9etb~#C!g^-!LKufrTf)-4^${QEcI}8UhLGVk{Q9kz+wkF3!qi; z%>5KQ;pC>e19$ z_CBth+q~+JNcZJ8pDtL0@m(yj@LHx(-julS)!Z8W)CUbm3nzt0Pbg#xb6*YIn>NRR zRGi!)sBxmMkcIJHBB+wR58 zUt%2F9^R^0JYl`_;{sQk<)KM74<7}+K6t8OrDlT6MxoN6n@c91+`IDgCf)7Zk3=i+ z*SzQ}oV{z(pBI813JceR?OMRW)EU2R#X@$GFZZAKeffWDUR>6QckUj^`f$D1 z^EQ^>JzHLMOH0tdXZOd1ZHH23|1-&i9-Qz$>EWU1^=tahbo@Dyw`OO^-2SIKOjK%) zYt}EHGwYh^r5kb5#lbiA_9Ru5UOVweBtQJR%bGjh2|V|*!`<%w9gsZq4r7{);&N%RT>k+a2fiHnKC0pZ#2?~1Z@l*Uq5j={57{(+_21;ZDt3>*@mBeb&&xN-=XiWbbnFpXHFuwn_`Op*65flN zf7bouxTWIvvFi#KzDi7-lmAFU*xcE=X~mDT0y;gbFPHtkWHaw2|AXDW$8VSk@bgM{ z%I}@!08Bni2d{a8e5}sF^v|PZXJpNF)6)e$*Vq%yq89#1UwJsSFh#VdGv8~2HkbUQ zLmp2HZx_U!yc@&)e($EL`qOJQAGb-LSvFNxaO>4;^#`j?&XtbiZT`sSvX_gE<@o;@ zzhh5VoH!%-cHPw9fA^ivt4(5XJ#nz#wfDH8Ds!gD&WmhII4pim1g4?|f*KtReU_Q? 
zsxOE=-TNaw+~2&qDEr}l*7%~R-q$t}0#Xgn7i83n%$ECoy>Dy$`#}9~YxAZ5B+17y zwNX`6q4QIiFVdQ I&MBb@0Dk*Wz5oCK diff --git a/docs/images/nf-core-modules-bump-version.svg b/docs/images/nf-core-modules-bump-version.svg deleted file mode 100644 index 093a0cebc..000000000 --- a/docs/images/nf-core-modules-bump-version.svg +++ /dev/null @@ -1,144 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core modules bump-versions fastqc - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.13.1 - https://nf-co.re - - - - -╭──────────────────────────────────────────────────────────────────────────────────────────────────╮ -[!] 1 Module version up to date. 
-╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭──────────────────────────────────────────┬───────────────────────────────────────────────────────╮ -Module name                             Update Message                                        -├──────────────────────────────────────────┼───────────────────────────────────────────────────────┤ - fastqc                                    Module version up to date: fastqc                      -╰──────────────────────────────────────────┴───────────────────────────────────────────────────────╯ - - - - diff --git a/docs/images/nf-core-modules-create.svg b/docs/images/nf-core-modules-create.svg deleted file mode 100644 index caa9c389f..000000000 --- a/docs/images/nf-core-modules-create.svg +++ /dev/null @@ -1,123 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core modules create fastqc --author @nf-core-bot  --label process_low --meta --force - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.13.1 - https://nf-co.re - - - -INFO     Repository type: modules -INFO    Press enter to use default values (shown in brackets)or type your own responses.  -ctrl+click underlined text to open links. 
-INFO     Using Bioconda package: 'bioconda::fastqc=0.12.1' - - - - diff --git a/docs/images/nf-core-modules-info.svg b/docs/images/nf-core-modules-info.svg deleted file mode 100644 index 255c595e7..000000000 --- a/docs/images/nf-core-modules-info.svg +++ /dev/null @@ -1,240 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core modules info abacas - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.13.1 - https://nf-co.re - - - -╭─ Module: abacas  ────────────────────────────────────────────────────────────────────────────────╮ -│ 🌐 Repository: https://github.com/nf-core/modules.git                                            │ -│ 🔧 Tools: abacas                                                                                 │ -│ 📖 Description: contiguate draft genome assembly                                                 │ -╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ -                  ╷                                                                   ╷              -📥 Inputs        Description                                                             Pattern -╺━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━╸ - meta  (map)     │Groovy Map containing sample information e.g. 
[ id:'test',         │ -                  │single_end:false ]                                                 │ -╶─────────────────┼───────────────────────────────────────────────────────────────────┼────────────╴ - scaffold  (file)│Fasta file containing scaffold                                     │*.{fasta,fa} -╶─────────────────┼───────────────────────────────────────────────────────────────────┼────────────╴ - fasta  (file)   │FASTA reference file                                               │*.{fasta,fa} -                  ╵                                                                   ╵              -                  ╷                                                                   ╷              -📤 Outputs       Description                                                             Pattern -╺━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━╸ - meta  (map)     │Groovy Map containing sample information e.g. [ id:'test',         │ -                  │single_end:false ]                                                 │ -╶─────────────────┼───────────────────────────────────────────────────────────────────┼────────────╴ - results  (file) │List containing abacas output files [ 'test.abacas.bin',           │ *.{abacas}* -                  │'test.abacas.fasta', 'test.abacas.gaps', 'test.abacas.gaps.tab',   │ -                  │'test.abacas.nucmer.delta', 'test.abacas.nucmer.filtered.delta',   │ -                  │'test.abacas.nucmer.tiling', 'test.abacas.tab',                    │ -                  │'test.abacas.unused.contigs.out', 'test.abacas.MULTIFASTA.fa' ]    │ -╶─────────────────┼───────────────────────────────────────────────────────────────────┼────────────╴ - versions  (file)│File containing software versions                                  │versions.yml -                  ╵                                                                   ╵              - - 💻  Installation command: nf-core 
modules install abacas - - - - - diff --git a/docs/images/nf-core-modules-install.svg b/docs/images/nf-core-modules-install.svg deleted file mode 100644 index 2b0331bca..000000000 --- a/docs/images/nf-core-modules-install.svg +++ /dev/null @@ -1,126 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core modules install abacas - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.13.1 - https://nf-co.re - - - -INFO     Installing 'abacas' -INFO     Use the following statement to include this module:                                         - - include { ABACAS } from '../modules/nf-core/abacas/main'                                            - - - - - diff --git a/docs/images/nf-core-modules-lint.svg b/docs/images/nf-core-modules-lint.svg deleted file mode 100644 index 2809eeee2..000000000 --- a/docs/images/nf-core-modules-lint.svg +++ /dev/null @@ -1,114 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core modules lint multiqc - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.13.1 - https://nf-co.re - - - -INFO     Linting modules repo: '.' 
-INFO     Linting module: 'multiqc' - - - - diff --git a/docs/images/nf-core-modules-list-local.svg b/docs/images/nf-core-modules-list-local.svg deleted file mode 100644 index ecb9ed499..000000000 --- a/docs/images/nf-core-modules-list-local.svg +++ /dev/null @@ -1,155 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core modules list local - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.13.1 - https://nf-co.re - - - -INFO     Modules installed in '.':                                                                   - -self.repo_type='pipeline' -┏━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━┓ -Module NameRepository           Version SHA          Message              Date       -┡━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━┩ -│ fastqc      │ https://github.com/n… │ f4ae1d942bd50c5c0b9b… │ Update FASTQC to use  │ 2024-01-31 │ -│             │                       │                       │ unique names for      │            │ -│             │                       │                       │ snapshots (#4825)     │            │ -│ multiqc     │ https://github.com/n… │ b7ebe95761cd389603f9… │ Update MQC container  │ 2024-02-29 │ -│             │                       │                       │ (#5006)               │            │ -└─────────────┴───────────────────────┴───────────────────────┴───────────────────────┴────────────┘ - - - - diff --git a/docs/images/nf-core-modules-list-remote.svg b/docs/images/nf-core-modules-list-remote.svg deleted file mode 100644 index 
138be7306..000000000 --- a/docs/images/nf-core-modules-list-remote.svg +++ /dev/null @@ -1,169 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core modules list remote - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.13.1 - https://nf-co.re - - - -INFO     Modules available from https://github.com/nf-core/modules.git(master):                     - -┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓ -Module Name                                           -┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩ -│ abacas                                                │ -│ abricate/run                                          │ -│ abricate/summary                                      │ -│ abritamr/run                                          │ -│ adapterremoval                                        │ -│ adapterremovalfixprefix                               │ -│ admixture                                             │ -│ affy/justrma                                          │ -│ agat/convertspgff2gtf                                 │ -[..truncated..] 
- - - - diff --git a/docs/images/nf-core-modules-patch.svg b/docs/images/nf-core-modules-patch.svg deleted file mode 100644 index 5ed5e2f4d..000000000 --- a/docs/images/nf-core-modules-patch.svg +++ /dev/null @@ -1,193 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core modules patch fastqc - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.13.1 - https://nf-co.re - - - -INFO     Changes in module 'nf-core/fastqc' -INFO    'modules/nf-core/fastqc/environment.yml' is unchanged                                       -INFO    'modules/nf-core/fastqc/meta.yml' is unchanged                                              -INFO     Changes in 'fastqc/main.nf':                                                                - ---- modules/nf-core/fastqc/main.nf -+++ modules/nf-core/fastqc/main.nf -@@ -1,6 +1,6 @@ -process FASTQC {                                                                                   -    tag "$meta.id"                                                                                 --    label 'process_medium' -+    label 'process_low' - -    conda "${moduleDir}/environment.yml"                                                           -    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_  - - -INFO    'modules/nf-core/fastqc/tests/tags.yml' is unchanged                                        -INFO    'modules/nf-core/fastqc/tests/main.nf.test' is unchanged                                    -INFO    'modules/nf-core/fastqc/tests/main.nf.test.snap' is unchanged            
                   -INFO     Patch file of 'modules/nf-core/fastqc' written to 'modules/nf-core/fastqc/fastqc.diff' - - - - diff --git a/docs/images/nf-core-modules-remove.svg b/docs/images/nf-core-modules-remove.svg deleted file mode 100644 index 91f702004..000000000 --- a/docs/images/nf-core-modules-remove.svg +++ /dev/null @@ -1,110 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core modules remove abacas - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.13.1 - https://nf-co.re - - - -INFO     Removed files for 'abacas' and its dependencies 'abacas'.                                   - - - - diff --git a/docs/images/nf-core-modules-test.svg b/docs/images/nf-core-modules-test.svg deleted file mode 100644 index 52e86ba82..000000000 --- a/docs/images/nf-core-modules-test.svg +++ /dev/null @@ -1,110 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core modules test fastqc --no-prompts - -                                          ,--./,-. 
-          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.13.1 - https://nf-co.re - - - -INFO     Generating nf-test snapshot                                                                 - - - - diff --git a/docs/images/nf-core-modules-update.svg b/docs/images/nf-core-modules-update.svg deleted file mode 100644 index 23d9931ce..000000000 --- a/docs/images/nf-core-modules-update.svg +++ /dev/null @@ -1,122 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core modules update --all --no-preview - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.13.1 - https://nf-co.re - - - -INFO    'modules/nf-core/abacas' is already up to date                                              -INFO    'modules/nf-core/fastqc' is already up to date                                              -INFO    'modules/nf-core/multiqc' is already up to date                                             -INFO     Updates complete ✨                                                                         - - - - diff --git a/docs/images/nf-core-schema-build.svg b/docs/images/nf-core-schema-build.svg deleted file mode 100644 index 7236440a0..000000000 --- a/docs/images/nf-core-schema-build.svg +++ /dev/null @@ -1,119 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core schema build --no-prompts - -                                          ,--./,-. 
-          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.13.1 - https://nf-co.re - - -INFO    [] Default parameters match schema validation -INFO    [] Pipeline schema looks valid(found 30 params) -INFO     Writing schema with 31 params: 'nextflow_schema.json' - - - - diff --git a/docs/images/nf-core-schema-lint.svg b/docs/images/nf-core-schema-lint.svg deleted file mode 100644 index d08a01144..000000000 --- a/docs/images/nf-core-schema-lint.svg +++ /dev/null @@ -1,114 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core schema lint - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.13.1 - https://nf-co.re - - -INFO    [] Default parameters match schema validation -INFO    [] Pipeline schema looks valid(found 31 params) - - - - diff --git a/docs/images/nf-core-schema-validate.svg b/docs/images/nf-core-schema-validate.svg deleted file mode 100644 index 33984ccbe..000000000 --- a/docs/images/nf-core-schema-validate.svg +++ /dev/null @@ -1,118 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core schema validate nf-core-rnaseq/3_8 nf-params.json - -                                          ,--./,-. 
-          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.13.1 - https://nf-co.re - - -INFO    [] Default parameters match schema validation -INFO    [] Pipeline schema looks valid(found 93 params) -INFO    [] Input parameters look valid - - - - diff --git a/docs/images/nf-core-subworkflows-create.svg b/docs/images/nf-core-subworkflows-create.svg deleted file mode 100644 index fd20914cf..000000000 --- a/docs/images/nf-core-subworkflows-create.svg +++ /dev/null @@ -1,143 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core subworkflows create bam_stats_samtools --author @nf-core-bot --force - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.13.1 - https://nf-co.re - - - -INFO     Repository type: modules -INFO    Press enter to use default values (shown in brackets)or type your own responses.  -ctrl+click underlined text to open links. 
-INFO     Created component template: 'bam_stats_samtools' -INFO     Created following files:                                                                    -           subworkflows/nf-core/bam_stats_samtools/main.nf                                           -           subworkflows/nf-core/bam_stats_samtools/meta.yml                                          -           subworkflows/nf-core/bam_stats_samtools/tests/tags.yml                                    -           subworkflows/nf-core/bam_stats_samtools/tests/main.nf.test                                - - - - diff --git a/docs/images/nf-core-subworkflows-info.svg b/docs/images/nf-core-subworkflows-info.svg deleted file mode 100644 index b3f4f38ed..000000000 --- a/docs/images/nf-core-subworkflows-info.svg +++ /dev/null @@ -1,172 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core subworkflows info bam_rseqc - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.13.1 - https://nf-co.re - - - -╭─ Subworkflow: bam_rseqc  ────────────────────────────────────────────────────────────────────────╮ -│ 🌐 Repository: https://github.com/nf-core/modules.git                                            │ -│ 📖 Description: Subworkflow to run multiple commands in the RSeqC package                        │ -╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ -[..truncated..] 
- readduplication_rscript  (file)    │script to reproduce the plot       │                      *.R -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - tin_txt  (file)                    │TXT file containing tin.py results │                    *.txt -                                     │summary                            │ -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - versions  (file)                   │File containing software versions  │             versions.yml -                                     ╵                                   ╵                           - - 💻  Installation command: nf-core subworkflows install bam_rseqc - - - - - diff --git a/docs/images/nf-core-subworkflows-install.svg b/docs/images/nf-core-subworkflows-install.svg deleted file mode 100644 index 8c86c3a7e..000000000 --- a/docs/images/nf-core-subworkflows-install.svg +++ /dev/null @@ -1,126 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core subworkflows install bam_rseqc - -                                          ,--./,-. 
-          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.13.1 - https://nf-co.re - - - -INFO     Installing 'bam_rseqc' -INFO     Use the following statement to include this subworkflow:                                    - - include { BAM_RSEQC } from '../subworkflows/nf-core/bam_rseqc/main'                                 - - - - - diff --git a/docs/images/nf-core-subworkflows-lint.svg b/docs/images/nf-core-subworkflows-lint.svg deleted file mode 100644 index 7e827661a..000000000 --- a/docs/images/nf-core-subworkflows-lint.svg +++ /dev/null @@ -1,341 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core subworkflows lint bam_stats_samtools - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.13.1 - https://nf-co.re - - - -INFO     Linting modules repo: '.' -INFO     Linting subworkflow: 'bam_stats_samtools' - -╭─[!] 
14 Subworkflow Test Warnings───────────────────────────────────────────────────────────────╮ -                     ╷                                     ╷                                       -Subworkflow name   File path                          Test message                         -╶────────────────────┼─────────────────────────────────────┼─────────────────────────────────────╴ -bam_stats_samtools subworkflows/nf-core/bam_stats_sam…TODO string in main.nf.test//Add  -all required assertions to verify  -the test output. -bam_stats_samtools subworkflows/nf-core/bam_stats_sam…TODO string in main.nf.testAdd  -tags for all modules used within  -this subworkflow. Example: -bam_stats_samtools subworkflows/nf-core/bam_stats_sam…TODO string in main.nf.testChange  -the test name preferably indicating  -the test-data and file-format used -bam_stats_samtools subworkflows/nf-core/bam_stats_sam…TODO string in main.nf.testOnce  -you have added the required tests,  -please run the following command to  -build this file: -bam_stats_samtools subworkflows/nf-core/bam_stats_sam…TODO string in main.nf.testdefine  -inputs of the workflow here.  -Example: -bam_stats_samtools subworkflows/nf-core/bam_stats_sam…TODO string in main.nf -subworkflow SHOULD import at least  -two modules -bam_stats_samtools subworkflows/nf-core/bam_stats_sam…TODO string in main.nfIf in doubt  -look at other nf-core/subworkflows  -to see how we are doing things! 
:) -bam_stats_samtools subworkflows/nf-core/bam_stats_sam…TODO string in main.nfedit emitted -channels -bam_stats_samtools subworkflows/nf-core/bam_stats_sam…TODO string in main.nfedit input  -(take) channels -bam_stats_samtools subworkflows/nf-core/bam_stats_sam…TODO string in main.nfsubstitute  -modules here for the modules of your -subworkflow -bam_stats_samtools subworkflows/nf-core/bam_stats_sam…TODO string in meta.yml#Add a  -description of the subworkflow and  -list keywords -bam_stats_samtools subworkflows/nf-core/bam_stats_sam…TODO string in meta.yml#Add a list -of the modules and/or subworkflows  -used in the subworkflow -bam_stats_samtools subworkflows/nf-core/bam_stats_sam…TODO string in meta.yml#List all  -of the channels used as input with a -description and their structure -bam_stats_samtools subworkflows/nf-core/bam_stats_sam…TODO string in meta.yml#List all  -of the channels used as output with  -a descriptions and their structure -                     ╵                                     ╵                                       -╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭───────────────────────╮ -LINT RESULTS SUMMARY -├───────────────────────┤ -[✔]  42 Tests Passed  -[!]  14 Test Warnings -[✗]   0 Tests Failed  -╰───────────────────────╯ - - - - diff --git a/docs/images/nf-core-subworkflows-list-local.svg b/docs/images/nf-core-subworkflows-list-local.svg deleted file mode 100644 index 6bec883e1..000000000 --- a/docs/images/nf-core-subworkflows-list-local.svg +++ /dev/null @@ -1,168 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core subworkflows list local - -                                          ,--./,-. 
-          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.13.1 - https://nf-co.re - - - -INFO     Subworkflows installed in '.':                                                              - -self.repo_type='pipeline' -┏━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━┓ -Subworkflow Name   Repository        Version SHA        Message           Date       -┡━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━┩ -│ utils_nextflow_pip… │ https://github.co… │ 5caf7640a9ef1d18d7… │ remove             │ 2024-02-28 │ -│                     │                    │                     │ params.outdir from │            │ -│                     │                    │                     │ modules and        │            │ -│                     │                    │                     │ subworfklows tests │            │ -│                     │                    │                     │ (#5007)            │            │ -│ utils_nfcore_pipel… │ https://github.co… │ 5caf7640a9ef1d18d7… │ remove             │ 2024-02-28 │ -│                     │                    │                     │ params.outdir from │            │ -│                     │                    │                     │ modules and        │            │ -[..truncated..] 
- - - - diff --git a/docs/images/nf-core-subworkflows-list-remote.svg b/docs/images/nf-core-subworkflows-list-remote.svg deleted file mode 100644 index 704c2e9a4..000000000 --- a/docs/images/nf-core-subworkflows-list-remote.svg +++ /dev/null @@ -1,169 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core subworkflows list remote - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.13.1 - https://nf-co.re - - - -INFO     Subworkflows available from https://github.com/nf-core/modules.git(master):                - -┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓ -Subworkflow Name                              -┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩ -│ bam_cnv_wisecondorx                           │ -│ bam_create_som_pon_gatk                       │ -│ bam_dedup_stats_samtools_umitools             │ -│ bam_docounts_contamination_angsd              │ -│ bam_markduplicates_picard                     │ -│ bam_markduplicates_samtools                   │ -│ bam_ngscheckmate                              │ -│ bam_qc_picard                                 │ -│ bam_rseqc                                     │ -[..truncated..] 
- - - - diff --git a/docs/images/nf-core-subworkflows-remove.svg b/docs/images/nf-core-subworkflows-remove.svg deleted file mode 100644 index 42c576f4c..000000000 --- a/docs/images/nf-core-subworkflows-remove.svg +++ /dev/null @@ -1,158 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core subworkflows remove bam_rseqc - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.13.1 - https://nf-co.re - - - -INFO     Removed files for 'rseqc/bamstat' and its dependencies 'rseqc/bamstat'.                     -INFO     Removed files for 'rseqc/inferexperiment' and its dependencies 'rseqc/inferexperiment'.     -INFO     Removed files for 'rseqc/innerdistance' and its dependencies 'rseqc/innerdistance'.         -INFO     Removed files for 'rseqc/junctionannotation' and its dependencies                           -'rseqc/junctionannotation'.                                                                 -INFO     Removed files for 'rseqc/junctionsaturation' and its dependencies                           -'rseqc/junctionsaturation'.                                                                 -INFO     Removed files for 'rseqc/readdistribution' and its dependencies 'rseqc/readdistribution'.   -INFO     Removed files for 'rseqc/readduplication' and its dependencies 'rseqc/readduplication'.     -INFO     Removed files for 'rseqc/tin' and its dependencies 'rseqc/tin'.                             
-INFO     Removed files for 'bam_rseqc' and its dependencies 'bam_rseqc, rseqc_bamstat,  -rseqc_inferexperiment, rseqc_innerdistance, rseqc_junctionannotation,  -rseqc_junctionsaturation, rseqc_readdistribution, rseqc_readduplication, rseqc_tin'.        - - - - diff --git a/docs/images/nf-core-subworkflows-test.svg b/docs/images/nf-core-subworkflows-test.svg deleted file mode 100644 index 9cda6beda..000000000 --- a/docs/images/nf-core-subworkflows-test.svg +++ /dev/null @@ -1,110 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core subworkflows test bam_rseqc --no-prompts - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.13.1 - https://nf-co.re - - - -INFO     Generating nf-test snapshot                                                                 - - - - diff --git a/docs/images/nf-core-subworkflows-update.svg b/docs/images/nf-core-subworkflows-update.svg deleted file mode 100644 index 3398899b7..000000000 --- a/docs/images/nf-core-subworkflows-update.svg +++ /dev/null @@ -1,126 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core subworkflows update --all --no-preview - -                                          ,--./,-. 
-          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.13.1 - https://nf-co.re - - - -INFO    'subworkflows/nf-core/bam_rseqc' is already up to date                                      -INFO    'subworkflows/nf-core/utils_nextflow_pipeline' is already up to date                        -INFO    'subworkflows/nf-core/utils_nfcore_pipeline' is already up to date                          -INFO    'subworkflows/nf-core/utils_nfvalidation_plugin' is already up to date                      -INFO     Updates complete ✨                                                                         - - - - diff --git a/docs/images/nf-core-sync.svg b/docs/images/nf-core-sync.svg deleted file mode 100644 index b2c6d734f..000000000 --- a/docs/images/nf-core-sync.svg +++ /dev/null @@ -1,145 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core sync - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.13.1 - https://nf-co.re - - -WARNING  Could not find GitHub authentication token. Some API requests may fail.                     
-INFO     Pipeline directory: /home/runner/work/tools/tools/tmp/nf-core-nextbigthing -INFO     Original pipeline repository branch is 'master' -INFO     Deleting all files in 'TEMPLATE' branch                                                     -INFO     Making a new template pipeline using pipeline variables                                     -INFO     Committed changes to 'TEMPLATE' branch                                                      -INFO     Checking out original branch: 'master' -INFO     Now try to merge the updates in to your pipeline:                                           -           cd /home/runner/work/tools/tools/tmp/nf-core-nextbigthing -           git merge TEMPLATE                                                                        - - - - diff --git a/docs/images/nfcore-tools_logo_dark.png b/docs/images/nfcore-tools_logo_dark.png deleted file mode 100644 index 1b9cc02b17522232a22912d2e069f87db0ed823e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 64937 zcmeFY`9GBX_Xj?fQ1&D(q!?7PB>OTdq9LU0WQmew%f64hh3-UVLfNJgvL$5SCJK`> zA!HwnkbU0<-!ty}?*0A#2j3q)^LSL~x?b0Lo%1};^E~Hud30G{n|;rrJqQGX9eq*r z3If4GMIac)SeW1^XB+Oyz(2d)FPeKI5FDRqf9PVxISwNbM-gbvb4GqilYODj-uPY< zTVL@qjV>IPW~-A{;W*8Eoy9)#^v%=tci3VQpIj8~EBHG?QY-B&UuoCaa>2VA4#&=k zyX({Sy{A}Pzsxtu1$gA{KO8e5q$PP?p3l=>zW({)wqpk$a@9+7)JYq+2XDVUlz!#~ zIz1q3qsO3q!n@fsEA+?JrD=z(?DCYAg*8_TYika$bmw!K-9C-szi$wZ`h(2>eRERt z|GuXA;{W{c4$wpbF<-|zGC*d(>qqMqqtZ~Dl{NbA9W?|)hPB9nLxEva|c9P2i9&AODy z*ZY*gDa7UlY_w>Zr%#BT6ZXS&Ad%9Iv}_U$&|$v&8nGDDwe}#Kb(Bt?<;$&*7_n(jgL!Tmaz<}H4lZP7b)?&Kb(QX)HEoZ7yQDXL@osSW1w8AQ^n=fRur|+= zZB|#(?jc5wLxI})NE_Wu7hOTFr6Kc(_0|OwcY;W;_i){C#Fw|Rf6Bo<=`(U=azq2M zwJtWdmeJA`5r6%0uCoV4|6As=6L{7h9fBn0A-gOUn*$EvlWojS3ecN{{_h%dxJK&I zyfa!-JUI=MkKCg!{nx&be+_w?)Nh0jeL;yolh9D@X3#1rK!4nRU+DP1ySZ!!HwR)X z#faW%h5Ht1+ig9FIsd=gBf-_+6Di_!tNEUFFDz#)2i>#O*?)@|;i1AAxfe(`^!(!o zsn2|*iJXbk45@nml`;&w%gx(&PZ7VN`^}Vf?XkDo(J9CwAb{{3b(Xmk*X1+Z#{mM}`&+4cnuAmWq?bQ6WB9 
zaa^SzMrnPiu5f`h)M|DsjNPGae*W$_+rOTeWu&1LOSQ(vvyF*v9|Z)VlvQfJTVF`Y zM}|J5bkmI}U5yr9dj&p-ez&kmfB1PV=fC$J{sQ1AMzW&cp_&qvpHZ%%U0)>I_}L=L z0EA+WfhxhP!(L(=s^w8DyNxg+RsY`hQ#t_W)gNCdz5xcOBWECZimFd-uL1n*zv-j+v;Lv9zG$1KQZ$`?UU5=04q10c^jtS z9`W873OnqzByTVn!JF~FkN~LG=8@Yzu@!>oc^=vo9MY|sy$O_Dv=8U1kD%`SFXW7T zJF}m1VqAQ@uH(PP+J2$|TX$X^d&lzgG+;LaZe8J!Z_k9E>x->b(e+uW5JPo#a-xCR zE0OT@Pg1;m3osEFj|-MG`#e2kFux8P627Om^aIQdMP=i)yn4fN)US{NY77yKkzBd~ z|KacJPSy>4{rv@{AiCBL_kDb*d}dpcs37Kl1HU1lSMtUS_nPy)#sBb5yJZ4P?LO(V zIRIb@asBSU-XbnuB}ufz-3d*|7QA>@7wg;HETvl9(S;dI|IagLxd8wbII1^RAb5Jc zAE|C}tAK?23Y$jH>!RQ36iA7g^pvR+GjKuD#F?+nQvBjLt^@LaXlLJPaBMYFy>--8 z5G|ubgU-E+jx>aOVi$0ewBQ;kb4n-uNggv_T4w6c4{Y>KwV|zLRchS**oEkf9{$1A zM3%y?Q7N3&&j#0UY)~8cr_LF-=cRJJ&VRztmO{;5G+jnx58e>ONd94~j&Ixsc4}0OUQOL*W;+6a!&Tq|9z2#al zh?7nC|0CRvI(36kh#QXDeCP(~EKc>9FBsuD#b9%BCZ&iUre+w)Xp{7QaP407gLsv<(; z+@D<*EoPze29h8om9)F=|L?BrH*d?Jx6JVsRgUbh5p4gf6&+th(9266n+)hJzb6F< zHunG7{)KbJe{*HWZ1i5tvDbgRMS#?Z67e^N)!ZcDp2PpUr#2SdJT<%y70j8o+W+f0 zwVX@-n?2zB4kQcVz2Tq2@-m>wzp8-Lx8+4!O8C!GOjECs73;mD!_M4{wg>)rM*5a1 zSfzoUws?FLQFrmr*3WG;qw==rb|?i+FMa&K@Mb(j9N-&VFt zmAETSW6|yeu?Na!N=Iz=&(XoK48(!Eaf|djiyqKZh~xO4F(8S zIfdS~@jvfGKG(1CHpgb`jE=SGLCfVx*_HjX;l3aS_2-I|!#d1kZNTK>V2<#{dgaz2 z_^i$u%V-<%=c1?j6-6qzaikJq@*m=CKTsX8-@xp_d}K)+ z2VGK(8oGF?Ec*X*jiy(3E|kK{%G&t;)7FWK7Rrq7TI)>?*Jc72F#e}OA|Hz5 ztW1bN|?jo@KdBojhpNU>% z-f?(F`o`G@K$4#>uU3KaSjG>TQf`N$Fny#^u`E3d0Oqr5^!OX-txXE(kh*qHh4DjO+aD360sV|C#Ix8^lXz@$?0WUQ}ut_ ze@FEhgu@war#&|WU#4i@R zjbEE?T1RoZ8kAUh&7G+B{m_4>H&=^MqmrM2WkTJJq2AORI0PdI3OE**FC?e<(Sz;6Jj%4DkZ!Wqk zqtq&|_^lmxwEg~I*@LsPp*{OZp57^dW={9C@pMV>%M%DK8o|aIy2)cH1zx(SyyoAe z{j`l^>{f=yQ#4I@qs9E)#<>XQ=2L;ofy@c?wweIb_k&AulL0PR{KdN-gNn54-@o$i z&doP+Q_8b}@GqCpwqT4OX08zw(>Ob3EF3W)Q$`_o-06(#zMlR zAxmhvQ1mUkcP1$;9dUpgB7aA5>88DDxd>jNzI#WPea}0r%RF|~vr>NUGR-8*sr!Nt z1TnPqvaNdytK+CvSp21eB6oh8NpF*YNzhp5|Cofqu9lOae2B(xzrShy{#Ngd1x106 zD%jDAIUVe242$D!Xq2@wqcm5x1a&g$MS_>nxi3b&6;3;Y^1`Nn0W zGfLDy=<~8~Rg;ET@TM(Rn=fOudAR|hC=C1F2hk`-u023*pG=9SJgNg%E_k&+h>zam=Z<`FP$ 
z_R)&Kbr-{n@iujnw|-73X$$7w>12}9^ec>Y=O4BztuGeOb>C+oOEZG*!xc(VDb#`L z5a&^6(dH%`t6D?J6YJZcLks$gTLIyIN5%G>ZD=~{3)mE}9*7{kr-h(zv3n&?_pWa9 z^n;y#Z&q^vKaEv1_BcgU(FsQ|{mO|_QLmhs4!!*^tb+iwCcGnP{vFxQ9B;8K}M6CnqW-y=7YQEzcmOeA( zvv^kls3*Y5($bD{{*KPs#b=1i(5FVK-<{1Go!+hpk5Y46*&=nWSe(UR!A8`^hw)|tNQWhKtw>83=%pvA7H~JP~rF>uj|tL??j4pB|DJNJ)Oxz z&eK*-fNl=U|Cr&CQ;na?Pn zl(O%{prhJwt?_{NQePs40o`A$sOJ09>%dD@nkARO5=kO+o6Fjn1OF7hAmfa_?nH;v zeK+i~hVW3=wk*fFE{j0&>S7;bt)B9Tq*VD|f8=P2ZHv3v@o9d(wVAZpcg^z1TJ5Wu z%D|rLk+hM%?6McltMkL5TT9VXe%<1(J@b!9*2~nUxQ2%2+q?7sb{2znTkmn3Ctgy3 z={SgDj9g3Ne7 z^2F)}2-;kh>3k6|7`9wB+PjdZUGpnOYR24EDtS&erDniqvU%ILlaF+(>z$sxJtu$V zqW{|bnUdt1^*0)TNClC#&ClaXHKul-fCi=A0j8hF-<`;wJmdF|GmvOt`Zy#W?VF`- zO#KP~Q3p|R_yW*JGUYc?O6RU|`PHC@*KcnvY=?{!Sf`clZgWbg9N)+>%dYqO3ef+359xQ++Gal!gOi${hxYNmGSsEhII%zL8y^A*)PAqZxc zuwyoWgP$E%U9X_XEAH4o4cl4eDHAO9M!D{$(dSSoIF+xwSj-Fn4yXFAlQ>1o2y z+_pUmTUdrF{VAc$S$9WwVjvxX$I&$H@DaRS4e6M>q=Pa|vz%YsLRK`1c9+)Iv?`ZQ zd}Q$LM6Il*j5N1C$gWCG>myZ85fqX$78e|5bmo77OB-~U>6l=6fhjbh)x#2OWtRIh z!Y+!D_Dq*GK>Hyl&cD!yj*f=2}3E@3EFTO2z0c;!yTc472rqp2k6 zWct>Jnvl*0!76fD+)6;fJ#>gXF%LA4L@WmbTeI0X0>SoRB6V`74oI<(o2dZbjkb)Z&P}P+a2TNSKKa978EawIP z$`{A4{TyD&F|OXz^McZerTRmfe(*YA>kcAAQ5>9a1DdpXr*{{ofUaH~??(!r5TIwu z1RVniQpY=-CR$;G9|*HQX^#6=f!g>sYO~q+xZ2-w!+(1gmw0>F(PHRe&$7CDp-5BH zlpMR+wRo{L;3Ypjh>opUD=WqrSkn?4bdo5NR`}nfAo4xva0#L?x;VbMHM}#rQ?%b) znUU>0n0J&&dZV%yqT3~#I((3svu`7ELp@O1MtQv!nTY>gu1o3)=?Qy`_gzxHke#*t zobnB_|BK;eJ1EEQ@*yX_pmz8;<()y%7Zj+J&~YyJV!($Ic&-FT)x+W=3rOYb8_7K@ zQ#I6Grl^3=)eQ?pmZ&+SPJs>TEAe7}<#{q~ZM=TLWjA@BXB_l7DWHb3ueixKqvxe` zMrjYg7`8Z-)MI;hw7(K)|0~LjIVOhTGuuNgT75~#@CR)XvC+4 z4x~SttI+o>`bMgra1*rYv~BmgR<0X#%}kl)y)vXtPKV_X+fHk4W*~*Wdk$*5na*7( z@$W6b`f?2O5In*Z(@}cgwe~#x5-0JU9fRJxfW~55G<6XCFUj^zqWIt!c zQKJJ2=di8kFk7Gpzn26QdKi|c-lGK@v*c+Hl5w2?sWI4k?4=DX-wPL4ptp|qoo77! z!+unCbe?3rq^B|ut_hi6Dc<}hoK8)8F*PnZvs6!8lcx6$)$Ao5+wr$6B*F#sI|=A7 z6yiA$o1nWg6%KDZmpA!Vz4kiN{Pl`U(8N}gjk~d1H|b+u$Z%7GPC-}pS{U(z} ztwP9u2kUmJdT7t++E+|(eBHdDVN7y^bcv_+TL+;&HJep+UBq^^D$$I!eFp51jJCc? 
zQ)!3t^gyLKzJcdIrCdc@n=7+AW)$97c(=K|`kGI2rq{<3duM$p-ljCO(CdTOyFCYW z=R=qEx0vJ=Vcm5PVNX4F7^Q}B#9;3)3lOSCm@ ztMtT_GQ1*C{JQhdnUOQWHM!_tIO_HBQ#rXBq>x`1n5)Elu`J4a-pEr+)Pk6QOxIA^JP0e(KXwuE|`VWtG3{__00i}S3YpbVfy5ZZum;KC1H zJ0kSJY2(D{g?uR1*U~fynTuagf{_KBIrXH@#3xO(IRXqwz|F`18B1uPL2TeC#%Z35 zFXgU(YfFc)#nsWh!zf9i}W{u zd+uC&PN8XOF+lgM7mN_AyjW=4Kfs&pN|>*{JJ4K9pqj}$r**+-Oq z9tBeMQy#eb&pArub5oJ7Ys#8}9Af^aKtctOU#-ZRE6U7lFIFG`0`M5fdDW_ke`bR_ zjDn@q(^U66*!C+ZS!vMZN)g?Fp*3!~?Hjk%pzY)XtzaYh;{T)VEM$BfM@xpK4l^cp zrw}gCSe0JmcM+t~U$DxLc6frV#+km1P;D%~RlY=;N{GKN_L^bwF4!75)7Yh)BuHyz zr3C2p?_R~SLKQn^qC4M&4tejzb>Ifg%Geo60o!|lM~N=zty9DXoazTqe~VtG?-UN( zkH>}hh?N~B8ezwg9p$t&kJ6?MUIS8D*u;OW@~;-`x7YYl=`30m zO0!n_d>=0ssMx1)+7lyv|7Ql6B{}t;RLuKa3rzN6mVgQ@BFytb8N$M69dPT5P-XROe_Xl_1 zD{q0SZjVMKQR=`$^~f451<(0=nU*PUA3|M{BRcECnCaWje8@I!`za*}ym;Tr656Cn zMYRm-?p;QiA{>PE@+>b5`_L^cOyDKJfzO&T((?DIwvR9ByRrZzg9v{3|E@A@;4Rz<2Ta|1ztE7p#5In;-kWS-PSCr4% zZOcJ>pe8aMOd4CZl3`>eb$zxq+ENr%0D6Aha9ZZvy3Owj0R@{kHIA1_W(q z4cub$!_rc(sJ~F)B3e6iLcjrRvPV}76Ez;B(jxHQ$MuxJO9R0Jd3uw8A1n~*9o_jj zu;x=$PXC=GOJ&ALTCbu%0`*%r{5T~HsSemFF;Z^ys5+rBkV0wxr)fKo1AVz27Fw&8 z3>s>9w=Q65-IagvI70e14JomP*P16TVrML25M!p?NCYo)AlI6azR#Lz6Ai-IzRR?( z4+agr(5xfTKs3peZ)u(gG`169WT^In|8Lr19pT}kL>j}s9dTEo6RwyHaHvC1OYW2# z%*U}0mMYktxyYpzJK?o1WDm5dST8Xb7-TZ*Fozk!zyOgU^EXcjhbS$xX8R-oOK`l`(pIO-}*r1to^ zyoS-;qLpAVg-A06KDKWvRf+zz>Etb}FEkQp z&?Y-#gxOsu4=7RV0hlPJlHrA+N4ipRxEG7x^<#oju*?AKN`CLM5^zU0)YNWpAHD~x8*;i6hn7?O762%8|>0#8?9E6Zt>!AKdA zGYia($+%%IWyVaiW^zK8+03y7#BpphZPSFYRU!>SV|ANol+nKd#!}=Uzy=fDz?4N^ zEC+mc_VGiM3avQpCWMv-RE8u9JFPOr?X-Se-xpeuI*jk@O}>l_rzk*F%*P4d?F(uykl(@_0BwS z6pW!{!@xhsi-R0_&0HD5rUE{y&g#GO<4W$CNT$Kd|J=h?1315cZK6>Oz>u&9k`S`b zyMeMcABH>fV*dW){AUNYJRp_6LMoX#IgcYICch6Y_Fnl18f;Yx zZ!8)drgi4?f$MfP`Ja>9*+b#Us!~SVDVGxhw#nwrtndJg9Q4!0$bY85{+05E$_zP+ zRbWx}UFa^pqcL(rA9oRSQsoC5?po8V&nXf_(+C))8b_%Cu!@zX(+04`BA|{~c8(`< zWN$wZZ1Sgp;{8opoA{5c{&%EK+rhGE*?TVz+jdDQ0mQ>tfRZ>wVdqG#qxJFCTq4sH z(ru@Dd5r*BZ3{amio+5zkg%vu%2Hy|_Z;Gm441DJu2 
zGyTqdaGf-WVKaTz=%m}(X&Xr%K`NC)^N<+VGtb_6n*~dM&!CO!r$Nq|K+FTN zkm}6tCe5@y2{6P`6w8g>V`s79xJiTg7yTn!!#0Ou278Dc zTz3&N&k{RHnv6!M^ zvX$722lC#wnVH{0a)SLw)Dt(tvS3EvgR3n5PucK&S5JcAMD`00tRTBO{zU= zGD@cO=DpEAluw7n#RteL?Vb9*8&gTg5dxO^0(y6)iS|(NuZ2v%Awe}A8(Opq#Hh>E zjX3Lqbn#o_&`icn)4d(GR3kR4t{weB#H)`l8D{h;V=0%*nUnOZt=8uF3_OyPqa&6*S$#_aDav))vWx_L{XZo4t&A?M)M{HCA{jL4V#eKVT>y!);0mq3p$g(rU zYaG91{)Vs>BR5)u>(*BXHW~&P6hq4F*@aIn9f@yve3lbgCPf@rl$?1yTKWo7U>R8^ z)HfQtaQAhur)$TI{|Ke?<_5NMl_C!CXlN7PUM#Lb-_^!e@Rl6p?i7O@*AzSgW4|5c z*bGb;_LJ{{-~s3&#x#IbQ>MonJw4YImVEL&A}!o3r;u@={^l;1yOrhyY0QGlk25@} zyuzG}IItHYYg+y}jI~u_A?MV}q`+*SfYhlRlZrN1flxg!8*GjB+#FpS&zN=|!repv zS)k;&OP%yN56kjUW&y#}(9JnFP!Y^+65ISzbbT*eTWo`QYB5W3Ej`JuhHf@kH>hHtQR0qEVF9#DPEwf{yE)^XDor9t|be-j5 zR}k$hRA3}H>Xj~ly4dVBu&P}4!}Hw3g>TQ!oQ|6&g`O)9eI`n)nN?`i3koSPe&snM zBO*1eguip{%!isPU!|{(zwVGSNf!+O^iN&aM&BVrv92!$^Ov491V9d2Vy$qLiZ+RL zwuPGQ!Xn|mZIe#qTKxEt#?4z;^-J@wp!D7Qj6GMaEH{Ex^|<|I?Se*b-W1xptEU@i zuNb+24jCBs++5o8W&lrNZrv)}~WAi?GMJxqw4 zFwx!C^G<)h9tjPluh^Q|KFmWcHzoYz)?l_E-}3F6l4oCy>?{kfV>R3YI>K?M-fZ}t z{v0OP9;j-AogQ8Tntjot)-vi)gp`_gQCLYKg{b$@e;0s_q+tgi!n9ghT}%gMaFP%A zZCpPpQUC7yHrKucN-!Gbq@P4QJ+@qJEFB4w{1(bdWb2Cr?2Rw4{&R zsCjizvWlqIK&nSsPb+2IdOPT=9pK0SFByPfcTf5aHK8*j(AJ?U*d}Y`;ioRer<4I& zw-*hkh<%`~r7Hs*U(}rH8@=y5gbZuHg2;N?f9@y#;=C}rS%TkjAHUvw3lg$mTqs>p znyaeycM)0?P@*`V5ejczVJYe`Vc>}6xatics7*S)rs!9uypo}^G`g8?m>&ayx6z$1 z2MstI0|4@cocQ>8OK_#&5;xO~0r1fvS4w)6&dPm1^o z^>H~q4}`8rdGoybVhMYf6-lCebov`kMd$wiyzS}8?D5?XuLPRKeWFPl3xD}?H~PAiQ6%hn@v@V8KAY(HV!$9_J!iunqAQSDHPvJudfRO zm{f48*xPy!llb%Wg5E;b#scpKj&J2^3Dzr!{&QkV_0k>>HN|EJ;0rzaVx|+NN+4L!I;HFE3-Q~t zN1bpM=+^~polMa~OG-kJ`jV%Oa08d_R*anZGDh8o_uQXbc?Yj-kFFKn=ekaiyUudw zYx>2QlR_8X>^rfg)e@6stb5^mkwwuni<kp<7fK8 zz!ltfsQ z&^SfLEdev9yni1b-IB3V;$7HpqxA(T}lT@Sl zJ?aH~C#ew^NxjOC3L6d1R((~BnlwLL`l6ERY0ssl@?;@0R`!t?#utlc`!Sr;;>F|c z!KJlg!5lN_A%e$Y7BHUQS;MlfGtVZTJ8;cj;LV;^B#NmtA}EV=Ja_J(wl4!8L&@7! 
z;kr1{xx-<3mZM`IO~{Q$Ba6+Mxx11x%U%p)LW`OWy$x2X1n}lXL(I+`N@KFwZ0g;` z!^^Ce+bhrT9}0J8v&qP$wBMFps-=rTwPwHk6e+%{8Xk0)^th1~6>fPG`%1B{st*NG z{%DfwAMe7?%^forD>^qwP{H^zWZ!9Pq~G^Brx5eaX|Ij8^;Vg-Sk5v&Fm0)fpdFU_ zWVkU*iB(ZtZ)hTnli66B#fr2vUsaD66)W*wt-@cs^2{m5JZp7!bb8f9G3#^8NYm{tF zva2;UHP;(*Ppu2s?l-&6uf0qfBn0;LY&z}b6R(l)u0JH;TF|aUXlOLu)wJrzGrKPP zsD?4<6icdhE(1FB4Z4!WPtKn>%1gO$G3JZMjn)C7?SrYjneASOW=ov*hVCjo&XStj z5#~^th7Q`hmPIsdVS4wT<5&S_b`FD4`>~YH>N_KCGk#kIpEC?&8otJi=9c~bnJTzJ z*xpi~aP65J$j(_X>S_ye8{N&UcSq5uMqpHz2#kPvcSEi`6kEoSt=tC_V|qv0ME zRSR<;sump4-Tl}%cluwfMo&fZZMttn2~S52$4<3a4W$;92nDCtMzi()=>2+VUTm^i zRLXyBTjhi^BJV{)V%ee9i`gx`wuiMfH66Ul{;IPKi%yLd8>aY?afCFdy#t!4B}e_M zp+2PCZU0?sn`F%%S(&rPW+llku+>AQvMi|*^)DRMOE)Ho6!D6F`cr4vkp-Mid+|(G zXjG9!)(k!QtL!6E+Xume?+p{D&+y%EQJ&xu+K|2>@bYi{1Qx*x>S$DVLF0;kNwW-| zQ#EN80Y#NUl#ed556zbNLE{A3Lcvtw5N(c0ToY_GZ+oHf8*3jr- zcU4l)mL?W8KNi|x8S=q}d7y8;Fm-#n#y6_g$X;)id;AQq)SH>~UpLRso3o#p;k36V5~xWZ^(2qW?@8c7WTDZOgiqOFQnod`K`efz37_H-_&v49Pd6i3pNh`yDHpGheV}_%N^fX{5?$1PJNZID@!vt+qzugC zd(z!@N!s<1mNP4&z@6p2%9uv_9DuW2O*w_k9J2jFJWj);tAS)5CsEQ z{CVt+=-!g0N9+nzN{va7TSI6bI$MeEB2S3s`lSbg+TZZrSD zE&9B~ghcDP!E@n(+P`9q@jD|t#Isi)I^kYvbL$m~vgx3##m zEDld4d2 z1Y-gxyn@1cNuM;0__;VMg7P1Cd=O~4tCCLIawGOeSF8F~S*&G0)h$N!=<@&u&)!?pAt4_o|Sx(@D9E zjlU*83X%7aiy(6a@3$~bob?8-KSaJqxge;QJa`kcyYu<#HP^WTJp>pTCtrYd?p;u?6^%gp67f|dYyfOYcozXpEHPY zbvvi9@vWT5IVz1c+69U%Ph57W7&M$?Kx;TP3 zSLNN$%#KkOCy(S!7ME7CuS(ZR-0SPOEgO0xqJigNpnY5Vvn|MJ^YdGjUjGA8n>=(K zh;3=8;a{MJ=XoFC=H_OF2-8tdFy%Gw&BV$+GGMOe;I~V@pjCT?oK2W$MQ(jN8Y!#f zg#pd+qH=8E(8MXBjXGYP8m7vx%8%K;ohGP*6r4XAIbFyhEIjI^l|6|=;7g#qorUst zz>>MLvi%n7XY^rD9*irUYZo9Le>5~?yfmbD`8U@%lrJXg%LZ&f&^9K$gAt9MdPJ5+vaH}}Nz~x*qUd@M~AQk8a zv~LKG_f3QerlW&$yq|jDL{EcDUWm@EgoSF}5IkrI-)W*UHE!V7mFQPhOyBd3e}2$G z7vOb`ODozuoXh-WYA(Zpay~!1nlGpYJ$+I?gDU5$hHbQS?fTSW#*u*aKu*hmmG@Dd zoTMkUPi$rTPhq>3GcK;(oH_7p$8IGZ! 
zV}sUx>L8NN*)2)CLO4px-4spl%$wn*$tSw|2evyKaxFrC+v)0OD39v8AyU+p)Hc3A+*FwTUz((r zy=uRe!bzp~r_<9s>H6%*Qyo00i6nV|qYK#vtBP2xQ3W&xCfp_c^ z?uS*vr^gs|hL9fR$C0rY2>1+i4j=d8aVRgKs}dpc_$O3_m}_H$-!XdcA?E4ZMb=d|O-!W9v2ho*so2kFFm;5A1U(<41cEwqu zE$lh)FGrHWmamLEO>266Zey~r=n`xTt63stL{|>qBdHFpeqS)L9Q8OtAL0)#G3`59 z{K`o#VTD18x&Kl~4R_4nSzlOW^5=ZvksRMUA;UsIGxa+PKmmOt*vr#k!(eR>F|tVb z6wVMzmvl4vLOqr<2&FfaN)tf^%~ze?1+4@UJ{yf1zZ2T{rWakY_wt;hZkmWHlWh{AB{YzV$u%)6=Sg3)dA{hoY{1FO(<0%S>xeM zf$SqeXf{ND>wvJIkuRf;A{Eua3`Dg7jc@S8?9}>^ESTW>Mrt zhOdPxDq#vGhrS(Va_><~geaDc59(U8QYGALF`RrMUQ015)6|mC(MNX(+TD+Z8A3PW zJNW$v<{yn2NwXDX7ZKK6`z%C09QyW+um&5l5eemYvHr0)Ml#oZZ;YX@>?|MrG$1xJ zA|f&enzLP7+_Ax$7jb^OXXD>a3fC=|kV(C}RXtjfqIyFEbX!MagB>oFZmiLWc7Hu{ zb&BZRBiPgxYbJhsf!EAEVN$@3U7Q=1t7Y7N+_RNXa=wM-G~Sj=Kxem5Z-UI^o(+15 zPh=`aNWIhP%8&+1;q?=lZG0K+?M@b0X~{6?M~w|nMz1Jc`0rf+qAmw&?3LRlHS0-} z*z2n~GL!X!bfoMnT^TumKYA+t+Tt2#%O>lhn0t*waRThtE_re8D;Un6S;*&FtrS7-Ivw^yPPd zPY1~s99g+oIB+BLLBL#FT#DBh%@20p8(z$4;KKX-#}kB2nT}*X44zc`Nk{z+rJNax zfUjA2S0XSlIjo!3hetsNZ7UVr8#nAaP= zsqSZ`MtMj4y(&SL14Gw8^XhExQ|ts>z7fXM=-~V^y}9y7L<_5Hm9?WKOE``e;P8p)u7u9F`S?rr#=fpotmm%zCwWxfq%4b|wLUV3V)Zd_PlS3g%2 zBmb{JmF=MQMwR~A$U6O9Sp{;}d1l3-PRAxJe~^M@R0s&ZI3U7#}Ly{HD4m?Y} zSzvDP;395SfFhP6t8@;zQbDN5(YQEu4Fr`EnEhr(lDYr7z}QXXR}4k^3L8b=+*r)y8T`efi$l($KF%Sl0_WHgoFyDoc1bUq%20cGWVFiUwz}v!9AJ{ngDFnTnDYl+L&STi2toA z!t~ary;>T&GP1>Tx8DnIM9k)gl5tdG*Z^Zle*3M2SR_0n3UI>hj$*Z(TVf}VK$1_` z;+9Adb`$cD9sfjwW1zj1e6_wOfz7vh^ z{SvjQV)i?;7NEv`P=;o1Z>2pjm)A~Z3B-!d9VB$N*n4mztn!DlJCrna_R|gM$3Ep&J01k_LQ%0MnwU z4kNIR$Xf9A81MZt{-R+s>FAu;k3zyH^@ex}CEtf7$7{sfkf>Va`)mfF0#ZwwCqRMb zoQassTYG>Pd}^@h?+a6mJKVS2z~>5Kg<%2}Yl|CT`32(>$Cqy34Jc-}l6_{ccVCR8 zdV{F8SzcumQTs$;oBD%+gjytLRh^}Bli2~w_NGBl_ngCQ7~l1^d2fm6FPlX1#B9EV z$z)`zJ%$q)j+^_0F3yjA_L}V@u=b?;gmm>K>m07k*K!1W1ujTP=6owM1+m3{`Gf}3 zpgVcNV#-)NkIuwAeC*?X@*xNq^ULGZJ9Pc*l`q?GE$6bxuz>4tl1B*gzJ}*f?W0$H zGS}^)2%F45l4bDy_$5PPu8N^1xrD!bwK_XGd7h7XYmPa@Ik-r`Z;G;bXXJ0^&DPtp zB@8v^ssr36b@YUc--zXEI#>nU!erB}BIJUm=EM4`T56tQ{<$}k^yC{fQX6C=mq1t2 
zSX;KYel+ZAWNM?I^UHQ?Z<%NC5|x)RmsU@%Mg%`bj*?d72yl>!{Nj}a5mlKLBHtvZ zxqfaby~Qznlm~$7CRn`qj-l+y04iv_Z^p*^lgCrzIi*q?d{Ro)fbyr8MThhtCdLHT zFzOwN_;-1bJ^Ko;Zh8|O9o`}khOpP#lDE{%<&14OSFiAZca9Oy`-wy~$ z#s1BfU=rAV7NCMKwq!0;a3Sl^d^nZ>=2%0}Zwb6H1qY@8JJXt+MDI9MqAf9Opkpr4=mWIQn|}}GpnI-VvsCpK34Rq-FnIWl{e-{ zAJ_es0GegBxs2F7@Ps}(E0sw5T}QG*eear+u7;rIXnjvJcX+an94UB!jT+|7!n#xT zZnyAokH?P?${<9VZ#qzWl5Pr|e{;QgS91Yq*VMMK6^n4L7&U<3=~*$O z1>QcH>JQz{k-$OxeviGQjyV0D9>MnIaz_AN<#a?{l=bgG;`w)n{boO>ri5&~z=fN5 zAF@2`w+-}jF(>`-eUsEKSGfduo0P%FY?S+;0t~GiOE<_rdLy@TdAMU<>Ec$|$Sb*? z5k`+J13eyibHr?VIVF5@rUB^p2-B(wa^@uK(()q)X0)$8-qeZkYx13mu3r?of9jTd+_IE0(% zR=d zOW^$dpxmTLx)q@)!$7*h*mDP*ZvPd*_5)ViPfn~;?mvjr#d{i&-NbE{BMAdrxsG~j zvp*iqQtU>48dzdQPgr}$BVU3F}LH=gUUkdJYeHIs;IkBZGuxhzIX?}HrgME|d zVeB9xsF(MRrY4il13~W0#|mc5TyROpfh% z`#V40h}e+JaM30b-hi6;OS&7jt9g4^o*ZK*FiEN7P@D5NT>bfoXVBheb1m};Gjsp( z%ICKjX3fEmS2$^BEZ34iWfVeR1RAy|TE-%ep7&s!=XWo`gX+g-?cg#wU&&b+_;;aF zNN?zBu14dQr?ZN-%Y}szXl34@pg*O1cerD&;FRTjmwi-sYf#6J*NqDuY!ju-V|BD4 z1a0$ba5^RA-TI#6sF2Vw;Ljd2P}qFzid@39u>JI3vX4x`eVM7UkN6nyADT~VbrZB~ zaT{hFk?GXP;uUF&Q9W1A+A(XI(JG;GU++oGSJ@x4l>cgf1jCG>Wz)sr`(Tac!!h<- z_#$X~>qW-$iyEh8F~h*ZY!L>ik&5>$jtKA`D0`axtPGPF~`6RK~}Y!+h23d9noJ#FO~eCb0uRU6-p zq)41QAX3%)T;-g#BqA4KeRgZrY&&_`#_-ODH_eap`m%kF21gJmy54>FTk6jVAX!qE zDA_09(Ya5s59p!%(YxAuPY}d0;EBfC(t#<-g65V~@xRBdgoK#3_V9-Ms<_e?tm*KR zCeq9O=>Ull7q=@@sKg*{{{r&~gz!yfcb-`nG6(rpi2g2?+FL4X+3cP1l3RPAkTo@K zO9Y-+<0%&3@F55I=_Y&kZ;4UUW`~e6K_4owwK; z&&o=SI(Ol&%Wb}^`o%j4KBhOcn|x)0zu>V1CAx4rZKKqgVGSoKqY2UBOmaxJXGBs& z@B@exh-w~_otHP4R3|9$K40vJuaU^Rh{a=mll{|{I%9;;U zwX9G2n{a7$S{SUurmu2o6;jl7w=h)yeC0szgipYCjek9XT+hBIowX5sNt2D%XJ~Cv zA5-4Z`rA8@{=c>1$YR;xupdh)zVhor9z`s^xV%Jyungj~Q_KQJ4@W~kx5?OM2CDB) zI*WSr3n`mkKA@kHkO=b_5bfMCEi3xAMcxsI{1{6gQ}a7QWAc=`hx)$=y7l#-B^9_~ zMi8Vhga6epcy4QTV)kQ@c+vdf4!V=B%Kr96W3>BlaMc}-kr+amlI#*2BROG|i?3I55TtQ) zW%s}Bn$UU3d&UVaUMRf??#zsstqV2|7dsey+a#7%5wxL*?~yU~idLfhiiwY~*Pe6Y zL<7}Lp*61rT;t<&1)*_t4G2yP=C1IH{I!{&lS>I(%PecS#Z-q>SPmc+PcK;1eEMNA 
zZkw%=b(NX??r7xt{ERi6P&XPhyZ-E1YDGSz=GSHtSc#qd0tQtfs3UMVA3TqZQ0?-W zCo19DcFyHD_TF#NCyT=|utxRoi8?}lappOU_BY|K@vtM9*W zcs*_wQCj=;uI;WhPj+!ks!ph+q@_cwhoIz9lBusH#s<#%O(u@g)=ZauWV0veh+nw( z5n(r=3C`RMDI%(hqyD@T|Ngk5%p@Z-qa-9`U9&PPWJIht~eS3T%`U+?oe=XsvzdCu!~0ZzFw<|LdY zG}^=-nwp*VLs21bZ~s=EY%V9MAPJ!MrS)vGX9<^aTt$zwVL(?QU13>^YPU%wW%c9J zUHz&B)#UJaC8S}6a{*H}roJU0Cv`w32}c`VIrg5DgIjnDU+WW4zoD*Ip)z}Q%Uv9T z_hu8PLhMb&-Q{{4;j4UB9@9RUTPIcK3!eUM!3q}vV`9ZD6LDr%Jtw+3o%yvhXGrmY zJL%Pgug++k`9O6A)}>+KTXU;IBXK>EY-26E+;*%_n>aY)hb0bTvop`Mv(kDlP=39j z9^*;2&4d%3GsrO0kpG>tcC4VvMfgu8hTt2YAd1IFl>IdeG^bCytLhFYeGdm(y(6%_ zL{djNLV}0FLVvT*+p*-QWRR0)dwV{ye+)ieC&kUEl8I;l@D+k&vW0UQw*PBCNdSq7 z0)1~P^I$JFBs``5{b(n`*B|Q7lIx^OR`@=LXL-jbGyisyr4HI( zIkeSlmA8fxzy^45s;^6XaC0_eb80~`0i`pWHQc=-A(fWUc`7)=>Xso%Ot#8b(?8RO z=xD#noI);3?J4BU{;^#m;pIT)9j)g{K}@fV2eVH;)0R1&sTFnTScXMvJ+J9Ykv$zH zuO72~cMX+@LLr<-bK?Cdt}^cj;*UK{tVk>AHPWq{WQF`PV>dWOcw;#&{(vtLCW%j#t zJhLj+#9iQcCOfPS3b-O-tz-VwZxKJn8Fts98-B(41uBA~G88}g$l<*sq(b4HFdB)w zHn}0|geEhkq{w_hzjXM*S#!$IeMq(X&m=uwiYc0}*;Q-#?%dH#{N5;LbPg-woCV}1 zY6r+>MNE-wQKpk2@MG=Ys#dqas01mC8`psOB`4iWe#9iSAPvyebRkurV@dJ8CshKj zToT~hp1X-65*tk!%)NXSA1Ei~CG{an2Dg+1x$T){5Afdc!z8|o!;2_;AI@vR`{C$fmI z8sP~kYb<|4DaQZ!*O}C_;SU0Bh?aSTUxmZO(RG~LJll)%==f241w*mpnbl1L1AV4g zN@s5JHjNhhyVIAQg)=3+CKXLc9)Tjj+xT7`^6xXE6`HjtOF5OSm93Bvn#@KXvSR}K zy5rmhHH+;T)f6L}^BP(>h5y$^6e$rgkO% z^c4O03;FCFt?k!41}9YjK|glfTd=k}Wo&cu49dx>RI_QTRT*Uz$?~j=5qEK(?K+xY z;fIO#zF)tzx7%kk3odR3gT}9&$pi<6{DyWs2=ujnHiIq*3Hhti<5yLVaH9D-lYuO{ z#1eZp9Bcs1u2Cws6Ky2(k938aZbDnO3L*DnG-V4Bc{liS=_#on;29wwCeY)?*u)Q2 zpToN@A~{10z$rV3_3r|y%*04S;SsXX4pruPi>Igvt|}Yo{nZl0T*38EX4Ro2CS&r;CG>Ak ze9duSN83ZaM}tS+o~}=dhmSsU$8oVMw_lp7*G1IzqzY@`!va^T<}-Gu2D~obXMh4d zUDcr9ZZ19~ycDUi(}fhK%cTOxw13}cdmclc@2ZD6byVMqngwx4Uaweitwz1Bf=^<9 z)};YhNqvj>um`D|AY+FLMbYr2(GVDSNFET0OSn()skOZ@nsfYv<-Mgp5GtiK_hxkF ziLRF%m->cOG9?UhWo}fW$Ty6<__#9*G@qxj61?;@Gzz#yeZd_Hp*fY5vo*S+429gB z#hmb{KLyXO)q|fy_`mE5gWlhRMkAKJcF0pz|6C0u=djw55#90{TdxY(Jo}Ajr7Ep4 
z)79&7Qo`P8Gk?QBLFQG0uFLBl*rMhUlpL*Sr$_vBymbTWW@CpeEoT#6R_!r1-IdDL z$6v(to6>-+rXIXF$(<*PawnBjA40Ue3qxfp;K*;Px#c=OQ5s4!ZD&0&8>~t0EA3^UlOy@KrXO0dI z1jz>88ET%~B3j+$uUhO{QXzMx5S8DNN<}4yzghyY8kBGsW8oSj@1Coe@VFRy-!kM?1WEBcjU4cZqMs?BZIrI2m2WIJjb@DRgfyq zt2C@6s#SNg-B&?)`}8?(kGAsZi9HUx$G)AJ*c~IzH%iM~=1yV~-MpJ!$RY{n@)r3L zCY0$k3WQ76g6n$Fsb_`8BGn4lydc>d5I2)cr@Wx}T!@iC(hty>Or0+fBUK9-USe`wP&@J zQqi@mHu!~da@v}OiFScK9P0f-PU!U5fhIQ@#45~343>gtAXwTO%ht}}bRgvJA%+bOSmg_@eV zTD(Jxb=v(`2K@;c6TitF&*`s%5X@1pSJyn?;kE65PUsR@|GF-%NeL_@$j_g=1B51NH@_-xy>}5rN$4$J z`fr#nL@iR{+E&>U%LQ_RfYL(n&0kZ%Yvl756{X%n4+cZidw;mOgCGX0sY4ca*(tQxz#d!ayH1V--}sjN&-QDA2GX zI~0^w#3WEhKDK;1;)mzg&JPeg-lS6IAIPjYp{ll+eArK{@{=jIF{XY!1==yEpgQ6C zlLa2MFb}ompJDhLx==v7pytHeQkt%(s)5UQA+%;2K*3L1Oi9VX7tab9;YdoS2=wNc~m?1lG-xI$f{`D1SHw{V3eHxqt%2-G9#17}85{9%b} zE)VNgs$zVR^WSPrIY}QBe(y8`ceB-gmtNwNc`tM{+`O;3G-o{Q?)y2pAm=rf}{A%R1aKg2;}hEsP& z%9pPX`yHtF_=SFB>~Tk?32Pj@Gg%30+wu@%juU10@E!S~W@JnE7JrLuTJN&vd5Tby z?rrH|WMEVQAU2nkRqx}kofY|r@Q=$xN%_~AJqroJty6(A3wXeuo-rr-YL|DONL<9e z$FFWY)Cr0NTUd1>SUWBgI#h~&+4z|;#VD4W0c&C$%Ql0J3}$O3;mZ{AX4$xyE}`Mu z7kk>O-*v~PF69-ByY3KiNk%osJx9{}`PRG2r5i7-Vz$;v=u%o`@ih;6n+G;!Ytr|; zI@giMcsl+3-i&<|U~g}gd;lInnrxTytx5IC<-Ca#UbEWJliLu$m*!3z<<&XIL=~2G z+)H=dqK`tXg5Y@lJ<6=wBL)@7JZ$E!lLz>e@(}oaBiMPjyza-(HF3c&xI%YKm zeFIDr`1?lg2|n@Ljk)Jgf*<}4r(oRYPhKyNtvSob?Xv~#fa{y6c_Idb;ORU~7kYYY zBG_4yns2Zqe8DwWJb{Y z7XpuH0aw0E5qsES4f!jYd=b z-Eq?!Qf8l@O?dF_m0|}-C$l)~^=K~YdlR32m6kz7YL>ll-7YkesS*QGv6JA9we-cdlN$=Kj1?!@<^Cf9{x%-!wj!4eB zb%WKr`y|?QpLAK)R%IhgW2lGf$m_=1C%sB#=%2gqn-M)(HJV4so-_((E@6dI!U9G} z;P?oab25}Gd#v+B6F`Jf;afRd*j(eQnrJ?8{O2{qJkcE>e~fdhpT+@6E$6sl(r5R{ z`-0G>x6A_pt?r_uJz+{#&j<2TU0V_S<_kpwlzbCq{wg)>n#*Y5aY+?Q19Zb`LqWz7p6L=Z>7OB@$kd zIhI)-A0WSBs@}YXMPVJhu#)w9&TF;!@=ux~cKICo{j2I*MTX?Bqdu_!CW%-*1XCxF z=Od^FiyfaNhb)YW^v+MWC>WA6)1lt3xn>wEobv<1Q9McSucAz1LTJIFIYDU{4!4=L zN!c4!Qo5*Ge=9`oDwL{L`RKzPPqWRPwGCu|QR*s$1Q%M~l3h;|HH4!^f;yf?kiaS^tcd{C;d_38;F;m{B3Rp!~p??b|49-GXUbsz&nno;5jefTa>pdN*^>M)5E@+SrZIrm@zKz+*iZfKs&b~{p 
zZ`FUVoQvHph6p7rGpOMV&UD{@W1wT2fGIIL}dEV=FI(g6xJ!dMSm;n4+i0-^C7G+ z0g{r{^!+c*b=yox{kExv0zOLa1ki6aYg@uV$@4F9fJj`Kd()b$lQmDL2L9dlFTc?^_g`7K9-%+mg*Cb?Y`vzlQ}P|| zp9_tJ|2RsnoJBQ$i3>zpb8hY6e~77=5~di~pyb(P>L<41?JHQxM49ayG{#_2)ng%l zGc+UXfn(cdqeAGkgHI@=f7Mc`ed=r9ol^;#ST#Wd_&X@2xxM2-z$2dw0v-`sUQzLm z{>hDp4-~D_GA>UXnB3Z$y|uX-c1^?MmHw8>numX`cx?Qm5j~Ez;T4$%u6nVQY?-WV z<{){eU%0A- zyem&6;(Y#fszHx@M3b8L3vd~qpGXXIp5^bOK2g5rbIyWwqe}87^UTZg^$rW8rNA+8 zW;Ltwgc6~R;5bfIx0--tDEVK|m-y7^o>Wy93b{`Q|e zPs9Gf$-V?ZJ$v%3I^zlD;!(;ik(95|%hk<_>*tvkMID-0tEWdJ;bwoUf91|K6&-9S zr9uhKWk-On86B}%^p#Uyjkwq7>4IbYtwQjQQ5|CSDTw^ zYSidY$GFN0r;Yb`nfummq0il&dMV#z3s}c5!JPA z8Vl+&^L1-lR!f_rw9XG}O76RK)kJIID3qsOTVBDETVxSJSfI^1pwB@(3Rut&xS6P* z>qU;^7HiaVmMr?e`_9om^!zz1J)BI2n;m8H3`M)-lvLZG#(!u<$pxe%H^}wEj#({??(XydRl1rjK*XuKCd_tZy6V ztx~t=`lq_=-jW2;IwK=|LbM|6j#-^*wcg1V*%e>T72E2_u<=t%&=fi)DBu?mBtUws z;i9lpJ_LLOY=G4v%zmio!**Q6Q>kG4z5DE*yi^l*8+%=4oN^xVR?nCRhX06Qq&bsZ zCXg@-5kl1@#pqwkbzSM=L%e1e*j>$mDsQ2uIQWiPN-N&w%ms75JGVAxZ;h;0zhduP zE}XpBZ*y$2VlZJ`H}TKY#=;LZc>LG&(gT3aKk+WxOChP`{;vbLosXrv6lX7@27*z1 zd)qsSA|6y@!Y#LUip8oLvP1;;0?$K%u9rfgzzT?TpZ)%={SRRN4?X)TXgF`Gh7}@v zV;s!}IGf^1 zPW+(mn{6^N{2{>PYO;F%+c(NJ;o~civnW!?Z5&;+M5Y=&8s51OXNw(cGA*hW73BP) z%T_ODm-BXL!l%^qpnTcGNFl*k`wg)f&Z9*!Xj$LuSRH&!eBw3BY|zUjoQr@M$w8xp zs1+!4)cZZE4kKWo<=5!hl-zH^*vMULb_>0uQKlFeWioXn5Vt@;8@e;x1OZyfY7&f$ zaMQQ-Y6xK)nM~Cz`Y}&6(i}Q3<8Q;QRlM8kr;*D>zukaZA6@|K=P$1z(;78M*JcZM zTip4#Kh95OH7ibSVVjk_R>?y}bve!pyr=sdt@URw%d)cmwa%d3Exx_eb9&o`tk0rU zPPHE7497-S#p%4rbrh&&MBD^4)cY9)Dd-|}mjmfGFR9E=t_fXb?pqXj9n#x+VLz$n zHi$6*@n@EM#0kew+>zF4e7okG{Q9O0$5N<@jQeNvw_K)}4LVkRg@v0ucd9X z`S5EdF7Exhl_Rx*_TSZgo#w{%m8bWuZg ztgc019ba>K(j%Mbs@W#Lqs+SOlMJ`s9nVMb4!Sqz^sH{byt-+SvcyM3K$#yL$I1yr zsR|pQ#CQ#ufZq0?@RCQR2OJ(gI!B;H28RUcqPXCUHkevPMRuk{OWW-W&pKYSBI_+W zCQjcAb$+fgS30FA`|YT16~|6svb#uM_JvKi8!}jRg*x+#y`FGw^HuGZ1P|ZNt*T?Z zRdQUytx#Lm+>eWI#Z~v2rbwJ>xKZS?+UPo8UDLsGxu!U6Nw*?K1EkcZraa?&zeRww zT|CZ{GghyR+B4)SIay^Mf&o0I+fY%NFP^=QX+Oe_MTo&cQW) 
zIMhLZ{lebq=Lz|xW*=%?1cbY+Qo@KyB7O5Bs|(s3FfBLCdusWeMi<>#f2AByV{?(7 z92{oT4oX}jswr|aNOdIdJYmfGzPrioEO0;6Eu9UlPV4b ziu|+$=Jy%J{GW4bY|%yCAWUTeI};u)N#S!em3dvFrCMCl8fOt|rtzm!VCM9I(D_l^ z`9oZbguiz0JF!)AkLtQ=`JV|(6HFDxW^s2e?zu%O+a%JOd9*-naV;U~RP5&y%BiiX zJN|sG+MCTc8Zz2iTjdi5gn;PnP2StDYI~k<{5#uGR~#L{XbjY5v7h*d3HHE0-Ux^c z90t2?@fl|EC~p7G_Q)pTGlEQ`|4$u3NJI!b|AOqKWaUH%NmzYY#iQw$%SGpW?q9Aq zjx%`7FyXg$pL+fL!KAM1+Hc9ikO@?nxLGb$?vw{sufb9ZndpLvMC&vl0g0RF6EmN4OVa zQO8iGLiWq%)i-|l?u)|p#%;|CgDKlRhEaD?e@)1@A*a$ErGL0Ci&Z>r&6Fu_q{VA$ zH*VEBHa7~(3A!4F^2^sHk;Zy))QzQF(sO%^Y@BDSsBWPYHlK>~$|{N6Ga2@Cnj+D` zO_y=kzrFa(05ZYzOaf>3o+lD5nbW_UGfbLz@5AuRB0>aiD$my_x!mmLEi}&ct_R3F zCDn*jr5(aB5d8av1skP;(d;?yIQ_I9Ndk`OxI211;j24zx||L^qP_5C!^^kmO5cxp z-?~aiZK5@~$c)DmK8{&$22vJcqO^XlhRt^v4^MdvA+t)fY=oFdNAV=5s{Q?n{qyxU z`6E)^K*d5IwkB@oV6e#_;P<$;ZT&b&kBF*+@fgsCf={6d*HNg zAaWklq8n1v@$B9Gwc9GW+Y=ty$59-MJ6SIk8)Dzxuc9S*icfpwY!9^&>rGI#2WkFi zXk#wxUvryV#uZ;8DP%ikikgW}hi{jHw$ zF5{Q(t)(y62aosDEB%}XM793J-IQ_^)gp5+Aj`Nra3}GW-f3S5%^AjE-^jLtQvW zD3&vgUn7C0sAUlVRQfAOAt+9kT@U z(>AiAH986JWv#QX6ldQ}Dr>b)xl*0KK!L3IG;hT@^QVbO_ySYN8m+F!C*|U7*W(70 z{6!Q3T2d6gXPfHdG1(o~oH+w0_zKpe5!%^%H%>i^GHaK-ixSZ5n>`>T5v>rn+?7vG z!o(4uKzTosm%x^Ai3d3EKMZbbfW7A<1JG|#PU_ARDl*w6uCNd5X!+nmd_ka~uciL| z-XEVGCO7%zLfuEl*RhL=vzA+*UEl`mA>~5B`CD9T{PC!f#w95`ZT*WbJMs<{X8Ayj z;zJe8qyy|)wEi&sw!aB!m+`$O@LqkD;@cJV6IBJ@=?CMH`?u%@evokvXbjRvhb$;T07BT@Lo7B9CYdbH>_BY3NBb3as9>thHxO_39gNd`Im z=4Pj#h>oyaD$ykVA*}IZ)#OCuR@jD_nN74N4ti*wsY%R}iAq`BQCbyE6CS$~*smI& z0Z#bVrZu*jNA$Ryr#Lx9g@q5TvhG-#;d(CVhyCsk-oZ|IP-y97$9%ka&>ED}^XwFZ zKbh8Z&M5C5f~N}9ZY~|2>XJX6*?7d^yL5i67h@;k*kISXE#!xz0xIOC)jSPH{nQwl z*(NbE>n&B0SQL3Dk3hVc#lUzZ)N}Gn-tacxYT`^&NFo%R?%)3WePX$UjH>h7H}-nH z;}^9~Toi3LpXzg`fAJ^&Mb)`vOA4#H>4Wek~NvKFASd6^{u(Tf2U$W_Y!2{z>#NX$6CW=BeeR3h*6jRB02Y# z?k1$gKMv)NTyBxYj%!x@ygn=1De(U9lwC{`uw8G2L?d!|V#?8*$V2KJpHc zzHx8ki#90aR%FNg%y$N!WzAkqXl1J$UTtdC@OyoEh|gi(y~GaY*zTQFd5JoCMjf>) zr?VCH<-`B%uU*k-!R-TsdyZ)J1;p~FjxHVoS3?qb@%O+~Teu`7D#)&OvvodbLu>c@ 
z!_N*g_mnmZBlwcukR-u$x!Mrqm5{m8xz3ggwMO-vb8d%ph9O9KGWtP!27mYEJ`8l5 z*#iVq`9m}=oMQ3gIaE#&KM1pO!K9E-R!SVC={L=0K@>nQ@Z#*BJzvUrc%%L zG>m|G5<1{=zi51$2{ZFimOxT|O2sEAS$+TSF(fk3+Wh&9bI=@h5#RfGci~oP&#S#AevWICIxCN_hI29A7eF)k2F5yB9dXNR#5NI%o-W!na34b-Q-rjSy9os65X89V#sUFRn+g%>S8Tq$6)qWWB z1fd}I!J)q1f%R>SdjqvX_x^);WjOv+D7cmxWSJpD_K|V>uG=%-W2_t03bLCZhLlQU>Bhz5{~)TYvc&tR78nKRh99XE3tXQ?n?~d851Sx3YSH_-9``3c;64 z%p~OgynWFK1$YUaEC;4!nwK=UEz2A%?wPq^LLLqq{Vn&yt5tWGH=1`BmU`o|1=o}@ z>#&T=$C5eOxzxYVyk|GBXWNd_yZJv$m^vfTf@>18T$!^2`c5N$@%yhblR#;L+u@=6!EAKlovC}?-v zLz$e)a9H9e)$rGuKrUl;_OpGrhB*S%tGkD$=UchcC~MAFSt?GekgMrXvvIIr=A}J) zj@_cvJwCg{+ zB8p?}qKno;M82Y7jy~5#99=5sa6onq9yHV~*F{V`RN`*f^TC1jhOds(pVC#=+r#qy z@KzHT6tO4iYj<><@K2#IHeB0G1aV3{+R*JO@AE{Hy3@FsnFy}c&&29wlbA?e^=Li2 ziDTKSD&#p)OA$28B@#4QRIPMf#Mj|V1V3qX(do38uC5N8$K6)oRHJp4ZTB{DmR!jN zL*)fMwe5E=uYHUI|8r%M|GV(2iz~V`OTX^EB4LdY5@ycGi%7-x7_{Nl>KFbr{9z+Z zt!?4u)a>5L@udY49JAuIoKMw_9}c;vZAq8%sVn-F?~P2qo*_~nO8n0YULh37-ze8f zA+NpaeQ)_vC(l60ewQ6AYvr(88RdR6`hjk#>&*1C8KT@vE!+wxnZuTtG3Rkl2Q0DGId zr9M_}ih<>RBWl$j3J?l>fGA%y$!H=Xm@yRTOb~(MYrkd!iPi%V89cmp{{`VzH9zuf zQ&Ff6F^gF?|a~Sn5cEm3t6J~fM3XY7+V)=nB&sKZ&}$ztRx|{#Rq-_aRwSFldD}#EXPh~z=h1}I#b&- zfr`QK)9yF$Jn**N-J;9&(p5Z7H(R(P9dCasb>Qh2%S5O&QDz`Wq*izqM`$w;PvAxn ze`mL6S8iEHEmPK9Avg;>SEpT;3wic6E5Kpszl}wdIfVR{4<=GoR?X2K+AMZ?WH`B} zs(QurP^zeSCW2_}70qk=!UDtqkq!L+>zno#W{@Q=-R#bP)i`*gyCeA|`^hGP6OSJ` z@n1#4=KlQwui3Jdi6AL`eA~Jzg;X#k;yCUFr``0BH<}3}u*a%}jxu2MqDZF0ikz+3 zQ|o3tA1`dGR2^)#nPyT~`f9A_Mjrpa*aBqlj~0?E5?s5U!PCp^fWPad4`%c92aln~ zuoG~kfJj0`l{A4enUS4~x|!Kok&wrZ9p$k^X?bKi*^ulCZ|;MC=Htj|SG_@$%+8+v zqmueU2G5IyXK;1g%Lv;%h_3DqA$7^~b`szleTIjY=yqfOju}p`3$Loc!enla(GMUW z>Pc#a)8r5na>|Q;Mr4e>VoaGIx^(>Scu5nJs%p+b^p# zT3z&tQ=s!DjqOJXj_G47wSY@pbqwq$7pSeHR3+8Ka_xrwL3wE!br zA4J=KIZE_)#si3l89YzqSUmmQp4k@_41bjEf9RYIkoCD^LJ=KQ>et%ok5QE!x|s=i zSQ;}A+1ky_Es(db9g+qSpOQzf5(V37h)3L!UzWb&ZGRsqkRfA_bFhvQj)=JJch1gT za&g3L%|!C@LS0EyQ zw{mZBN3#^w7t9@H&ZMKv+1It&MJzsHDXw1j-%9<=&;@L?$*k<}jP~r^6kjU{hxv}p 
zgj5NH;iZPEH~miC1veL04`>~??qV0e%(QKFc!4*!IU*Bzv66p@ME(dbH@?pVCQ4sH zDn?`e8MHe3N)l;d8v6GZbph z+Fjea&!E!>Cz7 zboZ(JGWi=sfF`O}HKlapZjoQ0hpK9?cEHKGnwba-14tzrvFJf$-s0mtU%={^3?8)H z3NjJ3|9kx4je&Z1xn=}S&O5)reMl|2>a>-RqR{Vdbi-*QRxodg@X7{+DJOe3Be=~H z?Jk~K3LH4Ol*a&e*W1o;vQT2Ie@8D(Y{=d)`T=~j8ugzy4SVf~Iz7gn^^EI(6#;+^ z7Txq~s`$fvIaU&ZIhFa{HjH-v!w9T2l6X%B~588?4m93 zew^t;Gp;OsFS3x;#CJK_>1GhtQ3DS1I&Vvww1_odEgg6FatrKrHV05jIKAy=IGL>+ zTxx1K`Kgs#Ay;s%6V9|QlhT~%$jfBXb&$6W{Sb2nQs=ekIa`;VZofdwOd0!@MxyAi z|69Ry;Hxe=ak2P0&zJEk!QqJ|O|*hxzRC7Kr^R~%ev$|Bwo~BpW+D{H5l*ina-PDE zN#t3u+6eOqn%9Z>_13HXGZ8G_0fv(S0P$|B^RV;a6ZM%xRoe`>!7G!Ghdyo``lm8r zMbce?u;l*G#LPF#xY&Xbc1{=(7~V@>nA50*r`!mNe4~8O1)!?bcm>P&R9=C^OhZig zte-gY>@Rc3?sEka`<=%QuU1E{k-Q}>42yWJ5@_sSzFdIuvX0uTS)vjMlNAcX$Cfl5 zBDPQTE$@%x^3O}1xO)m9hyjyD2x&`>=`u@=U2i@+rJYyrAi2+6Bx7U5>L57hoob^?J>&z;4csS% zbI6M1G+oI4M(XEc_!mZ@7vBP5#_(W0+IVVtpRcnrKoHpt z6r#PSvdfnharF>DNldBk1%PeVQ3}xGoH|@lB;E=2jmZ&1Haw8j|+zX_>HG-(<~sV6I|htqA$I3j5zGcCl;u0uG{uM z|Gj~Eeq?~)iGgn^($3%w((v!0_J4@Mw421z@oFjO^B`u7q_veh0A)D&iqj%N(>h9* zaYx%as-AY6f5l5>y>FcJB~4BcBJfZ!tcF1=bW?J#hI3n?OGx~&L-d0RB~>+^$u#il zB}E)1CLWwpsxs>_a9C5uoeB6CXJyRX>Y(};Lk$lVH%#EA1O*xgnFxDFiJw)C z7Aa<1Xw4!Y3%6i&OOD)UK`>Q&EM*YjG4;pe%;f&JA<%Ze5f$8F+{u7_X4!^YH*lRn zewH5|=+UlZsH5`xB;j)|Ag#`Cknh2b7_ebH6J5mJ7u35Kym1hWWKs!p#RJ2zCMI@W z8)K@Snn?Sm>3Ph2(Ek0_M4%qWFwoN>bMq%8m=WC!1dD~b8|I^%w<)rA$(-2y3zuE4 zgwg(FHNkrTX^m;|58YIsvXS_*Y-<`$7D4#>>X>yj($;#`xi z6I|QUbuhvs`%b!lh`_NuF(A?KZVRvjV*~!^m}*=yM{-JCs>&obTJ_b+9!R-f*0)pF z_$QLOEvod`KQe&zkpIY!%(xdpDo=$k6_5(~s3pZ!(I{=+X(mKj;<%-@mzTI!{qTdf#V+vp^O5V=d=79&`&sawRPYJ!v z=w-yXbHX|*AP{W%P>*$XWKs;VYb@6pzQp`mRpMrM2fLQ|?feuUPdCi1`M*_^L5Gt; zN%)ADxlyHM67oBQeO`>>ZC@g}6tQHRvxE5%bod_WGVTO>nU|~Tzd`Q%2I6q8q)CC| z7SR}bH^AhiG1C$&ffI_0a&CJXP9F9v%w-hs1}q=at_bA`nR%ZD3ibdhBV9~&rVPsr zDUMR_`oJ<|)){TKs!^SQrs?lOFMJQ^J*jDSsJ`**4O1t<6@+;NP9+_=J|xFe`#Za5 zo`c3W*r7s>y?++Py3ZDT6x^^+?kLUGE|Il>Mbv?&KMK+q>B5 zfF&I{$<-DJ|!X zrt@b4iBkpw+AgXO6+;-Y-2azC#WH&4Q?G4&g(Ji6vi2d;(e7t~+{3yPdDTA|w{q9N 
zwvP$h9IeBw?@G3Cn_or!#-GRe?C%Zl6)kZl9-Mz;8K{uy&Ct_@8M!BMWuyUPoXKNv zE3#sZgA~!J*}X6{lWsaoJ6;=S|FNBkdhu5!6HQh^40`Yf{5X?^xj$9r+1%C&@GociCCZHLy$~R^{#>$ z_HY$x(bI8Be)*Fd>`4Z0SVxT=7ev$z*Vms@|HOR} z^sCspkZ_(L1+s{l!(S7h=ce0WFZ?b;oD-1NwKIao_U*awCJZ4eX?;mtn$>N!W8IZ!i3X{K%0 zk*|_euD>=gC~d|azi$f0w1}(XrE*09N!1PeeyZl%)4zO+-tAU*pH92k{pb$pgixzV z6`9uIZqPd0x?u>*<3ps8Zl1x#FKVIOc3RH7j4n0)SF>yqbP?EA@@;3gFg;cEMEH!P zpdzJ;D@~7Z-w&u^U4_RA$^CDLo46Pp9ynSoE;;I72=k@sd+ZfVM2?6gjUcBv(6_x80ZXi8Pwbz5 z#S1y^>D113Xi)S#WN^Of_N@1s6BkCN(V5Z$bZYsT7Ri3>m`)$awJl+RhdCF zsZbz{J*SoP(Qs>8B6te1xyABf4QoJhLa$q0l@@{EQK~0mNt4;JTz$W$^o;${+AcDE zZzR=4WB|Po#4nUp{m5@kRY;^g0J+l0-h~=H$9B?o`GE^9=qrL zqMF4gIpNC1|Mqv(C-4Jo3-U%c6$BA`_Z1)4=m*O18v6T1LfHt`C%Wizq!_%!^7?#v zFH2a^XABb|i@SScXHtIvEproDf%C6qJ}z#U!v8Y7uEV`io5ayaJdt8CDlh+`j@gaw zM--lCK1x(6=jFBr5?sN~MXb-APCW<+z=>pfELWL^;bd6gw2B9n1<%SCNFvuiRR$=q zdP=TDX%@}XEmTjx^2B;NB};)$?e>iZ<&8atu?g}Jtqtg)l&M|%YUk?{O*YBrNZ^Jn z!22jxvMjsZ|4NGB{oI1DXml*ldfy1$5|+Ask$QMO@Cx!i5ff-!XV|&yY#E(4I)$^5 z;}7ct-yFRZ*UB8=sPQe03M@E;Wze#iE26p#fB0tTpFD4u$ZNML4G_n!!w!|PG_VgE zJK_C@(GNI&U+7Z`*e;}$dd5e6)o{`^sOP_88~q#hfd8Yd%;D@^oD9&kCN4hl<^(;} zcXrFkZ0sMiG>ekJtcR3%reT_I8rkQ>w#&~g{#Q2tMI!Fffy~l-#`AAV;yeuwB;jZd zU_`7z9?BkSLaWy-pz0+yi9-*z!%tp3pp&Uv@*-LpOEIfZS3O130A(RLaR5{i+?oW< zh;d*PY<3^_5M%sy$1Cu<&GzEWp_fL!-2}}{MLGJxd|*6Ipgqfh*Up{ES>G+VG}!D7 zdTR zKqe2GxHkLnv#AdCIlFRVPY2k=ae$LZ2;4=CHQ_DoJt76YFKH(BZCSF#7$Pu*el-GY@IZ;GROP949E|>oHec z=kt0mkyEX>(vsEz$r&4)MNN0!)+!(n%QXAy&hWQS;tUSQM<1phetSu+b=aj9vER09 z&S~aYcrdKhlItbByE@lc@6%u^sqrN*!Txv&vy{WH<!rYPQ~4)b$t1 z|CPP%A-wGp#KK?Sk=d~V+7NX_EWLLBcDhg7%>Pn+BHT0xf`wggtg`>fm2jw?^AVTA zBVJ!|tFIuh|8phJL4i-h;})VO?#m0byXxxt*yKR1&A5%E{N^7S#`=33otyza+Do4@ z)9^2SB-upVTSNT}9&NYhIOy*09VqsN1Ga<#1WX47kTX(G_&GK{%Hu++6aNF02Wmj8 zNwml%lq=N4|C-^dU?ohWap;5qt*jR_v?dB+_G0nqS=Xc51hqg>A^w>i+WkPg>b;W zK+sFgqH5TmB^mass4Jjgf5E6`|JHCw9?>&7Jc#oU_QAJ%2qG=}u#t!GZ3wGB8_AbH zZ{+ip$p{01;^5`vKY*IxZrkF}M;19X+^zXjfd1>Qi_#G{2E z1S$J|M(Rll1ZDHtm5U&ix^dB0;2H+_s;L`2zo&Yq?~gBS-#>H>L~kZUblzDHtbfOX 
z_ZrTjK7d{Vl6G6x=fJ0Ib-KQ}sCv7@+{B|rNZ@@cUO^!8;HTU?=l(o)4F#DTYwzdw+2{Y+t*=tj02Yuk!gh$G8!s#mdf_0{?$iZ zkk)lE4>M)ZRZu0~$FW0rF!mE`K~B``Z@yUoq_GThGeZV_Uf};*9+2!$KR^c4Pa^_u zif^y?gGoED)GeOepU4`eWfQ>(V^=+;(C z*DQ)y^PM+G@zUX8Jrn!cvbcXa^yE3a%|a;Infe48c!|{$HGukZ$d0!C3Kk86O|>^Ld62e#x_av+Ajw5%%X(J52!2M?mzPVMpf5o zCkFm27P#)d=_(;Mzm(>PZ(lS|Fp2lyP4$VydtqL8{vUg99uDRH|BqY3X+fRpBx_}G zN(;%pRVq!kiV4Y5in3-IlVvP9hm@se3Rx>6w`D}gKBZ_XgvLHhnX->1WcPjEqw{{B z&-J^0|NZ{>U7zdgI@gs>_x*Z3_vP_?GOh3H$E$36{HOSw$G+j#OM2IQ5VBBnXOCEh zA1924@P@bp96kRLikO8m#vC7*^q)+|=dpo2iHVa3cHdehemq%k`f}57nqGR@NpO2h zmWe=>;K(*c+BrXelEXHDG!tw%<)w4h{ukm>R?^D)maK4YIkX)3efAiHiPK8Yzc@O3 zJyU7X;cfpkh_C)F;P;mSz(vc(o)s!uQr$)-xHIggO;p{^PmC5-e?{=a+iXjA>ptqf z&^Y$?L;d(Gf425e%`p4h)AipE_)mv!Yj~LWu_xQUfM-M?fCz|=`M!XGj&1H^a#O1F zNxxNWQoDA`wFN--f4I0ieLyQ2JAL`HTdrwr=MHS)1AvXz&x%fKk!{k3O{u+LBMI99 z{2dA+&bbc-iQhEa<_P1@ftwJqfyZ^qks+!3>UoVvds`vb8$9E8&Px#yI}p}gU=1d) zpMRM*UX?HV+>&3*`{B0b+wyI>y5n7mFkpYed=qj%@r(4&oS>?()+6i02cJ|AJ~soqCaaFQ{~EOow>8UcNeTEUcrq z{tj<2Z`kjOHDF2;_-c*GzLLe;+jOq>7*UAt0!@FAJB&o{&xtYfO>7A>+k=R0EU&ES zPcHMRM*G6L(^Xbu_Bv9!CG;QbpYe+Uj?1AvX8ORPWHH8*dxY)dE2WzbJB4HF&jHL~ z0oa*GllIxDN>OxP7~BZ;poYfLvoEJV_gOqHr9NNkBC^?2WvV>#!BRiR#>yGmiYirI z2XqFc7G4Ao-Fo)-(gJUY{u$SB^`zzL-XI&V#*`J?LvQ5RjO%aY+$>-H_}*APClmB? ztW02K#$DpM?~m_uP1c&vzd)3ApC4kCE$QGJD|OR5w(7B18J+vaLrjH~l zCg5}46+yECAWYUDt@;8Zk2?-IXP?hq3UK<7^mEyh(KNR7O=H<^CMW7Zd;gI(-HD%n zY*}bbF1#^wy1J_VeEDQsb?IC|Dkb^npnnklO<7ib9ecj*P(rVj!I6O1W0N1q(SlbrU^g{)wjYyMIJpr11mjN-23OaO%tu~f91gL!j9G2Q5h5n}@xh-vUemetb4?42&D)>uEP2qeq3n3IvSSNq z*LYtNIip(J-{-trkKtO=Z{O=pzd2;wu20;Zp<r0foCWw6))V+DZD_7N|U2gGAWb)Yk*kc?UfI*#(ePZjfcp zVH2iN7EFTjr#z@}S6k1VX!ic6B2EiVUvRl4bh^-`yTW(o=j5g8v2Sk!7oF_Ajazaf zgzk2}#}$Dx&l*AVv}wFL4P)ZmU(x_L2+O2V6SHh+ zdy39OKSVy{&o|B!aMrx5g!={`UD3ul%^rhpCk%&Vni7R3euSTgftfn>8Z0+x9MCv? 
zc67G#@fOG9Fl;)vY8Qfc(=G3%cj$5nXLefbM4c*?XIbQ>PTBDyPhR@D)H|d@ciirM zJ`4^-crjEHV)C~2q@Kr~d|MdM=Yb8yLi2@gzie^5@~=a|*xqC6Q%vyPZwdw^cLS*e zt{C2)O^he!XZ4vLaG0dTuB62xzYiPR8Ff4j?jJPCd;mB+9IwrihZ2gV1BnNkB>rK5Q z)0=>bE7Zr->SHA*m6&SGchHM=^IwpJI1t-CF8vmI*BwO#=B-KDpmh+dBoJ6EjV+W9(dQZCyUi3AtUJLRrPj;`!FGT&U<9e@9S^DYSz+!^ z&osRp)q(N{zz*ao9x|Ij?kzB>kDL&j^<{qBqXvFSp$oq=(yrmSw(-WmcoGXi;vP(p zO>^P$hv^;LsnWp*PW-%jf*1*dx+F|}fHEchG051Dd4{U4=8}8LJ|-UwPEp|xhGH<@ z%yvcQnm-$EIq!rj)L~Vj(vU5;ViU9lZtz?;^^Tc&@5f9H(S_?|K!M|9SPiwV)O_SsZk;do)g*U6_g?1!&UGm1c)}ZL!}>f3)QFjGvk?v>^c>#ncm; zJJlJ{^@KM)cHS7h(a2BX0E%p=f|n@M{|?XOjeFaA^>B8GDvvN&x1f;HNF_u@?&UQ% zHZjK>3mGS-eMi?~6V%_x?7jd!*%`s}E4!~>yB_V(vxUAXo5HmW*{2^4!;&1Ly2qj3 z)nd}YbmMs%Aj6y4Znp0uQxgV+vaYcG12F&8Kfi&#_h1VRz!^lx^{Ki%HD&oS`%ED6&-roD~I1kKEd=wo5|dCCv{^TTt~`Ky$U{9#PXWN|OPL$gd0Qm6VWU<-z(Zf> z^@){hZz#}X$-U#j7E(#{6CZ-Tg7rW;o+JnJvgemyt{ScYK-`i7 z0qd^Er%S4wCc60@BRQB1NIm$P8kK$my$(|kCd-q=m$SjRnQbY!s04BB3pQa%4>TvB zT7Q3Yhzz=Mw&|rT6fx3^CSVZ<<6wg6+Rwp6KOY5tfcrtJ6i;Sf2KnJ3z6*Rw+*&ZH zr~|HNz`nm-%wJaA+a`Mrx`15Wey;4Bg`GnxNX$K5!{(7G2(uAFr1VOtfcFv?q+ODjdW%D7fW7r;8J7_!U5ylZHz|4 z_3w98bE&Pnv1fb>^J+i{wWs~?^d9(pWY~goF}fb(vlClb9#R4W{unbUflSlNdB#|x zV}lRI&7`2)-ZOno;?fYT1}NF6vSN*y(`@*#4Q5B;bTm)Nn^!L_GCz3d2@ z7NZ^<7s~(3X}o_oweMP6@IXTjmmiF)+GfI!AG$fe%^~KA=(0MW+QH&yXWSl%j+btE z*<_?cdXbAR7o;3f=9;=uak^A)9YRt6={jE8y6=wgXQ`p;&GFk91&QmBwAO&8lyn9_ zRn>QTDcG8ECY0`UnAUWyb_myYz)bRK*~aP1$f#U+Q}ms)!_W5bvK9`=Ug^UL9e81C z$r3p2y5y@t595!yQrc^6CM9Rfv)`!sqa2saHLj({k>sqfAY=ZSh*GV&f$M7~Cp%x#Y5V4S>DX9Q+~6<@4+?rbhkHQ%fXef+g6IJ-(|aZ~TjqFYY9pX`J6f4bk{DGdAO#?}=Y z@1hYm z1Vf}O{`|yQWe07#eSU7ZADio(#%CpMm>{QVgca|uf}$Z6nnx%232aTVS@nE(?>mQ{ zxbX=ki-^Ur@BYO}t0W#ks#1q&C3Do9%S*AkHe6a)p-b4Gzp4EN-E}yqgnxFsLh0L4 zH#;MeLt@x?^={X+m7)2itgtWaa_!66K{{L~!;@67MIBDKm1Q}rr5{cQUYvX03u+Av z#3?0!=uLPsPY_N@4;4W^kpdLbZ2QdSTVUcOh=9Xc`0UjOCbq&y}`le_!T!!~DMGuGST|%DP9b29_-S=A4VE52)J!-&5|4gK<*A zTIb+9@Ha*yr*6QKoYI5tO|h4aNa@#_dY^dp-fggct}I%hgsw7HCGyFPKK+o!o4#qf z6|6xJjFjE>MkyIx(MA7j-Vcr3#v2#GvQ@aGVD;;>3z|Vk?9z~QZ6D2X3cMhRZho#l 
zPCC(Yjhci4_JE9aLR}~Q#Q;_;ZmBy%o`We;y1;77i$0yc36iro6aTBOY<~B0uf}1~ zo#xzeU=yA2(*?hd#6+d^^T0Cd2IeG$bJ*=UbTF1X8&LF3r@0)BBct>QCp(2Wr80+N z(VbWB@h1JVRumNtT_QheO%g@KdN)e!x~A~2;{#xQc*8(9 zrv8Z-rs#uM2rT+%Ik>G!(E^lWKZ^gZMo3hVS+6)_cbPJKE&_Kdw2hzlgXMh)M#nkcLzT~>`8f1Cd`&PQPs`HQU^{~Fn zu@$I0g~7hLUT%(;f)t{{Eeu5q8*rjgm`lX)F3Xqctt(ioMuhPF8vd_;qu_sxQn~HJ zwT66Qs+{=Dy)D<^0WqpSNQx^vL>J$#FDmgbreXK5k-y;<8$e6w)7Mn2eW|E%RP>2+ zus(Dqwo)LX`PES@>O^Oiw6}7A**46z?|eSkFCbG@u120#=s@(wC7BT*1h;nbiH|w; ziK?*j`-_ouZU9Yj%s0?RYRO{@ZD1VEfE|^T-vapJFjo&THh4n~LAfJz?~u~s*MDs3 zYz@p(MCwInEV%=KBvSDqMU_Aq`h-pMpB-Ek_lABu5Kc_r6boDD|J)Liyj4Qs5zK}X z(Jj!?8{5XOLv_8g`OwP^Hy9w|+bG7?2+M50;(^m$*sA7ZulatWWF%vn8cIH7wM}9d zwhO>oM_wsD8*#?2(Z_V^cB%}p)wGWk_U2y_{@{ESmGE(Bn2|!zb{0&u*4YV-R}7?t zb^Uf>eRt2w1V7T-COu!W^tJg!Gv@mK@g8u7>ef^H`ojJbl0;#pbjz$F?1O*cl7#F$ z3NG?O9&mI%9bql$vZoi`Leh3Z?-=uc)9M8&{3u<7W zJE6DP@ZUJ&bPJ#lfdKdc3@+$=W?n$B>$C>gP-r}m`Jic^pvR{1SRi_9=<;i0%KGc# zf|WH7-mr&DbjHsz#qFBBE9y6SX^*Qgn^Ex$h+wBzw}(067le(tGt?5am|Q;& zleH(Bm}@bBV>1rHSM+&}Kebg}Cl3|tWO=F8vm((jUsR@w2h z_5ZR=UH~9Hm~T<~V51Msdg+xg@r>coSuZY)bs!1I02?6K}@;BqTqb3T;O~zdfvA%qq)oDv{I^kn$=APm)5n6 zaAGmIP|3%=S4juvC8R8gjqnb_03>Gu8D@|yo9A1Bznfe&mzcj*bx0s4Rw7-d3j z!5_)HKFx~fg0Plve2tD3x{bi)&TnJv6=qAg!h3z~IF@-E1i0&^{@I#;@e9?ePMlM@ ziRpd?6YgY)pU1;bHhaUoXyJ_D5?IcaTX~tCBZd1l(qS3ww~7d-uCtp}m$y)_m?Z}6 zhZ=^kIVD~DJExK1C`9?(A9Ei|pU4WwS@%PN6) zf>n7kxxGTYe7-p__rM1#E8`a`?|;7V->_n4539(J)w_sXZYF4B_Zv3h8fw(=xgT5G zRn;72FRa&k+Q7R9bh~q$pE*rkWF{@-@Z_aKUF~$qrJE8#a`;tZt$OQ=a3~FRsOn}l zy6bOxWqc?yOC+}HhmS-rr@pe~a^zmvFb8E%+iAhs3v0VUn>wsDSl88k>|gFa3Zs5a zVYRI{B*q7rNtF}FnS9_>Vo&t==`cFxk>hDxW6V91`knAx)S|VTm>E?4o?8mL>Ax`_ zih^rsomtSb4cV&|T-{KkzdUb9_ zXJ?^ehS~o}n~=goHDJqZty!q9{b5m2_<};T9}j2DTLu4ND!44pttHa# zp53KoCrGzzOxWkv%a_OF=JQUUbmGo`vm__YgnooQ?O&8}g7u4qf9nDMMRXP0{}suB zq+fvQe_r~(53Gvf|64gstrQZ*UL~YlDP^?P^>wvSgcqpB7JPb&RwCK z6nJlm^6lKQ%Wc!?Chc9fpB%pODfiZm_FKUNH>e_fTQPsWiw(_Jf4`lDISRk}DmnSo zm9tr9>^z6%**`=dw`3Nz5~|03E(hYW6K-OLhYk#%uRlLllp?^z6~8G|itE?c|NVYd 
z5dW`oxTnqF=Hd#r!A+ZDCEJtjjh0;^C+;wIa$d93+z<%=c)-Z>4B?ituN7hSu=f4t ze()0KiyA4i`0HD)|6SjY3<^=dXCeA>zBYttA^XS!Lk<5h>Y^g|zm)0!t;+oW17BcJ zT!&*2jomaFc1Ervk)FD?gs8cj_823eJ2u*iDZ!Lpi}bj@M7!uawUuLP!CePux!X75 z@Fz&T*akz-+SaaZz29|HW9fEG7A6zZQNla+BNn47Q|d9?CPoTy7#?l0st(*klOtcy zteySn(<|q~L*(O}#qni_bfn>wXCUbZDFAmaGz1QP4rksK|Vbm2bB&1z+?BFA_kzL03Sq|>mlxbhfr z`{-mio`MOAW93>3t{2Kx-YG6eq?(*>|_!7bXo1|3Tzyb#S)uH9nV_kxXV86lm-l&B@6-p|y{OEuX3q zcEIy}jl$+N4h_|qT)_)|T`8YVrPL4Yc{A)I&5Pbk{Q|~v=ov!9vdOF-?EZQug?xEo z#KA3y^f?5aCcXySP4(uiZZQvMxes7Ze%+oxMq*4LtDA^B#V zw}6^4NG~8?^Nnm**?j+POm8w>FQ-(8nh+X_wTWX5VGejd*jQ&^+jh!UE(1v+vz{~5 zTUn*{ky0Z<&&_Z}y0j};b{ri>bv?&i-Ha!D3(I&W==PB%D*c=+;ZVzTtie49dL>S? zuNA4LOI&I(qxa+lA`AYcZdF=O&#^n+2BbLIxg`um#9rK~q2}1&FlJzQjmTntVzIH2 z|6$VDyP?}}h?NntgJqZl2A)PF^Jqmkrv{F`43p$5xqMFIk{_@H$Xa`7gY_PI+4`DsJCVV!|4O^E{_lmbsE7z`B{Z|Qi9K+OgUdxYu7&9w7F&S&6xtl z+I>-_sA7U6CXLL4T@l0|M+Z&VlTvR_z=!7Tq&>hcmm+xxa;KcPN#9n^#P;HMWT*fs z)1YM*YLpJ^OyK(`I^*aTL&vepj=0K4$o|o&HRy?kQSKEyNso}SAY7#kEt_;fGBd-7 z(Ss2-n2bgIh&V>3=EmVPN?{sBK^liYgM0+Bryot1x=d z`o6@QT>58lj^^|`qzWD5-#W|);CPR+!GhTeNoqvvHN?MVarp4A4MfRg`n3O4nJFoo z6p({Rn&TGAcsb@Xd=aS8M4a4l+WTk%1@pU|RC;Sji+VU~OPu7ThFewpNmZ{C%;+hI z-%*nA+fR-V;wHi47yK%|aHw$636%=2d4`nB`4UK}QQ_we3J=gsNfrLXeZdheZO2>8 z+ppm8mZWNv$w*A*p}H>v;Ex}3j2Gw5gq}p%XurU(%T;m)w~#&5VQDSkZ9|YepB%@U z#!crSoxKua2hu6sVO6h%vJunlAp{eY%<5)qiI-?%=B6=H2ojDD=P=@v@=BN*ilw?FoVV9as!uI!=a?|V@as6vy$ z8+KrE+WTpVU*+1>D|juFB`5!4?--ER$M2Y5Czp=Q;OJfj8ub2XP}dm(b}|9+JA+H6 zOYn1802$adAww)={tl+`Q~|WOr!{&=-Q-3>)(Y#)HgZG_?qIhA8Ee(eL%hoUR#5kCSz?TANb;yxB8=nT$eK&XH=^+4hY<+=#S9AgqgS-%=t>QsF~jzf(m zQ@V%N!`|!i7n!7$hNN^zT;4n=uW4v|(EvA^g=CYp@-)|@V&IaiWeQkCB7Om+{J5vviXCq6N)?e_)34d@NHckq^O5J5fCzI6ZA&jX3q^t~C>?kvu ze=j;9f5!~BK)OI?=`bug54WeFm9&o)Zs{~Bb*$mlt^oFnzru?%;uVQkqTuW-L>mp| zHvc@Xqf6i5@&YN_MJp`x%Ky{cjP6yY_wim;^I==dzj9l0Z0@tF{&{vgh_MzyKO%9Uq={G72FlUC{VLbn)g=*bu`Hy)U{cNlzmu3^hKm^%QWThfzh*#Q zEE|EsK)T$ELv4QHhQQz`Q(Puy?hYo?+wtN=U=hM@k+}4J>cQG3a}{Hpb;_Hy=Qz)b 
zy<-Zr9}6OZRd2jYk5CRei*qChJX9cF*)#PZ>MU-XZ0Vk=@0?iNezIzAy~};1>>!O; z?nQM?ie)7r8l{@=J`c!)nYjFgbf&V7V`m7C9R1l5Ak~VcA?y?umZ8p6XfTi zLNSc5Dp;G6%t~T&@+8-YB-n+tOLFb_&78c;A-nx7aM~4`Z?+7`D?OroJcrX(`!8qZ zz?sp+2*!~R9-MZ=2PZKgq{u!0UVK2l&=lubyB5b`>W=2G~A)YHQ@_MEH&Ks86bZ2sH;sugWoAA5!`fLtCLsJPEBjsCQ>n{)Z{< zr{Uxywm9d>obXq|3Ir1)&qtbT#53iyhjQ2(1*aaUpLeo-crTayEU;3O6JWW9?cw%w zI0zyW!1tP@q`#BB(+$nLX1nkElZQ=c* z8NEbCo$?PCj=)7zOBpX!NMj$Z(L22TI2_k8RdrBTw^f33T(G2n884T33{s{QNJ_5% z@e=X-F4AYpIO0R9DtN3?K>28bChio5x(c-(n`h!_K(Zo>cl&jbFORi$En_leJKFAV zLXTpu2PVCKykT?Yub;VsKUI@K3~=up6Cl-t7JlTbJ)gGdj%N|A+=Eh~f^|A57* z$9%pV3Yp_vO{$XnkD0BOQC5uxp63=dIo8+uhA9vqg_Jpx49dJdc28q@G$8~>lqu7)$%}YWTeMk= z+LTYvls1;Y#d~SIV|Nu}D^T=vGgCx{ScuI{Dm z7mErx8XdcQBhtgw{8n!GJz=9sW{&aJF9Uf7lQLE{Q60@~eNQMqiZ&6Kk`O;*Qk_q~ z1-U<3F%W7>A2WIjY0-rc9dW~pBp0n{(c}2687y(VCdg>rHZ$Rid z@6`0WDZG*k*m359w?KUGFgf6-NZ)Y3!VH$ASgT*6(;sx4%Sk(g`+{j* zWkO%>G1O&;<}oHllTYmT3Tzq`BK01c`dP$IA7-L zz%U0)z_?*3;hBuTJqf5=ex~^@Le##(CvxI0Y8-Lo7Z>z6Zuv1zzEb4^j`8#hLq~x| z7VR0^5@1aT?wguWti>~SMrR=VO(wsBmb?_QHy=&WsePHH`Ede4G-c=JM8djLC=&J+ z*_v-6KDE^}x7Dk3RrSQmSAI`JB=52k*y2$(39Oxop2KWXkl^QhOIv4{8wzlq=GDn80WqoguQjI;`-1ndt*}cCo z$Y*#SmmJF~08Tvqq^%^g^1LL4yIV#2+(`j=QtwE^K}E=J%V1Fkwkr}u>uAfF2bSgj zgkV{z#;|Cjn=>>jkrV#*krczpyO^=ThSh?Z46zC?e7ofxiS<24(o-AXBIzBa=q72u zc%3w6A!>X5happ@&89{rf!d-Mnolvu8n z_u%nm;j1Rn?H@ToV(%!w7C9kcX>ICBB*aGCxJn4#{D?ANX67{_uVjYnm$m#Fah4)! 
zU-3lqSe#<5L9K{wBCCu1@~8-U-brYP4J~lVDQEyoAReT(#VGc4OI((y6`>e-mgMA( zT9kb#&U=@@M>KjuF;K~_I_OLo68gm2&VZkVh@5CvtZlo3{~D8Bg`Q^j1v1xetNH1} zs&aF!inZb8p*bxgebSc6;-1!oSO@>YA?k&J?5h18-Sh7Q7^RwrLx;%~1)6W}yClzg z_2(G3F4z$y(UTsbdFV=Sr^KVC#~5J=`@NH$VYy9jN-iruu_#ctzW0=&IKZnooh=r^{7@&mw)H_L1%;laMUh zlR33TJ6h@AUXEdN3HGSej+?gWE7YpRvm(Gi1563Q(x1$$f~#KVwYNz;qJ^-z5+VN9 zD%PrEeC<}NUORu(3d{5f!TgO7E(Gi&b&V#M+2Vri9tq}eQx9sI&D%W;Dm#aq%XnR$ zDAuMD98to#itE6)?xXE>a+z07M;&?b8sXOVxjCT2I2T!8t{0#~{3bFWKa%t`W!ww9 z3MB{c27oK$J8AEjSIdt&k#MMh->oMH6lfj`H7Ms553BM#1U->sR%q&aWa#i}JrtH_vXCu&Wo6nX6=P^g4*LSORAH(uxpV^T|B=r0qrh(NUpK17A>h8@@82@Q+WR zj0)vDfI9~~(R{N(TQ_^+E$PB%38uo-1N+C6d?h#Z4SmFk7Al9n6MO)QM8o=K;|+6U zHka26<+2;rglJ{Wa>RI0Xu@~uK^n+HvBX>8s`m*}>WC2MCI!@@E>;8rKD?}T=VUfNv_C}={kV?X% z-GU(VJ|K>0pz}ozJ0n|~ff$di_GAit27adn414()+^r+u7~J1ooC79m+2YPKcm}td zo~=6IQks_rN_b4P3FZ%Y`?65e1L_( z9lf?}2Td6_EnCAG{McdP#VobEmrY=5;RhBWI2p_4a#s19`&Gz*yxa-fi~c%25_Yfm z)0}31^dR#lxFU+?Y7ORw+T$f`=HD3pHJXEK;I4-+4+}`3~9`W-bo# zJAvCbxb76nrQH$Eb%h`vqG#m9L-s~7RxbMHVL~umXAIYE2#jW2!=~J2>EWisa!rH4 zemDS5zA5_ZEV#Dw)H25zJxoc?EDTWZUD8tktX%E z#L3BLm;>*JrczaL^anV6#BepL?O;3vTf23}!$SEdz~}Q{=`=~$MOD3=AP&oGf=#}u z+)p!N>^xa8?>A4vm|!O}Fe_;&WGJ7s%;}LwR6uF#V!h{Ikt97SU8Fplp^9B6Z9CS2 z;IBq{IFrnM`kDF029x<1b4TT66jKYpB{Py%i}k}0C${Xpc6j_DsBOd)WccoZ&G2JN zxjvX#9*nHcjA8*VZ#0~Y!Ahd4@!L=?XF$x-a$zGWGRY#Io_!0g<~}#8Ue7GK5x9Kb@L{2$ zvd2vWP;~=0I=t!;hN-A}Rv?2bmqA3~FMkYfg@@n6`2ZTMd}5GDYNiSei`{QDc@vYG zhxD7#ll`PQQOy*MY9P}vKZO$i*r`q*pA#Td13%Ze&)Zu;uwJCz;f7av(COQgAmqch`xV*!ah=1ZZ456p7T~6o01BJ z#}URw^v&QvRIVea2|@N~w=?i}Ee`_^GPdY+E>C|fK1{em;A$N@OxlK{_nxEA{d!A> z^A69fj+&> zIvK6ku2DuwetN0$3FNYkWiP_JhrRPPsR`Q`M|6k`ZOC6P zksd_S*jU4nY{2%OB7r4HKo6?#U_n413}0Q+*%qa^;Wr-|k8M|;fbabK;(g;q)6C=| z5&McmGc5wuWWt)uHsxIKs-0R?V&3Gz&PC$7Ll>@D#0=m zCf*R!(|p@`;PgD7ETodLU8TL*ri&x$i@?VvKfP)g6<{S3Sl`)uUz8s@p^Ltg|NNK8 zK0O5^LgmY^Tkd{M%5?a}i#8u@CHP4$r0=aymQRf0d4{7|(D16}&_Mhiv?IU|nPO{kIhObS|(c!mzaGuw! 
zfuiZAjP`HVoqB9k^Zw2)3v_dz*QhkAj*_5u^uh+J<^9~{3sBnAU!X4$<)^zFf<~-Q zRJ{gBKHouAxzd&sHF7#c`c%nU3bcI;%d15PXm_%QUa)tdw^3&R;NkF-=>xXIiO_oX zk$GRDIBqB!sc1ujd0gID!_n+Qk{0D(5HlJO^V7k0iqqSQ3zbkuY6dH9P~f$XRFy4t zCJ{h%^iX8z)he(SyqRM=QE2p>URz!lY^TkMFQ1?bYQhiS+W}%3n)$k(P!xnYtpd(c@JQ^TnG1-3Ih~}Vil#j3$f~7=pUlM3 zX;40XA{np8`k~o2uIja6q@oaDMbfl5nYmJo>LLA0KSvBEpZ${Ia{$FUIc1&~l-{0B z?;5N+gCC9Fm*AxPkI*A!DAtn60a%ih$z;U$lOxdpa&V~S%o6tK(1A7hpvv~c24Wk% zb+6LRi}h?TW)z3%R>Qbfw>OccU4O6L67mS&FWayOY}W8?T{-A_vz||nIn)z{&WXL% zoYF|C1W#|1id1ALvGTAfVr@j%+aJm~X`QPM?J#c32>w=;0eP$JC0Z>j)V_SK9?E%B z44ehFpM+)SdP!OsnmnnyGx4_TY&4rgEcH2RE0?gmH_L%m3`_uH@y917Lkg%Sr-2{}C92B>9r7r8s@F9<&*zgj<5J_7kj} zc+@=O?nUbl#^T@Bv_U%AVNmY#b?Mg$e9OtXn}ealo&Ba*TN16ftVhz6n|qEVx7-=CD!E5RwZ)eeGlp3-Swm9eHzcso}w+Jxqb*+WvhbcfSDskxP?a}R8* z6Y0HA=Nbsa2JWMU`?t8h@cLyY`kdyA#J>g$V12OQcre70nJP9{$zzK60U}?qYox~y zz(gRTz)=1Q>43?kG}h)GE4Ig5>{~Wq>FC$$83N$y9V;CYb`8bG7fSOr>k^Cx{dN$) z#ShBo1JWTSVZT@_(ie!WQlk-M=F%{#Q?iNLtBXt2H>gKWroi1kscbH#k{2yUNvyFl zP7A0twY41&(08p!kVfq+JEvB+m&6h#b+;$FK&(OUxBH659N(Z={MMR5kR>$w3s43L z_H;@xRXFV`^*q$ppcSXciAv}oB;#36u|hMliBXz9zhVihl}N?fSD3NkhUX5wT@rTD z$j~BY8=>E#5UwVdLQSlGZhoCNSPGEw1GFqR*Gpc!L5fkSODJpkw>ay&P*H?tJGlcT zXhS2|)AUbQPrV$e2nt$!FVgpc+Xh|Qte*{2w|Qi zhIl(UF1iBP6N4a1WqO7k=c-XOj+PDZOGwouAFvMzt_T)9SQ+2{WX$|hpIfR2&&Xa~ zzH+758(ZI3VRV}$E`61ZMMs22HuP1XsL8yY4_B0_S#uTubuz22%ZGRo+;Up;CDA%07E&3MPMMI5{+ZiW|ArztC5ne_x$OAp;Uk$VB`+&_UGYwcw-4 zta?z7`WKEN$$}mE#LC zl*koOyXNe%Z5hpjt(D7!Te7PEImlh8ra>bU62M$GT##ap%R7CtU|M6*ICiO@{-KOl zoR@gq*8KY?bCtRjPIriCP&7Ygh}w(4KWI{5@XNoNV;2we`_w@0Y2RWW=}fMu*3>6^ zkzlU==9rcot(37-w9-CuB7(h9KMm08h&u5885wAEs+^Vea zl=Hg8%cP1<{)LoPXdcT%=9@4pS(^HV>ej;Pl=T@D?034kN@EqKF$Y~3A%zplzx4q` z)%|I8j`tFZx{zma#t6?^ z^eU-V;8Ol4A;A84&E~3yLDulo$ls8m*BFWk40dDJ-Z=6vq(5=28$*FT0nS>A62TMP zAr+SO&@b_aK@^z3q$nVs0=7aB(NW*wq7rD=I-?$SYk| zC|)npCxO-JPPXTPS$G(t0UH*ejw{$@tE4TH>IWdv^h$iaF*sb^+;-*R6`#_ZJjp3%aZ8E-&B&qrU9f8F_7*MKdqWvCp zZa$}Q$!0hcgUQLgKguGzevGiWx*k#F=gY)l4D`Y49BG>w-9LMXn{m+r1|4w6vw!aJ 
z@@;Rd02{I6fr`QJl(_?rE`baY$Q3f#Epp#kD_Bn$0Uk@b$2tJ3GhWrPY3OdwJVsY6 zNRrViP$xoR3q2Q{pLd_a|ERB|>}})2=4BxOw^WwZ7+T_z@3HoSr>i~Z7_w{wub58_ z9wh(ub70ST|ggf1ez{j2V0W zWNU4n$^o_>Q^KE85}jL#e`De>zVz1dx(MCypU*yyujq~i|0Mj%O)e3=FE7VI8EgV$ zrs#;-26PK0GjBn{oUV?W9>J{Kr$=n?st)|-3)BoO=HzXF@Kpcxa|VpLX}ylqV_u_` z@ga$%#Uq4U5|1dhCv8<5Gp4qZ00987gMhPc(zZywjqXC|Lr{DvLj#ZEXblp3joX|d zA$VIgO*7njFkAzSREN**p+HvcHn= zvo>LM@~1s!24EPZbQ+h8!18HhZkMX+kW}MP^2}= z&GjSm4CH)wKZK)0w9O!df0oOc162cssY@;dewEWW>txn#m@4{sk=kT^aU_C_?+Ppr z=+BE4=kaOYMThZ;tS6Xla_X*P>YS;y>*A7gk@EfdevHA&wIB9)SS!2&v~?=h3J`uI z;|7D6I@k@=ZI`%|IWX+e(&pRotV$L(tmJ6+P#Mx(R+eydYf?q! zAR|Z(#e754mR4FzyEH;Zr!Cp37h$Zn=&{;~GvCd43)tBc(>1AYMjM9^%0dvk`WX^x zY5n)iN(tfTbHCQ-*W#`G~*dr{qjq7l?Tu3MM^g6-?B_n zjNBVGtKxm|LQ>-F?|t#2b{`eea(-{!e)+G`j{LAg{N))JjZDikc1oyU^HaqKb(`qx z-%xjL^a^YoE7eV9HJJVulFhmx+<)D+amgRk7uBm*Sj~n_VHx!+OZNL?kxtzWHMX*I zcEfv<)ppaY9L>WnwkNaLn3dS)X;x$x%~QC8qcfRR z1_WbmEq598np@b*$@ngfoFeHJrV6Xb%-N6@$N@o>`rb7PNr@9`1+bn&(1xa<9fu>7 ztj?Bl0QLFtVmH6;$3_?vZr1PmRN|aHbgy-1Ye-eWhKy$O__)mnmtC$qka{&1<-#vo zU?-U)et*(}WI7&R)~|gGTdE6)6Wv4@B%dC+Qt#_V>c{wAto3Lt#Tsia_r7l~tNg{t z^oGFr%rVxjm50L4S;#qH)s9r{l*||1lz~Zgmbj!nAa7uIkoY&hvGHVc$Mi8FFq)IK z(z}GkJtRJ=Vf@b5rn&-TJ{4)Q?d!mj>oCWPs6${yCO?aXsLkbyw5`6AV#o4t3CWgz zy_#|^f6deb8>8ox{Hw+%J%>gv=@gqiNbN;mdn7IQLWp{{D~a``b*F_*VIsv`)?;&Q zPXL0b6bU^~nR*aylt-arh{eBbbwFfMLA5ZkT+FK9t`tWnVyixD2#EhZiq-j~acuqk zsqkXQCs5a4k)avo{)*Oh%y8cYhhZ+m*L3X63nA|8>woq&9hT1xJ(NzFI%*q|HG352 zdr%%F$GaAu?b=vZqTmPswmV3UNrkty{mxuV2cYdAWTO(Xi8lE%MU=Bb{A?bEaBAE8_V1(Eo zd!IbO#F^7ywI2E7R#m|>DB^EFz_b;w+Jm@Qs8nlBF1GuSJsWYdB@t>BJ@=-_13K2a z^ADH+QPIc6984~A#yuLB$7{NA^gN8%`F=Z5#K}J3?8CR7M4WonkjH@h`jfzA`vcTM zQ7(h+9mi|@=3GD|`Zywnrh2|V7(e(c8}dN=^V;4PWx%9c=^^X`rN+U^?0~ z){|ez(~je&uMh9g{$wuNVs3Y822+aslFTefCZ+1`W%uEYK$G@eRx?K6yyL~QGgrig zZ(OQB18+3J8$vR^`nXXy3C`|`8sPIeiO6~EB&}6F9>Qik%X+)}b!x?^$z(RJs1$!! 
za!`KQ5LlnHz`6cXnuQp=R3b*2J+^xoL8=m1L#^r-egdQm=TJ{|$89n^cF>0q3?Z)0u61ky!HxsWIgYFWnvHSB7Vs0q<0+;g#W;os) zCqTaN@Qs|ICE>)Hoae;t?oSwWjdur|&0FKEcuWiBQ2Cu6-hsYA?Npk59D$U{(YAq@ z6C9*({(L(S*;gn{j4!E*|;kDvaO?dZ{1(_)5G z#guB%mRl_cnwPLP-()-IVI)sRFVd{lLGQ?)0yZ)bN$h0w4JY0xGkPwzCmvL^*ZW&E zhLVRQ=Y|?aRCz|C+z8mSI5tkU8`m=}OsmXuO%5GQAPQFPp~bdZTHFXH1w2FcXCV(- zFJ=H_untITf{`%=8T}=JACBXCUbyLd7+ZIE6G3vZ4F;ZG5M^_;K~Q^m-_}k&z;(KIqdi<(ug=?TnHLB5%%39; zJHF{BWWZl_^T>pbm4Emwe*uj2wc}?Vuf$?GXg*gg+TBDrCF;SGj{N-9v5`CAS;!D} zJbn2)^$O{CimsQ=WK!nTXEs>a8 z6H(6CaSUH32i+dQ2}92k(h6hpcji;N<8X z(6`vljeK`WpA1ZT@cHp*&2zrN!f{#Wl?1>aVEiad>8W)F0*EX@_- z5Q>?}*E}UqD%g_7l@QsVU_xM$0G1Xn=ebt)bjRY=(hn1{EKI)^T8P$vQZI;16OF}0 zJlbZZ?cWN?)FHgw+pcs`xSa-z4<(iD*bXU61$iA6MQE$>nma)^kkBLK1GMu)EV`tr zsxzowv5MzXs)NkCcT=zSD~z9EeM#=9+FyX5X9|+>1OL!eej0_|N2h|(T-ys9saM`T zNT2Od8n1t0#b=Ws$=!+Oe?)oH)t70i{n75q#)nldjq>m0Xl`779A+L2q0QYgwBIy1 zWK3?%QTc^Nly^?l_OrU11%6r%Zc4=Z*F8n5pZNT;b6mPhDTmq4v`lKQ8@=0bQjK^h z-iWyq2)W$|Dhji_-wM~1r2h9(`f&0gQj`_g6*^+A>fsDu^`q&!XTEKLI>D>j*K^kJ zvl!a*6s-toEy?{XO7pH}aLf_VG83O>+RYc<&HJ`Ydf2}-d{u@^sT9aF+AXN)b;<36 z>kFl(<76@#l&J4k3o1N?DQ^q^&G3Kc{$wk7@stnRom;(|8&<)K!uK)l0Ds^8l+;tP z>@$xOy?!((u#F=eM37-&`R2qT5~%vab3UER86Mg zCTpYTo9l&h5&b|;S_xM2u_P3wp3GZjH+^5ycjx5qkF66-y8#)L6K*++y?EHj5SnVz zgzSsY->F{L{A+FuxT`%D6Q1Y0NX%nW+YnQ*1z($S_?xSnQ1+h6J&ta%&A!@u$P@oU zrGhAU`+WyZBpk4njvi@8$qu76P8!6h9V_Glbx8w0)E3Wd=CpX8F&kpg-I_eqV>+BlAf)u+%eMW1Kdbw!siF@~v}L+Z&)%K%;OJ#1E>o+uOeVhUEDfTm zR?_R#lYPiq-YjpbrK&VZ>4tXJl6CmrCZeoQ&E0k__gt<2MRuKXo`+}gT-VlAbmNOUx~X6trLy+sVBSj>`b}a1;HKA>7L%Ee1}K z1(k(gWdGVQ!H-ET8ysR+D4CW*)4X`%zzW%2gR{4B<3$mx&@3ePU||88!2n9pE|be_ zS2`?(SnT!n0ldRgtJ9sy3jE?@Pwhj*8L_9Ty&Qrt{rzYG^)!s)QkrPj@lq~eYg@`M zuyqabT-Gd7njpEr{-vRAv#2?Ky~jNH`+(6}@p3X=B diff --git a/docs/images/nfcore-tools_logo_light.png b/docs/images/nfcore-tools_logo_light.png deleted file mode 100644 index cc4ccea1cb12e60311b513bc7de0c339ebd591cb..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 64856 
zcmeEt`9IX_`~RRQS?ZKSWhn*~p=96c5GGq9OZHNfecuPCQz(&w!1jG0qM))u18{N;szxKLnntC7*Z0~7*=;B1!jv^4p5Gb_^hQ29NgTYTSd@O|5 zS3HI44fR<@BwC_WweNAg^K`t?ay|Ua^`zuS;o*5X;p5j0nLR_3TdTw-*C$<<{Vk$; z9`%au>-b1%=CCl=x~!Jp!Br{RFpzjKp!3X+Tb;*QRKss@Kb){h^c+@seV?p-3zMBT zv9)Zlu({<`v3Pc z_~QTk@G~L)&kz6ShyTBGp!b^mFYH1%8g&}PE+NMRdy{Rgwkaa9QvrRQY2HJz)6`6H z9;J$!8p?T$p0J;N*Ye!J#ykH8M)iUCxVX5E!@pK|Rzc1t45Gxe-2E^GvsRWhY(8G+ zqQw!LH!;zIl^)J$8$X^IcCItbD!;xEnF(K*M&+X@JSfW~(%%?AjAD}I{FvT)!b;+< zT`3RVvHyDV#tr{F?pFSzX|tN{P8k1QHN6RI-9sVD@-lUEm%l0Eg`Uqb{CpIznVgoC zqUmmd=@Irb{U+;BnnF@S4JpEd=f8=bxA|}L4A?vsm9JMY?xEj%PSrz{(B9T6zCrD{ z5aNCa{cB^cli-wq*o{Dpv7Lu_ua|VKlQa68K&C3~Q72#9XybNMzba}b4=Acza~8q2n+%iDoFDn0jDk39X?^7A)!^mJ;E z5ekGVYdquWg)k>J@LX5^<&$Ub>jptvS20#izP!}h(}bdq;~{4o<`Z~-?Z6?eBvmOx zsE#!^me;!Al9p_BB9-oh+Bc@3zYqDCn3hx{MhJ+VI+>dJOaT*E;koA-_dUK}Uzf&# zH;{fF7_10)<{MQM8t=)+Bc#9Hzz?%a`@_R0){SISt$Kn@K8L}>h6mZ|Sq!BZKB@H20kftU}^PiE` z)c*Xdd@3S@t0+sw_uO~aLtzgUG2d;xQ1Q*1H#0qHdV%)wP1#8svyWz%C}A74L_x?B3pf9H&Y@2X=|G$}7iYO?E5Lr+QZ zunjfr@njOx!!AI9VRd9th^kl#?3g$t5Dxfn?H4g>K($Nt+fHaOY#hv@QlJIXl)td!4Cw33#odkl6Y zV>S|OhL=y33;S(CMLA9S@}2)++OhBFrXf0zRg_T_+T~HTPwd7xJV6cPBJX{fB~&hK zs$Fc?B(tfBkrDJu$X3Q1{1zTNRk(@T;z!+JtsYJ#VQFEI95Bp+1d)p+`Gk3TG-5Wg zkhB!>_0%li8!7wS)(5l@KDF!}dm%NoRf{a39g|I_D;7#><0*1`M%3kp01AB_Dq!Zg z8ht}kcgMfVhs)|`f(tl+ixNr3KYnoDKRVH}!H24qCWtT&%xd}zW+opB3MoDNJ0-8f zNvx7d#yy3T+j3B!o%L;!;b>EGDQXB~+h}0EX^k<%)ZBpGVwTz%Bc=Z{6LNVVmQ)Zs z#qHX&f?Rw4S8Pz4H6Vlw2CL`ph1rxV>T3%^&1h1dBkPo8>RjJw|7HE<#P4E!4_OE` zO$@0HI!7pPZx!b@3)8f7f(6Vl`(n8hAxh@*>=H@8QQ)g9oK9SqBFr%3t$}fQ3U0|& zMTUI5{BLzyt1e{`H?CqHGJTzP#T38;zV<;^=nNbG6N-_k!KrUQDx)Z|AC(bG|5a8Z zB*H@M#uON%NKm+sWqkHO`)aB@we3grs9;DMV?Q{%PqLj~`hASTUIF*q`ZO5WR)wVFI`G?Zxevi{$Td5LndKR;aC(U=|9wR~L8w;+zr-%IHsbY> zUgGTk{6DWrVb zYX7qj`>+ae$t5+}$|T_!B3=Erhn`P}k1ai*^PzUqmU{4eDXuat%oMLHRxej$e~5m@ z@ADVp?D3O)y6!#xyXd$s{yrf~zYM$Yrd~^{xM%^*VgG&MleV6Y&|SUNwG!INi~rl; z<-XXdqpn!99)UghSN}nCVm|NOx&~&TmiGceJ?{6R>laTmSZ>pxJbelcMsk4R0F=Ar(?q*%!}BhZw%+9K`8y{Yh!MT%%c;Bib&k(wxLRjmW=N{ro 
zoje;XgQ^~##P@&C)S#ViS*=Lu%Jg6vf7wA7B1zehn!53h9Ut=hiFVdZ2A1)BWO+Or zT}sR*gJqqhOx-8b1SCR0`&Ue?BhO8gDxoY*R=fY z+Cyn|_k)xr7Y`wB{C-T)JdQ-^IL_#4Kt|xti;{O2Uif`>)vlM+z~WAes&vp2#~e;> zaP#^zhn)Ghwj{nES?XIu)mFnEPiGi7&MHYgMRFdBqLYyRcM0|3NrSwRzt{zDC$Q16 z*lJ*$9KIG@s!K*lv(_p8gm-n5bjuuJKPNIbLluNw9-=Anc+g>>{ftA1)Liqyomg7G z0lZGlRAqUVOzOE5hF~nSdqkDH#ahTn%b<|fSG~?U$lf?xD}R^!j=>M6H8HyWF6y2} zPGPZ%iKNdTp7uW4JWgAQE8vm;X_WJc)Enn#$({*pabQ-s4krlc*`UTUP?m@IrR(4uk6XT&bDN%A5aA~}3fQZ}+Rd6c3 z*IAG-N{$P(j4Q>Srfr2tpV8=0h{!#~3-AoOv!u9tWom_0YBxR+7|^?x3!H1(U)HeMcJvM;GiZDK%TC8~?<`}ApK9*l&Oz?(AV;afU?!7R7^1E3 zn(zjAZ>L6+)k_BZ;z(Js8zvb4U#rVK@}KTN_B?4j^DOxi6XO26e;wx5>Meq@OeH16 zPKhP&D9lsS_dDnqJvA_TPayL?T-&Eo4MaN$Vsh~LOFAw$sP98vj^)e3erB(Ix)0Ed zcRcmT-^mAK97kIoOzJos^3BBIn=oowuyWRsVNp-Q8QI%4?47^vYmBj55kB(7-5G-Jw=*jed)*MV}zlKa?!7quxNI9Dqv5~0*qxF{ z-|ays&_rj1kTx$F^uK@^zBGGr$N8@D5U_4!fjHEh%d}?#HzMqS1VBYf&^KYut?s3z z#x(Dl-G0}fkFA#VYCT#)Cajcq(Xx9}P9Gs}$ynv!cB`zU=s>7GEmrr*<+Gsc;!_6q z1=Fl1&esa#1l?YLx5t#zFs9X%$7g7LW1T&4gw?plYc~G0M)WlGL4fi~%|d=l{ONR0 z(ExtJ#m(uPIko8AUgyCi5<6xC?H?P${GQ>p{S!2bzAysv+#gde=;uWi-SN!d&Z0cl z=Vxa<6L=w~xspnfYZmT}S`g$EU~=c)X2)i+nZgjfLi{{7BR9A9V@M?IiAzae66wR{ zbVBUFuw%J$iY49n2)JM4(tQT$^3x(BBAJp1iSJ3%-4{`4VM1nRNn{A0Wy;eaWAc95 zmX5rTQxA~AmcS{swE)2-o_n~AHzPLsJI(%{&@RtXp}uWD?G!-#W|yZ}HlXQ(*l93tqTy}~zd~*$CAgPi|Hx9G?WY5}M z02i&|#Gzt|tMhtL2iunNy9`lKjcFtdl5U(c0=}qQSucG4Onn{mfpPuC~ zUODq^;@FC~c)^rubE~#vvhN#etKRV16JtlmZIYdM@X)Bpn0CtGAJ@B}v82Whya624 zAWNK=gJR5mxMhoFA9d`R9<}|+y@96bmehO5?J{6J#mA%^uw=C3g0&=Yhgqk{lD6Pl zA2MNCrS_F=zGQJRW^*O@TbhT;+S9Ov8I?CaYg*B%^XJm?+K0UD#yYZ6KNnk=2?@=p zc=mdfEVeY#XB$fMFMFYgxxJ-=GENxkH(mxUP$i=}qjnpYz~jsE$`XWx{Ko z{su~~zYEKQH!jQXa{LphLJz|!xE7Bz&XW0HhkW@%MrHfMT?G}tx!TNXzI;CFJ5KS| z+d?rqica4@b;u}fj(?1w;vxQs=2i$^nPv}O^2q1a?fY1*LTE(|m4YKGJh`lI0QgB5 zLd7Q`gSl>EmtO3M%k!8F{Q_tbt)Q?GgUEKEQ{K}&yDmX?P&-6cwO7Pf5_I02N$U;D z^>}L)h~66K!L}xBeQR1XE4$^_To%#xacxYw<_$IFVFHr~HRaRStq6wUxxh^9K{nwv zGSbBg62eHHrLdO9f=R$peChd;#blkTAnf=uz@z{+E 
z09mH;dkVd2@B;WHFHWdCk-9TsY`B4HF0mG@Y0w_n%lfxep=Py_`>pF8HAic zI5>Dzt5K|fzC3L9WK7<5F*_$RAK>TKRTAWIyYol#>f`FxkO*AF7vCO4Eh?p$q_x59cLmsMlbT+}V zaI|PtAk*V&lNx5bTV?I&R}u~D-glvDnrJQ!d9;*d={1AV_H|(ab9o^1DGx zEg*8wH=cWZ&jMWl(Bb3=VVJ2CsbSv&R{t)jDfS@mUP+~{)vZwNT@_+ChG}txxpgN5 zoEUkoKQHx6+acPT(tX;P1!#WopOG#Ay=mGdgRh0xa7Yzn`F)du8^WH4JELXyeXy9XZNETOysflQOlCGBF*;iJnGrL6%1H`;Ol5>#tPMvU^qdFg6f+ zJ15{3Uw%mDwl9BEHY@WzC}z+7&<^JkfyR=ThRTwkPyL*}H=xoj`;$p= zzvcr(!zV$+TpgsJOE5~&Iu_a!B5G-Szdsm3JB-9Fv?8G!dg;0Im|<{;?oNIT>Mw_u zc)4N9LGY&l#N!Pr@+CYtT`7<%?rS-11^B9A3X|D zz`k>awRwQ!@Zpjy&@Rq`BKE}8fF_hR1+je_VFF#Pw4WYkP`_+9>`NqEb*gHg1zKK# z9$UEbB;f-%d{2K8i4zlOMLs6c2Alex9lj=y7xD?ln8j|GV)T%Ht{_O8$oT_~^dpxb zh6WP}2HLBBFTy$k4vuWXZp^LOJN}+>so%B{$y?m^&t!i3t`;ZptDkukl%4!I;I-4amD{4_C|db zZO)L6QpS)3z?ueRT_Op~KDooYukNekjPxi;Afr7!vZ@W`8FH7KQEehTFy}6Xhdg}Bj%BxLhz^5<=~ zrJ&XZ1!n?b)vw=MrncjT`pUz!c7_Mm_2vn-!H_(%@uWNm`l$j4BYD3>1G>f&!KDEh zuXthGF+96Nj(Oc46AUNoKh0wc3yq*^&k*k3OQ%^>h~DYB_{L#K11?8(IF=tl4VlX` zMOG$&kXWFZlMd!&o2S^Ck@w$&+a4-RQxde8 zhGZVKLiQTS?|R%5$A%c8!MMTUp3#~rR4ufb%a_T=gv~&9CX$k42Q1}xh5@QxJ5-Se zO<11i9!(6?i7+79&@ktMc#3qHQhSn3jY# zn()HALZ!onAgu|0NiBT3VTe(OOFYa_MqYyO+Igr4F>MH!VT0Sdb_l2_5AA)BkRplz zY67NS#Pi%uH)8<~6fiX}J=utEmR9nJ$b(Slx}(J%bj-eu-&-8ZJ$G2ML6xQA zAn$*S1b*Nrux5H7vK9w{fGcQ-XFC?hb{WqE`jYR|FDtK<7QdrH5269ZQVSZR5JsC% zYD*y4oDl33NA7(pbp}7Lf=ANz3oMdIKMMhB_~RphsVuLXpoz@ncSX`BrMlA2&3=Le zr=R#GVf5O_Xw@XE`ka;gE+ojMDkPy4EYh2}2^PujSTtg^Dwjxl`x8^S*#Bo-a)~MA z>X3;%V(y9P{#itTa%OHjdaY7hm6%u0FA6rueZa!(z z55fR4_!W(|Y)7QOjkW(ASX(RZ05^mIM!wMa#KRYB6NL2nLt0$|L~%@$H13UkWcF=r z`R6Sb*U{lvTj&`WWK&2m$Hbo+Hj_uVHq@qrle~7EG{CIF^po4H9ib5MAw#`nF)#2a zskzw?mkZ`ZT3m&w({4j*Y3f&}v`ym3{rX>ST8FkF4wX+EYy#6Da?BGl^l2ksF*uF_ zSf~FIiseqVB)Xk7I-U)Z3xPLz)#r(2_XdOp+Q|V>M&R-JqC5!o-U^;CyNQJ96Fkol z0ui+IH8F;9L=Cclw!91!P9v0{6Ux$3o=Kw61;|qUDTx1^F2F78u$?LlqwQc#!YOyj z3wao0qG>yrwC#IMe%(Q5{p2e7gCJtkB>*DP;%-TMG&e^bSEfYxsr6E4u8>&@`vA)k zxdcFVEn&Lu2qsQM&ZGW+Xv1=NzHkVxy8(U~=QJ_fFaS@1l%flfx{Z7aNx5?ikptdu 
z{Iz(pIxZe5Lz~Z)10m7UbOc0FEs_(8Gq;xm5{Y)7VO{DbvU5p+_xE>uE!9gj!Iaau z%TFIXWBQcl8QS$m&d-|+{G1^WoC~bS1nb3WC$J$>;x_+XN(!O`AFjVa!rEXG5`K;b zLkucjdLoFq=2sw)uk#>uh1rhcpfy5-0i{s0rF|25=m!O-h2=Vit8$brH`j`EeQw`? zL6`I+b)0m}!FGYHzOt7qDQX zIS6n~695KoovaVSl!6c;GgU4mm$Y?s0f=D8&_)T~62QOo>)(U|a=<8| zmh<}3Vo5buv9oOvSK7;t4{f@qTbfzW%O{eaBbhLPRl$D5)gGw(des^iu6^*W01VD= zV`SCyCXV!F^g(CP^s5eD;YpQ(DVV+nE2t1WsC?LjMo#~>30v%zN7F=bEEDaTetXht zD1o#E_J1y^GsUSdbxb#c*pR9T1iLgE)cIhl2K;)5od|btFs`W=y+@_Ni2Go$G z@Q{h=CgX5+t#?(wO8mjy&(d?s1W;^(en=qu=JwRZH31Ya4A+#T-}62FOj(4Ize6K}@W6YZr^?Dem#2jOqCXeRmww! zGoXHbb(q>X%pi-d^xzQ?UExb;e0Y9E7+$IvUKF2wG*%JQ^{QuCsPZgsEN-9sivbU` z^o-vqspl3owq}(i0*$Rkr}*|_c^%3<0OR+;sp0(+>IjV)o+Gz$AOr8Yi18q}9&GBb zhCVk~4W$D)%R_z?rKpk>Y~a!^-}tp}xLZErW@WFlQsU52v7F)kHR6QLkLPa`e7PWu zP*($;n`-Gse6jdZF{fFHdOy&oao;`%FPORU1nYRZVCpQF<}Y*}i+P1BV@o7}St8x_r>2-9wNP;M8 zcD9UX^E6p$%+jaBD+&%Za`9O#c7)A0(g;|qKb}NcWL6&jTBlfN|LX0O_N>=8LS}~s zEG>-LxD6U{;Q6zLS7gq*oU)Xj)4UHIuOt8#v3%G9OgVIN1CN5DR`a*hn4WcMhgXDB zET3mhL~RFhA}g0OW>3rX=Z(1R8A>B*u+jHze?P<-rw@NK&kIl&y4o0 z%LA25?zFbbb0q!k(@9RF=!8@GnzM3FN?D7!<#~RA`YxsQ0HN@LgA74Kd!kPf;JS7( z{bOMTc9-*QcbLo2OA#@Kh`ezN@SyqA0S*o(*?$tUfu^W(7FFBZ2>=wKiV0x*H62-`5Fclu*L zA~Ipi-Mq2=6WV6m{YiUEZ;SypCJhiu0!L}LK>g?tkyI=$n*VCQQ_2pQKnKvZ`dcf( zW!^7Wh9_W1bPC5%$)`mLLn%YIqI6mGFsa$VK&*8n>!rELxi1ZUF(i)7X}Hj`zyj*c{HII61u=Y<{rl8{jrhqkAEU5q=%DQdXOIh0xDvYHV8Foh+13dBI$3Yd4~3b%RKPN&QF6obt$IcIBy*HauFFq|vp$<%f`KJ5a8XFyi<8}qXRuV}*ahZQ{g zB#I4Eenr^N1*2yg6?F<4vjkE^Y?n-RvKCWFXJJauev8uSfw0=yUMsh4+Z)tnp0TtN zhyM5PYvE0}LBHz<(y1Rt%#K}6GXFh~JA5SnU z(4kC|If7CaB`fZtoKX}kjSw>H4J{xGWQ8v&vsvc129b3({jj$U9dAK)8^_krX6J!# zIxW_rTP7Mp)wT=zd62oUF0=NxDXnf+`wUUv71&SpDi__ySdKB&|8%(&Ba<$!0N(do?Y0_U~$B}&=QlWP~%Hr~FH$qctY?fm)58_koMPp*h( zJn3j+J$KN@k#?RE6iF6U1l#d{Cx%pb1cTHP~un?rQDjRQ5zSi@)HkbH|YsJFE} z%IdEucy<51w_zb#xgMV1E)d6-W~&UlNK=dTyp9)j12D5bqpWdPHZl%RmduPR=4A;e0bB0cAG9A(?*V0)a!t%S*Pumi8vLLfTp)urZ-phYc`kn znQgB;!M50G<(_T&5zyFZTCoXVP2ukAo;;Y=wPf?8DSysHM5M?H_ 
zM?Wme+|<<6)Qt}@hB3?{hFEjUbOat=K2*|1U#4c`%Hy{-#+zE$7d#W!Jx0&BJ4!lA zfa!-QG4}*ZK9e$>O|?5TBlv}c?B5%;0m^F+?`B+!rxzE*;;)*`YcRhV4_Pc=nV4M|q$8`7S9o({=o;ipR}!KWvPa>3ogeEH1k6m9Ibd z*&c6fMz6k4v9uNlNMFG7E4_Rd&GH2dKT9!=t9!6PxVA|wDCi6ghLEN0zV&88OHD1q zXW-+DVY*u(O|nr_*!s|ws&Z<�ev`Q}H7y#R1zKkC5n?0_OP7^FqWWeXhX0t0pNK z(bt$TL*ehNPtM(;VA@5R9zN!e8~K<~cX3NnUF1p*`5e(DU1F8lRX-)8KbL`E|L`3V zNx2$Zf1S7Do%}yd%DH81m#>ET4sG1bNkca-B!p$@$27Ju`3?2uL@BKov2V<7mu!_y zZ{zyp_2QITSG-eP=P-{N#gu#(3@bdT4+KZJNda3|h8Nf=HS=!63yn&_8xd=3Jkhf$ z!}BGTsS9Rf-o-Z?Q?|cG3CC|q^rGJn>M0i8LCYqr+E3?cMnhr-$;c_-;y3nImk_jg z*SB>)9>F^Z*<}?lDtFvDC)3w(;J|^ymifdvBjSktDB*-0?<&&u_8~@@7`@G>U0<++ z9+SbA7tkuQpQRryewLjRBRYX|j#Qk}?Z|6*YO7K~og$D#s)y)BWmu8L?D||OjOHli z(rd40>4_~TSlT+@@R3Vwl4m533X}aO_w!RFZu2~QpnL7?*4I%LpD*2+wLVo|@%I8{ zzZ*2>_N_CqtE}T$qqCAa_KGgmtQr5qR1iS0X_i)@emeG`q0wmFbyr~nZu(wbqnm8n zm>_weO@nuHR=8~I#88`0`PS5U9d(wcUZTt7AX?2|`@=qRC83w>Mlt@JqGP!z*B~9k zLWkYhn<%5xrfan)FuTkCh{hk_05N^8n#jP+e{_`}<+~B3W?CiNuAua}a_MTdYyUEu zusJz*oM-`=N*{Piw?l43yLb=$GNYte%b+5I@-V7dC>B1^m zR*$`EP?Yr|V3rCL9eeM`ru`w7D!cmZMv3U8-`dIMVpnov@J7;{b@x9^3m-Z3Y{Z&* zD_zX0=I>)SdOkw+&z36W$kA!;9RD64IRcJ9N)qO^ytsAe+9S#M%>(p0L@&TU7Z<6d zXj3LQe0J3d7TseiYm0wOit-x`{PWm{J|RZs<&$+&Hgo2h z5yoyB+HQt44OJ{z%<^Nov&O3L_s`N7xT*-x6tM{ij1IE&RK^F;>C|9s3ZaVQ%s1ZD z&nS+C*X#c67*TD{>-$e&9F_U?(pP^n73=qY;t~6n@8+=ca8aLp%dr}3!iDJCk?<^K z&vypzO3_=}Gj~EnkD5>38d&H~S$*Q#8lks$jjwQi7#*)n;Y=>q4V;``tYFUD_J8e# zh|!nSX8$YmI;3~P|A88khWk?zH-)?If|Hk_xY3dxFKoZ2t zJhyn*p%TVmg-uCC^US3grB{BCe;gjJc~y-@ArHqhvcIIv>?>x{3Ka?IQMYkLr(_(> zW9Yhih|wXG9m5&4$o+&R?gWb^T_Edb8q`Plm^+Gd%I_1>MvGg_x>l(|hG zXL8v{RZZI(QAKaWHr5s{+1W7^G~V*hY!i97m?+bvfBkF?1U{OvO;CKD`v$kh#Mp6S zW}dnS&g=07uy2cfao?kBg`l52EM{x5^{qZ9WVy(?lQ9ObhGymV&M6W5@vZoDNTGn5;{NXx zX<|J~8H=}B&gYFdI$k|n(j)EUEB-F--tzpx?lX!kjav~2haKue-^}@3(<2`l9v*%V zpct`r=&rGCgdyq>V-|xIQ&eFazpBmQxvNAkeJ+~rNaF6(0Q}arT=aY7^=HiHH|9($ z2FqKi7a4zW5&2$7`1++}teA$yJok{Vzq)`Pmy%Nml3Kg-F zXgU?f+Q^T}S6DR=!9a6CFTM63I1qE;!8>bUFzl|a`*)PGkDYY|aNoPCe2S{MV#&TC 
z!F=~d-rdNg6D;BHXbe@$z9Ddm+VuDVjk-}hr>I}r58#I@|Hf&`?C6on@5rDQ;BtN* zCm#GK9DZNG)n!xr>vw+e68-Re^a17vyB)GrmOgb32YfBAX7Z}B^qsjdl3ZJRYm~<- zu>14DocgGES;E)15;iXQOAcTgE-RVS%WN{_ViKsrj|B?;TuuS3;|dS!u*jwlru ztBk1E6!us{JY>%V92A6y^0s)NzF5~my5ZE6)b0sJz-@?W8pFoHx$16HHPOny-p6#g{Jl;f&|&AJU;;%xQ`;X{=fW1tN4U72f4 zG2cMw-+5+3LoqX^{p5EUUI>9<26SbY{c>rF%o(YY8`tmLVq6s@K1cKBOl@2}*jRT~ zwnF^kOUr9N0z8a!ueni;qm=x6K}x5od!>a{9A3?Y6I!_mV$%j)A(Y*B&e?@v8S-a( zSs!W+gCwB|RuzEbEPOpaAT+ZfMs4{P_i7&;wmSDNBc#h04lydP z5hC|$bEW#=|eu-u>CWszC&qFp66I!fh(Y*Z8a;X4HJEb(E8rIV;uNI`YuH-0LG z_x|L@M;I=omg$aE(ovAcYk2X;oS)P(zTYR)WiNgO zyKe)d4l{1;mgU^sK2|@v0DmngV>`~z-{GLowF<(4%{)|B5!HIprtr|JB(XfNq)F41 zdBg7zqyK>m2|zW_rj-*ODz_K43Ai6K?;X2D^odN@Trxj!?`>nAs;1XPoBi~&g)}9R z%Mk9FZFTg7bZi1w?Ot=Hz}>6#t^$S6^%~71Rd%7%yXx;S_t zt$ev7PH)oT_RV1JM{E6CffG#%%Bw8`QG6>kQr&(jVIfv&iAif$%O5ydUwiap6W<&v z6Fcmpmhs~C*}t_NH&TIG85T<+5v{-jE2d1K8R0F3_wzj=JtlSsiU1_P;jIu^rVt_$ z12*~{@dWX^EGlooFiB*1lh^f3mtR~?6WXJ5B!8FTMy%2r1aV71x1-&JDdv*D$fk(E zVm%|}?A;~_a#xV!!8snvf{hP7d)bjzB}+edZ+|(zqRkJa54CYhAB$vW9i)=5Jb1Td zsKHz4h5CdIc?r6d&$A<`fhL|44`p0}NYs9xL{5hW#nr+3gyFT9ae7LB7N1huo;yjb z&wqUL-Jo$kkm45a9E#{1v?(hCYS$&-Bp%v6bD5a*gN`dT>3kVm>-w&YhaNy*!&?ij985sS&kCNa*JE8-5_j zl*)Ynf_EvK>~Nl0&OdOB-Lk>%-s?G}==9cy*Z4c0bLjG)or+@Iy6*0Mt>7%jftcqU z_udxaRbCWFgPc{vTfq-3ZDye=9>R0)Bi@CaU_mpj1{f~K9QZafW~F|U&y<^Q)&CHq zFo4D-zr(JPUg2U$d;*Q;!ZuHD4D6}d<7)|w^W(gcEkIi(h^Cp!=CPKa!I7uay&pJ8vY}rHdBkJ~S=vi+eT$}~wv;e%L7}&a*03xDe z641-lqNOI{=)U4uT~qf@4QM{Q=j=M%-eZ{#(dJS=iu^w{4uPI2(A91YbOkq5dnMu^ z15m)6Dz4IgZaQj_0FM0W-{F6{QB$+Ehc;Vmu4mC%2G{h-{o+HBkP?7|AROl^&*XlN zc{98Ncz*GL$dj#;uK8Yn9=-%52mw7idF*<#&aI$(UQuEe&OGOBRZcJaVH|)#IH90w zbu(d01*q~5_r>ReULX$yb~x$fg?8DnBhL)Ur!y5BcXn#3)B#SIPF@jTO#X+%}kW$rp4 z3HUieI@rAoBzq4wsev^5inv}1Sydf6MvtALXt@YrrxxtnRhJqC@h{PQq)%?!|2&PT zpP5>5)3pHS*KMqIO&W(WVY_EfVp{Cxd02)`XoJK9h!XVb@0(q4F2# zJ}mNy&+|Bnmlqv1P4hM{I*^EWBi?`d-6?cN$lB^``8zBA%$r;9tA!NF3I$fVIxVhD(!OdjKfxSyz0@J8@s*BK_WI$@|uGw$m!mVLT+5xsx z{KGk7{QTE}Jx58gK}JV44rH?!|6Sc8AJ)Wgapd0HBQ)FW>n>WJ;vmc9Ex!(h$pqqc 
z8QU$FAE6>prrggQ0J;1iHDkRVI|CX7z+Xi`kvVmn`a8x4e!nt|yE*#)L1tRH72FwP zy}zc8@yNOTAu%*!f}4v0+e|0--z5ooD6v-%V({(K1kI(3Hm*lpE4|pVS;4rleR&L?aN7Kv{&uC*`91Y|dCsl=N?)>V1R&soy^VyDmb4<38D)!4InyyH&6 z0f16w;%OKKXPivp?+|A&o!mWFCBUZO|8%zX^pC0=yn*wtvWC$=-ao&Z+91td6AYAd z!l-jeHRp2*41eHtPKGkGu>*&tXe0PnR3d5W%~sw)$Ql@8vJhADJi-kl%mUo*d9lT8 zdO|NQ3VcSJDtZcmSOat* zd%gvZvK$-FccrVC9p44n&2AF*>TduE);a!3ZvJ$2;kOrUzvKx9m&SqQ!UN^W&SlX+ z_Hcl^&Kr0c z2vJj0bsAlsEv3mQa4tNe+GnM*KG3D{Q6u-#U4aBKIj{YuYvU4kcx;N)(KzJ_={MjAFuLS?R3PHnijg*CMuZ5>*2TkknWmFH2nAKDBSVjNthgj z441SWzajgc%#wb9c|*XjDC@+^q1o~Vlsx-%@yuDGtMxmaxH4MIRjAOva6YW< zFzABA!sNW}3mFRe+N-*g+!j?W@*&}0ItKAZ)+U!^?=F6e$Ue;R>Y}Z+=M``$sRg*X z9$@rO*o*(H{6N!|M=q5ABL$mP{Yh>C$9-$4KFZ$y)1!4et}IvZ0*zuhK_@)7;<(0tx5Cm_Jqrzhea(H>C6xM|;cjg@1w zuhx7IF^WgVevuFJ96L?gU2apvTk)CZr*?qQ0T>mo@y@AFigJ|DC6+=ZF1>);wJ#Cu zDa?V5@}Slt@1I~fKZ#UZR_hF6Yx$E1Q;krj-qL{*Dcz1rXXlpGW8$14M)cyxf&+86 zb*Tj>$~LRK_QxFY6Hb~b5oSkV5zY@{Jq_yE{tzZJQm%6JAS#yb&kA8{GXB0jbBM@+ zZ-sfD+rX?hr|H;u2ge6bu>%Jfg6}b_?6b%wEAyYV2h7wQtU*A5!NroL-j;1`xMFXl zSIF@ao{GJz(ymN%m&LQ_-=mTq*Y&xolD`)q0IyOuhKmz0DmK-x?U?ez%3%;&B#Y{S zcKR?(;6!&T+oz`g-5p!NRnzvJ6bzS72tE*=SBRT1B(eV_cWQj_)tsbu+pee*w$Jyt zRxwb!*;1R4{axORv&G?Db8yEHS>c3Nrx=?IqPE^|29fmMJMR9n$Ws#wzY1@%hl{Me zuGwB}y&sGyjixIdegma38z|1h&!9G$bc@^0?E2B9rCdj+sHEFr^(c06LKYQpZMio= z76r-X?~#%*%On(P#i*>Itgrc}#_nA)Z+(Sb|M3cE_KU1Bq~yw?3QE%!Ve8I z9KS)gws75Rc>?g|TG-=@N6W~{#?UmcP!q$slAzUy+*sozSkNX+A83(}7TO4(!uk=9 z6Va5j?R6NedEbwrGJ0r_1||=l28w=M_x-k9VG9n6&^?A#^Z4V4!Jvb%UYl;`opV4| z;Z1V^!i5d;YOIR%0~g^wrmm@n+sVsiG`f6x8kvy1M}m&KHhD$QV>bF&@P?OfaBbW* zxC}sWl=Du-BRX~mTduC%3r-Ub)*q5Be2=qg>HmW=_D4LO-pQbvta6x_UG5C>KBJ-hc}&vz zZ?nwzsH)wou7?;C7=js7Y?7NI*=tx=u?=#zFkCg+SJMYG01Dn zo%MX{qLuA=X@pPb$z?@^;@3Ope7MJ1t2@9nbhOCgCt?bRQ_wPD-e}3QosK=x7I`@6u*Y&)f*YmpW*O8rQDj_T- z@}h93a%r@n4-iJLCjaHc3#jMD1SXhc+xbu3*;h{e`x*=6qom#zvWJ(#VRL)Mwh5FD zA0d`5DcpW``T@6y6l!V5ZR^l;J}ey_*!gm4(E^kZCR_v6K-n{-9Et|1+Lt*&ziqBQ$XXl>)uE;ekq^JE{zl2xhx>V^#t*KS+K zP0(&@ExRQ?$zXr$n%Dj#=U@Uz?nRyL=HXx`y4PR$SGem;yYr-~-?)EOog~+FoJ9S! 
z^}+KTC^n_Om%rQps2kVDz7Uj}>*sq300^hGGECx5S4OgZFRLSaA!}pE*q3yI3#(9Rwg zftY|o_2f243lz7s_IJkF&Y(}!ocZ|lN`{4U@K+-xfF@Axau+YY$CebSMlT85x3iTz6X+C|GlUiRiaRrN50`ZGJoy6g(1VHJP#d@Y%C0_2v zeYdcGU4|6zDE%cm!D{w4ai~PwHdO55>o4ybp>NxXRH^@{QnUNOWCB8!qO7Z$VqlOW zNasf1dlf(7u?<}0-|N+PPrsxK%R}dMt#wXIJ?7yJFwIe&*6ct5cq>Lx?JcV_@!1{5 zxQbJ)?BL5ZN@}2fTBX#POz(p`#V@-&1#e4weCz*<|E{ISg{KUPtp!_k}9@K1@mB7?>dG`_Z5$0R*ozIiaia!mt8GUhq z$~EQA9U*yf>BGuLPvX+Nw}Pz%q-T)V;^sF5ss~VD zy(CckI%aWcUnxOK?KOdRL_cF%NM6DF>OnbFKnx7&sH1Oa-U2g%&U+c!W{%+fc|@ZG zC4(%NFXpT@8&G^Sczd)3|3bNxP89@WTy0DehHRe*kQdMvQ_?#%_3v1zbOlB&+#4n^Bg7TZuyFk@ec%HdtcvOyuuyy_98 z1PLHr`$^>|ztey~!)%SAfT}ZiL3!FB2_vRVRpq1)N5sK|07RG#oIm)D_~ze2iXy3G=N#aGe$H}bppmCMKC15urD zBYDNQzvwY8e425y&2uCm)}6k=6p`>XSWXF~5a^BTO{bq#+6H+A{qeP@6X&}5nAUNN zu#wG1-AjyIyfBOrU-5N3DVgPM z3?=KCa-{Ojnx35U%-EKTxru8&E)k9df36s%fJ!BD+8tlXH;z1b(E6P8j_&lu1UG#3 ziZ8MVA<1mE}kilZE7d-S>a7_8p1orxsQgIJ+HwbBgyuar`a415jpG?foKE=+Qi zH>gOEyM)rngbbfAs~q2F`i1cmdLq)-MqBZ%tTP;?n==}492R#!+*R%jtSj!lOF9w2 zc4kh5HvcqN0Stt3%=2$3O1;sIOWl7K7v-z*1_DR`k4D~9+SBRYjmHZK)JkY*{l&gF zghnKz|6Y#^4qHzZl5Zzv@i{V&%lH{rgsg{nRRMju4Jq}g9vostXa33?lm!U5zCHOo z&cJS+b>H$hWH@>g>YV=g7?GF@ogKeFu0s`Zt~pibL;h%{eQl?}S8J#7HJix_NC^gz zh6GiYtN(!a`*wesFswSDd9&X1Gru=7&HAXRgqd>P$-TWrd_{zh>c>jmOHMD@DY0cY z)O0(8iAw+`u6?|trmC#XT)~0 zqwlp9+cAU$BJC2qb>>T1FQflL6m)rc9u{Mli6NR{^ap(cWgKTpfFc=!WSsg2v~0L8 zi^j_z1#;p=lss3d2tl(sOU;h=K|{vWk=Iycyv^Bs8&VrTM_;t*QGVc2#r)#}RwssE zi!PocnX4lDe;U56iSUWna@tQaj<$co+iO2N=*daUEbNQX=wYq4ga)f>ETQ1O10w} z8$$isCm3D;Kx~$^!0e{l=ZMk*FmFOi^}rucr?(R@7PLJvx@5!maM};SWbp2*(G{UC zxGvTTSP%>q%k~L)+uldo*MzpAy3^^vVl|1Zi~eh``Z_$W1~2#!7afz|c9p3!wdVwr z0HncX!lya*7wIA4Y0j!j#hZ9`wQu)ZQ8BpmH|Raw{9>unZ`((JOkwc;xrNo(Y^r)v z5EMJob?M@XiSsYrw;ZMW8@Lt3JjFhwmDzcIi2bSl;P4WM(i;0@%aEfe72l|3l*g3t zXaWcGr22~jgPPJ1yVEw%Nik-GWC}egHFHN{c5)tBPc^j*)935%%%7D(Jpu1M87GB` z&I$uYmhLO;gA6yCiOeHf^O*7o#%OK! 
z&qg`>1%9l^TZA1Ee2OBqU7ZSj!5J_01=AJy>agDL+(OK9-}Qd zDy*aLP4MgZ-Rz3YweCfbCSeql3lES(5cYCWckWFWzhGVoqYwS~BK~bQqs!eW5CM8(&Zj zxg=~lFlwE+$wJi8MzmJb=NYb@P4jInnsIGy<4OJ2*xusTj*}|em|{l)$zXzM%O3BA zZ%w^~0q(8Hy0g1X8!kBKPwI(0zIdSh5T#3Y@pGOYS$ed!9@)kB6}eKyI2NO?NGUo7 z!WtM#kV?j@{c8b-;aIZc?g>7~@PhOlPO5q783-N(xeNAs!OdcE;tu}e=tLDg-UBk{ zI5@Qg(P}d12!m$+8oiyKcmk=tJ2>)v_lPLHwby+gCc03JQ;WM-dF*e*x0zrQ6S{Ze zo9p8-bi!*mfVdfN_=c3IAG%+IwC|3idF|u)M%Tux{a75CME{NOZTx&`<7+!`Ea>j2!4}ZP zlt%a*35=!pk0h@>r?=2<*^r{@8OsMv=?PcwSEyA1gy`*fIf>DBB*V{-iX9 zPg!-H-RnV30eQQ97F^viW#E}A)xyx0F7ELxiybA;iq$`UXD+sF>kZW6FYOnG_ zfWim=M^6?Xp_ca8Q)x`&+m&l?e|VP7b~P}*5QtMhss3|lhRPsV_uX5-mG&q<_ak5V zOzV=Jy~O0GH@#s77@x`2m9A1i`S4gY<;dM;Vd4vrsa{DsCC;RF7nXUl+qpUTkb)*7 zKTdq-Qt(#6!uV-!jLr{d62?4(m8O|+E4B#p3qudh6;#Z6G*`>rz2C<+jyK<5^b@NY ztzr1ZzUcyx?Bly>%HWB*Z806YB~q2&HZ9t2Nf#ipwV~trE!Uyw>ZmUa>$BUWI#Mz- z`h^t*u}-8Y!iY(CZ;uPk|ZX(5ZB^t`IQfO-e)uXQ+0C|ztXd8hYu=Z z{bXBWYX|#Z#$E`Z;`a)tSqM!Z-aMoUdxLu!fZuQv}SUI!Pyc%^@K!ES@c~@-~fT&+GK3MR#{`ZMxJe za0)Iq6gxFz+gB9M+au=-MMfLA-)y+lTTM5xv+Pb_+pW8tIja1(7X8F?Rl8CBk8}?v z!^+z$$zE`o+3LuM$v;aoY}R)7l8(fK*Wql_sLA9+;mP zGgs;m|9DZLqWXh9Xtpx(;Z$xE24y~}WmeH%6-5{16sZ|x>M2Igwl?%lrZz0k;69Gd zgr1_kl+wuPHh!e^(oILs{h?AvpGME6Crkyyk z?O7B0&V4b;FxRE3a_M(lhFBP#@RtB1MVA-1#r=$okm)#NX=8I^iBR(n&uj zIhw_cxr9?@#db`v?h#shxK8?lC#~9*Lj1@%p+D1rN2Pji-+#hAhivOqtI4_k(@+QK zRw>iV#zU7}Sab~WQZc2f?G`>IfGiupBzSlBK0cvwDyu|3gKUfGE#k^Amr4!)5#VuR}%HzxIn)&=tSj*{!GC77J9w%G1?x9}J`2UhRs3 z0{zJ|?BbM9JAMP|rF(vMJ$|ezguidRfa>$S3D$1aG^$fYHGOp;%#*G8PT9Gj>5!fJ zD3`@8ok*3LOO{dQ$jNxzOTp36l>D{iClB{p{G0CApGahSTFE~#j$sfU>^Br{uZ$_qsv*vtZZJxC+_{ zsS34kSPtmFKEyNJ6b5k)N#^CL4*_QO(lcl>HwNLUjTR2!qXh{%THEjLc z^?^I+M5_8}#rZEoeLL}Q$xL#Kx=_m`F2mu+u%@sds72m;mknKDg>nk@o6LpH39nUHP!sCv1Tu_@k z%dD)njLcUtIgNdvve}Tt~%S~&z2ldUoj2ACMql5qgn#V{O zKXdZ_lYJ4mzhZhrxX-;zy+3AGw4s@o{8bshtC*ESA$&x5zyG5vDsbj_?$-Ldd}hN3 zCO!oj+nl~*uX4jTfoMvOBRT^1Ahen@@2a=C>SU1fD0{KF*%YyLul(?Dxq!AYikI5A 
zQ!2rLJC>W)p0BouFKcF<#`0_PeBn@d0&gDwVjA08xW9<><3lzvE4PWqDg|_<{TkZ2+u8gD!dVu7akbNQ+2itVA%5pH;ocR5OtTz5bYBo# zRuEoLTbZS?ch?$Wr=Xn6Ubka3tJLqyp|dX)p8BHfd`16My1}L`WDgPJ-}tEpkp`e~ z2hdTtq~OQ_m9*A!&#H;@@RA_YaC+Bxp4<5K;m3$4;7?zv(pS0^m#<=D_&JxLl1JmE z5YapS=RFUH@u(D!M0ZaQ(dV=UPAu=M zS+a5Wmt}}dl>RAwC+X>iR54RfNn7YbjZb1KFK?V^rwxcV5%UCm;qi|lcQHV5`eIIdyWcuEX|NxMzk5b@IgYakiJr5bGBPu%dt zm6r}GPa1#|BDe&k*mvZosws42DrK! zM*BJzH!Z3klBOQL+SFK8C3jo%LECDTyT8hw$LhvNSfo(|>n;r$yMp9cuiNAwWY{aP zg1zOJtJtOS@zcUfn|y-#W@c`~T8Dl=hf!06=s+#a2VA-jahL30C)zbq$1D+p98~8$ zOFIQ=q9g{0|L!=v{0NRqqjWE@@d-uOsa=#%Q?(zB#`bLByKESn@fVVxhAPQ-{R^9N zTkpF`spJBg`E~qFg>GelrqYop4+ZI{O{d%^5mB}C-x>X9MNp_W=6Tb0uj7BVv+mKP zT(PNV5UgO>Gm_~^!*QH@yo;v zYfIyaWv?o8cuUW5a(H+d=bq))%*NqlEF!f2u)&#Zs`L_?Jc9#C_^RU7ZIz=H#}e)9 zAh|`6Q7NE$QQPdI1$5R4K0b|0A|Le0I$nMg+Xc^}Ym!noE!UMhVD)lV>sbq3C2t?0 z7F+i1F0mPUJbJKct}?VL9EfON&Yrm0YZe$X`qa%|#XN?Jp)wbTTO)5!n6Cxw^kjd# z95jO&3!cPYv?och%QqXD&!(Dxu(`S>V7zp(#xVQ?&e+VsUy)gRlMn<*oopnn=N-^H zdXV3JceP;snrVB1a)Qt?sUY{E#Z%YMN?YZ4zryE(T@xB|abb|$d>5LY#izmucSwlf zmf=C{!Z;?5PlfkSD%)O}>1Vz0`SX1J-h;8baggmI1D zq`*{VlbB})JHOqW#`Xs?;6T^Dv7UZ;qs|Vm1J8;b6t;l}<#eAQ3mJw2@&w!}xu^-l zfdnHa|6NR=o@K^&+ezhM`U7NO?A>N3_U+H}lPOISlUs33QkYdTe?D~v7LHWv z@=%qjy%giJ+V^Vx=2GBfuvQ&9)(n|*Er;oY;h_}~YNQ!xj_UhH_+h%!$WElU90_nx zp6?^|HgWnjHyd0$<7XMaUGvLfkdeM}`;Jre_ z@RwC~HT%CYEP|^IEq(U1eP3F%FsAWXx;Oi6G*=s2#Okfg;v2M8krrMe1z{fk!2NIX zrGLM=m!-UQ-kT8$vd6(h_+npscuAb;-6tp?Z|*P9Z3z!m=GZ&T^5F@O2i&LiZ6v@C z?LqHk+|M)0!#|On;lp%k<*oYbaoI)9S)!^9O0DKzqV?Jl6>1}N3F_0sr=3?{r%OUU9P-p z(lgc*X?xv^CS5WB@I`Z)+Acqlb?N?LG;>?ls>7bWzMOBC=$Lo_)#a)~{xAR^(5SU^UdBP%kEhDthlQ&|rJ$UP)WyN|L zhBc?|7@4Nz%?^c^jyVZaEI1v#Y12T6P*LT1=uL{fU#7LJ_fJ)|bKx)w(P8b5AUOc`~cnUA*?OAp5iI=;!P&v|g~g3Vf(dNKn@=jdpn%yZ@47a9djS?dEsJp~c;$T?w~}V8bCa=8ww>T@D-g zm;8zoo`&^b#)qU-a%cSSnD?Gu2%Q1!Xijrhng6O7CjSk|c`sbX-JO-oTHjZZ_4Iif zq%qv+sJ8EMo84ED^OXwMaA#_kSq>doD2w~7X&dYeLn9RL*DHMHKr46D?YT|hFo{9GSbOCU$c_3fl#;h6Wu{k)LaQ(;qusA>QMOvLn zKhdRc*#?wz;l?6cV)nviBFOV@`@FRV-K!pX>bO-!suumoC;q|9pdrM+U3N|-r#1Mv 
zxjN9Wn2r02k3v+&!nl~=a!sinq502tOKDHuMsgZSNyWWv5dl5Hi z6{pspRvk(Hqv|!ub*F>fCkNUY3+h+g%*;2m#PZn;#|4&~#U}H(p-g8mHbzbVu*K%} zCDm8N*$lvppuzf~2y{Ma#2F3>Kei z<}Yg!u9u4MG+}VpB5f|HS{RS0NsT7zMv-a8-=8REJwqGzmQSIcvG%rf`oXhyZlx19 zQ_s+Ld9bnUO^jN4KENvf8qj_U3oXG%;-k{9_lHljgQ06jD`=;rHdBt5En``I0q!)P zbxHgGJx2+klL=IKN~mxduQxF1Dbrky6GeSqw2Z_* z_aM~>A3V7cz1$mIJ~%pQ$ye9F$n9~op`Lc`+a_F=y4|>vIaqNDq@=tGTF<%lLKzd@ z`}oo#@oW3vk1aMzk`+{C!+4p@`&mj9{QeJ}BY0t{CK8q)5Pg^~p1<{hj3G`<852Pl zep*mk{YT&~d$Z7vBfHY1e=vXJh%j$fcTza-=3lH+so$$y*wUPvzqz=8>?cFs z<*U2QLFbF3a;}KIEcqJi;daXABYrZU^q=QS{KE&R`C&eN$q$>F?7_9?GMT7k z-V>?Cb>OX6EbTV=sGJ}?qSs>5unV(Ry-z-Xb?#%o^J-_wDPcW-Prp3iCE1#EE~ll+ zH5_}C<50trknp<#wUCyr56<)Tz>PdJw#OsZqEh!wP}I34Q2UwK&Nv4(6>fxSz3Sn;E80Tt;Hm>z|-y9W`7JoXh5Si9Q<>3-Fj0SGl-0GQq6&CLhNvxW- z=ih95pjG-+B@Ry=s38Spyie05ONXv@FOiwf^vu^QE62I*B|f(iXlhT-yj0zfmoj

)bNtXB<>| z?zw$VG?;}cA_WMLuWxkpU`bqq^-gI`l!vzyJIgmqm5DEFjm;@^zl*oW_s|8wm8e*b zz0XFbT9w}8+|d^`xK_6-vkAYgt=Keh)4pg{f8qatTnp1$c}kL8Q8Mn_uNQo(tIlKi zpX6ZQc^`-|an(4vp*vd)^SNh=Ro#iKRpvBh@*kGgjw6S?q%KHqoeH6(_1wIA`lV^z zAiRs`A3r0$<3C?@`aE7#*py0h!ZV&RT$9)V_a4o83@+F_%Eo_IXpu`p#0RmnkYKV6>PRTk%i$*vH0e2KA$-EIE^&JXaojXAE*53ZKr9x)`Qum z7UB9BUT@5(waVq@friz=*QwcTSIWnOG4BIs|6G-zA;m{oOAc}4!>le3X(;(rUNgef z(7*5!tt5aZn8P0!173!kFHC$!crh8;jTxMQSIE;}csC5F6Vx;H$&(nH3E%(&HAh^MAf}e0nfSMQPOniL_ z7j57+Bi!(wmiNfn2t9a|2C1x>?Ls7;Mf~#%uyxQ4XbR0iiZG~93)7HJPQ|COV0;>D z#;*;}%i>vM=bScHgBHF=!NCGns4A2;tr8_sKh_4a@ zt{B5ZWXgYDXOdJtuC%DBe?Lald9&;{9%iclNek+#CCvfe_-`5NJW@!FZA`&&O&=p9 zUwlVLYHm&ldOFGYwv^64tn!6!H32EqrT>2?b9bz=kKq{R5PdaZBW0#`LK1sQ18{uJjq4Q*}wb*uTa%(>{4%;VK01*KSq zh^qcE(^@tu>pk>REghc5E4ZPCWk%EaO%C z&%%0tbPv5YmqdT&R)}mL3i4XV6jvmR@TXK!7qX{ZJj;Gln!(~06Vc5%7Z>XGw*|CW z{3(&T7JDu_+<_&!Qbi0h)Zwm?Xj;_}Cbifn__LJbIWH-7#rR}P@spEbTfxO^XYW%M zhJEnJEAHE}H`p5>4E?|@|MY1)YOBU;fR@a2X-nTo)!{n3Xe8yyJAvAW=7UAr+^*hFU0;)||N9fTIy zB@~>=9fZueR+b%uo2$%=%7YAE@|9h4K3Gnr3xsLX&S#8Hmt95P4}F2SFI?k!cZE44 z^2&Ay?B%9a<(R{>NER!X`!cultn!S|gQPK!EeGM-a%y_zD!WSZ*gKbs4pw(8pY<-^ zZBJZw0{4iaQ9^ zT8kD}ql$!cJZi)g!$|5ll7vYeP!8VLd+Mk=2qkg8GX(MjA-$f&*W^R5TcrikeH_3g z2RzjTDrfB$SYPI)M3L--)_uH^7i!obxP{DPi zM5t48>!<|&hzBc#kyj=3dbup07F$XBsm!&;-|?ih7;FeG61KWhHgd-0#CxaI2<~64 zohOXU9U8pb+TZb2+zY+0l&eo_^T46u{q~Ue|CxIAMORWHakreaG}#%Q%Wu`*Og7GV zU(<`Cn@pWKnelXBd)xB7O*ED&nM^4DsVG+&`L>C}E7;)|eoNuO5us;xlLaK?UPnWL z9oIsOax`n6NWdBgeD0uZkVvFNYZ%?+(*c2XdpL?3?WayfRx`iGtCGnq$3sx;Vx(au zeMO66%Z|@fLcKSiZ}rdp!ka9fSR9_AmJ&!TPG)LeAcVXh*qv(ZH>Fx_p?Z7S7nWz) z)ey*k3!|#s(e?>@K9M-NqOo)0su5>}F+r^NmaMFtnvw_?(x_3SS5a+IXoVT<|7f5n z-$buLmMlGF3C@o%cq8VqPK?AJsprrN^WyKE4no3s8pPF}Mx72q;$0I|xYfakYG_Gc z357U>Rwm+~cQ?0o5ZVLAvyHORs^qFRX=&JXjNyp<-C>)ib3q~29*v;gHnL2YMhrPvbt=vSuYW4(cr@f z8=UnNlqNf&edfv)#HSxS=HRS5$s<37`H)w=WnJZkdw)=f6Q~4HzGpHu=cCi6ALdP1 zOCr9WAv56gk*@9&ED&R5pq8^O508?s7~M)Fejy@&lnCqs11Ju?5*TNoMVw8rVifFj zD0Up1el31t94lNCfFJZE_M$Bg$??f}Y%#sOy>j30VgauF7cy3Jc`~NLc@mm 
zb8?LBF*sBh>XCT{wRV0tuIBgEOClz^!hqnpS-}56WzSQ*Z%VqH3wb{?>5ydo4tnPU zxyUu-egF3R#hbM+cj|mFzLvWi^Qho&TOYdh=><&`I1208d#|_`Ht* zfRdAjL*2={gxY5jye5M9Fzx%{!{{ykj`IBreyhrM>4S#a(B$UT4niMF_`CmYdt<}! zv8TF&?0Y&h^K-)qPt6Bqvdv`30^U!{lAW*_lN~5#lp;HEsikw`{me=8=mP$JDi?Wt zpa#P;VlYn}B(4JBW&+~lL7B{A@a#9uw?wkCvgxV=oB4M7kt}3Vvit@|LV5W!K?I|L z;3>H|#C-&2vSf0SPNeU_A;)l4Y=bTzbFMEopMuqayJ>Lz%MeuS)id4_(^6#Vsx^#o zqJb}O-d?j;t$TRbuU`6g@^K<|lER|I)?xgC5t-FXN4tI4sFc_8?ck z_s6pNjh^u1IPD}Zwz6z0QHJgOnmH*Tb6H$7o)*DF6c6r@K!6SodT)WI{mhGGYJ}Iv z!G7g_coQcvliHBmNaKOzCs7eL*ZUIhBH6^Vh1?Ut9Hgq~`^Uy{HQT9hx&FUXSiT-x%ApC;r_aezH z5*`hvJZYm4$ztvx)wS-`9#1_?{hdO*b6x)e;_Sl70nEZD-K&s5e7azHJS6&nIr0Jy z?hX=4@T`nG|L}!jp#>f|MKlg4`HoU`vDo%oI}t>JFDa7b*?2-Xjg7j)tL_sR)!fA4 z23JD&1o4a40%LCb>_Aj+KL-dDo6-q&IyRM3Vtl zU6Y4%0zY5B3a3h_CFR^*rw14cAhz554#zc6UOiEcHj1tR-a)J!uynF>Gtjm(L5vac zkXVJ}Py~5D=3bgQMWH~wV;yehqYQ&q*5boqKlP*5;s z`X$CJ`Am|30f|^+vYK=ms{$_?=mVJC$3(L1Ny~P_IR~dzTaL2&%qKA?v&>rSREbn1 zkzOFc&M>~dF3>-o5p){uFYMDUgU?T*?8t2ujbV>sTsYHiSGuKX-cIu3QDPS6oVyA4EfZW2Xu4$^yXXbD|MOyt_HljBV9W z6`249m?4$_7Z3xlgJsFO8%4&}bYl3;ZyYtwQ0-PxX`kA^+oQ_p*x74by-6~1385-` za4&r=N%(~UHR7s(Dk}VPdPzeDZiiDz89;xt4p`a7Tg6>H)D3wmCj|!yibe7T{AVh; z*4=`{Lh%R{UP?R~u#_Hh;B9SUj(aupz6921>-B58q3%Q7{#bHcIb^a=%!{q|0`7%`CQcJU~7Riz({dUF&@K;~-%)}AK|MpP z6Vq)quNDoPAyEd~Zbr-yWc;Z)i+Ff@&0EFP-0rD^+#qCOLB+7J0{)#VaJAHF?AKT} z(v`Yr>SbyflDqkG5@ggM7A>wpIw7u#q*V7aSJ^-QJIP#+3%@TSRBw}~2Sq{JXiSHN zCvYnL$RPDV$sdq;5H!BCyKVExK{i3sTToWE`yQkVVmeuft0<@iSmwbkZ&W0`8Hq}1 z8pY?Q4kVmBAl-6C3703W%N+{L$2-ptYO!Xr_!s~_mYIKk#TD0f#l(r)50*1O zT~}6fshz-2@bN`%=&ax6Q3Rtco!>Xw+yDk&7V_`#v@)#s*R1XPkO;Kw|0ka~6a zdfJPaG8moV6TDf9k{=LetjpsNUZc}^*~h?omwZo}fmCQuOonx^b(n-}IZ3?t4W_#PZ236ID--qTq5GeclbvmU%r!C#T|19f7bM={LI z<$K@Ay!9H!DU!u7g?@d<%}CWobKJz-j;*zV=OZy49x4J6K894zlL`2^25M^|_z#AL zXRIxR;0&gwh`h+Me|Am;a4OM@*YSZ%LB0eoh2dUNAF~gb%BmMX2lz)ubQF>z&k;|v zXuXMHT#4$qC6F(|-5iTQ5?njvOXssIn6VZBhjT-nLXa_9J10)*#OMc(E~FW4_y!tr zpyow~JQ9{b<=G(42t7}_U*5Jis{Ng*(?eYKObubVVF;gk1;H1)`_hAs*i5FhyV1qL 
zn_mH!s86VWez=1m?V;$Vt0F!bK8UlrJ+X$$yoR+V$RpVdzGVrSVUrMb0r)I=BJkO% z_;ZL~1d55oZ&JGEJ7*n_=(lfD$}1Lk%(0H%06I0>{Em<8P@p2|9wmtwi94%en3joo zs5BV`Jf6IO|8BL{_3tX)rCp({-nhh}lkUihBo@j<`rW%CNRvD3+-zQN=HxCtvKuP| zNIYrR(!Tx^zCmRB+hK=BhiGvJBknGgf?KLqy8EO(XPvTw#;&~3B2aSu>7@gR1*ApI z0LrjP!rn1=%VhYywzo8Vfkez_K2wE(bANl+7!(j-Sw4~|2#VgPke%2TlsM#>2O zLM}42U(mDn^%}D32eRO)0Fs^#4_|RAO#u$wk7Qv?pvUbXdt{J;J3n6>YPP3zAc%2| zPvr-S$1_O%i!FnFDWk38P|nv@7)5NtM)P?EpeFjkip85!G?Z>Kt`3TKiU>k@Ntcr2 z#P?Bns)Ks){v6ddC*TseBo`@*_fg`m*AQz7*N~vkU=p*%bz-r|l&0E^;EHG2hogJ7 zCu*dN>lLXcfPHZSc%61JbC4yDBXEzmnAxoc&$#U`**7>xwezv8^?kb+LEiUk*vCQ< z7L||Hhfe6z;xo~-EvoBw=Vec1^%8ZRv&%|J+Be~9bP{&_y^J(7RzC_{lIY+z4=tj@ z<}I-`VGYH;h+>$^M(_cWr_3@9AZT<{dA$!Xh+&&#MKY6opZk-mKsA(SpLEx<$y^Cn z4gkx||C00p3n8eH*|2aioZK-IBa-L-fWcVn}SELDwx)Jllb2CHe3m@i&x>cGr9Ixs~!M zOG^|wxxkH`PTJTw$Vx6q7Ax79yy+6I=BgXb-)k6Y82cgezic&j=wqQLOON1tK{+=X zpWj+L2-Kss&cf)H4VjJEQG?~4_z1!Cfu8!z!_~*+8S%dTn}^P&d(*_}T)uaQKEDMB z0M~w`LHBpvNQK~#Louu+Jzk=+1pSQ(JmX9iy~{1i%Eh*0F-nab-tJ2*b{NC1GBZkm z<5WTuPy?R>lK%5c)Rw5S8C1f%69VqqvsTC+|9xOtHLX(Gm(+n1R|+kgDIR!cZe^SRw}7d z;1&em1-gDV6g*@e4JNquZCras|!I3mmu2_8wnNe^b(RX!YgJmR@kpN_+ke zN`AvRg&|j zlt6_`N3vKGh+P?G>H$^=Hk26yRz|@`CzS8?a?UqmvhMU)n#Q*q&hVAJM7=7`g@9pe z89^<=G(sm_Xlz7mRswoTyYz60oQcfIC5`WJn*c#XDC%LR1XncX@lk5zthKr8aWR6g z*hz(MArpKerN|aCl=H|}N;ULiw!VkJdB6UT&f3!vDrVG_N30uZJ*3FGavst7@RE(% zQ3-P_&_?8bq2tAqnG~n{@01>-qa3GMUVkVib@76t>i+aY#M?422j6bHc9ILyvS*B> zQQ;hTorEx+5%Ejntqj?MpK@L-A>*grn3}Xmf~eL9A<3fu@V^M${v%Mb`npo{-kWab zY$g4;waJ-CY5_)}&t6?C)$H8ON*&Z{gA*WkD2AnI$WqGr+dDx4Jha4IECI7ORlX%xLkM2S>PMcfQAoTHXiHgre$Ng``C+UO#Tf z%h)nwFM(vfd1`y)$+e<9#vF(0WB#2seWeOrC8+#Sznrt;aTFq+VHge(W zrLULV-9kwxSkZvb=A>{4q$?@Los{c>y!(<4Z}}x7H_1eA)Vm2%hAVvAq&Gr=X3qss z%ZI$*`HOR832P|h_`UCt@YeCB?vDk`1ijIFpj0~S;5t0+y?on^xUzWvD01NIzw-6X zg!GOMi0ue9#H92NEiey6Cu+B^icR#ZYNp@eiUFO?Nfr7Ruph>k>z8L==o+C44y|SzJlM0I*>xbKB8ipr}PC$Vq1>q1lcQUVmYSy6QkL>A*e-!H* zE^(h_rDTROBbAFN7eq_a_1wd0CwYNzI#a@`n-!AuwhhFxQXr+>8N&+;k^;lb@8IM0MP++-^ot&?qrdT% 
z@mt^g{?3Z;HrZm^T9}sx)ecIrLxK@CD-D*|m9|IDBSIvWPqVHyJ{kM@xVB3677f>}YM!uoen+4Oz@ixxU4lLhmdnA5_Cq zn!eQCP6VBdu#5-q++!n15F&4}luzs{UuR55zOLgFrsna*>NC!J?Cp@C$r2nxuAoQ6_@4>i!6BY@q3nq~DerN>eBtm6*u#Q`uY>m(|fJDWc zpd*|pqn5K+7*%^nTL*KYS_V1t6%vq`ecJ&{84B}oF zCzG?le%RKJAo5Za*j|fNy}S>y9=!0XA^r$uwZD_MT)i18>}k80A($6~-0{+6T>DhH z))3w`G*u{EYE@%Bnl`c);H`-I_l(mxT>~H9CT$R>H^+UeV*&En!Rqu z{b+UcK~w&8PUYTj?1*4Qo4e_xVehcV!aJ`ri#6`$VfW$Z)xp#{#z~hsQAf`=ZCNL{JQMT4Pss0(=nZcMfFg6F79R(b&tT1 zA~R(|O243sb%AyG9^}`bKkgKq*>=nPf)x~SUzz6ij(RZ7+V`Tx0@d|mcE1L^^tM(30<+-Ybq|(J5AS4>HfrK@Y`q@59{K__?e~yDbZ00uR4!EC zK}u!5t72Q@REmf9ef}1&kj+`|1rPau?7e4HQ)$~j8bpm1^l=oV>KI@gP(*r{!4_$w z5D<|jD$-F1p@kx&SZD@8q$(iFrUXGidK3qYN;mYTL^=UN@8{aVdFT1BbJjYa&X==Z z)+~o{_I>ZGm)~^}AhgTnv6FGo=$|*gp{!AG+CEH5j|U52GCvJBF$uB#`E{Ghy4im2 zy1||Y5E(Z0?xC4L*{rzp+X}BE2reaEK-3CSi+f)hp~qrPGP^xOy4<6w4BK7!BC+RT zPvIsF&pGuo{+^ZUrB{uUpdXqwG04Orx+Btdobe6ih! z$F*@+$8|twFYH>!a}JJJdR{s=bM@<+=EFKp5qv*}fs;+X3Eqi#YQqd3eqF;MTum7G z=_QrDG9VfWr){8pa-*zPRAdTuitJ_{%v5~;v~xkLr`vt}SHPRTpI~)t5G_*m|8ho=S=#%&!mCn}cR8VG zNV{KTf0ul~0<9WVC2r#GoeF=}FHl}4VB=5A54ssy^>j}d9Xx!GQ+Hi}b{Sgf1z9Aj zsVo5VwJRtE;zrxDlV%dA~RQ&xc9}_l1^z4qU4X#avK@Iu9mL z)m%wf5z($gjMn|M+c1#z1+*4259R%juYDJvZ=&5?=`bdN3kU*y8NM`U=0eg72{l;y z8@pnYH2YlTS9|NV)dRf^Mjh5+&-_>BAg+`Rtnuxfob-8cNYiiXcbKEaSuC%y3cp#? zU4)9ReU6qYcHn!Ew6@qSSfNm4y=DO6oi|oO^X5q zm~Czvn~Bv35#@^&e)6Z&>}1rv3lG=31qp7^cYzK=T7Y_Bj(rbXWD=W*2^ zQ1j(NXoO1c+gn|k;$ao8+&nGM?3?Kt11`hnr^C1T*|OsG>Xx#CvpqkJFE@YXs+e38 z!tdN{-~Z)lWY2l$;>nCiQr*&*t~sFvh)UI`6>BzBP@`|pZ_)w)bz3x){w29`@QK|! 
z7<;rXjKI&{pN|$73^4dMM~yWd-(WEyV{t(hOhiT}lJ7zpvg#7#mRh_&#kBf?6oB`| z;H&u&XRxEsi{uMO#EKSG5=`R+!{Rghw<^E?L;~W`TupSA|V9z3hX?DW&PI&Q}&V2|@D_m*Ns3@9@=((KZ!j34?30Ux%(g2YeKAzLd zid~*58*!2LD_aPzXpixuc6!nZ$IIqu=|{3u{%GF$KLJ7j3NRqi|J=-nnWqiv7leUi z=$4_?+A+GPE2S!S0~)A2xp^VrZPi;o$m?t6?c4cxM4GoJh5(G-0 zICuN@_$jWj6K&>ysAHsJWNn9cPl?=W(V7~_@|j;=5Zt#?wXBC-t}0ExGc{17(_foF z>WMzmi8Y*aYCBHs*_Wo^()PYT{fME1KUrFHneephPiM8>MIS}!*!K6|-mN(hd+&<= z6B}y4_O^Nwqs4oYz5iaYC~H3}un7|NifioRKf3_qyp`mg7x{sawvB4AkYnL~FLiN< zH+ofOLM)c}_>o-GKPB*Q2kbI_h24_eo1deLdP+!BtDHZU?QPuko3bD&w0|PxbCe49 zwR@r>kn!&Dg(Dy>?ulwzR>Kkd#|+vY;n_!TB)!geZ&BlUd$9Nvx+K6w8-i+~b<5j) zgT?~b3i*5C=D)d7KAP{x49zAmBiSrf1su#63BUjFw&B+uNt;)XL8OEAmtmkfpO%Dy zx!PGS3%xx3jq^7w9y{-ecyk1beWi?K?$n=SW26Wj_C*FNUMQmw?DHXOmHuYQqb8d4 zl@Yd=_BlOl@{*|Vp>$<-@|VDuhGd?}so+Zh>MziqqUDSxlrv-VWzT=HCHza@PRJPj zrN6E1_T0#*+Lk^8?G0`snD$_BALA3z$c%gxaPQ(%7r4Q>W^ORfV3Ky!KDztO-D(Y8 zDtW~B1E!3eO_Ia4!!WJNS_fnVaIhHVbzUQU`0f3X6ra=^ATXX~8EBjKG2Ux(`N#A9 za013-f$svwkiv#PG#!jd- z6azJrRh+-q_6qH6(K8a%{o2wKKs-8eb=mVqhH9Y1z^R2r$ZV&o&kDe%j|Gp==2F$- zQ?Z-Ev0)FOTFRoCKtg0t1pQL^QOPQ}aVGutuOJI8R_iTuF+BE2-wH2~1TmlaacYQG z#{(81Ht}1pI;|gGS$(=UizYg~wk&36%sllJV7XsBg%}fAa=!}{c1cKoN=IYOn}aaL zPba{tSKRtWi~Qxa4PT3BhXQm`eB@FTxSwFcYag=dK;I+2Ywm_wKTat><@;euI}|8= za_)vP`LTWd0IVx4+FRhYLCD*B8)$;At(T&zM64gZpkLu^ED63UU)5<0>x3a={#OKq zU$ZX!(Yw0Nv|5-YCIf)7&rqp*_%ZG za?_bgzPtx)hrDV(-Q}SeZ|J!!_LzM9kK+l zm?y3tgM#F8K&yow@!Pi{+K<=r0`-@Ag?g>M&X?C(-z|fUFnO9Nm-NQv6~_XzbPcMy z4D9f|#od8OX^d!%XmTChUfVDlm#lJS<_B#YD2s>cs|J3+SE%#d zNdyuB(fI60_Qm}S5{+8`xcMDA5nxeZ!iT2G75UDzy+|;S&(LYw3qqP;a?ok~THm)m znkZ)Af-v@ZKYYaMX3wi@QK|u?dy(cHoBUt$|z`T3xAkewO$Pf++0a1x8PL^vHwhG_rey_ zwq80-wEvVBK=Bp_;^8XiFAv#HMxxuP?^DKyS@uZE1r@LJs}8VJ66AxkdjbugnDi%2 zZWUSwM%quVC5t8)s0lISCWulMK;)E^>#*73mVP%TmWiS@p{_dM$j2>Vlc&9wgTbVW zLEjBNQazQtY}0$5wiKwQlo6VZ?ZyZ}9764FsEYCpTpF@s5d6+6{~f;nag9k8*@o2V zg#&T-S5hu4=RNxR8#pW&WD%`0Fb~2YJ~lC*R)GoMBm%~$2^^WZHaRNq=1f}!102Ai z$mGt3TJRr9{JqGR+@17-z7&`e_;P~*SJ=DIXFe&< 
zyrY^bxSxRA9({YJ1h_G$nZXcP0tpW?F)Ul`8jrIz(4V=<=A!ADP)-~B8@5%$|5jqO zM8i)tW|{H}H0x&s!e^=u_VhF+9~8yFm)gtZ$(G$AJeB%Qny5;GLHK#*e!e-Y# zyk+Pb?lF3)eP-pGG^}OvtW=wsY%sYFD|<@&JjGpqclESt|E=Wd@NciyQrmm`(oS7O zXBcx!o?49<=alx`k?k?5ZF4`ssE*lwjy?jb!Qj^RQaXDA#EgP84Y*)-viT(G+TQvU z0k&zk=)Q(6!BL>5&%(P zdg}?izzP4~`hG#aB^d^gZ64ZZV4~gDCNE0@zJDch_y~-XH+1iZIY>B!n?{J(H8An* z*|2Qx9xV6$XcE7m|0UQlbK} zI`lIpfCQ`un?|W2UM-=IE0vnzo&D9=4^doJBZuKoSFZN~ANbM#+o>9K8UqfG+1yn*%PEp0JPask; zrmY+8*L_2NOXJ#kHRu@iKbw@Obrz@Cc4fU*DE%CP)*<^x83Rhoe=)6+kQh%@%95DNBwfcOe zz?Sxt@*fQy-6{g@TfI91fn0p>8Cy#z6`!d@7amT8h1VA-x#>+KejAk30hm2cjpY|y zvvU6{^;1sLiN|2zI0!N6M_12gHEQm2c3LaAaz^u|<}Ys|hR^0~I#JxUQlPn7Gq&pW zU^sdBbg#+E(v`TSD<%GgA{YCNCGqgHWCLu$#X{_mi>0ntFQ#6jqw9EY;i9GmZN*ZR z^)0YcbwOdxY03YI&kp-Cbi`gGFl)Ng7shxOQpj7Ov}?=ZLH7e2SJo3Z?7}Yc;m3!2 zaQN8WD@Rn!g56mmSq%|KLV5V>H#zTn{N_pde}oT&zEU&@3FQOcSS=r96JVl_a)NJk zhH>F8Pc8Rq%(h-_lyj#mj=q!xN291W)y~|j^6(<=b1U| z&iAm&2>}^fxZ4F#{OO+{GkUq)6=?c+OU0owgH_bC0{fr9pNE*g_vV7@@b+_naSjbB z14T40bP4br)EIy*2<&g+c0dxoI%tRe$t%!c_t#as4Xz&Of*%y$aw=d* zF9Uc_kv@YxVf^ln_}T|O>Y2i(!ySo)g@ zOt%EQ)U9CC^()4Ns}Ah8M@aZrJMVvaO#cSO^7=Y9tgnQYuSO>U6RELF;uF&lN71$V zv^q{rfXa>%JHD~^9$K>VN`;3-eC0u7$$2N~N7Gi40WSe*;DhHkuP@SaG+9GJ$Aas7 zu@_3>xphGq@^0L&Y~b|n;4Xa7e@SICtnga(euWLc&aJ*GVBICUA)%JMrH24(ee{DC zDN$9`awhm}yX<-mkCOA%kpn?Nfq-h+{)OU(Li7a_e!cgBvPd9iwln^SS;0v~w2(mm zNmYkVOM@nSVqe#By|{qjfm2>4(CK-=lrJ2+DqF0*EqANu3{ZCmJ)8(DjGSXQynXhQ zhInhyfN<5(Lx&sT0mZ<%lccj;%^Uo`=Y-3t{Fg{!rSIu>6x-mexNr4Tchz?POYUR~ zCOnZ}&~i5@pnrXYfL?t;R(-D$PUQ-wg?VadlL!7AnD6#pQ(E!hKD#a5o7C*CICk#z zX}tF|53m^6bNtDN9PHecZ;t1Ow@3$*Q+F!>T>a01oui}Z^a6T2 za1q=XO*P;Q#`a|JS=ruOz4CB41G@bh4FE%bN+}&+oi^UL+!uF&bd@J^KrirgV;H2L zBKW1Z521Ngf3r_%`&;)bj%vlH8Y5u+_~+Tw>t13t`)9e{ezX?F6BVE&h7qwP&ljxz zLzdm!P};5Dg4OWtNKeb5rNWiI#?y^BSCNTC@Pvxyb4OrD{~y5!{Hhb_MGFt)PBGg& zfra{WA8hE9po>miN!+l;+;}W5@L@nWSBpLbsP;C4(dptP?lWA~80k$s`%m|7xwLax z*7e(mR?pqShYsKU*RSMH!oDU~66~YXen3ll1XecEG9ZOvyRoIe7QZ-nbVIkCj6%76 zy=ChVkxPWPR7gqqtnG2flU+MXh3DtNVEPC7@vr~OOb!BCDc$=P>|G5uL?pqF?RKqb 
zJ+62zkXaw*gyBSvHr>+K=$IY`4H&7|`-RF^j4>`;N8|VflOBXUGUEL&SAdi$w9^;b zUW_6))ZoX9wJEb_VJf5mmG14q=-8tDp8Y^Yh-eEC$OqeNwVP(+h6;O5m|#8!WPo#} znBf?(FZYiw9qgR%u_3kk7rusxGM2MNZ1%1wddL*D? zLvEX|rTm-C^SFuE@V^98Y;cSx|K7+ze{=w&$<==fDmN9ha@z+^_NH*LQeuFMde3>Q zhwucmO=Pys7=}(~^T!7AY0K7SJ(!UI14F0BmsqdODQW97Vve%yVfR20elab$Kp`|( z&S0T6spu#%gnN66*@jGvzDR#gT>%gm2n`T;@3)q&!vQ*h1{f0vEG|5(M_-TqD?cL} zv`lPgH9g+`#qb1l;POD5ToasLHPY6syXU+=d9P6%S>a?b7;{iSi=mU5{r9S*8rlz| zIA2sDq&i?C*(vIFHFc@htnCpfxMYuB@LltW{k~ew@snPgWsnG$U z=V5?bfNbEVcgiJhd*C;G%lF0>uUMS3Kl*RhLS9ShMc2oVX<$%ZZBbqefLC5mv5D(e z6}|CoIYOT260k-n!vx~;lKlC`%=Jz0pZcBs6UI<{p4?Eg zX}nXGy{62#o&-CFDvSP`{kh_Ij;bb*=55-!QQZI{#JW>Yp%<11Mb4&`lr)RYr2lku4Ds^amK=zZ6>*rs#Y*GrepRnB#!zVC(qGw|<5K;)S>;K4pP&`lop5_Ia(P@W` zIn9EQLl>0^ZJi*yT z{~r&_!%${`TPGkT;LO~3_=ef8&x&OtZw21+&)*g#-Bl!lkIpJcLjA`@h9Q!KG{UpXPMZKe+{wY5Z$h=wGI8{y)S4 z*=~I*^iT5p-v@q*?*Cgst*ni9`Q)V8njW41Y}8_IZvIr)!{f^_qY$SMnW`@8*4ngnTdvpv$FN(QmG8q3$IeflEUN{@?@%{J%Txp28=fYsh<>+Ev2b-B=UGN|9WmR%{ zXcOl<_(S%}1o!_hzt4TCiivxPWb3n+Swg~9{FYx6*4f8kS{?OKxp2M$_RIY5_aKS? 
zmBas=J+w6mV**47#B&tG$WfgOrYlcfmkJFjLi)D`r@Bv5g77>cGCZ(=A{Ab~fpeFu z&t^@GU@}sBE?1M@hOce+Ui#j#G9a5Pup>T}`3C1h>LrWl-a0_fGOOsOFlEhLlDZdX z4DlLInN>1XwgpQX)_Vpe*U9Rzp5Q)qh(Oe*Y>&Jg!OVa=Mh$N1dsyvq{m-m(S%R0E z|73G-Zxv3oXK^M7uMxpqu?aC?>7N{YPF=rYbYGBYz*YoN$FnF*3U01-v5xO5vAsHe ztY&!b=DUG4UINj<*yi?}_(BQJ<+J@oSI?}i-!=|qly4|2$W~WV&uD7Um>cQN%s#Q) z*^!u;tpWdf-m&6sX51p$z1qT!ciwN)Sy0=v9ep2d^7uSgOaP*Ch+Y=yYe@MF1iQy$ z8i(Fw`i(ykEAk$xD9J*)pE6>4^YIp%Rd4eTi{ctujx27=tZB|+muBkgsY=tITqRy= zS2i^d^*l{6^cW(Gej48P!7Q>fp1}xC`ZO%?VU0AR*%QMc+*2Goy%~LPz#(ww8-Egf zB0Ht&q{VBdyz1{L60h?RAc{0T`;Wm8QU4`FAycUN;ec5@gi;;+m zB)w2AO5B!aoxa}7bC65HpKH7F)}Ena&*g38RwxL)&2D?!q2HR83Lotl$==R zV*K)M!jpxt-wkT)pSfEb```7g{e~?5NhuuY$=326;P0znJj5}hxbF`&mfGq(Y_0eR z3`X^8hDWEOxYxl zs*czxlu3-N%|zZej%8!EwLN<`UWeu5SE#JwzDJd2F3Xt`vI;+%xi|f?}Qtg+%Z<)Ara+h zBg64H%^0Q(zSc1;&)p|d&3$Cd(2O@XNwI2P*F`Gr4@-$tme$+^MmoM+Hbd(N`TN&5 zHp;T}l)`QvV}|85;8z-jk`4N|m=`bS9r6W(o7pifZd;0>@iwr3alw~(U z*(>De7|%&ja)KK9&eIG;5*cpCCDjfyvzPvENszF2F}6v6f?G%;%)Cd6OBMfvQh7|i zd~%yB1sM(`%rxVu_*>WFV-<)|%-#6zO9Yo!$Py8Ig*;94nBIFm717T?M6(cHiG>lI ziC9~Ukb~i<4*MgHnuUZD7={_cwO!BAI|KY6G)TNcvNI8q*urO=ep1X!AF%}&!b}xX zoUFQ|1@whb#vn4XVN64;etZw+1H~t9n#E4NAx2~>G zD3`(v30)2&e?>eIFM6xWtNJ|=zv}I9)@nzt)->_ZkfiR1=K6sJ{OUnEub6Vaa%1Bt zo*jYnHzSOvBWH}FsAH%Ut*m_wclGik!<~pXo}&0+>%q9;L{oF&>10HuM0JORfhC`o z<#RltYsf=8DI>4gFWiEJhmA7P45ZqG(ak!6GD-91U&eF>Uh?v$@Lr!W{OVYK2DcE5 z6sI9-AL4hD&2i*A^67{=Vdeu)H0*huQ9>lM&D4BuW@Du&dc4&aWOU|F!kjKU5vM80 zn;(7Mh4NC|tFF14YtKvS!!&BnXJq&R&OeFy6tC|PR{1(1ifL+^Fh7;D`4M_SP7z4M z`6V(-V7TQL^9-Y+X(qo)g6{+EsKQ7cZ7`zFHIN^Da%C4RLPXF9f$_ zq-&5^wUuGTW|S6Vh7NcH>ln%cZFc`M<{=OWc8>G%pppNul5@$FkCU$j4@wGY z5!6-bvfoGkKxqnSn+!F#I*$y?!1Oq^I4@_nJ|1Psm=vLxN*z5$zH@|-50#r9d!8kQ z9>By41&Lb8Aj4bn7Q2#`TD=TjAp@k~R@~yJqm3r}vXI$p#5*m7E>q6KN?sz#To3;w z80ddD!g@3@2t+5sLLM=0I!eCYWM4WmD_(SzzEYgWc{oSir4asMCv|{!io{)3~7M#sFf|4gWyXQzmLf63q8G9U7Q<2&+p5xYwpNs}21z#XX z_1VeuvApQBR)-e)rE@?bvADVMTCPI3)07cp_$%vxVG30@qC9suVsA-#f@^C@;Is_^ 
zT~LfTh9JeZ6baHmz@fLt=u;)yy7$Y`%f&tQDO1HiIrBj!kV6!x+@4oV*_1Z8n=tc{ zwaf4s^-DncaWle9JuaM8%P~0`3+e@hzuhSD9l{&Jz9|#(D^x#h9F%l}XY~mQ!V*TD z$E0q*<2~j>n4#gqhikd2zrDeMyIfgYW|&;RI!V43?I%wk>N?2bzm+Uvg~>A}_~fY^ z+1UR*7q@0Yy2rElX~&TQ>K6k-mnvPm*p2gKj>u+BTBrIE`YLOe>+S4x#I0CcuHK~o zMmqW0=>q-a#Tk|m=fOjX{V#-h>LA6uc=veu$7Sk+J71=eCQxz^J2MMJ(F;QSlew2iosd=Q>kaRC0jq3O+bX0Q6kT8HNB9X#&x|&f8a0@pm zyf`lx3dwl|Tu2eOU_=Qr=i(?U?5Y6?6R>AGm4(UKK6{;bt}>16>(V}srPPc8?f2%n z!uoDHQ0839R>+iamooCN^ctHG?dm071UYxho08`;H@nv~)J*#9E&XLx&Xvs{fUr=c zm_+$p?DnOz2CqS3R%I22JFR#d8f$)ArMlRJbRhOF6tbDiOXn5u>lC+0UsKA8nF}9I z*~z@({?3`V$l@KDsv})m9=FR`TgVC7p~I4SGwNMj;`X>A4Z5X-R~LU@m+CC9hz)Mx zOLK>+=9)hhF^s%SVK(9{lbGzx&73HPQLP19_PjhC{}u$GLxG~KiN8-+;|HZ92eCUKEW0JN)L*>-1&$TY+H3Lr`ziM3RJmEULnid`)`P- zR?TOzc2%b!vl0u{Z^gfPmMYTo@vEL8H*HMrWd%RtHDK3g>hJy53Kf``S!HNjV+#a} zsr=pKm3YM4nNm$0xEMJQD&v=<`oY79(GJJ;)JOs%2egEa@r8%hnVxZTpjhg%qijM-!esnjyQxLsY(hxkGKq3k^SPPX z`SWGCU=`UK+EaWsRFK@$GlSDaS;uphPhUdu6cZ2^={v{YHv+#o5>Jt-+G&!4rclp*=)5%1g0@Mwu#ua2bt8;lZ<2{4V=#d@U)`2GBQT z3xEIE?>GEcdAAI?OD&HjIb}q6d7Y^Dl~Q(?ZeGpr|5ccI75GxD^1Miq)?t6fU&=nn zvHJecQ5J&P2&;L*Su~zW-#Jd&R-Ua&+qyW<-;M zfi8Qbn%`xJCzr=2q(!3WjB$&HRM%cJldCni%f%a7#Nr5+PxDLMu{YRXSQx`&Rh|oA zqq!=R)dWlQwj5W;8jp{xct7i)_>S=R9qQ6*2I3kr^?8cYRdw<{A>3}yfwrq|Ba&0Z z78Yiwvp#zmvR^Lod!2cnis`s^mlt|b0GfUtlN~^N!I5gdA+J9=2o{{)(?en0yF%}G zKNVd*oKgOsRq{;rM34!@7E*9bvP{*(NM9mlT<4aJivW6wRTo5cJQ_E>NMWZ9K$xYf zCtpGzM^RIvKW9mWvIvBi#9*a;WQGY@?|5YSaUWX}_QpgQu%=L@q*etkYWawHhT@AT z?nW=nISN!6nI~SgB;6CI3fL^)BHcU8Z&Ug^y{Xtu4#AnY2P^SBIb+FZU}@WA(mz() z#U&s$jC_ccuTlc955-wVGhaq_HVSP;Z)?4V-lh&LM4IImM+Y9U34uOlktGyb{z8>@ zv)+U<=+NH)*XYAF{MqpVGF9&Y$Gk4Tyzl~tjXT5%{v~MeN3b5Ck;gK3Gx*Ev=Ol`< zR8K}KyIb?@Ck2&@JS~5J^c5nTr-HY)+hMH#p1&_zrYe%+3k_;*F7@(8pd=vl>IAp0 zKB4O{6h_JdrJ8gXDBwV#p{6W?mMwt+P$}zPIeTH6SWLb)8||53lG`bHi1dNKFA{9| zEE_(3pgRG}VM-a8`lYJ`m`d7jBgEp7v95?_-ev95J37we`pe>{N6;9QHWhQrnGTQp zoC%fD0Kv@SdLo#5KQ%uF5JKAE+8|GT@0iUFO(OXh$bb06uL&u1MIu&d8~Mj}xZ*(> zxXI`^PYxa;oq&z6HU&#s?=A|eK_baML{KEysN1os`6is`U~NNhVi#A~AC}6$LU&X6 
zH6yD$O_e4NI`krOIvOjqF2kooQtN!7S{+MPqbGq39wm(f9n(nhTh$K(O(tPMKpG7utpx_|*qE3y zeBx6kH15yVyHw1`bZq7FsvAj~HeKAHjTZNF^ZxRgST7b4#oTYwn9wL~i8Tip^e ztV#F>TfmgifztqddFFKbS+3j^PevVPna-ib$p%NZyWz%#(Sc|nCBMOOda;1wanHmp}(3s4;vY#!^>k5U}gdtd{zoML_`U^@a zO`mgA2bC)IadL-Ks$kvhHnVYL#A+d z9Xy-6=)IVWGtguAfJCu}eT7k>7ZlW|{CMHV7q#3-sMLWvL9iDsF1l+7EHhIn$`X)u6@%R~7hi^V?E zdkY4NSs0m3XdYz(x zZz>(+6I*D*!6{zBHS}$mj%(-H?Nf{F|vMm%gMq zm=zi`lz#$|UZ`DLS%N{{y7G}_+IauqONDY5@-+#F6g}e}SgSLaA2avkHH-;>VV*Gt z|N9etBW)Tr%ZUo`yA*I)#~ia_?#%N8s7Y@HSC|W)^s(DCxLa~K1meA;fCW_iox(_lO3SbLippaP~X;;i`G4&nhjoK`t8BpsZD7ri(sj6jZZL5YB(X2!~07u^tZ(VS{1Lz?u0X1LD zI*TgxUL|J|VFKFQ{`{K&SeQ@c9lhzW*~l_@&nOV<>;2nYyqu{{3Wn!H1S1Q!Dh`8 z7duhIkvDHZFlEoH4RHIr4qhxd$KH;@&P#FY7?T4Uc+ped{DnTzv+V-llrPA-Sxt== z9bz+gH`|1Si+fp+c-k*h73Hw*ev4XIyMx0--ct0eOujzK*jK^D z%aKgzc-|V@);1x~#QBqWE+tTy z>4cdQ!jCB)lrKjmC>plyn<}~yVtpb!7;Ct;v_|bPmy|mUbuKa!W;v{Z-oJXrf^2#))WfQEa+y}S2 z(-}e#F^kvaqvvx1SqgK znC-bx;!5)BVW?C0UwAUeTws^f(80OlBSZOPqkwbmiJk_5%YFfY>)~06%&soe8<6;mx%S0oLE3`%Ox5)2VG6yd5a$HDGz@{^TDT0=e+p`i43AV&wlp~CO7^>JcG?E=Z z(5i0P3B0MCL9L1xa+vOWT8mz zQeWJ*N2Y4N5@-2@c?77Me1#N0TNj+`chn2sLenCkEOT{E%I<`#;XfJWuMkd!zld8? zK5eg)C!xQ9=Ai&nv@^EC^J>>YB7Ij2`QUVtoo^3^u$X5M5A>j?fbg^3fq0Ez-nd?6 zD#>sJZFeFk70lGb0_AGnly{vr`Kq@!Lndp$O6g}xrg9WC8#&Pt^YhS<^Nu$u;?nrj4y`=^~d1I*Xm0kdHVhu^hv%kw!aZ`M6Qi3HG z`ji%Ioms{N5^FsO2*sg?*P!?hJ(`I?t*h|yg=uq=t)d6^UimHd^xl2YPXUAwUd<0! 
zooHmIB3)X{ODBF+7CSfmQ&=4ok++zpg48|Qzthdp!c_+nZrd-#$M5VINv5jmSXx_G z@dsD)1NwoYZeWBC6c=6gV;oig)-(A7XV^Q}q2d6WsXZe!qfW?#H829GW$hCDE&^^m z8uU*Md`HkmNSD2sRcORGY-*n5>1C|_%tuVQOcM5)eLU8C!LXXG(9d^9Tr;1XAfjb^ zV|K#)2~q&GB?gv({)lqMlb}tfLX{_jzponB ziJme)N5U_dFcDL3jdM=%C6G9G^bVr&VZjRgbxE^?`%7E%eMvs3g7XfW^QKaT6v>-B z0GPTfQ{@sOBND*`6vu?3dG3~uOKzLnd(JRMHy(^dY^ZgR`NHzDyO#R(^FM8s{IuZ0^3A2L&fe!5-9=>Db)N4F1|ocvSL+2289Ya(Nu@r$*md^E!PTT+YLbi4Wc7I7p4b}FDseY*7jX!@g4qdB5EbgCOy=n;G|4{OY5 zgRY)=7$_F1m7@wZn1r3O3^WDASHx!d0_s*zS!#P;Q-8{<8dQ!407##m26kH@k&*P< z0F2IeAf*2C?wZ;2w;;uT^dN1BP?Or`9NVKf=a&ZY5{4SPHA!A4*$GsK7Gwa=)IsXAztpI9$^ zHK?pe^~9baQS`2uV?m;=rcXSut;cjUN!7ynAAb!RZZQ^l#sETruN{>^}j9mZQ+L}&a%usWf` z&V-4{#F;KG{ebe}k;a;Is$;f~xR)itys!A3FcC%u;z>VKOs7ncs7ECQs~N?7z}giD z1#Ps{oCFfd{E8wr`vKlgM~$(d=5gp5!d;q&mXQDz)Ky-|W0d~Js z+;>+}z~*31=tbGZpiWt04a%L(fj+ALZ@@F5YiiNizuSoMH@-W9In9#VGx45`ggCv{eT5ST)i&RkeEKznX*#F4orlV2WSO zMG|G_9F)<;E0?qEyvc!vG4hR&n`F{Gr}`HE6l$Cci+ru<=($_wgt8)p)g*lV6Ed8M zBSAQ#-6x|mJ9bl~HAm2SnCIATK3n<4wx<2C=h&(8=$j}-O+|Keg}=`b#mQ~1`29v# zMTQ$$2PEuA9!F$EmP*joOQv+|^Pu7mE*E)J?woBpx0iA`nkjEqDJrol)Mj(Ma&4`Y zTciGGQClqL*GDl|sCuhRAFP$x-SdDv}tz%3LL9m+{HE9-xc52e)!9F5a0YPQmE$py^Kk>y&6l+JY3lR+fTgok66P}2l=vFTtNZu*ob&gM@hJ|gWUhHJe5 zH6V!>2~<4%rf2;ttzR8X{PI<4e?jYg9KR}!ZRFOk~Qi1g1sI6v4 zLf}p#LW_72qFVh|S2UA|w}@nR;WZ%ae{ER3o8|CsRzg{k4iDhyFYvXkr^BjY3hW^0 zzjn!K=ZjTkdZT~)vZt^v`$48gEeu}g>z&K<5EZeE@Xt$P3;npcFzccFo{*Wbu(R+B zS>1Y3MOu*2f=fpR#Jw&<3G#HTq5^@{D3V;+QaElQ0q5_Uxz?jXGLy(DJNS7;cJ!M~ zh~_`Fc2^<`piOY*{ExAHwUK`+u>6q@9V_ zN))B3e%2k;V@IGhuPvn%mb7kM{EL5p|7NW+@z`$v z=Qm=K_4^~f^|z*lI90v9qx8p@?;+M#WjrhMcato*RNL! 
zH1dze-j&xIi%lE)7}p<{#tI*Bci;G_V^)2f%$oAmpwka8A2h*JZ9cmtENMKFw7dTD zQRy+da^76aRiPbn>Mv4lf+?M?DjzO2w!pk#FSt-zhz!^@R#a=Od0H(B_xty+k&|S6)O}_v{%$YIM?{>^eI#^W;rpfL^7r-$u}l+Ns@YxZ zw~BO4Tu}*9T;5FLze~ER_V+kCM9tfi^==WY`vQ)xZ6 zExtUC>3`RH{)siehTM+IxVzRD-x|8zwN~=yzbk!JE#Ygc?R0q7dXJdV_m^mYGQz`C znRA4$oRyd3d;R5I-{bT%muv$g-$>9yv$VfNmgk;~E~n;H!4_6kn7@En z@I~z6r{gg&(6P{`&)WvMQI=VcDwKCro}-+(PCXO3Y(g=|dt4W4AJe(@3^68q4<6g` z^4juR{D6cPAeOd5bFdP=Ut-PB^b+Z(zB3cw@lH*53C)O3Ssx)VJ{{X3dXBA_l}Fp! z9?zW0($2(^uWi*$qt?@g@D>_A>QR-I$JC-HAAD39CjQnJapU8>ZHFvHO}207521<* zhxK@H5jCQrz-F$C-ii`MD%E}`Ha;BuladiW@aDGIGwQVc1M;0yZs#bnw$lAJ4upj% zK~z=l%*SxT-i$KpB7FOYywAM1-hX(lhiYDC*t?DF8o^r*wKAbHZ#?o-2hgVn7d=() z?E0#L{hWn}=(2w_EB7jmo@KvlK6t6#MCHw)J!6u`4ol*loqk4Yjm?^0Itnjgt^Sj( zlUqpKa`8^(e(L5+cEYI#$uIce^1t~qKG8E#YU=j`4^B3hd)~*xFe$`j)4SvjD3l6# z8YtzBkLYuIWBxkFrZ*o%pAG1+&EF&%a41(jx$i7R@(ZVkO3 z_?L4;SZBXK-|uEZz*0&jvme<^UN$F$zu|d(b~E>q5%qJz0YV9f;BiT#%fG5%My||a#;BJDtW=QU6O=+R<$9Ivk#AJ#FQ7;wMu-ebhN6djA}@ESZ~#YF z_71;`29~)tv= zAg2%`uE7Q(`i?yLzJbI&)2MW1{GiZ6bO zACx>MUlMuQn1H>6MKM53Goa2R*$9tVk@$U=m|RxKmWwvn;~FPR3gHE*)Jz%B2y8b) zKh@Bzg&qjVx1G@5Lm>K7r1L@FHK#X@hd1n(u(hQ;%or|j){1^wo_iYl$=EXd(RAcU zRuFm!Gi5!YtuwBZn*u^kM-*^Hzti6}n+`U@XuI!V`y${e6qFN*Dh>4Moar5YH_{Lh z9d@l!e&A_jI1J~a4+`2>e2avxA!j#hrNL-Z3;d_%kjyR4_5}Xe{PXlShK1sr+IQ|X zeml#>>CMfi7d?Z~jp?IQB&yju?X0CT51eUO!5>8s;eSX2e%uH%4Y-~g2|+^$;)Fk{ zd&_J@N-7gL$oQT6@2cr20(jE{jX36$j8Aw^l)_`XH)~2gAc=du1%@&ZV`Lb?jq6>= zn#%bT!omXtK3R1aY}3l1@(vdRPs`3iw6iMFqnHuU>zzNruP(=4wH=YtH6xJ4>8Ea9 z+tnArtcxryKztRB0FLn+N@)<@!jzz2#tp{;wjQ6iwl-p9homHx{a@`{{aaI294E_o zEWpx431Pn6xkoW2!5RYsGhePTlW^SQ?AhIR&b{Y*zTfkG`+P*z&M`pdMpdp(lCfH* zba=6X*6%E#x@2)h}1Q5Iv3U;39q2u3l~*{Pq5retpO7fW%|O z)mlRAGvb2x2!}^$1>5L|39-JMXuB#mx3d!wh{QGs47M67$6b&+dJ}L^E?P`hVW)$2 zeV0+QA(KD9{SeooJ-)MRsNcyWhqxOI?1_ObII33O$8u%tn>h1$*k&k{P+3ssolI)s z88^e8C>l?Pw2kyoyo+Ae{H1=tzG5IB7jGX4)q8k|N^o(r!D8?{!TN*shJM?=x}PMm 
zbBKXSA~^vHe>J4&a&W8r&nw-L)Oj^T&puEWWNAJfZBg;|&vHTo>oL{&JAr>8@OJwetDregUV2EFr0M)vT7<3EX5{*beTT^^~+#PgY%ZF*!Z}sz%@Ld6kupOnu!7b zfS2N2(?XECqGB9si*^FeehS>`s=f6k*B@c-NYtgNCFFa6nq@{Tv=<5u!@24AST7eJ zM*&n3xe%XpRz@=h5N^YGXL*4d#Sp8KqEYp6qyQ_cVY|*ltd$R;f6s>SBE+i9Ry{F2UqxHGbhdpB$HCO`khPjQv41Cq==;86o zzm-_SZPv#T{&24!cqX;kFc*z0X`&I=)@_Z7J{Sz$^X`2H=F4n0PmOZKqnu54CLfKn z)%q5#FlDi;p!y3(d_;|*Y*?TSH196`^CyaqCE0yyT-?Sq4abgOKz`Lt#UOrN^q#eq zBGw=FwitE|j^Wug-(Hn&RE6M^J&_qn9bFsGO4?6Q;p6bR>gK%q)hfG~g`5bwDgl~y z!@7#76b+NQ!SyO+^!;ceU-*n}%u4i*lcFBrTbqSnZLm@t#CuL5=x@Vd%Btjg8_`t9U_j2t*u|PhcU#O?0sQJua5COx3U|>H39bYgeSb z@N)lA;ye$OvB`MBU8Yv6i-apqZp?yEE z@cv!NATBQ=$BE=KoFxyU$MuGK3ITA8-&>j9^XNqjS5?|E#BbPIOEavq6x;OC%)~QHPsD%{OgA+HlTV5xYa# zxdd_C{^s}k@O(RWRn)pr`+5Yj!C0h*x%Rv6teWZwN%eups9z9h*EBccz|yM@#^HWE z8!P)jrEkweV8$Ivr*}#X&;68_u9tRC3}JO449lIq`!ZJ}-RJWgx;}8%;@%MqqY7&) zzE*mo$xAQbIT(xG!^cbc!Z=G8-f!yNro?o3t-34sz~dAFPeL?3FzhzN>`vKd(Grg# z?aPmQrWjQIE0O)`_`i{Kd2fX%$nDERQF|P`}S=*VeuR#k-hC0VFmQ${< zF_y%#T?EM`1x*cKrCvO|f@XX$EbpF3XC%357p>7$MCnZF+?nCt>;L1*pIKQZolg9y zAE{(u=1&$!5s32N&#eD(za3E5dFJ~J!F0uwy7b=m|49)MYUb7c=5!^Kx_tP}?DQ6A xR?Z5?Vpek~dzdw=*+YTS0|X$OJrq!EE-ph;*gvOvH}n7i From 33a97bbf0971db5376648a16f2bee107aa4205e9 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Tue, 9 Apr 2024 00:27:09 +0200 Subject: [PATCH 103/737] Some tweaks to text strings --- nf_core/pipelines/create/create.py | 4 ++-- nf_core/pipelines/create/githubexit.py | 2 +- nf_core/pipelines/create/githubrepo.py | 24 +++++++++++------------- nf_core/pipelines/create/welcome.py | 23 +++++++++++------------ 4 files changed, 25 insertions(+), 28 deletions(-) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 1fd4601ee..801c203d2 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -267,7 +267,7 @@ def init_pipeline(self): def render_template(self): """Runs Jinja to create a new nf-core pipeline.""" - log.info(f"Creating new nf-core pipeline: '{self.name}'") + 
log.info(f"Creating new pipeline: '{self.name}'") # Check if the output directory exists if self.outdir.exists(): @@ -536,7 +536,7 @@ def git_init_pipeline(self): "Pipeline git repository will not be initialised." ) - log.info("Initialising pipeline git repository") + log.info("Initialising local pipeline git repository") repo = git.Repo.init(self.outdir) repo.git.add(A=True) repo.index.commit(f"initial template build from nf-core/tools, version {nf_core.__version__}") diff --git a/nf_core/pipelines/create/githubexit.py b/nf_core/pipelines/create/githubexit.py index 79421813f..3dac88cc5 100644 --- a/nf_core/pipelines/create/githubexit.py +++ b/nf_core/pipelines/create/githubexit.py @@ -20,7 +20,7 @@ ```bash git push --all origin ``` - * Note the `--all` flag: this is needed to push all branches to the remote. + > 💡 Note the `--all` flag: this is needed to push all branches to the remote. """ diff --git a/nf_core/pipelines/create/githubrepo.py b/nf_core/pipelines/create/githubrepo.py index 656f8c51e..ec762e181 100644 --- a/nf_core/pipelines/create/githubrepo.py +++ b/nf_core/pipelines/create/githubrepo.py @@ -17,11 +17,6 @@ log = logging.getLogger(__name__) -github_text_markdown = """ -Now that we have created a new pipeline locally, we can create a new -GitHub repository using the GitHub API and push the code to it. -""" - class GithubRepo(Screen): """Create a GitHub repository and push all branches.""" @@ -29,16 +24,19 @@ class GithubRepo(Screen): def compose(self) -> ComposeResult: yield Header() yield Footer() - yield Markdown( - dedent( - """ - # Create GitHub repository - """ - ) + gh_user, gh_token = self._get_github_credentials() + github_text_markdown = dedent( + """ + # Create GitHub repository + + You can optionally create a new GitHub repository and push your + newly created pipeline to it. 
+ """ ) - yield Markdown(dedent(github_text_markdown)) + if gh_user: + github_text_markdown += f">\n> 💡 _Found GitHub username {'and token ' if gh_token else ''}in local [GitHub CLI](https://cli.github.com/) config_\n>\n" + yield Markdown(github_text_markdown) with Horizontal(classes="ghrepo-cols"): - gh_user, gh_token = self._get_github_credentials() yield TextInput( "gh_username", "GitHub username", diff --git a/nf_core/pipelines/create/welcome.py b/nf_core/pipelines/create/welcome.py index 38f29b041..1da0a3c01 100644 --- a/nf_core/pipelines/create/welcome.py +++ b/nf_core/pipelines/create/welcome.py @@ -9,18 +9,17 @@ # Welcome to the nf-core pipeline creation wizard This app will help you create a new Nextflow pipeline -from the nf-core pipeline template, part of the -[nf-core/tools repository](https://github.com/nf-core/tools). - -The template _must_ be used for nf-core pipelines, but hopefully -helps all Nextflow developers benefit from nf-core best practices. - -If you want to add a pipeline to nf-core, please -[join on Slack](https://nf-co.re/join) and discuss your plans with the -community as early as possible; _**ideally before you start on your pipeline!**_ -See the [nf-core guidelines](https://nf-co.re/docs/contributing/guidelines) -and the [#new-pipelines](https://nfcore.slack.com/channels/new-pipelines) -Slack channel for more information. +from the [nf-core/tools pipeline template](https://github.com/nf-core/tools). + +The template helps anyone benefit from nf-core best practices, +and is a requirement for nf-core pipelines. + +> 💡 If you want to add a pipeline to nf-core, please +> [join on Slack](https://nf-co.re/join) and discuss your plans with the +> community as early as possible; _**ideally before you start on your pipeline!**_ +> See the [nf-core guidelines](https://nf-co.re/docs/contributing/guidelines) +> and the [#new-pipelines](https://nfcore.slack.com/channels/new-pipelines) +> Slack channel for more information. 
""" From 1503b12bb447826478416c8910879b01e5753ea6 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Tue, 9 Apr 2024 00:58:20 +0200 Subject: [PATCH 104/737] Whitespace etc --- nf_core/pipelines/create/githubrepo.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/nf_core/pipelines/create/githubrepo.py b/nf_core/pipelines/create/githubrepo.py index 29c008d4d..cbe1891bd 100644 --- a/nf_core/pipelines/create/githubrepo.py +++ b/nf_core/pipelines/create/githubrepo.py @@ -19,14 +19,15 @@ log = logging.getLogger(__name__) github_org_help = """ -> ⚠️ **You can't create a repository directly in the nf-core organisation.** +> ⚠️ **You can't create a repository directly in the nf-core organisation.** > > Please create the pipeline repo to an organisation where you have access or use your user account. > A core-team member will be able to transfer the repo to nf-core once the development has started. -> 💡 Your GitHub user account will be used by default if 'nf-core' is given as the org name. +> 💡 Your GitHub user account will be used by default if `nf-core` is given as the org name. 
""" + class GithubRepo(Screen): """Create a GitHub repository and push all branches.""" From 0acb2e2d639ffa55ae8d66b42b3c529b03a07839 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Thu, 11 Apr 2024 09:09:09 +0000 Subject: [PATCH 105/737] push s allcreens, not switch --- nf_core/pipelines/create/__init__.py | 3 +-- nf_core/pipelines/create/finaldetails.py | 2 +- nf_core/pipelines/create/githubrepo.py | 4 ++-- 3 files changed, 4 insertions(+), 5 deletions(-) diff --git a/nf_core/pipelines/create/__init__.py b/nf_core/pipelines/create/__init__.py index 96f027e9f..c7a401496 100644 --- a/nf_core/pipelines/create/__init__.py +++ b/nf_core/pipelines/create/__init__.py @@ -87,8 +87,7 @@ def on_button_pressed(self, event: Button.Pressed) -> None: elif event.button.id == "github_repo": self.push_screen("github_repo") elif event.button.id == "close_screen": - # Switch screen (not push) to allow viewing old logging messages - self.switch_screen("github_repo_question") + self.push_screen("github_repo_question") elif event.button.id == "exit": self.push_screen("github_exit") if event.button.id == "close_app": diff --git a/nf_core/pipelines/create/finaldetails.py b/nf_core/pipelines/create/finaldetails.py index 86b93423f..3ea75f419 100644 --- a/nf_core/pipelines/create/finaldetails.py +++ b/nf_core/pipelines/create/finaldetails.py @@ -84,7 +84,7 @@ def on_button_pressed(self, event: Button.Pressed) -> None: # Create the new pipeline self._create_pipeline() self.parent.LOGGING_STATE = "pipeline created" - self.parent.switch_screen(LoggingScreen()) + self.parent.push_screen(LoggingScreen()) class PipelineExists(Message): """Custom message to indicate that the pipeline already exists.""" diff --git a/nf_core/pipelines/create/githubrepo.py b/nf_core/pipelines/create/githubrepo.py index cbe1891bd..3e3144b4e 100644 --- a/nf_core/pipelines/create/githubrepo.py +++ b/nf_core/pipelines/create/githubrepo.py @@ -185,10 +185,10 @@ def on_button_pressed(self, 
event: Button.Pressed) -> None: ) except UserWarning as e: log.error(f"There was an error with message: {e}") - self.parent.switch_screen("github_exit") + self.parent.push_screen("github_exit") self.parent.LOGGING_STATE = "repo created" - self.parent.switch_screen(LoggingScreen()) + self.parent.push_screen(LoggingScreen()) class RepoExists(Message): """Custom message to indicate that the GitHub repo already exists.""" From 5fcedccfa26aa4d6d6ea070ba7a92abb2eb5b5bd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Thu, 11 Apr 2024 09:12:48 +0000 Subject: [PATCH 106/737] always push files to the remote repo --- nf_core/pipelines/create/githubrepo.py | 18 ++++-------------- 1 file changed, 4 insertions(+), 14 deletions(-) diff --git a/nf_core/pipelines/create/githubrepo.py b/nf_core/pipelines/create/githubrepo.py index 3e3144b4e..fafb66a4e 100644 --- a/nf_core/pipelines/create/githubrepo.py +++ b/nf_core/pipelines/create/githubrepo.py @@ -86,14 +86,6 @@ def compose(self) -> ComposeResult: with Vertical(): yield Static("Private", classes="") yield Static("Select to make the new GitHub repo private.", classes="feature_subtitle") - with Horizontal(classes="ghrepo-cols"): - yield Switch(value=True, id="push") - with Vertical(): - yield Static("Push files", classes="custom_grid") - yield Static( - "Select to push pipeline files and branches to your GitHub repo.", - classes="feature_subtitle", - ) yield Center( Button("Back", id="back", variant="default"), Button("Create GitHub repo", id="create_github", variant="success"), @@ -169,7 +161,6 @@ def on_button_pressed(self, event: Button.Pressed) -> None: github_variables["repo_name"], pipeline_repo, github_variables["private"], - github_variables["push"], ) else: # Create the repo in the user's account @@ -181,7 +172,6 @@ def on_button_pressed(self, event: Button.Pressed) -> None: github_variables["repo_name"], pipeline_repo, github_variables["private"], - github_variables["push"], ) except UserWarning 
as e: log.error(f"There was an error with message: {e}") @@ -201,7 +191,7 @@ def show_github_info_button(self) -> None: add_hide_class(self.parent, "close_app") @work(thread=True, exclusive=True) - def _create_repo_and_push(self, org, repo_name, pipeline_repo, private, push): + def _create_repo_and_push(self, org, repo_name, pipeline_repo, private): """Create a GitHub repository and push all branches.""" self.post_message(ShowLogs()) # Check if repo already exists @@ -230,14 +220,14 @@ def _create_repo_and_push(self, org, repo_name, pipeline_repo, private, push): self.parent.call_from_thread(change_select_disabled, self.parent, "close_app", False) add_hide_class(self.parent, "exit") - # Add the remote and push + # Add the remote try: pipeline_repo.create_remote("origin", repo.clone_url) except git.exc.GitCommandError: # Remote already exists pass - if push: - pipeline_repo.remotes.origin.push(all=True).raise_if_error() + # Push all branches + pipeline_repo.remotes.origin.push(all=True).raise_if_error() def _github_authentication(self, gh_username, gh_token): """Authenticate to GitHub""" From c2381ae6b10b8faf09fe87b221b11c4c4da377e6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Thu, 11 Apr 2024 10:22:52 +0000 Subject: [PATCH 107/737] fix going back from nf-core to custom pipeline --- nf_core/pipelines/create/basicdetails.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/nf_core/pipelines/create/basicdetails.py b/nf_core/pipelines/create/basicdetails.py index 3f319b58f..deb22b48b 100644 --- a/nf_core/pipelines/create/basicdetails.py +++ b/nf_core/pipelines/create/basicdetails.py @@ -77,3 +77,9 @@ def on_button_pressed(self, event: Button.Pressed) -> None: self.parent.push_screen("type_custom") except ValueError: pass + + def on_screen_resume(self): + """Update displayed value on screen resume""" + for text_input in self.query("TextInput"): + if text_input.field_id == "org": + text_input.disabled = self.parent.PIPELINE_TYPE == 
"nfcore" From 30f6c5a2f0d9261a72e54de2ad2ee3c2537f79bb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Thu, 11 Apr 2024 12:20:59 +0000 Subject: [PATCH 108/737] fix logging screen by using a class to hide buttons --- nf_core/pipelines/create/__init__.py | 1 + nf_core/pipelines/create/finaldetails.py | 11 +++---- nf_core/pipelines/create/githubrepo.py | 12 ++++---- nf_core/pipelines/create/loggingscreen.py | 35 ++++++++++++----------- nf_core/pipelines/create/utils.py | 10 +++---- 5 files changed, 33 insertions(+), 36 deletions(-) diff --git a/nf_core/pipelines/create/__init__.py b/nf_core/pipelines/create/__init__.py index c7a401496..da6a69322 100644 --- a/nf_core/pipelines/create/__init__.py +++ b/nf_core/pipelines/create/__init__.py @@ -53,6 +53,7 @@ class PipelineCreateApp(App[CreateConfig]): "type_custom": CustomPipeline(), "type_nfcore": NfcorePipeline(), "final_details": FinalDetails(), + "logging": LoggingScreen(), "github_repo_question": GithubRepoQuestion(), "github_repo": GithubRepo(), "github_exit": GithubExit(), diff --git a/nf_core/pipelines/create/finaldetails.py b/nf_core/pipelines/create/finaldetails.py index 3ea75f419..d894d9a5f 100644 --- a/nf_core/pipelines/create/finaldetails.py +++ b/nf_core/pipelines/create/finaldetails.py @@ -10,8 +10,7 @@ from textual.widgets import Button, Footer, Header, Input, Markdown, Static, Switch from nf_core.pipelines.create.create import PipelineCreate -from nf_core.pipelines.create.loggingscreen import LoggingScreen -from nf_core.pipelines.create.utils import ShowLogs, TextInput, add_hide_class, change_select_disabled +from nf_core.pipelines.create.utils import ShowLogs, TextInput, remove_hide_class class FinalDetails(Screen): @@ -84,7 +83,7 @@ def on_button_pressed(self, event: Button.Pressed) -> None: # Create the new pipeline self._create_pipeline() self.parent.LOGGING_STATE = "pipeline created" - self.parent.push_screen(LoggingScreen()) + self.parent.push_screen("logging") class 
PipelineExists(Message): """Custom message to indicate that the pipeline already exists.""" @@ -93,8 +92,7 @@ class PipelineExists(Message): @on(PipelineExists) def show_back_button(self) -> None: - change_select_disabled(self.parent, "back", False) - add_hide_class(self.parent, "close_screen") + remove_hide_class(self.parent, "back") @work(thread=True, exclusive=True) def _create_pipeline(self) -> None: @@ -106,7 +104,6 @@ def _create_pipeline(self) -> None: ) try: create_obj.init_pipeline() - self.parent.call_from_thread(change_select_disabled, self.parent, "close_screen", False) - add_hide_class(self.parent, "back") + remove_hide_class(self.parent, "close_screen") except UserWarning: self.post_message(self.PipelineExists()) diff --git a/nf_core/pipelines/create/githubrepo.py b/nf_core/pipelines/create/githubrepo.py index fafb66a4e..bac63c1f4 100644 --- a/nf_core/pipelines/create/githubrepo.py +++ b/nf_core/pipelines/create/githubrepo.py @@ -13,8 +13,7 @@ from textual.screen import Screen from textual.widgets import Button, Footer, Header, Input, Markdown, Static, Switch -from nf_core.pipelines.create.loggingscreen import LoggingScreen -from nf_core.pipelines.create.utils import ShowLogs, TextInput, add_hide_class, change_select_disabled +from nf_core.pipelines.create.utils import ShowLogs, TextInput, remove_hide_class log = logging.getLogger(__name__) @@ -178,7 +177,7 @@ def on_button_pressed(self, event: Button.Pressed) -> None: self.parent.push_screen("github_exit") self.parent.LOGGING_STATE = "repo created" - self.parent.push_screen(LoggingScreen()) + self.parent.push_screen("logging") class RepoExists(Message): """Custom message to indicate that the GitHub repo already exists.""" @@ -187,8 +186,8 @@ class RepoExists(Message): @on(RepoExists) def show_github_info_button(self) -> None: - change_select_disabled(self.parent, "exit", False) - add_hide_class(self.parent, "close_app") + remove_hide_class(self.parent, "exit") + remove_hide_class(self.parent, "back") 
@work(thread=True, exclusive=True) def _create_repo_and_push(self, org, repo_name, pipeline_repo, private): @@ -217,8 +216,7 @@ def _create_repo_and_push(self, org, repo_name, pipeline_repo, private): if not repo_exists: repo = org.create_repo(repo_name, description=self.parent.TEMPLATE_CONFIG.description, private=private) log.info(f"GitHub repository '{repo_name}' created successfully") - self.parent.call_from_thread(change_select_disabled, self.parent, "close_app", False) - add_hide_class(self.parent, "exit") + remove_hide_class(self.parent, "close_app") # Add the remote try: diff --git a/nf_core/pipelines/create/loggingscreen.py b/nf_core/pipelines/create/loggingscreen.py index a862852d7..ae9b5244f 100644 --- a/nf_core/pipelines/create/loggingscreen.py +++ b/nf_core/pipelines/create/loggingscreen.py @@ -5,6 +5,7 @@ from textual.screen import Screen from textual.widgets import Button, Footer, Header, Markdown, Static +from nf_core.pipelines.create.utils import add_hide_class from nf_core.utils import nfcore_logo @@ -25,21 +26,21 @@ def compose(self) -> ComposeResult: "\n" + "\n".join(nfcore_logo) + "\n", id="logo", ) - if self.parent.LOGGING_STATE == "repo created": - yield Markdown("Creating GitHub repository..") - else: - yield Markdown("Creating pipeline..") - self.parent.LOG_HANDLER.console.clear() + yield Markdown("Creating...") yield Center(self.parent.LOG_HANDLER.console) - if self.parent.LOGGING_STATE == "repo created": - yield Center( - Button("Continue", id="exit", variant="success", disabled=True), - Button("Close App", id="close_app", variant="success", disabled=True), - classes="cta", - ) - else: - yield Center( - Button("Back", id="back", variant="default", disabled=True), - Button("Continue", id="close_screen", variant="success", disabled=True), - classes="cta", - ) + yield Center( + Button("Back", id="back", variant="default", classes="hide"), + Button("Continue", id="close_screen", variant="success", classes="hide"), + Button("Continue", 
id="exit", variant="success", classes="hide"), + Button("Close App", id="close_app", variant="success", classes="hide"), + classes="cta", + ) + + def on_screen_resume(self): + """Clear console on screen resume. + Hide all buttons as disabled on screen resume.""" + self.parent.LOG_HANDLER.console.clear() + button_ids = ["back", "close_screen", "exit", "close_app"] + for button in self.query("Button"): + if button.id in button_ids: + add_hide_class(self.parent, button.id) diff --git a/nf_core/pipelines/create/utils.py b/nf_core/pipelines/create/utils.py index 7b332615f..670aa585f 100644 --- a/nf_core/pipelines/create/utils.py +++ b/nf_core/pipelines/create/utils.py @@ -201,16 +201,16 @@ class ShowLogs(Message): ## Functions -def change_select_disabled(app, widget_id: str, disabled: bool) -> None: - """Change the disabled state of a widget.""" - app.get_widget_by_id(widget_id).disabled = disabled - - def add_hide_class(app, widget_id: str) -> None: """Add class 'hide' to a widget. Not display widget.""" app.get_widget_by_id(widget_id).add_class("hide") +def remove_hide_class(app, widget_id: str) -> None: + """Remove class 'hide' to a widget. Display widget.""" + app.get_widget_by_id(widget_id).remove_class("hide") + + ## Markdown text to reuse in different screens markdown_genomes = """ Nf-core pipelines are configured to use a copy of the most common reference genome files. 
From bf14b4e1bd181b2cbdf24cfab0d84dbb99a201c0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Fri, 12 Apr 2024 11:51:07 +0000 Subject: [PATCH 109/737] remove force switch and always force, show warning --- nf_core/pipelines/create/basicdetails.py | 39 ++++++++++++--- nf_core/pipelines/create/finaldetails.py | 61 ++++++++++++----------- nf_core/pipelines/create/loggingscreen.py | 8 +-- nf_core/pipelines/create/utils.py | 2 +- 4 files changed, 69 insertions(+), 41 deletions(-) diff --git a/nf_core/pipelines/create/basicdetails.py b/nf_core/pipelines/create/basicdetails.py index deb22b48b..b88ede10d 100644 --- a/nf_core/pipelines/create/basicdetails.py +++ b/nf_core/pipelines/create/basicdetails.py @@ -1,5 +1,6 @@ """A Textual app to create a pipeline.""" +from pathlib import Path from textwrap import dedent from textual import on @@ -8,7 +9,14 @@ from textual.screen import Screen from textual.widgets import Button, Footer, Header, Input, Markdown -from nf_core.pipelines.create.utils import CreateConfig, TextInput +from nf_core.pipelines.create.utils import CreateConfig, TextInput, add_hide_class, remove_hide_class + +pipeline_exists_warn = """ +> ⚠️ **The pipeline you are trying to create already exists.** +> +> If you continue, you will **override** the existing pipeline. +> Please change the pipeline or organisation name to create a different pipeline. +""" class BasicDetails(Screen): @@ -50,12 +58,35 @@ def compose(self) -> ComposeResult: "Author(s)", "Name of the main author / authors", ) + yield Markdown(dedent(pipeline_exists_warn), id="exist_warn", classes="hide") yield Center( Button("Back", id="back", variant="default"), Button("Next", id="next", variant="success"), classes="cta", ) + @on(Input.Changed) + @on(Input.Submitted) + def show_exists_warn(self): + """Check if the pipeline exists on every input change or submitted. 
+ If the pipeline exists, show warning message saying that it will be overriden.""" + config = {} + for text_input in self.query("TextInput"): + this_input = text_input.query_one(Input) + config[text_input.field_id] = this_input.value + if Path(config["org"] + "-" + config["name"]).is_dir(): + remove_hide_class(self.parent, "exist_warn") + else: + add_hide_class(self.parent, "exist_warn") + + def on_screen_resume(self): + """Hide warn message on screen resume. + Update displayed value on screen resume.""" + add_hide_class(self.parent, "exist_warn") + for text_input in self.query("TextInput"): + if text_input.field_id == "org": + text_input.disabled = self.parent.PIPELINE_TYPE == "nfcore" + @on(Button.Pressed) def on_button_pressed(self, event: Button.Pressed) -> None: """Save fields to the config.""" @@ -77,9 +108,3 @@ def on_button_pressed(self, event: Button.Pressed) -> None: self.parent.push_screen("type_custom") except ValueError: pass - - def on_screen_resume(self): - """Update displayed value on screen resume""" - for text_input in self.query("TextInput"): - if text_input.field_id == "org": - text_input.disabled = self.parent.PIPELINE_TYPE == "nfcore" diff --git a/nf_core/pipelines/create/finaldetails.py b/nf_core/pipelines/create/finaldetails.py index d894d9a5f..bd15cf9dd 100644 --- a/nf_core/pipelines/create/finaldetails.py +++ b/nf_core/pipelines/create/finaldetails.py @@ -1,16 +1,24 @@ """A Textual app to create a pipeline.""" +from pathlib import Path from textwrap import dedent from textual import on, work from textual.app import ComposeResult -from textual.containers import Center, Horizontal, Vertical -from textual.message import Message +from textual.containers import Center, Horizontal from textual.screen import Screen -from textual.widgets import Button, Footer, Header, Input, Markdown, Static, Switch +from textual.widgets import Button, Footer, Header, Input, Markdown from nf_core.pipelines.create.create import PipelineCreate -from 
nf_core.pipelines.create.utils import ShowLogs, TextInput, remove_hide_class +from nf_core.pipelines.create.utils import ShowLogs, TextInput, add_hide_class, remove_hide_class + +pipeline_exists_warn = """ +> ⚠️ **The pipeline you are trying to create already exists.** +> +> If you continue, you will **override** the existing pipeline. +> Please change the pipeline or organisation name to create a different pipeline. +> Alternatively, provide a different output directory. +""" class FinalDetails(Screen): @@ -42,14 +50,8 @@ def compose(self) -> ComposeResult: ".", classes="column", ) - with Horizontal(): - yield Switch(value=False, id="force") - with Vertical(): - yield Static("Force creation", classes="custom_grid") - yield Static( - "Overwrite any existing pipeline output directories.", - classes="feature_subtitle", - ) + + yield Markdown(dedent(pipeline_exists_warn), id="exist_warn", classes="hide") yield Center( Button("Back", id="back", variant="default"), @@ -74,25 +76,27 @@ def on_button_pressed(self, event: Button.Pressed) -> None: except ValueError: pass - this_switch = self.query_one(Switch) - try: - self.parent.TEMPLATE_CONFIG.__dict__.update({"force": this_switch.value}) - except ValueError: - pass - # Create the new pipeline self._create_pipeline() self.parent.LOGGING_STATE = "pipeline created" self.parent.push_screen("logging") - class PipelineExists(Message): - """Custom message to indicate that the pipeline already exists.""" - - pass + @on(Input.Changed) + @on(Input.Submitted) + def show_exists_warn(self): + """Check if the pipeline exists on every input change or submitted. 
+ If the pipeline exists, show warning message saying that it will be overriden.""" + outdir = "" + for text_input in self.query("TextInput"): + this_input = text_input.query_one(Input) + if text_input.field_id == "outdir": + outdir = this_input.value + if Path(outdir, self.parent.TEMPLATE_CONFIG.org + "-" + self.parent.TEMPLATE_CONFIG.name).is_dir(): + remove_hide_class(self.parent, "exist_warn") - @on(PipelineExists) - def show_back_button(self) -> None: - remove_hide_class(self.parent, "back") + def on_screen_resume(self): + """Hide warn message on screen resume.""" + add_hide_class(self.parent, "exist_warn") @work(thread=True, exclusive=True) def _create_pipeline(self) -> None: @@ -102,8 +106,5 @@ def _create_pipeline(self) -> None: template_config=self.parent.TEMPLATE_CONFIG, is_interactive=True, ) - try: - create_obj.init_pipeline() - remove_hide_class(self.parent, "close_screen") - except UserWarning: - self.post_message(self.PipelineExists()) + create_obj.init_pipeline() + remove_hide_class(self.parent, "close_screen") diff --git a/nf_core/pipelines/create/loggingscreen.py b/nf_core/pipelines/create/loggingscreen.py index ae9b5244f..f862dccea 100644 --- a/nf_core/pipelines/create/loggingscreen.py +++ b/nf_core/pipelines/create/loggingscreen.py @@ -37,10 +37,12 @@ def compose(self) -> ComposeResult: ) def on_screen_resume(self): - """Clear console on screen resume. 
- Hide all buttons as disabled on screen resume.""" - self.parent.LOG_HANDLER.console.clear() + """Hide all buttons as disabled on screen resume.""" button_ids = ["back", "close_screen", "exit", "close_app"] for button in self.query("Button"): if button.id in button_ids: add_hide_class(self.parent, button.id) + + def on_screen_suspend(self): + """Clear console on screen suspend.""" + self.parent.LOG_HANDLER.console.clear() diff --git a/nf_core/pipelines/create/utils.py b/nf_core/pipelines/create/utils.py index 670aa585f..6006452ba 100644 --- a/nf_core/pipelines/create/utils.py +++ b/nf_core/pipelines/create/utils.py @@ -23,7 +23,7 @@ class CreateConfig(BaseModel): description: Optional[str] = None author: Optional[str] = None version: Optional[str] = None - force: Optional[bool] = None + force: Optional[bool] = True outdir: Optional[str] = None skip_features: Optional[list] = None is_nfcore: Optional[bool] = None From b1412cf39af03f52e8302d95c805db121433c6a7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Fri, 12 Apr 2024 11:59:13 +0000 Subject: [PATCH 110/737] add warning if github org not found --- nf_core/pipelines/create/githubrepo.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipelines/create/githubrepo.py b/nf_core/pipelines/create/githubrepo.py index bac63c1f4..ddf7bf90f 100644 --- a/nf_core/pipelines/create/githubrepo.py +++ b/nf_core/pipelines/create/githubrepo.py @@ -150,7 +150,7 @@ def on_button_pressed(self, event: Button.Pressed) -> None: f"Repo will be created in the GitHub organisation account '{github_variables['repo_org']}'" ) except UnknownObjectException: - pass + log.warn(f"Provided organisation '{github_variables['repo_org']}' not found. 
") # Create the repo try: From 7cc3757198974574834df992ebd747251443ecca Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Fri, 12 Apr 2024 14:46:46 +0000 Subject: [PATCH 111/737] update snapshots --- tests/__snapshots__/test_create_app.ambr | 1181 ++++------------------ 1 file changed, 209 insertions(+), 972 deletions(-) diff --git a/tests/__snapshots__/test_create_app.ambr b/tests/__snapshots__/test_create_app.ambr index 3477a381a..ecf8cd033 100644 --- a/tests/__snapshots__/test_create_app.ambr +++ b/tests/__snapshots__/test_create_app.ambr @@ -1133,809 +1133,252 @@ font-weight: 700; } - .terminal-3970307065-matrix { + .terminal-596440806-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3970307065-title { + .terminal-596440806-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3970307065-r1 { fill: #c5c8c6 } - .terminal-3970307065-r2 { fill: #e3e3e3 } - .terminal-3970307065-r3 { fill: #989898 } - .terminal-3970307065-r4 { fill: #e1e1e1 } - .terminal-3970307065-r5 { fill: #121212 } - .terminal-3970307065-r6 { fill: #0053aa } - .terminal-3970307065-r7 { fill: #dde8f3;font-weight: bold } - .terminal-3970307065-r8 { fill: #a5a5a5;font-style: italic; } - .terminal-3970307065-r9 { fill: #1e1e1e } - .terminal-3970307065-r10 { fill: #008139 } - .terminal-3970307065-r11 { fill: #e2e2e2 } - .terminal-3970307065-r12 { fill: #b93c5b } - .terminal-3970307065-r13 { fill: #808080 } - .terminal-3970307065-r14 { fill: #454a50 } - .terminal-3970307065-r15 { fill: #7ae998 } - .terminal-3970307065-r16 { fill: #e2e3e3;font-weight: bold } - .terminal-3970307065-r17 { fill: #0a180e;font-weight: bold } - .terminal-3970307065-r18 { fill: #000000 } - .terminal-3970307065-r19 { fill: #ddedf9 } + .terminal-596440806-r1 { fill: #c5c8c6 } + .terminal-596440806-r2 { fill: #e3e3e3 } + .terminal-596440806-r3 { fill: #989898 } + .terminal-596440806-r4 { fill: 
#e1e1e1 } + .terminal-596440806-r5 { fill: #121212 } + .terminal-596440806-r6 { fill: #0053aa } + .terminal-596440806-r7 { fill: #dde8f3;font-weight: bold } + .terminal-596440806-r8 { fill: #a5a5a5;font-style: italic; } + .terminal-596440806-r9 { fill: #1e1e1e } + .terminal-596440806-r10 { fill: #008139 } + .terminal-596440806-r11 { fill: #e2e2e2 } + .terminal-596440806-r12 { fill: #b93c5b } + .terminal-596440806-r13 { fill: #454a50 } + .terminal-596440806-r14 { fill: #7ae998 } + .terminal-596440806-r15 { fill: #e2e3e3;font-weight: bold } + .terminal-596440806-r16 { fill: #0a180e;font-weight: bold } + .terminal-596440806-r17 { fill: #000000 } + .terminal-596440806-r18 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Final details - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - - First version of the pipelinePath to the output directory where the pipeline  - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔will be created - 1.0.0dev▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁. - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ▔▔▔▔▔▔▔▔Force creation - Overwrite any existing pipeline output directories. 
- ▁▁▁▁▁▁▁▁ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackFinish - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  - - - - - ''' -# --- -# name: test_github_details - ''' - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - nf-core create - - - - - - - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Create GitHub repository - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - Now that we have created a new pipeline locally, we can create a new GitHub repository using - the GitHub API and push the code to it. - - - - Your GitHub usernameYour GitHub personal access token - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔for login.▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - GitHub username▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔Show - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁GitHub token▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - The name of the organisation where the The name of the new GitHub repository - GitHub repo will be cretaed▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔mypipeline - nf-core▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - You can't create a repository to the nf-core organisation. Please create the pipeline repo  - to an organisation where you have access or use your user account. A core-team member will  - be able to transfer the repo to nf-core once the development has started. You user account  - will be used by default if 'nf-core' is provided. 
- - - ▔▔▔▔▔▔▔▔Private - Select to make the new GitHub repo private. - ▁▁▁▁▁▁▁▁ - ▔▔▔▔▔▔▔▔Push files - Select to push pipeline files and branches to your GitHub repo. - ▁▁▁▁▁▁▁▁ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackCreate GitHub repoFinish without creating a repo - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - -  D  Toggle dark mode  Q  Quit  - - - - - ''' -# --- -# name: test_github_exit_message - ''' - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - nf-core create - - - - - - - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - HowTo create a GitHub repository - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - -                                           ,--./,-. -           ___     __   __   __   ___     /,-._.--~\  -     |\ | |__  __ /  ` /  \ |__) |__         }  { -     | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                           `._,._,' - - If you would like to create the GitHub repository later, you can do it manually by following - these steps: - -  1. Create a new GitHub repository -  2. Add the remote to your local repository: - - - cd<pipeline_directory> - gitremoteaddorigingit@github.com:<username>/<repo_name>.git - - -  3. Push the code to the remote: - - - gitpush--allorigin - - - ● Note the --all flag: this is needed to push all branches to the remote. 
- - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Close - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Final details + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + + First version of the pipelinePath to the output directory where the pipeline  + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔will be created + 1.0.0dev▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁. + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackFinish + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  @@ -1944,7 +1387,7 @@ # --- # name: test_github_question ''' - + - - + + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Create GitHub repository - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - After creating the pipeline template locally, we can create a GitHub repository and push the - code to it. - - Do you want to create a GitHub repository? 
- - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Create GitHub repoFinish without creating a repo - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + @@ -3069,147 +2304,149 @@ font-weight: 700; } - .terminal-2790734285-matrix { + .terminal-3553059683-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2790734285-title { + .terminal-3553059683-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2790734285-r1 { fill: #c5c8c6 } - .terminal-2790734285-r2 { fill: #e3e3e3 } - .terminal-2790734285-r3 { fill: #989898 } - .terminal-2790734285-r4 { fill: #1e1e1e } - .terminal-2790734285-r5 { fill: #98e024 } - .terminal-2790734285-r6 { fill: #626262 } - .terminal-2790734285-r7 { fill: #9d65ff } - .terminal-2790734285-r8 { fill: #fd971f } - .terminal-2790734285-r9 { fill: #e1e1e1 } - .terminal-2790734285-r10 { fill: #121212 } - .terminal-2790734285-r11 { fill: #0053aa } - .terminal-2790734285-r12 { fill: #dde8f3;font-weight: bold } - .terminal-2790734285-r13 { fill: #e1e1e1;text-decoration: underline; } - .terminal-2790734285-r14 { fill: #e1e1e1;font-style: italic; } - .terminal-2790734285-r15 { fill: #14191f } - .terminal-2790734285-r16 { fill: #e1e1e1;font-weight: bold;font-style: italic; } - .terminal-2790734285-r17 { fill: #ddedf9 } + .terminal-3553059683-r1 { fill: #c5c8c6 } + .terminal-3553059683-r2 { fill: #e3e3e3 } + .terminal-3553059683-r3 { fill: #989898 } + .terminal-3553059683-r4 { fill: #1e1e1e } + .terminal-3553059683-r5 { fill: #98e024 } + .terminal-3553059683-r6 { fill: #626262 } + .terminal-3553059683-r7 { fill: #9d65ff } + .terminal-3553059683-r8 { fill: #fd971f } + .terminal-3553059683-r9 { fill: #e1e1e1 } + .terminal-3553059683-r10 { fill: #121212 } + .terminal-3553059683-r11 { fill: #0053aa } + .terminal-3553059683-r12 { fill: 
#dde8f3;font-weight: bold } + .terminal-3553059683-r13 { fill: #e1e1e1;text-decoration: underline; } + .terminal-3553059683-r14 { fill: #14191f } + .terminal-3553059683-r15 { fill: #4ebf71 } + .terminal-3553059683-r16 { fill: #e2e2e2 } + .terminal-3553059683-r17 { fill: #e2e2e2;text-decoration: underline; } + .terminal-3553059683-r18 { fill: #e2e2e2;font-weight: bold;font-style: italic; } + .terminal-3553059683-r19 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pip… - -                                           ,--./,-. -           ___     __   __   __   ___     /,-._.--~\  -     |\ | |__  __ /  ` /  \ |__) |__         }  { -     | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                           `._,._,' - - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Welcome to the nf-core pipeline creation wizard - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - This app will help you create a new Nextflow pipeline from the nf-core - pipeline template, part of the nf-core/tools repository. - - The template must be used for nf-core pipelines, but hopefully helps  - all Nextflow developers benefit from nf-core best practices. - - If you want to add a pipeline to nf-core, please join on Slack and ▆▆ - discuss your plans with the community as early as possible; ideally  - before you start on your pipeline! See the nf-core guidelines and the  - #new-pipelines Slack channel for more information. -  D Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pip… + +                                           ,--./,-. 
+           ___     __   __   __   ___     /,-._.--~\  +     |\ | |__  __ /  ` /  \ |__) |__         }  { +     | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                           `._,._,' + + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Welcome to the nf-core pipeline creation wizard + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + This app will help you create a new Nextflow pipeline from the  + nf-core/tools pipeline template. + + The template helps anyone benefit from nf-core best practices, and is  + a requirement for nf-core pipelines. + ▃▃ + 💡 If you want to add a pipeline to nf-core, please join on Slack + and discuss your plans with the community as early as possible;  + ideally before you start on your pipeline! See the nf-core  + guidelines and the #new-pipelines Slack channel for more  +  D Toggle dark mode  Q  Quit  From 0bd18070be88542b735b5ef719512a7682037d65 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Mon, 15 Apr 2024 17:36:21 +0000 Subject: [PATCH 112/737] fix providing outdir --- nf_core/pipelines/create/create.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 801c203d2..3fb86e88f 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -105,9 +105,9 @@ def __init__( self.default_branch = default_branch self.is_interactive = is_interactive self.force = self.config.force - if outdir is None: - outdir = os.path.join(os.getcwd(), self.jinja_params["name_noslash"]) - self.outdir = Path(outdir) + if self.config.outdir is None: + self.config.outdir = os.getcwd() + self.outdir = Path(self.config.outdir, self.jinja_params["name_noslash"]).absolute() def check_template_yaml_info(self, template_yaml, name, description, author): """Ensure that the provided template yaml file contains the necessary information. 
@@ -367,7 +367,7 @@ def render_template(self): if self.config: config_fn, config_yml = nf_core.utils.load_tools_config(self.outdir) - with open(self.outdir / config_fn, "w") as fh: + with open(config_fn, "w") as fh: config_yml.update(template=self.config.model_dump()) yaml.safe_dump(config_yml, fh) log.debug(f"Dumping pipeline template yml to pipeline config file '{config_fn.name}'") From 9976440e6d8b5f4a49b1673a7347e3ab663d173a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Mon, 15 Apr 2024 18:08:28 +0000 Subject: [PATCH 113/737] try fixing pytests by using tmpdir instead of mock --- tests/__snapshots__/test_create_app.ambr | 792 ++++++++++++++++++++++- tests/test_create_app.py | 26 +- 2 files changed, 798 insertions(+), 20 deletions(-) diff --git a/tests/__snapshots__/test_create_app.ambr b/tests/__snapshots__/test_create_app.ambr index ecf8cd033..b2af13b11 100644 --- a/tests/__snapshots__/test_create_app.ambr +++ b/tests/__snapshots__/test_create_app.ambr @@ -1385,9 +1385,569 @@ ''' # --- +# name: test_github_details + ''' + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + nf-core create + + + + + + + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Create GitHub repository + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Now that we have created a new pipeline locally, we can create a new GitHub repository and  + push the code to it. 
+ + + + Your GitHub usernameYour GitHub personal access token + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔for login.▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + GitHub username▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔Show + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁••••••••••••▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + The name of the organisation where the The name of the new GitHub repository + GitHub repo will be cretaed▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔mypipeline + nf-core▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + ⚠️ You can't create a repository directly in the nf-core organisation. + Please create the pipeline repo to an organisation where you have access or use your user + account. A core-team member will be able to transfer the repo to nf-core once the  + development has started. + + 💡 Your GitHub user account will be used by default if nf-core is given as the org name. + + + ▔▔▔▔▔▔▔▔Private + Select to make the new GitHub repo private. 
+ ▁▁▁▁▁▁▁▁ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackCreate GitHub repoFinish without creating a repo + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  + + + + + ''' +# --- +# name: test_github_exit_message + ''' + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + nf-core create + + + + + + + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + HowTo create a GitHub repository + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + +                                           ,--./,-. +           ___     __   __   __   ___     /,-._.--~\  +     |\ | |__  __ /  ` /  \ |__) |__         }  { +     | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                           `._,._,' + + If you would like to create the GitHub repository later, you can do it manually by following + these steps: + +  1. Create a new GitHub repository +  2. Add the remote to your local repository: + + + cd<pipeline_directory> + gitremoteaddorigingit@github.com:<username>/<repo_name>.git + + +  3. Push the code to the remote: + + + gitpush--allorigin + + + 💡 Note the --all flag: this is needed to push all branches to the remote. 
+ + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Close + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  + + + + + ''' +# --- # name: test_github_question ''' - + - - + + - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - nf-core create + nf-core create - - - - + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Create GitHub repository + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + After creating the pipeline template locally, we can create a GitHub repository and push the + code to it. + + Do you want to create a GitHub repository? + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Create GitHub repoFinish without creating a repo + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  diff --git a/tests/test_create_app.py b/tests/test_create_app.py index 124078cec..92fb52e43 100644 --- a/tests/test_create_app.py +++ b/tests/test_create_app.py @@ -1,7 +1,5 @@ """Test Pipeline Create App""" -from unittest import mock - import pytest from nf_core.pipelines.create import PipelineCreateApp @@ -189,8 +187,7 @@ async def run_before(pilot) -> None: assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) -@mock.patch("nf_core.pipelines.create.create.PipelineCreate.init_pipeline", return_value=None) -def test_github_question(mock_init_pipeline, snap_compare): +def test_github_question(tmpdir, snap_compare): """Test snapshot for the github_repo_question screen. 
Steps to get to this screen: screen welcome > press start > @@ -212,14 +209,18 @@ async def run_before(pilot) -> None: await pilot.press("M", "e") await pilot.click("#next") await pilot.click("#continue") + await pilot.press("backspace") + await pilot.press("tab") + await pilot.press(*str(tmpdir)) await pilot.click("#finish") + pilot.app.get_widget_by_id("close_screen").remove_class("hide") + await pilot.pause() await pilot.click("#close_screen") assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) -@mock.patch("nf_core.pipelines.create.create.PipelineCreate.init_pipeline", return_value=None) -def test_github_details(mock_init_pipeline, snap_compare): +def test_github_details(tmpdir, snap_compare): """Test snapshot for the github_repo screen. Steps to get to this screen: screen welcome > press start > @@ -243,7 +244,12 @@ async def run_before(pilot) -> None: await pilot.press("M", "e") await pilot.click("#next") await pilot.click("#continue") + await pilot.press("backspace") + await pilot.press("tab") + await pilot.press(*str(tmpdir)) await pilot.click("#finish") + pilot.app.get_widget_by_id("close_screen").remove_class("hide") + await pilot.pause() await pilot.click("#close_screen") await pilot.click("#github_repo") await pilot.click("#gh_username") @@ -254,8 +260,7 @@ async def run_before(pilot) -> None: assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) -@mock.patch("nf_core.pipelines.create.create.PipelineCreate.init_pipeline", return_value=None) -def test_github_exit_message(mock_init_pipeline, snap_compare): +def test_github_exit_message(tmpdir, snap_compare): """Test snapshot for the github_exit screen. 
Steps to get to this screen: screen welcome > press start > @@ -279,7 +284,12 @@ async def run_before(pilot) -> None: await pilot.press("M", "e") await pilot.click("#next") await pilot.click("#continue") + await pilot.press("backspace") + await pilot.press("tab") + await pilot.press(*str(tmpdir)) await pilot.click("#finish") + pilot.app.get_widget_by_id("close_screen").remove_class("hide") + await pilot.pause() await pilot.click("#close_screen") await pilot.click("#github_repo") await pilot.click("#exit") From 777f68c7c9060b9ddaa405695fa8314a5cf65092 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Tue, 16 Apr 2024 10:42:16 +0000 Subject: [PATCH 114/737] fix outdir again to fix tests --- nf_core/pipelines/create/create.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 3fb86e88f..cd86c9a36 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -107,7 +107,7 @@ def __init__( self.force = self.config.force if self.config.outdir is None: self.config.outdir = os.getcwd() - self.outdir = Path(self.config.outdir, self.jinja_params["name_noslash"]).absolute() + self.outdir = Path(self.config.outdir).absolute() def check_template_yaml_info(self, template_yaml, name, description, author): """Ensure that the provided template yaml file contains the necessary information. 
From e7d38867611490042f6b220f7f17202c5fcb2df5 Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 17 Apr 2024 12:42:38 +0200 Subject: [PATCH 115/737] debugging ci tests --- .github/workflows/create-test-wf.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/create-test-wf.yml b/.github/workflows/create-test-wf.yml index 3aa2e3eb4..f2c25a2be 100644 --- a/.github/workflows/create-test-wf.yml +++ b/.github/workflows/create-test-wf.yml @@ -71,6 +71,10 @@ jobs: mkdir create-test-wf && cd create-test-wf export NXF_WORK=$(pwd) nf-core --log-file log.txt pipelines create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" + # echo current directory + pwd + # echo content of current directory + ls -la nextflow run nf-core-testpipeline -profile test,self_hosted_runner --outdir ./results - name: Upload log file artifact From 6b83de8c53b0362ca070c72681bc91090ea65f4d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Wed, 17 Apr 2024 15:35:55 +0000 Subject: [PATCH 116/737] try fixing outdir, again --- nf_core/pipelines/create/create.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index cd86c9a36..151ad83ff 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -107,7 +107,10 @@ def __init__( self.force = self.config.force if self.config.outdir is None: self.config.outdir = os.getcwd() - self.outdir = Path(self.config.outdir).absolute() + if self.config.outdir == ".": + self.outdir = Path(self.config.outdir, self.jinja_params["name_noslash"]).absolute() + else: + self.outdir = Path(self.config.outdir).absolute() def check_template_yaml_info(self, template_yaml, name, description, author): """Ensure that the provided template yaml file contains the necessary information. 
From 20b824d3f65fa95a9385b903bbe187ae190a8255 Mon Sep 17 00:00:00 2001 From: Niklas Schandry Date: Fri, 26 Apr 2024 12:04:24 +0200 Subject: [PATCH 117/737] Add charliecloud.registry --- nf_core/pipeline-template/nextflow.config | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index 17e75f18a..f097cc2ff 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -184,11 +184,11 @@ profiles { // Set default registry for Apptainer, Docker, Podman and Singularity independent of -profile // Will not be used unless Apptainer / Docker / Podman / Singularity are enabled // Set to your registry if you have a mirror of containers -apptainer.registry = 'quay.io' -docker.registry = 'quay.io' -podman.registry = 'quay.io' -singularity.registry = 'quay.io' - +apptainer.registry = 'quay.io' +docker.registry = 'quay.io' +podman.registry = 'quay.io' +singularity.registry = 'quay.io' +charliecloud.registry = 'quay.io' // Nextflow plugins plugins { id 'nf-validation@1.1.3' // Validation of pipeline parameters and creation of an input channel from a sample sheet From 97bfc217fa02fef86c36100331fa8b27a6042c5b Mon Sep 17 00:00:00 2001 From: Niklas Schandry Date: Fri, 26 Apr 2024 13:17:06 +0200 Subject: [PATCH 118/737] Update comment, add newline --- nf_core/pipeline-template/nextflow.config | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index f90fc546f..cbc98cb02 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -180,14 +180,15 @@ profiles { test_full { includeConfig 'conf/test_full.config' } } -// Set default registry for Apptainer, Docker, Podman and Singularity independent of -profile -// Will not be used unless Apptainer / Docker / Podman / Singularity are enabled 
+// Set default registry for Apptainer, Docker, Podman, Charliecloud and Singularity independent of -profile +// Will not be used unless Apptainer / Docker / Podman / Charliecloud / Singularity are enabled // Set to your registry if you have a mirror of containers apptainer.registry = 'quay.io' docker.registry = 'quay.io' podman.registry = 'quay.io' singularity.registry = 'quay.io' charliecloud.registry = 'quay.io' + // Nextflow plugins plugins { id 'nf-validation@1.1.3' // Validation of pipeline parameters and creation of an input channel from a sample sheet From dcc7757b0062b100b656fd44d208914e9aa6d605 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Thu, 9 May 2024 10:36:14 +0200 Subject: [PATCH 119/737] update snapshots --- tests/__snapshots__/test_create_app.ambr | 2979 +++++++++------------- tests/test_create_app.py | 2 +- 2 files changed, 1263 insertions(+), 1718 deletions(-) diff --git a/tests/__snapshots__/test_create_app.ambr b/tests/__snapshots__/test_create_app.ambr index b2af13b11..d2c239acb 100644 --- a/tests/__snapshots__/test_create_app.ambr +++ b/tests/__snapshots__/test_create_app.ambr @@ -22,253 +22,253 @@ font-weight: 700; } - .terminal-3000245001-matrix { + .terminal-1527309810-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3000245001-title { + .terminal-1527309810-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3000245001-r1 { fill: #c5c8c6 } - .terminal-3000245001-r2 { fill: #e3e3e3 } - .terminal-3000245001-r3 { fill: #989898 } - .terminal-3000245001-r4 { fill: #e1e1e1 } - .terminal-3000245001-r5 { fill: #121212 } - .terminal-3000245001-r6 { fill: #0053aa } - .terminal-3000245001-r7 { fill: #dde8f3;font-weight: bold } - .terminal-3000245001-r8 { fill: #a5a5a5;font-style: italic; } - .terminal-3000245001-r9 { fill: #1e1e1e } - .terminal-3000245001-r10 { fill: #008139 } - .terminal-3000245001-r11 { fill: #e2e2e2 } - 
.terminal-3000245001-r12 { fill: #787878 } - .terminal-3000245001-r13 { fill: #b93c5b } - .terminal-3000245001-r14 { fill: #454a50 } - .terminal-3000245001-r15 { fill: #7ae998 } - .terminal-3000245001-r16 { fill: #e2e3e3;font-weight: bold } - .terminal-3000245001-r17 { fill: #0a180e;font-weight: bold } - .terminal-3000245001-r18 { fill: #000000 } - .terminal-3000245001-r19 { fill: #ddedf9 } + .terminal-1527309810-r1 { fill: #c5c8c6 } + .terminal-1527309810-r2 { fill: #e3e3e3 } + .terminal-1527309810-r3 { fill: #989898 } + .terminal-1527309810-r4 { fill: #e1e1e1 } + .terminal-1527309810-r5 { fill: #121212 } + .terminal-1527309810-r6 { fill: #0053aa } + .terminal-1527309810-r7 { fill: #dde8f3;font-weight: bold } + .terminal-1527309810-r8 { fill: #a5a5a5;font-style: italic; } + .terminal-1527309810-r9 { fill: #1e1e1e } + .terminal-1527309810-r10 { fill: #008139 } + .terminal-1527309810-r11 { fill: #e2e2e2 } + .terminal-1527309810-r12 { fill: #787878 } + .terminal-1527309810-r13 { fill: #b93c5b } + .terminal-1527309810-r14 { fill: #454a50 } + .terminal-1527309810-r15 { fill: #7ae998 } + .terminal-1527309810-r16 { fill: #e2e3e3;font-weight: bold } + .terminal-1527309810-r17 { fill: #0a180e;font-weight: bold } + .terminal-1527309810-r18 { fill: #000000 } + .terminal-1527309810-r19 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Basic details - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - - GitHub organisationWorkflow name - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - nf-corePipeline Name - 
▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - A short description of your pipeline. - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Description - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - Name of the main author / authors - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Author(s) - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackNext - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Basic details + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + + GitHub organisationWorkflow name + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + nf-corePipeline Name + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + A short description of your pipeline. 
+ ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Description + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + Name of the main author / authors + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Author(s) + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackNext + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  @@ -298,256 +298,256 @@ font-weight: 700; } - .terminal-2776506879-matrix { + .terminal-2230840552-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2776506879-title { + .terminal-2230840552-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2776506879-r1 { fill: #c5c8c6 } - .terminal-2776506879-r2 { fill: #e3e3e3 } - .terminal-2776506879-r3 { fill: #989898 } - .terminal-2776506879-r4 { fill: #e1e1e1 } - .terminal-2776506879-r5 { fill: #121212 } - .terminal-2776506879-r6 { fill: #0053aa } - .terminal-2776506879-r7 { fill: #dde8f3;font-weight: bold } - .terminal-2776506879-r8 { fill: #a5a5a5;font-style: italic; } - .terminal-2776506879-r9 { fill: #1e1e1e } - .terminal-2776506879-r10 { fill: #0f4e2a } - .terminal-2776506879-r11 { fill: #0178d4 } - .terminal-2776506879-r12 { fill: #a7a7a7 } - .terminal-2776506879-r13 { fill: #787878 } - .terminal-2776506879-r14 { fill: #e2e2e2 } - .terminal-2776506879-r15 { fill: #b93c5b } - .terminal-2776506879-r16 { fill: #454a50 } - .terminal-2776506879-r17 { fill: #7ae998 } - .terminal-2776506879-r18 { fill: #e2e3e3;font-weight: bold } - .terminal-2776506879-r19 { fill: #0a180e;font-weight: bold } - .terminal-2776506879-r20 { fill: #000000 } - .terminal-2776506879-r21 { fill: #008139 } - 
.terminal-2776506879-r22 { fill: #ddedf9 } + .terminal-2230840552-r1 { fill: #c5c8c6 } + .terminal-2230840552-r2 { fill: #e3e3e3 } + .terminal-2230840552-r3 { fill: #989898 } + .terminal-2230840552-r4 { fill: #e1e1e1 } + .terminal-2230840552-r5 { fill: #121212 } + .terminal-2230840552-r6 { fill: #0053aa } + .terminal-2230840552-r7 { fill: #dde8f3;font-weight: bold } + .terminal-2230840552-r8 { fill: #a5a5a5;font-style: italic; } + .terminal-2230840552-r9 { fill: #1e1e1e } + .terminal-2230840552-r10 { fill: #0f4e2a } + .terminal-2230840552-r11 { fill: #0178d4 } + .terminal-2230840552-r12 { fill: #a7a7a7 } + .terminal-2230840552-r13 { fill: #787878 } + .terminal-2230840552-r14 { fill: #e2e2e2 } + .terminal-2230840552-r15 { fill: #b93c5b } + .terminal-2230840552-r16 { fill: #454a50 } + .terminal-2230840552-r17 { fill: #7ae998 } + .terminal-2230840552-r18 { fill: #e2e3e3;font-weight: bold } + .terminal-2230840552-r19 { fill: #0a180e;font-weight: bold } + .terminal-2230840552-r20 { fill: #000000 } + .terminal-2230840552-r21 { fill: #008139 } + .terminal-2230840552-r22 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Basic details - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - - GitHub organisationWorkflow name - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - nf-corePipeline Name - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - A short description of your pipeline. 
- ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Description - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - Name of the main author / authors - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Author(s) - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackNext - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Basic details + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + + GitHub organisationWorkflow name + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + nf-corePipeline Name + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + A short description of your pipeline. 
+ ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Description + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + Name of the main author / authors + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Author(s) + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackNext + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  @@ -577,253 +577,253 @@ font-weight: 700; } - .terminal-1170633481-matrix { + .terminal-3444045345-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1170633481-title { + .terminal-3444045345-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1170633481-r1 { fill: #c5c8c6 } - .terminal-1170633481-r2 { fill: #e3e3e3 } - .terminal-1170633481-r3 { fill: #989898 } - .terminal-1170633481-r4 { fill: #e1e1e1 } - .terminal-1170633481-r5 { fill: #121212 } - .terminal-1170633481-r6 { fill: #0053aa } - .terminal-1170633481-r7 { fill: #dde8f3;font-weight: bold } - .terminal-1170633481-r8 { fill: #24292f } - .terminal-1170633481-r9 { fill: #e2e3e3;font-weight: bold } - .terminal-1170633481-r10 { fill: #e2e3e3;font-weight: bold;font-style: italic; } - .terminal-1170633481-r11 { fill: #4ebf71;font-weight: bold } - .terminal-1170633481-r12 { fill: #e1e1e1;font-style: italic; } - .terminal-1170633481-r13 { fill: #7ae998 } - .terminal-1170633481-r14 { fill: #008139 } - .terminal-1170633481-r15 { fill: #507bb3 } - .terminal-1170633481-r16 { fill: #dde6ed;font-weight: bold } - .terminal-1170633481-r17 { fill: #001541 } - .terminal-1170633481-r18 { fill: #e1e1e1;text-decoration: underline; } - .terminal-1170633481-r19 { fill: #ddedf9 } + .terminal-3444045345-r1 { 
fill: #c5c8c6 } + .terminal-3444045345-r2 { fill: #e3e3e3 } + .terminal-3444045345-r3 { fill: #989898 } + .terminal-3444045345-r4 { fill: #e1e1e1 } + .terminal-3444045345-r5 { fill: #121212 } + .terminal-3444045345-r6 { fill: #0053aa } + .terminal-3444045345-r7 { fill: #dde8f3;font-weight: bold } + .terminal-3444045345-r8 { fill: #24292f } + .terminal-3444045345-r9 { fill: #e2e3e3;font-weight: bold } + .terminal-3444045345-r10 { fill: #e2e3e3;font-weight: bold;font-style: italic; } + .terminal-3444045345-r11 { fill: #4ebf71;font-weight: bold } + .terminal-3444045345-r12 { fill: #e1e1e1;font-style: italic; } + .terminal-3444045345-r13 { fill: #7ae998 } + .terminal-3444045345-r14 { fill: #507bb3 } + .terminal-3444045345-r15 { fill: #008139 } + .terminal-3444045345-r16 { fill: #dde6ed;font-weight: bold } + .terminal-3444045345-r17 { fill: #001541 } + .terminal-3444045345-r18 { fill: #e1e1e1;text-decoration: underline; } + .terminal-3444045345-r19 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Choose pipeline type - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -          Choose "nf-core" if:                  Choose "Custom" if:           - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - ● You want your pipeline to be part of the● Your pipeline will never be part of  - nf-core communitynf-core - ● You think that there's an outside chance● You want full control over all features  - that it ever could be part of nf-corethat 
are included from the template  - (including those that are mandatory for  - nf-core). - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - nf-core - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Custom - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -                                  What's the difference?                                  - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Choosing "nf-core" effectively pre-selects the following template features: - - ● GitHub Actions continuous-integration configuration files: - ▪ Pipeline test runs: Small-scale (GitHub) and large-scale (AWS) - ▪ Code formatting checks with Prettier - ▪ Auto-fix linting functionality using @nf-core-bot - ▪ Marking old issues as stale - ● Inclusion of shared nf-core configuration profiles - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Choose pipeline type + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +        Choose "nf-core" if:              Choose "Custom" if:         + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + ● You want your pipeline to be part of● Your pipeline will never be part of  + the nf-core communitynf-core + ● You think that there's an outside ● You want full control over all + chance that it ever could be part offeatures that are included from the  + nf-coretemplate (including those that are  + mandatory for nf-core). 
+ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + nf-core▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁Custom + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +                                  What's the difference?                                  + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Choosing "nf-core" effectively pre-selects the following template features: + + ● GitHub Actions continuous-integration configuration files: + ▪ Pipeline test runs: Small-scale (GitHub) and large-scale (AWS) + ▪ Code formatting checks with Prettier + ▪ Auto-fix linting functionality using @nf-core-bot + ▪ Marking old issues as stale + ● Inclusion of shared nf-core configuration profiles + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  @@ -853,257 +853,257 @@ font-weight: 700; } - .terminal-3272111277-matrix { + .terminal-3071202289-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3272111277-title { + .terminal-3071202289-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3272111277-r1 { fill: #c5c8c6 } - .terminal-3272111277-r2 { fill: #e3e3e3 } - .terminal-3272111277-r3 { fill: #989898 } - .terminal-3272111277-r4 { fill: #e1e1e1 } - .terminal-3272111277-r5 { fill: #121212 } - .terminal-3272111277-r6 { fill: #0053aa } - .terminal-3272111277-r7 { fill: #dde8f3;font-weight: bold } - .terminal-3272111277-r8 { fill: #1e1e1e } - .terminal-3272111277-r9 { fill: #0178d4 } - .terminal-3272111277-r10 { fill: #454a50 } - .terminal-3272111277-r11 { fill: #e2e2e2 } - .terminal-3272111277-r12 { fill: #808080 } - .terminal-3272111277-r13 { fill: #e2e3e3;font-weight: bold } - .terminal-3272111277-r14 { fill: #000000 } - .terminal-3272111277-r15 { fill: #e4e4e4 } - .terminal-3272111277-r16 { fill: #14191f } - .terminal-3272111277-r17 { fill: #507bb3 } - .terminal-3272111277-r18 { fill: 
#dde6ed;font-weight: bold } - .terminal-3272111277-r19 { fill: #001541 } - .terminal-3272111277-r20 { fill: #7ae998 } - .terminal-3272111277-r21 { fill: #0a180e;font-weight: bold } - .terminal-3272111277-r22 { fill: #008139 } - .terminal-3272111277-r23 { fill: #ddedf9 } + .terminal-3071202289-r1 { fill: #c5c8c6 } + .terminal-3071202289-r2 { fill: #e3e3e3 } + .terminal-3071202289-r3 { fill: #989898 } + .terminal-3071202289-r4 { fill: #e1e1e1 } + .terminal-3071202289-r5 { fill: #121212 } + .terminal-3071202289-r6 { fill: #0053aa } + .terminal-3071202289-r7 { fill: #dde8f3;font-weight: bold } + .terminal-3071202289-r8 { fill: #1e1e1e } + .terminal-3071202289-r9 { fill: #0178d4 } + .terminal-3071202289-r10 { fill: #454a50 } + .terminal-3071202289-r11 { fill: #e2e2e2 } + .terminal-3071202289-r12 { fill: #808080 } + .terminal-3071202289-r13 { fill: #e2e3e3;font-weight: bold } + .terminal-3071202289-r14 { fill: #000000 } + .terminal-3071202289-r15 { fill: #e4e4e4 } + .terminal-3071202289-r16 { fill: #14191f } + .terminal-3071202289-r17 { fill: #507bb3 } + .terminal-3071202289-r18 { fill: #dde6ed;font-weight: bold } + .terminal-3071202289-r19 { fill: #001541 } + .terminal-3071202289-r20 { fill: #7ae998 } + .terminal-3071202289-r21 { fill: #0a180e;font-weight: bold } + .terminal-3071202289-r22 { fill: #008139 } + .terminal-3071202289-r23 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Template features - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Use reference The pipeline will beHide help - ▁▁▁▁▁▁▁▁genomesconfigured 
to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - copy of the most  - common reference  - genome files from  - iGenomes - - - Nf-core pipelines are configured to use a copy of the most common  - reference genome files. - - By selecting this option, your pipeline will include a configuration  - file specifying the paths to these files. - - The required code to use these files will also be included in the  - template. When the pipeline user provides an appropriate genome key, the - pipeline will automatically download the required reference files. - ▅▅ - For more information about reference genomes in nf-core pipelines, see  - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Add Github CI testsThe pipeline will Show help - ▁▁▁▁▁▁▁▁include several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - GitHub actions for  - Continuous  - Integration (CI)  - testing - ▆▆ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Add Github badgesThe README.md file Show help - ▁▁▁▁▁▁▁▁of the pipeline will▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - include GitHub  - badges - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackContinue - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Template features + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Use reference The pipeline Hide help + ▁▁▁▁▁▁▁▁genomeswill be ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + configured to  + use a copy of  + the most common  + reference genome + files from  + iGenomes + + + Nf-core pipelines are configured to use a copy of the most  + common reference genome files. + + By selecting this option, your pipeline will include a  + configuration file specifying the paths to these files. + + The required code to use these files will also be included in  + the template. 
When the pipeline user provides an appropriate  + genome key, the pipeline will automatically download the ▂▂ + required reference files. + + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Add Github CI The pipeline Show help▅▅ + ▁▁▁▁▁▁▁▁testswill include ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + several GitHub  + actions for  + Continuous  + Integration (CI) + testing + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Add Github The README.md Show help + ▁▁▁▁▁▁▁▁badgesfile of the ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + pipeline will  + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackContinue + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  D  Toggle dark mode  Q  Quit  @@ -1133,541 +1133,259 @@ font-weight: 700; } - .terminal-596440806-matrix { + .terminal-1456849374-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-596440806-title { + .terminal-1456849374-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-596440806-r1 { fill: #c5c8c6 } - .terminal-596440806-r2 { fill: #e3e3e3 } - .terminal-596440806-r3 { fill: #989898 } - .terminal-596440806-r4 { fill: #e1e1e1 } - .terminal-596440806-r5 { fill: #121212 } - .terminal-596440806-r6 { fill: #0053aa } - .terminal-596440806-r7 { fill: #dde8f3;font-weight: bold } - .terminal-596440806-r8 { fill: #a5a5a5;font-style: italic; } - .terminal-596440806-r9 { fill: #1e1e1e } - .terminal-596440806-r10 { fill: #008139 } - .terminal-596440806-r11 { fill: #e2e2e2 } - .terminal-596440806-r12 { fill: #b93c5b } - .terminal-596440806-r13 { fill: #454a50 } - .terminal-596440806-r14 { fill: #7ae998 } - .terminal-596440806-r15 { fill: #e2e3e3;font-weight: bold } - .terminal-596440806-r16 { fill: #0a180e;font-weight: bold } - .terminal-596440806-r17 { fill: #000000 } - .terminal-596440806-r18 { fill: #ddedf9 } + .terminal-1456849374-r1 { fill: #c5c8c6 } + .terminal-1456849374-r2 { fill: #e3e3e3 } + .terminal-1456849374-r3 { fill: #989898 } + .terminal-1456849374-r4 { fill: #e1e1e1 } + .terminal-1456849374-r5 { fill: #121212 } 
+ .terminal-1456849374-r6 { fill: #0053aa } + .terminal-1456849374-r7 { fill: #dde8f3;font-weight: bold } + .terminal-1456849374-r8 { fill: #a5a5a5;font-style: italic; } + .terminal-1456849374-r9 { fill: #1e1e1e } + .terminal-1456849374-r10 { fill: #008139 } + .terminal-1456849374-r11 { fill: #e2e2e2 } + .terminal-1456849374-r12 { fill: #b93c5b } + .terminal-1456849374-r13 { fill: #454a50 } + .terminal-1456849374-r14 { fill: #7ae998 } + .terminal-1456849374-r15 { fill: #e2e3e3;font-weight: bold } + .terminal-1456849374-r16 { fill: #0a180e;font-weight: bold } + .terminal-1456849374-r17 { fill: #000000 } + .terminal-1456849374-r18 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Final details - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - - First version of the pipelinePath to the output directory where the pipeline  - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔will be created - 1.0.0dev▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁. 
- ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackFinish - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Final details + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + + First version of the pipelinePath to the output directory where the pipeline  + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔will be created + 1.0.0dev▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁. + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackFinish + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  ''' # --- -# name: test_github_details - ''' - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - nf-core create - - - - - - - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Create GitHub repository - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Now that we have created a new pipeline locally, we can create a new GitHub repository and  - push the code to it. 
- - - - Your GitHub usernameYour GitHub personal access token - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔for login.▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - GitHub username▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔Show - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁••••••••••••▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - The name of the organisation where the The name of the new GitHub repository - GitHub repo will be cretaed▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔mypipeline - nf-core▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - ⚠️ You can't create a repository directly in the nf-core organisation. - Please create the pipeline repo to an organisation where you have access or use your user - account. A core-team member will be able to transfer the repo to nf-core once the  - development has started. - - 💡 Your GitHub user account will be used by default if nf-core is given as the org name. - - - ▔▔▔▔▔▔▔▔Private - Select to make the new GitHub repo private. 
- ▁▁▁▁▁▁▁▁ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackCreate GitHub repoFinish without creating a repo - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  - - - - - ''' -# --- -# name: test_github_exit_message +# name: test_github_question ''' @@ -1690,262 +1408,255 @@ font-weight: 700; } - .terminal-1291666581-matrix { + .terminal-4165331380-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1291666581-title { + .terminal-4165331380-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1291666581-r1 { fill: #c5c8c6 } - .terminal-1291666581-r2 { fill: #e3e3e3 } - .terminal-1291666581-r3 { fill: #989898 } - .terminal-1291666581-r4 { fill: #e1e1e1 } - .terminal-1291666581-r5 { fill: #121212 } - .terminal-1291666581-r6 { fill: #0053aa } - .terminal-1291666581-r7 { fill: #dde8f3;font-weight: bold } - .terminal-1291666581-r8 { fill: #98e024 } - .terminal-1291666581-r9 { fill: #626262 } - .terminal-1291666581-r10 { fill: #9d65ff } - .terminal-1291666581-r11 { fill: #fd971f } - .terminal-1291666581-r12 { fill: #4ebf71;font-weight: bold } - .terminal-1291666581-r13 { fill: #d2d2d2 } - .terminal-1291666581-r14 { fill: #82aaff } - .terminal-1291666581-r15 { fill: #eeffff } - .terminal-1291666581-r16 { fill: #4ebf71 } - .terminal-1291666581-r17 { fill: #e2e2e2 } - .terminal-1291666581-r18 { fill: #969696;font-weight: bold } - .terminal-1291666581-r19 { fill: #7ae998 } - .terminal-1291666581-r20 { fill: #008139 } - .terminal-1291666581-r21 { fill: #ddedf9 } + .terminal-4165331380-r1 { fill: #c5c8c6 } + .terminal-4165331380-r2 { fill: #e3e3e3 } + .terminal-4165331380-r3 { fill: #989898 } + .terminal-4165331380-r4 { fill: #e1e1e1 } + .terminal-4165331380-r5 { fill: #121212 } + .terminal-4165331380-r6 { fill: #0053aa } + .terminal-4165331380-r7 { fill: 
#dde8f3;font-weight: bold } + .terminal-4165331380-r8 { fill: #7ae998 } + .terminal-4165331380-r9 { fill: #507bb3 } + .terminal-4165331380-r10 { fill: #4ebf71;font-weight: bold } + .terminal-4165331380-r11 { fill: #dde6ed;font-weight: bold } + .terminal-4165331380-r12 { fill: #008139 } + .terminal-4165331380-r13 { fill: #001541 } + .terminal-4165331380-r14 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - HowTo create a GitHub repository - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - -                                           ,--./,-. -           ___     __   __   __   ___     /,-._.--~\  -     |\ | |__  __ /  ` /  \ |__) |__         }  { -     | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                           `._,._,' - - If you would like to create the GitHub repository later, you can do it manually by following - these steps: - -  1. Create a new GitHub repository -  2. Add the remote to your local repository: - - - cd<pipeline_directory> - gitremoteaddorigingit@github.com:<username>/<repo_name>.git - - -  3. Push the code to the remote: - - - gitpush--allorigin - - - 💡 Note the --all flag: this is needed to push all branches to the remote. 
- - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Close - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Create GitHub repository + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + After creating the pipeline template locally, we can create a GitHub repository and push the + code to it. + + Do you want to create a GitHub repository? + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Create GitHub repoFinish without creating a repo + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  ''' # --- -# name: test_github_question +# name: test_type_custom ''' @@ -1968,255 +1679,261 @@ font-weight: 700; } - .terminal-3308461771-matrix { + .terminal-3459022791-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3308461771-title { + .terminal-3459022791-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3308461771-r1 { fill: #c5c8c6 } - .terminal-3308461771-r2 { fill: #e3e3e3 } - .terminal-3308461771-r3 { fill: #989898 } - .terminal-3308461771-r4 { fill: #e1e1e1 } - .terminal-3308461771-r5 { fill: #121212 } - .terminal-3308461771-r6 { fill: #0053aa } - .terminal-3308461771-r7 { fill: #dde8f3;font-weight: bold } - .terminal-3308461771-r8 { fill: #7ae998 } - .terminal-3308461771-r9 { fill: #507bb3 } - .terminal-3308461771-r10 { fill: #4ebf71;font-weight: bold } - .terminal-3308461771-r11 { fill: #dde6ed;font-weight: bold } - .terminal-3308461771-r12 { fill: #008139 } - .terminal-3308461771-r13 { fill: #001541 } - .terminal-3308461771-r14 { fill: #ddedf9 } + .terminal-3459022791-r1 { fill: #c5c8c6 } + .terminal-3459022791-r2 { fill: 
#e3e3e3 } + .terminal-3459022791-r3 { fill: #989898 } + .terminal-3459022791-r4 { fill: #e1e1e1 } + .terminal-3459022791-r5 { fill: #121212 } + .terminal-3459022791-r6 { fill: #0053aa } + .terminal-3459022791-r7 { fill: #dde8f3;font-weight: bold } + .terminal-3459022791-r8 { fill: #1e1e1e } + .terminal-3459022791-r9 { fill: #507bb3 } + .terminal-3459022791-r10 { fill: #e2e2e2 } + .terminal-3459022791-r11 { fill: #808080 } + .terminal-3459022791-r12 { fill: #dde6ed;font-weight: bold } + .terminal-3459022791-r13 { fill: #001541 } + .terminal-3459022791-r14 { fill: #454a50 } + .terminal-3459022791-r15 { fill: #7ae998 } + .terminal-3459022791-r16 { fill: #e2e3e3;font-weight: bold } + .terminal-3459022791-r17 { fill: #0a180e;font-weight: bold } + .terminal-3459022791-r18 { fill: #000000 } + .terminal-3459022791-r19 { fill: #008139 } + .terminal-3459022791-r20 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Create GitHub repository - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - After creating the pipeline template locally, we can create a GitHub repository and push the - code to it. - - Do you want to create a GitHub repository? 
- - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Create GitHub repoFinish without creating a repo - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Template features + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Use reference The pipeline willShow help + ▁▁▁▁▁▁▁▁genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + use a copy of the + most common  + reference genome  + files from  + iGenomes + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Add Github CI The pipeline willShow help + ▁▁▁▁▁▁▁▁testsinclude several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + GitHub actions  + for Continuous  + Integration (CI)  + testing + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Add Github badgesThe README.md Show help + ▁▁▁▁▁▁▁▁file of the ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + pipeline will  + include GitHub  + badges + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Add configurationThe pipeline willShow help + ▁▁▁▁▁▁▁▁filesinclude ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + configuration  + profiles  + containing custom + parameters  + requried to run  + nf-core pipelines + at different  + institutions + + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackContinue + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  D  Toggle dark mode  Q  Quit  ''' # --- -# name: test_type_custom +# name: test_type_nfcore ''' @@ -2239,261 +1956,261 @@ font-weight: 700; } - .terminal-1734914007-matrix { + .terminal-461754173-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1734914007-title { + .terminal-461754173-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1734914007-r1 { fill: #c5c8c6 } - .terminal-1734914007-r2 { fill: #e3e3e3 } - .terminal-1734914007-r3 { 
fill: #989898 } - .terminal-1734914007-r4 { fill: #e1e1e1 } - .terminal-1734914007-r5 { fill: #121212 } - .terminal-1734914007-r6 { fill: #0053aa } - .terminal-1734914007-r7 { fill: #dde8f3;font-weight: bold } - .terminal-1734914007-r8 { fill: #1e1e1e } - .terminal-1734914007-r9 { fill: #507bb3 } - .terminal-1734914007-r10 { fill: #e2e2e2 } - .terminal-1734914007-r11 { fill: #808080 } - .terminal-1734914007-r12 { fill: #dde6ed;font-weight: bold } - .terminal-1734914007-r13 { fill: #001541 } - .terminal-1734914007-r14 { fill: #454a50 } - .terminal-1734914007-r15 { fill: #7ae998 } - .terminal-1734914007-r16 { fill: #e2e3e3;font-weight: bold } - .terminal-1734914007-r17 { fill: #0a180e;font-weight: bold } - .terminal-1734914007-r18 { fill: #000000 } - .terminal-1734914007-r19 { fill: #008139 } - .terminal-1734914007-r20 { fill: #ddedf9 } + .terminal-461754173-r1 { fill: #c5c8c6 } + .terminal-461754173-r2 { fill: #e3e3e3 } + .terminal-461754173-r3 { fill: #989898 } + .terminal-461754173-r4 { fill: #e1e1e1 } + .terminal-461754173-r5 { fill: #121212 } + .terminal-461754173-r6 { fill: #0053aa } + .terminal-461754173-r7 { fill: #dde8f3;font-weight: bold } + .terminal-461754173-r8 { fill: #1e1e1e } + .terminal-461754173-r9 { fill: #507bb3 } + .terminal-461754173-r10 { fill: #e2e2e2 } + .terminal-461754173-r11 { fill: #808080 } + .terminal-461754173-r12 { fill: #dde6ed;font-weight: bold } + .terminal-461754173-r13 { fill: #001541 } + .terminal-461754173-r14 { fill: #454a50 } + .terminal-461754173-r15 { fill: #7ae998 } + .terminal-461754173-r16 { fill: #e2e3e3;font-weight: bold } + .terminal-461754173-r17 { fill: #0a180e;font-weight: bold } + .terminal-461754173-r18 { fill: #000000 } + .terminal-461754173-r19 { fill: #008139 } + .terminal-461754173-r20 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core 
create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Template features - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Use reference genomesThe pipeline will be Show help - ▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - copy of the most  - common reference  - genome files from  - iGenomes - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Add Github CI testsThe pipeline will Show help - ▁▁▁▁▁▁▁▁include several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - GitHub actions for  - Continuous  - Integration (CI)  - testing - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Add Github badgesThe README.md file ofShow help - ▁▁▁▁▁▁▁▁the pipeline will ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - include GitHub badges - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Add configuration The pipeline will Show help - ▁▁▁▁▁▁▁▁filesinclude configuration▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - profiles containing  - custom parameters  - requried to run  - nf-core pipelines at  - different  - institutions - - - - - - - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackContinue - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Template features + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Use reference The pipeline willShow help + ▁▁▁▁▁▁▁▁genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + use a copy of the + most common  + reference genome  + files from  + iGenomes + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackContinue + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  D  Toggle dark mode  Q  Quit  ''' # --- -# name: test_type_nfcore +# name: test_type_nfcore_validation ''' @@ -2516,261 
+2233,263 @@ font-weight: 700; } - .terminal-182709094-matrix { + .terminal-2179958535-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-182709094-title { + .terminal-2179958535-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-182709094-r1 { fill: #c5c8c6 } - .terminal-182709094-r2 { fill: #e3e3e3 } - .terminal-182709094-r3 { fill: #989898 } - .terminal-182709094-r4 { fill: #e1e1e1 } - .terminal-182709094-r5 { fill: #121212 } - .terminal-182709094-r6 { fill: #0053aa } - .terminal-182709094-r7 { fill: #dde8f3;font-weight: bold } - .terminal-182709094-r8 { fill: #1e1e1e } - .terminal-182709094-r9 { fill: #507bb3 } - .terminal-182709094-r10 { fill: #e2e2e2 } - .terminal-182709094-r11 { fill: #808080 } - .terminal-182709094-r12 { fill: #dde6ed;font-weight: bold } - .terminal-182709094-r13 { fill: #001541 } - .terminal-182709094-r14 { fill: #454a50 } - .terminal-182709094-r15 { fill: #7ae998 } - .terminal-182709094-r16 { fill: #e2e3e3;font-weight: bold } - .terminal-182709094-r17 { fill: #0a180e;font-weight: bold } - .terminal-182709094-r18 { fill: #000000 } - .terminal-182709094-r19 { fill: #008139 } - .terminal-182709094-r20 { fill: #ddedf9 } + .terminal-2179958535-r1 { fill: #c5c8c6 } + .terminal-2179958535-r2 { fill: #e3e3e3 } + .terminal-2179958535-r3 { fill: #989898 } + .terminal-2179958535-r4 { fill: #e1e1e1 } + .terminal-2179958535-r5 { fill: #121212 } + .terminal-2179958535-r6 { fill: #0053aa } + .terminal-2179958535-r7 { fill: #dde8f3;font-weight: bold } + .terminal-2179958535-r8 { fill: #a5a5a5;font-style: italic; } + .terminal-2179958535-r9 { fill: #1e1e1e } + .terminal-2179958535-r10 { fill: #0f4e2a } + .terminal-2179958535-r11 { fill: #7b3042 } + .terminal-2179958535-r12 { fill: #a7a7a7 } + .terminal-2179958535-r13 { fill: #787878 } + .terminal-2179958535-r14 { fill: #e2e2e2 } + .terminal-2179958535-r15 { fill: #b93c5b } + 
.terminal-2179958535-r16 { fill: #454a50 } + .terminal-2179958535-r17 { fill: #166d39 } + .terminal-2179958535-r18 { fill: #e2e3e3;font-weight: bold } + .terminal-2179958535-r19 { fill: #3c8b54;font-weight: bold } + .terminal-2179958535-r20 { fill: #000000 } + .terminal-2179958535-r21 { fill: #5aa86f } + .terminal-2179958535-r22 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Template features - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Use reference genomesThe pipeline will be Show help - ▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - copy of the most  - common reference  - genome files from  - iGenomes - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackContinue - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Basic details + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + + GitHub organisationWorkflow name + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + nf-corePipeline Name + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + Value error, Must be lowercase without  + punctuation. + + A short description of your pipeline. 
+ ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Description + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + Value error, Cannot be left empty. + + Name of the main author / authors + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Author(s) + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + Value error, Cannot be left empty. + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackNext + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  ''' # --- -# name: test_type_nfcore_validation +# name: test_welcome ''' @@ -2793,428 +2512,254 @@ font-weight: 700; } - .terminal-2320153615-matrix { + .terminal-1144763792-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2320153615-title { + .terminal-1144763792-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2320153615-r1 { fill: #c5c8c6 } - .terminal-2320153615-r2 { fill: #e3e3e3 } - .terminal-2320153615-r3 { fill: #989898 } - .terminal-2320153615-r4 { fill: #e1e1e1 } - .terminal-2320153615-r5 { fill: #121212 } - .terminal-2320153615-r6 { fill: #0053aa } - .terminal-2320153615-r7 { fill: #dde8f3;font-weight: bold } - .terminal-2320153615-r8 { fill: #a5a5a5;font-style: italic; } - .terminal-2320153615-r9 { fill: #1e1e1e } - .terminal-2320153615-r10 { fill: #0f4e2a } - .terminal-2320153615-r11 { fill: #7b3042 } - .terminal-2320153615-r12 { fill: #a7a7a7 } - .terminal-2320153615-r13 { fill: #787878 } - .terminal-2320153615-r14 { fill: #e2e2e2 } - .terminal-2320153615-r15 { fill: #b93c5b } - .terminal-2320153615-r16 { fill: #454a50 } - .terminal-2320153615-r17 { fill: #166d39 } - .terminal-2320153615-r18 { fill: #e2e3e3;font-weight: bold } - .terminal-2320153615-r19 { 
fill: #3c8b54;font-weight: bold } - .terminal-2320153615-r20 { fill: #000000 } - .terminal-2320153615-r21 { fill: #5aa86f } - .terminal-2320153615-r22 { fill: #ddedf9 } + .terminal-1144763792-r1 { fill: #c5c8c6 } + .terminal-1144763792-r2 { fill: #e3e3e3 } + .terminal-1144763792-r3 { fill: #989898 } + .terminal-1144763792-r4 { fill: #98e024 } + .terminal-1144763792-r5 { fill: #626262 } + .terminal-1144763792-r6 { fill: #9d65ff } + .terminal-1144763792-r7 { fill: #fd971f } + .terminal-1144763792-r8 { fill: #e1e1e1 } + .terminal-1144763792-r9 { fill: #121212 } + .terminal-1144763792-r10 { fill: #0053aa } + .terminal-1144763792-r11 { fill: #dde8f3;font-weight: bold } + .terminal-1144763792-r12 { fill: #e1e1e1;text-decoration: underline; } + .terminal-1144763792-r13 { fill: #4ebf71 } + .terminal-1144763792-r14 { fill: #e2e2e2 } + .terminal-1144763792-r15 { fill: #e2e2e2;text-decoration: underline; } + .terminal-1144763792-r16 { fill: #e2e2e2;font-weight: bold;font-style: italic; } + .terminal-1144763792-r17 { fill: #7ae998 } + .terminal-1144763792-r18 { fill: #4ebf71;font-weight: bold } + .terminal-1144763792-r19 { fill: #008139 } + .terminal-1144763792-r20 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create - - - - - - - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Basic details - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - - GitHub organisationWorkflow name - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - nf-corePipeline Name - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - Value error, Must be lowercase 
without  - punctuation. - - A short description of your pipeline. - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Description - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - Value error, Cannot be left empty. - - Name of the main author / authors - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Author(s) - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - Value error, Cannot be left empty. - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackNext - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  - - - - - ''' -# --- -# name: test_welcome - ''' - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pip… - -                                           ,--./,-. -           ___     __   __   __   ___     /,-._.--~\  -     |\ | |__  __ /  ` /  \ |__) |__         }  { -     | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                           `._,._,' - - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Welcome to the nf-core pipeline creation wizard - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - This app will help you create a new Nextflow pipeline from the  - nf-core/tools pipeline template. - - The template helps anyone benefit from nf-core best practices, and is  - a requirement for nf-core pipelines. - ▃▃ - 💡 If you want to add a pipeline to nf-core, please join on Slack - and discuss your plans with the community as early as possible;  - ideally before you start on your pipeline! 
See the nf-core  - guidelines and the #new-pipelines Slack channel for more  -  D Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + +                                           ,--./,-. +           ___     __   __   __   ___     /,-._.--~\  +     |\ | |__  __ /  ` /  \ |__) |__         }  { +     | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                           `._,._,' + + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Welcome to the nf-core pipeline creation wizard + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + This app will help you create a new Nextflow pipeline from the nf-core/tools pipeline  + template. + + The template helps anyone benefit from nf-core best practices, and is a requirement for  + nf-core pipelines. + + 💡 If you want to add a pipeline to nf-core, please join on Slack and discuss your plans  + with the community as early as possible; ideally before you start on your pipeline! See  + the nf-core guidelines and the #new-pipelines Slack channel for more information. + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Let's go! + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  diff --git a/tests/test_create_app.py b/tests/test_create_app.py index 92fb52e43..ee010c251 100644 --- a/tests/test_create_app.py +++ b/tests/test_create_app.py @@ -24,7 +24,7 @@ async def test_app_bindings(): def test_welcome(snap_compare): """Test snapshot for the first screen in the app. 
The welcome screen.""" - assert snap_compare("../nf_core/pipelines/create/__init__.py") + assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50)) def test_choose_type(snap_compare): From 8aceeb3ac84922f985ec8645c3fe5f06ce86ddd1 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Thu, 9 May 2024 11:05:15 +0200 Subject: [PATCH 120/737] handle token not configured with gh --- nf_core/pipelines/create/githubrepo.py | 11 +- tests/__snapshots__/test_create_app.ambr | 562 +++++++++++++++++++++++ 2 files changed, 569 insertions(+), 4 deletions(-) diff --git a/nf_core/pipelines/create/githubrepo.py b/nf_core/pipelines/create/githubrepo.py index ddf7bf90f..99e7b09ab 100644 --- a/nf_core/pipelines/create/githubrepo.py +++ b/nf_core/pipelines/create/githubrepo.py @@ -240,10 +240,13 @@ def _get_github_credentials(self): # Use gh CLI config if installed gh_cli_config_fn = os.path.expanduser("~/.config/gh/hosts.yml") if os.path.exists(gh_cli_config_fn): - with open(gh_cli_config_fn) as fh: - gh_cli_config = yaml.safe_load(fh) - gh_user = (gh_cli_config["github.com"]["user"],) - gh_token = gh_cli_config["github.com"]["oauth_token"] + try: + with open(gh_cli_config_fn) as fh: + gh_cli_config = yaml.safe_load(fh) + gh_user = (gh_cli_config["github.com"]["user"],) + gh_token = gh_cli_config["github.com"]["oauth_token"] + except KeyError: + pass # If gh CLI not installed, try to get credentials from environment variables elif os.environ.get("GITHUB_TOKEN") is not None: gh_token = self.auth = os.environ["GITHUB_TOKEN"] diff --git a/tests/__snapshots__/test_create_app.ambr b/tests/__snapshots__/test_create_app.ambr index d2c239acb..d3cf69a2d 100644 --- a/tests/__snapshots__/test_create_app.ambr +++ b/tests/__snapshots__/test_create_app.ambr @@ -1385,6 +1385,568 @@ ''' # --- +# name: test_github_details + ''' + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + nf-core create + + + + + + + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Create GitHub repository + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Now that we have created a new pipeline locally, we can create a new GitHub repository and  + push the code to it. + + 💡 Found GitHub username in local GitHub CLI config + + + + Your GitHub usernameYour GitHub personal access token + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔for login.▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + mirpedr▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔Show + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁••••••••••••▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + The name of the organisation where the The name of the new GitHub repository + GitHub repo will be cretaed▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔mypipeline + nf-core▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + ⚠️ You can't create a repository directly in the nf-core organisation. + Please create the pipeline repo to an organisation where you have access or use your user + account. A core-team member will be able to transfer the repo to nf-core once the  + development has started. + + 💡 Your GitHub user account will be used by default if nf-core is given as the org name. + + + ▔▔▔▔▔▔▔▔Private + Select to make the new GitHub repo private. 
+ ▁▁▁▁▁▁▁▁ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackCreate GitHub repoFinish without creating a repo + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + +  D  Toggle dark mode  Q  Quit  + + + + + ''' +# --- +# name: test_github_exit_message + ''' + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + nf-core create + + + + + + + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + HowTo create a GitHub repository + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + +                                           ,--./,-. +           ___     __   __   __   ___     /,-._.--~\  +     |\ | |__  __ /  ` /  \ |__) |__         }  { +     | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                           `._,._,' + + If you would like to create the GitHub repository later, you can do it manually by following + these steps: + +  1. Create a new GitHub repository +  2. Add the remote to your local repository: + + + cd<pipeline_directory> + gitremoteaddorigingit@github.com:<username>/<repo_name>.git + + +  3. Push the code to the remote: + + + gitpush--allorigin + + + 💡 Note the --all flag: this is needed to push all branches to the remote. 
+ + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Close + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  + + + + + ''' +# --- # name: test_github_question ''' From 2d4aefa53ea6c7e414bf5629cc9a934b0702cebf Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Thu, 9 May 2024 13:56:44 +0200 Subject: [PATCH 121/737] bump to 2.14.2dev --- .gitpod.yml | 2 +- CHANGELOG.md | 12 ++++++++++++ setup.py | 2 +- 3 files changed, 14 insertions(+), 2 deletions(-) diff --git a/.gitpod.yml b/.gitpod.yml index 445cb3570..b2fbb7313 100644 --- a/.gitpod.yml +++ b/.gitpod.yml @@ -1,4 +1,4 @@ -image: nfcore/gitpod:latest +image: nfcore/gitpod:dev tasks: - name: install current state of nf-core/tools and setup pre-commit command: | diff --git a/CHANGELOG.md b/CHANGELOG.md index e130d3c37..b71c64fe2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,17 @@ # nf-core/tools: Changelog +## v2.14.2dev + +### Template + +### Linting + +### Download + +### Components + +### General + ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] ### Template diff --git a/setup.py b/setup.py index 47137cde7..013c863b9 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import find_packages, setup -version = "2.14.1" +version = "2.14.2dev" with open("README.md") as f: readme = f.read() From 43545440d9d1338d01cdfe28d471963fbc5a4e26 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 9 May 2024 17:46:22 +0000 Subject: [PATCH 122/737] Update pre-commit hook astral-sh/ruff-pre-commit to v0.4.4 --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 887cbe027..4b1941d81 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.4.3 + rev: v0.4.4 hooks: - id: ruff # linter args: [--fix, 
--exit-non-zero-on-fix] # sort imports and fix From 37a4bfc4d7dea4ae329cfb8aceefcc1ec804af67 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Thu, 9 May 2024 17:47:18 +0000 Subject: [PATCH 123/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index b71c64fe2..3f46a265c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,6 +12,8 @@ ### General +- Update pre-commit hook astral-sh/ruff-pre-commit to v0.4.4 ([#2974](https://github.com/nf-core/tools/pull/2974)) + ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] ### Template From 9572b237f89a864e728d3384e5dc841a33758fa1 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 10 May 2024 12:03:27 +0200 Subject: [PATCH 124/737] add asyncio to pytest.ini and upload snapshot report on fail on CI --- .github/workflows/pytest.yml | 7 +++++++ pytest.ini | 3 +++ tests/test_create_app.py | 3 --- 3 files changed, 10 insertions(+), 3 deletions(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 70b9cfd0a..587f8166d 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -148,6 +148,13 @@ jobs: name: coverage_${{ matrix.test }} path: .coverage + - name: Store snapshot report on failure + uses: actions/upload-artifact@v4 + if: ${{matrix.test}} == "test_create_app.py" && failure() + with: + name: Snapshot Report ${{ matrix.test }} + path: ./snapshot_report.html + coverage: needs: test # use the runner given by the input if it is dispatched manually, run on github if it is a rerun or on self-hosted by default diff --git a/pytest.ini b/pytest.ini index cf3715947..fcbd03fa4 100644 --- a/pytest.ini +++ b/pytest.ini @@ -2,3 +2,6 @@ testpaths = tests python_files = test_*.py + +# automatically run coroutine tests with asyncio +asyncio_mode = auto diff --git a/tests/test_create_app.py b/tests/test_create_app.py index ee010c251..2682fff6b 100644 --- 
a/tests/test_create_app.py +++ b/tests/test_create_app.py @@ -1,11 +1,8 @@ """Test Pipeline Create App""" -import pytest - from nf_core.pipelines.create import PipelineCreateApp -@pytest.mark.asyncio async def test_app_bindings(): """Test that the app bindings work.""" app = PipelineCreateApp() From bc1096f89bb1c082afac338e90b819ce4cfdab03 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 10 May 2024 12:27:44 +0200 Subject: [PATCH 125/737] wait for workers to complete --- tests/__snapshots__/test_create_app.ambr | 267 ++++++++++++----------- tests/test_create_app.py | 9 +- 2 files changed, 137 insertions(+), 139 deletions(-) diff --git a/tests/__snapshots__/test_create_app.ambr b/tests/__snapshots__/test_create_app.ambr index d3cf69a2d..698e21b70 100644 --- a/tests/__snapshots__/test_create_app.ambr +++ b/tests/__snapshots__/test_create_app.ambr @@ -1408,261 +1408,262 @@ font-weight: 700; } - .terminal-2377938861-matrix { + .terminal-3947539839-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2377938861-title { + .terminal-3947539839-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2377938861-r1 { fill: #c5c8c6 } - .terminal-2377938861-r2 { fill: #e3e3e3 } - .terminal-2377938861-r3 { fill: #989898 } - .terminal-2377938861-r4 { fill: #e1e1e1 } - .terminal-2377938861-r5 { fill: #121212 } - .terminal-2377938861-r6 { fill: #0053aa } - .terminal-2377938861-r7 { fill: #dde8f3;font-weight: bold } - .terminal-2377938861-r8 { fill: #4ebf71 } - .terminal-2377938861-r9 { fill: #e2e2e2 } - .terminal-2377938861-r10 { fill: #e2e2e2;font-style: italic; } - .terminal-2377938861-r11 { fill: #e2e2e2;font-style: italic;;text-decoration: underline; } - .terminal-2377938861-r12 { fill: #a5a5a5;font-style: italic; } - .terminal-2377938861-r13 { fill: #1e1e1e } - .terminal-2377938861-r14 { fill: #008139 } - .terminal-2377938861-r15 { fill: #454a50 } - 
.terminal-2377938861-r16 { fill: #e2e3e3;font-weight: bold } - .terminal-2377938861-r17 { fill: #000000 } - .terminal-2377938861-r18 { fill: #b93c5b } - .terminal-2377938861-r19 { fill: #e2e2e2;font-weight: bold } - .terminal-2377938861-r20 { fill: #969696;font-weight: bold } - .terminal-2377938861-r21 { fill: #808080 } - .terminal-2377938861-r22 { fill: #7ae998 } - .terminal-2377938861-r23 { fill: #507bb3 } - .terminal-2377938861-r24 { fill: #0a180e;font-weight: bold } - .terminal-2377938861-r25 { fill: #dde6ed;font-weight: bold } - .terminal-2377938861-r26 { fill: #001541 } - .terminal-2377938861-r27 { fill: #ddedf9 } + .terminal-3947539839-r1 { fill: #c5c8c6 } + .terminal-3947539839-r2 { fill: #e3e3e3 } + .terminal-3947539839-r3 { fill: #989898 } + .terminal-3947539839-r4 { fill: #e1e1e1 } + .terminal-3947539839-r5 { fill: #121212 } + .terminal-3947539839-r6 { fill: #0053aa } + .terminal-3947539839-r7 { fill: #dde8f3;font-weight: bold } + .terminal-3947539839-r8 { fill: #4ebf71 } + .terminal-3947539839-r9 { fill: #e2e2e2 } + .terminal-3947539839-r10 { fill: #e2e2e2;font-style: italic; } + .terminal-3947539839-r11 { fill: #e2e2e2;font-style: italic;;text-decoration: underline; } + .terminal-3947539839-r12 { fill: #a5a5a5;font-style: italic; } + .terminal-3947539839-r13 { fill: #1e1e1e } + .terminal-3947539839-r14 { fill: #008139 } + .terminal-3947539839-r15 { fill: #454a50 } + .terminal-3947539839-r16 { fill: #787878 } + .terminal-3947539839-r17 { fill: #e2e3e3;font-weight: bold } + .terminal-3947539839-r18 { fill: #000000 } + .terminal-3947539839-r19 { fill: #b93c5b } + .terminal-3947539839-r20 { fill: #e2e2e2;font-weight: bold } + .terminal-3947539839-r21 { fill: #969696;font-weight: bold } + .terminal-3947539839-r22 { fill: #808080 } + .terminal-3947539839-r23 { fill: #7ae998 } + .terminal-3947539839-r24 { fill: #507bb3 } + .terminal-3947539839-r25 { fill: #0a180e;font-weight: bold } + .terminal-3947539839-r26 { fill: #dde6ed;font-weight: bold } + 
.terminal-3947539839-r27 { fill: #001541 } + .terminal-3947539839-r28 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Create GitHub repository - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Now that we have created a new pipeline locally, we can create a new GitHub repository and  - push the code to it. - - 💡 Found GitHub username in local GitHub CLI config - - - - Your GitHub usernameYour GitHub personal access token - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔for login.▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - mirpedr▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔Show - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁••••••••••••▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - The name of the organisation where the The name of the new GitHub repository - GitHub repo will be cretaed▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔mypipeline - nf-core▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - ⚠️ You can't create a repository directly in the nf-core organisation. - Please create the pipeline repo to an organisation where you have access or use your user - account. A core-team member will be able to transfer the repo to nf-core once the  - development has started. - - 💡 Your GitHub user account will be used by default if nf-core is given as the org name. - - - ▔▔▔▔▔▔▔▔Private - Select to make the new GitHub repo private. 
- ▁▁▁▁▁▁▁▁ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackCreate GitHub repoFinish without creating a repo - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Create GitHub repository + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Now that we have created a new pipeline locally, we can create a new GitHub repository and  + push the code to it. + + 💡 Found GitHub username in local GitHub CLI config + + + + Your GitHub usernameYour GitHub personal access token + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔for login.▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + GitHub username▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔Show + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁GitHub token▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + The name of the organisation where the The name of the new GitHub repository + GitHub repo will be cretaed▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔mypipeline + nf-core▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + ⚠️ You can't create a repository directly in the nf-core organisation. + Please create the pipeline repo to an organisation where you have access or use your user + account. A core-team member will be able to transfer the repo to nf-core once the  + development has started. + + 💡 Your GitHub user account will be used by default if nf-core is given as the org name. + + + ▔▔▔▔▔▔▔▔Private + Select to make the new GitHub repo private. 
+ ▁▁▁▁▁▁▁▁ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackCreate GitHub repoFinish without creating a repo + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + +  D  Toggle dark mode  Q  Quit  diff --git a/tests/test_create_app.py b/tests/test_create_app.py index 2682fff6b..f01ea5b6b 100644 --- a/tests/test_create_app.py +++ b/tests/test_create_app.py @@ -210,8 +210,7 @@ async def run_before(pilot) -> None: await pilot.press("tab") await pilot.press(*str(tmpdir)) await pilot.click("#finish") - pilot.app.get_widget_by_id("close_screen").remove_class("hide") - await pilot.pause() + await pilot.app.workers.wait_for_complete() await pilot.click("#close_screen") assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) @@ -245,8 +244,7 @@ async def run_before(pilot) -> None: await pilot.press("tab") await pilot.press(*str(tmpdir)) await pilot.click("#finish") - pilot.app.get_widget_by_id("close_screen").remove_class("hide") - await pilot.pause() + await pilot.app.workers.wait_for_complete() await pilot.click("#close_screen") await pilot.click("#github_repo") await pilot.click("#gh_username") @@ -285,8 +283,7 @@ async def run_before(pilot) -> None: await pilot.press("tab") await pilot.press(*str(tmpdir)) await pilot.click("#finish") - pilot.app.get_widget_by_id("close_screen").remove_class("hide") - await pilot.pause() + await pilot.app.workers.wait_for_complete() await pilot.click("#close_screen") await pilot.click("#github_repo") await pilot.click("#exit") From af584d3d3f6563d855ae5d156767c628a80b57e0 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 10 May 2024 12:45:19 +0200 Subject: [PATCH 126/737] upload snapshot report always --- .github/workflows/pytest.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 587f8166d..1edebb0b5 100644 --- 
a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -149,8 +149,8 @@ jobs: path: .coverage - name: Store snapshot report on failure - uses: actions/upload-artifact@v4 - if: ${{matrix.test}} == "test_create_app.py" && failure() + uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4 + if: ${{matrix.test}} == "test_create_app.py" && always() with: name: Snapshot Report ${{ matrix.test }} path: ./snapshot_report.html From 0de5b690ce65fa0c5f73ca16c04bec27c8243285 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 10 May 2024 12:48:54 +0200 Subject: [PATCH 127/737] upload snapshot report first --- .github/workflows/pytest.yml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 1edebb0b5..4e873385e 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -142,18 +142,18 @@ jobs: exit 1 fi - - name: Upload coverage + - name: Store snapshot report uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4 + if: always() with: - name: coverage_${{ matrix.test }} - path: .coverage + name: Snapshot Report ${{ matrix.test }} + path: ./snapshot_report.html - - name: Store snapshot report on failure + - name: Upload coverage uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4 - if: ${{matrix.test}} == "test_create_app.py" && always() with: - name: Snapshot Report ${{ matrix.test }} - path: ./snapshot_report.html + name: coverage_${{ matrix.test }} + path: .coverage coverage: needs: test From 9d6838c0a94d7bd8755fc4a8c9e38c542bf9adf4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Fri, 10 May 2024 11:00:16 +0000 Subject: [PATCH 128/737] update snapshot from gitpod --- tests/__snapshots__/test_create_app.ambr | 266 +++++++++++------------ 1 file changed, 132 insertions(+), 134 deletions(-) diff --git a/tests/__snapshots__/test_create_app.ambr 
b/tests/__snapshots__/test_create_app.ambr index 698e21b70..c486ec4f8 100644 --- a/tests/__snapshots__/test_create_app.ambr +++ b/tests/__snapshots__/test_create_app.ambr @@ -1408,262 +1408,260 @@ font-weight: 700; } - .terminal-3947539839-matrix { + .terminal-436990287-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3947539839-title { + .terminal-436990287-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3947539839-r1 { fill: #c5c8c6 } - .terminal-3947539839-r2 { fill: #e3e3e3 } - .terminal-3947539839-r3 { fill: #989898 } - .terminal-3947539839-r4 { fill: #e1e1e1 } - .terminal-3947539839-r5 { fill: #121212 } - .terminal-3947539839-r6 { fill: #0053aa } - .terminal-3947539839-r7 { fill: #dde8f3;font-weight: bold } - .terminal-3947539839-r8 { fill: #4ebf71 } - .terminal-3947539839-r9 { fill: #e2e2e2 } - .terminal-3947539839-r10 { fill: #e2e2e2;font-style: italic; } - .terminal-3947539839-r11 { fill: #e2e2e2;font-style: italic;;text-decoration: underline; } - .terminal-3947539839-r12 { fill: #a5a5a5;font-style: italic; } - .terminal-3947539839-r13 { fill: #1e1e1e } - .terminal-3947539839-r14 { fill: #008139 } - .terminal-3947539839-r15 { fill: #454a50 } - .terminal-3947539839-r16 { fill: #787878 } - .terminal-3947539839-r17 { fill: #e2e3e3;font-weight: bold } - .terminal-3947539839-r18 { fill: #000000 } - .terminal-3947539839-r19 { fill: #b93c5b } - .terminal-3947539839-r20 { fill: #e2e2e2;font-weight: bold } - .terminal-3947539839-r21 { fill: #969696;font-weight: bold } - .terminal-3947539839-r22 { fill: #808080 } - .terminal-3947539839-r23 { fill: #7ae998 } - .terminal-3947539839-r24 { fill: #507bb3 } - .terminal-3947539839-r25 { fill: #0a180e;font-weight: bold } - .terminal-3947539839-r26 { fill: #dde6ed;font-weight: bold } - .terminal-3947539839-r27 { fill: #001541 } - .terminal-3947539839-r28 { fill: #ddedf9 } + .terminal-436990287-r1 { fill: #c5c8c6 
} + .terminal-436990287-r2 { fill: #e3e3e3 } + .terminal-436990287-r3 { fill: #989898 } + .terminal-436990287-r4 { fill: #e1e1e1 } + .terminal-436990287-r5 { fill: #121212 } + .terminal-436990287-r6 { fill: #0053aa } + .terminal-436990287-r7 { fill: #dde8f3;font-weight: bold } + .terminal-436990287-r8 { fill: #a5a5a5;font-style: italic; } + .terminal-436990287-r9 { fill: #1e1e1e } + .terminal-436990287-r10 { fill: #008139 } + .terminal-436990287-r11 { fill: #454a50 } + .terminal-436990287-r12 { fill: #787878 } + .terminal-436990287-r13 { fill: #e2e2e2 } + .terminal-436990287-r14 { fill: #e2e3e3;font-weight: bold } + .terminal-436990287-r15 { fill: #000000 } + .terminal-436990287-r16 { fill: #b93c5b } + .terminal-436990287-r17 { fill: #4ebf71 } + .terminal-436990287-r18 { fill: #e2e2e2;font-weight: bold } + .terminal-436990287-r19 { fill: #969696;font-weight: bold } + .terminal-436990287-r20 { fill: #808080 } + .terminal-436990287-r21 { fill: #7ae998 } + .terminal-436990287-r22 { fill: #507bb3 } + .terminal-436990287-r23 { fill: #0a180e;font-weight: bold } + .terminal-436990287-r24 { fill: #dde6ed;font-weight: bold } + .terminal-436990287-r25 { fill: #001541 } + .terminal-436990287-r26 { fill: #ddedf9 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Create GitHub repository - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Now that we have created a new pipeline locally, we can create a new GitHub repository and  - push the code to it. 
- - 💡 Found GitHub username in local GitHub CLI config - - - - Your GitHub usernameYour GitHub personal access token - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔for login.▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - GitHub username▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔Show - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁GitHub token▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - The name of the organisation where the The name of the new GitHub repository - GitHub repo will be cretaed▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔mypipeline - nf-core▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - ⚠️ You can't create a repository directly in the nf-core organisation. - Please create the pipeline repo to an organisation where you have access or use your user - account. A core-team member will be able to transfer the repo to nf-core once the  - development has started. - - 💡 Your GitHub user account will be used by default if nf-core is given as the org name. - - - ▔▔▔▔▔▔▔▔Private - Select to make the new GitHub repo private. - ▁▁▁▁▁▁▁▁ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackCreate GitHub repoFinish without creating a repo - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Create GitHub repository + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Now that we have created a new pipeline locally, we can create a new GitHub repository and  + push the code to it. 
+ + + + Your GitHub usernameYour GitHub personal access token + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔for login.▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + GitHub username▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔Show + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁GitHub token▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + The name of the organisation where the The name of the new GitHub repository + GitHub repo will be cretaed▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔mypipeline + nf-core▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + ⚠️ You can't create a repository directly in the nf-core organisation. + Please create the pipeline repo to an organisation where you have access or use your user + account. A core-team member will be able to transfer the repo to nf-core once the  + development has started. + + 💡 Your GitHub user account will be used by default if nf-core is given as the org name. + + + ▔▔▔▔▔▔▔▔Private + Select to make the new GitHub repo private. 
+ ▁▁▁▁▁▁▁▁ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackCreate GitHub repoFinish without creating a repo + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + +  D  Toggle dark mode  Q  Quit  From 3cf57a85d3b0104679ecda719ea8227a7883045c Mon Sep 17 00:00:00 2001 From: itrujnara Date: Fri, 10 May 2024 15:52:42 +0200 Subject: [PATCH 129/737] Added nf-schema as condition to exclude Java deps file --- nf_core/lint/files_exist.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/nf_core/lint/files_exist.py b/nf_core/lint/files_exist.py index d801caf70..0c8710062 100644 --- a/nf_core/lint/files_exist.py +++ b/nf_core/lint/files_exist.py @@ -200,8 +200,8 @@ def files_exist(self) -> Dict[str, Union[List[str], bool]]: Path("Singularity"), ] files_warn_ifexists = [Path(".travis.yml")] - files_fail_ifinconfig: List[Tuple[Path, Dict[str, str]]] = [ - (Path("lib", "nfcore_external_java_deps.jar"), {"plugins": "nf-validation"}), + files_fail_ifinconfig: List[Tuple[Path, List[Dict[str, str]]]] = [ + (Path("lib", "nfcore_external_java_deps.jar"), [{"plugins": "nf-validation"}, {"plugins": "nf-schema"}]), ] # Remove files that should be ignored according to the linting config @@ -246,10 +246,11 @@ def pf(file_path: Union[str, Path]) -> Path: if str(file_cond[0]) in ignore_files: continue in_config = False - config_key, config_value = list(file_cond[1].items())[0] - if config_key in self.nf_config and config_value in self.nf_config[config_key]: - log.debug(f"Found {config_key} in nextflow.config with value {config_value}") - in_config = True + for condition in file_cond[1]: + config_key, config_value = list(condition.items())[0] + if config_key in self.nf_config and config_value in self.nf_config[config_key]: + log.debug(f"Found {config_key} in nextflow.config with value {config_value}") + in_config = True if pf(file_cond[0]).is_file() and in_config: failed.append(f"File must be 
removed: {self._wrap_quotes(file_cond[0])}") elif pf(file_cond[0]).is_file() and not in_config: From 0bdd4dd99bd0f0da6375c57c0fafa27e1d03bfc6 Mon Sep 17 00:00:00 2001 From: mashehu Date: Fri, 10 May 2024 16:47:07 +0200 Subject: [PATCH 130/737] add explicit test for nf-schema --- tests/lint/files_exist.py | 17 +++++++++++++++++ tests/test_lint.py | 1 + 2 files changed, 18 insertions(+) diff --git a/tests/lint/files_exist.py b/tests/lint/files_exist.py index 508152289..08da7f14d 100644 --- a/tests/lint/files_exist.py +++ b/tests/lint/files_exist.py @@ -79,3 +79,20 @@ def test_files_exist_fail_conditional(self): results = lint_obj.files_exist() assert results["failed"] == ["File must be removed: `lib/nfcore_external_java_deps.jar`"] assert results["ignored"] == [] + + +def test_files_exist_pass_conditional_nfschema(self): + new_pipeline = self._make_pipeline_copy() + # replace nf-validation with nf-schema in nextflow.config + with open(Path(new_pipeline, "nextflow.config")) as f: + config = f.read() + config = config.replace("nf-validation", "nf-schema") + with open(Path(new_pipeline, "nextflow.config"), "w") as f: + f.write(config) + + lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj._load() + lint_obj.nf_config["manifest.schema"] = "nf-core" + results = lint_obj.files_exist() + assert results["failed"] == [] + assert results["ignored"] == [] diff --git a/tests/test_lint.py b/tests/test_lint.py index b72a6bfdf..15c1550e7 100644 --- a/tests/test_lint.py +++ b/tests/test_lint.py @@ -212,6 +212,7 @@ def test_sphinx_md_files(self): test_files_exist_missing_main, test_files_exist_pass, test_files_exist_pass_conditional, + test_files_exist_pass_conditional_nfschema, ) from .lint.files_unchanged import ( # type: ignore[misc] test_files_unchanged_fail, From c3e2844b77fddd43c812fa3ea1c734f1b72cc0ae Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Fri, 10 May 2024 14:49:13 +0000 Subject: [PATCH 131/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 2 ++ 1 
file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3f46a265c..6dd09482b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,8 @@ ### Linting +- Fix linting fail on nfcore_external_java_deps if nf_schema is used ([#2976](https://github.com/nf-core/tools/pull/2976)) + ### Download ### Components From 5e611c8ff68824836b487a9bfd4153e6c2db78fd Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 13 May 2024 10:42:19 +0200 Subject: [PATCH 132/737] add GHA to post comment about outdated template version --- .../.github/workflows/linting.yml | 8 +++- .../workflows/template_version_comment.yml | 42 +++++++++++++++++++ 2 files changed, 49 insertions(+), 1 deletion(-) create mode 100644 nf_core/pipeline-template/.github/workflows/template_version_comment.yml diff --git a/nf_core/pipeline-template/.github/workflows/linting.yml b/nf_core/pipeline-template/.github/workflows/linting.yml index b2cde075f..229aa9f65 100644 --- a/nf_core/pipeline-template/.github/workflows/linting.yml +++ b/nf_core/pipeline-template/.github/workflows/linting.yml @@ -41,10 +41,16 @@ jobs: python-version: "3.12" architecture: "x64" + - name: read .nf-core.yml + uses: pietrobolcato/action-read-yaml@1.0.0 + id: read_yml + with: + config: ${{ github.workspace }}/.nf-core.yaml + - name: Install dependencies run: | python -m pip install --upgrade pip - pip install nf-core + pip install nf-core==${{ steps.read_yml.outputs['nf_core_version'] }} - name: Run nf-core lint env: diff --git a/nf_core/pipeline-template/.github/workflows/template_version_comment.yml b/nf_core/pipeline-template/.github/workflows/template_version_comment.yml new file mode 100644 index 000000000..2f49ab126 --- /dev/null +++ b/nf_core/pipeline-template/.github/workflows/template_version_comment.yml @@ -0,0 +1,42 @@ +name: nf-core template version comment +# This workflow is triggered on PRs to check if the pipeline template version matches the latest nf-core version. 
+# It posts a comment to the PR, even if it comes from a fork. + +on: pull_request_target + +jobs: + template_version: + runs-on: ubuntu-latest + steps: + - name: Check out pipeline code + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + + - name: Read template version from .nf-core.yml + uses: pietrobolcato/action-read-yaml@1.0.0 + id: read_yml + with: + config: ${{ github.workspace }}/.nf-core.yml + + - name: Install nf-core + run: | + python -m pip install --upgrade pip + pip install nf-core==${{ steps.read_yml.outputs['nf_core_version'] }} + + - name: Check nf-core outdated + id: nf_core_outdated + run: pip list --outdated | grep nf-core + + - name: Post nf-core template version comment + uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2 + if: | + ${{ steps.nf_core_outdated.outputs.stdout }} =~ 'nf-core' + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + allow-repeats: false + message: | + ## :warning: New nf-core template version available + + Your pipeline is using an old version of the nf-core template: ${{ steps.read_yml.outputs['nf_core_version'] }}. + Please update your pipeline to the latest version. + + For more documentation on how to update your pipeline, please see the [nf-core documentation](https://github.com/nf-core/tools?tab=readme-ov-file#sync-a-pipeline-with-the-template) and [Synchronisation documentation](https://nf-co.re/docs/contributing/sync). 
From 7c1bb8a17ebe4b6e3a8afe27e4c78bdc42df01fb Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Mon, 13 May 2024 08:45:47 +0000 Subject: [PATCH 133/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3f46a265c..ba1d7da10 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,8 @@ ### Linting +- Template: Lint pipelines with the nf-core template version and post comment if it is outdated ([#2978](https://github.com/nf-core/tools/pull/2978)) + ### Download ### Components From 940b3841e94b8368b5827aa356010e8700f3ead7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Mon, 13 May 2024 11:33:16 +0200 Subject: [PATCH 134/737] Apply suggestions from code review MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Matthias Hörtenhuber --- .../.github/workflows/template_version_comment.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/pipeline-template/.github/workflows/template_version_comment.yml b/nf_core/pipeline-template/.github/workflows/template_version_comment.yml index 2f49ab126..e21283309 100644 --- a/nf_core/pipeline-template/.github/workflows/template_version_comment.yml +++ b/nf_core/pipeline-template/.github/workflows/template_version_comment.yml @@ -31,10 +31,10 @@ jobs: if: | ${{ steps.nf_core_outdated.outputs.stdout }} =~ 'nf-core' with: - repo-token: ${{ secrets.GITHUB_TOKEN }} + repo-token: ${{ secrets.NF_CORE_BOT_AUTH_TOKEN }} allow-repeats: false message: | - ## :warning: New nf-core template version available + ## :warning: Newer version of the nf-core template is available. Your pipeline is using an old version of the nf-core template: ${{ steps.read_yml.outputs['nf_core_version'] }}. Please update your pipeline to the latest version. 
From 961a17b1fc223bdd3a07cca42f6bbfdb0c147636 Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 13 May 2024 11:44:35 +0200 Subject: [PATCH 135/737] fix links in api_ref index page --- docs/api/_src/index.md | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/docs/api/_src/index.md b/docs/api/_src/index.md index 17bf2407f..af3813bc5 100644 --- a/docs/api/_src/index.md +++ b/docs/api/_src/index.md @@ -16,7 +16,6 @@ This documentation is for the `nf-core/tools` package. ## Contents -- [Pipeline code lint tests](pipeline_lint_tests/index.md) (run by `nf-core lint`) -- [Module code lint tests](module_lint_tests/index.md) (run by `nf-core modules lint`) -- [Subworkflow code lint tests](subworkflow_lint_tests/index.md) (run by `nf-core subworkflows lint`) -- [nf-core/tools Python package API reference](api/index.md) +- [Pipeline code lint tests](pipeline_lint_tests/) (run by `nf-core lint`) +- [Module code lint tests](module_lint_tests/) (run by `nf-core modules lint`) +- [Subworkflow code lint tests](subworkflow_lint_tests/) (run by `nf-core subworkflows lint`) From 25c6ff47e000a18d2e59f343a26f183a7789c583 Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 13 May 2024 15:01:38 +0200 Subject: [PATCH 136/737] update getting started link --- nf_core/create.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/create.py b/nf_core/create.py index b420b1c86..59db7fa4a 100644 --- a/nf_core/create.py +++ b/nf_core/create.py @@ -259,8 +259,8 @@ def init_pipeline(self): "[green bold]!!!!!! 
IMPORTANT !!!!!!\n\n" "[green not bold]If you are interested in adding your pipeline to the nf-core community,\n" "PLEASE COME AND TALK TO US IN THE NF-CORE SLACK BEFORE WRITING ANY CODE!\n\n" - "[default]Please read: [link=https://nf-co.re/developers/adding_pipelines#join-the-community]" - "https://nf-co.re/developers/adding_pipelines#join-the-community[/link]" + "[default]Please read: [link=https://nf-co.re/docs/tutorials/adding_a_pipeline/overview#join-the-community]" + "https://nf-co.re/docs/tutorials/adding_a_pipeline/overview#join-the-community[/link]" ) def render_template(self): From c36741f7aef8a575e980390afb070c79eb744013 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 13 May 2024 20:29:04 +0000 Subject: [PATCH 137/737] Update gitpod/workspace-base Docker digest to 92dd1bc --- nf_core/gitpod/gitpod.Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/gitpod/gitpod.Dockerfile b/nf_core/gitpod/gitpod.Dockerfile index 6afca0e47..628421607 100644 --- a/nf_core/gitpod/gitpod.Dockerfile +++ b/nf_core/gitpod/gitpod.Dockerfile @@ -1,7 +1,7 @@ # Test build locally before making a PR # docker build -t gitpod:test -f nf_core/gitpod/gitpod.Dockerfile . 
-FROM gitpod/workspace-base@sha256:124f2b8cbefe9b4abbb6a14538da8846770dde20b93f038d9551b6230aec1d1c +FROM gitpod/workspace-base@sha256:92dd1bcbd5a2fb466c81b1e4c21fc2495575546a9e6c53b3f7d4ba0b0c29c5be USER root From c8fc54bab9a67a3a0e95c60b639f80d8a5cb6a82 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Wed, 15 May 2024 04:39:25 +0000 Subject: [PATCH 138/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6dd09482b..70b606932 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -15,6 +15,7 @@ ### General - Update pre-commit hook astral-sh/ruff-pre-commit to v0.4.4 ([#2974](https://github.com/nf-core/tools/pull/2974)) +- Update gitpod/workspace-base Docker digest to 92dd1bc ([#2982](https://github.com/nf-core/tools/pull/2982)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From c448406b5e8ddae9d2c618db0ca464dbc97e7cb2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Famke=20Ba=CC=88uerle?= Date: Wed, 15 May 2024 14:21:04 +0200 Subject: [PATCH 139/737] update template according to PR 5281 in modules --- nf_core/module-template/tests/main.nf.test.j2 | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/nf_core/module-template/tests/main.nf.test.j2 b/nf_core/module-template/tests/main.nf.test.j2 index f31e92d65..1f70df64b 100644 --- a/nf_core/module-template/tests/main.nf.test.j2 +++ b/nf_core/module-template/tests/main.nf.test.j2 @@ -28,10 +28,10 @@ nextflow_process { {% if has_meta %} input[0] = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.paired_end.bam', checkIfExists: true) ] {%- else %} - input[0] = file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true) + input[0] = file(params.modules_testdata_base_path + 
'genomics/sarscov2/illumina/bam/test.single_end.bam', checkIfExists: true) {%- endif %} """ } @@ -60,10 +60,10 @@ nextflow_process { {% if has_meta %} input[0] = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.paired_end.bam', checkIfExists: true) ] {%- else %} - input[0] = file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true) + input[0] = file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.single_end.bam', checkIfExists: true) {%- endif %} """ } From 4ca14b448471dd365c13b41dbe13c46254d24b2d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Famke=20Ba=CC=88uerle?= Date: Wed, 15 May 2024 14:27:11 +0200 Subject: [PATCH 140/737] update changelog --- CHANGELOG.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index e130d3c37..02dc68cae 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,9 @@ # nf-core/tools: Changelog +### Template + +- Change paths to test data ([#2985](https://github.com/nf-core/tools/pull/2985)) + ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] ### Template From 51447b27afc787075048b0deb51d2737dcc43062 Mon Sep 17 00:00:00 2001 From: Maxime U Garcia Date: Wed, 15 May 2024 14:29:08 +0200 Subject: [PATCH 141/737] Apply suggestions from code review --- CHANGELOG.md | 1 - 1 file changed, 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0338ec0ba..52b4d5735 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,5 @@ # nf-core/tools: Changelog - ## v2.14.2dev ### Template From 515ef02739a2da37701964c96798e2c2c1e15e8a Mon Sep 17 00:00:00 2001 From: Stephen Watts Date: Mon, 20 May 2024 14:24:55 +1000 Subject: [PATCH 142/737] Fix README.md admonition --- nf_core/pipeline-template/README.md | 3 +-- 1 file changed, 1 insertion(+), 
2 deletions(-) diff --git a/nf_core/pipeline-template/README.md b/nf_core/pipeline-template/README.md index e6351b0c6..ad0914f98 100644 --- a/nf_core/pipeline-template/README.md +++ b/nf_core/pipeline-template/README.md @@ -80,8 +80,7 @@ nextflow run {{ name }} \ ``` > [!WARNING] -> Please provide pipeline parameters via the CLI or Nextflow `-params-file` option. Custom config files including those provided by the `-c` Nextflow option can be used to provide any configuration _**except for parameters**_; -> see [docs](https://nf-co.re/usage/configuration#custom-configuration-files). +> Please provide pipeline parameters via the CLI or Nextflow `-params-file` option. Custom config files including those provided by the `-c` Nextflow option can be used to provide any configuration _**except for parameters**_; see [docs](https://nf-co.re/usage/configuration#custom-configuration-files). {% if is_nfcore -%} From 0fa391d2e8c846514d430776afdb8a1739708900 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 20 May 2024 16:32:48 +0200 Subject: [PATCH 143/737] fix contributing sentence --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 4ed5e6373..85b608bf7 100644 --- a/README.md +++ b/README.md @@ -61,7 +61,7 @@ pip install --upgrade -r requirements-dev.txt -e . ## Contributions and Support -If you would like to contribute to this pipeline, please see the [contributing guidelines](.github/CONTRIBUTING.md). +If you would like to contribute to this package, please see the [contributing guidelines](.github/CONTRIBUTING.md). For further information or help, don't hesitate to get in touch on the [Slack `#tools` channel](https://nfcore.slack.com/channels/tools) (you can join with [this invite](https://nf-co.re/join/slack)). 
From a70f8a80316bf580c408fef925ef01652191b912 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Mon, 20 May 2024 17:02:32 +0200 Subject: [PATCH 144/737] Update website docs link --- nf_core/pipeline-template/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipeline-template/README.md b/nf_core/pipeline-template/README.md index ad0914f98..feece399b 100644 --- a/nf_core/pipeline-template/README.md +++ b/nf_core/pipeline-template/README.md @@ -80,7 +80,7 @@ nextflow run {{ name }} \ ``` > [!WARNING] -> Please provide pipeline parameters via the CLI or Nextflow `-params-file` option. Custom config files including those provided by the `-c` Nextflow option can be used to provide any configuration _**except for parameters**_; see [docs](https://nf-co.re/usage/configuration#custom-configuration-files). +> Please provide pipeline parameters via the CLI or Nextflow `-params-file` option. Custom config files including those provided by the `-c` Nextflow option can be used to provide any configuration _**except for parameters**_; see [docs](https://nf-co.re/docs/usage/getting_started/configuration#custom-configuration-files). 
{% if is_nfcore -%} From cfc06fe702a837f50487951e222997e9a6d4fb58 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 21 May 2024 07:40:26 +0200 Subject: [PATCH 145/737] update output of generation script for api docs to new structure --- docs/api/generate-api-docs.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/api/generate-api-docs.sh b/docs/api/generate-api-docs.sh index 6b3c3abfa..f2b905c0c 100644 --- a/docs/api/generate-api-docs.sh +++ b/docs/api/generate-api-docs.sh @@ -28,7 +28,7 @@ done # Set the output directory if not set if [[ -z "$output_dir" ]]; then - output_dir="../src/content/tools/docs" + output_dir="../src/content/api_reference" fi # if no release is specified, use all releases From db6f0d38ab665e229cfa24393528415e68fdeac2 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Tue, 21 May 2024 05:42:04 +0000 Subject: [PATCH 146/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 70b606932..16573a91e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,6 +16,7 @@ - Update pre-commit hook astral-sh/ruff-pre-commit to v0.4.4 ([#2974](https://github.com/nf-core/tools/pull/2974)) - Update gitpod/workspace-base Docker digest to 92dd1bc ([#2982](https://github.com/nf-core/tools/pull/2982)) +- Update output of generation script for API docs to new structure ([#2988](https://github.com/nf-core/tools/pull/2988)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From 3b0e3011a48b7453c4708558c17d4e69fddeca56 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 21 May 2024 10:42:27 +0200 Subject: [PATCH 147/737] update release checklist to generate rich-codex images on the website repo --- .github/RELEASE_CHECKLIST.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/RELEASE_CHECKLIST.md b/.github/RELEASE_CHECKLIST.md index 9a1905c7a..3e0b19ec6 100644 --- 
a/.github/RELEASE_CHECKLIST.md +++ b/.github/RELEASE_CHECKLIST.md @@ -9,11 +9,10 @@ 7. Create a PR from `dev` to `master` 8. Make sure all CI tests are passing again (additional tests are run on PRs to `master`) 9. Request review (2 approvals required) -10. Run `rich-codex` to regenerate docs screengrabs (actions `workflow_dispatch` button) -11. Merge the PR into `master` -12. Wait for CI tests on the commit to passed -13. (Optional but a good idea) Run a manual sync on `nf-core/testpipeline` and check that CI is passing on the resulting PR. -14. Create a new release copying the `CHANGELOG` for that release into the description section. +10. Merge the PR into `master` +11. Wait for CI tests on the commit to passed +12. (Optional but a good idea) Run a manual sync on `nf-core/testpipeline` and check that CI is passing on the resulting PR. +13. Create a new release copying the `CHANGELOG` for that release into the description section. ## After release @@ -21,3 +20,4 @@ 2. Check that the automatic `PyPi` deployment has worked: [pypi.org/project/nf-core](https://pypi.org/project/nf-core/) 3. Check `BioConda` has an automated PR to bump the version, and merge. eg. [bioconda/bioconda-recipes #20065](https://github.com/bioconda/bioconda-recipes/pull/20065) 4. Create a tools PR to `dev` to bump back to the next development version in `CHANGELOG.md` and `setup.py` and change the gitpod container to `nfcore/gitpod:dev`. +5. 
Run `rich-codex` on the [tools/website repo](https://github.com/nf-core/website/actions/workflows/rich-codex.yml) to regenerate docs screengrabs (actions `workflow_dispatch` button) From 8b2e82f8a8e1237eb107306bae13f0233c91bf4c Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 21 May 2024 10:42:54 +0200 Subject: [PATCH 148/737] remove rich-codex action --- .github/workflows/rich-codex.yml | 39 -------------------------------- 1 file changed, 39 deletions(-) delete mode 100644 .github/workflows/rich-codex.yml diff --git a/.github/workflows/rich-codex.yml b/.github/workflows/rich-codex.yml deleted file mode 100644 index cd12b139d..000000000 --- a/.github/workflows/rich-codex.yml +++ /dev/null @@ -1,39 +0,0 @@ -name: Generate images for docs -on: - workflow_dispatch: -jobs: - rich_codex: - runs-on: ubuntu-latest - steps: - - name: Check out the repo - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 - - name: Set up Python - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 - with: - python-version: 3.x - cache: pip - cache-dependency-path: setup.py - - - name: Install Nextflow - uses: nf-core/setup-nextflow@v2 - - - name: Install nf-test - uses: nf-core/setup-nf-test@v1 - - - name: Install nf-core/tools - run: pip install git+https://github.com/nf-core/tools.git@dev - - - name: Generate terminal images with rich-codex - uses: ewels/rich-codex@8ce988cc253c240a3027ba58e33e47640935dd8b # v1 - env: - COLUMNS: 100 - HIDE_PROGRESS: "true" - with: - commit_changes: "true" - clean_img_paths: docs/images/*.svg - terminal_width: 100 - before_command: > - which nextflow && - which nf-core && - nextflow -version && - nf-core --version From 96f96d6d50c40eba935d1c12579cb6e6a6884fd4 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Tue, 21 May 2024 08:46:44 +0000 Subject: [PATCH 149/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 16573a91e..f0869f8c2 
100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -17,6 +17,7 @@ - Update pre-commit hook astral-sh/ruff-pre-commit to v0.4.4 ([#2974](https://github.com/nf-core/tools/pull/2974)) - Update gitpod/workspace-base Docker digest to 92dd1bc ([#2982](https://github.com/nf-core/tools/pull/2982)) - Update output of generation script for API docs to new structure ([#2988](https://github.com/nf-core/tools/pull/2988)) +- Update release checklist ([#2989](https://github.com/nf-core/tools/pull/2989)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From 814dc85937f53e19c0681da810f1da5cb75f7188 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Tue, 21 May 2024 08:59:33 +0000 Subject: [PATCH 150/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f0869f8c2..c97fba046 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -17,7 +17,7 @@ - Update pre-commit hook astral-sh/ruff-pre-commit to v0.4.4 ([#2974](https://github.com/nf-core/tools/pull/2974)) - Update gitpod/workspace-base Docker digest to 92dd1bc ([#2982](https://github.com/nf-core/tools/pull/2982)) - Update output of generation script for API docs to new structure ([#2988](https://github.com/nf-core/tools/pull/2988)) -- Update release checklist ([#2989](https://github.com/nf-core/tools/pull/2989)) +- Remove `rich-codex.yml` action, images are now generated on the website repo ([#2989](https://github.com/nf-core/tools/pull/2989)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From 1d0d69135454ea080abee18a32636eb50bd8d2fb Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 21 May 2024 12:33:10 +0200 Subject: [PATCH 151/737] update test after updating bwa/mem module --- tests/components/generate_snapshot.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/components/generate_snapshot.py 
b/tests/components/generate_snapshot.py index 3176569ec..a5a8eaba3 100644 --- a/tests/components/generate_snapshot.py +++ b/tests/components/generate_snapshot.py @@ -91,7 +91,7 @@ def test_update_snapshot_module(self): snap_content = json.load(fh) original_timestamp = snap_content["Single-End"]["timestamp"] # delete the timestamp in json - snap_content["Single-End"]["content"][0]["0"][0][1] = "" + snap_content["Single-End"]["timestamp"] = "" with open(snap_path, "w") as fh: json.dump(snap_content, fh) snap_generator = ComponentsTest( From 49896ff1f42ab44b97f64f22c63455256473d214 Mon Sep 17 00:00:00 2001 From: Mahesh Binzer-Panchal Date: Tue, 21 May 2024 13:20:12 +0000 Subject: [PATCH 152/737] Add no clobber and put bash options on their own line --- nf_core/pipeline-template/nextflow.config | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index 7648a5ebc..4d1701bf7 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -220,8 +220,15 @@ env { JULIA_DEPOT_PATH = "/usr/local/share/julia" } -// Capture exit codes from upstream processes when piping -process.shell = ['/bin/bash', '-euo', 'pipefail'] +// Set bash options +process.shell = """\ +bash + +set -e # Exit if a tool returns a non-zero status/exit code +set -u # Treat unset variables and parameters as an error +set -o pipefail # Returns the status of the last command to exit with a non-zero status or zero if all successfully execute +set -C # No clobber - prevent output redirection from overwriting files. +""" // Disable process selector warnings by default. Use debug profile to enable warnings. 
nextflow.enable.configProcessNamesValidation = false From 589265e68164bf024654f83255e36509d425dc60 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Tue, 21 May 2024 13:23:11 +0000 Subject: [PATCH 153/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 16573a91e..06d8e1b5c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -17,6 +17,7 @@ - Update pre-commit hook astral-sh/ruff-pre-commit to v0.4.4 ([#2974](https://github.com/nf-core/tools/pull/2974)) - Update gitpod/workspace-base Docker digest to 92dd1bc ([#2982](https://github.com/nf-core/tools/pull/2982)) - Update output of generation script for API docs to new structure ([#2988](https://github.com/nf-core/tools/pull/2988)) +- Add no clobber and put bash options on their own line ([#2991](https://github.com/nf-core/tools/pull/2991)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From e6b089b6e14ea2d8a1dd86a1b8454aec02e46b99 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 15 May 2024 22:40:15 +0000 Subject: [PATCH 154/737] Update python:3.12-slim Docker digest to afc139a --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index ae3a4e1a3..fe4162b4f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.12-slim@sha256:2be8daddbb82756f7d1f2c7ece706aadcb284bf6ab6d769ea695cc3ed6016743 +FROM python:3.12-slim@sha256:afc139a0a640942491ec481ad8dda10f2c5b753f5c969393b12480155fe15a63 LABEL authors="phil.ewels@seqera.io,erik.danielsson@scilifelab.se" \ description="Docker image containing requirements for nf-core/tools" From 33179ea81b05862fcdd2fca844d96fdaf91ca241 Mon Sep 17 00:00:00 2001 From: Adam Talbot <12817534+adamrtalbot@users.noreply.github.com> Date: Wed, 22 May 2024 11:06:20 -0400 Subject: [PATCH 155/737] Removes tags.yml from module and 
subworkflow template tags.yml is no longer needed with newer testing workflows. This PR removes the file from the module and subworkflow templates and updates the linting tests to reflect this. Related to #2881 --- .prettierignore | 2 - nf_core/components/create.py | 46 ++- nf_core/components/nfcore_component.py | 10 +- nf_core/module-template/tests/tags.yml | 2 - nf_core/modules/lint/module_tests.py | 103 +++-- nf_core/subworkflow-template/tests/tags.yml | 2 - .../subworkflows/lint/subworkflow_tests.py | 124 +++--- tests/modules/lint.py | 381 +++++++++++++----- tests/subworkflows/lint.py | 211 ++++++---- 9 files changed, 599 insertions(+), 282 deletions(-) delete mode 100644 nf_core/module-template/tests/tags.yml delete mode 100644 nf_core/subworkflow-template/tests/tags.yml diff --git a/.prettierignore b/.prettierignore index b923532bd..2a445d487 100644 --- a/.prettierignore +++ b/.prettierignore @@ -4,8 +4,6 @@ slackreport.json docs/api/_build testing nf_core/module-template/meta.yml -nf_core/module-template/tests/tags.yml -nf_core/subworkflow-template/tests/tags.yml nf_core/pipeline-template/nextflow_schema.json # don't run on things handled by ruff *.py diff --git a/nf_core/components/create.py b/nf_core/components/create.py index 6c9c01b49..f2ffa3caf 100644 --- a/nf_core/components/create.py +++ b/nf_core/components/create.py @@ -88,8 +88,7 @@ def create(self): ├── meta.yml ├── environment.yml └── tests - ├── main.nf.test - └── tags.yml + └── main.nf.test ``` The function will attempt to automatically find a Bioconda package called @@ -102,8 +101,7 @@ def create(self): ├── main.nf ├── meta.yml └── tests - ├── main.nf.test - └── tags.yml + └── main.nf.test ``` """ @@ -228,7 +226,13 @@ def _get_bioconda_tool(self): log.info(f"Could not find a Docker/Singularity container ({e})") def _get_module_structure_components(self): - process_label_defaults = ["process_single", "process_low", "process_medium", "process_high", "process_long"] + process_label_defaults = [ + 
"process_single", + "process_low", + "process_medium", + "process_high", + "process_long", + ] if self.process_label is None: log.info( "Provide an appropriate resource label for the process, taken from the " @@ -252,7 +256,8 @@ def _get_module_structure_components(self): ) while self.has_meta is None: self.has_meta = rich.prompt.Confirm.ask( - "[violet]Will the module require a meta map of sample information?", default=True + "[violet]Will the module require a meta map of sample information?", + default=True, ) def _render_template(self): @@ -262,7 +267,8 @@ def _render_template(self): object_attrs = vars(self) # Run jinja2 for each file in the template folder env = jinja2.Environment( - loader=jinja2.PackageLoader("nf_core", f"{self.component_type[:-1]}-template"), keep_trailing_newline=True + loader=jinja2.PackageLoader("nf_core", f"{self.component_type[:-1]}-template"), + keep_trailing_newline=True, ) for template_fn, dest_fn in self.file_paths.items(): log.debug(f"Rendering template file: '{template_fn}'") @@ -372,7 +378,13 @@ def _get_component_dirs(self): if self.component_type == "modules": # If a subtool, check if there is a module called the base tool name already - parent_tool_main_nf = Path(self.directory, self.component_type, self.org, self.component, "main.nf") + parent_tool_main_nf = Path( + self.directory, + self.component_type, + self.org, + self.component, + "main.nf", + ) if self.subtool and parent_tool_main_nf.exists() and not self.migrate_pytest: raise UserWarning( f"Module '{parent_tool_main_nf}' exists already, cannot make subtool '{self.component_name}'" @@ -393,7 +405,6 @@ def _get_component_dirs(self): file_paths["meta.yml"] = component_dir / "meta.yml" if self.component_type == "modules": file_paths["environment.yml"] = component_dir / "environment.yml" - file_paths["tests/tags.yml"] = component_dir / "tests" / "tags.yml" file_paths["tests/main.nf.test.j2"] = component_dir / "tests" / "main.nf.test" return file_paths @@ -428,11 +439,15 @@ 
def _copy_old_files(self, component_old_path): shutil.copyfile(component_old_path / "meta.yml", self.file_paths["meta.yml"]) if self.component_type == "modules": log.debug("Copying original environment.yml file") - shutil.copyfile(component_old_path / "environment.yml", self.file_paths["environment.yml"]) + shutil.copyfile( + component_old_path / "environment.yml", + self.file_paths["environment.yml"], + ) if (component_old_path / "templates").is_dir(): log.debug("Copying original templates directory") shutil.copytree( - component_old_path / "templates", self.file_paths["environment.yml"].parent / "templates" + component_old_path / "templates", + self.file_paths["environment.yml"].parent / "templates", ) # Create a nextflow.config file if it contains information other than publishDir pytest_dir = Path(self.directory, "tests", self.component_type, self.org, self.component_dir) @@ -447,7 +462,14 @@ def _copy_old_files(self, component_old_path): if len(config_lines) > 11: log.debug("Copying nextflow.config file from pytest tests") with open( - Path(self.directory, self.component_type, self.org, self.component_dir, "tests", "nextflow.config"), + Path( + self.directory, + self.component_type, + self.org, + self.component_dir, + "tests", + "nextflow.config", + ), "w+", ) as ofh: ofh.write(config_lines) diff --git a/nf_core/components/nfcore_component.py b/nf_core/components/nfcore_component.py index bcba068af..5d0baf63d 100644 --- a/nf_core/components/nfcore_component.py +++ b/nf_core/components/nfcore_component.py @@ -17,7 +17,14 @@ class NFCoreComponent: """ def __init__( - self, component_name, repo_url, component_dir, repo_type, base_dir, component_type, remote_component=True + self, + component_name, + repo_url, + component_dir, + repo_type, + base_dir, + component_type, + remote_component=True, ): """ Initialize the object @@ -59,7 +66,6 @@ def __init__( self.org = repo_dir self.nftest_testdir = Path(self.component_dir, "tests") self.nftest_main_nf = 
Path(self.nftest_testdir, "main.nf.test") - self.tags_yml = Path(self.nftest_testdir, "tags.yml") if self.repo_type == "pipeline": patch_fn = f"{self.component_name.replace('/', '-')}.diff" diff --git a/nf_core/module-template/tests/tags.yml b/nf_core/module-template/tests/tags.yml deleted file mode 100644 index e7fac9f5b..000000000 --- a/nf_core/module-template/tests/tags.yml +++ /dev/null @@ -1,2 +0,0 @@ -{{ component_dir }}: - - "modules/{{ org }}/{{ component_dir }}/**" diff --git a/nf_core/modules/lint/module_tests.py b/nf_core/modules/lint/module_tests.py index b2b6c2221..694aee9c1 100644 --- a/nf_core/modules/lint/module_tests.py +++ b/nf_core/modules/lint/module_tests.py @@ -18,7 +18,7 @@ def module_tests(_, module: NFCoreComponent): Lint the tests of a module in ``nf-core/modules`` It verifies that the test directory exists - and contains a ``main.nf.test`` a ``main.nf.test.snap`` and ``tags.yml``. + and contains a ``main.nf.test`` and a ``main.nf.test.snap`` """ repo_dir = module.component_dir.parts[: module.component_dir.parts.index(module.component_name.split("/")[0])][-1] @@ -29,9 +29,21 @@ def module_tests(_, module: NFCoreComponent): module.passed.append(("test_dir_exists", "nf-test test directory exists", module.nftest_testdir)) else: if is_pytest: - module.warned.append(("test_dir_exists", "nf-test directory is missing", module.nftest_testdir)) + module.warned.append( + ( + "test_dir_exists", + "nf-test directory is missing", + module.nftest_testdir, + ) + ) else: - module.failed.append(("test_dir_exists", "nf-test directory is missing", module.nftest_testdir)) + module.failed.append( + ( + "test_dir_exists", + "nf-test directory is missing", + module.nftest_testdir, + ) + ) return # Lint the test main.nf file @@ -39,9 +51,21 @@ def module_tests(_, module: NFCoreComponent): module.passed.append(("test_main_nf_exists", "test `main.nf.test` exists", module.nftest_main_nf)) else: if is_pytest: - module.warned.append(("test_main_nf_exists", "test 
`main.nf.test` does not exist", module.nftest_main_nf)) + module.warned.append( + ( + "test_main_nf_exists", + "test `main.nf.test` does not exist", + module.nftest_main_nf, + ) + ) else: - module.failed.append(("test_main_nf_exists", "test `main.nf.test` does not exist", module.nftest_main_nf)) + module.failed.append( + ( + "test_main_nf_exists", + "test `main.nf.test` does not exist", + module.nftest_main_nf, + ) + ) if module.nftest_main_nf.is_file(): # Check if main.nf.test.snap file exists, if 'snap(' is inside main.nf.test @@ -50,7 +74,11 @@ def module_tests(_, module: NFCoreComponent): snap_file = module.nftest_testdir / "main.nf.test.snap" if snap_file.is_file(): module.passed.append( - ("test_snapshot_exists", "snapshot file `main.nf.test.snap` exists", snap_file) + ( + "test_snapshot_exists", + "snapshot file `main.nf.test.snap` exists", + snap_file, + ) ) # Validate no empty files with open(snap_file) as snap_fh: @@ -133,7 +161,11 @@ def module_tests(_, module: NFCoreComponent): ) else: module.failed.append( - ("test_snapshot_exists", "snapshot file `main.nf.test.snap` does not exist", snap_file) + ( + "test_snapshot_exists", + "snapshot file `main.nf.test.snap` does not exist", + snap_file, + ) ) # Verify that tags are correct. 
main_nf_tags = module._get_main_nf_tags(module.nftest_main_nf) @@ -148,7 +180,13 @@ def module_tests(_, module: NFCoreComponent): if tag not in main_nf_tags: missing_tags.append(tag) if len(missing_tags) == 0: - module.passed.append(("test_main_tags", "Tags adhere to guidelines", module.nftest_main_nf)) + module.passed.append( + ( + "test_main_tags", + "Tags adhere to guidelines", + module.nftest_main_nf, + ) + ) else: module.failed.append( ( @@ -174,41 +212,20 @@ def module_tests(_, module: NFCoreComponent): ) else: module.passed.append( - ("test_pytest_yml", "module with nf-test not in pytest_modules.yml", pytest_yml_path) - ) - except FileNotFoundError: - module.warned.append(("test_pytest_yml", "Could not open pytest_modules.yml file", pytest_yml_path)) - - if module.tags_yml.is_file(): - # Check that tags.yml exists and it has the correct entry - module.passed.append(("test_tags_yml_exists", "file `tags.yml` exists", module.tags_yml)) - with open(module.tags_yml) as fh: - tags_yml = yaml.safe_load(fh) - if module.component_name in tags_yml.keys(): - module.passed.append(("test_tags_yml", "correct entry in tags.yml", module.tags_yml)) - if f"modules/{module.org}/{module.component_name}/**" in tags_yml[module.component_name]: - module.passed.append(("test_tags_yml", "correct path in tags.yml", module.tags_yml)) - else: - module.failed.append( ( - "test_tags_yml", - f"incorrect path in tags.yml, expected `modules/{module.org}/{module.component_name}/**`, got `{tags_yml[module.component_name][0]}`", - module.tags_yml, + "test_pytest_yml", + "module with nf-test not in pytest_modules.yml", + pytest_yml_path, ) ) - else: - module.failed.append( - ( - "test_tags_yml", - f"incorrect key in tags.yml, should be `{module.component_name}`, got `{list(tags_yml.keys())[0]}`.", - module.tags_yml, - ) + except FileNotFoundError: + module.warned.append( + ( + "test_pytest_yml", + "Could not open pytest_modules.yml file", + pytest_yml_path, ) - else: - if is_pytest: - 
module.warned.append(("test_tags_yml_exists", "file `tags.yml` does not exist", module.tags_yml)) - else: - module.failed.append(("test_tags_yml_exists", "file `tags.yml` does not exist", module.tags_yml)) + ) # Check that the old test directory does not exist if not is_pytest: @@ -222,4 +239,10 @@ def module_tests(_, module: NFCoreComponent): ) ) else: - module.passed.append(("test_old_test_dir", "Old pytests don't exist for this module", old_test_dir)) + module.passed.append( + ( + "test_old_test_dir", + "Old pytests don't exist for this module", + old_test_dir, + ) + ) diff --git a/nf_core/subworkflow-template/tests/tags.yml b/nf_core/subworkflow-template/tests/tags.yml deleted file mode 100644 index 35cad3678..000000000 --- a/nf_core/subworkflow-template/tests/tags.yml +++ /dev/null @@ -1,2 +0,0 @@ -subworkflows/{{ component_name_underscore }}: - - subworkflows/{{ org }}/{{ component_dir }}/** diff --git a/nf_core/subworkflows/lint/subworkflow_tests.py b/nf_core/subworkflows/lint/subworkflow_tests.py index cfae2d553..b7ab68509 100644 --- a/nf_core/subworkflows/lint/subworkflow_tests.py +++ b/nf_core/subworkflows/lint/subworkflow_tests.py @@ -18,7 +18,7 @@ def subworkflow_tests(_, subworkflow: NFCoreComponent): Lint the tests of a subworkflow in ``nf-core/modules`` It verifies that the test directory exists - and contains a ``main.nf.test`` a ``main.nf.test.snap`` and ``tags.yml``. 
+ and contains a ``main.nf.test`` and a ``main.nf.test.snap`` Additionally, checks that all included components in test ``main.nf`` are specified in ``test.yml`` """ @@ -26,30 +26,68 @@ def subworkflow_tests(_, subworkflow: NFCoreComponent): repo_dir = subworkflow.component_dir.parts[ : subworkflow.component_dir.parts.index(subworkflow.component_name.split("/")[0]) ][-1] - test_dir = Path(subworkflow.base_dir, "tests", "subworkflows", repo_dir, subworkflow.component_name) + test_dir = Path( + subworkflow.base_dir, + "tests", + "subworkflows", + repo_dir, + subworkflow.component_name, + ) pytest_main_nf = Path(test_dir, "main.nf") is_pytest = pytest_main_nf.is_file() log.debug(f"{pytest_main_nf} is pytest: {is_pytest}") if subworkflow.nftest_testdir.is_dir(): - subworkflow.passed.append(("test_dir_exists", "nf-test test directory exists", subworkflow.nftest_testdir)) + subworkflow.passed.append( + ( + "test_dir_exists", + "nf-test test directory exists", + subworkflow.nftest_testdir, + ) + ) else: if is_pytest: - subworkflow.warned.append(("test_dir_exists", "nf-test directory is missing", subworkflow.nftest_testdir)) + subworkflow.warned.append( + ( + "test_dir_exists", + "nf-test directory is missing", + subworkflow.nftest_testdir, + ) + ) else: - subworkflow.failed.append(("test_dir_exists", "nf-test directory is missing", subworkflow.nftest_testdir)) + subworkflow.failed.append( + ( + "test_dir_exists", + "nf-test directory is missing", + subworkflow.nftest_testdir, + ) + ) return # Lint the test main.nf file if subworkflow.nftest_main_nf.is_file(): - subworkflow.passed.append(("test_main_nf_exists", "test `main.nf.test` exists", subworkflow.nftest_main_nf)) + subworkflow.passed.append( + ( + "test_main_nf_exists", + "test `main.nf.test` exists", + subworkflow.nftest_main_nf, + ) + ) else: if is_pytest: subworkflow.warned.append( - ("test_main_nf_exists", "test `main.nf.test` does not exist", subworkflow.nftest_main_nf) + ( + "test_main_nf_exists", + "test 
`main.nf.test` does not exist", + subworkflow.nftest_main_nf, + ) ) else: subworkflow.failed.append( - ("test_main_nf_exists", "test `main.nf.test` does not exist", subworkflow.nftest_main_nf) + ( + "test_main_nf_exists", + "test `main.nf.test` does not exist", + subworkflow.nftest_main_nf, + ) ) if subworkflow.nftest_main_nf.is_file(): @@ -58,7 +96,13 @@ def subworkflow_tests(_, subworkflow: NFCoreComponent): if "snapshot(" in fh.read(): snap_file = subworkflow.nftest_testdir / "main.nf.test.snap" if snap_file.is_file(): - subworkflow.passed.append(("test_snapshot_exists", "test `main.nf.test.snap` exists", snap_file)) + subworkflow.passed.append( + ( + "test_snapshot_exists", + "test `main.nf.test.snap` exists", + snap_file, + ) + ) # Validate no empty files with open(snap_file) as snap_fh: try: @@ -140,7 +184,11 @@ def subworkflow_tests(_, subworkflow: NFCoreComponent): ) else: subworkflow.failed.append( - ("test_snapshot_exists", "test `main.nf.test.snap` does not exist", snap_file) + ( + "test_snapshot_exists", + "test `main.nf.test.snap` does not exist", + snap_file, + ) ) # Verify that tags are correct. 
main_nf_tags = subworkflow._get_main_nf_tags(subworkflow.nftest_main_nf) @@ -161,7 +209,13 @@ def subworkflow_tests(_, subworkflow: NFCoreComponent): if tag not in main_nf_tags: missing_tags.append(tag) if len(missing_tags) == 0: - subworkflow.passed.append(("test_main_tags", "Tags adhere to guidelines", subworkflow.nftest_main_nf)) + subworkflow.passed.append( + ( + "test_main_tags", + "Tags adhere to guidelines", + subworkflow.nftest_main_nf, + ) + ) else: subworkflow.failed.append( ( @@ -187,43 +241,17 @@ def subworkflow_tests(_, subworkflow: NFCoreComponent): ) else: subworkflow.passed.append( - ("test_pytest_yml", "subworkflow with nf-test not in pytest_modules.yml", pytest_yml_path) + ( + "test_pytest_yml", + "subworkflow with nf-test not in pytest_modules.yml", + pytest_yml_path, + ) ) except FileNotFoundError: - subworkflow.warned.append(("test_pytest_yml", "Could not open pytest_modules.yml file", pytest_yml_path)) - - if subworkflow.tags_yml.is_file(): - # Check tags.yml exists and it has the correct entry - subworkflow.passed.append(("test_tags_yml_exists", "file `tags.yml` exists", subworkflow.tags_yml)) - with open(subworkflow.tags_yml) as fh: - tags_yml = yaml.safe_load(fh) - if "subworkflows/" + subworkflow.component_name in tags_yml.keys(): - subworkflow.passed.append(("test_tags_yml", "correct entry in tags.yml", subworkflow.tags_yml)) - if ( - f"subworkflows/{subworkflow.org}/{subworkflow.component_name}/**" - in tags_yml["subworkflows/" + subworkflow.component_name] - ): - subworkflow.passed.append(("test_tags_yml", "correct path in tags.yml", subworkflow.tags_yml)) - else: - subworkflow.failed.append(("test_tags_yml", "incorrect path in tags.yml", subworkflow.tags_yml)) - else: - subworkflow.failed.append( - ( - "test_tags_yml", - "incorrect entry in tags.yml, should be 'subworkflows/'", - subworkflow.tags_yml, - ) + subworkflow.warned.append( + ( + "test_pytest_yml", + "Could not open pytest_modules.yml file", + pytest_yml_path, ) - else: - if 
is_pytest: - subworkflow.warned.append(("test_tags_yml_exists", "file `tags.yml` does not exist", subworkflow.tags_yml)) - else: - subworkflow.failed.append(("test_tags_yml_exists", "file `tags.yml` does not exist", subworkflow.tags_yml)) - - # Check that the old test directory does not exist - if not is_pytest: - old_test_dir = Path(subworkflow.base_dir, "tests", "subworkflows", subworkflow.component_name) - if old_test_dir.is_dir(): - subworkflow.failed.append(("test_old_test_dir", "old test directory exists", old_test_dir)) - else: - subworkflow.passed.append(("test_old_test_dir", "old test directory does not exist", old_test_dir)) + ) diff --git a/tests/modules/lint.py b/tests/modules/lint.py index 595509de4..6bb74d34e 100644 --- a/tests/modules/lint.py +++ b/tests/modules/lint.py @@ -15,7 +15,12 @@ def setup_patch(pipeline_dir: str, modify_module: bool): install_obj = nf_core.modules.ModuleInstall( - pipeline_dir, prompt=False, force=False, remote_url=GITLAB_URL, branch=PATCH_BRANCH, sha=CORRECT_SHA + pipeline_dir, + prompt=False, + force=False, + remote_url=GITLAB_URL, + branch=PATCH_BRANCH, + sha=CORRECT_SHA, ) # Install the module @@ -112,7 +117,10 @@ def test_modules_lint_patched_modules(self): # to avoid error from try_apply_patch() during linting with set_wd(self.pipeline_dir): module_lint = nf_core.modules.ModuleLint( - dir=self.pipeline_dir, remote_url=GITLAB_URL, branch=PATCH_BRANCH, hide_progress=True + dir=self.pipeline_dir, + remote_url=GITLAB_URL, + branch=PATCH_BRANCH, + hide_progress=True, ) module_lint.lint( print_results=False, @@ -320,10 +328,26 @@ def test_modules_lint_snapshot_file(self): def test_modules_lint_snapshot_file_missing_fail(self): """Test linting a module with a snapshot file missing, which should fail""" - Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test.snap").unlink() + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test.snap", + 
).unlink() module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") - Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test.snap").touch() + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test.snap", + ).touch() assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 assert len(module_lint.warned) >= 0 @@ -332,10 +356,31 @@ def test_modules_lint_snapshot_file_missing_fail(self): def test_modules_lint_snapshot_file_not_needed(self): """Test linting a module which doesn't need a snapshot file by removing the snapshot keyword in the main.nf.test file""" - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test")) as fh: + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test", + ) + ) as fh: content = fh.read() new_content = content.replace("snapshot(", "snap (") - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test"), "w") as fh: + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test", + ), + "w", + ) as fh: fh.write(new_content) module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") @@ -347,12 +392,33 @@ def test_modules_lint_snapshot_file_not_needed(self): def test_modules_environment_yml_file_doesnt_exists(self): """Test linting a module with an environment.yml file""" Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml").rename( - Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml.bak") + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml.bak", + ) ) 
module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") - Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml.bak").rename( - Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml") + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml.bak", + ).rename( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml", + ) ) assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 @@ -371,13 +437,32 @@ def test_modules_environment_yml_file_sorted_correctly(self): def test_modules_environment_yml_file_sorted_incorrectly(self): """Test linting a module with an incorrectly sorted environment.yml file""" - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml")) as fh: + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml", + ) + ) as fh: yaml_content = yaml.safe_load(fh) # Add a new dependency to the environment.yml file and reverse the order yaml_content["dependencies"].append("z") yaml_content["dependencies"].reverse() yaml_content = yaml.dump(yaml_content) - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml"), "w") as fh: + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml", + ), + "w", + ) as fh: fh.write(yaml_content) module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") @@ -389,10 +474,29 @@ def test_modules_environment_yml_file_sorted_incorrectly(self): def test_modules_environment_yml_file_not_array(self): """Test linting a module with an incorrectly formatted environment.yml file""" - with open(Path(self.nfcore_modules, 
"modules", "nf-core", "bpipe", "test", "environment.yml")) as fh: + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml", + ) + ) as fh: yaml_content = yaml.safe_load(fh) yaml_content["dependencies"] = "z" - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml"), "w") as fh: + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml", + ), + "w", + ) as fh: fh.write(yaml.dump(yaml_content)) module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") @@ -404,16 +508,45 @@ def test_modules_environment_yml_file_not_array(self): def test_modules_environment_yml_file_name_mismatch(self): """Test linting a module with a different name in the environment.yml file""" - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml")) as fh: + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml", + ) + ) as fh: yaml_content = yaml.safe_load(fh) yaml_content["name"] = "bpipe-test" - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml"), "w") as fh: + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml", + ), + "w", + ) as fh: fh.write(yaml.dump(yaml_content)) module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") # reset changes yaml_content["name"] = "bpipe_test" - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml"), "w") as fh: + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml", + ), + "w", + ) as fh: fh.write(yaml.dump(yaml_content)) assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in 
module_lint.failed]}" @@ -427,14 +560,20 @@ def test_modules_meta_yml_incorrect_licence_field(self): with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml")) as fh: meta_yml = yaml.safe_load(fh) meta_yml["tools"][0]["bpipe"]["licence"] = "[MIT]" - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml"), "w") as fh: + with open( + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml"), + "w", + ) as fh: fh.write(yaml.dump(meta_yml)) module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") # reset changes meta_yml["tools"][0]["bpipe"]["licence"] = ["MIT"] - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml"), "w") as fh: + with open( + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml"), + "w", + ) as fh: fh.write(yaml.dump(meta_yml)) assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" @@ -489,22 +628,57 @@ def test_modules_meta_yml_incorrect_name(self): meta_yml = yaml.safe_load(fh) meta_yml["name"] = "bpipe/test" # need to make the same change to the environment.yml file - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml")) as fh: + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml", + ) + ) as fh: environment_yml = yaml.safe_load(fh) environment_yml["name"] = "bpipe/test" - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml"), "w") as fh: + with open( + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml"), + "w", + ) as fh: fh.write(yaml.dump(meta_yml)) - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml"), "w") as fh: + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", 
+ "environment.yml", + ), + "w", + ) as fh: fh.write(yaml.dump(environment_yml)) module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") # reset changes meta_yml["name"] = "bpipe_test" - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml"), "w") as fh: + with open( + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml"), + "w", + ) as fh: fh.write(yaml.dump(meta_yml)) environment_yml["name"] = "bpipe_test" - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml"), "w") as fh: + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml", + ), + "w", + ) as fh: fh.write(yaml.dump(environment_yml)) assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" @@ -531,13 +705,45 @@ def test_modules_missing_test_dir(self): def test_modules_missing_test_main_nf(self): """Test linting a module with a missing test/main.nf file""" - Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test").rename( - Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test.bak") + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test", + ).rename( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test.bak", + ) ) module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") - Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test.bak").rename( - Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test") + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test.bak", + ).rename( + Path( + self.nfcore_modules, + 
"modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test", + ) ) assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) >= 0 @@ -545,73 +751,6 @@ def test_modules_missing_test_main_nf(self): assert module_lint.failed[0].lint_test == "test_main_nf_exists" -def test_modules_missing_required_tag(self): - """Test linting a module with a missing required tag""" - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test")) as fh: - content = fh.read() - new_content = content.replace("modules_nfcore", "foo") - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test"), "w") as fh: - fh.write(new_content) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test"), "w") as fh: - fh.write(content) - assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) >= 0 - assert len(module_lint.warned) >= 0 - assert module_lint.failed[0].lint_test == "test_main_tags" - - -def test_modules_missing_tags_yml(self): - """Test linting a module with a missing tags.yml file""" - tags_path = Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml") - tags_path.rename(tags_path.parent / "tags.yml.bak") - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) >= 0 - assert len(module_lint.warned) >= 0 - assert module_lint.failed[0].lint_test == "test_tags_yml_exists" - # cleanup - Path(tags_path.parent / 
"tags.yml.bak").rename(tags_path.parent / "tags.yml") - - -def test_modules_incorrect_tags_yml_key(self): - """Test linting a module with an incorrect key in tags.yml file""" - tags_path = Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml") - with open(tags_path) as fh: - content = fh.read() - new_content = content.replace("bpipe/test:", "bpipe_test:") - with open(tags_path, "w") as fh: - fh.write(new_content) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=True, module="bpipe/test") - with open(tags_path, "w") as fh: - fh.write(content) - assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) >= 0 - assert len(module_lint.warned) >= 0 - assert module_lint.failed[0].lint_test == "test_tags_yml" - - -def test_modules_incorrect_tags_yml_values(self): - """Test linting a module with an incorrect path in tags.yml file""" - tags_path = Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml") - with open(tags_path) as fh: - content = fh.read() - new_content = content.replace("modules/nf-core/bpipe/test/**", "foo") - with open(tags_path, "w") as fh: - fh.write(new_content) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - with open(tags_path, "w") as fh: - fh.write(content) - assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) >= 0 - assert len(module_lint.warned) >= 0 - assert module_lint.failed[0].lint_test == "test_tags_yml" - - def test_modules_unused_pytest_files(self): """Test linting a nf-test module with files still present in `tests/modules/`""" Path(self.nfcore_modules, "tests", "modules", "bpipe", "test").mkdir(parents=True, exist_ok=True) @@ -647,17 +786,45 @@ def test_nftest_failing_linting(self): def 
test_modules_absent_version(self): """Test linting a nf-test module if the versions is absent in the snapshot file `""" - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test.snap")) as fh: + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test.snap", + ) + ) as fh: content = fh.read() new_content = content.replace("versions", "foo") with open( - Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test.snap"), "w" + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test.snap", + ), + "w", ) as fh: fh.write(new_content) module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") with open( - Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test.snap"), "w" + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test.snap", + ), + "w", ) as fh: fh.write(content) assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" @@ -668,7 +835,15 @@ def test_modules_absent_version(self): def test_modules_empty_file_in_snapshot(self): """Test linting a nf-test module with an empty file sha sum in the test snapshot, which should make it fail (if it is not a stub)""" - snap_file = Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test.snap") + snap_file = Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test.snap", + ) snap = json.load(snap_file.open()) content = snap_file.read_text() snap["my test"]["content"][0]["0"] = "test:md5,d41d8cd98f00b204e9800998ecf8427e" @@ -690,7 +865,15 @@ def test_modules_empty_file_in_snapshot(self): def test_modules_empty_file_in_stub_snapshot(self): """Test linting a nf-test module with an empty 
file sha sum in the stub test snapshot, which should make it not fail""" - snap_file = Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test.snap") + snap_file = Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test.snap", + ) snap = json.load(snap_file.open()) content = snap_file.read_text() snap["my_test_stub"] = {"content": [{"0": "test:md5,d41d8cd98f00b204e9800998ecf8427e", "versions": {}}]} diff --git a/tests/subworkflows/lint.py b/tests/subworkflows/lint.py index 73d2452b3..540f421ad 100644 --- a/tests/subworkflows/lint.py +++ b/tests/subworkflows/lint.py @@ -80,10 +80,24 @@ def test_subworkflows_lint_snapshot_file(self): def test_subworkflows_lint_snapshot_file_missing_fail(self): """Test linting a subworkflow with a snapshot file missing, which should fail""" - Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test.snap").unlink() + Path( + self.nfcore_modules, + "subworkflows", + "nf-core", + "test_subworkflow", + "tests", + "main.nf.test.snap", + ).unlink() subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") - Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test.snap").touch() + Path( + self.nfcore_modules, + "subworkflows", + "nf-core", + "test_subworkflow", + "tests", + "main.nf.test.snap", + ).touch() assert len(subworkflow_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" assert len(subworkflow_lint.passed) > 0 assert len(subworkflow_lint.warned) >= 0 @@ -91,18 +105,49 @@ def test_subworkflows_lint_snapshot_file_missing_fail(self): def test_subworkflows_lint_snapshot_file_not_needed(self): """Test linting a subworkflow which doesn't need a snapshot file by removing the snapshot keyword in the main.nf.test file""" - with 
open(Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test")) as fh: + with open( + Path( + self.nfcore_modules, + "subworkflows", + "nf-core", + "test_subworkflow", + "tests", + "main.nf.test", + ) + ) as fh: content = fh.read() new_content = content.replace("snapshot(", "snap (") with open( - Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test"), "w" + Path( + self.nfcore_modules, + "subworkflows", + "nf-core", + "test_subworkflow", + "tests", + "main.nf.test", + ), + "w", ) as fh: fh.write(new_content) - Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test.snap").unlink() + Path( + self.nfcore_modules, + "subworkflows", + "nf-core", + "test_subworkflow", + "tests", + "main.nf.test.snap", + ).unlink() subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") - Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test.snap").touch() + Path( + self.nfcore_modules, + "subworkflows", + "nf-core", + "test_subworkflow", + "tests", + "main.nf.test.snap", + ).touch() assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" assert len(subworkflow_lint.passed) > 0 assert len(subworkflow_lint.warned) >= 0 @@ -112,15 +157,34 @@ def test_subworkflows_lint_less_than_two_modules_warning(self): """Test linting a subworkflow with less than two modules""" self.subworkflow_install.install("bam_stats_samtools") # Remove two modules - with open(Path(self.pipeline_dir, "subworkflows", "nf-core", "bam_stats_samtools", "main.nf")) as fh: + with open( + Path( + self.pipeline_dir, + "subworkflows", + "nf-core", + "bam_stats_samtools", + "main.nf", + ) + ) as fh: content = fh.read() new_content = content.replace( - "include { SAMTOOLS_IDXSTATS } from 
'../../../modules/nf-core/samtools/idxstats/main'", "" + "include { SAMTOOLS_IDXSTATS } from '../../../modules/nf-core/samtools/idxstats/main'", + "", ) new_content = new_content.replace( - "include { SAMTOOLS_FLAGSTAT } from '../../../modules/nf-core/samtools/flagstat/main'", "" + "include { SAMTOOLS_FLAGSTAT } from '../../../modules/nf-core/samtools/flagstat/main'", + "", ) - with open(Path(self.pipeline_dir, "subworkflows", "nf-core", "bam_stats_samtools", "main.nf"), "w") as fh: + with open( + Path( + self.pipeline_dir, + "subworkflows", + "nf-core", + "bam_stats_samtools", + "main.nf", + ), + "w", + ) as fh: fh.write(new_content) subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir) subworkflow_lint.lint(print_results=False, subworkflow="bam_stats_samtools") @@ -135,14 +199,31 @@ def test_subworkflows_lint_less_than_two_modules_warning(self): def test_subworkflows_lint_include_multiple_alias(self): """Test linting a subworkflow with multiple include methods""" self.subworkflow_install.install("bam_stats_samtools") - with open(Path(self.pipeline_dir, "subworkflows", "nf-core", "bam_stats_samtools", "main.nf")) as fh: + with open( + Path( + self.pipeline_dir, + "subworkflows", + "nf-core", + "bam_stats_samtools", + "main.nf", + ) + ) as fh: content = fh.read() new_content = content.replace("SAMTOOLS_STATS", "SAMTOOLS_STATS_1") new_content = new_content.replace( "include { SAMTOOLS_STATS_1 ", "include { SAMTOOLS_STATS as SAMTOOLS_STATS_1; SAMTOOLS_STATS as SAMTOOLS_STATS_2 ", ) - with open(Path(self.pipeline_dir, "subworkflows", "nf-core", "bam_stats_samtools", "main.nf"), "w") as fh: + with open( + Path( + self.pipeline_dir, + "subworkflows", + "nf-core", + "bam_stats_samtools", + "main.nf", + ), + "w", + ) as fh: fh.write(new_content) subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir) @@ -169,10 +250,27 @@ def test_subworkflows_lint_capitalization_fail(self): """Test linting a subworkflow with a 
capitalization fail""" self.subworkflow_install.install("bam_stats_samtools") # change workflow name to lowercase - with open(Path(self.pipeline_dir, "subworkflows", "nf-core", "bam_stats_samtools", "main.nf")) as fh: + with open( + Path( + self.pipeline_dir, + "subworkflows", + "nf-core", + "bam_stats_samtools", + "main.nf", + ) + ) as fh: content = fh.read() new_content = content.replace("workflow BAM_STATS_SAMTOOLS {", "workflow bam_stats_samtools {") - with open(Path(self.pipeline_dir, "subworkflows", "nf-core", "bam_stats_samtools", "main.nf"), "w") as fh: + with open( + Path( + self.pipeline_dir, + "subworkflows", + "nf-core", + "bam_stats_samtools", + "main.nf", + ), + "w", + ) as fh: fh.write(new_content) subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir) subworkflow_lint.lint(print_results=False, subworkflow="bam_stats_samtools") @@ -187,7 +285,14 @@ def test_subworkflows_lint_capitalization_fail(self): def test_subworkflows_absent_version(self): """Test linting a nf-test module if the versions is absent in the snapshot file `""" - snap_file = Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test.snap") + snap_file = Path( + self.nfcore_modules, + "subworkflows", + "nf-core", + "test_subworkflow", + "tests", + "main.nf.test.snap", + ) with open(snap_file) as fh: content = fh.read() new_content = content.replace("versions", "foo") @@ -242,7 +347,14 @@ def test_subworkflows_missing_main_nf(self): def test_subworkflows_empty_file_in_snapshot(self): """Test linting a nf-test subworkflow with an empty file sha sum in the test snapshot, which should make it fail (if it is not a stub)""" - snap_file = Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test.snap") + snap_file = Path( + self.nfcore_modules, + "subworkflows", + "nf-core", + "test_subworkflow", + "tests", + "main.nf.test.snap", + ) snap = json.load(snap_file.open()) content = 
snap_file.read_text() snap["my test"]["content"][0]["0"] = "test:md5,d41d8cd98f00b204e9800998ecf8427e" @@ -264,7 +376,14 @@ def test_subworkflows_empty_file_in_snapshot(self): def test_subworkflows_empty_file_in_stub_snapshot(self): """Test linting a nf-test subworkflow with an empty file sha sum in the stub test snapshot, which should make it not fail""" - snap_file = Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test.snap") + snap_file = Path( + self.nfcore_modules, + "subworkflows", + "nf-core", + "test_subworkflow", + "tests", + "main.nf.test.snap", + ) snap = json.load(snap_file.open()) content = snap_file.read_text() snap["my_test_stub"] = {"content": [{"0": "test:md5,d41d8cd98f00b204e9800998ecf8427e", "versions": {}}]} @@ -282,61 +401,3 @@ def test_subworkflows_empty_file_in_stub_snapshot(self): # reset the file with open(snap_file, "w") as fh: fh.write(content) - - -def test_subworkflows_missing_tags_yml(self): - """Test linting a subworkflow with a missing tags.yml file""" - tags_path = Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "tags.yml") - tags_path.rename(tags_path.parent / "tags.yml.bak") - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) - subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") - - assert len(subworkflow_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" - assert len(subworkflow_lint.passed) >= 0 - assert len(subworkflow_lint.warned) >= 0 - assert subworkflow_lint.failed[0].lint_test == "test_tags_yml_exists" - - # cleanup - Path(tags_path.parent / "tags.yml.bak").rename(tags_path.parent / "tags.yml") - - -def test_subworkflows_incorrect_tags_yml_key(self): - """Test linting a subworkflow with an incorrect key in tags.yml file""" - tags_path = Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "tags.yml") - with open(tags_path) as 
fh: - content = fh.read() - new_content = content.replace("test_subworkflow:", "subworkflow:") - with open(tags_path, "w") as fh: - fh.write(new_content) - module_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) - module_lint.lint(print_results=True, subworkflow="test_subworkflow") - with open(tags_path, "w") as fh: - fh.write(content) - assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) >= 0 - assert len(module_lint.warned) >= 0 - assert module_lint.failed[0].lint_test == "test_tags_yml" - # cleanup - with open(tags_path, "w") as fh: - fh.write(content) - - -def test_subworkflows_incorrect_tags_yml_values(self): - """Test linting a subworkflow with an incorrect path in tags.yml file""" - tags_path = Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "tags.yml") - with open(tags_path) as fh: - content = fh.read() - new_content = content.replace("subworkflows/nf-core/test_subworkflow/**", "foo") - with open(tags_path, "w") as fh: - fh.write(new_content) - module_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, subworkflow="test_subworkflow") - with open(tags_path, "w") as fh: - fh.write(content) - assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) >= 0 - assert len(module_lint.warned) >= 0 - assert module_lint.failed[0].lint_test == "test_tags_yml" - # cleanup - with open(tags_path, "w") as fh: - fh.write(content) From 4cb6fd14910050438de7845cb9a0781519a9f7e2 Mon Sep 17 00:00:00 2001 From: Adam Talbot <12817534+adamrtalbot@users.noreply.github.com> Date: Wed, 22 May 2024 11:37:37 -0400 Subject: [PATCH 156/737] mypy fixes --- tests/test_modules.py | 4 ---- tests/test_subworkflows.py | 18 +++++++++++++----- 2 files changed, 13 insertions(+), 9 deletions(-) diff --git a/tests/test_modules.py 
b/tests/test_modules.py index a122f16b6..f353a7edf 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -202,8 +202,6 @@ def test_modulesrepo_class(self): test_modules_environment_yml_file_not_array, test_modules_environment_yml_file_sorted_correctly, test_modules_environment_yml_file_sorted_incorrectly, - test_modules_incorrect_tags_yml_key, - test_modules_incorrect_tags_yml_values, test_modules_lint_check_process_labels, test_modules_lint_check_url, test_modules_lint_empty, @@ -220,8 +218,6 @@ def test_modulesrepo_class(self): test_modules_meta_yml_incorrect_name, test_modules_meta_yml_input_mismatch, test_modules_meta_yml_output_mismatch, - test_modules_missing_required_tag, - test_modules_missing_tags_yml, test_modules_missing_test_dir, test_modules_missing_test_main_nf, test_modules_unused_pytest_files, diff --git a/tests/test_subworkflows.py b/tests/test_subworkflows.py index 59de1967e..786ba5383 100644 --- a/tests/test_subworkflows.py +++ b/tests/test_subworkflows.py @@ -33,7 +33,14 @@ def create_modules_repo_dummy(tmp_dir): subworkflow_create.create() # Add dummy content to main.nf.test.snap - test_snap_path = Path(root_dir, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test.snap") + test_snap_path = Path( + root_dir, + "subworkflows", + "nf-core", + "test_subworkflow", + "tests", + "main.nf.test.snap", + ) test_snap_path.parent.mkdir(parents=True, exist_ok=True) with open(test_snap_path, "w") as fh: json.dump( @@ -70,7 +77,11 @@ def setUp(self): # Set up install objects self.subworkflow_install = nf_core.subworkflows.SubworkflowInstall(self.pipeline_dir, prompt=False, force=False) self.subworkflow_install_gitlab = nf_core.subworkflows.SubworkflowInstall( - self.pipeline_dir, prompt=False, force=False, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH + self.pipeline_dir, + prompt=False, + force=False, + remote_url=GITLAB_URL, + branch=GITLAB_SUBWORKFLOWS_BRANCH, ) self.subworkflow_install_gitlab_same_org_path = 
nf_core.subworkflows.SubworkflowInstall( self.pipeline_dir, @@ -136,8 +147,6 @@ def tearDown(self): test_subworkflows_absent_version, test_subworkflows_empty_file_in_snapshot, test_subworkflows_empty_file_in_stub_snapshot, - test_subworkflows_incorrect_tags_yml_key, - test_subworkflows_incorrect_tags_yml_values, test_subworkflows_lint, test_subworkflows_lint_capitalization_fail, test_subworkflows_lint_empty, @@ -150,7 +159,6 @@ def tearDown(self): test_subworkflows_lint_snapshot_file, test_subworkflows_lint_snapshot_file_missing_fail, test_subworkflows_lint_snapshot_file_not_needed, - test_subworkflows_missing_tags_yml, ) from .subworkflows.list import ( # type: ignore[misc] test_subworkflows_install_and_list_subworkflows, From 8eb4f417d09540ab0703751de8d8dbbdbeecc5ed Mon Sep 17 00:00:00 2001 From: Adam Talbot <12817534+adamrtalbot@users.noreply.github.com> Date: Wed, 22 May 2024 11:39:51 -0400 Subject: [PATCH 157/737] re-add checks for pytest dirs existing --- nf_core/subworkflows/lint/subworkflow_tests.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/nf_core/subworkflows/lint/subworkflow_tests.py b/nf_core/subworkflows/lint/subworkflow_tests.py index b7ab68509..335789cb7 100644 --- a/nf_core/subworkflows/lint/subworkflow_tests.py +++ b/nf_core/subworkflows/lint/subworkflow_tests.py @@ -255,3 +255,11 @@ def subworkflow_tests(_, subworkflow: NFCoreComponent): pytest_yml_path, ) ) + + # Check that the old test directory does not exist + if not is_pytest: + old_test_dir = Path(subworkflow.base_dir, "tests", "subworkflows", subworkflow.component_name) + if old_test_dir.is_dir(): + subworkflow.failed.append(("test_old_test_dir", "old test directory exists", old_test_dir)) + else: + subworkflow.passed.append(("test_old_test_dir", "old test directory does not exist", old_test_dir)) From 95976e5cf2c9ba5757a51574773c18c16bcc708a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Wed, 22 May 2024 16:51:57 +0000 Subject: [PATCH 158/737] 
remove test_tags_yml from pytest test --- tests/modules/lint.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/modules/lint.py b/tests/modules/lint.py index 6bb74d34e..e1a4e27ff 100644 --- a/tests/modules/lint.py +++ b/tests/modules/lint.py @@ -774,14 +774,13 @@ def test_nftest_failing_linting(self): module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) module_lint.lint(print_results=False, module="kallisto/quant") - assert len(module_lint.failed) == 4, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.failed) == 3, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) >= 0 assert len(module_lint.warned) >= 0 assert module_lint.failed[0].lint_test == "environment_yml_valid" assert module_lint.failed[1].lint_test == "meta_yml_valid" assert module_lint.failed[2].lint_test == "test_main_tags" assert "kallisto/index" in module_lint.failed[2].message - assert module_lint.failed[3].lint_test == "test_tags_yml" def test_modules_absent_version(self): From 2220a743aafea764f97b8a7b186888e689b7b7b3 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Thu, 23 May 2024 12:30:23 +0200 Subject: [PATCH 159/737] update minimal textual version and snapshots accordingly --- requirements.txt | 2 +- tests/__snapshots__/test_create_app.ambr | 3031 +++++++++++----------- 2 files changed, 1516 insertions(+), 1517 deletions(-) diff --git a/requirements.txt b/requirements.txt index acf30f491..44241e0d9 100644 --- a/requirements.txt +++ b/requirements.txt @@ -21,6 +21,6 @@ requests_cache rich-click>=1.6.1 rich>=13.3.1 tabulate -textual>=0.47.1 +textual>=0.63.1 trogon pdiff diff --git a/tests/__snapshots__/test_create_app.ambr b/tests/__snapshots__/test_create_app.ambr index c486ec4f8..f5a519b13 100644 --- a/tests/__snapshots__/test_create_app.ambr +++ b/tests/__snapshots__/test_create_app.ambr @@ -22,253 +22,254 @@ font-weight: 700; } - .terminal-1527309810-matrix { + 
.terminal-3833894853-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1527309810-title { + .terminal-3833894853-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1527309810-r1 { fill: #c5c8c6 } - .terminal-1527309810-r2 { fill: #e3e3e3 } - .terminal-1527309810-r3 { fill: #989898 } - .terminal-1527309810-r4 { fill: #e1e1e1 } - .terminal-1527309810-r5 { fill: #121212 } - .terminal-1527309810-r6 { fill: #0053aa } - .terminal-1527309810-r7 { fill: #dde8f3;font-weight: bold } - .terminal-1527309810-r8 { fill: #a5a5a5;font-style: italic; } - .terminal-1527309810-r9 { fill: #1e1e1e } - .terminal-1527309810-r10 { fill: #008139 } - .terminal-1527309810-r11 { fill: #e2e2e2 } - .terminal-1527309810-r12 { fill: #787878 } - .terminal-1527309810-r13 { fill: #b93c5b } - .terminal-1527309810-r14 { fill: #454a50 } - .terminal-1527309810-r15 { fill: #7ae998 } - .terminal-1527309810-r16 { fill: #e2e3e3;font-weight: bold } - .terminal-1527309810-r17 { fill: #0a180e;font-weight: bold } - .terminal-1527309810-r18 { fill: #000000 } - .terminal-1527309810-r19 { fill: #ddedf9 } + .terminal-3833894853-r1 { fill: #c5c8c6 } + .terminal-3833894853-r2 { fill: #e3e3e3 } + .terminal-3833894853-r3 { fill: #989898 } + .terminal-3833894853-r4 { fill: #e1e1e1 } + .terminal-3833894853-r5 { fill: #4ebf71;font-weight: bold } + .terminal-3833894853-r6 { fill: #a5a5a5;font-style: italic; } + .terminal-3833894853-r7 { fill: #1e1e1e } + .terminal-3833894853-r8 { fill: #008139 } + .terminal-3833894853-r9 { fill: #121212 } + .terminal-3833894853-r10 { fill: #e2e2e2 } + .terminal-3833894853-r11 { fill: #787878 } + .terminal-3833894853-r12 { fill: #b93c5b } + .terminal-3833894853-r13 { fill: #454a50 } + .terminal-3833894853-r14 { fill: #7ae998 } + .terminal-3833894853-r15 { fill: #e2e3e3;font-weight: bold } + .terminal-3833894853-r16 { fill: #0a180e;font-weight: bold } + .terminal-3833894853-r17 { 
fill: #000000 } + .terminal-3833894853-r18 { fill: #fea62b;font-weight: bold } + .terminal-3833894853-r19 { fill: #a7a9ab } + .terminal-3833894853-r20 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Basic details - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - - GitHub organisationWorkflow name - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - nf-corePipeline Name - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - A short description of your pipeline. - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Description - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - Name of the main author / authors - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Author(s) - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackNext - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Basic details + + + + GitHub organisationWorkflow name + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + nf-corePipeline Name + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + A 
short description of your pipeline. + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Description + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + Name of the main author / authors + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Author(s) + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackNext + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + + +  d Toggle dark mode q Quit @@ -298,256 +299,257 @@ font-weight: 700; } - .terminal-2230840552-matrix { + .terminal-170499771-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2230840552-title { + .terminal-170499771-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2230840552-r1 { fill: #c5c8c6 } - .terminal-2230840552-r2 { fill: #e3e3e3 } - .terminal-2230840552-r3 { fill: #989898 } - .terminal-2230840552-r4 { fill: #e1e1e1 } - .terminal-2230840552-r5 { fill: #121212 } - .terminal-2230840552-r6 { fill: #0053aa } - .terminal-2230840552-r7 { fill: #dde8f3;font-weight: bold } - .terminal-2230840552-r8 { fill: #a5a5a5;font-style: italic; } - .terminal-2230840552-r9 { fill: #1e1e1e } - .terminal-2230840552-r10 { fill: #0f4e2a } - .terminal-2230840552-r11 { fill: #0178d4 } - .terminal-2230840552-r12 { fill: #a7a7a7 } - .terminal-2230840552-r13 { fill: #787878 } - .terminal-2230840552-r14 { fill: #e2e2e2 } - .terminal-2230840552-r15 { fill: #b93c5b } - .terminal-2230840552-r16 { fill: #454a50 } - .terminal-2230840552-r17 { fill: #7ae998 } - .terminal-2230840552-r18 { fill: #e2e3e3;font-weight: bold } - .terminal-2230840552-r19 { fill: #0a180e;font-weight: bold } - .terminal-2230840552-r20 { fill: #000000 } - .terminal-2230840552-r21 { fill: 
#008139 } - .terminal-2230840552-r22 { fill: #ddedf9 } + .terminal-170499771-r1 { fill: #c5c8c6 } + .terminal-170499771-r2 { fill: #e3e3e3 } + .terminal-170499771-r3 { fill: #989898 } + .terminal-170499771-r4 { fill: #e1e1e1 } + .terminal-170499771-r5 { fill: #4ebf71;font-weight: bold } + .terminal-170499771-r6 { fill: #a5a5a5;font-style: italic; } + .terminal-170499771-r7 { fill: #1e1e1e } + .terminal-170499771-r8 { fill: #0f4e2a } + .terminal-170499771-r9 { fill: #0178d4 } + .terminal-170499771-r10 { fill: #a7a7a7 } + .terminal-170499771-r11 { fill: #787878 } + .terminal-170499771-r12 { fill: #e2e2e2 } + .terminal-170499771-r13 { fill: #b93c5b } + .terminal-170499771-r14 { fill: #121212 } + .terminal-170499771-r15 { fill: #454a50 } + .terminal-170499771-r16 { fill: #7ae998 } + .terminal-170499771-r17 { fill: #e2e3e3;font-weight: bold } + .terminal-170499771-r18 { fill: #0a180e;font-weight: bold } + .terminal-170499771-r19 { fill: #000000 } + .terminal-170499771-r20 { fill: #008139 } + .terminal-170499771-r21 { fill: #fea62b;font-weight: bold } + .terminal-170499771-r22 { fill: #a7a9ab } + .terminal-170499771-r23 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Basic details - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - - GitHub organisationWorkflow name - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - nf-corePipeline Name - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - A short description of your pipeline. 
- ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Description - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - Name of the main author / authors - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Author(s) - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackNext - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Basic details + + + + GitHub organisationWorkflow name + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + nf-corePipeline Name + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + A short description of your pipeline. 
+ ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Description + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + Name of the main author / authors + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Author(s) + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackNext + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + + +  d Toggle dark mode q Quit @@ -577,253 +579,251 @@ font-weight: 700; } - .terminal-3444045345-matrix { + .terminal-2364166316-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3444045345-title { + .terminal-2364166316-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3444045345-r1 { fill: #c5c8c6 } - .terminal-3444045345-r2 { fill: #e3e3e3 } - .terminal-3444045345-r3 { fill: #989898 } - .terminal-3444045345-r4 { fill: #e1e1e1 } - .terminal-3444045345-r5 { fill: #121212 } - .terminal-3444045345-r6 { fill: #0053aa } - .terminal-3444045345-r7 { fill: #dde8f3;font-weight: bold } - .terminal-3444045345-r8 { fill: #24292f } - .terminal-3444045345-r9 { fill: #e2e3e3;font-weight: bold } - .terminal-3444045345-r10 { fill: #e2e3e3;font-weight: bold;font-style: italic; } - .terminal-3444045345-r11 { fill: #4ebf71;font-weight: bold } - .terminal-3444045345-r12 { fill: #e1e1e1;font-style: italic; } - .terminal-3444045345-r13 { fill: #7ae998 } - .terminal-3444045345-r14 { fill: #507bb3 } - .terminal-3444045345-r15 { fill: #008139 } - .terminal-3444045345-r16 { fill: #dde6ed;font-weight: bold } - .terminal-3444045345-r17 { fill: #001541 } - .terminal-3444045345-r18 { fill: #e1e1e1;text-decoration: underline; } - .terminal-3444045345-r19 { fill: #ddedf9 } + .terminal-2364166316-r1 { 
fill: #c5c8c6 } + .terminal-2364166316-r2 { fill: #e3e3e3 } + .terminal-2364166316-r3 { fill: #989898 } + .terminal-2364166316-r4 { fill: #e1e1e1 } + .terminal-2364166316-r5 { fill: #4ebf71;font-weight: bold } + .terminal-2364166316-r6 { fill: #4ebf71;text-decoration: underline; } + .terminal-2364166316-r7 { fill: #4ebf71;font-style: italic;;text-decoration: underline; } + .terminal-2364166316-r8 { fill: #e1e1e1;font-style: italic; } + .terminal-2364166316-r9 { fill: #7ae998 } + .terminal-2364166316-r10 { fill: #008139 } + .terminal-2364166316-r11 { fill: #507bb3 } + .terminal-2364166316-r12 { fill: #dde6ed;font-weight: bold } + .terminal-2364166316-r13 { fill: #001541 } + .terminal-2364166316-r14 { fill: #e1e1e1;text-decoration: underline; } + .terminal-2364166316-r15 { fill: #fea62b;font-weight: bold } + .terminal-2364166316-r16 { fill: #a7a9ab } + .terminal-2364166316-r17 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Choose pipeline type - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -        Choose "nf-core" if:              Choose "Custom" if:         - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - ● You want your pipeline to be part of● Your pipeline will never be part of  - the nf-core communitynf-core - ● You think that there's an outside ● You want full control over all - chance that it ever could be part offeatures that are included from the  - nf-coretemplate (including those that are  - mandatory for nf-core). 
- - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - nf-core▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁Custom - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -                                  What's the difference?                                  - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Choosing "nf-core" effectively pre-selects the following template features: - - ● GitHub Actions continuous-integration configuration files: - ▪ Pipeline test runs: Small-scale (GitHub) and large-scale (AWS) - ▪ Code formatting checks with Prettier - ▪ Auto-fix linting functionality using @nf-core-bot - ▪ Marking old issues as stale - ● Inclusion of shared nf-core configuration profiles - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Choose pipeline type + + + + + Choose "nf-core" if:Choose "Custom" if: + + ● You want your pipeline to be part of the ● Your pipeline will never be part of  + nf-core communitynf-core + ● You think that there's an outside chance ● You want full control over all features  + that it ever could be part of nf-corethat are included from the template  + (including those that are mandatory for  + nf-core). + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + nf-core + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Custom + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + What's the difference? 
+ + Choosing "nf-core" effectively pre-selects the following template features: + + ● GitHub Actions continuous-integration configuration files: + ▪ Pipeline test runs: Small-scale (GitHub) and large-scale (AWS) + ▪ Code formatting checks with Prettier + ▪ Auto-fix linting functionality using @nf-core-bot + ▪ Marking old issues as stale + ● Inclusion of shared nf-core configuration profiles + + + + + + + + + + + + + + +  d Toggle dark mode q Quit @@ -853,257 +853,257 @@ font-weight: 700; } - .terminal-3071202289-matrix { + .terminal-3598234483-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3071202289-title { + .terminal-3598234483-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3071202289-r1 { fill: #c5c8c6 } - .terminal-3071202289-r2 { fill: #e3e3e3 } - .terminal-3071202289-r3 { fill: #989898 } - .terminal-3071202289-r4 { fill: #e1e1e1 } - .terminal-3071202289-r5 { fill: #121212 } - .terminal-3071202289-r6 { fill: #0053aa } - .terminal-3071202289-r7 { fill: #dde8f3;font-weight: bold } - .terminal-3071202289-r8 { fill: #1e1e1e } - .terminal-3071202289-r9 { fill: #0178d4 } - .terminal-3071202289-r10 { fill: #454a50 } - .terminal-3071202289-r11 { fill: #e2e2e2 } - .terminal-3071202289-r12 { fill: #808080 } - .terminal-3071202289-r13 { fill: #e2e3e3;font-weight: bold } - .terminal-3071202289-r14 { fill: #000000 } - .terminal-3071202289-r15 { fill: #e4e4e4 } - .terminal-3071202289-r16 { fill: #14191f } - .terminal-3071202289-r17 { fill: #507bb3 } - .terminal-3071202289-r18 { fill: #dde6ed;font-weight: bold } - .terminal-3071202289-r19 { fill: #001541 } - .terminal-3071202289-r20 { fill: #7ae998 } - .terminal-3071202289-r21 { fill: #0a180e;font-weight: bold } - .terminal-3071202289-r22 { fill: #008139 } - .terminal-3071202289-r23 { fill: #ddedf9 } + .terminal-3598234483-r1 { fill: #c5c8c6 } + .terminal-3598234483-r2 { fill: #e3e3e3 } + 
.terminal-3598234483-r3 { fill: #989898 } + .terminal-3598234483-r4 { fill: #e1e1e1 } + .terminal-3598234483-r5 { fill: #4ebf71;font-weight: bold } + .terminal-3598234483-r6 { fill: #1e1e1e } + .terminal-3598234483-r7 { fill: #0178d4 } + .terminal-3598234483-r8 { fill: #454a50 } + .terminal-3598234483-r9 { fill: #e2e2e2 } + .terminal-3598234483-r10 { fill: #808080 } + .terminal-3598234483-r11 { fill: #e2e3e3;font-weight: bold } + .terminal-3598234483-r12 { fill: #000000 } + .terminal-3598234483-r13 { fill: #e4e4e4 } + .terminal-3598234483-r14 { fill: #14191f } + .terminal-3598234483-r15 { fill: #507bb3 } + .terminal-3598234483-r16 { fill: #dde6ed;font-weight: bold } + .terminal-3598234483-r17 { fill: #001541 } + .terminal-3598234483-r18 { fill: #7ae998 } + .terminal-3598234483-r19 { fill: #0a180e;font-weight: bold } + .terminal-3598234483-r20 { fill: #008139 } + .terminal-3598234483-r21 { fill: #fea62b;font-weight: bold } + .terminal-3598234483-r22 { fill: #a7a9ab } + .terminal-3598234483-r23 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Template features - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Use reference The pipeline Hide help - ▁▁▁▁▁▁▁▁genomeswill be ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - configured to  - use a copy of  - the most common  - reference genome - files from  - iGenomes - - - Nf-core pipelines are configured to use a copy of the most  - common reference genome files. - - By selecting this option, your pipeline will include a  - configuration file specifying the paths to these files. 
- - The required code to use these files will also be included in  - the template. When the pipeline user provides an appropriate  - genome key, the pipeline will automatically download the ▂▂ - required reference files. - - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Add Github CI The pipeline Show help▅▅ - ▁▁▁▁▁▁▁▁testswill include ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - several GitHub  - actions for  - Continuous  - Integration (CI) - testing - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Add Github The README.md Show help - ▁▁▁▁▁▁▁▁badgesfile of the ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - pipeline will  - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackContinue - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Template features + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Use reference The pipeline will Hide help + ▁▁▁▁▁▁▁▁genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + use a copy of the  + most common  + reference genome  + files from  + iGenomes + + + Nf-core pipelines are configured to use a copy of the most common  + reference genome files. + + By selecting this option, your pipeline will include a configuration + file specifying the paths to these files. + + The required code to use these files will also be included in the  + template. When the pipeline user provides an appropriate genome key, + the pipeline will automatically download the required reference ▂▂ + files. 
+ + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Add Github CI The pipeline will Show help + ▁▁▁▁▁▁▁▁testsinclude several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + GitHub actions for + Continuous  + Integration (CI)  + testing▄▄ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Add Github badgesThe README.md fileShow help + ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + will include  + GitHub badges + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackContinue + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  d Toggle dark mode q Quit @@ -1133,252 +1133,252 @@ font-weight: 700; } - .terminal-1456849374-matrix { + .terminal-1869771697-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1456849374-title { + .terminal-1869771697-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1456849374-r1 { fill: #c5c8c6 } - .terminal-1456849374-r2 { fill: #e3e3e3 } - .terminal-1456849374-r3 { fill: #989898 } - .terminal-1456849374-r4 { fill: #e1e1e1 } - .terminal-1456849374-r5 { fill: #121212 } - .terminal-1456849374-r6 { fill: #0053aa } - .terminal-1456849374-r7 { fill: #dde8f3;font-weight: bold } - .terminal-1456849374-r8 { fill: #a5a5a5;font-style: italic; } - .terminal-1456849374-r9 { fill: #1e1e1e } - .terminal-1456849374-r10 { fill: #008139 } - .terminal-1456849374-r11 { fill: #e2e2e2 } - .terminal-1456849374-r12 { fill: #b93c5b } - .terminal-1456849374-r13 { fill: #454a50 } - .terminal-1456849374-r14 { fill: #7ae998 } - .terminal-1456849374-r15 { fill: #e2e3e3;font-weight: bold } - .terminal-1456849374-r16 { fill: #0a180e;font-weight: bold } - .terminal-1456849374-r17 { fill: #000000 } - .terminal-1456849374-r18 { fill: #ddedf9 } + .terminal-1869771697-r1 { fill: #c5c8c6 } + .terminal-1869771697-r2 { fill: #e3e3e3 } + .terminal-1869771697-r3 { fill: #989898 } + .terminal-1869771697-r4 { fill: #e1e1e1 } + .terminal-1869771697-r5 { fill: #4ebf71;font-weight: bold } + .terminal-1869771697-r6 { fill: #a5a5a5;font-style: 
italic; } + .terminal-1869771697-r7 { fill: #1e1e1e } + .terminal-1869771697-r8 { fill: #008139 } + .terminal-1869771697-r9 { fill: #e2e2e2 } + .terminal-1869771697-r10 { fill: #b93c5b } + .terminal-1869771697-r11 { fill: #454a50 } + .terminal-1869771697-r12 { fill: #7ae998 } + .terminal-1869771697-r13 { fill: #e2e3e3;font-weight: bold } + .terminal-1869771697-r14 { fill: #0a180e;font-weight: bold } + .terminal-1869771697-r15 { fill: #000000 } + .terminal-1869771697-r16 { fill: #fea62b;font-weight: bold } + .terminal-1869771697-r17 { fill: #a7a9ab } + .terminal-1869771697-r18 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Final details - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - - First version of the pipelinePath to the output directory where the pipeline  - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔will be created - 1.0.0dev▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁. - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackFinish - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Final details + + + + First version of the pipelinePath to the output directory where the pipeline  + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔will be created + 1.0.0dev▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁. 
+ ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackFinish + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +  d Toggle dark mode q Quit @@ -1408,260 +1408,262 @@ font-weight: 700; } - .terminal-436990287-matrix { + .terminal-1242773313-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-436990287-title { + .terminal-1242773313-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-436990287-r1 { fill: #c5c8c6 } - .terminal-436990287-r2 { fill: #e3e3e3 } - .terminal-436990287-r3 { fill: #989898 } - .terminal-436990287-r4 { fill: #e1e1e1 } - .terminal-436990287-r5 { fill: #121212 } - .terminal-436990287-r6 { fill: #0053aa } - .terminal-436990287-r7 { fill: #dde8f3;font-weight: bold } - .terminal-436990287-r8 { fill: #a5a5a5;font-style: italic; } - .terminal-436990287-r9 { fill: #1e1e1e } - .terminal-436990287-r10 { fill: #008139 } - .terminal-436990287-r11 { fill: #454a50 } - .terminal-436990287-r12 { fill: #787878 } - .terminal-436990287-r13 { fill: #e2e2e2 } - .terminal-436990287-r14 { fill: #e2e3e3;font-weight: bold } - .terminal-436990287-r15 { fill: #000000 } - .terminal-436990287-r16 { fill: #b93c5b } - .terminal-436990287-r17 { fill: #4ebf71 } - .terminal-436990287-r18 { fill: #e2e2e2;font-weight: bold } - .terminal-436990287-r19 { fill: #969696;font-weight: bold } - .terminal-436990287-r20 { fill: #808080 } - .terminal-436990287-r21 { fill: #7ae998 } - .terminal-436990287-r22 { fill: #507bb3 } - .terminal-436990287-r23 { fill: #0a180e;font-weight: bold } - .terminal-436990287-r24 { fill: #dde6ed;font-weight: bold } - .terminal-436990287-r25 { fill: #001541 } - .terminal-436990287-r26 { fill: #ddedf9 } + .terminal-1242773313-r1 { fill: #c5c8c6 } + .terminal-1242773313-r2 { fill: #e3e3e3 } + .terminal-1242773313-r3 { fill: #989898 } + .terminal-1242773313-r4 { fill: 
#e1e1e1 } + .terminal-1242773313-r5 { fill: #4ebf71;font-weight: bold } + .terminal-1242773313-r6 { fill: #18954b } + .terminal-1242773313-r7 { fill: #e2e2e2 } + .terminal-1242773313-r8 { fill: #e2e2e2;font-style: italic; } + .terminal-1242773313-r9 { fill: #e2e2e2;font-style: italic;;text-decoration: underline; } + .terminal-1242773313-r10 { fill: #a5a5a5;font-style: italic; } + .terminal-1242773313-r11 { fill: #1e1e1e } + .terminal-1242773313-r12 { fill: #008139 } + .terminal-1242773313-r13 { fill: #454a50 } + .terminal-1242773313-r14 { fill: #787878 } + .terminal-1242773313-r15 { fill: #e2e3e3;font-weight: bold } + .terminal-1242773313-r16 { fill: #000000 } + .terminal-1242773313-r17 { fill: #b93c5b } + .terminal-1242773313-r18 { fill: #e2e2e2;font-weight: bold } + .terminal-1242773313-r19 { fill: #969696;font-weight: bold } + .terminal-1242773313-r20 { fill: #808080 } + .terminal-1242773313-r21 { fill: #7ae998 } + .terminal-1242773313-r22 { fill: #507bb3 } + .terminal-1242773313-r23 { fill: #0a180e;font-weight: bold } + .terminal-1242773313-r24 { fill: #dde6ed;font-weight: bold } + .terminal-1242773313-r25 { fill: #001541 } + .terminal-1242773313-r26 { fill: #fea62b;font-weight: bold } + .terminal-1242773313-r27 { fill: #a7a9ab } + .terminal-1242773313-r28 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Create GitHub repository - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Now that we have created a new pipeline locally, we can create a new GitHub repository and  - push the code to it. 
- - - - Your GitHub usernameYour GitHub personal access token - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔for login.▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - GitHub username▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔Show - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁GitHub token▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - The name of the organisation where the The name of the new GitHub repository - GitHub repo will be cretaed▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔mypipeline - nf-core▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - ⚠️ You can't create a repository directly in the nf-core organisation. - Please create the pipeline repo to an organisation where you have access or use your user - account. A core-team member will be able to transfer the repo to nf-core once the  - development has started. - - 💡 Your GitHub user account will be used by default if nf-core is given as the org name. - - - ▔▔▔▔▔▔▔▔Private - Select to make the new GitHub repo private. - ▁▁▁▁▁▁▁▁ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackCreate GitHub repoFinish without creating a repo - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Create GitHub repository + + Now that we have created a new pipeline locally, we can create a new GitHub repository and push  + the code to it. 
+ + 💡 Found GitHub username in local GitHub CLI config + + + + Your GitHub usernameYour GitHub personal access token + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔for login.▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + GitHub username▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔Show + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁GitHub token▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + The name of the organisation where the The name of the new GitHub repository + GitHub repo will be cretaed▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔mypipeline + nf-core▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + ⚠️ You can't create a repository directly in the nf-core organisation. + Please create the pipeline repo to an organisation where you have access or use your user  + account. A core-team member will be able to transfer the repo to nf-core once the development + has started. + + 💡 Your GitHub user account will be used by default if nf-core is given as the org name. + + + ▔▔▔▔▔▔▔▔Private + Select to make the new GitHub repo private. 
+ ▁▁▁▁▁▁▁▁ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackCreate GitHub repoFinish without creating a repo + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + +  d Toggle dark mode q Quit @@ -1691,255 +1693,254 @@ font-weight: 700; } - .terminal-2484211597-matrix { + .terminal-396289429-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2484211597-title { + .terminal-396289429-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2484211597-r1 { fill: #c5c8c6 } - .terminal-2484211597-r2 { fill: #e3e3e3 } - .terminal-2484211597-r3 { fill: #989898 } - .terminal-2484211597-r4 { fill: #e1e1e1 } - .terminal-2484211597-r5 { fill: #121212 } - .terminal-2484211597-r6 { fill: #0053aa } - .terminal-2484211597-r7 { fill: #dde8f3;font-weight: bold } - .terminal-2484211597-r8 { fill: #98e024 } - .terminal-2484211597-r9 { fill: #626262 } - .terminal-2484211597-r10 { fill: #9d65ff } - .terminal-2484211597-r11 { fill: #fd971f } - .terminal-2484211597-r12 { fill: #4ebf71;font-weight: bold } - .terminal-2484211597-r13 { fill: #d2d2d2 } - .terminal-2484211597-r14 { fill: #82aaff } - .terminal-2484211597-r15 { fill: #eeffff } - .terminal-2484211597-r16 { fill: #4ebf71 } - .terminal-2484211597-r17 { fill: #e2e2e2 } - .terminal-2484211597-r18 { fill: #969696;font-weight: bold } - .terminal-2484211597-r19 { fill: #7ae998 } - .terminal-2484211597-r20 { fill: #008139 } - .terminal-2484211597-r21 { fill: #ddedf9 } + .terminal-396289429-r1 { fill: #c5c8c6 } + .terminal-396289429-r2 { fill: #e3e3e3 } + .terminal-396289429-r3 { fill: #989898 } + .terminal-396289429-r4 { fill: #e1e1e1 } + .terminal-396289429-r5 { fill: #4ebf71;font-weight: bold } + .terminal-396289429-r6 { fill: #98e024 } + .terminal-396289429-r7 { fill: #626262 } + .terminal-396289429-r8 { fill: #9d65ff } + .terminal-396289429-r9 { fill: #fd971f } + 
.terminal-396289429-r10 { fill: #d2d2d2 } + .terminal-396289429-r11 { fill: #82aaff } + .terminal-396289429-r12 { fill: #eeffff } + .terminal-396289429-r13 { fill: #18954b } + .terminal-396289429-r14 { fill: #e2e2e2 } + .terminal-396289429-r15 { fill: #969696;font-weight: bold } + .terminal-396289429-r16 { fill: #7ae998 } + .terminal-396289429-r17 { fill: #008139 } + .terminal-396289429-r18 { fill: #fea62b;font-weight: bold } + .terminal-396289429-r19 { fill: #a7a9ab } + .terminal-396289429-r20 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - HowTo create a GitHub repository - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - -                                           ,--./,-. -           ___     __   __   __   ___     /,-._.--~\  -     |\ | |__  __ /  ` /  \ |__) |__         }  { -     | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                           `._,._,' - - If you would like to create the GitHub repository later, you can do it manually by following - these steps: - -  1. Create a new GitHub repository -  2. Add the remote to your local repository: - - - cd<pipeline_directory> - gitremoteaddorigingit@github.com:<username>/<repo_name>.git - - -  3. Push the code to the remote: - - - gitpush--allorigin - - - 💡 Note the --all flag: this is needed to push all branches to the remote. 
- - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Close - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + HowTo create a GitHub repository + + + +                                           ,--./,-. +           ___     __   __   __   ___     /,-._.--~\  +     |\ | |__  __ /  ` /  \ |__) |__         }  { +     | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                           `._,._,' + + If you would like to create the GitHub repository later, you can do it manually by following  + these steps: + +  1. Create a new GitHub repository +  2. Add the remote to your local repository: + + + cd<pipeline_directory> + gitremoteaddorigingit@github.com:<username>/<repo_name>.git + + +  3. Push the code to the remote: + + + gitpush--allorigin + + + 💡 Note the --all flag: this is needed to push all branches to the remote. + + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Close + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + +  d Toggle dark mode q Quit @@ -1969,248 +1970,247 @@ font-weight: 700; } - .terminal-4165331380-matrix { + .terminal-3492397142-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-4165331380-title { + .terminal-3492397142-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-4165331380-r1 { fill: #c5c8c6 } - .terminal-4165331380-r2 { fill: #e3e3e3 } - .terminal-4165331380-r3 { fill: #989898 } - .terminal-4165331380-r4 { fill: #e1e1e1 } - .terminal-4165331380-r5 { fill: #121212 } - .terminal-4165331380-r6 { fill: #0053aa } - .terminal-4165331380-r7 { fill: #dde8f3;font-weight: bold } - .terminal-4165331380-r8 { fill: #7ae998 } - .terminal-4165331380-r9 { fill: #507bb3 } - .terminal-4165331380-r10 { fill: #4ebf71;font-weight: bold } - .terminal-4165331380-r11 { fill: #dde6ed;font-weight: bold } - .terminal-4165331380-r12 { fill: #008139 } - .terminal-4165331380-r13 { 
fill: #001541 } - .terminal-4165331380-r14 { fill: #ddedf9 } + .terminal-3492397142-r1 { fill: #c5c8c6 } + .terminal-3492397142-r2 { fill: #e3e3e3 } + .terminal-3492397142-r3 { fill: #989898 } + .terminal-3492397142-r4 { fill: #e1e1e1 } + .terminal-3492397142-r5 { fill: #4ebf71;font-weight: bold } + .terminal-3492397142-r6 { fill: #7ae998 } + .terminal-3492397142-r7 { fill: #507bb3 } + .terminal-3492397142-r8 { fill: #dde6ed;font-weight: bold } + .terminal-3492397142-r9 { fill: #008139 } + .terminal-3492397142-r10 { fill: #001541 } + .terminal-3492397142-r11 { fill: #fea62b;font-weight: bold } + .terminal-3492397142-r12 { fill: #a7a9ab } + .terminal-3492397142-r13 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Create GitHub repository - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - After creating the pipeline template locally, we can create a GitHub repository and push the - code to it. - - Do you want to create a GitHub repository? - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Create GitHub repoFinish without creating a repo - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Create GitHub repository + + + After creating the pipeline template locally, we can create a GitHub repository and push the  + code to it. + + Do you want to create a GitHub repository? 
+ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Create GitHub repoFinish without creating a repo + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +  d Toggle dark mode q Quit @@ -2240,254 +2240,254 @@ font-weight: 700; } - .terminal-3459022791-matrix { + .terminal-4082092032-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3459022791-title { + .terminal-4082092032-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3459022791-r1 { fill: #c5c8c6 } - .terminal-3459022791-r2 { fill: #e3e3e3 } - .terminal-3459022791-r3 { fill: #989898 } - .terminal-3459022791-r4 { fill: #e1e1e1 } - .terminal-3459022791-r5 { fill: #121212 } - .terminal-3459022791-r6 { fill: #0053aa } - .terminal-3459022791-r7 { fill: #dde8f3;font-weight: bold } - .terminal-3459022791-r8 { fill: #1e1e1e } - .terminal-3459022791-r9 { fill: #507bb3 } - .terminal-3459022791-r10 { fill: #e2e2e2 } - .terminal-3459022791-r11 { fill: #808080 } - .terminal-3459022791-r12 { fill: #dde6ed;font-weight: bold } - .terminal-3459022791-r13 { fill: #001541 } - .terminal-3459022791-r14 { fill: #454a50 } - .terminal-3459022791-r15 { fill: #7ae998 } - .terminal-3459022791-r16 { fill: #e2e3e3;font-weight: bold } - .terminal-3459022791-r17 { fill: #0a180e;font-weight: bold } - .terminal-3459022791-r18 { fill: #000000 } - .terminal-3459022791-r19 { fill: #008139 } - .terminal-3459022791-r20 { fill: #ddedf9 } + .terminal-4082092032-r1 { fill: #c5c8c6 } + .terminal-4082092032-r2 { fill: #e3e3e3 } + .terminal-4082092032-r3 { fill: #989898 } + .terminal-4082092032-r4 { fill: #e1e1e1 } + .terminal-4082092032-r5 { fill: #4ebf71;font-weight: bold } + .terminal-4082092032-r6 { fill: #1e1e1e } + .terminal-4082092032-r7 { fill: #507bb3 } + .terminal-4082092032-r8 { fill: #e2e2e2 } + .terminal-4082092032-r9 { fill: #808080 } + 
.terminal-4082092032-r10 { fill: #dde6ed;font-weight: bold } + .terminal-4082092032-r11 { fill: #001541 } + .terminal-4082092032-r12 { fill: #454a50 } + .terminal-4082092032-r13 { fill: #7ae998 } + .terminal-4082092032-r14 { fill: #e2e3e3;font-weight: bold } + .terminal-4082092032-r15 { fill: #0a180e;font-weight: bold } + .terminal-4082092032-r16 { fill: #000000 } + .terminal-4082092032-r17 { fill: #008139 } + .terminal-4082092032-r18 { fill: #fea62b;font-weight: bold } + .terminal-4082092032-r19 { fill: #a7a9ab } + .terminal-4082092032-r20 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Template features - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Use reference The pipeline willShow help - ▁▁▁▁▁▁▁▁genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - use a copy of the - most common  - reference genome  - files from  - iGenomes - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Add Github CI The pipeline willShow help - ▁▁▁▁▁▁▁▁testsinclude several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - GitHub actions  - for Continuous  - Integration (CI)  - testing - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Add Github badgesThe README.md Show help - ▁▁▁▁▁▁▁▁file of the ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - pipeline will  - include GitHub  - badges - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Add configurationThe pipeline willShow help - ▁▁▁▁▁▁▁▁filesinclude ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - configuration  - profiles  - containing custom - parameters  - requried to run  - nf-core pipelines - at different  - institutions - - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackContinue - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  D  Toggle dark mode  
Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Template features + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Use reference The pipeline will Show help + ▁▁▁▁▁▁▁▁genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + use a copy of the  + most common  + reference genome  + files from iGenomes + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Add Github CI testsThe pipeline will Show help + ▁▁▁▁▁▁▁▁include several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + GitHub actions for  + Continuous  + Integration (CI)  + testing + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Add Github badgesThe README.md file Show help + ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + will include GitHub + badges + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Add configuration The pipeline will Show help + ▁▁▁▁▁▁▁▁filesinclude ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + configuration  + profiles containing + custom parameters  + requried to run  + nf-core pipelines  + at different  + institutions + + + + + + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackContinue + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  d Toggle dark mode q Quit @@ -2517,254 +2517,254 @@ font-weight: 700; } - .terminal-461754173-matrix { + .terminal-1639960877-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-461754173-title { + .terminal-1639960877-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-461754173-r1 { fill: #c5c8c6 } - .terminal-461754173-r2 { fill: #e3e3e3 } - .terminal-461754173-r3 { fill: #989898 } - .terminal-461754173-r4 { fill: #e1e1e1 } - .terminal-461754173-r5 { fill: #121212 } - .terminal-461754173-r6 { fill: #0053aa } - .terminal-461754173-r7 { fill: #dde8f3;font-weight: bold } - .terminal-461754173-r8 { fill: #1e1e1e } - .terminal-461754173-r9 { fill: #507bb3 } - .terminal-461754173-r10 { fill: #e2e2e2 } - .terminal-461754173-r11 { fill: #808080 } - .terminal-461754173-r12 { fill: #dde6ed;font-weight: bold } - .terminal-461754173-r13 { fill: #001541 } - .terminal-461754173-r14 
{ fill: #454a50 } - .terminal-461754173-r15 { fill: #7ae998 } - .terminal-461754173-r16 { fill: #e2e3e3;font-weight: bold } - .terminal-461754173-r17 { fill: #0a180e;font-weight: bold } - .terminal-461754173-r18 { fill: #000000 } - .terminal-461754173-r19 { fill: #008139 } - .terminal-461754173-r20 { fill: #ddedf9 } + .terminal-1639960877-r1 { fill: #c5c8c6 } + .terminal-1639960877-r2 { fill: #e3e3e3 } + .terminal-1639960877-r3 { fill: #989898 } + .terminal-1639960877-r4 { fill: #e1e1e1 } + .terminal-1639960877-r5 { fill: #4ebf71;font-weight: bold } + .terminal-1639960877-r6 { fill: #1e1e1e } + .terminal-1639960877-r7 { fill: #507bb3 } + .terminal-1639960877-r8 { fill: #e2e2e2 } + .terminal-1639960877-r9 { fill: #808080 } + .terminal-1639960877-r10 { fill: #dde6ed;font-weight: bold } + .terminal-1639960877-r11 { fill: #001541 } + .terminal-1639960877-r12 { fill: #454a50 } + .terminal-1639960877-r13 { fill: #7ae998 } + .terminal-1639960877-r14 { fill: #e2e3e3;font-weight: bold } + .terminal-1639960877-r15 { fill: #0a180e;font-weight: bold } + .terminal-1639960877-r16 { fill: #000000 } + .terminal-1639960877-r17 { fill: #008139 } + .terminal-1639960877-r18 { fill: #fea62b;font-weight: bold } + .terminal-1639960877-r19 { fill: #a7a9ab } + .terminal-1639960877-r20 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Template features - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Use reference The pipeline willShow help - ▁▁▁▁▁▁▁▁genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - use a copy of the - most common  - reference 
genome  - files from  - iGenomes - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackContinue - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Template features + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Use reference The pipeline will Show help + ▁▁▁▁▁▁▁▁genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + use a copy of the  + most common  + reference genome  + files from iGenomes + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackContinue + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  d Toggle dark mode q Quit @@ -2794,256 +2794,256 @@ font-weight: 700; } - .terminal-2179958535-matrix { + .terminal-2625911002-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2179958535-title { + .terminal-2625911002-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2179958535-r1 { fill: #c5c8c6 } - .terminal-2179958535-r2 { fill: #e3e3e3 } - .terminal-2179958535-r3 { fill: #989898 } - .terminal-2179958535-r4 { fill: #e1e1e1 } - .terminal-2179958535-r5 { fill: #121212 } - .terminal-2179958535-r6 { fill: #0053aa } - .terminal-2179958535-r7 { fill: #dde8f3;font-weight: bold } - .terminal-2179958535-r8 { fill: #a5a5a5;font-style: italic; } - .terminal-2179958535-r9 { fill: #1e1e1e } - .terminal-2179958535-r10 { fill: #0f4e2a } - .terminal-2179958535-r11 { fill: #7b3042 } - .terminal-2179958535-r12 { fill: #a7a7a7 } - .terminal-2179958535-r13 { fill: #787878 } - .terminal-2179958535-r14 { fill: #e2e2e2 } - .terminal-2179958535-r15 { fill: #b93c5b } - .terminal-2179958535-r16 { fill: #454a50 } - .terminal-2179958535-r17 { fill: #166d39 } - .terminal-2179958535-r18 { fill: #e2e3e3;font-weight: bold } - .terminal-2179958535-r19 { fill: #3c8b54;font-weight: bold } - .terminal-2179958535-r20 { fill: 
#000000 } - .terminal-2179958535-r21 { fill: #5aa86f } - .terminal-2179958535-r22 { fill: #ddedf9 } + .terminal-2625911002-r1 { fill: #c5c8c6 } + .terminal-2625911002-r2 { fill: #e3e3e3 } + .terminal-2625911002-r3 { fill: #989898 } + .terminal-2625911002-r4 { fill: #e1e1e1 } + .terminal-2625911002-r5 { fill: #4ebf71;font-weight: bold } + .terminal-2625911002-r6 { fill: #a5a5a5;font-style: italic; } + .terminal-2625911002-r7 { fill: #1e1e1e } + .terminal-2625911002-r8 { fill: #0f4e2a } + .terminal-2625911002-r9 { fill: #7b3042 } + .terminal-2625911002-r10 { fill: #a7a7a7 } + .terminal-2625911002-r11 { fill: #787878 } + .terminal-2625911002-r12 { fill: #e2e2e2 } + .terminal-2625911002-r13 { fill: #b93c5b } + .terminal-2625911002-r14 { fill: #454a50 } + .terminal-2625911002-r15 { fill: #166d39 } + .terminal-2625911002-r16 { fill: #e2e3e3;font-weight: bold } + .terminal-2625911002-r17 { fill: #3c8b54;font-weight: bold } + .terminal-2625911002-r18 { fill: #000000 } + .terminal-2625911002-r19 { fill: #5aa86f } + .terminal-2625911002-r20 { fill: #fea62b;font-weight: bold } + .terminal-2625911002-r21 { fill: #a7a9ab } + .terminal-2625911002-r22 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Basic details - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - - GitHub organisationWorkflow name - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - nf-corePipeline Name - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - Value error, Must be lowercase without  - 
punctuation. - - A short description of your pipeline. - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Description - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - Value error, Cannot be left empty. - - Name of the main author / authors - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Author(s) - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - Value error, Cannot be left empty. - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackNext - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Basic details + + + + GitHub organisationWorkflow name + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + nf-corePipeline Name + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + Value error, Must be lowercase without  + punctuation. + + A short description of your pipeline. + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Description + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + Value error, Cannot be left empty. + + Name of the main author / authors + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Author(s) + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + Value error, Cannot be left empty. 
+ ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackNext + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + +  d Toggle dark mode q Quit @@ -3073,254 +3073,253 @@ font-weight: 700; } - .terminal-1144763792-matrix { + .terminal-3787732750-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1144763792-title { + .terminal-3787732750-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1144763792-r1 { fill: #c5c8c6 } - .terminal-1144763792-r2 { fill: #e3e3e3 } - .terminal-1144763792-r3 { fill: #989898 } - .terminal-1144763792-r4 { fill: #98e024 } - .terminal-1144763792-r5 { fill: #626262 } - .terminal-1144763792-r6 { fill: #9d65ff } - .terminal-1144763792-r7 { fill: #fd971f } - .terminal-1144763792-r8 { fill: #e1e1e1 } - .terminal-1144763792-r9 { fill: #121212 } - .terminal-1144763792-r10 { fill: #0053aa } - .terminal-1144763792-r11 { fill: #dde8f3;font-weight: bold } - .terminal-1144763792-r12 { fill: #e1e1e1;text-decoration: underline; } - .terminal-1144763792-r13 { fill: #4ebf71 } - .terminal-1144763792-r14 { fill: #e2e2e2 } - .terminal-1144763792-r15 { fill: #e2e2e2;text-decoration: underline; } - .terminal-1144763792-r16 { fill: #e2e2e2;font-weight: bold;font-style: italic; } - .terminal-1144763792-r17 { fill: #7ae998 } - .terminal-1144763792-r18 { fill: #4ebf71;font-weight: bold } - .terminal-1144763792-r19 { fill: #008139 } - .terminal-1144763792-r20 { fill: #ddedf9 } + .terminal-3787732750-r1 { fill: #c5c8c6 } + .terminal-3787732750-r2 { fill: #e3e3e3 } + .terminal-3787732750-r3 { fill: #989898 } + .terminal-3787732750-r4 { fill: #98e024 } + .terminal-3787732750-r5 { fill: #626262 } + .terminal-3787732750-r6 { fill: #9d65ff } + .terminal-3787732750-r7 { fill: #fd971f } + .terminal-3787732750-r8 { fill: #e1e1e1 } + .terminal-3787732750-r9 { fill: #4ebf71;font-weight: bold } + .terminal-3787732750-r10 { fill: #e1e1e1;text-decoration: underline; 
} + .terminal-3787732750-r11 { fill: #18954b } + .terminal-3787732750-r12 { fill: #e2e2e2 } + .terminal-3787732750-r13 { fill: #e2e2e2;text-decoration: underline; } + .terminal-3787732750-r14 { fill: #e2e2e2;font-weight: bold;font-style: italic; } + .terminal-3787732750-r15 { fill: #7ae998 } + .terminal-3787732750-r16 { fill: #008139 } + .terminal-3787732750-r17 { fill: #fea62b;font-weight: bold } + .terminal-3787732750-r18 { fill: #a7a9ab } + .terminal-3787732750-r19 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - -                                           ,--./,-. -           ___     __   __   __   ___     /,-._.--~\  -     |\ | |__  __ /  ` /  \ |__) |__         }  { -     | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                           `._,._,' - - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Welcome to the nf-core pipeline creation wizard - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - This app will help you create a new Nextflow pipeline from the nf-core/tools pipeline  - template. - - The template helps anyone benefit from nf-core best practices, and is a requirement for  - nf-core pipelines. - - 💡 If you want to add a pipeline to nf-core, please join on Slack and discuss your plans  - with the community as early as possible; ideally before you start on your pipeline! See  - the nf-core guidelines and the #new-pipelines Slack channel for more information. - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Let's go! 
- ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - - -  D  Toggle dark mode  Q  Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + +                                           ,--./,-. +           ___     __   __   __   ___     /,-._.--~\  +     |\ | |__  __ /  ` /  \ |__) |__         }  { +     | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                           `._,._,' + + + + Welcome to the nf-core pipeline creation wizard + + This app will help you create a new Nextflow pipeline from the nf-core/tools pipeline template. + + The template helps anyone benefit from nf-core best practices, and is a requirement for nf-core  + pipelines. + + 💡 If you want to add a pipeline to nf-core, please join on Slack and discuss your plans with + the community as early as possible; ideally before you start on your pipeline! See the  + nf-core guidelines and the #new-pipelines Slack channel for more information. + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Let's go! 
+ ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + + + + +  d Toggle dark mode q Quit From d77af41897561d70b99efbda87a223c7c05cf816 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Thu, 23 May 2024 11:07:10 +0000 Subject: [PATCH 160/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index c1b3a9766..16262bd1c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,6 +20,7 @@ - Update gitpod/workspace-base Docker digest to 92dd1bc ([#2982](https://github.com/nf-core/tools/pull/2982)) - Update output of generation script for API docs to new structure ([#2988](https://github.com/nf-core/tools/pull/2988)) - Add no clobber and put bash options on their own line ([#2991](https://github.com/nf-core/tools/pull/2991)) +- update minimal textual version and snapshots ([#2998](https://github.com/nf-core/tools/pull/2998)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From 8f6c8fb5ff34eecf2f9b0c502753de77123641b0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Thu, 23 May 2024 11:21:49 +0000 Subject: [PATCH 161/737] update snapshots fom gitpod --- tests/__snapshots__/test_create_app.ambr | 266 +++++++++--------- .../pytest-0/test_github_details0 | 1 + .../pytest-0/test_github_exit_message0 | 1 + .../pytest-0/test_github_question0 | 1 + 4 files changed, 135 insertions(+), 134 deletions(-) create mode 160000 tmp/pytest-of-gitpod/pytest-0/test_github_details0 create mode 160000 tmp/pytest-of-gitpod/pytest-0/test_github_exit_message0 create mode 160000 tmp/pytest-of-gitpod/pytest-0/test_github_question0 diff --git a/tests/__snapshots__/test_create_app.ambr b/tests/__snapshots__/test_create_app.ambr index f5a519b13..ed7bf8b25 100644 --- a/tests/__snapshots__/test_create_app.ambr +++ b/tests/__snapshots__/test_create_app.ambr @@ -1408,262 +1408,260 @@ font-weight: 700; } - .terminal-1242773313-matrix { + 
.terminal-3893066652-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1242773313-title { + .terminal-3893066652-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1242773313-r1 { fill: #c5c8c6 } - .terminal-1242773313-r2 { fill: #e3e3e3 } - .terminal-1242773313-r3 { fill: #989898 } - .terminal-1242773313-r4 { fill: #e1e1e1 } - .terminal-1242773313-r5 { fill: #4ebf71;font-weight: bold } - .terminal-1242773313-r6 { fill: #18954b } - .terminal-1242773313-r7 { fill: #e2e2e2 } - .terminal-1242773313-r8 { fill: #e2e2e2;font-style: italic; } - .terminal-1242773313-r9 { fill: #e2e2e2;font-style: italic;;text-decoration: underline; } - .terminal-1242773313-r10 { fill: #a5a5a5;font-style: italic; } - .terminal-1242773313-r11 { fill: #1e1e1e } - .terminal-1242773313-r12 { fill: #008139 } - .terminal-1242773313-r13 { fill: #454a50 } - .terminal-1242773313-r14 { fill: #787878 } - .terminal-1242773313-r15 { fill: #e2e3e3;font-weight: bold } - .terminal-1242773313-r16 { fill: #000000 } - .terminal-1242773313-r17 { fill: #b93c5b } - .terminal-1242773313-r18 { fill: #e2e2e2;font-weight: bold } - .terminal-1242773313-r19 { fill: #969696;font-weight: bold } - .terminal-1242773313-r20 { fill: #808080 } - .terminal-1242773313-r21 { fill: #7ae998 } - .terminal-1242773313-r22 { fill: #507bb3 } - .terminal-1242773313-r23 { fill: #0a180e;font-weight: bold } - .terminal-1242773313-r24 { fill: #dde6ed;font-weight: bold } - .terminal-1242773313-r25 { fill: #001541 } - .terminal-1242773313-r26 { fill: #fea62b;font-weight: bold } - .terminal-1242773313-r27 { fill: #a7a9ab } - .terminal-1242773313-r28 { fill: #e2e3e3 } + .terminal-3893066652-r1 { fill: #c5c8c6 } + .terminal-3893066652-r2 { fill: #e3e3e3 } + .terminal-3893066652-r3 { fill: #989898 } + .terminal-3893066652-r4 { fill: #e1e1e1 } + .terminal-3893066652-r5 { fill: #4ebf71;font-weight: bold } + .terminal-3893066652-r6 { fill: 
#a5a5a5;font-style: italic; } + .terminal-3893066652-r7 { fill: #1e1e1e } + .terminal-3893066652-r8 { fill: #008139 } + .terminal-3893066652-r9 { fill: #454a50 } + .terminal-3893066652-r10 { fill: #787878 } + .terminal-3893066652-r11 { fill: #e2e2e2 } + .terminal-3893066652-r12 { fill: #e2e3e3;font-weight: bold } + .terminal-3893066652-r13 { fill: #000000 } + .terminal-3893066652-r14 { fill: #b93c5b } + .terminal-3893066652-r15 { fill: #18954b } + .terminal-3893066652-r16 { fill: #e2e2e2;font-weight: bold } + .terminal-3893066652-r17 { fill: #969696;font-weight: bold } + .terminal-3893066652-r18 { fill: #808080 } + .terminal-3893066652-r19 { fill: #7ae998 } + .terminal-3893066652-r20 { fill: #507bb3 } + .terminal-3893066652-r21 { fill: #0a180e;font-weight: bold } + .terminal-3893066652-r22 { fill: #dde6ed;font-weight: bold } + .terminal-3893066652-r23 { fill: #001541 } + .terminal-3893066652-r24 { fill: #fea62b;font-weight: bold } + .terminal-3893066652-r25 { fill: #a7a9ab } + .terminal-3893066652-r26 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Create GitHub repository - - Now that we have created a new pipeline locally, we can create a new GitHub repository and push  - the code to it. 
- - 💡 Found GitHub username in local GitHub CLI config - - - - Your GitHub usernameYour GitHub personal access token - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔for login.▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - GitHub username▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔Show - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁GitHub token▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - The name of the organisation where the The name of the new GitHub repository - GitHub repo will be cretaed▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔mypipeline - nf-core▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - ⚠️ You can't create a repository directly in the nf-core organisation. - Please create the pipeline repo to an organisation where you have access or use your user  - account. A core-team member will be able to transfer the repo to nf-core once the development - has started. - - 💡 Your GitHub user account will be used by default if nf-core is given as the org name. - - - ▔▔▔▔▔▔▔▔Private - Select to make the new GitHub repo private. - ▁▁▁▁▁▁▁▁ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackCreate GitHub repoFinish without creating a repo - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - -  d Toggle dark mode q Quit + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Create GitHub repository + + Now that we have created a new pipeline locally, we can create a new GitHub repository and push  + the code to it. 
+ + + + Your GitHub usernameYour GitHub personal access token + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔for login.▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + GitHub username▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔Show + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁GitHub token▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + The name of the organisation where the The name of the new GitHub repository + GitHub repo will be cretaed▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔mypipeline + nf-core▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + ⚠️ You can't create a repository directly in the nf-core organisation. + Please create the pipeline repo to an organisation where you have access or use your user  + account. A core-team member will be able to transfer the repo to nf-core once the development + has started. + + 💡 Your GitHub user account will be used by default if nf-core is given as the org name. + + + ▔▔▔▔▔▔▔▔Private + Select to make the new GitHub repo private. 
+ ▁▁▁▁▁▁▁▁ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackCreate GitHub repoFinish without creating a repo + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + +  d Toggle dark mode q Quit diff --git a/tmp/pytest-of-gitpod/pytest-0/test_github_details0 b/tmp/pytest-of-gitpod/pytest-0/test_github_details0 new file mode 160000 index 000000000..19fc648c4 --- /dev/null +++ b/tmp/pytest-of-gitpod/pytest-0/test_github_details0 @@ -0,0 +1 @@ +Subproject commit 19fc648c4a1fc31b474b13cbfebd322ce9538b95 diff --git a/tmp/pytest-of-gitpod/pytest-0/test_github_exit_message0 b/tmp/pytest-of-gitpod/pytest-0/test_github_exit_message0 new file mode 160000 index 000000000..75465e6bd --- /dev/null +++ b/tmp/pytest-of-gitpod/pytest-0/test_github_exit_message0 @@ -0,0 +1 @@ +Subproject commit 75465e6bd715b1bf9075c7efbce1f2dd38c4df37 diff --git a/tmp/pytest-of-gitpod/pytest-0/test_github_question0 b/tmp/pytest-of-gitpod/pytest-0/test_github_question0 new file mode 160000 index 000000000..07281e3aa --- /dev/null +++ b/tmp/pytest-of-gitpod/pytest-0/test_github_question0 @@ -0,0 +1 @@ +Subproject commit 07281e3aa6aef6d8a282c1cafaf7defaef745565 From e9740b6454df2b238fd2c7a1ab407eee9b193029 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 23 May 2024 12:03:25 +0000 Subject: [PATCH 162/737] Update pre-commit hook astral-sh/ruff-pre-commit to v0.4.5 --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4b1941d81..01bd13a9e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.4.4 + rev: v0.4.5 hooks: - id: ruff # linter args: [--fix, --exit-non-zero-on-fix] # sort imports and fix From 33177912b000658d38479136f315b1e268dc647b Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 
15 May 2024 10:12:55 +0200 Subject: [PATCH 163/737] move pipeline subcommands for v3.0 --- docs/api/make_lint_md.py | 6 +- nf_core/__main__.py | 268 ++++++++++++++---- nf_core/components/create.py | 2 +- nf_core/components/lint/__init__.py | 2 +- nf_core/launch.py | 2 +- nf_core/modules/lint/__init__.py | 2 +- nf_core/modules/lint/module_todos.py | 2 +- nf_core/modules/modules_json.py | 2 +- .../bump_version}/bump_version.py | 2 +- nf_core/pipelines/create/create.py | 2 +- nf_core/{ => pipelines}/lint/__init__.py | 8 +- .../lint/actions_awsfulltest.py | 0 .../{ => pipelines}/lint/actions_awstest.py | 0 nf_core/{ => pipelines}/lint/actions_ci.py | 0 .../lint/actions_schema_validation.py | 0 nf_core/{ => pipelines}/lint/configs.py | 2 +- nf_core/{ => pipelines}/lint/files_exist.py | 0 .../{ => pipelines}/lint/files_unchanged.py | 0 nf_core/{ => pipelines}/lint/merge_markers.py | 0 nf_core/{ => pipelines}/lint/modules_json.py | 0 .../{ => pipelines}/lint/modules_structure.py | 0 .../{ => pipelines}/lint/multiqc_config.py | 2 +- .../{ => pipelines}/lint/nextflow_config.py | 0 nf_core/{ => pipelines}/lint/nfcore_yml.py | 0 .../lint/pipeline_name_conventions.py | 0 .../{ => pipelines}/lint/pipeline_todos.py | 0 nf_core/{ => pipelines}/lint/readme.py | 0 .../lint/schema_description.py | 0 nf_core/{ => pipelines}/lint/schema_lint.py | 0 nf_core/{ => pipelines}/lint/schema_params.py | 0 nf_core/{ => pipelines}/lint/system_exit.py | 0 .../{ => pipelines}/lint/template_strings.py | 0 .../lint/version_consistency.py | 0 nf_core/{ => pipelines}/lint_utils.py | 0 nf_core/{ => pipelines/sync}/sync.py | 0 nf_core/schema.py | 2 +- nf_core/subworkflows/lint/__init__.py | 2 +- .../subworkflows/lint/subworkflow_todos.py | 2 +- tests/lint/actions_awsfulltest.py | 6 +- tests/lint/actions_awstest.py | 4 +- tests/lint/actions_ci.py | 4 +- tests/lint/actions_schema_validation.py | 8 +- tests/lint/configs.py | 12 +- tests/lint/files_exist.py | 16 +- tests/lint/files_unchanged.py | 4 +- 
tests/lint/merge_markers.py | 4 +- tests/lint/multiqc_config.py | 20 +- tests/lint/nextflow_config.py | 22 +- tests/lint/nfcore_yml.py | 6 +- tests/lint/template_strings.py | 8 +- tests/lint/version_consistency.py | 4 +- tests/test_bump_version.py | 8 +- tests/test_cli.py | 6 +- tests/test_lint.py | 82 +++--- tests/test_lint_utils.py | 8 +- tests/test_sync.py | 64 ++--- 56 files changed, 382 insertions(+), 212 deletions(-) rename nf_core/{ => pipelines/bump_version}/bump_version.py (98%) rename nf_core/{ => pipelines}/lint/__init__.py (99%) rename nf_core/{ => pipelines}/lint/actions_awsfulltest.py (100%) rename nf_core/{ => pipelines}/lint/actions_awstest.py (100%) rename nf_core/{ => pipelines}/lint/actions_ci.py (100%) rename nf_core/{ => pipelines}/lint/actions_schema_validation.py (100%) rename nf_core/{ => pipelines}/lint/configs.py (98%) rename nf_core/{ => pipelines}/lint/files_exist.py (100%) rename nf_core/{ => pipelines}/lint/files_unchanged.py (100%) rename nf_core/{ => pipelines}/lint/merge_markers.py (100%) rename nf_core/{ => pipelines}/lint/modules_json.py (100%) rename nf_core/{ => pipelines}/lint/modules_structure.py (100%) rename nf_core/{ => pipelines}/lint/multiqc_config.py (99%) rename nf_core/{ => pipelines}/lint/nextflow_config.py (100%) rename nf_core/{ => pipelines}/lint/nfcore_yml.py (100%) rename nf_core/{ => pipelines}/lint/pipeline_name_conventions.py (100%) rename nf_core/{ => pipelines}/lint/pipeline_todos.py (100%) rename nf_core/{ => pipelines}/lint/readme.py (100%) rename nf_core/{ => pipelines}/lint/schema_description.py (100%) rename nf_core/{ => pipelines}/lint/schema_lint.py (100%) rename nf_core/{ => pipelines}/lint/schema_params.py (100%) rename nf_core/{ => pipelines}/lint/system_exit.py (100%) rename nf_core/{ => pipelines}/lint/template_strings.py (100%) rename nf_core/{ => pipelines}/lint/version_consistency.py (100%) rename nf_core/{ => pipelines}/lint_utils.py (100%) rename nf_core/{ => pipelines/sync}/sync.py (100%) 
diff --git a/docs/api/make_lint_md.py b/docs/api/make_lint_md.py index 35e38a55c..48393094b 100644 --- a/docs/api/make_lint_md.py +++ b/docs/api/make_lint_md.py @@ -3,8 +3,8 @@ import fnmatch import os -import nf_core.lint import nf_core.modules.lint +import nf_core.pipelines.lint import nf_core.subworkflows.lint @@ -31,11 +31,11 @@ def make_docs(docs_basedir, lint_tests, md_template): pipeline_docs_basedir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "_src", "pipeline_lint_tests") make_docs( pipeline_docs_basedir, - nf_core.lint.PipelineLint._get_all_lint_tests(True), + nf_core.pipelines.lint.PipelineLint._get_all_lint_tests(True), """# {0} ```{{eval-rst}} -.. automethod:: nf_core.lint.PipelineLint.{0} +.. automethod:: nf_core.pipelines.lint.PipelineLint.{0} ``` """, ) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 67af238b5..c2423ef1f 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -46,20 +46,17 @@ "name": "Commands for developers", "commands": [ "pipelines", - "lint", "modules", "subworkflows", "schema", "create-logo", - "bump-version", - "sync", ], }, ], "nf-core pipelines": [ { "name": "Pipeline commands", - "commands": ["create"], + "commands": ["create", "lint", "bump-version", "sync"], }, ], "nf-core modules": [ @@ -486,8 +483,8 @@ def licences(pipeline, json): sys.exit(1) -# nf-core lint -@nf_core_cli.command() +# nf-core lint (deprecated) +@nf_core_cli.command(hidden=True, deprecated=True) @click.option( "-d", "--dir", @@ -556,6 +553,7 @@ def lint( sort_by, ): """ + DEPRECATED Check pipeline code against nf-core guidelines. Runs a large number of automated tests to ensure that the supplied pipeline @@ -565,42 +563,8 @@ def lint( You can ignore tests using a file called [blue].nf-core.yml[/] [i](if you have a good reason!)[/]. See the documentation for details. 
""" - from nf_core.lint import run_linting - from nf_core.utils import is_pipeline_directory - - # Check if pipeline directory is a pipeline - try: - is_pipeline_directory(dir) - except UserWarning as e: - log.error(e) - sys.exit(1) - - # Run the lint tests! - try: - lint_obj, module_lint_obj, subworkflow_lint_obj = run_linting( - dir, - release, - fix, - key, - show_passed, - fail_ignored, - fail_warned, - sort_by, - markdown, - json, - ctx.obj["hide_progress"], - ) - swf_failed = 0 - if subworkflow_lint_obj is not None: - swf_failed = len(subworkflow_lint_obj.failed) - if len(lint_obj.failed) + len(module_lint_obj.failed) + swf_failed > 0: - sys.exit(1) - except AssertionError as e: - log.critical(e) - sys.exit(1) - except UserWarning as e: - log.error(e) - sys.exit(1) + log.error("The `[magenta]nf-core lint[/]` command is deprecated. Use `[magenta]nf-core pipelines lint[/]` instead.") + sys.exit(0) # nf-core pipelines subcommands @@ -679,6 +643,124 @@ def create_pipeline(ctx, name, description, author, version, force, outdir, temp sys.exit(app.return_code or 0) +# nf-core pipelines lint +@pipelines.command("lint") +@click.pass_context +@click.option( + "-d", + "--dir", + type=click.Path(exists=True), + default=".", + help=r"Pipeline directory [dim]\[default: current working directory][/]", +) +@click.option( + "--release", + is_flag=True, + default=os.path.basename(os.path.dirname(os.environ.get("GITHUB_REF", "").strip(" '\""))) == "master" + and os.environ.get("GITHUB_REPOSITORY", "").startswith("nf-core/") + and not os.environ.get("GITHUB_REPOSITORY", "") == "nf-core/tools", + help="Execute additional checks for release-ready workflows.", +) +@click.option( + "-f", + "--fix", + type=str, + metavar="", + multiple=True, + help="Attempt to automatically fix specified lint test", +) +@click.option( + "-k", + "--key", + type=str, + metavar="", + multiple=True, + help="Run only these lint tests", +) +@click.option("-p", "--show-passed", is_flag=True, help="Show 
passing tests on the command line") +@click.option("-i", "--fail-ignored", is_flag=True, help="Convert ignored tests to failures") +@click.option("-w", "--fail-warned", is_flag=True, help="Convert warn tests to failures") +@click.option( + "--markdown", + type=str, + metavar="", + help="File to write linting results to (Markdown)", +) +@click.option( + "--json", + type=str, + metavar="", + help="File to write linting results to (JSON)", +) +@click.option( + "--sort-by", + type=click.Choice(["module", "test"]), + default="test", + help="Sort lint output by module or test name.", + show_default=True, +) +@click.pass_context +def lint_pipeline( + ctx, + dir, + release, + fix, + key, + show_passed, + fail_ignored, + fail_warned, + markdown, + json, + sort_by, +): + """ + Check pipeline code against nf-core guidelines. + + Runs a large number of automated tests to ensure that the supplied pipeline + meets the nf-core guidelines. Documentation of all lint tests can be found + on the nf-core website: [link=https://nf-co.re/tools/docs/]https://nf-co.re/tools/docs/[/] + + You can ignore tests using a file called [blue].nf-core.yml[/] [i](if you have a good reason!)[/]. + See the documentation for details. + """ + from nf_core.pipelines.lint import run_linting + from nf_core.utils import is_pipeline_directory + + # Check if pipeline directory is a pipeline + try: + is_pipeline_directory(dir) + except UserWarning as e: + log.error(e) + sys.exit(1) + + # Run the lint tests! 
+ try: + lint_obj, module_lint_obj, subworkflow_lint_obj = run_linting( + dir, + release, + fix, + key, + show_passed, + fail_ignored, + fail_warned, + sort_by, + markdown, + json, + ctx.obj["hide_progress"], + ) + swf_failed = 0 + if subworkflow_lint_obj is not None: + swf_failed = len(subworkflow_lint_obj.failed) + if len(lint_obj.failed) + len(module_lint_obj.failed) + swf_failed > 0: + sys.exit(1) + except AssertionError as e: + log.critical(e) + sys.exit(1) + except UserWarning as e: + log.error(e) + sys.exit(1) + + # nf-core create (deprecated) @nf_core_cli.command(hidden=True, deprecated=True) @click.option( @@ -2063,9 +2145,9 @@ def docs(schema_path, output, format, force, columns): schema_obj.print_documentation(output, format, force, columns.split(",")) -# nf-core bump-version -@nf_core_cli.command("bump-version") -@click.argument("new_version", required=True, metavar="") +# nf-core bump-version (deprecated) +@nf_core_cli.command(hidden=True, deprecated=True) +@click.argument("new_version", default="") @click.option( "-d", "--dir", @@ -2081,6 +2163,44 @@ def docs(schema_path, output, format, force, columns): help="Bump required nextflow version instead of pipeline version", ) def bump_version(new_version, dir, nextflow): + """ + DEPRECATED + Update nf-core pipeline version number. + + The pipeline version number is mentioned in a lot of different places + in nf-core pipelines. This tool updates the version for you automatically, + so that you don't accidentally miss any. + + Should be used for each pipeline release, and again for the next + development version after release. + + As well as the pipeline version, you can also change the required version of Nextflow. + """ + log.error( + "The `[magenta]nf-core bump-version[/]` command is deprecated. Use `[magenta]nf-core pipelines bump-version[/]` instead." 
+ ) + sys.exit(0) + + +# nf-core pipelines bump-version +@pipelines.command("bump-version") +@click.pass_context +@click.argument("new_version", required=True, metavar="") +@click.option( + "-d", + "--dir", + type=click.Path(exists=True), + default=".", + help=r"Pipeline directory. [dim]\[default: current working directory][/]", +) +@click.option( + "-n", + "--nextflow", + is_flag=True, + default=False, + help="Bump required nextflow version instead of pipeline version", +) +def bump_version_pipeline(new_version, dir, nextflow): """ Update nf-core pipeline version number. @@ -2093,7 +2213,7 @@ def bump_version(new_version, dir, nextflow): As well as the pipeline version, you can also change the required version of Nextflow. """ - from nf_core.bump_version import bump_nextflow_version, bump_pipeline_version + from nf_core.pipelines.bump_version.bump_version import bump_nextflow_version, bump_pipeline_version from nf_core.utils import Pipeline, is_pipeline_directory try: @@ -2175,8 +2295,8 @@ def logo(logo_text, dir, name, theme, width, format, force): sys.exit(1) -# nf-core sync -@nf_core_cli.command("sync") +# nf-core sync (deprecated) +@nf_core_cli.command(hidden=True, deprecated=True) @click.option( "-d", "--dir", @@ -2207,6 +2327,56 @@ def logo(logo_text, dir, name, theme, width, format, force): @click.option("-u", "--username", type=str, help="GitHub PR: auth username.") @click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") def sync(dir, from_branch, pull_request, github_repository, username, template_yaml, force_pr): + """ + DEPRECATED + Sync a pipeline [cyan i]TEMPLATE[/] branch with the nf-core template. + + To keep nf-core pipelines up to date with improvements in the main + template, we use a method of synchronisation that uses a special + git branch called [cyan i]TEMPLATE[/]. 
+ + This command updates the [cyan i]TEMPLATE[/] branch with the latest version of + the nf-core template, so that these updates can be synchronised with + the pipeline. It is run automatically for all pipelines when ever a + new release of [link=https://github.com/nf-core/tools]nf-core/tools[/link] (and the included template) is made. + """ + log.error("The `[magenta]nf-core sync[/]` command is deprecated. Use `[magenta]nf-core pipelines sync[/]` instead.") + sys.exit(0) + + +# nf-core pipelines sync +@pipelines.command("sync") +@click.pass_context +@click.option( + "-d", + "--dir", + type=click.Path(exists=True), + default=".", + help=r"Pipeline directory. [dim]\[default: current working directory][/]", +) +@click.option( + "-b", + "--from-branch", + type=str, + help="The git branch to use to fetch workflow variables.", +) +@click.option( + "-p", + "--pull-request", + is_flag=True, + default=False, + help="Make a GitHub pull-request with the changes.", +) +@click.option( + "--force_pr", + is_flag=True, + default=False, + help="Force the creation of a pull-request, even if there are no changes.", +) +@click.option("-g", "--github-repository", type=str, help="GitHub PR: target repository.") +@click.option("-u", "--username", type=str, help="GitHub PR: auth username.") +@click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") +def sync_pipeline(dir, from_branch, pull_request, github_repository, username, template_yaml, force_pr): """ Sync a pipeline [cyan i]TEMPLATE[/] branch with the nf-core template. @@ -2219,7 +2389,7 @@ def sync(dir, from_branch, pull_request, github_repository, username, template_y the pipeline. It is run automatically for all pipelines when ever a new release of [link=https://github.com/nf-core/tools]nf-core/tools[/link] (and the included template) is made. 
""" - from nf_core.sync import PipelineSync, PullRequestExceptionError, SyncExceptionError + from nf_core.pipelines.sync.sync import PipelineSync, PullRequestExceptionError, SyncExceptionError from nf_core.utils import is_pipeline_directory # Check if pipeline directory contains necessary files diff --git a/nf_core/components/create.py b/nf_core/components/create.py index 6c9c01b49..c9afb390d 100644 --- a/nf_core/components/create.py +++ b/nf_core/components/create.py @@ -20,7 +20,7 @@ import nf_core import nf_core.utils from nf_core.components.components_command import ComponentCommand -from nf_core.lint_utils import run_prettier_on_file +from nf_core.pipelines.lint_utils import run_prettier_on_file log = logging.getLogger(__name__) diff --git a/nf_core/components/lint/__init__.py b/nf_core/components/lint/__init__.py index 564dcfaf6..499d31e71 100644 --- a/nf_core/components/lint/__init__.py +++ b/nf_core/components/lint/__init__.py @@ -19,8 +19,8 @@ import nf_core.utils from nf_core.components.components_command import ComponentCommand from nf_core.components.nfcore_component import NFCoreComponent -from nf_core.lint_utils import console from nf_core.modules.modules_json import ModulesJson +from nf_core.pipelines.lint_utils import console from nf_core.utils import plural_s as _s log = logging.getLogger(__name__) diff --git a/nf_core/launch.py b/nf_core/launch.py index bc0cd58ae..423f0728e 100644 --- a/nf_core/launch.py +++ b/nf_core/launch.py @@ -15,7 +15,7 @@ import nf_core.schema import nf_core.utils -from nf_core.lint_utils import dump_json_with_prettier +from nf_core.pipelines.lint_utils import dump_json_with_prettier log = logging.getLogger(__name__) diff --git a/nf_core/modules/lint/__init__.py b/nf_core/modules/lint/__init__.py index f96683089..b780144ef 100644 --- a/nf_core/modules/lint/__init__.py +++ b/nf_core/modules/lint/__init__.py @@ -15,7 +15,7 @@ import nf_core.modules.modules_utils import nf_core.utils from nf_core.components.lint import 
ComponentLint, LintExceptionError, LintResult -from nf_core.lint_utils import console +from nf_core.pipelines.lint_utils import console log = logging.getLogger(__name__) diff --git a/nf_core/modules/lint/module_todos.py b/nf_core/modules/lint/module_todos.py index c9c90ec3d..a07005df0 100644 --- a/nf_core/modules/lint/module_todos.py +++ b/nf_core/modules/lint/module_todos.py @@ -1,6 +1,6 @@ import logging -from nf_core.lint.pipeline_todos import pipeline_todos +from nf_core.pipelines.lint.pipeline_todos import pipeline_todos log = logging.getLogger(__name__) diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 7d78268e9..b0a4fa661 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -14,12 +14,12 @@ import nf_core.utils from nf_core.components.components_utils import get_components_to_install -from nf_core.lint_utils import dump_json_with_prettier from nf_core.modules.modules_repo import ( NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE, ModulesRepo, ) +from nf_core.pipelines.lint_utils import dump_json_with_prettier from .modules_differ import ModulesDiffer diff --git a/nf_core/bump_version.py b/nf_core/pipelines/bump_version/bump_version.py similarity index 98% rename from nf_core/bump_version.py rename to nf_core/pipelines/bump_version/bump_version.py index c5e8931fb..18aa86932 100644 --- a/nf_core/bump_version.py +++ b/nf_core/pipelines/bump_version/bump_version.py @@ -176,7 +176,7 @@ def update_file_version(filename: Union[str, Path], pipeline_obj: Pipeline, patt Args: filename (str): File to scan. - pipeline_obj (nf_core.lint.PipelineLint): A PipelineLint object that holds information + pipeline_obj (nf_core.pipelines.lint.PipelineLint): A PipelineLint object that holds information about the pipeline contents and build files. pattern (str): Regex pattern to apply. 
diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 381e2f07a..763165da6 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -18,8 +18,8 @@ import nf_core.schema import nf_core.utils from nf_core.create_logo import create_logo -from nf_core.lint_utils import run_prettier_on_file from nf_core.pipelines.create.utils import CreateConfig +from nf_core.pipelines.lint_utils import run_prettier_on_file log = logging.getLogger(__name__) diff --git a/nf_core/lint/__init__.py b/nf_core/pipelines/lint/__init__.py similarity index 99% rename from nf_core/lint/__init__.py rename to nf_core/pipelines/lint/__init__.py index 9292a07fd..4ec62ddb0 100644 --- a/nf_core/lint/__init__.py +++ b/nf_core/pipelines/lint/__init__.py @@ -20,13 +20,13 @@ from rich.panel import Panel from rich.table import Table -import nf_core.lint_utils import nf_core.modules.lint +import nf_core.pipelines.lint_utils import nf_core.subworkflows.lint import nf_core.utils from nf_core import __version__ from nf_core.components.lint import ComponentLint -from nf_core.lint_utils import console +from nf_core.pipelines.lint_utils import console from nf_core.utils import plural_s as _s from nf_core.utils import strip_ansi_codes @@ -623,8 +623,8 @@ def run_linting( module_lint_obj._print_results(show_passed, sort_by=sort_by) if subworkflow_lint_obj is not None: subworkflow_lint_obj._print_results(show_passed, sort_by=sort_by) - nf_core.lint_utils.print_joint_summary(lint_obj, module_lint_obj, subworkflow_lint_obj) - nf_core.lint_utils.print_fixes(lint_obj) + nf_core.pipelines.lint_utils.print_joint_summary(lint_obj, module_lint_obj, subworkflow_lint_obj) + nf_core.pipelines.lint_utils.print_fixes(lint_obj) # Save results to Markdown file if md_fn is not None: diff --git a/nf_core/lint/actions_awsfulltest.py b/nf_core/pipelines/lint/actions_awsfulltest.py similarity index 100% rename from nf_core/lint/actions_awsfulltest.py rename to 
nf_core/pipelines/lint/actions_awsfulltest.py diff --git a/nf_core/lint/actions_awstest.py b/nf_core/pipelines/lint/actions_awstest.py similarity index 100% rename from nf_core/lint/actions_awstest.py rename to nf_core/pipelines/lint/actions_awstest.py diff --git a/nf_core/lint/actions_ci.py b/nf_core/pipelines/lint/actions_ci.py similarity index 100% rename from nf_core/lint/actions_ci.py rename to nf_core/pipelines/lint/actions_ci.py diff --git a/nf_core/lint/actions_schema_validation.py b/nf_core/pipelines/lint/actions_schema_validation.py similarity index 100% rename from nf_core/lint/actions_schema_validation.py rename to nf_core/pipelines/lint/actions_schema_validation.py diff --git a/nf_core/lint/configs.py b/nf_core/pipelines/lint/configs.py similarity index 98% rename from nf_core/lint/configs.py rename to nf_core/pipelines/lint/configs.py index 274152919..f0fa1170c 100644 --- a/nf_core/lint/configs.py +++ b/nf_core/pipelines/lint/configs.py @@ -3,7 +3,7 @@ from pathlib import Path from typing import Dict, List -from nf_core.lint_utils import ignore_file +from nf_core.pipelines.lint_utils import ignore_file log = logging.getLogger(__name__) diff --git a/nf_core/lint/files_exist.py b/nf_core/pipelines/lint/files_exist.py similarity index 100% rename from nf_core/lint/files_exist.py rename to nf_core/pipelines/lint/files_exist.py diff --git a/nf_core/lint/files_unchanged.py b/nf_core/pipelines/lint/files_unchanged.py similarity index 100% rename from nf_core/lint/files_unchanged.py rename to nf_core/pipelines/lint/files_unchanged.py diff --git a/nf_core/lint/merge_markers.py b/nf_core/pipelines/lint/merge_markers.py similarity index 100% rename from nf_core/lint/merge_markers.py rename to nf_core/pipelines/lint/merge_markers.py diff --git a/nf_core/lint/modules_json.py b/nf_core/pipelines/lint/modules_json.py similarity index 100% rename from nf_core/lint/modules_json.py rename to nf_core/pipelines/lint/modules_json.py diff --git 
a/nf_core/lint/modules_structure.py b/nf_core/pipelines/lint/modules_structure.py similarity index 100% rename from nf_core/lint/modules_structure.py rename to nf_core/pipelines/lint/modules_structure.py diff --git a/nf_core/lint/multiqc_config.py b/nf_core/pipelines/lint/multiqc_config.py similarity index 99% rename from nf_core/lint/multiqc_config.py rename to nf_core/pipelines/lint/multiqc_config.py index 8b4fa2120..2b0fc7902 100644 --- a/nf_core/lint/multiqc_config.py +++ b/nf_core/pipelines/lint/multiqc_config.py @@ -3,7 +3,7 @@ import yaml -from nf_core.lint_utils import ignore_file +from nf_core.pipelines.lint_utils import ignore_file def multiqc_config(self) -> Dict[str, List[str]]: diff --git a/nf_core/lint/nextflow_config.py b/nf_core/pipelines/lint/nextflow_config.py similarity index 100% rename from nf_core/lint/nextflow_config.py rename to nf_core/pipelines/lint/nextflow_config.py diff --git a/nf_core/lint/nfcore_yml.py b/nf_core/pipelines/lint/nfcore_yml.py similarity index 100% rename from nf_core/lint/nfcore_yml.py rename to nf_core/pipelines/lint/nfcore_yml.py diff --git a/nf_core/lint/pipeline_name_conventions.py b/nf_core/pipelines/lint/pipeline_name_conventions.py similarity index 100% rename from nf_core/lint/pipeline_name_conventions.py rename to nf_core/pipelines/lint/pipeline_name_conventions.py diff --git a/nf_core/lint/pipeline_todos.py b/nf_core/pipelines/lint/pipeline_todos.py similarity index 100% rename from nf_core/lint/pipeline_todos.py rename to nf_core/pipelines/lint/pipeline_todos.py diff --git a/nf_core/lint/readme.py b/nf_core/pipelines/lint/readme.py similarity index 100% rename from nf_core/lint/readme.py rename to nf_core/pipelines/lint/readme.py diff --git a/nf_core/lint/schema_description.py b/nf_core/pipelines/lint/schema_description.py similarity index 100% rename from nf_core/lint/schema_description.py rename to nf_core/pipelines/lint/schema_description.py diff --git a/nf_core/lint/schema_lint.py 
b/nf_core/pipelines/lint/schema_lint.py similarity index 100% rename from nf_core/lint/schema_lint.py rename to nf_core/pipelines/lint/schema_lint.py diff --git a/nf_core/lint/schema_params.py b/nf_core/pipelines/lint/schema_params.py similarity index 100% rename from nf_core/lint/schema_params.py rename to nf_core/pipelines/lint/schema_params.py diff --git a/nf_core/lint/system_exit.py b/nf_core/pipelines/lint/system_exit.py similarity index 100% rename from nf_core/lint/system_exit.py rename to nf_core/pipelines/lint/system_exit.py diff --git a/nf_core/lint/template_strings.py b/nf_core/pipelines/lint/template_strings.py similarity index 100% rename from nf_core/lint/template_strings.py rename to nf_core/pipelines/lint/template_strings.py diff --git a/nf_core/lint/version_consistency.py b/nf_core/pipelines/lint/version_consistency.py similarity index 100% rename from nf_core/lint/version_consistency.py rename to nf_core/pipelines/lint/version_consistency.py diff --git a/nf_core/lint_utils.py b/nf_core/pipelines/lint_utils.py similarity index 100% rename from nf_core/lint_utils.py rename to nf_core/pipelines/lint_utils.py diff --git a/nf_core/sync.py b/nf_core/pipelines/sync/sync.py similarity index 100% rename from nf_core/sync.py rename to nf_core/pipelines/sync/sync.py diff --git a/nf_core/schema.py b/nf_core/schema.py index 4f5acfa0a..eee9f9978 100644 --- a/nf_core/schema.py +++ b/nf_core/schema.py @@ -18,7 +18,7 @@ import nf_core.list import nf_core.utils -from nf_core.lint_utils import dump_json_with_prettier, run_prettier_on_file +from nf_core.pipelines.lint_utils import dump_json_with_prettier, run_prettier_on_file log = logging.getLogger(__name__) diff --git a/nf_core/subworkflows/lint/__init__.py b/nf_core/subworkflows/lint/__init__.py index cc79ed863..a3cacf295 100644 --- a/nf_core/subworkflows/lint/__init__.py +++ b/nf_core/subworkflows/lint/__init__.py @@ -15,7 +15,7 @@ import nf_core.modules.modules_utils import nf_core.utils from 
nf_core.components.lint import ComponentLint, LintExceptionError, LintResult -from nf_core.lint_utils import console +from nf_core.pipelines.lint_utils import console log = logging.getLogger(__name__) diff --git a/nf_core/subworkflows/lint/subworkflow_todos.py b/nf_core/subworkflows/lint/subworkflow_todos.py index 91f9f55b0..3417215db 100644 --- a/nf_core/subworkflows/lint/subworkflow_todos.py +++ b/nf_core/subworkflows/lint/subworkflow_todos.py @@ -1,6 +1,6 @@ import logging -from nf_core.lint.pipeline_todos import pipeline_todos +from nf_core.pipelines.lint.pipeline_todos import pipeline_todos log = logging.getLogger(__name__) diff --git a/tests/lint/actions_awsfulltest.py b/tests/lint/actions_awsfulltest.py index bbda92a4d..caf7bbf36 100644 --- a/tests/lint/actions_awsfulltest.py +++ b/tests/lint/actions_awsfulltest.py @@ -2,7 +2,7 @@ import yaml -import nf_core.lint +import nf_core.pipelines.lint def test_actions_awsfulltest_warn(self): @@ -26,7 +26,7 @@ def test_actions_awsfulltest_pass(self): fh.write(awsfulltest_yml) # Make lint object - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load() results = lint_obj.actions_awsfulltest() @@ -51,7 +51,7 @@ def test_actions_awsfulltest_fail(self): yaml.dump(awsfulltest_yml, fh) # Make lint object - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load() results = lint_obj.actions_awsfulltest() diff --git a/tests/lint/actions_awstest.py b/tests/lint/actions_awstest.py index 7bfa6052f..259bf866b 100644 --- a/tests/lint/actions_awstest.py +++ b/tests/lint/actions_awstest.py @@ -2,7 +2,7 @@ import yaml -import nf_core.lint +import nf_core.pipelines.lint def test_actions_awstest_pass(self): @@ -27,7 +27,7 @@ def test_actions_awstest_fail(self): yaml.dump(awstest_yml, fh) # Make lint object - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = 
nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load() results = lint_obj.actions_awstest() diff --git a/tests/lint/actions_ci.py b/tests/lint/actions_ci.py index 8734b2f78..eb438b881 100644 --- a/tests/lint/actions_ci.py +++ b/tests/lint/actions_ci.py @@ -2,7 +2,7 @@ import yaml -import nf_core.lint +import nf_core.pipelines.lint def test_actions_ci_pass(self): @@ -39,7 +39,7 @@ def test_actions_ci_fail_wrong_trigger(self): yaml.dump(ci_yml, fh) # Make lint object - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load() results = lint_obj.actions_ci() diff --git a/tests/lint/actions_schema_validation.py b/tests/lint/actions_schema_validation.py index e202b3b1c..4b00e7bf4 100644 --- a/tests/lint/actions_schema_validation.py +++ b/tests/lint/actions_schema_validation.py @@ -2,7 +2,7 @@ import yaml -import nf_core.lint +import nf_core.pipelines.lint def test_actions_schema_validation_missing_jobs(self): @@ -16,7 +16,7 @@ def test_actions_schema_validation_missing_jobs(self): with open(awstest_yml_path, "w") as fh: yaml.dump(awstest_yml, fh) - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load() results = lint_obj.actions_schema_validation() @@ -35,7 +35,7 @@ def test_actions_schema_validation_missing_on(self): with open(awstest_yml_path, "w") as fh: yaml.dump(awstest_yml, fh) - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load() results = lint_obj.actions_schema_validation() @@ -55,7 +55,7 @@ def test_actions_schema_validation_fails_for_additional_property(self): with open(awstest_yml_path, "w") as fh: yaml.dump(awstest_yml, fh) - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load() results = lint_obj.actions_schema_validation() diff --git 
a/tests/lint/configs.py b/tests/lint/configs.py index 8610910cd..3ca35cab8 100644 --- a/tests/lint/configs.py +++ b/tests/lint/configs.py @@ -2,15 +2,15 @@ import yaml -import nf_core.lint import nf_core.pipelines.create +import nf_core.pipelines.lint def test_withname_in_modules_config(self): """Tests finding withName in modules.config passes linting.""" new_pipeline = self._make_pipeline_copy() - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load() result = lint_obj.modules_config() assert len(result["failed"]) == 0 @@ -26,7 +26,7 @@ def test_superfluous_withname_in_modules_config_fails(self): modules_config = Path(new_pipeline) / "conf" / "modules.config" with open(modules_config, "a") as f: f.write("\nwithName: 'BPIPE' {\n cache = false \n}") - lint_obj = nf_core.lint.PipelineLint(new_pipeline, hide_progress=False) + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline, hide_progress=False) lint_obj._load() result = lint_obj.modules_config() assert len(result["failed"]) == 1 @@ -44,7 +44,7 @@ def test_ignore_modules_config(self): with open(Path(new_pipeline) / ".nf-core.yml", "w") as f: yaml.dump(content, f) Path(new_pipeline, "conf", "modules.config").unlink() - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load() result = lint_obj.modules_config() assert len(result["ignored"]) == 1 @@ -61,7 +61,7 @@ def test_superfluous_withname_in_base_config_fails(self): base_config = Path(new_pipeline) / "conf" / "base.config" with open(base_config, "a") as f: f.write("\nwithName:CUSTOM_DUMPSOFTWAREVERSIONS {\n cache = false \n}") - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load() result = lint_obj.base_config() assert len(result["failed"]) == 1 @@ -79,7 +79,7 @@ def test_ignore_base_config(self): with open(Path(new_pipeline) / 
".nf-core.yml", "w") as f: yaml.dump(content, f) Path(new_pipeline, "conf", "base.config").unlink() - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load() result = lint_obj.base_config() assert len(result["ignored"]) == 1 diff --git a/tests/lint/files_exist.py b/tests/lint/files_exist.py index 08da7f14d..679d20987 100644 --- a/tests/lint/files_exist.py +++ b/tests/lint/files_exist.py @@ -1,7 +1,7 @@ import os from pathlib import Path -import nf_core.lint +import nf_core.pipelines.lint def test_files_exist_missing_config(self): @@ -10,7 +10,7 @@ def test_files_exist_missing_config(self): Path(new_pipeline, "CHANGELOG.md").unlink() - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load() lint_obj.nf_config["manifest.name"] = "nf-core/testpipeline" @@ -24,7 +24,7 @@ def test_files_exist_missing_main(self): Path(new_pipeline, "main.nf").unlink() - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load() results = lint_obj.files_exist() @@ -38,7 +38,7 @@ def test_files_exist_depreciated_file(self): nf = Path(new_pipeline, "parameters.settings.json") os.system(f"touch {nf}") - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load() results = lint_obj.files_exist() @@ -49,7 +49,7 @@ def test_files_exist_pass(self): """Lint check should pass if all files are there""" new_pipeline = self._make_pipeline_copy() - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load() results = lint_obj.files_exist() @@ -58,7 +58,7 @@ def test_files_exist_pass(self): def test_files_exist_pass_conditional(self): new_pipeline = self._make_pipeline_copy() - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = 
nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load() lint_obj.nf_config["plugins"] = [] lib_dir = Path(new_pipeline, "lib") @@ -71,7 +71,7 @@ def test_files_exist_pass_conditional(self): def test_files_exist_fail_conditional(self): new_pipeline = self._make_pipeline_copy() - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load() lib_dir = Path(new_pipeline, "lib") lib_dir.mkdir() @@ -90,7 +90,7 @@ def test_files_exist_pass_conditional_nfschema(self): with open(Path(new_pipeline, "nextflow.config"), "w") as f: f.write(config) - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load() lint_obj.nf_config["manifest.schema"] = "nf-core" results = lint_obj.files_exist() diff --git a/tests/lint/files_unchanged.py b/tests/lint/files_unchanged.py index 601f09b9d..07a722919 100644 --- a/tests/lint/files_unchanged.py +++ b/tests/lint/files_unchanged.py @@ -1,6 +1,6 @@ from pathlib import Path -import nf_core.lint +import nf_core.pipelines.lint def test_files_unchanged_pass(self): @@ -18,7 +18,7 @@ def test_files_unchanged_fail(self): with open(Path(new_pipeline, failing_file), "a") as fh: fh.write("THIS SHOULD NOT BE HERE") - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load() results = lint_obj.files_unchanged() assert len(results["failed"]) > 0 diff --git a/tests/lint/merge_markers.py b/tests/lint/merge_markers.py index 64a62e25c..0e3699e19 100644 --- a/tests/lint/merge_markers.py +++ b/tests/lint/merge_markers.py @@ -1,6 +1,6 @@ import os -import nf_core.lint +import nf_core.pipelines.lint def test_merge_markers_found(self): @@ -13,7 +13,7 @@ def test_merge_markers_found(self): with open(os.path.join(new_pipeline, "main.nf"), "w") as fh: fh.write(main_nf_content) - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = 
nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load() results = lint_obj.merge_markers() diff --git a/tests/lint/multiqc_config.py b/tests/lint/multiqc_config.py index 463d5e765..70c09ae54 100644 --- a/tests/lint/multiqc_config.py +++ b/tests/lint/multiqc_config.py @@ -2,7 +2,7 @@ import yaml -import nf_core.lint +import nf_core.pipelines.lint def test_multiqc_config_exists(self): @@ -10,7 +10,7 @@ def test_multiqc_config_exists(self): # Delete the file new_pipeline = self._make_pipeline_copy() Path(Path(new_pipeline, "assets", "multiqc_config.yml")).unlink() - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load() result = lint_obj.multiqc_config() assert result["failed"] == ["`assets/multiqc_config.yml` not found."] @@ -28,7 +28,7 @@ def test_multiqc_config_ignore(self): with open(Path(new_pipeline, ".nf-core.yml"), "w") as f: yaml.dump(content, f) - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load() result = lint_obj.multiqc_config() assert result["ignored"] == ["`assets/multiqc_config.yml` not found, but it is ignored."] @@ -47,7 +47,7 @@ def test_multiqc_config_missing_report_section_order(self): mqc_yml.pop("report_section_order") with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh: yaml.safe_dump(mqc_yml, fh) - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load() result = lint_obj.multiqc_config() # Reset the file @@ -65,7 +65,7 @@ def test_multiqc_incorrect_export_plots(self): mqc_yml["export_plots"] = False with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh: yaml.safe_dump(mqc_yml, fh) - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load() result = lint_obj.multiqc_config() # Reset the file @@ 
-83,7 +83,7 @@ def test_multiqc_config_report_comment_fail(self): mqc_yml["report_comment"] = "This is a test" with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh: yaml.safe_dump(mqc_yml, fh) - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load() result = lint_obj.multiqc_config() # Reset the file @@ -101,7 +101,7 @@ def test_multiqc_config_report_comment_release_fail(self): mqc_yml_tmp = mqc_yml.copy() with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh: yaml.safe_dump(mqc_yml, fh) - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load() # bump version lint_obj.nf_config["manifest.version"] = "1.0" @@ -116,13 +116,13 @@ def test_multiqc_config_report_comment_release_fail(self): def test_multiqc_config_report_comment_release_succeed(self): """Test that linting fails if the multiqc_config.yml file has a correct report_comment for a release version""" - import nf_core.bump_version + import nf_core.pipelines.bump_version.bump_version new_pipeline = self._make_pipeline_copy() - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load() # bump version using the bump_version function - nf_core.bump_version.bump_pipeline_version(lint_obj, "1.0") + nf_core.pipelines.bump_version.bump_version.bump_pipeline_version(lint_obj, "1.0") # lint again lint_obj._load() result = lint_obj.multiqc_config() diff --git a/tests/lint/nextflow_config.py b/tests/lint/nextflow_config.py index 4bd795944..b90298f54 100644 --- a/tests/lint/nextflow_config.py +++ b/tests/lint/nextflow_config.py @@ -2,8 +2,8 @@ import re from pathlib import Path -import nf_core.lint import nf_core.pipelines.create.create +import nf_core.pipelines.lint def test_nextflow_config_example_pass(self): @@ -17,7 +17,7 @@ def 
test_nextflow_config_example_pass(self): def test_nextflow_config_bad_name_fail(self): """Tests that config variable existence test fails with bad pipeline name""" new_pipeline = self._make_pipeline_copy() - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load_pipeline_config() lint_obj.nf_config["manifest.name"] = "bad_name" @@ -29,7 +29,7 @@ def test_nextflow_config_bad_name_fail(self): def test_nextflow_config_dev_in_release_mode_failed(self): """Tests that config variable existence test fails with dev version in release mode""" new_pipeline = self._make_pipeline_copy() - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load_pipeline_config() lint_obj.release_mode = True @@ -49,7 +49,7 @@ def test_nextflow_config_missing_test_profile_failed(self): fail_content = re.sub(r"\btest\b", "testfail", content) with open(nf_conf_file, "w") as f: f.write(fail_content) - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load_pipeline_config() result = lint_obj.nextflow_config() assert len(result["failed"]) > 0 @@ -59,7 +59,7 @@ def test_nextflow_config_missing_test_profile_failed(self): def test_default_values_match(self): """Test that the default values in nextflow.config match the default values defined in the nextflow_schema.json.""" new_pipeline = self._make_pipeline_copy() - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load_pipeline_config() result = lint_obj.nextflow_config() assert len(result["failed"]) == 0 @@ -85,7 +85,7 @@ def test_default_values_fail(self): fail_content = re.sub(r'"default": "128.GB"', '"default": "18.GB"', content) with open(nf_schema_file, "w") as f: f.write(fail_content) - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = 
nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load_pipeline_config() result = lint_obj.nextflow_config() assert len(result["failed"]) == 2 @@ -106,7 +106,7 @@ def test_catch_params_assignment_in_main_nf(self): main_nf_file = Path(new_pipeline) / "main.nf" with open(main_nf_file, "a") as f: f.write("params.max_time = 42") - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load_pipeline_config() result = lint_obj.nextflow_config() assert len(result["failed"]) == 1 @@ -123,7 +123,7 @@ def test_allow_params_reference_in_main_nf(self): main_nf_file = Path(new_pipeline) / "main.nf" with open(main_nf_file, "a") as f: f.write("params.max_time == 42") - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load_pipeline_config() result = lint_obj.nextflow_config() assert len(result["failed"]) == 0 @@ -138,7 +138,7 @@ def test_default_values_ignored(self): f.write( "repository_type: pipeline\nlint:\n nextflow_config:\n - config_defaults:\n - params.max_cpus\n" ) - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load_pipeline_config() lint_obj._load_lint_config() result = lint_obj.nextflow_config() @@ -172,7 +172,7 @@ def test_default_values_float(self): with open(nf_schema_file, "w") as f: f.write(fail_content) - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load_pipeline_config() result = lint_obj.nextflow_config() assert len(result["failed"]) == 0 @@ -202,7 +202,7 @@ def test_default_values_float_fail(self): with open(nf_schema_file, "w") as f: f.write(fail_content) - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load_pipeline_config() result = lint_obj.nextflow_config() diff --git 
a/tests/lint/nfcore_yml.py b/tests/lint/nfcore_yml.py index 9d745a634..94d2870e1 100644 --- a/tests/lint/nfcore_yml.py +++ b/tests/lint/nfcore_yml.py @@ -1,8 +1,8 @@ import re from pathlib import Path -import nf_core.lint import nf_core.pipelines.create +import nf_core.pipelines.lint def test_nfcore_yml_pass(self): @@ -26,7 +26,7 @@ def test_nfcore_yml_fail_repo_type(self): new_content = content.replace("repository_type: pipeline", "repository_type: foo") with open(nf_core_yml, "w") as fh: fh.write(new_content) - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load() results = lint_obj.nfcore_yml() assert "Repository type in `.nf-core.yml` is not valid." in str(results["failed"]) @@ -44,7 +44,7 @@ def test_nfcore_yml_fail_nfcore_version(self): new_content = re.sub(r"nf_core_version:.+", "nf_core_version: foo", content) with open(nf_core_yml, "w") as fh: fh.write(new_content) - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load() results = lint_obj.nfcore_yml() assert "nf-core version in `.nf-core.yml` is not set to the latest version." 
in str(results["warned"]) diff --git a/tests/lint/template_strings.py b/tests/lint/template_strings.py index 50c956b21..2db9e20a3 100644 --- a/tests/lint/template_strings.py +++ b/tests/lint/template_strings.py @@ -1,8 +1,8 @@ import subprocess from pathlib import Path -import nf_core.lint import nf_core.pipelines.create +import nf_core.pipelines.lint def test_template_strings(self): @@ -13,7 +13,7 @@ def test_template_strings(self): with open(txt_file, "w") as f: f.write("my {{ template_string }}") subprocess.check_output(["git", "add", "docs"], cwd=new_pipeline) - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load() result = lint_obj.template_strings() assert len(result["failed"]) == 1 @@ -27,7 +27,7 @@ def test_template_strings_ignored(self): nf_core_yml = Path(new_pipeline) / ".nf-core.yml" with open(nf_core_yml, "w") as f: f.write("repository_type: pipeline\nlint:\n template_strings: False") - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load() lint_obj._lint_pipeline() assert len(lint_obj.failed) == 0 @@ -46,7 +46,7 @@ def test_template_strings_ignore_file(self): nf_core_yml = Path(new_pipeline) / ".nf-core.yml" with open(nf_core_yml, "w") as f: f.write("repository_type: pipeline\nlint:\n template_strings:\n - docs/test.txt") - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load() result = lint_obj.template_strings() assert len(result["failed"]) == 0 diff --git a/tests/lint/version_consistency.py b/tests/lint/version_consistency.py index 4763020fb..1be57969f 100644 --- a/tests/lint/version_consistency.py +++ b/tests/lint/version_consistency.py @@ -1,11 +1,11 @@ -import nf_core.lint import nf_core.pipelines.create.create +import nf_core.pipelines.lint def test_version_consistency(self): """Tests that config variable existence test 
fails with bad pipeline name""" new_pipeline = self._make_pipeline_copy() - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load_pipeline_config() lint_obj.nextflow_config() diff --git a/tests/test_bump_version.py b/tests/test_bump_version.py index 059e18e92..ad7efcd1f 100644 --- a/tests/test_bump_version.py +++ b/tests/test_bump_version.py @@ -4,7 +4,7 @@ import yaml -import nf_core.bump_version +import nf_core.pipelines.bump_version.bump_version import nf_core.pipelines.create.create import nf_core.utils @@ -24,7 +24,7 @@ def test_bump_pipeline_version(datafiles, tmp_path): pipeline_obj._load() # Bump the version number - nf_core.bump_version.bump_pipeline_version(pipeline_obj, "1.1") + nf_core.pipelines.bump_version.bump_version.bump_pipeline_version(pipeline_obj, "1.1") new_pipeline_obj = nf_core.utils.Pipeline(test_pipeline_dir) # Check nextflow.config @@ -44,7 +44,7 @@ def test_dev_bump_pipeline_version(datafiles, tmp_path): pipeline_obj._load() # Bump the version number - nf_core.bump_version.bump_pipeline_version(pipeline_obj, "v1.2dev") + nf_core.pipelines.bump_version.bump_version.bump_pipeline_version(pipeline_obj, "v1.2dev") new_pipeline_obj = nf_core.utils.Pipeline(test_pipeline_dir) # Check the pipeline config @@ -65,7 +65,7 @@ def test_bump_nextflow_version(datafiles, tmp_path): # Bump the version number to a specific version, preferably one # we're not already on version = "22.04.3" - nf_core.bump_version.bump_nextflow_version(pipeline_obj, version) + nf_core.pipelines.bump_version.bump_version.bump_nextflow_version(pipeline_obj, version) new_pipeline_obj = nf_core.utils.Pipeline(test_pipeline_dir) # Check nextflow.config diff --git a/tests/test_cli.py b/tests/test_cli.py index 76d167101..95a3d6ab6 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -283,7 +283,7 @@ def test_create_app(self, mock_create): mock_create.return_value.run.assert_called_once() 
@mock.patch("nf_core.utils.is_pipeline_directory") - @mock.patch("nf_core.lint.run_linting") + @mock.patch("nf_core.pipelines.lint.run_linting") def test_lint(self, mock_lint, mock_is_pipeline): """Test nf-core lint""" mock_lint_results = (mock.MagicMock, mock.MagicMock, mock.MagicMock) @@ -353,7 +353,7 @@ def test_lint_dir_is_not_pipeline(self, mock_is_pipeline): assert captured_logs.records[-1].levelname == "ERROR" @mock.patch("nf_core.utils.is_pipeline_directory") - @mock.patch("nf_core.lint.run_linting") + @mock.patch("nf_core.pipelines.lint.run_linting") def test_lint_log_assert_error(self, mock_lint, mock_is_pipeline): """Test nf-core lint logs assertion errors""" error_txt = "AssertionError has been raised" @@ -368,7 +368,7 @@ def test_lint_log_assert_error(self, mock_lint, mock_is_pipeline): assert captured_logs.records[-1].levelname == "CRITICAL" @mock.patch("nf_core.utils.is_pipeline_directory") - @mock.patch("nf_core.lint.run_linting") + @mock.patch("nf_core.pipelines.lint.run_linting") def test_lint_log_user_warning(self, mock_lint, mock_is_pipeline): """Test nf-core lint logs assertion errors""" error_txt = "AssertionError has been raised" diff --git a/tests/test_lint.py b/tests/test_lint.py index aaf833080..c3a03c579 100644 --- a/tests/test_lint.py +++ b/tests/test_lint.py @@ -9,8 +9,8 @@ import yaml -import nf_core.lint import nf_core.pipelines.create.create +import nf_core.pipelines.lint from .utils import with_temporary_folder @@ -32,7 +32,7 @@ def setUp(self): self.create_obj.init_pipeline() # Base lint object on this directory - self.lint_obj = nf_core.lint.PipelineLint(self.test_pipeline_dir) + self.lint_obj = nf_core.pipelines.lint.PipelineLint(self.test_pipeline_dir) def tearDown(self): """Clean up temporary files and folders""" @@ -56,7 +56,7 @@ def test_run_linting_function(self): We don't really check any of this code as it's just a series of function calls and we're testing each of those individually. 
This is mostly to check for syntax errors.""" - nf_core.lint.run_linting(self.test_pipeline_dir, False) + nf_core.pipelines.lint.run_linting(self.test_pipeline_dir, False) def test_init_pipeline_lint(self): """Simply create a PipelineLint object. @@ -64,7 +64,7 @@ def test_init_pipeline_lint(self): This checks that all of the lint test imports are working properly, we also check that the git sha was found and that the release flag works properly """ - lint_obj = nf_core.lint.PipelineLint(self.test_pipeline_dir, True) + lint_obj = nf_core.pipelines.lint.PipelineLint(self.test_pipeline_dir, True) # Tests that extra test is added for release mode assert "version_consistency" in lint_obj.lint_tests @@ -82,7 +82,7 @@ def test_load_lint_config_ignore_all_tests(self): # Make a copy of the test pipeline and create a lint object new_pipeline = self._make_pipeline_copy() - lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) # Make a config file listing all test names config_dict = {"lint": {test_name: False for test_name in lint_obj.lint_tests}} @@ -167,7 +167,7 @@ def test_sphinx_md_files(self): existing_docs.append(os.path.join(docs_basedir, fn)) # Check .md files against each test name - lint_obj = nf_core.lint.PipelineLint("", True) + lint_obj = nf_core.pipelines.lint.PipelineLint("", True) for test_name in lint_obj.lint_tests: fn = os.path.join(docs_basedir, f"{test_name}.md") assert os.path.exists(fn), f"Could not find lint docs .md file: {fn}" @@ -259,40 +259,40 @@ def test_sphinx_md_files(self): # def test_critical_missingfiles_example(self): # """Tests for missing nextflow config and main.nf files""" -# lint_obj = nf_core.lint.run_linting(PATH_CRITICAL_EXAMPLE, False) +# lint_obj = nf_core.pipelines.lint.run_linting(PATH_CRITICAL_EXAMPLE, False) # assert len(lint_obj.failed) > 0 # # def test_failing_missingfiles_example(self): # """Tests for missing files like Dockerfile or LICENSE""" -# lint_obj = 
nf_core.lint.PipelineLint(PATH_FAILING_EXAMPLE) +# lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_FAILING_EXAMPLE) # lint_obj.check_files_exist() # expectations = {"failed": 6, "warned": 2, "passed": 14} # self.assess_lint_status(lint_obj, **expectations) # # def test_mit_licence_example_pass(self): # """Tests that MIT test works with good MIT licences""" -# good_lint_obj = nf_core.lint.PipelineLint(PATH_CRITICAL_EXAMPLE) +# good_lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_CRITICAL_EXAMPLE) # good_lint_obj.check_licence() # expectations = {"failed": 0, "warned": 0, "passed": 1} # self.assess_lint_status(good_lint_obj, **expectations) # # def test_mit_license_example_with_failed(self): # """Tests that MIT test works with bad MIT licences""" -# bad_lint_obj = nf_core.lint.PipelineLint(PATH_FAILING_EXAMPLE) +# bad_lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_FAILING_EXAMPLE) # bad_lint_obj.check_licence() # expectations = {"failed": 1, "warned": 0, "passed": 0} # self.assess_lint_status(bad_lint_obj, **expectations) # # def test_config_variable_example_pass(self): # """Tests that config variable existence test works with good pipeline example""" -# good_lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# good_lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE) # good_lint_obj.check_nextflow_config() # expectations = {"failed": 0, "warned": 1, "passed": 34} # self.assess_lint_status(good_lint_obj, **expectations) # # def test_config_variable_example_with_failed(self): # """Tests that config variable existence test fails with bad pipeline example""" -# bad_lint_obj = nf_core.lint.PipelineLint(PATH_FAILING_EXAMPLE) +# bad_lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_FAILING_EXAMPLE) # bad_lint_obj.check_nextflow_config() # expectations = {"failed": 19, "warned": 6, "passed": 10} # self.assess_lint_status(bad_lint_obj, **expectations) @@ -300,28 +300,28 @@ def test_sphinx_md_files(self): # 
@pytest.mark.xfail(raises=AssertionError, strict=True) # def test_config_variable_error(self): # """Tests that config variable existence test falls over nicely with nextflow can't run""" -# bad_lint_obj = nf_core.lint.PipelineLint("/non/existant/path") +# bad_lint_obj = nf_core.pipelines.lint.PipelineLint("/non/existant/path") # bad_lint_obj.check_nextflow_config() # # # def test_wrong_license_examples_with_failed(self): # """Tests for checking the license test behavior""" # for example in PATHS_WRONG_LICENSE_EXAMPLE: -# lint_obj = nf_core.lint.PipelineLint(example) +# lint_obj = nf_core.pipelines.lint.PipelineLint(example) # lint_obj.check_licence() # expectations = {"failed": 1, "warned": 0, "passed": 0} # self.assess_lint_status(lint_obj, **expectations) # # def test_missing_license_example(self): # """Tests for missing license behavior""" -# lint_obj = nf_core.lint.PipelineLint(PATH_MISSING_LICENSE_EXAMPLE) +# lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_MISSING_LICENSE_EXAMPLE) # lint_obj.check_licence() # expectations = {"failed": 1, "warned": 0, "passed": 0} # self.assess_lint_status(lint_obj, **expectations) # # def test_readme_pass(self): # """Tests that the pipeline README file checks work with a good example""" -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE) # lint_obj.minNextflowVersion = "20.04.0" # lint_obj.files = ["environment.yml"] # lint_obj.check_readme() @@ -330,7 +330,7 @@ def test_sphinx_md_files(self): # # def test_readme_warn(self): # """Tests that the pipeline README file checks fail """ -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE) # lint_obj.minNextflowVersion = "0.28.0" # lint_obj.check_readme() # expectations = {"failed": 1, "warned": 0, "passed": 0} @@ -338,7 +338,7 @@ def test_sphinx_md_files(self): # # def test_readme_fail(self): # """Tests that the 
pipeline README file checks give warnings with a bad example""" -# lint_obj = nf_core.lint.PipelineLint(PATH_FAILING_EXAMPLE) +# lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_FAILING_EXAMPLE) # lint_obj.files = ["environment.yml"] # lint_obj.check_readme() # expectations = {"failed": 0, "warned": 2, "passed": 0} @@ -346,7 +346,7 @@ def test_sphinx_md_files(self): # # def test_dockerfile_pass(self): # """Tests if a valid Dockerfile passes the lint checks""" -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE) # lint_obj.files = ["Dockerfile"] # lint_obj.check_docker() # expectations = {"failed": 0, "warned": 0, "passed": 1} @@ -354,7 +354,7 @@ def test_sphinx_md_files(self): # # def test_version_consistency_pass(self): # """Tests the workflow version and container version sucessfully""" -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE) # lint_obj.config["manifest.version"] = "0.4" # lint_obj.config["process.container"] = "nfcore/tools:0.4" # lint_obj.check_version_consistency() @@ -366,7 +366,7 @@ def test_sphinx_md_files(self): # and simulate wrong release tag""" # os.environ["GITHUB_REF"] = "refs/tags/0.5" # os.environ["GITHUB_REPOSITORY"] = "nf-core/testpipeline" -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE) # lint_obj.config["manifest.version"] = "0.4" # lint_obj.config["process.container"] = "nfcore/tools:0.4" # lint_obj.check_version_consistency() @@ -378,7 +378,7 @@ def test_sphinx_md_files(self): # and simulate wrong release tag""" # os.environ["GITHUB_REF"] = "refs/tags/0.5dev" # os.environ["GITHUB_REPOSITORY"] = "nf-core/testpipeline" -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE) # 
lint_obj.config["manifest.version"] = "0.4" # lint_obj.config["process.container"] = "nfcore/tools:0.4" # lint_obj.check_version_consistency() @@ -390,7 +390,7 @@ def test_sphinx_md_files(self): # and simulate wrong missing docker version tag""" # os.environ["GITHUB_REF"] = "refs/tags/0.4" # os.environ["GITHUB_REPOSITORY"] = "nf-core/testpipeline" -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE) # lint_obj.config["manifest.version"] = "0.4" # lint_obj.config["process.container"] = "nfcore/tools" # lint_obj.check_version_consistency() @@ -402,7 +402,7 @@ def test_sphinx_md_files(self): # and simulate correct release tag""" # os.environ["GITHUB_REF"] = "refs/tags/0.4" # os.environ["GITHUB_REPOSITORY"] = "nf-core/testpipeline" -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE) # lint_obj.config["manifest.version"] = "0.4" # lint_obj.config["process.container"] = "nfcore/tools:0.4" # lint_obj.check_version_consistency() @@ -411,7 +411,7 @@ def test_sphinx_md_files(self): # # def test_conda_env_pass(self): # """ Tests the conda environment config checks with a working example """ -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE) # lint_obj.files = ["environment.yml"] # with open(os.path.join(PATH_WORKING_EXAMPLE, "environment.yml"), "r") as fh: # lint_obj.conda_config = yaml.safe_load(fh) @@ -423,7 +423,7 @@ def test_sphinx_md_files(self): # # def test_conda_env_fail(self): # """ Tests the conda environment config fails with a bad example """ -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE) # lint_obj.files = ["environment.yml"] # with open(os.path.join(PATH_WORKING_EXAMPLE, "environment.yml"), "r") as fh: # lint_obj.conda_config = 
yaml.safe_load(fh) @@ -441,20 +441,20 @@ def test_sphinx_md_files(self): # # Define the behaviour of the request get mock # mock_get.side_effect = requests.exceptions.Timeout() # # Now do the test -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE) # lint_obj.conda_config["channels"] = ["bioconda"] # lint_obj.check_anaconda_package("multiqc=1.6") # # def test_conda_env_skip(self): # """ Tests the conda environment config is skipped when not needed """ -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE) # lint_obj.check_conda_env_yaml() # expectations = {"failed": 0, "warned": 0, "passed": 0} # self.assess_lint_status(lint_obj, **expectations) # # def test_conda_dockerfile_pass(self): # """ Tests the conda Dockerfile test works with a working example """ -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE) # lint_obj.version = "1.11" # lint_obj.files = ["environment.yml", "Dockerfile"] # with open(os.path.join(PATH_WORKING_EXAMPLE, "Dockerfile"), "r") as fh: @@ -466,7 +466,7 @@ def test_sphinx_md_files(self): # # def test_conda_dockerfile_fail(self): # """ Tests the conda Dockerfile test fails with a bad example """ -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE) # lint_obj.version = "1.11" # lint_obj.files = ["environment.yml", "Dockerfile"] # lint_obj.conda_config["name"] = "nf-core-tools-0.4" @@ -477,14 +477,14 @@ def test_sphinx_md_files(self): # # def test_conda_dockerfile_skip(self): # """ Tests the conda Dockerfile test is skipped when not needed """ -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE) # lint_obj.check_conda_dockerfile() # expectations = 
{"failed": 0, "warned": 0, "passed": 0} # self.assess_lint_status(lint_obj, **expectations) # # def test_pip_no_version_fail(self): # """ Tests the pip dependency version definition is present """ -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE) # lint_obj.files = ["environment.yml"] # lint_obj.pipeline_name = "tools" # lint_obj.config["manifest.version"] = "0.4" @@ -495,7 +495,7 @@ def test_sphinx_md_files(self): # # def test_pip_package_not_latest_warn(self): # """ Tests the pip dependency version definition is present """ -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE) # lint_obj.files = ["environment.yml"] # lint_obj.pipeline_name = "tools" # lint_obj.config["manifest.version"] = "0.4" @@ -511,7 +511,7 @@ def test_sphinx_md_files(self): # # Define the behaviour of the request get mock # mock_get.side_effect = requests.exceptions.Timeout() # # Now do the test -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE) # lint_obj.files = ["environment.yml"] # lint_obj.pipeline_name = "tools" # lint_obj.config["manifest.version"] = "0.4" @@ -527,7 +527,7 @@ def test_sphinx_md_files(self): # # Define the behaviour of the request get mock # mock_get.side_effect = requests.exceptions.ConnectionError() # # Now do the test -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE) # lint_obj.files = ["environment.yml"] # lint_obj.pipeline_name = "tools" # lint_obj.config["manifest.version"] = "0.4" @@ -538,7 +538,7 @@ def test_sphinx_md_files(self): # # def test_pip_dependency_fail(self): # """ Tests the PyPi API package information query """ -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj = 
nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE) # lint_obj.files = ["environment.yml"] # lint_obj.pipeline_name = "tools" # lint_obj.config["manifest.version"] = "0.4" @@ -551,7 +551,7 @@ def test_sphinx_md_files(self): # """Tests that linting fails, if conda dependency # package version is not available on Anaconda. # """ -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE) # lint_obj.files = ["environment.yml"] # lint_obj.pipeline_name = "tools" # lint_obj.config["manifest.version"] = "0.4" @@ -564,7 +564,7 @@ def test_sphinx_md_files(self): # """Tests that linting fails, if conda dependency # package version is not available on Anaconda. # """ -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE) # lint_obj.files = ["environment.yml"] # lint_obj.pipeline_name = "tools" # lint_obj.config["manifest.version"] = "0.4" @@ -575,8 +575,8 @@ def test_sphinx_md_files(self): # # def test_pipeline_name_pass(self): # """Tests pipeline name good pipeline example: lower case, no punctuation""" -# # good_lint_obj = nf_core.lint.run_linting(PATH_WORKING_EXAMPLE) -# good_lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# # good_lint_obj = nf_core.pipelines.lint.run_linting(PATH_WORKING_EXAMPLE) +# good_lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE) # good_lint_obj.pipeline_name = "tools" # good_lint_obj.check_pipeline_name() # expectations = {"failed": 0, "warned": 0, "passed": 1} @@ -584,7 +584,7 @@ def test_sphinx_md_files(self): # # def test_pipeline_name_critical(self): # """Tests that warning is returned for pipeline not adhering to naming convention""" -# critical_lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) +# critical_lint_obj = nf_core.pipelines.lint.PipelineLint(PATH_WORKING_EXAMPLE) # critical_lint_obj.pipeline_name = "Tools123" # 
critical_lint_obj.check_pipeline_name() # expectations = {"failed": 0, "warned": 1, "passed": 0} diff --git a/tests/test_lint_utils.py b/tests/test_lint_utils.py index 2b624a3ec..a4b7caf30 100644 --- a/tests/test_lint_utils.py +++ b/tests/test_lint_utils.py @@ -3,7 +3,7 @@ import git import pytest -import nf_core.lint_utils +import nf_core.pipelines.lint_utils JSON_WITH_SYNTAX_ERROR = "{'a':1, 1}" JSON_MALFORMED = "{'a':1}" @@ -50,16 +50,16 @@ def git_dir_with_json_syntax_error(temp_git_repo): def test_run_prettier_on_formatted_file(formatted_json): - nf_core.lint_utils.run_prettier_on_file(formatted_json) + nf_core.pipelines.lint_utils.run_prettier_on_file(formatted_json) assert formatted_json.read_text() == JSON_FORMATTED def test_run_prettier_on_malformed_file(malformed_json): - nf_core.lint_utils.run_prettier_on_file(malformed_json) + nf_core.pipelines.lint_utils.run_prettier_on_file(malformed_json) assert malformed_json.read_text() == JSON_FORMATTED def test_run_prettier_on_syntax_error_file(syntax_error_json, caplog): - nf_core.lint_utils.run_prettier_on_file(syntax_error_json) + nf_core.pipelines.lint_utils.run_prettier_on_file(syntax_error_json) expected_critical_log = "SyntaxError: Unexpected token (1:10)" assert expected_critical_log in caplog.text diff --git a/tests/test_sync.py b/tests/test_sync.py index 40e68dc7d..0cacbf544 100644 --- a/tests/test_sync.py +++ b/tests/test_sync.py @@ -12,7 +12,7 @@ import pytest import nf_core.pipelines.create.create -import nf_core.sync +import nf_core.pipelines.sync.sync from .utils import with_temporary_folder @@ -46,8 +46,8 @@ def tearDown(self): @with_temporary_folder def test_inspect_sync_dir_notgit(self, tmp_dir): """Try syncing an empty directory""" - psync = nf_core.sync.PipelineSync(tmp_dir) - with pytest.raises(nf_core.sync.SyncExceptionError) as exc_info: + psync = nf_core.pipelines.sync.sync.PipelineSync(tmp_dir) + with pytest.raises(nf_core.pipelines.sync.sync.SyncExceptionError) as exc_info: 
psync.inspect_sync_dir() assert "does not appear to be a git repository" in exc_info.value.args[0] @@ -57,9 +57,9 @@ def test_inspect_sync_dir_dirty(self): test_fn = Path(self.pipeline_dir) / "uncommitted" test_fn.touch() # Try to sync, check we halt with the right error - psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.sync.PipelineSync(self.pipeline_dir) try: - with pytest.raises(nf_core.sync.SyncExceptionError) as exc_info: + with pytest.raises(nf_core.pipelines.sync.sync.SyncExceptionError) as exc_info: psync.inspect_sync_dir() assert exc_info.value.args[0].startswith("Uncommitted changes found in pipeline directory!") finally: @@ -68,8 +68,8 @@ def test_inspect_sync_dir_dirty(self): def test_get_wf_config_no_branch(self): """Try getting a workflow config when the branch doesn't exist""" # Try to sync, check we halt with the right error - psync = nf_core.sync.PipelineSync(self.pipeline_dir, from_branch="foo") - with pytest.raises(nf_core.sync.SyncExceptionError) as exc_info: + psync = nf_core.pipelines.sync.sync.PipelineSync(self.pipeline_dir, from_branch="foo") + with pytest.raises(nf_core.pipelines.sync.sync.SyncExceptionError) as exc_info: psync.inspect_sync_dir() psync.get_wf_config() assert exc_info.value.args[0] == "Branch `foo` not found!" 
@@ -77,9 +77,9 @@ def test_get_wf_config_no_branch(self): def test_get_wf_config_missing_required_config(self): """Try getting a workflow config, then make it miss a required config option""" # Try to sync, check we halt with the right error - psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.sync.PipelineSync(self.pipeline_dir) psync.required_config_vars = ["fakethisdoesnotexist"] - with pytest.raises(nf_core.sync.SyncExceptionError) as exc_info: + with pytest.raises(nf_core.pipelines.sync.sync.SyncExceptionError) as exc_info: psync.inspect_sync_dir() psync.get_wf_config() # Check that we did actually get some config back @@ -89,26 +89,26 @@ def test_get_wf_config_missing_required_config(self): def test_checkout_template_branch(self): """Try checking out the TEMPLATE branch of the pipeline""" - psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() psync.checkout_template_branch() def test_checkout_template_branch_no_template(self): """Try checking out the TEMPLATE branch of the pipeline when it does not exist""" - psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() psync.repo.delete_head("TEMPLATE") - with pytest.raises(nf_core.sync.SyncExceptionError) as exc_info: + with pytest.raises(nf_core.pipelines.sync.sync.SyncExceptionError) as exc_info: psync.checkout_template_branch() assert exc_info.value.args[0] == "Could not check out branch 'origin/TEMPLATE' or 'TEMPLATE'" def test_delete_template_branch_files(self): """Confirm that we can delete all files in the TEMPLATE branch""" - psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() psync.checkout_template_branch() @@ -118,7 +118,7 @@ 
def test_delete_template_branch_files(self): def test_create_template_pipeline(self): """Confirm that we can delete all files in the TEMPLATE branch""" # First, delete all the files - psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() psync.checkout_template_branch() @@ -132,7 +132,7 @@ def test_create_template_pipeline(self): def test_commit_template_changes_nochanges(self): """Try to commit the TEMPLATE branch, but no changes were made""" # Check out the TEMPLATE branch but skip making the new template etc. - psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() psync.checkout_template_branch() @@ -142,7 +142,7 @@ def test_commit_template_changes_nochanges(self): def test_commit_template_changes_changes(self): """Try to commit the TEMPLATE branch, but no changes were made""" # Check out the TEMPLATE branch but skip making the new template etc. - psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() psync.checkout_template_branch() @@ -159,7 +159,7 @@ def test_commit_template_changes_changes(self): def test_push_template_branch_error(self): """Try pushing the changes, but without a remote (should fail)""" # Check out the TEMPLATE branch but skip making the new template etc. 
- psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() psync.checkout_template_branch() @@ -168,13 +168,13 @@ def test_push_template_branch_error(self): test_fn.touch() psync.commit_template_changes() # Try to push changes - with pytest.raises(nf_core.sync.PullRequestExceptionError) as exc_info: + with pytest.raises(nf_core.pipelines.sync.sync.PullRequestExceptionError) as exc_info: psync.push_template_branch() assert exc_info.value.args[0].startswith("Could not push TEMPLATE branch") def test_create_merge_base_branch(self): """Try creating a merge base branch""" - psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() @@ -193,7 +193,7 @@ def test_create_merge_base_branch_thrice(self): end, so it is needed to call it a third time to make sure this is picked up. 
""" - psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() @@ -206,7 +206,7 @@ def test_create_merge_base_branch_thrice(self): def test_push_merge_branch(self): """Try pushing merge branch""" - psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() psync.repo.create_remote("origin", self.remote_path) @@ -218,12 +218,12 @@ def test_push_merge_branch(self): def test_push_merge_branch_without_create_branch(self): """Try pushing merge branch without creating first""" - psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() psync.repo.create_remote("origin", self.remote_path) - with pytest.raises(nf_core.sync.PullRequestExceptionError) as exc_info: + with pytest.raises(nf_core.pipelines.sync.sync.PullRequestExceptionError) as exc_info: psync.push_merge_branch() assert exc_info.value.args[0].startswith(f"Could not push branch '{psync.merge_branch}'") @@ -313,7 +313,7 @@ def json(self): @mock.patch("nf_core.utils.gh_api.post", side_effect=mocked_requests_post) def test_make_pull_request_success(self, mock_post, mock_get): """Try making a PR - successful response""" - psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.sync.PipelineSync(self.pipeline_dir) psync.gh_api.get = mock_get psync.gh_api.post = mock_post psync.gh_username = "no_existing_pr" @@ -326,13 +326,13 @@ def test_make_pull_request_success(self, mock_post, mock_get): @mock.patch("nf_core.utils.gh_api.post", side_effect=mocked_requests_post) def test_make_pull_request_bad_response(self, mock_post, mock_get): """Try making a PR and getting a 404 error""" - psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync = 
nf_core.pipelines.sync.sync.PipelineSync(self.pipeline_dir) psync.gh_api.get = mock_get psync.gh_api.post = mock_post psync.gh_username = "bad_url" psync.gh_repo = "bad_url/response" os.environ["GITHUB_AUTH_TOKEN"] = "test" - with pytest.raises(nf_core.sync.PullRequestExceptionError) as exc_info: + with pytest.raises(nf_core.pipelines.sync.sync.PullRequestExceptionError) as exc_info: psync.make_pull_request() assert exc_info.value.args[0].startswith( "Something went badly wrong - GitHub API PR failed - got return code 404" @@ -341,7 +341,7 @@ def test_make_pull_request_bad_response(self, mock_post, mock_get): @mock.patch("nf_core.utils.gh_api.get", side_effect=mocked_requests_get) def test_close_open_template_merge_prs(self, mock_get): """Try closing all open prs""" - psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() psync.gh_api.get = mock_get @@ -360,7 +360,7 @@ def test_close_open_template_merge_prs(self, mock_get): @mock.patch("nf_core.utils.gh_api.post", side_effect=mocked_requests_post) @mock.patch("nf_core.utils.gh_api.patch", side_effect=mocked_requests_patch) def test_close_open_pr(self, mock_patch, mock_post): - psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() psync.gh_api.post = mock_post @@ -383,7 +383,7 @@ def test_close_open_pr(self, mock_patch, mock_post): @mock.patch("nf_core.utils.gh_api.post", side_effect=mocked_requests_post) @mock.patch("nf_core.utils.gh_api.patch", side_effect=mocked_requests_patch) def test_close_open_pr_fail(self, mock_patch, mock_post): - psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() psync.gh_api.post = mock_post @@ -405,7 +405,7 @@ def test_close_open_pr_fail(self, 
mock_patch, mock_post): def test_reset_target_dir(self): """Try resetting target pipeline directory""" - psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() @@ -417,12 +417,12 @@ def test_reset_target_dir(self): def test_reset_target_dir_fake_branch(self): """Try resetting target pipeline directory but original branch does not exist""" - psync = nf_core.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() psync.original_branch = "fake_branch" - with pytest.raises(nf_core.sync.SyncExceptionError) as exc_info: + with pytest.raises(nf_core.pipelines.sync.sync.SyncExceptionError) as exc_info: psync.reset_target_dir() assert exc_info.value.args[0].startswith("Could not reset to original branch `fake_branch`") From d6799d55b91136ed128d340f4484001e41dbcb9d Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Wed, 15 May 2024 08:16:22 +0000 Subject: [PATCH 164/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 16262bd1c..60e632bd4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -21,6 +21,7 @@ - Update output of generation script for API docs to new structure ([#2988](https://github.com/nf-core/tools/pull/2988)) - Add no clobber and put bash options on their own line ([#2991](https://github.com/nf-core/tools/pull/2991)) - update minimal textual version and snapshots ([#2998](https://github.com/nf-core/tools/pull/2998)) +- move pipeline subcommands for v3.0 ([#2983](https://github.com/nf-core/tools/pull/2983)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From bc1d3dbebd2f3ce0ae1367227e26d64ac243d81c Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 15 May 2024 12:35:32 +0200 Subject: [PATCH 165/737] more pipeline 
commands path changes to fix tests --- .github/ISSUE_TEMPLATE/bug_report.yml | 2 +- .github/actions/create-lint-wf/action.yml | 18 ++++++++--------- .../create-test-lint-wf-template.yml | 16 +++++++-------- CONTRIBUTING.md | 4 ++-- docs/api/_src/index.md | 7 ++++--- nf_core/__main__.py | 1 - .../pipeline-template/.github/CONTRIBUTING.md | 4 ++-- .../.github/PULL_REQUEST_TEMPLATE.md | 2 +- .../.github/workflows/linting.yml | 6 +++--- nf_core/pipelines/lint/__init__.py | 2 +- nf_core/pipelines/lint/files_exist.py | 2 +- nf_core/pipelines/lint/files_unchanged.py | 2 +- nf_core/pipelines/lint/nextflow_config.py | 2 +- nf_core/pipelines/lint/schema_lint.py | 2 +- nf_core/pipelines/lint/version_consistency.py | 2 +- nf_core/pipelines/lint_utils.py | 2 +- nf_core/pipelines/sync/sync.py | 2 +- nf_core/utils.py | 2 +- tests/test_cli.py | 20 +++++++++---------- tests/test_sync.py | 2 +- 20 files changed, 50 insertions(+), 50 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml index 5043b37ac..5b627a145 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.yml +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -24,7 +24,7 @@ body: description: Steps to reproduce the behaviour. Please paste the command and output from your terminal. render: console placeholder: | - $ nf-core lint ... + $ nf-core pipelines lint ... 
Some output where something broke diff --git a/.github/actions/create-lint-wf/action.yml b/.github/actions/create-lint-wf/action.yml index 9052c90dd..8760901db 100644 --- a/.github/actions/create-lint-wf/action.yml +++ b/.github/actions/create-lint-wf/action.yml @@ -30,9 +30,9 @@ runs: nf-core --log-file log.txt pipelines create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" # Try syncing it before we change anything - - name: nf-core sync + - name: nf-core pipelines sync shell: bash - run: nf-core --log-file log.txt sync --dir nf-core-testpipeline/ + run: nf-core --log-file log.txt pipelines sync --dir nf-core-testpipeline/ working-directory: create-lint-wf # Run code style linting @@ -59,20 +59,20 @@ runs: run: find nf-core-testpipeline -type f -exec sed -i 's/zenodo.XXXXXX/zenodo.123456/g' {} \; working-directory: create-lint-wf - # Run nf-core linting - - name: nf-core lint + # Run nf-core pipelines linting + - name: nf-core pipelines lint shell: bash - run: nf-core --verbose --log-file log.txt --hide-progress lint --dir nf-core-testpipeline --fail-ignored --fail-warned + run: nf-core --verbose --log-file log.txt --hide-progress pipelines lint --dir nf-core-testpipeline --fail-ignored --fail-warned working-directory: create-lint-wf - - name: nf-core bump-version to release + - name: nf-core pipelines bump-version to release shell: bash - run: nf-core --log-file log.txt bump-version --dir nf-core-testpipeline/ 1.1 + run: nf-core --log-file log.txt pipelines bump-version --dir nf-core-testpipeline/ 1.1 working-directory: create-lint-wf - - name: nf-core lint in release mode + - name: nf-core pipelines lint in release mode shell: bash - run: nf-core --log-file log.txt --hide-progress lint --dir nf-core-testpipeline --fail-ignored --fail-warned --release + run: nf-core --log-file log.txt --hide-progress pipelines lint --dir nf-core-testpipeline --fail-ignored --fail-warned --release working-directory: create-lint-wf - name: Upload log 
file artifact diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index 4ae271453..b823ca72d 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -119,8 +119,8 @@ jobs: rm -rf create-test-lint-wf/results # Try syncing it before we change anything - - name: nf-core sync - run: nf-core --log-file log.txt sync --dir create-test-lint-wf/my-prefix-testpipeline/ + - name: nf-core pipelines sync + run: nf-core --log-file log.txt pipelines sync --dir create-test-lint-wf/my-prefix-testpipeline/ # Run code style linting - name: Run pre-commit @@ -138,18 +138,18 @@ jobs: working-directory: create-test-lint-wf # Run nf-core linting - - name: nf-core lint - run: nf-core --log-file log.txt --hide-progress lint --dir my-prefix-testpipeline --fail-warned + - name: nf-core pipelines lint + run: nf-core --log-file log.txt --hide-progress pipelines lint --dir my-prefix-testpipeline --fail-warned working-directory: create-test-lint-wf # Run bump-version - - name: nf-core bump-version - run: nf-core --log-file log.txt bump-version --dir my-prefix-testpipeline/ 1.1 + - name: nf-core pipelines bump-version + run: nf-core --log-file log.txt pipelines bump-version --dir my-prefix-testpipeline/ 1.1 working-directory: create-test-lint-wf # Run nf-core linting in release mode - - name: nf-core lint in release mode - run: nf-core --log-file log.txt --hide-progress lint --dir my-prefix-testpipeline --fail-warned --release + - name: nf-core pipelines lint in release mode + run: nf-core --log-file log.txt --hide-progress pipelines lint --dir my-prefix-testpipeline --fail-warned --release working-directory: create-test-lint-wf - name: Tar files diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 04d327bd8..2b2dfc1be 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -107,8 +107,8 @@ This ensures that any changes we make to either the linting or the template 
stay You can replicate this process locally with the following commands: ```bash -nf-core create -n testpipeline -d "This pipeline is for testing" -nf-core lint nf-core-testpipeline +nf-core pipelines create -n testpipeline -d "This pipeline is for testing" +nf-core pipelines lint nf-core-testpipeline ``` ## GitHub Codespaces diff --git a/docs/api/_src/index.md b/docs/api/_src/index.md index af3813bc5..037ca9547 100644 --- a/docs/api/_src/index.md +++ b/docs/api/_src/index.md @@ -16,6 +16,7 @@ This documentation is for the `nf-core/tools` package. ## Contents -- [Pipeline code lint tests](pipeline_lint_tests/) (run by `nf-core lint`) -- [Module code lint tests](module_lint_tests/) (run by `nf-core modules lint`) -- [Subworkflow code lint tests](subworkflow_lint_tests/) (run by `nf-core subworkflows lint`) +- [Pipeline code lint tests](pipeline_lint_tests/index.md) (run by `nf-core pipelines lint`) +- [Module code lint tests](module_lint_tests/index.md) (run by `nf-core modules lint`) +- [Subworkflow code lint tests](subworkflow_lint_tests/index.md) (run by `nf-core subworkflows lint`) +- [nf-core/tools Python package API reference](api/index.md) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index c2423ef1f..921b5c8cf 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -645,7 +645,6 @@ def create_pipeline(ctx, name, description, author, version, force, outdir, temp # nf-core pipelines lint @pipelines.command("lint") -@click.pass_context @click.option( "-d", "--dir", diff --git a/nf_core/pipeline-template/.github/CONTRIBUTING.md b/nf_core/pipeline-template/.github/CONTRIBUTING.md index 3f541162d..dc05d165d 100644 --- a/nf_core/pipeline-template/.github/CONTRIBUTING.md +++ b/nf_core/pipeline-template/.github/CONTRIBUTING.md @@ -44,7 +44,7 @@ There are typically two types of tests that run: ### Lint tests `nf-core` has a [set of guidelines](https://nf-co.re/developers/guidelines) which all pipelines must adhere to. 
-To enforce these and ensure that all pipelines stay in sync, we have developed a helper tool which runs checks on the pipeline code. This is in the [nf-core/tools repository](https://github.com/nf-core/tools) and once installed can be run locally with the `nf-core lint ` command. +To enforce these and ensure that all pipelines stay in sync, we have developed a helper tool which runs checks on the pipeline code. This is in the [nf-core/tools repository](https://github.com/nf-core/tools) and once installed can be run locally with the `nf-core pipelines lint ` command. If any failures or warnings are encountered, please follow the listed URL for more documentation. @@ -111,7 +111,7 @@ Please use the following naming schemes, to make it easy to understand what is g ### Nextflow version bumping -If you are using a new feature from core Nextflow, you may bump the minimum required version of nextflow in the pipeline with: `nf-core bump-version --nextflow . [min-nf-version]` +If you are using a new feature from core Nextflow, you may bump the minimum required version of nextflow in the pipeline with: `nf-core pipelines bump-version --nextflow . [min-nf-version]` ### Images and figures diff --git a/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md b/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md index 9ad257a0b..dee23ccab 100644 --- a/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md +++ b/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md @@ -19,7 +19,7 @@ Learn more about contributing: [CONTRIBUTING.md](https://github.com/{{ name }}/t {%- if is_nfcore %} - [ ] If necessary, also make a PR on the {{ name }} _branch_ on the [nf-core/test-datasets](https://github.com/nf-core/test-datasets) repository. {%- endif %} -- [ ] Make sure your code lints (`nf-core lint`). +- [ ] Make sure your code lints (`nf-core pipelines lint`). - [ ] Ensure the test suite passes (`nextflow run . -profile test,docker --outdir `). 
- [ ] Check for unexpected warnings in debug mode (`nextflow run . -profile debug,test,docker --outdir `). - [ ] Usage Documentation in `docs/usage.md` is updated. diff --git a/nf_core/pipeline-template/.github/workflows/linting.yml b/nf_core/pipeline-template/.github/workflows/linting.yml index b2cde075f..f681380fd 100644 --- a/nf_core/pipeline-template/.github/workflows/linting.yml +++ b/nf_core/pipeline-template/.github/workflows/linting.yml @@ -1,6 +1,6 @@ name: nf-core linting # This workflow is triggered on pushes and PRs to the repository. -# It runs the `nf-core lint` and markdown lint tests to ensure +# It runs the `nf-core pipelines lint` and markdown lint tests to ensure # that the code meets the nf-core guidelines. {%- raw %} on: push: @@ -46,12 +46,12 @@ jobs: python -m pip install --upgrade pip pip install nf-core - - name: Run nf-core lint + - name: Run nf-core pipelines lint env: GITHUB_COMMENTS_URL: ${{ github.event.pull_request.comments_url }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_PR_COMMIT: ${{ github.event.pull_request.head.sha }} - run: nf-core -l lint_log.txt lint --dir ${GITHUB_WORKSPACE} --markdown lint_results.md + run: nf-core -l lint_log.txt pipelines lint --dir ${GITHUB_WORKSPACE} --markdown lint_results.md - name: Save PR number if: ${{ always() }} diff --git a/nf_core/pipelines/lint/__init__.py b/nf_core/pipelines/lint/__init__.py index 4ec62ddb0..cf5ba3913 100644 --- a/nf_core/pipelines/lint/__init__.py +++ b/nf_core/pipelines/lint/__init__.py @@ -439,7 +439,7 @@ def _get_results_md(self): comment_body_text = f"Posted for pipeline commit {self.git_sha[:7]}" if self.git_sha is not None else "" timestamp = now.strftime("%Y-%m-%d %H:%M:%S") markdown = ( - f"## `nf-core lint` overall result: {overall_result}\n\n" + f"## `nf-core pipelines lint` overall result: {overall_result}\n\n" f"{comment_body_text}\n\n" f"```diff{test_passed_count}{test_ignored_count}{test_fixed_count}{test_warning_count}{test_failure_count}" "\n```\n\n" 
diff --git a/nf_core/pipelines/lint/files_exist.py b/nf_core/pipelines/lint/files_exist.py index 0c8710062..c6f622b3e 100644 --- a/nf_core/pipelines/lint/files_exist.py +++ b/nf_core/pipelines/lint/files_exist.py @@ -104,7 +104,7 @@ def files_exist(self) -> Dict[str, Union[List[str], bool]]: lib/nfcore_external_java_deps.jar # if "nf-validation" is in nextflow.config - .. tip:: You can configure the ``nf-core lint`` tests to ignore any of these checks by setting + .. tip:: You can configure the ``nf-core pipelines lint`` tests to ignore any of these checks by setting the ``files_exist`` key as follows in your ``.nf-core.yml`` config file. For example: .. code-block:: yaml diff --git a/nf_core/pipelines/lint/files_unchanged.py b/nf_core/pipelines/lint/files_unchanged.py index fc044b659..bafd8aec7 100644 --- a/nf_core/pipelines/lint/files_unchanged.py +++ b/nf_core/pipelines/lint/files_unchanged.py @@ -49,7 +49,7 @@ def files_unchanged(self) -> Dict[str, Union[List[str], bool]]: .prettierignore - .. tip:: You can configure the ``nf-core lint`` tests to ignore any of these checks by setting + .. tip:: You can configure the ``nf-core pipelines lint`` tests to ignore any of these checks by setting the ``files_unchanged`` key as follows in your ``.nf-core.yml`` config file. For example: .. code-block:: yaml diff --git a/nf_core/pipelines/lint/nextflow_config.py b/nf_core/pipelines/lint/nextflow_config.py index 47b7d78f5..4fa49acbc 100644 --- a/nf_core/pipelines/lint/nextflow_config.py +++ b/nf_core/pipelines/lint/nextflow_config.py @@ -26,7 +26,7 @@ def nextflow_config(self): * ``manifest.version`` * The version of this pipeline. This should correspond to a `GitHub release `_. 
- * If ``--release`` is set when running ``nf-core lint``, the version number must not contain the string ``dev`` + * If ``--release`` is set when running ``nf-core pipelines lint``, the version number must not contain the string ``dev`` * If ``--release`` is _not_ set, the version should end in ``dev`` (warning triggered if not) * ``manifest.nextflowVersion`` diff --git a/nf_core/pipelines/lint/schema_lint.py b/nf_core/pipelines/lint/schema_lint.py index 178063d5d..3342dc4b9 100644 --- a/nf_core/pipelines/lint/schema_lint.py +++ b/nf_core/pipelines/lint/schema_lint.py @@ -58,7 +58,7 @@ def schema_lint(self): } .. tip:: You can check your pipeline schema without having to run the entire pipeline lint - by running ``nf-core schema lint`` instead of ``nf-core lint`` + by running ``nf-core schema lint`` instead of ``nf-core pipelines lint`` """ passed = [] warned = [] diff --git a/nf_core/pipelines/lint/version_consistency.py b/nf_core/pipelines/lint/version_consistency.py index e396ca9e7..5fe24ed72 100644 --- a/nf_core/pipelines/lint/version_consistency.py +++ b/nf_core/pipelines/lint/version_consistency.py @@ -4,7 +4,7 @@ def version_consistency(self): """Pipeline and container version number consistency. - .. note:: This test only runs when the ``--release`` flag is set for ``nf-core lint``, + .. note:: This test only runs when the ``--release`` flag is set for ``nf-core pipelines lint``, or ``$GITHUB_REF`` is equal to ``master``. This lint fetches the pipeline version number from three possible locations: diff --git a/nf_core/pipelines/lint_utils.py b/nf_core/pipelines/lint_utils.py index 167600bfc..4ccf79007 100644 --- a/nf_core/pipelines/lint_utils.py +++ b/nf_core/pipelines/lint_utils.py @@ -50,7 +50,7 @@ def print_fixes(lint_obj): if lint_obj.could_fix: fix_flags = "".join([f" --fix {fix}" for fix in lint_obj.could_fix]) wf_dir = "" if lint_obj.wf_path == "." 
else f"--dir {lint_obj.wf_path}" - fix_cmd = f"nf-core lint {wf_dir} {fix_flags}" + fix_cmd = f"nf-core pipelines lint {wf_dir} {fix_flags}" console.print( "\nTip: Some of these linting errors can automatically be resolved with the following command:\n\n" f"[blue] {fix_cmd}\n" diff --git a/nf_core/pipelines/sync/sync.py b/nf_core/pipelines/sync/sync.py index 81082c24b..ccee2a9c4 100644 --- a/nf_core/pipelines/sync/sync.py +++ b/nf_core/pipelines/sync/sync.py @@ -185,7 +185,7 @@ def inspect_sync_dir(self): # Check to see if there are uncommitted changes on current branch if self.repo.is_dirty(untracked_files=True): raise SyncExceptionError( - "Uncommitted changes found in pipeline directory!\nPlease commit these before running nf-core sync" + "Uncommitted changes found in pipeline directory!\nPlease commit these before running nf-core pipelines sync" ) def get_wf_config(self): diff --git a/nf_core/utils.py b/nf_core/utils.py index 8c50f0a49..9dae4b9e5 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -578,7 +578,7 @@ def request_retry(self, url, post_data=None): """ Try to fetch a URL, keep retrying if we get a certain return code. 
- Used in nf-core sync code because we get 403 errors: too many simultaneous requests + Used in nf-core pipelines sync code because we get 403 errors: too many simultaneous requests See https://github.com/nf-core/tools/issues/911 """ if not self.has_init: diff --git a/tests/test_cli.py b/tests/test_cli.py index 95a3d6ab6..53780b14e 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -285,7 +285,7 @@ def test_create_app(self, mock_create): @mock.patch("nf_core.utils.is_pipeline_directory") @mock.patch("nf_core.pipelines.lint.run_linting") def test_lint(self, mock_lint, mock_is_pipeline): - """Test nf-core lint""" + """Test nf-core pipelines lint""" mock_lint_results = (mock.MagicMock, mock.MagicMock, mock.MagicMock) mock_lint_results[0].failed = [] mock_lint_results[1].failed = [] @@ -305,7 +305,7 @@ def test_lint(self, mock_lint, mock_is_pipeline): "json": "output_file.json", } - cmd = ["lint"] + self.assemble_params(params) + cmd = ["pipelines", "lint"] + self.assemble_params(params) result = self.invoke_cli(cmd) assert result.exit_code == 0 @@ -324,12 +324,12 @@ def test_lint(self, mock_lint, mock_is_pipeline): ) def test_lint_no_dir(self): - """Test nf-core lint fails if --dir does not exist""" + """Test nf-core pipelines lint fails if --dir does not exist""" params = { "dir": "/bad/path", } - cmd = ["lint"] + self.assemble_params(params) + cmd = ["pipelines", "lint"] + self.assemble_params(params) result = self.invoke_cli(cmd) assert result.exit_code == 2 @@ -340,11 +340,11 @@ def test_lint_no_dir(self): @mock.patch("nf_core.utils.is_pipeline_directory") def test_lint_dir_is_not_pipeline(self, mock_is_pipeline): - """Test nf-core lint logs an error if not called from a pipeline directory.""" + """Test nf-core pipelines lint logs an error if not called from a pipeline directory.""" error_txt = "UserWarning has been raised" mock_is_pipeline.side_effect = UserWarning(error_txt) - cmd = ["lint"] + cmd = ["pipelines", "lint"] with self.assertLogs() as 
captured_logs: result = self.invoke_cli(cmd) @@ -355,11 +355,11 @@ def test_lint_dir_is_not_pipeline(self, mock_is_pipeline): @mock.patch("nf_core.utils.is_pipeline_directory") @mock.patch("nf_core.pipelines.lint.run_linting") def test_lint_log_assert_error(self, mock_lint, mock_is_pipeline): - """Test nf-core lint logs assertion errors""" + """Test nf-core pipelines lint logs assertion errors""" error_txt = "AssertionError has been raised" mock_lint.side_effect = AssertionError(error_txt) - cmd = ["lint"] + cmd = ["pipelines", "lint"] with self.assertLogs() as captured_logs: result = self.invoke_cli(cmd) @@ -370,11 +370,11 @@ def test_lint_log_assert_error(self, mock_lint, mock_is_pipeline): @mock.patch("nf_core.utils.is_pipeline_directory") @mock.patch("nf_core.pipelines.lint.run_linting") def test_lint_log_user_warning(self, mock_lint, mock_is_pipeline): - """Test nf-core lint logs assertion errors""" + """Test nf-core pipelines lint logs assertion errors""" error_txt = "AssertionError has been raised" mock_lint.side_effect = UserWarning(error_txt) - cmd = ["lint"] + cmd = ["pipelines", "lint"] with self.assertLogs() as captured_logs: result = self.invoke_cli(cmd) diff --git a/tests/test_sync.py b/tests/test_sync.py index 0cacbf544..b98033538 100644 --- a/tests/test_sync.py +++ b/tests/test_sync.py @@ -349,7 +349,7 @@ def test_close_open_template_merge_prs(self, mock_get): psync.gh_repo = "list_prs/response" os.environ["GITHUB_AUTH_TOKEN"] = "test" - with mock.patch("nf_core.sync.PipelineSync.close_open_pr") as mock_close_open_pr: + with mock.patch("nf_core.pipelines.sync.sync.PipelineSync.close_open_pr") as mock_close_open_pr: psync.close_open_template_merge_prs() prs = mock_get(f"https://api.github.com/repos/{psync.gh_repo}/pulls").data From 7f197b337d7e84e0be2d6d5ac998d706726ffd42 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Thu, 23 May 2024 18:24:08 +0200 Subject: [PATCH 166/737] pass ctx to pipeliens sync command --- nf_core/__main__.py | 2 +- 1 file 
changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 921b5c8cf..398668d33 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -2375,7 +2375,7 @@ def sync(dir, from_branch, pull_request, github_repository, username, template_y @click.option("-g", "--github-repository", type=str, help="GitHub PR: target repository.") @click.option("-u", "--username", type=str, help="GitHub PR: auth username.") @click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") -def sync_pipeline(dir, from_branch, pull_request, github_repository, username, template_yaml, force_pr): +def sync_pipeline(ctx, dir, from_branch, pull_request, github_repository, username, template_yaml, force_pr): """ Sync a pipeline [cyan i]TEMPLATE[/] branch with the nf-core template. From c14a24a288d7841bad31ae3edab3fcb9b50ad0cb Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 24 May 2024 14:08:15 +0200 Subject: [PATCH 167/737] add init files --- nf_core/__main__.py | 2 +- nf_core/pipelines/bump_version/__init__.py | 0 nf_core/pipelines/sync/__init__.py | 0 3 files changed, 1 insertion(+), 1 deletion(-) create mode 100644 nf_core/pipelines/bump_version/__init__.py create mode 100644 nf_core/pipelines/sync/__init__.py diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 398668d33..393456684 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -2199,7 +2199,7 @@ def bump_version(new_version, dir, nextflow): default=False, help="Bump required nextflow version instead of pipeline version", ) -def bump_version_pipeline(new_version, dir, nextflow): +def bump_version_pipeline(ctx, new_version, dir, nextflow): """ Update nf-core pipeline version number. 
diff --git a/nf_core/pipelines/bump_version/__init__.py b/nf_core/pipelines/bump_version/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/nf_core/pipelines/sync/__init__.py b/nf_core/pipelines/sync/__init__.py new file mode 100644 index 000000000..e69de29bb From e7136379bbadd7d3c883a9c80be3d37ce68a307c Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 27 May 2024 13:06:23 +0200 Subject: [PATCH 168/737] return directory if base_dir is the root directory --- nf_core/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/utils.py b/nf_core/utils.py index 8c50f0a49..1a1e8a2a9 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1069,7 +1069,7 @@ def determine_base_dir(directory="."): config_fn = get_first_available_path(base_dir, CONFIG_PATHS) if config_fn: break - return directory if base_dir == start_dir else base_dir + return directory if (base_dir == start_dir or str(base_dir) == base_dir.root) else base_dir def get_first_available_path(directory, paths): From ae47db3608feaa95cb9165a876740fcee96534ac Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Mon, 27 May 2024 11:07:57 +0000 Subject: [PATCH 169/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 16262bd1c..a5795245a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -21,6 +21,7 @@ - Update output of generation script for API docs to new structure ([#2988](https://github.com/nf-core/tools/pull/2988)) - Add no clobber and put bash options on their own line ([#2991](https://github.com/nf-core/tools/pull/2991)) - update minimal textual version and snapshots ([#2998](https://github.com/nf-core/tools/pull/2998)) +- return directory if base_dir is the root directory ([#3003](https://github.com/nf-core/tools/pull/3003)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From efd397d94ee223f8dd60f1c3a88a04211bc4b27e Mon Sep 17 
00:00:00 2001 From: Nicolas Vannieuwkerke Date: Mon, 27 May 2024 14:39:17 +0200 Subject: [PATCH 170/737] add org to main.nf.test tag --- nf_core/components/create.py | 4 ++++ nf_core/module-template/tests/main.nf.test.j2 | 2 +- nf_core/subworkflow-template/tests/main.nf.test.j2 | 2 +- 3 files changed, 6 insertions(+), 2 deletions(-) diff --git a/nf_core/components/create.py b/nf_core/components/create.py index 6c9c01b49..e603db385 100644 --- a/nf_core/components/create.py +++ b/nf_core/components/create.py @@ -157,6 +157,10 @@ def create(self): if self.component_type == "modules": self._get_module_structure_components() + # Add a valid organization name for nf-test tags + not_alphabet = re.compile(r'[^a-zA-Z]') + self.org_alphabet = not_alphabet.sub('', self.org) + # Create component template with jinja2 self._render_template() log.info(f"Created component template: '{self.component_name}'") diff --git a/nf_core/module-template/tests/main.nf.test.j2 b/nf_core/module-template/tests/main.nf.test.j2 index 1f70df64b..456c989f8 100644 --- a/nf_core/module-template/tests/main.nf.test.j2 +++ b/nf_core/module-template/tests/main.nf.test.j2 @@ -7,7 +7,7 @@ nextflow_process { process "{{ component_name_underscore|upper }}" tag "modules" - tag "modules_nfcore" + tag "modules_{{ org_alphabet }}" {%- if subtool %} tag "{{ component }}" {%- endif %} diff --git a/nf_core/subworkflow-template/tests/main.nf.test.j2 b/nf_core/subworkflow-template/tests/main.nf.test.j2 index c44e19a4e..8aaf6e0c7 100644 --- a/nf_core/subworkflow-template/tests/main.nf.test.j2 +++ b/nf_core/subworkflow-template/tests/main.nf.test.j2 @@ -7,7 +7,7 @@ nextflow_workflow { workflow "{{ component_name_underscore|upper }}" tag "subworkflows" - tag "subworkflows_nfcore" + tag "subworkflows_{{ org_alphabet }}" tag "subworkflows/{{ component_name }}" // TODO nf-core: Add tags for all modules used within this subworkflow. 
Example: tag "samtools" From a446f29d01470e05870552d6bb496236460d46b6 Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Mon, 27 May 2024 15:19:50 +0200 Subject: [PATCH 171/737] make sure linting passes this new feature --- nf_core/modules/lint/module_tests.py | 6 +++++- nf_core/subworkflows/lint/subworkflow_tests.py | 6 +++++- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/nf_core/modules/lint/module_tests.py b/nf_core/modules/lint/module_tests.py index b2b6c2221..080fc2bdc 100644 --- a/nf_core/modules/lint/module_tests.py +++ b/nf_core/modules/lint/module_tests.py @@ -4,6 +4,7 @@ import json import logging +import re from pathlib import Path import yaml @@ -137,7 +138,10 @@ def module_tests(_, module: NFCoreComponent): ) # Verify that tags are correct. main_nf_tags = module._get_main_nf_tags(module.nftest_main_nf) - required_tags = ["modules", "modules_nfcore", module.component_name] + not_alphabet = re.compile(r'[^a-zA-Z]') + org_alp = not_alphabet.sub('', module.org) + org_alphabet = org_alp if org_alp != "" else "nfcore" + required_tags = ["modules", f"modules_{org_alphabet}", module.component_name] if module.component_name.count("/") == 1: required_tags.append(module.component_name.split("/")[0]) chained_components_tags = module._get_included_components_in_chained_tests(module.nftest_main_nf) diff --git a/nf_core/subworkflows/lint/subworkflow_tests.py b/nf_core/subworkflows/lint/subworkflow_tests.py index cfae2d553..87e850e8f 100644 --- a/nf_core/subworkflows/lint/subworkflow_tests.py +++ b/nf_core/subworkflows/lint/subworkflow_tests.py @@ -4,6 +4,7 @@ import json import logging +import re from pathlib import Path import yaml @@ -144,10 +145,13 @@ def subworkflow_tests(_, subworkflow: NFCoreComponent): ) # Verify that tags are correct. 
main_nf_tags = subworkflow._get_main_nf_tags(subworkflow.nftest_main_nf) + not_alphabet = re.compile(r'[^a-zA-Z]') + org_alp = not_alphabet.sub('', subworkflow.org) + org_alphabet = org_alp if org_alp != "" else "nfcore" required_tags = [ "subworkflows", f"subworkflows/{subworkflow.component_name}", - "subworkflows_nfcore", + f"subworkflows_{org_alphabet}", ] included_components = [] if subworkflow.main_nf.is_file(): From 7bae122e1da8fb54511ebdfe02894a602621dbce Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Mon, 27 May 2024 15:27:20 +0200 Subject: [PATCH 172/737] update changelog --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 16262bd1c..259982e39 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,6 +14,8 @@ ### Components +- The `modules_nfcore` tag in the `main.nf.test` file of modules/subworkflows now displays the organization name in custom modules repositories ([#3005](https://github.com/nf-core/tools/pull/3005)) + ### General - Update pre-commit hook astral-sh/ruff-pre-commit to v0.4.4 ([#2974](https://github.com/nf-core/tools/pull/2974)) From a4ecb70909da3b6986c25d8986ba956391bcd0fc Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Mon, 27 May 2024 15:32:45 +0200 Subject: [PATCH 173/737] ruff! 
--- nf_core/components/create.py | 4 ++-- nf_core/modules/lint/module_tests.py | 4 ++-- nf_core/subworkflows/lint/subworkflow_tests.py | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/nf_core/components/create.py b/nf_core/components/create.py index e603db385..78c30b517 100644 --- a/nf_core/components/create.py +++ b/nf_core/components/create.py @@ -158,8 +158,8 @@ def create(self): self._get_module_structure_components() # Add a valid organization name for nf-test tags - not_alphabet = re.compile(r'[^a-zA-Z]') - self.org_alphabet = not_alphabet.sub('', self.org) + not_alphabet = re.compile(r"[^a-zA-Z]") + self.org_alphabet = not_alphabet.sub("", self.org) # Create component template with jinja2 self._render_template() diff --git a/nf_core/modules/lint/module_tests.py b/nf_core/modules/lint/module_tests.py index 080fc2bdc..4bf4ea774 100644 --- a/nf_core/modules/lint/module_tests.py +++ b/nf_core/modules/lint/module_tests.py @@ -138,8 +138,8 @@ def module_tests(_, module: NFCoreComponent): ) # Verify that tags are correct. main_nf_tags = module._get_main_nf_tags(module.nftest_main_nf) - not_alphabet = re.compile(r'[^a-zA-Z]') - org_alp = not_alphabet.sub('', module.org) + not_alphabet = re.compile(r"[^a-zA-Z]") + org_alp = not_alphabet.sub("", module.org) org_alphabet = org_alp if org_alp != "" else "nfcore" required_tags = ["modules", f"modules_{org_alphabet}", module.component_name] if module.component_name.count("/") == 1: diff --git a/nf_core/subworkflows/lint/subworkflow_tests.py b/nf_core/subworkflows/lint/subworkflow_tests.py index 87e850e8f..fe7b40407 100644 --- a/nf_core/subworkflows/lint/subworkflow_tests.py +++ b/nf_core/subworkflows/lint/subworkflow_tests.py @@ -145,8 +145,8 @@ def subworkflow_tests(_, subworkflow: NFCoreComponent): ) # Verify that tags are correct. 
main_nf_tags = subworkflow._get_main_nf_tags(subworkflow.nftest_main_nf) - not_alphabet = re.compile(r'[^a-zA-Z]') - org_alp = not_alphabet.sub('', subworkflow.org) + not_alphabet = re.compile(r"[^a-zA-Z]") + org_alp = not_alphabet.sub("", subworkflow.org) org_alphabet = org_alp if org_alp != "" else "nfcore" required_tags = [ "subworkflows", From 6d9feb3a900db74980bce66523e65f2470cdb507 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Tue, 28 May 2024 22:45:25 +0000 Subject: [PATCH 174/737] Update pre-commit hook astral-sh/ruff-pre-commit to v0.4.6 --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 01bd13a9e..8521c4410 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.4.5 + rev: v0.4.6 hooks: - id: ruff # linter args: [--fix, --exit-non-zero-on-fix] # sort imports and fix From f94ec87ccd246eba4db29609e1b0f5ed44b2fb80 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Tue, 28 May 2024 22:46:12 +0000 Subject: [PATCH 175/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index ae43fd4e7..63bef50b8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -24,6 +24,7 @@ - Add no clobber and put bash options on their own line ([#2991](https://github.com/nf-core/tools/pull/2991)) - update minimal textual version and snapshots ([#2998](https://github.com/nf-core/tools/pull/2998)) - return directory if base_dir is the root directory ([#3003](https://github.com/nf-core/tools/pull/3003)) +- Update pre-commit hook astral-sh/ruff-pre-commit to v0.4.6 ([#3006](https://github.com/nf-core/tools/pull/3006)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From 60b79b156ccd77f369ad174997fdb0284db62682 Mon 
Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 29 May 2024 15:35:19 +0200 Subject: [PATCH 176/737] use pydantic validation context on module initialization. use global variable for pipeline type --- nf_core/pipelines/create/__init__.py | 16 +++++----- nf_core/pipelines/create/utils.py | 46 +++++++++++++++++++++++----- 2 files changed, 46 insertions(+), 16 deletions(-) diff --git a/nf_core/pipelines/create/__init__.py b/nf_core/pipelines/create/__init__.py index da6a69322..c118d8520 100644 --- a/nf_core/pipelines/create/__init__.py +++ b/nf_core/pipelines/create/__init__.py @@ -5,6 +5,7 @@ from textual.app import App from textual.widgets import Button +from nf_core.pipelines.create import utils from nf_core.pipelines.create.basicdetails import BasicDetails from nf_core.pipelines.create.custompipeline import CustomPipeline from nf_core.pipelines.create.finaldetails import FinalDetails @@ -14,15 +15,10 @@ from nf_core.pipelines.create.loggingscreen import LoggingScreen from nf_core.pipelines.create.nfcorepipeline import NfcorePipeline from nf_core.pipelines.create.pipelinetype import ChoosePipelineType -from nf_core.pipelines.create.utils import ( - CreateConfig, - CustomLogHandler, - LoggingConsole, -) from nf_core.pipelines.create.welcome import WelcomeScreen -log_handler = CustomLogHandler( - console=LoggingConsole(classes="log_console"), +log_handler = utils.CustomLogHandler( + console=utils.LoggingConsole(classes="log_console"), rich_tracebacks=True, show_time=False, show_path=False, @@ -36,7 +32,7 @@ log_handler.setLevel("INFO") -class PipelineCreateApp(App[CreateConfig]): +class PipelineCreateApp(App[utils.CreateConfig]): """A Textual app to manage stopwatches.""" CSS_PATH = "create.tcss" @@ -60,7 +56,7 @@ class PipelineCreateApp(App[CreateConfig]): } # Initialise config as empty - TEMPLATE_CONFIG = CreateConfig() + TEMPLATE_CONFIG = utils.CreateConfig() # Initialise pipeline type PIPELINE_TYPE = None @@ -79,9 +75,11 @@ def on_button_pressed(self, event: 
Button.Pressed) -> None: self.push_screen("choose_type") elif event.button.id == "type_nfcore": self.PIPELINE_TYPE = "nfcore" + utils.PIPELINE_TYPE_GLOBAL = "nfcore" self.push_screen("basic_details") elif event.button.id == "type_custom": self.PIPELINE_TYPE = "custom" + utils.PIPELINE_TYPE_GLOBAL = "custom" self.push_screen("basic_details") elif event.button.id == "continue": self.push_screen("final_details") diff --git a/nf_core/pipelines/create/utils.py b/nf_core/pipelines/create/utils.py index 6006452ba..0754c54b5 100644 --- a/nf_core/pipelines/create/utils.py +++ b/nf_core/pipelines/create/utils.py @@ -1,9 +1,11 @@ import re +from contextlib import contextmanager +from contextvars import ContextVar from logging import LogRecord from pathlib import Path -from typing import Optional, Union +from typing import Any, Dict, Iterator, Optional, Union -from pydantic import BaseModel, ConfigDict, ValidationError, field_validator +from pydantic import BaseModel, ConfigDict, ValidationError, ValidationInfo, field_validator from rich.logging import RichHandler from textual import on from textual._context import active_app @@ -14,6 +16,22 @@ from textual.widget import Widget from textual.widgets import Button, Input, Markdown, RichLog, Static, Switch +# Use ContextVar to define a context on the model initialization +_init_context_var: ContextVar = ContextVar("_init_context_var", default={}) + + +@contextmanager +def init_context(value: Dict[str, Any]) -> Iterator[None]: + token = _init_context_var.set(value) + try: + yield + finally: + _init_context_var.reset(token) + + +# Define a global variable to store the pipeline type +PIPELINE_TYPE_GLOBAL: str | None = None + class CreateConfig(BaseModel): """Pydantic model for the nf-core create config.""" @@ -30,12 +48,25 @@ class CreateConfig(BaseModel): model_config = ConfigDict(extra="allow") + def __init__(self, /, **data: Any) -> None: + """Custom init method to allow using a context on the model initialization.""" + 
self.__pydantic_validator__.validate_python( + data, + self_instance=self, + context=_init_context_var.get(), + ) + @field_validator("name") @classmethod - def name_nospecialchars(cls, v: str) -> str: + def name_nospecialchars(cls, v: str, info: ValidationInfo) -> str: """Check that the pipeline name is simple.""" - if not re.match(r"^[a-z]+$", v): - raise ValueError("Must be lowercase without punctuation.") + context = info.context + if context and context["is_nfcore"]: + if not re.match(r"^[a-z]+$", v): + raise ValueError("Must be lowercase without punctuation.") + else: + if not re.match(r"^[a-zA-Z-_]+$", v): + raise ValueError("Must not contain special characters. Only '-' or '_' are allowed.") return v @field_validator("org", "description", "author", "version", "outdir") @@ -117,8 +148,9 @@ def validate(self, value: str) -> ValidationResult: If it fails, return the error messages.""" try: - CreateConfig(**{f"{self.key}": value}) - return self.success() + with init_context({"is_nfcore": PIPELINE_TYPE_GLOBAL == "nfcore"}): + CreateConfig(**{f"{self.key}": value}) + return self.success() except ValidationError as e: return self.failure(", ".join([err["msg"] for err in e.errors()])) From 0186535fe3e6a7c8f79fdd379ebc0d2d20f423d1 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Wed, 29 May 2024 13:40:21 +0000 Subject: [PATCH 177/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 63bef50b8..af62a6ed5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -25,6 +25,7 @@ - update minimal textual version and snapshots ([#2998](https://github.com/nf-core/tools/pull/2998)) - return directory if base_dir is the root directory ([#3003](https://github.com/nf-core/tools/pull/3003)) - Update pre-commit hook astral-sh/ruff-pre-commit to v0.4.6 ([#3006](https://github.com/nf-core/tools/pull/3006)) +- Create: allow more special characters on the pipeline name for non-nf-core pipelines 
([#3008](https://github.com/nf-core/tools/pull/3008)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From 6275553f13f948ad76a6ebd0ec6ad2b73fa07cd1 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 29 May 2024 15:51:36 +0200 Subject: [PATCH 178/737] use union for typing --- nf_core/pipelines/create/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipelines/create/utils.py b/nf_core/pipelines/create/utils.py index 0754c54b5..d70fea140 100644 --- a/nf_core/pipelines/create/utils.py +++ b/nf_core/pipelines/create/utils.py @@ -30,7 +30,7 @@ def init_context(value: Dict[str, Any]) -> Iterator[None]: # Define a global variable to store the pipeline type -PIPELINE_TYPE_GLOBAL: str | None = None +PIPELINE_TYPE_GLOBAL: Union[str, None] = None class CreateConfig(BaseModel): From 2b724e0e7f98d8f6b5956eda6b776041728885bd Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Thu, 30 May 2024 13:46:53 +0200 Subject: [PATCH 179/737] use NFCORE_PIPELINE as a boolean instead of a PIPELINE_TYPE string --- nf_core/pipelines/create/__init__.py | 10 +++++----- nf_core/pipelines/create/basicdetails.py | 8 ++++---- nf_core/pipelines/create/utils.py | 4 ++-- 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/nf_core/pipelines/create/__init__.py b/nf_core/pipelines/create/__init__.py index c118d8520..56e25bf1d 100644 --- a/nf_core/pipelines/create/__init__.py +++ b/nf_core/pipelines/create/__init__.py @@ -59,7 +59,7 @@ class PipelineCreateApp(App[utils.CreateConfig]): TEMPLATE_CONFIG = utils.CreateConfig() # Initialise pipeline type - PIPELINE_TYPE = None + NFCORE_PIPELINE = True # Log handler LOG_HANDLER = log_handler @@ -74,12 +74,12 @@ def on_button_pressed(self, event: Button.Pressed) -> None: if event.button.id == "start": self.push_screen("choose_type") elif event.button.id == "type_nfcore": - self.PIPELINE_TYPE = "nfcore" - utils.PIPELINE_TYPE_GLOBAL = "nfcore" + self.NFCORE_PIPELINE 
= True + utils.NFCORE_PIPELINE_GLOBAL = True self.push_screen("basic_details") elif event.button.id == "type_custom": - self.PIPELINE_TYPE = "custom" - utils.PIPELINE_TYPE_GLOBAL = "custom" + self.NFCORE_PIPELINE = False + utils.NFCORE_PIPELINE_GLOBAL = False self.push_screen("basic_details") elif event.button.id == "continue": self.push_screen("final_details") diff --git a/nf_core/pipelines/create/basicdetails.py b/nf_core/pipelines/create/basicdetails.py index b88ede10d..09484fa2e 100644 --- a/nf_core/pipelines/create/basicdetails.py +++ b/nf_core/pipelines/create/basicdetails.py @@ -39,7 +39,7 @@ def compose(self) -> ComposeResult: "GitHub organisation", "nf-core", classes="column", - disabled=self.parent.PIPELINE_TYPE == "nfcore", + disabled=self.parent.NFCORE_PIPELINE, ) yield TextInput( "name", @@ -85,7 +85,7 @@ def on_screen_resume(self): add_hide_class(self.parent, "exist_warn") for text_input in self.query("TextInput"): if text_input.field_id == "org": - text_input.disabled = self.parent.PIPELINE_TYPE == "nfcore" + text_input.disabled = self.parent.NFCORE_PIPELINE @on(Button.Pressed) def on_button_pressed(self, event: Button.Pressed) -> None: @@ -102,9 +102,9 @@ def on_button_pressed(self, event: Button.Pressed) -> None: try: self.parent.TEMPLATE_CONFIG = CreateConfig(**config) if event.button.id == "next": - if self.parent.PIPELINE_TYPE == "nfcore": + if self.parent.NFCORE_PIPELINE: self.parent.push_screen("type_nfcore") - elif self.parent.PIPELINE_TYPE == "custom": + else: self.parent.push_screen("type_custom") except ValueError: pass diff --git a/nf_core/pipelines/create/utils.py b/nf_core/pipelines/create/utils.py index d70fea140..f1e0bae3c 100644 --- a/nf_core/pipelines/create/utils.py +++ b/nf_core/pipelines/create/utils.py @@ -30,7 +30,7 @@ def init_context(value: Dict[str, Any]) -> Iterator[None]: # Define a global variable to store the pipeline type -PIPELINE_TYPE_GLOBAL: Union[str, None] = None +NFCORE_PIPELINE_GLOBAL: bool = True class 
CreateConfig(BaseModel): @@ -148,7 +148,7 @@ def validate(self, value: str) -> ValidationResult: If it fails, return the error messages.""" try: - with init_context({"is_nfcore": PIPELINE_TYPE_GLOBAL == "nfcore"}): + with init_context({"is_nfcore": NFCORE_PIPELINE_GLOBAL}): CreateConfig(**{f"{self.key}": value}) return self.success() except ValidationError as e: From 69968b817df2df7063a790db97f56ba39c926fc6 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Thu, 30 May 2024 15:27:58 +0200 Subject: [PATCH 180/737] move all commands under pipelines and organise help message --- nf_core/__main__.py | 1818 ++++++++++------- .../{bump_version => }/bump_version.py | 0 nf_core/pipelines/bump_version/__init__.py | 0 nf_core/pipelines/create/create.py | 6 +- nf_core/{ => pipelines}/create_logo.py | 0 nf_core/{ => pipelines}/download.py | 4 +- nf_core/{ => pipelines}/launch.py | 6 +- nf_core/pipelines/lint/nextflow_config.py | 2 +- nf_core/pipelines/lint/schema_description.py | 4 +- nf_core/pipelines/lint/schema_lint.py | 4 +- nf_core/pipelines/lint/schema_params.py | 4 +- nf_core/{ => pipelines}/list.py | 0 nf_core/{ => pipelines}/params_file.py | 6 +- nf_core/{ => pipelines}/refgenie.py | 0 nf_core/{ => pipelines}/schema.py | 4 +- nf_core/pipelines/{sync => }/sync.py | 2 +- nf_core/pipelines/sync/__init__.py | 0 tests/test_create_logo.py | 22 +- tests/test_download.py | 2 +- tests/test_launch.py | 20 +- tests/test_list.py | 44 +- tests/test_params_file.py | 6 +- tests/test_schema.py | 4 +- tests/test_utils.py | 10 +- 24 files changed, 1207 insertions(+), 761 deletions(-) rename nf_core/pipelines/{bump_version => }/bump_version.py (100%) delete mode 100644 nf_core/pipelines/bump_version/__init__.py rename nf_core/{ => pipelines}/create_logo.py (100%) rename nf_core/{ => pipelines}/download.py (99%) rename nf_core/{ => pipelines}/launch.py (99%) rename nf_core/{ => pipelines}/list.py (100%) rename nf_core/{ => pipelines}/params_file.py (98%) rename nf_core/{ => 
pipelines}/refgenie.py (100%) rename nf_core/{ => pipelines}/schema.py (99%) rename nf_core/pipelines/{sync => }/sync.py (99%) delete mode 100644 nf_core/pipelines/sync/__init__.py diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 393456684..4921e15d5 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -14,9 +14,9 @@ from trogon import tui from nf_core import __version__ -from nf_core.download import DownloadError from nf_core.modules.modules_repo import NF_CORE_MODULES_REMOTE -from nf_core.params_file import ParamsFileBuilder +from nf_core.pipelines.download import DownloadError +from nf_core.pipelines.params_file import ParamsFileBuilder from nf_core.utils import check_if_outdated, nfcore_logo, rich_force_colors, setup_nfcore_dir # Set up logging as the root logger @@ -32,31 +32,23 @@ click.rich_click.COMMAND_GROUPS = { "nf-core": [ { - "name": "Commands for users", - "commands": [ - "list", - "launch", - "create-params-file", - "download", - "licences", - "tui", - ], - }, - { - "name": "Commands for developers", + "name": "Commands", "commands": [ "pipelines", "modules", "subworkflows", - "schema", - "create-logo", + "tui", ], }, ], "nf-core pipelines": [ { - "name": "Pipeline commands", - "commands": ["create", "lint", "bump-version", "sync"], + "name": "For users", + "commands": ["list", "launch", "download", "create-params-file", "licences"], + }, + { + "name": "For developers", + "commands": ["create", "lint", "bump-version", "sync", "schema", "create-logo"], }, ], "nf-core modules": [ @@ -66,19 +58,20 @@ }, { "name": "Developing new modules", - "commands": ["create", "lint", "bump-versions", "test"], + "commands": ["create", "lint", "test", "bump-versions"], }, ], "nf-core subworkflows": [ { "name": "For pipelines", - "commands": ["info", "install", "list", "remove", "update"], + "commands": ["list", "info", "install", "update", "remove"], }, { "name": "Developing new subworkflows", - "commands": ["create", "test", "lint"], + 
"commands": ["create", "lint", "test"], }, ], + "nf-core pipelines schema": [{"name": "Schema commands", "commands": ["validate", "build", "lint", "docs"]}], } click.rich_click.OPTION_GROUPS = { "nf-core modules list local": [{"options": ["--dir", "--json", "--help"]}], @@ -189,195 +182,239 @@ def nf_core_cli(ctx, verbose, hide_progress, log_file): } -# nf-core list -@nf_core_cli.command("list") -@click.argument("keywords", required=False, nargs=-1, metavar="") -@click.option( - "-s", - "--sort", - type=click.Choice(["release", "pulled", "name", "stars"]), - default="release", - help="How to sort listed pipelines", -) -@click.option("--json", is_flag=True, default=False, help="Print full output as JSON") -@click.option("--show-archived", is_flag=True, default=False, help="Print archived workflows") -def list_pipelines(keywords, sort, json, show_archived): +# nf-core pipelines subcommands +@nf_core_cli.group() +@click.pass_context +def pipelines(ctx): """ - List available nf-core pipelines with local info. - - Checks the web for a list of nf-core pipelines with their latest releases. - Shows which nf-core pipelines you have pulled locally and whether they are up to date. + Commands to manage nf-core pipelines. 
""" - from nf_core.list import list_workflows - - stdout.print(list_workflows(keywords, sort, json, show_archived)) + # ensure that ctx.obj exists and is a dict (in case `cli()` is called + # by means other than the `if` block below) + ctx.ensure_object(dict) -# nf-core launch -@nf_core_cli.command() -@click.argument("pipeline", required=False, metavar="") -@click.option("-r", "--revision", help="Release/branch/SHA of the project to run (if remote)") -@click.option("-i", "--id", help="ID for web-gui launch parameter set") -@click.option( - "-c", - "--command-only", - is_flag=True, - default=False, - help="Create Nextflow command with params (no params file)", -) -@click.option( - "-o", - "--params-out", - type=click.Path(), - default=os.path.join(os.getcwd(), "nf-params.json"), - help="Path to save run parameters file", -) -@click.option( - "-p", - "--params-in", - type=click.Path(exists=True), - help="Set of input run params to use from a previous run", -) -@click.option( - "-a", - "--save-all", - is_flag=True, - default=False, - help="Save all parameters, even if unchanged from default", -) +# nf-core pipelines create +@pipelines.command("create") +@click.pass_context @click.option( - "-x", - "--show-hidden", - is_flag=True, - default=False, - help="Show hidden params which don't normally need changing", + "-n", + "--name", + type=str, + help="The name of your new pipeline", ) +@click.option("-d", "--description", type=str, help="A short description of your pipeline") +@click.option("-a", "--author", type=str, help="Name of the main author(s)") +@click.option("--version", type=str, default="1.0.0dev", help="The initial version number to use") +@click.option("-f", "--force", is_flag=True, default=False, help="Overwrite output directory if it already exists") +@click.option("-o", "--outdir", help="Output directory for new pipeline (default: pipeline name)") +@click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") @click.option( - 
"-u", - "--url", + "--organisation", type=str, - default="https://nf-co.re/launch", - help="Customise the builder URL (for development work)", + default="nf-core", + help="The name of the GitHub organisation where the pipeline will be hosted (default: nf-core)", ) -def launch( - pipeline, - id, - revision, - command_only, - params_in, - params_out, - save_all, - show_hidden, - url, -): +def create_pipeline(ctx, name, description, author, version, force, outdir, template_yaml, organisation): """ - Launch a pipeline using a web GUI or command line prompts. - - Uses the pipeline schema file to collect inputs for all available pipeline - parameters. Parameter names, descriptions and help text are shown. - The pipeline schema is used to validate all inputs as they are entered. - - When finished, saves a file with the selected parameters which can be - passed to Nextflow using the -params-file option. + Create a new pipeline using the nf-core template. - Run using a remote pipeline name (such as GitHub `user/repo` or a URL), - a local pipeline directory or an ID from the nf-core web launch tool. + Uses the nf-core template to make a skeleton Nextflow pipeline with all required + files, boilerplate code and best-practices. + \n\n + Run without any command line arguments to use an interactive interface. 
""" - from nf_core.launch import Launch + from nf_core.pipelines.create import PipelineCreateApp + from nf_core.pipelines.create.create import PipelineCreate - launcher = Launch( - pipeline, - revision, - command_only, - params_in, - params_out, - save_all, - show_hidden, - url, - id, - ) - if not launcher.launch_pipeline(): + if (name and description and author) or (template_yaml): + # If all command arguments are used, run without the interactive interface + try: + create_obj = PipelineCreate( + name, + description, + author, + version=version, + force=force, + outdir=outdir, + template_config=template_yaml, + organisation=organisation, + ) + create_obj.init_pipeline() + except UserWarning as e: + log.error(e) + sys.exit(1) + elif name or description or author or version != "1.0.0dev" or force or outdir or organisation != "nf-core": + log.error( + "[red]Partial arguments supplied.[/] " + "Run without [i]any[/] arguments for an interactive interface, " + "or with at least name + description + author to use non-interactively." + ) sys.exit(1) + else: + log.info("Launching interactive nf-core pipeline creation tool.") + app = PipelineCreateApp() + app.run() + sys.exit(app.return_code or 0) -# nf-core create-params-file -@nf_core_cli.command() -@click.argument("pipeline", required=False, metavar="") -@click.option("-r", "--revision", help="Release/branch/SHA of the pipeline (if remote)") +# nf-core pipelines lint +@pipelines.command("lint") @click.option( - "-o", - "--output", - type=str, - default="nf-params.yml", - metavar="", - help="Output filename. 
Defaults to `nf-params.yml`.", + "-d", + "--dir", + type=click.Path(exists=True), + default=".", + help=r"Pipeline directory [dim]\[default: current working directory][/]", ) -@click.option("-f", "--force", is_flag=True, default=False, help="Overwrite existing files") @click.option( - "-x", - "--show-hidden", + "--release", is_flag=True, - default=False, - help="Show hidden params which don't normally need changing", + default=os.path.basename(os.path.dirname(os.environ.get("GITHUB_REF", "").strip(" '\""))) == "master" + and os.environ.get("GITHUB_REPOSITORY", "").startswith("nf-core/") + and not os.environ.get("GITHUB_REPOSITORY", "") == "nf-core/tools", + help="Execute additional checks for release-ready workflows.", ) -def create_params_file(pipeline, revision, output, force, show_hidden): - """ - Build a parameter file for a pipeline. - - Uses the pipeline schema file to generate a YAML parameters file. - Parameters are set to the pipeline defaults and descriptions are shown in comments. - After the output file is generated, it can then be edited as needed before - passing to nextflow using the `-params-file` option. - - Run using a remote pipeline name (such as GitHub `user/repo` or a URL), - a local pipeline directory. - """ - builder = ParamsFileBuilder(pipeline, revision) - - if not builder.write_params_file(output, show_hidden=show_hidden, force=force): - sys.exit(1) - - -# nf-core download -@nf_core_cli.command() -@click.argument("pipeline", required=False, metavar="") @click.option( - "-r", - "--revision", + "-f", + "--fix", + type=str, + metavar="", multiple=True, - help="Pipeline release to download. Multiple invocations are possible, e.g. 
`-r 1.1 -r 1.2`", + help="Attempt to automatically fix specified lint test", ) -@click.option("-o", "--outdir", type=str, help="Output directory") @click.option( - "-x", - "--compress", - type=click.Choice(["tar.gz", "tar.bz2", "zip", "none"]), - help="Archive compression type", + "-k", + "--key", + type=str, + metavar="", + multiple=True, + help="Run only these lint tests", ) -@click.option("-f", "--force", is_flag=True, default=False, help="Overwrite existing files") -# TODO: Remove this in a future release. Deprecated in March 2024. +@click.option("-p", "--show-passed", is_flag=True, help="Show passing tests on the command line") +@click.option("-i", "--fail-ignored", is_flag=True, help="Convert ignored tests to failures") +@click.option("-w", "--fail-warned", is_flag=True, help="Convert warn tests to failures") @click.option( - "-t", - "--tower", - is_flag=True, - default=False, - hidden=True, - help="Download for Seqera Platform. DEPRECATED: Please use `--platform` instead.", + "--markdown", + type=str, + metavar="", + help="File to write linting results to (Markdown)", ) @click.option( - "--platform", - is_flag=True, - default=False, - help="Download for Seqera Platform (formerly Nextflow Tower)", + "--json", + type=str, + metavar="", + help="File to write linting results to (JSON)", ) @click.option( - "-d", - "--download-configuration", - is_flag=True, - default=False, - help="Include configuration profiles in download. Not available with `--platform`", + "--sort-by", + type=click.Choice(["module", "test"]), + default="test", + help="Sort lint output by module or test name.", + show_default=True, ) -@click.option( +@click.pass_context +def lint_pipeline( + ctx, + dir, + release, + fix, + key, + show_passed, + fail_ignored, + fail_warned, + markdown, + json, + sort_by, +): + """ + Check pipeline code against nf-core guidelines. + + Runs a large number of automated tests to ensure that the supplied pipeline + meets the nf-core guidelines. 
Documentation of all lint tests can be found + on the nf-core website: [link=https://nf-co.re/tools/docs/]https://nf-co.re/tools/docs/[/] + + You can ignore tests using a file called [blue].nf-core.yml[/] [i](if you have a good reason!)[/]. + See the documentation for details. + """ + from nf_core.pipelines.lint import run_linting + from nf_core.utils import is_pipeline_directory + + # Check if pipeline directory is a pipeline + try: + is_pipeline_directory(dir) + except UserWarning as e: + log.error(e) + sys.exit(1) + + # Run the lint tests! + try: + lint_obj, module_lint_obj, subworkflow_lint_obj = run_linting( + dir, + release, + fix, + key, + show_passed, + fail_ignored, + fail_warned, + sort_by, + markdown, + json, + ctx.obj["hide_progress"], + ) + swf_failed = 0 + if subworkflow_lint_obj is not None: + swf_failed = len(subworkflow_lint_obj.failed) + if len(lint_obj.failed) + len(module_lint_obj.failed) + swf_failed > 0: + sys.exit(1) + except AssertionError as e: + log.critical(e) + sys.exit(1) + except UserWarning as e: + log.error(e) + sys.exit(1) + + +# nf-core pipelines download +@pipelines.command("download") +@click.argument("pipeline", required=False, metavar="") +@click.option( + "-r", + "--revision", + multiple=True, + help="Pipeline release to download. Multiple invocations are possible, e.g. `-r 1.1 -r 1.2`", +) +@click.option("-o", "--outdir", type=str, help="Output directory") +@click.option( + "-x", + "--compress", + type=click.Choice(["tar.gz", "tar.bz2", "zip", "none"]), + help="Archive compression type", +) +@click.option("-f", "--force", is_flag=True, default=False, help="Overwrite existing files") +# TODO: Remove this in a future release. Deprecated in March 2024. +@click.option( + "-t", + "--tower", + is_flag=True, + default=False, + hidden=True, + help="Download for Seqera Platform. 
DEPRECATED: Please use `--platform` instead.", +) +@click.option( + "--platform", + is_flag=True, + default=False, + help="Download for Seqera Platform (formerly Nextflow Tower)", +) +@click.option( + "-d", + "--download-configuration", + is_flag=True, + default=False, + help="Include configuration profiles in download. Not available with `--platform`", +) +@click.option( "--tag", multiple=True, help="Add custom alias tags to `--platform` downloads. For example, `--tag \"3.10=validated\"` adds the custom 'validated' tag to the 3.10 release.", @@ -415,7 +452,9 @@ def create_params_file(pipeline, revision, output, force, show_hidden): default=4, help="Number of parallel image downloads", ) -def download( +@click.pass_context +def download_pipeline( + ctx, pipeline, revision, outdir, @@ -437,7 +476,7 @@ def download( Collects all files in a single archive and configures the downloaded workflow to use relative paths to the configs and singularity images. """ - from nf_core.download import DownloadWorkflow + from nf_core.pipelines.download import DownloadWorkflow if tower: log.warning("[red]The `-t` / `--tower` flag is deprecated. Please use `--platform` instead.[/]") @@ -460,374 +499,531 @@ def download( dl.download_workflow() -# nf-core licences -@nf_core_cli.command() -@click.argument("pipeline", required=True, metavar="") -@click.option("--json", is_flag=True, default=False, help="Print output in JSON") -def licences(pipeline, json): +# nf-core pipelines create-params-file +@pipelines.command("create-params-file") +@click.argument("pipeline", required=False, metavar="") +@click.option("-r", "--revision", help="Release/branch/SHA of the pipeline (if remote)") +@click.option( + "-o", + "--output", + type=str, + default="nf-params.yml", + metavar="", + help="Output filename. 
Defaults to `nf-params.yml`.", +) +@click.option("-f", "--force", is_flag=True, default=False, help="Overwrite existing files") +@click.option( + "-x", + "--show-hidden", + is_flag=True, + default=False, + help="Show hidden params which don't normally need changing", +) +@click.pass_context +def create_params_file_pipeline(ctx, pipeline, revision, output, force, show_hidden): """ - List software licences for a given workflow (DSL1 only). + Build a parameter file for a pipeline. - Checks the pipeline environment.yml file which lists all conda software packages, which is not available for DSL2 workflows. Therefore, this command only supports DSL1 workflows (for now). - Each of these is queried against the anaconda.org API to find the licence. - Package name, version and licence is printed to the command line. + Uses the pipeline schema file to generate a YAML parameters file. + Parameters are set to the pipeline defaults and descriptions are shown in comments. + After the output file is generated, it can then be edited as needed before + passing to nextflow using the `-params-file` option. + + Run using a remote pipeline name (such as GitHub `user/repo` or a URL), + a local pipeline directory. 
""" - from nf_core.licences import WorkflowLicences + builder = ParamsFileBuilder(pipeline, revision) - lic = WorkflowLicences(pipeline) - lic.as_json = json - try: - stdout.print(lic.run_licences()) - except LookupError as e: - log.error(e) + if not builder.write_params_file(output, show_hidden=show_hidden, force=force): sys.exit(1) -# nf-core lint (deprecated) -@nf_core_cli.command(hidden=True, deprecated=True) -@click.option( - "-d", - "--dir", - type=click.Path(exists=True), - default=".", - help=r"Pipeline directory [dim]\[default: current working directory][/]", -) +# nf-core pipelines launch +@pipelines.command("launch") +@click.argument("pipeline", required=False, metavar="") +@click.option("-r", "--revision", help="Release/branch/SHA of the project to run (if remote)") +@click.option("-i", "--id", help="ID for web-gui launch parameter set") @click.option( - "--release", + "-c", + "--command-only", is_flag=True, - default=os.path.basename(os.path.dirname(os.environ.get("GITHUB_REF", "").strip(" '\""))) == "master" - and os.environ.get("GITHUB_REPOSITORY", "").startswith("nf-core/") - and not os.environ.get("GITHUB_REPOSITORY", "") == "nf-core/tools", - help="Execute additional checks for release-ready workflows.", + default=False, + help="Create Nextflow command with params (no params file)", ) @click.option( - "-f", - "--fix", - type=str, - metavar="", - multiple=True, - help="Attempt to automatically fix specified lint test", + "-o", + "--params-out", + type=click.Path(), + default=os.path.join(os.getcwd(), "nf-params.json"), + help="Path to save run parameters file", ) @click.option( - "-k", - "--key", - type=str, - metavar="", - multiple=True, - help="Run only these lint tests", + "-p", + "--params-in", + type=click.Path(exists=True), + help="Set of input run params to use from a previous run", ) -@click.option("-p", "--show-passed", is_flag=True, help="Show passing tests on the command line") -@click.option("-i", "--fail-ignored", is_flag=True, 
help="Convert ignored tests to failures") -@click.option("-w", "--fail-warned", is_flag=True, help="Convert warn tests to failures") @click.option( - "--markdown", - type=str, - metavar="", - help="File to write linting results to (Markdown)", + "-a", + "--save-all", + is_flag=True, + default=False, + help="Save all parameters, even if unchanged from default", ) @click.option( - "--json", - type=str, - metavar="", - help="File to write linting results to (JSON)", + "-x", + "--show-hidden", + is_flag=True, + default=False, + help="Show hidden params which don't normally need changing", ) @click.option( - "--sort-by", - type=click.Choice(["module", "test"]), - default="test", - help="Sort lint output by module or test name.", - show_default=True, + "-u", + "--url", + type=str, + default="https://nf-co.re/launch", + help="Customise the builder URL (for development work)", ) @click.pass_context -def lint( +def launch_pipeline( ctx, - dir, - release, - fix, - key, - show_passed, - fail_ignored, - fail_warned, - markdown, - json, - sort_by, + pipeline, + id, + revision, + command_only, + params_in, + params_out, + save_all, + show_hidden, + url, ): """ - DEPRECATED - Check pipeline code against nf-core guidelines. - - Runs a large number of automated tests to ensure that the supplied pipeline - meets the nf-core guidelines. Documentation of all lint tests can be found - on the nf-core website: [link=https://nf-co.re/tools/docs/]https://nf-co.re/tools/docs/[/] + Launch a pipeline using a web GUI or command line prompts. - You can ignore tests using a file called [blue].nf-core.yml[/] [i](if you have a good reason!)[/]. - See the documentation for details. - """ - log.error("The `[magenta]nf-core lint[/]` command is deprecated. Use `[magenta]nf-core pipelines lint[/]` instead.") - sys.exit(0) + Uses the pipeline schema file to collect inputs for all available pipeline + parameters. Parameter names, descriptions and help text are shown. 
+    The pipeline schema is used to validate all inputs as they are entered.
+    When finished, saves a file with the selected parameters which can be
+    passed to Nextflow using the -params-file option.
 
-# nf-core pipelines subcommands
-@nf_core_cli.group()
+    Run using a remote pipeline name (such as GitHub `user/repo` or a URL),
+    a local pipeline directory or an ID from the nf-core web launch tool.
+    """
+    from nf_core.pipelines.launch import Launch
+
+    launcher = Launch(
+        pipeline,
+        revision,
+        command_only,
+        params_in,
+        params_out,
+        save_all,
+        show_hidden,
+        url,
+        id,
+    )
+    if not launcher.launch_pipeline():
+        sys.exit(1)
+
+
+# nf-core pipelines list
+@pipelines.command("list")
+@click.argument("keywords", required=False, nargs=-1, metavar="")
+@click.option(
+    "-s",
+    "--sort",
+    type=click.Choice(["release", "pulled", "name", "stars"]),
+    default="release",
+    help="How to sort listed pipelines",
+)
+@click.option("--json", is_flag=True, default=False, help="Print full output as JSON")
+@click.option("--show-archived", is_flag=True, default=False, help="Print archived workflows")
 @click.pass_context
-def pipelines(ctx):
+def list_pipelines(ctx, keywords, sort, json, show_archived):
     """
-    Commands to manage nf-core pipelines.
+    List available nf-core pipelines with local info.
+
+    Checks the web for a list of nf-core pipelines with their latest releases.
+    Shows which nf-core pipelines you have pulled locally and whether they are up to date.
""" - # ensure that ctx.obj exists and is a dict (in case `cli()` is called - # by means other than the `if` block below) - ctx.ensure_object(dict) + from nf_core.pipelines.list import list_workflows + stdout.print(list_workflows(keywords, sort, json, show_archived)) -# nf-core pipelines create -@pipelines.command("create") + +# nf-core pipelines sync +@pipelines.command("sync") @click.pass_context @click.option( - "-n", - "--name", - type=str, - help="The name of your new pipeline", + "-d", + "--dir", + type=click.Path(exists=True), + default=".", + help=r"Pipeline directory. [dim]\[default: current working directory][/]", ) -@click.option("-d", "--description", type=str, help="A short description of your pipeline") -@click.option("-a", "--author", type=str, help="Name of the main author(s)") -@click.option("--version", type=str, default="1.0.0dev", help="The initial version number to use") -@click.option("-f", "--force", is_flag=True, default=False, help="Overwrite output directory if it already exists") -@click.option("-o", "--outdir", help="Output directory for new pipeline (default: pipeline name)") -@click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") @click.option( - "--organisation", + "-b", + "--from-branch", type=str, - default="nf-core", - help="The name of the GitHub organisation where the pipeline will be hosted (default: nf-core)", + help="The git branch to use to fetch workflow variables.", ) -def create_pipeline(ctx, name, description, author, version, force, outdir, template_yaml, organisation): +@click.option( + "-p", + "--pull-request", + is_flag=True, + default=False, + help="Make a GitHub pull-request with the changes.", +) +@click.option( + "--force_pr", + is_flag=True, + default=False, + help="Force the creation of a pull-request, even if there are no changes.", +) +@click.option("-g", "--github-repository", type=str, help="GitHub PR: target repository.") +@click.option("-u", "--username", type=str, 
help="GitHub PR: auth username.") +@click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") +def sync_pipeline(ctx, dir, from_branch, pull_request, github_repository, username, template_yaml, force_pr): """ - Create a new pipeline using the nf-core template. + Sync a pipeline [cyan i]TEMPLATE[/] branch with the nf-core template. - Uses the nf-core template to make a skeleton Nextflow pipeline with all required - files, boilerplate code and best-practices. - \n\n - Run without any command line arguments to use an interactive interface. + To keep nf-core pipelines up to date with improvements in the main + template, we use a method of synchronisation that uses a special + git branch called [cyan i]TEMPLATE[/]. + + This command updates the [cyan i]TEMPLATE[/] branch with the latest version of + the nf-core template, so that these updates can be synchronised with + the pipeline. It is run automatically for all pipelines when ever a + new release of [link=https://github.com/nf-core/tools]nf-core/tools[/link] (and the included template) is made. 
""" - from nf_core.pipelines.create import PipelineCreateApp - from nf_core.pipelines.create.create import PipelineCreate + from nf_core.pipelines.sync.sync import PipelineSync, PullRequestExceptionError, SyncExceptionError + from nf_core.utils import is_pipeline_directory - if (name and description and author) or (template_yaml): - # If all command arguments are used, run without the interactive interface - try: - create_obj = PipelineCreate( - name, - description, - author, - version=version, - force=force, - outdir=outdir, - template_config=template_yaml, - organisation=organisation, - ) - create_obj.init_pipeline() - except UserWarning as e: - log.error(e) - sys.exit(1) - elif name or description or author or version != "1.0.0dev" or force or outdir or organisation != "nf-core": - log.error( - "[red]Partial arguments supplied.[/] " - "Run without [i]any[/] arguments for an interactive interface, " - "or with at least name + description + author to use non-interactively." - ) + # Check if pipeline directory contains necessary files + is_pipeline_directory(dir) + + # Sync the given pipeline dir + sync_obj = PipelineSync(dir, from_branch, pull_request, github_repository, username, template_yaml, force_pr) + try: + sync_obj.sync() + except (SyncExceptionError, PullRequestExceptionError) as e: + log.error(e) sys.exit(1) - else: - log.info("Launching interactive nf-core pipeline creation tool.") - app = PipelineCreateApp() - app.run() - sys.exit(app.return_code or 0) -# nf-core pipelines lint -@pipelines.command("lint") +# nf-core pipelines bump-version +@pipelines.command("bump-version") +@click.pass_context +@click.argument("new_version", required=True, metavar="") @click.option( "-d", "--dir", type=click.Path(exists=True), default=".", - help=r"Pipeline directory [dim]\[default: current working directory][/]", + help=r"Pipeline directory. 
[dim]\[default: current working directory][/]", ) @click.option( - "--release", + "-n", + "--nextflow", is_flag=True, - default=os.path.basename(os.path.dirname(os.environ.get("GITHUB_REF", "").strip(" '\""))) == "master" - and os.environ.get("GITHUB_REPOSITORY", "").startswith("nf-core/") - and not os.environ.get("GITHUB_REPOSITORY", "") == "nf-core/tools", - help="Execute additional checks for release-ready workflows.", + default=False, + help="Bump required nextflow version instead of pipeline version", ) +def bump_version_pipeline(ctx, new_version, dir, nextflow): + """ + Update nf-core pipeline version number. + + The pipeline version number is mentioned in a lot of different places + in nf-core pipelines. This tool updates the version for you automatically, + so that you don't accidentally miss any. + + Should be used for each pipeline release, and again for the next + development version after release. + + As well as the pipeline version, you can also change the required version of Nextflow. 
+ """ + from nf_core.pipelines.bump_version.bump_version import bump_nextflow_version, bump_pipeline_version + from nf_core.utils import Pipeline, is_pipeline_directory + + try: + # Check if pipeline directory contains necessary files + is_pipeline_directory(dir) + + # Make a pipeline object and load config etc + pipeline_obj = Pipeline(dir) + pipeline_obj._load() + + # Bump the pipeline version number + if not nextflow: + bump_pipeline_version(pipeline_obj, new_version) + else: + bump_nextflow_version(pipeline_obj, new_version) + except UserWarning as e: + log.error(e) + sys.exit(1) + + +# nf-core pipelines create-logo +@pipelines.command("create-logo") +@click.argument("logo-text", metavar="") +@click.option("-d", "--dir", type=click.Path(), default=".", help="Directory to save the logo in.") @click.option( - "-f", - "--fix", + "-n", + "--name", type=str, - metavar="", - multiple=True, - help="Attempt to automatically fix specified lint test", + help="Name of the output file (with or without '.png' suffix).", ) @click.option( - "-k", - "--key", - type=str, - metavar="", - multiple=True, - help="Run only these lint tests", + "--theme", + type=click.Choice(["light", "dark"]), + default="light", + help="Theme for the logo.", + show_default=True, ) -@click.option("-p", "--show-passed", is_flag=True, help="Show passing tests on the command line") -@click.option("-i", "--fail-ignored", is_flag=True, help="Convert ignored tests to failures") -@click.option("-w", "--fail-warned", is_flag=True, help="Convert warn tests to failures") @click.option( - "--markdown", - type=str, - metavar="", - help="File to write linting results to (Markdown)", + "--width", + type=int, + default=2300, + help="Width of the logo in pixels.", + show_default=True, ) @click.option( - "--json", - type=str, - metavar="", - help="File to write linting results to (JSON)", + "--format", + type=click.Choice(["png", "svg"]), + default="png", + help="Image format of the logo, either PNG or SVG.", + 
show_default=True, ) @click.option( - "--sort-by", - type=click.Choice(["module", "test"]), - default="test", - help="Sort lint output by module or test name.", - show_default=True, + "-f", + "--force", + is_flag=True, + default=False, + help="Overwrite any files if they already exist", ) -@click.pass_context -def lint_pipeline( - ctx, - dir, - release, - fix, - key, - show_passed, - fail_ignored, - fail_warned, - markdown, - json, - sort_by, -): +def logo_pipeline(logo_text, dir, name, theme, width, format, force): """ - Check pipeline code against nf-core guidelines. - - Runs a large number of automated tests to ensure that the supplied pipeline - meets the nf-core guidelines. Documentation of all lint tests can be found - on the nf-core website: [link=https://nf-co.re/tools/docs/]https://nf-co.re/tools/docs/[/] + Generate a logo with the nf-core logo template. - You can ignore tests using a file called [blue].nf-core.yml[/] [i](if you have a good reason!)[/]. - See the documentation for details. + This command generates an nf-core pipeline logo, using the supplied """ - from nf_core.pipelines.lint import run_linting - from nf_core.utils import is_pipeline_directory + from nf_core.pipelines.create_logo import create_logo - # Check if pipeline directory is a pipeline try: - is_pipeline_directory(dir) + if dir == ".": + dir = Path.cwd() + logo_path = create_logo(logo_text, dir, name, theme, width, format, force) + # Print path to logo relative to current working directory + try: + logo_path = Path(logo_path).relative_to(Path.cwd()) + except ValueError: + logo_path = Path(logo_path) + log.info(f"Created logo: [magenta]{logo_path}[/]") except UserWarning as e: log.error(e) sys.exit(1) - # Run the lint tests! 
+ +# nf-core licences +@nf_core_cli.command() +@click.argument("pipeline", required=True, metavar="") +@click.option("--json", is_flag=True, default=False, help="Print output in JSON") +def licences(pipeline, json): + """ + List software licences for a given workflow (DSL1 only). + + Checks the pipeline environment.yml file which lists all conda software packages, which is not available for DSL2 workflows. Therefore, this command only supports DSL1 workflows (for now). + Each of these is queried against the anaconda.org API to find the licence. + Package name, version and licence is printed to the command line. + """ + from nf_core.licences import WorkflowLicences + + lic = WorkflowLicences(pipeline) + lic.as_json = json try: - lint_obj, module_lint_obj, subworkflow_lint_obj = run_linting( - dir, - release, - fix, - key, - show_passed, - fail_ignored, - fail_warned, - sort_by, - markdown, - json, - ctx.obj["hide_progress"], - ) - swf_failed = 0 - if subworkflow_lint_obj is not None: - swf_failed = len(subworkflow_lint_obj.failed) - if len(lint_obj.failed) + len(module_lint_obj.failed) + swf_failed > 0: - sys.exit(1) - except AssertionError as e: - log.critical(e) + stdout.print(lic.run_licences()) + except LookupError as e: + log.error(e) sys.exit(1) - except UserWarning as e: + + +# nf-core pipelines schema subcommands +@pipelines.group("schema") +def pipeline_schema(): + """ + Suite of tools for developers to manage pipeline schema. + + All nf-core pipelines should have a nextflow_schema.json file in their + root directory that describes the different pipeline parameters. + """ + pass + + +# nf-core pipelines schema validate +@pipeline_schema.command("validate") +@click.argument("pipeline", required=True, metavar="") +@click.argument("params", type=click.Path(exists=True), required=True, metavar="") +def validate_schema(pipeline, params): + """ + Validate a set of parameters against a pipeline schema. 
+ + Nextflow can be run using the -params-file flag, which loads + script parameters from a JSON file. + + This command takes such a file and validates it against the pipeline + schema, checking whether all schema rules are satisfied. + """ + from nf_core.pipelines.schema import PipelineSchema + + schema_obj = PipelineSchema() + try: + schema_obj.get_schema_path(pipeline) + # Load and check schema + schema_obj.load_lint_schema() + except AssertionError as e: log.error(e) sys.exit(1) + schema_obj.load_input_params(params) + try: + schema_obj.validate_params() + except AssertionError: + sys.exit(1) -# nf-core create (deprecated) -@nf_core_cli.command(hidden=True, deprecated=True) +# nf-core pipelines schema build +@pipeline_schema.command("build") @click.option( - "-n", - "--name", + "-d", + "--dir", + type=click.Path(exists=True), + default=".", + help=r"Pipeline directory. [dim]\[default: current working directory][/]", +) +@click.option( + "--no-prompts", + is_flag=True, + help="Do not confirm changes, just update parameters and exit", +) +@click.option( + "--web-only", + is_flag=True, + help="Skip building using Nextflow config, just launch the web tool", +) +@click.option( + "--url", type=str, - help="The name of your new pipeline", + default="https://nf-co.re/pipeline_schema_builder", + help="Customise the builder URL (for development work)", ) -@click.option("-d", "--description", type=str, help="A short description of your pipeline") -@click.option("-a", "--author", type=str, help="Name of the main author(s)") -@click.option("--version", type=str, help="The initial version number to use") -@click.option("-f", "--force", is_flag=True, default=False, help="Overwrite output directory if it already exists") -@click.option("-o", "--outdir", help="Output directory for new pipeline (default: pipeline name)") -@click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") -@click.option("--plain", is_flag=True, help="Use the standard 
nf-core template") -def create(name, description, author, version, force, outdir, template_yaml, plain): +def build_schema(dir, no_prompts, web_only, url): """ - DEPRECATED - Create a new pipeline using the nf-core template. + Interactively build a pipeline schema from Nextflow params. + + Automatically detects parameters from the pipeline config and main.nf and + compares these to the pipeline schema. Prompts to add or remove parameters + if the two do not match one another. + + Once all parameters are accounted for, can launch a web GUI tool on the + https://nf-co.re website where you can annotate and organise parameters. + Listens for this to be completed and saves the updated schema. + """ + from nf_core.pipelines.schema import PipelineSchema + + try: + schema_obj = PipelineSchema() + if schema_obj.build_schema(dir, no_prompts, web_only, url) is False: + sys.exit(1) + except (UserWarning, AssertionError) as e: + log.error(e) + sys.exit(1) + + +# nf-core pipelines schema lint +@pipeline_schema.command("lint") +@click.argument( + "schema_path", + type=click.Path(exists=True), + default="nextflow_schema.json", + metavar="", +) +def lint_schema(schema_path): + """ + Check that a given pipeline schema is valid. - Uses the nf-core template to make a skeleton Nextflow pipeline with all required - files, boilerplate code and best-practices. + Checks whether the pipeline schema validates as JSON Schema Draft 7 + and adheres to the additional nf-core schema requirements. + + This function runs as part of the nf-core lint command, this is a convenience + command that does just the schema linting nice and quickly. + + If no schema path is provided, "nextflow_schema.json" will be used (if it exists). """ - log.error( - "The `[magenta]nf-core create[/]` command is deprecated. Use `[magenta]nf-core pipelines create[/]` instead." 
- ) - sys.exit(0) + from nf_core.pipelines.schema import PipelineSchema + + schema_obj = PipelineSchema() + try: + schema_obj.get_schema_path(schema_path) + schema_obj.load_lint_schema() + # Validate title and description - just warnings as schema should still work fine + try: + schema_obj.validate_schema_title_description() + except AssertionError as e: + log.warning(e) + except AssertionError: + sys.exit(1) -# nf-core modules subcommands -@nf_core_cli.group() +# nf-core pipelines schema docs +@pipeline_schema.command("docs") +@click.argument( + "schema_path", + type=click.Path(exists=True), + default="nextflow_schema.json", + required=False, + metavar="", +) @click.option( - "-g", - "--git-remote", + "-o", + "--output", type=str, - default=NF_CORE_MODULES_REMOTE, - help="Remote git repo to fetch files from", + metavar="", + help="Output filename. Defaults to standard out.", ) @click.option( - "-b", - "--branch", - type=str, - default=None, - help="Branch of git repository hosting modules.", + "-x", + "--format", + type=click.Choice(["markdown", "html"]), + default="markdown", + help="Format to output docs in.", ) +@click.option("-f", "--force", is_flag=True, default=False, help="Overwrite existing files") @click.option( - "-N", - "--no-pull", - is_flag=True, - default=False, - help="Do not pull in latest changes to local clone of modules repository.", + "-c", + "--columns", + type=str, + metavar="", + help="CSV list of columns to include in the parameter tables (parameter,description,type,default,required,hidden)", + default="parameter,description,type,default,required,hidden", ) -@click.pass_context -def modules(ctx, git_remote, branch, no_pull): +def docs_schema(schema_path, output, format, force, columns): """ - Commands to manage Nextflow DSL2 modules (tool wrappers). + Outputs parameter documentation for a pipeline schema. 
""" - # ensure that ctx.obj exists and is a dict (in case `cli()` is called - # by means other than the `if` block below) - ctx.ensure_object(dict) + if not os.path.exists(schema_path): + log.error("Could not find 'nextflow_schema.json' in current directory. Please specify a path.") + sys.exit(1) - # Place the arguments in a context object - ctx.obj["modules_repo_url"] = git_remote - ctx.obj["modules_repo_branch"] = branch - ctx.obj["modules_repo_no_pull"] = no_pull + from nf_core.pipelines.schema import PipelineSchema + schema_obj = PipelineSchema() + # Assume we're in a pipeline dir root if schema path not set + schema_obj.get_schema_path(schema_path) + schema_obj.load_schema() + schema_obj.print_documentation(output, format, force, columns.split(",")) -# nf-core subworkflows click command + +# nf-core modules subcommands @nf_core_cli.group() @click.option( "-g", @@ -851,9 +1047,9 @@ def modules(ctx, git_remote, branch, no_pull): help="Do not pull in latest changes to local clone of modules repository.", ) @click.pass_context -def subworkflows(ctx, git_remote, branch, no_pull): +def modules(ctx, git_remote, branch, no_pull): """ - Commands to manage Nextflow DSL2 subworkflows (tool wrappers). + Commands to manage Nextflow DSL2 modules (tool wrappers). 
""" # ensure that ctx.obj exists and is a dict (in case `cli()` is called # by means other than the `if` block below) @@ -1489,6 +1685,44 @@ def bump_versions(ctx, tool, dir, all, show_all): sys.exit(1) +# nf-core subworkflows click command +@nf_core_cli.group() +@click.option( + "-g", + "--git-remote", + type=str, + default=NF_CORE_MODULES_REMOTE, + help="Remote git repo to fetch files from", +) +@click.option( + "-b", + "--branch", + type=str, + default=None, + help="Branch of git repository hosting modules.", +) +@click.option( + "-N", + "--no-pull", + is_flag=True, + default=False, + help="Do not pull in latest changes to local clone of modules repository.", +) +@click.pass_context +def subworkflows(ctx, git_remote, branch, no_pull): + """ + Commands to manage Nextflow DSL2 subworkflows (tool wrappers). + """ + # ensure that ctx.obj exists and is a dict (in case `cli()` is called + # by means other than the `if` block below) + ctx.ensure_object(dict) + + # Place the arguments in a context object + ctx.obj["modules_repo_url"] = git_remote + ctx.obj["modules_repo_branch"] = branch + ctx.obj["modules_repo_no_pull"] = no_pull + + # nf-core subworkflows create @subworkflows.command("create") @click.pass_context @@ -1970,8 +2204,11 @@ def subworkflows_update( sys.exit(1) -# nf-core schema subcommands -@nf_core_cli.group() +## DEPRECATED commands since v3.0.0 + + +# nf-core schema subcommands (deprecated) +@nf_core_cli.group(deprecated=True, hidden=True) def schema(): """ Suite of tools for developers to manage pipeline schema. @@ -1982,12 +2219,13 @@ def schema(): pass -# nf-core schema validate -@schema.command() +# nf-core schema validate (deprecated) +@schema.command("validate", deprecated=True) @click.argument("pipeline", required=True, metavar="") @click.argument("params", type=click.Path(exists=True), required=True, metavar="") def validate(pipeline, params): """ + DEPRECATED Validate a set of parameters against a pipeline schema. 
Nextflow can be run using the -params-file flag, which loads @@ -1996,25 +2234,14 @@ def validate(pipeline, params): This command takes such a file and validates it against the pipeline schema, checking whether all schema rules are satisfied. """ - from nf_core.schema import PipelineSchema - - schema_obj = PipelineSchema() - try: - schema_obj.get_schema_path(pipeline) - # Load and check schema - schema_obj.load_lint_schema() - except AssertionError as e: - log.error(e) - sys.exit(1) - schema_obj.load_input_params(params) - try: - schema_obj.validate_params() - except AssertionError: - sys.exit(1) + log.error( + "The `[magenta]nf-core schema validate[/]` command is deprecated. Use `[magenta]nf-core pipelines schema validate[/]` instead." + ) + sys.exit(0) -# nf-core schema build -@schema.command() +# nf-core schema build (deprecated) +@schema.command("build", deprecated=True) @click.option( "-d", "--dir", @@ -2040,6 +2267,7 @@ def validate(pipeline, params): ) def build(dir, no_prompts, web_only, url): """ + DEPRECATED Interactively build a pipeline schema from Nextflow params. Automatically detects parameters from the pipeline config and main.nf and @@ -2050,19 +2278,14 @@ def build(dir, no_prompts, web_only, url): https://nf-co.re website where you can annotate and organise parameters. Listens for this to be completed and saves the updated schema. """ - from nf_core.schema import PipelineSchema - - try: - schema_obj = PipelineSchema() - if schema_obj.build_schema(dir, no_prompts, web_only, url) is False: - sys.exit(1) - except (UserWarning, AssertionError) as e: - log.error(e) - sys.exit(1) + log.error( + "The `[magenta]nf-core schema build[/]` command is deprecated. Use `[magenta]nf-core pipelines schema build[/]` instead." 
+ ) + sys.exit(0) -# nf-core schema lint -@schema.command("lint") +# nf-core schema lint (deprecated) +@schema.command("lint", deprecated=True) @click.argument( "schema_path", type=click.Path(exists=True), @@ -2071,6 +2294,7 @@ def build(dir, no_prompts, web_only, url): ) def schema_lint(schema_path): """ + DEPRECATED Check that a given pipeline schema is valid. Checks whether the pipeline schema validates as JSON Schema Draft 7 @@ -2081,67 +2305,153 @@ def schema_lint(schema_path): If no schema path is provided, "nextflow_schema.json" will be used (if it exists). """ - from nf_core.schema import PipelineSchema + log.error( + "The `[magenta]nf-core schema lint[/]` command is deprecated. Use `[magenta]nf-core pipelines schema lint[/]` instead." + ) + sys.exit(0) - schema_obj = PipelineSchema() - try: - schema_obj.get_schema_path(schema_path) - schema_obj.load_lint_schema() - # Validate title and description - just warnings as schema should still work fine - try: - schema_obj.validate_schema_title_description() - except AssertionError as e: - log.warning(e) - except AssertionError: - sys.exit(1) + +# nf-core schema docs (deprecated) +@schema.command("docs", deprecated=True) +@click.argument( + "schema_path", + type=click.Path(exists=True), + default="nextflow_schema.json", + required=False, + metavar="", +) +@click.option( + "-o", + "--output", + type=str, + metavar="", + help="Output filename. 
Defaults to standard out.", +) +@click.option( + "-x", + "--format", + type=click.Choice(["markdown", "html"]), + default="markdown", + help="Format to output docs in.", +) +@click.option("-f", "--force", is_flag=True, default=False, help="Overwrite existing files") +@click.option( + "-c", + "--columns", + type=str, + metavar="", + help="CSV list of columns to include in the parameter tables (parameter,description,type,default,required,hidden)", + default="parameter,description,type,default,required,hidden", +) +def docs(schema_path, output, format, force, columns): + """ + DEPRECATED + Outputs parameter documentation for a pipeline schema. + """ + log.error( + "The `[magenta]nf-core schema docs[/]` command is deprecated. Use `[magenta]nf-core pipelines schema docs[/]` instead." + ) + sys.exit(0) + + +# nf-core create-logo (deprecated) +@nf_core_cli.command("create-logo", deprecated=True, hidden=True) +@click.argument("logo-text", metavar="") +@click.option("-d", "--dir", type=click.Path(), default=".", help="Directory to save the logo in.") +@click.option( + "-n", + "--name", + type=str, + help="Name of the output file (with or without '.png' suffix).", +) +@click.option( + "--theme", + type=click.Choice(["light", "dark"]), + default="light", + help="Theme for the logo.", + show_default=True, +) +@click.option( + "--width", + type=int, + default=2300, + help="Width of the logo in pixels.", + show_default=True, +) +@click.option( + "--format", + type=click.Choice(["png", "svg"]), + default="png", + help="Image format of the logo, either PNG or SVG.", + show_default=True, +) +@click.option( + "-f", + "--force", + is_flag=True, + default=False, + help="Overwrite any files if they already exist", +) +def logo(logo_text, dir, name, theme, width, format, force): + """ + DEPRECATED + Generate a logo with the nf-core logo template. 
+ + This command generates an nf-core pipeline logo, using the supplied + """ + log.error( + "The `[magenta]nf-core create-logo[/]` command is deprecated. Use `[magenta]nf-core pipelines create-logo[/]` instead." + ) + sys.exit(0) -@schema.command() -@click.argument( - "schema_path", +# nf-core sync (deprecated) +@nf_core_cli.command(hidden=True, deprecated=True) +@click.option( + "-d", + "--dir", type=click.Path(exists=True), - default="nextflow_schema.json", - required=False, - metavar="", + default=".", + help=r"Pipeline directory. [dim]\[default: current working directory][/]", ) @click.option( - "-o", - "--output", + "-b", + "--from-branch", type=str, - metavar="", - help="Output filename. Defaults to standard out.", + help="The git branch to use to fetch workflow variables.", ) @click.option( - "-x", - "--format", - type=click.Choice(["markdown", "html"]), - default="markdown", - help="Format to output docs in.", + "-p", + "--pull-request", + is_flag=True, + default=False, + help="Make a GitHub pull-request with the changes.", ) -@click.option("-f", "--force", is_flag=True, default=False, help="Overwrite existing files") @click.option( - "-c", - "--columns", - type=str, - metavar="", - help="CSV list of columns to include in the parameter tables (parameter,description,type,default,required,hidden)", - default="parameter,description,type,default,required,hidden", + "--force_pr", + is_flag=True, + default=False, + help="Force the creation of a pull-request, even if there are no changes.", ) -def docs(schema_path, output, format, force, columns): - """ - Outputs parameter documentation for a pipeline schema. 
+@click.option("-g", "--github-repository", type=str, help="GitHub PR: target repository.") +@click.option("-u", "--username", type=str, help="GitHub PR: auth username.") +@click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") +def sync(dir, from_branch, pull_request, github_repository, username, template_yaml, force_pr): """ - if not os.path.exists(schema_path): - log.error("Could not find 'nextflow_schema.json' in current directory. Please specify a path.") - sys.exit(1) + DEPRECATED + Sync a pipeline [cyan i]TEMPLATE[/] branch with the nf-core template. - from nf_core.schema import PipelineSchema + To keep nf-core pipelines up to date with improvements in the main + template, we use a method of synchronisation that uses a special + git branch called [cyan i]TEMPLATE[/]. - schema_obj = PipelineSchema() - # Assume we're in a pipeline dir root if schema path not set - schema_obj.get_schema_path(schema_path) - schema_obj.load_schema() - schema_obj.print_documentation(output, format, force, columns.split(",")) + This command updates the [cyan i]TEMPLATE[/] branch with the latest version of + the nf-core template, so that these updates can be synchronised with + the pipeline. It is run automatically for all pipelines when ever a + new release of [link=https://github.com/nf-core/tools]nf-core/tools[/link] (and the included template) is made. + """ + log.error("The `[magenta]nf-core sync[/]` command is deprecated. 
Use `[magenta]nf-core pipelines sync[/]` instead.") + sys.exit(0) # nf-core bump-version (deprecated) @@ -2181,226 +2491,360 @@ def bump_version(new_version, dir, nextflow): sys.exit(0) -# nf-core pipelines bump-version -@pipelines.command("bump-version") -@click.pass_context -@click.argument("new_version", required=True, metavar="") +# nf-core list (deprecated) +@nf_core_cli.command("list", deprecated=True, hidden=True) +@click.argument("keywords", required=False, nargs=-1, metavar="") @click.option( - "-d", - "--dir", + "-s", + "--sort", + type=click.Choice(["release", "pulled", "name", "stars"]), + default="release", + help="How to sort listed pipelines", +) +@click.option("--json", is_flag=True, default=False, help="Print full output as JSON") +@click.option("--show-archived", is_flag=True, default=False, help="Print archived workflows") +def list(keywords, sort, json, show_archived): + """ + DEPRECATED + List available nf-core pipelines with local info. + + Checks the web for a list of nf-core pipelines with their latest releases. + Shows which nf-core pipelines you have pulled locally and whether they are up to date. + """ + log.error("The `[magenta]nf-core list[/]` command is deprecated. 
Use `[magenta]nf-core pipelines list[/]` instead.") + sys.exit(0) + + +# nf-core launch (deprecated) +@nf_core_cli.command(deprecated=True, hidden=True) +@click.argument("pipeline", required=False, metavar="") +@click.option("-r", "--revision", help="Release/branch/SHA of the project to run (if remote)") +@click.option("-i", "--id", help="ID for web-gui launch parameter set") +@click.option( + "-c", + "--command-only", + is_flag=True, + default=False, + help="Create Nextflow command with params (no params file)", +) +@click.option( + "-o", + "--params-out", + type=click.Path(), + default=os.path.join(os.getcwd(), "nf-params.json"), + help="Path to save run parameters file", +) +@click.option( + "-p", + "--params-in", type=click.Path(exists=True), - default=".", - help=r"Pipeline directory. [dim]\[default: current working directory][/]", + help="Set of input run params to use from a previous run", ) @click.option( - "-n", - "--nextflow", + "-a", + "--save-all", is_flag=True, default=False, - help="Bump required nextflow version instead of pipeline version", + help="Save all parameters, even if unchanged from default", ) -def bump_version_pipeline(ctx, new_version, dir, nextflow): +@click.option( + "-x", + "--show-hidden", + is_flag=True, + default=False, + help="Show hidden params which don't normally need changing", +) +@click.option( + "-u", + "--url", + type=str, + default="https://nf-co.re/launch", + help="Customise the builder URL (for development work)", +) +def launch( + pipeline, + id, + revision, + command_only, + params_in, + params_out, + save_all, + show_hidden, + url, +): """ - Update nf-core pipeline version number. + DEPRECATED + Launch a pipeline using a web GUI or command line prompts. - The pipeline version number is mentioned in a lot of different places - in nf-core pipelines. This tool updates the version for you automatically, - so that you don't accidentally miss any. 
+ Uses the pipeline schema file to collect inputs for all available pipeline + parameters. Parameter names, descriptions and help text are shown. + The pipeline schema is used to validate all inputs as they are entered. - Should be used for each pipeline release, and again for the next - development version after release. + When finished, saves a file with the selected parameters which can be + passed to Nextflow using the -params-file option. - As well as the pipeline version, you can also change the required version of Nextflow. + Run using a remote pipeline name (such as GitHub `user/repo` or a URL), + a local pipeline directory or an ID from the nf-core web launch tool. """ - from nf_core.pipelines.bump_version.bump_version import bump_nextflow_version, bump_pipeline_version - from nf_core.utils import Pipeline, is_pipeline_directory + log.error( + "The `[magenta]nf-core launch[/]` command is deprecated. Use `[magenta]nf-core pipelines launch[/]` instead." + ) + sys.exit(0) - try: - # Check if pipeline directory contains necessary files - is_pipeline_directory(dir) - # Make a pipeline object and load config etc - pipeline_obj = Pipeline(dir) - pipeline_obj._load() +# nf-core create-params-file (deprecated) +@nf_core_cli.command(deprecated=True, hidden=True) +@click.argument("pipeline", required=False, metavar="") +@click.option("-r", "--revision", help="Release/branch/SHA of the pipeline (if remote)") +@click.option( + "-o", + "--output", + type=str, + default="nf-params.yml", + metavar="", + help="Output filename. Defaults to `nf-params.yml`.", +) +@click.option("-f", "--force", is_flag=True, default=False, help="Overwrite existing files") +@click.option( + "-x", + "--show-hidden", + is_flag=True, + default=False, + help="Show hidden params which don't normally need changing", +) +def create_params_file(pipeline, revision, output, force, show_hidden): + """ + DEPRECATED + Build a parameter file for a pipeline. 
- # Bump the pipeline version number - if not nextflow: - bump_pipeline_version(pipeline_obj, new_version) - else: - bump_nextflow_version(pipeline_obj, new_version) - except UserWarning as e: - log.error(e) - sys.exit(1) + Uses the pipeline schema file to generate a YAML parameters file. + Parameters are set to the pipeline defaults and descriptions are shown in comments. + After the output file is generated, it can then be edited as needed before + passing to nextflow using the `-params-file` option. + Run using a remote pipeline name (such as GitHub `user/repo` or a URL), + a local pipeline directory. + """ + log.error( + "The `[magenta]nf-core create-params-file[/]` command is deprecated. Use `[magenta]nf-core pipelines create-params-file[/]` instead." + ) + sys.exit(0) -# nf-core create-logo -@nf_core_cli.command("create-logo") -@click.argument("logo-text", metavar="") -@click.option("-d", "--dir", type=click.Path(), default=".", help="Directory to save the logo in.") + +# nf-core download (deprecated) +@nf_core_cli.command(deprecated=True, hidden=True) +@click.argument("pipeline", required=False, metavar="") +@click.option( + "-r", + "--revision", + multiple=True, + help="Pipeline release to download. Multiple invocations are possible, e.g. `-r 1.1 -r 1.2`", +) +@click.option("-o", "--outdir", type=str, help="Output directory") +@click.option( + "-x", + "--compress", + type=click.Choice(["tar.gz", "tar.bz2", "zip", "none"]), + help="Archive compression type", +) +@click.option("-f", "--force", is_flag=True, default=False, help="Overwrite existing files") +@click.option( + "-t", + "--tower", + is_flag=True, + default=False, + hidden=True, + help="Download for Seqera Platform. 
DEPRECATED: Please use `--platform` instead.", +) +@click.option( + "--platform", + is_flag=True, + default=False, + help="Download for Seqera Platform (formerly Nextflow Tower)", +) +@click.option( + "-d", + "--download-configuration", + is_flag=True, + default=False, + help="Include configuration profiles in download. Not available with `--platform`", +) @click.option( - "-n", - "--name", - type=str, - help="Name of the output file (with or without '.png' suffix).", + "--tag", + multiple=True, + help="Add custom alias tags to `--platform` downloads. For example, `--tag \"3.10=validated\"` adds the custom 'validated' tag to the 3.10 release.", ) @click.option( - "--theme", - type=click.Choice(["light", "dark"]), - default="light", - help="Theme for the logo.", - show_default=True, + "-s", + "--container-system", + type=click.Choice(["none", "singularity"]), + help="Download container images of required software.", ) @click.option( - "--width", - type=int, - default=2300, - help="Width of the logo in pixels.", - show_default=True, + "-l", + "--container-library", + multiple=True, + help="Container registry/library or mirror to pull images from.", ) @click.option( - "--format", - type=click.Choice(["png", "svg"]), - default="png", - help="Image format of the logo, either PNG or SVG.", - show_default=True, + "-u", + "--container-cache-utilisation", + type=click.Choice(["amend", "copy", "remote"]), + help="Utilise a `singularity.cacheDir` in the download process, if applicable.", ) @click.option( - "-f", - "--force", - is_flag=True, - default=False, - help="Overwrite any files if they already exist", + "-i", + "--container-cache-index", + type=str, + help="List of images already available in a remote `singularity.cacheDir`.", ) -def logo(logo_text, dir, name, theme, width, format, force): +@click.option( + "-p", + "--parallel-downloads", + type=int, + default=4, + help="Number of parallel image downloads", +) +def download( + pipeline, + revision, + outdir, + 
compress, + force, + tower, + platform, + download_configuration, + tag, + container_system, + container_library, + container_cache_utilisation, + container_cache_index, + parallel_downloads, +): """ - Generate a logo with the nf-core logo template. + DEPRECATED + Download a pipeline, nf-core/configs and pipeline singularity images. - This command generates an nf-core pipeline logo, using the supplied + Collects all files in a single archive and configures the downloaded + workflow to use relative paths to the configs and singularity images. """ - from nf_core.create_logo import create_logo - - try: - if dir == ".": - dir = Path.cwd() - logo_path = create_logo(logo_text, dir, name, theme, width, format, force) - # Print path to logo relative to current working directory - try: - logo_path = Path(logo_path).relative_to(Path.cwd()) - except ValueError: - logo_path = Path(logo_path) - log.info(f"Created logo: [magenta]{logo_path}[/]") - except UserWarning as e: - log.error(e) - sys.exit(1) + log.error( + "The `[magenta]nf-core download[/]` command is deprecated. Use `[magenta]nf-core pipelines download[/]` instead." + ) + sys.exit(0) -# nf-core sync (deprecated) +# nf-core lint (deprecated) @nf_core_cli.command(hidden=True, deprecated=True) @click.option( "-d", "--dir", type=click.Path(exists=True), default=".", - help=r"Pipeline directory. 
[dim]\[default: current working directory][/]", + help=r"Pipeline directory [dim]\[default: current working directory][/]", ) @click.option( - "-b", - "--from-branch", + "--release", + is_flag=True, + default=os.path.basename(os.path.dirname(os.environ.get("GITHUB_REF", "").strip(" '\""))) == "master" + and os.environ.get("GITHUB_REPOSITORY", "").startswith("nf-core/") + and not os.environ.get("GITHUB_REPOSITORY", "") == "nf-core/tools", + help="Execute additional checks for release-ready workflows.", +) +@click.option( + "-f", + "--fix", type=str, - help="The git branch to use to fetch workflow variables.", + metavar="", + multiple=True, + help="Attempt to automatically fix specified lint test", ) @click.option( - "-p", - "--pull-request", - is_flag=True, - default=False, - help="Make a GitHub pull-request with the changes.", + "-k", + "--key", + type=str, + metavar="", + multiple=True, + help="Run only these lint tests", +) +@click.option("-p", "--show-passed", is_flag=True, help="Show passing tests on the command line") +@click.option("-i", "--fail-ignored", is_flag=True, help="Convert ignored tests to failures") +@click.option("-w", "--fail-warned", is_flag=True, help="Convert warn tests to failures") +@click.option( + "--markdown", + type=str, + metavar="", + help="File to write linting results to (Markdown)", ) @click.option( - "--force_pr", - is_flag=True, - default=False, - help="Force the creation of a pull-request, even if there are no changes.", + "--json", + type=str, + metavar="", + help="File to write linting results to (JSON)", ) -@click.option("-g", "--github-repository", type=str, help="GitHub PR: target repository.") -@click.option("-u", "--username", type=str, help="GitHub PR: auth username.") -@click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") -def sync(dir, from_branch, pull_request, github_repository, username, template_yaml, force_pr): +@click.option( + "--sort-by", + type=click.Choice(["module", 
"test"]), + default="test", + help="Sort lint output by module or test name.", + show_default=True, +) +@click.pass_context +def lint( + ctx, + dir, + release, + fix, + key, + show_passed, + fail_ignored, + fail_warned, + markdown, + json, + sort_by, +): """ DEPRECATED - Sync a pipeline [cyan i]TEMPLATE[/] branch with the nf-core template. + Check pipeline code against nf-core guidelines. - To keep nf-core pipelines up to date with improvements in the main - template, we use a method of synchronisation that uses a special - git branch called [cyan i]TEMPLATE[/]. + Runs a large number of automated tests to ensure that the supplied pipeline + meets the nf-core guidelines. Documentation of all lint tests can be found + on the nf-core website: [link=https://nf-co.re/tools/docs/]https://nf-co.re/tools/docs/[/] - This command updates the [cyan i]TEMPLATE[/] branch with the latest version of - the nf-core template, so that these updates can be synchronised with - the pipeline. It is run automatically for all pipelines when ever a - new release of [link=https://github.com/nf-core/tools]nf-core/tools[/link] (and the included template) is made. + You can ignore tests using a file called [blue].nf-core.yml[/] [i](if you have a good reason!)[/]. + See the documentation for details. """ - log.error("The `[magenta]nf-core sync[/]` command is deprecated. Use `[magenta]nf-core pipelines sync[/]` instead.") + log.error("The `[magenta]nf-core lint[/]` command is deprecated. Use `[magenta]nf-core pipelines lint[/]` instead.") sys.exit(0) -# nf-core pipelines sync -@pipelines.command("sync") -@click.pass_context -@click.option( - "-d", - "--dir", - type=click.Path(exists=True), - default=".", - help=r"Pipeline directory. 
[dim]\[default: current working directory][/]", -) +# nf-core create (deprecated) +@nf_core_cli.command(hidden=True, deprecated=True) @click.option( - "-b", - "--from-branch", + "-n", + "--name", type=str, - help="The git branch to use to fetch workflow variables.", -) -@click.option( - "-p", - "--pull-request", - is_flag=True, - default=False, - help="Make a GitHub pull-request with the changes.", -) -@click.option( - "--force_pr", - is_flag=True, - default=False, - help="Force the creation of a pull-request, even if there are no changes.", + help="The name of your new pipeline", ) -@click.option("-g", "--github-repository", type=str, help="GitHub PR: target repository.") -@click.option("-u", "--username", type=str, help="GitHub PR: auth username.") +@click.option("-d", "--description", type=str, help="A short description of your pipeline") +@click.option("-a", "--author", type=str, help="Name of the main author(s)") +@click.option("--version", type=str, help="The initial version number to use") +@click.option("-f", "--force", is_flag=True, default=False, help="Overwrite output directory if it already exists") +@click.option("-o", "--outdir", help="Output directory for new pipeline (default: pipeline name)") @click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") -def sync_pipeline(ctx, dir, from_branch, pull_request, github_repository, username, template_yaml, force_pr): +@click.option("--plain", is_flag=True, help="Use the standard nf-core template") +def create(name, description, author, version, force, outdir, template_yaml, plain): """ - Sync a pipeline [cyan i]TEMPLATE[/] branch with the nf-core template. - - To keep nf-core pipelines up to date with improvements in the main - template, we use a method of synchronisation that uses a special - git branch called [cyan i]TEMPLATE[/]. + DEPRECATED + Create a new pipeline using the nf-core template. 
- This command updates the [cyan i]TEMPLATE[/] branch with the latest version of - the nf-core template, so that these updates can be synchronised with - the pipeline. It is run automatically for all pipelines when ever a - new release of [link=https://github.com/nf-core/tools]nf-core/tools[/link] (and the included template) is made. + Uses the nf-core template to make a skeleton Nextflow pipeline with all required + files, boilerplate code and best-practices. """ - from nf_core.pipelines.sync.sync import PipelineSync, PullRequestExceptionError, SyncExceptionError - from nf_core.utils import is_pipeline_directory - - # Check if pipeline directory contains necessary files - is_pipeline_directory(dir) - - # Sync the given pipeline dir - sync_obj = PipelineSync(dir, from_branch, pull_request, github_repository, username, template_yaml, force_pr) - try: - sync_obj.sync() - except (SyncExceptionError, PullRequestExceptionError) as e: - log.error(e) - sys.exit(1) + log.error( + "The `[magenta]nf-core create[/]` command is deprecated. Use `[magenta]nf-core pipelines create[/]` instead." 
+ ) + sys.exit(0) # Main script is being run - launch the CLI diff --git a/nf_core/pipelines/bump_version/bump_version.py b/nf_core/pipelines/bump_version.py similarity index 100% rename from nf_core/pipelines/bump_version/bump_version.py rename to nf_core/pipelines/bump_version.py diff --git a/nf_core/pipelines/bump_version/__init__.py b/nf_core/pipelines/bump_version/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 763165da6..bdbbca646 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -15,10 +15,10 @@ import yaml import nf_core -import nf_core.schema +import nf_core.pipelines.schema import nf_core.utils -from nf_core.create_logo import create_logo from nf_core.pipelines.create.utils import CreateConfig +from nf_core.pipelines.create_logo import create_logo from nf_core.pipelines.lint_utils import run_prettier_on_file log = logging.getLogger(__name__) @@ -382,7 +382,7 @@ def update_nextflow_schema(self): """ schema_path = self.outdir / "nextflow_schema.json" - schema = nf_core.schema.PipelineSchema() + schema = nf_core.pipelines.schema.PipelineSchema() schema.schema_filename = schema_path schema.no_prompts = True schema.load_schema() diff --git a/nf_core/create_logo.py b/nf_core/pipelines/create_logo.py similarity index 100% rename from nf_core/create_logo.py rename to nf_core/pipelines/create_logo.py diff --git a/nf_core/download.py b/nf_core/pipelines/download.py similarity index 99% rename from nf_core/download.py rename to nf_core/pipelines/download.py index f5ab3a0f5..5a2acf489 100644 --- a/nf_core/download.py +++ b/nf_core/pipelines/download.py @@ -23,7 +23,7 @@ from pkg_resources import parse_version as version_parser import nf_core -import nf_core.list +import nf_core.pipelines.list import nf_core.utils from nf_core.synced_repo import RemoteProgressbar, SyncedRepo from nf_core.utils import ( @@ -169,7 +169,7 @@ 
def __init__( self.containers_remote = [] # stores the remote images provided in the file. # Fetch remote workflows - self.wfs = nf_core.list.Workflows() + self.wfs = nf_core.pipelines.list.Workflows() self.wfs.get_remote_workflows() def download_workflow(self): diff --git a/nf_core/launch.py b/nf_core/pipelines/launch.py similarity index 99% rename from nf_core/launch.py rename to nf_core/pipelines/launch.py index 423f0728e..77a7948ea 100644 --- a/nf_core/launch.py +++ b/nf_core/pipelines/launch.py @@ -13,7 +13,7 @@ from rich.markdown import Markdown from rich.prompt import Confirm -import nf_core.schema +import nf_core.pipelines.schema import nf_core.utils from nf_core.pipelines.lint_utils import dump_json_with_prettier @@ -138,7 +138,7 @@ def launch_pipeline(self): # Check if we have a web ID if self.web_id is not None: - self.schema_obj = nf_core.schema.PipelineSchema() + self.schema_obj = nf_core.pipelines.schema.PipelineSchema() try: if not self.get_web_launch_response(): log.info( @@ -191,7 +191,7 @@ def get_pipeline_schema(self): """Load and validate the schema from the supplied pipeline""" # Set up the schema - self.schema_obj = nf_core.schema.PipelineSchema() + self.schema_obj = nf_core.pipelines.schema.PipelineSchema() # Check if this is a local directory localpath = os.path.abspath(os.path.expanduser(self.pipeline)) diff --git a/nf_core/pipelines/lint/nextflow_config.py b/nf_core/pipelines/lint/nextflow_config.py index 4fa49acbc..f62100a70 100644 --- a/nf_core/pipelines/lint/nextflow_config.py +++ b/nf_core/pipelines/lint/nextflow_config.py @@ -3,7 +3,7 @@ import re from pathlib import Path -from nf_core.schema import PipelineSchema +from nf_core.pipelines.schema import PipelineSchema log = logging.getLogger(__name__) diff --git a/nf_core/pipelines/lint/schema_description.py b/nf_core/pipelines/lint/schema_description.py index 90735f609..82165b6e7 100644 --- a/nf_core/pipelines/lint/schema_description.py +++ 
b/nf_core/pipelines/lint/schema_description.py @@ -1,4 +1,4 @@ -import nf_core.schema +import nf_core.pipelines.schema def schema_description(self): @@ -17,7 +17,7 @@ def schema_description(self): # First, get the top-level config options for the pipeline # Schema object already created in the `schema_lint` test - self.schema_obj = nf_core.schema.PipelineSchema() + self.schema_obj = nf_core.pipelines.schema.PipelineSchema() self.schema_obj.get_schema_path(self.wf_path) self.schema_obj.get_wf_params() self.schema_obj.no_prompts = True diff --git a/nf_core/pipelines/lint/schema_lint.py b/nf_core/pipelines/lint/schema_lint.py index 3342dc4b9..21858a0f4 100644 --- a/nf_core/pipelines/lint/schema_lint.py +++ b/nf_core/pipelines/lint/schema_lint.py @@ -1,6 +1,6 @@ import logging -import nf_core.schema +import nf_core.pipelines.schema def schema_lint(self): @@ -68,7 +68,7 @@ def schema_lint(self): logging.getLogger("nf_core.schema").setLevel(logging.ERROR) # Lint the schema - self.schema_obj = nf_core.schema.PipelineSchema() + self.schema_obj = nf_core.pipelines.schema.PipelineSchema() self.schema_obj.get_schema_path(self.wf_path) try: diff --git a/nf_core/pipelines/lint/schema_params.py b/nf_core/pipelines/lint/schema_params.py index 9280fe470..4c569defd 100644 --- a/nf_core/pipelines/lint/schema_params.py +++ b/nf_core/pipelines/lint/schema_params.py @@ -1,4 +1,4 @@ -import nf_core.schema +import nf_core.pipelines.schema def schema_params(self): @@ -15,7 +15,7 @@ def schema_params(self): failed = [] # First, get the top-level config options for the pipeline - self.schema_obj = nf_core.schema.PipelineSchema() + self.schema_obj = nf_core.pipelines.schema.PipelineSchema() self.schema_obj.get_schema_path(self.wf_path) self.schema_obj.get_wf_params() self.schema_obj.no_prompts = True diff --git a/nf_core/list.py b/nf_core/pipelines/list.py similarity index 100% rename from nf_core/list.py rename to nf_core/pipelines/list.py diff --git a/nf_core/params_file.py 
b/nf_core/pipelines/params_file.py similarity index 98% rename from nf_core/params_file.py rename to nf_core/pipelines/params_file.py index 78798b065..0384521c7 100644 --- a/nf_core/params_file.py +++ b/nf_core/pipelines/params_file.py @@ -8,9 +8,9 @@ import questionary -import nf_core.list +import nf_core.pipelines.list import nf_core.utils -from nf_core.schema import PipelineSchema +from nf_core.pipelines.schema import PipelineSchema log = logging.getLogger(__name__) @@ -97,7 +97,7 @@ def __init__( self.schema_obj: Optional[PipelineSchema] = None # Fetch remote workflows - self.wfs = nf_core.list.Workflows() + self.wfs = nf_core.pipelines.list.Workflows() self.wfs.get_remote_workflows() def get_pipeline(self): diff --git a/nf_core/refgenie.py b/nf_core/pipelines/refgenie.py similarity index 100% rename from nf_core/refgenie.py rename to nf_core/pipelines/refgenie.py diff --git a/nf_core/schema.py b/nf_core/pipelines/schema.py similarity index 99% rename from nf_core/schema.py rename to nf_core/pipelines/schema.py index eee9f9978..28a7d0b4f 100644 --- a/nf_core/schema.py +++ b/nf_core/pipelines/schema.py @@ -16,7 +16,7 @@ from rich.prompt import Confirm from rich.syntax import Syntax -import nf_core.list +import nf_core.pipelines.list import nf_core.utils from nf_core.pipelines.lint_utils import dump_json_with_prettier, run_prettier_on_file @@ -66,7 +66,7 @@ def get_schema_path( # Path does not exist - assume a name of a remote workflow elif not local_only: - self.pipeline_dir = nf_core.list.get_local_wf(path, revision=revision) + self.pipeline_dir = nf_core.pipelines.list.get_local_wf(path, revision=revision) self.schema_filename = Path(self.pipeline_dir or "", "nextflow_schema.json") # check if the schema file exists if not self.schema_filename.exists(): diff --git a/nf_core/pipelines/sync/sync.py b/nf_core/pipelines/sync.py similarity index 99% rename from nf_core/pipelines/sync/sync.py rename to nf_core/pipelines/sync.py index ccee2a9c4..f9a874c7e 100644 --- 
a/nf_core/pipelines/sync/sync.py +++ b/nf_core/pipelines/sync.py @@ -15,8 +15,8 @@ from git import GitCommandError, InvalidGitRepositoryError import nf_core -import nf_core.list import nf_core.pipelines.create.create +import nf_core.pipelines.list import nf_core.utils log = logging.getLogger(__name__) diff --git a/nf_core/pipelines/sync/__init__.py b/nf_core/pipelines/sync/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/test_create_logo.py b/tests/test_create_logo.py index b3c01638e..8942894ce 100644 --- a/tests/test_create_logo.py +++ b/tests/test_create_logo.py @@ -4,7 +4,7 @@ import unittest from pathlib import Path -import nf_core.create_logo +import nf_core.pipelines.create_logo class TestCreateLogo(unittest.TestCase): @@ -23,7 +23,7 @@ def test_create_logo_png(self): """Test that the create-logo command works for PNGs""" # Create a logo - logo_fn = nf_core.create_logo.create_logo("pipes", self.tempdir_path) + logo_fn = nf_core.pipelines.create_logo.create_logo("pipes", self.tempdir_path) # Check that the file exists self.assertTrue(logo_fn.is_file()) # Check that the file is a PNG @@ -37,7 +37,7 @@ def test_create_logo_png_dark(self): """Test that the create-logo command works for dark PNGs""" # Create a logo - logo_fn = nf_core.create_logo.create_logo("pipes", self.tempdir_path, theme="dark") + logo_fn = nf_core.pipelines.create_logo.create_logo("pipes", self.tempdir_path, theme="dark") # Check that the file exists self.assertTrue(logo_fn.is_file()) # Check that the file is a PNG @@ -51,7 +51,7 @@ def test_create_log_png_width(self): """Test that the create-logo command works for PNGs with a custom width""" # Create a logo - logo_fn = nf_core.create_logo.create_logo("pipes", self.tempdir_path, width=100) + logo_fn = nf_core.pipelines.create_logo.create_logo("pipes", self.tempdir_path, width=100) # Check that the file exists self.assertTrue(logo_fn.is_file()) # Check that the file is a PNG @@ -65,12 +65,12 @@ def 
test_create_logo_twice(self): """Test that the create-logo command returns an info message when run twice""" # Create a logo - logo_fn = nf_core.create_logo.create_logo("pipes", self.tempdir_path) + logo_fn = nf_core.pipelines.create_logo.create_logo("pipes", self.tempdir_path) # Check that the file exists self.assertTrue(logo_fn.is_file()) # Create the logo again and capture the log output with self.assertLogs(level="INFO") as log: - nf_core.create_logo.create_logo("pipes", self.tempdir_path) + nf_core.pipelines.create_logo.create_logo("pipes", self.tempdir_path) # Check that the log message is correct self.assertIn("Logo already exists", log.output[0]) @@ -79,13 +79,15 @@ def test_create_logo_without_text_fail(self): # Create a logo with self.assertRaises(UserWarning): - nf_core.create_logo.create_logo("", self.tempdir_path) + nf_core.pipelines.create_logo.create_logo("", self.tempdir_path) def test_create_logo_with_filename(self): """Test that the create-logo command works with a custom filename""" # Create a logo - logo_fn = nf_core.create_logo.create_logo("pipes", Path(self.tempdir_path / "custom_dir"), filename="custom") + logo_fn = nf_core.pipelines.create_logo.create_logo( + "pipes", Path(self.tempdir_path / "custom_dir"), filename="custom" + ) # Check that the file exists self.assertTrue(logo_fn.is_file()) # Check that the parent directory name @@ -97,7 +99,7 @@ def test_create_logo_svg(self): """Test that the create-logo command works for SVGs""" # Create a logo - logo_fn = nf_core.create_logo.create_logo("pipes", self.tempdir_path, format="svg") + logo_fn = nf_core.pipelines.create_logo.create_logo("pipes", self.tempdir_path, format="svg") # Check that the file exists self.assertTrue(logo_fn.is_file()) # Check that the file is a SVG @@ -113,7 +115,7 @@ def test_create_logo_svg_dark(self): """Test that the create-logo command works for svgs and dark theme""" # Create a logo - logo_fn = nf_core.create_logo.create_logo("pipes", self.tempdir_path, 
format="svg", theme="dark") + logo_fn = nf_core.pipelines.create_logo.create_logo("pipes", self.tempdir_path, format="svg", theme="dark") # Check that the file exists self.assertTrue(logo_fn.is_file()) # Check that the file is a SVG diff --git a/tests/test_download.py b/tests/test_download.py index e090885bb..2d4231c7d 100644 --- a/tests/test_download.py +++ b/tests/test_download.py @@ -14,7 +14,7 @@ import nf_core.pipelines.create.create import nf_core.utils -from nf_core.download import ContainerError, DownloadWorkflow, WorkflowRepo +from nf_core.pipelines.download import ContainerError, DownloadWorkflow, WorkflowRepo from nf_core.synced_repo import SyncedRepo from nf_core.utils import run_cmd diff --git a/tests/test_launch.py b/tests/test_launch.py index 043055a2d..977485341 100644 --- a/tests/test_launch.py +++ b/tests/test_launch.py @@ -8,8 +8,8 @@ import pytest -import nf_core.launch import nf_core.pipelines.create.create +import nf_core.pipelines.launch from .utils import create_tmp_pipeline, with_temporary_file, with_temporary_folder @@ -21,7 +21,7 @@ def setUp(self): """Create a new PipelineSchema and Launch objects""" self.tmp_dir, self.template_dir, self.pipeline_name, self.pipeline_dir = create_tmp_pipeline() self.nf_params_fn = os.path.join(self.tmp_dir, "nf-params.json") - self.launcher = nf_core.launch.Launch(self.pipeline_dir, params_out=self.nf_params_fn) + self.launcher = nf_core.pipelines.launch.Launch(self.pipeline_dir, params_out=self.nf_params_fn) def tearDown(self): """Clean up temporary files and folders""" @@ -32,13 +32,13 @@ def tearDown(self): if Path(self.tmp_dir).exists(): shutil.rmtree(self.tmp_dir) - @mock.patch.object(nf_core.launch.Launch, "prompt_web_gui", side_effect=[True]) - @mock.patch.object(nf_core.launch.Launch, "launch_web_gui") + @mock.patch.object(nf_core.pipelines.launch.Launch, "prompt_web_gui", side_effect=[True]) + @mock.patch.object(nf_core.pipelines.launch.Launch, "launch_web_gui") def test_launch_pipeline(self, 
mock_webbrowser, mock_lauch_web_gui): """Test the main launch function""" self.launcher.launch_pipeline() - @mock.patch.object(nf_core.launch.Confirm, "ask", side_effect=[False]) + @mock.patch.object(nf_core.pipelines.launch.Confirm, "ask", side_effect=[False]) def test_launch_file_exists(self, mock_confirm): """Test that we detect an existing params file and return""" # Make an empty params file to be overwritten @@ -46,9 +46,9 @@ def test_launch_file_exists(self, mock_confirm): # Try and to launch, return with error assert self.launcher.launch_pipeline() is False - @mock.patch.object(nf_core.launch.Launch, "prompt_web_gui", side_effect=[True]) - @mock.patch.object(nf_core.launch.Launch, "launch_web_gui") - @mock.patch.object(nf_core.launch.Confirm, "ask", side_effect=[False]) + @mock.patch.object(nf_core.pipelines.launch.Launch, "prompt_web_gui", side_effect=[True]) + @mock.patch.object(nf_core.pipelines.launch.Launch, "launch_web_gui") + @mock.patch.object(nf_core.pipelines.launch.Confirm, "ask", side_effect=[False]) def test_launch_file_exists_overwrite(self, mock_webbrowser, mock_lauch_web_gui, mock_confirm): """Test that we detect an existing params file and we overwrite it""" # Make an empty params file to be overwritten @@ -70,7 +70,7 @@ def test_make_pipeline_schema(self, tmp_path): ) create_obj.init_pipeline() os.remove(os.path.join(test_pipeline_dir, "nextflow_schema.json")) - self.launcher = nf_core.launch.Launch(test_pipeline_dir, params_out=self.nf_params_fn) + self.launcher = nf_core.pipelines.launch.Launch(test_pipeline_dir, params_out=self.nf_params_fn) self.launcher.get_pipeline_schema() assert len(self.launcher.schema_obj.schema["definitions"]["input_output_options"]["properties"]) > 2 assert self.launcher.schema_obj.schema["definitions"]["input_output_options"]["properties"]["outdir"] == { @@ -186,7 +186,7 @@ def test_get_web_launch_response_missing_keys(self, mock_poll_nfcore_web_api): } ], ) - @mock.patch.object(nf_core.launch.Launch, 
"sanitise_web_response") + @mock.patch.object(nf_core.pipelines.launch.Launch, "sanitise_web_response") def test_get_web_launch_response_valid(self, mock_poll_nfcore_web_api, mock_sanitise): """Test polling the website for a launch response - complete, valid response""" self.launcher.get_pipeline_schema() diff --git a/tests/test_list.py b/tests/test_list.py index c78276b41..e11dbb12e 100644 --- a/tests/test_list.py +++ b/tests/test_list.py @@ -12,7 +12,7 @@ import pytest from rich.console import Console -import nf_core.list +import nf_core.pipelines.list # create a temporary directory that can be used by the tests in this file tmp = Path(tempfile.mkdtemp()) @@ -26,7 +26,7 @@ class TestList(unittest.TestCase): @mock.patch("subprocess.check_output") def test_working_listcall(self, mock_subprocess): """Test that listing pipelines works""" - wf_table = nf_core.list.list_workflows() + wf_table = nf_core.pipelines.list.list_workflows() console = Console(record=True) console.print(wf_table) output = console.export_text() @@ -36,7 +36,7 @@ def test_working_listcall(self, mock_subprocess): @mock.patch("subprocess.check_output") def test_working_listcall_archived(self, mock_subprocess): """Test that listing pipelines works, showing archived pipelines""" - wf_table = nf_core.list.list_workflows(show_archived=True) + wf_table = nf_core.pipelines.list.list_workflows(show_archived=True) console = Console(record=True) console.print(wf_table) output = console.export_text() @@ -45,7 +45,7 @@ def test_working_listcall_archived(self, mock_subprocess): @mock.patch("subprocess.check_output") def test_working_listcall_json(self, mock_subprocess): """Test that listing pipelines with JSON works""" - wf_json_str = nf_core.list.list_workflows(as_json=True) + wf_json_str = nf_core.pipelines.list.list_workflows(as_json=True) wf_json = json.loads(wf_json_str) for wf in wf_json["remote_workflows"]: if wf["name"] == "ampliseq": @@ -56,22 +56,22 @@ def test_working_listcall_json(self, 
mock_subprocess): def test_pretty_datetime(self): """Test that the pretty datetime function works""" now = datetime.now() - nf_core.list.pretty_date(now) + nf_core.pipelines.list.pretty_date(now) now_ts = time.mktime(now.timetuple()) - nf_core.list.pretty_date(now_ts) + nf_core.pipelines.list.pretty_date(now_ts) def test_local_workflows_and_fail(self): """Test the local workflow class and try to get local Nextflow workflow information""" - loc_wf = nf_core.list.LocalWorkflow("myWF") + loc_wf = nf_core.pipelines.list.LocalWorkflow("myWF") with pytest.raises(RuntimeError): loc_wf.get_local_nf_workflow_details() def test_local_workflows_compare_and_fail_silently(self): """Test the workflow class and try to compare local and remote workflows""" - wfs = nf_core.list.Workflows() - lwf_ex = nf_core.list.LocalWorkflow("myWF") + wfs = nf_core.pipelines.list.Workflows() + lwf_ex = nf_core.pipelines.list.LocalWorkflow("myWF") lwf_ex.full_name = "my Workflow" lwf_ex.commit_sha = "aw3s0meh1sh" @@ -86,7 +86,7 @@ def test_local_workflows_compare_and_fail_silently(self): "releases": [], } - rwf_ex = nf_core.list.RemoteWorkflow(remote) + rwf_ex = nf_core.pipelines.list.RemoteWorkflow(remote) rwf_ex.commit_sha = "aw3s0meh1sh" rwf_ex.releases = [{"tag_sha": "aw3s0meh1sh"}] @@ -114,7 +114,7 @@ def test_parse_local_workflow_and_succeed(self, mock_local_wf): assert os.environ["NXF_ASSETS"] == tmp_nxf_str with open(tmp_nxf / "nf-core/dummy-wf", "w") as f: f.write("dummy") - workflows_obj = nf_core.list.Workflows() + workflows_obj = nf_core.pipelines.list.Workflows() workflows_obj.get_local_nf_workflows() assert len(workflows_obj.local_workflows) == 1 @@ -128,20 +128,20 @@ def test_parse_local_workflow_home(self, mock_local_wf, mock_subprocess): assert os.environ["NXF_ASSETS"] == tmp_nxf_str with open(tmp_nxf / "nf-core/dummy-wf", "w") as f: f.write("dummy") - workflows_obj = nf_core.list.Workflows() + workflows_obj = nf_core.pipelines.list.Workflows() 
workflows_obj.get_local_nf_workflows() @mock.patch("os.stat") @mock.patch("git.Repo") def test_local_workflow_investigation(self, mock_repo, mock_stat): - local_wf = nf_core.list.LocalWorkflow("dummy") + local_wf = nf_core.pipelines.list.LocalWorkflow("dummy") local_wf.local_path = tmp mock_repo.head.commit.hexsha = "h00r4y" mock_stat.st_mode = 1 local_wf.get_local_nf_workflow_details() def test_worflow_filter(self): - workflows_obj = nf_core.list.Workflows(["rna", "myWF"]) + workflows_obj = nf_core.pipelines.list.Workflows(["rna", "myWF"]) remote = { "name": "myWF", @@ -154,7 +154,7 @@ def test_worflow_filter(self): "releases": [], } - rwf_ex = nf_core.list.RemoteWorkflow(remote) + rwf_ex = nf_core.pipelines.list.RemoteWorkflow(remote) rwf_ex.commit_sha = "aw3s0meh1sh" rwf_ex.releases = [{"tag_sha": "aw3s0meh1sh"}] @@ -169,7 +169,7 @@ def test_worflow_filter(self): "releases": [], } - rwf_ex2 = nf_core.list.RemoteWorkflow(remote2) + rwf_ex2 = nf_core.pipelines.list.RemoteWorkflow(remote2) rwf_ex2.commit_sha = "aw3s0meh1sh" rwf_ex2.releases = [{"tag_sha": "aw3s0meh1sh"}] @@ -182,11 +182,11 @@ def test_filter_archived_workflows(self): """ Test that archived workflows are not shown by default """ - workflows_obj = nf_core.list.Workflows() + workflows_obj = nf_core.pipelines.list.Workflows() remote1 = {"name": "myWF", "full_name": "my Workflow", "archived": True, "releases": []} - rwf_ex1 = nf_core.list.RemoteWorkflow(remote1) + rwf_ex1 = nf_core.pipelines.list.RemoteWorkflow(remote1) remote2 = {"name": "myWF", "full_name": "my Workflow", "archived": False, "releases": []} - rwf_ex2 = nf_core.list.RemoteWorkflow(remote2) + rwf_ex2 = nf_core.pipelines.list.RemoteWorkflow(remote2) workflows_obj.remote_workflows.append(rwf_ex1) workflows_obj.remote_workflows.append(rwf_ex2) @@ -200,11 +200,11 @@ def test_show_archived_workflows(self): """ Test that archived workflows can be shown optionally """ - workflows_obj = nf_core.list.Workflows(show_archived=True) + workflows_obj 
= nf_core.pipelines.list.Workflows(show_archived=True) remote1 = {"name": "myWF", "full_name": "my Workflow", "archived": True, "releases": []} - rwf_ex1 = nf_core.list.RemoteWorkflow(remote1) + rwf_ex1 = nf_core.pipelines.list.RemoteWorkflow(remote1) remote2 = {"name": "myWF", "full_name": "my Workflow", "archived": False, "releases": []} - rwf_ex2 = nf_core.list.RemoteWorkflow(remote2) + rwf_ex2 = nf_core.pipelines.list.RemoteWorkflow(remote2) workflows_obj.remote_workflows.append(rwf_ex1) workflows_obj.remote_workflows.append(rwf_ex2) diff --git a/tests/test_params_file.py b/tests/test_params_file.py index 673139c3b..7e3e4b4f4 100644 --- a/tests/test_params_file.py +++ b/tests/test_params_file.py @@ -5,8 +5,8 @@ from pathlib import Path import nf_core.pipelines.create.create -import nf_core.schema -from nf_core.params_file import ParamsFileBuilder +import nf_core.pipelines.schema +from nf_core.pipelines.params_file import ParamsFileBuilder class TestParamsFileBuilder: @@ -15,7 +15,7 @@ class TestParamsFileBuilder: @classmethod def setup_class(cls): """Create a new PipelineSchema object""" - cls.schema_obj = nf_core.schema.PipelineSchema() + cls.schema_obj = nf_core.pipelines.schema.PipelineSchema() cls.root_repo_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) # Create a test pipeline in temp directory diff --git a/tests/test_schema.py b/tests/test_schema.py index b4be7ae45..4cb157c08 100644 --- a/tests/test_schema.py +++ b/tests/test_schema.py @@ -13,7 +13,7 @@ import yaml import nf_core.pipelines.create.create -import nf_core.schema +import nf_core.pipelines.schema from .utils import with_temporary_file, with_temporary_folder @@ -23,7 +23,7 @@ class TestSchema(unittest.TestCase): def setUp(self): """Create a new PipelineSchema object""" - self.schema_obj = nf_core.schema.PipelineSchema() + self.schema_obj = nf_core.pipelines.schema.PipelineSchema() self.root_repo_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) # Create a 
test pipeline in temp directory diff --git a/tests/test_utils.py b/tests/test_utils.py index 85f4e3c54..89ba0444f 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -10,8 +10,8 @@ import pytest import requests -import nf_core.list import nf_core.pipelines.create.create +import nf_core.pipelines.list import nf_core.utils from .utils import with_temporary_folder @@ -160,7 +160,7 @@ def test_pip_erroneous_package(self): nf_core.utils.pip_package("not_a_package=1.0") def test_get_repo_releases_branches_nf_core(self): - wfs = nf_core.list.Workflows() + wfs = nf_core.pipelines.list.Workflows() wfs.get_remote_workflows() pipeline, wf_releases, wf_branches = nf_core.utils.get_repo_releases_branches("methylseq", wfs) for r in wf_releases: @@ -171,7 +171,7 @@ def test_get_repo_releases_branches_nf_core(self): assert "dev" in wf_branches.keys() def test_get_repo_releases_branches_not_nf_core(self): - wfs = nf_core.list.Workflows() + wfs = nf_core.pipelines.list.Workflows() wfs.get_remote_workflows() pipeline, wf_releases, wf_branches = nf_core.utils.get_repo_releases_branches("MultiQC/MultiQC", wfs) for r in wf_releases: @@ -182,13 +182,13 @@ def test_get_repo_releases_branches_not_nf_core(self): assert "main" in wf_branches.keys() def test_get_repo_releases_branches_not_exists(self): - wfs = nf_core.list.Workflows() + wfs = nf_core.pipelines.list.Workflows() wfs.get_remote_workflows() with pytest.raises(AssertionError): nf_core.utils.get_repo_releases_branches("made_up_pipeline", wfs) def test_get_repo_releases_branches_not_exists_slash(self): - wfs = nf_core.list.Workflows() + wfs = nf_core.pipelines.list.Workflows() wfs.get_remote_workflows() with pytest.raises(AssertionError): nf_core.utils.get_repo_releases_branches("made-up/pipeline", wfs) From 033eaa074cd3314c206d8dc948de7e830dacfba6 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 31 May 2024 09:29:56 +0200 Subject: [PATCH 181/737] fix commands imports and mentions --- 
.github/workflows/create-lint-wf.yml | 8 +-- nf_core/__main__.py | 8 +-- .../pipeline-template/.github/CONTRIBUTING.md | 6 +- .../.github/workflows/download_pipeline.yml | 6 +- nf_core/pipelines/download.py | 12 ++-- nf_core/pipelines/launch.py | 6 +- .../lint/actions_schema_validation.py | 2 +- nf_core/pipelines/lint/schema_lint.py | 6 +- nf_core/pipelines/params_file.py | 2 +- nf_core/pipelines/schema.py | 2 +- nf_core/utils.py | 6 +- setup.py | 2 +- tests/lint/multiqc_config.py | 4 +- tests/test_bump_version.py | 8 +-- tests/test_cli.py | 36 +++++----- tests/test_download.py | 18 ++--- tests/test_list.py | 4 +- tests/test_sync.py | 66 +++++++++---------- 18 files changed, 102 insertions(+), 100 deletions(-) diff --git a/.github/workflows/create-lint-wf.yml b/.github/workflows/create-lint-wf.yml index 03b9aa241..7e90febb7 100644 --- a/.github/workflows/create-lint-wf.yml +++ b/.github/workflows/create-lint-wf.yml @@ -79,12 +79,12 @@ jobs: working-directory: create-lint-wf # Run the other nf-core commands - - name: nf-core list - run: nf-core --log-file log.txt list + - name: nf-core pipelines list + run: nf-core --log-file log.txt pipelines list working-directory: create-lint-wf - - name: nf-core schema - run: nf-core --log-file log.txt schema build --dir nf-core-testpipeline/ --no-prompts + - name: nf-core pipelines schema + run: nf-core --log-file log.txt pipelines schema build --dir nf-core-testpipeline/ --no-prompts working-directory: create-lint-wf - name: Cleanup work directory diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 4921e15d5..2d34b8e88 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -697,7 +697,7 @@ def sync_pipeline(ctx, dir, from_branch, pull_request, github_repository, userna the pipeline. It is run automatically for all pipelines when ever a new release of [link=https://github.com/nf-core/tools]nf-core/tools[/link] (and the included template) is made. 
""" - from nf_core.pipelines.sync.sync import PipelineSync, PullRequestExceptionError, SyncExceptionError + from nf_core.pipelines.sync import PipelineSync, PullRequestExceptionError, SyncExceptionError from nf_core.utils import is_pipeline_directory # Check if pipeline directory contains necessary files @@ -743,7 +743,7 @@ def bump_version_pipeline(ctx, new_version, dir, nextflow): As well as the pipeline version, you can also change the required version of Nextflow. """ - from nf_core.pipelines.bump_version.bump_version import bump_nextflow_version, bump_pipeline_version + from nf_core.pipelines.bump_version import bump_nextflow_version, bump_pipeline_version from nf_core.utils import Pipeline, is_pipeline_directory try: @@ -952,9 +952,9 @@ def lint_schema(schema_path): Check that a given pipeline schema is valid. Checks whether the pipeline schema validates as JSON Schema Draft 7 - and adheres to the additional nf-core schema requirements. + and adheres to the additional nf-core pipelines schema requirements. - This function runs as part of the nf-core lint command, this is a convenience + This function runs as part of the nf-core pipelines lint command, this is a convenience command that does just the schema linting nice and quickly. If no schema path is provided, "nextflow_schema.json" will be used (if it exists). diff --git a/nf_core/pipeline-template/.github/CONTRIBUTING.md b/nf_core/pipeline-template/.github/CONTRIBUTING.md index dc05d165d..e22a66425 100644 --- a/nf_core/pipeline-template/.github/CONTRIBUTING.md +++ b/nf_core/pipeline-template/.github/CONTRIBUTING.md @@ -23,7 +23,7 @@ If you'd like to write some code for {{ name }}, the standard workflow is as fol 1. Check that there isn't already an issue about your idea in the [{{ name }} issues](https://github.com/{{ name }}/issues) to avoid duplicating work. If there isn't one already, please create one so that others know you're working on this 2. 
[Fork](https://help.github.com/en/github/getting-started-with-github/fork-a-repo) the [{{ name }} repository](https://github.com/{{ name }}) to your GitHub account 3. Make the necessary changes / additions within your forked repository following [Pipeline conventions](#pipeline-contribution-conventions) -4. Use `nf-core schema build` and add any new parameters to the pipeline JSON schema (requires [nf-core tools](https://github.com/nf-core/tools) >= 1.10). +4. Use `nf-core pipelines schema build` and add any new parameters to the pipeline JSON schema (requires [nf-core tools](https://github.com/nf-core/tools) >= 1.10). 5. Submit a Pull Request against the `dev` branch and wait for the code to be reviewed and merged If you're not used to this workflow with git, you can start with some [docs from GitHub](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests) or even their [excellent `git` resources](https://try.github.io/). @@ -83,7 +83,7 @@ If you wish to contribute a new step, please use the following coding standards: 2. Write the process block (see below). 3. Define the output channel if needed (see below). 4. Add any new parameters to `nextflow.config` with a default (see below). -5. Add any new parameters to `nextflow_schema.json` with help text (via the `nf-core schema build` tool). +5. Add any new parameters to `nextflow_schema.json` with help text (via the `nf-core pipelines schema build` tool). 6. Add sanity checks and validation for all relevant parameters. 7. Perform local tests to validate that the new code works as expected. 8. If applicable, add a new test command in `.github/workflow/ci.yml`. @@ -94,7 +94,7 @@ If you wish to contribute a new step, please use the following coding standards: Parameters should be initialised / defined with default values in `nextflow.config` under the `params` scope. -Once there, use `nf-core schema build` to add to `nextflow_schema.json`. 
+Once there, use `nf-core pipelines schema build` to add to `nextflow_schema.json`. ### Default processes resource requirements diff --git a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml index ebea16c5c..99a42d86d 100644 --- a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml +++ b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml @@ -1,4 +1,4 @@ -name: Test successful pipeline download with 'nf-core download' +name: Test successful pipeline download with 'nf-core pipelines download' # Run the workflow when: # - dispatched manually @@ -8,7 +8,7 @@ on: workflow_dispatch: inputs: testbranch: - description: "The specific branch you wish to utilize for the test execution of nf-core download." + description: "The specific branch you wish to utilize for the test execution of nf-core pipelines download." required: true default: "dev" pull_request: @@ -58,7 +58,7 @@ jobs: env: NXF_SINGULARITY_CACHEDIR: ./ run: | - nf-core download ${{ env.REPO_LOWERCASE }} \ + nf-core pipelines download ${{ env.REPO_LOWERCASE }} \ --revision ${{ env.REPO_BRANCH }} \ --outdir ./${{ env.REPOTITLE_LOWERCASE }} \ --compress "none" \ diff --git a/nf_core/pipelines/download.py b/nf_core/pipelines/download.py index 5a2acf489..8454f894a 100644 --- a/nf_core/pipelines/download.py +++ b/nf_core/pipelines/download.py @@ -42,7 +42,7 @@ class DownloadError(RuntimeError): - """A custom exception that is raised when nf-core download encounters a problem that we already took into consideration. + """A custom exception that is raised when nf-core pipelines download encounters a problem that we already took into consideration. In this case, we do not want to print the traceback, but give the user some concise, helpful feedback instead. 
""" @@ -505,7 +505,7 @@ def prompt_singularity_cachedir_creation(self): with open(os.path.expanduser(shellprofile_path), "a") as f: f.write( "\n\n#######################################\n" - f"## Added by `nf-core download` v{nf_core.__version__} ##\n" + f"## Added by `nf-core pipelines download` v{nf_core.__version__} ##\n" + f'export NXF_SINGULARITY_CACHEDIR="{cachedir_path}"' + "\n#######################################\n" ) @@ -687,7 +687,7 @@ def wf_use_local_configs(self, revision_dirname): # Append the singularity.cacheDir to the end if we need it if self.container_system == "singularity" and self.container_cache_utilisation == "copy": nfconfig += ( - f"\n\n// Added by `nf-core download` v{nf_core.__version__} //\n" + f"\n\n// Added by `nf-core pipelines download` v{nf_core.__version__} //\n" + 'singularity.cacheDir = "${projectDir}/../singularity-images/"' + "\n///////////////////////////////////////" ) @@ -1730,7 +1730,7 @@ def __add_additional_tags(self) -> None: # Although "dev-null" is a syntactically-valid local-part that is equally valid for delivery, # and only the receiving MTA can decide whether to accept it, it is to my best knowledge configured with # a Postfix discard mail delivery agent (https://www.postfix.org/discard.8.html), so incoming mails should be sinkholed. - self.ensure_git_user_config(f"nf-core download v{nf_core.__version__}", "dev-null@example.com") + self.ensure_git_user_config(f"nf-core pipelines download v{nf_core.__version__}", "dev-null@example.com") for additional_tag in self.additional_tags: # A valid git branch or tag name can contain alphanumeric characters, underscores, hyphens, and dots. @@ -1740,7 +1740,9 @@ def __add_additional_tags(self) -> None: if self.repo.is_valid_object(anchor) and not self.repo.is_valid_object(tag): try: self.repo.create_tag( - tag, ref=anchor, message=f"Synonynmous tag to {anchor}; added by `nf-core download`." 
+ tag, + ref=anchor, + message=f"Synonynmous tag to {anchor}; added by `nf-core pipelines download`.", ) except (GitCommandError, InvalidGitRepositoryError) as e: log.error(f"[red]Additional tag(s) could not be applied:[/]\n{e}\n") diff --git a/nf_core/pipelines/launch.py b/nf_core/pipelines/launch.py index 77a7948ea..3a5f97e78 100644 --- a/nf_core/pipelines/launch.py +++ b/nf_core/pipelines/launch.py @@ -38,7 +38,7 @@ def __init__( """Initialise the Launcher class Args: - schema: An nf_core.schema.PipelineSchema() object + schema: An nf_core.pipelines.schema.PipelineSchema() object """ self.pipeline = pipeline @@ -59,7 +59,7 @@ def __init__( self.nextflow_cmd = None # Fetch remote workflows - self.wfs = nf_core.list.Workflows() + self.wfs = nf_core.pipelines.list.Workflows() self.wfs.get_remote_workflows() # Prepend property names with a single hyphen in case we have parameters with the same ID @@ -340,7 +340,7 @@ def get_web_launch_response(self): elif web_response["status"] == "waiting_for_user": return False elif web_response["status"] == "launch_params_complete": - log.info("Found completed parameters from nf-core launch GUI") + log.info("Found completed parameters from nf-core pipelines launch GUI") try: # Set everything that we can with the cache results # NB: If using web builder, may have only run with --id and nothing else diff --git a/nf_core/pipelines/lint/actions_schema_validation.py b/nf_core/pipelines/lint/actions_schema_validation.py index 7e878a1af..b4be42b54 100644 --- a/nf_core/pipelines/lint/actions_schema_validation.py +++ b/nf_core/pipelines/lint/actions_schema_validation.py @@ -23,7 +23,7 @@ def actions_schema_validation(self) -> Dict[str, List[str]]: warned: List[str] = [] # Only show error messages from schema - logging.getLogger("nf_core.schema").setLevel(logging.ERROR) + logging.getLogger("nf_core.pipelines.schema").setLevel(logging.ERROR) # Get all workflow files action_workflows = glob.glob(os.path.join(self.wf_path, 
".github/workflows/*.y*ml")) diff --git a/nf_core/pipelines/lint/schema_lint.py b/nf_core/pipelines/lint/schema_lint.py index 21858a0f4..6786c5012 100644 --- a/nf_core/pipelines/lint/schema_lint.py +++ b/nf_core/pipelines/lint/schema_lint.py @@ -10,7 +10,7 @@ def schema_lint(self): pipeline parameters (eg. ``params.something``, ``--something``). .. tip:: Reminder: you should generally never need to edit this JSON file by hand. - The ``nf-core schema build`` command can create *and edit* the file for you + The ``nf-core pipelines schema build`` command can create *and edit* the file for you to keep it up to date, with a friendly user-interface for customisation. The lint test checks the schema for the following: @@ -58,14 +58,14 @@ def schema_lint(self): } .. tip:: You can check your pipeline schema without having to run the entire pipeline lint - by running ``nf-core schema lint`` instead of ``nf-core pipelines lint`` + by running ``nf-core pipelines schema lint`` instead of ``nf-core pipelines lint`` """ passed = [] warned = [] failed = [] # Only show error messages from schema - logging.getLogger("nf_core.schema").setLevel(logging.ERROR) + logging.getLogger("nf_core.pipelines.schema").setLevel(logging.ERROR) # Lint the schema self.schema_obj = nf_core.pipelines.schema.PipelineSchema() diff --git a/nf_core/pipelines/params_file.py b/nf_core/pipelines/params_file.py index 0384521c7..d61b7cfbc 100644 --- a/nf_core/pipelines/params_file.py +++ b/nf_core/pipelines/params_file.py @@ -124,7 +124,7 @@ def get_pipeline(self): ).unsafe_ask() # Get the schema - self.schema_obj = nf_core.schema.PipelineSchema() + self.schema_obj = nf_core.pipelines.schema.PipelineSchema() self.schema_obj.get_schema_path(self.pipeline, local_only=False, revision=self.pipeline_revision) self.schema_obj.get_wf_params() diff --git a/nf_core/pipelines/schema.py b/nf_core/pipelines/schema.py index 28a7d0b4f..7f562bff3 100644 --- a/nf_core/pipelines/schema.py +++ b/nf_core/pipelines/schema.py @@ 
-912,7 +912,7 @@ def get_web_builder_response(self): if web_response["status"] == "waiting_for_user": return False if web_response["status"] == "web_builder_edited": - log.info("Found saved status from nf-core schema builder") + log.info("Found saved status from nf-core pipelines schema builder") try: self.schema = web_response["schema"] self.remove_schema_empty_definitions() diff --git a/nf_core/utils.py b/nf_core/utils.py index 9dae4b9e5..29f026c84 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -854,7 +854,7 @@ def prompt_remote_pipeline_name(wfs): """Prompt for the pipeline name with questionary Args: - wfs: A nf_core.list.Workflows() object, where get_remote_workflows() has been called. + wfs: A nf_core.pipelines.list.Workflows() object, where get_remote_workflows() has been called. Returns: pipeline (str): GitHub repo - username/repo @@ -934,7 +934,7 @@ def prompt_pipeline_release_branch(wf_releases, wf_branches, multiple=False): class SingularityCacheFilePathValidator(questionary.Validator): """ - Validator for file path specified as --singularity-cache-index argument in nf-core download + Validator for file path specified as --singularity-cache-index argument in nf-core pipelines download """ def validate(self, value): @@ -954,7 +954,7 @@ def get_repo_releases_branches(pipeline, wfs): Args: pipeline (str): GitHub repo username/repo - wfs: A nf_core.list.Workflows() object, where get_remote_workflows() has been called. + wfs: A nf_core.pipelines.list.Workflows() object, where get_remote_workflows() has been called. 
Returns: wf_releases, wf_branches (tuple): Array of releases, Array of branches diff --git a/setup.py b/setup.py index 013c863b9..8f32daa90 100644 --- a/setup.py +++ b/setup.py @@ -33,7 +33,7 @@ license="MIT", entry_points={ "console_scripts": ["nf-core=nf_core.__main__:run_nf_core"], - "refgenie.hooks.post_update": ["nf-core-refgenie=nf_core.refgenie:update_config"], + "refgenie.hooks.post_update": ["nf-core-refgenie=nf_core.pipelines.refgenie:update_config"], }, python_requires=">=3.8, <4", install_requires=required, diff --git a/tests/lint/multiqc_config.py b/tests/lint/multiqc_config.py index 70c09ae54..7f1fdbd67 100644 --- a/tests/lint/multiqc_config.py +++ b/tests/lint/multiqc_config.py @@ -116,13 +116,13 @@ def test_multiqc_config_report_comment_release_fail(self): def test_multiqc_config_report_comment_release_succeed(self): """Test that linting fails if the multiqc_config.yml file has a correct report_comment for a release version""" - import nf_core.pipelines.bump_version.bump_version + import nf_core.pipelines.bump_version new_pipeline = self._make_pipeline_copy() lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load() # bump version using the bump_version function - nf_core.pipelines.bump_version.bump_version.bump_pipeline_version(lint_obj, "1.0") + nf_core.pipelines.bump_version.bump_pipeline_version(lint_obj, "1.0") # lint again lint_obj._load() result = lint_obj.multiqc_config() diff --git a/tests/test_bump_version.py b/tests/test_bump_version.py index ad7efcd1f..260637c06 100644 --- a/tests/test_bump_version.py +++ b/tests/test_bump_version.py @@ -4,7 +4,7 @@ import yaml -import nf_core.pipelines.bump_version.bump_version +import nf_core.pipelines.bump_version import nf_core.pipelines.create.create import nf_core.utils @@ -24,7 +24,7 @@ def test_bump_pipeline_version(datafiles, tmp_path): pipeline_obj._load() # Bump the version number - nf_core.pipelines.bump_version.bump_version.bump_pipeline_version(pipeline_obj, "1.1") + 
nf_core.pipelines.bump_version.bump_pipeline_version(pipeline_obj, "1.1") new_pipeline_obj = nf_core.utils.Pipeline(test_pipeline_dir) # Check nextflow.config @@ -44,7 +44,7 @@ def test_dev_bump_pipeline_version(datafiles, tmp_path): pipeline_obj._load() # Bump the version number - nf_core.pipelines.bump_version.bump_version.bump_pipeline_version(pipeline_obj, "v1.2dev") + nf_core.pipelines.bump_version.bump_pipeline_version(pipeline_obj, "v1.2dev") new_pipeline_obj = nf_core.utils.Pipeline(test_pipeline_dir) # Check the pipeline config @@ -65,7 +65,7 @@ def test_bump_nextflow_version(datafiles, tmp_path): # Bump the version number to a specific version, preferably one # we're not already on version = "22.04.3" - nf_core.pipelines.bump_version.bump_version.bump_nextflow_version(pipeline_obj, version) + nf_core.pipelines.bump_version.bump_nextflow_version(pipeline_obj, version) new_pipeline_obj = nf_core.utils.Pipeline(test_pipeline_dir) # Check nextflow.config diff --git a/tests/test_cli.py b/tests/test_cli.py index 53780b14e..8142caa21 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -79,7 +79,7 @@ def test_cli_verbose(self): # Checks that -v was considered valid assert "No such option: -v" not in nf_core.utils.strip_ansi_codes(result.output) - @mock.patch("nf_core.list.list_workflows", return_value="pipeline test list") + @mock.patch("nf_core.pipelines.list.list_workflows", return_value="pipeline test list") def test_cli_list(self, mock_list_workflows): """Test nf-core pipelines are listed and cli parameters are passed on.""" params = { @@ -87,7 +87,7 @@ def test_cli_list(self, mock_list_workflows): "json": None, "show-archived": None, } - cmd = ["list"] + self.assemble_params(params) + ["kw1", "kw2"] + cmd = ["pipelines", "list"] + self.assemble_params(params) + ["kw1", "kw2"] result = self.invoke_cli(cmd) mock_list_workflows.assert_called_once_with( @@ -96,7 +96,7 @@ def test_cli_list(self, mock_list_workflows): assert result.exit_code == 0 assert 
"pipeline test list" in result.output - @mock.patch("nf_core.launch.Launch") + @mock.patch("nf_core.pipelines.launch.Launch") def test_cli_launch(self, mock_launcher): """Test nf-core pipeline is launched and cli parameters are passed on.""" mock_launcher.return_value.launch_pipeline.return_value = True @@ -112,7 +112,7 @@ def test_cli_launch(self, mock_launcher): "show-hidden": None, "url": "builder_url", } - cmd = ["launch"] + self.assemble_params(params) + ["pipeline_name"] + cmd = ["pipelines", "launch"] + self.assemble_params(params) + ["pipeline_name"] result = self.invoke_cli(cmd) assert result.exit_code == 0 @@ -131,7 +131,7 @@ def test_cli_launch(self, mock_launcher): mock_launcher.return_value.launch_pipeline.assert_called_once() - @mock.patch("nf_core.launch.Launch") + @mock.patch("nf_core.pipelines.launch.Launch") def test_cli_launch_no_params_in(self, mock_launcher): """Test nf-core pipeline fails when params-in does not exist""" mock_launcher.return_value.launch_pipeline.return_value = True @@ -139,7 +139,7 @@ def test_cli_launch_no_params_in(self, mock_launcher): params = { "params-in": "/fake/path", } - cmd = ["launch"] + self.assemble_params(params) + ["pipeline_name"] + cmd = ["pipelines", "launch"] + self.assemble_params(params) + ["pipeline_name"] result = self.invoke_cli(cmd) assert result.exit_code == 2 @@ -150,15 +150,15 @@ def test_cli_launch_no_params_in(self, mock_launcher): mock_launcher.assert_not_called() - @mock.patch("nf_core.launch.Launch") + @mock.patch("nf_core.pipelines.launch.Launch") def test_cli_launch_fail(self, mock_launcher): """Test nf-core pipeline fails with exit code 1 when pipeline fails.""" mock_launcher.return_value.launch_pipeline.return_value = False - cmd = ["launch", "pipeline_name"] + cmd = ["pipelines", "launch", "pipeline_name"] result = self.invoke_cli(cmd) assert result.exit_code == 1 - @mock.patch("nf_core.download.DownloadWorkflow") + @mock.patch("nf_core.pipelines.download.DownloadWorkflow") def 
test_cli_download(self, mock_dl): """Test nf-core pipeline is downloaded and cli parameters are passed on.""" params = { @@ -176,7 +176,7 @@ def test_cli_download(self, mock_dl): "parallel-downloads": 2, } - cmd = ["download"] + self.assemble_params(params) + ["pipeline_name"] + cmd = ["pipelines", "download"] + self.assemble_params(params) + ["pipeline_name"] result = self.invoke_cli(cmd) assert result.exit_code == 0 @@ -382,31 +382,31 @@ def test_lint_log_user_warning(self, mock_lint, mock_is_pipeline): assert error_txt in captured_logs.output[-1] assert captured_logs.records[-1].levelname == "ERROR" - @mock.patch("nf_core.schema.PipelineSchema.get_schema_path") + @mock.patch("nf_core.pipelines.schema.PipelineSchema.get_schema_path") def test_schema_lint(self, mock_get_schema_path): - """Test nf-core schema lint defaults to nextflow_schema.json""" - cmd = ["schema", "lint"] + """Test nf-core pipelines schema lint defaults to nextflow_schema.json""" + cmd = ["pipelines", "schema", "lint"] with self.runner.isolated_filesystem(): with open("nextflow_schema.json", "w") as f: f.write("{}") self.invoke_cli(cmd) mock_get_schema_path.assert_called_with("nextflow_schema.json") - @mock.patch("nf_core.schema.PipelineSchema.get_schema_path") + @mock.patch("nf_core.pipelines.schema.PipelineSchema.get_schema_path") def test_schema_lint_filename(self, mock_get_schema_path): - """Test nf-core schema lint accepts a filename""" - cmd = ["schema", "lint", "some_other_filename"] + """Test nf-core pipelines schema lint accepts a filename""" + cmd = ["pipelines", "schema", "lint", "some_other_filename"] with self.runner.isolated_filesystem(): with open("some_other_filename", "w") as f: f.write("{}") self.invoke_cli(cmd) mock_get_schema_path.assert_called_with("some_other_filename") - @mock.patch("nf_core.create_logo.create_logo") + @mock.patch("nf_core.pipelines.create_logo.create_logo") def test_create_logo(self, mock_create_logo): # Set up the mock to return a specific value - cmd = 
["create-logo", "test"] + cmd = ["pipelines", "create-logo", "test"] result = self.invoke_cli(cmd) mock_create_logo.assert_called_with("test", Path.cwd(), None, "light", 2300, "png", False) diff --git a/tests/test_download.py b/tests/test_download.py index 2d4231c7d..c3a8c5546 100644 --- a/tests/test_download.py +++ b/tests/test_download.py @@ -44,7 +44,7 @@ def __contains__(self, item: str) -> bool: # Tests for 'get_release_hash' # def test_get_release_hash_release(self): - wfs = nf_core.list.Workflows() + wfs = nf_core.pipelines.list.Workflows() wfs.get_remote_workflows() pipeline = "methylseq" download_obj = DownloadWorkflow(pipeline=pipeline, revision="1.6") @@ -62,7 +62,7 @@ def test_get_release_hash_release(self): ) def test_get_release_hash_branch(self): - wfs = nf_core.list.Workflows() + wfs = nf_core.pipelines.list.Workflows() wfs.get_remote_workflows() # Exoseq pipeline is archived, so `dev` branch should be stable pipeline = "exoseq" @@ -81,7 +81,7 @@ def test_get_release_hash_branch(self): ) def test_get_release_hash_non_existent_release(self): - wfs = nf_core.list.Workflows() + wfs = nf_core.pipelines.list.Workflows() wfs.get_remote_workflows() pipeline = "methylseq" download_obj = DownloadWorkflow(pipeline=pipeline, revision="thisisfake") @@ -564,7 +564,7 @@ def test_remote_container_functionality(self, tmp_dir): # Tests for the main entry method 'download_workflow' # @with_temporary_folder - @mock.patch("nf_core.download.DownloadWorkflow.singularity_pull_image") + @mock.patch("nf_core.pipelines.download.DownloadWorkflow.singularity_pull_image") @mock.patch("shutil.which") def test_download_workflow_with_success(self, tmp_dir, mock_download_image, mock_singularity_installed): os.environ["NXF_SINGULARITY_CACHEDIR"] = "foo" @@ -585,7 +585,7 @@ def test_download_workflow_with_success(self, tmp_dir, mock_download_image, mock # Test Download for Seqera Platform # @with_temporary_folder - 
@mock.patch("nf_core.download.DownloadWorkflow.get_singularity_images") + @mock.patch("nf_core.pipelines.download.DownloadWorkflow.get_singularity_images") def test_download_workflow_for_platform(self, tmp_dir, _): download_obj = DownloadWorkflow( pipeline="nf-core/rnaseq", @@ -601,7 +601,7 @@ def test_download_workflow_for_platform(self, tmp_dir, _): assert isinstance(download_obj.wf_sha, dict) and len(download_obj.wf_sha) == 0 assert isinstance(download_obj.wf_download_url, dict) and len(download_obj.wf_download_url) == 0 - wfs = nf_core.list.Workflows() + wfs = nf_core.pipelines.list.Workflows() wfs.get_remote_workflows() ( download_obj.pipeline, @@ -646,7 +646,7 @@ def test_download_workflow_for_platform(self, tmp_dir, _): # # Brief test adding a single custom tag to Seqera Platform download # - @mock.patch("nf_core.download.DownloadWorkflow.get_singularity_images") + @mock.patch("nf_core.pipelines.download.DownloadWorkflow.get_singularity_images") @with_temporary_folder def test_download_workflow_for_platform_with_one_custom_tag(self, _, tmp_dir): download_obj = DownloadWorkflow( @@ -662,7 +662,7 @@ def test_download_workflow_for_platform_with_one_custom_tag(self, _, tmp_dir): # # Test adding custom tags to Seqera Platform download (full test) # - @mock.patch("nf_core.download.DownloadWorkflow.get_singularity_images") + @mock.patch("nf_core.pipelines.download.DownloadWorkflow.get_singularity_images") @with_temporary_folder def test_download_workflow_for_platform_with_custom_tags(self, _, tmp_dir): with self._caplog.at_level(logging.INFO): @@ -690,7 +690,7 @@ def test_download_workflow_for_platform_with_custom_tags(self, _, tmp_dir): assert isinstance(download_obj.wf_download_url, dict) and len(download_obj.wf_download_url) == 0 assert isinstance(download_obj.additional_tags, list) and len(download_obj.additional_tags) == 5 - wfs = nf_core.list.Workflows() + wfs = nf_core.pipelines.list.Workflows() wfs.get_remote_workflows() ( download_obj.pipeline, diff --git 
a/tests/test_list.py b/tests/test_list.py index e11dbb12e..21d239287 100644 --- a/tests/test_list.py +++ b/tests/test_list.py @@ -106,7 +106,7 @@ def test_local_workflows_compare_and_fail_silently(self): rwf_ex.releases = None @mock.patch.dict(os.environ, {"NXF_ASSETS": tmp_nxf_str}) - @mock.patch("nf_core.list.LocalWorkflow") + @mock.patch("nf_core.pipelines.list.LocalWorkflow") def test_parse_local_workflow_and_succeed(self, mock_local_wf): test_path = tmp_nxf / "nf-core" if not os.path.isdir(test_path): @@ -119,7 +119,7 @@ def test_parse_local_workflow_and_succeed(self, mock_local_wf): assert len(workflows_obj.local_workflows) == 1 @mock.patch.dict(os.environ, {"NXF_ASSETS": tmp_nxf_str}) - @mock.patch("nf_core.list.LocalWorkflow") + @mock.patch("nf_core.pipelines.list.LocalWorkflow") @mock.patch("subprocess.check_output") def test_parse_local_workflow_home(self, mock_local_wf, mock_subprocess): test_path = tmp_nxf / "nf-core" diff --git a/tests/test_sync.py b/tests/test_sync.py index b98033538..ca90071d9 100644 --- a/tests/test_sync.py +++ b/tests/test_sync.py @@ -12,7 +12,7 @@ import pytest import nf_core.pipelines.create.create -import nf_core.pipelines.sync.sync +import nf_core.pipelines.sync from .utils import with_temporary_folder @@ -46,8 +46,8 @@ def tearDown(self): @with_temporary_folder def test_inspect_sync_dir_notgit(self, tmp_dir): """Try syncing an empty directory""" - psync = nf_core.pipelines.sync.sync.PipelineSync(tmp_dir) - with pytest.raises(nf_core.pipelines.sync.sync.SyncExceptionError) as exc_info: + psync = nf_core.pipelines.sync.PipelineSync(tmp_dir) + with pytest.raises(nf_core.pipelines.sync.SyncExceptionError) as exc_info: psync.inspect_sync_dir() assert "does not appear to be a git repository" in exc_info.value.args[0] @@ -57,9 +57,9 @@ def test_inspect_sync_dir_dirty(self): test_fn = Path(self.pipeline_dir) / "uncommitted" test_fn.touch() # Try to sync, check we halt with the right error - psync = 
nf_core.pipelines.sync.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) try: - with pytest.raises(nf_core.pipelines.sync.sync.SyncExceptionError) as exc_info: + with pytest.raises(nf_core.pipelines.sync.SyncExceptionError) as exc_info: psync.inspect_sync_dir() assert exc_info.value.args[0].startswith("Uncommitted changes found in pipeline directory!") finally: @@ -68,8 +68,8 @@ def test_inspect_sync_dir_dirty(self): def test_get_wf_config_no_branch(self): """Try getting a workflow config when the branch doesn't exist""" # Try to sync, check we halt with the right error - psync = nf_core.pipelines.sync.sync.PipelineSync(self.pipeline_dir, from_branch="foo") - with pytest.raises(nf_core.pipelines.sync.sync.SyncExceptionError) as exc_info: + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir, from_branch="foo") + with pytest.raises(nf_core.pipelines.sync.SyncExceptionError) as exc_info: psync.inspect_sync_dir() psync.get_wf_config() assert exc_info.value.args[0] == "Branch `foo` not found!" 
@@ -77,9 +77,9 @@ def test_get_wf_config_no_branch(self): def test_get_wf_config_missing_required_config(self): """Try getting a workflow config, then make it miss a required config option""" # Try to sync, check we halt with the right error - psync = nf_core.pipelines.sync.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) psync.required_config_vars = ["fakethisdoesnotexist"] - with pytest.raises(nf_core.pipelines.sync.sync.SyncExceptionError) as exc_info: + with pytest.raises(nf_core.pipelines.sync.SyncExceptionError) as exc_info: psync.inspect_sync_dir() psync.get_wf_config() # Check that we did actually get some config back @@ -89,26 +89,26 @@ def test_get_wf_config_missing_required_config(self): def test_checkout_template_branch(self): """Try checking out the TEMPLATE branch of the pipeline""" - psync = nf_core.pipelines.sync.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() psync.checkout_template_branch() def test_checkout_template_branch_no_template(self): """Try checking out the TEMPLATE branch of the pipeline when it does not exist""" - psync = nf_core.pipelines.sync.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() psync.repo.delete_head("TEMPLATE") - with pytest.raises(nf_core.pipelines.sync.sync.SyncExceptionError) as exc_info: + with pytest.raises(nf_core.pipelines.sync.SyncExceptionError) as exc_info: psync.checkout_template_branch() assert exc_info.value.args[0] == "Could not check out branch 'origin/TEMPLATE' or 'TEMPLATE'" def test_delete_template_branch_files(self): """Confirm that we can delete all files in the TEMPLATE branch""" - psync = nf_core.pipelines.sync.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() 
psync.get_wf_config() psync.checkout_template_branch() @@ -118,7 +118,7 @@ def test_delete_template_branch_files(self): def test_create_template_pipeline(self): """Confirm that we can delete all files in the TEMPLATE branch""" # First, delete all the files - psync = nf_core.pipelines.sync.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() psync.checkout_template_branch() @@ -132,7 +132,7 @@ def test_create_template_pipeline(self): def test_commit_template_changes_nochanges(self): """Try to commit the TEMPLATE branch, but no changes were made""" # Check out the TEMPLATE branch but skip making the new template etc. - psync = nf_core.pipelines.sync.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() psync.checkout_template_branch() @@ -142,7 +142,7 @@ def test_commit_template_changes_nochanges(self): def test_commit_template_changes_changes(self): """Try to commit the TEMPLATE branch, but no changes were made""" # Check out the TEMPLATE branch but skip making the new template etc. - psync = nf_core.pipelines.sync.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() psync.checkout_template_branch() @@ -159,7 +159,7 @@ def test_commit_template_changes_changes(self): def test_push_template_branch_error(self): """Try pushing the changes, but without a remote (should fail)""" # Check out the TEMPLATE branch but skip making the new template etc. 
- psync = nf_core.pipelines.sync.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() psync.checkout_template_branch() @@ -168,13 +168,13 @@ def test_push_template_branch_error(self): test_fn.touch() psync.commit_template_changes() # Try to push changes - with pytest.raises(nf_core.pipelines.sync.sync.PullRequestExceptionError) as exc_info: + with pytest.raises(nf_core.pipelines.sync.PullRequestExceptionError) as exc_info: psync.push_template_branch() assert exc_info.value.args[0].startswith("Could not push TEMPLATE branch") def test_create_merge_base_branch(self): """Try creating a merge base branch""" - psync = nf_core.pipelines.sync.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() @@ -193,7 +193,7 @@ def test_create_merge_base_branch_thrice(self): end, so it is needed to call it a third time to make sure this is picked up. 
""" - psync = nf_core.pipelines.sync.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() @@ -206,7 +206,7 @@ def test_create_merge_base_branch_thrice(self): def test_push_merge_branch(self): """Try pushing merge branch""" - psync = nf_core.pipelines.sync.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() psync.repo.create_remote("origin", self.remote_path) @@ -218,12 +218,12 @@ def test_push_merge_branch(self): def test_push_merge_branch_without_create_branch(self): """Try pushing merge branch without creating first""" - psync = nf_core.pipelines.sync.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() psync.repo.create_remote("origin", self.remote_path) - with pytest.raises(nf_core.pipelines.sync.sync.PullRequestExceptionError) as exc_info: + with pytest.raises(nf_core.pipelines.sync.PullRequestExceptionError) as exc_info: psync.push_merge_branch() assert exc_info.value.args[0].startswith(f"Could not push branch '{psync.merge_branch}'") @@ -313,7 +313,7 @@ def json(self): @mock.patch("nf_core.utils.gh_api.post", side_effect=mocked_requests_post) def test_make_pull_request_success(self, mock_post, mock_get): """Try making a PR - successful response""" - psync = nf_core.pipelines.sync.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) psync.gh_api.get = mock_get psync.gh_api.post = mock_post psync.gh_username = "no_existing_pr" @@ -326,13 +326,13 @@ def test_make_pull_request_success(self, mock_post, mock_get): @mock.patch("nf_core.utils.gh_api.post", side_effect=mocked_requests_post) def test_make_pull_request_bad_response(self, mock_post, mock_get): """Try making a PR and getting a 404 error""" - psync = 
nf_core.pipelines.sync.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) psync.gh_api.get = mock_get psync.gh_api.post = mock_post psync.gh_username = "bad_url" psync.gh_repo = "bad_url/response" os.environ["GITHUB_AUTH_TOKEN"] = "test" - with pytest.raises(nf_core.pipelines.sync.sync.PullRequestExceptionError) as exc_info: + with pytest.raises(nf_core.pipelines.sync.PullRequestExceptionError) as exc_info: psync.make_pull_request() assert exc_info.value.args[0].startswith( "Something went badly wrong - GitHub API PR failed - got return code 404" @@ -341,7 +341,7 @@ def test_make_pull_request_bad_response(self, mock_post, mock_get): @mock.patch("nf_core.utils.gh_api.get", side_effect=mocked_requests_get) def test_close_open_template_merge_prs(self, mock_get): """Try closing all open prs""" - psync = nf_core.pipelines.sync.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() psync.gh_api.get = mock_get @@ -349,7 +349,7 @@ def test_close_open_template_merge_prs(self, mock_get): psync.gh_repo = "list_prs/response" os.environ["GITHUB_AUTH_TOKEN"] = "test" - with mock.patch("nf_core.pipelines.sync.sync.PipelineSync.close_open_pr") as mock_close_open_pr: + with mock.patch("nf_core.pipelines.sync.PipelineSync.close_open_pr") as mock_close_open_pr: psync.close_open_template_merge_prs() prs = mock_get(f"https://api.github.com/repos/{psync.gh_repo}/pulls").data @@ -360,7 +360,7 @@ def test_close_open_template_merge_prs(self, mock_get): @mock.patch("nf_core.utils.gh_api.post", side_effect=mocked_requests_post) @mock.patch("nf_core.utils.gh_api.patch", side_effect=mocked_requests_patch) def test_close_open_pr(self, mock_patch, mock_post): - psync = nf_core.pipelines.sync.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() 
psync.gh_api.post = mock_post @@ -383,7 +383,7 @@ def test_close_open_pr(self, mock_patch, mock_post): @mock.patch("nf_core.utils.gh_api.post", side_effect=mocked_requests_post) @mock.patch("nf_core.utils.gh_api.patch", side_effect=mocked_requests_patch) def test_close_open_pr_fail(self, mock_patch, mock_post): - psync = nf_core.pipelines.sync.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() psync.gh_api.post = mock_post @@ -405,7 +405,7 @@ def test_close_open_pr_fail(self, mock_patch, mock_post): def test_reset_target_dir(self): """Try resetting target pipeline directory""" - psync = nf_core.pipelines.sync.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() @@ -417,12 +417,12 @@ def test_reset_target_dir(self): def test_reset_target_dir_fake_branch(self): """Try resetting target pipeline directory but original branch does not exist""" - psync = nf_core.pipelines.sync.sync.PipelineSync(self.pipeline_dir) + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() psync.original_branch = "fake_branch" - with pytest.raises(nf_core.pipelines.sync.sync.SyncExceptionError) as exc_info: + with pytest.raises(nf_core.pipelines.sync.SyncExceptionError) as exc_info: psync.reset_target_dir() assert exc_info.value.args[0].startswith("Could not reset to original branch `fake_branch`") From 7fb360d72c2a1c3dab77dba84e222afc565fd553 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 31 May 2024 10:01:41 +0200 Subject: [PATCH 182/737] show options after commands on help message. 
Thanks @dwreeves for the suggestion --- nf_core/__main__.py | 13 ++++++++++++- requirements.txt | 2 +- 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 2d34b8e88..a263139b4 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -107,6 +107,17 @@ def normalize_case(ctx, param, component_name): return component_name.casefold() +# Define a custom click group class to sort options and commands in the help message +# TODO: Remove this class and use COMMANDS_BEFORE_OPTIONS when rich-click is updated +# See https://github.com/ewels/rich-click/issues/200 for more information +class CustomRichGroup(click.RichGroup): + def format_options(self, ctx, formatter) -> None: + from rich_click.rich_help_rendering import get_rich_options + + self.format_commands(ctx, formatter) + get_rich_options(self, ctx, formatter) + + def run_nf_core(): # print nf-core header if environment variable is not set if os.environ.get("_NF_CORE_COMPLETE") is None: @@ -133,7 +144,7 @@ def run_nf_core(): @tui() -@click.group(context_settings=dict(help_option_names=["-h", "--help"])) +@click.group(context_settings=dict(help_option_names=["-h", "--help"]), cls=CustomRichGroup) @click.version_option(__version__) @click.option( "-v", diff --git a/requirements.txt b/requirements.txt index 44241e0d9..0574083fd 100644 --- a/requirements.txt +++ b/requirements.txt @@ -18,7 +18,7 @@ questionary>=1.8.0 refgenie requests requests_cache -rich-click>=1.6.1 +rich-click==1.8.* rich>=13.3.1 tabulate textual>=0.63.1 From 63c4c061564a1854a6144fea0c70beda608311a1 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 31 May 2024 10:07:23 +0200 Subject: [PATCH 183/737] rename command tui to interface --- nf_core/__main__.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index a263139b4..c21aee30a 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -37,7 +37,7 @@ "pipelines", 
"modules", "subworkflows", - "tui", + "interface", ], }, ], @@ -143,7 +143,10 @@ def run_nf_core(): nf_core_cli(auto_envvar_prefix="NFCORE") -@tui() +@tui( + command="interface", + help="Launch the nf-core interface", +) @click.group(context_settings=dict(help_option_names=["-h", "--help"]), cls=CustomRichGroup) @click.version_option(__version__) @click.option( From 7191f98562abfa9072f1e4d987c97169a8730155 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 31 May 2024 10:13:15 +0200 Subject: [PATCH 184/737] remove nf-core licences command --- nf_core/__main__.py | 24 --- nf_core/licences.py | 115 -------------- tests/test_cli.py | 32 ---- tests/test_licenses.py | 57 ------- tests/test_lint.py | 337 ----------------------------------------- 5 files changed, 565 deletions(-) delete mode 100644 nf_core/licences.py delete mode 100644 tests/test_licenses.py diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 67af238b5..3a3c35e27 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -38,7 +38,6 @@ "launch", "create-params-file", "download", - "licences", "tui", ], }, @@ -463,29 +462,6 @@ def download( dl.download_workflow() -# nf-core licences -@nf_core_cli.command() -@click.argument("pipeline", required=True, metavar="") -@click.option("--json", is_flag=True, default=False, help="Print output in JSON") -def licences(pipeline, json): - """ - List software licences for a given workflow (DSL1 only). - - Checks the pipeline environment.yml file which lists all conda software packages, which is not available for DSL2 workflows. Therefore, this command only supports DSL1 workflows (for now). - Each of these is queried against the anaconda.org API to find the licence. - Package name, version and licence is printed to the command line. 
- """ - from nf_core.licences import WorkflowLicences - - lic = WorkflowLicences(pipeline) - lic.as_json = json - try: - stdout.print(lic.run_licences()) - except LookupError as e: - log.error(e) - sys.exit(1) - - # nf-core lint @nf_core_cli.command() @click.option( diff --git a/nf_core/licences.py b/nf_core/licences.py deleted file mode 100644 index be737280f..000000000 --- a/nf_core/licences.py +++ /dev/null @@ -1,115 +0,0 @@ -"""Lists software licences for a given workflow.""" - -import json -import logging -import os - -import requests -import rich.console -import rich.table -import yaml - -import nf_core.utils - -log = logging.getLogger(__name__) - - -class WorkflowLicences: - """A nf-core workflow licenses collection. - - Tries to retrieve the license information from all dependencies - of a given nf-core pipeline. - - A condensed overview with license per dependency can be printed out. - - Args: - pipeline (str): An existing nf-core pipeline name, like `nf-core/hlatyping` - or short `hlatyping`. - """ - - def __init__(self, pipeline): - self.pipeline = pipeline - self.conda_config = None - if self.pipeline.startswith("nf-core/"): - self.pipeline = self.pipeline[8:] - self.conda_packages = {} - self.conda_package_licences = {} - self.as_json = False - - def run_licences(self): - """ - Run the nf-core licences action - """ - self.get_environment_file() - self.fetch_conda_licences() - return self.print_licences() - - def get_environment_file(self): - """Get the conda environment file for the pipeline""" - if os.path.exists(self.pipeline): - pipeline_obj = nf_core.utils.Pipeline(self.pipeline) - pipeline_obj._load() - if pipeline_obj._fp("environment.yml") not in pipeline_obj.files: - raise LookupError( - "No `environment.yml` file found. 
(Note: DSL2 pipelines are currently not supported by this command.)" - ) - self.conda_config = pipeline_obj.conda_config - else: - env_url = f"https://raw.githubusercontent.com/nf-core/{self.pipeline}/master/environment.yml" - log.debug(f"Fetching environment.yml file: {env_url}") - response = requests.get(env_url) - # Check that the pipeline exists - if response.status_code == 404: - raise LookupError( - f"Couldn't find pipeline conda file: {env_url}. (Note: DSL2 pipelines are currently not supported by this command.)" - ) - self.conda_config = yaml.safe_load(response.text) - - def fetch_conda_licences(self): - """Fetch package licences from Anaconda and PyPi.""" - - # Check conda dependency list - deps = self.conda_config.get("dependencies", []) - deps_data = {} - log.info(f"Fetching licence information for {len(deps)} tools") - for dep in deps: - try: - if isinstance(dep, str): - dep_channels = self.conda_config.get("channels", []) - deps_data[dep] = nf_core.utils.anaconda_package(dep, dep_channels) - elif isinstance(dep, dict): - deps_data[dep] = nf_core.utils.pip_package(dep) - except ValueError: - log.error(f"Couldn't get licence information for {dep}") - - for dep, data in deps_data.items(): - _, depver = dep.split("=", 1) - self.conda_package_licences[dep] = nf_core.utils.parse_anaconda_licence(data, depver) - - def print_licences(self): - """Prints the fetched license information. - - Args: - as_json (boolean): Prints the information in JSON. Defaults to False. - """ - log.info("Warning: This tool only prints licence information for the software tools packaged using conda.") - log.info("The pipeline may use other software and dependencies not described here. 
") - - if self.as_json: - return json.dumps(self.conda_package_licences, indent=4) - else: - table = rich.table.Table("Package Name", "Version", "Licence") - licence_list = [] - for dep, licences in self.conda_package_licences.items(): - depname, depver = dep.split("=", 1) - try: - depname = depname.split("::")[1] - except IndexError: - pass - licence_list.append([depname, depver, ", ".join(licences)]) - # Sort by licence, then package name - licence_list = sorted(sorted(licence_list), key=lambda x: x[2]) - # Add table rows - for lic in licence_list: - table.add_row(*lic) - return table diff --git a/tests/test_cli.py b/tests/test_cli.py index 76d167101..dd19097e5 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -199,38 +199,6 @@ def test_cli_download(self, mock_dl): mock_dl.return_value.download_workflow.assert_called_once() - @mock.patch("nf_core.licences.WorkflowLicences") - def test_licences(self, mock_lic): - """Test nf-core pipeline licence is printed out and cli parameters are passed on.""" - licence_text = "dummy licence text" - mock_lic.return_value.run_licences.return_value = licence_text - - params = { - "json": None, - } - - cmd = ["licences"] + self.assemble_params(params) + ["pipeline_name"] - result = self.invoke_cli(cmd) - - assert result.exit_code == 0 - assert licence_text in result.output - - mock_lic.assert_called_once_with(cmd[-1]) - - @mock.patch("nf_core.licences.WorkflowLicences") - def test_licences_log_error(self, mock_lic): - """Test LookupError is logged""" - error_txt = "LookupError has been raised" - mock_lic.return_value.run_licences.side_effect = LookupError(error_txt) - - cmd = ["licences", "pipeline_name"] - with self.assertLogs() as captured_logs: - result = self.invoke_cli(cmd) - - assert result.exit_code == 1 - assert error_txt in captured_logs.output[-1] - assert captured_logs.records[-1].levelname == "ERROR" - @mock.patch("nf_core.pipelines.create.create.PipelineCreate") def test_create(self, mock_create): """Test 
nf-core pipeline is created and cli parameters are passed on.""" diff --git a/tests/test_licenses.py b/tests/test_licenses.py deleted file mode 100644 index 8023c9e89..000000000 --- a/tests/test_licenses.py +++ /dev/null @@ -1,57 +0,0 @@ -"""Some tests covering the pipeline creation sub command.""" -# import json -# import os -# import tempfile -# import unittest -# -# import pytest -# from rich.console import Console -# -# import nf_core.create -# import nf_core.licences - -# TODO nf-core: Assess and strip out if no longer required for DSL2 - -# class WorkflowLicensesTest(unittest.TestCase): -# """A class that performs tests on the workflow license -# retrieval functionality of nf-core tools.""" - -# def setUp(self): -# """ Create a new pipeline, then make a Licence object """ -# # Set up the schema -# self.pipeline_dir = os.path.join(tempfile.mkdtemp(), "test_pipeline") -# self.create_obj = nf_core.create.PipelineCreate("testing", "test pipeline", "tester", outdir=self.pipeline_dir) -# self.create_obj.init_pipeline() -# self.license_obj = nf_core.licences.WorkflowLicences(self.pipeline_dir) - -# def test_run_licences_successful(self): -# console = Console(record=True) -# console.print(self.license_obj.run_licences()) -# output = console.export_text() -# assert "GPL v3" in output - -# def test_run_licences_successful_json(self): -# self.license_obj.as_json = True -# console = Console(record=True) -# console.print(self.license_obj.run_licences()) -# output = json.loads(console.export_text()) -# for package in output: -# if "multiqc" in package: -# assert output[package][0] == "GPL v3" -# break -# else: -# raise LookupError("Could not find MultiQC") - -# def test_get_environment_file_local(self): -# self.license_obj.get_environment_file() -# assert any(["multiqc" in k for k in self.license_obj.conda_config["dependencies"]]) - -# def test_get_environment_file_remote(self): -# self.license_obj = nf_core.licences.WorkflowLicences("methylseq") -# 
self.license_obj.get_environment_file() -# assert any(["multiqc" in k for k in self.license_obj.conda_config["dependencies"]]) - -# @pytest.mark.xfail(raises=LookupError, strict=True) -# def test_get_environment_file_nonexistent(self): -# self.license_obj = nf_core.licences.WorkflowLicences("fubarnotreal") -# self.license_obj.get_environment_file() diff --git a/tests/test_lint.py b/tests/test_lint.py index aaf833080..183af6462 100644 --- a/tests/test_lint.py +++ b/tests/test_lint.py @@ -253,340 +253,3 @@ def test_sphinx_md_files(self): test_template_strings_ignored, ) from .lint.version_consistency import test_version_consistency # type: ignore[misc] - - -# TODO nf-core: Assess and strip out if no longer required for DSL2 - -# def test_critical_missingfiles_example(self): -# """Tests for missing nextflow config and main.nf files""" -# lint_obj = nf_core.lint.run_linting(PATH_CRITICAL_EXAMPLE, False) -# assert len(lint_obj.failed) > 0 -# -# def test_failing_missingfiles_example(self): -# """Tests for missing files like Dockerfile or LICENSE""" -# lint_obj = nf_core.lint.PipelineLint(PATH_FAILING_EXAMPLE) -# lint_obj.check_files_exist() -# expectations = {"failed": 6, "warned": 2, "passed": 14} -# self.assess_lint_status(lint_obj, **expectations) -# -# def test_mit_licence_example_pass(self): -# """Tests that MIT test works with good MIT licences""" -# good_lint_obj = nf_core.lint.PipelineLint(PATH_CRITICAL_EXAMPLE) -# good_lint_obj.check_licence() -# expectations = {"failed": 0, "warned": 0, "passed": 1} -# self.assess_lint_status(good_lint_obj, **expectations) -# -# def test_mit_license_example_with_failed(self): -# """Tests that MIT test works with bad MIT licences""" -# bad_lint_obj = nf_core.lint.PipelineLint(PATH_FAILING_EXAMPLE) -# bad_lint_obj.check_licence() -# expectations = {"failed": 1, "warned": 0, "passed": 0} -# self.assess_lint_status(bad_lint_obj, **expectations) -# -# def test_config_variable_example_pass(self): -# """Tests that config variable 
existence test works with good pipeline example""" -# good_lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# good_lint_obj.check_nextflow_config() -# expectations = {"failed": 0, "warned": 1, "passed": 34} -# self.assess_lint_status(good_lint_obj, **expectations) -# -# def test_config_variable_example_with_failed(self): -# """Tests that config variable existence test fails with bad pipeline example""" -# bad_lint_obj = nf_core.lint.PipelineLint(PATH_FAILING_EXAMPLE) -# bad_lint_obj.check_nextflow_config() -# expectations = {"failed": 19, "warned": 6, "passed": 10} -# self.assess_lint_status(bad_lint_obj, **expectations) -# -# @pytest.mark.xfail(raises=AssertionError, strict=True) -# def test_config_variable_error(self): -# """Tests that config variable existence test falls over nicely with nextflow can't run""" -# bad_lint_obj = nf_core.lint.PipelineLint("/non/existant/path") -# bad_lint_obj.check_nextflow_config() -# -# -# def test_wrong_license_examples_with_failed(self): -# """Tests for checking the license test behavior""" -# for example in PATHS_WRONG_LICENSE_EXAMPLE: -# lint_obj = nf_core.lint.PipelineLint(example) -# lint_obj.check_licence() -# expectations = {"failed": 1, "warned": 0, "passed": 0} -# self.assess_lint_status(lint_obj, **expectations) -# -# def test_missing_license_example(self): -# """Tests for missing license behavior""" -# lint_obj = nf_core.lint.PipelineLint(PATH_MISSING_LICENSE_EXAMPLE) -# lint_obj.check_licence() -# expectations = {"failed": 1, "warned": 0, "passed": 0} -# self.assess_lint_status(lint_obj, **expectations) -# -# def test_readme_pass(self): -# """Tests that the pipeline README file checks work with a good example""" -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# lint_obj.minNextflowVersion = "20.04.0" -# lint_obj.files = ["environment.yml"] -# lint_obj.check_readme() -# expectations = {"failed": 0, "warned": 0, "passed": 2} -# self.assess_lint_status(lint_obj, **expectations) -# -# def 
test_readme_warn(self): -# """Tests that the pipeline README file checks fail """ -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# lint_obj.minNextflowVersion = "0.28.0" -# lint_obj.check_readme() -# expectations = {"failed": 1, "warned": 0, "passed": 0} -# self.assess_lint_status(lint_obj, **expectations) -# -# def test_readme_fail(self): -# """Tests that the pipeline README file checks give warnings with a bad example""" -# lint_obj = nf_core.lint.PipelineLint(PATH_FAILING_EXAMPLE) -# lint_obj.files = ["environment.yml"] -# lint_obj.check_readme() -# expectations = {"failed": 0, "warned": 2, "passed": 0} -# self.assess_lint_status(lint_obj, **expectations) -# -# def test_dockerfile_pass(self): -# """Tests if a valid Dockerfile passes the lint checks""" -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# lint_obj.files = ["Dockerfile"] -# lint_obj.check_docker() -# expectations = {"failed": 0, "warned": 0, "passed": 1} -# self.assess_lint_status(lint_obj, **expectations) -# -# def test_version_consistency_pass(self): -# """Tests the workflow version and container version sucessfully""" -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# lint_obj.config["manifest.version"] = "0.4" -# lint_obj.config["process.container"] = "nfcore/tools:0.4" -# lint_obj.check_version_consistency() -# expectations = {"failed": 0, "warned": 0, "passed": 1} -# self.assess_lint_status(lint_obj, **expectations) -# -# def test_version_consistency_with_env_fail(self): -# """Tests the behaviour, when a git activity is a release -# and simulate wrong release tag""" -# os.environ["GITHUB_REF"] = "refs/tags/0.5" -# os.environ["GITHUB_REPOSITORY"] = "nf-core/testpipeline" -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# lint_obj.config["manifest.version"] = "0.4" -# lint_obj.config["process.container"] = "nfcore/tools:0.4" -# lint_obj.check_version_consistency() -# expectations = {"failed": 1, "warned": 0, "passed": 0} -# 
self.assess_lint_status(lint_obj, **expectations) -# -# def test_version_consistency_with_numeric_fail(self): -# """Tests the behaviour, when a git activity is a release -# and simulate wrong release tag""" -# os.environ["GITHUB_REF"] = "refs/tags/0.5dev" -# os.environ["GITHUB_REPOSITORY"] = "nf-core/testpipeline" -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# lint_obj.config["manifest.version"] = "0.4" -# lint_obj.config["process.container"] = "nfcore/tools:0.4" -# lint_obj.check_version_consistency() -# expectations = {"failed": 1, "warned": 0, "passed": 0} -# self.assess_lint_status(lint_obj, **expectations) -# -# def test_version_consistency_with_no_docker_version_fail(self): -# """Tests the behaviour, when a git activity is a release -# and simulate wrong missing docker version tag""" -# os.environ["GITHUB_REF"] = "refs/tags/0.4" -# os.environ["GITHUB_REPOSITORY"] = "nf-core/testpipeline" -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# lint_obj.config["manifest.version"] = "0.4" -# lint_obj.config["process.container"] = "nfcore/tools" -# lint_obj.check_version_consistency() -# expectations = {"failed": 1, "warned": 0, "passed": 0} -# self.assess_lint_status(lint_obj, **expectations) -# -# def test_version_consistency_with_env_pass(self): -# """Tests the behaviour, when a git activity is a release -# and simulate correct release tag""" -# os.environ["GITHUB_REF"] = "refs/tags/0.4" -# os.environ["GITHUB_REPOSITORY"] = "nf-core/testpipeline" -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# lint_obj.config["manifest.version"] = "0.4" -# lint_obj.config["process.container"] = "nfcore/tools:0.4" -# lint_obj.check_version_consistency() -# expectations = {"failed": 0, "warned": 0, "passed": 1} -# self.assess_lint_status(lint_obj, **expectations) -# -# def test_conda_env_pass(self): -# """ Tests the conda environment config checks with a working example """ -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) 
-# lint_obj.files = ["environment.yml"] -# with open(os.path.join(PATH_WORKING_EXAMPLE, "environment.yml"), "r") as fh: -# lint_obj.conda_config = yaml.safe_load(fh) -# lint_obj.pipeline_name = "tools" -# lint_obj.config["manifest.version"] = "0.4" -# lint_obj.check_conda_env_yaml() -# expectations = {"failed": 0, "warned": 4, "passed": 5} -# self.assess_lint_status(lint_obj, **expectations) -# -# def test_conda_env_fail(self): -# """ Tests the conda environment config fails with a bad example """ -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# lint_obj.files = ["environment.yml"] -# with open(os.path.join(PATH_WORKING_EXAMPLE, "environment.yml"), "r") as fh: -# lint_obj.conda_config = yaml.safe_load(fh) -# lint_obj.conda_config["dependencies"] = ["fastqc", "multiqc=0.9", "notapackaage=0.4"] -# lint_obj.pipeline_name = "not_tools" -# lint_obj.config["manifest.version"] = "0.23" -# lint_obj.check_conda_env_yaml() -# expectations = {"failed": 3, "warned": 1, "passed": 2} -# self.assess_lint_status(lint_obj, **expectations) -# -# @mock.patch("requests.get") -# @pytest.mark.xfail(raises=ValueError, strict=True) -# def test_conda_env_timeout(self, mock_get): -# """ Tests the conda environment handles API timeouts """ -# # Define the behaviour of the request get mock -# mock_get.side_effect = requests.exceptions.Timeout() -# # Now do the test -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# lint_obj.conda_config["channels"] = ["bioconda"] -# lint_obj.check_anaconda_package("multiqc=1.6") -# -# def test_conda_env_skip(self): -# """ Tests the conda environment config is skipped when not needed """ -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# lint_obj.check_conda_env_yaml() -# expectations = {"failed": 0, "warned": 0, "passed": 0} -# self.assess_lint_status(lint_obj, **expectations) -# -# def test_conda_dockerfile_pass(self): -# """ Tests the conda Dockerfile test works with a working example """ -# lint_obj = 
nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# lint_obj.version = "1.11" -# lint_obj.files = ["environment.yml", "Dockerfile"] -# with open(os.path.join(PATH_WORKING_EXAMPLE, "Dockerfile"), "r") as fh: -# lint_obj.dockerfile = fh.read().splitlines() -# lint_obj.conda_config["name"] = "nf-core-tools-0.4" -# lint_obj.check_conda_dockerfile() -# expectations = {"failed": 0, "warned": 0, "passed": 1} -# self.assess_lint_status(lint_obj, **expectations) -# -# def test_conda_dockerfile_fail(self): -# """ Tests the conda Dockerfile test fails with a bad example """ -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# lint_obj.version = "1.11" -# lint_obj.files = ["environment.yml", "Dockerfile"] -# lint_obj.conda_config["name"] = "nf-core-tools-0.4" -# lint_obj.dockerfile = ["fubar"] -# lint_obj.check_conda_dockerfile() -# expectations = {"failed": 5, "warned": 0, "passed": 0} -# self.assess_lint_status(lint_obj, **expectations) -# -# def test_conda_dockerfile_skip(self): -# """ Tests the conda Dockerfile test is skipped when not needed """ -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# lint_obj.check_conda_dockerfile() -# expectations = {"failed": 0, "warned": 0, "passed": 0} -# self.assess_lint_status(lint_obj, **expectations) -# -# def test_pip_no_version_fail(self): -# """ Tests the pip dependency version definition is present """ -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# lint_obj.files = ["environment.yml"] -# lint_obj.pipeline_name = "tools" -# lint_obj.config["manifest.version"] = "0.4" -# lint_obj.conda_config = {"name": "nf-core-tools-0.4", "dependencies": [{"pip": ["multiqc"]}]} -# lint_obj.check_conda_env_yaml() -# expectations = {"failed": 1, "warned": 0, "passed": 1} -# self.assess_lint_status(lint_obj, **expectations) -# -# def test_pip_package_not_latest_warn(self): -# """ Tests the pip dependency version definition is present """ -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# 
lint_obj.files = ["environment.yml"] -# lint_obj.pipeline_name = "tools" -# lint_obj.config["manifest.version"] = "0.4" -# lint_obj.conda_config = {"name": "nf-core-tools-0.4", "dependencies": [{"pip": ["multiqc==1.4"]}]} -# lint_obj.check_conda_env_yaml() -# expectations = {"failed": 0, "warned": 1, "passed": 2} -# self.assess_lint_status(lint_obj, **expectations) -# -# @mock.patch("requests.get") -# def test_pypi_timeout_warn(self, mock_get): -# """Tests the PyPi connection and simulates a request timeout, which should -# return in an addiional warning in the linting""" -# # Define the behaviour of the request get mock -# mock_get.side_effect = requests.exceptions.Timeout() -# # Now do the test -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# lint_obj.files = ["environment.yml"] -# lint_obj.pipeline_name = "tools" -# lint_obj.config["manifest.version"] = "0.4" -# lint_obj.conda_config = {"name": "nf-core-tools-0.4", "dependencies": [{"pip": ["multiqc==1.5"]}]} -# lint_obj.check_conda_env_yaml() -# expectations = {"failed": 0, "warned": 1, "passed": 2} -# self.assess_lint_status(lint_obj, **expectations) -# -# @mock.patch("requests.get") -# def test_pypi_connection_error_warn(self, mock_get): -# """Tests the PyPi connection and simulates a connection error, which should -# result in an additional warning, as we cannot test if dependent module is latest""" -# # Define the behaviour of the request get mock -# mock_get.side_effect = requests.exceptions.ConnectionError() -# # Now do the test -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# lint_obj.files = ["environment.yml"] -# lint_obj.pipeline_name = "tools" -# lint_obj.config["manifest.version"] = "0.4" -# lint_obj.conda_config = {"name": "nf-core-tools-0.4", "dependencies": [{"pip": ["multiqc==1.5"]}]} -# lint_obj.check_conda_env_yaml() -# expectations = {"failed": 0, "warned": 1, "passed": 2} -# self.assess_lint_status(lint_obj, **expectations) -# -# def 
test_pip_dependency_fail(self): -# """ Tests the PyPi API package information query """ -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# lint_obj.files = ["environment.yml"] -# lint_obj.pipeline_name = "tools" -# lint_obj.config["manifest.version"] = "0.4" -# lint_obj.conda_config = {"name": "nf-core-tools-0.4", "dependencies": [{"pip": ["notpresent==1.5"]}]} -# lint_obj.check_conda_env_yaml() -# expectations = {"failed": 1, "warned": 0, "passed": 2} -# self.assess_lint_status(lint_obj, **expectations) -# -# def test_conda_dependency_fails(self): -# """Tests that linting fails, if conda dependency -# package version is not available on Anaconda. -# """ -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# lint_obj.files = ["environment.yml"] -# lint_obj.pipeline_name = "tools" -# lint_obj.config["manifest.version"] = "0.4" -# lint_obj.conda_config = {"name": "nf-core-tools-0.4", "dependencies": ["openjdk=0.0.0"]} -# lint_obj.check_conda_env_yaml() -# expectations = {"failed": 1, "warned": 0, "passed": 2} -# self.assess_lint_status(lint_obj, **expectations) -# -# def test_pip_dependency_fails(self): -# """Tests that linting fails, if conda dependency -# package version is not available on Anaconda. 
-# """ -# lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# lint_obj.files = ["environment.yml"] -# lint_obj.pipeline_name = "tools" -# lint_obj.config["manifest.version"] = "0.4" -# lint_obj.conda_config = {"name": "nf-core-tools-0.4", "dependencies": [{"pip": ["multiqc==0.0"]}]} -# lint_obj.check_conda_env_yaml() -# expectations = {"failed": 1, "warned": 0, "passed": 2} -# self.assess_lint_status(lint_obj, **expectations) -# -# def test_pipeline_name_pass(self): -# """Tests pipeline name good pipeline example: lower case, no punctuation""" -# # good_lint_obj = nf_core.lint.run_linting(PATH_WORKING_EXAMPLE) -# good_lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# good_lint_obj.pipeline_name = "tools" -# good_lint_obj.check_pipeline_name() -# expectations = {"failed": 0, "warned": 0, "passed": 1} -# self.assess_lint_status(good_lint_obj, **expectations) -# -# def test_pipeline_name_critical(self): -# """Tests that warning is returned for pipeline not adhering to naming convention""" -# critical_lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) -# critical_lint_obj.pipeline_name = "Tools123" -# critical_lint_obj.check_pipeline_name() -# expectations = {"failed": 0, "warned": 1, "passed": 0} -# self.assess_lint_status(critical_lint_obj, **expectations) -# From aed32a9f90169680a1c45c144845c3f215746e98 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Fri, 31 May 2024 08:16:09 +0000 Subject: [PATCH 185/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index a1d7678c4..bb116c9a4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -26,6 +26,7 @@ - return directory if base_dir is the root directory ([#3003](https://github.com/nf-core/tools/pull/3003)) - Update pre-commit hook astral-sh/ruff-pre-commit to v0.4.6 ([#3006](https://github.com/nf-core/tools/pull/3006)) - Create: allow more special characters on the pipeline name for non-nf-core pipelines 
([#3008](https://github.com/nf-core/tools/pull/3008)) +- Remove nf-core licenses command ([#3012](https://github.com/nf-core/tools/pull/3012)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From 02017aac324b790d7afedfbaa2edc9ca8fb7fb9f Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Fri, 31 May 2024 23:13:04 +0200 Subject: [PATCH 186/737] README - absolute image paths Make the logo render on PyPI --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 85b608bf7..58fb708a0 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@

- - nf-core/tools + + nf-core/tools

From 089264292c7f1430c2e17f0978c1169dfc369ff7 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Fri, 31 May 2024 21:13:59 +0000 Subject: [PATCH 187/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index a1d7678c4..e8a8d1fbf 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -26,6 +26,7 @@ - return directory if base_dir is the root directory ([#3003](https://github.com/nf-core/tools/pull/3003)) - Update pre-commit hook astral-sh/ruff-pre-commit to v0.4.6 ([#3006](https://github.com/nf-core/tools/pull/3006)) - Create: allow more special characters on the pipeline name for non-nf-core pipelines ([#3008](https://github.com/nf-core/tools/pull/3008)) +- README - absolute image paths ([#3013](https://github.com/nf-core/tools/pull/3013)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From b96f56b15d24431fb97382bed7715bccfafd21df Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Fri, 31 May 2024 23:35:39 +0200 Subject: [PATCH 188/737] Conda module linting: Include package name in log file MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Tried to do this with GitHub Copilot Workspaces but it failed 😅 As I was looking at the code anyway I could see the fix so figured I would put it manually. Closes https://github.com/nf-core/tools/issues/3009 Untested, done in web browser only. 
--- nf_core/modules/lint/main_nf.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/nf_core/modules/lint/main_nf.py b/nf_core/modules/lint/main_nf.py index fd4d81f7f..81308ba5c 100644 --- a/nf_core/modules/lint/main_nf.py +++ b/nf_core/modules/lint/main_nf.py @@ -367,14 +367,14 @@ def check_process_section(self, lines, registry, fix_version, progress_bar): # response = _bioconda_package(bp) response = nf_core.utils.anaconda_package(bp) except LookupError: - self.warned.append(("bioconda_version", "Conda version not specified correctly", self.main_nf)) + self.warned.append(("bioconda_version", f"Conda version not specified correctly: {bp}", self.main_nf)) except ValueError: - self.failed.append(("bioconda_version", "Conda version not specified correctly", self.main_nf)) + self.failed.append(("bioconda_version", f"Conda version not specified correctly: {bp}", self.main_nf)) else: # Check that required version is available at all if bioconda_version not in response.get("versions"): self.failed.append( - ("bioconda_version", f"Conda package had unknown version: `{bioconda_version}`", self.main_nf) + ("bioconda_version", f"Conda package {bp} had unknown version: `{bioconda_version}`", self.main_nf) ) continue # No need to test for latest version, continue linting # Check version is latest available From 93acb91abe8c418ac9a8b9f06b237e2ec2d980e2 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Fri, 31 May 2024 22:20:54 +0000 Subject: [PATCH 189/737] Update pre-commit hook astral-sh/ruff-pre-commit to v0.4.7 --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8521c4410..0bfe44f87 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.4.6 + rev: v0.4.7 hooks: - id: ruff # linter args: [--fix, 
--exit-non-zero-on-fix] # sort imports and fix From d4a91832343df619927ed1fdcc776c0daffb5e5a Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Fri, 31 May 2024 22:21:50 +0000 Subject: [PATCH 190/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index a1d7678c4..515189e30 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -26,6 +26,7 @@ - return directory if base_dir is the root directory ([#3003](https://github.com/nf-core/tools/pull/3003)) - Update pre-commit hook astral-sh/ruff-pre-commit to v0.4.6 ([#3006](https://github.com/nf-core/tools/pull/3006)) - Create: allow more special characters on the pipeline name for non-nf-core pipelines ([#3008](https://github.com/nf-core/tools/pull/3008)) +- Update pre-commit hook astral-sh/ruff-pre-commit to v0.4.7 ([#3015](https://github.com/nf-core/tools/pull/3015)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From 847a3763d1b67a9e75432bf0c74a0c531891d687 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Mon, 3 Jun 2024 08:40:07 +0000 Subject: [PATCH 191/737] update changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index a1d7678c4..6b421ba4d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,6 +9,7 @@ ### Linting - Fix linting fail on nfcore_external_java_deps if nf_schema is used ([#2976](https://github.com/nf-core/tools/pull/2976)) +- Conda module linting: Include package name in log file ([#3014](https://github.com/nf-core/tools/pull/3014)) ### Download From 005972e33d0d5808791fbcc18a6af914b249babc Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 5 Jun 2024 19:58:25 +0000 Subject: [PATCH 192/737] Update pre-commit hook astral-sh/ruff-pre-commit to v0.4.8 --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 0bfe44f87..2e1373904 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.4.7 + rev: v0.4.8 hooks: - id: ruff # linter args: [--fix, --exit-non-zero-on-fix] # sort imports and fix From ee5a22b263ffe926fc4e437adcc45831d13ab4e8 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Wed, 5 Jun 2024 19:59:12 +0000 Subject: [PATCH 193/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 996650e00..1d82830b7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -29,6 +29,7 @@ - Create: allow more special characters on the pipeline name for non-nf-core pipelines ([#3008](https://github.com/nf-core/tools/pull/3008)) - README - absolute image paths ([#3013](https://github.com/nf-core/tools/pull/3013)) - Update pre-commit hook astral-sh/ruff-pre-commit to v0.4.7 ([#3015](https://github.com/nf-core/tools/pull/3015)) +- Update pre-commit hook astral-sh/ruff-pre-commit to v0.4.8 ([#3017](https://github.com/nf-core/tools/pull/3017)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From 8ee2040007c222825dc8356a731a024424cc1397 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Sat, 8 Jun 2024 01:11:09 +0000 Subject: [PATCH 194/737] Update python:3.12-slim Docker digest to e3ae8cf --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index fe4162b4f..6fe79d2eb 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.12-slim@sha256:afc139a0a640942491ec481ad8dda10f2c5b753f5c969393b12480155fe15a63 +FROM python:3.12-slim@sha256:e3ae8cf03c4f0abbfef13a8147478a7cd92798a94fa729a36a185d9106cbae32 LABEL authors="phil.ewels@seqera.io,erik.danielsson@scilifelab.se" \ description="Docker image 
containing requirements for nf-core/tools" From a63a653ab230e6b576d3e1cd952a51433a852ea4 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Sun, 9 Jun 2024 03:35:11 +0000 Subject: [PATCH 195/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1d82830b7..715bc2dc1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -30,6 +30,7 @@ - README - absolute image paths ([#3013](https://github.com/nf-core/tools/pull/3013)) - Update pre-commit hook astral-sh/ruff-pre-commit to v0.4.7 ([#3015](https://github.com/nf-core/tools/pull/3015)) - Update pre-commit hook astral-sh/ruff-pre-commit to v0.4.8 ([#3017](https://github.com/nf-core/tools/pull/3017)) +- Update python:3.12-slim Docker digest to e3ae8cf ([#3020](https://github.com/nf-core/tools/pull/3020)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From 05f60b440aa3a2b5125ed3cc65ae8b86dc979ef2 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 12 Jun 2024 13:30:29 +0000 Subject: [PATCH 196/737] Update dawidd6/action-download-artifact action to v6 --- nf_core/pipeline-template/.github/workflows/linting_comment.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipeline-template/.github/workflows/linting_comment.yml b/nf_core/pipeline-template/.github/workflows/linting_comment.yml index ea408fd6f..908dcea15 100644 --- a/nf_core/pipeline-template/.github/workflows/linting_comment.yml +++ b/nf_core/pipeline-template/.github/workflows/linting_comment.yml @@ -11,7 +11,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Download lint results - uses: dawidd6/action-download-artifact@09f2f74827fd3a8607589e5ad7f9398816f540fe # v3 + uses: dawidd6/action-download-artifact@bf251b5aa9c2f7eeb574a96ee720e24f801b7c11 # v6 with: workflow: linting.yml workflow_conclusion: completed From 90f7e344d7e8d07060701bca0cedd36ce7866bf9 
Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 13 Jun 2024 18:31:15 +0000 Subject: [PATCH 197/737] Update python:3.12-slim Docker digest to 2fba8e7 --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 6fe79d2eb..9ebc7e985 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.12-slim@sha256:e3ae8cf03c4f0abbfef13a8147478a7cd92798a94fa729a36a185d9106cbae32 +FROM python:3.12-slim@sha256:2fba8e70a87bcc9f6edd20dda0a1d4adb32046d2acbca7361bc61da5a106a914 LABEL authors="phil.ewels@seqera.io,erik.danielsson@scilifelab.se" \ description="Docker image containing requirements for nf-core/tools" From 6f444c96d3292dd3fd9fd4863ec7716de6ed4c1d Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Fri, 14 Jun 2024 23:01:49 +0000 Subject: [PATCH 198/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 715bc2dc1..9da8c1ab2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -31,6 +31,7 @@ - Update pre-commit hook astral-sh/ruff-pre-commit to v0.4.7 ([#3015](https://github.com/nf-core/tools/pull/3015)) - Update pre-commit hook astral-sh/ruff-pre-commit to v0.4.8 ([#3017](https://github.com/nf-core/tools/pull/3017)) - Update python:3.12-slim Docker digest to e3ae8cf ([#3020](https://github.com/nf-core/tools/pull/3020)) +- Update python:3.12-slim Docker digest to 2fba8e7 ([#3023](https://github.com/nf-core/tools/pull/3023)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From a15a6f0e7f94a6d57e4d35a349b96bd29e7f886e Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Sat, 15 Jun 2024 04:47:47 +0000 Subject: [PATCH 199/737] Update pre-commit hook astral-sh/ruff-pre-commit to v0.4.9 --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2e1373904..6d8e34e0b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.4.8 + rev: v0.4.9 hooks: - id: ruff # linter args: [--fix, --exit-non-zero-on-fix] # sort imports and fix From 1e47693b295eea4c2a5f7b6b0351305c10133a31 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Wed, 19 Jun 2024 15:03:44 +0200 Subject: [PATCH 200/737] Linting: Remove 'defaults' from conda environment.yml file. See nf-core/modules#5829 --- nf_core/module-template/environment.yml | 1 - .../pipeline-template/modules/nf-core/fastqc/environment.yml | 1 - .../pipeline-template/modules/nf-core/multiqc/environment.yml | 1 - nf_core/pipeline-template/nextflow.config | 2 +- .../subworkflows/nf-core/utils_nextflow_pipeline/main.nf | 2 +- tests/test_utils.py | 2 +- 6 files changed, 3 insertions(+), 6 deletions(-) diff --git a/nf_core/module-template/environment.yml b/nf_core/module-template/environment.yml index dcf510aff..f234f8542 100644 --- a/nf_core/module-template/environment.yml +++ b/nf_core/module-template/environment.yml @@ -4,6 +4,5 @@ name: "{{ component_name_underscore }}" channels: - conda-forge - bioconda - - defaults dependencies: - "{{ bioconda if bioconda else 'YOUR-TOOL-HERE' }}" diff --git a/nf_core/pipeline-template/modules/nf-core/fastqc/environment.yml b/nf_core/pipeline-template/modules/nf-core/fastqc/environment.yml index 1787b38a9..0d5be45f2 100644 --- a/nf_core/pipeline-template/modules/nf-core/fastqc/environment.yml +++ b/nf_core/pipeline-template/modules/nf-core/fastqc/environment.yml @@ -2,6 +2,5 @@ name: fastqc channels: - conda-forge - bioconda - - defaults dependencies: - bioconda::fastqc=0.12.1 diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/environment.yml b/nf_core/pipeline-template/modules/nf-core/multiqc/environment.yml index ca39fb67e..329ddb487 100644 --- 
a/nf_core/pipeline-template/modules/nf-core/multiqc/environment.yml +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/environment.yml @@ -2,6 +2,5 @@ name: multiqc channels: - conda-forge - bioconda - - defaults dependencies: - bioconda::multiqc=1.21 diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index 2e6a56b00..6202aa831 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -98,7 +98,7 @@ profiles { podman.enabled = false shifter.enabled = false charliecloud.enabled = false - conda.channels = ['conda-forge', 'bioconda', 'defaults'] + conda.channels = ['conda-forge', 'bioconda'] apptainer.enabled = false } mamba { diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf index ac31f28f6..e770d91b9 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf @@ -102,7 +102,7 @@ def checkCondaChannels() { // Check that all channels are present // This channel list is ordered by required channel priority. 
- def required_channels_in_order = ['conda-forge', 'bioconda', 'defaults'] + def required_channels_in_order = ['conda-forge', 'bioconda'] def channels_missing = ((required_channels_in_order as Set) - (channels as Set)) as Boolean // Check that they are in the right order diff --git a/tests/test_utils.py b/tests/test_utils.py index 85f4e3c54..48288be81 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -105,7 +105,7 @@ def test_load_pipeline_config(self): # def test_load_conda_env(self): # """Load the pipeline Conda environment.yml file""" # self.pipeline_obj._load_conda_environment() - # assert self.pipeline_obj.conda_config["channels"] == ["conda-forge", "bioconda", "defaults"] + # assert self.pipeline_obj.conda_config["channels"] == ["conda-forge", "bioconda"] def test_list_files_git(self): """Test listing pipeline files using `git ls`""" From d6191cce7843918cf256da0f9b0fe0940e813288 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Wed, 19 Jun 2024 13:05:27 +0000 Subject: [PATCH 201/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9da8c1ab2..3e2be8fd9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,7 @@ - Fix linting fail on nfcore_external_java_deps if nf_schema is used ([#2976](https://github.com/nf-core/tools/pull/2976)) - Conda module linting: Include package name in log file ([#3014](https://github.com/nf-core/tools/pull/3014)) +- Remove defaults from conda environment.yml file. 
([#3029](https://github.com/nf-core/tools/pull/3029)) ### Download From e7d034ee664d8111fc22f8e38e8f501868046c1a Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 20 Jun 2024 18:21:07 +0000 Subject: [PATCH 202/737] Update pre-commit hook astral-sh/ruff-pre-commit to v0.4.10 --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6d8e34e0b..4360c7fd7 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.4.9 + rev: v0.4.10 hooks: - id: ruff # linter args: [--fix, --exit-non-zero-on-fix] # sort imports and fix From eff71e6b847dfa7e813d82ab8abbf91f6efd394b Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Thu, 20 Jun 2024 18:21:52 +0000 Subject: [PATCH 203/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9da8c1ab2..60207e1a4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -32,6 +32,7 @@ - Update pre-commit hook astral-sh/ruff-pre-commit to v0.4.8 ([#3017](https://github.com/nf-core/tools/pull/3017)) - Update python:3.12-slim Docker digest to e3ae8cf ([#3020](https://github.com/nf-core/tools/pull/3020)) - Update python:3.12-slim Docker digest to 2fba8e7 ([#3023](https://github.com/nf-core/tools/pull/3023)) +- Update pre-commit hook astral-sh/ruff-pre-commit to v0.4.10 ([#3031](https://github.com/nf-core/tools/pull/3031)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From b3dc8c3319582300b9f007a5097fd7d322da4c8e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Matthias=20H=C3=B6rtenhuber?= Date: Tue, 25 Jun 2024 10:40:18 +0200 Subject: [PATCH 204/737] Update CHANGELOG.md MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Júlia Mir 
Pedrol --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index bb116c9a4..18bd70062 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -26,7 +26,7 @@ - return directory if base_dir is the root directory ([#3003](https://github.com/nf-core/tools/pull/3003)) - Update pre-commit hook astral-sh/ruff-pre-commit to v0.4.6 ([#3006](https://github.com/nf-core/tools/pull/3006)) - Create: allow more special characters on the pipeline name for non-nf-core pipelines ([#3008](https://github.com/nf-core/tools/pull/3008)) -- Remove nf-core licenses command ([#3012](https://github.com/nf-core/tools/pull/3012)) +- Remove nf-core licences command ([#3012](https://github.com/nf-core/tools/pull/3012)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From 324cef26f2f2687e5935e611d42f8e871781a2d4 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 25 Jun 2024 12:56:10 +0200 Subject: [PATCH 205/737] make top-level commands work with a warning deprecation message --- nf_core/__main__.py | 261 +++++++++++++++++++++++++++++++++++++++----- 1 file changed, 233 insertions(+), 28 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index c21aee30a..341382f64 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -641,7 +641,7 @@ def launch_pipeline( sys.exit(1) -# nf-core pipelnies list +# nf-core pipelines list @pipelines.command("list") @click.argument("keywords", required=False, nargs=-1, metavar="") @click.option( @@ -2248,10 +2248,24 @@ def validate(pipeline, params): This command takes such a file and validates it against the pipeline schema, checking whether all schema rules are satisfied. """ - log.error( + log.warning( "The `[magenta]nf-core schema validate[/]` command is deprecated. Use `[magenta]nf-core pipelines schema validate[/]` instead." 
) - sys.exit(0) + from nf_core.pipelines.schema import PipelineSchema + + schema_obj = PipelineSchema() + try: + schema_obj.get_schema_path(pipeline) + # Load and check schema + schema_obj.load_lint_schema() + except AssertionError as e: + log.error(e) + sys.exit(1) + schema_obj.load_input_params(params) + try: + schema_obj.validate_params() + except AssertionError: + sys.exit(1) # nf-core schema build (deprecated) @@ -2292,10 +2306,18 @@ def build(dir, no_prompts, web_only, url): https://nf-co.re website where you can annotate and organise parameters. Listens for this to be completed and saves the updated schema. """ - log.error( + log.warning( "The `[magenta]nf-core schema build[/]` command is deprecated. Use `[magenta]nf-core pipelines schema build[/]` instead." ) - sys.exit(0) + from nf_core.pipelines.schema import PipelineSchema + + try: + schema_obj = PipelineSchema() + if schema_obj.build_schema(dir, no_prompts, web_only, url) is False: + sys.exit(1) + except (UserWarning, AssertionError) as e: + log.error(e) + sys.exit(1) # nf-core schema lint (deprecated) @@ -2319,10 +2341,22 @@ def schema_lint(schema_path): If no schema path is provided, "nextflow_schema.json" will be used (if it exists). """ - log.error( + log.warning( "The `[magenta]nf-core schema lint[/]` command is deprecated. Use `[magenta]nf-core pipelines schema lint[/]` instead." ) - sys.exit(0) + from nf_core.pipelines.schema import PipelineSchema + + schema_obj = PipelineSchema() + try: + schema_obj.get_schema_path(schema_path) + schema_obj.load_lint_schema() + # Validate title and description - just warnings as schema should still work fine + try: + schema_obj.validate_schema_title_description() + except AssertionError as e: + log.warning(e) + except AssertionError: + sys.exit(1) # nf-core schema docs (deprecated) @@ -2362,10 +2396,20 @@ def docs(schema_path, output, format, force, columns): DEPRECATED Outputs parameter documentation for a pipeline schema. 
""" - log.error( + log.warning( "The `[magenta]nf-core schema docs[/]` command is deprecated. Use `[magenta]nf-core pipelines schema docs[/]` instead." ) - sys.exit(0) + if not os.path.exists(schema_path): + log.error("Could not find 'nextflow_schema.json' in current directory. Please specify a path.") + sys.exit(1) + + from nf_core.pipelines.schema import PipelineSchema + + schema_obj = PipelineSchema() + # Assume we're in a pipeline dir root if schema path not set + schema_obj.get_schema_path(schema_path) + schema_obj.load_schema() + schema_obj.print_documentation(output, format, force, columns.split(",")) # nf-core create-logo (deprecated) @@ -2413,10 +2457,24 @@ def logo(logo_text, dir, name, theme, width, format, force): This command generates an nf-core pipeline logo, using the supplied """ - log.error( + log.warning( "The `[magenta]nf-core create-logo[/]` command is deprecated. Use `[magenta]nf-core pipelines screate-logo[/]` instead." ) - sys.exit(0) + from nf_core.pipelines.create_logo import create_logo + + try: + if dir == ".": + dir = Path.cwd() + logo_path = create_logo(logo_text, dir, name, theme, width, format, force) + # Print path to logo relative to current working directory + try: + logo_path = Path(logo_path).relative_to(Path.cwd()) + except ValueError: + logo_path = Path(logo_path) + log.info(f"Created logo: [magenta]{logo_path}[/]") + except UserWarning as e: + log.error(e) + sys.exit(1) # nf-core sync (deprecated) @@ -2464,8 +2522,22 @@ def sync(dir, from_branch, pull_request, github_repository, username, template_y the pipeline. It is run automatically for all pipelines when ever a new release of [link=https://github.com/nf-core/tools]nf-core/tools[/link] (and the included template) is made. """ - log.error("The `[magenta]nf-core sync[/]` command is deprecated. Use `[magenta]nf-core pipelines sync[/]` instead.") - sys.exit(0) + log.warning( + "The `[magenta]nf-core sync[/]` command is deprecated. 
Use `[magenta]nf-core pipelines sync[/]` instead." + ) + from nf_core.pipelines.sync import PipelineSync, PullRequestExceptionError, SyncExceptionError + from nf_core.utils import is_pipeline_directory + + # Check if pipeline directory contains necessary files + is_pipeline_directory(dir) + + # Sync the given pipeline dir + sync_obj = PipelineSync(dir, from_branch, pull_request, github_repository, username, template_yaml, force_pr) + try: + sync_obj.sync() + except (SyncExceptionError, PullRequestExceptionError) as e: + log.error(e) + sys.exit(1) # nf-core bump-version (deprecated) @@ -2499,10 +2571,28 @@ def bump_version(new_version, dir, nextflow): As well as the pipeline version, you can also change the required version of Nextflow. """ - log.error( + log.warning( "The `[magenta]nf-core bump-version[/]` command is deprecated. Use `[magenta]nf-core pipelines bump-version[/]` instead." ) - sys.exit(0) + from nf_core.pipelines.bump_version import bump_nextflow_version, bump_pipeline_version + from nf_core.utils import Pipeline, is_pipeline_directory + + try: + # Check if pipeline directory contains necessary files + is_pipeline_directory(dir) + + # Make a pipeline object and load config etc + pipeline_obj = Pipeline(dir) + pipeline_obj._load() + + # Bump the pipeline version number + if not nextflow: + bump_pipeline_version(pipeline_obj, new_version) + else: + bump_nextflow_version(pipeline_obj, new_version) + except UserWarning as e: + log.error(e) + sys.exit(1) # nf-core list (deprecated) @@ -2525,8 +2615,12 @@ def list(keywords, sort, json, show_archived): Checks the web for a list of nf-core pipelines with their latest releases. Shows which nf-core pipelines you have pulled locally and whether they are up to date. """ - log.error("The `[magenta]nf-core list[/]` command is deprecated. Use `[magenta]nf-core pipelines list[/]` instead.") - sys.exit(0) + log.warning( + "The `[magenta]nf-core list[/]` command is deprecated. 
Use `[magenta]nf-core pipelines list[/]` instead." + ) + from nf_core.pipelines.list import list_workflows + + stdout.print(list_workflows(keywords, sort, json, show_archived)) # nf-core launch (deprecated) @@ -2600,10 +2694,24 @@ def launch( Run using a remote pipeline name (such as GitHub `user/repo` or a URL), a local pipeline directory or an ID from the nf-core web launch tool. """ - log.error( + log.warning( "The `[magenta]nf-core launch[/]` command is deprecated. Use `[magenta]nf-core pipelines launch[/]` instead." ) - sys.exit(0) + from nf_core.pipelines.launch import Launch + + launcher = Launch( + pipeline, + revision, + command_only, + params_in, + params_out, + save_all, + show_hidden, + url, + id, + ) + if not launcher.launch_pipeline(): + sys.exit(1) # nf-core create-params-file (deprecated) @@ -2639,10 +2747,13 @@ def create_params_file(pipeline, revision, output, force, show_hidden): Run using a remote pipeline name (such as GitHub `user/repo` or a URL), a local pipeline directory. """ - log.error( + log.warning( "The `[magenta]nf-core create-params-file[/]` command is deprecated. Use `[magenta]nf-core pipelines create-params-file[/]` instead." ) - sys.exit(0) + builder = ParamsFileBuilder(pipeline, revision) + + if not builder.write_params_file(output, show_hidden=show_hidden, force=force): + sys.exit(1) # nf-core download (deprecated) @@ -2742,10 +2853,30 @@ def download( Collects all files in a single archive and configures the downloaded workflow to use relative paths to the configs and singularity images. """ - log.error( + log.warning( "The `[magenta]nf-core download[/]` command is deprecated. Use `[magenta]nf-core pipelines download[/]` instead." ) - sys.exit(0) + from nf_core.pipelines.download import DownloadWorkflow + + if tower: + log.warning("[red]The `-t` / `--tower` flag is deprecated. 
Please use `--platform` instead.[/]") + + dl = DownloadWorkflow( + pipeline, + revision, + outdir, + compress, + force, + tower or platform, # True if either specified + download_configuration, + tag, + container_system, + container_library, + container_cache_utilisation, + container_cache_index, + parallel_downloads, + ) + dl.download_workflow() # nf-core lint (deprecated) @@ -2828,8 +2959,45 @@ def lint( You can ignore tests using a file called [blue].nf-core.yml[/] [i](if you have a good reason!)[/]. See the documentation for details. """ - log.error("The `[magenta]nf-core lint[/]` command is deprecated. Use `[magenta]nf-core pipelines lint[/]` instead.") - sys.exit(0) + log.warning( + "The `[magenta]nf-core lint[/]` command is deprecated. Use `[magenta]nf-core pipelines lint[/]` instead." + ) + from nf_core.pipelines.lint import run_linting + from nf_core.utils import is_pipeline_directory + + # Check if pipeline directory is a pipeline + try: + is_pipeline_directory(dir) + except UserWarning as e: + log.error(e) + sys.exit(1) + + # Run the lint tests! 
+ try: + lint_obj, module_lint_obj, subworkflow_lint_obj = run_linting( + dir, + release, + fix, + key, + show_passed, + fail_ignored, + fail_warned, + sort_by, + markdown, + json, + ctx.obj["hide_progress"], + ) + swf_failed = 0 + if subworkflow_lint_obj is not None: + swf_failed = len(subworkflow_lint_obj.failed) + if len(lint_obj.failed) + len(module_lint_obj.failed) + swf_failed > 0: + sys.exit(1) + except AssertionError as e: + log.critical(e) + sys.exit(1) + except UserWarning as e: + log.error(e) + sys.exit(1) # nf-core create (deprecated) @@ -2847,7 +3015,13 @@ def lint( @click.option("-o", "--outdir", help="Output directory for new pipeline (default: pipeline name)") @click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") @click.option("--plain", is_flag=True, help="Use the standard nf-core template") -def create(name, description, author, version, force, outdir, template_yaml, plain): +@click.option( + "--organisation", + type=str, + default="nf-core", + help="The name of the GitHub organisation where the pipeline will be hosted (default: nf-core)", +) +def create(name, description, author, version, force, outdir, template_yaml, plain, organisation): """ DEPRECATED Create a new pipeline using the nf-core template. @@ -2855,10 +3029,41 @@ def create(name, description, author, version, force, outdir, template_yaml, pla Uses the nf-core template to make a skeleton Nextflow pipeline with all required files, boilerplate code and best-practices. """ - log.error( + log.warning( "The `[magenta]nf-core create[/]` command is deprecated. Use `[magenta]nf-core pipelines create[/]` instead." 
) - sys.exit(0) + from nf_core.pipelines.create import PipelineCreateApp + from nf_core.pipelines.create.create import PipelineCreate + + if (name and description and author) or (template_yaml): + # If all command arguments are used, run without the interactive interface + try: + create_obj = PipelineCreate( + name, + description, + author, + version=version, + force=force, + outdir=outdir, + template_config=template_yaml, + organisation=organisation, + ) + create_obj.init_pipeline() + except UserWarning as e: + log.error(e) + sys.exit(1) + elif name or description or author or version != "1.0.0dev" or force or outdir or organisation != "nf-core": + log.error( + "[red]Partial arguments supplied.[/] " + "Run without [i]any[/] arguments for an interactive interface, " + "or with at least name + description + author to use non-interactively." + ) + sys.exit(1) + else: + log.info("Launching interactive nf-core pipeline creation tool.") + app = PipelineCreateApp() + app.run() + sys.exit(app.return_code or 0) # Main script is being run - launch the CLI From 31b3663e2137b849c53f646190052efdb08000f7 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Tue, 25 Jun 2024 10:57:35 +0000 Subject: [PATCH 206/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index c06d6017d..008c75584 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -34,6 +34,7 @@ - Update python:3.12-slim Docker digest to e3ae8cf ([#3020](https://github.com/nf-core/tools/pull/3020)) - Update python:3.12-slim Docker digest to 2fba8e7 ([#3023](https://github.com/nf-core/tools/pull/3023)) - Update pre-commit hook astral-sh/ruff-pre-commit to v0.4.10 ([#3031](https://github.com/nf-core/tools/pull/3031)) +- Add warning deprecation message to top-level commands ([#3036](https://github.com/nf-core/tools/pull/3036)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From 
44f8b213d0194259553ed12daa6d90f2e8fed203 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Tue, 25 Jun 2024 11:14:15 +0000 Subject: [PATCH 207/737] Update pre-commit hook pre-commit/mirrors-mypy to v1.10.1 --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4360c7fd7..34856956c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -19,7 +19,7 @@ repos: alias: ec - repo: https://github.com/pre-commit/mirrors-mypy - rev: "v1.10.0" + rev: "v1.10.1" hooks: - id: mypy additional_dependencies: From ee2241a4e763239c3d45f343f7d5e930c8f1561f Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Tue, 25 Jun 2024 14:52:57 +0200 Subject: [PATCH 208/737] Replace check_max with resourceLimits --- CHANGELOG.md | 1 + nf_core/pipeline-template/conf/base.config | 40 +++++++++++++--------- nf_core/pipeline-template/nextflow.config | 33 ------------------ 3 files changed, 24 insertions(+), 50 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 008c75584..c65b999a5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -42,6 +42,7 @@ - Don't cache pip in `linting.yml` ([#2961](https://github.com/nf-core/tools/pull/2961)) - Lint pipelines with the nf-core template version and post comment if it is outdated ([#2978](https://github.com/nf-core/tools/pull/2978)) +- Replaces the old custom `check_max()` function with the Nextflow native `resourceLimits` directive ([]()) ### General diff --git a/nf_core/pipeline-template/conf/base.config b/nf_core/pipeline-template/conf/base.config index 9c62bf063..72a912fe4 100644 --- a/nf_core/pipeline-template/conf/base.config +++ b/nf_core/pipeline-template/conf/base.config @@ -11,9 +11,15 @@ process { // TODO nf-core: Check the defaults for all processes - cpus = { check_max( 1 * task.attempt, 'cpus' ) } - memory = { check_max( 6.GB * task.attempt, 'memory' ) } - time = { 
check_max( 4.h * task.attempt, 'time' ) } + cpus = { 1 * task.attempt } + memory = { 6.GB * task.attempt } + time = { 4.h * task.attempt } + + resourceLimits = [ + cpus: params.max_cpus, + memory: params.max_memory, + time: params.max_time + ] errorStrategy = { task.exitStatus in ((130..145) + 104) ? 'retry' : 'finish' } maxRetries = 1 @@ -27,30 +33,30 @@ process { // TODO nf-core: Customise requirements for specific processes. // See https://www.nextflow.io/docs/latest/config.html#config-process-selectors withLabel:process_single { - cpus = { check_max( 1 , 'cpus' ) } - memory = { check_max( 6.GB * task.attempt, 'memory' ) } - time = { check_max( 4.h * task.attempt, 'time' ) } + cpus = { 1 } + memory = { 6.GB * task.attempt } + time = { 4.h * task.attempt } } withLabel:process_low { - cpus = { check_max( 2 * task.attempt, 'cpus' ) } - memory = { check_max( 12.GB * task.attempt, 'memory' ) } - time = { check_max( 4.h * task.attempt, 'time' ) } + cpus = { 2 * task.attempt } + memory = { 12.GB * task.attempt } + time = { 4.h * task.attempt } } withLabel:process_medium { - cpus = { check_max( 6 * task.attempt, 'cpus' ) } - memory = { check_max( 36.GB * task.attempt, 'memory' ) } - time = { check_max( 8.h * task.attempt, 'time' ) } + cpus = { 6 * task.attempt } + memory = { 36.GB * task.attempt } + time = { 8.h * task.attempt } } withLabel:process_high { - cpus = { check_max( 12 * task.attempt, 'cpus' ) } - memory = { check_max( 72.GB * task.attempt, 'memory' ) } - time = { check_max( 16.h * task.attempt, 'time' ) } + cpus = { 12 * task.attempt } + memory = { 72.GB * task.attempt } + time = { 16.h * task.attempt } } withLabel:process_long { - time = { check_max( 20.h * task.attempt, 'time' ) } + time = { 20.h * task.attempt } } withLabel:process_high_memory { - memory = { check_max( 200.GB * task.attempt, 'memory' ) } + memory = { 200.GB * task.attempt } } withLabel:error_ignore { errorStrategy = 'ignore' diff --git a/nf_core/pipeline-template/nextflow.config 
b/nf_core/pipeline-template/nextflow.config index 2e6a56b00..0316c2e0c 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -265,36 +265,3 @@ manifest { // Load modules.config for DSL2 module specific options includeConfig 'conf/modules.config' - -// Function to ensure that resource requirements don't go beyond -// a maximum limit -def check_max(obj, type) { - if (type == 'memory') { - try { - if (obj.compareTo(params.max_memory as nextflow.util.MemoryUnit) == 1) - return params.max_memory as nextflow.util.MemoryUnit - else - return obj - } catch (all) { - println " ### ERROR ### Max memory '${params.max_memory}' is not valid! Using default value: $obj" - return obj - } - } else if (type == 'time') { - try { - if (obj.compareTo(params.max_time as nextflow.util.Duration) == 1) - return params.max_time as nextflow.util.Duration - else - return obj - } catch (all) { - println " ### ERROR ### Max time '${params.max_time}' is not valid! Using default value: $obj" - return obj - } - } else if (type == 'cpus') { - try { - return Math.min( obj, params.max_cpus as int ) - } catch (all) { - println " ### ERROR ### Max cpus '${params.max_cpus}' is not valid! Using default value: $obj" - return obj - } - } -} From ce49baddf0a39478d1d5483d5cfeac56c32e1e77 Mon Sep 17 00:00:00 2001 From: "James A. 
Fellows Yates" Date: Tue, 25 Jun 2024 14:55:43 +0200 Subject: [PATCH 209/737] Update CHANGELOG.md --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c65b999a5..60808610d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -42,7 +42,7 @@ - Don't cache pip in `linting.yml` ([#2961](https://github.com/nf-core/tools/pull/2961)) - Lint pipelines with the nf-core template version and post comment if it is outdated ([#2978](https://github.com/nf-core/tools/pull/2978)) -- Replaces the old custom `check_max()` function with the Nextflow native `resourceLimits` directive ([]()) +- Replaces the old custom `check_max()` function with the Nextflow native `resourceLimits` directive ([#3037](https://github.com/nf-core/tools/pull/3037)) ### General From c8abbef61080458001fe8eb590a26a05c627cea1 Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Tue, 25 Jun 2024 15:13:28 +0200 Subject: [PATCH 210/737] Code alignment, and try to get test to run on GHA Actions runner --- .github/workflows/create-test-wf.yml | 2 +- nf_core/pipeline-template/conf/base.config | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/create-test-wf.yml b/.github/workflows/create-test-wf.yml index a95a47745..cb2aad267 100644 --- a/.github/workflows/create-test-wf.yml +++ b/.github/workflows/create-test-wf.yml @@ -75,7 +75,7 @@ jobs: pwd # echo content of current directory ls -la - nextflow run nf-core-testpipeline -profile test,self_hosted_runner --outdir ./results + nextflow run nf-core-testpipeline -profile test,self_hosted_runner --outdir ./results --max_cpus 4 - name: Upload log file artifact if: ${{ always() }} diff --git a/nf_core/pipeline-template/conf/base.config b/nf_core/pipeline-template/conf/base.config index 72a912fe4..fb3a8456b 100644 --- a/nf_core/pipeline-template/conf/base.config +++ b/nf_core/pipeline-template/conf/base.config @@ -16,9 +16,9 @@ process { time = { 4.h * task.attempt } 
resourceLimits = [ - cpus: params.max_cpus, + cpus: params.max_cpus, memory: params.max_memory, - time: params.max_time + time: params.max_time ] errorStrategy = { task.exitStatus in ((130..145) + 104) ? 'retry' : 'finish' } From fc4de3324111f2000afbc5ac4827f61388600453 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 29 May 2024 11:42:57 +0200 Subject: [PATCH 211/737] mock github cretentials for snapshot tests --- tests/__snapshots__/test_create_app.ambr | 263 +++++++++++------------ tests/test_create_app.py | 14 +- 2 files changed, 139 insertions(+), 138 deletions(-) diff --git a/tests/__snapshots__/test_create_app.ambr b/tests/__snapshots__/test_create_app.ambr index ed7bf8b25..4036695bb 100644 --- a/tests/__snapshots__/test_create_app.ambr +++ b/tests/__snapshots__/test_create_app.ambr @@ -1408,260 +1408,259 @@ font-weight: 700; } - .terminal-3893066652-matrix { + .terminal-2925039714-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3893066652-title { + .terminal-2925039714-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3893066652-r1 { fill: #c5c8c6 } - .terminal-3893066652-r2 { fill: #e3e3e3 } - .terminal-3893066652-r3 { fill: #989898 } - .terminal-3893066652-r4 { fill: #e1e1e1 } - .terminal-3893066652-r5 { fill: #4ebf71;font-weight: bold } - .terminal-3893066652-r6 { fill: #a5a5a5;font-style: italic; } - .terminal-3893066652-r7 { fill: #1e1e1e } - .terminal-3893066652-r8 { fill: #008139 } - .terminal-3893066652-r9 { fill: #454a50 } - .terminal-3893066652-r10 { fill: #787878 } - .terminal-3893066652-r11 { fill: #e2e2e2 } - .terminal-3893066652-r12 { fill: #e2e3e3;font-weight: bold } - .terminal-3893066652-r13 { fill: #000000 } - .terminal-3893066652-r14 { fill: #b93c5b } - .terminal-3893066652-r15 { fill: #18954b } - .terminal-3893066652-r16 { fill: #e2e2e2;font-weight: bold } - .terminal-3893066652-r17 { fill: #969696;font-weight: bold } - 
.terminal-3893066652-r18 { fill: #808080 } - .terminal-3893066652-r19 { fill: #7ae998 } - .terminal-3893066652-r20 { fill: #507bb3 } - .terminal-3893066652-r21 { fill: #0a180e;font-weight: bold } - .terminal-3893066652-r22 { fill: #dde6ed;font-weight: bold } - .terminal-3893066652-r23 { fill: #001541 } - .terminal-3893066652-r24 { fill: #fea62b;font-weight: bold } - .terminal-3893066652-r25 { fill: #a7a9ab } - .terminal-3893066652-r26 { fill: #e2e3e3 } + .terminal-2925039714-r1 { fill: #c5c8c6 } + .terminal-2925039714-r2 { fill: #e3e3e3 } + .terminal-2925039714-r3 { fill: #989898 } + .terminal-2925039714-r4 { fill: #e1e1e1 } + .terminal-2925039714-r5 { fill: #4ebf71;font-weight: bold } + .terminal-2925039714-r6 { fill: #a5a5a5;font-style: italic; } + .terminal-2925039714-r7 { fill: #1e1e1e } + .terminal-2925039714-r8 { fill: #008139 } + .terminal-2925039714-r9 { fill: #454a50 } + .terminal-2925039714-r10 { fill: #e2e2e2 } + .terminal-2925039714-r11 { fill: #e2e3e3;font-weight: bold } + .terminal-2925039714-r12 { fill: #000000 } + .terminal-2925039714-r13 { fill: #b93c5b } + .terminal-2925039714-r14 { fill: #18954b } + .terminal-2925039714-r15 { fill: #e2e2e2;font-weight: bold } + .terminal-2925039714-r16 { fill: #969696;font-weight: bold } + .terminal-2925039714-r17 { fill: #808080 } + .terminal-2925039714-r18 { fill: #7ae998 } + .terminal-2925039714-r19 { fill: #507bb3 } + .terminal-2925039714-r20 { fill: #0a180e;font-weight: bold } + .terminal-2925039714-r21 { fill: #dde6ed;font-weight: bold } + .terminal-2925039714-r22 { fill: #001541 } + .terminal-2925039714-r23 { fill: #fea62b;font-weight: bold } + .terminal-2925039714-r24 { fill: #a7a9ab } + .terminal-2925039714-r25 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline 
with the nf-core pipeline template - - - Create GitHub repository - - Now that we have created a new pipeline locally, we can create a new GitHub repository and push  - the code to it. - - - - Your GitHub usernameYour GitHub personal access token - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔for login.▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - GitHub username▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔Show - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁GitHub token▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - The name of the organisation where the The name of the new GitHub repository - GitHub repo will be cretaed▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔mypipeline - nf-core▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - ⚠️ You can't create a repository directly in the nf-core organisation. - Please create the pipeline repo to an organisation where you have access or use your user  - account. A core-team member will be able to transfer the repo to nf-core once the development - has started. - - 💡 Your GitHub user account will be used by default if nf-core is given as the org name. - - - ▔▔▔▔▔▔▔▔Private - Select to make the new GitHub repo private. - ▁▁▁▁▁▁▁▁ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackCreate GitHub repoFinish without creating a repo - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - -  d Toggle dark mode q Quit + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Create GitHub repository + + Now that we have created a new pipeline locally, we can create a new GitHub repository and push  + the code to it. 
+ + + + Your GitHub usernameYour GitHub personal access token + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔for login.▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + GitHub username▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔Show + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁••••••••••••▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + The name of the organisation where the The name of the new GitHub repository + GitHub repo will be cretaed▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔mypipeline + nf-core▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + ⚠️ You can't create a repository directly in the nf-core organisation. + Please create the pipeline repo to an organisation where you have access or use your user  + account. A core-team member will be able to transfer the repo to nf-core once the development + has started. + + 💡 Your GitHub user account will be used by default if nf-core is given as the org name. + + + ▔▔▔▔▔▔▔▔Private + Select to make the new GitHub repo private. 
+ ▁▁▁▁▁▁▁▁ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackCreate GitHub repoFinish without creating a repo + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + +  d Toggle dark mode q Quit diff --git a/tests/test_create_app.py b/tests/test_create_app.py index f01ea5b6b..980939f48 100644 --- a/tests/test_create_app.py +++ b/tests/test_create_app.py @@ -1,5 +1,7 @@ """Test Pipeline Create App""" +from unittest import mock + from nf_core.pipelines.create import PipelineCreateApp @@ -216,7 +218,8 @@ async def run_before(pilot) -> None: assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) -def test_github_details(tmpdir, snap_compare): +@mock.patch("nf_core.pipelines.create.githubrepo.GithubRepo._get_github_credentials") +def test_github_details(mock_get_github_credentials, tmpdir, snap_compare): """Test snapshot for the github_repo screen. Steps to get to this screen: screen welcome > press start > @@ -229,7 +232,10 @@ def test_github_details(tmpdir, snap_compare): """ async def run_before(pilot) -> None: - delete = ["backspace"] * 50 + mock_get_github_credentials.return_value = ( + None, + None, + ) # mock the github credentials to have consistent snapshots await pilot.click("#start") await pilot.click("#type_nfcore") await pilot.click("#name") @@ -247,10 +253,6 @@ async def run_before(pilot) -> None: await pilot.app.workers.wait_for_complete() await pilot.click("#close_screen") await pilot.click("#github_repo") - await pilot.click("#gh_username") - await pilot.press(*delete) # delete field automatically filled using github CLI - await pilot.press("tab") - await pilot.press(*delete) assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) From 997c1725e8e05ae5358f8db9f987fc0373e0a772 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Wed, 29 May 2024 09:45:37 +0000 Subject: [PATCH 212/737] 
[automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 008c75584..5cf0c7930 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -35,6 +35,7 @@ - Update python:3.12-slim Docker digest to 2fba8e7 ([#3023](https://github.com/nf-core/tools/pull/3023)) - Update pre-commit hook astral-sh/ruff-pre-commit to v0.4.10 ([#3031](https://github.com/nf-core/tools/pull/3031)) - Add warning deprecation message to top-level commands ([#3036](https://github.com/nf-core/tools/pull/3036)) +- Create: Mock git cretentials to generate stable textual snapshots ([#3007](https://github.com/nf-core/tools/pull/3007)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From 75a42480df66d891b3e58ef966d4506ca88a807d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Tue, 25 Jun 2024 09:59:01 +0000 Subject: [PATCH 213/737] update snapshot --- tests/__snapshots__/test_create_app.ambr | 1272 ++++++++--------- .../pytest-0/test_github_details0 | 2 +- .../pytest-0/test_github_exit_message0 | 2 +- .../pytest-0/test_github_question0 | 2 +- .../pytest-1/test_github_details0 | 1 + .../pytest-1/test_github_exit_message0 | 1 + .../pytest-1/test_github_question0 | 1 + 7 files changed, 640 insertions(+), 641 deletions(-) create mode 160000 tmp/pytest-of-gitpod/pytest-1/test_github_details0 create mode 160000 tmp/pytest-of-gitpod/pytest-1/test_github_exit_message0 create mode 160000 tmp/pytest-of-gitpod/pytest-1/test_github_question0 diff --git a/tests/__snapshots__/test_create_app.ambr b/tests/__snapshots__/test_create_app.ambr index 4036695bb..9aa2da2a1 100644 --- a/tests/__snapshots__/test_create_app.ambr +++ b/tests/__snapshots__/test_create_app.ambr @@ -22,254 +22,253 @@ font-weight: 700; } - .terminal-3833894853-matrix { + .terminal-2299698092-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: 
full-width; } - .terminal-3833894853-title { + .terminal-2299698092-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3833894853-r1 { fill: #c5c8c6 } - .terminal-3833894853-r2 { fill: #e3e3e3 } - .terminal-3833894853-r3 { fill: #989898 } - .terminal-3833894853-r4 { fill: #e1e1e1 } - .terminal-3833894853-r5 { fill: #4ebf71;font-weight: bold } - .terminal-3833894853-r6 { fill: #a5a5a5;font-style: italic; } - .terminal-3833894853-r7 { fill: #1e1e1e } - .terminal-3833894853-r8 { fill: #008139 } - .terminal-3833894853-r9 { fill: #121212 } - .terminal-3833894853-r10 { fill: #e2e2e2 } - .terminal-3833894853-r11 { fill: #787878 } - .terminal-3833894853-r12 { fill: #b93c5b } - .terminal-3833894853-r13 { fill: #454a50 } - .terminal-3833894853-r14 { fill: #7ae998 } - .terminal-3833894853-r15 { fill: #e2e3e3;font-weight: bold } - .terminal-3833894853-r16 { fill: #0a180e;font-weight: bold } - .terminal-3833894853-r17 { fill: #000000 } - .terminal-3833894853-r18 { fill: #fea62b;font-weight: bold } - .terminal-3833894853-r19 { fill: #a7a9ab } - .terminal-3833894853-r20 { fill: #e2e3e3 } + .terminal-2299698092-r1 { fill: #c5c8c6 } + .terminal-2299698092-r2 { fill: #e3e3e3 } + .terminal-2299698092-r3 { fill: #989898 } + .terminal-2299698092-r4 { fill: #e1e1e1 } + .terminal-2299698092-r5 { fill: #4ebf71;font-weight: bold } + .terminal-2299698092-r6 { fill: #a5a5a5;font-style: italic; } + .terminal-2299698092-r7 { fill: #1e1e1e } + .terminal-2299698092-r8 { fill: #008139 } + .terminal-2299698092-r9 { fill: #121212 } + .terminal-2299698092-r10 { fill: #e2e2e2 } + .terminal-2299698092-r11 { fill: #787878 } + .terminal-2299698092-r12 { fill: #454a50 } + .terminal-2299698092-r13 { fill: #7ae998 } + .terminal-2299698092-r14 { fill: #e2e3e3;font-weight: bold } + .terminal-2299698092-r15 { fill: #0a180e;font-weight: bold } + .terminal-2299698092-r16 { fill: #000000 } + .terminal-2299698092-r17 { fill: #fea62b;font-weight: bold } + .terminal-2299698092-r18 { fill: 
#a7a9ab } + .terminal-2299698092-r19 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Basic details - - - - GitHub organisationWorkflow name - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - nf-corePipeline Name - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - A short description of your pipeline. - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Description - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - Name of the main author / authors - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Author(s) - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackNext - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - - -  d Toggle dark mode q Quit + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Basic details + + + + GitHub organisationWorkflow name + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + nf-corePipeline Name + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + A short description of your pipeline. 
+ ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Description + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Name of the main author / authors + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Author(s) + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackNext + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + + + + + +  d Toggle dark mode q Quit @@ -299,257 +298,256 @@ font-weight: 700; } - .terminal-170499771-matrix { + .terminal-4102136482-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-170499771-title { + .terminal-4102136482-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-170499771-r1 { fill: #c5c8c6 } - .terminal-170499771-r2 { fill: #e3e3e3 } - .terminal-170499771-r3 { fill: #989898 } - .terminal-170499771-r4 { fill: #e1e1e1 } - .terminal-170499771-r5 { fill: #4ebf71;font-weight: bold } - .terminal-170499771-r6 { fill: #a5a5a5;font-style: italic; } - .terminal-170499771-r7 { fill: #1e1e1e } - .terminal-170499771-r8 { fill: #0f4e2a } - .terminal-170499771-r9 { fill: #0178d4 } - .terminal-170499771-r10 { fill: #a7a7a7 } - .terminal-170499771-r11 { fill: #787878 } - .terminal-170499771-r12 { fill: #e2e2e2 } - .terminal-170499771-r13 { fill: #b93c5b } - .terminal-170499771-r14 { fill: #121212 } - .terminal-170499771-r15 { fill: #454a50 } - .terminal-170499771-r16 { fill: #7ae998 } - .terminal-170499771-r17 { fill: #e2e3e3;font-weight: bold } - .terminal-170499771-r18 { fill: #0a180e;font-weight: bold } - .terminal-170499771-r19 { fill: #000000 } - .terminal-170499771-r20 { fill: #008139 } - .terminal-170499771-r21 { fill: #fea62b;font-weight: bold } - .terminal-170499771-r22 
{ fill: #a7a9ab } - .terminal-170499771-r23 { fill: #e2e3e3 } + .terminal-4102136482-r1 { fill: #c5c8c6 } + .terminal-4102136482-r2 { fill: #e3e3e3 } + .terminal-4102136482-r3 { fill: #989898 } + .terminal-4102136482-r4 { fill: #e1e1e1 } + .terminal-4102136482-r5 { fill: #4ebf71;font-weight: bold } + .terminal-4102136482-r6 { fill: #a5a5a5;font-style: italic; } + .terminal-4102136482-r7 { fill: #1e1e1e } + .terminal-4102136482-r8 { fill: #0f4e2a } + .terminal-4102136482-r9 { fill: #0178d4 } + .terminal-4102136482-r10 { fill: #a7a7a7 } + .terminal-4102136482-r11 { fill: #787878 } + .terminal-4102136482-r12 { fill: #e2e2e2 } + .terminal-4102136482-r13 { fill: #121212 } + .terminal-4102136482-r14 { fill: #454a50 } + .terminal-4102136482-r15 { fill: #7ae998 } + .terminal-4102136482-r16 { fill: #e2e3e3;font-weight: bold } + .terminal-4102136482-r17 { fill: #0a180e;font-weight: bold } + .terminal-4102136482-r18 { fill: #000000 } + .terminal-4102136482-r19 { fill: #008139 } + .terminal-4102136482-r20 { fill: #fea62b;font-weight: bold } + .terminal-4102136482-r21 { fill: #a7a9ab } + .terminal-4102136482-r22 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Basic details - - - - GitHub organisationWorkflow name - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - nf-corePipeline Name - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - A short description of your pipeline. 
- ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Description - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - Name of the main author / authors - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Author(s) - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackNext - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - - -  d Toggle dark mode q Quit + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Basic details + + + + GitHub organisationWorkflow name + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + nf-corePipeline Name + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + A short description of your pipeline. 
+ ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Description + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Name of the main author / authors + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Author(s) + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackNext + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + + + + + +  d Toggle dark mode q Quit @@ -1133,252 +1131,251 @@ font-weight: 700; } - .terminal-1869771697-matrix { + .terminal-1422742483-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1869771697-title { + .terminal-1422742483-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1869771697-r1 { fill: #c5c8c6 } - .terminal-1869771697-r2 { fill: #e3e3e3 } - .terminal-1869771697-r3 { fill: #989898 } - .terminal-1869771697-r4 { fill: #e1e1e1 } - .terminal-1869771697-r5 { fill: #4ebf71;font-weight: bold } - .terminal-1869771697-r6 { fill: #a5a5a5;font-style: italic; } - .terminal-1869771697-r7 { fill: #1e1e1e } - .terminal-1869771697-r8 { fill: #008139 } - .terminal-1869771697-r9 { fill: #e2e2e2 } - .terminal-1869771697-r10 { fill: #b93c5b } - .terminal-1869771697-r11 { fill: #454a50 } - .terminal-1869771697-r12 { fill: #7ae998 } - .terminal-1869771697-r13 { fill: #e2e3e3;font-weight: bold } - .terminal-1869771697-r14 { fill: #0a180e;font-weight: bold } - .terminal-1869771697-r15 { fill: #000000 } - .terminal-1869771697-r16 { fill: #fea62b;font-weight: bold } - .terminal-1869771697-r17 { fill: #a7a9ab } - .terminal-1869771697-r18 { fill: #e2e3e3 } + .terminal-1422742483-r1 { fill: #c5c8c6 } + .terminal-1422742483-r2 { fill: #e3e3e3 } + .terminal-1422742483-r3 { fill: #989898 } + 
.terminal-1422742483-r4 { fill: #e1e1e1 } + .terminal-1422742483-r5 { fill: #4ebf71;font-weight: bold } + .terminal-1422742483-r6 { fill: #a5a5a5;font-style: italic; } + .terminal-1422742483-r7 { fill: #1e1e1e } + .terminal-1422742483-r8 { fill: #008139 } + .terminal-1422742483-r9 { fill: #e2e2e2 } + .terminal-1422742483-r10 { fill: #454a50 } + .terminal-1422742483-r11 { fill: #7ae998 } + .terminal-1422742483-r12 { fill: #e2e3e3;font-weight: bold } + .terminal-1422742483-r13 { fill: #0a180e;font-weight: bold } + .terminal-1422742483-r14 { fill: #000000 } + .terminal-1422742483-r15 { fill: #fea62b;font-weight: bold } + .terminal-1422742483-r16 { fill: #a7a9ab } + .terminal-1422742483-r17 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Final details - - - - First version of the pipelinePath to the output directory where the pipeline  - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔will be created - 1.0.0dev▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁. - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackFinish - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -  d Toggle dark mode q Quit + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Final details + + + + First version of the pipelinePath to the output directory where the pipeline  + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔will be created + 1.0.0dev▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁. 
+ ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackFinish + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +  d Toggle dark mode q Quit @@ -1408,259 +1405,258 @@ font-weight: 700; } - .terminal-2925039714-matrix { + .terminal-4244045401-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2925039714-title { + .terminal-4244045401-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2925039714-r1 { fill: #c5c8c6 } - .terminal-2925039714-r2 { fill: #e3e3e3 } - .terminal-2925039714-r3 { fill: #989898 } - .terminal-2925039714-r4 { fill: #e1e1e1 } - .terminal-2925039714-r5 { fill: #4ebf71;font-weight: bold } - .terminal-2925039714-r6 { fill: #a5a5a5;font-style: italic; } - .terminal-2925039714-r7 { fill: #1e1e1e } - .terminal-2925039714-r8 { fill: #008139 } - .terminal-2925039714-r9 { fill: #454a50 } - .terminal-2925039714-r10 { fill: #e2e2e2 } - .terminal-2925039714-r11 { fill: #e2e3e3;font-weight: bold } - .terminal-2925039714-r12 { fill: #000000 } - .terminal-2925039714-r13 { fill: #b93c5b } - .terminal-2925039714-r14 { fill: #18954b } - .terminal-2925039714-r15 { fill: #e2e2e2;font-weight: bold } - .terminal-2925039714-r16 { fill: #969696;font-weight: bold } - .terminal-2925039714-r17 { fill: #808080 } - .terminal-2925039714-r18 { fill: #7ae998 } - .terminal-2925039714-r19 { fill: #507bb3 } - .terminal-2925039714-r20 { fill: #0a180e;font-weight: bold } - .terminal-2925039714-r21 { fill: #dde6ed;font-weight: bold } - .terminal-2925039714-r22 { fill: #001541 } - .terminal-2925039714-r23 { fill: #fea62b;font-weight: bold } - .terminal-2925039714-r24 { fill: #a7a9ab } - .terminal-2925039714-r25 { fill: #e2e3e3 } + .terminal-4244045401-r1 { fill: #c5c8c6 } + .terminal-4244045401-r2 { fill: #e3e3e3 } + .terminal-4244045401-r3 { fill: #989898 } + .terminal-4244045401-r4 { 
fill: #e1e1e1 } + .terminal-4244045401-r5 { fill: #4ebf71;font-weight: bold } + .terminal-4244045401-r6 { fill: #a5a5a5;font-style: italic; } + .terminal-4244045401-r7 { fill: #1e1e1e } + .terminal-4244045401-r8 { fill: #008139 } + .terminal-4244045401-r9 { fill: #454a50 } + .terminal-4244045401-r10 { fill: #e2e2e2 } + .terminal-4244045401-r11 { fill: #e2e3e3;font-weight: bold } + .terminal-4244045401-r12 { fill: #000000 } + .terminal-4244045401-r13 { fill: #18954b } + .terminal-4244045401-r14 { fill: #e2e2e2;font-weight: bold } + .terminal-4244045401-r15 { fill: #969696;font-weight: bold } + .terminal-4244045401-r16 { fill: #808080 } + .terminal-4244045401-r17 { fill: #7ae998 } + .terminal-4244045401-r18 { fill: #507bb3 } + .terminal-4244045401-r19 { fill: #0a180e;font-weight: bold } + .terminal-4244045401-r20 { fill: #dde6ed;font-weight: bold } + .terminal-4244045401-r21 { fill: #001541 } + .terminal-4244045401-r22 { fill: #fea62b;font-weight: bold } + .terminal-4244045401-r23 { fill: #a7a9ab } + .terminal-4244045401-r24 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Create GitHub repository - - Now that we have created a new pipeline locally, we can create a new GitHub repository and push  - the code to it. 
- - - - Your GitHub usernameYour GitHub personal access token - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔for login.▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - GitHub username▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔Show - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁••••••••••••▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - The name of the organisation where the The name of the new GitHub repository - GitHub repo will be cretaed▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔mypipeline - nf-core▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - ⚠️ You can't create a repository directly in the nf-core organisation. - Please create the pipeline repo to an organisation where you have access or use your user  - account. A core-team member will be able to transfer the repo to nf-core once the development - has started. - - 💡 Your GitHub user account will be used by default if nf-core is given as the org name. - - - ▔▔▔▔▔▔▔▔Private - Select to make the new GitHub repo private. - ▁▁▁▁▁▁▁▁ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackCreate GitHub repoFinish without creating a repo - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - -  d Toggle dark mode q Quit + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Create GitHub repository + + Now that we have created a new pipeline locally, we can create a new GitHub repository and push  + the code to it. 
+ + + + Your GitHub usernameYour GitHub personal access token + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔for login.▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + GitHub username▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔Show + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁••••••••••••▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + The name of the organisation where the The name of the new GitHub repository + GitHub repo will be cretaed▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔mypipeline + nf-core▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + ⚠️ You can't create a repository directly in the nf-core organisation. + Please create the pipeline repo to an organisation where you have access or use your user  + account. A core-team member will be able to transfer the repo to nf-core once the development + has started. + + 💡 Your GitHub user account will be used by default if nf-core is given as the org name. + + + ▔▔▔▔▔▔▔▔Private + Select to make the new GitHub repo private. 
+ ▁▁▁▁▁▁▁▁ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackCreate GitHub repoFinish without creating a repo + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + +  d Toggle dark mode q Quit @@ -2791,256 +2787,256 @@ font-weight: 700; } - .terminal-2625911002-matrix { + .terminal-496814773-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2625911002-title { + .terminal-496814773-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2625911002-r1 { fill: #c5c8c6 } - .terminal-2625911002-r2 { fill: #e3e3e3 } - .terminal-2625911002-r3 { fill: #989898 } - .terminal-2625911002-r4 { fill: #e1e1e1 } - .terminal-2625911002-r5 { fill: #4ebf71;font-weight: bold } - .terminal-2625911002-r6 { fill: #a5a5a5;font-style: italic; } - .terminal-2625911002-r7 { fill: #1e1e1e } - .terminal-2625911002-r8 { fill: #0f4e2a } - .terminal-2625911002-r9 { fill: #7b3042 } - .terminal-2625911002-r10 { fill: #a7a7a7 } - .terminal-2625911002-r11 { fill: #787878 } - .terminal-2625911002-r12 { fill: #e2e2e2 } - .terminal-2625911002-r13 { fill: #b93c5b } - .terminal-2625911002-r14 { fill: #454a50 } - .terminal-2625911002-r15 { fill: #166d39 } - .terminal-2625911002-r16 { fill: #e2e3e3;font-weight: bold } - .terminal-2625911002-r17 { fill: #3c8b54;font-weight: bold } - .terminal-2625911002-r18 { fill: #000000 } - .terminal-2625911002-r19 { fill: #5aa86f } - .terminal-2625911002-r20 { fill: #fea62b;font-weight: bold } - .terminal-2625911002-r21 { fill: #a7a9ab } - .terminal-2625911002-r22 { fill: #e2e3e3 } + .terminal-496814773-r1 { fill: #c5c8c6 } + .terminal-496814773-r2 { fill: #e3e3e3 } + .terminal-496814773-r3 { fill: #989898 } + .terminal-496814773-r4 { fill: #e1e1e1 } + .terminal-496814773-r5 { fill: #4ebf71;font-weight: bold } + .terminal-496814773-r6 { fill: #a5a5a5;font-style: italic; } + .terminal-496814773-r7 { 
fill: #1e1e1e } + .terminal-496814773-r8 { fill: #0f4e2a } + .terminal-496814773-r9 { fill: #7b3042 } + .terminal-496814773-r10 { fill: #a7a7a7 } + .terminal-496814773-r11 { fill: #787878 } + .terminal-496814773-r12 { fill: #e2e2e2 } + .terminal-496814773-r13 { fill: #b93c5b } + .terminal-496814773-r14 { fill: #454a50 } + .terminal-496814773-r15 { fill: #166d39 } + .terminal-496814773-r16 { fill: #e2e3e3;font-weight: bold } + .terminal-496814773-r17 { fill: #3c8b54;font-weight: bold } + .terminal-496814773-r18 { fill: #000000 } + .terminal-496814773-r19 { fill: #5aa86f } + .terminal-496814773-r20 { fill: #fea62b;font-weight: bold } + .terminal-496814773-r21 { fill: #a7a9ab } + .terminal-496814773-r22 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Basic details - - - - GitHub organisationWorkflow name - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - nf-corePipeline Name - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - Value error, Must be lowercase without  - punctuation. - - A short description of your pipeline. - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Description - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - Value error, Cannot be left empty. - - Name of the main author / authors - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Author(s) - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - Value error, Cannot be left empty. 
- ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackNext - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - -  d Toggle dark mode q Quit + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Basic details + + + + GitHub organisationWorkflow name + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + nf-corePipeline Name + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + Value error, Must be lowercase without  + punctuation. + + A short description of your pipeline. + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Description + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + Value error, Cannot be left empty. + + Name of the main author / authors + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Author(s) + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + Value error, Cannot be left empty. 
+ ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackNext + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + +  d Toggle dark mode q Quit diff --git a/tmp/pytest-of-gitpod/pytest-0/test_github_details0 b/tmp/pytest-of-gitpod/pytest-0/test_github_details0 index 19fc648c4..ee82f320c 160000 --- a/tmp/pytest-of-gitpod/pytest-0/test_github_details0 +++ b/tmp/pytest-of-gitpod/pytest-0/test_github_details0 @@ -1 +1 @@ -Subproject commit 19fc648c4a1fc31b474b13cbfebd322ce9538b95 +Subproject commit ee82f320cb567b302e7328c1cacab94a98dae787 diff --git a/tmp/pytest-of-gitpod/pytest-0/test_github_exit_message0 b/tmp/pytest-of-gitpod/pytest-0/test_github_exit_message0 index 75465e6bd..f7fe48cf9 160000 --- a/tmp/pytest-of-gitpod/pytest-0/test_github_exit_message0 +++ b/tmp/pytest-of-gitpod/pytest-0/test_github_exit_message0 @@ -1 +1 @@ -Subproject commit 75465e6bd715b1bf9075c7efbce1f2dd38c4df37 +Subproject commit f7fe48cf9d00ab24581686a6d4226d2e9005c607 diff --git a/tmp/pytest-of-gitpod/pytest-0/test_github_question0 b/tmp/pytest-of-gitpod/pytest-0/test_github_question0 index 07281e3aa..dce3324ac 160000 --- a/tmp/pytest-of-gitpod/pytest-0/test_github_question0 +++ b/tmp/pytest-of-gitpod/pytest-0/test_github_question0 @@ -1 +1 @@ -Subproject commit 07281e3aa6aef6d8a282c1cafaf7defaef745565 +Subproject commit dce3324acbbe32f905afae8553e042f39404b37e diff --git a/tmp/pytest-of-gitpod/pytest-1/test_github_details0 b/tmp/pytest-of-gitpod/pytest-1/test_github_details0 new file mode 160000 index 000000000..e2638dca9 --- /dev/null +++ b/tmp/pytest-of-gitpod/pytest-1/test_github_details0 @@ -0,0 +1 @@ +Subproject commit e2638dca91b137a96b491008b1eeef2bfd791bec diff --git a/tmp/pytest-of-gitpod/pytest-1/test_github_exit_message0 b/tmp/pytest-of-gitpod/pytest-1/test_github_exit_message0 new file mode 160000 index 000000000..74cda800a --- /dev/null +++ b/tmp/pytest-of-gitpod/pytest-1/test_github_exit_message0 @@ -0,0 +1 @@ +Subproject commit 
74cda800a86fd07e93bbf8e00e4890516e3d838d diff --git a/tmp/pytest-of-gitpod/pytest-1/test_github_question0 b/tmp/pytest-of-gitpod/pytest-1/test_github_question0 new file mode 160000 index 000000000..444ebc537 --- /dev/null +++ b/tmp/pytest-of-gitpod/pytest-1/test_github_question0 @@ -0,0 +1 @@ +Subproject commit 444ebc5370bf91ea808f5d65c5788064f0480045 From 7c8260010aac15e844c9776d4a2a13c40f8e0834 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Wed, 26 Jun 2024 09:58:04 +0000 Subject: [PATCH 214/737] pause after clicking a button on snapshot test --- tests/__snapshots__/test_create_app.ambr | 255 +++++++++++------------ tests/test_create_app.py | 1 + 2 files changed, 128 insertions(+), 128 deletions(-) diff --git a/tests/__snapshots__/test_create_app.ambr b/tests/__snapshots__/test_create_app.ambr index 9aa2da2a1..89a5699f4 100644 --- a/tests/__snapshots__/test_create_app.ambr +++ b/tests/__snapshots__/test_create_app.ambr @@ -2787,256 +2787,255 @@ font-weight: 700; } - .terminal-496814773-matrix { + .terminal-3046427883-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-496814773-title { + .terminal-3046427883-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-496814773-r1 { fill: #c5c8c6 } - .terminal-496814773-r2 { fill: #e3e3e3 } - .terminal-496814773-r3 { fill: #989898 } - .terminal-496814773-r4 { fill: #e1e1e1 } - .terminal-496814773-r5 { fill: #4ebf71;font-weight: bold } - .terminal-496814773-r6 { fill: #a5a5a5;font-style: italic; } - .terminal-496814773-r7 { fill: #1e1e1e } - .terminal-496814773-r8 { fill: #0f4e2a } - .terminal-496814773-r9 { fill: #7b3042 } - .terminal-496814773-r10 { fill: #a7a7a7 } - .terminal-496814773-r11 { fill: #787878 } - .terminal-496814773-r12 { fill: #e2e2e2 } - .terminal-496814773-r13 { fill: #b93c5b } - .terminal-496814773-r14 { fill: #454a50 } - .terminal-496814773-r15 { fill: #166d39 } - 
.terminal-496814773-r16 { fill: #e2e3e3;font-weight: bold } - .terminal-496814773-r17 { fill: #3c8b54;font-weight: bold } - .terminal-496814773-r18 { fill: #000000 } - .terminal-496814773-r19 { fill: #5aa86f } - .terminal-496814773-r20 { fill: #fea62b;font-weight: bold } - .terminal-496814773-r21 { fill: #a7a9ab } - .terminal-496814773-r22 { fill: #e2e3e3 } + .terminal-3046427883-r1 { fill: #c5c8c6 } + .terminal-3046427883-r2 { fill: #e3e3e3 } + .terminal-3046427883-r3 { fill: #989898 } + .terminal-3046427883-r4 { fill: #e1e1e1 } + .terminal-3046427883-r5 { fill: #4ebf71;font-weight: bold } + .terminal-3046427883-r6 { fill: #a5a5a5;font-style: italic; } + .terminal-3046427883-r7 { fill: #1e1e1e } + .terminal-3046427883-r8 { fill: #0f4e2a } + .terminal-3046427883-r9 { fill: #7b3042 } + .terminal-3046427883-r10 { fill: #a7a7a7 } + .terminal-3046427883-r11 { fill: #787878 } + .terminal-3046427883-r12 { fill: #e2e2e2 } + .terminal-3046427883-r13 { fill: #b93c5b } + .terminal-3046427883-r14 { fill: #454a50 } + .terminal-3046427883-r15 { fill: #7ae998 } + .terminal-3046427883-r16 { fill: #e2e3e3;font-weight: bold } + .terminal-3046427883-r17 { fill: #000000 } + .terminal-3046427883-r18 { fill: #008139 } + .terminal-3046427883-r19 { fill: #fea62b;font-weight: bold } + .terminal-3046427883-r20 { fill: #a7a9ab } + .terminal-3046427883-r21 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Basic details - - - - GitHub organisationWorkflow name - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - nf-corePipeline Name - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - Value error, Must be 
lowercase without  - punctuation. - - A short description of your pipeline. - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Description - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - Value error, Cannot be left empty. - - Name of the main author / authors - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Author(s) - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - Value error, Cannot be left empty. - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackNext - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - -  d Toggle dark mode q Quit + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Basic details + + + + GitHub organisationWorkflow name + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + nf-corePipeline Name + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + Value error, Must be lowercase without  + punctuation. + + A short description of your pipeline. + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Description + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + Value error, Cannot be left empty. + + Name of the main author / authors + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Author(s) + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + Value error, Cannot be left empty. 
+ ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackNext + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + +  d Toggle dark mode q Quit diff --git a/tests/test_create_app.py b/tests/test_create_app.py index 980939f48..8c89b92cb 100644 --- a/tests/test_create_app.py +++ b/tests/test_create_app.py @@ -106,6 +106,7 @@ async def run_before(pilot) -> None: await pilot.click("#start") await pilot.click("#type_nfcore") await pilot.click("#next") + await pilot.pause() assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) From aec56ce5281154d6ced36ba6d4c2ce88a5c454e2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Wed, 26 Jun 2024 11:03:40 +0000 Subject: [PATCH 215/737] fix pytests --- nf_core/modules/bump_versions.py | 4 +++- .../.github/workflows/template_version_comment.yml | 3 ++- tests/test_modules.py | 4 ++-- 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/nf_core/modules/bump_versions.py b/nf_core/modules/bump_versions.py index 9b54174d5..6556dcf0f 100644 --- a/nf_core/modules/bump_versions.py +++ b/nf_core/modules/bump_versions.py @@ -241,7 +241,9 @@ def bump_module_version(self, module: NFCoreComponent) -> bool: # change version in environment.yml with open(module.environment_yml) as fh: env_yml = yaml.safe_load(fh) - re.sub(bioconda_packages[0], f"'bioconda::{bioconda_tool_name}={last_ver}'", env_yml["dependencies"]) + env_yml["dependencies"][0] = re.sub( + bioconda_packages[0], f"bioconda::{bioconda_tool_name}={last_ver}", env_yml["dependencies"][0] + ) with open(module.environment_yml, "w") as fh: yaml.dump(env_yml, fh, default_flow_style=False, Dumper=custom_yaml_dumper()) diff --git a/nf_core/pipeline-template/.github/workflows/template_version_comment.yml b/nf_core/pipeline-template/.github/workflows/template_version_comment.yml index e21283309..58db2eb63 100644 --- a/nf_core/pipeline-template/.github/workflows/template_version_comment.yml +++ 
b/nf_core/pipeline-template/.github/workflows/template_version_comment.yml @@ -1,6 +1,6 @@ name: nf-core template version comment # This workflow is triggered on PRs to check if the pipeline template version matches the latest nf-core version. -# It posts a comment to the PR, even if it comes from a fork. +# It posts a comment to the PR, even if it comes from a fork.{%- raw %} on: pull_request_target @@ -40,3 +40,4 @@ jobs: Please update your pipeline to the latest version. For more documentation on how to update your pipeline, please see the [nf-core documentation](https://github.com/nf-core/tools?tab=readme-ov-file#sync-a-pipeline-with-the-template) and [Synchronisation documentation](https://nf-co.re/docs/contributing/sync). + #{%- endraw %} diff --git a/tests/test_modules.py b/tests/test_modules.py index f353a7edf..107b24566 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -37,8 +37,8 @@ def create_modules_repo_dummy(tmp_dir): fh.writelines(["repository_type: modules", "\n", "org_path: nf-core", "\n"]) # mock biocontainers and anaconda response with responses.RequestsMock() as rsps: - mock_anaconda_api_calls(rsps, "bpipe", "0.9.11--hdfd78af_0") - mock_biocontainers_api_calls(rsps, "bpipe", "0.9.11--hdfd78af_0") + mock_anaconda_api_calls(rsps, "bpipe", "0.9.12--hdfd78af_0") + mock_biocontainers_api_calls(rsps, "bpipe", "0.9.12--hdfd78af_0") # bpipe is a valid package on bioconda that is very unlikely to ever be added to nf-core/modules module_create = nf_core.modules.ModuleCreate(root_dir, "bpipe/test", "@author", "process_single", False, False) with requests_cache.disabled(): From 5d570cddde472951b569ac906f38d6ee6a32b334 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 26 Jun 2024 15:44:23 +0200 Subject: [PATCH 216/737] display input textbox with grid --- nf_core/pipelines/create/create.tcss | 6 + nf_core/pipelines/create/utils.py | 19 +- tests/__snapshots__/test_create_app.ambr | 1271 +++++++++++----------- 3 files changed, 655 
insertions(+), 641 deletions(-) diff --git a/nf_core/pipelines/create/create.tcss b/nf_core/pipelines/create/create.tcss index 67394a9de..747be3f75 100644 --- a/nf_core/pipelines/create/create.tcss +++ b/nf_core/pipelines/create/create.tcss @@ -30,6 +30,12 @@ width: auto; } +.text-input-grid { + padding: 1 1 1 1; + grid-size: 1 3; + grid-rows: 3 3 auto; + height: auto; +} .field_help { padding: 1 1 0 1; color: $text-muted; diff --git a/nf_core/pipelines/create/utils.py b/nf_core/pipelines/create/utils.py index f1e0bae3c..c15d61e26 100644 --- a/nf_core/pipelines/create/utils.py +++ b/nf_core/pipelines/create/utils.py @@ -10,7 +10,7 @@ from textual import on from textual._context import active_app from textual.app import ComposeResult -from textual.containers import HorizontalScroll +from textual.containers import Grid, HorizontalScroll from textual.message import Message from textual.validation import ValidationResult, Validator from textual.widget import Widget @@ -116,14 +116,17 @@ def __init__(self, field_id, placeholder, description, default=None, password=No self.password: bool = password def compose(self) -> ComposeResult: - yield Static(self.description, classes="field_help") - yield Input( - placeholder=self.placeholder, - validators=[ValidateConfig(self.field_id)], - value=self.default, - password=self.password, + yield Grid( + Static(self.description, classes="field_help"), + Input( + placeholder=self.placeholder, + validators=[ValidateConfig(self.field_id)], + value=self.default, + password=self.password, + ), + Static(classes="validation_msg"), + classes="text-input-grid", ) - yield Static(classes="validation_msg") @on(Input.Changed) @on(Input.Submitted) diff --git a/tests/__snapshots__/test_create_app.ambr b/tests/__snapshots__/test_create_app.ambr index 89a5699f4..7133bb098 100644 --- a/tests/__snapshots__/test_create_app.ambr +++ b/tests/__snapshots__/test_create_app.ambr @@ -22,253 +22,254 @@ font-weight: 700; } - .terminal-2299698092-matrix { + 
.terminal-207444902-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2299698092-title { + .terminal-207444902-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2299698092-r1 { fill: #c5c8c6 } - .terminal-2299698092-r2 { fill: #e3e3e3 } - .terminal-2299698092-r3 { fill: #989898 } - .terminal-2299698092-r4 { fill: #e1e1e1 } - .terminal-2299698092-r5 { fill: #4ebf71;font-weight: bold } - .terminal-2299698092-r6 { fill: #a5a5a5;font-style: italic; } - .terminal-2299698092-r7 { fill: #1e1e1e } - .terminal-2299698092-r8 { fill: #008139 } - .terminal-2299698092-r9 { fill: #121212 } - .terminal-2299698092-r10 { fill: #e2e2e2 } - .terminal-2299698092-r11 { fill: #787878 } - .terminal-2299698092-r12 { fill: #454a50 } - .terminal-2299698092-r13 { fill: #7ae998 } - .terminal-2299698092-r14 { fill: #e2e3e3;font-weight: bold } - .terminal-2299698092-r15 { fill: #0a180e;font-weight: bold } - .terminal-2299698092-r16 { fill: #000000 } - .terminal-2299698092-r17 { fill: #fea62b;font-weight: bold } - .terminal-2299698092-r18 { fill: #a7a9ab } - .terminal-2299698092-r19 { fill: #e2e3e3 } + .terminal-207444902-r1 { fill: #c5c8c6 } + .terminal-207444902-r2 { fill: #e3e3e3 } + .terminal-207444902-r3 { fill: #989898 } + .terminal-207444902-r4 { fill: #e1e1e1 } + .terminal-207444902-r5 { fill: #4ebf71;font-weight: bold } + .terminal-207444902-r6 { fill: #a5a5a5;font-style: italic; } + .terminal-207444902-r7 { fill: #1e1e1e } + .terminal-207444902-r8 { fill: #008139 } + .terminal-207444902-r9 { fill: #121212 } + .terminal-207444902-r10 { fill: #e2e2e2 } + .terminal-207444902-r11 { fill: #787878 } + .terminal-207444902-r12 { fill: #b93c5b } + .terminal-207444902-r13 { fill: #454a50 } + .terminal-207444902-r14 { fill: #7ae998 } + .terminal-207444902-r15 { fill: #e2e3e3;font-weight: bold } + .terminal-207444902-r16 { fill: #0a180e;font-weight: bold } + .terminal-207444902-r17 { 
fill: #000000 } + .terminal-207444902-r18 { fill: #fea62b;font-weight: bold } + .terminal-207444902-r19 { fill: #a7a9ab } + .terminal-207444902-r20 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Basic details - - - - GitHub organisationWorkflow name - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - nf-corePipeline Name - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - A short description of your pipeline. - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Description - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Name of the main author / authors - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Author(s) - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackNext - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - - - - - -  d Toggle dark mode q Quit + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Basic details + + + + + GitHub organisationWorkflow name + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + nf-corePipeline Name + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + A short description of your pipeline. 
+ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Description + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + Name of the main author / authors + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Author(s) + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackNext + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + +  d Toggle dark mode q Quit @@ -298,256 +299,257 @@ font-weight: 700; } - .terminal-4102136482-matrix { + .terminal-813458076-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-4102136482-title { + .terminal-813458076-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-4102136482-r1 { fill: #c5c8c6 } - .terminal-4102136482-r2 { fill: #e3e3e3 } - .terminal-4102136482-r3 { fill: #989898 } - .terminal-4102136482-r4 { fill: #e1e1e1 } - .terminal-4102136482-r5 { fill: #4ebf71;font-weight: bold } - .terminal-4102136482-r6 { fill: #a5a5a5;font-style: italic; } - .terminal-4102136482-r7 { fill: #1e1e1e } - .terminal-4102136482-r8 { fill: #0f4e2a } - .terminal-4102136482-r9 { fill: #0178d4 } - .terminal-4102136482-r10 { fill: #a7a7a7 } - .terminal-4102136482-r11 { fill: #787878 } - .terminal-4102136482-r12 { fill: #e2e2e2 } - .terminal-4102136482-r13 { fill: #121212 } - .terminal-4102136482-r14 { fill: #454a50 } - .terminal-4102136482-r15 { fill: #7ae998 } - .terminal-4102136482-r16 { fill: #e2e3e3;font-weight: bold } - .terminal-4102136482-r17 { fill: #0a180e;font-weight: bold } - .terminal-4102136482-r18 { fill: #000000 } - .terminal-4102136482-r19 { fill: #008139 } - .terminal-4102136482-r20 { fill: #fea62b;font-weight: bold } - .terminal-4102136482-r21 { fill: #a7a9ab } - 
.terminal-4102136482-r22 { fill: #e2e3e3 } + .terminal-813458076-r1 { fill: #c5c8c6 } + .terminal-813458076-r2 { fill: #e3e3e3 } + .terminal-813458076-r3 { fill: #989898 } + .terminal-813458076-r4 { fill: #e1e1e1 } + .terminal-813458076-r5 { fill: #4ebf71;font-weight: bold } + .terminal-813458076-r6 { fill: #a5a5a5;font-style: italic; } + .terminal-813458076-r7 { fill: #1e1e1e } + .terminal-813458076-r8 { fill: #0f4e2a } + .terminal-813458076-r9 { fill: #0178d4 } + .terminal-813458076-r10 { fill: #a7a7a7 } + .terminal-813458076-r11 { fill: #787878 } + .terminal-813458076-r12 { fill: #e2e2e2 } + .terminal-813458076-r13 { fill: #b93c5b } + .terminal-813458076-r14 { fill: #121212 } + .terminal-813458076-r15 { fill: #454a50 } + .terminal-813458076-r16 { fill: #7ae998 } + .terminal-813458076-r17 { fill: #e2e3e3;font-weight: bold } + .terminal-813458076-r18 { fill: #0a180e;font-weight: bold } + .terminal-813458076-r19 { fill: #000000 } + .terminal-813458076-r20 { fill: #008139 } + .terminal-813458076-r21 { fill: #fea62b;font-weight: bold } + .terminal-813458076-r22 { fill: #a7a9ab } + .terminal-813458076-r23 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Basic details - - - - GitHub organisationWorkflow name - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - nf-corePipeline Name - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - A short description of your pipeline. 
- ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Description - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Name of the main author / authors - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Author(s) - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackNext - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - - - - - -  d Toggle dark mode q Quit + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Basic details + + + + + GitHub organisationWorkflow name + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + nf-corePipeline Name + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + A short description of your pipeline. 
+ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Description + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + Name of the main author / authors + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Author(s) + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackNext + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + +  d Toggle dark mode q Quit @@ -1131,251 +1133,252 @@ font-weight: 700; } - .terminal-1422742483-matrix { + .terminal-1204198466-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1422742483-title { + .terminal-1204198466-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1422742483-r1 { fill: #c5c8c6 } - .terminal-1422742483-r2 { fill: #e3e3e3 } - .terminal-1422742483-r3 { fill: #989898 } - .terminal-1422742483-r4 { fill: #e1e1e1 } - .terminal-1422742483-r5 { fill: #4ebf71;font-weight: bold } - .terminal-1422742483-r6 { fill: #a5a5a5;font-style: italic; } - .terminal-1422742483-r7 { fill: #1e1e1e } - .terminal-1422742483-r8 { fill: #008139 } - .terminal-1422742483-r9 { fill: #e2e2e2 } - .terminal-1422742483-r10 { fill: #454a50 } - .terminal-1422742483-r11 { fill: #7ae998 } - .terminal-1422742483-r12 { fill: #e2e3e3;font-weight: bold } - .terminal-1422742483-r13 { fill: #0a180e;font-weight: bold } - .terminal-1422742483-r14 { fill: #000000 } - .terminal-1422742483-r15 { fill: #fea62b;font-weight: bold } - .terminal-1422742483-r16 { fill: #a7a9ab } - .terminal-1422742483-r17 { fill: #e2e3e3 } + .terminal-1204198466-r1 { fill: #c5c8c6 } + .terminal-1204198466-r2 { fill: #e3e3e3 } + .terminal-1204198466-r3 { fill: #989898 } + .terminal-1204198466-r4 { fill: #e1e1e1 } + 
.terminal-1204198466-r5 { fill: #4ebf71;font-weight: bold } + .terminal-1204198466-r6 { fill: #a5a5a5;font-style: italic; } + .terminal-1204198466-r7 { fill: #1e1e1e } + .terminal-1204198466-r8 { fill: #008139 } + .terminal-1204198466-r9 { fill: #e2e2e2 } + .terminal-1204198466-r10 { fill: #b93c5b } + .terminal-1204198466-r11 { fill: #454a50 } + .terminal-1204198466-r12 { fill: #7ae998 } + .terminal-1204198466-r13 { fill: #e2e3e3;font-weight: bold } + .terminal-1204198466-r14 { fill: #0a180e;font-weight: bold } + .terminal-1204198466-r15 { fill: #000000 } + .terminal-1204198466-r16 { fill: #fea62b;font-weight: bold } + .terminal-1204198466-r17 { fill: #a7a9ab } + .terminal-1204198466-r18 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Final details - - - - First version of the pipelinePath to the output directory where the pipeline  - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔will be created - 1.0.0dev▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁. - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackFinish - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -  d Toggle dark mode q Quit + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Final details + + + + + First version of the pipelinePath to the output directory where the  + pipeline will be created + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + 1.0.0dev. 
+ ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackFinish + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +  d Toggle dark mode q Quit @@ -1405,258 +1408,259 @@ font-weight: 700; } - .terminal-4244045401-matrix { + .terminal-3078297506-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-4244045401-title { + .terminal-3078297506-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-4244045401-r1 { fill: #c5c8c6 } - .terminal-4244045401-r2 { fill: #e3e3e3 } - .terminal-4244045401-r3 { fill: #989898 } - .terminal-4244045401-r4 { fill: #e1e1e1 } - .terminal-4244045401-r5 { fill: #4ebf71;font-weight: bold } - .terminal-4244045401-r6 { fill: #a5a5a5;font-style: italic; } - .terminal-4244045401-r7 { fill: #1e1e1e } - .terminal-4244045401-r8 { fill: #008139 } - .terminal-4244045401-r9 { fill: #454a50 } - .terminal-4244045401-r10 { fill: #e2e2e2 } - .terminal-4244045401-r11 { fill: #e2e3e3;font-weight: bold } - .terminal-4244045401-r12 { fill: #000000 } - .terminal-4244045401-r13 { fill: #18954b } - .terminal-4244045401-r14 { fill: #e2e2e2;font-weight: bold } - .terminal-4244045401-r15 { fill: #969696;font-weight: bold } - .terminal-4244045401-r16 { fill: #808080 } - .terminal-4244045401-r17 { fill: #7ae998 } - .terminal-4244045401-r18 { fill: #507bb3 } - .terminal-4244045401-r19 { fill: #0a180e;font-weight: bold } - .terminal-4244045401-r20 { fill: #dde6ed;font-weight: bold } - .terminal-4244045401-r21 { fill: #001541 } - .terminal-4244045401-r22 { fill: #fea62b;font-weight: bold } - .terminal-4244045401-r23 { fill: #a7a9ab } - .terminal-4244045401-r24 { fill: #e2e3e3 } + .terminal-3078297506-r1 { fill: #c5c8c6 } + .terminal-3078297506-r2 { fill: #e3e3e3 } + .terminal-3078297506-r3 { fill: #989898 } + .terminal-3078297506-r4 { fill: 
#e1e1e1 } + .terminal-3078297506-r5 { fill: #4ebf71;font-weight: bold } + .terminal-3078297506-r6 { fill: #a5a5a5;font-style: italic; } + .terminal-3078297506-r7 { fill: #454a50 } + .terminal-3078297506-r8 { fill: #e2e3e3;font-weight: bold } + .terminal-3078297506-r9 { fill: #1e1e1e } + .terminal-3078297506-r10 { fill: #008139 } + .terminal-3078297506-r11 { fill: #000000 } + .terminal-3078297506-r12 { fill: #e2e2e2 } + .terminal-3078297506-r13 { fill: #b93c5b } + .terminal-3078297506-r14 { fill: #18954b } + .terminal-3078297506-r15 { fill: #e2e2e2;font-weight: bold } + .terminal-3078297506-r16 { fill: #969696;font-weight: bold } + .terminal-3078297506-r17 { fill: #808080 } + .terminal-3078297506-r18 { fill: #7ae998 } + .terminal-3078297506-r19 { fill: #507bb3 } + .terminal-3078297506-r20 { fill: #0a180e;font-weight: bold } + .terminal-3078297506-r21 { fill: #dde6ed;font-weight: bold } + .terminal-3078297506-r22 { fill: #001541 } + .terminal-3078297506-r23 { fill: #fea62b;font-weight: bold } + .terminal-3078297506-r24 { fill: #a7a9ab } + .terminal-3078297506-r25 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Create GitHub repository - - Now that we have created a new pipeline locally, we can create a new GitHub repository and push  - the code to it. 
- - - - Your GitHub usernameYour GitHub personal access token - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔for login.▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - GitHub username▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔Show - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁••••••••••••▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - The name of the organisation where the The name of the new GitHub repository - GitHub repo will be cretaed▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔mypipeline - nf-core▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ⚠️ You can't create a repository directly in the nf-core organisation. - Please create the pipeline repo to an organisation where you have access or use your user  - account. A core-team member will be able to transfer the repo to nf-core once the development - has started. - - 💡 Your GitHub user account will be used by default if nf-core is given as the org name. - - - ▔▔▔▔▔▔▔▔Private - Select to make the new GitHub repo private. - ▁▁▁▁▁▁▁▁ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackCreate GitHub repoFinish without creating a repo - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - -  d Toggle dark mode q Quit + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Create GitHub repository + + Now that we have created a new pipeline locally, we can create a new GitHub repository and push  + the code to it. 
+ + + + + Your GitHub usernameYour GitHub personal access token▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + for login.Show + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + GitHub username•••••••••••• + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + The name of the organisation where the The name of the new GitHub repository + GitHub repo will be cretaed + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + nf-coremypipeline + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + ⚠️ You can't create a repository directly in the nf-core organisation. + Please create the pipeline repo to an organisation where you have access or use your user  + account. A core-team member will be able to transfer the repo to nf-core once the development + has started. + + 💡 Your GitHub user account will be used by default if nf-core is given as the org name. + + + ▔▔▔▔▔▔▔▔Private + Select to make the new GitHub repo private. 
+ ▁▁▁▁▁▁▁▁ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackCreate GitHub repoFinish without creating a repo + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + +  d Toggle dark mode q Quit @@ -2787,255 +2791,256 @@ font-weight: 700; } - .terminal-3046427883-matrix { + .terminal-1853179481-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3046427883-title { + .terminal-1853179481-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3046427883-r1 { fill: #c5c8c6 } - .terminal-3046427883-r2 { fill: #e3e3e3 } - .terminal-3046427883-r3 { fill: #989898 } - .terminal-3046427883-r4 { fill: #e1e1e1 } - .terminal-3046427883-r5 { fill: #4ebf71;font-weight: bold } - .terminal-3046427883-r6 { fill: #a5a5a5;font-style: italic; } - .terminal-3046427883-r7 { fill: #1e1e1e } - .terminal-3046427883-r8 { fill: #0f4e2a } - .terminal-3046427883-r9 { fill: #7b3042 } - .terminal-3046427883-r10 { fill: #a7a7a7 } - .terminal-3046427883-r11 { fill: #787878 } - .terminal-3046427883-r12 { fill: #e2e2e2 } - .terminal-3046427883-r13 { fill: #b93c5b } - .terminal-3046427883-r14 { fill: #454a50 } - .terminal-3046427883-r15 { fill: #7ae998 } - .terminal-3046427883-r16 { fill: #e2e3e3;font-weight: bold } - .terminal-3046427883-r17 { fill: #000000 } - .terminal-3046427883-r18 { fill: #008139 } - .terminal-3046427883-r19 { fill: #fea62b;font-weight: bold } - .terminal-3046427883-r20 { fill: #a7a9ab } - .terminal-3046427883-r21 { fill: #e2e3e3 } + .terminal-1853179481-r1 { fill: #c5c8c6 } + .terminal-1853179481-r2 { fill: #e3e3e3 } + .terminal-1853179481-r3 { fill: #989898 } + .terminal-1853179481-r4 { fill: #e1e1e1 } + .terminal-1853179481-r5 { fill: #4ebf71;font-weight: bold } + .terminal-1853179481-r6 { fill: #a5a5a5;font-style: italic; } + .terminal-1853179481-r7 { fill: #1e1e1e } + .terminal-1853179481-r8 { fill: #0f4e2a } + 
.terminal-1853179481-r9 { fill: #7b3042 } + .terminal-1853179481-r10 { fill: #a7a7a7 } + .terminal-1853179481-r11 { fill: #787878 } + .terminal-1853179481-r12 { fill: #e2e2e2 } + .terminal-1853179481-r13 { fill: #b93c5b } + .terminal-1853179481-r14 { fill: #454a50 } + .terminal-1853179481-r15 { fill: #166d39 } + .terminal-1853179481-r16 { fill: #e2e3e3;font-weight: bold } + .terminal-1853179481-r17 { fill: #3c8b54;font-weight: bold } + .terminal-1853179481-r18 { fill: #000000 } + .terminal-1853179481-r19 { fill: #5aa86f } + .terminal-1853179481-r20 { fill: #fea62b;font-weight: bold } + .terminal-1853179481-r21 { fill: #a7a9ab } + .terminal-1853179481-r22 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Basic details - - - - GitHub organisationWorkflow name - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - nf-corePipeline Name - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - Value error, Must be lowercase without  - punctuation. - - A short description of your pipeline. - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Description - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - Value error, Cannot be left empty. - - Name of the main author / authors - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Author(s) - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - Value error, Cannot be left empty. 
- ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackNext - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - -  d Toggle dark mode q Quit + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Basic details + + + + + GitHub organisationWorkflow name + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + nf-corePipeline Name + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + Value error, Must be lowercase without  + punctuation. + + + + A short description of your pipeline. + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Description + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + Value error, Cannot be left empty. + + + + Name of the main author / authors + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Author(s) + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + Value error, Cannot be left empty. 
+ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackNext + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + +  d Toggle dark mode q Quit From cbd9791dc53a671cacc8b41e1f7c08df0fb81c4a Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Wed, 26 Jun 2024 13:55:45 +0000 Subject: [PATCH 217/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index d1628fd91..73cd98af1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -37,6 +37,7 @@ - Update pre-commit hook astral-sh/ruff-pre-commit to v0.4.10 ([#3031](https://github.com/nf-core/tools/pull/3031)) - Add warning deprecation message to top-level commands ([#3036](https://github.com/nf-core/tools/pull/3036)) - Create: Mock git cretentials to generate stable textual snapshots ([#3007](https://github.com/nf-core/tools/pull/3007)) +- Create app: display input textbox with equally spaced grid ([#3038](https://github.com/nf-core/tools/pull/3038)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From 4937fc5abd520606a54cd7b988016c53e24a7503 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 27 Jun 2024 06:48:22 +0000 Subject: [PATCH 218/737] Update python:3.12-slim Docker digest to da2d7af --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 9ebc7e985..66ee3ab2f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.12-slim@sha256:2fba8e70a87bcc9f6edd20dda0a1d4adb32046d2acbca7361bc61da5a106a914 +FROM python:3.12-slim@sha256:da2d7af143dab7cd5b0d5a5c9545fe14e67fc24c394fcf1cf15e8ea16cbd8637 LABEL authors="phil.ewels@seqera.io,erik.danielsson@scilifelab.se" \ description="Docker image containing requirements for nf-core/tools" From fb984e4e19a68bc900d56f386e0d3fcff86b002e Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Fri, 28 Jun 2024 09:40:08 +0000 Subject: [PATCH 219/737] 
[automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index d1628fd91..6d3f4f088 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -37,6 +37,7 @@ - Update pre-commit hook astral-sh/ruff-pre-commit to v0.4.10 ([#3031](https://github.com/nf-core/tools/pull/3031)) - Add warning deprecation message to top-level commands ([#3036](https://github.com/nf-core/tools/pull/3036)) - Create: Mock git cretentials to generate stable textual snapshots ([#3007](https://github.com/nf-core/tools/pull/3007)) +- Update python:3.12-slim Docker digest to da2d7af ([#3041](https://github.com/nf-core/tools/pull/3041)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From a2b226fd8e6c3a476f59b56523bb643775352550 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Fri, 28 Jun 2024 13:20:05 +0000 Subject: [PATCH 220/737] Update pre-commit hook astral-sh/ruff-pre-commit to v0.5.0 --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 34856956c..6cefa7fc0 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.4.10 + rev: v0.5.0 hooks: - id: ruff # linter args: [--fix, --exit-non-zero-on-fix] # sort imports and fix From 4f7139953d11cfacd4844ae1d96c6fdd440a6323 Mon Sep 17 00:00:00 2001 From: mashehu Date: Fri, 28 Jun 2024 16:39:51 +0200 Subject: [PATCH 221/737] Template: Run awsfulltest on PRs to `master` with two PR approvals --- .../.github/workflows/awsfulltest.yml | 23 +++++++++++++++---- 1 file changed, 19 insertions(+), 4 deletions(-) diff --git a/nf_core/pipeline-template/.github/workflows/awsfulltest.yml b/nf_core/pipeline-template/.github/workflows/awsfulltest.yml index 56ecb6030..f0d072442 100644 --- 
a/nf_core/pipeline-template/.github/workflows/awsfulltest.yml +++ b/nf_core/pipeline-template/.github/workflows/awsfulltest.yml @@ -1,18 +1,33 @@ name: nf-core AWS full size tests -# This workflow is triggered on published releases. +# This workflow is triggered on PRs opened against the master branch. # It can be additionally triggered manually with GitHub actions workflow dispatch button. # It runs the -profile 'test_full' on AWS batch on: - release: - types: [published] + pull_request: + branches: + - master workflow_dispatch: + pull_request_review: + types: [submitted] + jobs: run-platform: name: Run AWS full tests - if: github.repository == '{{ name }}' + if: github.repository == '{{ name }}' && github.event.review.state == 'approved' runs-on: ubuntu-latest steps: + - uses: octokit/request-action@v2.x + id: check_approvals + with: + route: GET /repos/${{ github.repository }}/pulls/${{ github.event.review.number }}/reviews + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - id: test_variables + run: | + JSON_RESPONSE='${{ steps.check_approvals.outputs.data }}' + CURRENT_APPROVALS_COUNT=$(echo $JSON_RESPONSE | jq -c '[.[] | select(.state | contains("APPROVED")) ] | length') + test $CURRENT_APPROVALS_COUNT -ge 2 || exit 1 # At least 2 approvals are required - name: Launch workflow via Seqera Platform uses: seqeralabs/action-tower-launch@v2 # TODO nf-core: You can customise AWS full pipeline tests as required From 407075741219497441444beba9bad62604844732 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Fri, 28 Jun 2024 14:42:04 +0000 Subject: [PATCH 222/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6d3f4f088..d417b9d57 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,7 @@ ### Template - Change paths to test data ([#2985](https://github.com/nf-core/tools/pull/2985)) +- Run awsfulltest on PRs to `master` with two PR approvals 
([#3042](https://github.com/nf-core/tools/pull/3042)) ### Linting From 044fc1d6750f4cf9d3ccf87bcb6ed9f3e61760b8 Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Mon, 1 Jul 2024 14:30:55 +0200 Subject: [PATCH 223/737] Remove default basic resource limits in template (should be set by users) --- nf_core/pipeline-template/conf/base.config | 6 ------ 1 file changed, 6 deletions(-) diff --git a/nf_core/pipeline-template/conf/base.config b/nf_core/pipeline-template/conf/base.config index fb3a8456b..fa292339e 100644 --- a/nf_core/pipeline-template/conf/base.config +++ b/nf_core/pipeline-template/conf/base.config @@ -15,12 +15,6 @@ process { memory = { 6.GB * task.attempt } time = { 4.h * task.attempt } - resourceLimits = [ - cpus: params.max_cpus, - memory: params.max_memory, - time: params.max_time - ] - errorStrategy = { task.exitStatus in ((130..145) + 104) ? 'retry' : 'finish' } maxRetries = 1 maxErrors = '-1' From 528a207ca8e2e5169140876b5d7cdbf5c5845d5f Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Mon, 1 Jul 2024 14:31:31 +0200 Subject: [PATCH 224/737] Add a basic set of resource limits in test configs, matching GithubActions runner limits --- nf_core/pipeline-template/conf/test.config | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/nf_core/pipeline-template/conf/test.config b/nf_core/pipeline-template/conf/test.config index 827e21b7b..2b9715efb 100644 --- a/nf_core/pipeline-template/conf/test.config +++ b/nf_core/pipeline-template/conf/test.config @@ -10,6 +10,14 @@ ---------------------------------------------------------------------------------------- */ +process { + resourceLimits = [ + cpus: 4, + memory: '16.GB', + time: '1.h' + ] +} + params { config_profile_name = 'Test profile' config_profile_description = 'Minimal test dataset to check pipeline function' From 936802fbfc553dde07a2acb9df134c995f5ab2d3 Mon Sep 17 00:00:00 2001 From: "James A. 
Fellows Yates" Date: Mon, 1 Jul 2024 14:37:13 +0200 Subject: [PATCH 225/737] Bump minimum NXF version to allow resourceLimits --- nf_core/pipeline-template/nextflow.config | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index 0316c2e0c..d575c6186 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -258,7 +258,7 @@ manifest { homePage = 'https://github.com/{{ name }}' description = """{{ description }}""" mainScript = 'main.nf' - nextflowVersion = '!>=23.04.0' + nextflowVersion = '!>=24.04.2' version = '{{ version }}' doi = '' } From 06a1dbe41203c284b92d34912d14f12d77aebb72 Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Mon, 1 Jul 2024 14:43:23 +0200 Subject: [PATCH 226/737] Add linting check for now deprecated params --- nf_core/pipelines/lint/nextflow_config.py | 89 ++++++++++++++++++----- 1 file changed, 69 insertions(+), 20 deletions(-) diff --git a/nf_core/pipelines/lint/nextflow_config.py b/nf_core/pipelines/lint/nextflow_config.py index f62100a70..f1c4c536c 100644 --- a/nf_core/pipelines/lint/nextflow_config.py +++ b/nf_core/pipelines/lint/nextflow_config.py @@ -87,6 +87,9 @@ def nextflow_config(self): * ``params.nf_required_version``: The old method for specifying the minimum Nextflow version. Replaced by ``manifest.nextflowVersion`` * ``params.container``: The old method for specifying the dockerhub container address. Replaced by ``process.container`` * ``igenomesIgnore``: Changed to ``igenomes_ignore`` + * ``params.max_cpus``: Old method of specifying the maximum number of CPUs a process can request. Replaced by native Nextflow `resourceLimits`directive. + * ``params.max_memory``: Old method of specifying the maximum number of memory can request. Replaced by native Nextflow `resourceLimits`directive. 
+ * ``params.max_time``: Old method of specifying the maximum number of CPUs can request. Replaced by native Nextflow `resourceLimits`directive. .. tip:: The ``snake_case`` convention should now be used when defining pipeline parameters @@ -170,6 +173,9 @@ def nextflow_config(self): "params.igenomesIgnore", "params.name", "params.enable_conda", + "params.max_cpus", + "params.max_memory", + "params.max_time", ] # Remove field that should be ignored according to the linting config @@ -200,9 +206,13 @@ def nextflow_config(self): ignored.append(f"Config variable ignored: {self._wrap_quotes(cf)}") break if cf not in self.nf_config.keys(): - passed.append(f"Config variable (correctly) not found: {self._wrap_quotes(cf)}") + passed.append( + f"Config variable (correctly) not found: {self._wrap_quotes(cf)}" + ) else: - failed.append(f"Config variable (incorrectly) found: {self._wrap_quotes(cf)}") + failed.append( + f"Config variable (incorrectly) found: {self._wrap_quotes(cf)}" + ) # Check and warn if the process configuration is done with deprecated syntax process_with_deprecated_syntax = list( @@ -222,9 +232,13 @@ def nextflow_config(self): if k in ignore_configs: continue if self.nf_config.get(k) == "true": - passed.append(f"Config ``{k}`` had correct value: ``{self.nf_config.get(k)}``") + passed.append( + f"Config ``{k}`` had correct value: ``{self.nf_config.get(k)}``" + ) else: - failed.append(f"Config ``{k}`` did not have correct value: ``{self.nf_config.get(k)}``") + failed.append( + f"Config ``{k}`` did not have correct value: ``{self.nf_config.get(k)}``" + ) if "manifest.name" not in ignore_configs: # Check that the pipeline name starts with nf-core @@ -233,7 +247,9 @@ def nextflow_config(self): if not manifest_name.startswith("nf-core/"): raise AssertionError() except (AssertionError, IndexError): - failed.append(f"Config ``manifest.name`` did not begin with ``nf-core/``:\n {manifest_name}") + failed.append( + f"Config ``manifest.name`` did not begin with 
``nf-core/``:\n {manifest_name}" + ) else: passed.append("Config ``manifest.name`` began with ``nf-core/``") @@ -249,7 +265,9 @@ def nextflow_config(self): ) else: - passed.append("Config variable ``manifest.homePage`` began with https://github.com/nf-core/") + passed.append( + "Config variable ``manifest.homePage`` began with https://github.com/nf-core/" + ) # Check that the DAG filename ends in ``.svg`` if "dag.file" in self.nf_config: @@ -257,12 +275,21 @@ def nextflow_config(self): if self.nf_config["dag.file"].strip("'\"").endswith(default_dag_format): passed.append(f"Config ``dag.file`` ended with ``{default_dag_format}``") else: - failed.append(f"Config ``dag.file`` did not end with ``{default_dag_format}``") + failed.append( + f"Config ``dag.file`` did not end with ``{default_dag_format}``" + ) # Check that the minimum nextflowVersion is set properly if "manifest.nextflowVersion" in self.nf_config: - if self.nf_config.get("manifest.nextflowVersion", "").strip("\"'").lstrip("!").startswith(">="): - passed.append("Config variable ``manifest.nextflowVersion`` started with >= or !>=") + if ( + self.nf_config.get("manifest.nextflowVersion", "") + .strip("\"'") + .lstrip("!") + .startswith(">=") + ): + passed.append( + "Config variable ``manifest.nextflowVersion`` started with >= or !>=" + ) else: failed.append( "Config ``manifest.nextflowVersion`` did not start with ``>=`` or ``!>=`` : " @@ -272,7 +299,9 @@ def nextflow_config(self): # Check that the pipeline version contains ``dev`` if not self.release_mode and "manifest.version" in self.nf_config: if self.nf_config["manifest.version"].strip(" '\"").endswith("dev"): - passed.append(f"Config ``manifest.version`` ends in ``dev``: ``{self.nf_config['manifest.version']}``") + passed.append( + f"Config ``manifest.version`` ends in ``dev``: ``{self.nf_config['manifest.version']}``" + ) else: warned.append( f"Config ``manifest.version`` should end in ``dev``: ``{self.nf_config['manifest.version']}``" @@ -291,18 
+320,32 @@ def nextflow_config(self): if "custom_config" not in ignore_configs: # Check if custom profile params are set correctly - if self.nf_config.get("params.custom_config_version", "").strip("'") == "master": + if ( + self.nf_config.get("params.custom_config_version", "").strip("'") + == "master" + ): passed.append("Config `params.custom_config_version` is set to `master`") else: - failed.append("Config `params.custom_config_version` is not set to `master`") + failed.append( + "Config `params.custom_config_version` is not set to `master`" + ) - custom_config_base = "https://raw.githubusercontent.com/nf-core/configs/{}".format( - self.nf_config.get("params.custom_config_version", "").strip("'") + custom_config_base = ( + "https://raw.githubusercontent.com/nf-core/configs/{}".format( + self.nf_config.get("params.custom_config_version", "").strip("'") + ) ) - if self.nf_config.get("params.custom_config_base", "").strip("'") == custom_config_base: - passed.append(f"Config `params.custom_config_base` is set to `{custom_config_base}`") + if ( + self.nf_config.get("params.custom_config_base", "").strip("'") + == custom_config_base + ): + passed.append( + f"Config `params.custom_config_base` is set to `{custom_config_base}`" + ) else: - failed.append(f"Config `params.custom_config_base` is not set to `{custom_config_base}`") + failed.append( + f"Config `params.custom_config_base` is not set to `{custom_config_base}`" + ) # Check that lines for loading custom profiles exist lines = [ @@ -344,7 +387,9 @@ def nextflow_config(self): match = re.search(r"\bprofiles\s*{", cleaned_content) if not match: - failed.append("nextflow.config does not contain `profiles` scope, but `test` profile is required") + failed.append( + "nextflow.config does not contain `profiles` scope, but `test` profile is required" + ) else: # Extract profiles scope content and check for test profile start = match.end() @@ -360,7 +405,9 @@ def nextflow_config(self): if re.search(r"\btest\s*{", 
profiles_content): passed.append("nextflow.config contains configuration profile `test`") else: - failed.append("nextflow.config does not contain configuration profile `test`") + failed.append( + "nextflow.config does not contain configuration profile `test`" + ) # Check that the default values in nextflow.config match the default values defined in the nextflow_schema.json ignore_defaults = [] @@ -404,7 +451,9 @@ def nextflow_config(self): schema_default = str(schema.schema_defaults[param_name]) config_default = str(self.nf_config[param]) if config_default is not None and config_default == schema_default: - passed.append(f"Config default value correct: {param}= {schema_default}") + passed.append( + f"Config default value correct: {param}= {schema_default}" + ) else: failed.append( f"Config default value incorrect: `{param}` is set as {self._wrap_quotes(schema_default)} in `nextflow_schema.json` but is {self._wrap_quotes(self.nf_config[param])} in `nextflow.config`." From 47abfc2a2dbf9f8ae42a70bc5b833e1836fdb121 Mon Sep 17 00:00:00 2001 From: "James A. 
Fellows Yates" Date: Mon, 1 Jul 2024 14:44:55 +0200 Subject: [PATCH 227/737] Add linting check for check_ params and continue removing from everywhere (not finished) --- nf_core/pipeline-template/conf/test.config | 5 --- nf_core/pipeline-template/nextflow.config | 6 ---- .../pipeline-template/nextflow_schema.json | 35 ------------------- 3 files changed, 46 deletions(-) diff --git a/nf_core/pipeline-template/conf/test.config b/nf_core/pipeline-template/conf/test.config index 2b9715efb..0f1d97410 100644 --- a/nf_core/pipeline-template/conf/test.config +++ b/nf_core/pipeline-template/conf/test.config @@ -22,11 +22,6 @@ params { config_profile_name = 'Test profile' config_profile_description = 'Minimal test dataset to check pipeline function' - // Limit resources so that this can run on GitHub Actions - max_cpus = 2 - max_memory = '6.GB' - max_time = '6.h' - // Input data // TODO nf-core: Specify the paths to your test data on nf-core/test-datasets // TODO nf-core: Give any required params for the test so that command line flags are not needed diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index d575c6186..7d9040104 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -50,12 +50,6 @@ params { config_profile_url = null {%- endif %} - // Max resource options - // Defaults only, expecting to be overwritten - max_memory = '128.GB' - max_cpus = 16 - max_time = '240.h' - // Schema validation default options validationFailUnrecognisedParams = false validationLenientMode = false diff --git a/nf_core/pipeline-template/nextflow_schema.json b/nf_core/pipeline-template/nextflow_schema.json index 18bad71b7..07a90f625 100644 --- a/nf_core/pipeline-template/nextflow_schema.json +++ b/nf_core/pipeline-template/nextflow_schema.json @@ -124,41 +124,6 @@ } } }, - "max_job_request_options": { - "title": "Max job request options", - "type": "object", - "fa_icon": "fab 
fa-acquisitions-incorporated", - "description": "Set the top limit for requested resources for any single job.", - "help_text": "If you are running on a smaller system, a pipeline step requesting more resources than are available may cause the Nextflow to stop the run with an error. These options allow you to cap the maximum resources requested by any single job so that the pipeline will run on your system.\n\nNote that you can not _increase_ the resources requested by any job using these options. For that you will need your own configuration file. See [the nf-core website](https://nf-co.re/usage/configuration) for details.", - "properties": { - "max_cpus": { - "type": "integer", - "description": "Maximum number of CPUs that can be requested for any single job.", - "default": 16, - "fa_icon": "fas fa-microchip", - "hidden": true, - "help_text": "Use to set an upper-limit for the CPU requirement for each process. Should be an integer e.g. `--max_cpus 1`" - }, - "max_memory": { - "type": "string", - "description": "Maximum amount of memory that can be requested for any single job.", - "default": "128.GB", - "fa_icon": "fas fa-memory", - "pattern": "^\\d+(\\.\\d+)?\\.?\\s*(K|M|G|T)?B$", - "hidden": true, - "help_text": "Use to set an upper-limit for the memory requirement for each process. Should be a string in the format integer-unit e.g. `--max_memory '8.GB'`" - }, - "max_time": { - "type": "string", - "description": "Maximum amount of time that can be requested for any single job.", - "default": "240.h", - "fa_icon": "far fa-clock", - "pattern": "^(\\d+\\.?\\s*(s|m|h|d|day)\\s*)+$", - "hidden": true, - "help_text": "Use to set an upper-limit for the time requirement for each process. Should be a string in the format integer-unit e.g. 
`--max_time '2.h'`" - } - } - }, "generic_options": { "title": "Generic options", "type": "object", From 2e1dc9650ebe24935ff1545627f579e46e6a967f Mon Sep 17 00:00:00 2001 From: Maxime U Garcia Date: Wed, 3 Jul 2024 09:47:39 +0200 Subject: [PATCH 228/737] Remove deprecated syntax --- nf_core/pipeline-template/main.nf | 2 -- 1 file changed, 2 deletions(-) diff --git a/nf_core/pipeline-template/main.nf b/nf_core/pipeline-template/main.nf index 1fd6a5b27..c13a0d24e 100644 --- a/nf_core/pipeline-template/main.nf +++ b/nf_core/pipeline-template/main.nf @@ -11,8 +11,6 @@ ---------------------------------------------------------------------------------------- */ -nextflow.enable.dsl = 2 - /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ IMPORT FUNCTIONS / MODULES / SUBWORKFLOWS / WORKFLOWS From 20089357e17aa5c850476a61fe67a8e6033ce17a Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Wed, 3 Jul 2024 07:48:32 +0000 Subject: [PATCH 229/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6d3f4f088..63f4bdc43 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -38,6 +38,7 @@ - Add warning deprecation message to top-level commands ([#3036](https://github.com/nf-core/tools/pull/3036)) - Create: Mock git cretentials to generate stable textual snapshots ([#3007](https://github.com/nf-core/tools/pull/3007)) - Update python:3.12-slim Docker digest to da2d7af ([#3041](https://github.com/nf-core/tools/pull/3041)) +- Remove deprecated syntax ([#3046](https://github.com/nf-core/tools/pull/3046)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From 034ae05e7db07fcef41a711464ecea65bfdf5e6c Mon Sep 17 00:00:00 2001 From: Maxime U Garcia Date: Wed, 3 Jul 2024 09:57:33 +0200 Subject: [PATCH 230/737] Update CHANGELOG.md --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md 
b/CHANGELOG.md index 63f4bdc43..a13ccc229 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,7 @@ ### Template - Change paths to test data ([#2985](https://github.com/nf-core/tools/pull/2985)) +- Remove deprecated syntax ([#3046](https://github.com/nf-core/tools/pull/3046)) ### Linting @@ -38,7 +39,6 @@ - Add warning deprecation message to top-level commands ([#3036](https://github.com/nf-core/tools/pull/3036)) - Create: Mock git cretentials to generate stable textual snapshots ([#3007](https://github.com/nf-core/tools/pull/3007)) - Update python:3.12-slim Docker digest to da2d7af ([#3041](https://github.com/nf-core/tools/pull/3041)) -- Remove deprecated syntax ([#3046](https://github.com/nf-core/tools/pull/3046)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From d863c4d8fcbe3ef41ae8f0d16f1c314e0c45a6d9 Mon Sep 17 00:00:00 2001 From: Joon-Klaps Date: Wed, 3 Jul 2024 08:29:04 +0000 Subject: [PATCH 231/737] add default limit_output argument for write_diff_file & print_diff --- nf_core/modules/modules_differ.py | 24 +++++++++++++++++++++--- 1 file changed, 21 insertions(+), 3 deletions(-) diff --git a/nf_core/modules/modules_differ.py b/nf_core/modules/modules_differ.py index dc2b163dd..fb1fc31dc 100644 --- a/nf_core/modules/modules_differ.py +++ b/nf_core/modules/modules_differ.py @@ -133,6 +133,7 @@ def write_diff_file( for_git=True, dsp_from_dir=None, dsp_to_dir=None, + limit_output=False, ): """ Writes the diffs of a module to the diff file. 
@@ -174,8 +175,14 @@ def write_diff_file( else: fh.write(f"Changes in module '{Path(repo_path, module)}'\n") - for _, (diff_status, diff) in diffs.items(): - if diff_status != ModulesDiffer.DiffEnum.UNCHANGED: + for file, (diff_status, diff) in diffs.items(): + if diff_status == ModulesDiffer.DiffEnum.UNCHANGED: + # The files are identical + fh.write(f"'{Path(dsp_from_dir, file)}' is unchanged\n") + elif limit_output and file != "main.nf": + # Skip printing the diff for files other than main.nf + fh.write(f"Changes in '{Path(module, file)}' not shown\n") + else: # The file has changed write the diff lines to the file for line in diff: fh.write(line) @@ -219,7 +226,15 @@ def append_modules_json_diff(diff_path, old_modules_json, new_modules_json, modu @staticmethod def print_diff( - module, repo_path, from_dir, to_dir, current_version=None, new_version=None, dsp_from_dir=None, dsp_to_dir=None + module, + repo_path, + from_dir, + to_dir, + current_version=None, + new_version=None, + dsp_from_dir=None, + dsp_to_dir=None, + limit_output=False, ): """ Prints the diffs between two module versions to the terminal @@ -261,6 +276,9 @@ def print_diff( elif diff_status == ModulesDiffer.DiffEnum.REMOVED: # The file was removed between the commits log.info(f"'{Path(dsp_from_dir, file)}' was removed") + elif limit_output and file != "main.nf": + # Skip printing the diff for files other than main.nf + log.info(f"Changes in '{Path(module, file)}' not shown") else: # The file has changed log.info(f"Changes in '{Path(module, file)}':") From 5b9d3f54eb206b835035ccdc349130026c42eb9d Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Wed, 3 Jul 2024 08:33:07 +0000 Subject: [PATCH 232/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6d3f4f088..fc4a23349 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -38,6 +38,7 @@ - Add warning deprecation message to top-level commands 
([#3036](https://github.com/nf-core/tools/pull/3036)) - Create: Mock git cretentials to generate stable textual snapshots ([#3007](https://github.com/nf-core/tools/pull/3007)) - Update python:3.12-slim Docker digest to da2d7af ([#3041](https://github.com/nf-core/tools/pull/3041)) +- add default limit_output argument for write_diff_file & print_diff ([#3047](https://github.com/nf-core/tools/pull/3047)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From 1997ee41a974e26dd6e9de7e30e91ff0240fe49f Mon Sep 17 00:00:00 2001 From: Joon-Klaps Date: Wed, 3 Jul 2024 09:38:23 +0000 Subject: [PATCH 233/737] adding limit_output to click --- nf_core/__main__.py | 10 ++++++++++ nf_core/components/update.py | 17 +++++++++++++---- nf_core/modules/update.py | 2 ++ 3 files changed, 25 insertions(+), 4 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index ea0018e2a..86bcde86e 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -1214,6 +1214,14 @@ def modules_install(ctx, tool, dir, prompt, force, sha): default=False, help="Prompt for the version of the module", ) +@click.option( + "-l", + "--limit-output", + "limit_output", + is_flag=True, + default=False, + help="Limit ouput to only the difference in main.nf", +) @click.option("-s", "--sha", type=str, metavar="", help="Install module at commit SHA") @click.option( "-a", @@ -1256,6 +1264,7 @@ def modules_update( preview, save_diff, update_deps, + limit_output, ): """ Update DSL2 modules within a pipeline. 
@@ -1277,6 +1286,7 @@ def modules_update( ctx.obj["modules_repo_url"], ctx.obj["modules_repo_branch"], ctx.obj["modules_repo_no_pull"], + limit_output, ) exit_status = module_install.update(tool) if not exit_status and install_all: diff --git a/nf_core/components/update.py b/nf_core/components/update.py index a54c47232..3c3cdca6c 100644 --- a/nf_core/components/update.py +++ b/nf_core/components/update.py @@ -38,6 +38,7 @@ def __init__( remote_url=None, branch=None, no_pull=False, + limit_output=False, ): super().__init__(component_type, pipeline_dir, remote_url, branch, no_pull) self.force = force @@ -76,7 +77,7 @@ def _parameter_checks(self): if not self.has_valid_directory(): raise UserWarning("The command was not run in a valid pipeline directory.") - def update(self, component=None, silent=False, updated=None, check_diff_exist=True) -> bool: + def update(self, component=None, silent=False, updated=None, check_diff_exist=True, limit_output=False) -> bool: """Updates a specified module/subworkflow or all modules/subworkflows in a pipeline. If updating a subworkflow: updates all modules used in that subworkflow. 
@@ -231,6 +232,7 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr version, dsp_from_dir=component_dir, dsp_to_dir=component_dir, + limit_output=limit_output, ) updated.append(component) except UserWarning as e: @@ -271,6 +273,7 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr version, dsp_from_dir=component_dir, dsp_to_dir=component_dir, + limit_output=limit_output, ) # Ask the user if they want to install the component @@ -875,7 +878,9 @@ def get_components_to_update(self, component): return modules_to_update, subworkflows_to_update - def update_linked_components(self, modules_to_update, subworkflows_to_update, updated=None, check_diff_exist=True): + def update_linked_components( + self, modules_to_update, subworkflows_to_update, updated=None, check_diff_exist=True, limit_output=False + ): """ Update modules and subworkflows linked to the component being updated. """ @@ -883,7 +888,9 @@ def update_linked_components(self, modules_to_update, subworkflows_to_update, up if s_update in updated: continue original_component_type, original_update_all = self._change_component_type("subworkflows") - self.update(s_update, silent=True, updated=updated, check_diff_exist=check_diff_exist) + self.update( + s_update, silent=True, updated=updated, check_diff_exist=check_diff_exist, limit_output=limit_output + ) self._reset_component_type(original_component_type, original_update_all) for m_update in modules_to_update: @@ -891,7 +898,9 @@ def update_linked_components(self, modules_to_update, subworkflows_to_update, up continue original_component_type, original_update_all = self._change_component_type("modules") try: - self.update(m_update, silent=True, updated=updated, check_diff_exist=check_diff_exist) + self.update( + m_update, silent=True, updated=updated, check_diff_exist=check_diff_exist, limit_output=limit_output + ) except LookupError as e: # If the module to be updated is not available, check if there has 
been a name change if "not found in list of available" in str(e): diff --git a/nf_core/modules/update.py b/nf_core/modules/update.py index 9d53bf201..f6cf5235a 100644 --- a/nf_core/modules/update.py +++ b/nf_core/modules/update.py @@ -15,6 +15,7 @@ def __init__( remote_url=None, branch=None, no_pull=False, + limit_output=False, ): super().__init__( pipeline_dir, @@ -29,4 +30,5 @@ def __init__( remote_url, branch, no_pull, + limit_output, ) From 6dd57dfb8c2052a851ec0bae220aecf33ba95f37 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 3 Jul 2024 11:32:23 +0200 Subject: [PATCH 234/737] pin all textual versions --- requirements-dev.txt | 4 +- requirements.txt | 2 +- tests/__snapshots__/test_create_app.ambr | 3014 +++++++++++----------- 3 files changed, 1510 insertions(+), 1510 deletions(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index fa98655ae..ebfbb530d 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -6,7 +6,7 @@ responses ruff Sphinx sphinx-rtd-theme -textual-dev>=1.2.1 +textual-dev==1.5.1 mypy types-PyYAML types-requests @@ -15,5 +15,5 @@ types-Markdown types-PyYAML types-requests types-setuptools -pytest-textual-snapshot +pytest-textual-snapshot==0.4.0 ruff diff --git a/requirements.txt b/requirements.txt index 0574083fd..ccfc1bc9c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -21,6 +21,6 @@ requests_cache rich-click==1.8.* rich>=13.3.1 tabulate -textual>=0.63.1 +textual==0.71.0 trogon pdiff diff --git a/tests/__snapshots__/test_create_app.ambr b/tests/__snapshots__/test_create_app.ambr index 89a5699f4..aa1d36f97 100644 --- a/tests/__snapshots__/test_create_app.ambr +++ b/tests/__snapshots__/test_create_app.ambr @@ -22,253 +22,253 @@ font-weight: 700; } - .terminal-2299698092-matrix { + .terminal-3554683094-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2299698092-title { + .terminal-3554683094-title { font-size: 18px; font-weight: 
bold; font-family: arial; } - .terminal-2299698092-r1 { fill: #c5c8c6 } - .terminal-2299698092-r2 { fill: #e3e3e3 } - .terminal-2299698092-r3 { fill: #989898 } - .terminal-2299698092-r4 { fill: #e1e1e1 } - .terminal-2299698092-r5 { fill: #4ebf71;font-weight: bold } - .terminal-2299698092-r6 { fill: #a5a5a5;font-style: italic; } - .terminal-2299698092-r7 { fill: #1e1e1e } - .terminal-2299698092-r8 { fill: #008139 } - .terminal-2299698092-r9 { fill: #121212 } - .terminal-2299698092-r10 { fill: #e2e2e2 } - .terminal-2299698092-r11 { fill: #787878 } - .terminal-2299698092-r12 { fill: #454a50 } - .terminal-2299698092-r13 { fill: #7ae998 } - .terminal-2299698092-r14 { fill: #e2e3e3;font-weight: bold } - .terminal-2299698092-r15 { fill: #0a180e;font-weight: bold } - .terminal-2299698092-r16 { fill: #000000 } - .terminal-2299698092-r17 { fill: #fea62b;font-weight: bold } - .terminal-2299698092-r18 { fill: #a7a9ab } - .terminal-2299698092-r19 { fill: #e2e3e3 } + .terminal-3554683094-r1 { fill: #c5c8c6 } + .terminal-3554683094-r2 { fill: #e3e3e3 } + .terminal-3554683094-r3 { fill: #989898 } + .terminal-3554683094-r4 { fill: #e1e1e1 } + .terminal-3554683094-r5 { fill: #4ebf71;font-weight: bold } + .terminal-3554683094-r6 { fill: #a5a5a5;font-style: italic; } + .terminal-3554683094-r7 { fill: #1e1e1e } + .terminal-3554683094-r8 { fill: #008139 } + .terminal-3554683094-r9 { fill: #121212 } + .terminal-3554683094-r10 { fill: #e2e2e2 } + .terminal-3554683094-r11 { fill: #787878 } + .terminal-3554683094-r12 { fill: #454a50 } + .terminal-3554683094-r13 { fill: #7ae998 } + .terminal-3554683094-r14 { fill: #e2e3e3;font-weight: bold } + .terminal-3554683094-r15 { fill: #0a180e;font-weight: bold } + .terminal-3554683094-r16 { fill: #000000 } + .terminal-3554683094-r17 { fill: #fea62b;font-weight: bold } + .terminal-3554683094-r18 { fill: #a7a9ab } + .terminal-3554683094-r19 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + 
- + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Basic details - - - - GitHub organisationWorkflow name - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - nf-corePipeline Name - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - A short description of your pipeline. - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Description - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Name of the main author / authors - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Author(s) - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackNext - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - - - - - -  d Toggle dark mode q Quit + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Basic details + + + + GitHub organisationWorkflow name + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + nf-corePipeline Name + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + A short description of your pipeline. 
+ ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Description + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Name of the main author / authors + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Author(s) + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Back  Next  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + + + + + +  d Toggle dark mode  q Quit  @@ -298,256 +298,256 @@ font-weight: 700; } - .terminal-4102136482-matrix { + .terminal-2636639375-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-4102136482-title { + .terminal-2636639375-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-4102136482-r1 { fill: #c5c8c6 } - .terminal-4102136482-r2 { fill: #e3e3e3 } - .terminal-4102136482-r3 { fill: #989898 } - .terminal-4102136482-r4 { fill: #e1e1e1 } - .terminal-4102136482-r5 { fill: #4ebf71;font-weight: bold } - .terminal-4102136482-r6 { fill: #a5a5a5;font-style: italic; } - .terminal-4102136482-r7 { fill: #1e1e1e } - .terminal-4102136482-r8 { fill: #0f4e2a } - .terminal-4102136482-r9 { fill: #0178d4 } - .terminal-4102136482-r10 { fill: #a7a7a7 } - .terminal-4102136482-r11 { fill: #787878 } - .terminal-4102136482-r12 { fill: #e2e2e2 } - .terminal-4102136482-r13 { fill: #121212 } - .terminal-4102136482-r14 { fill: #454a50 } - .terminal-4102136482-r15 { fill: #7ae998 } - .terminal-4102136482-r16 { fill: #e2e3e3;font-weight: bold } - .terminal-4102136482-r17 { fill: #0a180e;font-weight: bold } - .terminal-4102136482-r18 { fill: #000000 } - .terminal-4102136482-r19 { fill: #008139 } - .terminal-4102136482-r20 { fill: #fea62b;font-weight: bold } - .terminal-4102136482-r21 { fill: #a7a9ab 
} - .terminal-4102136482-r22 { fill: #e2e3e3 } + .terminal-2636639375-r1 { fill: #c5c8c6 } + .terminal-2636639375-r2 { fill: #e3e3e3 } + .terminal-2636639375-r3 { fill: #989898 } + .terminal-2636639375-r4 { fill: #e1e1e1 } + .terminal-2636639375-r5 { fill: #4ebf71;font-weight: bold } + .terminal-2636639375-r6 { fill: #a5a5a5;font-style: italic; } + .terminal-2636639375-r7 { fill: #1e1e1e } + .terminal-2636639375-r8 { fill: #0f4e2a } + .terminal-2636639375-r9 { fill: #0178d4 } + .terminal-2636639375-r10 { fill: #a7a7a7 } + .terminal-2636639375-r11 { fill: #787878 } + .terminal-2636639375-r12 { fill: #e2e2e2 } + .terminal-2636639375-r13 { fill: #121212 } + .terminal-2636639375-r14 { fill: #454a50 } + .terminal-2636639375-r15 { fill: #7ae998 } + .terminal-2636639375-r16 { fill: #e2e3e3;font-weight: bold } + .terminal-2636639375-r17 { fill: #0a180e;font-weight: bold } + .terminal-2636639375-r18 { fill: #000000 } + .terminal-2636639375-r19 { fill: #008139 } + .terminal-2636639375-r20 { fill: #fea62b;font-weight: bold } + .terminal-2636639375-r21 { fill: #a7a9ab } + .terminal-2636639375-r22 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Basic details - - - - GitHub organisationWorkflow name - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - nf-corePipeline Name - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - A short description of your pipeline. 
- ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Description - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - Name of the main author / authors - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Author(s) - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackNext - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - - - - - -  d Toggle dark mode q Quit + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Basic details + + + + GitHub organisationWorkflow name + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + nf-core                                     Pipeline Name + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + A short description of your pipeline. 
+ ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Description + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + Name of the main author / authors + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Author(s) + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Back  Next  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + + + + + +  d Toggle dark mode  q Quit  @@ -577,251 +577,251 @@ font-weight: 700; } - .terminal-2364166316-matrix { + .terminal-1889064188-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2364166316-title { + .terminal-1889064188-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2364166316-r1 { fill: #c5c8c6 } - .terminal-2364166316-r2 { fill: #e3e3e3 } - .terminal-2364166316-r3 { fill: #989898 } - .terminal-2364166316-r4 { fill: #e1e1e1 } - .terminal-2364166316-r5 { fill: #4ebf71;font-weight: bold } - .terminal-2364166316-r6 { fill: #4ebf71;text-decoration: underline; } - .terminal-2364166316-r7 { fill: #4ebf71;font-style: italic;;text-decoration: underline; } - .terminal-2364166316-r8 { fill: #e1e1e1;font-style: italic; } - .terminal-2364166316-r9 { fill: #7ae998 } - .terminal-2364166316-r10 { fill: #008139 } - .terminal-2364166316-r11 { fill: #507bb3 } - .terminal-2364166316-r12 { fill: #dde6ed;font-weight: bold } - .terminal-2364166316-r13 { fill: #001541 } - .terminal-2364166316-r14 { fill: #e1e1e1;text-decoration: underline; } - .terminal-2364166316-r15 { fill: #fea62b;font-weight: bold } - .terminal-2364166316-r16 { fill: #a7a9ab } - .terminal-2364166316-r17 { fill: #e2e3e3 } + .terminal-1889064188-r1 { fill: #c5c8c6 } + .terminal-1889064188-r2 { fill: #e3e3e3 } + 
.terminal-1889064188-r3 { fill: #989898 } + .terminal-1889064188-r4 { fill: #e1e1e1 } + .terminal-1889064188-r5 { fill: #4ebf71;font-weight: bold } + .terminal-1889064188-r6 { fill: #4ebf71;text-decoration: underline; } + .terminal-1889064188-r7 { fill: #4ebf71;font-style: italic;;text-decoration: underline; } + .terminal-1889064188-r8 { fill: #e1e1e1;font-style: italic; } + .terminal-1889064188-r9 { fill: #7ae998 } + .terminal-1889064188-r10 { fill: #008139 } + .terminal-1889064188-r11 { fill: #507bb3 } + .terminal-1889064188-r12 { fill: #dde6ed;font-weight: bold } + .terminal-1889064188-r13 { fill: #001541 } + .terminal-1889064188-r14 { fill: #e1e1e1;text-decoration: underline; } + .terminal-1889064188-r15 { fill: #fea62b;font-weight: bold } + .terminal-1889064188-r16 { fill: #a7a9ab } + .terminal-1889064188-r17 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Choose pipeline type - - - - - Choose "nf-core" if:Choose "Custom" if: - - ● You want your pipeline to be part of the ● Your pipeline will never be part of  - nf-core communitynf-core - ● You think that there's an outside chance ● You want full control over all features  - that it ever could be part of nf-corethat are included from the template  - (including those that are mandatory for  - nf-core). - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - nf-core - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Custom - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - What's the difference? 
- - Choosing "nf-core" effectively pre-selects the following template features: - - ● GitHub Actions continuous-integration configuration files: - ▪ Pipeline test runs: Small-scale (GitHub) and large-scale (AWS) - ▪ Code formatting checks with Prettier - ▪ Auto-fix linting functionality using @nf-core-bot - ▪ Marking old issues as stale - ● Inclusion of shared nf-core configuration profiles - - - - - - - - - - - - - - -  d Toggle dark mode q Quit + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Choose pipeline type + + + + + Choose "nf-core" if:Choose "Custom" if: + + ● You want your pipeline to be part of the ● Your pipeline will never be part of  + nf-core communitynf-core + ● You think that there's an outside chance ● You want full control over all features  + that it ever could be part of nf-corethat are included from the template  + (including those that are mandatory for  + nf-core). + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  nf-core  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Custom  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + What's the difference? 
+ +   Choosing "nf-core" effectively pre-selects the following template features: + + ● GitHub Actions continuous-integration configuration files: + ▪ Pipeline test runs: Small-scale (GitHub) and large-scale (AWS) + ▪ Code formatting checks with Prettier + ▪ Auto-fix linting functionality using @nf-core-bot + ▪ Marking old issues as stale + ● Inclusion of shared nf-core configuration profiles + + + + + + + + + + + + + + +  d Toggle dark mode  q Quit  @@ -851,257 +851,257 @@ font-weight: 700; } - .terminal-3598234483-matrix { + .terminal-2971485804-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3598234483-title { + .terminal-2971485804-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3598234483-r1 { fill: #c5c8c6 } - .terminal-3598234483-r2 { fill: #e3e3e3 } - .terminal-3598234483-r3 { fill: #989898 } - .terminal-3598234483-r4 { fill: #e1e1e1 } - .terminal-3598234483-r5 { fill: #4ebf71;font-weight: bold } - .terminal-3598234483-r6 { fill: #1e1e1e } - .terminal-3598234483-r7 { fill: #0178d4 } - .terminal-3598234483-r8 { fill: #454a50 } - .terminal-3598234483-r9 { fill: #e2e2e2 } - .terminal-3598234483-r10 { fill: #808080 } - .terminal-3598234483-r11 { fill: #e2e3e3;font-weight: bold } - .terminal-3598234483-r12 { fill: #000000 } - .terminal-3598234483-r13 { fill: #e4e4e4 } - .terminal-3598234483-r14 { fill: #14191f } - .terminal-3598234483-r15 { fill: #507bb3 } - .terminal-3598234483-r16 { fill: #dde6ed;font-weight: bold } - .terminal-3598234483-r17 { fill: #001541 } - .terminal-3598234483-r18 { fill: #7ae998 } - .terminal-3598234483-r19 { fill: #0a180e;font-weight: bold } - .terminal-3598234483-r20 { fill: #008139 } - .terminal-3598234483-r21 { fill: #fea62b;font-weight: bold } - .terminal-3598234483-r22 { fill: #a7a9ab } - .terminal-3598234483-r23 { fill: #e2e3e3 } + .terminal-2971485804-r1 { fill: #c5c8c6 } + .terminal-2971485804-r2 { fill: #e3e3e3 } 
+ .terminal-2971485804-r3 { fill: #989898 } + .terminal-2971485804-r4 { fill: #e1e1e1 } + .terminal-2971485804-r5 { fill: #4ebf71;font-weight: bold } + .terminal-2971485804-r6 { fill: #1e1e1e } + .terminal-2971485804-r7 { fill: #0178d4 } + .terminal-2971485804-r8 { fill: #454a50 } + .terminal-2971485804-r9 { fill: #e2e2e2 } + .terminal-2971485804-r10 { fill: #808080 } + .terminal-2971485804-r11 { fill: #e2e3e3;font-weight: bold } + .terminal-2971485804-r12 { fill: #000000 } + .terminal-2971485804-r13 { fill: #e4e4e4 } + .terminal-2971485804-r14 { fill: #14191f } + .terminal-2971485804-r15 { fill: #507bb3 } + .terminal-2971485804-r16 { fill: #dde6ed;font-weight: bold } + .terminal-2971485804-r17 { fill: #001541 } + .terminal-2971485804-r18 { fill: #7ae998 } + .terminal-2971485804-r19 { fill: #0a180e;font-weight: bold } + .terminal-2971485804-r20 { fill: #008139 } + .terminal-2971485804-r21 { fill: #fea62b;font-weight: bold } + .terminal-2971485804-r22 { fill: #a7a9ab } + .terminal-2971485804-r23 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Template features - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Use reference The pipeline will Hide help - ▁▁▁▁▁▁▁▁genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - use a copy of the  - most common  - reference genome  - files from  - iGenomes - - - Nf-core pipelines are configured to use a copy of the most common  - reference genome files. - - By selecting this option, your pipeline will include a configuration - file specifying the paths to these files. - - The required code to use these files will also be included in the  - template. 
When the pipeline user provides an appropriate genome key, - the pipeline will automatically download the required reference ▂▂ - files. - - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Add Github CI The pipeline will Show help - ▁▁▁▁▁▁▁▁testsinclude several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - GitHub actions for - Continuous  - Integration (CI)  - testing▄▄ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Add Github badgesThe README.md fileShow help - ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - will include  - GitHub badges - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackContinue - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode q Quit + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Template features + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use reference The pipeline will  Hide help  + ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + use a copy of the  + most common  + reference genome  + files from  + iGenomes + + + Nf-core pipelines are configured to use a copy of the most common  + reference genome files. + + By selecting this option, your pipeline will include a configuration + file specifying the paths to these files. + + The required code to use these files will also be included in the  + template. When the pipeline user provides an appropriate genome key, + the pipeline will automatically download the required reference ▂▂ + files. 
+ + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github CI The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        testsinclude several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + GitHub actions for + Continuous  + Integration (CI)  + testing▄▄ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github badgesThe README.md file Show help  + ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + will include  + GitHub badges + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Back  Continue  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  d Toggle dark mode  q Quit  @@ -1131,251 +1131,251 @@ font-weight: 700; } - .terminal-1422742483-matrix { + .terminal-3764053845-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1422742483-title { + .terminal-3764053845-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1422742483-r1 { fill: #c5c8c6 } - .terminal-1422742483-r2 { fill: #e3e3e3 } - .terminal-1422742483-r3 { fill: #989898 } - .terminal-1422742483-r4 { fill: #e1e1e1 } - .terminal-1422742483-r5 { fill: #4ebf71;font-weight: bold } - .terminal-1422742483-r6 { fill: #a5a5a5;font-style: italic; } - .terminal-1422742483-r7 { fill: #1e1e1e } - .terminal-1422742483-r8 { fill: #008139 } - .terminal-1422742483-r9 { fill: #e2e2e2 } - .terminal-1422742483-r10 { fill: #454a50 } - .terminal-1422742483-r11 { fill: #7ae998 } - .terminal-1422742483-r12 { fill: #e2e3e3;font-weight: bold } - .terminal-1422742483-r13 { fill: #0a180e;font-weight: bold } - .terminal-1422742483-r14 { fill: #000000 } - .terminal-1422742483-r15 { fill: #fea62b;font-weight: bold } - .terminal-1422742483-r16 { fill: #a7a9ab } - .terminal-1422742483-r17 { fill: #e2e3e3 } + .terminal-3764053845-r1 { fill: #c5c8c6 } + .terminal-3764053845-r2 { fill: #e3e3e3 } + .terminal-3764053845-r3 { fill: #989898 } + .terminal-3764053845-r4 { fill: #e1e1e1 } + .terminal-3764053845-r5 { fill: #4ebf71;font-weight: bold } + .terminal-3764053845-r6 { fill: 
#a5a5a5;font-style: italic; } + .terminal-3764053845-r7 { fill: #1e1e1e } + .terminal-3764053845-r8 { fill: #008139 } + .terminal-3764053845-r9 { fill: #e2e2e2 } + .terminal-3764053845-r10 { fill: #454a50 } + .terminal-3764053845-r11 { fill: #7ae998 } + .terminal-3764053845-r12 { fill: #e2e3e3;font-weight: bold } + .terminal-3764053845-r13 { fill: #0a180e;font-weight: bold } + .terminal-3764053845-r14 { fill: #000000 } + .terminal-3764053845-r15 { fill: #fea62b;font-weight: bold } + .terminal-3764053845-r16 { fill: #a7a9ab } + .terminal-3764053845-r17 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Final details - - - - First version of the pipelinePath to the output directory where the pipeline  - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔will be created - 1.0.0dev▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁. - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackFinish - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -  d Toggle dark mode q Quit + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Final details + + + + First version of the pipelinePath to the output directory where the pipeline  + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔will be created + 1.0.0dev▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁.                                            
+ ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Back  Finish  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +  d Toggle dark mode  q Quit  @@ -1405,258 +1405,258 @@ font-weight: 700; } - .terminal-4244045401-matrix { + .terminal-1325622613-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-4244045401-title { + .terminal-1325622613-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-4244045401-r1 { fill: #c5c8c6 } - .terminal-4244045401-r2 { fill: #e3e3e3 } - .terminal-4244045401-r3 { fill: #989898 } - .terminal-4244045401-r4 { fill: #e1e1e1 } - .terminal-4244045401-r5 { fill: #4ebf71;font-weight: bold } - .terminal-4244045401-r6 { fill: #a5a5a5;font-style: italic; } - .terminal-4244045401-r7 { fill: #1e1e1e } - .terminal-4244045401-r8 { fill: #008139 } - .terminal-4244045401-r9 { fill: #454a50 } - .terminal-4244045401-r10 { fill: #e2e2e2 } - .terminal-4244045401-r11 { fill: #e2e3e3;font-weight: bold } - .terminal-4244045401-r12 { fill: #000000 } - .terminal-4244045401-r13 { fill: #18954b } - .terminal-4244045401-r14 { fill: #e2e2e2;font-weight: bold } - .terminal-4244045401-r15 { fill: #969696;font-weight: bold } - .terminal-4244045401-r16 { fill: #808080 } - .terminal-4244045401-r17 { fill: #7ae998 } - .terminal-4244045401-r18 { fill: #507bb3 } - .terminal-4244045401-r19 { fill: #0a180e;font-weight: bold } - .terminal-4244045401-r20 { fill: #dde6ed;font-weight: bold } - .terminal-4244045401-r21 { fill: #001541 } - .terminal-4244045401-r22 { fill: #fea62b;font-weight: bold } - .terminal-4244045401-r23 { fill: #a7a9ab } - .terminal-4244045401-r24 { fill: #e2e3e3 } + .terminal-1325622613-r1 { fill: #c5c8c6 } + .terminal-1325622613-r2 { fill: #e3e3e3 } + .terminal-1325622613-r3 { fill: #989898 } + .terminal-1325622613-r4 { fill: #e1e1e1 } + .terminal-1325622613-r5 { 
fill: #4ebf71;font-weight: bold } + .terminal-1325622613-r6 { fill: #a5a5a5;font-style: italic; } + .terminal-1325622613-r7 { fill: #1e1e1e } + .terminal-1325622613-r8 { fill: #008139 } + .terminal-1325622613-r9 { fill: #454a50 } + .terminal-1325622613-r10 { fill: #e2e2e2 } + .terminal-1325622613-r11 { fill: #e2e3e3;font-weight: bold } + .terminal-1325622613-r12 { fill: #000000 } + .terminal-1325622613-r13 { fill: #18954b } + .terminal-1325622613-r14 { fill: #e2e2e2;font-weight: bold } + .terminal-1325622613-r15 { fill: #969696;font-weight: bold } + .terminal-1325622613-r16 { fill: #808080 } + .terminal-1325622613-r17 { fill: #7ae998 } + .terminal-1325622613-r18 { fill: #507bb3 } + .terminal-1325622613-r19 { fill: #0a180e;font-weight: bold } + .terminal-1325622613-r20 { fill: #dde6ed;font-weight: bold } + .terminal-1325622613-r21 { fill: #001541 } + .terminal-1325622613-r22 { fill: #fea62b;font-weight: bold } + .terminal-1325622613-r23 { fill: #a7a9ab } + .terminal-1325622613-r24 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Create GitHub repository - - Now that we have created a new pipeline locally, we can create a new GitHub repository and push  - the code to it. 
- - - - Your GitHub usernameYour GitHub personal access token - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔for login.▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - GitHub username▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔Show - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁••••••••••••▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - The name of the organisation where the The name of the new GitHub repository - GitHub repo will be cretaed▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔mypipeline - nf-core▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ⚠️ You can't create a repository directly in the nf-core organisation. - Please create the pipeline repo to an organisation where you have access or use your user  - account. A core-team member will be able to transfer the repo to nf-core once the development - has started. - - 💡 Your GitHub user account will be used by default if nf-core is given as the org name. - - - ▔▔▔▔▔▔▔▔Private - Select to make the new GitHub repo private. - ▁▁▁▁▁▁▁▁ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackCreate GitHub repoFinish without creating a repo - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - -  d Toggle dark mode q Quit + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Create GitHub repository + +   Now that we have created a new pipeline locally, we can create a new GitHub repository and push    +   the code to it. 
+ + + + Your GitHub usernameYour GitHub personal access token + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔for login.▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + GitHub username▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ Show  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁••••••••••••                    ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + The name of the organisation where the The name of the new GitHub repository + GitHub repo will be cretaed▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔mypipeline                               + nf-core                                 ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + ⚠️ You can't create a repository directly in the nf-core organisation. + Please create the pipeline repo to an organisation where you have access or use your user  + account. A core-team member will be able to transfer the repo to nf-core once the development + has started. + + 💡 Your GitHub user account will be used by default if nf-core is given as the org name. + + + ▔▔▔▔▔▔▔▔Private + Select to make the new GitHub repo private. 
+ ▁▁▁▁▁▁▁▁ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Back  Create GitHub repo  Finish without creating a repo  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + +  d Toggle dark mode  q Quit  @@ -1686,254 +1686,254 @@ font-weight: 700; } - .terminal-396289429-matrix { + .terminal-1078212293-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-396289429-title { + .terminal-1078212293-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-396289429-r1 { fill: #c5c8c6 } - .terminal-396289429-r2 { fill: #e3e3e3 } - .terminal-396289429-r3 { fill: #989898 } - .terminal-396289429-r4 { fill: #e1e1e1 } - .terminal-396289429-r5 { fill: #4ebf71;font-weight: bold } - .terminal-396289429-r6 { fill: #98e024 } - .terminal-396289429-r7 { fill: #626262 } - .terminal-396289429-r8 { fill: #9d65ff } - .terminal-396289429-r9 { fill: #fd971f } - .terminal-396289429-r10 { fill: #d2d2d2 } - .terminal-396289429-r11 { fill: #82aaff } - .terminal-396289429-r12 { fill: #eeffff } - .terminal-396289429-r13 { fill: #18954b } - .terminal-396289429-r14 { fill: #e2e2e2 } - .terminal-396289429-r15 { fill: #969696;font-weight: bold } - .terminal-396289429-r16 { fill: #7ae998 } - .terminal-396289429-r17 { fill: #008139 } - .terminal-396289429-r18 { fill: #fea62b;font-weight: bold } - .terminal-396289429-r19 { fill: #a7a9ab } - .terminal-396289429-r20 { fill: #e2e3e3 } + .terminal-1078212293-r1 { fill: #c5c8c6 } + .terminal-1078212293-r2 { fill: #e3e3e3 } + .terminal-1078212293-r3 { fill: #989898 } + .terminal-1078212293-r4 { fill: #e1e1e1 } + .terminal-1078212293-r5 { fill: #4ebf71;font-weight: bold } + .terminal-1078212293-r6 { fill: #98e024 } + .terminal-1078212293-r7 { fill: #626262 } + .terminal-1078212293-r8 { fill: #9d65ff } + .terminal-1078212293-r9 { fill: #fd971f } + .terminal-1078212293-r10 { fill: #d2d2d2 } + 
.terminal-1078212293-r11 { fill: #82aaff } + .terminal-1078212293-r12 { fill: #eeffff } + .terminal-1078212293-r13 { fill: #18954b } + .terminal-1078212293-r14 { fill: #e2e2e2 } + .terminal-1078212293-r15 { fill: #969696;font-weight: bold } + .terminal-1078212293-r16 { fill: #7ae998 } + .terminal-1078212293-r17 { fill: #008139 } + .terminal-1078212293-r18 { fill: #fea62b;font-weight: bold } + .terminal-1078212293-r19 { fill: #a7a9ab } + .terminal-1078212293-r20 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - HowTo create a GitHub repository - - - -                                           ,--./,-. -           ___     __   __   __   ___     /,-._.--~\  -     |\ | |__  __ /  ` /  \ |__) |__         }  { -     | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                           `._,._,' - - If you would like to create the GitHub repository later, you can do it manually by following  - these steps: - -  1. Create a new GitHub repository -  2. Add the remote to your local repository: - - - cd<pipeline_directory> - gitremoteaddorigingit@github.com:<username>/<repo_name>.git - - -  3. Push the code to the remote: - - - gitpush--allorigin - - - 💡 Note the --all flag: this is needed to push all branches to the remote. - - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Close - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - -  d Toggle dark mode q Quit + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + HowTo create a GitHub repository + + + +                                           ,--./,-. 
+           ___     __   __   __   ___     /,-._.--~\  +     |\ | |__  __ /  ` /  \ |__) |__         }  { +     | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                           `._,._,' + +   If you would like to create the GitHub repository later, you can do it manually by following  +   these steps: + +  1. Create a new GitHub repository +  2. Add the remote to your local repository: + + + cd <pipeline_directory> + git remote add origin git@github.com:<username>/<repo_name>.git + + +  3. Push the code to the remote: + + + git push --all origin + + + 💡 Note the --all flag: this is needed to push all branches to the remote. + + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Close  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + +  d Toggle dark mode  q Quit  @@ -1963,247 +1963,247 @@ font-weight: 700; } - .terminal-3492397142-matrix { + .terminal-2285979206-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3492397142-title { + .terminal-2285979206-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3492397142-r1 { fill: #c5c8c6 } - .terminal-3492397142-r2 { fill: #e3e3e3 } - .terminal-3492397142-r3 { fill: #989898 } - .terminal-3492397142-r4 { fill: #e1e1e1 } - .terminal-3492397142-r5 { fill: #4ebf71;font-weight: bold } - .terminal-3492397142-r6 { fill: #7ae998 } - .terminal-3492397142-r7 { fill: #507bb3 } - .terminal-3492397142-r8 { fill: #dde6ed;font-weight: bold } - .terminal-3492397142-r9 { fill: #008139 } - .terminal-3492397142-r10 { fill: #001541 } - .terminal-3492397142-r11 { fill: #fea62b;font-weight: bold } - .terminal-3492397142-r12 { fill: #a7a9ab } - .terminal-3492397142-r13 { fill: #e2e3e3 } + .terminal-2285979206-r1 { fill: #c5c8c6 } + .terminal-2285979206-r2 { fill: #e3e3e3 } + .terminal-2285979206-r3 { fill: #989898 } + .terminal-2285979206-r4 { fill: #e1e1e1 } + .terminal-2285979206-r5 { fill: #4ebf71;font-weight: bold } + 
.terminal-2285979206-r6 { fill: #7ae998 } + .terminal-2285979206-r7 { fill: #507bb3 } + .terminal-2285979206-r8 { fill: #dde6ed;font-weight: bold } + .terminal-2285979206-r9 { fill: #008139 } + .terminal-2285979206-r10 { fill: #001541 } + .terminal-2285979206-r11 { fill: #fea62b;font-weight: bold } + .terminal-2285979206-r12 { fill: #a7a9ab } + .terminal-2285979206-r13 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Create GitHub repository - - - After creating the pipeline template locally, we can create a GitHub repository and push the  - code to it. - - Do you want to create a GitHub repository? - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Create GitHub repoFinish without creating a repo - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -  d Toggle dark mode q Quit + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Create GitHub repository + + +   After creating the pipeline template locally, we can create a GitHub repository and push the  +   code to it. + +   Do you want to create a GitHub repository? 
+ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Create GitHub repo  Finish without creating a repo  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +  d Toggle dark mode  q Quit  @@ -2233,254 +2233,254 @@ font-weight: 700; } - .terminal-4082092032-matrix { + .terminal-1445899181-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-4082092032-title { + .terminal-1445899181-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-4082092032-r1 { fill: #c5c8c6 } - .terminal-4082092032-r2 { fill: #e3e3e3 } - .terminal-4082092032-r3 { fill: #989898 } - .terminal-4082092032-r4 { fill: #e1e1e1 } - .terminal-4082092032-r5 { fill: #4ebf71;font-weight: bold } - .terminal-4082092032-r6 { fill: #1e1e1e } - .terminal-4082092032-r7 { fill: #507bb3 } - .terminal-4082092032-r8 { fill: #e2e2e2 } - .terminal-4082092032-r9 { fill: #808080 } - .terminal-4082092032-r10 { fill: #dde6ed;font-weight: bold } - .terminal-4082092032-r11 { fill: #001541 } - .terminal-4082092032-r12 { fill: #454a50 } - .terminal-4082092032-r13 { fill: #7ae998 } - .terminal-4082092032-r14 { fill: #e2e3e3;font-weight: bold } - .terminal-4082092032-r15 { fill: #0a180e;font-weight: bold } - .terminal-4082092032-r16 { fill: #000000 } - .terminal-4082092032-r17 { fill: #008139 } - .terminal-4082092032-r18 { fill: #fea62b;font-weight: bold } - .terminal-4082092032-r19 { fill: #a7a9ab } - .terminal-4082092032-r20 { fill: #e2e3e3 } + .terminal-1445899181-r1 { fill: #c5c8c6 } + .terminal-1445899181-r2 { fill: #e3e3e3 } + .terminal-1445899181-r3 { fill: #989898 } + .terminal-1445899181-r4 { fill: #e1e1e1 } + .terminal-1445899181-r5 { fill: #4ebf71;font-weight: bold } + .terminal-1445899181-r6 { fill: #1e1e1e } + .terminal-1445899181-r7 { fill: #507bb3 } + .terminal-1445899181-r8 { fill: #e2e2e2 } + .terminal-1445899181-r9 { fill: 
#808080 } + .terminal-1445899181-r10 { fill: #dde6ed;font-weight: bold } + .terminal-1445899181-r11 { fill: #001541 } + .terminal-1445899181-r12 { fill: #454a50 } + .terminal-1445899181-r13 { fill: #7ae998 } + .terminal-1445899181-r14 { fill: #e2e3e3;font-weight: bold } + .terminal-1445899181-r15 { fill: #0a180e;font-weight: bold } + .terminal-1445899181-r16 { fill: #000000 } + .terminal-1445899181-r17 { fill: #008139 } + .terminal-1445899181-r18 { fill: #fea62b;font-weight: bold } + .terminal-1445899181-r19 { fill: #a7a9ab } + .terminal-1445899181-r20 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Template features - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Use reference The pipeline will Show help - ▁▁▁▁▁▁▁▁genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - use a copy of the  - most common  - reference genome  - files from iGenomes - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Add Github CI testsThe pipeline will Show help - ▁▁▁▁▁▁▁▁include several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - GitHub actions for  - Continuous  - Integration (CI)  - testing - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Add Github badgesThe README.md file Show help - ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - will include GitHub - badges - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Add configuration The pipeline will Show help - ▁▁▁▁▁▁▁▁filesinclude ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - configuration  - profiles containing - custom parameters  - requried to run  - nf-core pipelines  - at different  - institutions - - - - - - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackContinue - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode q Quit + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Template features + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use reference 
The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + use a copy of the  + most common  + reference genome  + files from iGenomes + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github CI tests        The pipeline will  Show help  + ▁▁▁▁▁▁▁▁include several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + GitHub actions for  + Continuous  + Integration (CI)  + testing + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github badgesThe README.md file  Show help  + ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + will include GitHub + badges + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add configuration The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        filesinclude ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + configuration  + profiles containing + custom parameters  + requried to run  + nf-core pipelines  + at different  + institutions + + + + + + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Back  Continue  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  d Toggle dark mode  q Quit  @@ -2510,254 +2510,254 @@ font-weight: 700; } - .terminal-1639960877-matrix { + .terminal-388991162-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1639960877-title { + .terminal-388991162-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1639960877-r1 { fill: #c5c8c6 } - .terminal-1639960877-r2 { fill: #e3e3e3 } - .terminal-1639960877-r3 { fill: #989898 } - .terminal-1639960877-r4 { fill: #e1e1e1 } - .terminal-1639960877-r5 { fill: #4ebf71;font-weight: bold } - .terminal-1639960877-r6 { fill: #1e1e1e } - .terminal-1639960877-r7 { fill: #507bb3 } - .terminal-1639960877-r8 { fill: #e2e2e2 } - .terminal-1639960877-r9 { fill: #808080 } - .terminal-1639960877-r10 { fill: #dde6ed;font-weight: bold } - .terminal-1639960877-r11 { fill: #001541 } - .terminal-1639960877-r12 { fill: #454a50 } - .terminal-1639960877-r13 { fill: #7ae998 } - .terminal-1639960877-r14 { fill: #e2e3e3;font-weight: bold } - .terminal-1639960877-r15 { fill: 
#0a180e;font-weight: bold } - .terminal-1639960877-r16 { fill: #000000 } - .terminal-1639960877-r17 { fill: #008139 } - .terminal-1639960877-r18 { fill: #fea62b;font-weight: bold } - .terminal-1639960877-r19 { fill: #a7a9ab } - .terminal-1639960877-r20 { fill: #e2e3e3 } + .terminal-388991162-r1 { fill: #c5c8c6 } + .terminal-388991162-r2 { fill: #e3e3e3 } + .terminal-388991162-r3 { fill: #989898 } + .terminal-388991162-r4 { fill: #e1e1e1 } + .terminal-388991162-r5 { fill: #4ebf71;font-weight: bold } + .terminal-388991162-r6 { fill: #1e1e1e } + .terminal-388991162-r7 { fill: #507bb3 } + .terminal-388991162-r8 { fill: #e2e2e2 } + .terminal-388991162-r9 { fill: #808080 } + .terminal-388991162-r10 { fill: #dde6ed;font-weight: bold } + .terminal-388991162-r11 { fill: #001541 } + .terminal-388991162-r12 { fill: #454a50 } + .terminal-388991162-r13 { fill: #7ae998 } + .terminal-388991162-r14 { fill: #e2e3e3;font-weight: bold } + .terminal-388991162-r15 { fill: #0a180e;font-weight: bold } + .terminal-388991162-r16 { fill: #000000 } + .terminal-388991162-r17 { fill: #008139 } + .terminal-388991162-r18 { fill: #fea62b;font-weight: bold } + .terminal-388991162-r19 { fill: #a7a9ab } + .terminal-388991162-r20 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Template features - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Use reference The pipeline will Show help - ▁▁▁▁▁▁▁▁genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - use a copy of the  - most common  - reference genome  - files from iGenomes - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackContinue - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode q Quit + + + + nf-core create — Create a new 
pipeline with the nf-core pipeline template + + + Template features + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use reference The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + use a copy of the  + most common  + reference genome  + files from iGenomes + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Back  Continue  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  d Toggle dark mode  q Quit  @@ -2787,255 +2787,255 @@ font-weight: 700; } - .terminal-3046427883-matrix { + .terminal-930117970-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3046427883-title { + .terminal-930117970-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3046427883-r1 { fill: #c5c8c6 } - .terminal-3046427883-r2 { fill: #e3e3e3 } - .terminal-3046427883-r3 { fill: #989898 } - .terminal-3046427883-r4 { fill: #e1e1e1 } - .terminal-3046427883-r5 { fill: #4ebf71;font-weight: bold } - .terminal-3046427883-r6 { fill: #a5a5a5;font-style: italic; } - .terminal-3046427883-r7 { fill: #1e1e1e } - .terminal-3046427883-r8 { fill: #0f4e2a } - .terminal-3046427883-r9 { fill: #7b3042 } - .terminal-3046427883-r10 { fill: #a7a7a7 } - .terminal-3046427883-r11 { fill: #787878 } - .terminal-3046427883-r12 { fill: #e2e2e2 } - .terminal-3046427883-r13 { fill: #b93c5b } - .terminal-3046427883-r14 { fill: #454a50 } - .terminal-3046427883-r15 { fill: #7ae998 } - .terminal-3046427883-r16 { fill: #e2e3e3;font-weight: bold } - .terminal-3046427883-r17 { fill: #000000 } - .terminal-3046427883-r18 { fill: #008139 } - .terminal-3046427883-r19 { fill: #fea62b;font-weight: bold } - .terminal-3046427883-r20 { fill: #a7a9ab } - .terminal-3046427883-r21 { fill: #e2e3e3 } + .terminal-930117970-r1 { fill: #c5c8c6 } + .terminal-930117970-r2 { fill: #e3e3e3 } + .terminal-930117970-r3 { fill: #989898 } + .terminal-930117970-r4 { fill: #e1e1e1 } + 
.terminal-930117970-r5 { fill: #4ebf71;font-weight: bold } + .terminal-930117970-r6 { fill: #a5a5a5;font-style: italic; } + .terminal-930117970-r7 { fill: #1e1e1e } + .terminal-930117970-r8 { fill: #0f4e2a } + .terminal-930117970-r9 { fill: #7b3042 } + .terminal-930117970-r10 { fill: #a7a7a7 } + .terminal-930117970-r11 { fill: #787878 } + .terminal-930117970-r12 { fill: #e2e2e2 } + .terminal-930117970-r13 { fill: #b93c5b } + .terminal-930117970-r14 { fill: #454a50 } + .terminal-930117970-r15 { fill: #7ae998 } + .terminal-930117970-r16 { fill: #e2e3e3;font-weight: bold } + .terminal-930117970-r17 { fill: #000000 } + .terminal-930117970-r18 { fill: #008139 } + .terminal-930117970-r19 { fill: #fea62b;font-weight: bold } + .terminal-930117970-r20 { fill: #a7a9ab } + .terminal-930117970-r21 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Basic details - - - - GitHub organisationWorkflow name - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - nf-corePipeline Name - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - Value error, Must be lowercase without  - punctuation. - - A short description of your pipeline. - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Description - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - Value error, Cannot be left empty. 
- - Name of the main author / authors - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Author(s) - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - Value error, Cannot be left empty. - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackNext - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - -  d Toggle dark mode q Quit + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Basic details + + + + GitHub organisationWorkflow name + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + nf-core                                     Pipeline Name + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + Value error, Must be lowercase without  + punctuation. + + A short description of your pipeline. + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Description + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + Value error, Cannot be left empty. + + Name of the main author / authors + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Author(s) + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + Value error, Cannot be left empty. 
+ ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Back  Next  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + +  d Toggle dark mode  q Quit  @@ -3065,253 +3065,253 @@ font-weight: 700; } - .terminal-3787732750-matrix { + .terminal-3972722241-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3787732750-title { + .terminal-3972722241-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3787732750-r1 { fill: #c5c8c6 } - .terminal-3787732750-r2 { fill: #e3e3e3 } - .terminal-3787732750-r3 { fill: #989898 } - .terminal-3787732750-r4 { fill: #98e024 } - .terminal-3787732750-r5 { fill: #626262 } - .terminal-3787732750-r6 { fill: #9d65ff } - .terminal-3787732750-r7 { fill: #fd971f } - .terminal-3787732750-r8 { fill: #e1e1e1 } - .terminal-3787732750-r9 { fill: #4ebf71;font-weight: bold } - .terminal-3787732750-r10 { fill: #e1e1e1;text-decoration: underline; } - .terminal-3787732750-r11 { fill: #18954b } - .terminal-3787732750-r12 { fill: #e2e2e2 } - .terminal-3787732750-r13 { fill: #e2e2e2;text-decoration: underline; } - .terminal-3787732750-r14 { fill: #e2e2e2;font-weight: bold;font-style: italic; } - .terminal-3787732750-r15 { fill: #7ae998 } - .terminal-3787732750-r16 { fill: #008139 } - .terminal-3787732750-r17 { fill: #fea62b;font-weight: bold } - .terminal-3787732750-r18 { fill: #a7a9ab } - .terminal-3787732750-r19 { fill: #e2e3e3 } + .terminal-3972722241-r1 { fill: #c5c8c6 } + .terminal-3972722241-r2 { fill: #e3e3e3 } + .terminal-3972722241-r3 { fill: #989898 } + .terminal-3972722241-r4 { fill: #98e024 } + .terminal-3972722241-r5 { fill: #626262 } + .terminal-3972722241-r6 { fill: #9d65ff } + .terminal-3972722241-r7 { fill: #fd971f } + .terminal-3972722241-r8 { fill: #e1e1e1 } + .terminal-3972722241-r9 { fill: #4ebf71;font-weight: bold } + .terminal-3972722241-r10 { fill: #e1e1e1;text-decoration: underline; } + .terminal-3972722241-r11 { fill: 
#18954b } + .terminal-3972722241-r12 { fill: #e2e2e2 } + .terminal-3972722241-r13 { fill: #e2e2e2;text-decoration: underline; } + .terminal-3972722241-r14 { fill: #e2e2e2;font-weight: bold;font-style: italic; } + .terminal-3972722241-r15 { fill: #7ae998 } + .terminal-3972722241-r16 { fill: #008139 } + .terminal-3972722241-r17 { fill: #fea62b;font-weight: bold } + .terminal-3972722241-r18 { fill: #a7a9ab } + .terminal-3972722241-r19 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - -                                           ,--./,-. -           ___     __   __   __   ___     /,-._.--~\  -     |\ | |__  __ /  ` /  \ |__) |__         }  { -     | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                           `._,._,' - - - - Welcome to the nf-core pipeline creation wizard - - This app will help you create a new Nextflow pipeline from the nf-core/tools pipeline template. - - The template helps anyone benefit from nf-core best practices, and is a requirement for nf-core  - pipelines. - - 💡 If you want to add a pipeline to nf-core, please join on Slack and discuss your plans with - the community as early as possible; ideally before you start on your pipeline! See the  - nf-core guidelines and the #new-pipelines Slack channel for more information. - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Let's go! - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - - - - -  d Toggle dark mode q Quit + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + +                                           ,--./,-. 
+           ___     __   __   __   ___     /,-._.--~\  +     |\ | |__  __ /  ` /  \ |__) |__         }  { +     | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                           `._,._,' + + + + Welcome to the nf-core pipeline creation wizard + +   This app will help you create a new Nextflow pipeline from the nf-core/tools pipeline template. + +   The template helps anyone benefit from nf-core best practices, and is a requirement for nf-core    +   pipelines. + + 💡 If you want to add a pipeline to nf-core, please join on Slack and discuss your plans with + the community as early as possible; ideally before you start on your pipeline! See the  + nf-core guidelines and the #new-pipelines Slack channel for more information. + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Let's go!  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + + + + +  d Toggle dark mode  q Quit  From 27475f73ccb5dd4529f4e1b6a5f80841ee802b00 Mon Sep 17 00:00:00 2001 From: Joon-Klaps Date: Wed, 3 Jul 2024 11:16:29 +0000 Subject: [PATCH 235/737] add to subworkflow as well --- nf_core/__main__.py | 10 ++++++++++ nf_core/components/update.py | 2 +- nf_core/subworkflows/update.py | 2 ++ 3 files changed, 13 insertions(+), 1 deletion(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 86bcde86e..815c98c1e 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -2157,6 +2157,14 @@ def subworkflows_remove(ctx, dir, subworkflow): metavar="", help="Install subworkflow at commit SHA", ) +@click.option( + "-l", + "--limit-output", + "limit_output", + is_flag=True, + default=False, + help="Limit ouput to only the difference in main.nf", +) @click.option( "-a", "--all", @@ -2198,6 +2206,7 @@ def subworkflows_update( preview, save_diff, update_deps, + limit_output, ): """ Update DSL2 subworkflow within a pipeline. 
@@ -2219,6 +2228,7 @@ def subworkflows_update( ctx.obj["modules_repo_url"], ctx.obj["modules_repo_branch"], ctx.obj["modules_repo_no_pull"], + limit_output, ) exit_status = subworkflow_install.update(subworkflow) if not exit_status and install_all: diff --git a/nf_core/components/update.py b/nf_core/components/update.py index 3c3cdca6c..8beab7a92 100644 --- a/nf_core/components/update.py +++ b/nf_core/components/update.py @@ -40,7 +40,7 @@ def __init__( no_pull=False, limit_output=False, ): - super().__init__(component_type, pipeline_dir, remote_url, branch, no_pull) + super().__init__(component_type, pipeline_dir, remote_url, branch, no_pull, limit_output) self.force = force self.prompt = prompt self.sha = sha diff --git a/nf_core/subworkflows/update.py b/nf_core/subworkflows/update.py index 3cd4ad59f..9b6bf1692 100644 --- a/nf_core/subworkflows/update.py +++ b/nf_core/subworkflows/update.py @@ -15,6 +15,7 @@ def __init__( remote_url=None, branch=None, no_pull=False, + limit_output=False, ): super().__init__( pipeline_dir, @@ -29,4 +30,5 @@ def __init__( remote_url, branch, no_pull, + limit_output, ) From 2cb6946d100c0b61e238668b0b2b889decd64a28 Mon Sep 17 00:00:00 2001 From: Joon-Klaps Date: Wed, 3 Jul 2024 11:18:49 +0000 Subject: [PATCH 236/737] better changelog --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9355b3ca1..b01e13ce8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -39,7 +39,7 @@ - Add warning deprecation message to top-level commands ([#3036](https://github.com/nf-core/tools/pull/3036)) - Create: Mock git cretentials to generate stable textual snapshots ([#3007](https://github.com/nf-core/tools/pull/3007)) - Update python:3.12-slim Docker digest to da2d7af ([#3041](https://github.com/nf-core/tools/pull/3041)) -- add default limit_output argument for write_diff_file & print_diff ([#3047](https://github.com/nf-core/tools/pull/3047)) +- Add `--limit-output` argument for 
modules/subworkflow update ([#3047](https://github.com/nf-core/tools/pull/3047)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From f35b1f0f840b8324e0cf6c795538cd372cc5c99c Mon Sep 17 00:00:00 2001 From: Joon-Klaps Date: Wed, 3 Jul 2024 11:40:16 +0000 Subject: [PATCH 237/737] debug prints --- nf_core/components/update.py | 27 ++++++++++++++++++++------- 1 file changed, 20 insertions(+), 7 deletions(-) diff --git a/nf_core/components/update.py b/nf_core/components/update.py index 8beab7a92..72c053cfd 100644 --- a/nf_core/components/update.py +++ b/nf_core/components/update.py @@ -40,18 +40,20 @@ def __init__( no_pull=False, limit_output=False, ): - super().__init__(component_type, pipeline_dir, remote_url, branch, no_pull, limit_output) + super().__init__(component_type, pipeline_dir, remote_url, branch, no_pull) self.force = force self.prompt = prompt self.sha = sha self.update_all = update_all self.show_diff = show_diff self.save_diff_fn = save_diff_fn + self.limit_output = limit_output self.update_deps = update_deps self.component = None self.update_config = None self.modules_json = ModulesJson(self.dir) self.branch = branch + log.info(f"init component limit_output ={self.limit_output}") def _parameter_checks(self): """Checks the compatibilty of the supplied parameters. @@ -77,7 +79,7 @@ def _parameter_checks(self): if not self.has_valid_directory(): raise UserWarning("The command was not run in a valid pipeline directory.") - def update(self, component=None, silent=False, updated=None, check_diff_exist=True, limit_output=False) -> bool: + def update(self, component=None, silent=False, updated=None, check_diff_exist=True) -> bool: """Updates a specified module/subworkflow or all modules/subworkflows in a pipeline. If updating a subworkflow: updates all modules used in that subworkflow. 
@@ -232,7 +234,7 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr version, dsp_from_dir=component_dir, dsp_to_dir=component_dir, - limit_output=limit_output, + limit_output=self.limit_output, ) updated.append(component) except UserWarning as e: @@ -264,6 +266,7 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr self.manage_changes_in_linked_components(component, modules_to_update, subworkflows_to_update) elif self.show_diff: + log.info(f"limit_output ModulesDiffer: {self.limit_output}") ModulesDiffer.print_diff( component, modules_repo.repo_path, @@ -273,7 +276,7 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr version, dsp_from_dir=component_dir, dsp_to_dir=component_dir, - limit_output=limit_output, + limit_output=self.limit_output, ) # Ask the user if they want to install the component @@ -879,7 +882,11 @@ def get_components_to_update(self, component): return modules_to_update, subworkflows_to_update def update_linked_components( - self, modules_to_update, subworkflows_to_update, updated=None, check_diff_exist=True, limit_output=False + self, + modules_to_update, + subworkflows_to_update, + updated=None, + check_diff_exist=True, ): """ Update modules and subworkflows linked to the component being updated. 
@@ -889,7 +896,10 @@ def update_linked_components( continue original_component_type, original_update_all = self._change_component_type("subworkflows") self.update( - s_update, silent=True, updated=updated, check_diff_exist=check_diff_exist, limit_output=limit_output + s_update, + silent=True, + updated=updated, + check_diff_exist=check_diff_exist, ) self._reset_component_type(original_component_type, original_update_all) @@ -899,7 +909,10 @@ def update_linked_components( original_component_type, original_update_all = self._change_component_type("modules") try: self.update( - m_update, silent=True, updated=updated, check_diff_exist=check_diff_exist, limit_output=limit_output + m_update, + silent=True, + updated=updated, + check_diff_exist=check_diff_exist, ) except LookupError as e: # If the module to be updated is not available, check if there has been a name change From f7e830381e6e22e2c08083c1c3bb29df79c0841c Mon Sep 17 00:00:00 2001 From: Joon-Klaps Date: Wed, 3 Jul 2024 11:47:52 +0000 Subject: [PATCH 238/737] fix file suffix checking --- nf_core/components/update.py | 2 -- nf_core/modules/modules_differ.py | 4 ++-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/nf_core/components/update.py b/nf_core/components/update.py index 72c053cfd..37e3f6212 100644 --- a/nf_core/components/update.py +++ b/nf_core/components/update.py @@ -53,7 +53,6 @@ def __init__( self.update_config = None self.modules_json = ModulesJson(self.dir) self.branch = branch - log.info(f"init component limit_output ={self.limit_output}") def _parameter_checks(self): """Checks the compatibilty of the supplied parameters. 
@@ -266,7 +265,6 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr self.manage_changes_in_linked_components(component, modules_to_update, subworkflows_to_update) elif self.show_diff: - log.info(f"limit_output ModulesDiffer: {self.limit_output}") ModulesDiffer.print_diff( component, modules_repo.repo_path, diff --git a/nf_core/modules/modules_differ.py b/nf_core/modules/modules_differ.py index fb1fc31dc..527385770 100644 --- a/nf_core/modules/modules_differ.py +++ b/nf_core/modules/modules_differ.py @@ -179,7 +179,7 @@ def write_diff_file( if diff_status == ModulesDiffer.DiffEnum.UNCHANGED: # The files are identical fh.write(f"'{Path(dsp_from_dir, file)}' is unchanged\n") - elif limit_output and file != "main.nf": + elif limit_output and not (file.endswith(".nf")): # Skip printing the diff for files other than main.nf fh.write(f"Changes in '{Path(module, file)}' not shown\n") else: @@ -276,7 +276,7 @@ def print_diff( elif diff_status == ModulesDiffer.DiffEnum.REMOVED: # The file was removed between the commits log.info(f"'{Path(dsp_from_dir, file)}' was removed") - elif limit_output and file != "main.nf": + elif limit_output and not (file.endswith(".nf")): # Skip printing the diff for files other than main.nf log.info(f"Changes in '{Path(module, file)}' not shown") else: From bc8d096e71cebeb22d7e9ac7a10455901e715e5f Mon Sep 17 00:00:00 2001 From: Joon-Klaps Date: Wed, 3 Jul 2024 11:49:21 +0000 Subject: [PATCH 239/737] remove unnecessary() --- nf_core/modules/modules_differ.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/modules/modules_differ.py b/nf_core/modules/modules_differ.py index 527385770..7de68b9cc 100644 --- a/nf_core/modules/modules_differ.py +++ b/nf_core/modules/modules_differ.py @@ -179,7 +179,7 @@ def write_diff_file( if diff_status == ModulesDiffer.DiffEnum.UNCHANGED: # The files are identical fh.write(f"'{Path(dsp_from_dir, file)}' is unchanged\n") - elif limit_output and not 
(file.endswith(".nf")): + elif limit_output and not file.endswith(".nf"): # Skip printing the diff for files other than main.nf fh.write(f"Changes in '{Path(module, file)}' not shown\n") else: @@ -276,7 +276,7 @@ def print_diff( elif diff_status == ModulesDiffer.DiffEnum.REMOVED: # The file was removed between the commits log.info(f"'{Path(dsp_from_dir, file)}' was removed") - elif limit_output and not (file.endswith(".nf")): + elif limit_output and not file.endswith(".nf"): # Skip printing the diff for files other than main.nf log.info(f"Changes in '{Path(module, file)}' not shown") else: From e5e34aacbe20b77df6c9f178cbe220a8161b45a5 Mon Sep 17 00:00:00 2001 From: Joon-Klaps Date: Wed, 3 Jul 2024 11:55:25 +0000 Subject: [PATCH 240/737] use suffix instead --- nf_core/modules/modules_differ.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/modules/modules_differ.py b/nf_core/modules/modules_differ.py index 7de68b9cc..f97c7dce5 100644 --- a/nf_core/modules/modules_differ.py +++ b/nf_core/modules/modules_differ.py @@ -179,7 +179,7 @@ def write_diff_file( if diff_status == ModulesDiffer.DiffEnum.UNCHANGED: # The files are identical fh.write(f"'{Path(dsp_from_dir, file)}' is unchanged\n") - elif limit_output and not file.endswith(".nf"): + elif limit_output and not file.suffix == ".nf": # Skip printing the diff for files other than main.nf fh.write(f"Changes in '{Path(module, file)}' not shown\n") else: @@ -276,7 +276,7 @@ def print_diff( elif diff_status == ModulesDiffer.DiffEnum.REMOVED: # The file was removed between the commits log.info(f"'{Path(dsp_from_dir, file)}' was removed") - elif limit_output and not file.endswith(".nf"): + elif limit_output and not file.suffix == ".nf": # Skip printing the diff for files other than main.nf log.info(f"Changes in '{Path(module, file)}' not shown") else: From a61da2d98f1c24905d09a87e7e4efb83c6fbb35c Mon Sep 17 00:00:00 2001 From: Joon-Klaps Date: Wed, 3 Jul 2024 12:02:26 +0000 Subject: [PATCH 
241/737] add method argument docs --- nf_core/modules/modules_differ.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/nf_core/modules/modules_differ.py b/nf_core/modules/modules_differ.py index f97c7dce5..34966b2ff 100644 --- a/nf_core/modules/modules_differ.py +++ b/nf_core/modules/modules_differ.py @@ -155,6 +155,7 @@ def write_diff_file( adds a/ and b/ prefixes to the file paths dsp_from_dir (str | Path): The 'from' directory displayed in the diff dsp_to_dir (str | Path): The 'to' directory displayed in the diff + limit_output (bool): If true, don't write the diff for files other than main.nf """ if dsp_from_dir is None: dsp_from_dir = from_dir @@ -181,7 +182,7 @@ def write_diff_file( fh.write(f"'{Path(dsp_from_dir, file)}' is unchanged\n") elif limit_output and not file.suffix == ".nf": # Skip printing the diff for files other than main.nf - fh.write(f"Changes in '{Path(module, file)}' not shown\n") + fh.write(f"Changes in '{Path(module, file)}' but not shown\n") else: # The file has changed write the diff lines to the file for line in diff: @@ -249,6 +250,7 @@ def print_diff( new_version (str): The version of the module the diff is computed against dsp_from_dir (str | Path): The 'from' directory displayed in the diff dsp_to_dir (str | Path): The 'to' directory displayed in the diff + limit_output (bool): If true, don't print the diff for files other than main.nf """ if dsp_from_dir is None: dsp_from_dir = from_dir @@ -278,7 +280,7 @@ def print_diff( log.info(f"'{Path(dsp_from_dir, file)}' was removed") elif limit_output and not file.suffix == ".nf": # Skip printing the diff for files other than main.nf - log.info(f"Changes in '{Path(module, file)}' not shown") + log.info(f"Changes in '{Path(module, file)}' but not shown") else: # The file has changed log.info(f"Changes in '{Path(module, file)}':") From 5a2c09e67cfc814659f258ddd32bd842d4967d93 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Thu, 27 Jun 2024 12:02:43 +0200 Subject: 
[PATCH 242/737] move pipeline commands to functions to avoid duplication --- nf_core/__main__.py | 1111 +++++++++++++++++++------------------------ 1 file changed, 494 insertions(+), 617 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index ea0018e2a..06712acb7 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -196,40 +196,46 @@ def nf_core_cli(ctx, verbose, hide_progress, log_file): } -# nf-core pipelines subcommands -@nf_core_cli.group() -@click.pass_context -def pipelines(ctx): +## nf-core pipelines command functions ## + + +# nf-core pipelines bump-version +def pipelines_bump_version(ctx, new_version, dir, nextflow): """ - Commands to manage nf-core pipelines. + Update nf-core pipeline version number. + + The pipeline version number is mentioned in a lot of different places + in nf-core pipelines. This tool updates the version for you automatically, + so that you don't accidentally miss any. + + Should be used for each pipeline release, and again for the next + development version after release. + + As well as the pipeline version, you can also change the required version of Nextflow. 
""" - # ensure that ctx.obj exists and is a dict (in case `cli()` is called - # by means other than the `if` block below) - ctx.ensure_object(dict) + from nf_core.pipelines.bump_version import bump_nextflow_version, bump_pipeline_version + from nf_core.utils import Pipeline, is_pipeline_directory + + try: + # Check if pipeline directory contains necessary files + is_pipeline_directory(dir) + + # Make a pipeline object and load config etc + pipeline_obj = Pipeline(dir) + pipeline_obj._load() + + # Bump the pipeline version number + if not nextflow: + bump_pipeline_version(pipeline_obj, new_version) + else: + bump_nextflow_version(pipeline_obj, new_version) + except UserWarning as e: + log.error(e) + sys.exit(1) # nf-core pipelines create -@pipelines.command("create") -@click.pass_context -@click.option( - "-n", - "--name", - type=str, - help="The name of your new pipeline", -) -@click.option("-d", "--description", type=str, help="A short description of your pipeline") -@click.option("-a", "--author", type=str, help="Name of the main author(s)") -@click.option("--version", type=str, default="1.0.0dev", help="The initial version number to use") -@click.option("-f", "--force", is_flag=True, default=False, help="Overwrite output directory if it already exists") -@click.option("-o", "--outdir", help="Output directory for new pipeline (default: pipeline name)") -@click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") -@click.option( - "--organisation", - type=str, - default="nf-core", - help="The name of the GitHub organisation where the pipeline will be hosted (default: nf-core)", -) -def create_pipeline(ctx, name, description, author, version, force, outdir, template_yaml, organisation): +def pipelines_create(ctx, name, description, author, version, force, outdir, template_yaml, organisation): """ Create a new pipeline using the nf-core template. 
@@ -273,62 +279,7 @@ def create_pipeline(ctx, name, description, author, version, force, outdir, temp # nf-core pipelines lint -@pipelines.command("lint") -@click.option( - "-d", - "--dir", - type=click.Path(exists=True), - default=".", - help=r"Pipeline directory [dim]\[default: current working directory][/]", -) -@click.option( - "--release", - is_flag=True, - default=os.path.basename(os.path.dirname(os.environ.get("GITHUB_REF", "").strip(" '\""))) == "master" - and os.environ.get("GITHUB_REPOSITORY", "").startswith("nf-core/") - and not os.environ.get("GITHUB_REPOSITORY", "") == "nf-core/tools", - help="Execute additional checks for release-ready workflows.", -) -@click.option( - "-f", - "--fix", - type=str, - metavar="", - multiple=True, - help="Attempt to automatically fix specified lint test", -) -@click.option( - "-k", - "--key", - type=str, - metavar="", - multiple=True, - help="Run only these lint tests", -) -@click.option("-p", "--show-passed", is_flag=True, help="Show passing tests on the command line") -@click.option("-i", "--fail-ignored", is_flag=True, help="Convert ignored tests to failures") -@click.option("-w", "--fail-warned", is_flag=True, help="Convert warn tests to failures") -@click.option( - "--markdown", - type=str, - metavar="", - help="File to write linting results to (Markdown)", -) -@click.option( - "--json", - type=str, - metavar="", - help="File to write linting results to (JSON)", -) -@click.option( - "--sort-by", - type=click.Choice(["module", "test"]), - default="test", - help="Sort lint output by module or test name.", - show_default=True, -) -@click.pass_context -def lint_pipeline( +def pipelines_lint( ctx, dir, release, @@ -389,6 +340,394 @@ def lint_pipeline( sys.exit(1) +# nf-core pipelines download +def pipelines_download( + ctx, + pipeline, + revision, + outdir, + compress, + force, + tower, + platform, + download_configuration, + tag, + container_system, + container_library, + container_cache_utilisation, + 
container_cache_index, + parallel_downloads, +): + """ + Download a pipeline, nf-core/configs and pipeline singularity images. + + Collects all files in a single archive and configures the downloaded + workflow to use relative paths to the configs and singularity images. + """ + from nf_core.pipelines.download import DownloadWorkflow + + if tower: + log.warning("[red]The `-t` / `--tower` flag is deprecated. Please use `--platform` instead.[/]") + + dl = DownloadWorkflow( + pipeline, + revision, + outdir, + compress, + force, + tower or platform, # True if either specified + download_configuration, + tag, + container_system, + container_library, + container_cache_utilisation, + container_cache_index, + parallel_downloads, + ) + dl.download_workflow() + + +# nf-core pipelines create-params-file +def pipelines_create_params_file(ctx, pipeline, revision, output, force, show_hidden): + """ + Build a parameter file for a pipeline. + + Uses the pipeline schema file to generate a YAML parameters file. + Parameters are set to the pipeline defaults and descriptions are shown in comments. + After the output file is generated, it can then be edited as needed before + passing to nextflow using the `-params-file` option. + + Run using a remote pipeline name (such as GitHub `user/repo` or a URL), + a local pipeline directory. + """ + builder = ParamsFileBuilder(pipeline, revision) + + if not builder.write_params_file(output, show_hidden=show_hidden, force=force): + sys.exit(1) + + +# nf-core pipelines launch +def pipelines_launch( + ctx, + pipeline, + id, + revision, + command_only, + params_in, + params_out, + save_all, + show_hidden, + url, +): + """ + Launch a pipeline using a web GUI or command line prompts. + + Uses the pipeline schema file to collect inputs for all available pipeline + parameters. Parameter names, descriptions and help text are shown. + The pipeline schema is used to validate all inputs as they are entered. 
+ + When finished, saves a file with the selected parameters which can be + passed to Nextflow using the -params-file option. + + Run using a remote pipeline name (such as GitHub `user/repo` or a URL), + a local pipeline directory or an ID from the nf-core web launch tool. + """ + from nf_core.pipelines.launch import Launch + + launcher = Launch( + pipeline, + revision, + command_only, + params_in, + params_out, + save_all, + show_hidden, + url, + id, + ) + if not launcher.launch_pipeline(): + sys.exit(1) + + +# nf-core pipelines list +def pipelines_list(ctx, keywords, sort, json, show_archived): + """ + List available nf-core pipelines with local info. + + Checks the web for a list of nf-core pipelines with their latest releases. + Shows which nf-core pipelines you have pulled locally and whether they are up to date. + """ + from nf_core.pipelines.list import list_workflows + + stdout.print(list_workflows(keywords, sort, json, show_archived)) + + +# nf-core pipelines sync +def pipelines_sync(ctx, dir, from_branch, pull_request, github_repository, username, template_yaml, force_pr): + """ + Sync a pipeline [cyan i]TEMPLATE[/] branch with the nf-core template. + + To keep nf-core pipelines up to date with improvements in the main + template, we use a method of synchronisation that uses a special + git branch called [cyan i]TEMPLATE[/]. + + This command updates the [cyan i]TEMPLATE[/] branch with the latest version of + the nf-core template, so that these updates can be synchronised with + the pipeline. It is run automatically for all pipelines when ever a + new release of [link=https://github.com/nf-core/tools]nf-core/tools[/link] (and the included template) is made. 
+ """ + from nf_core.pipelines.sync import PipelineSync, PullRequestExceptionError, SyncExceptionError + from nf_core.utils import is_pipeline_directory + + # Check if pipeline directory contains necessary files + is_pipeline_directory(dir) + + # Sync the given pipeline dir + sync_obj = PipelineSync(dir, from_branch, pull_request, github_repository, username, template_yaml, force_pr) + try: + sync_obj.sync() + except (SyncExceptionError, PullRequestExceptionError) as e: + log.error(e) + sys.exit(1) + + +# nf-core pipelines create-logo +def pipelines_create_logo(logo_text, dir, name, theme, width, format, force): + """ + Generate a logo with the nf-core logo template. + + This command generates an nf-core pipeline logo, using the supplied + """ + from nf_core.pipelines.create_logo import create_logo + + try: + if dir == ".": + dir = Path.cwd() + logo_path = create_logo(logo_text, dir, name, theme, width, format, force) + # Print path to logo relative to current working directory + try: + logo_path = Path(logo_path).relative_to(Path.cwd()) + except ValueError: + logo_path = Path(logo_path) + log.info(f"Created logo: [magenta]{logo_path}[/]") + except UserWarning as e: + log.error(e) + sys.exit(1) + + +# nf-core pipelines schema validate +def pipelines_schema_validate(pipeline, params): + """ + Validate a set of parameters against a pipeline schema. + + Nextflow can be run using the -params-file flag, which loads + script parameters from a JSON file. + + This command takes such a file and validates it against the pipeline + schema, checking whether all schema rules are satisfied. 
+ """ + from nf_core.pipelines.schema import PipelineSchema + + schema_obj = PipelineSchema() + try: + schema_obj.get_schema_path(pipeline) + # Load and check schema + schema_obj.load_lint_schema() + except AssertionError as e: + log.error(e) + sys.exit(1) + schema_obj.load_input_params(params) + try: + schema_obj.validate_params() + except AssertionError: + sys.exit(1) + + +# nf-core pipelines schema build +def pipelines_schema_build(dir, no_prompts, web_only, url): + """ + Interactively build a pipeline schema from Nextflow params. + + Automatically detects parameters from the pipeline config and main.nf and + compares these to the pipeline schema. Prompts to add or remove parameters + if the two do not match one another. + + Once all parameters are accounted for, can launch a web GUI tool on the + https://nf-co.re website where you can annotate and organise parameters. + Listens for this to be completed and saves the updated schema. + """ + from nf_core.pipelines.schema import PipelineSchema + + try: + schema_obj = PipelineSchema() + if schema_obj.build_schema(dir, no_prompts, web_only, url) is False: + sys.exit(1) + except (UserWarning, AssertionError) as e: + log.error(e) + sys.exit(1) + + +# nf-core pipelines schema lint +def pipelines_schema_lint(schema_path): + """ + Check that a given pipeline schema is valid. + + Checks whether the pipeline schema validates as JSON Schema Draft 7 + and adheres to the additional nf-core pipelines schema requirements. + + This function runs as part of the nf-core pipelines lint command, this is a convenience + command that does just the schema linting nice and quickly. + + If no schema path is provided, "nextflow_schema.json" will be used (if it exists). 
+ """ + from nf_core.pipelines.schema import PipelineSchema + + schema_obj = PipelineSchema() + try: + schema_obj.get_schema_path(schema_path) + schema_obj.load_lint_schema() + # Validate title and description - just warnings as schema should still work fine + try: + schema_obj.validate_schema_title_description() + except AssertionError as e: + log.warning(e) + except AssertionError: + sys.exit(1) + + +# nf-core pipelines schema docs +def pipelines_schema_docs(schema_path, output, format, force, columns): + """ + Outputs parameter documentation for a pipeline schema. + """ + if not os.path.exists(schema_path): + log.error("Could not find 'nextflow_schema.json' in current directory. Please specify a path.") + sys.exit(1) + + from nf_core.pipelines.schema import PipelineSchema + + schema_obj = PipelineSchema() + # Assume we're in a pipeline dir root if schema path not set + schema_obj.get_schema_path(schema_path) + schema_obj.load_schema() + schema_obj.print_documentation(output, format, force, columns.split(",")) + + +# nf-core pipelines subcommands +@nf_core_cli.group() +@click.pass_context +def pipelines(ctx): + """ + Commands to manage nf-core pipelines. 
+ """ + # ensure that ctx.obj exists and is a dict (in case `cli()` is called + # by means other than the `if` block below) + ctx.ensure_object(dict) + + +# nf-core pipelines create +@pipelines.command("create") +@click.pass_context +@click.option( + "-n", + "--name", + type=str, + help="The name of your new pipeline", +) +@click.option("-d", "--description", type=str, help="A short description of your pipeline") +@click.option("-a", "--author", type=str, help="Name of the main author(s)") +@click.option("--version", type=str, default="1.0.0dev", help="The initial version number to use") +@click.option("-f", "--force", is_flag=True, default=False, help="Overwrite output directory if it already exists") +@click.option("-o", "--outdir", help="Output directory for new pipeline (default: pipeline name)") +@click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") +@click.option( + "--organisation", + type=str, + default="nf-core", + help="The name of the GitHub organisation where the pipeline will be hosted (default: nf-core)", +) +def command_pipelines_create(ctx, name, description, author, version, force, outdir, template_yaml, organisation): + """ + Create a new pipeline using the nf-core template. 
+ """ + pipelines_create(ctx, name, description, author, version, force, outdir, template_yaml, organisation) + + +# nf-core pipelines lint +@pipelines.command("lint") +@click.option( + "-d", + "--dir", + type=click.Path(exists=True), + default=".", + help=r"Pipeline directory [dim]\[default: current working directory][/]", +) +@click.option( + "--release", + is_flag=True, + default=os.path.basename(os.path.dirname(os.environ.get("GITHUB_REF", "").strip(" '\""))) == "master" + and os.environ.get("GITHUB_REPOSITORY", "").startswith("nf-core/") + and not os.environ.get("GITHUB_REPOSITORY", "") == "nf-core/tools", + help="Execute additional checks for release-ready workflows.", +) +@click.option( + "-f", + "--fix", + type=str, + metavar="", + multiple=True, + help="Attempt to automatically fix specified lint test", +) +@click.option( + "-k", + "--key", + type=str, + metavar="", + multiple=True, + help="Run only these lint tests", +) +@click.option("-p", "--show-passed", is_flag=True, help="Show passing tests on the command line") +@click.option("-i", "--fail-ignored", is_flag=True, help="Convert ignored tests to failures") +@click.option("-w", "--fail-warned", is_flag=True, help="Convert warn tests to failures") +@click.option( + "--markdown", + type=str, + metavar="", + help="File to write linting results to (Markdown)", +) +@click.option( + "--json", + type=str, + metavar="", + help="File to write linting results to (JSON)", +) +@click.option( + "--sort-by", + type=click.Choice(["module", "test"]), + default="test", + help="Sort lint output by module or test name.", + show_default=True, +) +@click.pass_context +def command_pipelines_lint( + ctx, + dir, + release, + fix, + key, + show_passed, + fail_ignored, + fail_warned, + markdown, + json, + sort_by, +): + """ + Check pipeline code against nf-core guidelines. 
+ """ + pipelines_lint(ctx, dir, release, fix, key, show_passed, fail_ignored, fail_warned, markdown, json, sort_by) + + # nf-core pipelines download @pipelines.command("download") @click.argument("pipeline", required=False, metavar="") @@ -467,7 +806,7 @@ def lint_pipeline( help="Number of parallel image downloads", ) @click.pass_context -def download_pipeline( +def command_pipelines_download( ctx, pipeline, revision, @@ -486,22 +825,16 @@ def download_pipeline( ): """ Download a pipeline, nf-core/configs and pipeline singularity images. - - Collects all files in a single archive and configures the downloaded - workflow to use relative paths to the configs and singularity images. """ - from nf_core.pipelines.download import DownloadWorkflow - - if tower: - log.warning("[red]The `-t` / `--tower` flag is deprecated. Please use `--platform` instead.[/]") - - dl = DownloadWorkflow( + pipelines_download( + ctx, pipeline, revision, outdir, compress, force, - tower or platform, # True if either specified + tower, + platform, download_configuration, tag, container_system, @@ -510,7 +843,6 @@ def download_pipeline( container_cache_index, parallel_downloads, ) - dl.download_workflow() # nf-core pipelines create-params-file @@ -534,22 +866,11 @@ def download_pipeline( help="Show hidden params which don't normally need changing", ) @click.pass_context -def create_params_file_pipeline(ctx, pipeline, revision, output, force, show_hidden): +def command_pipelines_create_params_file(ctx, pipeline, revision, output, force, show_hidden): """ Build a parameter file for a pipeline. - - Uses the pipeline schema file to generate a YAML parameters file. - Parameters are set to the pipeline defaults and descriptions are shown in comments. - After the output file is generated, it can then be edited as needed before - passing to nextflow using the `-params-file` option. - - Run using a remote pipeline name (such as GitHub `user/repo` or a URL), - a local pipeline directory. 
""" - builder = ParamsFileBuilder(pipeline, revision) - - if not builder.write_params_file(output, show_hidden=show_hidden, force=force): - sys.exit(1) + pipelines_create_params_file(ctx, pipeline, revision, output, force, show_hidden) # nf-core pipelines launch @@ -599,7 +920,7 @@ def create_params_file_pipeline(ctx, pipeline, revision, output, force, show_hid help="Customise the builder URL (for development work)", ) @click.pass_context -def launch_pipeline( +def command_pipelines_launch( ctx, pipeline, id, @@ -613,32 +934,8 @@ def launch_pipeline( ): """ Launch a pipeline using a web GUI or command line prompts. - - Uses the pipeline schema file to collect inputs for all available pipeline - parameters. Parameter names, descriptions and help text are shown. - The pipeline schema is used to validate all inputs as they are entered. - - When finished, saves a file with the selected parameters which can be - passed to Nextflow using the -params-file option. - - Run using a remote pipeline name (such as GitHub `user/repo` or a URL), - a local pipeline directory or an ID from the nf-core web launch tool. """ - from nf_core.pipelines.launch import Launch - - launcher = Launch( - pipeline, - revision, - command_only, - params_in, - params_out, - save_all, - show_hidden, - url, - id, - ) - if not launcher.launch_pipeline(): - sys.exit(1) + pipelines_launch(ctx, pipeline, id, revision, command_only, params_in, params_out, save_all, show_hidden, url) # nf-core pipelines list @@ -654,16 +951,11 @@ def launch_pipeline( @click.option("--json", is_flag=True, default=False, help="Print full output as JSON") @click.option("--show-archived", is_flag=True, default=False, help="Print archived workflows") @click.pass_context -def list_pipelines(ctx, keywords, sort, json, show_archived): +def command_pipelines_list(ctx, keywords, sort, json, show_archived): """ List available nf-core pipelines with local info. 
- - Checks the web for a list of nf-core pipelines with their latest releases. - Shows which nf-core pipelines you have pulled locally and whether they are up to date. """ - from nf_core.pipelines.list import list_workflows - - stdout.print(list_workflows(keywords, sort, json, show_archived)) + pipelines_list(ctx, keywords, sort, json, show_archived) # nf-core pipelines sync @@ -698,32 +990,11 @@ def list_pipelines(ctx, keywords, sort, json, show_archived): @click.option("-g", "--github-repository", type=str, help="GitHub PR: target repository.") @click.option("-u", "--username", type=str, help="GitHub PR: auth username.") @click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") -def sync_pipeline(ctx, dir, from_branch, pull_request, github_repository, username, template_yaml, force_pr): +def command_pipelines_sync(ctx, dir, from_branch, pull_request, github_repository, username, template_yaml, force_pr): """ Sync a pipeline [cyan i]TEMPLATE[/] branch with the nf-core template. - - To keep nf-core pipelines up to date with improvements in the main - template, we use a method of synchronisation that uses a special - git branch called [cyan i]TEMPLATE[/]. - - This command updates the [cyan i]TEMPLATE[/] branch with the latest version of - the nf-core template, so that these updates can be synchronised with - the pipeline. It is run automatically for all pipelines when ever a - new release of [link=https://github.com/nf-core/tools]nf-core/tools[/link] (and the included template) is made. 
""" - from nf_core.pipelines.sync import PipelineSync, PullRequestExceptionError, SyncExceptionError - from nf_core.utils import is_pipeline_directory - - # Check if pipeline directory contains necessary files - is_pipeline_directory(dir) - - # Sync the given pipeline dir - sync_obj = PipelineSync(dir, from_branch, pull_request, github_repository, username, template_yaml, force_pr) - try: - sync_obj.sync() - except (SyncExceptionError, PullRequestExceptionError) as e: - log.error(e) - sys.exit(1) + pipelines_sync(ctx, dir, from_branch, pull_request, github_repository, username, template_yaml, force_pr) # nf-core pipelines bump-version @@ -744,38 +1015,11 @@ def sync_pipeline(ctx, dir, from_branch, pull_request, github_repository, userna default=False, help="Bump required nextflow version instead of pipeline version", ) -def bump_version_pipeline(ctx, new_version, dir, nextflow): +def command_pipelines_bump_version(ctx, new_version, dir, nextflow): """ - Update nf-core pipeline version number. - - The pipeline version number is mentioned in a lot of different places - in nf-core pipelines. This tool updates the version for you automatically, - so that you don't accidentally miss any. - - Should be used for each pipeline release, and again for the next - development version after release. - - As well as the pipeline version, you can also change the required version of Nextflow. + Update nf-core pipeline version number with `nf-core pipelines bump-version`. 
""" - from nf_core.pipelines.bump_version import bump_nextflow_version, bump_pipeline_version - from nf_core.utils import Pipeline, is_pipeline_directory - - try: - # Check if pipeline directory contains necessary files - is_pipeline_directory(dir) - - # Make a pipeline object and load config etc - pipeline_obj = Pipeline(dir) - pipeline_obj._load() - - # Bump the pipeline version number - if not nextflow: - bump_pipeline_version(pipeline_obj, new_version) - else: - bump_nextflow_version(pipeline_obj, new_version) - except UserWarning as e: - log.error(e) - sys.exit(1) + pipelines_bump_version(ctx, new_version, dir, nextflow) # nf-core pipelines create-logo @@ -816,50 +1060,11 @@ def bump_version_pipeline(ctx, new_version, dir, nextflow): default=False, help="Overwrite any files if they already exist", ) -def logo_pipeline(logo_text, dir, name, theme, width, format, force): +def command_pipelines_create_logo(logo_text, dir, name, theme, width, format, force): """ Generate a logo with the nf-core logo template. - - This command generates an nf-core pipeline logo, using the supplied - """ - from nf_core.pipelines.create_logo import create_logo - - try: - if dir == ".": - dir = Path.cwd() - logo_path = create_logo(logo_text, dir, name, theme, width, format, force) - # Print path to logo relative to current working directory - try: - logo_path = Path(logo_path).relative_to(Path.cwd()) - except ValueError: - logo_path = Path(logo_path) - log.info(f"Created logo: [magenta]{logo_path}[/]") - except UserWarning as e: - log.error(e) - sys.exit(1) - - -# nf-core licences -@nf_core_cli.command() -@click.argument("pipeline", required=True, metavar="") -@click.option("--json", is_flag=True, default=False, help="Print output in JSON") -def licences(pipeline, json): - """ - List software licences for a given workflow (DSL1 only). - - Checks the pipeline environment.yml file which lists all conda software packages, which is not available for DSL2 workflows. 
Therefore, this command only supports DSL1 workflows (for now). - Each of these is queried against the anaconda.org API to find the licence. - Package name, version and licence is printed to the command line. """ - from nf_core.licences import WorkflowLicences - - lic = WorkflowLicences(pipeline) - lic.as_json = json - try: - stdout.print(lic.run_licences()) - except LookupError as e: - log.error(e) - sys.exit(1) + pipelines_create_logo(logo_text, dir, name, theme, width, format, force) # nf-core pipelines schema subcommands @@ -878,31 +1083,11 @@ def pipeline_schema(): @pipeline_schema.command("validate") @click.argument("pipeline", required=True, metavar="") @click.argument("params", type=click.Path(exists=True), required=True, metavar="") -def validate_schema(pipeline, params): +def command_pipelines_schema_validate(pipeline, params): """ Validate a set of parameters against a pipeline schema. - - Nextflow can be run using the -params-file flag, which loads - script parameters from a JSON file. - - This command takes such a file and validates it against the pipeline - schema, checking whether all schema rules are satisfied. """ - from nf_core.pipelines.schema import PipelineSchema - - schema_obj = PipelineSchema() - try: - schema_obj.get_schema_path(pipeline) - # Load and check schema - schema_obj.load_lint_schema() - except AssertionError as e: - log.error(e) - sys.exit(1) - schema_obj.load_input_params(params) - try: - schema_obj.validate_params() - except AssertionError: - sys.exit(1) + pipelines_schema_validate(pipeline, params) # nf-core pipelines schema build @@ -930,27 +1115,11 @@ def validate_schema(pipeline, params): default="https://nf-co.re/pipeline_schema_builder", help="Customise the builder URL (for development work)", ) -def build_schema(dir, no_prompts, web_only, url): +def command_pipelines_schema_build(dir, no_prompts, web_only, url): """ Interactively build a pipeline schema from Nextflow params. 
- - Automatically detects parameters from the pipeline config and main.nf and - compares these to the pipeline schema. Prompts to add or remove parameters - if the two do not match one another. - - Once all parameters are accounted for, can launch a web GUI tool on the - https://nf-co.re website where you can annotate and organise parameters. - Listens for this to be completed and saves the updated schema. """ - from nf_core.pipelines.schema import PipelineSchema - - try: - schema_obj = PipelineSchema() - if schema_obj.build_schema(dir, no_prompts, web_only, url) is False: - sys.exit(1) - except (UserWarning, AssertionError) as e: - log.error(e) - sys.exit(1) + pipelines_schema_build(dir, no_prompts, web_only, url) # nf-core pipelines schema lint @@ -961,31 +1130,11 @@ def build_schema(dir, no_prompts, web_only, url): default="nextflow_schema.json", metavar="", ) -def lint_schema(schema_path): +def command_pipelines_schema_lint(schema_path): """ Check that a given pipeline schema is valid. - - Checks whether the pipeline schema validates as JSON Schema Draft 7 - and adheres to the additional nf-core pipelines schema requirements. - - This function runs as part of the nf-core pipelines lint command, this is a convenience - command that does just the schema linting nice and quickly. - - If no schema path is provided, "nextflow_schema.json" will be used (if it exists). 
""" - from nf_core.pipelines.schema import PipelineSchema - - schema_obj = PipelineSchema() - try: - schema_obj.get_schema_path(schema_path) - schema_obj.load_lint_schema() - # Validate title and description - just warnings as schema should still work fine - try: - schema_obj.validate_schema_title_description() - except AssertionError as e: - log.warning(e) - except AssertionError: - sys.exit(1) + pipelines_schema_lint(schema_path) # nf-core pipelines schema docs @@ -1020,21 +1169,11 @@ def lint_schema(schema_path): help="CSV list of columns to include in the parameter tables (parameter,description,type,default,required,hidden)", default="parameter,description,type,default,required,hidden", ) -def docs_schema(schema_path, output, format, force, columns): +def command_pipelines_schema_docs(schema_path, output, format, force, columns): """ Outputs parameter documentation for a pipeline schema. """ - if not os.path.exists(schema_path): - log.error("Could not find 'nextflow_schema.json' in current directory. Please specify a path.") - sys.exit(1) - - from nf_core.pipelines.schema import PipelineSchema - - schema_obj = PipelineSchema() - # Assume we're in a pipeline dir root if schema path not set - schema_obj.get_schema_path(schema_path) - schema_obj.load_schema() - schema_obj.print_documentation(output, format, force, columns.split(",")) + pipelines_schema_docs(schema_path, output, format, force, columns) # nf-core modules subcommands @@ -2225,10 +2364,7 @@ def subworkflows_update( @nf_core_cli.group(deprecated=True, hidden=True) def schema(): """ - Suite of tools for developers to manage pipeline schema. - - All nf-core pipelines should have a nextflow_schema.json file in their - root directory that describes the different pipeline parameters. 
+ DEPRECATED """ pass @@ -2237,35 +2373,14 @@ def schema(): @schema.command("validate", deprecated=True) @click.argument("pipeline", required=True, metavar="") @click.argument("params", type=click.Path(exists=True), required=True, metavar="") -def validate(pipeline, params): +def command_schema_validate(pipeline, params): """ DEPRECATED - Validate a set of parameters against a pipeline schema. - - Nextflow can be run using the -params-file flag, which loads - script parameters from a JSON file. - - This command takes such a file and validates it against the pipeline - schema, checking whether all schema rules are satisfied. """ log.warning( "The `[magenta]nf-core schema validate[/]` command is deprecated. Use `[magenta]nf-core pipelines schema validate[/]` instead." ) - from nf_core.pipelines.schema import PipelineSchema - - schema_obj = PipelineSchema() - try: - schema_obj.get_schema_path(pipeline) - # Load and check schema - schema_obj.load_lint_schema() - except AssertionError as e: - log.error(e) - sys.exit(1) - schema_obj.load_input_params(params) - try: - schema_obj.validate_params() - except AssertionError: - sys.exit(1) + pipelines_schema_validate(pipeline, params) # nf-core schema build (deprecated) @@ -2293,31 +2408,14 @@ def validate(pipeline, params): default="https://nf-co.re/pipeline_schema_builder", help="Customise the builder URL (for development work)", ) -def build(dir, no_prompts, web_only, url): +def command_schema_build(dir, no_prompts, web_only, url): """ DEPRECATED - Interactively build a pipeline schema from Nextflow params. - - Automatically detects parameters from the pipeline config and main.nf and - compares these to the pipeline schema. Prompts to add or remove parameters - if the two do not match one another. - - Once all parameters are accounted for, can launch a web GUI tool on the - https://nf-co.re website where you can annotate and organise parameters. - Listens for this to be completed and saves the updated schema. 
""" log.warning( "The `[magenta]nf-core schema build[/]` command is deprecated. Use `[magenta]nf-core pipelines schema build[/]` instead." ) - from nf_core.pipelines.schema import PipelineSchema - - try: - schema_obj = PipelineSchema() - if schema_obj.build_schema(dir, no_prompts, web_only, url) is False: - sys.exit(1) - except (UserWarning, AssertionError) as e: - log.error(e) - sys.exit(1) + pipelines_schema_build(dir, no_prompts, web_only, url) # nf-core schema lint (deprecated) @@ -2328,35 +2426,14 @@ def build(dir, no_prompts, web_only, url): default="nextflow_schema.json", metavar="", ) -def schema_lint(schema_path): +def command_schema_lint(schema_path): """ DEPRECATED - Check that a given pipeline schema is valid. - - Checks whether the pipeline schema validates as JSON Schema Draft 7 - and adheres to the additional nf-core schema requirements. - - This function runs as part of the nf-core lint command, this is a convenience - command that does just the schema linting nice and quickly. - - If no schema path is provided, "nextflow_schema.json" will be used (if it exists). """ log.warning( "The `[magenta]nf-core schema lint[/]` command is deprecated. Use `[magenta]nf-core pipelines schema lint[/]` instead." 
) - from nf_core.pipelines.schema import PipelineSchema - - schema_obj = PipelineSchema() - try: - schema_obj.get_schema_path(schema_path) - schema_obj.load_lint_schema() - # Validate title and description - just warnings as schema should still work fine - try: - schema_obj.validate_schema_title_description() - except AssertionError as e: - log.warning(e) - except AssertionError: - sys.exit(1) + pipelines_schema_lint(schema_path) # nf-core schema docs (deprecated) @@ -2391,25 +2468,14 @@ def schema_lint(schema_path): help="CSV list of columns to include in the parameter tables (parameter,description,type,default,required,hidden)", default="parameter,description,type,default,required,hidden", ) -def docs(schema_path, output, format, force, columns): +def command_schema_docs(schema_path, output, format, force, columns): """ DEPRECATED - Outputs parameter documentation for a pipeline schema. """ log.warning( "The `[magenta]nf-core schema docs[/]` command is deprecated. Use `[magenta]nf-core pipelines schema docs[/]` instead." ) - if not os.path.exists(schema_path): - log.error("Could not find 'nextflow_schema.json' in current directory. Please specify a path.") - sys.exit(1) - - from nf_core.pipelines.schema import PipelineSchema - - schema_obj = PipelineSchema() - # Assume we're in a pipeline dir root if schema path not set - schema_obj.get_schema_path(schema_path) - schema_obj.load_schema() - schema_obj.print_documentation(output, format, force, columns.split(",")) + pipelines_schema_docs(schema_path, output, format, force, columns) # nf-core create-logo (deprecated) @@ -2450,35 +2516,18 @@ def docs(schema_path, output, format, force, columns): default=False, help="Overwrite any files if they already exist", ) -def logo(logo_text, dir, name, theme, width, format, force): +def command_create_logo(logo_text, dir, name, theme, width, format, force): """ DEPRECATED - Generate a logo with the nf-core logo template. 
- - This command generates an nf-core pipeline logo, using the supplied """ log.warning( "The `[magenta]nf-core create-logo[/]` command is deprecated. Use `[magenta]nf-core pipelines screate-logo[/]` instead." ) - from nf_core.pipelines.create_logo import create_logo - - try: - if dir == ".": - dir = Path.cwd() - logo_path = create_logo(logo_text, dir, name, theme, width, format, force) - # Print path to logo relative to current working directory - try: - logo_path = Path(logo_path).relative_to(Path.cwd()) - except ValueError: - logo_path = Path(logo_path) - log.info(f"Created logo: [magenta]{logo_path}[/]") - except UserWarning as e: - log.error(e) - sys.exit(1) + pipelines_create_logo(logo_text, dir, name, theme, width, format, force) # nf-core sync (deprecated) -@nf_core_cli.command(hidden=True, deprecated=True) +@nf_core_cli.command("sync", hidden=True, deprecated=True) @click.option( "-d", "--dir", @@ -2508,40 +2557,19 @@ def logo(logo_text, dir, name, theme, width, format, force): @click.option("-g", "--github-repository", type=str, help="GitHub PR: target repository.") @click.option("-u", "--username", type=str, help="GitHub PR: auth username.") @click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") -def sync(dir, from_branch, pull_request, github_repository, username, template_yaml, force_pr): +def command_sync(dir, from_branch, pull_request, github_repository, username, template_yaml, force_pr): """ DEPRECATED - Sync a pipeline [cyan i]TEMPLATE[/] branch with the nf-core template. - - To keep nf-core pipelines up to date with improvements in the main - template, we use a method of synchronisation that uses a special - git branch called [cyan i]TEMPLATE[/]. - - This command updates the [cyan i]TEMPLATE[/] branch with the latest version of - the nf-core template, so that these updates can be synchronised with - the pipeline. 
It is run automatically for all pipelines when ever a - new release of [link=https://github.com/nf-core/tools]nf-core/tools[/link] (and the included template) is made. """ log.warning( "The `[magenta]nf-core sync[/]` command is deprecated. Use `[magenta]nf-core pipelines sync[/]` instead." ) - from nf_core.pipelines.sync import PipelineSync, PullRequestExceptionError, SyncExceptionError - from nf_core.utils import is_pipeline_directory - - # Check if pipeline directory contains necessary files - is_pipeline_directory(dir) - - # Sync the given pipeline dir - sync_obj = PipelineSync(dir, from_branch, pull_request, github_repository, username, template_yaml, force_pr) - try: - sync_obj.sync() - except (SyncExceptionError, PullRequestExceptionError) as e: - log.error(e) - sys.exit(1) + pipelines_sync(dir, from_branch, pull_request, github_repository, username, template_yaml, force_pr) # nf-core bump-version (deprecated) -@nf_core_cli.command(hidden=True, deprecated=True) +@nf_core_cli.command("bump-version", hidden=True, deprecated=True) +@click.pass_context @click.argument("new_version", default="") @click.option( "-d", @@ -2557,42 +2585,14 @@ def sync(dir, from_branch, pull_request, github_repository, username, template_y default=False, help="Bump required nextflow version instead of pipeline version", ) -def bump_version(new_version, dir, nextflow): +def command_bump_version(ctx, new_version, dir, nextflow): """ DEPRECATED - Update nf-core pipeline version number. - - The pipeline version number is mentioned in a lot of different places - in nf-core pipelines. This tool updates the version for you automatically, - so that you don't accidentally miss any. - - Should be used for each pipeline release, and again for the next - development version after release. - - As well as the pipeline version, you can also change the required version of Nextflow. """ log.warning( "The `[magenta]nf-core bump-version[/]` command is deprecated. 
Use `[magenta]nf-core pipelines bump-version[/]` instead." ) - from nf_core.pipelines.bump_version import bump_nextflow_version, bump_pipeline_version - from nf_core.utils import Pipeline, is_pipeline_directory - - try: - # Check if pipeline directory contains necessary files - is_pipeline_directory(dir) - - # Make a pipeline object and load config etc - pipeline_obj = Pipeline(dir) - pipeline_obj._load() - - # Bump the pipeline version number - if not nextflow: - bump_pipeline_version(pipeline_obj, new_version) - else: - bump_nextflow_version(pipeline_obj, new_version) - except UserWarning as e: - log.error(e) - sys.exit(1) + pipelines_bump_version(ctx, new_version, dir, nextflow) # nf-core list (deprecated) @@ -2607,24 +2607,19 @@ def bump_version(new_version, dir, nextflow): ) @click.option("--json", is_flag=True, default=False, help="Print full output as JSON") @click.option("--show-archived", is_flag=True, default=False, help="Print archived workflows") -def list(keywords, sort, json, show_archived): +@click.pass_context +def command_list(ctx, keywords, sort, json, show_archived): """ DEPRECATED - List available nf-core pipelines with local info. - - Checks the web for a list of nf-core pipelines with their latest releases. - Shows which nf-core pipelines you have pulled locally and whether they are up to date. """ log.warning( "The `[magenta]nf-core list[/]` command is deprecated. Use `[magenta]nf-core pipelines list[/]` instead." 
) - from nf_core.pipelines.list import list_workflows - - stdout.print(list_workflows(keywords, sort, json, show_archived)) + pipelines_list(ctx, keywords, sort, json, show_archived) # nf-core launch (deprecated) -@nf_core_cli.command(deprecated=True, hidden=True) +@nf_core_cli.command("launch", deprecated=True, hidden=True) @click.argument("pipeline", required=False, metavar="") @click.option("-r", "--revision", help="Release/branch/SHA of the project to run (if remote)") @click.option("-i", "--id", help="ID for web-gui launch parameter set") @@ -2669,7 +2664,9 @@ def list(keywords, sort, json, show_archived): default="https://nf-co.re/launch", help="Customise the builder URL (for development work)", ) -def launch( +@click.pass_context +def command_launch( + ctx, pipeline, id, revision, @@ -2682,40 +2679,15 @@ def launch( ): """ DEPRECATED - Launch a pipeline using a web GUI or command line prompts. - - Uses the pipeline schema file to collect inputs for all available pipeline - parameters. Parameter names, descriptions and help text are shown. - The pipeline schema is used to validate all inputs as they are entered. - - When finished, saves a file with the selected parameters which can be - passed to Nextflow using the -params-file option. - - Run using a remote pipeline name (such as GitHub `user/repo` or a URL), - a local pipeline directory or an ID from the nf-core web launch tool. """ log.warning( "The `[magenta]nf-core launch[/]` command is deprecated. Use `[magenta]nf-core pipelines launch[/]` instead." 
) - from nf_core.pipelines.launch import Launch - - launcher = Launch( - pipeline, - revision, - command_only, - params_in, - params_out, - save_all, - show_hidden, - url, - id, - ) - if not launcher.launch_pipeline(): - sys.exit(1) + pipelines_launch(ctx, pipeline, id, revision, command_only, params_in, params_out, save_all, show_hidden, url) # nf-core create-params-file (deprecated) -@nf_core_cli.command(deprecated=True, hidden=True) +@nf_core_cli.command("create-params-file", deprecated=True, hidden=True) @click.argument("pipeline", required=False, metavar="") @click.option("-r", "--revision", help="Release/branch/SHA of the pipeline (if remote)") @click.option( @@ -2734,30 +2706,18 @@ def launch( default=False, help="Show hidden params which don't normally need changing", ) -def create_params_file(pipeline, revision, output, force, show_hidden): +def command_create_params_file(pipeline, revision, output, force, show_hidden): """ DEPRECATED - Build a parameter file for a pipeline. - - Uses the pipeline schema file to generate a YAML parameters file. - Parameters are set to the pipeline defaults and descriptions are shown in comments. - After the output file is generated, it can then be edited as needed before - passing to nextflow using the `-params-file` option. - - Run using a remote pipeline name (such as GitHub `user/repo` or a URL), - a local pipeline directory. """ log.warning( "The `[magenta]nf-core create-params-file[/]` command is deprecated. Use `[magenta]nf-core pipelines create-params-file[/]` instead." 
) - builder = ParamsFileBuilder(pipeline, revision) - - if not builder.write_params_file(output, show_hidden=show_hidden, force=force): - sys.exit(1) + pipelines_create_params_file(pipeline, revision, output, force, show_hidden) # nf-core download (deprecated) -@nf_core_cli.command(deprecated=True, hidden=True) +@nf_core_cli.command("download", deprecated=True, hidden=True) @click.argument("pipeline", required=False, metavar="") @click.option( "-r", @@ -2830,7 +2790,9 @@ def create_params_file(pipeline, revision, output, force, show_hidden): default=4, help="Number of parallel image downloads", ) -def download( +@click.pass_context +def command_download( + ctx, pipeline, revision, outdir, @@ -2848,26 +2810,19 @@ def download( ): """ DEPRECATED - Download a pipeline, nf-core/configs and pipeline singularity images. - - Collects all files in a single archive and configures the downloaded - workflow to use relative paths to the configs and singularity images. """ log.warning( "The `[magenta]nf-core download[/]` command is deprecated. Use `[magenta]nf-core pipelines download[/]` instead." ) - from nf_core.pipelines.download import DownloadWorkflow - - if tower: - log.warning("[red]The `-t` / `--tower` flag is deprecated. Please use `--platform` instead.[/]") - - dl = DownloadWorkflow( + pipelines_download( + ctx, pipeline, revision, outdir, compress, force, - tower or platform, # True if either specified + tower, + platform, download_configuration, tag, container_system, @@ -2876,11 +2831,10 @@ def download( container_cache_index, parallel_downloads, ) - dl.download_workflow() # nf-core lint (deprecated) -@nf_core_cli.command(hidden=True, deprecated=True) +@nf_core_cli.command("lint", hidden=True, deprecated=True) @click.option( "-d", "--dir", @@ -2935,7 +2889,7 @@ def download( show_default=True, ) @click.pass_context -def lint( +def command_lint( ctx, dir, release, @@ -2950,58 +2904,15 @@ def lint( ): """ DEPRECATED - Check pipeline code against nf-core guidelines. 
- - Runs a large number of automated tests to ensure that the supplied pipeline - meets the nf-core guidelines. Documentation of all lint tests can be found - on the nf-core website: [link=https://nf-co.re/tools/docs/]https://nf-co.re/tools/docs/[/] - - You can ignore tests using a file called [blue].nf-core.yml[/] [i](if you have a good reason!)[/]. - See the documentation for details. """ log.warning( "The `[magenta]nf-core lint[/]` command is deprecated. Use `[magenta]nf-core pipelines lint[/]` instead." ) - from nf_core.pipelines.lint import run_linting - from nf_core.utils import is_pipeline_directory - - # Check if pipeline directory is a pipeline - try: - is_pipeline_directory(dir) - except UserWarning as e: - log.error(e) - sys.exit(1) - - # Run the lint tests! - try: - lint_obj, module_lint_obj, subworkflow_lint_obj = run_linting( - dir, - release, - fix, - key, - show_passed, - fail_ignored, - fail_warned, - sort_by, - markdown, - json, - ctx.obj["hide_progress"], - ) - swf_failed = 0 - if subworkflow_lint_obj is not None: - swf_failed = len(subworkflow_lint_obj.failed) - if len(lint_obj.failed) + len(module_lint_obj.failed) + swf_failed > 0: - sys.exit(1) - except AssertionError as e: - log.critical(e) - sys.exit(1) - except UserWarning as e: - log.error(e) - sys.exit(1) + pipelines_lint(ctx, dir, release, fix, key, show_passed, fail_ignored, fail_warned, markdown, json, sort_by) # nf-core create (deprecated) -@nf_core_cli.command(hidden=True, deprecated=True) +@nf_core_cli.command("create", hidden=True, deprecated=True) @click.option( "-n", "--name", @@ -3021,49 +2932,15 @@ def lint( default="nf-core", help="The name of the GitHub organisation where the pipeline will be hosted (default: nf-core)", ) -def create(name, description, author, version, force, outdir, template_yaml, plain, organisation): +@click.pass_context +def command_create(ctx, name, description, author, version, force, outdir, template_yaml, plain, organisation): """ DEPRECATED - Create 
a new pipeline using the nf-core template. - - Uses the nf-core template to make a skeleton Nextflow pipeline with all required - files, boilerplate code and best-practices. """ log.warning( "The `[magenta]nf-core create[/]` command is deprecated. Use `[magenta]nf-core pipelines create[/]` instead." ) - from nf_core.pipelines.create import PipelineCreateApp - from nf_core.pipelines.create.create import PipelineCreate - - if (name and description and author) or (template_yaml): - # If all command arguments are used, run without the interactive interface - try: - create_obj = PipelineCreate( - name, - description, - author, - version=version, - force=force, - outdir=outdir, - template_config=template_yaml, - organisation=organisation, - ) - create_obj.init_pipeline() - except UserWarning as e: - log.error(e) - sys.exit(1) - elif name or description or author or version != "1.0.0dev" or force or outdir or organisation != "nf-core": - log.error( - "[red]Partial arguments supplied.[/] " - "Run without [i]any[/] arguments for an interactive interface, " - "or with at least name + description + author to use non-interactively." 
- ) - sys.exit(1) - else: - log.info("Launching interactive nf-core pipeline creation tool.") - app = PipelineCreateApp() - app.run() - sys.exit(app.return_code or 0) + pipelines_create(ctx, name, description, author, version, force, outdir, template_yaml, plain, organisation) # Main script is being run - launch the CLI From d52c1afe959a9dcd50c20bddf9a17cef3d146426 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Thu, 27 Jun 2024 15:16:51 +0000 Subject: [PATCH 243/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index fa079aaa8..170ee8511 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -38,6 +38,7 @@ - Update pre-commit hook astral-sh/ruff-pre-commit to v0.4.10 ([#3031](https://github.com/nf-core/tools/pull/3031)) - Add warning deprecation message to top-level commands ([#3036](https://github.com/nf-core/tools/pull/3036)) - Create: Mock git cretentials to generate stable textual snapshots ([#3007](https://github.com/nf-core/tools/pull/3007)) +- move pipeline commands to functions to avoid duplication ([#3039](https://github.com/nf-core/tools/pull/3039)) - Create app: display input textbox with equally spaced grid ([#3038](https://github.com/nf-core/tools/pull/3038)) - Update python:3.12-slim Docker digest to da2d7af ([#3041](https://github.com/nf-core/tools/pull/3041)) From 9b965e1981309e46b336fdb927b321c702dbc536 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 3 Jul 2024 16:22:22 +0200 Subject: [PATCH 244/737] move command functions to separate files --- nf_core/__main__.py | 969 ++++--------------------------- nf_core/commands_modules.py | 340 +++++++++++ nf_core/commands_pipelines.py | 429 ++++++++++++++ nf_core/commands_subworkflows.py | 259 +++++++++ 4 files changed, 1127 insertions(+), 870 deletions(-) create mode 100644 nf_core/commands_modules.py create mode 100644 nf_core/commands_pipelines.py create mode 100644 nf_core/commands_subworkflows.py diff --git 
a/nf_core/__main__.py b/nf_core/__main__.py index 06712acb7..f9d6a6aeb 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -4,7 +4,6 @@ import logging import os import sys -from pathlib import Path import rich import rich.console @@ -14,9 +13,47 @@ from trogon import tui from nf_core import __version__ +from nf_core.commands_modules import ( + modules_bump_versions, + modules_create, + modules_info, + modules_install, + modules_lint, + modules_list_local, + modules_list_remote, + modules_patch, + modules_remove, + modules_test, + modules_update, +) +from nf_core.commands_pipelines import ( + pipelines_bump_version, + pipelines_create, + pipelines_create_logo, + pipelines_create_params_file, + pipelines_download, + pipelines_launch, + pipelines_lint, + pipelines_list, + pipelines_schema_build, + pipelines_schema_docs, + pipelines_schema_lint, + pipelines_schema_validate, + pipelines_sync, +) +from nf_core.commands_subworkflows import ( + subworkflows_create, + subworkflows_info, + subworkflows_install, + subworkflows_lint, + subworkflows_list_local, + subworkflows_list_remote, + subworkflows_remove, + subworkflows_test, + subworkflows_update, +) from nf_core.modules.modules_repo import NF_CORE_MODULES_REMOTE from nf_core.pipelines.download import DownloadError -from nf_core.pipelines.params_file import ParamsFileBuilder from nf_core.utils import check_if_outdated, nfcore_logo, rich_force_colors, setup_nfcore_dir # Set up logging as the root logger @@ -196,423 +233,6 @@ def nf_core_cli(ctx, verbose, hide_progress, log_file): } -## nf-core pipelines command functions ## - - -# nf-core pipelines bump-version -def pipelines_bump_version(ctx, new_version, dir, nextflow): - """ - Update nf-core pipeline version number. - - The pipeline version number is mentioned in a lot of different places - in nf-core pipelines. This tool updates the version for you automatically, - so that you don't accidentally miss any. 
- - Should be used for each pipeline release, and again for the next - development version after release. - - As well as the pipeline version, you can also change the required version of Nextflow. - """ - from nf_core.pipelines.bump_version import bump_nextflow_version, bump_pipeline_version - from nf_core.utils import Pipeline, is_pipeline_directory - - try: - # Check if pipeline directory contains necessary files - is_pipeline_directory(dir) - - # Make a pipeline object and load config etc - pipeline_obj = Pipeline(dir) - pipeline_obj._load() - - # Bump the pipeline version number - if not nextflow: - bump_pipeline_version(pipeline_obj, new_version) - else: - bump_nextflow_version(pipeline_obj, new_version) - except UserWarning as e: - log.error(e) - sys.exit(1) - - -# nf-core pipelines create -def pipelines_create(ctx, name, description, author, version, force, outdir, template_yaml, organisation): - """ - Create a new pipeline using the nf-core template. - - Uses the nf-core template to make a skeleton Nextflow pipeline with all required - files, boilerplate code and best-practices. - \n\n - Run without any command line arguments to use an interactive interface. 
- """ - from nf_core.pipelines.create import PipelineCreateApp - from nf_core.pipelines.create.create import PipelineCreate - - if (name and description and author) or (template_yaml): - # If all command arguments are used, run without the interactive interface - try: - create_obj = PipelineCreate( - name, - description, - author, - version=version, - force=force, - outdir=outdir, - template_config=template_yaml, - organisation=organisation, - ) - create_obj.init_pipeline() - except UserWarning as e: - log.error(e) - sys.exit(1) - elif name or description or author or version != "1.0.0dev" or force or outdir or organisation != "nf-core": - log.error( - "[red]Partial arguments supplied.[/] " - "Run without [i]any[/] arguments for an interactive interface, " - "or with at least name + description + author to use non-interactively." - ) - sys.exit(1) - else: - log.info("Launching interactive nf-core pipeline creation tool.") - app = PipelineCreateApp() - app.run() - sys.exit(app.return_code or 0) - - -# nf-core pipelines lint -def pipelines_lint( - ctx, - dir, - release, - fix, - key, - show_passed, - fail_ignored, - fail_warned, - markdown, - json, - sort_by, -): - """ - Check pipeline code against nf-core guidelines. - - Runs a large number of automated tests to ensure that the supplied pipeline - meets the nf-core guidelines. Documentation of all lint tests can be found - on the nf-core website: [link=https://nf-co.re/tools/docs/]https://nf-co.re/tools/docs/[/] - - You can ignore tests using a file called [blue].nf-core.yml[/] [i](if you have a good reason!)[/]. - See the documentation for details. - """ - from nf_core.pipelines.lint import run_linting - from nf_core.utils import is_pipeline_directory - - # Check if pipeline directory is a pipeline - try: - is_pipeline_directory(dir) - except UserWarning as e: - log.error(e) - sys.exit(1) - - # Run the lint tests! 
- try: - lint_obj, module_lint_obj, subworkflow_lint_obj = run_linting( - dir, - release, - fix, - key, - show_passed, - fail_ignored, - fail_warned, - sort_by, - markdown, - json, - ctx.obj["hide_progress"], - ) - swf_failed = 0 - if subworkflow_lint_obj is not None: - swf_failed = len(subworkflow_lint_obj.failed) - if len(lint_obj.failed) + len(module_lint_obj.failed) + swf_failed > 0: - sys.exit(1) - except AssertionError as e: - log.critical(e) - sys.exit(1) - except UserWarning as e: - log.error(e) - sys.exit(1) - - -# nf-core pipelines download -def pipelines_download( - ctx, - pipeline, - revision, - outdir, - compress, - force, - tower, - platform, - download_configuration, - tag, - container_system, - container_library, - container_cache_utilisation, - container_cache_index, - parallel_downloads, -): - """ - Download a pipeline, nf-core/configs and pipeline singularity images. - - Collects all files in a single archive and configures the downloaded - workflow to use relative paths to the configs and singularity images. - """ - from nf_core.pipelines.download import DownloadWorkflow - - if tower: - log.warning("[red]The `-t` / `--tower` flag is deprecated. Please use `--platform` instead.[/]") - - dl = DownloadWorkflow( - pipeline, - revision, - outdir, - compress, - force, - tower or platform, # True if either specified - download_configuration, - tag, - container_system, - container_library, - container_cache_utilisation, - container_cache_index, - parallel_downloads, - ) - dl.download_workflow() - - -# nf-core pipelines create-params-file -def pipelines_create_params_file(ctx, pipeline, revision, output, force, show_hidden): - """ - Build a parameter file for a pipeline. - - Uses the pipeline schema file to generate a YAML parameters file. - Parameters are set to the pipeline defaults and descriptions are shown in comments. - After the output file is generated, it can then be edited as needed before - passing to nextflow using the `-params-file` option. 
- - Run using a remote pipeline name (such as GitHub `user/repo` or a URL), - a local pipeline directory. - """ - builder = ParamsFileBuilder(pipeline, revision) - - if not builder.write_params_file(output, show_hidden=show_hidden, force=force): - sys.exit(1) - - -# nf-core pipelines launch -def pipelines_launch( - ctx, - pipeline, - id, - revision, - command_only, - params_in, - params_out, - save_all, - show_hidden, - url, -): - """ - Launch a pipeline using a web GUI or command line prompts. - - Uses the pipeline schema file to collect inputs for all available pipeline - parameters. Parameter names, descriptions and help text are shown. - The pipeline schema is used to validate all inputs as they are entered. - - When finished, saves a file with the selected parameters which can be - passed to Nextflow using the -params-file option. - - Run using a remote pipeline name (such as GitHub `user/repo` or a URL), - a local pipeline directory or an ID from the nf-core web launch tool. - """ - from nf_core.pipelines.launch import Launch - - launcher = Launch( - pipeline, - revision, - command_only, - params_in, - params_out, - save_all, - show_hidden, - url, - id, - ) - if not launcher.launch_pipeline(): - sys.exit(1) - - -# nf-core pipelines list -def pipelines_list(ctx, keywords, sort, json, show_archived): - """ - List available nf-core pipelines with local info. - - Checks the web for a list of nf-core pipelines with their latest releases. - Shows which nf-core pipelines you have pulled locally and whether they are up to date. - """ - from nf_core.pipelines.list import list_workflows - - stdout.print(list_workflows(keywords, sort, json, show_archived)) - - -# nf-core pipelines sync -def pipelines_sync(ctx, dir, from_branch, pull_request, github_repository, username, template_yaml, force_pr): - """ - Sync a pipeline [cyan i]TEMPLATE[/] branch with the nf-core template. 
- - To keep nf-core pipelines up to date with improvements in the main - template, we use a method of synchronisation that uses a special - git branch called [cyan i]TEMPLATE[/]. - - This command updates the [cyan i]TEMPLATE[/] branch with the latest version of - the nf-core template, so that these updates can be synchronised with - the pipeline. It is run automatically for all pipelines when ever a - new release of [link=https://github.com/nf-core/tools]nf-core/tools[/link] (and the included template) is made. - """ - from nf_core.pipelines.sync import PipelineSync, PullRequestExceptionError, SyncExceptionError - from nf_core.utils import is_pipeline_directory - - # Check if pipeline directory contains necessary files - is_pipeline_directory(dir) - - # Sync the given pipeline dir - sync_obj = PipelineSync(dir, from_branch, pull_request, github_repository, username, template_yaml, force_pr) - try: - sync_obj.sync() - except (SyncExceptionError, PullRequestExceptionError) as e: - log.error(e) - sys.exit(1) - - -# nf-core pipelines create-logo -def pipelines_create_logo(logo_text, dir, name, theme, width, format, force): - """ - Generate a logo with the nf-core logo template. - - This command generates an nf-core pipeline logo, using the supplied - """ - from nf_core.pipelines.create_logo import create_logo - - try: - if dir == ".": - dir = Path.cwd() - logo_path = create_logo(logo_text, dir, name, theme, width, format, force) - # Print path to logo relative to current working directory - try: - logo_path = Path(logo_path).relative_to(Path.cwd()) - except ValueError: - logo_path = Path(logo_path) - log.info(f"Created logo: [magenta]{logo_path}[/]") - except UserWarning as e: - log.error(e) - sys.exit(1) - - -# nf-core pipelines schema validate -def pipelines_schema_validate(pipeline, params): - """ - Validate a set of parameters against a pipeline schema. - - Nextflow can be run using the -params-file flag, which loads - script parameters from a JSON file. 
- - This command takes such a file and validates it against the pipeline - schema, checking whether all schema rules are satisfied. - """ - from nf_core.pipelines.schema import PipelineSchema - - schema_obj = PipelineSchema() - try: - schema_obj.get_schema_path(pipeline) - # Load and check schema - schema_obj.load_lint_schema() - except AssertionError as e: - log.error(e) - sys.exit(1) - schema_obj.load_input_params(params) - try: - schema_obj.validate_params() - except AssertionError: - sys.exit(1) - - -# nf-core pipelines schema build -def pipelines_schema_build(dir, no_prompts, web_only, url): - """ - Interactively build a pipeline schema from Nextflow params. - - Automatically detects parameters from the pipeline config and main.nf and - compares these to the pipeline schema. Prompts to add or remove parameters - if the two do not match one another. - - Once all parameters are accounted for, can launch a web GUI tool on the - https://nf-co.re website where you can annotate and organise parameters. - Listens for this to be completed and saves the updated schema. - """ - from nf_core.pipelines.schema import PipelineSchema - - try: - schema_obj = PipelineSchema() - if schema_obj.build_schema(dir, no_prompts, web_only, url) is False: - sys.exit(1) - except (UserWarning, AssertionError) as e: - log.error(e) - sys.exit(1) - - -# nf-core pipelines schema lint -def pipelines_schema_lint(schema_path): - """ - Check that a given pipeline schema is valid. - - Checks whether the pipeline schema validates as JSON Schema Draft 7 - and adheres to the additional nf-core pipelines schema requirements. - - This function runs as part of the nf-core pipelines lint command, this is a convenience - command that does just the schema linting nice and quickly. - - If no schema path is provided, "nextflow_schema.json" will be used (if it exists). 
- """ - from nf_core.pipelines.schema import PipelineSchema - - schema_obj = PipelineSchema() - try: - schema_obj.get_schema_path(schema_path) - schema_obj.load_lint_schema() - # Validate title and description - just warnings as schema should still work fine - try: - schema_obj.validate_schema_title_description() - except AssertionError as e: - log.warning(e) - except AssertionError: - sys.exit(1) - - -# nf-core pipelines schema docs -def pipelines_schema_docs(schema_path, output, format, force, columns): - """ - Outputs parameter documentation for a pipeline schema. - """ - if not os.path.exists(schema_path): - log.error("Could not find 'nextflow_schema.json' in current directory. Please specify a path.") - sys.exit(1) - - from nf_core.pipelines.schema import PipelineSchema - - schema_obj = PipelineSchema() - # Assume we're in a pipeline dir root if schema path not set - schema_obj.get_schema_path(schema_path) - schema_obj.load_schema() - schema_obj.print_documentation(output, format, force, columns.split(",")) - - # nf-core pipelines subcommands @nf_core_cli.group() @click.pass_context @@ -1229,24 +849,11 @@ def modules_list(ctx): @click.pass_context @click.argument("keywords", required=False, nargs=-1, metavar="") @click.option("-j", "--json", is_flag=True, help="Print as JSON to stdout") -def modules_list_remote(ctx, keywords, json): +def command_modules_list_remote(ctx, keywords, json): """ List modules in a remote GitHub repo [dim i](e.g [link=https://github.com/nf-core/modules]nf-core/modules[/])[/]. 
""" - from nf_core.modules import ModuleList - - try: - module_list = ModuleList( - None, - True, - ctx.obj["modules_repo_url"], - ctx.obj["modules_repo_branch"], - ctx.obj["modules_repo_no_pull"], - ) - stdout.print(module_list.list_components(keywords, json)) - except (UserWarning, LookupError) as e: - log.critical(e) - sys.exit(1) + modules_list_remote(ctx, keywords, json) # nf-core modules list local @@ -1261,24 +868,11 @@ def modules_list_remote(ctx, keywords, json): default=".", help=r"Pipeline directory. [dim]\[default: Current working directory][/]", ) -def modules_list_local(ctx, keywords, json, dir): # pylint: disable=redefined-builtin +def command_modules_list_local(ctx, keywords, json, dir): # pylint: disable=redefined-builtin """ List modules installed locally in a pipeline """ - from nf_core.modules import ModuleList - - try: - module_list = ModuleList( - dir, - False, - ctx.obj["modules_repo_url"], - ctx.obj["modules_repo_branch"], - ctx.obj["modules_repo_no_pull"], - ) - stdout.print(module_list.list_components(keywords, json)) - except (UserWarning, LookupError) as e: - log.error(e) - sys.exit(1) + modules_list_local(ctx, keywords, json, dir) # nf-core modules install @@ -1307,30 +901,11 @@ def modules_list_local(ctx, keywords, json, dir): # pylint: disable=redefined-b help="Force reinstallation of module if it already exists", ) @click.option("-s", "--sha", type=str, metavar="", help="Install module at commit SHA") -def modules_install(ctx, tool, dir, prompt, force, sha): +def command_modules_install(ctx, tool, dir, prompt, force, sha): """ Install DSL2 modules within a pipeline. - - Fetches and installs module files from a remote repo e.g. nf-core/modules. 
""" - from nf_core.modules import ModuleInstall - - try: - module_install = ModuleInstall( - dir, - force, - prompt, - sha, - ctx.obj["modules_repo_url"], - ctx.obj["modules_repo_branch"], - ctx.obj["modules_repo_no_pull"], - ) - exit_status = module_install.install(tool) - if not exit_status: - sys.exit(1) - except (UserWarning, LookupError) as e: - log.error(e) - sys.exit(1) + modules_install(ctx, tool, dir, prompt, force, sha) # nf-core modules update @@ -1384,7 +959,7 @@ def modules_install(ctx, tool, dir, prompt, force, sha): default=False, help="Automatically update all linked modules and subworkflows without asking for confirmation", ) -def modules_update( +def command_modules_update( ctx, tool, directory, @@ -1398,35 +973,12 @@ def modules_update( ): """ Update DSL2 modules within a pipeline. - - Fetches and updates module files from a remote repo e.g. nf-core/modules. - """ - from nf_core.modules import ModuleUpdate - - try: - module_install = ModuleUpdate( - directory, - force, - prompt, - sha, - install_all, - preview, - save_diff, - update_deps, - ctx.obj["modules_repo_url"], - ctx.obj["modules_repo_branch"], - ctx.obj["modules_repo_no_pull"], - ) - exit_status = module_install.update(tool) - if not exit_status and install_all: - sys.exit(1) - except (UserWarning, LookupError) as e: - log.error(e) - sys.exit(1) + """ + modules_update(ctx, tool, directory, force, prompt, sha, install_all, preview, save_diff, update_deps) # nf-core modules patch -@modules.command() +@modules.command("patch") @click.pass_context @click.argument("tool", type=str, callback=normalize_case, required=False, metavar=" or ") @click.option( @@ -1437,29 +989,11 @@ def modules_update( help=r"Pipeline directory. 
[dim]\[default: current working directory][/]", ) @click.option("-r", "--remove", is_flag=True, default=False) -def patch(ctx, tool, dir, remove): +def command_modules_patch(ctx, tool, dir, remove): """ Create a patch file for minor changes in a module - - Checks if a module has been modified locally and creates a patch file - describing how the module has changed from the remote version """ - from nf_core.modules import ModulePatch - - try: - module_patch = ModulePatch( - dir, - ctx.obj["modules_repo_url"], - ctx.obj["modules_repo_branch"], - ctx.obj["modules_repo_no_pull"], - ) - if remove: - module_patch.remove(tool) - else: - module_patch.patch(tool) - except (UserWarning, LookupError) as e: - log.error(e) - sys.exit(1) + modules_patch(ctx, tool, dir, remove) # nf-core modules remove @@ -1473,23 +1007,11 @@ def patch(ctx, tool, dir, remove): default=".", help=r"Pipeline directory. [dim]\[default: current working directory][/]", ) -def modules_remove(ctx, dir, tool): +def command_modules_remove(ctx, dir, tool): """ Remove a module from a pipeline. """ - from nf_core.modules import ModuleRemove - - try: - module_remove = ModuleRemove( - dir, - ctx.obj["modules_repo_url"], - ctx.obj["modules_repo_branch"], - ctx.obj["modules_repo_no_pull"], - ) - module_remove.remove(tool) - except (UserWarning, LookupError) as e: - log.critical(e) - sys.exit(1) + modules_remove(ctx, dir, tool) # nf-core modules create @@ -1559,7 +1081,7 @@ def modules_remove(ctx, dir, tool): default=False, help="Migrate a module with pytest tests to nf-test", ) -def create_module( +def command_modules_create( ctx, tool, dir, @@ -1575,45 +1097,21 @@ def create_module( ): """ Create a new DSL2 module from the nf-core template. 
- - If the specified directory is a pipeline, this function creates a file called - 'modules/local/tool_subtool.nf' - - If the specified directory is a clone of nf-core/modules, it creates or modifies files - in 'modules/', 'tests/modules' and 'tests/config/pytest_modules.yml' - """ - # Combine two bool flags into one variable - has_meta = None - if meta and no_meta: - log.critical("Both arguments '--meta' and '--no-meta' given. Please pick one.") - elif meta: - has_meta = True - elif no_meta: - has_meta = False - - from nf_core.modules import ModuleCreate - - # Run function - try: - module_create = ModuleCreate( - dir, - tool, - author, - label, - has_meta, - force, - conda_name, - conda_package_version, - empty_template, - migrate_pytest, - ) - module_create.create() - except UserWarning as e: - log.critical(e) - sys.exit(1) - except LookupError as e: - log.error(e) - sys.exit(1) + """ + modules_create( + ctx, + tool, + dir, + author, + label, + meta, + no_meta, + force, + conda_name, + conda_package_version, + empty_template, + migrate_pytest, + ) # nf-core modules test @@ -1648,31 +1146,11 @@ def create_module( default=None, help="Run tests with a specific profile", ) -def test_module(ctx, tool, dir, no_prompts, update, once, profile): +def command_modules_test(ctx, tool, dir, no_prompts, update, once, profile): """ Run nf-test for a module. - - Given the name of a module, runs the nf-test command to test the module and generate snapshots. 
- """ - from nf_core.components.components_test import ComponentsTest - - try: - module_tester = ComponentsTest( - component_type="modules", - component_name=tool, - directory=dir, - no_prompts=no_prompts, - update=update, - once=once, - remote_url=ctx.obj["modules_repo_url"], - branch=ctx.obj["modules_repo_branch"], - verbose=ctx.obj["verbose"], - profile=profile, - ) - module_tester.run() - except (UserWarning, LookupError) as e: - log.critical(e) - sys.exit(1) + """ + modules_test(ctx, tool, dir, no_prompts, update, once, profile) # nf-core modules lint @@ -1718,48 +1196,11 @@ def test_module(ctx, tool, dir, no_prompts, update, once, profile): is_flag=True, help="Fix the module version if a newer version is available", ) -def modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed, sort_by, fix_version): +def command_modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed, sort_by, fix_version): """ Lint one or more modules in a directory. - - Checks DSL2 module code against nf-core guidelines to ensure - that all modules follow the same standards. - - Test modules within a pipeline or a clone of the - nf-core/modules repository. 
""" - from nf_core.components.lint import LintExceptionError - from nf_core.modules import ModuleLint - - try: - module_lint = ModuleLint( - dir, - fail_warned=fail_warned, - registry=ctx.params["registry"], - remote_url=ctx.obj["modules_repo_url"], - branch=ctx.obj["modules_repo_branch"], - no_pull=ctx.obj["modules_repo_no_pull"], - hide_progress=ctx.obj["hide_progress"], - ) - module_lint.lint( - module=tool, - registry=registry, - key=key, - all_modules=all, - print_results=True, - local=local, - show_passed=passed, - sort_by=sort_by, - fix_version=fix_version, - ) - if len(module_lint.failed) > 0: - sys.exit(1) - except LintExceptionError as e: - log.error(e) - sys.exit(1) - except (UserWarning, LookupError) as e: - log.critical(e) - sys.exit(1) + modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed, sort_by, fix_version) # nf-core modules info @@ -1773,36 +1214,15 @@ def modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed, default=".", help=r"Pipeline directory. [dim]\[default: Current working directory][/]", ) -def modules_info(ctx, tool, dir): +def command_modules_info(ctx, tool, dir): """ Show developer usage information about a given module. - - Parses information from a module's [i]meta.yml[/] and renders help - on the command line. A handy equivalent to searching the - [link=https://nf-co.re/modules]nf-core website[/]. - - If run from a pipeline and a local copy of the module is found, the command - will print this usage info. - If not, usage from the remote modules repo will be shown. 
""" - from nf_core.modules import ModuleInfo - - try: - module_info = ModuleInfo( - dir, - tool, - ctx.obj["modules_repo_url"], - ctx.obj["modules_repo_branch"], - ctx.obj["modules_repo_no_pull"], - ) - stdout.print(module_info.get_component_info()) - except (UserWarning, LookupError) as e: - log.error(e) - sys.exit(1) + modules_info(ctx, tool, dir) # nf-core modules bump-versions -@modules.command() +@modules.command("bump-versions") @click.pass_context @click.argument("tool", type=str, callback=normalize_case, required=False, metavar=" or ") @click.option( @@ -1814,28 +1234,12 @@ def modules_info(ctx, tool, dir): ) @click.option("-a", "--all", is_flag=True, help="Run on all modules") @click.option("-s", "--show-all", is_flag=True, help="Show up-to-date modules in results too") -def bump_versions(ctx, tool, dir, all, show_all): +def command_modules_bump_versions(ctx, tool, dir, all, show_all): """ Bump versions for one or more modules in a clone of the nf-core/modules repo. """ - from nf_core.modules.bump_versions import ModuleVersionBumper - from nf_core.modules.modules_utils import ModuleExceptionError - - try: - version_bumper = ModuleVersionBumper( - dir, - ctx.obj["modules_repo_url"], - ctx.obj["modules_repo_branch"], - ctx.obj["modules_repo_no_pull"], - ) - version_bumper.bump_versions(module=tool, all_modules=all, show_uptodate=show_all) - except ModuleExceptionError as e: - log.error(e) - sys.exit(1) - except (UserWarning, LookupError) as e: - log.critical(e) - sys.exit(1) + modules_bump_versions(ctx, tool, dir, all, show_all) # nf-core subworkflows click command @@ -1901,28 +1305,11 @@ def subworkflows(ctx, git_remote, branch, no_pull): default=False, help="Migrate a module with pytest tests to nf-test", ) -def create_subworkflow(ctx, subworkflow, dir, author, force, migrate_pytest): +def command_subworkflows_create(ctx, subworkflow, dir, author, force, migrate_pytest): """ Create a new subworkflow from the nf-core template. 
- - If the specified directory is a pipeline, this function creates a file called - 'subworkflows/local/.nf' - - If the specified directory is a clone of nf-core/modules, it creates or modifies files - in 'subworkflows/', 'tests/subworkflows' and 'tests/config/pytest_modules.yml' """ - from nf_core.subworkflows import SubworkflowCreate - - # Run function - try: - subworkflow_create = SubworkflowCreate(dir, subworkflow, author, force, migrate_pytest) - subworkflow_create.create() - except UserWarning as e: - log.critical(e) - sys.exit(1) - except LookupError as e: - log.error(e) - sys.exit(1) + subworkflows_create(ctx, subworkflow, dir, author, force, migrate_pytest) # nf-core subworkflows test @@ -1957,31 +1344,11 @@ def create_subworkflow(ctx, subworkflow, dir, author, force, migrate_pytest): default=None, help="Run tests with a specific profile", ) -def test_subworkflow(ctx, subworkflow, dir, no_prompts, update, once, profile): +def command_subworkflows_test(ctx, subworkflow, dir, no_prompts, update, once, profile): """ Run nf-test for a subworkflow. - - Given the name of a subworkflow, runs the nf-test command to test the subworkflow and generate snapshots. 
- """ - from nf_core.components.components_test import ComponentsTest - - try: - sw_tester = ComponentsTest( - component_type="subworkflows", - component_name=subworkflow, - directory=dir, - no_prompts=no_prompts, - update=update, - once=once, - remote_url=ctx.obj["modules_repo_url"], - branch=ctx.obj["modules_repo_branch"], - verbose=ctx.obj["verbose"], - profile=profile, - ) - sw_tester.run() - except (UserWarning, LookupError) as e: - log.critical(e) - sys.exit(1) + """ + subworkflows_test(ctx, subworkflow, dir, no_prompts, update, once, profile) # nf-core subworkflows list subcommands @@ -1999,25 +1366,11 @@ def subworkflows_list(ctx): @click.pass_context @click.argument("keywords", required=False, nargs=-1, metavar="") @click.option("-j", "--json", is_flag=True, help="Print as JSON to stdout") -def subworkflows_list_remote(ctx, keywords, json): +def command_subworkflows_list_remote(ctx, keywords, json): """ List subworkflows in a remote GitHub repo [dim i](e.g [link=https://github.com/nf-core/modules]nf-core/modules[/])[/]. """ - from nf_core.subworkflows import SubworkflowList - - try: - subworkflow_list = SubworkflowList( - None, - True, - ctx.obj["modules_repo_url"], - ctx.obj["modules_repo_branch"], - ctx.obj["modules_repo_no_pull"], - ) - - stdout.print(subworkflow_list.list_components(keywords, json)) - except (UserWarning, LookupError) as e: - log.critical(e) - sys.exit(1) + subworkflows_list_remote(ctx, keywords, json) # nf-core subworkflows list local @@ -2032,24 +1385,11 @@ def subworkflows_list_remote(ctx, keywords, json): default=".", help=r"Pipeline directory. 
[dim]\[default: Current working directory][/]", ) -def subworkflows_list_local(ctx, keywords, json, dir): # pylint: disable=redefined-builtin +def command_subworkflows_list_local(ctx, keywords, json, dir): # pylint: disable=redefined-builtin """ List subworkflows installed locally in a pipeline """ - from nf_core.subworkflows import SubworkflowList - - try: - subworkflow_list = SubworkflowList( - dir, - False, - ctx.obj["modules_repo_url"], - ctx.obj["modules_repo_branch"], - ctx.obj["modules_repo_no_pull"], - ) - stdout.print(subworkflow_list.list_components(keywords, json)) - except (UserWarning, LookupError) as e: - log.error(e) - sys.exit(1) + subworkflows_list_local(ctx, keywords, json, dir) # nf-core subworkflows lint @@ -2090,47 +1430,11 @@ def subworkflows_list_local(ctx, keywords, json, dir): # pylint: disable=redefi help="Sort lint output by subworkflow or test name.", show_default=True, ) -def subworkflows_lint(ctx, subworkflow, dir, registry, key, all, fail_warned, local, passed, sort_by): +def command_subworkflows_lint(ctx, subworkflow, dir, registry, key, all, fail_warned, local, passed, sort_by): """ Lint one or more subworkflows in a directory. - - Checks DSL2 subworkflow code against nf-core guidelines to ensure - that all subworkflows follow the same standards. - - Test subworkflows within a pipeline or a clone of the - nf-core/modules repository. 
""" - from nf_core.components.lint import LintExceptionError - from nf_core.subworkflows import SubworkflowLint - - try: - subworkflow_lint = SubworkflowLint( - dir, - fail_warned=fail_warned, - registry=ctx.params["registry"], - remote_url=ctx.obj["modules_repo_url"], - branch=ctx.obj["modules_repo_branch"], - no_pull=ctx.obj["modules_repo_no_pull"], - hide_progress=ctx.obj["hide_progress"], - ) - subworkflow_lint.lint( - subworkflow=subworkflow, - registry=registry, - key=key, - all_subworkflows=all, - print_results=True, - local=local, - show_passed=passed, - sort_by=sort_by, - ) - if len(subworkflow_lint.failed) > 0: - sys.exit(1) - except LintExceptionError as e: - log.error(e) - sys.exit(1) - except (UserWarning, LookupError) as e: - log.critical(e) - sys.exit(1) + subworkflows_lint(ctx, subworkflow, dir, registry, key, all, fail_warned, local, passed, sort_by) # nf-core subworkflows info @@ -2144,32 +1448,11 @@ def subworkflows_lint(ctx, subworkflow, dir, registry, key, all, fail_warned, lo default=".", help=r"Pipeline directory. [dim]\[default: Current working directory][/]", ) -def subworkflows_info(ctx, subworkflow, dir): +def command_subworkflows_info(ctx, subworkflow, dir): """ Show developer usage information about a given subworkflow. - - Parses information from a subworkflow's [i]meta.yml[/] and renders help - on the command line. A handy equivalent to searching the - [link=https://nf-co.re/modules]nf-core website[/]. - - If run from a pipeline and a local copy of the subworkflow is found, the command - will print this usage info. - If not, usage from the remote subworkflows repo will be shown. 
""" - from nf_core.subworkflows import SubworkflowInfo - - try: - subworkflow_info = SubworkflowInfo( - dir, - subworkflow, - ctx.obj["modules_repo_url"], - ctx.obj["modules_repo_branch"], - ctx.obj["modules_repo_no_pull"], - ) - stdout.print(subworkflow_info.get_component_info()) - except (UserWarning, LookupError) as e: - log.error(e) - sys.exit(1) + subworkflows_info(ctx, subworkflow, dir) # nf-core subworkflows install @@ -2204,30 +1487,11 @@ def subworkflows_info(ctx, subworkflow, dir): metavar="", help="Install subworkflow at commit SHA", ) -def subworkflows_install(ctx, subworkflow, dir, prompt, force, sha): +def command_subworkflows_install(ctx, subworkflow, dir, prompt, force, sha): """ Install DSL2 subworkflow within a pipeline. - - Fetches and installs subworkflow files from a remote repo e.g. nf-core/modules. """ - from nf_core.subworkflows import SubworkflowInstall - - try: - subworkflow_install = SubworkflowInstall( - dir, - force, - prompt, - sha, - ctx.obj["modules_repo_url"], - ctx.obj["modules_repo_branch"], - ctx.obj["modules_repo_no_pull"], - ) - exit_status = subworkflow_install.install(subworkflow) - if not exit_status: - sys.exit(1) - except (UserWarning, LookupError) as e: - log.error(e) - sys.exit(1) + subworkflows_install(ctx, subworkflow, dir, prompt, force, sha) # nf-core subworkflows remove @@ -2241,23 +1505,11 @@ def subworkflows_install(ctx, subworkflow, dir, prompt, force, sha): default=".", help=r"Pipeline directory. [dim]\[default: current working directory][/]", ) -def subworkflows_remove(ctx, dir, subworkflow): +def command_subworkflows_remove(ctx, dir, subworkflow): """ Remove a subworkflow from a pipeline. 
""" - from nf_core.subworkflows import SubworkflowRemove - - try: - module_remove = SubworkflowRemove( - dir, - ctx.obj["modules_repo_url"], - ctx.obj["modules_repo_branch"], - ctx.obj["modules_repo_no_pull"], - ) - module_remove.remove(subworkflow) - except (UserWarning, LookupError) as e: - log.critical(e) - sys.exit(1) + subworkflows_remove(ctx, dir, subworkflow) # nf-core subworkflows update @@ -2316,7 +1568,7 @@ def subworkflows_remove(ctx, dir, subworkflow): default=False, help="Automatically update all linked modules and subworkflows without asking for confirmation", ) -def subworkflows_update( +def command_subworkflows_update( ctx, subworkflow, dir, @@ -2330,31 +1582,8 @@ def subworkflows_update( ): """ Update DSL2 subworkflow within a pipeline. - - Fetches and updates subworkflow files from a remote repo e.g. nf-core/modules. - """ - from nf_core.subworkflows import SubworkflowUpdate - - try: - subworkflow_install = SubworkflowUpdate( - dir, - force, - prompt, - sha, - install_all, - preview, - save_diff, - update_deps, - ctx.obj["modules_repo_url"], - ctx.obj["modules_repo_branch"], - ctx.obj["modules_repo_no_pull"], - ) - exit_status = subworkflow_install.update(subworkflow) - if not exit_status and install_all: - sys.exit(1) - except (UserWarning, LookupError) as e: - log.error(e) - sys.exit(1) + """ + subworkflows_update(ctx, subworkflow, dir, force, prompt, sha, install_all, preview, save_diff, update_deps) ## DEPRECATED commands since v3.0.0 diff --git a/nf_core/commands_modules.py b/nf_core/commands_modules.py new file mode 100644 index 000000000..5f7191436 --- /dev/null +++ b/nf_core/commands_modules.py @@ -0,0 +1,340 @@ +import logging +import sys + +import rich + +from nf_core.utils import rich_force_colors + +log = logging.getLogger(__name__) +stdout = rich.console.Console(force_terminal=rich_force_colors()) + + +def modules_list_remote(ctx, keywords, json): + """ + List modules in a remote GitHub repo [dim i](e.g 
[link=https://github.com/nf-core/modules]nf-core/modules[/])[/]. + """ + from nf_core.modules import ModuleList + + try: + module_list = ModuleList( + None, + True, + ctx.obj["modules_repo_url"], + ctx.obj["modules_repo_branch"], + ctx.obj["modules_repo_no_pull"], + ) + stdout.print(module_list.list_components(keywords, json)) + except (UserWarning, LookupError) as e: + log.critical(e) + sys.exit(1) + + +def modules_list_local(ctx, keywords, json, dir): # pylint: disable=redefined-builtin + """ + List modules installed locally in a pipeline + """ + from nf_core.modules import ModuleList + + try: + module_list = ModuleList( + dir, + False, + ctx.obj["modules_repo_url"], + ctx.obj["modules_repo_branch"], + ctx.obj["modules_repo_no_pull"], + ) + stdout.print(module_list.list_components(keywords, json)) + except (UserWarning, LookupError) as e: + log.error(e) + sys.exit(1) + + +def modules_install(ctx, tool, dir, prompt, force, sha): + """ + Install DSL2 modules within a pipeline. + + Fetches and installs module files from a remote repo e.g. nf-core/modules. + """ + from nf_core.modules import ModuleInstall + + try: + module_install = ModuleInstall( + dir, + force, + prompt, + sha, + ctx.obj["modules_repo_url"], + ctx.obj["modules_repo_branch"], + ctx.obj["modules_repo_no_pull"], + ) + exit_status = module_install.install(tool) + if not exit_status: + sys.exit(1) + except (UserWarning, LookupError) as e: + log.error(e) + sys.exit(1) + + +def modules_update( + ctx, + tool, + directory, + force, + prompt, + sha, + install_all, + preview, + save_diff, + update_deps, +): + """ + Update DSL2 modules within a pipeline. + + Fetches and updates module files from a remote repo e.g. nf-core/modules. 
+ """ + from nf_core.modules import ModuleUpdate + + try: + module_install = ModuleUpdate( + directory, + force, + prompt, + sha, + install_all, + preview, + save_diff, + update_deps, + ctx.obj["modules_repo_url"], + ctx.obj["modules_repo_branch"], + ctx.obj["modules_repo_no_pull"], + ) + exit_status = module_install.update(tool) + if not exit_status and install_all: + sys.exit(1) + except (UserWarning, LookupError) as e: + log.error(e) + sys.exit(1) + + +def modules_patch(ctx, tool, dir, remove): + """ + Create a patch file for minor changes in a module + + Checks if a module has been modified locally and creates a patch file + describing how the module has changed from the remote version + """ + from nf_core.modules import ModulePatch + + try: + module_patch = ModulePatch( + dir, + ctx.obj["modules_repo_url"], + ctx.obj["modules_repo_branch"], + ctx.obj["modules_repo_no_pull"], + ) + if remove: + module_patch.remove(tool) + else: + module_patch.patch(tool) + except (UserWarning, LookupError) as e: + log.error(e) + sys.exit(1) + + +def modules_remove(ctx, dir, tool): + """ + Remove a module from a pipeline. + """ + from nf_core.modules import ModuleRemove + + try: + module_remove = ModuleRemove( + dir, + ctx.obj["modules_repo_url"], + ctx.obj["modules_repo_branch"], + ctx.obj["modules_repo_no_pull"], + ) + module_remove.remove(tool) + except (UserWarning, LookupError) as e: + log.critical(e) + sys.exit(1) + + +def modules_create( + ctx, + tool, + dir, + author, + label, + meta, + no_meta, + force, + conda_name, + conda_package_version, + empty_template, + migrate_pytest, +): + """ + Create a new DSL2 module from the nf-core template. 
+ + If the specified directory is a pipeline, this function creates a file called + 'modules/local/tool_subtool.nf' + + If the specified directory is a clone of nf-core/modules, it creates or modifies files + in 'modules/', 'tests/modules' and 'tests/config/pytest_modules.yml' + """ + # Combine two bool flags into one variable + has_meta = None + if meta and no_meta: + log.critical("Both arguments '--meta' and '--no-meta' given. Please pick one.") + elif meta: + has_meta = True + elif no_meta: + has_meta = False + + from nf_core.modules import ModuleCreate + + # Run function + try: + module_create = ModuleCreate( + dir, + tool, + author, + label, + has_meta, + force, + conda_name, + conda_package_version, + empty_template, + migrate_pytest, + ) + module_create.create() + except UserWarning as e: + log.critical(e) + sys.exit(1) + except LookupError as e: + log.error(e) + sys.exit(1) + + +def modules_test(ctx, tool, dir, no_prompts, update, once, profile): + """ + Run nf-test for a module. + + Given the name of a module, runs the nf-test command to test the module and generate snapshots. + """ + from nf_core.components.components_test import ComponentsTest + + try: + module_tester = ComponentsTest( + component_type="modules", + component_name=tool, + directory=dir, + no_prompts=no_prompts, + update=update, + once=once, + remote_url=ctx.obj["modules_repo_url"], + branch=ctx.obj["modules_repo_branch"], + verbose=ctx.obj["verbose"], + profile=profile, + ) + module_tester.run() + except (UserWarning, LookupError) as e: + log.critical(e) + sys.exit(1) + + +def modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed, sort_by, fix_version): + """ + Lint one or more modules in a directory. + + Checks DSL2 module code against nf-core guidelines to ensure + that all modules follow the same standards. + + Test modules within a pipeline or a clone of the + nf-core/modules repository. 
+ """ + from nf_core.components.lint import LintExceptionError + from nf_core.modules import ModuleLint + + try: + module_lint = ModuleLint( + dir, + fail_warned=fail_warned, + registry=ctx.params["registry"], + remote_url=ctx.obj["modules_repo_url"], + branch=ctx.obj["modules_repo_branch"], + no_pull=ctx.obj["modules_repo_no_pull"], + hide_progress=ctx.obj["hide_progress"], + ) + module_lint.lint( + module=tool, + registry=registry, + key=key, + all_modules=all, + print_results=True, + local=local, + show_passed=passed, + sort_by=sort_by, + fix_version=fix_version, + ) + if len(module_lint.failed) > 0: + sys.exit(1) + except LintExceptionError as e: + log.error(e) + sys.exit(1) + except (UserWarning, LookupError) as e: + log.critical(e) + sys.exit(1) + + +def modules_info(ctx, tool, dir): + """ + Show developer usage information about a given module. + + Parses information from a module's [i]meta.yml[/] and renders help + on the command line. A handy equivalent to searching the + [link=https://nf-co.re/modules]nf-core website[/]. + + If run from a pipeline and a local copy of the module is found, the command + will print this usage info. + If not, usage from the remote modules repo will be shown. + """ + from nf_core.modules import ModuleInfo + + try: + module_info = ModuleInfo( + dir, + tool, + ctx.obj["modules_repo_url"], + ctx.obj["modules_repo_branch"], + ctx.obj["modules_repo_no_pull"], + ) + stdout.print(module_info.get_component_info()) + except (UserWarning, LookupError) as e: + log.error(e) + sys.exit(1) + + +def modules_bump_versions(ctx, tool, dir, all, show_all): + """ + Bump versions for one or more modules in a clone of + the nf-core/modules repo. 
+ """ + from nf_core.modules.bump_versions import ModuleVersionBumper + from nf_core.modules.modules_utils import ModuleExceptionError + + try: + version_bumper = ModuleVersionBumper( + dir, + ctx.obj["modules_repo_url"], + ctx.obj["modules_repo_branch"], + ctx.obj["modules_repo_no_pull"], + ) + version_bumper.bump_versions(module=tool, all_modules=all, show_uptodate=show_all) + except ModuleExceptionError as e: + log.error(e) + sys.exit(1) + except (UserWarning, LookupError) as e: + log.critical(e) + sys.exit(1) diff --git a/nf_core/commands_pipelines.py b/nf_core/commands_pipelines.py new file mode 100644 index 000000000..3f569bfe3 --- /dev/null +++ b/nf_core/commands_pipelines.py @@ -0,0 +1,429 @@ +import logging +import os +import sys +from pathlib import Path + +import rich + +from nf_core.pipelines.params_file import ParamsFileBuilder +from nf_core.utils import rich_force_colors + +log = logging.getLogger(__name__) + +stdout = rich.console.Console(force_terminal=rich_force_colors()) + +## nf-core pipelines command functions ## + + +# nf-core pipelines create +def pipelines_create(ctx, name, description, author, version, force, outdir, template_yaml, organisation): + """ + Create a new pipeline using the nf-core template. + + Uses the nf-core template to make a skeleton Nextflow pipeline with all required + files, boilerplate code and best-practices. + \n\n + Run without any command line arguments to use an interactive interface. 
+ """ + from nf_core.pipelines.create import PipelineCreateApp + from nf_core.pipelines.create.create import PipelineCreate + + if (name and description and author) or (template_yaml): + # If all command arguments are used, run without the interactive interface + try: + create_obj = PipelineCreate( + name, + description, + author, + version=version, + force=force, + outdir=outdir, + template_config=template_yaml, + organisation=organisation, + ) + create_obj.init_pipeline() + except UserWarning as e: + log.error(e) + sys.exit(1) + elif name or description or author or version != "1.0.0dev" or force or outdir or organisation != "nf-core": + log.error( + "[red]Partial arguments supplied.[/] " + "Run without [i]any[/] arguments for an interactive interface, " + "or with at least name + description + author to use non-interactively." + ) + sys.exit(1) + else: + log.info("Launching interactive nf-core pipeline creation tool.") + app = PipelineCreateApp() + app.run() + sys.exit(app.return_code or 0) + + +# nf-core pipelines bump-version +def pipelines_bump_version(ctx, new_version, dir, nextflow): + """ + Update nf-core pipeline version number. + + The pipeline version number is mentioned in a lot of different places + in nf-core pipelines. This tool updates the version for you automatically, + so that you don't accidentally miss any. + + Should be used for each pipeline release, and again for the next + development version after release. + + As well as the pipeline version, you can also change the required version of Nextflow. 
+ """ + from nf_core.pipelines.bump_version import bump_nextflow_version, bump_pipeline_version + from nf_core.utils import Pipeline, is_pipeline_directory + + try: + # Check if pipeline directory contains necessary files + is_pipeline_directory(dir) + + # Make a pipeline object and load config etc + pipeline_obj = Pipeline(dir) + pipeline_obj._load() + + # Bump the pipeline version number + if not nextflow: + bump_pipeline_version(pipeline_obj, new_version) + else: + bump_nextflow_version(pipeline_obj, new_version) + except UserWarning as e: + log.error(e) + sys.exit(1) + + +# nf-core pipelines lint +def pipelines_lint( + ctx, + dir, + release, + fix, + key, + show_passed, + fail_ignored, + fail_warned, + markdown, + json, + sort_by, +): + """ + Check pipeline code against nf-core guidelines. + + Runs a large number of automated tests to ensure that the supplied pipeline + meets the nf-core guidelines. Documentation of all lint tests can be found + on the nf-core website: [link=https://nf-co.re/tools/docs/]https://nf-co.re/tools/docs/[/] + + You can ignore tests using a file called [blue].nf-core.yml[/] [i](if you have a good reason!)[/]. + See the documentation for details. + """ + from nf_core.pipelines.lint import run_linting + from nf_core.utils import is_pipeline_directory + + # Check if pipeline directory is a pipeline + try: + is_pipeline_directory(dir) + except UserWarning as e: + log.error(e) + sys.exit(1) + + # Run the lint tests! 
+ try: + lint_obj, module_lint_obj, subworkflow_lint_obj = run_linting( + dir, + release, + fix, + key, + show_passed, + fail_ignored, + fail_warned, + sort_by, + markdown, + json, + ctx.obj["hide_progress"], + ) + swf_failed = 0 + if subworkflow_lint_obj is not None: + swf_failed = len(subworkflow_lint_obj.failed) + if len(lint_obj.failed) + len(module_lint_obj.failed) + swf_failed > 0: + sys.exit(1) + except AssertionError as e: + log.critical(e) + sys.exit(1) + except UserWarning as e: + log.error(e) + sys.exit(1) + + +# nf-core pipelines download +def pipelines_download( + ctx, + pipeline, + revision, + outdir, + compress, + force, + tower, + platform, + download_configuration, + tag, + container_system, + container_library, + container_cache_utilisation, + container_cache_index, + parallel_downloads, +): + """ + Download a pipeline, nf-core/configs and pipeline singularity images. + + Collects all files in a single archive and configures the downloaded + workflow to use relative paths to the configs and singularity images. + """ + from nf_core.pipelines.download import DownloadWorkflow + + if tower: + log.warning("[red]The `-t` / `--tower` flag is deprecated. Please use `--platform` instead.[/]") + + dl = DownloadWorkflow( + pipeline, + revision, + outdir, + compress, + force, + tower or platform, # True if either specified + download_configuration, + tag, + container_system, + container_library, + container_cache_utilisation, + container_cache_index, + parallel_downloads, + ) + dl.download_workflow() + + +# nf-core pipelines create-params-file +def pipelines_create_params_file(ctx, pipeline, revision, output, force, show_hidden): + """ + Build a parameter file for a pipeline. + + Uses the pipeline schema file to generate a YAML parameters file. + Parameters are set to the pipeline defaults and descriptions are shown in comments. + After the output file is generated, it can then be edited as needed before + passing to nextflow using the `-params-file` option. 
+ + Run using a remote pipeline name (such as GitHub `user/repo` or a URL), + a local pipeline directory. + """ + builder = ParamsFileBuilder(pipeline, revision) + + if not builder.write_params_file(output, show_hidden=show_hidden, force=force): + sys.exit(1) + + +# nf-core pipelines launch +def pipelines_launch( + ctx, + pipeline, + id, + revision, + command_only, + params_in, + params_out, + save_all, + show_hidden, + url, +): + """ + Launch a pipeline using a web GUI or command line prompts. + + Uses the pipeline schema file to collect inputs for all available pipeline + parameters. Parameter names, descriptions and help text are shown. + The pipeline schema is used to validate all inputs as they are entered. + + When finished, saves a file with the selected parameters which can be + passed to Nextflow using the -params-file option. + + Run using a remote pipeline name (such as GitHub `user/repo` or a URL), + a local pipeline directory or an ID from the nf-core web launch tool. + """ + from nf_core.pipelines.launch import Launch + + launcher = Launch( + pipeline, + revision, + command_only, + params_in, + params_out, + save_all, + show_hidden, + url, + id, + ) + if not launcher.launch_pipeline(): + sys.exit(1) + + +# nf-core pipelines list +def pipelines_list(ctx, keywords, sort, json, show_archived): + """ + List available nf-core pipelines with local info. + + Checks the web for a list of nf-core pipelines with their latest releases. + Shows which nf-core pipelines you have pulled locally and whether they are up to date. + """ + from nf_core.pipelines.list import list_workflows + + stdout.print(list_workflows(keywords, sort, json, show_archived)) + + +# nf-core pipelines sync +def pipelines_sync(ctx, dir, from_branch, pull_request, github_repository, username, template_yaml, force_pr): + """ + Sync a pipeline [cyan i]TEMPLATE[/] branch with the nf-core template. 
+ + To keep nf-core pipelines up to date with improvements in the main + template, we use a method of synchronisation that uses a special + git branch called [cyan i]TEMPLATE[/]. + + This command updates the [cyan i]TEMPLATE[/] branch with the latest version of + the nf-core template, so that these updates can be synchronised with + the pipeline. It is run automatically for all pipelines when ever a + new release of [link=https://github.com/nf-core/tools]nf-core/tools[/link] (and the included template) is made. + """ + from nf_core.pipelines.sync import PipelineSync, PullRequestExceptionError, SyncExceptionError + from nf_core.utils import is_pipeline_directory + + # Check if pipeline directory contains necessary files + is_pipeline_directory(dir) + + # Sync the given pipeline dir + sync_obj = PipelineSync(dir, from_branch, pull_request, github_repository, username, template_yaml, force_pr) + try: + sync_obj.sync() + except (SyncExceptionError, PullRequestExceptionError) as e: + log.error(e) + sys.exit(1) + + +# nf-core pipelines create-logo +def pipelines_create_logo(logo_text, dir, name, theme, width, format, force): + """ + Generate a logo with the nf-core logo template. + + This command generates an nf-core pipeline logo, using the supplied + """ + from nf_core.pipelines.create_logo import create_logo + + try: + if dir == ".": + dir = Path.cwd() + logo_path = create_logo(logo_text, dir, name, theme, width, format, force) + # Print path to logo relative to current working directory + try: + logo_path = Path(logo_path).relative_to(Path.cwd()) + except ValueError: + logo_path = Path(logo_path) + log.info(f"Created logo: [magenta]{logo_path}[/]") + except UserWarning as e: + log.error(e) + sys.exit(1) + + +# nf-core pipelines schema validate +def pipelines_schema_validate(pipeline, params): + """ + Validate a set of parameters against a pipeline schema. + + Nextflow can be run using the -params-file flag, which loads + script parameters from a JSON file. 
+ + This command takes such a file and validates it against the pipeline + schema, checking whether all schema rules are satisfied. + """ + from nf_core.pipelines.schema import PipelineSchema + + schema_obj = PipelineSchema() + try: + schema_obj.get_schema_path(pipeline) + # Load and check schema + schema_obj.load_lint_schema() + except AssertionError as e: + log.error(e) + sys.exit(1) + schema_obj.load_input_params(params) + try: + schema_obj.validate_params() + except AssertionError: + sys.exit(1) + + +# nf-core pipelines schema build +def pipelines_schema_build(dir, no_prompts, web_only, url): + """ + Interactively build a pipeline schema from Nextflow params. + + Automatically detects parameters from the pipeline config and main.nf and + compares these to the pipeline schema. Prompts to add or remove parameters + if the two do not match one another. + + Once all parameters are accounted for, can launch a web GUI tool on the + https://nf-co.re website where you can annotate and organise parameters. + Listens for this to be completed and saves the updated schema. + """ + from nf_core.pipelines.schema import PipelineSchema + + try: + schema_obj = PipelineSchema() + if schema_obj.build_schema(dir, no_prompts, web_only, url) is False: + sys.exit(1) + except (UserWarning, AssertionError) as e: + log.error(e) + sys.exit(1) + + +# nf-core pipelines schema lint +def pipelines_schema_lint(schema_path): + """ + Check that a given pipeline schema is valid. + + Checks whether the pipeline schema validates as JSON Schema Draft 7 + and adheres to the additional nf-core pipelines schema requirements. + + This function runs as part of the nf-core pipelines lint command, this is a convenience + command that does just the schema linting nice and quickly. + + If no schema path is provided, "nextflow_schema.json" will be used (if it exists). 
+ """ + from nf_core.pipelines.schema import PipelineSchema + + schema_obj = PipelineSchema() + try: + schema_obj.get_schema_path(schema_path) + schema_obj.load_lint_schema() + # Validate title and description - just warnings as schema should still work fine + try: + schema_obj.validate_schema_title_description() + except AssertionError as e: + log.warning(e) + except AssertionError: + sys.exit(1) + + +# nf-core pipelines schema docs +def pipelines_schema_docs(schema_path, output, format, force, columns): + """ + Outputs parameter documentation for a pipeline schema. + """ + if not os.path.exists(schema_path): + log.error("Could not find 'nextflow_schema.json' in current directory. Please specify a path.") + sys.exit(1) + + from nf_core.pipelines.schema import PipelineSchema + + schema_obj = PipelineSchema() + # Assume we're in a pipeline dir root if schema path not set + schema_obj.get_schema_path(schema_path) + schema_obj.load_schema() + schema_obj.print_documentation(output, format, force, columns.split(",")) diff --git a/nf_core/commands_subworkflows.py b/nf_core/commands_subworkflows.py new file mode 100644 index 000000000..cc1a544ec --- /dev/null +++ b/nf_core/commands_subworkflows.py @@ -0,0 +1,259 @@ +import logging +import sys + +import rich + +from nf_core.utils import rich_force_colors + +log = logging.getLogger(__name__) + +stdout = rich.console.Console(force_terminal=rich_force_colors()) + + +def subworkflows_create(ctx, subworkflow, dir, author, force, migrate_pytest): + """ + Create a new subworkflow from the nf-core template. 
+ + If the specified directory is a pipeline, this function creates a file called + 'subworkflows/local/.nf' + + If the specified directory is a clone of nf-core/modules, it creates or modifies files + in 'subworkflows/', 'tests/subworkflows' and 'tests/config/pytest_modules.yml' + """ + from nf_core.subworkflows import SubworkflowCreate + + # Run function + try: + subworkflow_create = SubworkflowCreate(dir, subworkflow, author, force, migrate_pytest) + subworkflow_create.create() + except UserWarning as e: + log.critical(e) + sys.exit(1) + except LookupError as e: + log.error(e) + sys.exit(1) + + +def subworkflows_test(ctx, subworkflow, dir, no_prompts, update, once, profile): + """ + Run nf-test for a subworkflow. + + Given the name of a subworkflow, runs the nf-test command to test the subworkflow and generate snapshots. + """ + from nf_core.components.components_test import ComponentsTest + + try: + sw_tester = ComponentsTest( + component_type="subworkflows", + component_name=subworkflow, + directory=dir, + no_prompts=no_prompts, + update=update, + once=once, + remote_url=ctx.obj["modules_repo_url"], + branch=ctx.obj["modules_repo_branch"], + verbose=ctx.obj["verbose"], + profile=profile, + ) + sw_tester.run() + except (UserWarning, LookupError) as e: + log.critical(e) + sys.exit(1) + + +def subworkflows_list_remote(ctx, keywords, json): + """ + List subworkflows in a remote GitHub repo [dim i](e.g [link=https://github.com/nf-core/modules]nf-core/modules[/])[/]. 
+ """ + from nf_core.subworkflows import SubworkflowList + + try: + subworkflow_list = SubworkflowList( + None, + True, + ctx.obj["modules_repo_url"], + ctx.obj["modules_repo_branch"], + ctx.obj["modules_repo_no_pull"], + ) + + stdout.print(subworkflow_list.list_components(keywords, json)) + except (UserWarning, LookupError) as e: + log.critical(e) + sys.exit(1) + + +def subworkflows_list_local(ctx, keywords, json, dir): # pylint: disable=redefined-builtin + """ + List subworkflows installed locally in a pipeline + """ + from nf_core.subworkflows import SubworkflowList + + try: + subworkflow_list = SubworkflowList( + dir, + False, + ctx.obj["modules_repo_url"], + ctx.obj["modules_repo_branch"], + ctx.obj["modules_repo_no_pull"], + ) + stdout.print(subworkflow_list.list_components(keywords, json)) + except (UserWarning, LookupError) as e: + log.error(e) + sys.exit(1) + + +def subworkflows_lint(ctx, subworkflow, dir, registry, key, all, fail_warned, local, passed, sort_by): + """ + Lint one or more subworkflows in a directory. + + Checks DSL2 subworkflow code against nf-core guidelines to ensure + that all subworkflows follow the same standards. + + Test subworkflows within a pipeline or a clone of the + nf-core/modules repository. 
+ """ + from nf_core.components.lint import LintExceptionError + from nf_core.subworkflows import SubworkflowLint + + try: + subworkflow_lint = SubworkflowLint( + dir, + fail_warned=fail_warned, + registry=ctx.params["registry"], + remote_url=ctx.obj["modules_repo_url"], + branch=ctx.obj["modules_repo_branch"], + no_pull=ctx.obj["modules_repo_no_pull"], + hide_progress=ctx.obj["hide_progress"], + ) + subworkflow_lint.lint( + subworkflow=subworkflow, + registry=registry, + key=key, + all_subworkflows=all, + print_results=True, + local=local, + show_passed=passed, + sort_by=sort_by, + ) + if len(subworkflow_lint.failed) > 0: + sys.exit(1) + except LintExceptionError as e: + log.error(e) + sys.exit(1) + except (UserWarning, LookupError) as e: + log.critical(e) + sys.exit(1) + + +def subworkflows_info(ctx, subworkflow, dir): + """ + Show developer usage information about a given subworkflow. + + Parses information from a subworkflow's [i]meta.yml[/] and renders help + on the command line. A handy equivalent to searching the + [link=https://nf-co.re/modules]nf-core website[/]. + + If run from a pipeline and a local copy of the subworkflow is found, the command + will print this usage info. + If not, usage from the remote subworkflows repo will be shown. + """ + from nf_core.subworkflows import SubworkflowInfo + + try: + subworkflow_info = SubworkflowInfo( + dir, + subworkflow, + ctx.obj["modules_repo_url"], + ctx.obj["modules_repo_branch"], + ctx.obj["modules_repo_no_pull"], + ) + stdout.print(subworkflow_info.get_component_info()) + except (UserWarning, LookupError) as e: + log.error(e) + sys.exit(1) + + +def subworkflows_install(ctx, subworkflow, dir, prompt, force, sha): + """ + Install DSL2 subworkflow within a pipeline. + + Fetches and installs subworkflow files from a remote repo e.g. nf-core/modules. 
+ """ + from nf_core.subworkflows import SubworkflowInstall + + try: + subworkflow_install = SubworkflowInstall( + dir, + force, + prompt, + sha, + ctx.obj["modules_repo_url"], + ctx.obj["modules_repo_branch"], + ctx.obj["modules_repo_no_pull"], + ) + exit_status = subworkflow_install.install(subworkflow) + if not exit_status: + sys.exit(1) + except (UserWarning, LookupError) as e: + log.error(e) + sys.exit(1) + + +def subworkflows_remove(ctx, dir, subworkflow): + """ + Remove a subworkflow from a pipeline. + """ + from nf_core.subworkflows import SubworkflowRemove + + try: + module_remove = SubworkflowRemove( + dir, + ctx.obj["modules_repo_url"], + ctx.obj["modules_repo_branch"], + ctx.obj["modules_repo_no_pull"], + ) + module_remove.remove(subworkflow) + except (UserWarning, LookupError) as e: + log.critical(e) + sys.exit(1) + + +def subworkflows_update( + ctx, + subworkflow, + dir, + force, + prompt, + sha, + install_all, + preview, + save_diff, + update_deps, +): + """ + Update DSL2 subworkflow within a pipeline. + + Fetches and updates subworkflow files from a remote repo e.g. nf-core/modules. 
+ """ + from nf_core.subworkflows import SubworkflowUpdate + + try: + subworkflow_install = SubworkflowUpdate( + dir, + force, + prompt, + sha, + install_all, + preview, + save_diff, + update_deps, + ctx.obj["modules_repo_url"], + ctx.obj["modules_repo_branch"], + ctx.obj["modules_repo_no_pull"], + ) + exit_status = subworkflow_install.update(subworkflow) + if not exit_status and install_all: + sys.exit(1) + except (UserWarning, LookupError) as e: + log.error(e) + sys.exit(1) From 667c53f95b54b3ceb54671d22f72bc312be0ae9b Mon Sep 17 00:00:00 2001 From: Joon-Klaps Date: Wed, 3 Jul 2024 14:24:02 +0000 Subject: [PATCH 245/737] add tests --- tests/modules/update.py | 113 +++++++++++++++++++++++++++++++++ tests/subworkflows/update.py | 120 +++++++++++++++++++++++++++++++++++ tests/test_modules.py | 2 + tests/test_subworkflows.py | 2 + 4 files changed, 237 insertions(+) diff --git a/tests/modules/update.py b/tests/modules/update.py index 81eb85716..0fddcc8ba 100644 --- a/tests/modules/update.py +++ b/tests/modules/update.py @@ -1,5 +1,8 @@ import filecmp +import io +import logging import os +import re import shutil import tempfile from pathlib import Path @@ -67,6 +70,71 @@ def test_install_at_hash_and_update(self): assert correct_git_sha == current_git_sha +def test_install_at_hash_and_update_limit_output(self): + """Installs an old version of a module in the pipeline and updates it with limited output reporting""" + assert self.mods_install_old.install("trimgalore") + + # Capture the logger output + log_capture = io.StringIO() + ch = logging.StreamHandler(log_capture) + logger = logging.getLogger() + logger.addHandler(ch) + + update_obj = ModuleUpdate( + self.pipeline_dir, + show_diff=False, + update_deps=True, + remote_url=GITLAB_URL, + branch=OLD_TRIMGALORE_BRANCH, + limit_output=True, + ) + + # Copy the module files and check that they are affected by the update + tmpdir = tempfile.mkdtemp() + trimgalore_tmpdir = os.path.join(tmpdir, "trimgalore") + trimgalore_path = 
os.path.join(self.pipeline_dir, "modules", GITLAB_REPO, "trimgalore") + shutil.copytree(trimgalore_path, trimgalore_tmpdir) + + assert update_obj.update("trimgalore") is True + assert cmp_module(trimgalore_tmpdir, trimgalore_path) is False + + # Check that the modules.json is correctly updated + mod_json_obj = ModulesJson(self.pipeline_dir) + mod_json = mod_json_obj.get_modules_json() + # Get the up-to-date git_sha for the module from the ModuleRepo object + correct_git_sha = update_obj.modules_repo.get_latest_component_version("trimgalore", "modules") + current_git_sha = mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"]["git_sha"] + assert correct_git_sha == current_git_sha + + # Get the captured log output + log_output = log_capture.getvalue() + log_lines = log_output.split("\n") + + # Check for various scenarios + for line in log_lines: + if re.match(r"'.+' is unchanged", line): + # Unchanged files should be reported for both .nf and non-.nf files + assert True + elif re.match(r"'.+' was created", line): + # Created files should be reported for both .nf and non-.nf files + assert True + elif re.match(r"'.+' was removed", line): + # Removed files should be reported for both .nf and non-.nf files + assert True + elif re.match(r"Changes in '.+' but not shown", line): + # Changes not shown should only be for non-.nf files + file_path = re.search(r"'(.+)'", line).group(1) + assert Path(file_path).suffix != ".nf", f"Changes in .nf file were not shown: {line}" + elif re.match(r"Changes in '.+':$", line): + # Changes shown should only be for .nf files + file_path = re.search(r"'(.+)'", line).group(1) + assert Path(file_path).suffix == ".nf", f"Changes in non-.nf file were shown: {line}" + + # Clean up + logger.removeHandler(ch) + log_capture.close() + + def test_install_at_hash_and_update_and_save_diff_to_file(self): """Installs an old version of a module in the pipeline and updates it""" self.mods_install_old.install("trimgalore") @@ -91,6 +159,51 @@ 
def test_install_at_hash_and_update_and_save_diff_to_file(self): # TODO: Apply the patch to the module +def test_install_at_hash_and_update_and_save_diff_to_file_limit_output(self): + """Installs an old version of a module in the pipeline and updates it""" + self.mods_install_old.install("trimgalore") + patch_path = os.path.join(self.pipeline_dir, "trimgalore.patch") + update_obj = ModuleUpdate( + self.pipeline_dir, + save_diff_fn=patch_path, + sha=OLD_TRIMGALORE_SHA, + remote_url=GITLAB_URL, + branch=OLD_TRIMGALORE_BRANCH, + limit_output=True, + ) + + # Copy the module files and check that they are affected by the update + tmpdir = tempfile.mkdtemp() + trimgalore_tmpdir = os.path.join(tmpdir, "trimgalore") + trimgalore_path = os.path.join(self.pipeline_dir, "modules", GITLAB_REPO, "trimgalore") + shutil.copytree(trimgalore_path, trimgalore_tmpdir) + + assert update_obj.update("trimgalore") is True + assert cmp_module(trimgalore_tmpdir, trimgalore_path) is True + + # Check that the patch file was created + assert os.path.exists(patch_path), f"Patch file was not created at {patch_path}" + + # Read the contents of the patch file + with open(patch_path) as f: + patch_content = f.read() + + # Check the content of the patch file + patch_lines = patch_content.split("\n") + for line in patch_lines: + if re.match(r"'.+' is unchanged", line): + # Unchanged files should be reported for both .nf and non-.nf files + assert True + elif re.match(r"Changes in '.+' but not shown", line): + # Changes not shown should only be for non-.nf files + file_path = re.search(r"'(.+)'", line).group(1) + assert Path(file_path).suffix != ".nf", f"Changes in .nf file were not shown: {line}" + elif re.match("diff --git", line): + # Diff should only be shown for .nf files + file_path = re.search(r"b/(.+)$", line).group(1) + assert Path(file_path).suffix == ".nf", f"Diff shown for non-.nf file: {line}" + + def test_update_all(self): """Updates all modules present in the pipeline""" update_obj = 
ModuleUpdate(self.pipeline_dir, update_all=True, show_diff=False) diff --git a/tests/subworkflows/update.py b/tests/subworkflows/update.py index 9ddc9bec0..7d1a3808d 100644 --- a/tests/subworkflows/update.py +++ b/tests/subworkflows/update.py @@ -1,4 +1,7 @@ import filecmp +import io +import logging +import re import shutil import tempfile from pathlib import Path @@ -57,6 +60,73 @@ def test_install_at_hash_and_update(self): ) +def test_install_at_hash_and_update_limit_output(self): + """Installs an old version of a subworkflow in the pipeline and updates it with limit_output=True""" + assert self.subworkflow_install_old.install("fastq_align_bowtie2") + + # Capture the logger output + log_capture = io.StringIO() + ch = logging.StreamHandler(log_capture) + logger = logging.getLogger() + logger.addHandler(ch) + + update_obj = SubworkflowUpdate(self.pipeline_dir, show_diff=False, update_deps=True, limit_output=True) + old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + + # Copy the subworkflow files and check that they are affected by the update + tmpdir = tempfile.mkdtemp() + sw_path = Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME, "fastq_align_bowtie2") + shutil.copytree(sw_path, tmpdir) + + assert update_obj.update("fastq_align_bowtie2") is True + assert cmp_component(tmpdir, sw_path) is False + + # Check that the modules.json is correctly updated + mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + assert ( + old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ + "git_sha" + ] + != mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ + "git_sha" + ] + ) + + # Get the captured log output + log_output = log_capture.getvalue() + log_lines = log_output.split("\n") + + # Check for various scenarios + nf_changes_shown = False + for line in log_lines: + if re.match(r"'.+' is unchanged", line): + # Unchanged files should be 
reported for both .nf and non-.nf files + assert True + elif re.match(r"'.+' was created", line): + # Created files should be reported for both .nf and non-.nf files + assert True + elif re.match(r"'.+' was removed", line): + # Removed files should be reported for both .nf and non-.nf files + assert True + elif re.match(r"Changes in '.+' but not shown", line): + # Changes not shown should only be for non-.nf files + file_path = re.search(r"'(.+)'", line).group(1) + assert Path(file_path).suffix != ".nf", f"Changes in .nf file were not shown: {line}" + elif re.match(r"Changes in '.+':$", line): + # Changes shown should only be for .nf files + file_path = re.search(r"'(.+)'", line).group(1) + assert Path(file_path).suffix == ".nf", f"Changes in non-.nf file were shown: {line}" + nf_changes_shown = True + + # Ensure that changes in at least one .nf file were shown + assert nf_changes_shown, "No changes in .nf files were shown" + + # Clean up + logger.removeHandler(ch) + log_capture.close() + shutil.rmtree(tmpdir) + + def test_install_at_hash_and_update_and_save_diff_to_file(self): """Installs an old version of a sw in the pipeline and updates it. Save differences to a file.""" assert self.subworkflow_install_old.install("fastq_align_bowtie2") @@ -79,6 +149,56 @@ def test_install_at_hash_and_update_and_save_diff_to_file(self): ) +def test_install_at_hash_and_update_and_save_diff_limit_output(self): + """Installs an old version of a sw in the pipeline and updates it. 
Save differences to a file.""" + assert self.subworkflow_install_old.install("fastq_align_bowtie2") + patch_path = Path(self.pipeline_dir, "fastq_align_bowtie2.patch") + update_obj = SubworkflowUpdate(self.pipeline_dir, save_diff_fn=patch_path, update_deps=True, limit_output=True) + + # Copy the sw files and check that they are affected by the update + tmpdir = tempfile.mkdtemp() + shutil.rmtree(tmpdir) + sw_path = Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME, "fastq_align_bowtie2") + shutil.copytree(sw_path, tmpdir) + + assert update_obj.update("fastq_align_bowtie2") is True + assert cmp_component(tmpdir, sw_path) is True + + # Check that the patch file was created + assert patch_path.exists(), f"Patch file was not created at {patch_path}" + + nf_changes_shown = False + non_nf_changes_not_shown = False + + with open(patch_path) as fh: + content = fh.read() + + # Check the first line + assert re.match( + r"Changes in module 'nf-core/fastq_align_bowtie2' between \([a-f0-9]+\) and \([a-f0-9]+\)", + content.split("\n")[0], + ), "Unexpected first line in patch file" + + # Check for .nf file changes shown + nf_changes_shown = bool(re.search(r"Changes in '.*\.nf':\n", content)) + + # Check for non-.nf file changes not shown + non_nf_changes_not_shown = bool(re.search(r"Changes in '.*[^.nf]' but not shown", content)) + + # Check that diff content is only for .nf files + diff_lines = re.findall(r"diff --git.*", content) + for line in diff_lines: + assert re.search(r"\.nf$", line), f"Diff shown for non-.nf file: {line}" + + # Ensure that changes in .nf files were shown and non-.nf files were not shown + assert nf_changes_shown, "No changes in .nf files were shown in the patch file" + assert non_nf_changes_not_shown, "Changes in non-.nf files were not properly limited in the patch file" + + # Clean up + patch_path.unlink() + shutil.rmtree(tmpdir) + + def test_update_all(self): """Updates all subworkflows present in the pipeline""" # Install subworkflows 
fastq_align_bowtie2, bam_sort_stats_samtools, bam_stats_samtools diff --git a/tests/test_modules.py b/tests/test_modules.py index 107b24566..7dd3132d3 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -269,6 +269,8 @@ def test_modulesrepo_class(self): test_install_and_update, test_install_at_hash_and_update, test_install_at_hash_and_update_and_save_diff_to_file, + test_install_at_hash_and_update_and_save_diff_to_file_limit_output, + test_install_at_hash_and_update_limit_output, test_update_all, test_update_different_branch_mix_modules_branch_test, test_update_different_branch_mixed_modules_main, diff --git a/tests/test_subworkflows.py b/tests/test_subworkflows.py index 786ba5383..b7ebe952d 100644 --- a/tests/test_subworkflows.py +++ b/tests/test_subworkflows.py @@ -175,7 +175,9 @@ def tearDown(self): from .subworkflows.update import ( # type: ignore[misc] test_install_and_update, test_install_at_hash_and_update, + test_install_at_hash_and_update_and_save_diff_limit_output, test_install_at_hash_and_update_and_save_diff_to_file, + test_install_at_hash_and_update_limit_output, test_update_all, test_update_all_linked_components_from_subworkflow, test_update_all_subworkflows_from_module, From b04093484e6cf19f82fe670c3a400187162ae9b1 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Tue, 2 Jul 2024 19:11:11 +0000 Subject: [PATCH 246/737] Update gitpod/workspace-base Docker digest to 0f38224 --- nf_core/gitpod/gitpod.Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/gitpod/gitpod.Dockerfile b/nf_core/gitpod/gitpod.Dockerfile index 628421607..2f458dfc2 100644 --- a/nf_core/gitpod/gitpod.Dockerfile +++ b/nf_core/gitpod/gitpod.Dockerfile @@ -1,7 +1,7 @@ # Test build locally before making a PR # docker build -t gitpod:test -f nf_core/gitpod/gitpod.Dockerfile . 
-FROM gitpod/workspace-base@sha256:92dd1bcbd5a2fb466c81b1e4c21fc2495575546a9e6c53b3f7d4ba0b0c29c5be +FROM gitpod/workspace-base@sha256:0f3822450f94084f6a62db4a4282d895591f6a55632dc044fe496f98cb79e75c USER root From e14819c9ecb9850f664e39d64cf8d8ac75f69c74 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Wed, 3 Jul 2024 20:15:58 +0000 Subject: [PATCH 247/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index fa079aaa8..52291d895 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -40,6 +40,7 @@ - Create: Mock git cretentials to generate stable textual snapshots ([#3007](https://github.com/nf-core/tools/pull/3007)) - Create app: display input textbox with equally spaced grid ([#3038](https://github.com/nf-core/tools/pull/3038)) - Update python:3.12-slim Docker digest to da2d7af ([#3041](https://github.com/nf-core/tools/pull/3041)) +- Update gitpod/workspace-base Docker digest to 0f38224 ([#3048](https://github.com/nf-core/tools/pull/3048)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From 5acb263f2bb2c8b606fe801db62eda326528d76f Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Fri, 5 Jul 2024 03:02:30 +0000 Subject: [PATCH 248/737] Update python:3.12-slim Docker digest to d5f1674 --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 66ee3ab2f..7178b6526 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.12-slim@sha256:da2d7af143dab7cd5b0d5a5c9545fe14e67fc24c394fcf1cf15e8ea16cbd8637 +FROM python:3.12-slim@sha256:d5f16749562233aa4bd26538771d76bf0dfd0a0ea7ea8771985e267451397ae4 LABEL authors="phil.ewels@seqera.io,erik.danielsson@scilifelab.se" \ description="Docker image containing requirements for nf-core/tools" From eefec50b0d14065dc95db250e45696f384f3b1dc Mon Sep 17 00:00:00 2001 From: mirpedrol 
Date: Fri, 5 Jul 2024 11:20:02 +0200 Subject: [PATCH 249/737] update docstring for deprecated commands --- nf_core/__main__.py | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index f9d6a6aeb..4fa640436 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -1593,7 +1593,7 @@ def command_subworkflows_update( @nf_core_cli.group(deprecated=True, hidden=True) def schema(): """ - DEPRECATED + Use `nf-core pipelines schema ` instead. """ pass @@ -1604,7 +1604,7 @@ def schema(): @click.argument("params", type=click.Path(exists=True), required=True, metavar="") def command_schema_validate(pipeline, params): """ - DEPRECATED + Use `nf-core pipelines schema validate` instead. """ log.warning( "The `[magenta]nf-core schema validate[/]` command is deprecated. Use `[magenta]nf-core pipelines schema validate[/]` instead." @@ -1639,7 +1639,7 @@ def command_schema_validate(pipeline, params): ) def command_schema_build(dir, no_prompts, web_only, url): """ - DEPRECATED + Use `nf-core pipelines schema build` instead. """ log.warning( "The `[magenta]nf-core schema build[/]` command is deprecated. Use `[magenta]nf-core pipelines schema build[/]` instead." @@ -1657,7 +1657,7 @@ def command_schema_build(dir, no_prompts, web_only, url): ) def command_schema_lint(schema_path): """ - DEPRECATED + Use `nf-core pipelines schema lint` instead. """ log.warning( "The `[magenta]nf-core schema lint[/]` command is deprecated. Use `[magenta]nf-core pipelines schema lint[/]` instead." @@ -1699,7 +1699,7 @@ def command_schema_lint(schema_path): ) def command_schema_docs(schema_path, output, format, force, columns): """ - DEPRECATED + Use `nf-core pipelines schema docs` instead. """ log.warning( "The `[magenta]nf-core schema docs[/]` command is deprecated. Use `[magenta]nf-core pipelines schema docs[/]` instead." 
@@ -1747,7 +1747,7 @@ def command_schema_docs(schema_path, output, format, force, columns): ) def command_create_logo(logo_text, dir, name, theme, width, format, force): """ - DEPRECATED + Use `nf-core pipelines create-logo` instead. """ log.warning( "The `[magenta]nf-core create-logo[/]` command is deprecated. Use `[magenta]nf-core pipelines screate-logo[/]` instead." @@ -1788,7 +1788,7 @@ def command_create_logo(logo_text, dir, name, theme, width, format, force): @click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") def command_sync(dir, from_branch, pull_request, github_repository, username, template_yaml, force_pr): """ - DEPRECATED + Use `nf-core pipelines sync` instead. """ log.warning( "The `[magenta]nf-core sync[/]` command is deprecated. Use `[magenta]nf-core pipelines sync[/]` instead." @@ -1816,7 +1816,7 @@ def command_sync(dir, from_branch, pull_request, github_repository, username, te ) def command_bump_version(ctx, new_version, dir, nextflow): """ - DEPRECATED + Use `nf-core pipelines bump-version` instead. """ log.warning( "The `[magenta]nf-core bump-version[/]` command is deprecated. Use `[magenta]nf-core pipelines bump-version[/]` instead." @@ -1839,7 +1839,7 @@ def command_bump_version(ctx, new_version, dir, nextflow): @click.pass_context def command_list(ctx, keywords, sort, json, show_archived): """ - DEPRECATED + Use `nf-core pipelines list` instead. """ log.warning( "The `[magenta]nf-core list[/]` command is deprecated. Use `[magenta]nf-core pipelines list[/]` instead." @@ -1907,7 +1907,7 @@ def command_launch( url, ): """ - DEPRECATED + Use `nf-core pipelines launch` instead. """ log.warning( "The `[magenta]nf-core launch[/]` command is deprecated. Use `[magenta]nf-core pipelines launch[/]` instead." @@ -1937,7 +1937,7 @@ def command_launch( ) def command_create_params_file(pipeline, revision, output, force, show_hidden): """ - DEPRECATED + Use `nf-core pipelines create-params-file` instead. 
""" log.warning( "The `[magenta]nf-core create-params-file[/]` command is deprecated. Use `[magenta]nf-core pipelines create-params-file[/]` instead." @@ -2038,7 +2038,7 @@ def command_download( parallel_downloads, ): """ - DEPRECATED + Use `nf-core pipelines download` instead. """ log.warning( "The `[magenta]nf-core download[/]` command is deprecated. Use `[magenta]nf-core pipelines download[/]` instead." @@ -2132,7 +2132,7 @@ def command_lint( sort_by, ): """ - DEPRECATED + Use `nf-core pipelines lint` instead. """ log.warning( "The `[magenta]nf-core lint[/]` command is deprecated. Use `[magenta]nf-core pipelines lint[/]` instead." @@ -2164,7 +2164,7 @@ def command_lint( @click.pass_context def command_create(ctx, name, description, author, version, force, outdir, template_yaml, plain, organisation): """ - DEPRECATED + Use `nf-core pipelines create` instead. """ log.warning( "The `[magenta]nf-core create[/]` command is deprecated. Use `[magenta]nf-core pipelines create[/]` instead." From 17adacadecde4d0fad857fad25ee6e0ef44b4cc2 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Fri, 23 Feb 2024 09:42:47 +0100 Subject: [PATCH 250/737] 10 minute hack to try to parse proper input structure from module processes, for meta.yml --- nf_core/components/nfcore_component.py | 30 ++++++++++++++++++-------- nf_core/modules/lint/meta_yml.py | 2 ++ 2 files changed, 23 insertions(+), 9 deletions(-) diff --git a/nf_core/components/nfcore_component.py b/nf_core/components/nfcore_component.py index 5d0baf63d..e8f6f0b7a 100644 --- a/nf_core/components/nfcore_component.py +++ b/nf_core/components/nfcore_component.py @@ -173,15 +173,27 @@ def get_inputs_from_main_nf(self): log.debug(f"Could not find any inputs in {self.main_nf}") return inputs input_data = data.split("input:")[1].split("output:")[0] - regex = r"(val|path)\s*(\(([^)]+)\)|\s*([^)\s,]+))" - matches = re.finditer(regex, input_data, re.MULTILINE) - for _, match in enumerate(matches, start=1): - if match.group(3): - input_val 
= match.group(3).split(",")[0] # handle `files, stageAs: "inputs/*"` cases - inputs.append(input_val) - elif match.group(4): - input_val = match.group(4).split(",")[0] # handle `files, stageAs: "inputs/*"` cases - inputs.append(input_val) + for line in input_data.split("\n"): + theseinputs = [] + regex = r"(val|path)\s*(\(([^)]+)\)|\s*([^)\s,]+))" + matches = re.finditer(regex, line) + for _, match in enumerate(matches, start=1): + input_type = None + input_val = None + if match.group(1): + input_type = match.group(1) + if match.group(3): + input_val = match.group(3).split(",")[0] # handle `files, stageAs: "inputs/*"` cases + elif match.group(4): + input_val = match.group(4).split(",")[0] # handle `files, stageAs: "inputs/*"` cases + if input_type and input_val: + theseinputs.append({ + input_val: { + "type": input_type + } + }) + if len(theseinputs) > 0: + inputs.append(theseinputs) log.debug(f"Found {len(inputs)} inputs in {self.main_nf}") self.inputs = inputs diff --git a/nf_core/modules/lint/meta_yml.py b/nf_core/modules/lint/meta_yml.py index 481d50b3e..6718ed5c2 100644 --- a/nf_core/modules/lint/meta_yml.py +++ b/nf_core/modules/lint/meta_yml.py @@ -40,6 +40,8 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None """ module.get_inputs_from_main_nf() + print(yaml.dump({"input": module.inputs})) + exit() module.get_outputs_from_main_nf() # Check if we have a patch file, get original file in that case meta_yaml = None From 970b4a0e09249476eaf2153ff57a0aad061d6750 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 4 Jun 2024 11:51:25 +0200 Subject: [PATCH 251/737] get correct format of inputs and outputs --- nf_core/components/nfcore_component.py | 36 +++++++++++++++++--------- 1 file changed, 24 insertions(+), 12 deletions(-) diff --git a/nf_core/components/nfcore_component.py b/nf_core/components/nfcore_component.py index e8f6f0b7a..9a266ab56 100644 --- a/nf_core/components/nfcore_component.py +++ 
b/nf_core/components/nfcore_component.py @@ -174,7 +174,7 @@ def get_inputs_from_main_nf(self): return inputs input_data = data.split("input:")[1].split("output:")[0] for line in input_data.split("\n"): - theseinputs = [] + channel_elements = [] regex = r"(val|path)\s*(\(([^)]+)\)|\s*([^)\s,]+))" matches = re.finditer(regex, line) for _, match in enumerate(matches, start=1): @@ -187,13 +187,9 @@ def get_inputs_from_main_nf(self): elif match.group(4): input_val = match.group(4).split(",")[0] # handle `files, stageAs: "inputs/*"` cases if input_type and input_val: - theseinputs.append({ - input_val: { - "type": input_type - } - }) - if len(theseinputs) > 0: - inputs.append(theseinputs) + channel_elements.append({input_val: {"type": input_type}}) + if len(channel_elements) > 0: + inputs.append(channel_elements) log.debug(f"Found {len(inputs)} inputs in {self.main_nf}") self.inputs = inputs @@ -206,9 +202,25 @@ def get_outputs_from_main_nf(self): log.debug(f"Could not find any outputs in {self.main_nf}") return outputs output_data = data.split("output:")[1].split("when:")[0] - regex = r"emit:\s*([^)\s,]+)" - matches = re.finditer(regex, output_data, re.MULTILINE) - for _, match in enumerate(matches, start=1): - outputs.append(match.group(1)) + regex_emit = r"emit:\s*([^)\s,]+)" + regex_elements = r"(val|path|env|stdout)\s*(\(([^)]+)\)|\s*([^)\s,]+))" + for line in output_data.split("\n"): + match_emit = re.search(regex_emit, line) + matches_elements = re.finditer(regex_elements, line) + if not match_emit: + continue + output_channel = {match_emit.group(1): []} + for _, match_element in enumerate(matches_elements, start=1): + output_type = None + output_val = None + if match_element.group(1): + output_type = match_element.group(1) + if match_element.group(3): + output_val = match_element.group(3).split(",")[0] # handle `files, stageAs: "inputs/*"` cases + elif match_element.group(4): + output_val = match_element.group(4).split(",")[0] # handle `files, stageAs: 
"inputs/*"` cases + if output_type and output_val: + output_channel[match_emit.group(1)].append({output_val: {"type": output_type}}) + outputs.append(output_channel) log.debug(f"Found {len(outputs)} outputs in {self.main_nf}") self.outputs = outputs From 744b0fd4ae76e684b19affdb8a79f09ce3815daa Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 5 Jun 2024 14:12:23 +0200 Subject: [PATCH 252/737] update module template meta.yml --- nf_core/module-template/meta.yml | 48 +++++++++++++++++--------------- 1 file changed, 25 insertions(+), 23 deletions(-) diff --git a/nf_core/module-template/meta.yml b/nf_core/module-template/meta.yml index 9d3f3c1c1..2dd8e74ef 100644 --- a/nf_core/module-template/meta.yml +++ b/nf_core/module-template/meta.yml @@ -26,42 +26,44 @@ tools: {% endif -%} input: #{% if has_meta %} Only when we have meta - - meta: - type: map - description: | - Groovy Map containing sample information - e.g. `[ id:'sample1', single_end:false ]` + - - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
`[ id:'sample1', single_end:false ]` {% endif %} {% if not_empty_template -%} ## TODO nf-core: Delete / customise this example input {%- endif %} - - {{ 'bam:' if not_empty_template else "input:" }} - type: file - description: {{ 'Sorted BAM/CRAM/SAM file' if not_empty_template else "" }} - pattern: {{ '"*.{bam,cram,sam}"' if not_empty_template else "" }} + - {{ 'bam:' if not_empty_template else "input:" }} + type: file + description: {{ 'Sorted BAM/CRAM/SAM file' if not_empty_template else "" }} + pattern: {{ '"*.{bam,cram,sam}"' if not_empty_template else "" }} {% if not_empty_template -%} ## TODO nf-core: Add a description of all of the variables used as output {% endif -%} output: + - versions: + - "versions.yml": + type: file + description: File containing software versions + pattern: "versions.yml" + - {{ 'bam:' if not_empty_template else "output:" }} #{% if has_meta -%} Only when we have meta - - meta: - type: map - description: | - Groovy Map containing sample information - e.g. `[ id:'sample1', single_end:false ]` + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
`[ id:'sample1', single_end:false ]` {% endif %} - - versions: - type: file - description: File containing software versions - pattern: "versions.yml" {% if not_empty_template -%} - ## TODO nf-core: Delete / customise this example output + ## TODO nf-core: Delete / customise this example output {%- endif %} - - {{ 'bam:' if not_empty_template else "output:" }} - type: file - description: {{ 'Sorted BAM/CRAM/SAM file' if not_empty_template else "" }} - pattern: {{ '"*.{bam,cram,sam}"' if not_empty_template else "" }} + - {{ '"*.bam":' if not_empty_template else '"*":' }} + type: file + description: {{ 'Sorted BAM/CRAM/SAM file' if not_empty_template else "" }} + pattern: {{ '"*.{bam,cram,sam}"' if not_empty_template else "" }} authors: - "{{ author }}" From c9f6f8fc016a5e0cf8dd4b060876075bb2b655b7 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 5 Jun 2024 15:27:22 +0200 Subject: [PATCH 253/737] add linting for correct inputs and outputs in meta.yml --- nf_core/components/nfcore_component.py | 5 +- nf_core/modules/lint/meta_yml.py | 177 ++++++++++++++----------- 2 files changed, 101 insertions(+), 81 deletions(-) diff --git a/nf_core/components/nfcore_component.py b/nf_core/components/nfcore_component.py index 9a266ab56..6ff1f6c17 100644 --- a/nf_core/components/nfcore_component.py +++ b/nf_core/components/nfcore_component.py @@ -216,10 +216,11 @@ def get_outputs_from_main_nf(self): if match_element.group(1): output_type = match_element.group(1) if match_element.group(3): - output_val = match_element.group(3).split(",")[0] # handle `files, stageAs: "inputs/*"` cases + output_val = match_element.group(3) elif match_element.group(4): - output_val = match_element.group(4).split(",")[0] # handle `files, stageAs: "inputs/*"` cases + output_val = match_element.group(4) if output_type and output_val: + output_val = output_val.strip("'").strip('"') # remove quotes output_channel[match_emit.group(1)].append({output_val: {"type": output_type}}) 
outputs.append(output_channel) log.debug(f"Found {len(outputs)} outputs in {self.main_nf}") diff --git a/nf_core/modules/lint/meta_yml.py b/nf_core/modules/lint/meta_yml.py index 6718ed5c2..d5615059a 100644 --- a/nf_core/modules/lint/meta_yml.py +++ b/nf_core/modules/lint/meta_yml.py @@ -40,8 +40,6 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None """ module.get_inputs_from_main_nf() - print(yaml.dump({"input": module.inputs})) - exit() module.get_outputs_from_main_nf() # Check if we have a patch file, get original file in that case meta_yaml = None @@ -93,93 +91,114 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None ) ) - # Confirm that all input and output channels are specified + # Confirm that all input and output channels are correctly specified if valid_meta_yml: + # Check that inputs are specified in meta.yml + if len(module.inputs) > 0 and "input" not in meta_yaml: + module.failed.append( + ( + "meta_input", + "Inputs not specified in module `meta.yml`", + module.meta_yml, + ) + ) + elif len(module.inputs) > 0: + module.passed.append( + ( + "meta_input", + "Inputs specified in module `meta.yml`", + module.meta_yml, + ) + ) + # Check that all inputs are correctly specified if "input" in meta_yaml: - meta_input = [list(x.keys())[0] for x in meta_yaml["input"]] - for input in module.inputs: - if input in meta_input: - module.passed.append(("meta_input_main_only", f"`{input}` specified", module.meta_yml)) - else: - module.warned.append( - ( - "meta_input_main_only", - f"`{input}` is present as an input in the `main.nf`, but missing in `meta.yml`", - module.meta_yml, - ) + # Obtain list of correct inputs and elements of each input channel + correct_inputs = [] + for input_channel in module.inputs: + channel_elements = [] + for element in input_channel: + channel_elements.append(list(element.keys())[0]) + correct_inputs.append(channel_elements) + # Obtain list of inputs specified in meta.yml + 
meta_inputs = [] + for input_channel in meta_yaml["input"]: + if isinstance(input_channel, list): # Correct format + channel_elements = [] + for element in input_channel: + channel_elements.append(list(element.keys())[0]) + meta_inputs.append(channel_elements) + elif isinstance(input_channel, dict): # Old format + meta_inputs.append(list(input_channel.keys())[0]) + + if correct_inputs == meta_inputs: + module.passed.append( + ( + "correct_meta_inputs", + "Correct inputs specified in module `meta.yml`", + module.meta_yml, ) - # check if there are any inputs in meta.yml that are not in main.nf - for input in meta_input: - if input in module.inputs: - module.passed.append( - ( - "meta_input_meta_only", - f"`{input}` is present as an input in `meta.yml` and `main.nf`", - module.meta_yml, - ) - ) - else: - module.warned.append( - ( - "meta_input_meta_only", - f"`{input}` is present as an input in `meta.yml` but not in `main.nf`", - module.meta_yml, - ) + ) + else: + module.failed.append( + ( + "correct_meta_inputs", + f"Incorrect inputs specified in module `meta.yml`. 
Inputs should contain: {correct_inputs}\nRun `nf-core modules lint --update-meta-yml` to update the `meta.yml` file.", + module.meta_yml, ) + ) - if "output" in meta_yaml and meta_yaml["output"] is not None: - meta_output = [list(x.keys())[0] for x in meta_yaml["output"]] - for output in module.outputs: - if output in meta_output: - module.passed.append(("meta_output_main_only", f"`{output}` specified", module.meta_yml)) - else: - module.warned.append( - ( - "meta_output_main_only", - f"`{output}` is present as an output in the `main.nf`, but missing in `meta.yml`", - module.meta_yml, - ) - ) - # check if there are any outputs in meta.yml that are not in main.nf - for output in meta_output: - if output in module.outputs: - module.passed.append( - ( - "meta_output_meta_only", - f"`{output}` is present as an output in `meta.yml` and `main.nf`", - module.meta_yml, - ) - ) - elif output == "meta": - module.passed.append( - ( - "meta_output_meta_only", - f"`{output}` is skipped for `meta.yml` outputs", - module.meta_yml, - ) - ) - else: - module.warned.append( - ( - "meta_output_meta_only", - f"`{output}` is present as an output in `meta.yml` but not in `main.nf`", - module.meta_yml, - ) - ) - # confirm that the name matches the process name in main.nf - if meta_yaml["name"].upper() == module.process_name: - module.passed.append( + # Check that outputs are specified in meta.yml + if len(module.outputs) > 0 and "output" not in meta_yaml: + module.failed.append( ( - "meta_name", - "Correct name specified in `meta.yml`.", + "meta_output", + "Outputs not specified in module `meta.yml`", module.meta_yml, ) ) - else: - module.failed.append( + elif len(module.outputs) > 0: + module.passed.append( ( - "meta_name", - f"Conflicting `process` name between meta.yml (`{meta_yaml['name']}`) and main.nf (`{module.process_name}`)", + "meta_output", + "Outputs specified in module `meta.yml`", module.meta_yml, ) ) + # Check that all outputs are correctly specified + if "output" in 
meta_yaml: + # Obtain dictionary of correct outputs and elements of each output channel + correct_outputs = {} + for output_channel in module.outputs: + channel_name = list(output_channel.keys())[0] + channel_elements = [] + for element in output_channel[channel_name]: + channel_elements.append(list(element.keys())[0]) + correct_outputs[channel_name] = channel_elements + # Obtain dictionary of outputs specified in meta.yml + meta_outputs = {} + for output_channel in meta_yaml["output"]: + channel_name = list(output_channel.keys())[0] + if isinstance(output_channel[channel_name], list): # Correct format + channel_elements = [] + for element in output_channel[channel_name]: + channel_elements.append(list(element.keys())[0]) + meta_outputs[channel_name] = channel_elements + elif isinstance(output_channel[channel_name], dict): # Old format + meta_outputs[channel_name] = [] + + if correct_outputs == meta_outputs: + module.passed.append( + ( + "correct_meta_outputs", + "Correct outputs specified in module `meta.yml`", + module.meta_yml, + ) + ) + else: + module.failed.append( + ( + "correct_meta_outputs", + f"Incorrect outputs specified in module `meta.yml`. 
Outputs should contain: {correct_outputs}\nRun `nf-core modules lint --update-meta-yml` to update the `meta.yml` file.", + module.meta_yml, + ) + ) From d79f487bdaacddedc1781c263574996e1c10760d Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 17 Jun 2024 17:26:37 +0200 Subject: [PATCH 254/737] add update-meta-yml option --- nf_core/__main__.py | 5 +- nf_core/commands_modules.py | 3 +- nf_core/components/lint/__init__.py | 2 + nf_core/components/nfcore_component.py | 4 +- nf_core/modules/lint/__init__.py | 105 +++++++++++++++- nf_core/modules/lint/meta_yml.py | 167 ++++++++++++++++--------- 6 files changed, 225 insertions(+), 61 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 4fa640436..584947afd 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -1196,7 +1196,10 @@ def command_modules_test(ctx, tool, dir, no_prompts, update, once, profile): is_flag=True, help="Fix the module version if a newer version is available", ) -def command_modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed, sort_by, fix_version): +@click.option( + "--update-meta-yml", is_flag=True, help="Update the meta.yml file with the correct format of input and outputs" +) +def command_modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed, sort_by, fix_version, update_meta_yml): """ Lint one or more modules in a directory. """ diff --git a/nf_core/commands_modules.py b/nf_core/commands_modules.py index 5f7191436..85471184c 100644 --- a/nf_core/commands_modules.py +++ b/nf_core/commands_modules.py @@ -244,7 +244,7 @@ def modules_test(ctx, tool, dir, no_prompts, update, once, profile): sys.exit(1) -def modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed, sort_by, fix_version): +def modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed, sort_by, fix_version, update_meta_yml): """ Lint one or more modules in a directory. 
@@ -277,6 +277,7 @@ def modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed, show_passed=passed, sort_by=sort_by, fix_version=fix_version, + update_meta_yml=update_meta_yml, ) if len(module_lint.failed) > 0: sys.exit(1) diff --git a/nf_core/components/lint/__init__.py b/nf_core/components/lint/__init__.py index 499d31e71..b4194e768 100644 --- a/nf_core/components/lint/__init__.py +++ b/nf_core/components/lint/__init__.py @@ -56,6 +56,7 @@ def __init__( component_type, dir, fail_warned=False, + update_meta_yml=False, remote_url=None, branch=None, no_pull=False, @@ -72,6 +73,7 @@ def __init__( ) self.fail_warned = fail_warned + self.update_meta_yml = update_meta_yml self.passed = [] self.warned = [] self.failed = [] diff --git a/nf_core/components/nfcore_component.py b/nf_core/components/nfcore_component.py index 6ff1f6c17..dcd76b0e8 100644 --- a/nf_core/components/nfcore_component.py +++ b/nf_core/components/nfcore_component.py @@ -187,7 +187,7 @@ def get_inputs_from_main_nf(self): elif match.group(4): input_val = match.group(4).split(",")[0] # handle `files, stageAs: "inputs/*"` cases if input_type and input_val: - channel_elements.append({input_val: {"type": input_type}}) + channel_elements.append({input_val: {"qualifier": input_type}}) if len(channel_elements) > 0: inputs.append(channel_elements) log.debug(f"Found {len(inputs)} inputs in {self.main_nf}") @@ -221,7 +221,7 @@ def get_outputs_from_main_nf(self): output_val = match_element.group(4) if output_type and output_val: output_val = output_val.strip("'").strip('"') # remove quotes - output_channel[match_emit.group(1)].append({output_val: {"type": output_type}}) + output_channel[match_emit.group(1)].append({output_val: {"qualifier": output_type}}) outputs.append(output_channel) log.debug(f"Found {len(outputs)} outputs in {self.main_nf}") self.outputs = outputs diff --git a/nf_core/modules/lint/__init__.py b/nf_core/modules/lint/__init__.py index b780144ef..f6c2069bd 100644 --- 
a/nf_core/modules/lint/__init__.py +++ b/nf_core/modules/lint/__init__.py @@ -11,6 +11,7 @@ import questionary import rich +import yaml import nf_core.modules.modules_utils import nf_core.utils @@ -29,7 +30,12 @@ class ModuleLint(ComponentLint): # Import lint functions from .environment_yml import environment_yml # type: ignore[misc] from .main_nf import main_nf # type: ignore[misc] - from .meta_yml import meta_yml # type: ignore[misc] + from .meta_yml import ( # type: ignore[misc] + meta_yml, + obtain_correct_and_specified_inputs, + obtain_correct_and_specified_outputs, + read_meta_yml, + ) from .module_changes import module_changes # type: ignore[misc] from .module_deprecations import module_deprecations # type: ignore[misc] from .module_patch import module_patch # type: ignore[misc] @@ -41,6 +47,7 @@ def __init__( self, dir, fail_warned=False, + update_meta_yml=False, remote_url=None, branch=None, no_pull=False, @@ -51,6 +58,7 @@ def __init__( component_type="modules", dir=dir, fail_warned=fail_warned, + update_meta_yml=update_meta_yml, remote_url=remote_url, branch=branch, no_pull=no_pull, @@ -213,6 +221,12 @@ def lint_module(self, mod, progress_bar, registry, local=False, fix_version=Fals # Otherwise run all the lint tests else: + mod.get_inputs_from_main_nf() + mod.get_outputs_from_main_nf() + # Update meta.yml file if requested + if self.update_meta_yml: + self.update_meta_yml_file(mod) + if self.repo_type == "pipeline" and self.modules_json: # Set correct sha version = self.modules_json.get_module_version(mod.component_name, mod.repo_url, mod.org) @@ -232,3 +246,92 @@ def lint_module(self, mod, progress_bar, registry, local=False, fix_version=Fals self.failed += warned self.failed += [LintResult(mod, *m) for m in mod.failed] + + def update_meta_yml_file(self, mod): + """ + Update the meta.yml file with the correct inputs and outputs + """ + meta_yml = self.read_meta_yml(mod) + corrected_meta_yml = meta_yml.copy() + + # Obtain inputs and outputs from main.nf 
and meta.yml + # Used to compare only the structure of channels and elements + # Do not compare features to allow for custom features in meta.yml (i.e. pattern) + if "input" in meta_yml: + correct_inputs, meta_inputs = self.obtain_correct_and_specified_inputs(mod, meta_yml) + if "output" in meta_yml: + correct_outputs, meta_outputs = self.obtain_correct_and_specified_outputs(mod, meta_yml) + + if correct_inputs != meta_inputs: + log.debug( + f"Correct inputs: '{correct_inputs}' differ from current inputs: '{meta_inputs}' in '{mod.meta_yml}'" + ) + corrected_meta_yml["input"] = mod.inputs.copy() # list of lists (channels) of dicts (elements) + for i, channel in enumerate(corrected_meta_yml["input"]): + for j, element in enumerate(channel): + element_name = list(element.keys())[0] + for k, meta_element in enumerate(meta_yml["input"]): + try: + # Handle old format of meta.yml: list of dicts (channels) + if element_name in meta_element.keys(): + # Copy current features of that input element form meta.yml + for feature in meta_element[element_name].keys(): + if feature not in element[element_name].keys(): + corrected_meta_yml["input"][i][j][element_name][feature] = meta_element[ + element_name + ][feature] + break + except AttributeError: + # Handle new format of meta.yml: list of lists (channels) of elements (dicts) + for x, meta_ch_element in enumerate(meta_element): + if element_name in meta_ch_element.keys(): + # Copy current features of that input element form meta.yml + for feature in meta_element[x][element_name].keys(): + if feature not in element[element_name].keys(): + corrected_meta_yml["input"][i][j][element_name][feature] = meta_element[x][ + element_name + ][feature] + break + + if correct_outputs != meta_outputs: + log.debug( + f"Correct outputs: '{correct_outputs}' differ from current outputs: '{meta_outputs}' in '{mod.meta_yml}'" + ) + corrected_meta_yml["output"] = mod.outputs.copy() # list of dicts (channels) with list of dicts (elements) + for i, 
channel in enumerate(corrected_meta_yml["output"]): + ch_name = list(channel.keys())[0] + for j, element in enumerate(channel[ch_name]): + element_name = list(element.keys())[0] + for k, meta_element in enumerate(meta_yml["output"]): + if element_name in meta_element.keys(): + # Copy current features of that output element form meta.yml + for feature in meta_element[element_name].keys(): + if feature not in element[element_name].keys(): + corrected_meta_yml["output"][i][ch_name][j][element_name][feature] = meta_element[ + element_name + ][feature] + break + elif ch_name in meta_element.keys(): + # When the previous output element was using the name of the channel + # Copy current features of that output element form meta.yml + try: + # Handle old format of meta.yml + for feature in meta_element[ch_name].keys(): + if feature not in element[element_name].keys(): + corrected_meta_yml["output"][i][ch_name][j][element_name][feature] = ( + meta_element[ch_name][feature] + ) + except AttributeError: + # Handle new format of meta.yml + for x, meta_ch_element in enumerate(meta_element[ch_name]): + for meta_ch_element_name in meta_ch_element.keys(): + for feature in meta_ch_element[meta_ch_element_name].keys(): + if feature not in element[element_name].keys(): + corrected_meta_yml["output"][i][ch_name][j][element_name][feature] = ( + meta_ch_element[meta_ch_element_name][feature] + ) + break + + with open(mod.meta_yml, "w") as fh: + log.info(f"Updating {mod.meta_yml}") + yaml.dump(corrected_meta_yml, fh, sort_keys=False, Dumper=nf_core.utils.custom_yaml_dumper()) diff --git a/nf_core/modules/lint/meta_yml.py b/nf_core/modules/lint/meta_yml.py index d5615059a..9c074ba73 100644 --- a/nf_core/modules/lint/meta_yml.py +++ b/nf_core/modules/lint/meta_yml.py @@ -1,5 +1,7 @@ import json +import logging from pathlib import Path +from typing import Union import yaml from jsonschema import exceptions, validators @@ -8,6 +10,8 @@ from nf_core.components.nfcore_component import 
NFCoreComponent from nf_core.modules.modules_differ import ModulesDiffer +log = logging.getLogger(__name__) + def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None: """ @@ -39,28 +43,13 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None """ - module.get_inputs_from_main_nf() - module.get_outputs_from_main_nf() # Check if we have a patch file, get original file in that case - meta_yaml = None - if module.is_patched: - lines = ModulesDiffer.try_apply_patch( - module.component_name, - module_lint_object.modules_repo.repo_path, - module.patch_path, - Path(module.component_dir).relative_to(module.base_dir), - reverse=True, - ).get("meta.yml") - if lines is not None: - meta_yaml = yaml.safe_load("".join(lines)) + meta_yaml = read_meta_yml(module_lint_object, module) if meta_yaml is None: - try: - with open(module.meta_yml) as fh: - meta_yaml = yaml.safe_load(fh) - module.passed.append(("meta_yml_exists", "Module `meta.yml` exists", module.meta_yml)) - except FileNotFoundError: - module.failed.append(("meta_yml_exists", "Module `meta.yml` does not exist", module.meta_yml)) - return + module.failed.append(("meta_yml_exists", "Module `meta.yml` does not exist", module.meta_yml)) + return + else: + module.passed.append(("meta_yml_exists", "Module `meta.yml` exists", module.meta_yml)) # Confirm that the meta.yml file is valid according to the JSON schema valid_meta_yml = False @@ -110,25 +99,11 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None module.meta_yml, ) ) + else: + log.debug(f"No inputs specified in module `main.nf`: {module.component_name}") # Check that all inputs are correctly specified if "input" in meta_yaml: - # Obtain list of correct inputs and elements of each input channel - correct_inputs = [] - for input_channel in module.inputs: - channel_elements = [] - for element in input_channel: - channel_elements.append(list(element.keys())[0]) - 
correct_inputs.append(channel_elements) - # Obtain list of inputs specified in meta.yml - meta_inputs = [] - for input_channel in meta_yaml["input"]: - if isinstance(input_channel, list): # Correct format - channel_elements = [] - for element in input_channel: - channel_elements.append(list(element.keys())[0]) - meta_inputs.append(channel_elements) - elif isinstance(input_channel, dict): # Old format - meta_inputs.append(list(input_channel.keys())[0]) + correct_inputs, meta_inputs = obtain_correct_and_specified_inputs(module_lint_object, module, meta_yaml) if correct_inputs == meta_inputs: module.passed.append( @@ -166,25 +141,7 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None ) # Check that all outputs are correctly specified if "output" in meta_yaml: - # Obtain dictionary of correct outputs and elements of each output channel - correct_outputs = {} - for output_channel in module.outputs: - channel_name = list(output_channel.keys())[0] - channel_elements = [] - for element in output_channel[channel_name]: - channel_elements.append(list(element.keys())[0]) - correct_outputs[channel_name] = channel_elements - # Obtain dictionary of outputs specified in meta.yml - meta_outputs = {} - for output_channel in meta_yaml["output"]: - channel_name = list(output_channel.keys())[0] - if isinstance(output_channel[channel_name], list): # Correct format - channel_elements = [] - for element in output_channel[channel_name]: - channel_elements.append(list(element.keys())[0]) - meta_outputs[channel_name] = channel_elements - elif isinstance(output_channel[channel_name], dict): # Old format - meta_outputs[channel_name] = [] + correct_outputs, meta_outputs = obtain_correct_and_specified_outputs(module_lint_object, module, meta_yaml) if correct_outputs == meta_outputs: module.passed.append( @@ -202,3 +159,101 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None module.meta_yml, ) ) + + +def 
read_meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> Union[dict, None]: + """ + Read a `meta.yml` file and return it as a dictionary + + Args: + module_lint_object (ComponentLint): The lint object for the module + module (NFCoreComponent): The module to read + + Returns: + dict: The `meta.yml` file as a dictionary + """ + meta_yaml = None + # Check if we have a patch file, get original file in that case + if module.is_patched: + lines = ModulesDiffer.try_apply_patch( + module.component_name, + module_lint_object.modules_repo.repo_path, + module.patch_path, + Path(module.component_dir).relative_to(module.base_dir), + reverse=True, + ).get("meta.yml") + if lines is not None: + meta_yaml = yaml.safe_load("".join(lines)) + if meta_yaml is None: + try: + with open(module.meta_yml) as fh: + meta_yaml = yaml.safe_load(fh) + except FileNotFoundError: + return None + return meta_yaml + + +def obtain_correct_and_specified_inputs(_, module, meta_yaml): + """ + Obtain the list of correct inputs and the elements of each input channel. + + Args: + module (object): The module object. + meta_yaml (dict): The meta.yml dictionary. + + Returns: + tuple: A tuple containing two lists. The first list contains the correct inputs, + and the second list contains the inputs specified in meta.yml. 
+ """ + correct_inputs = [] + for input_channel in module.inputs: + channel_elements = [] + for element in input_channel: + channel_elements.append(list(element.keys())[0]) + correct_inputs.append(channel_elements) + + meta_inputs = [] + for input_channel in meta_yaml["input"]: + if isinstance(input_channel, list): # Correct format + channel_elements = [] + for element in input_channel: + channel_elements.append(list(element.keys())[0]) + meta_inputs.append(channel_elements) + elif isinstance(input_channel, dict): # Old format + meta_inputs.append(list(input_channel.keys())[0]) + + return correct_inputs, meta_inputs + + +def obtain_correct_and_specified_outputs(_, module, meta_yaml): + """ + Obtain the dictionary of correct outputs and elements of each output channel. + + Args: + module (object): The module object. + meta_yaml (dict): The meta.yml dictionary. + + Returns: + correct_outputs (dict): A dictionary containing the correct outputs and their elements. + meta_outputs (dict): A dictionary containing the outputs specified in meta.yml. 
+ """ + correct_outputs = {} + for output_channel in module.outputs: + channel_name = list(output_channel.keys())[0] + channel_elements = [] + for element in output_channel[channel_name]: + channel_elements.append(list(element.keys())[0]) + correct_outputs[channel_name] = channel_elements + + meta_outputs = {} + for output_channel in meta_yaml["output"]: + channel_name = list(output_channel.keys())[0] + if isinstance(output_channel[channel_name], list): # Correct format + channel_elements = [] + for element in output_channel[channel_name]: + channel_elements.append(list(element.keys())[0]) + meta_outputs[channel_name] = channel_elements + elif isinstance(output_channel[channel_name], dict): # Old format + meta_outputs[channel_name] = [] + + return correct_outputs, meta_outputs From 070d792821c2b26e8cc560b0acdc69c21cfe535d Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 21 Jun 2024 11:54:34 +0200 Subject: [PATCH 255/737] add test for nf-core modules lint --update-meta-yml --- tests/__snapshots__/test_create_app.ambr | 264 ++++++++++++----------- tests/modules/lint.py | 9 + tests/test_modules.py | 1 + 3 files changed, 144 insertions(+), 130 deletions(-) diff --git a/tests/__snapshots__/test_create_app.ambr b/tests/__snapshots__/test_create_app.ambr index 2ad077258..ea853c75a 100644 --- a/tests/__snapshots__/test_create_app.ambr +++ b/tests/__snapshots__/test_create_app.ambr @@ -1405,258 +1405,262 @@ font-weight: 700; } - .terminal-3175764146-matrix { + .terminal-1242773313-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3175764146-title { + .terminal-1242773313-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3175764146-r1 { fill: #c5c8c6 } - .terminal-3175764146-r2 { fill: #e3e3e3 } - .terminal-3175764146-r3 { fill: #989898 } - .terminal-3175764146-r4 { fill: #e1e1e1 } - .terminal-3175764146-r5 { fill: #4ebf71;font-weight: bold } - 
.terminal-3175764146-r6 { fill: #a5a5a5;font-style: italic; } - .terminal-3175764146-r7 { fill: #454a50 } - .terminal-3175764146-r8 { fill: #e2e3e3;font-weight: bold } - .terminal-3175764146-r9 { fill: #1e1e1e } - .terminal-3175764146-r10 { fill: #008139 } - .terminal-3175764146-r11 { fill: #000000 } - .terminal-3175764146-r12 { fill: #e2e2e2 } - .terminal-3175764146-r13 { fill: #18954b } - .terminal-3175764146-r14 { fill: #e2e2e2;font-weight: bold } - .terminal-3175764146-r15 { fill: #969696;font-weight: bold } - .terminal-3175764146-r16 { fill: #808080 } - .terminal-3175764146-r17 { fill: #7ae998 } - .terminal-3175764146-r18 { fill: #507bb3 } - .terminal-3175764146-r19 { fill: #0a180e;font-weight: bold } - .terminal-3175764146-r20 { fill: #dde6ed;font-weight: bold } - .terminal-3175764146-r21 { fill: #001541 } - .terminal-3175764146-r22 { fill: #fea62b;font-weight: bold } - .terminal-3175764146-r23 { fill: #a7a9ab } - .terminal-3175764146-r24 { fill: #e2e3e3 } + .terminal-1242773313-r1 { fill: #c5c8c6 } + .terminal-1242773313-r2 { fill: #e3e3e3 } + .terminal-1242773313-r3 { fill: #989898 } + .terminal-1242773313-r4 { fill: #e1e1e1 } + .terminal-1242773313-r5 { fill: #4ebf71;font-weight: bold } + .terminal-1242773313-r6 { fill: #18954b } + .terminal-1242773313-r7 { fill: #e2e2e2 } + .terminal-1242773313-r8 { fill: #e2e2e2;font-style: italic; } + .terminal-1242773313-r9 { fill: #e2e2e2;font-style: italic;;text-decoration: underline; } + .terminal-1242773313-r10 { fill: #a5a5a5;font-style: italic; } + .terminal-1242773313-r11 { fill: #1e1e1e } + .terminal-1242773313-r12 { fill: #008139 } + .terminal-1242773313-r13 { fill: #454a50 } + .terminal-1242773313-r14 { fill: #787878 } + .terminal-1242773313-r15 { fill: #e2e3e3;font-weight: bold } + .terminal-1242773313-r16 { fill: #000000 } + .terminal-1242773313-r17 { fill: #b93c5b } + .terminal-1242773313-r18 { fill: #e2e2e2;font-weight: bold } + .terminal-1242773313-r19 { fill: #969696;font-weight: bold } + 
.terminal-1242773313-r20 { fill: #808080 } + .terminal-1242773313-r21 { fill: #7ae998 } + .terminal-1242773313-r22 { fill: #507bb3 } + .terminal-1242773313-r23 { fill: #0a180e;font-weight: bold } + .terminal-1242773313-r24 { fill: #dde6ed;font-weight: bold } + .terminal-1242773313-r25 { fill: #001541 } + .terminal-1242773313-r26 { fill: #fea62b;font-weight: bold } + .terminal-1242773313-r27 { fill: #a7a9ab } + .terminal-1242773313-r28 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Create GitHub repository - -   Now that we have created a new pipeline locally, we can create a new GitHub repository and push    -   the code to it. - - - - - Your GitHub usernameYour GitHub personal access token▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - for login. Show  - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - GitHub username••••••••••••                   - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - The name of the organisation where the The name of the new GitHub repository - GitHub repo will be cretaed - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - nf-core                               mypipeline                             - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - ⚠️ You can't create a repository directly in the nf-core organisation. - Please create the pipeline repo to an organisation where you have access or use your user  - account. A core-team member will be able to transfer the repo to nf-core once the development - has started. - - 💡 Your GitHub user account will be used by default if nf-core is given as the org name. 
- - - ▔▔▔▔▔▔▔▔Private - Select to make the new GitHub repo private. - ▁▁▁▁▁▁▁▁ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Create GitHub repo  Finish without creating a repo  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - -  d Toggle dark mode  q Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Create GitHub repository + + Now that we have created a new pipeline locally, we can create a new GitHub repository and push  + the code to it. + + 💡 Found GitHub username in local GitHub CLI config + + + + Your GitHub usernameYour GitHub personal access token + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔for login.▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + GitHub username▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔Show + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁GitHub token▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + The name of the organisation where the The name of the new GitHub repository + GitHub repo will be cretaed▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔mypipeline + nf-core▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + ⚠️ You can't create a repository directly in the nf-core organisation. + Please create the pipeline repo to an organisation where you have access or use your user  + account. A core-team member will be able to transfer the repo to nf-core once the development + has started. + + 💡 Your GitHub user account will be used by default if nf-core is given as the org name. + + + ▔▔▔▔▔▔▔▔Private + Select to make the new GitHub repo private. 
+ ▁▁▁▁▁▁▁▁ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + BackCreate GitHub repoFinish without creating a repo + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + +  d Toggle dark mode q Quit diff --git a/tests/modules/lint.py b/tests/modules/lint.py index e1a4e27ff..7798c0a94 100644 --- a/tests/modules/lint.py +++ b/tests/modules/lint.py @@ -59,6 +59,15 @@ def test_modules_lint_new_modules(self): assert len(module_lint.warned) >= 0 +def test_modules_lint_update_meta_yml(self): + """update the meta.yml of a module""" + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules, update_meta_yml=True) + module_lint.lint(print_results=False, all_modules="fastqc") + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + + def test_modules_lint_no_gitlab(self): """Test linting a pipeline with no modules installed""" self.mods_remove.remove("fastqc", force=True) diff --git a/tests/test_modules.py b/tests/test_modules.py index 107b24566..69778dcf0 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -214,6 +214,7 @@ def test_modulesrepo_class(self): test_modules_lint_snapshot_file_missing_fail, test_modules_lint_snapshot_file_not_needed, test_modules_lint_trimgalore, + test_modules_lint_update_meta_yml, test_modules_meta_yml_incorrect_licence_field, test_modules_meta_yml_incorrect_name, test_modules_meta_yml_input_mismatch, From 2b705572a1e8a07a40ac7ea7cb7274d7654a21d1 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Thu, 4 Jul 2024 11:48:11 +0200 Subject: [PATCH 256/737] remove qualifier --- nf_core/components/nfcore_component.py | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/nf_core/components/nfcore_component.py b/nf_core/components/nfcore_component.py index dcd76b0e8..3e5a68648 100644 --- a/nf_core/components/nfcore_component.py +++ 
b/nf_core/components/nfcore_component.py @@ -178,16 +178,13 @@ def get_inputs_from_main_nf(self): regex = r"(val|path)\s*(\(([^)]+)\)|\s*([^)\s,]+))" matches = re.finditer(regex, line) for _, match in enumerate(matches, start=1): - input_type = None input_val = None - if match.group(1): - input_type = match.group(1) if match.group(3): input_val = match.group(3).split(",")[0] # handle `files, stageAs: "inputs/*"` cases elif match.group(4): input_val = match.group(4).split(",")[0] # handle `files, stageAs: "inputs/*"` cases - if input_type and input_val: - channel_elements.append({input_val: {"qualifier": input_type}}) + if input_val: + channel_elements.append({input_val: {}}) if len(channel_elements) > 0: inputs.append(channel_elements) log.debug(f"Found {len(inputs)} inputs in {self.main_nf}") @@ -211,17 +208,14 @@ def get_outputs_from_main_nf(self): continue output_channel = {match_emit.group(1): []} for _, match_element in enumerate(matches_elements, start=1): - output_type = None output_val = None - if match_element.group(1): - output_type = match_element.group(1) if match_element.group(3): output_val = match_element.group(3) elif match_element.group(4): output_val = match_element.group(4) - if output_type and output_val: + if output_val: output_val = output_val.strip("'").strip('"') # remove quotes - output_channel[match_emit.group(1)].append({output_val: {"qualifier": output_type}}) + output_channel[match_emit.group(1)].append({output_val: {}}) outputs.append(output_channel) log.debug(f"Found {len(outputs)} outputs in {self.main_nf}") self.outputs = outputs From 0f49a88ec2354361c37a7bf3cb43bf6ce980c0ea Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Thu, 4 Jul 2024 12:18:12 +0200 Subject: [PATCH 257/737] use param --fix to update the meta.yml --- nf_core/__main__.py | 6 ++---- nf_core/commands_modules.py | 4 ++-- nf_core/components/lint/__init__.py | 4 ++-- nf_core/modules/lint/__init__.py | 6 +++--- nf_core/modules/lint/meta_yml.py | 4 ++-- 
tests/modules/lint.py | 2 +- 6 files changed, 12 insertions(+), 14 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 584947afd..9ba23a8b0 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -1196,10 +1196,8 @@ def command_modules_test(ctx, tool, dir, no_prompts, update, once, profile): is_flag=True, help="Fix the module version if a newer version is available", ) -@click.option( - "--update-meta-yml", is_flag=True, help="Update the meta.yml file with the correct format of input and outputs" -) -def command_modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed, sort_by, fix_version, update_meta_yml): +@click.option("--fix", is_flag=True, help="Fix all linting tests if possible.") +def command_modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed, sort_by, fix_version, fix): """ Lint one or more modules in a directory. """ diff --git a/nf_core/commands_modules.py b/nf_core/commands_modules.py index 85471184c..ca80e87bd 100644 --- a/nf_core/commands_modules.py +++ b/nf_core/commands_modules.py @@ -244,7 +244,7 @@ def modules_test(ctx, tool, dir, no_prompts, update, once, profile): sys.exit(1) -def modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed, sort_by, fix_version, update_meta_yml): +def modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed, sort_by, fix_version, fix): """ Lint one or more modules in a directory. 
@@ -261,6 +261,7 @@ def modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed, module_lint = ModuleLint( dir, fail_warned=fail_warned, + fix=fix, registry=ctx.params["registry"], remote_url=ctx.obj["modules_repo_url"], branch=ctx.obj["modules_repo_branch"], @@ -277,7 +278,6 @@ def modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed, show_passed=passed, sort_by=sort_by, fix_version=fix_version, - update_meta_yml=update_meta_yml, ) if len(module_lint.failed) > 0: sys.exit(1) diff --git a/nf_core/components/lint/__init__.py b/nf_core/components/lint/__init__.py index b4194e768..0e3aba23f 100644 --- a/nf_core/components/lint/__init__.py +++ b/nf_core/components/lint/__init__.py @@ -56,7 +56,7 @@ def __init__( component_type, dir, fail_warned=False, - update_meta_yml=False, + fix=False, remote_url=None, branch=None, no_pull=False, @@ -73,7 +73,7 @@ def __init__( ) self.fail_warned = fail_warned - self.update_meta_yml = update_meta_yml + self.fix = fix self.passed = [] self.warned = [] self.failed = [] diff --git a/nf_core/modules/lint/__init__.py b/nf_core/modules/lint/__init__.py index f6c2069bd..58a4c4715 100644 --- a/nf_core/modules/lint/__init__.py +++ b/nf_core/modules/lint/__init__.py @@ -47,7 +47,7 @@ def __init__( self, dir, fail_warned=False, - update_meta_yml=False, + fix=False, remote_url=None, branch=None, no_pull=False, @@ -58,7 +58,7 @@ def __init__( component_type="modules", dir=dir, fail_warned=fail_warned, - update_meta_yml=update_meta_yml, + fix=fix, remote_url=remote_url, branch=branch, no_pull=no_pull, @@ -224,7 +224,7 @@ def lint_module(self, mod, progress_bar, registry, local=False, fix_version=Fals mod.get_inputs_from_main_nf() mod.get_outputs_from_main_nf() # Update meta.yml file if requested - if self.update_meta_yml: + if self.fix: self.update_meta_yml_file(mod) if self.repo_type == "pipeline" and self.modules_json: diff --git a/nf_core/modules/lint/meta_yml.py b/nf_core/modules/lint/meta_yml.py 
index 9c074ba73..c3b5f3f28 100644 --- a/nf_core/modules/lint/meta_yml.py +++ b/nf_core/modules/lint/meta_yml.py @@ -117,7 +117,7 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None module.failed.append( ( "correct_meta_inputs", - f"Incorrect inputs specified in module `meta.yml`. Inputs should contain: {correct_inputs}\nRun `nf-core modules lint --update-meta-yml` to update the `meta.yml` file.", + f"Incorrect inputs specified in module `meta.yml`. Inputs should contain: {correct_inputs}\nRun `nf-core modules lint --fix` to update the `meta.yml` file.", module.meta_yml, ) ) @@ -155,7 +155,7 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None module.failed.append( ( "correct_meta_outputs", - f"Incorrect outputs specified in module `meta.yml`. Outputs should contain: {correct_outputs}\nRun `nf-core modules lint --update-meta-yml` to update the `meta.yml` file.", + f"Incorrect outputs specified in module `meta.yml`. Outputs should contain: {correct_outputs}\nRun `nf-core modules lint --fix` to update the `meta.yml` file.", module.meta_yml, ) ) diff --git a/tests/modules/lint.py b/tests/modules/lint.py index 7798c0a94..9b6c6a78c 100644 --- a/tests/modules/lint.py +++ b/tests/modules/lint.py @@ -61,7 +61,7 @@ def test_modules_lint_new_modules(self): def test_modules_lint_update_meta_yml(self): """update the meta.yml of a module""" - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules, update_meta_yml=True) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules, fix=True) module_lint.lint(print_results=False, all_modules="fastqc") assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 From ef7364dfbc18e8d63271c0be9732d8ac01c45efa Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Thu, 4 Jul 2024 12:27:24 +0200 Subject: [PATCH 258/737] create app snapshot from dev branch --- 
tests/__snapshots__/test_create_app.ambr | 264 +++++++++++------------ 1 file changed, 130 insertions(+), 134 deletions(-) diff --git a/tests/__snapshots__/test_create_app.ambr b/tests/__snapshots__/test_create_app.ambr index ea853c75a..2ad077258 100644 --- a/tests/__snapshots__/test_create_app.ambr +++ b/tests/__snapshots__/test_create_app.ambr @@ -1405,262 +1405,258 @@ font-weight: 700; } - .terminal-1242773313-matrix { + .terminal-3175764146-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1242773313-title { + .terminal-3175764146-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1242773313-r1 { fill: #c5c8c6 } - .terminal-1242773313-r2 { fill: #e3e3e3 } - .terminal-1242773313-r3 { fill: #989898 } - .terminal-1242773313-r4 { fill: #e1e1e1 } - .terminal-1242773313-r5 { fill: #4ebf71;font-weight: bold } - .terminal-1242773313-r6 { fill: #18954b } - .terminal-1242773313-r7 { fill: #e2e2e2 } - .terminal-1242773313-r8 { fill: #e2e2e2;font-style: italic; } - .terminal-1242773313-r9 { fill: #e2e2e2;font-style: italic;;text-decoration: underline; } - .terminal-1242773313-r10 { fill: #a5a5a5;font-style: italic; } - .terminal-1242773313-r11 { fill: #1e1e1e } - .terminal-1242773313-r12 { fill: #008139 } - .terminal-1242773313-r13 { fill: #454a50 } - .terminal-1242773313-r14 { fill: #787878 } - .terminal-1242773313-r15 { fill: #e2e3e3;font-weight: bold } - .terminal-1242773313-r16 { fill: #000000 } - .terminal-1242773313-r17 { fill: #b93c5b } - .terminal-1242773313-r18 { fill: #e2e2e2;font-weight: bold } - .terminal-1242773313-r19 { fill: #969696;font-weight: bold } - .terminal-1242773313-r20 { fill: #808080 } - .terminal-1242773313-r21 { fill: #7ae998 } - .terminal-1242773313-r22 { fill: #507bb3 } - .terminal-1242773313-r23 { fill: #0a180e;font-weight: bold } - .terminal-1242773313-r24 { fill: #dde6ed;font-weight: bold } - .terminal-1242773313-r25 { fill: #001541 
} - .terminal-1242773313-r26 { fill: #fea62b;font-weight: bold } - .terminal-1242773313-r27 { fill: #a7a9ab } - .terminal-1242773313-r28 { fill: #e2e3e3 } + .terminal-3175764146-r1 { fill: #c5c8c6 } + .terminal-3175764146-r2 { fill: #e3e3e3 } + .terminal-3175764146-r3 { fill: #989898 } + .terminal-3175764146-r4 { fill: #e1e1e1 } + .terminal-3175764146-r5 { fill: #4ebf71;font-weight: bold } + .terminal-3175764146-r6 { fill: #a5a5a5;font-style: italic; } + .terminal-3175764146-r7 { fill: #454a50 } + .terminal-3175764146-r8 { fill: #e2e3e3;font-weight: bold } + .terminal-3175764146-r9 { fill: #1e1e1e } + .terminal-3175764146-r10 { fill: #008139 } + .terminal-3175764146-r11 { fill: #000000 } + .terminal-3175764146-r12 { fill: #e2e2e2 } + .terminal-3175764146-r13 { fill: #18954b } + .terminal-3175764146-r14 { fill: #e2e2e2;font-weight: bold } + .terminal-3175764146-r15 { fill: #969696;font-weight: bold } + .terminal-3175764146-r16 { fill: #808080 } + .terminal-3175764146-r17 { fill: #7ae998 } + .terminal-3175764146-r18 { fill: #507bb3 } + .terminal-3175764146-r19 { fill: #0a180e;font-weight: bold } + .terminal-3175764146-r20 { fill: #dde6ed;font-weight: bold } + .terminal-3175764146-r21 { fill: #001541 } + .terminal-3175764146-r22 { fill: #fea62b;font-weight: bold } + .terminal-3175764146-r23 { fill: #a7a9ab } + .terminal-3175764146-r24 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Create GitHub repository - - Now that we have created a new pipeline locally, we can create a new GitHub repository and push  - the code to it. 
- - 💡 Found GitHub username in local GitHub CLI config - - - - Your GitHub usernameYour GitHub personal access token - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔for login.▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - GitHub username▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔Show - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁GitHub token▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - The name of the organisation where the The name of the new GitHub repository - GitHub repo will be cretaed▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔mypipeline - nf-core▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - ⚠️ You can't create a repository directly in the nf-core organisation. - Please create the pipeline repo to an organisation where you have access or use your user  - account. A core-team member will be able to transfer the repo to nf-core once the development - has started. - - 💡 Your GitHub user account will be used by default if nf-core is given as the org name. - - - ▔▔▔▔▔▔▔▔Private - Select to make the new GitHub repo private. - ▁▁▁▁▁▁▁▁ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - BackCreate GitHub repoFinish without creating a repo - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - -  d Toggle dark mode q Quit + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Create GitHub repository + +   Now that we have created a new pipeline locally, we can create a new GitHub repository and push    +   the code to it. + + + + + Your GitHub usernameYour GitHub personal access token▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + for login. 
Show  + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + GitHub username••••••••••••                   + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + The name of the organisation where the The name of the new GitHub repository + GitHub repo will be cretaed + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + nf-core                               mypipeline                             + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + ⚠️ You can't create a repository directly in the nf-core organisation. + Please create the pipeline repo to an organisation where you have access or use your user  + account. A core-team member will be able to transfer the repo to nf-core once the development + has started. + + 💡 Your GitHub user account will be used by default if nf-core is given as the org name. + + + ▔▔▔▔▔▔▔▔Private + Select to make the new GitHub repo private. + ▁▁▁▁▁▁▁▁ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Back  Create GitHub repo  Finish without creating a repo  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + +  d Toggle dark mode  q Quit  From c7ff082a46f0693ad5e603fc18a9197e7fcc3e90 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 5 Jul 2024 12:20:20 +0200 Subject: [PATCH 259/737] add fix argument --- nf_core/__main__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 9ba23a8b0..1dd92d6b8 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -1201,7 +1201,7 @@ def command_modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, """ Lint one or more modules in a directory. 
""" - modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed, sort_by, fix_version) + modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed, sort_by, fix_version, fix) # nf-core modules info From 7c4fcc119648ba01977535c07fcfd1044b6e6514 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 21 Jun 2024 11:07:36 +0200 Subject: [PATCH 260/737] add tool identifier from bio.tools and intput and output ontologies --- nf_core/components/components_utils.py | 27 ++++++++++++++++++++++++++ nf_core/components/create.py | 4 ++++ nf_core/module-template/meta.yml | 19 +++++++++++++++++- nf_core/modules/lint/__init__.py | 16 +++++++++++++-- nf_core/modules/lint/meta_yml.py | 8 +++++--- requirements.txt | 1 + 6 files changed, 69 insertions(+), 6 deletions(-) diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index 01650a643..965b28e16 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -4,6 +4,7 @@ from typing import List, Optional, Tuple import questionary +import requests import rich.prompt import nf_core.utils @@ -152,3 +153,29 @@ def get_components_to_install(subworkflow_dir: str) -> Tuple[List[str], List[str elif link.startswith("../"): subworkflows.append(name.lower()) return modules, subworkflows + + +def get_biotools_id(tool_name) -> str: + """ + Try to find a bio.tools ID for 'tool' + """ + url = f"https://bio.tools/api/t/?q={tool_name}&format=json" + try: + # Send a GET request to the API + response = requests.get(url) + response.raise_for_status() # Raise an error for bad status codes + # Parse the JSON response + data = response.json() + + # Iterate through the tools in the response to find the tool name + for tool in data["list"]: + if tool["name"].lower() == tool_name: + return tool["biotoolsCURIE"] + + # If the tool name was not found in the response + log.warning(f"Could not find a bio.tools ID for '{tool_name}'") + return "" + + 
except requests.exceptions.RequestException as e: + log.warning(f"Could not find a bio.tools ID for '{tool_name}': {e}") + return "" diff --git a/nf_core/components/create.py b/nf_core/components/create.py index 5d6c411bd..426b91c98 100644 --- a/nf_core/components/create.py +++ b/nf_core/components/create.py @@ -20,6 +20,7 @@ import nf_core import nf_core.utils from nf_core.components.components_command import ComponentCommand +from nf_core.components.components_utils import get_biotools_id from nf_core.pipelines.lint_utils import run_prettier_on_file log = logging.getLogger(__name__) @@ -60,6 +61,7 @@ def __init__( self.file_paths: Dict[str, Path] = {} self.not_empty_template = not empty_template self.migrate_pytest = migrate_pytest + self.tool_identifier = "" def create(self): """ @@ -148,6 +150,8 @@ def create(self): if self.component_type == "modules": # Try to find a bioconda package for 'component' self._get_bioconda_tool() + # Try to find a biotools entry for 'component' + self.tool_identifier = get_biotools_id(self.component) # Prompt for GitHub username self._get_username() diff --git a/nf_core/module-template/meta.yml b/nf_core/module-template/meta.yml index 2dd8e74ef..c7c16dcb3 100644 --- a/nf_core/module-template/meta.yml +++ b/nf_core/module-template/meta.yml @@ -20,6 +20,7 @@ tools: tool_dev_url: "{{ tool_dev_url }}" doi: "" licence: {{ tool_licence }} + identifier: {{ tool_identifier }} {% if not_empty_template -%} ## TODO nf-core: Add a description of all of the variables used as input @@ -39,6 +40,14 @@ input: type: file description: {{ 'Sorted BAM/CRAM/SAM file' if not_empty_template else "" }} pattern: {{ '"*.{bam,cram,sam}"' if not_empty_template else "" }} + ontologies: + {% if not_empty_template -%} + - edam: "http://edamontology.org/format_25722" + - edam: "http://edamontology.org/format_2573" + - edam: "http://edamontology.org/format_3462" + {% else %} + - edam: "" + {%- endif %} {% if not_empty_template -%} ## TODO nf-core: Add a 
description of all of the variables used as output @@ -56,7 +65,7 @@ output: description: | Groovy Map containing sample information e.g. `[ id:'sample1', single_end:false ]` - {% endif %} + {%- endif %} {% if not_empty_template -%} ## TODO nf-core: Delete / customise this example output {%- endif %} @@ -64,6 +73,14 @@ output: type: file description: {{ 'Sorted BAM/CRAM/SAM file' if not_empty_template else "" }} pattern: {{ '"*.{bam,cram,sam}"' if not_empty_template else "" }} + ontologies: + {% if not_empty_template -%} + - edam: "http://edamontology.org/format_25722" + - edam: "http://edamontology.org/format_2573" + - edam: "http://edamontology.org/format_3462" + {% else -%} + - edam: "" + {%- endif %} authors: - "{{ author }}" diff --git a/nf_core/modules/lint/__init__.py b/nf_core/modules/lint/__init__.py index 58a4c4715..44e00e333 100644 --- a/nf_core/modules/lint/__init__.py +++ b/nf_core/modules/lint/__init__.py @@ -11,10 +11,11 @@ import questionary import rich -import yaml +import ruamel.yaml import nf_core.modules.modules_utils import nf_core.utils +from nf_core.components.components_utils import get_biotools_id from nf_core.components.lint import ComponentLint, LintExceptionError, LintResult from nf_core.pipelines.lint_utils import console @@ -253,6 +254,9 @@ def update_meta_yml_file(self, mod): """ meta_yml = self.read_meta_yml(mod) corrected_meta_yml = meta_yml.copy() + yaml = ruamel.yaml.YAML() + yaml.preserve_quotes = True + yaml.indent(mapping=2, sequence=2, offset=2) # Obtain inputs and outputs from main.nf and meta.yml # Used to compare only the structure of channels and elements @@ -332,6 +336,14 @@ def update_meta_yml_file(self, mod): ) break + # Add bio.tools identifier + for i, tool in enumerate(corrected_meta_yml["tools"]): + tool_name = list(tool.keys())[0] + if "identifier" not in tool[tool_name]: + corrected_meta_yml["tools"][i][tool_name]["identifier"] = get_biotools_id( + mod.component_name if "/" not in mod.component_name else 
mod.component_name.split("/")[0] + ) + with open(mod.meta_yml, "w") as fh: log.info(f"Updating {mod.meta_yml}") - yaml.dump(corrected_meta_yml, fh, sort_keys=False, Dumper=nf_core.utils.custom_yaml_dumper()) + yaml.dump(corrected_meta_yml, fh) diff --git a/nf_core/modules/lint/meta_yml.py b/nf_core/modules/lint/meta_yml.py index c3b5f3f28..5bba74acf 100644 --- a/nf_core/modules/lint/meta_yml.py +++ b/nf_core/modules/lint/meta_yml.py @@ -3,7 +3,7 @@ from pathlib import Path from typing import Union -import yaml +import ruamel.yaml from jsonschema import exceptions, validators from nf_core.components.lint import ComponentLint @@ -173,6 +173,8 @@ def read_meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> dict: The `meta.yml` file as a dictionary """ meta_yaml = None + yaml = ruamel.yaml.YAML() + yaml.preserve_quotes = True # Check if we have a patch file, get original file in that case if module.is_patched: lines = ModulesDiffer.try_apply_patch( @@ -183,11 +185,11 @@ def read_meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> reverse=True, ).get("meta.yml") if lines is not None: - meta_yaml = yaml.safe_load("".join(lines)) + meta_yaml = yaml.load("".join(lines)) if meta_yaml is None: try: with open(module.meta_yml) as fh: - meta_yaml = yaml.safe_load(fh) + meta_yaml = yaml.load(fh) except FileNotFoundError: return None return meta_yaml diff --git a/requirements.txt b/requirements.txt index ccfc1bc9c..a85f4e15c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -24,3 +24,4 @@ tabulate textual==0.71.0 trogon pdiff +ruamel From e757a214ed677daa542853d9f83aee92e76782b0 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 25 Jun 2024 11:28:23 +0200 Subject: [PATCH 261/737] fix indentation and run prettier on meta.yml files --- nf_core/modules/lint/__init__.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/nf_core/modules/lint/__init__.py b/nf_core/modules/lint/__init__.py index 44e00e333..9314ba07b 
100644 --- a/nf_core/modules/lint/__init__.py +++ b/nf_core/modules/lint/__init__.py @@ -17,7 +17,7 @@ import nf_core.utils from nf_core.components.components_utils import get_biotools_id from nf_core.components.lint import ComponentLint, LintExceptionError, LintResult -from nf_core.pipelines.lint_utils import console +from nf_core.pipelines.lint_utils import console, run_prettier_on_file log = logging.getLogger(__name__) @@ -256,7 +256,7 @@ def update_meta_yml_file(self, mod): corrected_meta_yml = meta_yml.copy() yaml = ruamel.yaml.YAML() yaml.preserve_quotes = True - yaml.indent(mapping=2, sequence=2, offset=2) + yaml.indent(mapping=2, sequence=2, offset=0) # Obtain inputs and outputs from main.nf and meta.yml # Used to compare only the structure of channels and elements @@ -347,3 +347,4 @@ def update_meta_yml_file(self, mod): with open(mod.meta_yml, "w") as fh: log.info(f"Updating {mod.meta_yml}") yaml.dump(corrected_meta_yml, fh) + run_prettier_on_file(fh.name) From 7979ced0852c0f66e357bf876b02c2cea8f4e301 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 5 Jul 2024 12:14:56 +0200 Subject: [PATCH 262/737] install correct ruamel library --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index a85f4e15c..61e181cac 100644 --- a/requirements.txt +++ b/requirements.txt @@ -24,4 +24,4 @@ tabulate textual==0.71.0 trogon pdiff -ruamel +ruamel.yaml From e2c04cc1c5c4ba7bddcc7c17ec7fd67765a56e63 Mon Sep 17 00:00:00 2001 From: mashehu Date: Fri, 5 Jul 2024 13:48:29 +0200 Subject: [PATCH 263/737] update output_dir for api docs to new website structure --- docs/api/generate-api-docs.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/api/generate-api-docs.sh b/docs/api/generate-api-docs.sh index f2b905c0c..cc65e1bb1 100644 --- a/docs/api/generate-api-docs.sh +++ b/docs/api/generate-api-docs.sh @@ -28,7 +28,7 @@ done # Set the output directory if not set if [[ -z "$output_dir" ]]; 
then - output_dir="../src/content/api_reference" + output_dir="../sites/docs/src/content/api_reference" fi # if no release is specified, use all releases From ade71b7852947459ab242999306204e85bd10300 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Fri, 5 Jul 2024 11:49:37 +0000 Subject: [PATCH 264/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 16db104fc..559a427e9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -42,6 +42,7 @@ - Create app: display input textbox with equally spaced grid ([#3038](https://github.com/nf-core/tools/pull/3038)) - Update python:3.12-slim Docker digest to da2d7af ([#3041](https://github.com/nf-core/tools/pull/3041)) - Update gitpod/workspace-base Docker digest to 0f38224 ([#3048](https://github.com/nf-core/tools/pull/3048)) +- update output_dir for api docs to new website structure ([#3051](https://github.com/nf-core/tools/pull/3051)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From edcda568d640a3d96fe7b653e8c9d6317a68be45 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Fri, 5 Jul 2024 14:19:26 +0000 Subject: [PATCH 265/737] Update pre-commit hook astral-sh/ruff-pre-commit to v0.5.1 --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6cefa7fc0..c1dc7978f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.5.0 + rev: v0.5.1 hooks: - id: ruff # linter args: [--fix, --exit-non-zero-on-fix] # sort imports and fix From 0ee58ee7f609e44c5150aafda0f2a51435114457 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Fri, 5 Jul 2024 14:20:14 +0000 Subject: [PATCH 266/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 
insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 559a427e9..6da61b3fe 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -43,6 +43,7 @@ - Update python:3.12-slim Docker digest to da2d7af ([#3041](https://github.com/nf-core/tools/pull/3041)) - Update gitpod/workspace-base Docker digest to 0f38224 ([#3048](https://github.com/nf-core/tools/pull/3048)) - update output_dir for api docs to new website structure ([#3051](https://github.com/nf-core/tools/pull/3051)) +- Update pre-commit hook astral-sh/ruff-pre-commit to v0.5.1 ([#3052](https://github.com/nf-core/tools/pull/3052)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From b61d781507b68efab8b8e277806cb3b2290fa8f5 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 9 Jul 2024 09:08:22 +0200 Subject: [PATCH 267/737] update awsfulltest linting tests to new structure --- nf_core/pipelines/lint/actions_awsfulltest.py | 4 +++- tests/lint/actions_awsfulltest.py | 16 ++++++++-------- 2 files changed, 11 insertions(+), 9 deletions(-) diff --git a/nf_core/pipelines/lint/actions_awsfulltest.py b/nf_core/pipelines/lint/actions_awsfulltest.py index d5a061c93..4cf3bece2 100644 --- a/nf_core/pipelines/lint/actions_awsfulltest.py +++ b/nf_core/pipelines/lint/actions_awsfulltest.py @@ -41,7 +41,9 @@ def actions_awsfulltest(self): # Check that the action is only turned on for published releases try: - if wf[True]["release"]["types"] != ["published"]: + if wf[True]["pull_request"]["branches"] != ["master"]: + raise AssertionError() + if wf[True]["pull_request_review"]["types"] != ["submitted"]: raise AssertionError() if "workflow_dispatch" not in wf[True]: raise AssertionError() diff --git a/tests/lint/actions_awsfulltest.py b/tests/lint/actions_awsfulltest.py index caf7bbf36..d1479bb1e 100644 --- a/tests/lint/actions_awsfulltest.py +++ b/tests/lint/actions_awsfulltest.py @@ -1,4 +1,4 @@ -import os +from pathlib import Path import yaml @@ -6,7 +6,7 @@ def 
test_actions_awsfulltest_warn(self): - """Lint test: actions_awsfulltest - WARN""" + """Lint test: actions_awsfulltest - PASS""" self.lint_obj._load() results = self.lint_obj.actions_awsfulltest() assert "`.github/workflows/awsfulltest.yml` is triggered correctly" in results["passed"] @@ -15,14 +15,14 @@ def test_actions_awsfulltest_warn(self): def test_actions_awsfulltest_pass(self): - """Lint test: actions_awsfulltest - PASS""" + """Lint test: actions_awsfulltest - WARN""" # Edit .github/workflows/awsfulltest.yml to use -profile test_full new_pipeline = self._make_pipeline_copy() - with open(os.path.join(new_pipeline, ".github", "workflows", "awsfulltest.yml")) as fh: + with open(Path(new_pipeline, ".github", "workflows", "awsfulltest.yml")) as fh: awsfulltest_yml = fh.read() awsfulltest_yml = awsfulltest_yml.replace("-profile test ", "-profile test_full ") - with open(os.path.join(new_pipeline, ".github", "workflows", "awsfulltest.yml"), "w") as fh: + with open(Path(new_pipeline, ".github", "workflows", "awsfulltest.yml"), "w") as fh: fh.write(awsfulltest_yml) # Make lint object @@ -44,10 +44,10 @@ def test_actions_awsfulltest_fail(self): # Edit .github/workflows/awsfulltest.yml to use -profile test_full new_pipeline = self._make_pipeline_copy() - with open(os.path.join(new_pipeline, ".github", "workflows", "awsfulltest.yml")) as fh: + with open(Path(new_pipeline, ".github", "workflows", "awsfulltest.yml")) as fh: awsfulltest_yml = yaml.safe_load(fh) - del awsfulltest_yml[True]["release"] - with open(os.path.join(new_pipeline, ".github", "workflows", "awsfulltest.yml"), "w") as fh: + del awsfulltest_yml[True]["pull_request_review"] + with open(Path(new_pipeline, ".github", "workflows", "awsfulltest.yml"), "w") as fh: yaml.dump(awsfulltest_yml, fh) # Make lint object From 48981f2dbfd8719a5adb6b5a7c529032ed6fdd7f Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 9 Jul 2024 09:32:10 +0200 Subject: [PATCH 268/737] escape github variables in jinja template --- 
nf_core/pipeline-template/.github/workflows/awsfulltest.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/pipeline-template/.github/workflows/awsfulltest.yml b/nf_core/pipeline-template/.github/workflows/awsfulltest.yml index f0d072442..dc0450be4 100644 --- a/nf_core/pipeline-template/.github/workflows/awsfulltest.yml +++ b/nf_core/pipeline-template/.github/workflows/awsfulltest.yml @@ -20,12 +20,12 @@ jobs: - uses: octokit/request-action@v2.x id: check_approvals with: - route: GET /repos/${{ github.repository }}/pulls/${{ github.event.review.number }}/reviews + route: GET /repos/{%- raw -%}${{ github.repository }}/pulls/${{ github.event.review.number }}/reviews env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - id: test_variables run: | - JSON_RESPONSE='${{ steps.check_approvals.outputs.data }}' + JSON_RESPONSE='${{ steps.check_approvals.outputs.data }}'{% endraw %} CURRENT_APPROVALS_COUNT=$(echo $JSON_RESPONSE | jq -c '[.[] | select(.state | contains("APPROVED")) ] | length') test $CURRENT_APPROVALS_COUNT -ge 2 || exit 1 # At least 2 approvals are required - name: Launch workflow via Seqera Platform From fd71df4409cb3556f448f9ed04064a53c49375fc Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Tue, 9 Jul 2024 08:19:17 +0000 Subject: [PATCH 269/737] [automated] Fix code linting --- nf_core/__main__.py | 4 +++- nf_core/commands_subworkflows.py | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 2be4697ff..331f53683 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -1601,7 +1601,9 @@ def command_subworkflows_update( """ Update DSL2 subworkflow within a pipeline. 
""" - subworkflows_update(ctx, subworkflow, dir, force, prompt, sha, install_all, preview, save_diff, update_deps, limit_output) + subworkflows_update( + ctx, subworkflow, dir, force, prompt, sha, install_all, preview, save_diff, update_deps, limit_output + ) ## DEPRECATED commands since v3.0.0 diff --git a/nf_core/commands_subworkflows.py b/nf_core/commands_subworkflows.py index f220caf93..a3abce3f8 100644 --- a/nf_core/commands_subworkflows.py +++ b/nf_core/commands_subworkflows.py @@ -251,7 +251,7 @@ def subworkflows_update( ctx.obj["modules_repo_url"], ctx.obj["modules_repo_branch"], ctx.obj["modules_repo_no_pull"], - limit_output + limit_output, ) exit_status = subworkflow_install.update(subworkflow) if not exit_status and install_all: From 0eaa72002e9c11679fcbbd43ea5c39b1679e2424 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 9 Jul 2024 10:54:23 +0200 Subject: [PATCH 270/737] update API docs with new pipeline command structure --- docs/api/_src/api/index.md | 15 ++-- docs/api/_src/api/licences.md | 9 --- docs/api/_src/api/modules.md | 9 --- .../_src/api/{ => pipelines}/bump_version.md | 2 +- docs/api/_src/api/{ => pipelines}/create.md | 2 +- docs/api/_src/api/{ => pipelines}/download.md | 2 +- docs/api/_src/api/pipelines/index.md | 8 ++ docs/api/_src/api/{ => pipelines}/launch.md | 2 +- docs/api/_src/api/{ => pipelines}/lint.md | 4 +- docs/api/_src/api/{ => pipelines}/list.md | 2 +- .../_src/api/{ => pipelines}/params-file.md | 2 +- docs/api/_src/api/{ => pipelines}/schema.md | 2 +- docs/api/_src/api/{ => pipelines}/sync.md | 2 +- docs/api/_src/api/{ => pipelines}/utils.md | 2 +- docs/api/_src/api/subworkflows.md | 9 --- docs/api/_src/index.md | 22 ++---- docs/api/_src/module_lint_tests/index.md | 17 ++-- .../actions_awsfulltest.md | 2 +- .../pipeline_lint_tests/actions_awstest.md | 2 +- .../_src/pipeline_lint_tests/actions_ci.md | 2 +- .../actions_schema_validation.md | 2 +- .../_src/pipeline_lint_tests/base_config.md | 2 +- 
.../_src/pipeline_lint_tests/files_exist.md | 2 +- .../pipeline_lint_tests/files_unchanged.md | 2 +- docs/api/_src/pipeline_lint_tests/index.md | 31 ++++++-- .../_src/pipeline_lint_tests/merge_markers.md | 2 +- .../pipeline_lint_tests/modules_config.md | 2 +- .../_src/pipeline_lint_tests/modules_json.md | 4 +- .../pipeline_lint_tests/modules_structure.md | 2 +- .../pipeline_lint_tests/multiqc_config.md | 2 +- .../pipeline_lint_tests/nextflow_config.md | 2 +- .../_src/pipeline_lint_tests/nfcore_yml.md | 2 +- .../pipeline_name_conventions.md | 2 +- .../pipeline_lint_tests/pipeline_todos.md | 2 +- docs/api/_src/pipeline_lint_tests/readme.md | 2 +- .../pipeline_lint_tests/schema_description.md | 2 +- .../_src/pipeline_lint_tests/schema_lint.md | 2 +- .../_src/pipeline_lint_tests/schema_params.md | 2 +- .../_src/pipeline_lint_tests/system_exit.md | 2 +- .../pipeline_lint_tests/template_strings.md | 2 +- .../version_consistency.md | 2 +- docs/api/_src/subworkflow_lint_tests/index.md | 14 ++-- docs/api/make_lint_md.py | 77 ++++++++++--------- 43 files changed, 139 insertions(+), 142 deletions(-) delete mode 100644 docs/api/_src/api/licences.md delete mode 100644 docs/api/_src/api/modules.md rename docs/api/_src/api/{ => pipelines}/bump_version.md (72%) rename docs/api/_src/api/{ => pipelines}/create.md (73%) rename docs/api/_src/api/{ => pipelines}/download.md (73%) create mode 100644 docs/api/_src/api/pipelines/index.md rename docs/api/_src/api/{ => pipelines}/launch.md (73%) rename docs/api/_src/api/{ => pipelines}/lint.md (64%) rename docs/api/_src/api/{ => pipelines}/list.md (74%) rename docs/api/_src/api/{ => pipelines}/params-file.md (72%) rename docs/api/_src/api/{ => pipelines}/schema.md (73%) rename docs/api/_src/api/{ => pipelines}/sync.md (74%) rename docs/api/_src/api/{ => pipelines}/utils.md (74%) delete mode 100644 docs/api/_src/api/subworkflows.md diff --git a/docs/api/_src/api/index.md b/docs/api/_src/api/index.md index a1863f7e3..035a89688 100644 --- 
a/docs/api/_src/api/index.md +++ b/docs/api/_src/api/index.md @@ -1,8 +1,11 @@ -# API Reference +# nf-core/tools documentation -```{toctree} -:glob: true -:maxdepth: 1 +This API documentation is for the [`nf-core/tools`](https://github.com/nf-core/tools) package. -* -``` +## Contents + +- [Pipeline commands](./pipeline_lint_tests/) (run by `nf-core pipelines lint`) +- [Module commands](./module_lint_tests/) (run by `nf-core modules lint`) +- [Subworkflow commands](./subworkflow_lint_tests/) (run by `nf-core subworkflows lint`) +- [nf-core/tools Python package API reference](./api/) + - [nf-core/tools pipeline commands API referece](./api/pipelines/) diff --git a/docs/api/_src/api/licences.md b/docs/api/_src/api/licences.md deleted file mode 100644 index 95b5f9768..000000000 --- a/docs/api/_src/api/licences.md +++ /dev/null @@ -1,9 +0,0 @@ -# nf_core.licences - -```{eval-rst} -.. automodule:: nf_core.licences - :members: - :undoc-members: - :show-inheritance: - :private-members: -``` diff --git a/docs/api/_src/api/modules.md b/docs/api/_src/api/modules.md deleted file mode 100644 index e3f1e39be..000000000 --- a/docs/api/_src/api/modules.md +++ /dev/null @@ -1,9 +0,0 @@ -# nf_core.modules - -```{eval-rst} -.. automodule:: nf_core.modules - :members: - :undoc-members: - :show-inheritance: - :private-members: -``` diff --git a/docs/api/_src/api/bump_version.md b/docs/api/_src/api/pipelines/bump_version.md similarity index 72% rename from docs/api/_src/api/bump_version.md rename to docs/api/_src/api/pipelines/bump_version.md index 54c377440..cd7dc280f 100644 --- a/docs/api/_src/api/bump_version.md +++ b/docs/api/_src/api/pipelines/bump_version.md @@ -1,7 +1,7 @@ # nf_core.bump_version ```{eval-rst} -.. automodule:: nf_core.bump_version +.. 
automodule:: nf_core.pipelines.bump_version :members: :undoc-members: :show-inheritance: diff --git a/docs/api/_src/api/create.md b/docs/api/_src/api/pipelines/create.md similarity index 73% rename from docs/api/_src/api/create.md rename to docs/api/_src/api/pipelines/create.md index 5d5f6a62d..576335e95 100644 --- a/docs/api/_src/api/create.md +++ b/docs/api/_src/api/pipelines/create.md @@ -1,7 +1,7 @@ # nf_core.create ```{eval-rst} -.. automodule:: nf_core.create +.. automodule:: nf_core.pipelines.create :members: :undoc-members: :show-inheritance: diff --git a/docs/api/_src/api/download.md b/docs/api/_src/api/pipelines/download.md similarity index 73% rename from docs/api/_src/api/download.md rename to docs/api/_src/api/pipelines/download.md index 18ab51376..540fb92c4 100644 --- a/docs/api/_src/api/download.md +++ b/docs/api/_src/api/pipelines/download.md @@ -1,7 +1,7 @@ # nf_core.download ```{eval-rst} -.. automodule:: nf_core.download +.. automodule:: nf_core.pipelines.download :members: :undoc-members: :show-inheritance: diff --git a/docs/api/_src/api/pipelines/index.md b/docs/api/_src/api/pipelines/index.md new file mode 100644 index 000000000..a1863f7e3 --- /dev/null +++ b/docs/api/_src/api/pipelines/index.md @@ -0,0 +1,8 @@ +# API Reference + +```{toctree} +:glob: true +:maxdepth: 1 + +* +``` diff --git a/docs/api/_src/api/launch.md b/docs/api/_src/api/pipelines/launch.md similarity index 73% rename from docs/api/_src/api/launch.md rename to docs/api/_src/api/pipelines/launch.md index eef777ca8..0f7fc03f6 100644 --- a/docs/api/_src/api/launch.md +++ b/docs/api/_src/api/pipelines/launch.md @@ -1,7 +1,7 @@ # nf_core.launch ```{eval-rst} -.. automodule:: nf_core.launch +.. 
automodule:: nf_core.pipelines.launch :members: :undoc-members: :show-inheritance: diff --git a/docs/api/_src/api/lint.md b/docs/api/_src/api/pipelines/lint.md similarity index 64% rename from docs/api/_src/api/lint.md rename to docs/api/_src/api/pipelines/lint.md index 1380f7ec7..aa62c404b 100644 --- a/docs/api/_src/api/lint.md +++ b/docs/api/_src/api/pipelines/lint.md @@ -1,11 +1,11 @@ # nf_core.lint :::{seealso} -See the [Lint Tests](../pipeline_lint_tests/index.md) docs for information about specific linting functions. +See the [Lint Tests](/docs/nf-core-tools/api_reference/dev/pipeline_lint_tests) docs for information about specific linting functions. ::: ```{eval-rst} -.. automodule:: nf_core.lint +.. automodule:: nf_core.pipelines.lint :members: run_linting :undoc-members: :show-inheritance: diff --git a/docs/api/_src/api/list.md b/docs/api/_src/api/pipelines/list.md similarity index 74% rename from docs/api/_src/api/list.md rename to docs/api/_src/api/pipelines/list.md index 35c819bc5..7df756454 100644 --- a/docs/api/_src/api/list.md +++ b/docs/api/_src/api/pipelines/list.md @@ -1,7 +1,7 @@ # nf_core.list ```{eval-rst} -.. automodule:: nf_core.list +.. automodule:: nf_core.pipelines.list :members: :undoc-members: :show-inheritance: diff --git a/docs/api/_src/api/params-file.md b/docs/api/_src/api/pipelines/params-file.md similarity index 72% rename from docs/api/_src/api/params-file.md rename to docs/api/_src/api/pipelines/params-file.md index c5bbfc0f1..06f27cc59 100644 --- a/docs/api/_src/api/params-file.md +++ b/docs/api/_src/api/pipelines/params-file.md @@ -1,7 +1,7 @@ # nf_core.params_file ```{eval-rst} -.. automodule:: nf_core.params_file +.. 
automodule:: nf_core.pipelines.params_file :members: :undoc-members: :show-inheritance: diff --git a/docs/api/_src/api/schema.md b/docs/api/_src/api/pipelines/schema.md similarity index 73% rename from docs/api/_src/api/schema.md rename to docs/api/_src/api/pipelines/schema.md index a702d2805..c885d9ed2 100644 --- a/docs/api/_src/api/schema.md +++ b/docs/api/_src/api/pipelines/schema.md @@ -1,7 +1,7 @@ # nf_core.schema ```{eval-rst} -.. automodule:: nf_core.schema +.. automodule:: nf_core.pipelines.schema :members: :undoc-members: :show-inheritance: diff --git a/docs/api/_src/api/sync.md b/docs/api/_src/api/pipelines/sync.md similarity index 74% rename from docs/api/_src/api/sync.md rename to docs/api/_src/api/pipelines/sync.md index 8cc02209d..da1f468fe 100644 --- a/docs/api/_src/api/sync.md +++ b/docs/api/_src/api/pipelines/sync.md @@ -1,7 +1,7 @@ # nf_core.sync ```{eval-rst} -.. automodule:: nf_core.sync +.. automodule:: nf_core.pipelines.sync :members: :undoc-members: :show-inheritance: diff --git a/docs/api/_src/api/utils.md b/docs/api/_src/api/pipelines/utils.md similarity index 74% rename from docs/api/_src/api/utils.md rename to docs/api/_src/api/pipelines/utils.md index 1353f97ef..86b8c3f36 100644 --- a/docs/api/_src/api/utils.md +++ b/docs/api/_src/api/pipelines/utils.md @@ -1,7 +1,7 @@ # nf_core.utils ```{eval-rst} -.. automodule:: nf_core.utils +.. automodule:: nf_core.pipelines.utils :members: :undoc-members: :show-inheritance: diff --git a/docs/api/_src/api/subworkflows.md b/docs/api/_src/api/subworkflows.md deleted file mode 100644 index 438ccd018..000000000 --- a/docs/api/_src/api/subworkflows.md +++ /dev/null @@ -1,9 +0,0 @@ -# nf_core.subworkflows - -```{eval-rst} -.. 
automodule:: nf_core.subworkflows - :members: - :undoc-members: - :show-inheritance: - :private-members: -``` diff --git a/docs/api/_src/index.md b/docs/api/_src/index.md index 037ca9547..d81a0e90d 100644 --- a/docs/api/_src/index.md +++ b/docs/api/_src/index.md @@ -1,22 +1,10 @@ # nf-core/tools documentation -```{toctree} -:caption: 'Contents:' -:glob: true -:hidden: true -:maxdepth: 2 - -pipeline_lint_tests/index.rst -module_lint_tests/index.rst -subworkflow_lint_tests/index.rst -api/index.rst -``` - -This documentation is for the `nf-core/tools` package. +This API documentation is for the [`nf-core/tools`](https://github.com/nf-core/tools) package. ## Contents -- [Pipeline code lint tests](pipeline_lint_tests/index.md) (run by `nf-core pipelines lint`) -- [Module code lint tests](module_lint_tests/index.md) (run by `nf-core modules lint`) -- [Subworkflow code lint tests](subworkflow_lint_tests/index.md) (run by `nf-core subworkflows lint`) -- [nf-core/tools Python package API reference](api/index.md) +- [Pipeline code lint tests](./pipeline_lint_tests/) (run by `nf-core pipelines lint`) +- [Module code lint tests](./module_lint_tests/) (run by `nf-core modules lint`) +- [Subworkflow code lint tests](./subworkflow_lint_tests/) (run by `nf-core subworkflows lint`) +- [nf-core/tools Python package API reference](./api/) diff --git a/docs/api/_src/module_lint_tests/index.md b/docs/api/_src/module_lint_tests/index.md index dee84d06d..7039ba259 100644 --- a/docs/api/_src/module_lint_tests/index.md +++ b/docs/api/_src/module_lint_tests/index.md @@ -1,8 +1,11 @@ -# Module lint tests +# Module Lint Tests -```{toctree} -:glob: true -:maxdepth: 1 - -* -``` + - [environment_yml](./environment_yml/) + - [main_nf](./main_nf/) + - [meta_yml](./meta_yml/) + - [module_changes](./module_changes/) + - [module_deprecations](./module_deprecations/) + - [module_patch](./module_patch/) + - [module_tests](./module_tests/) + - [module_todos](./module_todos/) + - 
[module_version](./module_version/) diff --git a/docs/api/_src/pipeline_lint_tests/actions_awsfulltest.md b/docs/api/_src/pipeline_lint_tests/actions_awsfulltest.md index c0f0aef5a..4bb47569a 100644 --- a/docs/api/_src/pipeline_lint_tests/actions_awsfulltest.md +++ b/docs/api/_src/pipeline_lint_tests/actions_awsfulltest.md @@ -1,5 +1,5 @@ # actions_awsfulltest ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.actions_awsfulltest +.. automethod:: nf_core.pipelines.lint.PipelineLint.actions_awsfulltest ``` diff --git a/docs/api/_src/pipeline_lint_tests/actions_awstest.md b/docs/api/_src/pipeline_lint_tests/actions_awstest.md index 42441c8ac..b250571a0 100644 --- a/docs/api/_src/pipeline_lint_tests/actions_awstest.md +++ b/docs/api/_src/pipeline_lint_tests/actions_awstest.md @@ -1,5 +1,5 @@ # actions_awstest ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.actions_awstest +.. automethod:: nf_core.pipelines.lint.PipelineLint.actions_awstest ``` diff --git a/docs/api/_src/pipeline_lint_tests/actions_ci.md b/docs/api/_src/pipeline_lint_tests/actions_ci.md index 78ea3aea0..68cbc089a 100644 --- a/docs/api/_src/pipeline_lint_tests/actions_ci.md +++ b/docs/api/_src/pipeline_lint_tests/actions_ci.md @@ -1,5 +1,5 @@ # actions_ci ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.actions_ci +.. automethod:: nf_core.pipelines.lint.PipelineLint.actions_ci ``` diff --git a/docs/api/_src/pipeline_lint_tests/actions_schema_validation.md b/docs/api/_src/pipeline_lint_tests/actions_schema_validation.md index f0c98eb21..860acb2d2 100644 --- a/docs/api/_src/pipeline_lint_tests/actions_schema_validation.md +++ b/docs/api/_src/pipeline_lint_tests/actions_schema_validation.md @@ -1,5 +1,5 @@ # actions_schema_validation ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.actions_schema_validation +.. 
automethod:: nf_core.pipelines.lint.PipelineLint.actions_schema_validation ``` diff --git a/docs/api/_src/pipeline_lint_tests/base_config.md b/docs/api/_src/pipeline_lint_tests/base_config.md index 4a56ef978..803627ced 100644 --- a/docs/api/_src/pipeline_lint_tests/base_config.md +++ b/docs/api/_src/pipeline_lint_tests/base_config.md @@ -1,5 +1,5 @@ # base_config ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.base_config +.. automethod:: nf_core.pipelines.lint.PipelineLint.base_config ``` diff --git a/docs/api/_src/pipeline_lint_tests/files_exist.md b/docs/api/_src/pipeline_lint_tests/files_exist.md index 309ea62f0..69890d169 100644 --- a/docs/api/_src/pipeline_lint_tests/files_exist.md +++ b/docs/api/_src/pipeline_lint_tests/files_exist.md @@ -1,5 +1,5 @@ # files_exist ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.files_exist +.. automethod:: nf_core.pipelines.lint.PipelineLint.files_exist ``` diff --git a/docs/api/_src/pipeline_lint_tests/files_unchanged.md b/docs/api/_src/pipeline_lint_tests/files_unchanged.md index 2f3b04fe1..3f626e5b8 100644 --- a/docs/api/_src/pipeline_lint_tests/files_unchanged.md +++ b/docs/api/_src/pipeline_lint_tests/files_unchanged.md @@ -1,5 +1,5 @@ # files_unchanged ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.files_unchanged +.. 
automethod:: nf_core.pipelines.lint.PipelineLint.files_unchanged ``` diff --git a/docs/api/_src/pipeline_lint_tests/index.md b/docs/api/_src/pipeline_lint_tests/index.md index c631610d6..3575c08db 100644 --- a/docs/api/_src/pipeline_lint_tests/index.md +++ b/docs/api/_src/pipeline_lint_tests/index.md @@ -1,8 +1,25 @@ -# Pipeline lint tests +# Pipeline Lint Tests -```{toctree} -:glob: true -:maxdepth: 1 - -* -``` + - [actions_awsfulltest](./actions_awsfulltest/) + - [actions_awstest](./actions_awstest/) + - [actions_ci](./actions_ci/) + - [actions_schema_validation](./actions_schema_validation/) + - [base_config](./base_config/) + - [files_exist](./files_exist/) + - [files_unchanged](./files_unchanged/) + - [merge_markers](./merge_markers/) + - [modules_config](./modules_config/) + - [modules_json](./modules_json/) + - [modules_structure](./modules_structure/) + - [multiqc_config](./multiqc_config/) + - [nextflow_config](./nextflow_config/) + - [nfcore_yml](./nfcore_yml/) + - [pipeline_name_conventions](./pipeline_name_conventions/) + - [pipeline_todos](./pipeline_todos/) + - [readme](./readme/) + - [schema_description](./schema_description/) + - [schema_lint](./schema_lint/) + - [schema_params](./schema_params/) + - [system_exit](./system_exit/) + - [template_strings](./template_strings/) + - [version_consistency](./version_consistency/) diff --git a/docs/api/_src/pipeline_lint_tests/merge_markers.md b/docs/api/_src/pipeline_lint_tests/merge_markers.md index b52fdba1a..7b620b4af 100644 --- a/docs/api/_src/pipeline_lint_tests/merge_markers.md +++ b/docs/api/_src/pipeline_lint_tests/merge_markers.md @@ -1,5 +1,5 @@ # merge_markers ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.merge_markers +.. 
automethod:: nf_core.pipelines.lint.PipelineLint.merge_markers ``` diff --git a/docs/api/_src/pipeline_lint_tests/modules_config.md b/docs/api/_src/pipeline_lint_tests/modules_config.md index 2a4f51c5a..c8eac0cf9 100644 --- a/docs/api/_src/pipeline_lint_tests/modules_config.md +++ b/docs/api/_src/pipeline_lint_tests/modules_config.md @@ -1,5 +1,5 @@ # modules_config ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.modules_config +.. automethod:: nf_core.pipelines.lint.PipelineLint.modules_config ``` diff --git a/docs/api/_src/pipeline_lint_tests/modules_json.md b/docs/api/_src/pipeline_lint_tests/modules_json.md index 0c0ba71a8..3070f83f5 100644 --- a/docs/api/_src/pipeline_lint_tests/modules_json.md +++ b/docs/api/_src/pipeline_lint_tests/modules_json.md @@ -1,5 +1,5 @@ -# nextflow_config +# modules_json ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.modules_json +.. automethod:: nf_core.pipelines.lint.PipelineLint.modules_json ``` diff --git a/docs/api/_src/pipeline_lint_tests/modules_structure.md b/docs/api/_src/pipeline_lint_tests/modules_structure.md index faa39ca77..8a8391885 100644 --- a/docs/api/_src/pipeline_lint_tests/modules_structure.md +++ b/docs/api/_src/pipeline_lint_tests/modules_structure.md @@ -1,5 +1,5 @@ # modules_structure ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.modules_structure +.. automethod:: nf_core.pipelines.lint.PipelineLint.modules_structure ``` diff --git a/docs/api/_src/pipeline_lint_tests/multiqc_config.md b/docs/api/_src/pipeline_lint_tests/multiqc_config.md index 311f6b304..281957749 100644 --- a/docs/api/_src/pipeline_lint_tests/multiqc_config.md +++ b/docs/api/_src/pipeline_lint_tests/multiqc_config.md @@ -1,5 +1,5 @@ # multiqc_config ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.multiqc_config +.. 
automethod:: nf_core.pipelines.lint.PipelineLint.multiqc_config ``` diff --git a/docs/api/_src/pipeline_lint_tests/nextflow_config.md b/docs/api/_src/pipeline_lint_tests/nextflow_config.md index e23e96614..98e8df257 100644 --- a/docs/api/_src/pipeline_lint_tests/nextflow_config.md +++ b/docs/api/_src/pipeline_lint_tests/nextflow_config.md @@ -1,5 +1,5 @@ # nextflow_config ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.nextflow_config +.. automethod:: nf_core.pipelines.lint.PipelineLint.nextflow_config ``` diff --git a/docs/api/_src/pipeline_lint_tests/nfcore_yml.md b/docs/api/_src/pipeline_lint_tests/nfcore_yml.md index f7e797a29..226eb4c94 100644 --- a/docs/api/_src/pipeline_lint_tests/nfcore_yml.md +++ b/docs/api/_src/pipeline_lint_tests/nfcore_yml.md @@ -1,5 +1,5 @@ # nfcore_yml ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.nfcore_yml +.. automethod:: nf_core.pipelines.lint.PipelineLint.nfcore_yml ``` diff --git a/docs/api/_src/pipeline_lint_tests/pipeline_name_conventions.md b/docs/api/_src/pipeline_lint_tests/pipeline_name_conventions.md index 0034319d3..09396b6af 100644 --- a/docs/api/_src/pipeline_lint_tests/pipeline_name_conventions.md +++ b/docs/api/_src/pipeline_lint_tests/pipeline_name_conventions.md @@ -1,5 +1,5 @@ # pipeline_name_conventions ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.pipeline_name_conventions +.. automethod:: nf_core.pipelines.lint.PipelineLint.pipeline_name_conventions ``` diff --git a/docs/api/_src/pipeline_lint_tests/pipeline_todos.md b/docs/api/_src/pipeline_lint_tests/pipeline_todos.md index 08e456ea2..8292075b9 100644 --- a/docs/api/_src/pipeline_lint_tests/pipeline_todos.md +++ b/docs/api/_src/pipeline_lint_tests/pipeline_todos.md @@ -1,5 +1,5 @@ # pipeline_todos ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.pipeline_todos +.. 
automethod:: nf_core.pipelines.lint.PipelineLint.pipeline_todos ``` diff --git a/docs/api/_src/pipeline_lint_tests/readme.md b/docs/api/_src/pipeline_lint_tests/readme.md index 9583a56e0..bf947bb61 100644 --- a/docs/api/_src/pipeline_lint_tests/readme.md +++ b/docs/api/_src/pipeline_lint_tests/readme.md @@ -1,5 +1,5 @@ # readme ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.readme +.. automethod:: nf_core.pipelines.lint.PipelineLint.readme ``` diff --git a/docs/api/_src/pipeline_lint_tests/schema_description.md b/docs/api/_src/pipeline_lint_tests/schema_description.md index 14f756acd..0429b3cc1 100644 --- a/docs/api/_src/pipeline_lint_tests/schema_description.md +++ b/docs/api/_src/pipeline_lint_tests/schema_description.md @@ -1,5 +1,5 @@ # schema_description ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.schema_description +.. automethod:: nf_core.pipelines.lint.PipelineLint.schema_description ``` diff --git a/docs/api/_src/pipeline_lint_tests/schema_lint.md b/docs/api/_src/pipeline_lint_tests/schema_lint.md index 39be2ea65..95bd5cc2f 100644 --- a/docs/api/_src/pipeline_lint_tests/schema_lint.md +++ b/docs/api/_src/pipeline_lint_tests/schema_lint.md @@ -1,5 +1,5 @@ # schema_lint ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.schema_lint +.. automethod:: nf_core.pipelines.lint.PipelineLint.schema_lint ``` diff --git a/docs/api/_src/pipeline_lint_tests/schema_params.md b/docs/api/_src/pipeline_lint_tests/schema_params.md index 80a626e88..8c6594c57 100644 --- a/docs/api/_src/pipeline_lint_tests/schema_params.md +++ b/docs/api/_src/pipeline_lint_tests/schema_params.md @@ -1,5 +1,5 @@ # schema_params ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.schema_params +.. 
automethod:: nf_core.pipelines.lint.PipelineLint.schema_params ``` diff --git a/docs/api/_src/pipeline_lint_tests/system_exit.md b/docs/api/_src/pipeline_lint_tests/system_exit.md index 3d0ac20f8..9ba67d4d2 100644 --- a/docs/api/_src/pipeline_lint_tests/system_exit.md +++ b/docs/api/_src/pipeline_lint_tests/system_exit.md @@ -1,5 +1,5 @@ # system_exit ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.system_exit +.. automethod:: nf_core.pipelines.lint.PipelineLint.system_exit ``` diff --git a/docs/api/_src/pipeline_lint_tests/template_strings.md b/docs/api/_src/pipeline_lint_tests/template_strings.md index 3d03bfb25..ee334a2a2 100644 --- a/docs/api/_src/pipeline_lint_tests/template_strings.md +++ b/docs/api/_src/pipeline_lint_tests/template_strings.md @@ -1,5 +1,5 @@ # template_strings ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.template_strings +.. automethod:: nf_core.pipelines.lint.PipelineLint.template_strings ``` diff --git a/docs/api/_src/pipeline_lint_tests/version_consistency.md b/docs/api/_src/pipeline_lint_tests/version_consistency.md index e8038f853..868a34870 100644 --- a/docs/api/_src/pipeline_lint_tests/version_consistency.md +++ b/docs/api/_src/pipeline_lint_tests/version_consistency.md @@ -1,5 +1,5 @@ # version_consistency ```{eval-rst} -.. automethod:: nf_core.lint.PipelineLint.version_consistency +.. 
automethod:: nf_core.pipelines.lint.PipelineLint.version_consistency ``` diff --git a/docs/api/_src/subworkflow_lint_tests/index.md b/docs/api/_src/subworkflow_lint_tests/index.md index 0ecf590c0..da8db49a7 100644 --- a/docs/api/_src/subworkflow_lint_tests/index.md +++ b/docs/api/_src/subworkflow_lint_tests/index.md @@ -1,8 +1,8 @@ -# Subworkflow lint tests +# Subworkflow Lint Tests -```{toctree} -:glob: true -:maxdepth: 1 - -* -``` + - [main_nf](./main_nf/) + - [meta_yml](./meta_yml/) + - [subworkflow_changes](./subworkflow_changes/) + - [subworkflow_tests](./subworkflow_tests/) + - [subworkflow_todos](./subworkflow_todos/) + - [subworkflow_version](./subworkflow_version/) diff --git a/docs/api/make_lint_md.py b/docs/api/make_lint_md.py index 48393094b..7b823a086 100644 --- a/docs/api/make_lint_md.py +++ b/docs/api/make_lint_md.py @@ -1,53 +1,58 @@ -#!/usr/bin/env python - -import fnmatch -import os +from pathlib import Path +#!/usr/bin/env python +import nf_core.commands_pipelines import nf_core.modules.lint import nf_core.pipelines.lint import nf_core.subworkflows.lint -def make_docs(docs_basedir, lint_tests, md_template): - # Get list of existing .md files - existing_docs = [] - for fn in os.listdir(docs_basedir): - if fnmatch.fnmatch(fn, "*.md") and not fnmatch.fnmatch(fn, "index.md"): - existing_docs.append(os.path.join(docs_basedir, fn)) +def create_docs(docs_basedir, lint_tests, md_template): + docs_basedir.mkdir(parents=True, exist_ok=True) + existing_docs = list(docs_basedir.glob("*.md")) + existing_docs.remove(docs_basedir / "index.md") for test_name in lint_tests: - fn = os.path.join(docs_basedir, f"{test_name}.md") - if os.path.exists(fn): + fn = docs_basedir / f"{test_name}.md" + if fn.exists(): existing_docs.remove(fn) else: with open(fn, "w") as fh: fh.write(md_template.format(test_name)) for fn in existing_docs: - os.remove(fn) + fn.unlink() + + +def create_index_file(basedir, title): + index_file = basedir / "index.md" + with open(index_file, "w") 
as fh: + fh.write(f"# {title}\n\n") + for fn in sorted(basedir.glob("*.md")): + if fn.name != "index.md": + fh.write(f" - [{fn.stem}](./{fn.stem}/)\n") # Create the pipeline docs -pipeline_docs_basedir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "_src", "pipeline_lint_tests") -make_docs( - pipeline_docs_basedir, +pipeline_lint_docs_basedir = Path(__file__).resolve().parent / "_src" / "pipeline_lint_tests" +create_docs( + pipeline_lint_docs_basedir, nf_core.pipelines.lint.PipelineLint._get_all_lint_tests(True), """# {0} -```{{eval-rst}} -.. automethod:: nf_core.pipelines.lint.PipelineLint.{0} -``` -""", + ```{{eval-rst}} + .. automethod:: nf_core.pipelines.lint.PipelineLint.{0} + ``` + """, ) +create_index_file(pipeline_lint_docs_basedir, "Pipeline Lint Tests") -# Create the modules lint docs -modules_docs_basedir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "_src", "module_lint_tests") -make_docs( - modules_docs_basedir, - list( - set(nf_core.modules.lint.ModuleLint.get_all_module_lint_tests(is_pipeline=True)).union( - nf_core.modules.lint.ModuleLint.get_all_module_lint_tests(is_pipeline=False) - ) +# Create the modules docs +modules_lint_docs_basedir = Path(__file__).resolve().parent / "_src" / "module_lint_tests" +create_docs( + modules_lint_docs_basedir, + set(nf_core.modules.lint.ModuleLint.get_all_module_lint_tests(is_pipeline=True)).union( + nf_core.modules.lint.ModuleLint.get_all_module_lint_tests(is_pipeline=False) ), """# {0} @@ -56,15 +61,14 @@ def make_docs(docs_basedir, lint_tests, md_template): ``` """, ) +create_index_file(modules_lint_docs_basedir, "Module Lint Tests") -# Create the subworkflows lint docs -subworkflows_docs_basedir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "_src", "subworkflow_lint_tests") -make_docs( - subworkflows_docs_basedir, - list( - set(nf_core.subworkflows.lint.SubworkflowLint.get_all_subworkflow_lint_tests(is_pipeline=True)).union( - 
nf_core.subworkflows.lint.SubworkflowLint.get_all_subworkflow_lint_tests(is_pipeline=False) - ) +# Create the subworkflow docs +subworkflow_lint_docs_basedir = Path(__file__).resolve().parent / "_src" / "subworkflow_lint_tests" +create_docs( + subworkflow_lint_docs_basedir, + set(nf_core.subworkflows.lint.SubworkflowLint.get_all_subworkflow_lint_tests(is_pipeline=True)).union( + nf_core.subworkflows.lint.SubworkflowLint.get_all_subworkflow_lint_tests(is_pipeline=False) ), """# {0} @@ -73,3 +77,4 @@ def make_docs(docs_basedir, lint_tests, md_template): ``` """, ) +create_index_file(subworkflow_lint_docs_basedir, "Subworkflow Lint Tests") From e8e2e49d5ab69671d9bcbad4dbe3ddf5733fc09b Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 9 Jul 2024 10:55:26 +0200 Subject: [PATCH 271/737] remove unused assets --- docs/api/_src/_static/css/custom.css | 437 ------------------------- docs/api/_src/_static/js/custom.js | 0 docs/api/_src/_static/nf-core-logo.png | Bin 84414 -> 0 bytes docs/api/_src/_templates/layout.html | 4 - 4 files changed, 441 deletions(-) delete mode 100644 docs/api/_src/_static/css/custom.css delete mode 100644 docs/api/_src/_static/js/custom.js delete mode 100644 docs/api/_src/_static/nf-core-logo.png delete mode 100644 docs/api/_src/_templates/layout.html diff --git a/docs/api/_src/_static/css/custom.css b/docs/api/_src/_static/css/custom.css deleted file mode 100644 index e892dd999..000000000 --- a/docs/api/_src/_static/css/custom.css +++ /dev/null @@ -1,437 +0,0 @@ -@media (prefers-color-scheme: light) { - a, - a:visited { - color: #246eb9; - } - a:hover, - a:focus, - a:active { - color: #c03221; - } - .wy-nav-side { - background-color: #ededed; - } - .wy-nav-top, - .wy-side-nav-search, - .wy-menu-vertical a:active { - background-color: #32ad65; - } - .wy-menu-vertical a { - color: #343434; - } - .wy-menu-vertical a:hover { - background-color: #abacab85; - } - .wy-menu-vertical header, - .wy-menu-vertical p.caption { - color: #32ad65; - } - - 
.wy-side-nav-search input[type="text"] { - border: none; - } - - code, - .rst-content code.literal { - background-color: rgba(220, 220, 220, 0.4); - color: #c03221; - border: none; - } - .rst-content .note .admonition-title { - background-color: #72757bfc; - } - - html.writer-html4 .rst-content dl:not(.docutils) > dt, - html.writer-html5 - .rst-content - dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) - > dt { - background-color: #32ad65; - border: none; - } - .rst-content .method > dt > code { - color: #f5f6f7; - } - .rst-content div[class^="highlight"], - .rst-content pre.literal-block { - border: none; - } - .highlight { - background-color: #242424; - color: #e5e6e7; - } - .highlight .hll { - color: #e5e6e7; - background-color: #585b60fc; - } - .highlight .k, - .highlight .nt, - .highlight .no { - color: #246eb9; - } - .highlight .s, - .highlight .s1, - .highlight .s2, - .highlight .na { - color: #32ad65; - } - .highlight .nb, - .highlight .o, - .highlight .cm { - color: #ffbe0b; - } - .highlight .c1 { - color: #88898afc; - } - .highlight .nv, - .py > span { - color: #c03221; - } - .py .sig-param, - .py .sig-paren, - .property .pre { - color: #eef0f2; - } - - .method > .py .sig-param, - .method > .py .sig-paren { - color: #32ad65; - } - .btn.btn-neutral { - background-color: #e5e6e7d4 !important; - } - .rst-content .hint .admonition-title, - .rst-content .hint .wy-alert-title, - .rst-content .important .admonition-title, - .rst-content .important .wy-alert-title, - .rst-content .tip .admonition-title, - .rst-content .tip .wy-alert-title, - .rst-content .wy-alert-success.admonition-todo .admonition-title, - .rst-content .wy-alert-success.admonition-todo .wy-alert-title, - .rst-content .wy-alert-success.admonition .admonition-title, - .rst-content .wy-alert-success.admonition .wy-alert-title, - .rst-content .wy-alert-success.attention .admonition-title, - .rst-content .wy-alert-success.attention .wy-alert-title, - 
.rst-content .wy-alert-success.caution .admonition-title, - .rst-content .wy-alert-success.caution .wy-alert-title, - .rst-content .wy-alert-success.danger .admonition-title, - .rst-content .wy-alert-success.danger .wy-alert-title, - .rst-content .wy-alert-success.error .admonition-title, - .rst-content .wy-alert-success.error .wy-alert-title, - .rst-content .wy-alert-success.note .admonition-title, - .rst-content .wy-alert-success.note .wy-alert-title, - .rst-content .wy-alert-success.seealso .admonition-title, - .rst-content .wy-alert-success.seealso .wy-alert-title, - .rst-content .wy-alert-success.warning .admonition-title, - .rst-content .wy-alert-success.warning .wy-alert-title, - .rst-content .wy-alert.wy-alert-success .admonition-title, - .wy-alert.wy-alert-success .rst-content .admonition-title, - .wy-alert.wy-alert-success .wy-alert-title { - background-color: #32ad65; - color: #e5e6e7; - } - - .rst-content .note, - .rst-content .seealso, - .rst-content .wy-alert-info.admonition, - .rst-content .wy-alert-info.admonition-todo, - .rst-content .wy-alert-info.attention, - .rst-content .wy-alert-info.caution, - .rst-content .wy-alert-info.danger, - .rst-content .wy-alert-info.error, - .rst-content .wy-alert-info.hint, - .rst-content .wy-alert-info.important, - .rst-content .wy-alert-info.tip, - .rst-content .wy-alert-info.warning, - .wy-alert.wy-alert-info, - .rst-content .hint, - .rst-content .important, - .rst-content .tip, - .rst-content .wy-alert-success.admonition, - .rst-content .wy-alert-success.admonition-todo, - .rst-content .wy-alert-success.attention, - .rst-content .wy-alert-success.caution, - .rst-content .wy-alert-success.danger, - .rst-content .wy-alert-success.error, - .rst-content .wy-alert-success.note, - .rst-content .wy-alert-success.seealso, - .rst-content .wy-alert-success.warning, - .wy-alert.wy-alert-success { - color: #343434; - background-color: #e3e3e3; - border: none; - } - - .rst-content .admonition-todo .admonition-title, - 
.rst-content .admonition-todo .wy-alert-title, - .rst-content .attention .admonition-title, - .rst-content .attention .wy-alert-title, - .rst-content .caution .admonition-title, - .rst-content .caution .wy-alert-title, - .rst-content .warning .admonition-title, - .rst-content .warning .wy-alert-title, - .rst-content .wy-alert-warning.admonition .admonition-title, - .rst-content .wy-alert-warning.admonition .wy-alert-title, - .rst-content .wy-alert-warning.danger .admonition-title, - .rst-content .wy-alert-warning.danger .wy-alert-title, - .rst-content .wy-alert-warning.error .admonition-title, - .rst-content .wy-alert-warning.error .wy-alert-title, - .rst-content .wy-alert-warning.hint .admonition-title, - .rst-content .wy-alert-warning.hint .wy-alert-title, - .rst-content .seealso .admonition-title, - .rst-content .seealso .wy-alert-title, - .rst-content .wy-alert-warning.important .admonition-title, - .rst-content .wy-alert-warning.important .wy-alert-title, - .rst-content .wy-alert-warning.note .admonition-title, - .rst-content .wy-alert-warning.note .wy-alert-title, - .rst-content .wy-alert-warning.seealso .admonition-title, - .rst-content .wy-alert-warning.seealso .wy-alert-title, - .rst-content .wy-alert-warning.tip .admonition-title, - .rst-content .wy-alert-warning.tip .wy-alert-title, - .rst-content .wy-alert.wy-alert-warning .admonition-title, - .wy-alert.wy-alert-warning .rst-content .admonition-title, - .wy-alert.wy-alert-warning .wy-alert-title { - background-color: #246eb9; - } - .rst-content .admonition-todo, - .rst-content .attention, - .rst-content .caution, - .rst-content .warning, - .rst-content .wy-alert-warning.admonition, - .rst-content .wy-alert-warning.danger, - .rst-content .wy-alert-warning.error, - .rst-content .wy-alert-warning.hint, - .rst-content .wy-alert-warning.important, - .rst-content .wy-alert-warning.note, - .rst-content .wy-alert-warning.seealso, - .rst-content .wy-alert-warning.tip, - .wy-alert.wy-alert-warning { - color: 
#343434; - background-color: #e3e3e3; - } -} -@media (prefers-color-scheme: dark) { - .wy-nav-content-wrap { - background-color: #181a1b; - } - .wy-nav-top, - .wy-side-nav-search, - .wy-menu-vertical a:active { - background-color: #32ad65; - } - .wy-menu-vertical header, - .wy-menu-vertical p.caption { - color: #32ad65; - } - .wy-nav-side { - background-color: #2e2e2e; - } - .wy-nav-content { - background-color: #343434; - color: #e5e6e7; - } - a, - a:visited { - color: #6fb2e8; - } - a:hover, - a:focus, - a:active { - color: #db9444; - } - code, - .rst-content code.literal { - background-color: rgba(220, 220, 220, 0.1); - color: #db9444; - border: none; - } - .wy-side-nav-search input[type="text"] { - border: none; - background-color: #e5e6e7; - } - .wy-side-nav-search > div.version { - color: #e5e6e7c9; - } - .wy-side-nav-search .wy-dropdown > a, - .wy-side-nav-search > a { - color: #e5e6e7; - } - - html.writer-html4 .rst-content dl:not(.docutils) > dt, - html.writer-html5 - .rst-content - dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) - > dt { - background-color: #32ad65; - border: none; - } - html.writer-html4 .rst-content dl:not(.docutils) dl:not(.field-list) > dt, - html.writer-html5 - .rst-content - dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) - dl:not(.field-list) - > dt { - background-color: #777; - color: #e5e6e7a6; - } - .rst-content code { - color: #e5e6e7d4; - } - .rst-content .method > dt > code { - color: #e5e6e7; - } - .sig-paren { - color: #db9444; - } - .rst-content .note .admonition-title { - background-color: #72757bfc; - } - .rst-content div[class^="highlight"], - .rst-content pre.literal-block { - border: none; - } - - .highlight { - background-color: #242424; - color: #e5e6e7; - } - .highlight .hll { - color: #e5e6e7; - background-color: #585b60fc; - } - .highlight .k, - .highlight .nt, - .highlight .no { - color: #6fb2e8; - } - .highlight .s, - .highlight 
.s1, - .highlight .s2, - .highlight .na { - color: #32ad65; - } - .highlight .nb, - .highlight .o, - .highlight .cm { - color: #db9444; - } - .highlight .c1 { - color: #88898afc; - } - .highlight .nv { - color: #f9d977; - } - .btn.btn-neutral { - background-color: #e5e6e7d4 !important; - } - - .rst-content .hint .admonition-title, - .rst-content .hint .wy-alert-title, - .rst-content .important .admonition-title, - .rst-content .important .wy-alert-title, - .rst-content .tip .admonition-title, - .rst-content .tip .wy-alert-title, - .rst-content .wy-alert-success.admonition-todo .admonition-title, - .rst-content .wy-alert-success.admonition-todo .wy-alert-title, - .rst-content .wy-alert-success.admonition .admonition-title, - .rst-content .wy-alert-success.admonition .wy-alert-title, - .rst-content .wy-alert-success.attention .admonition-title, - .rst-content .wy-alert-success.attention .wy-alert-title, - .rst-content .wy-alert-success.caution .admonition-title, - .rst-content .wy-alert-success.caution .wy-alert-title, - .rst-content .wy-alert-success.danger .admonition-title, - .rst-content .wy-alert-success.danger .wy-alert-title, - .rst-content .wy-alert-success.error .admonition-title, - .rst-content .wy-alert-success.error .wy-alert-title, - .rst-content .wy-alert-success.note .admonition-title, - .rst-content .wy-alert-success.note .wy-alert-title, - .rst-content .wy-alert-success.seealso .admonition-title, - .rst-content .wy-alert-success.seealso .wy-alert-title, - .rst-content .wy-alert-success.warning .admonition-title, - .rst-content .wy-alert-success.warning .wy-alert-title, - .rst-content .wy-alert.wy-alert-success .admonition-title, - .wy-alert.wy-alert-success .rst-content .admonition-title, - .wy-alert.wy-alert-success .wy-alert-title { - background-color: #32ad65; - color: #e5e6e7; - } - - .rst-content .note, - .rst-content .seealso, - .rst-content .wy-alert-info.admonition, - .rst-content .wy-alert-info.admonition-todo, - .rst-content 
.wy-alert-info.attention, - .rst-content .wy-alert-info.caution, - .rst-content .wy-alert-info.danger, - .rst-content .wy-alert-info.error, - .rst-content .wy-alert-info.hint, - .rst-content .wy-alert-info.important, - .rst-content .wy-alert-info.tip, - .rst-content .wy-alert-info.warning, - .wy-alert.wy-alert-info, - .rst-content .hint, - .rst-content .important, - .rst-content .tip, - .rst-content .wy-alert-success.admonition, - .rst-content .wy-alert-success.admonition-todo, - .rst-content .wy-alert-success.attention, - .rst-content .wy-alert-success.caution, - .rst-content .wy-alert-success.danger, - .rst-content .wy-alert-success.error, - .rst-content .wy-alert-success.note, - .rst-content .wy-alert-success.seealso, - .rst-content .wy-alert-success.warning, - .wy-alert.wy-alert-success { - color: #343434; - background-color: #e3e3e3; - border: none; - } - - .rst-content .admonition-todo .admonition-title, - .rst-content .admonition-todo .wy-alert-title, - .rst-content .attention .admonition-title, - .rst-content .attention .wy-alert-title, - .rst-content .caution .admonition-title, - .rst-content .caution .wy-alert-title, - .rst-content .warning .admonition-title, - .rst-content .warning .wy-alert-title, - .rst-content .wy-alert-warning.admonition .admonition-title, - .rst-content .wy-alert-warning.admonition .wy-alert-title, - .rst-content .wy-alert-warning.danger .admonition-title, - .rst-content .wy-alert-warning.danger .wy-alert-title, - .rst-content .wy-alert-warning.error .admonition-title, - .rst-content .wy-alert-warning.error .wy-alert-title, - .rst-content .wy-alert-warning.hint .admonition-title, - .rst-content .wy-alert-warning.hint .wy-alert-title, - .rst-content .seealso .admonition-title, - .rst-content .seealso .wy-alert-title, - .rst-content .wy-alert-warning.important .admonition-title, - .rst-content .wy-alert-warning.important .wy-alert-title, - .rst-content .wy-alert-warning.note .admonition-title, - .rst-content .wy-alert-warning.note 
.wy-alert-title, - .rst-content .wy-alert-warning.seealso .admonition-title, - .rst-content .wy-alert-warning.seealso .wy-alert-title, - .rst-content .wy-alert-warning.tip .admonition-title, - .rst-content .wy-alert-warning.tip .wy-alert-title, - .rst-content .wy-alert.wy-alert-warning .admonition-title, - .wy-alert.wy-alert-warning .rst-content .admonition-title, - .wy-alert.wy-alert-warning .wy-alert-title { - background-color: #f4a25b; - } - .rst-content .admonition-todo, - .rst-content .attention, - .rst-content .caution, - .rst-content .warning, - .rst-content .wy-alert-warning.admonition, - .rst-content .wy-alert-warning.danger, - .rst-content .wy-alert-warning.error, - .rst-content .wy-alert-warning.hint, - .rst-content .wy-alert-warning.important, - .rst-content .wy-alert-warning.note, - .rst-content .wy-alert-warning.seealso, - .rst-content .wy-alert-warning.tip, - .wy-alert.wy-alert-warning { - color: #343434; - background-color: #e3e3e3; - } -} diff --git a/docs/api/_src/_static/js/custom.js b/docs/api/_src/_static/js/custom.js deleted file mode 100644 index e69de29bb..000000000 diff --git a/docs/api/_src/_static/nf-core-logo.png b/docs/api/_src/_static/nf-core-logo.png deleted file mode 100644 index 91ddb58d8ada2682b8edb9528c09479f248e3010..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 84414 zcmeEuhgVZs_x25lC?M#75=Y87MgeINq$t&fgkGe#hzbZum5vZ~07VoHAiXF8BGRR| zAcBS#3%z%w_ZrG~ZfuC}Kk%*fyKBuTB)Mmwy`TN;XYX_4eKnPfd+7eAgCJ-RM*h48 z1nrH7pdDJfc7oq{bG$JGf9$rC*K>p*fg|vL2-gG|X9)TS!kj;I$u(-cgLH$6}i6BX-xucrd-F zVGHDY^4AJ^A$Ax+hiZ{6Gs&2Yy}RJf4wn4wlE<^=F#Yx$@B`NIp8EFRq4xpXf4{K* z${~LL4MaE}K(*~xxcf+&ZNGw;Pry$3?K)=4^4rhg2c>u1+kX}Ie}?^Z$^Uumrz`#s zV?SL1{a;r8^2Ptr#V=p{|4tXhuW$X@4CQZX&3)cpl5YM};p@#zHA%}H(CBI1^yVDf zZSRXwZx(%zVVsKT_uypbI<*#9wl?^c$9ZEacC)yc?Bq5@3UyJ%8!lG39)v_AwpT0b zg0vft@N9zu0#618Cga7N1XiC%qEd>wLzle#wIFL~do64E*Qh;eK=ibIR6A6exRa~2 z@;xfFV2}1|{Oj|_q3Z2*6t|}_r_LT9w8jzZYP1knXyb4wQFmi8o3m={6)R$#`;Xi2 
zUFu^llS)4xlf5U+;ZTz9kf@5i-W(EVba;C?4!L>5F>K&uUhn-c_f$PoQc$cbQY!XF zOKi9a)b{t%INfjzZl8X~M;v?ge!SEKSdstLZzgM!2WmgJy{N~8LzQyNVORHEro##T zwM`h{%s=MTUAF0G&fcYF9TA8bD%hiG*etyh`(=CaCNPh-4V}zG_~(VF_SV?oYsr@- z({Jq}9Q(sZwhiztsaec1r70eJOnQ$KUvpMnn>YuF-rgQB&jdJLQCx01OMk_DSXxI5 zSaKrNBDcRyQlnXvtKgwpak9s5lI&xu7LV-$WLoxL7LWKbKYKm$E}Xt`lk_%NiAL|L z?9xM>dgVziD?&TAP`J3~_jhDGInTY=WLGZM0CsAF2yem>YgHoXbsXg#~)4uq{@&lBS$G zb^Haqn3Es^S2z89ahqmSQ*0I;#&|v4OS3sy2wu$jGtq*n!Vthh zdHRoCPG2g_)kC@IkQsC9mGOELM<7w3?cg&=w~DMlJ%9uM);VIKBLCH`9fU-@M*KYMl9D#r}tbgS;I_X2+pbQU~6 z^J{s$AL-F#49abe&2NJHFddF0 z{y#_-N83KTw^9q=*qN>5(8|<5sL2S(2j5+>|2?KldT?n9H$}c|*dR@X4Z6Rrx5yC5 zMW441Y4-@lJXK|nalMyzqaMGe>+V9`i8{sn99kCIPJy{^5BPAW;SEuG^RVlb@L;Lv zy3I1Fq(~H3V=PP8%3w2Mol$Ta<1Mkkf2H2}mu^cV>0~&hp{>QR7v{W>psjLynRX*B z+immxutU(mn{RxT`{_^1k~%<`TVLbeHeKMJ)I+UIwOwSHR>bSc1T^O`AzHMz6O;9; zYxw4zz2(L z+o;_T=hV509yt$gPAI=HaW3YgWYPZke2haEQ$M^`(ZK&& z3y%WniC;4$KSGrah@Q7MB0o+7406Hwh z@oq8!!>|jT{!AK-fQH!lnZv{x>HoO#0|H;CGZ)zdr=^Kc_R`26{DZ0qUSn-TlEg@V zuHq>_du{u&r+Rqr2I)R}X;k<^g{vNUFYcDZ|42GQ45)A@jLFP@oU50kbUM;sSw-sM zW39Spc5~vRmMZdJgK{Umy@JO1r`=!-GCby{HS!;vHkaDEkVfr%_~MjX@7ed!;r9H= zI^(Gq{^NcHid9pNvWA%a;j@U0GmK4RP7{ZIXMq-B3QcEz1~}!qc_UkX<)2D23gA`8 z#|5#>ust&VzZawoI<=0DVeRTT2sU?YWy0-4Q4uCf$>A_Wl@Yx? 
zueqOFMP9>;`IvVvRfzKz7i5E_RUD!QzA|&B!y+*MxH}7^s zyo--HY4x(<{;nOk=O!wF)WWl!9wZ;6-7v#+u|yRsIB453LC&Ac z-AmGLQU4AE37|bHP${VH{ZZ>A5Pq5^2+LLSa=XTs7~#8ij>VhxIfP5H|6c%S9BCUu z6R+WQe2!5(V?hr&{}0bN4)L%)m&fas^Fu9ee^?w2OSWQO@fA&(0N@@dV{UuYTos0C z^3v$05Cr4@RerFv0BOMk&3A&lC;Vof_i#YtK7S0hu;x*$9jpJGM?j=MylzWTNiH;8 zBsv?g#&!YP!vj9*-VM|NeCz$&!&-`gl}*?Fh$2kuGz~P){>K{{U-vW>1H}Vx&Sm@N zmMsC2kDcN@=@N9+7&vh8>GquJHhnsv^(+`xRPVr z5`~~c5iUh%9Up*VM}GtCk9nKa=&`D!v#FzpxBkaVpWp>8j~o_Yab@6S&Lp5T2mrw9rn@azBh;N!F# zDnZ4^Pv&vIp@!D}Xf~KcfK^|#(d0omneEE2 z=deO0cN(1kZB&r-neDzH0aiv6+s`_(!n>9EgC9bp4s__Vbo{T63i5#Z4P>GgfY~zh z_C#Du{%){~x&uI-h8(*Nv=zWU^!&qz zv7ooRUVaLsOv~Os3<}4e@V^EJPWX=ng}Hp3VK^DVTcO&XaU068rYPk-na2lKmBejF zOss>3-56ynG}{;TtiJv%`o~si|JaIV7fYMofi?uh@kbf+1{Lypc>%>>#2*I72i-BK zxnlqksJA!5(-*lCd_lNFK)5;Nwuc*#iyy|AHG-@Lg5vr|O_`M7$|a)xeJZb2(VuMX^K~P;UDNG`pi%I)I$P z?K!%gglr$t+kg}x_-GF-(k|KjgbJF7y8`Qf?altgYiD!A~ z{CM97-q8QD_ePue_0tuNN+i}2s%NyM%og6g4m4(umSjJ&PZv<#F`rOsmz@w-De0&4 zH#K<0y)RDwtS7TOr1t!%#twPxCTY@3T3Eu>Kyou(vdJ#LZJqyHO@7Sa5@34mo;d>Y zxcbwQZ!clrmdvacsNh=Zdm&~l)672mWbB+Y6V!5qV$YEmmZK_o zl?_*UQ?KDMQ~`SJmF{c=lNrJkOjzlS_MCdv-y#Qoo$zm+i=dw#8mH zBE*%;(dHoFZ6w&q1nHEB!pMnF_x-@=jK=X;D{U)BqAqb!yi(!Q-@iUjR?8Y` zW4O|oKJ4qt0ZlVdqAuz8DtS0dN_=Ko^o2TXYF&X_ndPOB^qFcgx6>rel^xu4cC5g2#KJ?*BKf2>u%67wY-}U^idjq33uNN z_bWh9n4Mz+HH2F{0Vj%5)T?BPR*pE`0K}FfM{HI!=d2qAhORrm-9lYt!8DhaTOgSK z*ZL6m6zs75;?6mQaEQQh5LQi9`i%|IC{|$i34@dHN$&S6&S*#x%)}-r8H`la(sTiD z`NSKw#WLVz4N9c0#9NLQpy)AA@lpH+TZH*X_s-7&uFA^jcI#fYA8WvW z;+~kn9D&r6+j=@0qnJksx1y=v0WE%j1A}&Omg5#U>XvK+UkDpzc8ZmR%#AqJLSTPujI5_JrHw2B#s&f(wSbY>Jk0N!V2$j)5Ip4MfsW zUgXtM?o@W*A+EgT+4Aa@Vc=0GMUZe4*Whhfgizx1?CXA&Efzpfn7p3?JYkf)B(k7^ zZ$v)dau0EfZvl>=;8snS-O#gYEW5$*y+4SXKm*0(@VMg{CC zwex_!P&JtPr&}W*CjV^>3fnShH8wbcU=?eLZ}9~dUl%Gq4{s-JZM50yX#GyenUcup zlUkZ2u7aom>yZOn%=Z6^(&Oy_TdI5pY=a0_Uq-m|5M|!&-EvRZsmh@+%T+zT@BZP( zvj7J0`T|@9YK2?gi%WNXBnC~N|K_Ov6w6T?e2(kmzqTw10*r()%Lw+!8f1tOGT3?p zfThkziVp)q0dwH_xJG`9LeGgV*Yys=^8VqV*DR8pP>atwkXR6I2INWwEzO>-mF7*e 
zXL+_F<7EgcI%xzD0wI||1_T+Z2pMek8TjfT6)6u{uDzj$23|qEc_1+T{s-7as zqA@QmM-@+iZsO-f*ofc-rh1CONk_N>XXHTYXV~~3S3ope>Av5fGy#T*i-OCKSHYj? zr>9}ZPKiK;bG`o%fh5vt>|{JT364b45IK z#>-~i8c!eSd8-%W(s3};^=7f@;HSX5#@>UGWz+;$N@!3R7;>{_BXEBGxQ@N;_%bzX z4Qk({=jwESMfrUdd^WAqB+}z%abNHIyrA@|<-?+EWkuu@I^4d?F#jDh zq)T_0`J`Qfe#^TsK0g!bRa?B_%IKk`vf9k5(}Kl%|Y@F(`GKro~0^hycpeeCkw2uWII zy<~II+os!Nfp*EJ#MynZe4pU`!N4jWFFlb3+SY@K@>xPaJJRPO0-}>sQ&T^KQ#!(9 zN^wfKN&BXu@l1L*Kl>K%(GCJm&*`g7FxeopO6NP8j*i%7iHqd8O*L|Mh$xh6o?LMc zGflOlCw!RW5&#k-j6$BjB}~uC+Jb?1Bj&SPb>|SiR%Z1fv#?)w&PosNm9y>YV$(AT zuEZFx_o=DjM>ZX9m`%FG{+-ZMH1BMa9I;wrUFO`E1x{AY`&IQpC6r_hlDu_lOC`Zc z0ij3^v-P&xYZ@w}2w+bhqUZF=a+@E>JfxX78P#QQ*}-DSj^hnBpzq^qsNg`uv(C-8 zN8(vrB)o+dR;*+sz3iL<^5-yK;RB(*JWxzsLsOIX2oN(^d5{Pww=&?Ej_r;iOixG- z7tVdiD-879$p3WjJ>vz2FQ3|79LD2GX`Jg73$z7}Qw=s=!#+%t-fP&5g5iA+)}5CV z;tN)6A4!pE5}9l}+$QEfMVcGOw3Z#O>HrE`DbqNgCay6NR!-2l97DKr{J5}T06?V0 z$M52={1xpfb@~&=!MVkVJUDs(N&z`ur!EtJ2<*;=L=MJkXB)+K<+HSKnVu>l9xr z>Grap+K}n=O1Sx@cZ!`@XTsLq{Xj*zv$TCB=v-{sV#TdeG;<$NH-HEeZtjx3PFacy zEEP5o{h1y8FNMjzJDfoe2wQ6=rCOO;dugI|=F1a{vpGd8<)b-cOB9` z72XbfVw-(mZyS{4{N{&7q&lo7#Q2^X}00ys5ZSKiIY zFo2i-aWSd^&4rA%c*ttw9WHZg>Py{Pe^Rq(v$RCcLQhTj<>*@$8v}lMD(t88PXr_< zx4U?)RX5ZL3o=c_-p|Z_GN1vtrb+QV#1I76RHUPprr=SxqMNh)1>QitUUhDw3V*l1_UKWRr$B*Aj}j9_^Ws zuWBm7JPpu*`s*ov%LL*EUu;xJEf)aq@cV@`dI@)kGxu%q^hXWET?I{wBBz){MLE|< zfoYv9omJuHB`Qx2`=>L;wiU03dP`@o^bwzN*69iG3?dSsj-s-GC zAnMMRa>R<8ob-KeoWFj}_+ZC}2Jg+U6?Hizl^asa4nx<=gq;S|7&nQZ>LO*N@TuqA zM%S5Y8ybdKWysm72I<8&pX*Zh4^80>|DInROE?EDehZK0&+z=SbwyB(V^nADaD8W6 z|85)4>Gq6?PXW2Q)LPX^D{NrU+E%#vl{B^4Gd<7Bm1NA{xFqiKN~W{eZD`OT9vKk% z5a_yKD&!Fgq#3k;^0>-RwC-g=Uf6;(-2FFmk^?@TQnI}(AI4j29~zum^_JOiU0tnX z&CY2Y&7{4{!iHnQ^X+k8AN+h6B3TvQ1kSemN1%$j?1$Syy#Q^lRP03W{YupEgDUg^ zQ4`7Vx)si76?_n<5$O|?%*yn~;Yf1@7peNp@+osX@VT6tEbvEe~0M}jU-B=z(- zoN5GDeZH;I?9G>YsL~YgV_jzVH{-ihPA{1}ujO@Nzo|~reI}U#uX(0s&Vl+8&4eNj zC+lf)c=A zgkNH*?q+)|r?=hJLW`YOCM+rT=G@#xuz^)L?t}*1zl8uY>f1_2M?gCRcgO-&ksT-g 
zW$Z4B4G&DS6E1y}l&lo!qjbVtxvoO6?b5Tx|e$>sHTW)^_mJJT55*KJ02{a6mPD6MXcGiUJ+ zF!ikUw{I=I^^3LsX}MIbW0|=X z?5088xk_HCcz}VeM-&45hex@&4Sk)E2w$n2l8!0$nya+FQO7o4@P2c|ByqAXrnZy$ zJ>x3(^0meWddvaZk`4V4B<|w3mS#Nici)sJM-&=}EDWNixZHSsxx@AXF0ZLt3sgzp z^ou2Rkh3)pg-hRDmu2c`i#WAX)||%v_26_4Jx1F_jtA1eVrgZSauX=L=_p|4u>y@k z>Z*6!g~Rm6dir2NGcrw$ES^<*?dn_$nX|iYot9R#+F>c@i;3q(8&e=>!$R+g4KLse zK6b=o)OTNG>TXp8Ehd0Tj_D~lciBq++(9yk&Bwo0< zSt{wiN-hB9*30DYn1R%2F+L+C(3L(0xZPqoIXPN|03NQ4piMtoU<^$Du-#-Ofg>y# zF0>*FGjQ!*C|jFJUN1g2{#GuRmPZ?;M9`QT=&3-T${)P7nTRm~84uFrs2i6rH&#_t zwQeZeWNmzYqU`I{v4PCMrLjUkF`MX!g4|4#(7T-x$_}6%dysGrN2Clx7%sDHT;Mp2bCloJ2LDzrAA>fCA3kXlS zYg}e<`d90V^p+se8ml-wv)&z${w)x;o9Gb1yhk!5r^!p+g>G}THNxaZ8jpEn{3NVS z*r}8ree3y$v($czL?kHeuM$gxC&Utk&m(BR4uNE9+FTrdKegVP{BXFSK66Q?)ML48 zeZBfl$?4duIhLak*Ey z^Dv+nOxETbScmI7>gRiy`Eu^-OBMomyew)K!#Ew3lC5RPd|dt zoeGHPRkGmG6+c(u(-uB(nKNCt^%C`%n(xSwTJLLWs;#})EXKr5?`$PXl-S9GyGt3+ z$u<8TOd^~hddjofy(~S4D4T31#T2fyMh~<+F$&aR^VsY!q79?*xcrz(T#ZuHc7USB zn)2a?C1BjC01&GMPewve=88*@YOb55Xza!WNwrf#DzE5_KbgtQWO+KMj=KI%r;t2? z4N2Ki#I_x2!;EW58Hah;^)F;6*=%0m0=SxaL#EJ_@8cas*MJp!DOQ}M3?9B4fqM!X z6b(EtXv%C5i#Br&)=is3=GjbUs>b9rvND{2LshAlua(j7Vj8rup+CY-1+&Z!#*3*0I1Vsr( zQGw>egxovRzPb3GIN$s5{kkVy+d*t)#`#?TzRN-=rObijli<1K&kM9UV9=sc+{zY^ z0P39?98mHgvA?-=p#oF^xYiu_a3|@6JYt-l;zRdBn4EuHSUku~FaQuCMkW~G?k@Cv zxS6)umy|Ra9Yb#Vr}9&`w|o3~=JB^+qk_SYAUVqD2YDED7x*Z_&Mn>udIgVT8eNSA zm4d;bNXjW`^-JBwGBKyI`jPm&sbGaWNqT7!?xi<(pbNh>Ngkj1*`dIhGx(fnQZvWO z+I4W+AvL=)?gh_GzF*^cuaa4ViZu(Vx%=V7ZL)`D?z8W`WsSPvtY1sK0p!3S8F4l-KO% zrH$scoQgwvDs>(CA+U0(zr%GZcA|M_E$bvCSNpTCWpy@wyy-QRLE+#K?R#5oq;R+J-YQ99DS7pjFE=hp z+?}j48Iy6LTz*;buJNYE$S&+jiYVF##JB@qjNTk=+@+j#5{xet{3O$(GoL1mlej5r z?7Nv^cCe8cGcf!nczIlbJK9H3+Z3fZVVgl9k%y*^x!nN9KC0Mz0nY$bE*N)UW@Ce_ zS+jcfcfgVVSti)Zj+J4-g+=08%Cv;0j8aDP2F7K;P{X?;hZn9JspH*1?8=?Wp-^Lc8ipv9O%*~E? 
z6H1A~9zZk!Q4dj@pa(T!gIka*3czYbOqcxZnciQGVVn3#M`xkg8JccLjB}p0%7)16TzPB?J{4iR3hALKs-b!)HUC@Oi`CrZ)Mk z4B?jqVYn#-iX?KNbIW*`P~Pg6w4=gf#WObG zl^67*R1pAMzHy-ES>gop|J?n_&$A#8FuJOaI9DoTV&Xqgg0kGc@XI4x5#2Scef-V)EC;*yCU<9ARI*?Z@z55M--#)N)iW&EiGH|&BTksKdUW9VW z;Ra=StJhnb2aDuER5oZ~u~gCNpd~v^C9X@^S2PSxpNH^y`W7u8cf;8jYUhP*um(P` z%b@sH0uI@JhAjtkl3fm=kD905geK5%T8sU7AISE~dWrNhE#RW%BlKFD9{daehbb8*y+NgdH~h^8A#gJY zXB)1ZrF)vH#hc=xTYyJD(v58uXl>77T?Nw$6zBw+jf_p$KhT>mmRlMu+yOoKluz$gND*HCb*3Ug0}_Lc;R+e2w?)nE%%9)R7fe{*CKz=yUjeEjy~`uG>czNAqAd>(eh zIOkT6zy@f5!6%9pK7K0#JAhhbzVASRR;>##56>3n`A9{%myrP&8GXCaN9p0MX(M}` zGOmV!INsJ8EmL;;1mz6phcO_6qWbU-$6G-wg7v1L-6A$+tD_WNMHB`NoCQ>hB^>}z zN|m|k6N zs<^R~JK&s0w&oG;&QrL7BR|&>Suh;g)zeB@#UeImYZp>M>49Lp6H4{C*&#^@im4OX zzl9z500tY}h5R=1f;a3B`|X7eu(d^}|^zd{I}@ABK5@_QXYzJ_#FCW z=0|Q>dY4bK{X2HiMJbL8*Nzg)eD1k;sNb>J%TSy7Kqai~dJXK(KRojvl z;;6Hp_^~iynJ3}3WKU6OP_6enRdPP3H%oiqOn)=&-SSlG)pbw-<>Yv-ogntz>Rwm3 z%HT4Joj94kzSfdjj=VOS5bIrd)!hDb9 zeXlV;sLQiT|H5%Su;!LkX5N?Ad0FNYAw^bM(dF2TxgD*b#D5xfdKwg!9!(923k@5T z)SH%db|$m$XyN?1ee`Vwo+4^R586=ei1Y`U;t(Jt}r;)uSm)46MRgt|LXOqbjl0ENjueo+^w8}8;ygP#5BslY@Awr9^Aj$r z;vHj8R1SG6F&+BaS};YP;_S;ru*Kccs8*O{*KcDt(>Q4_zW7wf^J`%TgW^QYbP-w7 zO2XF&fn=^Te=sm<{-9*6xiX;MJb+A}FqxOWFc{z`QOAXNu9loIb<&Ck_t|F^0gQZ(q+6c|@($Dup*w)Xeby(SADxF^!L0FUst*uuQ zoNpgGzvdM^0Et5e_{8q$&=H!6s~f#&-`4x4u*q}%AM_3c?+Kx;inbGO?HN zJ~EXZ!v6in9e9I3w1w=Db^aFX6nRxR)tIZm%y@3Ip`i@v289`q+=HQ@urqY=~5ux-@ zD&an_x~WM0BI6k@3`p6rwuvDp5OVQ4Ffn0TTxPDPh2+IW@43;@uT7>oUdR?#nc#Pk(K99wsG ziFe)w)Zs}Fe=Y`usvlDbu}zYZEkTM}iFZcweub075~HG=ty72-H!;?gaV#O#U6HU&Rk5gE9*S-HRWzOw#bGB>75V^028Zw|1BqnvLNt> z%cvXzH`6rgl-qB78dA#fDl!1hkd6_e!&*-8MC)EJ6{xCXcvRn|jB6%OOgh)0318LZ z&YXiQiR5!LSMxki)1#tK<}AAEV}$h16%o9|r9w|+kqt*JeVYrSUtNb1TVfDjJ4yTe zncXgVq>t&qC8E$CD|vD(uhDgC5L*&`m3NYhig3gVR4I%XT#VE@S?e0gzrq;r;RYBF zVc;AP9Gb-ZB^s~!#Srg22i%6iTV1C+?)exVdM#V*#GS&jMl=ticoI+*WG4^$>2-E4izd)LhOdA$MDFH2SpOMuWl6vh3yeou#wXiyohChz9uja;OHjX+uD{?uTh(ba{nflGU+1E-vQn`f1q 
z9jZb^*q8_8e5Z&8T|tO2IY7w##6vmYpQ1X_51!g_DRSP`(+jq*l|$jLQv4_QyAwQw}S(hBMfUYbe%oWNbaXT zbc<1Dr3ae-aWyRY<_?571E_mae7x!r9|qJF%3FZ+GIQnp)OGe)-2}tbew#v_U4)Z> zT-#?_8KL~K1rQK*5D+`yo+AJi+CrYXW#k^OG*h;)o3`ebOX!>Oj(xA`6Vw3HT zR7DwxEiJw%vd*Q3hXGN3*iz+WS@3|v1Rwegg9?KAnO=I@)n&cQ;Q7x8x1M|43D_4Q z8V7{=_8C5u@>TM)I#|kOlFGgVVl+r;KJj5EG~PItR}>0x&!KXV#?Asj#dMNC#;%a1 z;Y&?MuxX0(URUHb7Q!xdP&Dmu44ZFDjTALiPJqI4-2C?aj|_sTqJrlV?DD(n?cx?* z-PXpkos4C6uHj-iCRAr*F1}W*F0l)R;kkCS;xD?XI;E6D7}1?~%e5!|f$WeG70P8s zfJ4w4U6Xl5H6ZXzVg?cr`c%Xr>Y_?!-cRz3@h_rZbdEZS23a3%9nUM0&|-#VC$kla z=o4zX%(^&cI_Dk_%0-2#KR;JpMZ9qF4lP6ZSu=c{Ms*{DpT1EA?#X9QxfXt45xi^`=S< z2z%1B9KmMgU1fwD?!5!ZETD8i(&JzJ53uxGxY!+@WPN+4jXyEpl6x0NTCh-TFLp$Z zL9qN~{?p5upTYhKH!Y<(Uk}(Ie9me0^e+hUC7_pUpILP!HAjRlS-57jV?^VFsc^f2 z{>|;z6N08Mn*ay2!%h-~oy0*2)dAJl=lfZd?mWN8m%^P5#b0=0-ss_ zWls^&hZUE_V+R!Qya~bW2M=h9-{AN=?Hu~Ez_Eg_=S_OJCwN{K2EQ;m#TXFpjE~l+ z9>t>b^(7dBlJ(x-IIq5U{Jo*CANh5NMz98i-pv9~$dsik0Z|}JYgx^uy;vOe4^7aR zr!UiMV^9}tkG29SCgF4h4pe+vumev;ugM|bp0Bk>!IR?-JcDw+l`IDSJYjMP!GjFx z=R&Q_5z=LZVn?)^N0o+igeVx;7xdaRlNA>N0us*;iPd7=scK`0;m$t!}u&2Wag-* zCO->?o6f|rJ^kpX0;1_~9w4W$)$%;U-On>nu;;JPyp_j0jF3|T^|s>7a!$(_75YHL z(&R&w&hd-4uiG6a<|*>Pbmn|~7=a7yla_&|(Tw_j`w|1Fj_Cr@CT6(yX-Gb%jS0|z zWoy{Ka2#oG1*;L3BWgMk-5|q-`4aQB#14!H{zVsdn)M2f&SIP=E}sH2h}=-@oi37I zP;HNGdlU3Vktdz2^(NC^ocb5g=vkSAZWJ(+-ih>`@tA`&N{`KMmwqmm44`GK=+f{9 zlEnCC+0M^)QZfpRk4J!<+ zgP{@W90SnBK~ww`P&RpZ>v_0TQqaT|_a9Ya^&l!#k;m5DRiRINQ1USlG<;^R{6d~| zD3^4d?%Istx5=gGXA%FtY8(>L5n+G=Ul*k3hYy?`|yU1Z^aM zf~hOKmrQv2aHK&^AzU@9shA;-YN-g{n_L`RPV;ZX{&W- z9eRK}C_p^2^0t)o)KrPKf*UV!2=ImRDQ3;*!C4W#3m@hLd*W-axkl?NXNe*iBp=*4 zp`}m|d>P?$7KA`sq4nO`Ge}lPOqYeUUuwY=t?ZsUrGR0;SMT*+gY-dz^tnS}wx_DD z5L3W|IZP?2i)lyoCIG2`j0e|JtK;gLRZcc^WW>RjX-Q|a3i~-k<7tq!nd@0Df>*Ta zY^p|e4mf`;$0mVX;*1b_m&2gQ&A?9`kO}%}U#kN8ka#F?5`;BaqZAk^YUPK(j`B5w z#y5Z__8|5=PdsiPJa-W0i_2$WvckD!M^U=t{jlFl6|ffBW{Gj9q{o}p4xca&=L--9BQyYUk( zd9K}n`x5q@@(Mf;4nF-StxALw`>$?I`T4@I0r%{uZLi?S_5?dZSQFsRtb-?!Qq|l& 
z_eAct76~$0;4m(Si7w!UZ@m%{EEIOEZtq9!sI&C47wQ%)gfT@ne__v3e5oK<5p>HI zZ0gI)gO`jSEN~ItaVON8yV`yN8v`0&ERtm3qye#s0_95ZssnEH3=^Ugr=sJAX7%Tc z^ItuKXM>D7+3KGB6+zDtaSVIV6;_Hel#hb`r>r~;vo-}#m?j?+G83#*G^UZz zX`+h)kBR0aE#a^(=rm-I`FV_sRocyiUa)R$FN<# zUI^rVm5(%Xgh~e0#ZI(54Q&#f4l&3v72IxEBHQsN4ID^pK2+yX4hn2WIuq`07RWoc zkWh1T#|`G}m9>!|8P`STY#AlI%v^19>y@KZQ|n6`S0A@-_6JTD$D(AG`TUl#zwfyc0; zLNuE2*6%(FLi-K-DHi+EkM4w!_{QitrPURG6speucgo`KdV2%Mv%AQY86NT zI*H=iB!tUL(UCqb;zzW=Uqb2hyYt@L2)FLnci*MBpCMrrrOV^OVuGHt^Rcy@u~N;ROLMd<9#K+mYiq#t zIYsjrVz<(4y=r&aip+XZ`}+;Qb{Y4@rEc%df4q5d>N&8=xeO4t50HN&Adsl9JM8Sb zz<{;}(*&A1&1BfGoKf1l2ruoFsj3Kuc-v5oQa}JrmVUR@n}Rvgjuse7&_jNJ=y@vs zrAs(RxLbR4MVn{P>TWsVj3y+xo0b>dv@t52G{+iC9^hOXF;@{}i{wed^RW$1ZFbDG z`k1(FD0vO82+Qy$E?<;c?#=Gr?715sfUpZ@!y%5yAEfaNFq3r`QSpGpl|qB~^4y~p zL6egN`~c9aRfqkkZQX{5%hIU!#HZNb5kO0)1#4)7=a?CGN`zz+01RMZ zOHkicW;;=#SLWDcl|ga5Bo7xE3>a|W$1*W&xM~HSxrE?Ulx;&jm7OezVVW4%e-)SV z)Bo%992AGxo&bO0r{0yy_Yk>XKt1ZI4U7;|w!zt%qF9sSxk1M{$0RfJnAGSIOtIbS zyMnU0Il-Z+viXsz=;#vybO9w`nve6S15^GSfb|uW6-K)sAwl4F&jz^)<^vA_7=Y$_ zDMD=@dGP5lm8=%XLX=Mbz)kF|p?nq)Rv^)uS|1(RINW=*W6H~6rrvh5Y%17x zgKWTMHH>gof>rL4hErw3&&V<1iw`ihA>>-E>OA04q|(EoOPLeg&mruNlg$ZZAY$N< z2GxDtcj5AL0CzjkW(OFqo+t==P`V>ltYL^)6m;4U6hdMGj< zQANDw1E0kxJusUaa%NbgLQ_SIeRdII6M{e&FUsXt(V}@4f$oG$b=MVZojp`ag<|_P zfUX8WEO6f0vXG$3HP>C{xiO?;a@xgmQTLRd1WDbkcmBSk_wcYuZHVyPRAW8W^gYi< zxP9>fckVX>z()S%Z_8Dndx{YXym%NsEMiVQm2Agbapo|1!vB(vf?myZ;SRJi081`F zPtILJKX<}w@I(B6R9H(`FBt}A8T44dc)1KiH(OYE1$<>~)#Fg=@EpuNZElt;Im4u= zwNz4x-P~YnYnwXw+Na}b|CGd0Niiw2a@42qIcEU)aO`*6C?PmRqZ$yK>A7Qs2Hphg3;Sr$r{VR- z;PnIH5dXGqBMFkY>&*OpAF<85thQdg?`A-ZaVOAVz`%x6%sjz}{kx=f)X8R{y)z}} zF4#H23rv%O3?F(#Kmofhut1zv0&u^W4kjw8ZI{E0>qI<+iGyu=5OR<5J|ECpT6Xaz zyj$H3iPM#>){eMrwkgKOgiC3|n-?Ro#~Lj^Y@&DB(B5QHUH=lh_QsiQ`LSS>c#OH3 z86FiI6-|u2H9G%reWQbOZH64XrZZK#G(IWQ4W!T#>X8Yh5$mixr%)BU`&Z^vV>ilUBRxhh#sc*=B2~$JWYgMxV|lz_r9XS6AW3!O^^3lGGUr;{ zX1|2Y>N;o1(!9*(ry|k@j41~NTs)YPKw&#rg(Qufa~$AyB;T*D7i3Q2JyVG*8LhO~ 
z|9D>Xd_37anu-D=eF%ACQiSS?g#a=4EXe$9pag?P#=ukd8?yy&m8pL6hiL+@*si?m z8)2q@Z&sa*B8jGFy1-a43G*0*|989$FT4`OwA018Yn@nU3%-)v>%IQTBMDaRRu^}O z!{`4v424pb+B;Jz^)>DwM_T}F)nlm(EUd>bBEoji@rc`joMw2y9V~~iV*&s`Mk+ew z1ZPQ7+DvM~6W!PBIHJS<9b^M9-zE z1nKA;?&6g_a+wW=LZ2Oi%>GDN!v4IBd!cig(x>o|!9ykouqkjmcSGz)qbcaLbtc2< zUA}n8i-b`jTOA3MWj{#%4kb#@q3Ove1-7QNlWuV4y{Eu_5U)*K-4bNZm9@0Ceow;2 z2aaIQ)R#}TkJsxKwVul6jyU7ZdOH=WVOx%A8=l8@G-$ zWB=w`pH)K3{`{Y?6MLm>6&}C0**tIHSn-e1POfT97Mew&mvbMIApVue^kF>K!HZN( zn~K+!qs7TxBp{T;6@jBWQ2g>Pe?wZMI3ngbWl3&w-gD+>!_HQl*PqtB=;WYeQhy9r z15klM{mb@zR9NYbyl98t43i1&OYlb{+;?Ft$I zy~f^6nT;Q9aYka7v@ka$9}|zz2ksjmZzO+;+g}bFEZ;00s$3f-l3-mV43r&2=-Dr@ zq9$sh$+@p40N1O`juUdsjq$34g_2hz-l6kc-3)KK)*We_w@qd2R!WC-8*<~!(%gEd zmgtB9y}#9}zM}54{NtB~m^>7e%ZXCk4Q!s9{rMv478aAqzN42}p#1SRC1f!8z#lsw z*JX-!J%+ZwlJ8HG%Xt1F8vYc%g6NW+XbTA%2RLG866%G670Ns!NWz>^%+*K5i}>;Y z=v*guYy+oT-o32)MQO5(9T?9;oiC?yF~eoAzyC*l#A>*~^-txwpDP?+R<>1jY&i#b zNM4E&`anNTu$3@M1|0&JG`VqwQ-rfw(XX1y2_YO+yxCCLX z_p;}`$6jxyFB`{i)KqRU?XOpg?*;lSe--RJa`6eq;487=qxuOgl|Bv;Y`Ulc?D6A3 zt?}<4W@`{aNPpT(S0`m2m%EAJQ|@;;=tGN*`Bm`|b)0cV92unl97u$lmCz#@tuPB# zfxg4%tbkK=`SpzrK0WRu@%v@_fo;60jOyS99h*P%3-?phTUca(f}H^}_QTH|2L;+} zOLsgMj)wo2p5pR+8%-tDX=MutCkVrc(99)>n@;GTX%SsHLJmF0%*gX>-B{mW0(W(( zKHnCH))F_6?z}f-@7iiL8z)VKbhB6v_>#f+7IJ6GIs7kSc)5KJ4)MCf8fx(<(<`+P z-+8t;PuB1xY_Bm$ueBQQ`&6t0DV-aFRH`SnJ=apX*e{|Er>(WSKO$$xS-maZ9bsUs zdwa+`-1q}Xch?;X&}pq+MTgCXQ4;1O_dpm+jcu~aA4(%4ebkX)EW&8tXaAg(@gPXZ zg-33ZAZ+Qp$IDJxiknCn@}^lq2~K2O*9h}mZ?EotZ}3lBi|4Wwt=xY89K`vq^R=^u z#&JGd#X0-qyM1tn{@(7-y_2CKP^_)fu_=4_f`Ajc>$chZkxnppai+pg*3>H?de#S??o^^Dw&U+&M?C zS_rzT8FWLLgC-#Unaa8NQ;g*Ex*=pf!s?!Lg(w;2#JByCdwqf3wpxi$k`woQT8eKvveiITnT)fbQLfjKr!RNnlX2I8}tKrx1WdaFK+Ot z4ek&`C;#J3Kg_Z^XLW;MrHegius6n-TJR(zp$RSK@Q9;D_UjqLwGc0Su}!&A_WzXh6UggfLB7ay%5Yj5}G#Er88XFukk zu8?!w{FDl>fRyO^47T{mt0a^=oPcKqb)pm;@5%?_XAxb;a?jO-C9dr(w(|L__=FqL5Ckg$UqBQT+I8rL)OB1#l9U zrVqT%RexRVD_CJpBf2tL9%aSf`9;jjZHdMuyPa=a<71?5sR3fWv*B`cf*!V>mOE`Da0gv)rfP3yct5MQNRGQGhVo*=ieIIY3S{^?hg8py# 
z=vW4d9|19tzj}6Q)+qvss5{PzL|tV-RXB8{0SV2XOxXb)%fV)oGD zVG(+!En-RxCUJ(KzM(}nGjyUI!qN0s7sMgsyOeK$g9(m#F8*ETb>JWcs4t&!JJSGc z{Y8yntITeHjJZOKnb>^rQ6aU2iRD6wCj_`hg);8K7VdefFGd$}$ic#>chU{TO;U!HnS9fj)AB z3Qi7%OOxHeqYP(SM7j&ah_KBCMeNiNChlus$zUS+ZUqlN<~~0fs-c)slin z@2G0A*M_94pEVE{*&h_$-~AX-kRKY6lZbQNZkn2$3jb?aD>PcWsLwaX%nqbM)1SB6 z=}kHLUy}H4;A;5m#*?tb_iZzNNa$y10EY8}&)W?Ph^Frt?QZxSM;Lj~rosY_bn)&l zP^Y@CBb>HB$!`y-eR@6qJLQeKWbnK9X1E4(w9B3wIjYIMjd54NY+tzUk0I)V``clI zEi$_e>Xz3|x*3bvOBx$*HMdN?XyZ$Am4sF^YfeJIUh-#!ci%SxSeU#_lj%1W0q5r(QZ>rs|wtOpG2eyn3 zY=6*@x35Hzpy5;0cqdmk-!%z}9KK{%Nfaec?(Lf-OSyPDkDw)XcKjVoIf}B|n{Njn z1Y=zvb%&qjPj+w1Ky$=5M$I6c!3p!BA4;=r)nAIs%Hf6l^(1IJAMs7O#T@_9{fG$Oz|yTz3KK=hlNd7 zs}NOdYx7;pnS4>7`Ard?u$IUl1uLOROiD(2kd)rD+||6OpavnI%skwV#xCRSEVPmD zn*u7mqC*~l+y)jL-@cqQihC-8h`*8*nvLe+hapl_b3@W1JArgCQg(-X=J6)=K$cUw zun9L}8;Lg5L)N_4zh8Fkds?Xa$%Z>_gpIkXzk0RCkGE>^%?mpjwd>x0b&1dnqwonx zXtM=RRZGwv4iU1?Y?=ieJdFSdBc~i877EbGU4+^gcua#OF*)(3HPb*GgYWI=LduKIR3dZWe-|MMIOqk8rTa_x>l_21QCb;@6j zqos)?=!xqvUx$w4g6CRV^o~KBLQEU=dNiNcn@T{zvp-*}KJ+1e4EsNBUtF}eZ@2AXo z@1X~2vJa68BTR5s3PzTMBod)b8=lSE;OL+|>O*=)PwdtB>mw>+4>jfOA6?SiU)a7; z?z&Y|^=~?T@%JA?o||bn3pLNtVl;{MDZPnJKEVe&(-AqFP;v`*_(mY=<{rcwMG*W5 zbQ*|^3)voi#DH8{?UB01l(_q|r68>>PMX-AIsXnuLQv<bKaz*!WpnMRLr7h%Hgz3IsIRl5;8EWO z$8!xJiWtWJ;qd#{E4kg)f_!ER;WsaT-rr;^0H>Y*$X71q8K!`nLXe{FO~8J*ZwM?#4)n9y01^6q z3GUS~xeJqMYcQY&_^N^iqc86llpZOCzb4yqvfW)nGJnY_cm3m0Ai^AplKQ^; zQi;8HChpFky))MdNBR60dk--kz|Si$7yQ!mtwsgf;c7|rslrjTAl`H3bheTp+n>>s z;6&Xy4UPQAmm->>3d4`nSdyw1St_CTSisOn`AuB&d+VSa`=tq?} zf~R0KBu^?}=CN#o9pgFArff4TbnOxY0(o_x!zIi0-~ijbc6`!6fhB>f*D;0MZDlp z6fs$$^U_e&F+NnQDA7d|!ZpVM31j%j9n+5|eGekP^+Mn-yWtw3a)H}Y zUMh7#(fVm$Hn{No*<`!X9ao}9?Cyx9pFjh5o{`20RrA-18Ep1O;W>K6fn+c;Ii94(lOo3>p-)$KOZ*VRW}Sli z#}bJ5hBk*Z#qVyPO(hbcz!WVqR^^MWDDD1xuSEnhy*lDA)`y9cV&?}c}LB$O+XMveoLt4zM7ci z!(>7^ENYkze6VzC#yxzXEv)d5=e~C{kKW))TSyDPl7O_gq}YWi6<%~)52nq1AHd+< z3AV)n;LYfr2`Sg*AShw9NojeK%zLBrUG5WM(2l%kQL!$XQt%oy73Qjl@&%2}?zHy& 
z*-~`6!aHTHD*leh1f(l&{Jhv%=Z7dD0{};ikZAo``^t!t@5T08%lBzaz=7VULQOud zj8>u)L`>L0Mz1V5m27Fq=ql?ys3;CE zy>~pA@BIT#^bSDe$6fojLa3UxTyJ!fQSsXF6odR!nB_mJWPPuy30}!1vpbjWEx~sW zLmXqt5{zSk23WeKMKmp1wJU!~*LAcmNf?njMDC1_ibkgeO==9$TUy|b-9DI}K>k3U zKNWixag~H~-UMQ1(pa?q4OC0VPGN>j9FyR0tCFPt5BSCUea&O@auG3V_0O%TPY3((yBJFq|G93zZE$zz+pBaJ?|sRygRd{7929p@Iu}}C_J-sa*~WJ(6Lk;M zKMg%0Azpc5T4Y&*q#q{w8_BCX4;`ThcsyYec2K{QC-Dfz>&JtLSs&j6N91mt`-jhs z{r(5N=!0Zg$DRMF58UwPte`u&7Si0v!?CJ)P1iDBm6 z>fp9J$4p4e3sTkAJ4!j)KEDLv-bEAhFPJtdj#dvPekBV+MKH^Rw9ee} zpR<=(xn25lcYkGQcPD4RJ;HeZ`OEmWAF<`ToTQ2OQ->H5WIR?{GMtAiAK(pWfi56L z1DcX_fU2O9ydK26GfTn0U231HM&%XG%<_4a-(ZS!ogy&#h&XH+F1nQzDf%os`SA-` z1diH+xYOsrEQL6?dUW!{#20TX%wc0ASm_fopKD3#?0U?rD5bW5?`{9`$^Gf9)Z7nryo`>}4;J1cW_Xs^}wSaQz? z{w0#5MRl;{k$lmivWV5TWD62I9PyPih5Eth0|y{s_3|%Wg$BXrvS3zZGrw`lcOET`^CNEr1`w=n#Vp|VREz}RVJ8qG-)?t$a80Xuh7RaO=fdKu<%pm zP$u85V4L?;4)ka?J^ZkjX65>jdYp2e`1+wkmNhk`> zMtotr`KKJF`Ucy4_S^QCR{{qa*n`Pe%x(9BX#8qFf>l}*$~Vn3wkmBUX^>Fu<5KX$ z_ye9xb1kCk$*5$HAV(y+nvL(0BhA!wXF><`m4=kJ8V6bi-V(g$2o{bwx9WkR;_Zx5 zwvuf}C7GA21*r>MhIMM>{S8_o9w()MjXK^lS5Xp7p2Ay|YwQ+T7SFN=hnM;&hW$7Z zUZJ_mczgp>-%bV>vT6k9An)X{Y8!m3Ua`L!ChgJFX1ZTrxxBtsxi>v?oIUt?Y4{^X zsRQrEc815}f74J8iz6nf#CHPqx14t;F88v|*4<4u@-TQ@LVTL~x@2eq*>tfci}A+o z4w~SN8b0R_^C~=2wfI>7|7HQ=)5L0JJ1SBWmEGBJC+#>nu0iUL8=EE(QnBTlFWqi3 z^z3(HBUCu7rv+&a|JV_e;(<^RV#rA$?|DN!T(*aO`x`oY6?^p-l`En{+aE^B>@F?b zgtsQRID6YIkL(QJ<(;u;&qs8x5gozi4RON=(iL}=1`b@RJhFxQMc)Qzg> z(v`a0C#A+k+m7T#|6?%bCac#4@Roy(#}^JDB!n!_TDDiKJ?h1kl7fl+|5bc`$CuODpbb$LLCm# z;e-^KuK+~9khD(tVA_Jt%xvIj3C$Ea&%zeJbt6E8q=rX_*XHiZBRBRz?ZAN=N;P!( zLIS9B(cAIPaClWJ8#hgzIx`br>Z9yBs-}6FTR&CiSR2kPFm<2JlTM1OhO)`WU}bPV?N2HKlW9FMJhcF6Ha=ZIn|Zhawjoi?ccp{fT8$_deonGj`R_F7Jwr z7EAK*`sbq#>)qzgnOk{;D{~j3g3`^M6)_OB&EKMbpRS!Fvo@CA+?$fw3Ta#Afmdns z*v?kEFVFg{cvNn0u3r*55wL4zyFdN4)rAbHLElX#yv%br`)9@G_7zTg7bkDK@TtZ_ zmS$~(o)N}BjWfMg#00C%y3hc2^S1jGoPY)q;>-f}k{HaSR*UrcaxAJSY&FBb@O(b= zw-#fDnKs9HJktXqkHB^^lPQBtZ_d$~9&dqWC5cxPKbsS>sI_;*oH^!lcV@mLcCp@) 
zqq5fDZrl!IR4&;bD-oYQcw9w}B-H=cuvy*8tsgFs0Js-4J4OU{3myA9#`@oy|Iu`c zd@@CCA1l|ne+$;aZ429}7TlYoY5UBXqqMUgGPGCgvy`*9HkfY3>5uic))FyodF?8F z5HSw{M3?-a(kb|TN`~PMg_@PDJ#*Ib7NDr!d+#N$=!#`h+~j!ir4`C^=r(=T9pU=4 z!{r^0p_z$^jD~h*^c}(~+tO#8;T;8K;o5&gUVZoUV>CbWOIx%ot9qRRMwX;H&r5&cCRFp;QN zTJzs5>Gl^rDnG3!?*7X89kxF??w)KJ3ub{qyb+!rR2W2j9726 zacUdnN$tw$B)60eaOtx*>v`Xd9%NJPTj^sUrVh!})rLsPO5ft9k!LQMDfC%A_H{`V zH$ODa{)laHj56aTJ)`8D9eJ!-TEy)j&C^wY#WMtpLP08%Cxe+suV45| z$tw85Sg^#EXU3kuVQBC!XHz2iIL*S%RV3;Izf$0i^cSz0$^sP;2=BzJZBI2k&g|g; zs?5!7_wTQT$ZS`mDc+(tU}zhS-TM9C#y;LdYZMw*wG)8iKu!?fKsf+A=|TO1rDvT2 z0TA{8gK4AwYhY5hjIHN|D)ZmSjGGS$2a`X7`2N7MUz3em`&q>A)8$e!VtQ9;(8|jU zjbEDK@?YPog4M5=B|hp{@DILhBVlimap}g_XA(j1oa3eM6~K@Z*~c{Gd5u)u&n0Zv zIx}Z(#8cqM%XD^oxa%opXk&Y>*Nz#?1iVxT{-SK1%i%Mz{vs?gLH#s$K-iLzQ_!g4 z>gm$v0psXiTxZzVpeJVEl3WvjVKz5KJ9B;egB#j@-R85}Yq1q!ytmvsOPkyi11SO$ zGU~Yf159Zf^1=MWobhaNalGY}zk6eCA$vdBPovN>*%5m{G#+W1XwQndd-K<{-&3dB znt=xkYzLgrc6FB9Td(r%l!A@*-wOiJfUwX*&6``~7pnr1S1NaQ1e*3lE7zEOwiEaN&`@Yd zmNk{{ru>*@|95`|3*{ZLU!SQ<#;rV!aLF|O=CuY4LGb1^-VHyJ&@R+fH*JC9Ac&v% z3zUg%Zq|uNv)gZXG$7Zx3K`N*M}`DCGfE-mGs`mDJtreDg3zS6d|x(?w_WCp|qm4=#2%YE|yG+ zlYn@tz2I-PD8>v~UK@@0KN0qM)9(L9h|fJQn5FO_#Z#j%#Qn&hw&9nWzw(q`c|spe zFx)e9U^Jr)@2dURuEG>!`9Xi*JLat6J~kaPE$-{#p0joRqlKr$TG8q#Ca2jdNkTa< z@)7MN#>wCJAZ}Qsom`SThI;u$a9X=K(wwIw?93=n9X`YxzNyCB6O}eK)Ofj8E$*A9 zSmdf0n6FX2fA@kr#SxV&!Bd$-G$oe%Qq?U+QN2&5=77G00?W5(m$PbO+p$}is@$LQ z+1zw0q)+b2M6cR!LQ~VBkf|G7Loko z%0z@n(L#Tu~(NSA{Ko1QYtr8eU_Vyvj?sQna#d{stB>VhO*29Md7RQsX0JHPCyRN zq0k8J2GZOMAI%O(@Q1?46xS%hSz-0(&gc85qKvM85~pNbyj?XAMya71TbfwBom48w z*4a=10hmx%j@22j^p`P6=~Py zgUn?6cc1437-^2VlY1(m;7DBb6=H?kk@wt0dy$GeyYJj50G` z8`|9N$q-_XI}&<*L+vl>^hmnMXze~5+p+q}y({Jw1HY#zwi@W?Nk1$%*8k~S4#me^ zS|a*HJj}h4-cDxq<3Doup;$GDqsAUK;5!eD!Gw^_<;DMCAJ#Yl>(X1jym3Jxa+GH@ zf`aa;dKJ1p-*d`5H+IF`ssEY!n3^--HA2Ywkv?lmUbL;Sg+s|^k;2y9fqF=q%N5tZ za(Z_)>uzl>8#uWh=)pWuM3DZ+(dW2W4um~^a9Up2ly~vl!Z$1n%3_h+D>#?zpY1X` zAB`3LuusYc`ebe^0EYLl0&Sc8t=XXt)=r3Hi(^h?i!+d?@oSNg5fi_;%d0VTDC`N# 
zDj-VJG}=i;Oajq*4(rJ?qvB%xPF=X3np5>LZukJXXJ%s2HYArGRu>uV-IW79esE5Z zZG+I`qLeu!FYHCAI2Km?eY6v_ash6Y|{?F;Ovab61S~Ycf{X{%_NW+w)pD-|9RfL(8a@7>FILH#Rsd*JrC~rjdHj%s>%tfE~TX=DwP~zJ=|9a@etv3 z=0UXd$KyCYf5kW8N9y%FyQGL6nTQ8(f=^8gq!Cj%%_1ERu8>n<(U2R^>*3YJ0K}18 zFL)j@Uzb)ouImsgZtV6?xQuQ#8GdEz zT@rR6=G36Kdn{*b+m3q15KYBcjGoBDh1#)gJ%@Fg(f$4REK$7?Xr@6fnF3vumG556 z66F@<8l2-3hI^ZnVT`z1P31 z>p6XV!wq&lIdK*$`Yx0+y*cl!O!#A#+;;6ADPm6eP4DJNZyKz2% zpiLqFO0Zw2A?*DcNL1O<>}acLsB^@(3H5iY2eFTEAK+blhKf*VeLONM0VO*8@}5(B z@X+nm5>R&WVQ3Q&LNw-zv+th^acqU7J8PV+M zChT<#$jH->6>Z~Od?pC$TwWg~BA-0CeU-*nX%n8IX2-y*cxCPo+ZeE5c?f)bR8+4c z(8MV`)-YJ?tC~_+LB;a~vJuy^qp#d%~)4IqwuD=ccbF=c~-g#H9emb5DBMB9b>toaGLM#yWbk3g4Y9C0E ze`bZ;k-rh?roUQPju6@Dl{@6H*k5!u?(ogurm`(=+L#KTg#LM|jyY=!8>Ka*n`zocNV& zK~hOA8?NC)kcUuub&SEnB^Cw%+bL?4+L$WgsFPfcAYPg&7eeswJPQkbYAj=r+%eSD z&W(^CGAK0BgGPkR(SqOo?SC)$c&|x&g=fEIb2>0lE=ayg)cCGBOWtadQjTq zE2p%O5{OT=GlLh@Ui9WR`<4X(O$99dG)4v#gTDx3#f(B!5Gr@%I%>Wg?Sr+s{VRW& z*lcLaO8P=}UdjlF;&*|Rop(t*ym&u3z291A85Y0}6`inDw{=~W%c?1yd ztLG;8;44!@-r@hX$6Cik4bWpuR&6mu8MT>+%&3M274(1jwhTWX-oWUDznM9pin#oX4C-9F@*UgL(dNUa;n#Mhm9kx< zc^q$#HCm{eC+d8(ep9D4Bp{=Xy)iojYQ*USGJ^lb30FCqX@vw(KzNCk!R$tN(FQ(p zx)H^K56sD~46wPI1n=L3wliL5W=f~s-hFF+(=KB26Fd)3uv|3FQ_@Z3jk0{d? zg#37cE*ZJ>Z9s?MJxyqu*5kepzy9iMIVzq%j<23IEc&aqlETU1-=kM)5YyU~yx`nM z6k&a@_w>n(?qZ5^exUSeY()=14+(8fO7)?WI>7yu6dp^P+53mcboBQ&3exHqfU+Vf35 zL+hYwKrk6@;Wi0zr9RfH?_#V#^Z0>cSWYV!0PYE^0hJ}*Q3w?2L1lMzlOqp-%Rp_1 z;5Se|{s(0d?tdnkSgppg6G9l`!QWE-Q}+ejDL(Jm?w5Nn3GQh-uZgy&JW4B)<@sd7 ztw$x5s;NNDqibItJzgu}#+1Fa#W$2taqFR=e*VB#$+ZV37p(Y?M!osi`0#3;jHQ5! 
z?c3jWSKSP5(p7wGe?~a}pm@iNr>gz1`J+GW0+tWnRc3u)%KFy#(cxtZQ&aAv%Bfk& z+KZA4r9BpQ-zC)~Zh4KX=OnE^QJ!0s!?bk1 z$rg~ql#g|+u~W5ex3UNCxwov#u?Hu4eQvr<_w!{5d$9YEs2`Rp!N%^my2K!D@_id7 z3XRC^d1iS>8505E>0(!?*VpM?WS#>HOr1=9ht@ZdXaKc(01dfnO$;CX&wxrP_DUCM z*Iy@pTL9kDC3P`h;cxJKf{G;KO3f)mCa&W+_ZvIaf*_Hx2&mRlrWAZh4X9nRie!L(R9q$$MoZB?}?j84n2&&aGd2{PlR`M_MG_ zDGvr7?9kWy|pSMg+=E6u|60Jf7J2`KE+X^3uTOkg{`AxD zoP@}``O@FtpXjZdWFOYJKoY`SJUypB+x~1;7ITY`G7yk z6$OLjqZ+9%WvP|NDdbOvC>l%cW_);l>vP&ucU+fM;b2$F=X2^IMX$3~Csn#9iWI!G zUg%(E-|u^|ytprmaU*yKaj>bJOrAFA#-%#+N``Rde4x<4G-(fb6&Qv%Oxgq`hkD+( zj@|5gq%S^ga&Nrk_koMS;=&?>HG!oi;g6;XYygVBnp^Ul>0j@dc&b4vb$2a4+)X5A z_!EoFT<5(7>(WPSJ`Ak%G*h%g5mu}VR@1=sL5`3TM|>^m03U}>K|EQ63#3e;@x=P; zy&Q(O%0xkNu#0IV5hU>eA?H}WHM(e_I(*$zHvX9DLHqwCgx(HsQTyQBKoa@ zA6Vpe&xo=$&Zc786=#p0(>m^0Sg2>gwD~7f{LQFzW0MQP(vm=)5#$P(B7W&r^)Wt9 zHb9X_#uIDVlp4AV8>NMXvnzg+AJD9Jm$?L&*a5Hhg?Cp4*gc`YT05X#J8Q`J<$sw? z!*!Ox0KK&3AdR_q;H~h*ROKOH7x6di)p-<>AVs|oh~L1=#b^g5(cwT_$q!?%KCR0FvRqN=Y_PFiDA2J4*Isj}!#wCrULR+O|q z`{SM2xP5ik3)82_{-S`^zmuKnW>^2)f%wS28%zm`-82E%4uc7&1oC-iP299KJ5@q+ z9L*rb3!J;)43s=b(N~n#wZeJhFRGm}g!HUI$?hFu6 z&+5Sk^_TXn5yS>ne%OO2VcStj8i82l@|UvoXYZTQI8h`$tfJ`(z($%Kd!UuD7xhcg zPoM7Y4W{Sdr|+(n@9NWiyf1r9A&BFez%Oh7Afi$^m7JsDYgx>_8!7b3jKOM;Mj2|q z;Ff|3c{gW@e$gs;IZ|spx;ocCU(FuOyoq(u%bwjV(uN#15fL#sbhVh}_$!*`S`^kP z13gP5eP4M%k$82Y1Fe1#xsCbn++huE0kzHdqemR+ENsB>aww@B z8jVOODxn}Y^&#QtzFxNnVl{XEVY-sby?P@6d)Gsc)ZcI$X zTBqLdFVHxw9`ZTwfTj6+i4k%Yo%9`9>44>;pJA%H4=<+%DcsIIbvtJhB$h|A47+G1 zc4XeARiO49cfBU6Xj;BeeIrP=rDWpBniq~S8B2gC7R0Yryta~(zY=C9KrFZ_oLmMe zNK8IAheo)xy8v|Cg>(aGMiQCBqA&V?FdI^gOk*fAf|~N5OR{Idu}CaH`f|0UMuu;p zk(J^`BsrqdKmeXRvb9I!N|_C>yc%%hGj}(XCL&xp>H2GM)btS?J*vWR_baELO4B#U@r^gdxkf zqXj$7m!nkx2~v3a>f;K41K-%x?+iTGlNJgV9+a?ft(OQjAP`wR;R(e*=_8J95M#$MUEkIrwdhmwEo9Sr~R>w0*cbkf(!L# z>AST4I=`(_@^|%RG2+RO-GuY+tsY@42U3jSS!h!(&vD_GZc%f@MbTjZoa|yYlVNf!q?)Wza 
zi=p2NJOSC3w(s4Wuq~QtWAlDk)y#3td)7h8IZ-K;>hJrV1?68&^5v|wF1n$4nK44`q6P;!AUY|M=pDYYZdNSRn+79yg89m86=iGm$##pzpJ6S1VOvSmo$l-VfG*1lL0P7I)3WpSH*cMdT}Wx&YuweR zU%1b~3SL?v6?jJbs`PTnL|NCfYX%I-xue^Jt$ELdN0TJS#5$^jjEKD1A|1|5d{R(HAuvmy37DXX$*UiS6~q^%uZe;_W4Surt(UWd@_w}d?j)KVz`42-W)q}aAw z7g5qdqlV%TDKcq}R^E>z9xlFwMVrk28(S2@REizj(4q}Nh4UX*(1cv7uYe|Oq`woc z#wUAZ#-|aHqf1@n2p{G>F>`-$D=0M-6{2J<-#T7M zc_@2w?NS#f{hNvcOi&Sb#Y=$o?F_yJrnPkqBkLJgcgWUqn9L8wf~7dU%F1(WLr(0$ z7j)d>r>@`ve^R@HV_aPHo4Eb-3AVQ898jqHG6RidDs6pQJB7 zNk1M->;Icp);pZP{!H_SXYxhIuLL~Or%x98!8uD9YHV^V;E;dC$zYACjA7z+W%7Wib+tiV<8QcWaBAiDvkV-Y;S{U6_PwjJGT3< zCv0UgyW2**@}DXmsQy5yMn~T&#{>c74A|{z18r#p;a}Y zomJ-@4sR$zt`4Gm{)_JZD7w24^`03%ccA;oF!v8Cjnyg|^Xt<|M>M|uzAsl%aYW_w z(jm*yW7j`(oYR?LxzzWquuvnU%#-JhiQ7!B;kswT-;)-_s+HXOK z7qw|wvZ3z6*yAD@M>JBOw&Dy9YdjjG0j_&qvOUp7NM6HFyx*OFE$qrA=43^K*>2Mg zBYB`ms78el9-u_yDcI{KG0nzlA4H-KLhE4$gmGFMsFt0p&|R_ojjXZ|RwM~+*KCMF z5++JY{WLTsup^<{5mXu%=mPuXFmLW0;nlv;P+}Tv^na#e{lhw`dUtChENYwIIVUi` zd07y69uxfyC{f7$H@vd+H-%kFE1bTxH}}rvl!rN(+&T9?=~7g?A4-~;W51bF7Ss6EZ(Hic%uqofL5dyI- zNS5-q3OhO?Z>s1=md8>SX1Bg5f=+DJLO@a!pbjh~MMU>R5If-u|FR(fE3x!!@X3TV zueJ#<->a0votJ~EaO_kC?7`fh0W%&J_8nky2$EGe&2DI%aa3bmFV0Zu+FbB5U(FF5`uOXgnKiesC7D9|Jq_8DpOHXh1l$WuE#8cOC23tS+ z;-eB&6|&GYPn#TdW-v97y&g@(UQs7y+P>SlsM%FTE-f&m!FoZ6`38>Z=aD#SiGS*_ z?h>92ErMH1Wv7Ao9j=Kp?}d9@m2l+G6O^6>;# zfiC%*==jT;+d;D8XEm(4fBNxn&05?PR>7H-d+D&;7Mf*khS=vefzT@Vy4e2ge9!md z?DC}F=Ta`HynX>A3b_*UNsYjArvy{VXnqFpEay@C6WROVNom0@ql$C#d4CSvlaXvM zo!jjbK7-N+iMK>F_}Bl&tbxY!poCV#4NXc=Sv{*l(^Y(x2c=6>2(c}DX&i#BrngUq z1DZKSqGl++Xd|8ja{0k|_TT^~St`EKW55UYZtF4KTI85KU7-HZZR^fVmUbQ0*^4I| zY5iw3sC80zJ%aCkUcngua1w94G4%5=1mZEbp_?a@zd5?RFr00ts~UIT)~9*jO+F{%nz!)qe);4!=vDy&+RSgA-WG7YXMfF3rLx`H z?Q(~%D^?_sEk;b<&5*~;G~Tu5Mo@CUt@IvY{^1^>OBPe^wcgDsFKli8g*NeXNVdvf zVNIIUR_xjrh?yQ=2o}cIL{oTD0d^!jJy1DjR!U4^t~Q_;HrMt|lhB$0n!rORhy{^GZn#CcE@*7v2eWb!6Q_X{~SADg;U>DNn440BQnlB8ymj27jII2P~g^gOMqQ zIca?mIEZ`tXt{3iUoS0+n;2I^e|h^{wvs!rH=j-Bs**i>%$SGJ4G 
zmibnt)tNzxMbWGiUTOXx%g$aj4RJ6MOg>_MtNuhxUk(kZX@_U6abewIYAC}4@rhjG zmzcb$JUgnUf!Jhq11$AE#yTsjX#ZTj)UPWcwZEF4js!h21R~}qzHHr9>^bnqRo3wn zEZ#&Ly1ys`8m>Tl97h2w z!bw1#6|!za~ey7oX~}ynZV??7^KIoOXr zce?hdgJLrMwfiZ@ZW-9P{4l(6IPN5YrA&lWAIAE?*#1W10AUV3%MFR4s zxjCegq zz?+QV;=aBQ`OnQ;xqaIQuZid8o%s2!o#wb{AlR**+d)^Bj{^iWs@WP&&`JN*&vN{+ z?%RZ%Ye8nx9kI)-TfUTt*0jVsjMd?pn2DK28DS1?o!nm*xZ+g%Rq8GlXc(u=tGGgtQQ062vckP2dpYQAb{jK+jy>%P>X^b6`$?|n!b(?$?{w@axFy<}7&s=o$2+>r zW8p5no;I%ojTTgMVC$o8^RcucQQDHyKRzdir0p8ChPR8on&%FJg=jy8W?a6$q&7lzh#Opv)GYoee#J^Uw(fvKPA`+%-9^ls%^gnh zl6ck~@PF8|BKMddH;u}8zWC}lx5?;tJ;VNL?$lz9E>tIyO2qX-x1khZvu6{J3zc?P z6iQZ9i%M!OmuNy;5V~Nw@T-45f#a@Rh(p2t^{&Z@s}%3l@pr90?gmoY@8j)V#;o2W zuui4rUype3$cyWJ*VPEce3MgWy>aaBt};E!IsyY1vo zG)CWHs6KkQ7aqbnJwmF8bU(N6=0ExnX55JM@s%CJmmR?|svdG8R@yzY6m@_kzh-4* zr6Q-NU}4h!0IoUCdVCQezo*Ls0*h;Cm_7(~9b_Ko^HXp)#DPlbXp&4BexJ~^r(JE) z5-i48E*M^x!39cJLKe!MWgUhekY7=rN`ke*UlN9>lf<&ua(fxGhPg;@d==XVBVaMl z@d(vrhwwgnk=AvgEJoIn&FK~|i7V#om&Pby&)+bs-woj<)lr36#bOWX0o^`2Cj$i% z&h)9=odwPXtzAr38tqR6NX9$P<~k9DPd8sxY(Jtzf_pjSo7Y+N%`XCY+!D3Fa?(|e zt>8d-vs8}Ay+r60XqJPl0#Rv(>JM`yWFlv9hIbC2tI{WgNp81`2KXL^Mi0Wcl(gRj zI)S}t2BLzJGwX`z6Hl2TBB_%D)r@JP%=Uwh=S6TAPZ3Fbw{Xbg`V*LMftp3^ePLev z#a>pT_bks-toGgfOc`AN)pX%qz_$Os$+hrtxp@A~5u)H^4$q51VfTxz+w?qhB3|Ar zfU;N{H-G}fj-zO#er^dWlTEzfg%cJiUO1%I?w_ z_iQ~-Zd0Q15YfC59fA*t7pvVD>h@)=oKXuU%RE$ZcOz36^jtGW`Sgoy6E*G` z83u}mQXFrFhU_X3-S6vyTk~TpP8B=|;J6skU)fT$dRj0v5fI(ZHsBv1EKt*-JdKq^ zl#Njz_{P%EiGurWhz5ZT(@`=#68uQ0j|n(g$|i|w~2f-kHBvJpVY4Wb_v=Q{C}-o{m0odXdTJ` z3v{EPf>{4<;bZZIJMjFdUjj66DCNd*QwbyFFH;nZ)A-Fmc*=xM1y~GNfh<9zX6nA9 z(;Z9*Ug+-}Vtt6D(k`EIO@5_X`cP&yp6C~Yv<8?nSyH-sv}~WS20exy?hgu5+!v&X zA9tR2tzwgZ)Ep%B(+&+(6)rZ9W&Rvb|Q`TSL>!ZQuMab^PQY@aV44Qc4MFD6Xcv8gH$eRzkF!4VQLn>x8)20(_#jYuD2eID=R~( z`w1X5vU~Y_mvSj1#0rFLA(<^mJF#iq??PwH;V4GEr}IZ;Z{j!Du%MgK^&ITCcqQwc zLV$7B4}Yv*!k6ix7rZy^wuxuQQ`r?dv`pnhbJA#TfE{ROf!M4sge+_#{uC!E$9%>Y z1KET4;?kAqJXhX)4$>8BfoC!U&HQSz?UK-QQmg;=z%So8S*WS7)t=*bS>t+i`4BTn 
zbcov-%qwmwg1IIf+$ver^q1H##Oh{L9QpX3_N|-)6&`oyT`wTyOEKIcTLwd7p@o8q z*oJd&cEJq7%szVv{($xoPf3(P;z3zj^uUK%K{iTa1nk{k%ccK<9QwVKDEW=~R@!aM zw{{%kDc6DR@{J$En}ZHKVy+tV79$^$tW%-bcfIM*Pftp2Q@V zCcPTkdmgN*umRXv3JvC<&Sp49;`|Q{_D7w~{AJGV#y<|*Ag9sPs@Ox}jSPZh==B!vW>|&LZ1yrq0V^RHQLTYpB=MJ_J>}Nk;6*bUaPBGX$6z7)OT2Q2Na3p znrZese4H8cl`oV73E^gHJApv}vAzSJ1=3Yf{K zork8XEnLpa-pkG}`_{a(-a=*`Cwi>3a3>NINf~^fhwL$IH4PHO2DAw}(!N17*rCgz zu#o~u>oh;)u^wP1htgQ}burOUd=Y!9OS7lcRLhP`ZlT>-+NG5(fx6B?z3uAX>g{Wu zrN?9zifeJF6?K)Bb-StZ>W!U;ETF?KW0tSA9E9#hfWR8+*@ykk?%$1! zXxuw{>ooBVEk|iS<3*s-WW&x@@9_>&W9xQ%sn_rYM$FvQE&l{V=5=&t-F^B~t^KC( zDb?xra}*mN>H`KsDTbn8GueNrf+$OXw8y`Lan+wrg2xgydN< z@w=8ZXy@M(3;)YRQhC{XYLf`O>mbW)3;1^L+LZi-k7FX;BN{Zx>vd?m`%iY{)f$}3 zwk?lTSlTz|Fzb6~BH20V6cO8-NNM))>UK8YNLHtPJyzVyE15MMdRJy4d-Ze@xabFU zXm>&ANQD%9Xa6_hIv{(R5T!#Dm!F)CsuV$S!0Q-tgv*=>^omTxbkHHT43xq%d`2f# z@_#BP)pc+kRmJHY*1P4+7xf6fk#PrzhS*f^)DM@NWGRWM9&Y!q26_C!Ty1%eqNYZ^ zw%*{5Su?jn>||}6UF=>jWpxP-2ubhH3@rf~Wpkx)l62>u{pnPMB6fvNy%{HI1o$EQ z^IT_YkC)tex^CBXOQ?}G{>#F-5MGc(M~qJ<)({Ny0ZXO zj=kbzo&?bVHgg<{QVq82=e$~Z<@+NV+`7lY&U&2((kk~rUcIc~b};HIKJ@^fjs)(a8&U1S;knBmBBOH!-v>*?|C5Ox zh1|N3Hc@-5U^k?q{w41}(h7#g4n{rRH7&LS8M1^d=gqXH(iqEX88vKZ--q_r2$7qx zFo~-)JFLQ{s>qBs1mD_$!2nlALS}6f$IRdc8_qq>+s-4&zY;wz-OwonDGpl6{;VMS1UXt{06io03Yd`CpHu z%?~?=PVG1+;1W3qT*?u-9%8WBn+lI(O3z^XAZpc21Zkc>2WC$=2(@LHrNqDXF6%!l zv3Aydp{?1{r<6^1M3XsfoAP}~)A;P_+M=feWP0$YOi3YzPkql`D;S&=|1@s*1uHR` z@C?`E!L60$>Ly{Xm+^V6e4CarMn}|w|PIF_v$v`3#D&+S(qPcOSDwNE6^%%f*`zbWp zTn~@0k9}~19M5e4QzaPs`oKjS!7%3=mCU!G+$Wp62V6}j$J_X+6B!1ltHK z0dcoudmhsRD`@EQcZ!9=*8f!{1JuKt%l4SZv8k&?;l}m{V>fc)crZJ~Kf+5g<vAl*yfH}5_RoV<`CWr%7dF?kFfljxt!G=Gts+-?ErN(V>&@6WS0bMR{<;dGTplNAc{$8d(wS+r<1q}N>m#gW3W@rtt zRWsL&NpTwuUG=?CaQ%~zIr$cwCTmUMlYSMSu70C1O@yx3BG|>IyR&7vJAIv~^`Hc} zMCKat^)8>SDo6Rrrc=jD5O<8j*+8g9-9Q6$U|;ELM=f9$T-?|tuxIoy=rMKI3Uc@X zRt@bZKrB2we!)40wD3v>Cun0yBQMh&C>#o9dOc{MU^RlW?v2>=7*ru;vvj3xZq=`&GEV*48^$(}wfM1eiP9p0Fg&MEzBbgKHQ6W>Gub37}42(m=R+IMk_4=&&qQkS;_joHe+b2aUi-5dYSv~nP 
zo+~5OZos}lufyZy+v-3j>Fa|L!Z?l_hCc9@xp+o|2(j$0S73C0)5Ws9Dx9d=NIt8n@A<*ZM-IvV@GkP01HwKS) zMxaGZILOKk`*6)WRAX8&A zfL4BYbBItbP^sbtB|pUD=FC#ulqMDF_Cfjf|4p@pjZ$qv`7hLW5%}Idw?bl~Kr`q) z0Dzd$2o-8%3tN!4;jI2Q){@GWwCshN7JYmfM`txGeJ&dA8b4xOc}($+yRKC2zDK`a zaX1P+n%r(ucGJSE&F10t+k)TsJxgI<&bXny>n_cn&@G-)H-aZph$yE&1XO)ManUdSmuDwXS*dA_F-W$MkCDN>_A%BNd`;KmEi+g zPPg=kSgh>Pz=_|pQAPos^>x=Ls(z~X`f&LjDE{EP-=fW{k+wwk`S(`0G>;5up09nu zWZSjv+VAE_u0X!Gf}tuh&B1%Hw09%T1Vg(*wYoS-b|mhh{tx5jwIf>WQ#mNtG@mPe_Kw+E3$6^VAW;iu*Y@}VcveAu zM{4~g2776li(!g40w+eTv-wEIk$i!It#?D3BUbJ6E;?T1dc+0KK@=3ZDP}8PYPMj{ z842LD^EKnPZZ?dpX>BbL486$tv|`e3UXIcJce!VEVREzo^m)P1eBYtjg^3-o8@h`h zJd}bEVrSGefE;<)0^vBg?IHi=rnQw6Z)&8Lve6RKrx+oQJGkt#;-vWNqXj!>dmuZQ zK!!S-bwG@oz`YS6hB$(nz{7SL3B%~yM*}Z1lSXWV59C*|`I511PeNP9K~N*q9KOT> zTi5fQ%^@EKU1fZ1Eqr)7mzs)~O4vzt21IP5GwjI#@27FA7FKHZ^|dCFIu9?Kj0W3x zr+g6Uh0%K5bnLpv<=dl~w8j#^e2Z~w)lC_N1d=8Mdl^8$fhZ)sqC*_x zQMLd<%KM=~Up@Jb8jka>aV@{0K$DcFQ^?W%q^0)a3*2$Xs)Y^jm_=RWBGt{OFd4ZP zB#l0=!JI$ine-Oaa@?Nu?E4}&$aqSmNbW?&)4)77Ywk+WezJaX7f!D z!`+A$HUq6yqpX%^9zMrN@jh zcMT5UyfOGXjsJ-J-93G053DN-Y!%{uUKt?_?xvuuV`TDnJ=f`gaeB23b*4e+46Z0I zzd1fSGRR`>`+^jP`Rx?sMnekQ|2x`ll(|OEv{_3se~{%Xgi@YMRZHxHbvXB(L~)De zglXHh@2wRAupNUM=U``hye!?(+r*1R5|hvG3%!hr-$a?4g9TchhzV_9yMz0$RzZ)Q zlkMP|RdVm2weg8SQO)ZUJ}ZgL8s}A^EJl6=W=AevSmK2&NCHNFL_)eL6@8?~ZAnb0;=fP*F|@WB zO5VrQ^H$WPxp?%7eCbzH zRmRwWtSUL2lZbA1h=cghoxJ$a+W$qK#Tt5lKfM(5as4){taLjq@o2t^L8E zWUGNH$C{lzdp_Kb--%^8AmsPMAyv_ZQ8m&y_`uugz*aLYs|<@B0g5gch!%WVBSM_# z3-~;eSyA-#JEtZY8PP>!n6^6RUs`QCQYu8#RfUs!;NDjpa( z-EQGx(o<&IwNr2TwObVcvsQZE5MLm~%ifKk8DugE;xz;p=-)=uw`<)f6|~q%#`WoU z)EEX>ATWbuH-N>L#)2zsL?wTs?VLS{Jq}y>)7#m%EOm4-r6i*@A?5oNHrcnkZLzTC z=TjB)gN54R{uSG`($v{jCY*?*3JK8H);3L#tDZf6CvmwsypYom%XgNipK7yET$N8dg~9b$`AynDGs7NpNjzn*bY$t4IQu9b)%F5Aj$A@W3~500@Tqr1KvsGJ=2g z@tB`wLI^iBW?>3Mq6GX&?z9D^M&T4F7%KkeP8FUk3;o^8-njUk;!(bjv4(mfIVxwH z%{&-Ydf6YfGwNNM3p|^lb3yg!;r4i6zy3yFW8aZDrm9R0SIq4R=aNPjnf5aI0|gn$ zsw=j>7ic5({8!U!71|=zt9K0?WYqhe9ip-yQ&m@+!K`6lBLukpZDiFUsk~@8fw^WR 
zzc!kc)OQyhHNGj)hzSuL=+1$$qDNnKh#vY|WO78E?fi5|r5EkpTeDupOujJ|D+fUd zxoA+Qmg9EiqFZdZaWL$uwSDl2nn#&+s!exCP7zbLTet;h9N)_GxjC{b1nGpaRrJl*3}4y-UaQ)h3{n;%}jt+z>B(;`*7L&6tdYX zxJ>5^OD&el6Wr!(SV_hSU9dXB_R3==>TJH>EIp&D>ho9qYGCgeNgsU&BRLCS<%2|@ zE_pi-Qwty8imoE_!`u7f(D;MQZw6Wc9W9>cP~Qm0X-eBH;&?AY3fEl2lrKbI#Hh@m z&UQC7NR21aLsxHvn%38JAdShu_2A8%)#cIA5jFOr5R=@A{c(jyGi2lKYA~;toTD#{ zOjb?#)yPTRHKQhl8M|65b-ESJPSOd6rhlxNmysF%FuMX!xJcF#m~00J^Fk_n9}Y8G z&nTP5-j7Zvf}B1Le#be-1+9-JO7$bk(=QI80Fp1~r#M+3ZFmjjgR{^aP%{b}0NqBG^!X zWOYUzP5=!>MrRn*SgixP6)*uVNfzt`R+(9id)5Bvk^1 zz>~a+!^0!L6eoJ+aCeiLYh*iEAR3rV;Fc-fCkQWy2p;`O^`%~$?kuQ;PcKD7R4?i& z6UyfGQBS^ST~+Q!q5HatXj=0o<}>P58$vXsMI_nX619M)tmN(4#oBL&yq9J&Q+L8h zYkm%%12?nM>UP8engzBVTrBCC2$NuNXApRvGaLPD!j7S_e7mXbP0kg|tWeEdpl9i1 z4{q0EDA~2*>rhqef?`MCR*m$f z2i>)^b`19W+S*ErZ(k@L{i*68Us~tRmb_*naM?KF5Xj8k&aYxSj^axkf<$H7yq-@w z4$s9g*X%t4(Vg)vpjW#5jxE;j%yi%)acQdL+nQaPeFss2NBne;0?uBIE%^W@ps>0w zgSqC<x}Ll>*OYJb^n=n2AAK_ zckrgbx5s9vCICR(_7|c?9)F4k;Nz8`gmHZ@6135LC0o8p5|^!_Gxvn)OxmW&0sLE! 
zzIgLfiaVPTG#qk+nkzG3f10!<6|7bt<+9+5<-+Y={NPK5{3@s>#OCXCEdQYr3{9w*Z1jLQ#daTVx=gxi z1@*~DdvYfV%nGXm)15Frrg;@v#M$?nt9^ z>jk6V{XrAO7A~u9L@Y#ZM8%}J=I|UhQA;;I45ZVs+Fg2iMt;3i*F2yJQeO> zIsd(jZzD@tF}Clm>#kK?T7n_{CrA0{a7)x=%#-9mNmAZEua(Rd#K8bJ*%C0gZLAc| zE#ecjX<*&o(id;XaYod+u|09?ztw8y$VVa?gg^Foei3={(vSj6vorIhk<;gcMWqI# zyUJI1{dz9gLS`kz87Hclk(pCLHY8e1ob0gqzFfLhg(eb?fi*i?y3xZe_5%*j+|8?03catav_i^!ae=4*rPL(qd9 z068o`?HGqVm?_3qa5mL8x`t{6qX z3I*ky^DVx%=i_K=A`~H%ov#8rl2mfCo`m0hevFedjZ4f$kXl`JnzK27UI7}8Svz$p zqD04q$WcKClEqm-u`lahRdg0)A6|HV3?&2@LBC%|9&~+jGp8q>SPedPvvzzphA_eNcTax5Y9$Z`Ph@(Z2P^}CpuKU|$M|f_nf4GWO>muy> zshUr#;<7FC;0h8a%l{nI;Yt#J(;x%U)9Ro)aHcinOZPY zvI?WO!YtV#?3dau0nH-i_oIrvdpy&=q=MoJIwarUiR}~&)iWkljORdxS8#o_ytaD; z=U+bC=HC5wsJcO=eKXN_iTzO9RXj1ytRb+_uF!NvCyDJVIFW+M;*WI-IH%$AodYa` z^<&`dFBsmwnG_u>fhNZ2afFgb5VX8enj%Jqf~@$~2o+rOEly`HhIvgQHuys1c>T9s z!p%}8S2Qv#X{@ssN|{dOR$lH@0E1pht5_Wq;_7q?0acMNUFly5b-HAcQ#4!9T_ZMO zYQM5{lxV?K+`hQ@$IFHJ3lXOJx@3HHv@@H?J3dy$G%8m;Wpkd%AN+v(EzzKY&fc~0 z&z39jdLN>NYv*8nH?Vr6wa17z%j7dDf?XdsG8Rl>*(A@e&8@AdK8NJ3R)qE-uxVf= z6Y2ubjQztQ0C8p&8nI*@{aC%L^kcCYSn0lukHMnuwvClT>(N(Z9j+jc$vCY^n*LP$ zPIm`!)#BHR1=&SDJiU!p^NVQYH5VuvWOipY8;0QouZbkJmrrFZ%)wQ9+uE18xy)9? 
z*CVfufx$0A)a(T`n?MjOcC}({cFnXVlAn96f>COnCPhE#yW(5E;AjpBbWC~9WYWJM z!0CLvb=#1J=6cY1^6z!CL{C8vrN+%@AKmH{J;v%O$?WxhKC=zqvzcTe$Pz^v$>ybL z8=~X=LW!w-DY{8Hef`ICOFeY`RW>T#|M{eKJw5*SFQ;oM5WKkYcY^OZG#ynoK^-7K zGr!c#Z(4ss4UCqMyjgA7yuPDCbuL1fWZ8t$ z!qJmxo{Js?YOntpB;J8?D}wKpbTcD|_3yyX7wc&d-0U+tP{gm}wKpSg#iB6@?m-aMat)fk=0&}L6h`*Sq5YmeFlAo$*fXNQ++Krl^shkl=+ zOc@*HB8+%6t%;%AkfooA&3G{omw%5efeq=CRVW18)Ms-8itONBN>*ryt^kTop8hL> z_VGzj1l@0EU>t?DZj_%eC4|fbcgAO${|CHb#d86Eva~L|qSM_c%?VQu?Sjt<*XQNe zOI3-KJ#JtuSvpOWRP4YjI`WzNhMAx1pOcT4y#ifn2-pxymJuT!8dmb6*Yz1rBzSKO zf2{ubPb%#oYOW2nLxhKBKG0?UkW*QW+u|ts_gi+eh%l;Ncpb=~bQ^aR$Y~6>P{#y9 zhC-1EZ=gS&oJtc)kgg|Fa1Y<}Ak_9K2&eAPSSX#oOTPnyfjL2efhG!0>KmT-fS;9r zbrAhJ`ghT7!f;V?6ULGb;n&{i2Wl^%EFtS8LyZ_kO>KA5L-Ar4avXQ`EP5K_+X{ca zC!iJ&L13F?3K*6FjJ#?GC&99x&ttk#H=YF75$-RhgdXrJuKr#qgwU-AQ1EV*@pD$* z(_0)Fc!@j6FIg^a!M|G17|9J@0hs>`R()zFEv%n>rBcz;-4*-a3WLcoAOh4+$AJ=J^k?HcAdb< ztMMEXz()=pIRp>wBK+3plnDXT4v!Gtzzv;P8X%YjEepY`(w65Hc`-e96maP?4$&8o zShJ&&Bd(z;1P9p(Ow-zGmf_jpIV*7!n}QZ~#P8E{qNlm?P5pXe;ocrZr)CFx$dvsI zFtq<<|8A$T#T&?0`Dy6Pw@Q57a}W1(c{!R>vno|wIeB%Kjv`jt0Ll+#y} zk{A3S6G>J8M&9t@6Nnr!XH&Btz;t8S=%is)9=1^TZ3AiHStzTrOt=h&1f(mbpYGoq zExDR06ONw)i)jx)LwTLL5-Uvqbphn_T#7dmg(X59H-#iQsLU_?r~~}YdeH-xiavUd z9*i|RxCxey=amW0jE%#)FAaI2KuE_@eR%|?|4IwK(tt9LbOm6ee^0|Fl`_Xav5plx zMnxWzKbFMWKWX|k=91RYf2C~AyYk(*A}I>1*5 zgMTjoK@s_zQd5J*EIpyt`G`0l-47QStq69&2E_0A7=)~S(n_2XjcB(F$1#Qd+^H0T znrlSlhPGV?^T_BS??$nbtONE-P=DVrL-aH(3phR{&4;+a3<#UE&Vo}Dt5Mwld;Zc; zf-CoV5X$^n+U7#k8c!wwLsadG8|eM#gW?aVeAIoa z?vv(AYdQ#5!b>eH(UDC7SnbVW)(9^-*m!phvION4!aJ`C#2i0FStInGM^-a5 zCsAj%K5HGT@|UR5OX|UgO1C$`M|^X!I#GdNMHkEOUcjup)vC|u5;PQMa)oOjH(6ig z7o4y*1g{NOf$!xltrqWjz#Jpp+eviPW!2g%-6S;NHOnYGNV)O)stLbyK_4U&0yU=B zPkM;jK?J(hJ$ft+_3d%@YT+B9m((gRz+kdLQ|SgYUZmx<(!@Yb6(}!B`6g)>J!NRM z#9J)GUd&ThHiv9rGyck(I_i7Ewi`vxcyUZ>z@xwia+>w9#&5w@AX5Q>)L&~TH}3=z z!c!kUL3lYgB+b|lDk1!CN9B!k*V5(5A&DLl16;7^lH3I_$v!$W119`EB|5;QEIW#W zU8e!S>?#%Oa_Vg+MVT7fA?%6?gc->ZKqq4uRBNduW#ROgr$&^H%XVzOpUH9w6LNi> 
z0E4#-%MnACJgwD`p@y(oxxKMJnhvQ`^n$54ou7Qki;)E!1y-fk#~4?g^OFyfxDBjz zq(`s2UxJ2i=;tf2qb$&o>!2v)r>5Y|2?D!`^}g$)#h&oJiC)0a0g|kZhBilfbNWVK zb8CPIdV&);iUu=#{} z2-6JJ&B=I!EVHywQES}pcz`w*8ziq{509H$0okYP9VPn}XXBPH9qxhZ=cl!gABn$Zf&|CtFfZSAv zf`cA_1Hdx=AtmmM$@KsgLXhpCPP!m+Q26!i0YQQX@WSQa&smkH@>5L(_&6jNz&fh_ zI0cI^3|B{%15uSfN70_sUqQZK7JOzZ{r!qiOW=?rh z?d?Rp9a*RN72t4LD(aS}s&Or7Dn)i;UiVvlcX)H_=Sn|NKw6-$d+QAbMHyRLb2%ui?o#%tt9zCKwOESKJsuXE} z!1p9>*{m$!9k^)p&Vev6QHt~Xdv82M1(@kPP`hD(4f!H4x~Fn4lFq$rkfmtj=T9aj zuK*{n@5-p2E5)!utKsQ7xb6Mxz-Im{fF~$AaEODVD1AhuHjVI`Mbp*~4^Cp4$l1)v zeb*B;s+S`hG@kTYVA!!3PgWV033116gmQ%{+WEC9(!q|0r#LX(vD3ajtgE9ZQ zfl|OW7ABg$I@H)?P$5Bq;0-w2VazlM28x0_d3giZrr6M#@?=W8oKxP?r-=SX9_*3q zQJTq5-o0XmMh{~EcsbKD(q(6$<=8QNIj!oBD$~C+NqhrKVD&u`B-HB!0o6OF$Es)M zKs%#cQI_G)OUgvgO;aWk4muHY-x|dSqeukbv#DEhFde$GI&HeyEm18dODveAS00jL z5>lw^aevp~v5r-{Gib;M7l91SnSQlY0xw7-AB6Y^+%ovbzt{5I3t$^r=%_0p>BQJ~ zS^HSX%U{+}OB9@@E(FU+TfQ6HBgg?bCH#guoFZ0-O|d>eb^Lt}aX#6bhAfLp(7DHe ziICl3s#zWOlTZ88b2Jp92amJyMbH8WN3)nJe;h3G z9I#suAJsQ_vEuls#M@%=ZN%FhU|^AF@R zztkN02i+(LnpK4w-w(4{gfff+FbrQi$j&2SBq#*!U)d`TUAPUDNROGXj~^9MTF<{; zb?Sb#sArn0)MwQQ>NA5!%-y{@ZvpKO5F-q+1*ok)r`e)OS-`pC21r>^dyedzW%Hp> zATygjJ?d(@ZwRp>)kE@R)0?o$8$XKR)|brAGWEH-9lT#qBOQ|uYlTeF;GDd|hcQl| z=wOi;R1ptlvmpGZHjw=}3JR@Zl%+qSb1B5ZvvVueV-)X%o0={m@i);GZyjy$xj?x9 z9ct27L2s1MP89Ue`EWyLIlLftAp>-WjEZIyLhk1xsHe>b!$U3MJe-?I7~ZwOU19x< z+6^4axp|+NekDVP3931iTEUlZn1rW(dfq5;{w&OXNz3E$9x@xMjjwyGhDhEtBEx7f z?}6G^q8bHv4d8nv5Ghln#0239`CG_?f1|atQ;mVkX^}k%%#2l)GpeXbN4W$0@cC}6 z19rdwV#cN>3nZSUcW$1$N$n0uC?**6@Yy7q<7ZR#>1~Yag(JkwU$1juqRkl?m!e{U zl##*quH+EKSTAmSd@u3kFlc-h}5D2j_X@1}ro0tqamKW?<$(&Y^m88-0MthM{q1i~9(cjT7U535c!Dbn!?E+>|$=Al54(cDP0Tafhhd^m&}pK0eoF-mYqOblNXWgVr2| z`+btO^xZdu|Buk?lc_EbNlB|}R-f`TXGQwXqtZ>FNO!%_4Sg6DAE9L zfC{D&7USPx=c&G;bG&bks|E~96LAW65YFt zFkqec4VSIVHLLxx19L!Sl+Yu1!~grJuQ7%X=q74a8$h1Nza@CL+Cj1M=hMth*a1+I znftNB{cAS!PAJIYw=DC8sxN5Gl`>2n>7I$5l$ue%f>su^zz%mqz^S2Xe4K8X_&-ts zFz6)C8_hCCw1RVb&Kc+u4(eQH(`QH2)dmFEOGzC#%r8Cjuo^ljUR6^4YmnDx2l?G- 
zV!(2VH=WjK!G!!jeLPY#-}@FC`~w3-s1QFihR^wp0`2CalG)_sLe)V>RNW_q8FOTa zx?R;j&8Rg*pViDA;{Vy!;B!X#=EBH;NhZLp;@Y)z$oSzgaY6c8!LknxHchH;pi8Bx zVJ_B-53|PDWW5C$3-h-e&_ffVvknN}d;>;A+e}IrCU-aWmqic(QO_yU@C$*}FC3J? zY=3q<{HB1@k0lnSyRnnlfr6iW4fiHbB3b=Q*x)}{r8Rm?wx9Tn6n2#rQ&IcVAIe5! zP3T1OD2|s%X*h!2+)yLk2KjJrw_;e;e$-S-Ly4-E>;@M*&?skLlLhs;YK(boV*kcWX*yK4SY)0{8-uU|srk92^ta{%5EYdz|2A&P@`XVMZNxAPJ zr~wkQnb1CaL*=fN(ku13giY#PtXE>}WC^GJnh@5UU1#7%aX{QEiqKvdMvh_r?p}io z3wH4v0<+mmUkaP{5{5CV~IZIp@uG*oWd<-F|qYOkV$A9l64Ac(~ z#vd@_X(^HDZ#|?j%RJ&X$v>}!>(Z>A?7!9N^1V;k?@MX}6ap%9UFp)zpxq*JcnO1q z=eDY7RzLsA7av}L5hT{@y>Ey%bNqfX&7wFnRh9~gzat9oK z*Q!Uey26s(bXp0X_Wn$n5z3_eRfm}H){e`NmS0nfB&tf!SXzmP7C2(1W_+dw#{Gjl zn3VpZgxQ&(3ZF75{xvU9w4D`NhYNcFQcr(!Kv75{HJBVZl33*tUMYmyCf!W4x(z1va97PX77{*EC9gFDqC0Xm0NRO) zn!UslKq&Rt1u5yZx(3KInv9i$f0)5`)X%;{${O*80&E-Qee%=X)^c36ztJSYxUBBa zTtlhb5y{AwvVgu?;=B%SnIF>>CNiCiMtH5Ouv@kQxIHztvB&@$sy;=?n<-h0$ha47 zIX2d)yNO!Yg86abZKA(cb*A7EDn#;fCg3I;MG-f7foCHSg)k^Zwop}q8vrzt@3N^? zq%Y6t!Eqnl?$vc}~W)kCAsCac4aED@JHgwU!_7iSZU7K1wXdO z%+0VpVIlBhJ!c*IuIxv1oz23D_Gu737VL*w6A;z`ELJqUwX5goRCQjf zN{~m#_hEpblXQFdo;UcgVvPE)(=N?qP7I3y&nb=u&-5e9|D+D{)i}x%D`%qHFiwCU z!3Z=FcL9;$c!;8qDjWC|vv*Rv?yPcHOIQ^|IG~SVbGp?#3cYqso^Dbpy-nOF_;m1I zEGMi8GYMaT#tmds5P#rgf_8mQd$ps6*!BKP_J#^0M*^Z#im}rITc0gFS{! 
zSuQucWed@tx)QtaddX#Q;Uq#3`gDtWxh%pLEqLJhH)ru06msfFljSU_C$3>RCU&y) z*s`aqL?6fFK0Qh%?g0+vZH}?V9r0JeQ*xka*bGbd+?&3jcd&Qv=aq&(y)BA;{J!@G zRAarw0_%N?hb2H2!4-L=t#)~?vLJnhsPtSZP`Jr+2&d+`v!fXtnaLac0I?;k`j(g# z`0j9X@_{3jO;e{!#yY`dzaM$(^#hAti!i9(&e)GCbHf)fXHP;w_V#}B%aAi2{4$zY zQQFWTk-d0srfq5l-~**(`$jzZb|6d}r9bi)2=)m{S^KoJUnEqE#}?H!%ys)e=fJEn zNc36x1K_p&#PB8&1hS#Y|sMTUD$y&ucBO-g}H{>Cb+w-((NBhhFI|J_xrACv}%d zp4)5Dz!7Qa0_>eX+GNRITJpnB*e5!+r{zzbt5a2&xT7a)>TtBO|2D+Y0{p2bH}=vP z%1Hbb1FtEFLHRlavzpaOv0sZh!!Hoe@HX}(P)n3>I;@EpO7!^gq;gJHJz0UsKl#sI zY(~ix73-bYdU5yP=Vv)izX4xaOpQc-@s(h&zMNrR^V2J1?a~nRTrB*f6ufXyx}tio z{rq`)-YrOcF*pjj6HPFXsT?7scQYCLme(p?6;>&4pJ_2m8c;b6BKLQdQ2Jxf7Wt8n zX1V-&{~tTs9{DLd&uN*hCN`k>s0wyKOoRHD&tWWKu5^sP;Y3ySG2(L}Pa0&&3Bu(+ zeQvhFVm;PFBzKlM?yas4mNUFVqf?D-@VpqRez~RmTM3hk5HV8{v8f}ILIX)HLVY2n z#G{ge(n8qPP~s>N(9CAiuk2G<{a}5wME164KO>F#bdliSO4!xdSbvS`Ta%nR$%=)e zSi@!^&l49)JOz-cC=(l4MGVP;H?plevsZd%l2{l@Ol|F`{bV4Lwu7l6yrnX*ergRG zEPQHFk|o>QWrti~G$&rTM_czB6`fxh%*8eD{jo? zWk3DQ3R=tWA5hc}7?zIi&W!Gt7lf?63lvGq4$U%tY!C?e)8g2Hrk@x|!6pku{ps`7 z3TH9d-g;i}=!sM3N+_|=E`7E_qKyw8epE4wPzn9$$Yx@5K?;H_zx19r$GJ7d#LwGA z*IbXdNxzkK`Y^x?K670z1JS${tMWCg3*g^pc?S&A2H4u%J`X)#;-3CK61^q>4-5dJ zLn=@Pi1N3)L6RpuFLmO`3A$%hRi*#adL zUi7M}u$blTb6XqzvIgrbVL2N=Bqh`2vbX>*=TnE0O5Y1$X{O9JY`NjX5}?{Fy_{6j zz(m-yGU7jskMrE)V%PY(EBSQ$ywZ9W{uDGU8{tgzTU9t59_3E(+4t7|?5-_^ouZvV z+DL?+&G_rE!i5rn=Mx5VHY>bucj-^>F}xAlxoTuH{^up##CfYfH@c^5HRjiV*i38( zb&;s*RhLh*M#HsN#u1lmN82s?3Et-9_Cl%ec(}+D(lcr~8 z!IpF-BNx8EXp0LdoMh<=8{9>!PerS=gF5)x5l7QngYOwGyLCQwAAjchG}k1f zl`W|}M!?}^*-FJXrJ3OwDq7yvxCILjxe7188Kqup!GP`?7r&S$1lIT=l5ZZ~E7en^ z34c+=(s@#=qMglP%t}}P?$F?AmooovTEQl!&DZi6Pwu%_@||VQJSj}W7&gvJJUxzR zmyh56-dL1b(UF-zO=wQp$g`bX3#IekJwd_~^iNc@Tr;chuHn(`{K~LhUtB=!*@(67 z>Fk-T3-P-@Y)M%dA`x!t6`X|vmG*1>_t#QeOo&cAN-uMYmONa3PY&lLtA1fK;>~+t zm;0g)N<5TafJ-x66Bk~>N<>{McSoMRu^or;$Xup8TY=v6{b{(&J7rd5cza0NTavKm z=cZ7xd!;am3~OEOW}Qz-Pn|=b`3z(cNUS>^atRm(7eyUHw6jxo`5eFB=4@Dc@d)ji%?y|0Swx1_=@Xb)e)WdN{ 
zr>jkAYgpauFUKEff&HaFKfeFTAs_MTl7veGw7Yfzc3=?D`z(7#=`T*UV}~{N+~J39lkJ9iH!M_8*@0l zG-T#-F*+$02V*3&nY_niK7M_VzFjkBOD9i#j^3LO%ytg@n-Z56Q{(bn5XncUQQ@IS z6X{&|?1v6{aOd~+4Wn#Z8mY*}#;^F`sY5-NRi_kp^n1_V`hZp@eC?n@xXj**QcmQ$ zo^n6MxNZk|-UPl)xa*O3j(!L7C7Wd!Kn~%GK=n8QeuPK3XDaqixJ7rAO=yqrCtuOs zn95V*7jTzEO|i>|ISx}z;Y;*PX%K@4`tJAkw>H8h$Y%>dDrdBQAjj{yXO_f#D>-f+ zHusDWw6UtfYGNr#->!|rzLWPKUW4VhlzxAGwXPbSICs)HoicRcwm)xw`HVAYrxbNi zy~yky%@AH_y({7NiiS!vV)IJxENDw;smaD-DQUuAUxCft&565|;!cnBS#!Wb19?nx zyGDoKE0!E%F>dbh46Y(lE- zAzV}`_h)x!wo#^4{V0=<@X|Hk9r<}>uGr;xYm})cInp&KnwHC8L$)!ZDfjbsm!Ye2 z{K4j`)D`yd^1wsXuh}fYxgrr?VPdxTX7XGl6Y^yrD&|Q`n!?yrj;k{H_xxbk!yx(T z{zmQ}!f|hmqW%}3pl0#b0t}nnv@d0>+UXyl^af=wfSm1~xg$Z(yGO~6a@j65zjchQE+>55%Ga6EHe*e9L?a6I~SL+s;( zn>8%>wR>&+Ge+WxrvKngMK_EE8_Uc{=E|uOC%bmuwxmMB18Nu|I$=hRyy7j4+yuK9!XUD)oi4>WDm|N1;*Bs%`kT;hwh(Ps*roG2_ z;!7j>OWaF7Le%F8x8ZxX!bMYT6&WvRp2csjxuMu)k#|1mjM&Dqc7O~|h@C_&Jo5Sd z*Z01ezz2YS$}Z<$jDxu1!$AY@JWcvGcBUs#YI}_H=0aUbx$k$c&1M)Oh%~e9P5%Pc z^YH*!ETWWsOOzF`wC}6yH97*|d29!_oH;BIAduH0HWbIbRa<#;Rskuy+fbMh(K~VO z#A^eXm%2-y*k@|w?J=>-(g8D(FqWxmSY_)d(-#TUBv(8MG_PfMCSTPlFTa~&&o|Yt zZA`p^S&?mvq#p-r5u)x<`_YA00`NX@&ju_XVC5kI*b#N^8XC|(X&8|5yYH5z9~zyy zIVAlyHHQ6!*l4!8AY%FK{wJMJsbCST1t0IjP7s4Og6|zc$ezvsPDd<3XA)19v?ZMU zq~qKl<6xqHq;6XUtL^Zn;*#Bw{dgPx-s}l68&tkc&+fmDy_8MSm$#=uU#@_F z`hMhH?>>F{84;_eNp0vz6dG!*OP0s_=xE;_-)|bA7lmD@ni);Uj<=>jTjS zfSWnq2#|3*wu8!30cIrlPGI<3qRxFiGTSC@p1sQPyRnvW1_{5rDGJ9ijz>@jl9cBl zF>rzz)P8=+heQ zfpvk4WJ`iA@#cXWGE$_D1aoSHg#txZFRo~moS@@U-c+0uDfe4;8=;C*YjEPL_AeBI zRkFg`dhGHoKgQ$)3lg|D2FL!4wtkoVzluR}R?L_oQ1x>!cIWExT|Es+(kB4gi z|37FWm%4;*mby)t5=uqcN{dFawvcU#D9V~GVoF*NHDljPbjud5eH#f4p_F}Jr^48> zFY|jH&2;p=npR7*4@4M!Vj-~C z`*o&(K?j|9cL9toElyV+;cf(lkP&~DBi)yO4v4MBBz6j^B;_p}*}1&7u!4@EU@kx+ zvvSjr_9rh^H*&$bA_nmL2C(HscUej^hjXeQYr^NRVCT>)Henede8bFo3`-!Rav(y< z=LV;V?ukx(0PZGrDlY*q$+r8b!c@|N9XtaH<`0@Ni=kRk3G<=JLWCHYoCPsSH22J& za&MpM-_G241r`gv{z?u^kM8SdtnwVu5EV+X^a!)3V5adPAd8sd^Po7;*>f4tiwnUw 
zlWp`k2!kX}+0j>=h#5A{)|F?Ic4shPD-I-xG6*U`=FxkHzbJ$BUQENjVM7wsb;X-t zaA4*fs&Pcalqfdg=dONxS-~o~uQ|@kzf<+#KA^S$u16FGZs-cNOgy&3S8x$wlpT?qllEDf0k1FaM zYw_xVIt@g_WLW0?yl7CEbx3RqPIvAO!XgxzvdaTv4aQ7w;$O zpk)z#_QQgs0+QDN-6+ge$Z^5IsCyQ5jk6D|*pG6_kiqdTmT>PvDbxoC#094R;TwjW zD60qLi3;VHo$(Tw+;gb>sQ)F)t0R2}w16P?;iVA%4^ZG-%%b0U?bBea{+aR7ee2ye z=^)a4;brQm0|>Qm{9$bUyk`aAqib@;7ll4I8~An5#nrd(H+SeoKb$D{3js((O2MA~ zCYGxuOW}z~7D5Ch876ad9S0T>kcqHCTD3D=7NF8-OR=@kxa3JR$L>Lkp+fs2y%<^^P+ znG~45IsOiKmcNB9WM`{@a)s@KO0RNQHQoZWP+xPIS6XA4unk;$u7Y$Kq8>RYPQe#c z3n^tpC5W&tm>+cBY$(P-=_`GiFPv%u-mI?R-WPZs?AA1-LYy?;D9zb=iI@TSTQ@XS z2u}pkcys!Q3%@vd^*Pq%&Q}JAf_ktD2U32Vsh{c(Lr(p09lfVsZUxU8=9Jp%`o`p7 z%yWR1yHm`splZ5TJdR-Zx0XB^!*FZ+DOj>I7B@>u{spS)7$UO+nIz~CegNd%%XYYO*!^c;Q6>%YOfSI5$piz zpqT3d(Wbt}{tkF~q%@9U0-|p(kEwfdPmEc%6CZ)yPSJ^t4bcGXhOLojMaZS^g-fp+ zzKeZ)sawE$y@$D*sp|pCu1p8=9=YPev;r;pw>d!9C``m=Ug6W};Ajcm;Hkmdfe&tlC|iIEneBrY46E$)QBV6(Wf+MA zPm6lUAdF{ZiQVNoBk)oJ!VGn=5B7_#_ z<=6FIcQef;_2~=?mX6OJX`b(zib{@>pC4(gdGj#0Ng*XSU?AtC=D1qF_|bSWd%}H& zso1%OzL=eDY?wD@tdy5gY$U%$?9xjog7)WSH>vE^Tk*Gc$0}VF zhH)2+gK%78bT$6%L1Pw$J;!;G!)H-x!T(iMPjj3H8f7_cD6awq)IqU*WMD0KWk=?4 zK(>j8)h>G9+L6NV10BOs?)2)y*4gCj>>8uiCB1EBeYrV>X>BisE}a2tHUn6V?#pb^ z-x&^s7T{vi^L_a0qRx})1_wD1g~uu!go=a*U|!izZ}W!zlM!7Aj<`%&hRQI5FblhN z(`6`-j;_8dsa~Av`FcLpV`iyiq#)CEpk8FLKR0`NT4Zh_b?J*#ZSu8+nFi|G%Qh!B zZuW<;=fn{QLp@FVr%u6@0|1on9c8RNk^n9OCx#k}i|~&yZsTf1TF0*!5)hP8K63Qw z@nleKo9Voaqj{R;jX>{;VdeIM#hSWD)8n0mnRc)5=-suRGgXaQ${kAO4?bt2KFM*%s_F^)P0_xQg;35!0gk_hZa~0=UeXjb zY((<8M3UL=j>oI;#~`*dNIr*ViPn=JXssTTs-CVi&$+%&2K2$s zxq96f#h-7|a%^Ir9*QXGy|uH_6>P+{HB{sGv#bdXFB#wsUk8D=8a4|2>@rz0B+0dg zyW?-5q=`!jOb>oeGRwAyRFFVccz&9RdiuxZh~D~<4(Ey53*~=fMkl<|ov+XL4u^EM zRE{i_Nso1Q=0CZDa;s`PRmf5jNwA?-7lT^-CthxkbRnac*c zr1TD^p?(~w!*}V46tZ1?+v>w92U1&Eka882Z7M#4U=R`V->$nUHvtZ(3f;X4Mm2;8 zVNoxx&}}ednqJS{**U+goy)=rOi~VJpu4MTP=uy1UEJw1JsUt9vT3X&%gD!= zy>xoe3$_Ba+-k%T3(iyKmbOD)Uzy^1F9{;Bz5Jw776ENY3UKo6#Xd(8%BsM0wQK|D zg@&J5{n{Z#8{Cq3@+x?Ldqq&Sn3mM5V1>d7It?T{2ANCwwx^Ll0ukEC$I!|R= 
zS#-ZlvbK>3xNziyDfrVLx}A@!Iq%OVO=Pg^@D5ec`*dRnD{PfdZ!5b-L?tEiZZd!< zwPGhoard5q;CkFXa6Kz>_%{oZdt&>bbrwL7mWy&85JR5ANjB8jr4}xF#VmEt%jZiK zX9jiNbQ^5TcD9b5?dpnkN~#zytflp3cXS+@hDi%l_{U^y=GF1D#C^Dhf-hKIpxO5A zGt&wu%CjDCoE4^^=Smca+Xd$qVu!%eyUb717orvlvkw^0cugxS%ug+O4C`qQ zb9c~3heqhL@~+bj7O3Cj$$e~=?DU|bUO_C?;_gy#MU!68V?LsXJ`cKhH|YoYZECxPe=a8HKB z`j&%A%HnRGdGlZ8>Gr)9KV0In(%l-#kk2#~iWs@&Bvw4nKTisGY6{ z2>_&C1C;Y1j+l?Oagt3LsX=6W&pVaQ&cvwI2o16^kC(%~=#r{xhs}@`yi#+}iCADP z4{?iNTt+6Fg10YjDF?I{95pc0WiK#O2G^kTHO<+ErA)bXj&iDVw(r~{@uL%S_vLFB zYYZYRmKI2b!!7zQ*Fsd&9zXkmLFzG`wOzTPy)qCa+m17hzynA%VP+peeg^EK8=8!B z-%hax?rC0~EKS*aT`J9!cmr$(a=Nh)rb&aGOkQ~YMLXgiO-!j4QLCh07B`+PHAj~e@IdB*~ zf&*7BP#%|Sxy`RtE*$&XQLQ}pr8lGqtg;Nw_>b~M|2*(!)avX5HAUl%=GJst=S=^2 zyl%2grp%a@4Fp!ywnh`9|dK6$>DTf&4Vx{$`G!`WwA z|4z7nvoLV)3ji1Ru6KI0$vhkc)8);W)E%=|f5gE~VItdrO}lWCF@3j&hD8>>w@g*E z7SpLCeItdLV{e#TYru8%H{Dz;jiP!`ldEMqJ7>Qy0^6R;g3;%{f9nM}c|g6gq=JnA zq$+xtewJLiKfktCRR6U7OG^Pn?wNjD;Rnmixy?fyj#<`!5@MQ9r)#0X&z=67b?6YIkR*n1Ga&>uCM+Aaadr}lybtIfRH5M;xN zyp@neTjqaSWEtdc{d^%Of3Hi0C`(KNOPz_?B`ay9pBj1f74Xe@?~Jk>-+!__<$4wDY-^igx?5OfoWQ zi0SjNtx6FklHwtQ2`q&CAM(jlAs(Az6RZ<#c^@DG|LNQ3U({Rf-r<^T4U_B=wvchD z?Y9TDsEj>nJy{;VD`NlA(|KS7axqr5H|2k!=+Zzkf6;Vv6hExn7Km19lS^>BRh+fpTWj=T4oLqCCXF05x>Vel@}cREVO~l4UJN zAd19c`$ye-E!V6+4*L1g@o8=WGE``x%FR>QTUlO{Xx`m3;g4npT z0wOZu+Gb2hL=}LW>6>fHco#9Z`gysb;#UyfSJzDyX)DTb?EO@$L7xw>@fcR7wNqN> z=Pif@XLfJZ9F~u1t8|8lp~oiLRgyl@Ib$`lFr2DDrQ7W*0Ldd(jZ?e02-EuVTP&^f{OYgN=s+*wK z=f@0FNnT#=;5u>kMb?M>TbG5OyTk2kqh?G^@k|7@gDzD(w{X7r#w*7JrxYwCy632oX8}jC`n-dG{jZ-*)sE-OeltS`a+j>im3kI`3hpm`fvXZH`~v* zheeYXfjcLd3SmzoR$vHSQa^o4m44|U#W%AQl#S_P@9vDhCK?7++$c`bhX&u*w+&hn;|FG0?>%!Z za0LFVtck~oIkoRQB%KEm%DC!h+ES#OzK24Y*{BqRP* zFBx14;Wi5Kytl{m@(V9*fnX){P}@*X-AAbd$2Xyc=3Ij_Pug~;%2`su5C$0SYoj@G z`(pgR3Fdm}?X|PfaIv(Zwbw)#D7$_=uMt(LJo9bBt#~5QbNG|VlJ_U7B{lq*h{x1< zkz3P?0W}M)0X9*9PP^h;ToIRjO!K!R{5YiDpWT0%8DX|tOTt2hkH}nt(iNNXf$v`U-I@5*Ku)9<_)|kAESQCmjz@39~7;INr03Iq^xy`lP 
zK!kQ|PZ?=`tRrfAsV`15+J-}XA%ew^S#6T`4qyHC6nMrRYFNAu_9=d-O*i!Wg<%D< zmXT(9Q{X2EzE6ZtaQRl}f^LTFRAm)am7-W;FDFB2fFv|7`e2s^Gm2d%Y&`pOLo6+} zY>E*HETmIO2CejITD1dxs@K2*Jp9asVRiTrlu75ZKppb27PWMMUcmb3@+YRBEZ4{n z8mIiRPVTC|0c1X(q1CRUdP~Q>0<}g-VQU2pU~wQSO-WlIiB*r%aqj1e>C>_{g`*IGMB<)Bvf)`7dXCU`Lm56cu*bb# zi%iN3HY=07t?pxc zM0NrYigbs?&en)!7X@RA?Z2IoTsTt$ zbY8NgH^kgZ&!&qg7dkF6th^M)^{LYVC)wRTGu6w>|GyEqg=hl$^-raZE zQm|P+n6e~ajp&{Q|o6(t)G;7&i9+V zYxL;8vE*yw?lD*<6{9qnXtGqx3vgmm>WnI`=09oIdF&4D_GDT3??&6%8{S5pDtWs1iW z(&FyId3YF*}((%}uhWD#zG5#RkP;}=IiCr zj*|89fv(4S;VV?CAT-mRWH=E-GdWDUgVMg5obSj}($uOpev_CQqH{>pvb5K#lE_Ca zc-$9Zr+(*ResJHG)9huI!_t7B0}{+$la64$iAMx9mCql38o7Z$vhS@6nrzt&9C&ED z_a~r)R82bKa=?rIh11@W);shSufM3 ziw|{*?V)YBTu)am?eUSZWO&Oh4sjq!Ld2e}|BRe=l@CchAivo|kur$O`j&9#iQ{f( zF)Jx3|5>iRgZm^%8o1}LX*?M|xSck^fxpVW~kG6UzXIkR} zgIUpbCjy0uTI4oXKOZa8sk}z)ticmswR46!HIf#LX|%02=?6GVp0qofyz8>j(*9i` zxAeGbv9?;611G?)=R(+`vwPP~Uc0PyXz`IwrJs%w4C}9#D)fuK^FWJ@mFUYFv)2(9 zQ=LQwp2e-^b%+n#B>(QHP_;@zP=feZ*=OSH!WEO8#>LeKQmN`Y;ZvV4G~K-}h1P$D zUMXx-j5*jEs4>0Ef|cNAln9+b4|;Bfvc6buw?SzHLbIO&jceh!y`6@VXMbB=F>;ew zkFoL*!|&U_{u@8q9Jc%I*gAhOzrn|D!$t+$;{(%4W;-Z=B*UxlhS`t%u9fLY)3_=K zivYDzh6Br)$?(IQs^5;)izndizZ1X55m?J>6p!Q9SNE%wozyC|rtxVdHF80vC7`ar z+DJXU31B;#pvhhrkQFhYV7_KGZ^T*fDbP&;A$DDA6bot!Lsv!MQtG+jY(k#VlL}mN z9>Tg9VNe3~mvLphxH@%V<$|XA$J&lLSF&S{u#{=E%aJqkEbg~AbNU}$udQCvcSs>P zxp|TA)mRbou!4mwV`W^Mf-Ur(KO_+Y?R`q!nQIXeUWQVJX8!PXnQGWf8FW!l6VpQ1 z+|J&|$EMpl*=F>&Rowyghuh0|qx9atooTIXZ@1Y5CQmwzS68}>t4gdvMqqw25#l!< zXw(TI7NC_+^)xF89=}3US!BT{C?5S7SUsc5Q;|~lguO3nz`n?MZ^GMiYk0D``G;wr zAF+Mbg4iORXx1NX`q7=3F-a^j&5te_yLL7V*zxJv!GMf#v$K0v^Uz+qKZ_|jsNSoD z4(jjCD8S&a2}T``#-Q=6voH2g+SX7DXFa8lwfLSX^J;1G7guuH7uzGBye&(*e7X4l z6bi;(KX)1?7SwJb;VkDrc&vMOFZH#ptzx-5qMlj}scd}ZBnZ&Yf`fEcxKKxfOjEM) z!PT0*H^XtQd+fHjaH#qpKUJ-fAEo!WfbpKqHgacvc*hmr62B5?D{O z*yT0Bb-#>fG0zUV%OC&;GYt7J1TV+5Iy}8f{k$2u2I_nG_+ODf< zUoBRZe_!>0?+#ogR!-x-eD_puD4dM=xE!pGDtperuFL(|CXzNyh3!JZp{X2jIQ5d0NLj+^r; 
zupR!b5mmRBLaYM*KSk8<8FXFHf`u2}KRNy;BRBu#%&3*q5%&X);Q@o2)!S%d50B@P z_hI7&mFSZJ9*ggK5}Mm~zk+mi!L@#x)%x`$&?S|9se{F%Egb^ZmxOjl&ghRFlL-bn zvXL=1WjXv?NAEw?*)DcLeT=(F^|1+{TiEtS(%0QdIngD3Lw&U3c5?^q11zHgwCo)0 zY_pxk0cDfGeUB<4v|mRgTzQr&9tGAJ`<(zJpSXLU`kTDA&K^syD*O5Bib;%ojR`Bn z3j8i3JP8jN`av+gVU~84eo?v6GV_qClH$$9E3RcvTYP=E`5S~Y9cmkCi*5b{bfhM^@hGn(2OEKYXMLd0&?2$h5y-=b`ElE@fkdSu@>>yH4-1 z_9&8YR=vFy@k{n7FlHtE;TLz&hXL7z;0X$5|Qg@kIW&F47QrV8a2 zh2@*(#ChkIL%fFAvH}yK+WpTu_n7lTBU!@}BUe&|_zBC*3mKaw><`@-p>of)XglC) zln%9TA0FtIrwIn1R2Z`IfoeD*&9+YvPv55OWb+k2Ue;66=evV%Ivi);{q^^yNYzKp zM5*ADf|Cxt2dRgl&i4gvi&=++?rjua+)ldxO zAPToVP<4z#J7Islgc~6ao&&oJZMNVMfA(%LpYe-mU$#TvQ9E>sp=ZpSz2&qaL|UN{ zG$kF?6lQA*^S*#A;0YKRQV9rW3_}W-H_NNdp^PYfd=4$_GkrYcCc!Sl7*fXSzL2a; zs;~J!f4t)+{MeDnWyX*3No_edE*CnjK|+6Dgv$OQrgdZ7m-Yvz2Br@W2sW1KKbf=g zi(9t4Z;XoVzE+wKZ#XQ()PBYj0;+@Pn(hw5nm#l85g7Lfc4&{1MgvrO4=X0?s10*6 zhh6?V`nIJoWwkwGyEg^WCYMv3*2PBa!eGB%WZk@T=*s`9hneTCoOwzEuz|py36E%n ztfc4JV(2XkJ&c-^bba|%|K|gV@XE&SnMTd!Ff^Oy#dXu1E5K(^|1hfChtobo+66~^;m-e6oO}#)se8Owx5*px+6ZDut zQ-~;84FvWJvxLH&WDZJq#FW;uEVf6HIA#v+=L}NN8TjR*^!r%1GyZc8^UjaIwa}(^ z0ZJR17K5-v=-xr%`p+=x*~}+xIZ?SHhoYU}KU*A`H$zuhp*ICu@<2i?hc}SF{G)x# z&yX=+mgjgS4P5I!W`xA}Iew@eSTD2O--!1!%!e^9z!+DUooI~5$j>470$BT<&M>k- zA8s!>yQTS=clUKMyaht~Lw0j0|B)w064&l8y18+me^+pGuPYap1 z<<^s@I+?33TQ> zz}&;f8%hIJ_D#-I+_J9{`G3QU4?SnSC|ypD9j@0FI&~0J_R7~m#Zny{EIL# z@^c3q;X4)C2Yk(MIZ7xU`>&y%EDyEb9M1YlDAbn>$wWJZ|GCH-`;JaO?t2c*YkJ&S zo@D~W6O7J=h5UMvxr`bhCfzcw{eZP8_BRU?uiK?N!MC-ZA6~24;u5QyR|tlGUDK2%P7;-j;xXI<*1x)|?Q@f0hv8qZ zS6A=R4p8oce0FhrY-#n-c3Wi|*@2Se*DI*niW?#DM{hsIa)z8RKm9= zZYay$uv)7%qTgQM?LXbpnMTg`qeK!sC3j&OCt055`Iq9VuIt19IUDJX{J^be8`XeBf+1BAQr^AYEOGVqwctyQ(YT zzm~-#Q}ykRG;*!#7SRDX^J)cWy1dhdM~Lr%fL(Yai%rjR5@E^)9TFi`EnEKMeR%uu zr}?BD(t+aT96Dz;@2y|5;OxNxq)r;j{w|~DhPlo<4Z;6Q=_sV^_bZ+ zpxAExy2OmMVH+!M7MDl=L&`%7%)eL@|IT!V({=-J=6~{%i`-Vue(0e-alU0c5}Krm zs{v^|I%=M4*wDdz+mF9)PdDd%!Z_nP&PKD6Z?uVpJ-Y6B9*B{^*j9o0G@1L#@-kOp 
z;b0F3VpZC3swFo|6La@7=3_HQsy{@D&1!-MoNOwgG`*TF@j{yDS;(VF+k)b;)#AadD5e|YE*c_m2Rm)MtiLs#% zb26}x-`3S`;ES#k?@O6fWO>dsaLXQb4)5zfsA(VE*WSRVq9~HVp{-Ca%=+4D1v6f= z+~FO1GLP}aWP`cA-m|*#iO&Q-)r==~eUPD8?n1uwdh%1A|I}~7ZyTTEds0Y-*LNNs za}Iwhf66{hsrcIbBL3(xab)FI+<325*wENl{6mcwee};=^dbGRrAy?27P4@A#X7I) z$=u+_@u56=V>cg(Qq^d9n@Lj4BUd7KX7*tiyJ+Rovtl>rWni*5K9BXM3$-r2ntEUZ zmSZ2~GSOsEvU{DPSgxA=ev~_V5M9Pc%|FRzkj|weL0r&r<*4XYqFh@x=tZhBH%3VB z8&z|>uit#XQzdP~Q)I?sCC!cxR!zy`lsx;uV7N6rF2lL1N z)0HAlFw5galU=o4wnhQhl@k?vEFS8-KGKLw0UHe3ix*j8oXP(Cj>?`IA8C=7lM|`P zDo#?oL^_6biLbz%$MFGjsTXE@UAw|8;O{SUf&{LGkL!(%X{(fFKvS?|V9!oUYG$_d z)kgi*;?|&Oa=a^-V>-Bg$`^CJA3K!o#id>FnX~;WvFd@&X#K5o^Z?W*zA9MV)wM!X z4)3V`P)%Ae85SCfk7Hl{mFqceA8yAGEs8_Y8gbfA8x}d{rmIYvYiY~eu)tZPZuCtD zdicz#LGiLbqa#f1qWa8HD0CFEPAkOXOao{8JSGIUbk>#QlQW!(5EA}0;_~Fs10BKW zZHZd7)!jbvuO)`nxA>Nwl=&((eaF&knZV<&81fkqLxB-Hwb*gaj^uul8T zYma;8`rg=%vqCbvdw394<_74@MQ{6G0f<|p;=IK$%=jD7Ez zPa|9p#LJ9-e-;`#>9Efrh7@j))pv04%dw(zv1m&!R{Vt(lkY;%$y3~Kfte{aansn# z|12~M*qQF5V`!#a%3JHYG3wn;g)uFM4ag@kM(;fHWoxeq#pj%~4_hoa>G@YQDKcQ$ zi*(uuCCJr&3!Bq&t~R;({*c_0v+g}dTv!Q>D}Xy;`$x|9<3xR8)_9<)bBq}rHPhMf zG-Lu}!n!HnPgj9zb)#n+rM+n?Ti)Fh;fh=#M1p|%G)V6=ICKjV4-L$4>U)bvTXJ8V z0KKo;sbLp>Hn!&$(Zr)?2b~vPLU$)OVzAQasRAo}lb-Q{wpVoZHcf?Os%nAFqe?Bg zEIkr&(~I%D?wMb$h>SX~pc?^Tt5TwPnFK}ZY=vNS1xCgW^0+1|qz|FTqparNn9y_= zKpr@wSBk5UF1Ho-4I(P_F-!31;By*;S&`9|B1RZV=hBU|*!1E`qJLNl6?5LS)nxg> z3oDElIT3)@lT!P_F& zR=V5QcHrB09IWat#|o1qh=yqV5OsM|f{QB%{iH=L&YIMQ$psMok|u}{hF7>(L^KYi;R{?1si2m&eN!!tmd>Ho z<`0aCn}*$nIDgUKM0KZK2(oU5c}4(U3dm8kG*#7@k#uL1NfEsRUuH_YKRKO^i-tiI zAaERQ^j!VV7SBw{hQC_>h2 - -{% endblock %} From 3f36d108e2da67dd64ac8f25738d17853d9c7622 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 9 Jul 2024 11:14:45 +0200 Subject: [PATCH 272/737] handle mypy warnings --- tests/modules/update.py | 32 +++++++++++++++++++------------- 1 file changed, 19 insertions(+), 13 deletions(-) diff --git a/tests/modules/update.py b/tests/modules/update.py index 0fddcc8ba..b56fc0df1 100644 --- 
a/tests/modules/update.py +++ b/tests/modules/update.py @@ -90,9 +90,9 @@ def test_install_at_hash_and_update_limit_output(self): ) # Copy the module files and check that they are affected by the update - tmpdir = tempfile.mkdtemp() - trimgalore_tmpdir = os.path.join(tmpdir, "trimgalore") - trimgalore_path = os.path.join(self.pipeline_dir, "modules", GITLAB_REPO, "trimgalore") + tmpdir = Path(tempfile.mkdtemp()) + trimgalore_tmpdir = tmpdir / "trimgalore" + trimgalore_path = Path(self.pipeline_dir, "modules", GITLAB_REPO, "trimgalore") shutil.copytree(trimgalore_path, trimgalore_tmpdir) assert update_obj.update("trimgalore") is True @@ -112,22 +112,28 @@ def test_install_at_hash_and_update_limit_output(self): # Check for various scenarios for line in log_lines: - if re.match(r"'.+' is unchanged", line): - # Unchanged files should be reported for both .nf and non-.nf files - assert True - elif re.match(r"'.+' was created", line): - # Created files should be reported for both .nf and non-.nf files - assert True - elif re.match(r"'.+' was removed", line): - # Removed files should be reported for both .nf and non-.nf files + if ( + re.match(r"'.+' is unchanged", line) + or re.match(r"'.+' was created", line) + or re.match(r"'.+' was removed", line) + ): + # Unchanged, created, and removed files should be reported for both .nf and non-.nf files assert True elif re.match(r"Changes in '.+' but not shown", line): # Changes not shown should only be for non-.nf files - file_path = re.search(r"'(.+)'", line).group(1) + match = re.search(r"'(.+)'", line) + if match: + file_path = match.group(1) + else: + raise AssertionError("Changes not shown message did not contain a file path") assert Path(file_path).suffix != ".nf", f"Changes in .nf file were not shown: {line}" elif re.match(r"Changes in '.+':$", line): # Changes shown should only be for .nf files - file_path = re.search(r"'(.+)'", line).group(1) + match = re.search(r"'(.+)'", line) + if match: + file_path = match.group(1) 
+ else: + raise AssertionError("Changes shown message did not contain a file path") assert Path(file_path).suffix == ".nf", f"Changes in non-.nf file were shown: {line}" # Clean up From 18eefb7b05e066f787a99bd74d5f845af0fb53d0 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 9 Jul 2024 11:27:05 +0200 Subject: [PATCH 273/737] modernize code --- nf_core/modules/modules_json.py | 5 ++- tests/modules/update.py | 67 ++++++++++++++++++--------------- 2 files changed, 40 insertions(+), 32 deletions(-) diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index b0a4fa661..42c633ed2 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -862,7 +862,7 @@ def module_present(self, module_name, repo_url, install_dir): install_dir, {} ) - def get_modules_json(self): + def get_modules_json(self) -> dict: """ Returns a copy of the loaded modules.json @@ -871,7 +871,8 @@ def get_modules_json(self): """ if self.modules_json is None: self.load() - return copy.deepcopy(self.modules_json) + + return copy.deepcopy(self.modules_json) # type: ignore def get_component_version(self, component_type, component_name, repo_url, install_dir): """ diff --git a/tests/modules/update.py b/tests/modules/update.py index b56fc0df1..72766779e 100644 --- a/tests/modules/update.py +++ b/tests/modules/update.py @@ -1,7 +1,6 @@ import filecmp import io import logging -import os import re import shutil import tempfile @@ -30,15 +29,21 @@ ) +def cmp_module(dir1: Path, dir2: Path) -> bool: + """Compare two versions of the same module""" + files = ["main.nf", "meta.yml"] + return all(filecmp.cmp(dir1 / f, dir2 / f, shallow=False) for f in files) + + def test_install_and_update(self): """Installs a module in the pipeline and updates it (no change)""" self.mods_install.install("trimgalore") update_obj = ModuleUpdate(self.pipeline_dir, show_diff=False) # Copy the module files and check that they are unaffected by the update - tmpdir = tempfile.mkdtemp() - 
trimgalore_tmpdir = os.path.join(tmpdir, "trimgalore") - trimgalore_path = os.path.join(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "trimgalore") + tmpdir = Path(tempfile.mkdtemp()) + trimgalore_tmpdir = tmpdir / "trimgalore" + trimgalore_path = Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "trimgalore") shutil.copytree(trimgalore_path, trimgalore_tmpdir) assert update_obj.update("trimgalore") is True @@ -53,9 +58,9 @@ def test_install_at_hash_and_update(self): ) # Copy the module files and check that they are affected by the update - tmpdir = tempfile.mkdtemp() - trimgalore_tmpdir = os.path.join(tmpdir, "trimgalore") - trimgalore_path = os.path.join(self.pipeline_dir, "modules", GITLAB_REPO, "trimgalore") + tmpdir = Path(tempfile.mkdtemp()) + trimgalore_tmpdir = tmpdir / "trimgalore" + trimgalore_path = Path(self.pipeline_dir, "modules", GITLAB_REPO, "trimgalore") shutil.copytree(trimgalore_path, trimgalore_tmpdir) assert update_obj.update("trimgalore") is True @@ -144,7 +149,7 @@ def test_install_at_hash_and_update_limit_output(self): def test_install_at_hash_and_update_and_save_diff_to_file(self): """Installs an old version of a module in the pipeline and updates it""" self.mods_install_old.install("trimgalore") - patch_path = os.path.join(self.pipeline_dir, "trimgalore.patch") + patch_path = Path(self.pipeline_dir, "trimgalore.patch") update_obj = ModuleUpdate( self.pipeline_dir, save_diff_fn=patch_path, @@ -154,9 +159,9 @@ def test_install_at_hash_and_update_and_save_diff_to_file(self): ) # Copy the module files and check that they are affected by the update - tmpdir = tempfile.mkdtemp() - trimgalore_tmpdir = os.path.join(tmpdir, "trimgalore") - trimgalore_path = os.path.join(self.pipeline_dir, "modules", GITLAB_REPO, "trimgalore") + tmpdir = Path(tempfile.mkdtemp()) + trimgalore_tmpdir = tmpdir / "trimgalore" + trimgalore_path = Path(self.pipeline_dir, "modules", GITLAB_REPO, "trimgalore") shutil.copytree(trimgalore_path, trimgalore_tmpdir) 
assert update_obj.update("trimgalore") is True @@ -168,7 +173,7 @@ def test_install_at_hash_and_update_and_save_diff_to_file(self): def test_install_at_hash_and_update_and_save_diff_to_file_limit_output(self): """Installs an old version of a module in the pipeline and updates it""" self.mods_install_old.install("trimgalore") - patch_path = os.path.join(self.pipeline_dir, "trimgalore.patch") + patch_path = Path(self.pipeline_dir, "trimgalore.patch") update_obj = ModuleUpdate( self.pipeline_dir, save_diff_fn=patch_path, @@ -179,16 +184,16 @@ def test_install_at_hash_and_update_and_save_diff_to_file_limit_output(self): ) # Copy the module files and check that they are affected by the update - tmpdir = tempfile.mkdtemp() - trimgalore_tmpdir = os.path.join(tmpdir, "trimgalore") - trimgalore_path = os.path.join(self.pipeline_dir, "modules", GITLAB_REPO, "trimgalore") + tmpdir = Path(tempfile.mkdtemp()) + trimgalore_tmpdir = tmpdir / "trimgalore" + trimgalore_path = Path(self.pipeline_dir, "modules", GITLAB_REPO, "trimgalore") shutil.copytree(trimgalore_path, trimgalore_tmpdir) assert update_obj.update("trimgalore") is True assert cmp_module(trimgalore_tmpdir, trimgalore_path) is True # Check that the patch file was created - assert os.path.exists(patch_path), f"Patch file was not created at {patch_path}" + assert patch_path.exists(), f"Patch file was not created at {patch_path}" # Read the contents of the patch file with open(patch_path) as f: @@ -202,11 +207,19 @@ def test_install_at_hash_and_update_and_save_diff_to_file_limit_output(self): assert True elif re.match(r"Changes in '.+' but not shown", line): # Changes not shown should only be for non-.nf files - file_path = re.search(r"'(.+)'", line).group(1) + match = re.search(r"'(.+)'", line) + if match: + file_path = match.group(1) + else: + raise AssertionError("Changes not shown message did not contain a file path.") assert Path(file_path).suffix != ".nf", f"Changes in .nf file were not shown: {line}" elif 
re.match("diff --git", line): # Diff should only be shown for .nf files - file_path = re.search(r"b/(.+)$", line).group(1) + match = re.search(r"'(.+)'", line) + if match: + file_path = match.group(1) + else: + raise AssertionError("Changes shown message did not contain a file path.") assert Path(file_path).suffix == ".nf", f"Diff shown for non-.nf file: {line}" @@ -235,7 +248,7 @@ def test_update_with_config_fixed_version(self): update_config = {GITLAB_URL: {GITLAB_REPO: {"trimgalore": OLD_TRIMGALORE_SHA}}} config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) tools_config["update"] = update_config - with open(os.path.join(self.pipeline_dir, config_fn), "w") as f: + with open(Path(self.pipeline_dir, config_fn), "w") as f: yaml.dump(tools_config, f) # Update all modules in the pipeline @@ -260,7 +273,7 @@ def test_update_with_config_dont_update(self): update_config = {GITLAB_URL: {GITLAB_REPO: {"trimgalore": False}}} config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) tools_config["update"] = update_config - with open(os.path.join(self.pipeline_dir, config_fn), "w") as f: + with open(Path(self.pipeline_dir, config_fn), "w") as f: yaml.dump(tools_config, f) # Update all modules in the pipeline @@ -289,7 +302,7 @@ def test_update_with_config_fix_all(self): update_config = {GITLAB_URL: OLD_TRIMGALORE_SHA} config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) tools_config["update"] = update_config - with open(os.path.join(self.pipeline_dir, config_fn), "w") as f: + with open(Path(self.pipeline_dir, config_fn), "w") as f: yaml.dump(tools_config, f) # Update all modules in the pipeline @@ -313,7 +326,7 @@ def test_update_with_config_no_updates(self): update_config = {GITLAB_URL: False} config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) tools_config["update"] = update_config - with open(os.path.join(self.pipeline_dir, config_fn), "w") as f: + with open(Path(self.pipeline_dir, 
config_fn), "w") as f: yaml.dump(tools_config, f) # Update all modules in the pipeline @@ -428,7 +441,7 @@ def test_update_only_show_differences(self, mock_prompt): ) update_old.update() - tmpdir = tempfile.mkdtemp() + tmpdir = Path(tempfile.mkdtemp()) shutil.rmtree(tmpdir) shutil.copytree(Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME), tmpdir) @@ -476,7 +489,7 @@ def test_update_only_show_differences_when_patch(self, mock_prompt): patch_obj = ModulePatch(self.pipeline_dir) patch_obj.patch("fastqc") # Check that a patch file with the correct name has been created - assert "fastqc.diff" in set(os.listdir(module_path)) + assert "fastqc.diff" in set(Path(module_path).iterdir()) # Update all modules assert update_obj.update() is True @@ -491,12 +504,6 @@ def test_update_only_show_differences_when_patch(self, mock_prompt): assert correct_git_sha != current_git_sha -def cmp_module(dir1, dir2): - """Compare two versions of the same module""" - files = ["main.nf", "meta.yml"] - return all(filecmp.cmp(os.path.join(dir1, f), os.path.join(dir2, f), shallow=False) for f in files) - - def test_update_module_with_extra_config_file(self): """Try updating a module with a config file""" # Install the module From 9f4323a610cee3b968b2efc7fc18f8c332b3afe4 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Tue, 9 Jul 2024 09:28:42 +0000 Subject: [PATCH 274/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 58e9a36bf..23fcb8336 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -45,6 +45,7 @@ - Update gitpod/workspace-base Docker digest to 0f38224 ([#3048](https://github.com/nf-core/tools/pull/3048)) - update output_dir for api docs to new website structure ([#3051](https://github.com/nf-core/tools/pull/3051)) - Update pre-commit hook astral-sh/ruff-pre-commit to v0.5.1 ([#3052](https://github.com/nf-core/tools/pull/3052)) +- update api docs to new structure 
([#3054](https://github.com/nf-core/tools/pull/3054)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From 96b50f4739209a4ccb817b5f087e383533a5b4b0 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 9 Jul 2024 11:31:56 +0200 Subject: [PATCH 275/737] template: Use filename in code block for `params.yml` --- nf_core/pipeline-template/docs/usage.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/pipeline-template/docs/usage.md b/nf_core/pipeline-template/docs/usage.md index 6645e0f6c..ca64377d2 100644 --- a/nf_core/pipeline-template/docs/usage.md +++ b/nf_core/pipeline-template/docs/usage.md @@ -89,9 +89,9 @@ The above pipeline run specified with a params file in yaml format: nextflow run {{ name }} -profile docker -params-file params.yaml ``` -with `params.yaml` containing: +with: -```yaml +```yaml title="params.yaml" input: './samplesheet.csv' outdir: './results/' genome: 'GRCh37' From c22888aea2329abf9a658e8a11a6c3514ad72fd0 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Tue, 9 Jul 2024 09:33:11 +0000 Subject: [PATCH 276/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 58e9a36bf..b0b706df1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,7 @@ - Change paths to test data ([#2985](https://github.com/nf-core/tools/pull/2985)) - Run awsfulltest on PRs to `master` with two PR approvals ([#3042](https://github.com/nf-core/tools/pull/3042)) - Remove deprecated syntax ([#3046](https://github.com/nf-core/tools/pull/3046)) +- Use filename in code block for `params.yml` ([#3055](https://github.com/nf-core/tools/pull/3055)) ### Linting From 2525905655a922bdc0cde8af31062c3354c4cad1 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 9 Jul 2024 11:39:30 +0200 Subject: [PATCH 277/737] fix tests --- tests/modules/update.py | 2 +- tests/subworkflows/update.py | 2 +- 2 files changed, 2 insertions(+), 2 
deletions(-) diff --git a/tests/modules/update.py b/tests/modules/update.py index 72766779e..06ae3a294 100644 --- a/tests/modules/update.py +++ b/tests/modules/update.py @@ -489,7 +489,7 @@ def test_update_only_show_differences_when_patch(self, mock_prompt): patch_obj = ModulePatch(self.pipeline_dir) patch_obj.patch("fastqc") # Check that a patch file with the correct name has been created - assert "fastqc.diff" in set(Path(module_path).iterdir()) + assert "fastqc.diff" in [f.name for f in module_path.glob("*.diff")] # Update all modules assert update_obj.update() is True diff --git a/tests/subworkflows/update.py b/tests/subworkflows/update.py index 7d1a3808d..42d09aa66 100644 --- a/tests/subworkflows/update.py +++ b/tests/subworkflows/update.py @@ -74,7 +74,7 @@ def test_install_at_hash_and_update_limit_output(self): old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() # Copy the subworkflow files and check that they are affected by the update - tmpdir = tempfile.mkdtemp() + tmpdir = Path(tempfile.mkdtemp()) sw_path = Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME, "fastq_align_bowtie2") shutil.copytree(sw_path, tmpdir) From 863a8a1063ad756729bf1b3e979d3ce508002a2b Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 9 Jul 2024 11:45:04 +0200 Subject: [PATCH 278/737] fix shebang and imports --- docs/api/make_lint_md.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/docs/api/make_lint_md.py b/docs/api/make_lint_md.py index 7b823a086..432f0d16b 100644 --- a/docs/api/make_lint_md.py +++ b/docs/api/make_lint_md.py @@ -1,7 +1,6 @@ +#!/usr/bin/env python from pathlib import Path -#!/usr/bin/env python -import nf_core.commands_pipelines import nf_core.modules.lint import nf_core.pipelines.lint import nf_core.subworkflows.lint From 29075d6bf5895f3af97be456413e350dd56a3b31 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 9 Jul 2024 16:36:37 +0200 Subject: [PATCH 279/737] improve per test clean-up, move component comparison to 
utils --- nf_core/components/update.py | 14 +++++++++----- tests/modules/update.py | 32 +++++++++++++------------------- tests/subworkflows/update.py | 34 +++++++++++----------------------- tests/utils.py | 21 ++++++++++++++------- 4 files changed, 47 insertions(+), 54 deletions(-) diff --git a/nf_core/components/update.py b/nf_core/components/update.py index 37e3f6212..7050b76df 100644 --- a/nf_core/components/update.py +++ b/nf_core/components/update.py @@ -278,11 +278,14 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr ) # Ask the user if they want to install the component - dry_run = not questionary.confirm( - f"Update {self.component_type[:-1]} '{component}'?", - default=False, - style=nf_core.utils.nfcore_question_style, - ).unsafe_ask() + if not self.prompt: + dry_run = False + else: + dry_run = not questionary.confirm( + f"Update {self.component_type[:-1]} '{component}'?", + default=False, + style=nf_core.utils.nfcore_question_style, + ).unsafe_ask() if not dry_run: # Clear the component directory and move the installed files there @@ -833,6 +836,7 @@ def try_apply_patch( for_git=False, dsp_from_dir=component_relpath, dsp_to_dir=component_relpath, + limit_output=self.limit_output, ) # Move the patched files to the install dir diff --git a/tests/modules/update.py b/tests/modules/update.py index 06ae3a294..49a261e7b 100644 --- a/tests/modules/update.py +++ b/tests/modules/update.py @@ -1,4 +1,3 @@ -import filecmp import io import logging import re @@ -26,28 +25,23 @@ GITLAB_URL, OLD_TRIMGALORE_BRANCH, OLD_TRIMGALORE_SHA, + cmp_component, ) -def cmp_module(dir1: Path, dir2: Path) -> bool: - """Compare two versions of the same module""" - files = ["main.nf", "meta.yml"] - return all(filecmp.cmp(dir1 / f, dir2 / f, shallow=False) for f in files) - - def test_install_and_update(self): """Installs a module in the pipeline and updates it (no change)""" self.mods_install.install("trimgalore") update_obj = 
ModuleUpdate(self.pipeline_dir, show_diff=False) # Copy the module files and check that they are unaffected by the update - tmpdir = Path(tempfile.mkdtemp()) + tmpdir = Path(tempfile.TemporaryDirectory().name) trimgalore_tmpdir = tmpdir / "trimgalore" trimgalore_path = Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "trimgalore") shutil.copytree(trimgalore_path, trimgalore_tmpdir) assert update_obj.update("trimgalore") is True - assert cmp_module(trimgalore_tmpdir, trimgalore_path) is True + assert cmp_component(trimgalore_tmpdir, trimgalore_path) is True def test_install_at_hash_and_update(self): @@ -58,13 +52,13 @@ def test_install_at_hash_and_update(self): ) # Copy the module files and check that they are affected by the update - tmpdir = Path(tempfile.mkdtemp()) + tmpdir = Path(tempfile.TemporaryDirectory().name) trimgalore_tmpdir = tmpdir / "trimgalore" trimgalore_path = Path(self.pipeline_dir, "modules", GITLAB_REPO, "trimgalore") shutil.copytree(trimgalore_path, trimgalore_tmpdir) assert update_obj.update("trimgalore") is True - assert cmp_module(trimgalore_tmpdir, trimgalore_path) is False + assert cmp_component(trimgalore_tmpdir, trimgalore_path) is False # Check that the modules.json is correctly updated mod_json_obj = ModulesJson(self.pipeline_dir) @@ -95,13 +89,13 @@ def test_install_at_hash_and_update_limit_output(self): ) # Copy the module files and check that they are affected by the update - tmpdir = Path(tempfile.mkdtemp()) + tmpdir = Path(tempfile.TemporaryDirectory().name) trimgalore_tmpdir = tmpdir / "trimgalore" trimgalore_path = Path(self.pipeline_dir, "modules", GITLAB_REPO, "trimgalore") shutil.copytree(trimgalore_path, trimgalore_tmpdir) assert update_obj.update("trimgalore") is True - assert cmp_module(trimgalore_tmpdir, trimgalore_path) is False + assert cmp_component(trimgalore_tmpdir, trimgalore_path) is False # Check that the modules.json is correctly updated mod_json_obj = ModulesJson(self.pipeline_dir) @@ -159,13 +153,13 @@ 
def test_install_at_hash_and_update_and_save_diff_to_file(self): ) # Copy the module files and check that they are affected by the update - tmpdir = Path(tempfile.mkdtemp()) + tmpdir = Path(tempfile.TemporaryDirectory().name) trimgalore_tmpdir = tmpdir / "trimgalore" trimgalore_path = Path(self.pipeline_dir, "modules", GITLAB_REPO, "trimgalore") shutil.copytree(trimgalore_path, trimgalore_tmpdir) assert update_obj.update("trimgalore") is True - assert cmp_module(trimgalore_tmpdir, trimgalore_path) is True + assert cmp_component(trimgalore_tmpdir, trimgalore_path) is True # TODO: Apply the patch to the module @@ -184,13 +178,13 @@ def test_install_at_hash_and_update_and_save_diff_to_file_limit_output(self): ) # Copy the module files and check that they are affected by the update - tmpdir = Path(tempfile.mkdtemp()) + tmpdir = Path(tempfile.TemporaryDirectory().name) trimgalore_tmpdir = tmpdir / "trimgalore" trimgalore_path = Path(self.pipeline_dir, "modules", GITLAB_REPO, "trimgalore") shutil.copytree(trimgalore_path, trimgalore_tmpdir) assert update_obj.update("trimgalore") is True - assert cmp_module(trimgalore_tmpdir, trimgalore_path) is True + assert cmp_component(trimgalore_tmpdir, trimgalore_path) is True # Check that the patch file was created assert patch_path.exists(), f"Patch file was not created at {patch_path}" @@ -441,7 +435,7 @@ def test_update_only_show_differences(self, mock_prompt): ) update_old.update() - tmpdir = Path(tempfile.mkdtemp()) + tmpdir = Path(tempfile.TemporaryDirectory().name) shutil.rmtree(tmpdir) shutil.copytree(Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME), tmpdir) @@ -455,7 +449,7 @@ def test_update_only_show_differences(self, mock_prompt): correct_git_sha = list(update_obj.modules_repo.get_component_git_log(mod, "modules", depth=1))[0]["git_sha"] current_git_sha = mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"] assert correct_git_sha != current_git_sha - assert 
cmp_module(Path(tmpdir, mod), Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, mod)) is True + assert cmp_component(Path(tmpdir, mod), Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, mod)) is True # Mock questionary answer: do not update module, only show diffs diff --git a/tests/subworkflows/update.py b/tests/subworkflows/update.py index 42d09aa66..5395d194a 100644 --- a/tests/subworkflows/update.py +++ b/tests/subworkflows/update.py @@ -1,4 +1,3 @@ -import filecmp import io import logging import re @@ -14,7 +13,7 @@ from nf_core.modules.update import ModuleUpdate from nf_core.subworkflows.update import SubworkflowUpdate -from ..utils import OLD_SUBWORKFLOWS_SHA +from ..utils import OLD_SUBWORKFLOWS_SHA, cmp_component def test_install_and_update(self): @@ -23,8 +22,7 @@ def test_install_and_update(self): update_obj = SubworkflowUpdate(self.pipeline_dir, show_diff=False) # Copy the sw files and check that they are unaffected by the update - tmpdir = tempfile.mkdtemp() - shutil.rmtree(tmpdir) + tmpdir = Path(tempfile.TemporaryDirectory().name) sw_path = Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME, "bam_stats_samtools") shutil.copytree(sw_path, tmpdir) @@ -39,8 +37,8 @@ def test_install_at_hash_and_update(self): old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() # Copy the sw files and check that they are affected by the update - tmpdir = tempfile.mkdtemp() - shutil.rmtree(tmpdir) + tmpdir = Path(tempfile.TemporaryDirectory().name) + sw_path = Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME, "fastq_align_bowtie2") shutil.copytree(sw_path, tmpdir) @@ -69,12 +67,13 @@ def test_install_at_hash_and_update_limit_output(self): ch = logging.StreamHandler(log_capture) logger = logging.getLogger() logger.addHandler(ch) + logger.setLevel(logging.INFO) update_obj = SubworkflowUpdate(self.pipeline_dir, show_diff=False, update_deps=True, limit_output=True) old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() 
# Copy the subworkflow files and check that they are affected by the update - tmpdir = Path(tempfile.mkdtemp()) + tmpdir = Path(tempfile.TemporaryDirectory().name) sw_path = Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME, "fastq_align_bowtie2") shutil.copytree(sw_path, tmpdir) @@ -124,7 +123,6 @@ def test_install_at_hash_and_update_limit_output(self): # Clean up logger.removeHandler(ch) log_capture.close() - shutil.rmtree(tmpdir) def test_install_at_hash_and_update_and_save_diff_to_file(self): @@ -134,8 +132,8 @@ def test_install_at_hash_and_update_and_save_diff_to_file(self): update_obj = SubworkflowUpdate(self.pipeline_dir, save_diff_fn=patch_path, update_deps=True) # Copy the sw files and check that they are affected by the update - tmpdir = tempfile.mkdtemp() - shutil.rmtree(tmpdir) + tmpdir = Path(tempfile.TemporaryDirectory().name) + sw_path = Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME, "fastq_align_bowtie2") shutil.copytree(sw_path, tmpdir) @@ -156,8 +154,7 @@ def test_install_at_hash_and_update_and_save_diff_limit_output(self): update_obj = SubworkflowUpdate(self.pipeline_dir, save_diff_fn=patch_path, update_deps=True, limit_output=True) # Copy the sw files and check that they are affected by the update - tmpdir = tempfile.mkdtemp() - shutil.rmtree(tmpdir) + tmpdir = Path(tempfile.TemporaryDirectory().name) sw_path = Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME, "fastq_align_bowtie2") shutil.copytree(sw_path, tmpdir) @@ -196,7 +193,6 @@ def test_install_at_hash_and_update_and_save_diff_limit_output(self): # Clean up patch_path.unlink() - shutil.rmtree(tmpdir) def test_update_all(self): @@ -343,8 +339,7 @@ def test_update_all_linked_components_from_subworkflow(self): old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() # Copy the sw files and check that they are affected by the update - tmpdir = tempfile.mkdtemp() - shutil.rmtree(tmpdir) + tmpdir = Path(tempfile.TemporaryDirectory().name) 
subworkflows_path = Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME) modules_path = Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME) shutil.copytree(subworkflows_path, Path(tmpdir, "subworkflows")) @@ -390,8 +385,7 @@ def test_update_all_subworkflows_from_module(self): old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() # Copy the sw files and check that they are affected by the update - tmpdir = tempfile.mkdtemp() - shutil.rmtree(tmpdir) + tmpdir = Path(tempfile.TemporaryDirectory().name) sw_path = Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME, "fastq_align_bowtie2") shutil.copytree(sw_path, Path(tmpdir, "fastq_align_bowtie2")) @@ -445,9 +439,3 @@ def test_update_change_of_included_modules(self): assert "ensemblvep" not in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] assert "ensemblvep/vep" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] assert Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "ensemblvep/vep").is_dir() - - -def cmp_component(dir1, dir2): - """Compare two versions of the same component""" - files = ["main.nf", "meta.yml"] - return all(filecmp.cmp(Path(dir1, f), Path(dir2, f), shallow=False) for f in files) diff --git a/tests/utils.py b/tests/utils.py index 9a0fd0896..151655b8f 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -2,9 +2,10 @@ Helper functions for tests """ +import filecmp import functools -import os import tempfile +from pathlib import Path from typing import Any, Callable, Tuple import responses @@ -93,18 +94,24 @@ def mock_biocontainers_api_calls(rsps: responses.RequestsMock, module: str, vers rsps.get(biocontainers_api_url, json=biocontainers_mock, status=200) -def create_tmp_pipeline() -> Tuple[str, str, str, str]: +def create_tmp_pipeline() -> Tuple[Path, Path, str, Path]: """Create a new Pipeline for testing""" - tmp_dir = tempfile.mkdtemp() - root_repo_dir = 
os.path.dirname(os.path.dirname(os.path.realpath(__file__))) - template_dir = os.path.join(root_repo_dir, "nf_core", "pipeline-template") + tmp_dir = Path(tempfile.mkdtemp()) + root_repo_dir = Path(__file__).resolve().parent.parent + template_dir = root_repo_dir / "nf_core" / "pipeline-template" pipeline_name = "mypipeline" - pipeline_dir = os.path.join(tmp_dir, pipeline_name) + pipeline_dir = tmp_dir / pipeline_name nf_core.pipelines.create.create.PipelineCreate( - pipeline_name, "it is mine", "me", no_git=True, outdir=pipeline_dir + pipeline_name, "it is mine", "me", no_git=True, outdir=str(pipeline_dir) ).init_pipeline() # return values to instance variables for later use in test methods return tmp_dir, template_dir, pipeline_name, pipeline_dir + + +def cmp_component(dir1: Path, dir2: Path) -> bool: + """Compare two versions of the same component""" + files = ["main.nf", "meta.yml"] + return all(filecmp.cmp(dir1 / f, dir2 / f, shallow=False) for f in files) From c1b955bc12b2becbf92392e673ce3ce619633f88 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 9 Jul 2024 16:44:13 +0200 Subject: [PATCH 280/737] use same output for diff file or print output --- nf_core/modules/modules_differ.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/nf_core/modules/modules_differ.py b/nf_core/modules/modules_differ.py index 34966b2ff..e79554f2b 100644 --- a/nf_core/modules/modules_differ.py +++ b/nf_core/modules/modules_differ.py @@ -180,11 +180,18 @@ def write_diff_file( if diff_status == ModulesDiffer.DiffEnum.UNCHANGED: # The files are identical fh.write(f"'{Path(dsp_from_dir, file)}' is unchanged\n") + elif diff_status == ModulesDiffer.DiffEnum.CREATED: + # The file was created between the commits + fh.write(f"'{Path(dsp_from_dir, file)}' was created\n") + elif diff_status == ModulesDiffer.DiffEnum.REMOVED: + # The file was removed between the commits + fh.write(f"'{Path(dsp_from_dir, file)}' was removed\n") elif limit_output and not file.suffix == ".nf": # Skip 
printing the diff for files other than main.nf fh.write(f"Changes in '{Path(module, file)}' but not shown\n") else: # The file has changed write the diff lines to the file + fh.write(f"Changes in '{Path(module, file)}':\n") for line in diff: fh.write(line) fh.write("\n") From 71fe3da619910bc74543a0f75f77a3b24f90b9b8 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 9 Jul 2024 16:56:24 +0200 Subject: [PATCH 281/737] fix module tests --- tests/modules/update.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/modules/update.py b/tests/modules/update.py index 49a261e7b..12adbeb82 100644 --- a/tests/modules/update.py +++ b/tests/modules/update.py @@ -436,7 +436,6 @@ def test_update_only_show_differences(self, mock_prompt): update_old.update() tmpdir = Path(tempfile.TemporaryDirectory().name) - shutil.rmtree(tmpdir) shutil.copytree(Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME), tmpdir) assert update_obj.update() is True From b5833900284b9da51003295b79fb8e3c9b626cbb Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 10 Jul 2024 16:38:16 +0200 Subject: [PATCH 282/737] fix test --- nf_core/components/update.py | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/nf_core/components/update.py b/nf_core/components/update.py index 7050b76df..81d897fc7 100644 --- a/nf_core/components/update.py +++ b/nf_core/components/update.py @@ -278,14 +278,11 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr ) # Ask the user if they want to install the component - if not self.prompt: - dry_run = False - else: - dry_run = not questionary.confirm( - f"Update {self.component_type[:-1]} '{component}'?", - default=False, - style=nf_core.utils.nfcore_question_style, - ).unsafe_ask() + dry_run = not questionary.confirm( + f"Update {self.component_type[:-1]} '{component}'?", + default=False, + style=nf_core.utils.nfcore_question_style, + ).unsafe_ask() if not dry_run: # Clear the component directory and move the installed 
files there From 403af3270cc903d57b273fed22699b59090d361a Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 11 Jul 2024 12:15:44 +0200 Subject: [PATCH 283/737] actually test the output --- nf_core/components/update.py | 16 +++++++++------- tests/modules/update.py | 11 ++++++++--- tests/subworkflows/update.py | 22 ++++++++++++---------- 3 files changed, 29 insertions(+), 20 deletions(-) diff --git a/nf_core/components/update.py b/nf_core/components/update.py index 81d897fc7..72c6e0254 100644 --- a/nf_core/components/update.py +++ b/nf_core/components/update.py @@ -276,13 +276,15 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr dsp_to_dir=component_dir, limit_output=self.limit_output, ) - - # Ask the user if they want to install the component - dry_run = not questionary.confirm( - f"Update {self.component_type[:-1]} '{component}'?", - default=False, - style=nf_core.utils.nfcore_question_style, - ).unsafe_ask() + if self.prompt: + # Ask the user if they want to install the component + dry_run = not questionary.confirm( + f"Update {self.component_type[:-1]} '{component}'?", + default=False, + style=nf_core.utils.nfcore_question_style, + ).unsafe_ask() + else: + dry_run = False if not dry_run: # Clear the component directory and move the installed files there diff --git a/tests/modules/update.py b/tests/modules/update.py index 12adbeb82..141a9bed0 100644 --- a/tests/modules/update.py +++ b/tests/modules/update.py @@ -78,10 +78,11 @@ def test_install_at_hash_and_update_limit_output(self): ch = logging.StreamHandler(log_capture) logger = logging.getLogger() logger.addHandler(ch) + logger.setLevel(logging.INFO) update_obj = ModuleUpdate( self.pipeline_dir, - show_diff=False, + show_diff=True, update_deps=True, remote_url=GITLAB_URL, branch=OLD_TRIMGALORE_BRANCH, @@ -98,8 +99,7 @@ def test_install_at_hash_and_update_limit_output(self): assert cmp_component(trimgalore_tmpdir, trimgalore_path) is False # Check that the modules.json is 
correctly updated - mod_json_obj = ModulesJson(self.pipeline_dir) - mod_json = mod_json_obj.get_modules_json() + mod_json = ModulesJson(self.pipeline_dir).get_modules_json() # Get the up-to-date git_sha for the module from the ModuleRepo object correct_git_sha = update_obj.modules_repo.get_latest_component_version("trimgalore", "modules") current_git_sha = mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"]["git_sha"] @@ -110,6 +110,7 @@ def test_install_at_hash_and_update_limit_output(self): log_lines = log_output.split("\n") # Check for various scenarios + nf_changes_shown = False for line in log_lines: if ( re.match(r"'.+' is unchanged", line) @@ -134,6 +135,10 @@ def test_install_at_hash_and_update_limit_output(self): else: raise AssertionError("Changes shown message did not contain a file path") assert Path(file_path).suffix == ".nf", f"Changes in non-.nf file were shown: {line}" + nf_changes_shown = True + + # Ensure that changes in at least one .nf file were shown + assert nf_changes_shown, "No changes in .nf files were shown" # Clean up logger.removeHandler(ch) diff --git a/tests/subworkflows/update.py b/tests/subworkflows/update.py index 5395d194a..e7328813e 100644 --- a/tests/subworkflows/update.py +++ b/tests/subworkflows/update.py @@ -69,7 +69,7 @@ def test_install_at_hash_and_update_limit_output(self): logger.addHandler(ch) logger.setLevel(logging.INFO) - update_obj = SubworkflowUpdate(self.pipeline_dir, show_diff=False, update_deps=True, limit_output=True) + update_obj = SubworkflowUpdate(self.pipeline_dir, show_diff=True, update_deps=True, limit_output=True) old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() # Copy the subworkflow files and check that they are affected by the update @@ -98,18 +98,20 @@ def test_install_at_hash_and_update_limit_output(self): # Check for various scenarios nf_changes_shown = False for line in log_lines: - if re.match(r"'.+' is unchanged", line): - # Unchanged files should be reported for 
both .nf and non-.nf files - assert True - elif re.match(r"'.+' was created", line): - # Created files should be reported for both .nf and non-.nf files - assert True - elif re.match(r"'.+' was removed", line): - # Removed files should be reported for both .nf and non-.nf files + if ( + re.match(r"'.+' is unchanged", line) + or re.match(r"'.+' was created", line) + or re.match(r"'.+' was removed", line) + ): + # Unchanged, created, and removed files should be reported for both .nf and non-.nf files assert True elif re.match(r"Changes in '.+' but not shown", line): # Changes not shown should only be for non-.nf files - file_path = re.search(r"'(.+)'", line).group(1) + match = re.search(r"'(.+)'", line) + if match: + file_path = match.group(1) + else: + raise AssertionError("Changes not shown message did not contain a file path") assert Path(file_path).suffix != ".nf", f"Changes in .nf file were not shown: {line}" elif re.match(r"Changes in '.+':$", line): # Changes shown should only be for .nf files From 12c166610bf108954a9661241252198e7aa2e239 Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 11 Jul 2024 12:16:08 +0200 Subject: [PATCH 284/737] check that limit_output is set only when it makes sense --- nf_core/components/update.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/nf_core/components/update.py b/nf_core/components/update.py index 72c6e0254..a896ef094 100644 --- a/nf_core/components/update.py +++ b/nf_core/components/update.py @@ -77,6 +77,8 @@ def _parameter_checks(self): if not self.has_valid_directory(): raise UserWarning("The command was not run in a valid pipeline directory.") + if self.limit_output and not (self.save_diff_fn or self.show_diff): + raise UserWarning("The '--limit-output' flag can only be used with '--preview' or '--save-diff'.") def update(self, component=None, silent=False, updated=None, check_diff_exist=True) -> bool: """Updates a specified module/subworkflow or all modules/subworkflows in a pipeline. 
From cd827e1f5452c0bb6a8615e1cd4ae6aa065bbd3d Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 11 Jul 2024 12:16:46 +0200 Subject: [PATCH 285/737] remove short option (`l` might be confused with `list` (which we don't have)), fix typo --- nf_core/__main__.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 331f53683..f33c63e87 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -929,12 +929,11 @@ def command_modules_install(ctx, tool, dir, prompt, force, sha): help="Prompt for the version of the module", ) @click.option( - "-l", "--limit-output", "limit_output", is_flag=True, default=False, - help="Limit ouput to only the difference in main.nf", + help="Limit output to only the difference in main.nf", ) @click.option("-s", "--sha", type=str, metavar="", help="Install module at commit SHA") @click.option( From eb72f09b0923fa325a1935f13fcbc5626c241373 Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 11 Jul 2024 14:10:34 +0200 Subject: [PATCH 286/737] update to pytest 8 and move it to dev requirements --- requirements-dev.txt | 3 ++- requirements.txt | 2 -- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index ebfbb530d..2c7bb0c8c 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -7,7 +7,6 @@ ruff Sphinx sphinx-rtd-theme textual-dev==1.5.1 -mypy types-PyYAML types-requests types-jsonschema @@ -16,4 +15,6 @@ types-PyYAML types-requests types-setuptools pytest-textual-snapshot==0.4.0 +pytest-workflow>=2.0.0 +pytest>=8.0.0 ruff diff --git a/requirements.txt b/requirements.txt index ccfc1bc9c..524b739e8 100644 --- a/requirements.txt +++ b/requirements.txt @@ -11,8 +11,6 @@ pdiff pre-commit prompt_toolkit>=3.0.3 pydantic>=2.2.1 -pytest-workflow>=2.0.0 -pytest>=7.0.0 pyyaml questionary>=1.8.0 refgenie From 66c8ca5e9d16435e8fc8a8077666af7d61ff9cbb Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 11 Jul 2024 14:10:42 +0200 
Subject: [PATCH 287/737] fix deprecation warning --- nf_core/pipelines/download.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/pipelines/download.py b/nf_core/pipelines/download.py index 8454f894a..24de045a6 100644 --- a/nf_core/pipelines/download.py +++ b/nf_core/pipelines/download.py @@ -20,7 +20,7 @@ import rich import rich.progress from git.exc import GitCommandError, InvalidGitRepositoryError -from pkg_resources import parse_version as version_parser +from setuptools._distutils.version import LooseVersion as VersionParser import nf_core import nf_core.pipelines.list @@ -1693,7 +1693,7 @@ def tidy_tags_and_branches(self): else: # desired revisions may contain arbitrary branch names that do not correspond to valid sematic versioning patterns. valid_versions = [ - version_parser(v) + VersionParser(v) for v in desired_revisions if re.match(r"\d+\.\d+(?:\.\d+)*(?:[\w\-_])*", v) ] From 66be4a0a52390d2637eafda4e702bc97d3016a73 Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 11 Jul 2024 14:10:42 +0200 Subject: [PATCH 288/737] fix deprecation warning --- nf_core/pipelines/download.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/nf_core/pipelines/download.py b/nf_core/pipelines/download.py index 8454f894a..43b63d819 100644 --- a/nf_core/pipelines/download.py +++ b/nf_core/pipelines/download.py @@ -20,7 +20,7 @@ import rich import rich.progress from git.exc import GitCommandError, InvalidGitRepositoryError -from pkg_resources import parse_version as version_parser +from packaging.version import Version import nf_core import nf_core.pipelines.list @@ -1693,13 +1693,11 @@ def tidy_tags_and_branches(self): else: # desired revisions may contain arbitrary branch names that do not correspond to valid sematic versioning patterns. 
valid_versions = [ - version_parser(v) - for v in desired_revisions - if re.match(r"\d+\.\d+(?:\.\d+)*(?:[\w\-_])*", v) + Version(v) for v in desired_revisions if re.match(r"\d+\.\d+(?:\.\d+)*(?:[\w\-_])*", v) ] # valid versions sorted in ascending order, last will be aliased as "latest". latest = sorted(valid_versions)[-1] - self.repo.create_head("latest", latest) + self.repo.create_head("latest", str(latest)) self.checkout(latest) if self.repo.head.is_detached: self.repo.head.reset(index=True, working_tree=True) From ed64d5415e22def0d6abc8fc4a9b142d00a3f8b7 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Thu, 11 Jul 2024 12:31:13 +0000 Subject: [PATCH 289/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7c2f5d99d..706d155d2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -47,6 +47,7 @@ - update output_dir for api docs to new website structure ([#3051](https://github.com/nf-core/tools/pull/3051)) - Update pre-commit hook astral-sh/ruff-pre-commit to v0.5.1 ([#3052](https://github.com/nf-core/tools/pull/3052)) - update api docs to new structure ([#3054](https://github.com/nf-core/tools/pull/3054)) +- Update to pytest v8 and move it to dev dependencies ([#3058](https://github.com/nf-core/tools/pull/3058)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From e4cfde9aba2567fdc3903f1039caaa995da7a77f Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 15 Jul 2024 09:05:46 +0000 Subject: [PATCH 290/737] chore(deps): update pre-commit hook astral-sh/ruff-pre-commit to v0.5.2 --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c1dc7978f..872bd61e0 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: 
https://github.com/astral-sh/ruff-pre-commit - rev: v0.5.1 + rev: v0.5.2 hooks: - id: ruff # linter args: [--fix, --exit-non-zero-on-fix] # sort imports and fix From 5b648fba592bf69d864a5ee88e062bad84906f6d Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 15 Jul 2024 11:15:23 +0200 Subject: [PATCH 291/737] fix tests by mocking questionary confirm --- nf_core/components/update.py | 18 ++++++++---------- tests/modules/update.py | 19 ++++++++++--------- tests/subworkflows/update.py | 6 +++++- 3 files changed, 23 insertions(+), 20 deletions(-) diff --git a/nf_core/components/update.py b/nf_core/components/update.py index a896ef094..1e31e5627 100644 --- a/nf_core/components/update.py +++ b/nf_core/components/update.py @@ -128,7 +128,6 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr components_info = ( self.get_all_components_info() if self.update_all else [self.get_single_component_info(component)] ) - # Save the current state of the modules.json old_modules_json = self.modules_json.get_modules_json() @@ -278,15 +277,12 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr dsp_to_dir=component_dir, limit_output=self.limit_output, ) - if self.prompt: - # Ask the user if they want to install the component - dry_run = not questionary.confirm( - f"Update {self.component_type[:-1]} '{component}'?", - default=False, - style=nf_core.utils.nfcore_question_style, - ).unsafe_ask() - else: - dry_run = False + # Ask the user if they want to install the component + dry_run = not questionary.confirm( + f"Update {self.component_type[:-1]} '{component}'?", + default=False, + style=nf_core.utils.nfcore_question_style, + ).unsafe_ask() if not dry_run: # Clear the component directory and move the installed files there @@ -397,6 +393,8 @@ def get_single_component_info(self, component): sha = self.sha config_entry = None + if self.update_config is None: + raise UserWarning("Could not find '.nf-core.yml' file in pipeline 
directory") if any( [ entry.count("/") == 1 diff --git a/tests/modules/update.py b/tests/modules/update.py index 141a9bed0..a19067b34 100644 --- a/tests/modules/update.py +++ b/tests/modules/update.py @@ -69,7 +69,9 @@ def test_install_at_hash_and_update(self): assert correct_git_sha == current_git_sha -def test_install_at_hash_and_update_limit_output(self): +# Mock questionary answer: do not update module, only show diffs +@mock.patch.object(questionary.Question, "unsafe_ask", return_value=True) +def test_install_at_hash_and_update_limit_output(self, mock_prompt): """Installs an old version of a module in the pipeline and updates it with limited output reporting""" assert self.mods_install_old.install("trimgalore") @@ -429,10 +431,8 @@ def test_update_different_branch_mix_modules_branch_test(self): @mock.patch.object(questionary.Question, "unsafe_ask", return_value=False) def test_update_only_show_differences(self, mock_prompt): """Try updating all modules showing differences. - Don't update some of them. + Only show diffs, don't actually save any updated files. 
Check that the sha in modules.json is not changed.""" - modules_json = ModulesJson(self.pipeline_dir) - update_obj = ModuleUpdate(self.pipeline_dir, update_all=True, show_diff=True) # Update modules to a fixed old SHA update_old = ModuleUpdate( @@ -443,16 +443,17 @@ def test_update_only_show_differences(self, mock_prompt): tmpdir = Path(tempfile.TemporaryDirectory().name) shutil.copytree(Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME), tmpdir) - assert update_obj.update() is True + update_obj = ModuleUpdate(self.pipeline_dir, update_all=True, show_diff=True) + assert ModuleUpdate(self.pipeline_dir, update_all=True, show_diff=True).update() - mod_json = modules_json.get_modules_json() + mod_json = ModulesJson(self.pipeline_dir).get_modules_json() # Loop through all modules and check that they are NOT updated (according to the modules.json file) # A module that can be updated but shouldn't is fastqc # Module multiqc is already up to date so don't check mod = "fastqc" - correct_git_sha = list(update_obj.modules_repo.get_component_git_log(mod, "modules", depth=1))[0]["git_sha"] + non_updated_git_sha = list(update_obj.modules_repo.get_component_git_log(mod, "modules", depth=1))[0]["git_sha"] current_git_sha = mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"] - assert correct_git_sha != current_git_sha + assert non_updated_git_sha != current_git_sha assert cmp_component(Path(tmpdir, mod), Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, mod)) is True @@ -469,7 +470,7 @@ def test_update_only_show_differences_when_patch(self, mock_prompt): update_old = ModuleUpdate( self.pipeline_dir, update_all=True, show_diff=False, sha="5e34754d42cd2d5d248ca8673c0a53cdf5624905" ) - update_old.update() + assert update_old.update() # Modify fastqc module, it will have a patch which will be applied during update # We modify fastqc because it's one of the modules that can be updated and there's another one before it 
(custom/dumpsoftwareversions) diff --git a/tests/subworkflows/update.py b/tests/subworkflows/update.py index e7328813e..170b2af66 100644 --- a/tests/subworkflows/update.py +++ b/tests/subworkflows/update.py @@ -4,7 +4,9 @@ import shutil import tempfile from pathlib import Path +from unittest import mock +import questionary import yaml import nf_core.utils @@ -58,7 +60,9 @@ def test_install_at_hash_and_update(self): ) -def test_install_at_hash_and_update_limit_output(self): +# Mock questionary answer: update components +@mock.patch.object(questionary.Question, "unsafe_ask", return_value=True) +def test_install_at_hash_and_update_limit_output(self, mock_prompt): """Installs an old version of a subworkflow in the pipeline and updates it with limit_output=True""" assert self.subworkflow_install_old.install("fastq_align_bowtie2") From 08a138c8ee6a5f79a1573982927667dc3f0f5782 Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 15 Jul 2024 11:15:48 +0200 Subject: [PATCH 292/737] handle new jsonschema error type --- nf_core/modules/lint/environment_yml.py | 2 +- nf_core/modules/lint/meta_yml.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/modules/lint/environment_yml.py b/nf_core/modules/lint/environment_yml.py index 92281d99c..e10ef1761 100644 --- a/nf_core/modules/lint/environment_yml.py +++ b/nf_core/modules/lint/environment_yml.py @@ -60,7 +60,7 @@ def environment_yml(module_lint_object: ComponentLint, module: NFCoreComponent) hint = "" if len(e.path) > 0: hint = f"\nCheck the entry for `{e.path[0]}`." 
- if e.schema.get("message"): + if e.schema and isinstance(e.schema, dict) and "message" in e.schema: e.message = e.schema["message"] module.failed.append( ( diff --git a/nf_core/modules/lint/meta_yml.py b/nf_core/modules/lint/meta_yml.py index 481d50b3e..4c036713c 100644 --- a/nf_core/modules/lint/meta_yml.py +++ b/nf_core/modules/lint/meta_yml.py @@ -76,7 +76,7 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None hint = f"\nCheck the entry for `{e.path[0]}`." if e.message.startswith("None is not of type 'object'") and len(e.path) > 2: hint = f"\nCheck that the child entries of {str(e.path[0])+'.'+str(e.path[2])} are indented correctly." - if e.schema.get("message"): + if e.schema and isinstance(e.schema, dict) and "message" in e.schema: e.message = e.schema["message"] incorrect_value = meta_yaml for key in e.path: From 4d3f793e7a40b4ead7e5134a070523933946316f Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 15 Jul 2024 11:15:48 +0200 Subject: [PATCH 293/737] handle new jsonschema error type --- nf_core/modules/lint/environment_yml.py | 2 +- nf_core/modules/lint/meta_yml.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/modules/lint/environment_yml.py b/nf_core/modules/lint/environment_yml.py index 92281d99c..e10ef1761 100644 --- a/nf_core/modules/lint/environment_yml.py +++ b/nf_core/modules/lint/environment_yml.py @@ -60,7 +60,7 @@ def environment_yml(module_lint_object: ComponentLint, module: NFCoreComponent) hint = "" if len(e.path) > 0: hint = f"\nCheck the entry for `{e.path[0]}`." 
- if e.schema.get("message"): + if e.schema and isinstance(e.schema, dict) and "message" in e.schema: e.message = e.schema["message"] module.failed.append( ( diff --git a/nf_core/modules/lint/meta_yml.py b/nf_core/modules/lint/meta_yml.py index 481d50b3e..4c036713c 100644 --- a/nf_core/modules/lint/meta_yml.py +++ b/nf_core/modules/lint/meta_yml.py @@ -76,7 +76,7 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None hint = f"\nCheck the entry for `{e.path[0]}`." if e.message.startswith("None is not of type 'object'") and len(e.path) > 2: hint = f"\nCheck that the child entries of {str(e.path[0])+'.'+str(e.path[2])} are indented correctly." - if e.schema.get("message"): + if e.schema and isinstance(e.schema, dict) and "message" in e.schema: e.message = e.schema["message"] incorrect_value = meta_yaml for key in e.path: From 716ead4e61a363dd5340fc6b3b0ede15b872c176 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Mon, 15 Jul 2024 09:29:34 +0000 Subject: [PATCH 294/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 706d155d2..fe7fdcc28 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -48,6 +48,7 @@ - Update pre-commit hook astral-sh/ruff-pre-commit to v0.5.1 ([#3052](https://github.com/nf-core/tools/pull/3052)) - update api docs to new structure ([#3054](https://github.com/nf-core/tools/pull/3054)) - Update to pytest v8 and move it to dev dependencies ([#3058](https://github.com/nf-core/tools/pull/3058)) +- handle new jsonschema error type ([#3061](https://github.com/nf-core/tools/pull/3061)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From e06ebdf422669a405146b57c05c5c59f30395513 Mon Sep 17 00:00:00 2001 From: adamrtalbot <12817534+adamrtalbot@users.noreply.github.com> Date: Mon, 15 Jul 2024 11:52:20 +0100 Subject: [PATCH 295/737] Remove Azure specific documentation which is no longer required 
--- nf_core/pipeline-template/docs/usage.md | 8 -------- 1 file changed, 8 deletions(-) diff --git a/nf_core/pipeline-template/docs/usage.md b/nf_core/pipeline-template/docs/usage.md index ca64377d2..bf637219f 100644 --- a/nf_core/pipeline-template/docs/usage.md +++ b/nf_core/pipeline-template/docs/usage.md @@ -207,14 +207,6 @@ See the main [Nextflow documentation](https://www.nextflow.io/docs/latest/config If you have any questions or issues please send us a message on [Slack](https://nf-co.re/join/slack) on the [`#configs` channel](https://nfcore.slack.com/channels/configs). -## Azure Resource Requests - -To be used with the `azurebatch` profile by specifying the `-profile azurebatch`. -We recommend providing a compute `params.vm_type` of `Standard_D16_v3` VMs by default but these options can be changed if required. - -Note that the choice of VM size depends on your quota and the overall workload during the analysis. -For a thorough list, please refer the [Azure Sizes for virtual machines in Azure](https://docs.microsoft.com/en-us/azure/virtual-machines/sizes). 
- {% endif -%} ## Running in the background From e23869120ce320d3d4d4d58856751c358a2d1dd6 Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 15 Jul 2024 14:55:30 +0200 Subject: [PATCH 296/737] move pipelines tests into `pipelines` subdirectory to mirror command structure --- tests/{ => pipelines}/test_bump_version.py | 0 tests/{ => pipelines}/test_create.py | 2 +- tests/{ => pipelines}/test_create_app.py | 0 tests/{ => pipelines}/test_create_logo.py | 0 tests/{ => pipelines}/test_download.py | 2 +- tests/{ => pipelines}/test_launch.py | 2 +- tests/{ => pipelines}/test_lint.py | 30 +++++++++++----------- tests/{ => pipelines}/test_list.py | 0 tests/{ => pipelines}/test_params_file.py | 0 tests/{ => pipelines}/test_refgenie.py | 0 tests/{ => pipelines}/test_schema.py | 2 +- tests/{ => pipelines}/test_sync.py | 2 +- 12 files changed, 20 insertions(+), 20 deletions(-) rename tests/{ => pipelines}/test_bump_version.py (100%) rename tests/{ => pipelines}/test_create.py (99%) rename tests/{ => pipelines}/test_create_app.py (100%) rename tests/{ => pipelines}/test_create_logo.py (100%) rename tests/{ => pipelines}/test_download.py (99%) rename tests/{ => pipelines}/test_launch.py (99%) rename tests/{ => pipelines}/test_lint.py (90%) rename tests/{ => pipelines}/test_list.py (100%) rename tests/{ => pipelines}/test_params_file.py (100%) rename tests/{ => pipelines}/test_refgenie.py (100%) rename tests/{ => pipelines}/test_schema.py (99%) rename tests/{ => pipelines}/test_sync.py (99%) diff --git a/tests/test_bump_version.py b/tests/pipelines/test_bump_version.py similarity index 100% rename from tests/test_bump_version.py rename to tests/pipelines/test_bump_version.py diff --git a/tests/test_create.py b/tests/pipelines/test_create.py similarity index 99% rename from tests/test_create.py rename to tests/pipelines/test_create.py index 313b6f535..d93b26bd1 100644 --- a/tests/test_create.py +++ b/tests/pipelines/test_create.py @@ -9,7 +9,7 @@ import 
nf_core.pipelines.create.create -from .utils import with_temporary_folder +from ..utils import with_temporary_folder TEST_DATA_DIR = Path(__file__).parent / "data" PIPELINE_TEMPLATE_YML = TEST_DATA_DIR / "pipeline_create_template.yml" diff --git a/tests/test_create_app.py b/tests/pipelines/test_create_app.py similarity index 100% rename from tests/test_create_app.py rename to tests/pipelines/test_create_app.py diff --git a/tests/test_create_logo.py b/tests/pipelines/test_create_logo.py similarity index 100% rename from tests/test_create_logo.py rename to tests/pipelines/test_create_logo.py diff --git a/tests/test_download.py b/tests/pipelines/test_download.py similarity index 99% rename from tests/test_download.py rename to tests/pipelines/test_download.py index c3a8c5546..ebb76fef7 100644 --- a/tests/test_download.py +++ b/tests/pipelines/test_download.py @@ -18,7 +18,7 @@ from nf_core.synced_repo import SyncedRepo from nf_core.utils import run_cmd -from .utils import with_temporary_folder +from ..utils import with_temporary_folder class DownloadTest(unittest.TestCase): diff --git a/tests/test_launch.py b/tests/pipelines/test_launch.py similarity index 99% rename from tests/test_launch.py rename to tests/pipelines/test_launch.py index 977485341..b4b285a3c 100644 --- a/tests/test_launch.py +++ b/tests/pipelines/test_launch.py @@ -11,7 +11,7 @@ import nf_core.pipelines.create.create import nf_core.pipelines.launch -from .utils import create_tmp_pipeline, with_temporary_file, with_temporary_folder +from ..utils import create_tmp_pipeline, with_temporary_file, with_temporary_folder class TestLaunch(TestCase): diff --git a/tests/test_lint.py b/tests/pipelines/test_lint.py similarity index 90% rename from tests/test_lint.py rename to tests/pipelines/test_lint.py index 4e468b753..460c8f03d 100644 --- a/tests/test_lint.py +++ b/tests/pipelines/test_lint.py @@ -12,7 +12,7 @@ import nf_core.pipelines.create.create import nf_core.pipelines.lint -from .utils import 
with_temporary_folder +from ..utils import with_temporary_folder class TestLint(unittest.TestCase): @@ -179,33 +179,33 @@ def test_sphinx_md_files(self): ####################### # SPECIFIC LINT TESTS # ####################### - from .lint.actions_awsfulltest import ( # type: ignore[misc] + from ..lint.actions_awsfulltest import ( # type: ignore[misc] test_actions_awsfulltest_fail, test_actions_awsfulltest_pass, test_actions_awsfulltest_warn, ) - from .lint.actions_awstest import ( # type: ignore[misc] + from ..lint.actions_awstest import ( # type: ignore[misc] test_actions_awstest_fail, test_actions_awstest_pass, ) - from .lint.actions_ci import ( # type: ignore[misc] + from ..lint.actions_ci import ( # type: ignore[misc] test_actions_ci_fail_wrong_nf, test_actions_ci_fail_wrong_trigger, test_actions_ci_pass, ) - from .lint.actions_schema_validation import ( # type: ignore[misc] + from ..lint.actions_schema_validation import ( # type: ignore[misc] test_actions_schema_validation_fails_for_additional_property, test_actions_schema_validation_missing_jobs, test_actions_schema_validation_missing_on, ) - from .lint.configs import ( # type: ignore[misc] + from ..lint.configs import ( # type: ignore[misc] test_ignore_base_config, test_ignore_modules_config, test_superfluous_withname_in_base_config_fails, test_superfluous_withname_in_modules_config_fails, test_withname_in_modules_config, ) - from .lint.files_exist import ( # type: ignore[misc] + from ..lint.files_exist import ( # type: ignore[misc] test_files_exist_depreciated_file, test_files_exist_fail_conditional, test_files_exist_missing_config, @@ -214,13 +214,13 @@ def test_sphinx_md_files(self): test_files_exist_pass_conditional, test_files_exist_pass_conditional_nfschema, ) - from .lint.files_unchanged import ( # type: ignore[misc] + from ..lint.files_unchanged import ( # type: ignore[misc] test_files_unchanged_fail, test_files_unchanged_pass, ) - from .lint.merge_markers import test_merge_markers_found # type: 
ignore[misc] - from .lint.modules_json import test_modules_json_pass # type: ignore[misc] - from .lint.multiqc_config import ( # type: ignore[misc] + from ..lint.merge_markers import test_merge_markers_found # type: ignore[misc] + from ..lint.modules_json import test_modules_json_pass # type: ignore[misc] + from ..lint.multiqc_config import ( # type: ignore[misc] test_multiqc_config_exists, test_multiqc_config_ignore, test_multiqc_config_missing_report_section_order, @@ -229,7 +229,7 @@ def test_sphinx_md_files(self): test_multiqc_config_report_comment_release_succeed, test_multiqc_incorrect_export_plots, ) - from .lint.nextflow_config import ( # type: ignore[misc] + from ..lint.nextflow_config import ( # type: ignore[misc] test_allow_params_reference_in_main_nf, test_catch_params_assignment_in_main_nf, test_default_values_fail, @@ -242,14 +242,14 @@ def test_sphinx_md_files(self): test_nextflow_config_example_pass, test_nextflow_config_missing_test_profile_failed, ) - from .lint.nfcore_yml import ( # type: ignore[misc] + from ..lint.nfcore_yml import ( # type: ignore[misc] test_nfcore_yml_fail_nfcore_version, test_nfcore_yml_fail_repo_type, test_nfcore_yml_pass, ) - from .lint.template_strings import ( # type: ignore[misc] + from ..lint.template_strings import ( # type: ignore[misc] test_template_strings, test_template_strings_ignore_file, test_template_strings_ignored, ) - from .lint.version_consistency import test_version_consistency # type: ignore[misc] + from ..lint.version_consistency import test_version_consistency # type: ignore[misc] diff --git a/tests/test_list.py b/tests/pipelines/test_list.py similarity index 100% rename from tests/test_list.py rename to tests/pipelines/test_list.py diff --git a/tests/test_params_file.py b/tests/pipelines/test_params_file.py similarity index 100% rename from tests/test_params_file.py rename to tests/pipelines/test_params_file.py diff --git a/tests/test_refgenie.py b/tests/pipelines/test_refgenie.py similarity index 100% 
rename from tests/test_refgenie.py rename to tests/pipelines/test_refgenie.py diff --git a/tests/test_schema.py b/tests/pipelines/test_schema.py similarity index 99% rename from tests/test_schema.py rename to tests/pipelines/test_schema.py index 4cb157c08..633de3db6 100644 --- a/tests/test_schema.py +++ b/tests/pipelines/test_schema.py @@ -15,7 +15,7 @@ import nf_core.pipelines.create.create import nf_core.pipelines.schema -from .utils import with_temporary_file, with_temporary_folder +from ..utils import with_temporary_file, with_temporary_folder class TestSchema(unittest.TestCase): diff --git a/tests/test_sync.py b/tests/pipelines/test_sync.py similarity index 99% rename from tests/test_sync.py rename to tests/pipelines/test_sync.py index ca90071d9..d7b73c7ff 100644 --- a/tests/test_sync.py +++ b/tests/pipelines/test_sync.py @@ -14,7 +14,7 @@ import nf_core.pipelines.create.create import nf_core.pipelines.sync -from .utils import with_temporary_folder +from ..utils import with_temporary_folder class TestModules(unittest.TestCase): From 7b3d985ff288b6e4e222a9142bed59707d06935f Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 15 Jul 2024 18:02:49 +0200 Subject: [PATCH 297/737] reduce update with limit_output tests --- tests/modules/update.py | 119 ++++++--------------------------- tests/subworkflows/update.py | 123 ++++++++--------------------------- tests/test_modules.py | 5 ++ tests/test_subworkflows.py | 5 ++ 4 files changed, 58 insertions(+), 194 deletions(-) diff --git a/tests/modules/update.py b/tests/modules/update.py index a19067b34..e02b058fb 100644 --- a/tests/modules/update.py +++ b/tests/modules/update.py @@ -1,6 +1,4 @@ -import io import logging -import re import shutil import tempfile from pathlib import Path @@ -73,15 +71,9 @@ def test_install_at_hash_and_update(self): @mock.patch.object(questionary.Question, "unsafe_ask", return_value=True) def test_install_at_hash_and_update_limit_output(self, mock_prompt): """Installs an old version of a module 
in the pipeline and updates it with limited output reporting""" + self.caplog.set_level(logging.INFO) assert self.mods_install_old.install("trimgalore") - # Capture the logger output - log_capture = io.StringIO() - ch = logging.StreamHandler(log_capture) - logger = logging.getLogger() - logger.addHandler(ch) - logger.setLevel(logging.INFO) - update_obj = ModuleUpdate( self.pipeline_dir, show_diff=True, @@ -90,61 +82,15 @@ def test_install_at_hash_and_update_limit_output(self, mock_prompt): branch=OLD_TRIMGALORE_BRANCH, limit_output=True, ) + assert update_obj.update("trimgalore") - # Copy the module files and check that they are affected by the update - tmpdir = Path(tempfile.TemporaryDirectory().name) - trimgalore_tmpdir = tmpdir / "trimgalore" - trimgalore_path = Path(self.pipeline_dir, "modules", GITLAB_REPO, "trimgalore") - shutil.copytree(trimgalore_path, trimgalore_tmpdir) - - assert update_obj.update("trimgalore") is True - assert cmp_component(trimgalore_tmpdir, trimgalore_path) is False - - # Check that the modules.json is correctly updated - mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - # Get the up-to-date git_sha for the module from the ModuleRepo object - correct_git_sha = update_obj.modules_repo.get_latest_component_version("trimgalore", "modules") - current_git_sha = mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"]["git_sha"] - assert correct_git_sha == current_git_sha - - # Get the captured log output - log_output = log_capture.getvalue() - log_lines = log_output.split("\n") - - # Check for various scenarios - nf_changes_shown = False - for line in log_lines: - if ( - re.match(r"'.+' is unchanged", line) - or re.match(r"'.+' was created", line) - or re.match(r"'.+' was removed", line) - ): - # Unchanged, created, and removed files should be reported for both .nf and non-.nf files - assert True - elif re.match(r"Changes in '.+' but not shown", line): - # Changes not shown should only be for non-.nf files - match = 
re.search(r"'(.+)'", line) - if match: - file_path = match.group(1) - else: - raise AssertionError("Changes not shown message did not contain a file path") - assert Path(file_path).suffix != ".nf", f"Changes in .nf file were not shown: {line}" - elif re.match(r"Changes in '.+':$", line): - # Changes shown should only be for .nf files - match = re.search(r"'(.+)'", line) - if match: - file_path = match.group(1) - else: - raise AssertionError("Changes shown message did not contain a file path") - assert Path(file_path).suffix == ".nf", f"Changes in non-.nf file were shown: {line}" - nf_changes_shown = True - - # Ensure that changes in at least one .nf file were shown - assert nf_changes_shown, "No changes in .nf files were shown" - - # Clean up - logger.removeHandler(ch) - log_capture.close() + # Check changes not shown for non-.nf files + assert "Changes in 'trimgalore/meta.yml' but not shown" in self.caplog.text + # Check changes shown for .nf files + assert "Changes in 'trimgalore/main.nf'" in self.caplog.text + for line in self.caplog.text.split("\n"): + if line.startswith("---"): + assert line.endswith("main.nf") def test_install_at_hash_and_update_and_save_diff_to_file(self): @@ -173,55 +119,32 @@ def test_install_at_hash_and_update_and_save_diff_to_file(self): def test_install_at_hash_and_update_and_save_diff_to_file_limit_output(self): """Installs an old version of a module in the pipeline and updates it""" + # Install old version of trimgalore self.mods_install_old.install("trimgalore") patch_path = Path(self.pipeline_dir, "trimgalore.patch") + # Update saving the differences to a patch file and with `limit_output` update_obj = ModuleUpdate( self.pipeline_dir, save_diff_fn=patch_path, - sha=OLD_TRIMGALORE_SHA, remote_url=GITLAB_URL, branch=OLD_TRIMGALORE_BRANCH, limit_output=True, ) - - # Copy the module files and check that they are affected by the update - tmpdir = Path(tempfile.TemporaryDirectory().name) - trimgalore_tmpdir = tmpdir / "trimgalore" - 
trimgalore_path = Path(self.pipeline_dir, "modules", GITLAB_REPO, "trimgalore") - shutil.copytree(trimgalore_path, trimgalore_tmpdir) - - assert update_obj.update("trimgalore") is True - assert cmp_component(trimgalore_tmpdir, trimgalore_path) is True + assert update_obj.update("trimgalore") # Check that the patch file was created assert patch_path.exists(), f"Patch file was not created at {patch_path}" # Read the contents of the patch file - with open(patch_path) as f: - patch_content = f.read() - - # Check the content of the patch file - patch_lines = patch_content.split("\n") - for line in patch_lines: - if re.match(r"'.+' is unchanged", line): - # Unchanged files should be reported for both .nf and non-.nf files - assert True - elif re.match(r"Changes in '.+' but not shown", line): - # Changes not shown should only be for non-.nf files - match = re.search(r"'(.+)'", line) - if match: - file_path = match.group(1) - else: - raise AssertionError("Changes not shown message did not contain a file path.") - assert Path(file_path).suffix != ".nf", f"Changes in .nf file were not shown: {line}" - elif re.match("diff --git", line): - # Diff should only be shown for .nf files - match = re.search(r"'(.+)'", line) - if match: - file_path = match.group(1) - else: - raise AssertionError("Changes shown message did not contain a file path.") - assert Path(file_path).suffix == ".nf", f"Diff shown for non-.nf file: {line}" + with open(patch_path) as fh: + patch_content = fh.read() + # Check changes not shown for non-.nf files + assert "Changes in 'trimgalore/meta.yml' but not shown" in patch_content + # Check changes only shown for main.nf + assert "Changes in 'trimgalore/main.nf'" in patch_content + for line in patch_content: + if line.startswith("---"): + assert line.endswith("main.nf") def test_update_all(self): diff --git a/tests/subworkflows/update.py b/tests/subworkflows/update.py index 170b2af66..0d3a63c7e 100644 --- a/tests/subworkflows/update.py +++ 
b/tests/subworkflows/update.py @@ -64,71 +64,23 @@ def test_install_at_hash_and_update(self): @mock.patch.object(questionary.Question, "unsafe_ask", return_value=True) def test_install_at_hash_and_update_limit_output(self, mock_prompt): """Installs an old version of a subworkflow in the pipeline and updates it with limit_output=True""" + self.caplog.set_level(logging.INFO) assert self.subworkflow_install_old.install("fastq_align_bowtie2") - # Capture the logger output - log_capture = io.StringIO() - ch = logging.StreamHandler(log_capture) - logger = logging.getLogger() - logger.addHandler(ch) - logger.setLevel(logging.INFO) - update_obj = SubworkflowUpdate(self.pipeline_dir, show_diff=True, update_deps=True, limit_output=True) - old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - - # Copy the subworkflow files and check that they are affected by the update - tmpdir = Path(tempfile.TemporaryDirectory().name) - sw_path = Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME, "fastq_align_bowtie2") - shutil.copytree(sw_path, tmpdir) - - assert update_obj.update("fastq_align_bowtie2") is True - assert cmp_component(tmpdir, sw_path) is False - # Check that the modules.json is correctly updated - mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - assert ( - old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ - "git_sha" - ] - != mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ - "git_sha" - ] - ) + assert update_obj.update("fastq_align_bowtie2") - # Get the captured log output - log_output = log_capture.getvalue() - log_lines = log_output.split("\n") - - # Check for various scenarios - nf_changes_shown = False - for line in log_lines: - if ( - re.match(r"'.+' is unchanged", line) - or re.match(r"'.+' was created", line) - or re.match(r"'.+' was removed", line) - ): - # Unchanged, created, and removed files should be reported for 
both .nf and non-.nf files - assert True - elif re.match(r"Changes in '.+' but not shown", line): - # Changes not shown should only be for non-.nf files - match = re.search(r"'(.+)'", line) - if match: - file_path = match.group(1) - else: - raise AssertionError("Changes not shown message did not contain a file path") - assert Path(file_path).suffix != ".nf", f"Changes in .nf file were not shown: {line}" - elif re.match(r"Changes in '.+':$", line): - # Changes shown should only be for .nf files - file_path = re.search(r"'(.+)'", line).group(1) - assert Path(file_path).suffix == ".nf", f"Changes in non-.nf file were shown: {line}" - nf_changes_shown = True - - # Ensure that changes in at least one .nf file were shown - assert nf_changes_shown, "No changes in .nf files were shown" - - # Clean up - logger.removeHandler(ch) - log_capture.close() + # Check changes not shown for non-.nf files + assert "Changes in 'fastq_align_bowtie2/meta.yml' but not shown" in self.caplog.text + assert "Changes in 'bam_sort_stats_samtools/meta.yml' but not shown" in self.caplog.text + assert "Changes in 'bam_stats_samtools/meta.yml' but not shown" in self.caplog.text + assert "Changes in 'samtools/flagstat/meta.yml' but not shown" in self.caplog.text + # Check changes only shown for main.nf files + assert "Changes in 'fastq_align_bowtie2/main.nf'" in self.caplog.text + for line in self.caplog.text.split("\n"): + if line.startswith("---"): + assert line.endswith("main.nf") def test_install_at_hash_and_update_and_save_diff_to_file(self): @@ -155,50 +107,29 @@ def test_install_at_hash_and_update_and_save_diff_to_file(self): def test_install_at_hash_and_update_and_save_diff_limit_output(self): """Installs an old version of a sw in the pipeline and updates it. 
Save differences to a file.""" - assert self.subworkflow_install_old.install("fastq_align_bowtie2") + # Install old version of fastq_align_bowtie2 + self.subworkflow_install_old.install("fastq_align_bowtie2") patch_path = Path(self.pipeline_dir, "fastq_align_bowtie2.patch") + # Update saving the differences to a patch file and with `limit_output` update_obj = SubworkflowUpdate(self.pipeline_dir, save_diff_fn=patch_path, update_deps=True, limit_output=True) - - # Copy the sw files and check that they are affected by the update - tmpdir = Path(tempfile.TemporaryDirectory().name) - sw_path = Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME, "fastq_align_bowtie2") - shutil.copytree(sw_path, tmpdir) - - assert update_obj.update("fastq_align_bowtie2") is True - assert cmp_component(tmpdir, sw_path) is True + assert update_obj.update("fastq_align_bowtie2") # Check that the patch file was created assert patch_path.exists(), f"Patch file was not created at {patch_path}" - nf_changes_shown = False - non_nf_changes_not_shown = False - + # Read the contents of the patch file with open(patch_path) as fh: content = fh.read() - - # Check the first line - assert re.match( - r"Changes in module 'nf-core/fastq_align_bowtie2' between \([a-f0-9]+\) and \([a-f0-9]+\)", - content.split("\n")[0], - ), "Unexpected first line in patch file" - - # Check for .nf file changes shown - nf_changes_shown = bool(re.search(r"Changes in '.*\.nf':\n", content)) - - # Check for non-.nf file changes not shown - non_nf_changes_not_shown = bool(re.search(r"Changes in '.*[^.nf]' but not shown", content)) - - # Check that diff content is only for .nf files - diff_lines = re.findall(r"diff --git.*", content) - for line in diff_lines: - assert re.search(r"\.nf$", line), f"Diff shown for non-.nf file: {line}" - - # Ensure that changes in .nf files were shown and non-.nf files were not shown - assert nf_changes_shown, "No changes in .nf files were shown in the patch file" - assert 
non_nf_changes_not_shown, "Changes in non-.nf files were not properly limited in the patch file" - - # Clean up - patch_path.unlink() + # Check changes not shown for non-.nf files + assert "Changes in 'fastq_align_bowtie2/meta.yml' but not shown" in content + assert "Changes in 'bam_sort_stats_samtools/meta.yml' but not shown" in content + assert "Changes in 'bam_stats_samtools/meta.yml' but not shown" in content + assert "Changes in 'samtools/flagstat/meta.yml' but not shown" in content + # Check changes only shown for main.nf files + assert "Changes in 'fastq_align_bowtie2/main.nf'" in content + for line in content: + if line.startswith("---"): + assert line.endswith("main.nf") def test_update_all(self): diff --git a/tests/test_modules.py b/tests/test_modules.py index 7dd3132d3..67ba64218 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -5,6 +5,7 @@ import shutil import unittest from pathlib import Path +import pytest import requests_cache import responses @@ -156,6 +157,10 @@ def test_modulesrepo_class(self): assert modrepo.repo_path == "nf-core" assert modrepo.branch == "master" + @pytest.fixture(autouse=True) + def _use_caplog(self, caplog): + self.caplog = caplog + ############################################ # Test of the individual modules commands. # ############################################ diff --git a/tests/test_subworkflows.py b/tests/test_subworkflows.py index b7ebe952d..7b6031311 100644 --- a/tests/test_subworkflows.py +++ b/tests/test_subworkflows.py @@ -5,6 +5,7 @@ import shutil import unittest from pathlib import Path +import pytest import nf_core.modules import nf_core.pipelines.create.create @@ -113,6 +114,10 @@ def tearDown(self): if os.path.exists(self.tmp_dir): shutil.rmtree(self.tmp_dir) + @pytest.fixture(autouse=True) + def _use_caplog(self, caplog): + self.caplog = caplog + ################################################ # Test of the individual subworkflow commands. 
# ################################################ From 6325769aaef3b2f53b2bc0321455b90f0aa89528 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Tue, 16 Jul 2024 08:21:27 +0000 Subject: [PATCH 298/737] [automated] Fix code linting --- tests/subworkflows/update.py | 2 -- tests/test_modules.py | 2 +- tests/test_subworkflows.py | 1 + 3 files changed, 2 insertions(+), 3 deletions(-) diff --git a/tests/subworkflows/update.py b/tests/subworkflows/update.py index 0d3a63c7e..42ed716b1 100644 --- a/tests/subworkflows/update.py +++ b/tests/subworkflows/update.py @@ -1,6 +1,4 @@ -import io import logging -import re import shutil import tempfile from pathlib import Path diff --git a/tests/test_modules.py b/tests/test_modules.py index 67ba64218..6e601ce7a 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -5,8 +5,8 @@ import shutil import unittest from pathlib import Path -import pytest +import pytest import requests_cache import responses import yaml diff --git a/tests/test_subworkflows.py b/tests/test_subworkflows.py index 7b6031311..17bc678ca 100644 --- a/tests/test_subworkflows.py +++ b/tests/test_subworkflows.py @@ -5,6 +5,7 @@ import shutil import unittest from pathlib import Path + import pytest import nf_core.modules From 891d08ab9472e686686561d8b5426be760782268 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 16 Jul 2024 16:21:43 +0200 Subject: [PATCH 299/737] move lint tests into pipeline folder --- tests/{lint => pipelines}/__init__.py | 0 tests/pipelines/lint/__init__.py | 0 .../lint/actions_awsfulltest.py | 0 tests/{ => pipelines}/lint/actions_awstest.py | 0 tests/{ => pipelines}/lint/actions_ci.py | 0 .../lint/actions_schema_validation.py | 0 tests/{ => pipelines}/lint/configs.py | 0 tests/{ => pipelines}/lint/files_exist.py | 7 +++--- tests/{ => pipelines}/lint/files_unchanged.py | 0 tests/{ => pipelines}/lint/merge_markers.py | 0 tests/{ => pipelines}/lint/modules_json.py | 0 tests/{ => pipelines}/lint/multiqc_config.py | 0 tests/{ => 
pipelines}/lint/nextflow_config.py | 22 +++++++++---------- tests/{ => pipelines}/lint/nfcore_yml.py | 0 .../{ => pipelines}/lint/template_strings.py | 0 .../lint/version_consistency.py | 2 +- 16 files changed, 15 insertions(+), 16 deletions(-) rename tests/{lint => pipelines}/__init__.py (100%) create mode 100644 tests/pipelines/lint/__init__.py rename tests/{ => pipelines}/lint/actions_awsfulltest.py (100%) rename tests/{ => pipelines}/lint/actions_awstest.py (100%) rename tests/{ => pipelines}/lint/actions_ci.py (100%) rename tests/{ => pipelines}/lint/actions_schema_validation.py (100%) rename tests/{ => pipelines}/lint/configs.py (100%) rename tests/{ => pipelines}/lint/files_exist.py (95%) rename tests/{ => pipelines}/lint/files_unchanged.py (100%) rename tests/{ => pipelines}/lint/merge_markers.py (100%) rename tests/{ => pipelines}/lint/modules_json.py (100%) rename tests/{ => pipelines}/lint/multiqc_config.py (100%) rename tests/{ => pipelines}/lint/nextflow_config.py (95%) rename tests/{ => pipelines}/lint/nfcore_yml.py (100%) rename tests/{ => pipelines}/lint/template_strings.py (100%) rename tests/{ => pipelines}/lint/version_consistency.py (93%) diff --git a/tests/lint/__init__.py b/tests/pipelines/__init__.py similarity index 100% rename from tests/lint/__init__.py rename to tests/pipelines/__init__.py diff --git a/tests/pipelines/lint/__init__.py b/tests/pipelines/lint/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/lint/actions_awsfulltest.py b/tests/pipelines/lint/actions_awsfulltest.py similarity index 100% rename from tests/lint/actions_awsfulltest.py rename to tests/pipelines/lint/actions_awsfulltest.py diff --git a/tests/lint/actions_awstest.py b/tests/pipelines/lint/actions_awstest.py similarity index 100% rename from tests/lint/actions_awstest.py rename to tests/pipelines/lint/actions_awstest.py diff --git a/tests/lint/actions_ci.py b/tests/pipelines/lint/actions_ci.py similarity index 100% rename from 
tests/lint/actions_ci.py rename to tests/pipelines/lint/actions_ci.py diff --git a/tests/lint/actions_schema_validation.py b/tests/pipelines/lint/actions_schema_validation.py similarity index 100% rename from tests/lint/actions_schema_validation.py rename to tests/pipelines/lint/actions_schema_validation.py diff --git a/tests/lint/configs.py b/tests/pipelines/lint/configs.py similarity index 100% rename from tests/lint/configs.py rename to tests/pipelines/lint/configs.py diff --git a/tests/lint/files_exist.py b/tests/pipelines/lint/files_exist.py similarity index 95% rename from tests/lint/files_exist.py rename to tests/pipelines/lint/files_exist.py index 679d20987..87508e78a 100644 --- a/tests/lint/files_exist.py +++ b/tests/pipelines/lint/files_exist.py @@ -1,4 +1,3 @@ -import os from pathlib import Path import nf_core.pipelines.lint @@ -31,12 +30,12 @@ def test_files_exist_missing_main(self): assert "File not found: `main.nf`" in results["warned"] -def test_files_exist_depreciated_file(self): - """Check whether depreciated file issues warning""" +def test_files_exist_deprecated_file(self): + """Check whether deprecated file issues warning""" new_pipeline = self._make_pipeline_copy() nf = Path(new_pipeline, "parameters.settings.json") - os.system(f"touch {nf}") + nf.touch() lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) lint_obj._load() diff --git a/tests/lint/files_unchanged.py b/tests/pipelines/lint/files_unchanged.py similarity index 100% rename from tests/lint/files_unchanged.py rename to tests/pipelines/lint/files_unchanged.py diff --git a/tests/lint/merge_markers.py b/tests/pipelines/lint/merge_markers.py similarity index 100% rename from tests/lint/merge_markers.py rename to tests/pipelines/lint/merge_markers.py diff --git a/tests/lint/modules_json.py b/tests/pipelines/lint/modules_json.py similarity index 100% rename from tests/lint/modules_json.py rename to tests/pipelines/lint/modules_json.py diff --git a/tests/lint/multiqc_config.py 
b/tests/pipelines/lint/multiqc_config.py similarity index 100% rename from tests/lint/multiqc_config.py rename to tests/pipelines/lint/multiqc_config.py diff --git a/tests/lint/nextflow_config.py b/tests/pipelines/lint/nextflow_config.py similarity index 95% rename from tests/lint/nextflow_config.py rename to tests/pipelines/lint/nextflow_config.py index b90298f54..d9157c90c 100644 --- a/tests/lint/nextflow_config.py +++ b/tests/pipelines/lint/nextflow_config.py @@ -8,7 +8,7 @@ def test_nextflow_config_example_pass(self): """Tests that config variable existence test works with good pipeline example""" - self.lint_obj._load_pipeline_config() + self.lint_obj.load_pipeline_config() result = self.lint_obj.nextflow_config() assert len(result["failed"]) == 0 assert len(result["warned"]) == 0 @@ -18,7 +18,7 @@ def test_nextflow_config_bad_name_fail(self): """Tests that config variable existence test fails with bad pipeline name""" new_pipeline = self._make_pipeline_copy() lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load_pipeline_config() + lint_obj.load_pipeline_config() lint_obj.nf_config["manifest.name"] = "bad_name" result = lint_obj.nextflow_config() @@ -30,7 +30,7 @@ def test_nextflow_config_dev_in_release_mode_failed(self): """Tests that config variable existence test fails with dev version in release mode""" new_pipeline = self._make_pipeline_copy() lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load_pipeline_config() + lint_obj.load_pipeline_config() lint_obj.release_mode = True lint_obj.nf_config["manifest.version"] = "dev_is_bad_name" @@ -50,7 +50,7 @@ def test_nextflow_config_missing_test_profile_failed(self): with open(nf_conf_file, "w") as f: f.write(fail_content) lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load_pipeline_config() + lint_obj.load_pipeline_config() result = lint_obj.nextflow_config() assert len(result["failed"]) > 0 assert len(result["warned"]) == 0 @@ -60,7 
+60,7 @@ def test_default_values_match(self): """Test that the default values in nextflow.config match the default values defined in the nextflow_schema.json.""" new_pipeline = self._make_pipeline_copy() lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load_pipeline_config() + lint_obj.load_pipeline_config() result = lint_obj.nextflow_config() assert len(result["failed"]) == 0 assert len(result["warned"]) == 0 @@ -86,7 +86,7 @@ def test_default_values_fail(self): with open(nf_schema_file, "w") as f: f.write(fail_content) lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load_pipeline_config() + lint_obj.load_pipeline_config() result = lint_obj.nextflow_config() assert len(result["failed"]) == 2 assert ( @@ -107,7 +107,7 @@ def test_catch_params_assignment_in_main_nf(self): with open(main_nf_file, "a") as f: f.write("params.max_time = 42") lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load_pipeline_config() + lint_obj.load_pipeline_config() result = lint_obj.nextflow_config() assert len(result["failed"]) == 1 assert ( @@ -124,7 +124,7 @@ def test_allow_params_reference_in_main_nf(self): with open(main_nf_file, "a") as f: f.write("params.max_time == 42") lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load_pipeline_config() + lint_obj.load_pipeline_config() result = lint_obj.nextflow_config() assert len(result["failed"]) == 0 @@ -139,7 +139,7 @@ def test_default_values_ignored(self): "repository_type: pipeline\nlint:\n nextflow_config:\n - config_defaults:\n - params.max_cpus\n" ) lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load_pipeline_config() + lint_obj.load_pipeline_config() lint_obj._load_lint_config() result = lint_obj.nextflow_config() assert len(result["failed"]) == 0 @@ -173,7 +173,7 @@ def test_default_values_float(self): f.write(fail_content) lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - 
lint_obj._load_pipeline_config() + lint_obj.load_pipeline_config() result = lint_obj.nextflow_config() assert len(result["failed"]) == 0 assert len(result["warned"]) == 0 @@ -203,7 +203,7 @@ def test_default_values_float_fail(self): f.write(fail_content) lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load_pipeline_config() + lint_obj.load_pipeline_config() result = lint_obj.nextflow_config() assert len(result["failed"]) == 1 diff --git a/tests/lint/nfcore_yml.py b/tests/pipelines/lint/nfcore_yml.py similarity index 100% rename from tests/lint/nfcore_yml.py rename to tests/pipelines/lint/nfcore_yml.py diff --git a/tests/lint/template_strings.py b/tests/pipelines/lint/template_strings.py similarity index 100% rename from tests/lint/template_strings.py rename to tests/pipelines/lint/template_strings.py diff --git a/tests/lint/version_consistency.py b/tests/pipelines/lint/version_consistency.py similarity index 93% rename from tests/lint/version_consistency.py rename to tests/pipelines/lint/version_consistency.py index 1be57969f..88eadce39 100644 --- a/tests/lint/version_consistency.py +++ b/tests/pipelines/lint/version_consistency.py @@ -6,7 +6,7 @@ def test_version_consistency(self): """Tests that config variable existence test fails with bad pipeline name""" new_pipeline = self._make_pipeline_copy() lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load_pipeline_config() + lint_obj.load_pipeline_config() lint_obj.nextflow_config() result = lint_obj.version_consistency() From 11130e64d513969cfe011e8bdec4ae7da62b488b Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 16 Jul 2024 16:38:59 +0200 Subject: [PATCH 300/737] make pipeline tests a subclass of TestPipeline --- tests/pipelines/test_bump_version.py | 127 ++++++++++----------------- tests/pipelines/test_create_logo.py | 39 +++----- tests/pipelines/test_launch.py | 27 ++---- tests/test_pipelines.py | 37 ++++++++ 4 files changed, 104 insertions(+), 126 deletions(-) 
create mode 100644 tests/test_pipelines.py diff --git a/tests/pipelines/test_bump_version.py b/tests/pipelines/test_bump_version.py index 260637c06..709e82427 100644 --- a/tests/pipelines/test_bump_version.py +++ b/tests/pipelines/test_bump_version.py @@ -1,86 +1,55 @@ """Some tests covering the bump_version code.""" -import os - import yaml import nf_core.pipelines.bump_version -import nf_core.pipelines.create.create import nf_core.utils - -# pass tmp_path as argument, which is a pytest feature -# see: https://docs.pytest.org/en/latest/how-to/tmp_path.html#the-tmp-path-fixture -def test_bump_pipeline_version(datafiles, tmp_path): - """Test that making a release with the working example files works""" - - # Get a workflow and configs - test_pipeline_dir = os.path.join(tmp_path, "nf-core-testpipeline") - create_obj = nf_core.pipelines.create.create.PipelineCreate( - "testpipeline", "This is a test pipeline", "Test McTestFace", no_git=True, outdir=test_pipeline_dir - ) - create_obj.init_pipeline() - pipeline_obj = nf_core.utils.Pipeline(test_pipeline_dir) - pipeline_obj._load() - - # Bump the version number - nf_core.pipelines.bump_version.bump_pipeline_version(pipeline_obj, "1.1") - new_pipeline_obj = nf_core.utils.Pipeline(test_pipeline_dir) - - # Check nextflow.config - new_pipeline_obj._load_pipeline_config() - assert new_pipeline_obj.nf_config["manifest.version"].strip("'\"") == "1.1" - - -def test_dev_bump_pipeline_version(datafiles, tmp_path): - """Test that making a release works with a dev name and a leading v""" - # Get a workflow and configs - test_pipeline_dir = os.path.join(tmp_path, "nf-core-testpipeline") - create_obj = nf_core.pipelines.create.create.PipelineCreate( - "testpipeline", "This is a test pipeline", "Test McTestFace", no_git=True, outdir=test_pipeline_dir - ) - create_obj.init_pipeline() - pipeline_obj = nf_core.utils.Pipeline(test_pipeline_dir) - pipeline_obj._load() - - # Bump the version number - 
nf_core.pipelines.bump_version.bump_pipeline_version(pipeline_obj, "v1.2dev") - new_pipeline_obj = nf_core.utils.Pipeline(test_pipeline_dir) - - # Check the pipeline config - new_pipeline_obj._load_pipeline_config() - assert new_pipeline_obj.nf_config["manifest.version"].strip("'\"") == "1.2dev" - - -def test_bump_nextflow_version(datafiles, tmp_path): - # Get a workflow and configs - test_pipeline_dir = os.path.join(tmp_path, "nf-core-testpipeline") - create_obj = nf_core.pipelines.create.create.PipelineCreate( - "testpipeline", "This is a test pipeline", "Test McTestFace", no_git=True, outdir=test_pipeline_dir - ) - create_obj.init_pipeline() - pipeline_obj = nf_core.utils.Pipeline(test_pipeline_dir) - pipeline_obj._load() - - # Bump the version number to a specific version, preferably one - # we're not already on - version = "22.04.3" - nf_core.pipelines.bump_version.bump_nextflow_version(pipeline_obj, version) - new_pipeline_obj = nf_core.utils.Pipeline(test_pipeline_dir) - - # Check nextflow.config - new_pipeline_obj._load_pipeline_config() - assert new_pipeline_obj.nf_config["manifest.nextflowVersion"].strip("'\"") == f"!>={version}" - - # Check .github/workflows/ci.yml - with open(new_pipeline_obj._fp(".github/workflows/ci.yml")) as fh: - ci_yaml = yaml.safe_load(fh) - assert ci_yaml["jobs"]["test"]["strategy"]["matrix"]["NXF_VER"][0] == version - - # Check README.md - with open(new_pipeline_obj._fp("README.md")) as fh: - readme = fh.read().splitlines() - assert ( - f"[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A5{version}-23aa62.svg)]" - "(https://www.nextflow.io/)" in readme - ) +from ..test_pipelines import TestPipelines + + +class TestBumpVersion(TestPipelines): + def test_bump_pipeline_version(self): + """Test that making a release with the working example files works""" + + # Bump the version number + nf_core.pipelines.bump_version.bump_pipeline_version(self.pipeline_obj, "1.1") + new_pipeline_obj = 
nf_core.utils.Pipeline(self.pipeline_dir) + + # Check nextflow.config + new_pipeline_obj.load_pipeline_config() + assert new_pipeline_obj.nf_config["manifest.version"].strip("'\"") == "1.1" + + def test_dev_bump_pipeline_version(self): + """Test that making a release works with a dev name and a leading v""" + # Bump the version number + nf_core.pipelines.bump_version.bump_pipeline_version(self.pipeline_obj, "v1.2dev") + new_pipeline_obj = nf_core.utils.Pipeline(self.pipeline_dir) + + # Check the pipeline config + new_pipeline_obj.load_pipeline_config() + assert new_pipeline_obj.nf_config["manifest.version"].strip("'\"") == "1.2dev" + + def test_bump_nextflow_version(self): + # Bump the version number to a specific version, preferably one + # we're not already on + version = "22.04.3" + nf_core.pipelines.bump_version.bump_nextflow_version(self.pipeline_obj, version) + new_pipeline_obj = nf_core.utils.Pipeline(self.pipeline_dir) + new_pipeline_obj._load() + + # Check nextflow.config + assert new_pipeline_obj.nf_config["manifest.nextflowVersion"].strip("'\"") == f"!>={version}" + + # Check .github/workflows/ci.yml + with open(new_pipeline_obj._fp(".github/workflows/ci.yml")) as fh: + ci_yaml = yaml.safe_load(fh) + assert ci_yaml["jobs"]["test"]["strategy"]["matrix"]["NXF_VER"][0] == version + + # Check README.md + with open(new_pipeline_obj._fp("README.md")) as fh: + readme = fh.read().splitlines() + assert ( + f"[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A5{version}-23aa62.svg)]" + "(https://www.nextflow.io/)" in readme + ) diff --git a/tests/pipelines/test_create_logo.py b/tests/pipelines/test_create_logo.py index 8942894ce..9ff9fce56 100644 --- a/tests/pipelines/test_create_logo.py +++ b/tests/pipelines/test_create_logo.py @@ -1,35 +1,24 @@ """Test covering the create-logo command.""" -import tempfile -import unittest from pathlib import Path import nf_core.pipelines.create_logo +from ..test_pipelines import TestPipelines -class 
TestCreateLogo(unittest.TestCase): - """Class for create-logo tests""" - - # create tempdir in setup step - def setUp(self): - self.tempdir = tempfile.TemporaryDirectory() - self.tempdir_path = Path(self.tempdir.name) - - # delete tempdir in teardown step - def tearDown(self): - self.tempdir.cleanup() +class TestCreateLogo(TestPipelines): def test_create_logo_png(self): """Test that the create-logo command works for PNGs""" # Create a logo - logo_fn = nf_core.pipelines.create_logo.create_logo("pipes", self.tempdir_path) + logo_fn = nf_core.pipelines.create_logo.create_logo("pipes", self.pipeline_dir) # Check that the file exists self.assertTrue(logo_fn.is_file()) # Check that the file is a PNG self.assertTrue(logo_fn.suffix == ".png") # Check that the file is the right size - fixture_fn = Path(__file__).parent / "fixtures" / "create_logo.png" + fixture_fn = Path(__file__).parent.parent / "fixtures" / "create_logo.png" # allow some flexibility in the file size self.assertTrue(int(logo_fn.stat().st_size / 1000) == int(fixture_fn.stat().st_size / 1000)) @@ -37,13 +26,13 @@ def test_create_logo_png_dark(self): """Test that the create-logo command works for dark PNGs""" # Create a logo - logo_fn = nf_core.pipelines.create_logo.create_logo("pipes", self.tempdir_path, theme="dark") + logo_fn = nf_core.pipelines.create_logo.create_logo("pipes", self.pipeline_dir, theme="dark") # Check that the file exists self.assertTrue(logo_fn.is_file()) # Check that the file is a PNG self.assertTrue(logo_fn.suffix == ".png") # Check that the file is the right size - fixture_fn = Path(__file__).parent / "fixtures" / "create_logo_dark.png" + fixture_fn = Path(__file__).parent.parent / "fixtures" / "create_logo_dark.png" # allow some flexibility in the file size self.assertTrue(int(logo_fn.stat().st_size / 1000) == int(fixture_fn.stat().st_size / 1000)) @@ -51,13 +40,13 @@ def test_create_log_png_width(self): """Test that the create-logo command works for PNGs with a custom width""" # 
Create a logo - logo_fn = nf_core.pipelines.create_logo.create_logo("pipes", self.tempdir_path, width=100) + logo_fn = nf_core.pipelines.create_logo.create_logo("pipes", self.pipeline_dir, width=100) # Check that the file exists self.assertTrue(logo_fn.is_file()) # Check that the file is a PNG self.assertTrue(logo_fn.suffix == ".png") # Check that the file is the right size - fixture_fn = Path(__file__).parent / "fixtures" / "create_logo_width100.png" + fixture_fn = Path(__file__).parent.parent / "fixtures" / "create_logo_width100.png" # allow some flexibility in the file size self.assertTrue(int(logo_fn.stat().st_size / 100) == int(fixture_fn.stat().st_size / 100)) @@ -65,12 +54,12 @@ def test_create_logo_twice(self): """Test that the create-logo command returns an info message when run twice""" # Create a logo - logo_fn = nf_core.pipelines.create_logo.create_logo("pipes", self.tempdir_path) + logo_fn = nf_core.pipelines.create_logo.create_logo("pipes", self.pipeline_dir) # Check that the file exists self.assertTrue(logo_fn.is_file()) # Create the logo again and capture the log output with self.assertLogs(level="INFO") as log: - nf_core.pipelines.create_logo.create_logo("pipes", self.tempdir_path) + nf_core.pipelines.create_logo.create_logo("pipes", self.pipeline_dir) # Check that the log message is correct self.assertIn("Logo already exists", log.output[0]) @@ -79,14 +68,14 @@ def test_create_logo_without_text_fail(self): # Create a logo with self.assertRaises(UserWarning): - nf_core.pipelines.create_logo.create_logo("", self.tempdir_path) + nf_core.pipelines.create_logo.create_logo("", self.pipeline_dir) def test_create_logo_with_filename(self): """Test that the create-logo command works with a custom filename""" # Create a logo logo_fn = nf_core.pipelines.create_logo.create_logo( - "pipes", Path(self.tempdir_path / "custom_dir"), filename="custom" + "pipes", Path(self.pipeline_dir / "custom_dir"), filename="custom" ) # Check that the file exists 
self.assertTrue(logo_fn.is_file()) @@ -99,7 +88,7 @@ def test_create_logo_svg(self): """Test that the create-logo command works for SVGs""" # Create a logo - logo_fn = nf_core.pipelines.create_logo.create_logo("pipes", self.tempdir_path, format="svg") + logo_fn = nf_core.pipelines.create_logo.create_logo("pipes", self.pipeline_dir, format="svg") # Check that the file exists self.assertTrue(logo_fn.is_file()) # Check that the file is a SVG @@ -115,7 +104,7 @@ def test_create_logo_svg_dark(self): """Test that the create-logo command works for svgs and dark theme""" # Create a logo - logo_fn = nf_core.pipelines.create_logo.create_logo("pipes", self.tempdir_path, format="svg", theme="dark") + logo_fn = nf_core.pipelines.create_logo.create_logo("pipes", self.pipeline_dir, format="svg", theme="dark") # Check that the file exists self.assertTrue(logo_fn.is_file()) # Check that the file is a SVG diff --git a/tests/pipelines/test_launch.py b/tests/pipelines/test_launch.py index b4b285a3c..c9efa5aa6 100644 --- a/tests/pipelines/test_launch.py +++ b/tests/pipelines/test_launch.py @@ -2,36 +2,18 @@ import json import os -import shutil -from pathlib import Path -from unittest import TestCase, mock +from unittest import mock import pytest import nf_core.pipelines.create.create import nf_core.pipelines.launch -from ..utils import create_tmp_pipeline, with_temporary_file, with_temporary_folder +from ..test_pipelines import TestPipelines +from ..utils import with_temporary_file, with_temporary_folder -class TestLaunch(TestCase): - """Class for launch tests""" - - def setUp(self): - """Create a new PipelineSchema and Launch objects""" - self.tmp_dir, self.template_dir, self.pipeline_name, self.pipeline_dir = create_tmp_pipeline() - self.nf_params_fn = os.path.join(self.tmp_dir, "nf-params.json") - self.launcher = nf_core.pipelines.launch.Launch(self.pipeline_dir, params_out=self.nf_params_fn) - - def tearDown(self): - """Clean up temporary files and folders""" - - if 
Path(self.nf_params_fn).exists(): - Path(self.nf_params_fn).unlink() - - if Path(self.tmp_dir).exists(): - shutil.rmtree(self.tmp_dir) - +class TestLaunch(TestPipelines): @mock.patch.object(nf_core.pipelines.launch.Launch, "prompt_web_gui", side_effect=[True]) @mock.patch.object(nf_core.pipelines.launch.Launch, "launch_web_gui") def test_launch_pipeline(self, mock_webbrowser, mock_lauch_web_gui): @@ -43,6 +25,7 @@ def test_launch_file_exists(self, mock_confirm): """Test that we detect an existing params file and return""" # Make an empty params file to be overwritten open(self.nf_params_fn, "a").close() + # Try and to launch, return with error assert self.launcher.launch_pipeline() is False diff --git a/tests/test_pipelines.py b/tests/test_pipelines.py new file mode 100644 index 000000000..461f046f5 --- /dev/null +++ b/tests/test_pipelines.py @@ -0,0 +1,37 @@ +import shutil +from pathlib import Path +from unittest import TestCase + +from git import Repo + +import nf_core.pipelines.launch +import nf_core.pipelines.lint +from nf_core.utils import Pipeline + +from .utils import create_tmp_pipeline + + +class TestPipelines(TestCase): + def setUp(self) -> None: + """Create a new Pipeline for testing""" + self.tmp_dir, self.template_dir, self.pipeline_name, self.pipeline_dir = create_tmp_pipeline() + self.pipeline_obj = Pipeline(self.pipeline_dir) + Repo.init(self.pipeline_dir) + self.pipeline_obj._load() + + self.nf_params_fn = Path(self.pipeline_dir, "nf-params.json") + self.launcher = nf_core.pipelines.launch.Launch(self.pipeline_dir, params_out=self.nf_params_fn) + + self.lint_obj = nf_core.pipelines.lint.PipelineLint(self.pipeline_dir) + + def tearDown(self) -> None: + """Remove the test pipeline directory""" + shutil.rmtree(self.tmp_dir) + + def _make_pipeline_copy(self): + """Make a copy of the test pipeline that can be edited + + Returns: Path to new temp directory with pipeline""" + new_pipeline = self.tmp_dir / "nf-core-testpipeline-copy" + 
shutil.copytree(self.pipeline_dir, new_pipeline) + return new_pipeline From 20f0ba6ca7f30acb113e5234e60693eba89a410d Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 16 Jul 2024 16:40:14 +0200 Subject: [PATCH 301/737] move pytest.ini into pyproject.toml --- pyproject.toml | 3 +++ pytest.ini | 7 ------- 2 files changed, 3 insertions(+), 7 deletions(-) delete mode 100644 pytest.ini diff --git a/pyproject.toml b/pyproject.toml index 8168bd7c1..449932444 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,6 +5,9 @@ requires = ["setuptools>=40.6.0", "wheel"] [tool.pytest.ini_options] markers = ["datafiles: load datafiles"] testpaths = ["tests"] +python_files = ["test_*.py"] +# automatically run coroutine tests with asyncio +asyncio_mode = ["auto"] norecursedirs = [ ".*", "build", diff --git a/pytest.ini b/pytest.ini deleted file mode 100644 index fcbd03fa4..000000000 --- a/pytest.ini +++ /dev/null @@ -1,7 +0,0 @@ -[pytest] -testpaths = - tests -python_files = test_*.py - -# automatically run coroutine tests with asyncio -asyncio_mode = auto From 36050fd44b5dcfb70c6d4fe8b40cb6f45ebc5fd2 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 16 Jul 2024 16:54:32 +0200 Subject: [PATCH 302/737] migrate more tests to pathilb, use commonly shared TEST_DATA_DIR var --- nf_core/components/lint/__init__.py | 2 +- nf_core/modules/modules_json.py | 8 ++--- nf_core/pipelines/create/create.py | 6 ++-- nf_core/pipelines/download.py | 7 ++-- nf_core/pipelines/lint/__init__.py | 2 +- nf_core/pipelines/sync.py | 5 +-- nf_core/utils.py | 56 +++++++++++++++++------------ tests/pipelines/test_create.py | 4 +-- tests/pipelines/test_download.py | 12 +++---- tests/test_utils.py | 4 +-- tests/utils.py | 13 +++---- 11 files changed, 66 insertions(+), 53 deletions(-) diff --git a/nf_core/components/lint/__init__.py b/nf_core/components/lint/__init__.py index 499d31e71..6d47f1e7a 100644 --- a/nf_core/components/lint/__init__.py +++ b/nf_core/components/lint/__init__.py @@ -117,7 +117,7 @@ def 
__init__( ) for comp in self.get_local_components() ] - self.config = nf_core.utils.fetch_wf_config(self.dir, cache_config=True) + self.config = nf_core.utils.fetch_wf_config(Path(self.dir), cache_config=True) else: component_dir = Path( self.dir, diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index b0a4fa661..ca5d9de07 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -38,10 +38,10 @@ def __init__(self, pipeline_dir: str): Args: pipeline_dir (str): The pipeline directory """ - self.dir = pipeline_dir - self.modules_dir = Path(self.dir, "modules") - self.subworkflows_dir = Path(self.dir, "subworkflows") - self.modules_json_path = Path(self.dir, "modules.json") + self.dir = Path(pipeline_dir) + self.modules_dir = self.dir / "modules" + self.subworkflows_dir = self.dir / "subworkflows" + self.modules_json_path = self.dir / "modules.json" self.modules_json = None self.pipeline_modules = None self.pipeline_subworkflows = None diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index bdbbca646..e9f37306f 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -50,7 +50,7 @@ def __init__( version: str = "1.0.0dev", no_git: bool = False, force: bool = False, - outdir: Optional[str] = None, + outdir: Optional[Union[Path, str]] = None, template_config: Optional[Union[str, CreateConfig, Path]] = None, organisation: str = "nf-core", from_config_file: bool = False, @@ -61,7 +61,7 @@ def __init__( self.config = template_config elif from_config_file: # Try reading config file - _, config_yml = nf_core.utils.load_tools_config(outdir if outdir else ".") + _, config_yml = nf_core.utils.load_tools_config(str(outdir) if outdir else ".") # Obtain a CreateConfig object from `.nf-core.yml` config file if "template" in config_yml: self.config = CreateConfig(**config_yml["template"]) @@ -372,6 +372,8 @@ def render_template(self): config_fn, config_yml = 
nf_core.utils.load_tools_config(self.outdir) with open(config_fn, "w") as fh: config_yml.update(template=self.config.model_dump()) + # convert posix path to string for yaml dump + config_yml["template"]["outdir"] = str(config_yml["template"]["outdir"]) yaml.safe_dump(config_yml, fh) log.debug(f"Dumping pipeline template yml to pipeline config file '{config_fn.name}'") run_prettier_on_file(self.outdir / config_fn) diff --git a/nf_core/pipelines/download.py b/nf_core/pipelines/download.py index 43b63d819..f16430b0a 100644 --- a/nf_core/pipelines/download.py +++ b/nf_core/pipelines/download.py @@ -10,6 +10,7 @@ import tarfile import textwrap from datetime import datetime +from pathlib import Path from typing import List, Optional, Tuple from zipfile import ZipFile @@ -697,7 +698,7 @@ def wf_use_local_configs(self, revision_dirname): with open(nfconfig_fn, "w") as nfconfig_fh: nfconfig_fh.write(nfconfig) - def find_container_images(self, workflow_directory): + def find_container_images(self, workflow_directory: str) -> None: """Find container image names for workflow. 
Starts by using `nextflow config` to pull out any process.container @@ -716,7 +717,7 @@ def find_container_images(self, workflow_directory): module_findings = [] # Use linting code to parse the pipeline nextflow config - self.nf_config = nf_core.utils.fetch_wf_config(workflow_directory) + self.nf_config = nf_core.utils.fetch_wf_config(Path(workflow_directory)) # Find any config variables that look like a container for k, v in self.nf_config.items(): @@ -1007,7 +1008,7 @@ def gather_registries(self, workflow_directory: str) -> None: # should exist, because find_container_images() is always called before if not self.nf_config: - self.nf_config = nf_core.utils.fetch_wf_config(workflow_directory) + self.nf_config = nf_core.utils.fetch_wf_config(Path(workflow_directory)) # Select registries defined in pipeline config configured_registries = [ diff --git a/nf_core/pipelines/lint/__init__.py b/nf_core/pipelines/lint/__init__.py index cf5ba3913..b40f0ee29 100644 --- a/nf_core/pipelines/lint/__init__.py +++ b/nf_core/pipelines/lint/__init__.py @@ -559,7 +559,7 @@ def run_linting( # Load the various pipeline configs lint_obj._load_lint_config() - lint_obj._load_pipeline_config() + lint_obj.load_pipeline_config() lint_obj._list_files() # Create the modules lint object diff --git a/nf_core/pipelines/sync.py b/nf_core/pipelines/sync.py index f9a874c7e..a309fa8c3 100644 --- a/nf_core/pipelines/sync.py +++ b/nf_core/pipelines/sync.py @@ -5,6 +5,7 @@ import os import re import shutil +from pathlib import Path import git import questionary @@ -69,7 +70,7 @@ def __init__( ): """Initialise syncing object""" - self.pipeline_dir = os.path.abspath(pipeline_dir) + self.pipeline_dir = Path(pipeline_dir).resolve() self.from_branch = from_branch self.original_branch = None self.original_merge_branch = f"nf-core-template-merge-{nf_core.__version__}" @@ -209,7 +210,7 @@ def get_wf_config(self): # Fetch workflow variables log.debug("Fetching workflow config variables") - self.wf_config = 
nf_core.utils.fetch_wf_config(self.pipeline_dir) + self.wf_config = nf_core.utils.fetch_wf_config(Path(self.pipeline_dir)) # Check that we have the required variables for rvar in self.required_config_vars: diff --git a/nf_core/utils.py b/nf_core/utils.py index 48d1c3ca3..cf624935b 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -141,7 +141,7 @@ def __init__(self, wf_path): self.files = [] self.git_sha = None self.minNextflowVersion = None - self.wf_path = wf_path + self.wf_path = Path(wf_path) self.pipeline_name = None self.pipeline_prefix = None self.schema_obj = None @@ -156,13 +156,15 @@ def __init__(self, wf_path): if os.environ.get("GITHUB_PR_COMMIT", "") != "": self.git_sha = os.environ["GITHUB_PR_COMMIT"] - def _load(self): + def __repr__(self) -> str: + return f"" + + def _load(self) -> bool: """Run core load functions""" - self._list_files() - self._load_pipeline_config() - self._load_conda_environment() - def _list_files(self): + return self._list_files() and self.load_pipeline_config() and self._load_conda_environment() + + def _list_files(self) -> bool: """Get a list of all files in the pipeline""" try: # First, try to get the list of files using git @@ -174,18 +176,36 @@ def _list_files(self): self.files.append(full_fn) else: log.debug(f"`git ls-files` returned '{full_fn}' but could not open it!") + return True except subprocess.CalledProcessError as e: # Failed, so probably not initialised as a git repository - just a list of all files log.debug(f"Couldn't call 'git ls-files': {e}") self.files = [] for subdir, _, files in os.walk(self.wf_path): for fn in files: - self.files.append(Path(subdir) / fn) + self.files.append(Path(subdir, str(fn))) + if len(self.files) > 0: + return True + return False + + def _load_conda_environment(self) -> bool: + """Try to load the pipeline environment.yml file, if it exists""" + try: + with open(Path(self.wf_path, "environment.yml")) as fh: + self.conda_config = yaml.safe_load(fh) + return True + except 
FileNotFoundError: + log.debug("No conda `environment.yml` file found.") + return False - def _load_pipeline_config(self): + def _fp(self, fn): + """Convenience function to get full path to a file in the pipeline""" + return os.path.join(self.wf_path, fn) + + def load_pipeline_config(self) -> bool: """Get the nextflow config for this pipeline - Once loaded, set a few convienence reference class attributes + Once loaded, set a few convenience reference class attributes """ self.nf_config = fetch_wf_config(self.wf_path) @@ -194,18 +214,8 @@ def _load_pipeline_config(self): nextflow_version_match = re.search(r"[0-9\.]+(-edge)?", self.nf_config.get("manifest.nextflowVersion", "")) if nextflow_version_match: self.minNextflowVersion = nextflow_version_match.group(0) - - def _load_conda_environment(self): - """Try to load the pipeline environment.yml file, if it exists""" - try: - with open(os.path.join(self.wf_path, "environment.yml")) as fh: - self.conda_config = yaml.safe_load(fh) - except FileNotFoundError: - log.debug("No conda `environment.yml` file found.") - - def _fp(self, fn): - """Convenience function to get full path to a file in the pipeline""" - return os.path.join(self.wf_path, fn) + return True + return False def is_pipeline_directory(wf_path): @@ -229,7 +239,7 @@ def is_pipeline_directory(wf_path): raise UserWarning(warning) -def fetch_wf_config(wf_path: str, cache_config: bool = True) -> dict: +def fetch_wf_config(wf_path: Path, cache_config: bool = True) -> dict: """Uses Nextflow to retrieve the the configuration variables from a Nextflow workflow. 
@@ -263,7 +273,7 @@ def fetch_wf_config(wf_path: str, cache_config: bool = True) -> dict: concat_hash = "" for fn in ["nextflow.config", "main.nf"]: try: - with open(Path(wf_path, fn), "rb") as fh: + with open(wf_path / fn, "rb") as fh: concat_hash += hashlib.sha256(fh.read()).hexdigest() except FileNotFoundError: pass diff --git a/tests/pipelines/test_create.py b/tests/pipelines/test_create.py index d93b26bd1..bb27445e7 100644 --- a/tests/pipelines/test_create.py +++ b/tests/pipelines/test_create.py @@ -2,16 +2,14 @@ import os import unittest -from pathlib import Path import git import yaml import nf_core.pipelines.create.create -from ..utils import with_temporary_folder +from ..utils import TEST_DATA_DIR, with_temporary_folder -TEST_DATA_DIR = Path(__file__).parent / "data" PIPELINE_TEMPLATE_YML = TEST_DATA_DIR / "pipeline_create_template.yml" PIPELINE_TEMPLATE_YML_SKIP = TEST_DATA_DIR / "pipeline_create_template_skip.yml" diff --git a/tests/pipelines/test_download.py b/tests/pipelines/test_download.py index ebb76fef7..7ed6de3fa 100644 --- a/tests/pipelines/test_download.py +++ b/tests/pipelines/test_download.py @@ -18,7 +18,7 @@ from nf_core.synced_repo import SyncedRepo from nf_core.utils import run_cmd -from ..utils import with_temporary_folder +from ..utils import TEST_DATA_DIR, with_temporary_folder class DownloadTest(unittest.TestCase): @@ -139,12 +139,12 @@ def test_wf_use_local_configs(self, tmp_path): with tempfile.TemporaryDirectory() as test_outdir: download_obj = DownloadWorkflow(pipeline="dummy", revision="1.2.0", outdir=test_outdir) - shutil.copytree(test_pipeline_dir, os.path.join(test_outdir, "workflow")) + shutil.copytree(test_pipeline_dir, Path(test_outdir, "workflow")) download_obj.download_configs() # Test the function download_obj.wf_use_local_configs("workflow") - wf_config = nf_core.utils.fetch_wf_config(os.path.join(test_outdir, "workflow"), cache_config=False) + wf_config = nf_core.utils.fetch_wf_config(Path(test_outdir, "workflow"), 
cache_config=False) assert wf_config["params.custom_config_base"] == f"{test_outdir}/workflow/../configs/" # @@ -173,7 +173,7 @@ def test_find_container_images_config_basic(self, tmp_path, mock_fetch_wf_config @mock.patch("nf_core.utils.fetch_wf_config") def test__find_container_images_config_nextflow(self, tmp_path, mock_fetch_wf_config): download_obj = DownloadWorkflow(pipeline="dummy", outdir=tmp_path) - result = run_cmd("nextflow", f"config -flat {Path(__file__).resolve().parent / 'data/mock_config_containers'}") + result = run_cmd("nextflow", f"config -flat {TEST_DATA_DIR}'/mock_config_containers'") if result is not None: nfconfig_raw, _ = result config = {} @@ -203,7 +203,7 @@ def test__find_container_images_config_nextflow(self, tmp_path, mock_fetch_wf_co def test_find_container_images_modules(self, tmp_path, mock_fetch_wf_config): download_obj = DownloadWorkflow(pipeline="dummy", outdir=tmp_path) mock_fetch_wf_config.return_value = {} - download_obj.find_container_images(Path(__file__).resolve().parent / "data/mock_module_containers") + download_obj.find_container_images(TEST_DATA_DIR / "mock_module_containers") # mock_docker_single_quay_io.nf assert "quay.io/biocontainers/singlequay:1.9--pyh9f0ad1d_0" in download_obj.containers @@ -546,7 +546,7 @@ def test_remote_container_functionality(self, tmp_dir): outdir=os.path.join(tmp_dir, "new"), revision="3.9", compress_type="none", - container_cache_index=Path(__file__).resolve().parent / "data/testdata_remote_containers.txt", + container_cache_index=TEST_DATA_DIR / "data/testdata_remote_containers.txt", ) download_obj.include_configs = False # suppress prompt, because stderr.is_interactive doesn't. 
diff --git a/tests/test_utils.py b/tests/test_utils.py index 89ba0444f..7afe1a532 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -95,9 +95,9 @@ def test_rich_force_colours_true(self): os.environ.pop("PY_COLORS", None) assert nf_core.utils.rich_force_colors() is True - def test_load_pipeline_config(self): + def testload_pipeline_config(self): """Load the pipeline Nextflow config""" - self.pipeline_obj._load_pipeline_config() + self.pipeline_obj.load_pipeline_config() assert self.pipeline_obj.nf_config["dag.enabled"] == "true" # TODO nf-core: Assess and strip out if no longer required for DSL2 diff --git a/tests/utils.py b/tests/utils.py index 9a0fd0896..7be0799d6 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -3,8 +3,8 @@ """ import functools -import os import tempfile +from pathlib import Path from typing import Any, Callable, Tuple import responses @@ -12,6 +12,7 @@ import nf_core.modules import nf_core.pipelines.create.create +TEST_DATA_DIR = Path(__file__).parent / "data" OLD_TRIMGALORE_SHA = "9b7a3bdefeaad5d42324aa7dd50f87bea1b04386" OLD_TRIMGALORE_BRANCH = "mimic-old-trimgalore" GITLAB_URL = "https://gitlab.com/nf-core/modules-test.git" @@ -93,14 +94,14 @@ def mock_biocontainers_api_calls(rsps: responses.RequestsMock, module: str, vers rsps.get(biocontainers_api_url, json=biocontainers_mock, status=200) -def create_tmp_pipeline() -> Tuple[str, str, str, str]: +def create_tmp_pipeline() -> Tuple[Path, Path, str, Path]: """Create a new Pipeline for testing""" - tmp_dir = tempfile.mkdtemp() - root_repo_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) - template_dir = os.path.join(root_repo_dir, "nf_core", "pipeline-template") + tmp_dir = Path(tempfile.TemporaryDirectory().name) + root_repo_dir = Path(__file__).resolve().parent.parent + template_dir = Path(root_repo_dir, "nf_core", "pipeline-template") pipeline_name = "mypipeline" - pipeline_dir = os.path.join(tmp_dir, pipeline_name) + pipeline_dir = Path(tmp_dir, 
pipeline_name) nf_core.pipelines.create.create.PipelineCreate( pipeline_name, "it is mine", "me", no_git=True, outdir=pipeline_dir From fb274107d592b16e7634cc64db149a6b514e88db Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 16 Jul 2024 16:55:00 +0200 Subject: [PATCH 303/737] use testpipeline class in test_lint --- tests/pipelines/test_lint.py | 83 +++++++++++------------------------- 1 file changed, 26 insertions(+), 57 deletions(-) diff --git a/tests/pipelines/test_lint.py b/tests/pipelines/test_lint.py index 460c8f03d..8530fea5a 100644 --- a/tests/pipelines/test_lint.py +++ b/tests/pipelines/test_lint.py @@ -3,51 +3,20 @@ import fnmatch import json import os -import shutil -import tempfile -import unittest +from pathlib import Path import yaml import nf_core.pipelines.create.create import nf_core.pipelines.lint +from ..test_pipelines import TestPipelines from ..utils import with_temporary_folder -class TestLint(unittest.TestCase): +class TestLint(TestPipelines): """Class for lint tests""" - def setUp(self): - """Function that runs at start of tests for common resources - - Use nf_core.pipelines.create() to make a pipeline that we can use for testing - """ - - self.tmp_dir = tempfile.mkdtemp() - self.test_pipeline_dir = os.path.join(self.tmp_dir, "nf-core-testpipeline") - self.create_obj = nf_core.pipelines.create.create.PipelineCreate( - "testpipeline", "This is a test pipeline", "Test McTestFace", outdir=self.test_pipeline_dir - ) - self.create_obj.init_pipeline() - - # Base lint object on this directory - self.lint_obj = nf_core.pipelines.lint.PipelineLint(self.test_pipeline_dir) - - def tearDown(self): - """Clean up temporary files and folders""" - - if os.path.exists(self.tmp_dir): - shutil.rmtree(self.tmp_dir) - - def _make_pipeline_copy(self): - """Make a copy of the test pipeline that can be edited - - Returns: Path to new temp directory with pipeline""" - new_pipeline = os.path.join(self.tmp_dir, "nf-core-testpipeline-copy") - 
shutil.copytree(self.test_pipeline_dir, new_pipeline) - return new_pipeline - ########################## # CORE lint.py FUNCTIONS # ########################## @@ -56,7 +25,7 @@ def test_run_linting_function(self): We don't really check any of this code as it's just a series of function calls and we're testing each of those individually. This is mostly to check for syntax errors.""" - nf_core.pipelines.lint.run_linting(self.test_pipeline_dir, False) + nf_core.pipelines.lint.run_linting(self.pipeline_dir, False) def test_init_pipeline_lint(self): """Simply create a PipelineLint object. @@ -64,11 +33,11 @@ def test_init_pipeline_lint(self): This checks that all of the lint test imports are working properly, we also check that the git sha was found and that the release flag works properly """ - lint_obj = nf_core.pipelines.lint.PipelineLint(self.test_pipeline_dir, True) + lint_obj = nf_core.pipelines.lint.PipelineLint(self.pipeline_dir, True) # Tests that extra test is added for release mode assert "version_consistency" in lint_obj.lint_tests - + assert lint_obj.git_sha # Tests that parent nf_core.utils.Pipeline class __init__() is working to find git hash assert len(lint_obj.git_sha) > 0 @@ -86,7 +55,7 @@ def test_load_lint_config_ignore_all_tests(self): # Make a config file listing all test names config_dict = {"lint": {test_name: False for test_name in lint_obj.lint_tests}} - with open(os.path.join(new_pipeline, ".nf-core.yml"), "w") as fh: + with open(Path(new_pipeline, ".nf-core.yml"), "w") as fh: yaml.dump(config_dict, fh) # Load the new lint config file and check @@ -130,7 +99,7 @@ def test_json_output(self, tmp_dir): self.lint_obj.warned.append(("test_three", "This test gave a warning")) # Make a temp dir for the JSON output - json_fn = os.path.join(tmp_dir, "lint_results.json") + json_fn = Path(tmp_dir, "lint_results.json") self.lint_obj._save_json_results(json_fn) # Load created JSON file and check its contents @@ -156,7 +125,7 @@ def 
test_sphinx_md_files(self): """Check that we have .md files for all lint module code, and that there are no unexpected files (eg. deleted lint tests)""" - docs_basedir = os.path.join( + docs_basedir = Path( os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "docs", "api", "_src", "pipeline_lint_tests" ) @@ -164,12 +133,12 @@ def test_sphinx_md_files(self): existing_docs = [] for fn in os.listdir(docs_basedir): if fnmatch.fnmatch(fn, "*.md") and not fnmatch.fnmatch(fn, "index.md"): - existing_docs.append(os.path.join(docs_basedir, fn)) + existing_docs.append(Path(docs_basedir, fn)) # Check .md files against each test name lint_obj = nf_core.pipelines.lint.PipelineLint("", True) for test_name in lint_obj.lint_tests: - fn = os.path.join(docs_basedir, f"{test_name}.md") + fn = Path(docs_basedir, f"{test_name}.md") assert os.path.exists(fn), f"Could not find lint docs .md file: {fn}" existing_docs.remove(fn) @@ -179,34 +148,34 @@ def test_sphinx_md_files(self): ####################### # SPECIFIC LINT TESTS # ####################### - from ..lint.actions_awsfulltest import ( # type: ignore[misc] + from .lint.actions_awsfulltest import ( # type: ignore[misc] test_actions_awsfulltest_fail, test_actions_awsfulltest_pass, test_actions_awsfulltest_warn, ) - from ..lint.actions_awstest import ( # type: ignore[misc] + from .lint.actions_awstest import ( # type: ignore[misc] test_actions_awstest_fail, test_actions_awstest_pass, ) - from ..lint.actions_ci import ( # type: ignore[misc] + from .lint.actions_ci import ( # type: ignore[misc] test_actions_ci_fail_wrong_nf, test_actions_ci_fail_wrong_trigger, test_actions_ci_pass, ) - from ..lint.actions_schema_validation import ( # type: ignore[misc] + from .lint.actions_schema_validation import ( # type: ignore[misc] test_actions_schema_validation_fails_for_additional_property, test_actions_schema_validation_missing_jobs, test_actions_schema_validation_missing_on, ) - from ..lint.configs import ( # type: ignore[misc] + from 
.lint.configs import ( # type: ignore[misc] test_ignore_base_config, test_ignore_modules_config, test_superfluous_withname_in_base_config_fails, test_superfluous_withname_in_modules_config_fails, test_withname_in_modules_config, ) - from ..lint.files_exist import ( # type: ignore[misc] - test_files_exist_depreciated_file, + from .lint.files_exist import ( # type: ignore[misc] + test_files_exist_deprecated_file, test_files_exist_fail_conditional, test_files_exist_missing_config, test_files_exist_missing_main, @@ -214,13 +183,13 @@ def test_sphinx_md_files(self): test_files_exist_pass_conditional, test_files_exist_pass_conditional_nfschema, ) - from ..lint.files_unchanged import ( # type: ignore[misc] + from .lint.files_unchanged import ( # type: ignore[misc] test_files_unchanged_fail, test_files_unchanged_pass, ) - from ..lint.merge_markers import test_merge_markers_found # type: ignore[misc] - from ..lint.modules_json import test_modules_json_pass # type: ignore[misc] - from ..lint.multiqc_config import ( # type: ignore[misc] + from .lint.merge_markers import test_merge_markers_found # type: ignore[misc] + from .lint.modules_json import test_modules_json_pass # type: ignore[misc] + from .lint.multiqc_config import ( # type: ignore[misc] test_multiqc_config_exists, test_multiqc_config_ignore, test_multiqc_config_missing_report_section_order, @@ -229,7 +198,7 @@ def test_sphinx_md_files(self): test_multiqc_config_report_comment_release_succeed, test_multiqc_incorrect_export_plots, ) - from ..lint.nextflow_config import ( # type: ignore[misc] + from .lint.nextflow_config import ( # type: ignore[misc] test_allow_params_reference_in_main_nf, test_catch_params_assignment_in_main_nf, test_default_values_fail, @@ -242,14 +211,14 @@ def test_sphinx_md_files(self): test_nextflow_config_example_pass, test_nextflow_config_missing_test_profile_failed, ) - from ..lint.nfcore_yml import ( # type: ignore[misc] + from .lint.nfcore_yml import ( # type: ignore[misc] 
test_nfcore_yml_fail_nfcore_version, test_nfcore_yml_fail_repo_type, test_nfcore_yml_pass, ) - from ..lint.template_strings import ( # type: ignore[misc] + from .lint.template_strings import ( # type: ignore[misc] test_template_strings, test_template_strings_ignore_file, test_template_strings_ignored, ) - from ..lint.version_consistency import test_version_consistency # type: ignore[misc] + from .lint.version_consistency import test_version_consistency # type: ignore[misc] From 1d2076b511665034153d2ed3010508479f1e8e27 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 16 Jul 2024 17:56:42 +0200 Subject: [PATCH 304/737] fix tests --- nf_core/utils.py | 24 ++++++++++++------------ tests/pipelines/test_download.py | 4 ++-- tests/pipelines/test_lint.py | 4 +--- tests/pipelines/test_list.py | 28 ++++++++++++++-------------- tests/test_pipelines.py | 3 --- 5 files changed, 29 insertions(+), 34 deletions(-) diff --git a/nf_core/utils.py b/nf_core/utils.py index cf624935b..9b3775d62 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -19,7 +19,7 @@ import time from contextlib import contextmanager from pathlib import Path -from typing import Generator, Tuple, Union +from typing import Dict, Generator, List, Optional, Tuple, Union import git import prompt_toolkit @@ -135,22 +135,22 @@ class Pipeline: def __init__(self, wf_path): """Initialise pipeline object""" - self.conda_config = {} - self.conda_package_info = {} - self.nf_config = {} - self.files = [] - self.git_sha = None - self.minNextflowVersion = None + self.conda_config: Dict = {} + self.conda_package_info: Dict = {} + self.nf_config: Dict = {} + self.files: List[Path] = [] + self.git_sha: Optional[str] = None + self.minNextflowVersion: Optional[str] = None self.wf_path = Path(wf_path) - self.pipeline_name = None - self.pipeline_prefix = None - self.schema_obj = None + self.pipeline_name: Optional[str] = None + self.pipeline_prefix: Optional[str] = None + self.schema_obj: Optional[Dict] = None try: repo = 
git.Repo(self.wf_path) self.git_sha = repo.head.object.hexsha - except Exception: - log.debug(f"Could not find git hash for pipeline: {self.wf_path}") + except Exception as e: + log.debug(f"Could not find git hash for pipeline: {self.wf_path}. {e}") # Overwrite if we have the last commit from the PR - otherwise we get a merge commit hash if os.environ.get("GITHUB_PR_COMMIT", "") != "": diff --git a/tests/pipelines/test_download.py b/tests/pipelines/test_download.py index 7ed6de3fa..d571b82ce 100644 --- a/tests/pipelines/test_download.py +++ b/tests/pipelines/test_download.py @@ -203,7 +203,7 @@ def test__find_container_images_config_nextflow(self, tmp_path, mock_fetch_wf_co def test_find_container_images_modules(self, tmp_path, mock_fetch_wf_config): download_obj = DownloadWorkflow(pipeline="dummy", outdir=tmp_path) mock_fetch_wf_config.return_value = {} - download_obj.find_container_images(TEST_DATA_DIR / "mock_module_containers") + download_obj.find_container_images(str(Path(TEST_DATA_DIR, "mock_module_containers"))) # mock_docker_single_quay_io.nf assert "quay.io/biocontainers/singlequay:1.9--pyh9f0ad1d_0" in download_obj.containers @@ -546,7 +546,7 @@ def test_remote_container_functionality(self, tmp_dir): outdir=os.path.join(tmp_dir, "new"), revision="3.9", compress_type="none", - container_cache_index=TEST_DATA_DIR / "data/testdata_remote_containers.txt", + container_cache_index=str(Path(TEST_DATA_DIR, "testdata_remote_containers.txt")), ) download_obj.include_configs = False # suppress prompt, because stderr.is_interactive doesn't. diff --git a/tests/pipelines/test_lint.py b/tests/pipelines/test_lint.py index 8530fea5a..80d7e1e01 100644 --- a/tests/pipelines/test_lint.py +++ b/tests/pipelines/test_lint.py @@ -125,9 +125,7 @@ def test_sphinx_md_files(self): """Check that we have .md files for all lint module code, and that there are no unexpected files (eg. 
deleted lint tests)""" - docs_basedir = Path( - os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "docs", "api", "_src", "pipeline_lint_tests" - ) + docs_basedir = Path(Path(__file__).parent.parent.parent, "docs", "api", "_src", "pipeline_lint_tests") # Get list of existing .md files existing_docs = [] diff --git a/tests/pipelines/test_list.py b/tests/pipelines/test_list.py index 21d239287..32970ae07 100644 --- a/tests/pipelines/test_list.py +++ b/tests/pipelines/test_list.py @@ -14,15 +14,17 @@ import nf_core.pipelines.list -# create a temporary directory that can be used by the tests in this file -tmp = Path(tempfile.mkdtemp()) -tmp_nxf = tmp / "nxf" -tmp_nxf_str = str(tmp_nxf) - class TestList(unittest.TestCase): """Class for list tests""" + def setUp(self) -> None: + # create a temporary directory that can be used by the tests in this file + tmp = Path(tempfile.TemporaryDirectory().name) + self.tmp_nxf = tmp / "nxf" + self.tmp_nxf_str = str(self.tmp_nxf) + os.environ["NXF_ASSETS"] = self.tmp_nxf_str + @mock.patch("subprocess.check_output") def test_working_listcall(self, mock_subprocess): """Test that listing pipelines works""" @@ -105,28 +107,26 @@ def test_local_workflows_compare_and_fail_silently(self): rwf_ex.releases = None - @mock.patch.dict(os.environ, {"NXF_ASSETS": tmp_nxf_str}) @mock.patch("nf_core.pipelines.list.LocalWorkflow") def test_parse_local_workflow_and_succeed(self, mock_local_wf): - test_path = tmp_nxf / "nf-core" + test_path = self.tmp_nxf / "nf-core" if not os.path.isdir(test_path): os.makedirs(test_path) - assert os.environ["NXF_ASSETS"] == tmp_nxf_str - with open(tmp_nxf / "nf-core/dummy-wf", "w") as f: + assert os.environ["NXF_ASSETS"] == self.tmp_nxf_str + with open(self.tmp_nxf / "nf-core/dummy-wf", "w") as f: f.write("dummy") workflows_obj = nf_core.pipelines.list.Workflows() workflows_obj.get_local_nf_workflows() assert len(workflows_obj.local_workflows) == 1 - @mock.patch.dict(os.environ, {"NXF_ASSETS": tmp_nxf_str}) 
@mock.patch("nf_core.pipelines.list.LocalWorkflow") @mock.patch("subprocess.check_output") def test_parse_local_workflow_home(self, mock_local_wf, mock_subprocess): - test_path = tmp_nxf / "nf-core" + test_path = self.tmp_nxf / "nf-core" if not os.path.isdir(test_path): os.makedirs(test_path) - assert os.environ["NXF_ASSETS"] == tmp_nxf_str - with open(tmp_nxf / "nf-core/dummy-wf", "w") as f: + assert os.environ["NXF_ASSETS"] == self.tmp_nxf_str + with open(self.tmp_nxf / "nf-core/dummy-wf", "w") as f: f.write("dummy") workflows_obj = nf_core.pipelines.list.Workflows() workflows_obj.get_local_nf_workflows() @@ -135,7 +135,7 @@ def test_parse_local_workflow_home(self, mock_local_wf, mock_subprocess): @mock.patch("git.Repo") def test_local_workflow_investigation(self, mock_repo, mock_stat): local_wf = nf_core.pipelines.list.LocalWorkflow("dummy") - local_wf.local_path = tmp + local_wf.local_path = self.tmp_nxf.parent mock_repo.head.commit.hexsha = "h00r4y" mock_stat.st_mode = 1 local_wf.get_local_nf_workflow_details() diff --git a/tests/test_pipelines.py b/tests/test_pipelines.py index 461f046f5..899c4641e 100644 --- a/tests/test_pipelines.py +++ b/tests/test_pipelines.py @@ -2,8 +2,6 @@ from pathlib import Path from unittest import TestCase -from git import Repo - import nf_core.pipelines.launch import nf_core.pipelines.lint from nf_core.utils import Pipeline @@ -16,7 +14,6 @@ def setUp(self) -> None: """Create a new Pipeline for testing""" self.tmp_dir, self.template_dir, self.pipeline_name, self.pipeline_dir = create_tmp_pipeline() self.pipeline_obj = Pipeline(self.pipeline_dir) - Repo.init(self.pipeline_dir) self.pipeline_obj._load() self.nf_params_fn = Path(self.pipeline_dir, "nf-params.json") From 65084dd6143d3be44121689b84ec3a00fa857356 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Tue, 16 Jul 2024 18:48:35 +0000 Subject: [PATCH 305/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md 
b/CHANGELOG.md index fe7fdcc28..6bc07e2a7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,6 +13,7 @@ - Fix linting fail on nfcore_external_java_deps if nf_schema is used ([#2976](https://github.com/nf-core/tools/pull/2976)) - Conda module linting: Include package name in log file ([#3014](https://github.com/nf-core/tools/pull/3014)) +- Rrestructure pipeline tests and move pipeline linting into subfolder ([#3070](https://github.com/nf-core/tools/pull/3070)) ### Download From 45fdfb9ddd06e0b75fef592475ce166ee89894da Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 17 Jul 2024 09:57:57 +0200 Subject: [PATCH 306/737] more fine-grained setup steps --- nf_core/pipelines/create/create.py | 2 +- nf_core/utils.py | 2 +- tests/pipelines/test_launch.py | 13 +++++++++---- tests/pipelines/test_lint.py | 4 ++++ tests/pipelines/test_list.py | 5 ++--- tests/test_pipelines.py | 8 -------- 6 files changed, 17 insertions(+), 17 deletions(-) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index e9f37306f..8cfa09491 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -61,7 +61,7 @@ def __init__( self.config = template_config elif from_config_file: # Try reading config file - _, config_yml = nf_core.utils.load_tools_config(str(outdir) if outdir else ".") + _, config_yml = nf_core.utils.load_tools_config(outdir if outdir else ".") # Obtain a CreateConfig object from `.nf-core.yml` config file if "template" in config_yml: self.config = CreateConfig(**config_yml["template"]) diff --git a/nf_core/utils.py b/nf_core/utils.py index 9b3775d62..ead871aed 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -133,7 +133,7 @@ class Pipeline: schema_obj (obj): A :class:`PipelineSchema` object """ - def __init__(self, wf_path): + def __init__(self, wf_path: Path) -> None: """Initialise pipeline object""" self.conda_config: Dict = {} self.conda_package_info: Dict = {} diff --git a/tests/pipelines/test_launch.py 
b/tests/pipelines/test_launch.py index c9efa5aa6..03bc0e98b 100644 --- a/tests/pipelines/test_launch.py +++ b/tests/pipelines/test_launch.py @@ -1,7 +1,7 @@ """Tests covering the pipeline launch code.""" import json -import os +from pathlib import Path from unittest import mock import pytest @@ -14,6 +14,11 @@ class TestLaunch(TestPipelines): + def setUp(self) -> None: + super().setUp() + self.nf_params_fn = Path(self.pipeline_dir, "nf-params.json") + self.launcher = nf_core.pipelines.launch.Launch(self.pipeline_dir, params_out=self.nf_params_fn) + @mock.patch.object(nf_core.pipelines.launch.Launch, "prompt_web_gui", side_effect=[True]) @mock.patch.object(nf_core.pipelines.launch.Launch, "launch_web_gui") def test_launch_pipeline(self, mock_webbrowser, mock_lauch_web_gui): @@ -47,12 +52,12 @@ def test_get_pipeline_schema(self): @with_temporary_folder def test_make_pipeline_schema(self, tmp_path): """Create a workflow, but delete the schema file, then try to load it""" - test_pipeline_dir = os.path.join(tmp_path, "wf") + test_pipeline_dir = Path(tmp_path, "wf") create_obj = nf_core.pipelines.create.create.PipelineCreate( "testpipeline", "a description", "Me", outdir=test_pipeline_dir, no_git=True ) create_obj.init_pipeline() - os.remove(os.path.join(test_pipeline_dir, "nextflow_schema.json")) + Path(test_pipeline_dir, "nextflow_schema.json").unlink() self.launcher = nf_core.pipelines.launch.Launch(test_pipeline_dir, params_out=self.nf_params_fn) self.launcher.get_pipeline_schema() assert len(self.launcher.schema_obj.schema["definitions"]["input_output_options"]["properties"]) > 2 @@ -300,7 +305,7 @@ def test_build_command_params(self): # Check command assert ( self.launcher.nextflow_cmd - == f'nextflow run {self.pipeline_dir} -params-file "{os.path.relpath(self.nf_params_fn)}"' + == f'nextflow run {self.pipeline_dir} -params-file "{Path(self.nf_params_fn).relative_to(Path.cwd())}"' ) # Check saved parameters file with open(self.nf_params_fn) as fh: diff --git 
a/tests/pipelines/test_lint.py b/tests/pipelines/test_lint.py index 80d7e1e01..800ffa16b 100644 --- a/tests/pipelines/test_lint.py +++ b/tests/pipelines/test_lint.py @@ -17,6 +17,10 @@ class TestLint(TestPipelines): """Class for lint tests""" + def setUp(self) -> None: + super().setUp() + self.lint_obj = nf_core.pipelines.lint.PipelineLint(self.pipeline_dir) + ########################## # CORE lint.py FUNCTIONS # ########################## diff --git a/tests/pipelines/test_list.py b/tests/pipelines/test_list.py index 32970ae07..aacc3805e 100644 --- a/tests/pipelines/test_list.py +++ b/tests/pipelines/test_list.py @@ -4,10 +4,9 @@ import os import tempfile import time -import unittest from datetime import datetime from pathlib import Path -from unittest import mock +from unittest import TestCase, mock import pytest from rich.console import Console @@ -15,7 +14,7 @@ import nf_core.pipelines.list -class TestList(unittest.TestCase): +class TestList(TestCase): """Class for list tests""" def setUp(self) -> None: diff --git a/tests/test_pipelines.py b/tests/test_pipelines.py index 899c4641e..656ccbef5 100644 --- a/tests/test_pipelines.py +++ b/tests/test_pipelines.py @@ -1,9 +1,6 @@ import shutil -from pathlib import Path from unittest import TestCase -import nf_core.pipelines.launch -import nf_core.pipelines.lint from nf_core.utils import Pipeline from .utils import create_tmp_pipeline @@ -16,11 +13,6 @@ def setUp(self) -> None: self.pipeline_obj = Pipeline(self.pipeline_dir) self.pipeline_obj._load() - self.nf_params_fn = Path(self.pipeline_dir, "nf-params.json") - self.launcher = nf_core.pipelines.launch.Launch(self.pipeline_dir, params_out=self.nf_params_fn) - - self.lint_obj = nf_core.pipelines.lint.PipelineLint(self.pipeline_dir) - def tearDown(self) -> None: """Remove the test pipeline directory""" shutil.rmtree(self.tmp_dir) From 189015f1b12809bce71744801823c6fa4a956c58 Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 17 Jul 2024 10:00:53 +0200 Subject: [PATCH 
307/737] don't run list_files on every init, only needed for one linting step --- nf_core/pipelines/lint/__init__.py | 1 - nf_core/pipelines/lint/template_strings.py | 3 +- nf_core/utils.py | 53 +++++++++++----------- tests/test_utils.py | 4 +- 4 files changed, 31 insertions(+), 30 deletions(-) diff --git a/nf_core/pipelines/lint/__init__.py b/nf_core/pipelines/lint/__init__.py index b40f0ee29..d731cb018 100644 --- a/nf_core/pipelines/lint/__init__.py +++ b/nf_core/pipelines/lint/__init__.py @@ -560,7 +560,6 @@ def run_linting( # Load the various pipeline configs lint_obj._load_lint_config() lint_obj.load_pipeline_config() - lint_obj._list_files() # Create the modules lint object module_lint_obj = nf_core.modules.lint.ModuleLint(pipeline_dir, hide_progress=hide_progress) diff --git a/nf_core/pipelines/lint/template_strings.py b/nf_core/pipelines/lint/template_strings.py index 9b015bc20..90c47203f 100644 --- a/nf_core/pipelines/lint/template_strings.py +++ b/nf_core/pipelines/lint/template_strings.py @@ -39,10 +39,11 @@ def template_strings(self): ignored = [] # Files that should be ignored according to the linting config ignore_files = self.lint_config.get("template_strings", []) + files = self.list_files() # Loop through files, searching for string num_matches = 0 - for fn in self.files: + for fn in files: if str(fn.relative_to(self.wf_path)) in ignore_files: ignored.append(f"Ignoring Jinja template strings in file `{fn}`") continue diff --git a/nf_core/utils.py b/nf_core/utils.py index ead871aed..b5719a2b1 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -162,31 +162,7 @@ def __repr__(self) -> str: def _load(self) -> bool: """Run core load functions""" - return self._list_files() and self.load_pipeline_config() and self._load_conda_environment() - - def _list_files(self) -> bool: - """Get a list of all files in the pipeline""" - try: - # First, try to get the list of files using git - git_ls_files = subprocess.check_output(["git", "ls-files"], 
cwd=self.wf_path).splitlines() - self.files = [] - for fn in git_ls_files: - full_fn = Path(self.wf_path) / fn.decode("utf-8") - if full_fn.is_file(): - self.files.append(full_fn) - else: - log.debug(f"`git ls-files` returned '{full_fn}' but could not open it!") - return True - except subprocess.CalledProcessError as e: - # Failed, so probably not initialised as a git repository - just a list of all files - log.debug(f"Couldn't call 'git ls-files': {e}") - self.files = [] - for subdir, _, files in os.walk(self.wf_path): - for fn in files: - self.files.append(Path(subdir, str(fn))) - if len(self.files) > 0: - return True - return False + return self.load_pipeline_config() and self._load_conda_environment() def _load_conda_environment(self) -> bool: """Try to load the pipeline environment.yml file, if it exists""" @@ -202,6 +178,31 @@ def _fp(self, fn): """Convenience function to get full path to a file in the pipeline""" return os.path.join(self.wf_path, fn) + def list_files(self) -> List[Path]: + """Get a list of all files in the pipeline""" + files = [] + try: + # First, try to get the list of files using git + git_ls_files = subprocess.check_output(["git", "ls-files"], cwd=self.wf_path).splitlines() + for fn in git_ls_files: + full_fn = Path(self.wf_path) / fn.decode("utf-8") + if full_fn.is_file(): + files.append(full_fn) + else: + log.debug(f"`git ls-files` returned '{full_fn}' but could not open it!") + except subprocess.CalledProcessError as e: + # Failed, so probably not initialised as a git repository - just a list of all files + log.debug(f"Couldn't call 'git ls-files': {e}") + files = [] + for file_path in self.wf_path.rglob("*"): + if file_path.is_file(): + # Append the file path to the list + files.append(file_path.relative_to(self.wf_path)) + if len(files) == 0: + log.debug(f"No files found in pipeline: {self.wf_path}") + + return files + def load_pipeline_config(self) -> bool: """Get the nextflow config for this pipeline @@ -1082,7 +1083,7 @@ def 
determine_base_dir(directory="."): return directory if (base_dir == start_dir or str(base_dir) == base_dir.root) else base_dir -def get_first_available_path(directory, paths): +def get_first_available_path(directory: Union[Path, str], paths: List[str]) -> Union[Path, None]: for p in paths: if Path(directory, p).is_file(): return Path(directory, p) diff --git a/tests/test_utils.py b/tests/test_utils.py index 7afe1a532..f61f3584e 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -109,7 +109,7 @@ def testload_pipeline_config(self): def test_list_files_git(self): """Test listing pipeline files using `git ls`""" - self.pipeline_obj._list_files() + self.pipeline_obj.list_files() assert Path(self.test_pipeline_dir, "main.nf") in self.pipeline_obj.files @with_temporary_folder @@ -119,7 +119,7 @@ def test_list_files_no_git(self, tmpdir): tmp_fn = Path(tmpdir, "testfile") tmp_fn.touch() pipeline_obj = nf_core.utils.Pipeline(tmpdir) - pipeline_obj._list_files() + pipeline_obj.list_files() assert tmp_fn in pipeline_obj.files @mock.patch("os.path.exists") From 2ddc860183729642bd69169b3d948fa3ec273d46 Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 17 Jul 2024 10:01:29 +0200 Subject: [PATCH 308/737] create git repo with testpipeline --- tests/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/utils.py b/tests/utils.py index 38a10fa81..ef8f33466 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -105,7 +105,7 @@ def create_tmp_pipeline() -> Tuple[Path, Path, str, Path]: pipeline_dir = tmp_dir / pipeline_name nf_core.pipelines.create.create.PipelineCreate( - pipeline_name, "it is mine", "me", no_git=True, outdir=str(pipeline_dir) + pipeline_name, "it is mine", "me", no_git=False, outdir=str(pipeline_dir) ).init_pipeline() # return values to instance variables for later use in test methods From c313fed6a08c0e157a93a19a14d4d77946ad6d50 Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 17 Jul 2024 10:25:45 +0200 Subject: [PATCH 309/737] 
update textual snapshots to new location --- .editorconfig | 2 +- .../__snapshots__/test_create_app.ambr | 0 tests/pipelines/test_create_app.py | 26 ++++++++++--------- 3 files changed, 15 insertions(+), 13 deletions(-) rename tests/{ => pipelines}/__snapshots__/test_create_app.ambr (100%) diff --git a/.editorconfig b/.editorconfig index 5aa8697d3..f266805d6 100644 --- a/.editorconfig +++ b/.editorconfig @@ -21,7 +21,7 @@ indent_style = unset [**/Makefile] indent_style = unset -[tests/__snapshots__/*] +[tests/pipelines/__snapshots__/*] charset = unset end_of_line = unset insert_final_newline = unset diff --git a/tests/__snapshots__/test_create_app.ambr b/tests/pipelines/__snapshots__/test_create_app.ambr similarity index 100% rename from tests/__snapshots__/test_create_app.ambr rename to tests/pipelines/__snapshots__/test_create_app.ambr diff --git a/tests/pipelines/test_create_app.py b/tests/pipelines/test_create_app.py index 8c89b92cb..9a02f04f0 100644 --- a/tests/pipelines/test_create_app.py +++ b/tests/pipelines/test_create_app.py @@ -4,6 +4,8 @@ from nf_core.pipelines.create import PipelineCreateApp +INIT_FILE = "../../nf_core/pipelines/create/__init__.py" + async def test_app_bindings(): """Test that the app bindings work.""" @@ -23,7 +25,7 @@ async def test_app_bindings(): def test_welcome(snap_compare): """Test snapshot for the first screen in the app. 
The welcome screen.""" - assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50)) + assert snap_compare(INIT_FILE, terminal_size=(100, 50)) def test_choose_type(snap_compare): @@ -36,7 +38,7 @@ def test_choose_type(snap_compare): async def run_before(pilot) -> None: await pilot.click("#start") - assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) + assert snap_compare(INIT_FILE, terminal_size=(100, 50), run_before=run_before) def test_basic_details_nfcore(snap_compare): @@ -51,7 +53,7 @@ async def run_before(pilot) -> None: await pilot.click("#start") await pilot.click("#type_nfcore") - assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) + assert snap_compare(INIT_FILE, terminal_size=(100, 50), run_before=run_before) def test_basic_details_custom(snap_compare): @@ -66,7 +68,7 @@ async def run_before(pilot) -> None: await pilot.click("#start") await pilot.click("#type_custom") - assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) + assert snap_compare(INIT_FILE, terminal_size=(100, 50), run_before=run_before) def test_type_nfcore(snap_compare): @@ -89,7 +91,7 @@ async def run_before(pilot) -> None: await pilot.press("M", "e") await pilot.click("#next") - assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) + assert snap_compare(INIT_FILE, terminal_size=(100, 50), run_before=run_before) def test_type_nfcore_validation(snap_compare): @@ -108,7 +110,7 @@ async def run_before(pilot) -> None: await pilot.click("#next") await pilot.pause() - assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) + assert snap_compare(INIT_FILE, terminal_size=(100, 50), run_before=run_before) def test_type_custom(snap_compare): @@ -132,7 +134,7 @@ async def 
run_before(pilot) -> None: await pilot.press("M", "e") await pilot.click("#next") - assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) + assert snap_compare(INIT_FILE, terminal_size=(100, 50), run_before=run_before) def test_final_details(snap_compare): @@ -157,7 +159,7 @@ async def run_before(pilot) -> None: await pilot.click("#next") await pilot.click("#continue") - assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) + assert snap_compare(INIT_FILE, terminal_size=(100, 50), run_before=run_before) def test_customisation_help(snap_compare): @@ -184,7 +186,7 @@ async def run_before(pilot) -> None: await pilot.press("tab") await pilot.press("enter") - assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) + assert snap_compare(INIT_FILE, terminal_size=(100, 50), run_before=run_before) def test_github_question(tmpdir, snap_compare): @@ -216,7 +218,7 @@ async def run_before(pilot) -> None: await pilot.app.workers.wait_for_complete() await pilot.click("#close_screen") - assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) + assert snap_compare(INIT_FILE, terminal_size=(100, 50), run_before=run_before) @mock.patch("nf_core.pipelines.create.githubrepo.GithubRepo._get_github_credentials") @@ -255,7 +257,7 @@ async def run_before(pilot) -> None: await pilot.click("#close_screen") await pilot.click("#github_repo") - assert snap_compare("../nf_core/pipelines/create/__init__.py", terminal_size=(100, 50), run_before=run_before) + assert snap_compare(INIT_FILE, terminal_size=(100, 50), run_before=run_before) def test_github_exit_message(tmpdir, snap_compare): @@ -291,4 +293,4 @@ async def run_before(pilot) -> None: await pilot.click("#github_repo") await pilot.click("#exit") - assert snap_compare("../nf_core/pipelines/create/__init__.py", 
terminal_size=(100, 50), run_before=run_before) + assert snap_compare(INIT_FILE, terminal_size=(100, 50), run_before=run_before) From d6560656873b8bc448cde44975c78fb0abc39e02 Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 17 Jul 2024 11:19:49 +0200 Subject: [PATCH 310/737] fix tests --- nf_core/pipelines/launch.py | 5 +++-- nf_core/utils.py | 2 +- tests/pipelines/test_launch.py | 5 +---- tests/test_utils.py | 8 ++++---- tests/utils.py | 2 +- 5 files changed, 10 insertions(+), 12 deletions(-) diff --git a/nf_core/pipelines/launch.py b/nf_core/pipelines/launch.py index 3a5f97e78..e03982a25 100644 --- a/nf_core/pipelines/launch.py +++ b/nf_core/pipelines/launch.py @@ -7,6 +7,7 @@ import re import subprocess import webbrowser +from pathlib import Path import questionary from rich.console import Console @@ -46,7 +47,7 @@ def __init__( self.schema_obj = None self.use_params_file = False if command_only else True self.params_in = params_in - self.params_out = params_out if params_out else os.path.join(os.getcwd(), "nf-params.json") + self.params_out = params_out if params_out else Path.cwd() / "nf-params.json" self.save_all = save_all self.show_hidden = show_hidden self.web_schema_launch_url = url if url else "https://nf-co.re/launch" @@ -697,7 +698,7 @@ def build_command(self): # Write the user selection to a file and run nextflow with that if self.use_params_file: dump_json_with_prettier(self.params_out, self.schema_obj.input_params) - self.nextflow_cmd += f' -params-file "{os.path.relpath(self.params_out)}"' + self.nextflow_cmd += f' -params-file "{Path(self.params_out)}"' # Call nextflow with a list of command line flags else: diff --git a/nf_core/utils.py b/nf_core/utils.py index b5719a2b1..0cd812cb0 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -197,7 +197,7 @@ def list_files(self) -> List[Path]: for file_path in self.wf_path.rglob("*"): if file_path.is_file(): # Append the file path to the list - files.append(file_path.relative_to(self.wf_path)) + 
files.append(file_path) if len(files) == 0: log.debug(f"No files found in pipeline: {self.wf_path}") diff --git a/tests/pipelines/test_launch.py b/tests/pipelines/test_launch.py index 03bc0e98b..da7618d48 100644 --- a/tests/pipelines/test_launch.py +++ b/tests/pipelines/test_launch.py @@ -303,10 +303,7 @@ def test_build_command_params(self): self.launcher.schema_obj.input_params.update({"input": "custom_input"}) self.launcher.build_command() # Check command - assert ( - self.launcher.nextflow_cmd - == f'nextflow run {self.pipeline_dir} -params-file "{Path(self.nf_params_fn).relative_to(Path.cwd())}"' - ) + assert self.launcher.nextflow_cmd == f'nextflow run {self.pipeline_dir} -params-file "{self.nf_params_fn}"' # Check saved parameters file with open(self.nf_params_fn) as fh: try: diff --git a/tests/test_utils.py b/tests/test_utils.py index f61f3584e..860cba5ba 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -109,8 +109,8 @@ def testload_pipeline_config(self): def test_list_files_git(self): """Test listing pipeline files using `git ls`""" - self.pipeline_obj.list_files() - assert Path(self.test_pipeline_dir, "main.nf") in self.pipeline_obj.files + files = self.pipeline_obj.list_files() + assert Path(self.test_pipeline_dir, "main.nf") in files @with_temporary_folder def test_list_files_no_git(self, tmpdir): @@ -119,8 +119,8 @@ def test_list_files_no_git(self, tmpdir): tmp_fn = Path(tmpdir, "testfile") tmp_fn.touch() pipeline_obj = nf_core.utils.Pipeline(tmpdir) - pipeline_obj.list_files() - assert tmp_fn in pipeline_obj.files + files = pipeline_obj.list_files() + assert tmp_fn in files @mock.patch("os.path.exists") @mock.patch("os.makedirs") diff --git a/tests/utils.py b/tests/utils.py index ef8f33466..90c4ae041 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -105,7 +105,7 @@ def create_tmp_pipeline() -> Tuple[Path, Path, str, Path]: pipeline_dir = tmp_dir / pipeline_name nf_core.pipelines.create.create.PipelineCreate( - pipeline_name, "it is 
mine", "me", no_git=False, outdir=str(pipeline_dir) + pipeline_name, "it is mine", "me", no_git=False, outdir=pipeline_dir ).init_pipeline() # return values to instance variables for later use in test methods From 21cdbbdd2871fe74d337ee755e83680c2ad820f6 Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 17 Jul 2024 12:07:12 +0200 Subject: [PATCH 311/737] remove asyncio mode (and see what happens) --- pyproject.toml | 2 -- 1 file changed, 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 449932444..775f04c9a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,8 +6,6 @@ requires = ["setuptools>=40.6.0", "wheel"] markers = ["datafiles: load datafiles"] testpaths = ["tests"] python_files = ["test_*.py"] -# automatically run coroutine tests with asyncio -asyncio_mode = ["auto"] norecursedirs = [ ".*", "build", From e0a82d51bf231dfc04629bf32105cacc10c8556c Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 17 Jul 2024 12:08:14 +0200 Subject: [PATCH 312/737] fix refgenie tests --- nf_core/pipelines/create/create.py | 9 ++++++--- nf_core/pipelines/create_logo.py | 1 - nf_core/pipelines/lint/files_exist.py | 2 +- nf_core/pipelines/refgenie.py | 2 +- requirements.txt | 4 ++-- tests/pipelines/lint/files_exist.py | 2 +- tests/pipelines/test_refgenie.py | 20 ++++++++++---------- 7 files changed, 21 insertions(+), 19 deletions(-) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 8cfa09491..c5af95669 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -355,7 +355,6 @@ def render_template(self): # Remove all unused parameters in the nextflow schema if not self.jinja_params["igenomes"] or not self.jinja_params["nf_core_configs"]: self.update_nextflow_schema() - if self.config.is_nfcore: # Make a logo and save it, if it is a nf-core pipeline self.make_pipeline_logo() @@ -513,11 +512,15 @@ def fix_linting(self): def make_pipeline_logo(self): """Fetch a logo for the new pipeline from the 
nf-core website""" email_logo_path = Path(self.outdir) / "assets" - create_logo(text=self.jinja_params["short_name"], dir=email_logo_path, theme="light", force=self.force) + create_logo(text=self.jinja_params["short_name"], dir=email_logo_path, theme="light", force=bool(self.force)) for theme in ["dark", "light"]: readme_logo_path = Path(self.outdir) / "docs" / "images" create_logo( - text=self.jinja_params["short_name"], dir=readme_logo_path, width=600, theme=theme, force=self.force + text=self.jinja_params["short_name"], + dir=readme_logo_path, + width=600, + theme=theme, + force=bool(self.force), ) def git_init_pipeline(self): diff --git a/nf_core/pipelines/create_logo.py b/nf_core/pipelines/create_logo.py index 1e96b7032..780bafd7f 100644 --- a/nf_core/pipelines/create_logo.py +++ b/nf_core/pipelines/create_logo.py @@ -20,7 +20,6 @@ def create_logo( force: bool = False, ) -> Path: """Create a logo for a pipeline.""" - if not text: raise UserWarning("Please provide the name of the text to put on the logo.") dir = Path(dir) diff --git a/nf_core/pipelines/lint/files_exist.py b/nf_core/pipelines/lint/files_exist.py index c6f622b3e..edad62aab 100644 --- a/nf_core/pipelines/lint/files_exist.py +++ b/nf_core/pipelines/lint/files_exist.py @@ -5,7 +5,7 @@ log = logging.getLogger(__name__) -def files_exist(self) -> Dict[str, Union[List[str], bool]]: +def files_exist(self) -> Dict[str, List[str]]: """Checks a given pipeline directory for required files. 
Iterates through the pipeline's directory content and checks that specified diff --git a/nf_core/pipelines/refgenie.py b/nf_core/pipelines/refgenie.py index de9201bcd..19ef4b512 100644 --- a/nf_core/pipelines/refgenie.py +++ b/nf_core/pipelines/refgenie.py @@ -183,7 +183,7 @@ def update_config(rgc): # Save the updated genome config try: - with open(refgenie_genomes_config_file, "w+") as fh: + with open(str(refgenie_genomes_config_file), "w+") as fh: fh.write(refgenie_genomes) log.info(f"Updated nf-core genomes config: {refgenie_genomes_config_file}") except FileNotFoundError: diff --git a/requirements.txt b/requirements.txt index 524b739e8..fb658be2f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -9,10 +9,10 @@ packaging pillow pdiff pre-commit -prompt_toolkit>=3.0.3 +prompt_toolkit<=3.0.36 pydantic>=2.2.1 pyyaml -questionary>=1.8.0 +questionary>=2.0.1 refgenie requests requests_cache diff --git a/tests/pipelines/lint/files_exist.py b/tests/pipelines/lint/files_exist.py index 87508e78a..4ae167b1d 100644 --- a/tests/pipelines/lint/files_exist.py +++ b/tests/pipelines/lint/files_exist.py @@ -14,7 +14,7 @@ def test_files_exist_missing_config(self): lint_obj.nf_config["manifest.name"] = "nf-core/testpipeline" results = lint_obj.files_exist() - assert results["failed"] == ["File not found: `CHANGELOG.md`"] + assert "File not found: `CHANGELOG.md`" in results["failed"] def test_files_exist_missing_main(self): diff --git a/tests/pipelines/test_refgenie.py b/tests/pipelines/test_refgenie.py index 23cc0dd14..734a2368b 100644 --- a/tests/pipelines/test_refgenie.py +++ b/tests/pipelines/test_refgenie.py @@ -5,6 +5,7 @@ import subprocess import tempfile import unittest +from pathlib import Path class TestRefgenie(unittest.TestCase): @@ -14,36 +15,35 @@ def setUp(self): """ Prepare a refgenie config file """ - self.tmp_dir = tempfile.mkdtemp() - self.NXF_HOME = os.path.join(self.tmp_dir, ".nextflow") - self.NXF_REFGENIE_PATH = os.path.join(self.NXF_HOME, "nf-core", 
"refgenie_genomes.config") - self.REFGENIE = os.path.join(self.tmp_dir, "genomes_config.yaml") - self.translation_file = os.path.join(self.tmp_dir, "alias_translations.yaml") + self.tmp_dir = Path(tempfile.TemporaryDirectory().name) + self.NXF_HOME = self.tmp_dir / ".nextflow" + self.NXF_REFGENIE_PATH = self.NXF_HOME / "nf-core" / "refgenie_genomes.config" + self.REFGENIE = self.tmp_dir / "genomes_config.yaml" + self.translation_file = self.tmp_dir / "alias_translations.yaml" # Set NXF_HOME environment variable # avoids adding includeConfig statement to config file outside the current tmpdir try: self.NXF_HOME_ORIGINAL = os.environ["NXF_HOME"] except Exception: self.NXF_HOME_ORIGINAL = None - os.environ["NXF_HOME"] = self.NXF_HOME + os.environ["NXF_HOME"] = str(self.NXF_HOME) # create NXF_HOME and nf-core directories - os.makedirs(os.path.join(self.NXF_HOME, "nf-core"), exist_ok=True) + nf_core_dir = self.NXF_HOME / "nf-core" + nf_core_dir.mkdir(parents=True, exist_ok=True) # Initialize a refgenie config os.system(f"refgenie init -c {self.REFGENIE}") # Add NXF_REFGENIE_PATH to refgenie config with open(self.REFGENIE, "a") as fh: - fh.write(f"nextflow_config: {os.path.join(self.NXF_REFGENIE_PATH)}\n") + fh.write(f"nextflow_config: {self.NXF_REFGENIE_PATH}\n") # Add an alias translation to YAML file with open(self.translation_file, "a") as fh: fh.write("ensembl_gtf: gtf\n") def tearDown(self) -> None: - # Remove the tempdir again - os.system(f"rm -rf {self.tmp_dir}") # Reset NXF_HOME environment variable if self.NXF_HOME_ORIGINAL is None: del os.environ["NXF_HOME"] From 1fde6ca6e70866d0a8b7a7a79ade75afefcf36bb Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 17 Jul 2024 13:12:43 +0200 Subject: [PATCH 313/737] add cleanup step to some download tests --- nf_core/pipelines/download.py | 2 +- tests/pipelines/test_download.py | 16 ++++++++++++++++ 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/nf_core/pipelines/download.py b/nf_core/pipelines/download.py 
index f16430b0a..797909636 100644 --- a/nf_core/pipelines/download.py +++ b/nf_core/pipelines/download.py @@ -231,7 +231,7 @@ def download_workflow(self): summary_log.append(f"Enabled for Seqera Platform: '{self.platform}'") # Check that the outdir doesn't already exist - if os.path.exists(self.outdir): + if self.outdir is not None and os.path.exists(self.outdir): if not self.force: raise DownloadError( f"Output directory '{self.outdir}' already exists (use [red]--force[/] to overwrite)" diff --git a/tests/pipelines/test_download.py b/tests/pipelines/test_download.py index d571b82ce..a898d37b7 100644 --- a/tests/pipelines/test_download.py +++ b/tests/pipelines/test_download.py @@ -13,6 +13,7 @@ import pytest import nf_core.pipelines.create.create +import nf_core.pipelines.list import nf_core.utils from nf_core.pipelines.download import ContainerError, DownloadWorkflow, WorkflowRepo from nf_core.synced_repo import SyncedRepo @@ -643,6 +644,11 @@ def test_download_workflow_for_platform(self, tmp_dir, _): in download_obj.containers ) # indirect definition via $container variable. 
+ # clean-up + # remove "nf-core-rnaseq*" directories + for path in Path().cwd().glob("nf-core-rnaseq*"): + shutil.rmtree(path) + # # Brief test adding a single custom tag to Seqera Platform download # @@ -659,6 +665,11 @@ def test_download_workflow_for_platform_with_one_custom_tag(self, _, tmp_dir): ) assert isinstance(download_obj.additional_tags, list) and len(download_obj.additional_tags) == 1 + # clean-up + # remove "nf-core-rnaseq*" directories + for path in Path().cwd().glob("nf-core-rnaseq*"): + shutil.rmtree(path) + # # Test adding custom tags to Seqera Platform download (full test) # @@ -727,3 +738,8 @@ def test_download_workflow_for_platform_with_custom_tags(self, _, tmp_dir): "[red]Could not apply invalid `--tag` specification[/]: 'What is this?'", } ) + + # clean-up + # remove "nf-core-rnaseq*" directories + for path in Path().cwd().glob("nf-core-rnaseq*"): + shutil.rmtree(path) From a0f788dc1d7a8059e8077ca3413397f9f38cc3be Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 17 Jul 2024 13:40:39 +0200 Subject: [PATCH 314/737] start converting linting tests to new subclass structure --- nf_core/pipelines/create_logo.py | 13 ++-- tests/pipelines/lint/actions_awsfulltest.py | 60 ------------------ .../lint/test_actions_awsfulltest.py | 61 +++++++++++++++++++ tests/pipelines/test_lint.py | 20 +++--- 4 files changed, 77 insertions(+), 77 deletions(-) delete mode 100644 tests/pipelines/lint/actions_awsfulltest.py create mode 100644 tests/pipelines/lint/test_actions_awsfulltest.py diff --git a/nf_core/pipelines/create_logo.py b/nf_core/pipelines/create_logo.py index 780bafd7f..0643d2e29 100644 --- a/nf_core/pipelines/create_logo.py +++ b/nf_core/pipelines/create_logo.py @@ -90,11 +90,14 @@ def create_logo( color = theme == "dark" and (250, 250, 250) or (5, 5, 5) draw.text((110, 465), text, color, font=font) - # Crop to max width - img = img.crop((0, 0, max_width, height)) - - # Resize - img = img.resize((width, int((width / max_width) * height))) + if img is 
not None: + # Crop to max width + img = img.crop((0, 0, max_width, height)) + + # Resize + img = img.resize((width, int((width / max_width) * height))) + else: + log.error("Failed to create logo, no image object created.") # Save to cache Path(cache_path.parent).mkdir(parents=True, exist_ok=True) diff --git a/tests/pipelines/lint/actions_awsfulltest.py b/tests/pipelines/lint/actions_awsfulltest.py deleted file mode 100644 index d1479bb1e..000000000 --- a/tests/pipelines/lint/actions_awsfulltest.py +++ /dev/null @@ -1,60 +0,0 @@ -from pathlib import Path - -import yaml - -import nf_core.pipelines.lint - - -def test_actions_awsfulltest_warn(self): - """Lint test: actions_awsfulltest - PASS""" - self.lint_obj._load() - results = self.lint_obj.actions_awsfulltest() - assert "`.github/workflows/awsfulltest.yml` is triggered correctly" in results["passed"] - assert len(results.get("failed", [])) == 0 - assert len(results.get("ignored", [])) == 0 - - -def test_actions_awsfulltest_pass(self): - """Lint test: actions_awsfulltest - WARN""" - - # Edit .github/workflows/awsfulltest.yml to use -profile test_full - new_pipeline = self._make_pipeline_copy() - with open(Path(new_pipeline, ".github", "workflows", "awsfulltest.yml")) as fh: - awsfulltest_yml = fh.read() - awsfulltest_yml = awsfulltest_yml.replace("-profile test ", "-profile test_full ") - with open(Path(new_pipeline, ".github", "workflows", "awsfulltest.yml"), "w") as fh: - fh.write(awsfulltest_yml) - - # Make lint object - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - - results = lint_obj.actions_awsfulltest() - assert results["passed"] == [ - "`.github/workflows/awsfulltest.yml` is triggered correctly", - "`.github/workflows/awsfulltest.yml` does not use `-profile test`", - ] - assert len(results.get("warned", [])) == 0 - assert len(results.get("failed", [])) == 0 - assert len(results.get("ignored", [])) == 0 - - -def test_actions_awsfulltest_fail(self): - """Lint test: 
actions_awsfulltest - FAIL""" - - # Edit .github/workflows/awsfulltest.yml to use -profile test_full - new_pipeline = self._make_pipeline_copy() - with open(Path(new_pipeline, ".github", "workflows", "awsfulltest.yml")) as fh: - awsfulltest_yml = yaml.safe_load(fh) - del awsfulltest_yml[True]["pull_request_review"] - with open(Path(new_pipeline, ".github", "workflows", "awsfulltest.yml"), "w") as fh: - yaml.dump(awsfulltest_yml, fh) - - # Make lint object - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - - results = lint_obj.actions_awsfulltest() - assert results["failed"] == ["`.github/workflows/awsfulltest.yml` is not triggered correctly"] - assert "`.github/workflows/awsfulltest.yml` does not use `-profile test`" in results["passed"] - assert len(results.get("ignored", [])) == 0 diff --git a/tests/pipelines/lint/test_actions_awsfulltest.py b/tests/pipelines/lint/test_actions_awsfulltest.py new file mode 100644 index 000000000..5c070fd5c --- /dev/null +++ b/tests/pipelines/lint/test_actions_awsfulltest.py @@ -0,0 +1,61 @@ +from pathlib import Path + +import yaml + +import nf_core.pipelines.lint + +from ..test_lint import TestLint + + +class TestLintActionsAwsfulltest(TestLint): + def test_actions_awsfulltest_warn(self): + """Lint test: actions_awsfulltest - PASS""" + self.lint_obj._load() + results = self.lint_obj.actions_awsfulltest() + assert "`.github/workflows/awsfulltest.yml` is triggered correctly" in results["passed"] + assert len(results.get("failed", [])) == 0 + assert len(results.get("ignored", [])) == 0 + + def test_actions_awsfulltest_pass(self): + """Lint test: actions_awsfulltest - WARN""" + + # Edit .github/workflows/awsfulltest.yml to use -profile test_full + new_pipeline = self._make_pipeline_copy() + with open(Path(new_pipeline, ".github", "workflows", "awsfulltest.yml")) as fh: + awsfulltest_yml = fh.read() + awsfulltest_yml = awsfulltest_yml.replace("-profile test ", "-profile test_full ") + with 
open(Path(new_pipeline, ".github", "workflows", "awsfulltest.yml"), "w") as fh: + fh.write(awsfulltest_yml) + + # Make lint object + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) + lint_obj._load() + + results = lint_obj.actions_awsfulltest() + assert results["passed"] == [ + "`.github/workflows/awsfulltest.yml` is triggered correctly", + "`.github/workflows/awsfulltest.yml` does not use `-profile test`", + ] + assert len(results.get("warned", [])) == 0 + assert len(results.get("failed", [])) == 0 + assert len(results.get("ignored", [])) == 0 + + def test_actions_awsfulltest_fail(self): + """Lint test: actions_awsfulltest - FAIL""" + + # Edit .github/workflows/awsfulltest.yml to use -profile test_full + new_pipeline = self._make_pipeline_copy() + with open(Path(new_pipeline, ".github", "workflows", "awsfulltest.yml")) as fh: + awsfulltest_yml = yaml.safe_load(fh) + del awsfulltest_yml[True]["pull_request_review"] + with open(Path(new_pipeline, ".github", "workflows", "awsfulltest.yml"), "w") as fh: + yaml.dump(awsfulltest_yml, fh) + + # Make lint object + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) + lint_obj._load() + + results = lint_obj.actions_awsfulltest() + assert results["failed"] == ["`.github/workflows/awsfulltest.yml` is not triggered correctly"] + assert "`.github/workflows/awsfulltest.yml` does not use `-profile test`" in results["passed"] + assert len(results.get("ignored", [])) == 0 diff --git a/tests/pipelines/test_lint.py b/tests/pipelines/test_lint.py index 800ffa16b..54279cd06 100644 --- a/tests/pipelines/test_lint.py +++ b/tests/pipelines/test_lint.py @@ -1,8 +1,6 @@ """Some tests covering the linting code.""" -import fnmatch import json -import os from pathlib import Path import yaml @@ -133,16 +131,18 @@ def test_sphinx_md_files(self): # Get list of existing .md files existing_docs = [] - for fn in os.listdir(docs_basedir): - if fnmatch.fnmatch(fn, "*.md") and not fnmatch.fnmatch(fn, "index.md"): - 
existing_docs.append(Path(docs_basedir, fn)) + existing_docs = [ + str(Path(docs_basedir, fn)) + for fn in Path(docs_basedir).iterdir() + if fn.match("*.md") and not fn.match("index.md") + ] # Check .md files against each test name lint_obj = nf_core.pipelines.lint.PipelineLint("", True) for test_name in lint_obj.lint_tests: fn = Path(docs_basedir, f"{test_name}.md") - assert os.path.exists(fn), f"Could not find lint docs .md file: {fn}" - existing_docs.remove(fn) + assert fn.exists(), f"Could not find lint docs .md file: {fn}" + existing_docs.remove(str(fn)) # Check that we have no remaining .md files that we didn't expect assert len(existing_docs) == 0, f"Unexpected lint docs .md files found: {', '.join(existing_docs)}" @@ -150,11 +150,7 @@ def test_sphinx_md_files(self): ####################### # SPECIFIC LINT TESTS # ####################### - from .lint.actions_awsfulltest import ( # type: ignore[misc] - test_actions_awsfulltest_fail, - test_actions_awsfulltest_pass, - test_actions_awsfulltest_warn, - ) + from .lint.actions_awstest import ( # type: ignore[misc] test_actions_awstest_fail, test_actions_awstest_pass, From 13be56f73d50e753d941301ed9a84424d3fa3022 Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 17 Jul 2024 13:41:25 +0200 Subject: [PATCH 315/737] update prettier version --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c1dc7978f..bcf7ff65c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -10,7 +10,7 @@ repos: hooks: - id: prettier additional_dependencies: - - prettier@3.2.5 + - prettier@3.3.3 - repo: https://github.com/editorconfig-checker/editorconfig-checker.python rev: "2.7.3" From da155abe4945be13eaa5beb3654dd342dc9d5b19 Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 17 Jul 2024 13:54:53 +0200 Subject: [PATCH 316/737] import linting tests correctly to avoid "module is not callable" warnings --- 
nf_core/pipelines/lint/__init__.py | 73 +++++++++++++++++++----------- 1 file changed, 47 insertions(+), 26 deletions(-) diff --git a/nf_core/pipelines/lint/__init__.py b/nf_core/pipelines/lint/__init__.py index d731cb018..93f652370 100644 --- a/nf_core/pipelines/lint/__init__.py +++ b/nf_core/pipelines/lint/__init__.py @@ -30,6 +30,29 @@ from nf_core.utils import plural_s as _s from nf_core.utils import strip_ansi_codes +from .actions_awsfulltest import actions_awsfulltest +from .actions_awstest import actions_awstest +from .actions_ci import actions_ci +from .actions_schema_validation import actions_schema_validation +from .configs import base_config, modules_config +from .files_exist import files_exist +from .files_unchanged import files_unchanged +from .merge_markers import merge_markers +from .modules_json import modules_json +from .modules_structure import modules_structure +from .multiqc_config import multiqc_config +from .nextflow_config import nextflow_config +from .nfcore_yml import nfcore_yml +from .pipeline_name_conventions import pipeline_name_conventions +from .pipeline_todos import pipeline_todos +from .readme import readme +from .schema_description import schema_description +from .schema_lint import schema_lint +from .schema_params import schema_params +from .system_exit import system_exit +from .template_strings import template_strings +from .version_consistency import version_consistency + log = logging.getLogger(__name__) @@ -52,32 +75,30 @@ class PipelineLint(nf_core.utils.Pipeline): warned (list): A list of tuples of the form: ``(, )`` """ - from .actions_awsfulltest import actions_awsfulltest # type: ignore[misc] - from .actions_awstest import actions_awstest # type: ignore[misc] - from .actions_ci import actions_ci # type: ignore[misc] - from .actions_schema_validation import ( # type: ignore[misc] - actions_schema_validation, - ) - from .configs import base_config, modules_config # type: ignore[misc] - from .files_exist import files_exist # 
type: ignore[misc] - from .files_unchanged import files_unchanged # type: ignore[misc] - from .merge_markers import merge_markers # type: ignore[misc] - from .modules_json import modules_json # type: ignore[misc] - from .modules_structure import modules_structure # type: ignore[misc] - from .multiqc_config import multiqc_config # type: ignore[misc] - from .nextflow_config import nextflow_config # type: ignore[misc] - from .nfcore_yml import nfcore_yml # type: ignore[misc] - from .pipeline_name_conventions import ( # type: ignore[misc] - pipeline_name_conventions, - ) - from .pipeline_todos import pipeline_todos # type: ignore[misc] - from .readme import readme # type: ignore[misc] - from .schema_description import schema_description # type: ignore[misc] - from .schema_lint import schema_lint # type: ignore[misc] - from .schema_params import schema_params # type: ignore[misc] - from .system_exit import system_exit # type: ignore[misc] - from .template_strings import template_strings # type: ignore[misc] - from .version_consistency import version_consistency # type: ignore[misc] + # Import all linting tests as methods for this class + actions_awsfulltest = actions_awsfulltest + actions_awstest = actions_awstest + actions_ci = actions_ci + actions_schema_validation = actions_schema_validation + base_config = base_config + modules_config = modules_config + files_exist = files_exist + files_unchanged = files_unchanged + merge_markers = merge_markers + modules_json = modules_json + modules_structure = modules_structure + multiqc_config = multiqc_config + nextflow_config = nextflow_config + nfcore_yml = nfcore_yml + pipeline_name_conventions = pipeline_name_conventions + pipeline_todos = pipeline_todos + readme = readme + schema_description = schema_description + schema_lint = schema_lint + schema_params = schema_params + system_exit = system_exit + template_strings = template_strings + version_consistency = version_consistency def __init__( self, wf_path, 
release_mode=False, fix=(), key=None, fail_ignored=False, fail_warned=False, hide_progress=False From 4c77621e8816a0ec5da9d9096efc89e77e5951f6 Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 17 Jul 2024 14:31:33 +0200 Subject: [PATCH 317/737] convert rest of the lint test to new subclass structure --- tests/pipelines/lint/actions_awstest.py | 37 -- tests/pipelines/lint/actions_ci.py | 49 --- .../lint/actions_schema_validation.py | 66 ---- tests/pipelines/lint/configs.py | 89 ----- tests/pipelines/lint/files_exist.py | 97 ----- tests/pipelines/lint/files_unchanged.py | 26 -- tests/pipelines/lint/merge_markers.py | 22 -- tests/pipelines/lint/modules_json.py | 6 - tests/pipelines/lint/multiqc_config.py | 129 ------- tests/pipelines/lint/nextflow_config.py | 211 ----------- tests/pipelines/lint/nfcore_yml.py | 53 --- tests/pipelines/lint/template_strings.py | 53 --- tests/pipelines/lint/test_actions_awstest.py | 39 ++ tests/pipelines/lint/test_actions_ci.py | 50 +++ .../lint/test_actions_schema_validation.py | 62 +++ tests/pipelines/lint/test_configs.py | 91 +++++ tests/pipelines/lint/test_files_exist.py | 91 +++++ tests/pipelines/lint/test_files_unchanged.py | 28 ++ tests/pipelines/lint/test_merge_markers.py | 25 ++ tests/pipelines/lint/test_modules_json.py | 10 + tests/pipelines/lint/test_multiqc_config.py | 127 +++++++ tests/pipelines/lint/test_nextflow_config.py | 200 ++++++++++ tests/pipelines/lint/test_nfcore_yml.py | 57 +++ tests/pipelines/lint/test_template_strings.py | 55 +++ .../lint/test_version_consistency.py | 19 + tests/pipelines/lint/version_consistency.py | 14 - tests/pipelines/test_lint.py | 354 ++++++++---------- 27 files changed, 1012 insertions(+), 1048 deletions(-) delete mode 100644 tests/pipelines/lint/actions_awstest.py delete mode 100644 tests/pipelines/lint/actions_ci.py delete mode 100644 tests/pipelines/lint/actions_schema_validation.py delete mode 100644 tests/pipelines/lint/configs.py delete mode 100644 
tests/pipelines/lint/files_exist.py delete mode 100644 tests/pipelines/lint/files_unchanged.py delete mode 100644 tests/pipelines/lint/merge_markers.py delete mode 100644 tests/pipelines/lint/modules_json.py delete mode 100644 tests/pipelines/lint/multiqc_config.py delete mode 100644 tests/pipelines/lint/nextflow_config.py delete mode 100644 tests/pipelines/lint/nfcore_yml.py delete mode 100644 tests/pipelines/lint/template_strings.py create mode 100644 tests/pipelines/lint/test_actions_awstest.py create mode 100644 tests/pipelines/lint/test_actions_ci.py create mode 100644 tests/pipelines/lint/test_actions_schema_validation.py create mode 100644 tests/pipelines/lint/test_configs.py create mode 100644 tests/pipelines/lint/test_files_exist.py create mode 100644 tests/pipelines/lint/test_files_unchanged.py create mode 100644 tests/pipelines/lint/test_merge_markers.py create mode 100644 tests/pipelines/lint/test_modules_json.py create mode 100644 tests/pipelines/lint/test_multiqc_config.py create mode 100644 tests/pipelines/lint/test_nextflow_config.py create mode 100644 tests/pipelines/lint/test_nfcore_yml.py create mode 100644 tests/pipelines/lint/test_template_strings.py create mode 100644 tests/pipelines/lint/test_version_consistency.py delete mode 100644 tests/pipelines/lint/version_consistency.py diff --git a/tests/pipelines/lint/actions_awstest.py b/tests/pipelines/lint/actions_awstest.py deleted file mode 100644 index 259bf866b..000000000 --- a/tests/pipelines/lint/actions_awstest.py +++ /dev/null @@ -1,37 +0,0 @@ -import os - -import yaml - -import nf_core.pipelines.lint - - -def test_actions_awstest_pass(self): - """Lint test: actions_awstest - PASS""" - self.lint_obj._load() - results = self.lint_obj.actions_awstest() - assert results["passed"] == ["'.github/workflows/awstest.yml' is triggered correctly"] - assert len(results.get("warned", [])) == 0 - assert len(results.get("failed", [])) == 0 - assert len(results.get("ignored", [])) == 0 - - -def 
test_actions_awstest_fail(self): - """Lint test: actions_awsfulltest - FAIL""" - - # Edit .github/workflows/awsfulltest.yml to use -profile test_full - new_pipeline = self._make_pipeline_copy() - with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml")) as fh: - awstest_yml = yaml.safe_load(fh) - awstest_yml[True]["push"] = ["master"] - with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml"), "w") as fh: - yaml.dump(awstest_yml, fh) - - # Make lint object - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - - results = lint_obj.actions_awstest() - assert results["failed"] == ["'.github/workflows/awstest.yml' is not triggered correctly"] - assert len(results.get("warned", [])) == 0 - assert len(results.get("passed", [])) == 0 - assert len(results.get("ignored", [])) == 0 diff --git a/tests/pipelines/lint/actions_ci.py b/tests/pipelines/lint/actions_ci.py deleted file mode 100644 index eb438b881..000000000 --- a/tests/pipelines/lint/actions_ci.py +++ /dev/null @@ -1,49 +0,0 @@ -import os - -import yaml - -import nf_core.pipelines.lint - - -def test_actions_ci_pass(self): - """Lint test: actions_ci - PASS""" - self.lint_obj._load() - results = self.lint_obj.actions_ci() - assert results["passed"] == [ - "'.github/workflows/ci.yml' is triggered on expected events", - "'.github/workflows/ci.yml' checks minimum NF version", - ] - assert len(results.get("warned", [])) == 0 - assert len(results.get("failed", [])) == 0 - assert len(results.get("ignored", [])) == 0 - - -def test_actions_ci_fail_wrong_nf(self): - """Lint test: actions_ci - FAIL - wrong minimum version of Nextflow tested""" - self.lint_obj._load() - self.lint_obj.minNextflowVersion = "1.2.3" - results = self.lint_obj.actions_ci() - assert results["failed"] == ["Minimum pipeline NF version '1.2.3' is not tested in '.github/workflows/ci.yml'"] - - -def test_actions_ci_fail_wrong_trigger(self): - """Lint test: actions_actions_ci - FAIL - 
workflow triggered incorrectly, NF ver not checked at all""" - - # Edit .github/workflows/actions_ci.yml to mess stuff up! - new_pipeline = self._make_pipeline_copy() - with open(os.path.join(new_pipeline, ".github", "workflows", "ci.yml")) as fh: - ci_yml = yaml.safe_load(fh) - ci_yml[True]["push"] = ["dev", "patch"] - ci_yml["jobs"]["test"]["strategy"]["matrix"] = {"nxf_versionnn": ["foo", ""]} - with open(os.path.join(new_pipeline, ".github", "workflows", "ci.yml"), "w") as fh: - yaml.dump(ci_yml, fh) - - # Make lint object - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - - results = lint_obj.actions_ci() - assert results["failed"] == [ - "'.github/workflows/ci.yml' is not triggered on expected events", - "'.github/workflows/ci.yml' does not check minimum NF version", - ] diff --git a/tests/pipelines/lint/actions_schema_validation.py b/tests/pipelines/lint/actions_schema_validation.py deleted file mode 100644 index 4b00e7bf4..000000000 --- a/tests/pipelines/lint/actions_schema_validation.py +++ /dev/null @@ -1,66 +0,0 @@ -from pathlib import Path - -import yaml - -import nf_core.pipelines.lint - - -def test_actions_schema_validation_missing_jobs(self): - """Missing 'jobs' field should result in failure""" - new_pipeline = self._make_pipeline_copy() - - awstest_yml_path = Path(new_pipeline) / ".github" / "workflows" / "awstest.yml" - with open(awstest_yml_path) as fh: - awstest_yml = yaml.safe_load(fh) - awstest_yml.pop("jobs") - with open(awstest_yml_path, "w") as fh: - yaml.dump(awstest_yml, fh) - - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - - results = lint_obj.actions_schema_validation() - - assert "Workflow validation failed for awstest.yml: 'jobs' is a required property" in results["failed"][0] - - -def test_actions_schema_validation_missing_on(self): - """Missing 'on' field should result in failure""" - new_pipeline = self._make_pipeline_copy() - - awstest_yml_path = 
Path(new_pipeline) / ".github" / "workflows" / "awstest.yml" - with open(awstest_yml_path) as fh: - awstest_yml = yaml.safe_load(fh) - awstest_yml.pop(True) - with open(awstest_yml_path, "w") as fh: - yaml.dump(awstest_yml, fh) - - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - - results = lint_obj.actions_schema_validation() - - assert results["failed"][0] == "Missing 'on' keyword in awstest.yml" - assert "Workflow validation failed for awstest.yml: 'on' is a required property" in results["failed"][1] - - -def test_actions_schema_validation_fails_for_additional_property(self): - """Missing 'jobs' field should result in failure""" - new_pipeline = self._make_pipeline_copy() - - awstest_yml_path = Path(new_pipeline) / ".github" / "workflows" / "awstest.yml" - with open(awstest_yml_path) as fh: - awstest_yml = yaml.safe_load(fh) - awstest_yml["not_jobs"] = awstest_yml["jobs"] - with open(awstest_yml_path, "w") as fh: - yaml.dump(awstest_yml, fh) - - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - - results = lint_obj.actions_schema_validation() - - assert ( - "Workflow validation failed for awstest.yml: Additional properties are not allowed ('not_jobs' was unexpected)" - in results["failed"][0] - ) diff --git a/tests/pipelines/lint/configs.py b/tests/pipelines/lint/configs.py deleted file mode 100644 index 3ca35cab8..000000000 --- a/tests/pipelines/lint/configs.py +++ /dev/null @@ -1,89 +0,0 @@ -from pathlib import Path - -import yaml - -import nf_core.pipelines.create -import nf_core.pipelines.lint - - -def test_withname_in_modules_config(self): - """Tests finding withName in modules.config passes linting.""" - - new_pipeline = self._make_pipeline_copy() - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - result = lint_obj.modules_config() - assert len(result["failed"]) == 0 - assert any( - ["`FASTQC` found in `conf/modules.config` and Nextflow scripts." 
in passed for passed in result["passed"]] - ) - - -def test_superfluous_withname_in_modules_config_fails(self): - """Tests finding withName in modules.config fails linting.""" - new_pipeline = self._make_pipeline_copy() - # Add withName to modules.config - modules_config = Path(new_pipeline) / "conf" / "modules.config" - with open(modules_config, "a") as f: - f.write("\nwithName: 'BPIPE' {\n cache = false \n}") - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline, hide_progress=False) - lint_obj._load() - result = lint_obj.modules_config() - assert len(result["failed"]) == 1 - assert result["failed"][0].startswith("`conf/modules.config` contains `withName:BPIPE`") - - -def test_ignore_modules_config(self): - """Tests ignoring the modules.config passes linting.""" - new_pipeline = self._make_pipeline_copy() - # ignore modules.config in linting - with open(Path(new_pipeline) / ".nf-core.yml") as f: - content = yaml.safe_load(f) - old_content = content.copy() - content["lint"] = {"modules_config": False} - with open(Path(new_pipeline) / ".nf-core.yml", "w") as f: - yaml.dump(content, f) - Path(new_pipeline, "conf", "modules.config").unlink() - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - result = lint_obj.modules_config() - assert len(result["ignored"]) == 1 - assert result["ignored"][0].startswith("`conf/modules.config` not found, but it is ignored.") - # cleanup - with open(Path(new_pipeline) / ".nf-core.yml", "w") as f: - yaml.dump(old_content, f) - - -def test_superfluous_withname_in_base_config_fails(self): - """Tests finding withName in base.config fails linting.""" - new_pipeline = self._make_pipeline_copy() - # Add withName to base.config - base_config = Path(new_pipeline) / "conf" / "base.config" - with open(base_config, "a") as f: - f.write("\nwithName:CUSTOM_DUMPSOFTWAREVERSIONS {\n cache = false \n}") - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - result = 
lint_obj.base_config() - assert len(result["failed"]) == 1 - assert result["failed"][0].startswith("`conf/base.config` contains `withName:CUSTOM_DUMPSOFTWAREVERSIONS`") - - -def test_ignore_base_config(self): - """Tests ignoring the base.config passes linting.""" - new_pipeline = self._make_pipeline_copy() - # ignore base.config in linting - with open(Path(new_pipeline) / ".nf-core.yml") as f: - content = yaml.safe_load(f) - old_content = content.copy() - content["lint"] = {"base_config": False} - with open(Path(new_pipeline) / ".nf-core.yml", "w") as f: - yaml.dump(content, f) - Path(new_pipeline, "conf", "base.config").unlink() - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - result = lint_obj.base_config() - assert len(result["ignored"]) == 1 - assert result["ignored"][0].startswith("`conf/base.config` not found, but it is ignored.") - # cleanup - with open(Path(new_pipeline) / ".nf-core.yml", "w") as f: - yaml.dump(old_content, f) diff --git a/tests/pipelines/lint/files_exist.py b/tests/pipelines/lint/files_exist.py deleted file mode 100644 index 4ae167b1d..000000000 --- a/tests/pipelines/lint/files_exist.py +++ /dev/null @@ -1,97 +0,0 @@ -from pathlib import Path - -import nf_core.pipelines.lint - - -def test_files_exist_missing_config(self): - """Lint test: critical files missing FAIL""" - new_pipeline = self._make_pipeline_copy() - - Path(new_pipeline, "CHANGELOG.md").unlink() - - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - lint_obj.nf_config["manifest.name"] = "nf-core/testpipeline" - - results = lint_obj.files_exist() - assert "File not found: `CHANGELOG.md`" in results["failed"] - - -def test_files_exist_missing_main(self): - """Check if missing main issues warning""" - new_pipeline = self._make_pipeline_copy() - - Path(new_pipeline, "main.nf").unlink() - - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - - results = lint_obj.files_exist() - assert 
"File not found: `main.nf`" in results["warned"] - - -def test_files_exist_deprecated_file(self): - """Check whether deprecated file issues warning""" - new_pipeline = self._make_pipeline_copy() - - nf = Path(new_pipeline, "parameters.settings.json") - nf.touch() - - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - - results = lint_obj.files_exist() - assert results["failed"] == ["File must be removed: `parameters.settings.json`"] - - -def test_files_exist_pass(self): - """Lint check should pass if all files are there""" - - new_pipeline = self._make_pipeline_copy() - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - - results = lint_obj.files_exist() - assert results["failed"] == [] - - -def test_files_exist_pass_conditional(self): - new_pipeline = self._make_pipeline_copy() - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - lint_obj.nf_config["plugins"] = [] - lib_dir = Path(new_pipeline, "lib") - lib_dir.mkdir() - (lib_dir / "nfcore_external_java_deps.jar").touch() - results = lint_obj.files_exist() - assert results["failed"] == [] - assert results["ignored"] == [] - - -def test_files_exist_fail_conditional(self): - new_pipeline = self._make_pipeline_copy() - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - lib_dir = Path(new_pipeline, "lib") - lib_dir.mkdir() - (lib_dir / "nfcore_external_java_deps.jar").touch() - results = lint_obj.files_exist() - assert results["failed"] == ["File must be removed: `lib/nfcore_external_java_deps.jar`"] - assert results["ignored"] == [] - - -def test_files_exist_pass_conditional_nfschema(self): - new_pipeline = self._make_pipeline_copy() - # replace nf-validation with nf-schema in nextflow.config - with open(Path(new_pipeline, "nextflow.config")) as f: - config = f.read() - config = config.replace("nf-validation", "nf-schema") - with open(Path(new_pipeline, "nextflow.config"), "w") as f: - 
f.write(config) - - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - lint_obj.nf_config["manifest.schema"] = "nf-core" - results = lint_obj.files_exist() - assert results["failed"] == [] - assert results["ignored"] == [] diff --git a/tests/pipelines/lint/files_unchanged.py b/tests/pipelines/lint/files_unchanged.py deleted file mode 100644 index 07a722919..000000000 --- a/tests/pipelines/lint/files_unchanged.py +++ /dev/null @@ -1,26 +0,0 @@ -from pathlib import Path - -import nf_core.pipelines.lint - - -def test_files_unchanged_pass(self): - self.lint_obj._load() - results = self.lint_obj.files_unchanged() - assert len(results.get("warned", [])) == 0 - assert len(results.get("failed", [])) == 0 - assert len(results.get("ignored", [])) == 0 - assert not results.get("could_fix", True) - - -def test_files_unchanged_fail(self): - failing_file = Path(".github", "CONTRIBUTING.md") - new_pipeline = self._make_pipeline_copy() - with open(Path(new_pipeline, failing_file), "a") as fh: - fh.write("THIS SHOULD NOT BE HERE") - - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - results = lint_obj.files_unchanged() - assert len(results["failed"]) > 0 - assert str(failing_file) in results["failed"][0] - assert results["could_fix"] diff --git a/tests/pipelines/lint/merge_markers.py b/tests/pipelines/lint/merge_markers.py deleted file mode 100644 index 0e3699e19..000000000 --- a/tests/pipelines/lint/merge_markers.py +++ /dev/null @@ -1,22 +0,0 @@ -import os - -import nf_core.pipelines.lint - - -def test_merge_markers_found(self): - """Missing 'jobs' field should result in failure""" - new_pipeline = self._make_pipeline_copy() - - with open(os.path.join(new_pipeline, "main.nf")) as fh: - main_nf_content = fh.read() - main_nf_content = ">>>>>>>\n" + main_nf_content - with open(os.path.join(new_pipeline, "main.nf"), "w") as fh: - fh.write(main_nf_content) - - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - 
lint_obj._load() - - results = lint_obj.merge_markers() - assert len(results["failed"]) > 0 - assert len(results["passed"]) == 0 - assert "Merge marker '>>>>>>>' in " in results["failed"][0] diff --git a/tests/pipelines/lint/modules_json.py b/tests/pipelines/lint/modules_json.py deleted file mode 100644 index f025daa7f..000000000 --- a/tests/pipelines/lint/modules_json.py +++ /dev/null @@ -1,6 +0,0 @@ -def test_modules_json_pass(self): - self.lint_obj._load() - results = self.lint_obj.modules_json() - assert len(results.get("warned", [])) == 0 - assert len(results.get("failed", [])) == 0 - assert len(results.get("passed", [])) > 0 diff --git a/tests/pipelines/lint/multiqc_config.py b/tests/pipelines/lint/multiqc_config.py deleted file mode 100644 index 7f1fdbd67..000000000 --- a/tests/pipelines/lint/multiqc_config.py +++ /dev/null @@ -1,129 +0,0 @@ -from pathlib import Path - -import yaml - -import nf_core.pipelines.lint - - -def test_multiqc_config_exists(self): - """Test that linting fails if the multiqc_config.yml file is missing""" - # Delete the file - new_pipeline = self._make_pipeline_copy() - Path(Path(new_pipeline, "assets", "multiqc_config.yml")).unlink() - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - result = lint_obj.multiqc_config() - assert result["failed"] == ["`assets/multiqc_config.yml` not found."] - - -def test_multiqc_config_ignore(self): - """Test that linting succeeds if the multiqc_config.yml file is missing but ignored""" - # Delete the file - new_pipeline = self._make_pipeline_copy() - Path(Path(new_pipeline, "assets", "multiqc_config.yml")).unlink() - with open(Path(new_pipeline, ".nf-core.yml")) as f: - content = yaml.safe_load(f) - old_content = content.copy() - content["lint"] = {"multiqc_config": False} - with open(Path(new_pipeline, ".nf-core.yml"), "w") as f: - yaml.dump(content, f) - - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - result = 
lint_obj.multiqc_config() - assert result["ignored"] == ["`assets/multiqc_config.yml` not found, but it is ignored."] - - # cleanup - with open(Path(new_pipeline, ".nf-core.yml"), "w") as f: - yaml.dump(old_content, f) - - -def test_multiqc_config_missing_report_section_order(self): - """Test that linting fails if the multiqc_config.yml file is missing the report_section_order""" - new_pipeline = self._make_pipeline_copy() - with open(Path(new_pipeline, "assets", "multiqc_config.yml")) as fh: - mqc_yml = yaml.safe_load(fh) - mqc_yml_tmp = mqc_yml.copy() - mqc_yml.pop("report_section_order") - with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh: - yaml.safe_dump(mqc_yml, fh) - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - result = lint_obj.multiqc_config() - # Reset the file - with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh: - yaml.safe_dump(mqc_yml_tmp, fh) - assert result["failed"] == ["`assets/multiqc_config.yml` does not contain `report_section_order`"] - - -def test_multiqc_incorrect_export_plots(self): - """Test that linting fails if the multiqc_config.yml file has an incorrect value for export_plots""" - new_pipeline = self._make_pipeline_copy() - with open(Path(new_pipeline, "assets", "multiqc_config.yml")) as fh: - mqc_yml = yaml.safe_load(fh) - mqc_yml_tmp = mqc_yml.copy() - mqc_yml["export_plots"] = False - with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh: - yaml.safe_dump(mqc_yml, fh) - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - result = lint_obj.multiqc_config() - # Reset the file - with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh: - yaml.safe_dump(mqc_yml_tmp, fh) - assert result["failed"] == ["`assets/multiqc_config.yml` does not contain 'export_plots: true'."] - - -def test_multiqc_config_report_comment_fail(self): - """Test that linting fails if the multiqc_config.yml file has an 
incorrect report_comment""" - new_pipeline = self._make_pipeline_copy() - with open(Path(new_pipeline, "assets", "multiqc_config.yml")) as fh: - mqc_yml = yaml.safe_load(fh) - mqc_yml_tmp = mqc_yml.copy() - mqc_yml["report_comment"] = "This is a test" - with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh: - yaml.safe_dump(mqc_yml, fh) - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - result = lint_obj.multiqc_config() - # Reset the file - with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh: - yaml.safe_dump(mqc_yml_tmp, fh) - assert len(result["failed"]) == 1 - assert result["failed"][0].startswith("`assets/multiqc_config.yml` does not contain a matching 'report_comment'.") - - -def test_multiqc_config_report_comment_release_fail(self): - """Test that linting fails if the multiqc_config.yml file has an incorrect report_comment for a release version""" - new_pipeline = self._make_pipeline_copy() - with open(Path(new_pipeline, "assets", "multiqc_config.yml")) as fh: - mqc_yml = yaml.safe_load(fh) - mqc_yml_tmp = mqc_yml.copy() - with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh: - yaml.safe_dump(mqc_yml, fh) - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - # bump version - lint_obj.nf_config["manifest.version"] = "1.0" - result = lint_obj.multiqc_config() - # Reset the file - with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh: - yaml.safe_dump(mqc_yml_tmp, fh) - assert len(result["failed"]) == 1 - assert result["failed"][0].startswith("`assets/multiqc_config.yml` does not contain a matching 'report_comment'.") - - -def test_multiqc_config_report_comment_release_succeed(self): - """Test that linting fails if the multiqc_config.yml file has a correct report_comment for a release version""" - - import nf_core.pipelines.bump_version - - new_pipeline = self._make_pipeline_copy() - lint_obj = 
nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - # bump version using the bump_version function - nf_core.pipelines.bump_version.bump_pipeline_version(lint_obj, "1.0") - # lint again - lint_obj._load() - result = lint_obj.multiqc_config() - assert "`assets/multiqc_config.yml` contains a matching 'report_comment'." in result["passed"] diff --git a/tests/pipelines/lint/nextflow_config.py b/tests/pipelines/lint/nextflow_config.py deleted file mode 100644 index d9157c90c..000000000 --- a/tests/pipelines/lint/nextflow_config.py +++ /dev/null @@ -1,211 +0,0 @@ -import os -import re -from pathlib import Path - -import nf_core.pipelines.create.create -import nf_core.pipelines.lint - - -def test_nextflow_config_example_pass(self): - """Tests that config variable existence test works with good pipeline example""" - self.lint_obj.load_pipeline_config() - result = self.lint_obj.nextflow_config() - assert len(result["failed"]) == 0 - assert len(result["warned"]) == 0 - - -def test_nextflow_config_bad_name_fail(self): - """Tests that config variable existence test fails with bad pipeline name""" - new_pipeline = self._make_pipeline_copy() - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj.load_pipeline_config() - - lint_obj.nf_config["manifest.name"] = "bad_name" - result = lint_obj.nextflow_config() - assert len(result["failed"]) > 0 - assert len(result["warned"]) == 0 - - -def test_nextflow_config_dev_in_release_mode_failed(self): - """Tests that config variable existence test fails with dev version in release mode""" - new_pipeline = self._make_pipeline_copy() - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj.load_pipeline_config() - - lint_obj.release_mode = True - lint_obj.nf_config["manifest.version"] = "dev_is_bad_name" - result = lint_obj.nextflow_config() - assert len(result["failed"]) > 0 - assert len(result["warned"]) == 0 - - -def test_nextflow_config_missing_test_profile_failed(self): - """Test 
failure if config file does not contain `test` profile.""" - new_pipeline = self._make_pipeline_copy() - # Change the name of the test profile so there is no such profile - nf_conf_file = os.path.join(new_pipeline, "nextflow.config") - with open(nf_conf_file) as f: - content = f.read() - fail_content = re.sub(r"\btest\b", "testfail", content) - with open(nf_conf_file, "w") as f: - f.write(fail_content) - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj.load_pipeline_config() - result = lint_obj.nextflow_config() - assert len(result["failed"]) > 0 - assert len(result["warned"]) == 0 - - -def test_default_values_match(self): - """Test that the default values in nextflow.config match the default values defined in the nextflow_schema.json.""" - new_pipeline = self._make_pipeline_copy() - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj.load_pipeline_config() - result = lint_obj.nextflow_config() - assert len(result["failed"]) == 0 - assert len(result["warned"]) == 0 - assert "Config default value correct: params.max_cpus" in str(result["passed"]) - assert "Config default value correct: params.validate_params" in str(result["passed"]) - - -def test_default_values_fail(self): - """Test linting fails if the default values in nextflow.config do not match the ones defined in the nextflow_schema.json.""" - new_pipeline = self._make_pipeline_copy() - # Change the default value of max_cpus in nextflow.config - nf_conf_file = Path(new_pipeline) / "nextflow.config" - with open(nf_conf_file) as f: - content = f.read() - fail_content = re.sub(r"\bmax_cpus\s*=\s*16\b", "max_cpus = 0", content) - with open(nf_conf_file, "w") as f: - f.write(fail_content) - # Change the default value of max_memory in nextflow_schema.json - nf_schema_file = Path(new_pipeline) / "nextflow_schema.json" - with open(nf_schema_file) as f: - content = f.read() - fail_content = re.sub(r'"default": "128.GB"', '"default": "18.GB"', content) - with 
open(nf_schema_file, "w") as f: - f.write(fail_content) - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj.load_pipeline_config() - result = lint_obj.nextflow_config() - assert len(result["failed"]) == 2 - assert ( - "Config default value incorrect: `params.max_cpus` is set as `16` in `nextflow_schema.json` but is `0` in `nextflow.config`." - in result["failed"] - ) - assert ( - "Config default value incorrect: `params.max_memory` is set as `18.GB` in `nextflow_schema.json` but is `128.GB` in `nextflow.config`." - in result["failed"] - ) - - -def test_catch_params_assignment_in_main_nf(self): - """Test linting fails if main.nf contains an assignment to a parameter from nextflow_schema.json.""" - new_pipeline = self._make_pipeline_copy() - # Add parameter assignment in main.nf - main_nf_file = Path(new_pipeline) / "main.nf" - with open(main_nf_file, "a") as f: - f.write("params.max_time = 42") - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj.load_pipeline_config() - result = lint_obj.nextflow_config() - assert len(result["failed"]) == 1 - assert ( - result["failed"][0] - == "Config default value incorrect: `params.max_time` is set as `240.h` in `nextflow_schema.json` but is `null` in `nextflow.config`." - ) - - -def test_allow_params_reference_in_main_nf(self): - """Test linting allows for references like `params.aligner == 'bwa'` in main.nf. 
The test will detect if the bug mentioned in GitHub-issue #2833 reemerges.""" - new_pipeline = self._make_pipeline_copy() - # Add parameter reference in main.nf - main_nf_file = Path(new_pipeline) / "main.nf" - with open(main_nf_file, "a") as f: - f.write("params.max_time == 42") - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj.load_pipeline_config() - result = lint_obj.nextflow_config() - assert len(result["failed"]) == 0 - - -def test_default_values_ignored(self): - """Test ignoring linting of default values.""" - new_pipeline = self._make_pipeline_copy() - # Add max_cpus to the ignore list - nf_core_yml = Path(new_pipeline) / ".nf-core.yml" - with open(nf_core_yml, "w") as f: - f.write( - "repository_type: pipeline\nlint:\n nextflow_config:\n - config_defaults:\n - params.max_cpus\n" - ) - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj.load_pipeline_config() - lint_obj._load_lint_config() - result = lint_obj.nextflow_config() - assert len(result["failed"]) == 0 - assert len(result["ignored"]) == 1 - assert "Config default value correct: params.max_cpu" not in str(result["passed"]) - assert "Config default ignored: params.max_cpus" in str(result["ignored"]) - - -def test_default_values_float(self): - """Test comparing two float values.""" - new_pipeline = self._make_pipeline_copy() - # Add a float value `dummy=0.0001` to the nextflow.config below `validate_params` - nf_conf_file = Path(new_pipeline) / "nextflow.config" - with open(nf_conf_file) as f: - content = f.read() - fail_content = re.sub( - r"validate_params\s*=\s*true", "params.validate_params = true\ndummy = 0.000000001", content - ) - with open(nf_conf_file, "w") as f: - f.write(fail_content) - # Add a float value `dummy` to the nextflow_schema.json - nf_schema_file = Path(new_pipeline) / "nextflow_schema.json" - with open(nf_schema_file) as f: - content = f.read() - fail_content = re.sub( - r'"validate_params": {', - ' "dummy": {"type": 
"number","default":0.000000001},\n"validate_params": {', - content, - ) - with open(nf_schema_file, "w") as f: - f.write(fail_content) - - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj.load_pipeline_config() - result = lint_obj.nextflow_config() - assert len(result["failed"]) == 0 - assert len(result["warned"]) == 0 - assert "Config default value correct: params.dummy" in str(result["passed"]) - - -def test_default_values_float_fail(self): - """Test comparing two float values.""" - new_pipeline = self._make_pipeline_copy() - # Add a float value `dummy=0.0001` to the nextflow.config below `validate_params` - nf_conf_file = Path(new_pipeline) / "nextflow.config" - with open(nf_conf_file) as f: - content = f.read() - fail_content = re.sub( - r"validate_params\s*=\s*true", "params.validate_params = true\ndummy = 0.000000001", content - ) - with open(nf_conf_file, "w") as f: - f.write(fail_content) - # Add a float value `dummy` to the nextflow_schema.json - nf_schema_file = Path(new_pipeline) / "nextflow_schema.json" - with open(nf_schema_file) as f: - content = f.read() - fail_content = re.sub( - r'"validate_params": {', ' "dummy": {"type": "float","default":0.000001},\n"validate_params": {', content - ) - with open(nf_schema_file, "w") as f: - f.write(fail_content) - - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj.load_pipeline_config() - result = lint_obj.nextflow_config() - - assert len(result["failed"]) == 1 - assert len(result["warned"]) == 0 - assert "Config default value incorrect: `params.dummy" in str(result["failed"]) diff --git a/tests/pipelines/lint/nfcore_yml.py b/tests/pipelines/lint/nfcore_yml.py deleted file mode 100644 index 94d2870e1..000000000 --- a/tests/pipelines/lint/nfcore_yml.py +++ /dev/null @@ -1,53 +0,0 @@ -import re -from pathlib import Path - -import nf_core.pipelines.create -import nf_core.pipelines.lint - - -def test_nfcore_yml_pass(self): - """Lint test: nfcore_yml - PASS""" - 
self.lint_obj._load() - results = self.lint_obj.nfcore_yml() - - assert "Repository type in `.nf-core.yml` is valid" in str(results["passed"]) - assert "nf-core version in `.nf-core.yml` is set to the latest version" in str(results["passed"]) - assert len(results.get("warned", [])) == 0 - assert len(results.get("failed", [])) == 0 - assert len(results.get("ignored", [])) == 0 - - -def test_nfcore_yml_fail_repo_type(self): - """Lint test: nfcore_yml - FAIL - repository type not set""" - new_pipeline = self._make_pipeline_copy() - nf_core_yml = Path(new_pipeline) / ".nf-core.yml" - with open(nf_core_yml) as fh: - content = fh.read() - new_content = content.replace("repository_type: pipeline", "repository_type: foo") - with open(nf_core_yml, "w") as fh: - fh.write(new_content) - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - results = lint_obj.nfcore_yml() - assert "Repository type in `.nf-core.yml` is not valid." in str(results["failed"]) - assert len(results.get("warned", [])) == 0 - assert len(results.get("passed", [])) >= 0 - assert len(results.get("ignored", [])) == 0 - - -def test_nfcore_yml_fail_nfcore_version(self): - """Lint test: nfcore_yml - FAIL - nf-core version not set""" - new_pipeline = self._make_pipeline_copy() - nf_core_yml = Path(new_pipeline) / ".nf-core.yml" - with open(nf_core_yml) as fh: - content = fh.read() - new_content = re.sub(r"nf_core_version:.+", "nf_core_version: foo", content) - with open(nf_core_yml, "w") as fh: - fh.write(new_content) - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - results = lint_obj.nfcore_yml() - assert "nf-core version in `.nf-core.yml` is not set to the latest version." 
in str(results["warned"]) - assert len(results.get("failed", [])) == 0 - assert len(results.get("passed", [])) >= 0 - assert len(results.get("ignored", [])) == 0 diff --git a/tests/pipelines/lint/template_strings.py b/tests/pipelines/lint/template_strings.py deleted file mode 100644 index 2db9e20a3..000000000 --- a/tests/pipelines/lint/template_strings.py +++ /dev/null @@ -1,53 +0,0 @@ -import subprocess -from pathlib import Path - -import nf_core.pipelines.create -import nf_core.pipelines.lint - - -def test_template_strings(self): - """Tests finding a template string in a file fails linting.""" - new_pipeline = self._make_pipeline_copy() - # Add template string to a file - txt_file = Path(new_pipeline) / "docs" / "test.txt" - with open(txt_file, "w") as f: - f.write("my {{ template_string }}") - subprocess.check_output(["git", "add", "docs"], cwd=new_pipeline) - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - result = lint_obj.template_strings() - assert len(result["failed"]) == 1 - assert len(result["ignored"]) == 0 - - -def test_template_strings_ignored(self): - """Tests ignoring template_strings""" - new_pipeline = self._make_pipeline_copy() - # Ignore template_strings test - nf_core_yml = Path(new_pipeline) / ".nf-core.yml" - with open(nf_core_yml, "w") as f: - f.write("repository_type: pipeline\nlint:\n template_strings: False") - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - lint_obj._lint_pipeline() - assert len(lint_obj.failed) == 0 - assert len(lint_obj.ignored) == 1 - - -def test_template_strings_ignore_file(self): - """Tests ignoring template_strings file""" - new_pipeline = self._make_pipeline_copy() - # Add template string to a file - txt_file = Path(new_pipeline) / "docs" / "test.txt" - with open(txt_file, "w") as f: - f.write("my {{ template_string }}") - subprocess.check_output(["git", "add", "docs"], cwd=new_pipeline) - # Ignore template_strings test - nf_core_yml = 
Path(new_pipeline) / ".nf-core.yml" - with open(nf_core_yml, "w") as f: - f.write("repository_type: pipeline\nlint:\n template_strings:\n - docs/test.txt") - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj._load() - result = lint_obj.template_strings() - assert len(result["failed"]) == 0 - assert len(result["ignored"]) == 1 diff --git a/tests/pipelines/lint/test_actions_awstest.py b/tests/pipelines/lint/test_actions_awstest.py new file mode 100644 index 000000000..51b55cb86 --- /dev/null +++ b/tests/pipelines/lint/test_actions_awstest.py @@ -0,0 +1,39 @@ +from pathlib import Path + +import yaml + +import nf_core.pipelines.lint + +from ..test_lint import TestLint + + +class TestLintActionsAws(TestLint): + def test_actions_awstest_pass(self): + """Lint test: actions_awstest - PASS""" + self.lint_obj._load() + results = self.lint_obj.actions_awstest() + assert results["passed"] == ["'.github/workflows/awstest.yml' is triggered correctly"] + assert len(results.get("warned", [])) == 0 + assert len(results.get("failed", [])) == 0 + assert len(results.get("ignored", [])) == 0 + + def test_actions_awstest_fail(self): + """Lint test: actions_awsfulltest - FAIL""" + + # Edit .github/workflows/awsfulltest.yml to use -profile test_full + new_pipeline = self._make_pipeline_copy() + with open(Path(new_pipeline, ".github", "workflows", "awstest.yml")) as fh: + awstest_yml = yaml.safe_load(fh) + awstest_yml[True]["push"] = ["master"] + with open(Path(new_pipeline, ".github", "workflows", "awstest.yml"), "w") as fh: + yaml.dump(awstest_yml, fh) + + # Make lint object + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) + lint_obj._load() + + results = lint_obj.actions_awstest() + assert results["failed"] == ["'.github/workflows/awstest.yml' is not triggered correctly"] + assert len(results.get("warned", [])) == 0 + assert len(results.get("passed", [])) == 0 + assert len(results.get("ignored", [])) == 0 diff --git 
a/tests/pipelines/lint/test_actions_ci.py b/tests/pipelines/lint/test_actions_ci.py new file mode 100644 index 000000000..7319ce4b0 --- /dev/null +++ b/tests/pipelines/lint/test_actions_ci.py @@ -0,0 +1,50 @@ +from pathlib import Path + +import yaml + +import nf_core.pipelines.lint + +from ..test_lint import TestLint + + +class TestLintActionsCi(TestLint): + def test_actions_ci_pass(self): + """Lint test: actions_ci - PASS""" + self.lint_obj._load() + results = self.lint_obj.actions_ci() + assert results["passed"] == [ + "'.github/workflows/ci.yml' is triggered on expected events", + "'.github/workflows/ci.yml' checks minimum NF version", + ] + assert len(results.get("warned", [])) == 0 + assert len(results.get("failed", [])) == 0 + assert len(results.get("ignored", [])) == 0 + + def test_actions_ci_fail_wrong_nf(self): + """Lint test: actions_ci - FAIL - wrong minimum version of Nextflow tested""" + self.lint_obj._load() + self.lint_obj.minNextflowVersion = "1.2.3" + results = self.lint_obj.actions_ci() + assert results["failed"] == ["Minimum pipeline NF version '1.2.3' is not tested in '.github/workflows/ci.yml'"] + + def test_actions_ci_fail_wrong_trigger(self): + """Lint test: actions_actions_ci - FAIL - workflow triggered incorrectly, NF ver not checked at all""" + + # Edit .github/workflows/actions_ci.yml to mess stuff up! 
+ new_pipeline = self._make_pipeline_copy() + with open(Path(new_pipeline, ".github", "workflows", "ci.yml")) as fh: + ci_yml = yaml.safe_load(fh) + ci_yml[True]["push"] = ["dev", "patch"] + ci_yml["jobs"]["test"]["strategy"]["matrix"] = {"nxf_versionnn": ["foo", ""]} + with open(Path(new_pipeline, ".github", "workflows", "ci.yml"), "w") as fh: + yaml.dump(ci_yml, fh) + + # Make lint object + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) + lint_obj._load() + + results = lint_obj.actions_ci() + assert results["failed"] == [ + "'.github/workflows/ci.yml' is not triggered on expected events", + "'.github/workflows/ci.yml' does not check minimum NF version", + ] diff --git a/tests/pipelines/lint/test_actions_schema_validation.py b/tests/pipelines/lint/test_actions_schema_validation.py new file mode 100644 index 000000000..34f6b5fcb --- /dev/null +++ b/tests/pipelines/lint/test_actions_schema_validation.py @@ -0,0 +1,62 @@ +from pathlib import Path + +import yaml + +import nf_core.pipelines.lint + +from ..test_lint import TestLint + + +class TestLintActionsSchemaValidation(TestLint): + def setUp(self) -> None: + super().setUp() + self.new_pipeline = self._make_pipeline_copy() + self.awstest_yml_path = Path(self.new_pipeline) / ".github" / "workflows" / "awstest.yml" + with open(self.awstest_yml_path) as fh: + self.awstest_yml = yaml.safe_load(fh) + + def test_actions_schema_validation_missing_jobs(self): + """Missing 'jobs' field should result in failure""" + + self.awstest_yml.pop("jobs") + with open(self.awstest_yml_path, "w") as fh: + yaml.dump(self.awstest_yml, fh) + + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + + results = lint_obj.actions_schema_validation() + + assert "Workflow validation failed for awstest.yml: 'jobs' is a required property" in results["failed"][0] + + def test_actions_schema_validation_missing_on(self): + """Missing 'on' field should result in failure""" + + self.awstest_yml.pop(True) + 
with open(self.awstest_yml_path, "w") as fh: + yaml.dump(self.awstest_yml, fh) + + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + + results = lint_obj.actions_schema_validation() + + assert results["failed"][0] == "Missing 'on' keyword in awstest.yml" + assert "Workflow validation failed for awstest.yml: 'on' is a required property" in results["failed"][1] + + def test_actions_schema_validation_fails_for_additional_property(self): + """Missing 'jobs' field should result in failure""" + + self.awstest_yml["not_jobs"] = self.awstest_yml["jobs"] + with open(self.awstest_yml_path, "w") as fh: + yaml.dump(self.awstest_yml, fh) + + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + + results = lint_obj.actions_schema_validation() + + assert ( + "Workflow validation failed for awstest.yml: Additional properties are not allowed ('not_jobs' was unexpected)" + in results["failed"][0] + ) diff --git a/tests/pipelines/lint/test_configs.py b/tests/pipelines/lint/test_configs.py new file mode 100644 index 000000000..7bb6329b5 --- /dev/null +++ b/tests/pipelines/lint/test_configs.py @@ -0,0 +1,91 @@ +from pathlib import Path + +import yaml + +import nf_core.pipelines.create +import nf_core.pipelines.lint + +from ..test_lint import TestLint + + +class TestLintConfigs(TestLint): + def setUp(self) -> None: + super().setUp() + self.new_pipeline = self._make_pipeline_copy() + + def test_withname_in_modules_config(self): + """Tests finding withName in modules.config passes linting.""" + + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + result = lint_obj.modules_config() + assert len(result["failed"]) == 0 + assert any( + ["`FASTQC` found in `conf/modules.config` and Nextflow scripts." 
in passed for passed in result["passed"]] + ) + + def test_superfluous_withname_in_modules_config_fails(self): + """Tests finding withName in modules.config fails linting.""" + + # Add withName to modules.config + modules_config = Path(self.new_pipeline) / "conf" / "modules.config" + with open(modules_config, "a") as f: + f.write("\nwithName: 'BPIPE' {\n cache = false \n}") + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline, hide_progress=False) + lint_obj._load() + result = lint_obj.modules_config() + assert len(result["failed"]) == 1 + assert result["failed"][0].startswith("`conf/modules.config` contains `withName:BPIPE`") + + def test_ignore_modules_config(self): + """Tests ignoring the modules.config passes linting.""" + + # ignore modules.config in linting + with open(Path(self.new_pipeline) / ".nf-core.yml") as f: + content = yaml.safe_load(f) + old_content = content.copy() + content["lint"] = {"modules_config": False} + with open(Path(self.new_pipeline) / ".nf-core.yml", "w") as f: + yaml.dump(content, f) + Path(self.new_pipeline, "conf", "modules.config").unlink() + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + result = lint_obj.modules_config() + assert len(result["ignored"]) == 1 + assert result["ignored"][0].startswith("`conf/modules.config` not found, but it is ignored.") + # cleanup + with open(Path(self.new_pipeline) / ".nf-core.yml", "w") as f: + yaml.dump(old_content, f) + + def test_superfluous_withname_in_base_config_fails(self): + """Tests finding withName in base.config fails linting.""" + + # Add withName to base.config + base_config = Path(self.new_pipeline) / "conf" / "base.config" + with open(base_config, "a") as f: + f.write("\nwithName:CUSTOM_DUMPSOFTWAREVERSIONS {\n cache = false \n}") + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + result = lint_obj.base_config() + assert len(result["failed"]) == 1 + assert 
result["failed"][0].startswith("`conf/base.config` contains `withName:CUSTOM_DUMPSOFTWAREVERSIONS`") + + def test_ignore_base_config(self): + """Tests ignoring the base.config passes linting.""" + + # ignore base.config in linting + with open(Path(self.new_pipeline) / ".nf-core.yml") as f: + content = yaml.safe_load(f) + old_content = content.copy() + content["lint"] = {"base_config": False} + with open(Path(self.new_pipeline) / ".nf-core.yml", "w") as f: + yaml.dump(content, f) + Path(self.new_pipeline, "conf", "base.config").unlink() + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + result = lint_obj.base_config() + assert len(result["ignored"]) == 1 + assert result["ignored"][0].startswith("`conf/base.config` not found, but it is ignored.") + # cleanup + with open(Path(self.new_pipeline) / ".nf-core.yml", "w") as f: + yaml.dump(old_content, f) diff --git a/tests/pipelines/lint/test_files_exist.py b/tests/pipelines/lint/test_files_exist.py new file mode 100644 index 000000000..85ba81753 --- /dev/null +++ b/tests/pipelines/lint/test_files_exist.py @@ -0,0 +1,91 @@ +from pathlib import Path + +import nf_core.pipelines.lint + +from ..test_lint import TestLint + + +class TestLintFilesExist(TestLint): + def setUp(self) -> None: + super().setUp() + self.new_pipeline = self._make_pipeline_copy() + + def test_files_exist_missing_config(self): + """Lint test: critical files missing FAIL""" + + Path(self.new_pipeline, "CHANGELOG.md").unlink() + + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + lint_obj.nf_config["manifest.name"] = "nf-core/testpipeline" + + results = lint_obj.files_exist() + assert "File not found: `CHANGELOG.md`" in results["failed"] + + def test_files_exist_missing_main(self): + """Check if missing main issues warning""" + + Path(self.new_pipeline, "main.nf").unlink() + + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + + results = 
lint_obj.files_exist() + assert "File not found: `main.nf`" in results["warned"] + + def test_files_exist_deprecated_file(self): + """Check whether deprecated file issues warning""" + + nf = Path(self.new_pipeline, "parameters.settings.json") + nf.touch() + + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + + results = lint_obj.files_exist() + assert results["failed"] == ["File must be removed: `parameters.settings.json`"] + + def test_files_exist_pass(self): + """Lint check should pass if all files are there""" + + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + + results = lint_obj.files_exist() + assert results["failed"] == [] + + def test_files_exist_pass_conditional(self): + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + lint_obj.nf_config["plugins"] = [] + lib_dir = Path(self.new_pipeline, "lib") + lib_dir.mkdir() + (lib_dir / "nfcore_external_java_deps.jar").touch() + results = lint_obj.files_exist() + assert results["failed"] == [] + assert results["ignored"] == [] + + def test_files_exist_fail_conditional(self): + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + lib_dir = Path(self.new_pipeline, "lib") + lib_dir.mkdir() + (lib_dir / "nfcore_external_java_deps.jar").touch() + results = lint_obj.files_exist() + assert results["failed"] == ["File must be removed: `lib/nfcore_external_java_deps.jar`"] + assert results["ignored"] == [] + + def test_files_exist_pass_conditional_nfschema(self): + # replace nf-validation with nf-schema in nextflow.config + with open(Path(self.new_pipeline, "nextflow.config")) as f: + config = f.read() + config = config.replace("nf-validation", "nf-schema") + with open(Path(self.new_pipeline, "nextflow.config"), "w") as f: + f.write(config) + + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + lint_obj.nf_config["manifest.schema"] = "nf-core" 
+ results = lint_obj.files_exist() + assert results["failed"] == [] + assert results["ignored"] == [] diff --git a/tests/pipelines/lint/test_files_unchanged.py b/tests/pipelines/lint/test_files_unchanged.py new file mode 100644 index 000000000..4282b4995 --- /dev/null +++ b/tests/pipelines/lint/test_files_unchanged.py @@ -0,0 +1,28 @@ +from pathlib import Path + +import nf_core.pipelines.lint + +from ..test_lint import TestLint + + +class TestLintFilesUnchanged(TestLint): + def test_files_unchanged_pass(self): + self.lint_obj._load() + results = self.lint_obj.files_unchanged() + assert len(results.get("warned", [])) == 0 + assert len(results.get("failed", [])) == 0 + assert len(results.get("ignored", [])) == 0 + assert not results.get("could_fix", True) + + def test_files_unchanged_fail(self): + failing_file = Path(".github", "CONTRIBUTING.md") + new_pipeline = self._make_pipeline_copy() + with open(Path(new_pipeline, failing_file), "a") as fh: + fh.write("THIS SHOULD NOT BE HERE") + + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) + lint_obj._load() + results = lint_obj.files_unchanged() + assert len(results["failed"]) > 0 + assert str(failing_file) in results["failed"][0] + assert results["could_fix"] diff --git a/tests/pipelines/lint/test_merge_markers.py b/tests/pipelines/lint/test_merge_markers.py new file mode 100644 index 000000000..3094d8f8d --- /dev/null +++ b/tests/pipelines/lint/test_merge_markers.py @@ -0,0 +1,25 @@ +import os + +import nf_core.pipelines.lint + +from ..test_lint import TestLint + + +class TestLintMergeMarkers(TestLint): + def test_merge_markers_found(self): + """Missing 'jobs' field should result in failure""" + new_pipeline = self._make_pipeline_copy() + + with open(os.path.join(new_pipeline, "main.nf")) as fh: + main_nf_content = fh.read() + main_nf_content = ">>>>>>>\n" + main_nf_content + with open(os.path.join(new_pipeline, "main.nf"), "w") as fh: + fh.write(main_nf_content) + + lint_obj = 
nf_core.pipelines.lint.PipelineLint(new_pipeline) + lint_obj._load() + + results = lint_obj.merge_markers() + assert len(results["failed"]) > 0 + assert len(results["passed"]) == 0 + assert "Merge marker '>>>>>>>' in " in results["failed"][0] diff --git a/tests/pipelines/lint/test_modules_json.py b/tests/pipelines/lint/test_modules_json.py new file mode 100644 index 000000000..0d8333d9a --- /dev/null +++ b/tests/pipelines/lint/test_modules_json.py @@ -0,0 +1,10 @@ +from ..test_lint import TestLint + + +class TestLintModulesJson(TestLint): + def test_modules_json_pass(self): + self.lint_obj._load() + results = self.lint_obj.modules_json() + assert len(results.get("warned", [])) == 0 + assert len(results.get("failed", [])) == 0 + assert len(results.get("passed", [])) > 0 diff --git a/tests/pipelines/lint/test_multiqc_config.py b/tests/pipelines/lint/test_multiqc_config.py new file mode 100644 index 000000000..5da6e567e --- /dev/null +++ b/tests/pipelines/lint/test_multiqc_config.py @@ -0,0 +1,127 @@ +from pathlib import Path + +import yaml + +import nf_core.pipelines.bump_version +import nf_core.pipelines.lint + +from ..test_lint import TestLint + + +class TestLintMultiqcConfig(TestLint): + def setUp(self) -> None: + super().setUp() + self.new_pipeline = self._make_pipeline_copy() + self.multiqc_config_yml = Path(self.new_pipeline, "assets", "multiqc_config.yml") + + def test_multiqc_config_exists(self): + """Test that linting fails if the multiqc_config.yml file is missing""" + # Delete the file + self.multiqc_config_yml.unlink() + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + result = lint_obj.multiqc_config() + assert result["failed"] == ["`assets/multiqc_config.yml` not found."] + + def test_multiqc_config_ignore(self): + """Test that linting succeeds if the multiqc_config.yml file is missing but ignored""" + # Delete the file + self.multiqc_config_yml.unlink() + with open(Path(self.new_pipeline, ".nf-core.yml")) as f: + 
content = yaml.safe_load(f) + old_content = content.copy() + content["lint"] = {"multiqc_config": False} + with open(Path(self.new_pipeline, ".nf-core.yml"), "w") as f: + yaml.dump(content, f) + + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + result = lint_obj.multiqc_config() + assert result["ignored"] == ["`assets/multiqc_config.yml` not found, but it is ignored."] + + # cleanup + with open(Path(self.new_pipeline, ".nf-core.yml"), "w") as f: + yaml.dump(old_content, f) + + def test_multiqc_config_missing_report_section_order(self): + """Test that linting fails if the multiqc_config.yml file is missing the report_section_order""" + with open(self.multiqc_config_yml) as fh: + mqc_yml = yaml.safe_load(fh) + mqc_yml_tmp = mqc_yml.copy() + mqc_yml.pop("report_section_order") + with open(self.multiqc_config_yml, "w") as fh: + yaml.safe_dump(mqc_yml, fh) + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + result = lint_obj.multiqc_config() + # Reset the file + with open(self.multiqc_config_yml, "w") as fh: + yaml.safe_dump(mqc_yml_tmp, fh) + assert result["failed"] == ["`assets/multiqc_config.yml` does not contain `report_section_order`"] + + def test_multiqc_incorrect_export_plots(self): + """Test that linting fails if the multiqc_config.yml file has an incorrect value for export_plots""" + with open(self.multiqc_config_yml) as fh: + mqc_yml = yaml.safe_load(fh) + mqc_yml_tmp = mqc_yml.copy() + mqc_yml["export_plots"] = False + with open(self.multiqc_config_yml, "w") as fh: + yaml.safe_dump(mqc_yml, fh) + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + result = lint_obj.multiqc_config() + # Reset the file + with open(self.multiqc_config_yml, "w") as fh: + yaml.safe_dump(mqc_yml_tmp, fh) + assert result["failed"] == ["`assets/multiqc_config.yml` does not contain 'export_plots: true'."] + + def test_multiqc_config_report_comment_fail(self): + """Test that 
linting fails if the multiqc_config.yml file has an incorrect report_comment""" + with open(self.multiqc_config_yml) as fh: + mqc_yml = yaml.safe_load(fh) + mqc_yml_tmp = mqc_yml.copy() + mqc_yml["report_comment"] = "This is a test" + with open(self.multiqc_config_yml, "w") as fh: + yaml.safe_dump(mqc_yml, fh) + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + result = lint_obj.multiqc_config() + # Reset the file + with open(self.multiqc_config_yml, "w") as fh: + yaml.safe_dump(mqc_yml_tmp, fh) + assert len(result["failed"]) == 1 + assert result["failed"][0].startswith( + "`assets/multiqc_config.yml` does not contain a matching 'report_comment'." + ) + + def test_multiqc_config_report_comment_release_fail(self): + """Test that linting fails if the multiqc_config.yml file has an incorrect report_comment for a release version""" + with open(self.multiqc_config_yml) as fh: + mqc_yml = yaml.safe_load(fh) + mqc_yml_tmp = mqc_yml.copy() + with open(self.multiqc_config_yml, "w") as fh: + yaml.safe_dump(mqc_yml, fh) + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + # bump version + lint_obj.nf_config["manifest.version"] = "1.0" + result = lint_obj.multiqc_config() + # Reset the file + with open(self.multiqc_config_yml, "w") as fh: + yaml.safe_dump(mqc_yml_tmp, fh) + assert len(result["failed"]) == 1 + assert result["failed"][0].startswith( + "`assets/multiqc_config.yml` does not contain a matching 'report_comment'." 
+ ) + + def test_multiqc_config_report_comment_release_succeed(self): + """Test that linting succeeds if the multiqc_config.yml file has a correct report_comment for a release version""" + + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + # bump version using the bump_version function + nf_core.pipelines.bump_version.bump_pipeline_version(lint_obj, "1.0") + # lint again + lint_obj._load() + result = lint_obj.multiqc_config() + assert "`assets/multiqc_config.yml` contains a matching 'report_comment'." in result["passed"] diff --git a/tests/pipelines/lint/test_nextflow_config.py b/tests/pipelines/lint/test_nextflow_config.py new file mode 100644 index 000000000..01173aec3 --- /dev/null +++ b/tests/pipelines/lint/test_nextflow_config.py @@ -0,0 +1,200 @@ +import os +import re +from pathlib import Path + +import nf_core.pipelines.create.create +import nf_core.pipelines.lint + +from ..test_lint import TestLint + + +class TestLintNextflowConfig(TestLint): + def setUp(self) -> None: + super().setUp() + self.new_pipeline = self._make_pipeline_copy() + + def test_nextflow_config_example_pass(self): + """Tests that config variable existence test works with good pipeline example""" + self.lint_obj.load_pipeline_config() + result = self.lint_obj.nextflow_config() + assert len(result["failed"]) == 0 + assert len(result["warned"]) == 0 + + def test_default_values_match(self): + """Test that the default values in nextflow.config match the default values defined in the nextflow_schema.json.""" + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj.load_pipeline_config() + result = lint_obj.nextflow_config() + assert len(result["failed"]) == 0 + assert len(result["warned"]) == 0 + assert "Config default value correct: params.max_cpus" in str(result["passed"]) + assert "Config default value correct: params.validate_params" in str(result["passed"]) + + def test_nextflow_config_bad_name_fail(self): + """Tests that config 
variable existence test fails with bad pipeline name""" + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj.load_pipeline_config() + + lint_obj.nf_config["manifest.name"] = "bad_name" + result = lint_obj.nextflow_config() + assert len(result["failed"]) > 0 + assert len(result["warned"]) == 0 + + def test_nextflow_config_dev_in_release_mode_failed(self): + """Tests that config variable existence test fails with dev version in release mode""" + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj.load_pipeline_config() + + lint_obj.release_mode = True + lint_obj.nf_config["manifest.version"] = "dev_is_bad_name" + result = lint_obj.nextflow_config() + assert len(result["failed"]) > 0 + assert len(result["warned"]) == 0 + + def test_nextflow_config_missing_test_profile_failed(self): + """Test failure if config file does not contain `test` profile.""" + # Change the name of the test profile so there is no such profile + nf_conf_file = os.path.join(self.new_pipeline, "nextflow.config") + with open(nf_conf_file) as f: + content = f.read() + fail_content = re.sub(r"\btest\b", "testfail", content) + with open(nf_conf_file, "w") as f: + f.write(fail_content) + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj.load_pipeline_config() + result = lint_obj.nextflow_config() + assert len(result["failed"]) > 0 + assert len(result["warned"]) == 0 + + def test_default_values_fail(self): + """Test linting fails if the default values in nextflow.config do not match the ones defined in the nextflow_schema.json.""" + # Change the default value of max_cpus in nextflow.config + nf_conf_file = Path(self.new_pipeline) / "nextflow.config" + with open(nf_conf_file) as f: + content = f.read() + fail_content = re.sub(r"\bmax_cpus\s*=\s*16\b", "max_cpus = 0", content) + with open(nf_conf_file, "w") as f: + f.write(fail_content) + # Change the default value of max_memory in nextflow_schema.json + nf_schema_file = 
Path(self.new_pipeline) / "nextflow_schema.json" + with open(nf_schema_file) as f: + content = f.read() + fail_content = re.sub(r'"default": "128.GB"', '"default": "18.GB"', content) + with open(nf_schema_file, "w") as f: + f.write(fail_content) + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj.load_pipeline_config() + result = lint_obj.nextflow_config() + assert len(result["failed"]) == 2 + assert ( + "Config default value incorrect: `params.max_cpus` is set as `16` in `nextflow_schema.json` but is `0` in `nextflow.config`." + in result["failed"] + ) + assert ( + "Config default value incorrect: `params.max_memory` is set as `18.GB` in `nextflow_schema.json` but is `128.GB` in `nextflow.config`." + in result["failed"] + ) + + def test_catch_params_assignment_in_main_nf(self): + """Test linting fails if main.nf contains an assignment to a parameter from nextflow_schema.json.""" + # Add parameter assignment in main.nf + main_nf_file = Path(self.new_pipeline) / "main.nf" + with open(main_nf_file, "a") as f: + f.write("params.max_time = 42") + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj.load_pipeline_config() + result = lint_obj.nextflow_config() + assert len(result["failed"]) == 1 + assert ( + result["failed"][0] + == "Config default value incorrect: `params.max_time` is set as `240.h` in `nextflow_schema.json` but is `null` in `nextflow.config`." + ) + + def test_allow_params_reference_in_main_nf(self): + """Test linting allows for references like `params.aligner == 'bwa'` in main.nf. 
The test will detect if the bug mentioned in GitHub-issue #2833 reemerges.""" + # Add parameter reference in main.nf + main_nf_file = Path(self.new_pipeline) / "main.nf" + with open(main_nf_file, "a") as f: + f.write("params.max_time == 42") + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj.load_pipeline_config() + result = lint_obj.nextflow_config() + assert len(result["failed"]) == 0 + + def test_default_values_ignored(self): + """Test ignoring linting of default values.""" + # Add max_cpus to the ignore list + nf_core_yml = Path(self.new_pipeline) / ".nf-core.yml" + with open(nf_core_yml, "w") as f: + f.write( + "repository_type: pipeline\nlint:\n nextflow_config:\n - config_defaults:\n - params.max_cpus\n" + ) + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj.load_pipeline_config() + lint_obj._load_lint_config() + result = lint_obj.nextflow_config() + assert len(result["failed"]) == 0 + assert len(result["ignored"]) == 1 + assert "Config default value correct: params.max_cpu" not in str(result["passed"]) + assert "Config default ignored: params.max_cpus" in str(result["ignored"]) + + def test_default_values_float(self): + """Test comparing two float values.""" + # Add a float value `dummy=0.0001` to the nextflow.config below `validate_params` + nf_conf_file = Path(self.new_pipeline) / "nextflow.config" + with open(nf_conf_file) as f: + content = f.read() + fail_content = re.sub( + r"validate_params\s*=\s*true", "params.validate_params = true\ndummy = 0.000000001", content + ) + with open(nf_conf_file, "w") as f: + f.write(fail_content) + # Add a float value `dummy` to the nextflow_schema.json + nf_schema_file = Path(self.new_pipeline) / "nextflow_schema.json" + with open(nf_schema_file) as f: + content = f.read() + fail_content = re.sub( + r'"validate_params": {', + ' "dummy": {"type": "number","default":0.000000001},\n"validate_params": {', + content, + ) + with open(nf_schema_file, "w") as f: + 
f.write(fail_content) + + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj.load_pipeline_config() + result = lint_obj.nextflow_config() + assert len(result["failed"]) == 0 + assert len(result["warned"]) == 0 + assert "Config default value correct: params.dummy" in str(result["passed"]) + + def test_default_values_float_fail(self): + """Test comparing two float values.""" + # Add a float value `dummy=0.0001` to the nextflow.config below `validate_params` + nf_conf_file = Path(self.new_pipeline) / "nextflow.config" + with open(nf_conf_file) as f: + content = f.read() + fail_content = re.sub( + r"validate_params\s*=\s*true", "params.validate_params = true\ndummy = 0.000000001", content + ) + with open(nf_conf_file, "w") as f: + f.write(fail_content) + # Add a float value `dummy` to the nextflow_schema.json + nf_schema_file = Path(self.new_pipeline) / "nextflow_schema.json" + with open(nf_schema_file) as f: + content = f.read() + fail_content = re.sub( + r'"validate_params": {', + ' "dummy": {"type": "float","default":0.000001},\n"validate_params": {', + content, + ) + with open(nf_schema_file, "w") as f: + f.write(fail_content) + + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj.load_pipeline_config() + result = lint_obj.nextflow_config() + + assert len(result["failed"]) == 1 + assert len(result["warned"]) == 0 + assert "Config default value incorrect: `params.dummy" in str(result["failed"]) diff --git a/tests/pipelines/lint/test_nfcore_yml.py b/tests/pipelines/lint/test_nfcore_yml.py new file mode 100644 index 000000000..955c00da8 --- /dev/null +++ b/tests/pipelines/lint/test_nfcore_yml.py @@ -0,0 +1,57 @@ +import re +from pathlib import Path + +import nf_core.pipelines.create +import nf_core.pipelines.lint + +from ..test_lint import TestLint + + +class TestLintNfCoreYml(TestLint): + def setUp(self) -> None: + super().setUp() + self.new_pipeline = self._make_pipeline_copy() + self.nf_core_yml = 
Path(self.new_pipeline) / ".nf-core.yml" + + def test_nfcore_yml_pass(self): + """Lint test: nfcore_yml - PASS""" + self.lint_obj._load() + results = self.lint_obj.nfcore_yml() + + assert "Repository type in `.nf-core.yml` is valid" in str(results["passed"]) + assert "nf-core version in `.nf-core.yml` is set to the latest version" in str(results["passed"]) + assert len(results.get("warned", [])) == 0 + assert len(results.get("failed", [])) == 0 + assert len(results.get("ignored", [])) == 0 + + def test_nfcore_yml_fail_repo_type(self): + """Lint test: nfcore_yml - FAIL - repository type not set""" + + with open(self.nf_core_yml) as fh: + content = fh.read() + new_content = content.replace("repository_type: pipeline", "repository_type: foo") + with open(self.nf_core_yml, "w") as fh: + fh.write(new_content) + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + results = lint_obj.nfcore_yml() + assert "Repository type in `.nf-core.yml` is not valid." in str(results["failed"]) + assert len(results.get("warned", [])) == 0 + assert len(results.get("passed", [])) >= 0 + assert len(results.get("ignored", [])) == 0 + + def test_nfcore_yml_fail_nfcore_version(self): + """Lint test: nfcore_yml - FAIL - nf-core version not set""" + + with open(self.nf_core_yml) as fh: + content = fh.read() + new_content = re.sub(r"nf_core_version:.+", "nf_core_version: foo", content) + with open(self.nf_core_yml, "w") as fh: + fh.write(new_content) + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + results = lint_obj.nfcore_yml() + assert "nf-core version in `.nf-core.yml` is not set to the latest version." 
in str(results["warned"]) + assert len(results.get("failed", [])) == 0 + assert len(results.get("passed", [])) >= 0 + assert len(results.get("ignored", [])) == 0 diff --git a/tests/pipelines/lint/test_template_strings.py b/tests/pipelines/lint/test_template_strings.py new file mode 100644 index 000000000..406ba63e0 --- /dev/null +++ b/tests/pipelines/lint/test_template_strings.py @@ -0,0 +1,55 @@ +import subprocess +from pathlib import Path + +import nf_core.pipelines.create +import nf_core.pipelines.lint + +from ..test_lint import TestLint + + +class TestLintTemplateStrings(TestLint): + def setUp(self) -> None: + super().setUp() + self.new_pipeline = self._make_pipeline_copy() + + def test_template_strings(self): + """Tests finding a template string in a file fails linting.""" + # Add template string to a file + txt_file = Path(self.new_pipeline) / "docs" / "test.txt" + with open(txt_file, "w") as f: + f.write("my {{ template_string }}") + subprocess.check_output(["git", "add", "docs"], cwd=self.new_pipeline) + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + result = lint_obj.template_strings() + assert len(result["failed"]) == 1 + assert len(result["ignored"]) == 0 + + def test_template_strings_ignored(self): + """Tests ignoring template_strings""" + # Ignore template_strings test + nf_core_yml = Path(self.new_pipeline) / ".nf-core.yml" + with open(nf_core_yml, "w") as f: + f.write("repository_type: pipeline\nlint:\n template_strings: False") + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + lint_obj._lint_pipeline() + assert len(lint_obj.failed) == 0 + assert len(lint_obj.ignored) == 1 + + def test_template_strings_ignore_file(self): + """Tests ignoring template_strings file""" + # Add template string to a file + txt_file = Path(self.new_pipeline) / "docs" / "test.txt" + with open(txt_file, "w") as f: + f.write("my {{ template_string }}") + subprocess.check_output(["git", "add", 
"docs"], cwd=self.new_pipeline) + # Ignore template_strings test + nf_core_yml = Path(self.new_pipeline) / ".nf-core.yml" + with open(nf_core_yml, "w") as f: + f.write("repository_type: pipeline\nlint:\n template_strings:\n - docs/test.txt") + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + lint_obj._load() + result = lint_obj.template_strings() + assert len(result["failed"]) == 0 + assert len(result["ignored"]) == 1 diff --git a/tests/pipelines/lint/test_version_consistency.py b/tests/pipelines/lint/test_version_consistency.py new file mode 100644 index 000000000..c5a2cc74f --- /dev/null +++ b/tests/pipelines/lint/test_version_consistency.py @@ -0,0 +1,19 @@ +import nf_core.pipelines.create.create +import nf_core.pipelines.lint + +from ..test_lint import TestLint + + +class TestLintVersionConsistency(TestLint): + def test_version_consistency(self): + """Tests that the version consistency check passes for consistent tags but flags the non-numeric dev version""" + new_pipeline = self._make_pipeline_copy() + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) + lint_obj.load_pipeline_config() + lint_obj.nextflow_config() + + result = lint_obj.version_consistency() + assert result["passed"] == [ + "Version tags are numeric and consistent between container, release tag and config." 
+ ] + assert result["failed"] == ["manifest.version was not numeric: 1.0.0dev!"] diff --git a/tests/pipelines/lint/version_consistency.py b/tests/pipelines/lint/version_consistency.py deleted file mode 100644 index 88eadce39..000000000 --- a/tests/pipelines/lint/version_consistency.py +++ /dev/null @@ -1,14 +0,0 @@ -import nf_core.pipelines.create.create -import nf_core.pipelines.lint - - -def test_version_consistency(self): - """Tests that config variable existence test fails with bad pipeline name""" - new_pipeline = self._make_pipeline_copy() - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - lint_obj.load_pipeline_config() - lint_obj.nextflow_config() - - result = lint_obj.version_consistency() - assert result["passed"] == ["Version tags are numeric and consistent between container, release tag and config."] - assert result["failed"] == ["manifest.version was not numeric: 1.0.0dev!"] diff --git a/tests/pipelines/test_lint.py b/tests/pipelines/test_lint.py index 54279cd06..b3eac17cb 100644 --- a/tests/pipelines/test_lint.py +++ b/tests/pipelines/test_lint.py @@ -19,204 +19,166 @@ def setUp(self) -> None: super().setUp() self.lint_obj = nf_core.pipelines.lint.PipelineLint(self.pipeline_dir) - ########################## - # CORE lint.py FUNCTIONS # - ########################## - def test_run_linting_function(self): - """Run the master run_linting() function in lint.py - - We don't really check any of this code as it's just a series of function calls - and we're testing each of those individually. This is mostly to check for syntax errors.""" - nf_core.pipelines.lint.run_linting(self.pipeline_dir, False) - - def test_init_pipeline_lint(self): - """Simply create a PipelineLint object. 
- - This checks that all of the lint test imports are working properly, - we also check that the git sha was found and that the release flag works properly - """ - lint_obj = nf_core.pipelines.lint.PipelineLint(self.pipeline_dir, True) - - # Tests that extra test is added for release mode - assert "version_consistency" in lint_obj.lint_tests - assert lint_obj.git_sha - # Tests that parent nf_core.utils.Pipeline class __init__() is working to find git hash - assert len(lint_obj.git_sha) > 0 - - def test_load_lint_config_not_found(self): - """Try to load a linting config file that doesn't exist""" - self.lint_obj._load_lint_config() - assert self.lint_obj.lint_config == {} - - def test_load_lint_config_ignore_all_tests(self): - """Try to load a linting config file that ignores all tests""" - - # Make a copy of the test pipeline and create a lint object - new_pipeline = self._make_pipeline_copy() - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - - # Make a config file listing all test names - config_dict = {"lint": {test_name: False for test_name in lint_obj.lint_tests}} - with open(Path(new_pipeline, ".nf-core.yml"), "w") as fh: - yaml.dump(config_dict, fh) - - # Load the new lint config file and check - lint_obj._load_lint_config() - assert sorted(list(lint_obj.lint_config.keys())) == sorted(lint_obj.lint_tests) - - # Try running linting and make sure that all tests are ignored - lint_obj._lint_pipeline() - assert len(lint_obj.passed) == 0 - assert len(lint_obj.warned) == 0 - assert len(lint_obj.failed) == 0 - assert len(lint_obj.ignored) == len(lint_obj.lint_tests) - - @with_temporary_folder - def test_json_output(self, tmp_dir): - """ - Test creation of a JSON file with lint results - - Expected JSON output: - { - "nf_core_tools_version": "1.10.dev0", - "date_run": "2020-06-05 10:56:42", - "tests_pass": [ - [ 1, "This test passed"], - [ 2, "This test also passed"] - ], - "tests_warned": [ - [ 2, "This test gave a warning"] - ], - "tests_failed": [], 
- "num_tests_pass": 2, - "num_tests_warned": 1, - "num_tests_failed": 0, - "has_tests_pass": true, - "has_tests_warned": true, - "has_tests_failed": false - } - """ - self.lint_obj.passed.append(("test_one", "This test passed")) - self.lint_obj.passed.append(("test_two", "This test also passed")) - self.lint_obj.warned.append(("test_three", "This test gave a warning")) - - # Make a temp dir for the JSON output - json_fn = Path(tmp_dir, "lint_results.json") - self.lint_obj._save_json_results(json_fn) - - # Load created JSON file and check its contents - with open(json_fn) as fh: - try: - saved_json = json.load(fh) - except json.JSONDecodeError as e: - raise UserWarning(f"Unable to load JSON file '{json_fn}' due to error {e}") - assert saved_json["num_tests_pass"] > 0 - assert saved_json["num_tests_warned"] > 0 - assert saved_json["num_tests_ignored"] == 0 - assert saved_json["num_tests_failed"] == 0 - assert saved_json["has_tests_pass"] - assert saved_json["has_tests_warned"] - assert not saved_json["has_tests_ignored"] - assert not saved_json["has_tests_failed"] - - def test_wrap_quotes(self): - md = self.lint_obj._wrap_quotes(["one", "two", "three"]) - assert md == "`one` or `two` or `three`" - - def test_sphinx_md_files(self): - """Check that we have .md files for all lint module code, - and that there are no unexpected files (eg. 
deleted lint tests)""" - - docs_basedir = Path(Path(__file__).parent.parent.parent, "docs", "api", "_src", "pipeline_lint_tests") - - # Get list of existing .md files - existing_docs = [] - existing_docs = [ - str(Path(docs_basedir, fn)) - for fn in Path(docs_basedir).iterdir() - if fn.match("*.md") and not fn.match("index.md") - ] - - # Check .md files against each test name - lint_obj = nf_core.pipelines.lint.PipelineLint("", True) - for test_name in lint_obj.lint_tests: - fn = Path(docs_basedir, f"{test_name}.md") - assert fn.exists(), f"Could not find lint docs .md file: {fn}" - existing_docs.remove(str(fn)) - - # Check that we have no remaining .md files that we didn't expect - assert len(existing_docs) == 0, f"Unexpected lint docs .md files found: {', '.join(existing_docs)}" + +########################## +# CORE lint.py FUNCTIONS # +########################## +def test_run_linting_function(self): + """Run the master run_linting() function in lint.py + + We don't really check any of this code as it's just a series of function calls + and we're testing each of those individually. This is mostly to check for syntax errors.""" + nf_core.pipelines.lint.run_linting(self.pipeline_dir, False) + + +def test_init_pipeline_lint(self): + """Simply create a PipelineLint object. 
+ + This checks that all of the lint test imports are working properly, + we also check that the git sha was found and that the release flag works properly + """ + lint_obj = nf_core.pipelines.lint.PipelineLint(self.pipeline_dir, True) + + # Tests that extra test is added for release mode + assert "version_consistency" in lint_obj.lint_tests + assert lint_obj.git_sha + # Tests that parent nf_core.utils.Pipeline class __init__() is working to find git hash + assert len(lint_obj.git_sha) > 0 + + +def test_load_lint_config_not_found(self): + """Try to load a linting config file that doesn't exist""" + self.lint_obj._load_lint_config() + assert self.lint_obj.lint_config == {} + + +def test_load_lint_config_ignore_all_tests(self): + """Try to load a linting config file that ignores all tests""" + + # Make a copy of the test pipeline and create a lint object + new_pipeline = self._make_pipeline_copy() + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) + + # Make a config file listing all test names + config_dict = {"lint": {test_name: False for test_name in lint_obj.lint_tests}} + with open(Path(new_pipeline, ".nf-core.yml"), "w") as fh: + yaml.dump(config_dict, fh) + + # Load the new lint config file and check + lint_obj._load_lint_config() + assert sorted(list(lint_obj.lint_config.keys())) == sorted(lint_obj.lint_tests) + + # Try running linting and make sure that all tests are ignored + lint_obj._lint_pipeline() + assert len(lint_obj.passed) == 0 + assert len(lint_obj.warned) == 0 + assert len(lint_obj.failed) == 0 + assert len(lint_obj.ignored) == len(lint_obj.lint_tests) + + +@with_temporary_folder +def test_json_output(self, tmp_dir): + """ + Test creation of a JSON file with lint results + + Expected JSON output: + { + "nf_core_tools_version": "1.10.dev0", + "date_run": "2020-06-05 10:56:42", + "tests_pass": [ + [ 1, "This test passed"], + [ 2, "This test also passed"] + ], + "tests_warned": [ + [ 2, "This test gave a warning"] + ], + "tests_failed": 
[], + "num_tests_pass": 2, + "num_tests_warned": 1, + "num_tests_failed": 0, + "has_tests_pass": true, + "has_tests_warned": true, + "has_tests_failed": false + } + """ + self.lint_obj.passed.append(("test_one", "This test passed")) + self.lint_obj.passed.append(("test_two", "This test also passed")) + self.lint_obj.warned.append(("test_three", "This test gave a warning")) + + # Make a temp dir for the JSON output + json_fn = Path(tmp_dir, "lint_results.json") + self.lint_obj._save_json_results(json_fn) + + # Load created JSON file and check its contents + with open(json_fn) as fh: + try: + saved_json = json.load(fh) + except json.JSONDecodeError as e: + raise UserWarning(f"Unable to load JSON file '{json_fn}' due to error {e}") + assert saved_json["num_tests_pass"] > 0 + assert saved_json["num_tests_warned"] > 0 + assert saved_json["num_tests_ignored"] == 0 + assert saved_json["num_tests_failed"] == 0 + assert saved_json["has_tests_pass"] + assert saved_json["has_tests_warned"] + assert not saved_json["has_tests_ignored"] + assert not saved_json["has_tests_failed"] + + +def test_wrap_quotes(self): + md = self.lint_obj._wrap_quotes(["one", "two", "three"]) + assert md == "`one` or `two` or `three`" + + +def test_sphinx_md_files(self): + """Check that we have .md files for all lint module code, + and that there are no unexpected files (eg. 
deleted lint tests)""" + + docs_basedir = Path(Path(__file__).parent.parent.parent, "docs", "api", "_src", "pipeline_lint_tests") + + # Get list of existing .md files + existing_docs = [] + existing_docs = [ + str(Path(docs_basedir, fn)) + for fn in Path(docs_basedir).iterdir() + if fn.match("*.md") and not fn.match("index.md") + ] + + # Check .md files against each test name + lint_obj = nf_core.pipelines.lint.PipelineLint("", True) + for test_name in lint_obj.lint_tests: + fn = Path(docs_basedir, f"{test_name}.md") + assert fn.exists(), f"Could not find lint docs .md file: {fn}" + existing_docs.remove(str(fn)) + + # Check that we have no remaining .md files that we didn't expect + assert len(existing_docs) == 0, f"Unexpected lint docs .md files found: {', '.join(existing_docs)}" ####################### # SPECIFIC LINT TESTS # ####################### - from .lint.actions_awstest import ( # type: ignore[misc] - test_actions_awstest_fail, - test_actions_awstest_pass, - ) - from .lint.actions_ci import ( # type: ignore[misc] - test_actions_ci_fail_wrong_nf, - test_actions_ci_fail_wrong_trigger, - test_actions_ci_pass, - ) - from .lint.actions_schema_validation import ( # type: ignore[misc] - test_actions_schema_validation_fails_for_additional_property, - test_actions_schema_validation_missing_jobs, - test_actions_schema_validation_missing_on, - ) - from .lint.configs import ( # type: ignore[misc] - test_ignore_base_config, - test_ignore_modules_config, - test_superfluous_withname_in_base_config_fails, - test_superfluous_withname_in_modules_config_fails, - test_withname_in_modules_config, - ) - from .lint.files_exist import ( # type: ignore[misc] - test_files_exist_deprecated_file, - test_files_exist_fail_conditional, - test_files_exist_missing_config, - test_files_exist_missing_main, - test_files_exist_pass, - test_files_exist_pass_conditional, - test_files_exist_pass_conditional_nfschema, - ) - from .lint.files_unchanged import ( # type: ignore[misc] - 
test_files_unchanged_fail, - test_files_unchanged_pass, - ) - from .lint.merge_markers import test_merge_markers_found # type: ignore[misc] - from .lint.modules_json import test_modules_json_pass # type: ignore[misc] - from .lint.multiqc_config import ( # type: ignore[misc] - test_multiqc_config_exists, - test_multiqc_config_ignore, - test_multiqc_config_missing_report_section_order, - test_multiqc_config_report_comment_fail, - test_multiqc_config_report_comment_release_fail, - test_multiqc_config_report_comment_release_succeed, - test_multiqc_incorrect_export_plots, - ) - from .lint.nextflow_config import ( # type: ignore[misc] - test_allow_params_reference_in_main_nf, - test_catch_params_assignment_in_main_nf, - test_default_values_fail, - test_default_values_float, - test_default_values_float_fail, - test_default_values_ignored, - test_default_values_match, - test_nextflow_config_bad_name_fail, - test_nextflow_config_dev_in_release_mode_failed, - test_nextflow_config_example_pass, - test_nextflow_config_missing_test_profile_failed, - ) - from .lint.nfcore_yml import ( # type: ignore[misc] - test_nfcore_yml_fail_nfcore_version, - test_nfcore_yml_fail_repo_type, - test_nfcore_yml_pass, - ) - from .lint.template_strings import ( # type: ignore[misc] - test_template_strings, - test_template_strings_ignore_file, - test_template_strings_ignored, - ) - from .lint.version_consistency import test_version_consistency # type: ignore[misc] + # from .lint.nextflow_config import ( # type: ignore[misc] + # test_allow_params_reference_in_main_nf, + # test_catch_params_assignment_in_main_nf, + # test_default_values_fail, + # test_default_values_float, + # test_default_values_float_fail, + # test_default_values_ignored, + # test_default_values_match, + # test_nextflow_config_bad_name_fail, + # test_nextflow_config_dev_in_release_mode_failed, + # test_nextflow_config_example_pass, + # test_nextflow_config_missing_test_profile_failed, + # ) + # from .lint.nfcore_yml import ( # 
type: ignore[misc] + # test_nfcore_yml_fail_nfcore_version, + # test_nfcore_yml_fail_repo_type, + # test_nfcore_yml_pass, + # ) + # from .lint.template_strings import ( # type: ignore[misc] + # test_template_strings, + # test_template_strings_ignore_file, + # test_template_strings_ignored, + # ) + # from .lint.version_consistency import test_version_consistency # type: ignore[misc] From fcc43fd0c337ead404fba843eab51820d0f014a9 Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 17 Jul 2024 14:51:51 +0200 Subject: [PATCH 318/737] add more tests to CI --- .github/workflows/pytest.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 4e873385e..fea96b452 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -67,7 +67,7 @@ jobs: - name: List tests id: list_tests run: | - echo "tests=$(find tests/test_* | tac | sed 's/tests\///g' | jq -R -s -c '{test: (split("\n")[:-1])}')" >> $GITHUB_OUTPUT + echo "tests=$(find tests/**/test_* | tac | sed 's/tests\///g' | jq -R -s -c '{test: (split("\n")[:-1])}')" >> $GITHUB_OUTPUT outputs: tests: ${{ steps.list_tests.outputs.tests }} From a779d12b7fff23ce9e93b905f49f37221276af43 Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 17 Jul 2024 15:15:40 +0200 Subject: [PATCH 319/737] simplify sed command --- .github/workflows/pytest.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index fea96b452..18f9b2809 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -67,7 +67,7 @@ jobs: - name: List tests id: list_tests run: | - echo "tests=$(find tests/**/test_* | tac | sed 's/tests\///g' | jq -R -s -c '{test: (split("\n")[:-1])}')" >> $GITHUB_OUTPUT + echo "tests=$(find tests/**/test_* | tac | sed 's/.*\///g' | jq -R -s -c '{test: (split("\n")[:-1])}')" >> $GITHUB_OUTPUT outputs: tests: ${{ steps.list_tests.outputs.tests }} @@ 
-132,7 +132,7 @@ jobs: - name: Test with pytest run: | - python3 -m pytest tests/${{matrix.test}} --color=yes --cov --durations=0 && exit_code=0|| exit_code=$? + python3 -m pytest tests/**/${{matrix.test}} --color=yes --cov --durations=0 && exit_code=0|| exit_code=$? # don't fail if no tests were collected, e.g. for test_licence.py if [ "${exit_code}" -eq 5 ]; then echo "No tests were collected" From ca0769bd76b207881a8e77d3a18f3902b2307474 Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 17 Jul 2024 15:31:01 +0200 Subject: [PATCH 320/737] fix ci tests --- .github/workflows/pytest.yml | 11 +- tests/pipelines/test_lint.py | 284 +++++++++++++++-------------------- 2 files changed, 133 insertions(+), 162 deletions(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 18f9b2809..72bacaeab 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -67,7 +67,7 @@ jobs: - name: List tests id: list_tests run: | - echo "tests=$(find tests/**/test_* | tac | sed 's/.*\///g' | jq -R -s -c '{test: (split("\n")[:-1])}')" >> $GITHUB_OUTPUT + echo "tests=$(find tests/**/test_* | tac | sed 's/tests\///g' | jq -R -s -c '{test: (split("\n")[:-1])}')" >> $GITHUB_OUTPUT outputs: tests: ${{ steps.list_tests.outputs.tests }} @@ -132,7 +132,7 @@ jobs: - name: Test with pytest run: | - python3 -m pytest tests/**/${{matrix.test}} --color=yes --cov --durations=0 && exit_code=0|| exit_code=$? + python3 -m pytest tests/${{matrix.test}} --color=yes --cov --durations=0 && exit_code=0|| exit_code=$? # don't fail if no tests were collected, e.g. 
for test_licence.py if [ "${exit_code}" -eq 5 ]; then echo "No tests were collected" @@ -149,10 +149,15 @@ jobs: name: Snapshot Report ${{ matrix.test }} path: ./snapshot_report.html + - name: remove slashes from test name + run: | + test=$(echo ${{ matrix.test }} | sed 's/\//__/g') + echo "test=${test}" >> $GITHUB_ENV + - name: Upload coverage uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4 with: - name: coverage_${{ matrix.test }} + name: coverage_${{ env.test }} path: .coverage coverage: diff --git a/tests/pipelines/test_lint.py b/tests/pipelines/test_lint.py index b3eac17cb..ab8bcf6b2 100644 --- a/tests/pipelines/test_lint.py +++ b/tests/pipelines/test_lint.py @@ -23,162 +23,128 @@ def setUp(self) -> None: ########################## # CORE lint.py FUNCTIONS # ########################## -def test_run_linting_function(self): - """Run the master run_linting() function in lint.py - - We don't really check any of this code as it's just a series of function calls - and we're testing each of those individually. This is mostly to check for syntax errors.""" - nf_core.pipelines.lint.run_linting(self.pipeline_dir, False) - - -def test_init_pipeline_lint(self): - """Simply create a PipelineLint object. 
- - This checks that all of the lint test imports are working properly, - we also check that the git sha was found and that the release flag works properly - """ - lint_obj = nf_core.pipelines.lint.PipelineLint(self.pipeline_dir, True) - - # Tests that extra test is added for release mode - assert "version_consistency" in lint_obj.lint_tests - assert lint_obj.git_sha - # Tests that parent nf_core.utils.Pipeline class __init__() is working to find git hash - assert len(lint_obj.git_sha) > 0 - - -def test_load_lint_config_not_found(self): - """Try to load a linting config file that doesn't exist""" - self.lint_obj._load_lint_config() - assert self.lint_obj.lint_config == {} - - -def test_load_lint_config_ignore_all_tests(self): - """Try to load a linting config file that ignores all tests""" - - # Make a copy of the test pipeline and create a lint object - new_pipeline = self._make_pipeline_copy() - lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) - - # Make a config file listing all test names - config_dict = {"lint": {test_name: False for test_name in lint_obj.lint_tests}} - with open(Path(new_pipeline, ".nf-core.yml"), "w") as fh: - yaml.dump(config_dict, fh) - - # Load the new lint config file and check - lint_obj._load_lint_config() - assert sorted(list(lint_obj.lint_config.keys())) == sorted(lint_obj.lint_tests) - - # Try running linting and make sure that all tests are ignored - lint_obj._lint_pipeline() - assert len(lint_obj.passed) == 0 - assert len(lint_obj.warned) == 0 - assert len(lint_obj.failed) == 0 - assert len(lint_obj.ignored) == len(lint_obj.lint_tests) - - -@with_temporary_folder -def test_json_output(self, tmp_dir): - """ - Test creation of a JSON file with lint results - - Expected JSON output: - { - "nf_core_tools_version": "1.10.dev0", - "date_run": "2020-06-05 10:56:42", - "tests_pass": [ - [ 1, "This test passed"], - [ 2, "This test also passed"] - ], - "tests_warned": [ - [ 2, "This test gave a warning"] - ], - "tests_failed": 
[], - "num_tests_pass": 2, - "num_tests_warned": 1, - "num_tests_failed": 0, - "has_tests_pass": true, - "has_tests_warned": true, - "has_tests_failed": false - } - """ - self.lint_obj.passed.append(("test_one", "This test passed")) - self.lint_obj.passed.append(("test_two", "This test also passed")) - self.lint_obj.warned.append(("test_three", "This test gave a warning")) - - # Make a temp dir for the JSON output - json_fn = Path(tmp_dir, "lint_results.json") - self.lint_obj._save_json_results(json_fn) - - # Load created JSON file and check its contents - with open(json_fn) as fh: - try: - saved_json = json.load(fh) - except json.JSONDecodeError as e: - raise UserWarning(f"Unable to load JSON file '{json_fn}' due to error {e}") - assert saved_json["num_tests_pass"] > 0 - assert saved_json["num_tests_warned"] > 0 - assert saved_json["num_tests_ignored"] == 0 - assert saved_json["num_tests_failed"] == 0 - assert saved_json["has_tests_pass"] - assert saved_json["has_tests_warned"] - assert not saved_json["has_tests_ignored"] - assert not saved_json["has_tests_failed"] - - -def test_wrap_quotes(self): - md = self.lint_obj._wrap_quotes(["one", "two", "three"]) - assert md == "`one` or `two` or `three`" - - -def test_sphinx_md_files(self): - """Check that we have .md files for all lint module code, - and that there are no unexpected files (eg. 
deleted lint tests)""" - - docs_basedir = Path(Path(__file__).parent.parent.parent, "docs", "api", "_src", "pipeline_lint_tests") - - # Get list of existing .md files - existing_docs = [] - existing_docs = [ - str(Path(docs_basedir, fn)) - for fn in Path(docs_basedir).iterdir() - if fn.match("*.md") and not fn.match("index.md") - ] - - # Check .md files against each test name - lint_obj = nf_core.pipelines.lint.PipelineLint("", True) - for test_name in lint_obj.lint_tests: - fn = Path(docs_basedir, f"{test_name}.md") - assert fn.exists(), f"Could not find lint docs .md file: {fn}" - existing_docs.remove(str(fn)) - - # Check that we have no remaining .md files that we didn't expect - assert len(existing_docs) == 0, f"Unexpected lint docs .md files found: {', '.join(existing_docs)}" - - ####################### - # SPECIFIC LINT TESTS # - ####################### - - # from .lint.nextflow_config import ( # type: ignore[misc] - # test_allow_params_reference_in_main_nf, - # test_catch_params_assignment_in_main_nf, - # test_default_values_fail, - # test_default_values_float, - # test_default_values_float_fail, - # test_default_values_ignored, - # test_default_values_match, - # test_nextflow_config_bad_name_fail, - # test_nextflow_config_dev_in_release_mode_failed, - # test_nextflow_config_example_pass, - # test_nextflow_config_missing_test_profile_failed, - # ) - # from .lint.nfcore_yml import ( # type: ignore[misc] - # test_nfcore_yml_fail_nfcore_version, - # test_nfcore_yml_fail_repo_type, - # test_nfcore_yml_pass, - # ) - # from .lint.template_strings import ( # type: ignore[misc] - # test_template_strings, - # test_template_strings_ignore_file, - # test_template_strings_ignored, - # ) - # from .lint.version_consistency import test_version_consistency # type: ignore[misc] +class TestPipelinesLint(TestLint): + def test_run_linting_function(self): + """Run the master run_linting() function in lint.py + + We don't really check any of this code as it's just a series of 
function calls + and we're testing each of those individually. This is mostly to check for syntax errors.""" + nf_core.pipelines.lint.run_linting(self.pipeline_dir, False) + + def test_init_pipeline_lint(self): + """Simply create a PipelineLint object. + + This checks that all of the lint test imports are working properly, + we also check that the git sha was found and that the release flag works properly + """ + lint_obj = nf_core.pipelines.lint.PipelineLint(self.pipeline_dir, True) + + # Tests that extra test is added for release mode + assert "version_consistency" in lint_obj.lint_tests + assert lint_obj.git_sha + # Tests that parent nf_core.utils.Pipeline class __init__() is working to find git hash + assert len(lint_obj.git_sha) > 0 + + def test_load_lint_config_not_found(self): + """Try to load a linting config file that doesn't exist""" + self.lint_obj._load_lint_config() + assert self.lint_obj.lint_config == {} + + def test_load_lint_config_ignore_all_tests(self): + """Try to load a linting config file that ignores all tests""" + + # Make a copy of the test pipeline and create a lint object + new_pipeline = self._make_pipeline_copy() + lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) + + # Make a config file listing all test names + config_dict = {"lint": {test_name: False for test_name in lint_obj.lint_tests}} + with open(Path(new_pipeline, ".nf-core.yml"), "w") as fh: + yaml.dump(config_dict, fh) + + # Load the new lint config file and check + lint_obj._load_lint_config() + assert sorted(list(lint_obj.lint_config.keys())) == sorted(lint_obj.lint_tests) + + # Try running linting and make sure that all tests are ignored + lint_obj._lint_pipeline() + assert len(lint_obj.passed) == 0 + assert len(lint_obj.warned) == 0 + assert len(lint_obj.failed) == 0 + assert len(lint_obj.ignored) == len(lint_obj.lint_tests) + + @with_temporary_folder + def test_json_output(self, tmp_dir): + """ + Test creation of a JSON file with lint results + + Expected JSON 
output: + { + "nf_core_tools_version": "1.10.dev0", + "date_run": "2020-06-05 10:56:42", + "tests_pass": [ + [ 1, "This test passed"], + [ 2, "This test also passed"] + ], + "tests_warned": [ + [ 2, "This test gave a warning"] + ], + "tests_failed": [], + "num_tests_pass": 2, + "num_tests_warned": 1, + "num_tests_failed": 0, + "has_tests_pass": true, + "has_tests_warned": true, + "has_tests_failed": false + } + """ + self.lint_obj.passed.append(("test_one", "This test passed")) + self.lint_obj.passed.append(("test_two", "This test also passed")) + self.lint_obj.warned.append(("test_three", "This test gave a warning")) + + # Make a temp dir for the JSON output + json_fn = Path(tmp_dir, "lint_results.json") + self.lint_obj._save_json_results(json_fn) + + # Load created JSON file and check its contents + with open(json_fn) as fh: + try: + saved_json = json.load(fh) + except json.JSONDecodeError as e: + raise UserWarning(f"Unable to load JSON file '{json_fn}' due to error {e}") + assert saved_json["num_tests_pass"] > 0 + assert saved_json["num_tests_warned"] > 0 + assert saved_json["num_tests_ignored"] == 0 + assert saved_json["num_tests_failed"] == 0 + assert saved_json["has_tests_pass"] + assert saved_json["has_tests_warned"] + assert not saved_json["has_tests_ignored"] + assert not saved_json["has_tests_failed"] + + def test_wrap_quotes(self): + md = self.lint_obj._wrap_quotes(["one", "two", "three"]) + assert md == "`one` or `two` or `three`" + + def test_sphinx_md_files(self): + """Check that we have .md files for all lint module code, + and that there are no unexpected files (eg. 
deleted lint tests)""" + + docs_basedir = Path(Path(__file__).parent.parent.parent, "docs", "api", "_src", "pipeline_lint_tests") + + # Get list of existing .md files + existing_docs = [] + existing_docs = [ + str(Path(docs_basedir, fn)) + for fn in Path(docs_basedir).iterdir() + if fn.match("*.md") and not fn.match("index.md") + ] + + # Check .md files against each test name + lint_obj = nf_core.pipelines.lint.PipelineLint("", True) + for test_name in lint_obj.lint_tests: + fn = Path(docs_basedir, f"{test_name}.md") + assert fn.exists(), f"Could not find lint docs .md file: {fn}" + existing_docs.remove(str(fn)) + + # Check that we have no remaining .md files that we didn't expect + assert len(existing_docs) == 0, f"Unexpected lint docs .md files found: {', '.join(existing_docs)}" From 768cfc31fff05bf4136d987f7dc811599766b7f9 Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 17 Jul 2024 15:36:13 +0200 Subject: [PATCH 321/737] find ALL test files in CI --- .github/workflows/pytest.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 72bacaeab..dc8803188 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -67,7 +67,7 @@ jobs: - name: List tests id: list_tests run: | - echo "tests=$(find tests/**/test_* | tac | sed 's/tests\///g' | jq -R -s -c '{test: (split("\n")[:-1])}')" >> $GITHUB_OUTPUT + echo "tests=$(find tests -type f -name "test_*.py" | tac | sed 's/tests\///g' | jq -R -s -c '{test: (split("\n")[:-1])}')" >> $GITHUB_OUTPUT outputs: tests: ${{ steps.list_tests.outputs.tests }} From 3aee9cbfd0950a7f78cfcd273ce7a61bbc3d4ed9 Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 17 Jul 2024 15:49:21 +0200 Subject: [PATCH 322/737] add pytest-asyncio --- requirements-dev.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements-dev.txt b/requirements-dev.txt index 2c7bb0c8c..82087edcb 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ 
-14,6 +14,7 @@ types-Markdown types-PyYAML types-requests types-setuptools +pytest-asyncio pytest-textual-snapshot==0.4.0 pytest-workflow>=2.0.0 pytest>=8.0.0 From 09b0623a9682b58b1be9b5fe12bba117805681b4 Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 17 Jul 2024 16:10:41 +0200 Subject: [PATCH 323/737] convert subworkflow tests to new structure --- nf_core/modules/bump_versions.py | 7 +- nf_core/modules/modules_json.py | 3 +- tests/subworkflows/create.py | 110 -------- tests/subworkflows/info.py | 64 ----- tests/subworkflows/install.py | 154 ----------- tests/subworkflows/lint.py | 403 ----------------------------- tests/subworkflows/list.py | 49 ---- tests/subworkflows/remove.py | 100 ------- tests/subworkflows/test_create.py | 109 ++++++++ tests/subworkflows/test_info.py | 63 +++++ tests/subworkflows/test_install.py | 157 +++++++++++ tests/subworkflows/test_lint.py | 391 ++++++++++++++++++++++++++++ tests/subworkflows/test_list.py | 48 ++++ tests/subworkflows/test_remove.py | 101 ++++++++ tests/subworkflows/test_update.py | 370 ++++++++++++++++++++++++++ tests/subworkflows/update.py | 376 --------------------------- tests/test_subworkflows.py | 105 +++----- 17 files changed, 1278 insertions(+), 1332 deletions(-) delete mode 100644 tests/subworkflows/create.py delete mode 100644 tests/subworkflows/info.py delete mode 100644 tests/subworkflows/install.py delete mode 100644 tests/subworkflows/lint.py delete mode 100644 tests/subworkflows/list.py delete mode 100644 tests/subworkflows/remove.py create mode 100644 tests/subworkflows/test_create.py create mode 100644 tests/subworkflows/test_info.py create mode 100644 tests/subworkflows/test_install.py create mode 100644 tests/subworkflows/test_lint.py create mode 100644 tests/subworkflows/test_list.py create mode 100644 tests/subworkflows/test_remove.py create mode 100644 tests/subworkflows/test_update.py delete mode 100644 tests/subworkflows/update.py diff --git a/nf_core/modules/bump_versions.py 
b/nf_core/modules/bump_versions.py index 6556dcf0f..fae379307 100644 --- a/nf_core/modules/bump_versions.py +++ b/nf_core/modules/bump_versions.py @@ -176,7 +176,12 @@ def bump_module_version(self, module: NFCoreComponent) -> bool: try: response = nf_core.utils.anaconda_package(bp) except (LookupError, ValueError): - self.failed.append((f"Conda version not specified correctly: {module.main_nf}", module.component_name)) + self.failed.append( + ( + f"Conda version not specified correctly: {module.main_nf.relative_to(self.dir)}", + module.component_name, + ) + ) return False # Check that required version is available at all diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 0d6779a8d..2c2f1a32c 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -6,6 +6,7 @@ import shutil import tempfile from pathlib import Path +from typing import Union import git import questionary @@ -31,7 +32,7 @@ class ModulesJson: An object for handling a 'modules.json' file in a pipeline """ - def __init__(self, pipeline_dir: str): + def __init__(self, pipeline_dir: Union[str, Path]): """ Initialise the object. 
diff --git a/tests/subworkflows/create.py b/tests/subworkflows/create.py deleted file mode 100644 index 002b88967..000000000 --- a/tests/subworkflows/create.py +++ /dev/null @@ -1,110 +0,0 @@ -import os -import shutil -from pathlib import Path -from unittest import mock - -import pytest -import yaml -from git.repo import Repo - -import nf_core.subworkflows -from tests.utils import GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH, GITLAB_URL - - -def test_subworkflows_create_succeed(self): - """Succeed at creating a subworkflow from the template inside a pipeline""" - subworkflow_create = nf_core.subworkflows.SubworkflowCreate( - self.pipeline_dir, "test_subworkflow_local", "@author", True - ) - subworkflow_create.create() - assert os.path.exists(os.path.join(self.pipeline_dir, "subworkflows", "local", "test_subworkflow_local.nf")) - - -def test_subworkflows_create_fail_exists(self): - """Fail at creating the same subworkflow twice""" - subworkflow_create = nf_core.subworkflows.SubworkflowCreate( - self.pipeline_dir, "test_subworkflow2", "@author", False - ) - subworkflow_create.create() - with pytest.raises(UserWarning) as excinfo: - subworkflow_create.create() - assert "Subworkflow file exists already" in str(excinfo.value) - - -def test_subworkflows_create_nfcore_modules(self): - """Create a subworkflow in nf-core/modules clone""" - subworkflow_create = nf_core.subworkflows.SubworkflowCreate( - self.nfcore_modules, "test_subworkflow", "@author", force=True - ) - subworkflow_create.create() - assert os.path.exists(os.path.join(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "main.nf")) - assert os.path.exists( - os.path.join(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test") - ) - - -@mock.patch("rich.prompt.Confirm.ask") -def test_subworkflows_migrate(self, mock_rich_ask): - """Create a subworkflow with the --migrate-pytest option to convert pytest to nf-test""" - pytest_dir = Path(self.nfcore_modules, "tests", 
"subworkflows", "nf-core", "bam_stats_samtools") - subworkflow_dir = Path(self.nfcore_modules, "subworkflows", "nf-core", "bam_stats_samtools") - - # Clone modules repo with pytests - shutil.rmtree(self.nfcore_modules) - Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH) - with open(subworkflow_dir / "main.nf") as fh: - old_main_nf = fh.read() - with open(subworkflow_dir / "meta.yml") as fh: - old_meta_yml = fh.read() - - # Create a subworkflow with --migrate-pytest - mock_rich_ask.return_value = True - subworkflow_create = nf_core.subworkflows.SubworkflowCreate( - self.nfcore_modules, "bam_stats_samtools", migrate_pytest=True - ) - subworkflow_create.create() - - with open(subworkflow_dir / "main.nf") as fh: - new_main_nf = fh.read() - with open(subworkflow_dir / "meta.yml") as fh: - new_meta_yml = fh.read() - nextflow_config = subworkflow_dir / "tests" / "nextflow.config" - - # Check that old files have been copied to the new module - assert old_main_nf == new_main_nf - assert old_meta_yml == new_meta_yml - assert nextflow_config.is_file() - - # Check that pytest folder is deleted - assert not pytest_dir.is_dir() - - # Check that pytest_modules.yml is updated - with open(Path(self.nfcore_modules, "tests", "config", "pytest_modules.yml")) as fh: - modules_yml = yaml.safe_load(fh) - assert "subworkflows/bam_stats_samtools" not in modules_yml.keys() - - -@mock.patch("rich.prompt.Confirm.ask") -def test_subworkflows_migrate_no_delete(self, mock_rich_ask): - """Create a subworkflow with the --migrate-pytest option to convert pytest to nf-test. 
- Test that pytest directory is not deleted.""" - pytest_dir = Path(self.nfcore_modules, "tests", "subworkflows", "nf-core", "bam_stats_samtools") - - # Clone modules repo with pytests - shutil.rmtree(self.nfcore_modules) - Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH) - - # Create a module with --migrate-pytest - mock_rich_ask.return_value = False - module_create = nf_core.subworkflows.SubworkflowCreate( - self.nfcore_modules, "bam_stats_samtools", migrate_pytest=True - ) - module_create.create() - - # Check that pytest folder is not deleted - assert pytest_dir.is_dir() - - # Check that pytest_modules.yml is updated - with open(Path(self.nfcore_modules, "tests", "config", "pytest_modules.yml")) as fh: - modules_yml = yaml.safe_load(fh) - assert "subworkflows/bam_stats_samtools" not in modules_yml.keys() diff --git a/tests/subworkflows/info.py b/tests/subworkflows/info.py deleted file mode 100644 index 688120ac0..000000000 --- a/tests/subworkflows/info.py +++ /dev/null @@ -1,64 +0,0 @@ -from rich.console import Console - -import nf_core.subworkflows - -from ..utils import GITLAB_SUBWORKFLOWS_BRANCH, GITLAB_URL - - -def test_subworkflows_info_remote(self): - """Test getting info about a remote subworkflow""" - mods_info = nf_core.subworkflows.SubworkflowInfo(self.pipeline_dir, "bam_sort_stats_samtools") - mods_info_output = mods_info.get_component_info() - console = Console(record=True) - console.print(mods_info_output) - output = console.export_text() - - assert "Subworkflow: bam_sort_stats_samtools" in output - assert "Inputs" in output - assert "Outputs" in output - - -def test_subworkflows_info_remote_gitlab(self): - """Test getting info about a subworkflow in the remote gitlab repo""" - mods_info = nf_core.subworkflows.SubworkflowInfo( - self.pipeline_dir, "bam_sort_stats_samtools", remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH - ) - mods_info_output = mods_info.get_component_info() - console = 
Console(record=True) - console.print(mods_info_output) - output = console.export_text() - - assert "Subworkflow: bam_sort_stats_samtools" in output - assert "Inputs" in output - assert "Outputs" in output - assert "--git-remote" in output - - -def test_subworkflows_info_local(self): - """Test getting info about a locally installed subworkflow""" - self.subworkflow_install.install("bam_sort_stats_samtools") - mods_info = nf_core.subworkflows.SubworkflowInfo(self.pipeline_dir, "bam_sort_stats_samtools") - mods_info.local = True - mods_info_output = mods_info.get_component_info() - console = Console(record=True) - console.print(mods_info_output) - output = console.export_text() - - assert "Subworkflow: bam_sort_stats_samtools" in output - assert "Inputs" in output - assert "Outputs" in output - - -def test_subworkflows_info_in_modules_repo(self): - """Test getting info about a locally subworkflow in the modules repo""" - self.subworkflow_install.install("bam_sort_stats_samtools") - mods_info = nf_core.subworkflows.SubworkflowInfo(self.nfcore_modules, "bam_sort_stats_samtools") - mods_info.local = True - mods_info_output = mods_info.get_component_info() - console = Console(record=True) - console.print(mods_info_output) - output = console.export_text() - - assert "Subworkflow: bam_sort_stats_samtools" in output - assert "Inputs" in output - assert "Outputs" in output diff --git a/tests/subworkflows/install.py b/tests/subworkflows/install.py deleted file mode 100644 index dfe71686f..000000000 --- a/tests/subworkflows/install.py +++ /dev/null @@ -1,154 +0,0 @@ -import os - -import pytest - -from nf_core.modules.modules_json import ModulesJson -from nf_core.subworkflows.install import SubworkflowInstall - -from ..utils import ( - GITLAB_BRANCH_TEST_BRANCH, - GITLAB_REPO, - GITLAB_SUBWORKFLOWS_BRANCH, - GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH, - GITLAB_URL, - with_temporary_folder, -) - - -def test_subworkflow_install_nopipeline(self): - """Test installing a subworkflow - no 
pipeline given""" - self.subworkflow_install.dir = None - assert self.subworkflow_install.install("foo") is False - - -@with_temporary_folder -def test_subworkflows_install_emptypipeline(self, tmpdir): - """Test installing a subworkflow - empty dir given""" - os.mkdir(os.path.join(tmpdir, "nf-core-pipe")) - self.subworkflow_install.dir = os.path.join(tmpdir, "nf-core-pipe") - with pytest.raises(UserWarning) as excinfo: - self.subworkflow_install.install("foo") - assert "Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value) - - -def test_subworkflows_install_nosubworkflow(self): - """Test installing a subworkflow - unrecognised subworkflow given""" - assert self.subworkflow_install.install("foo") is False - - -def test_subworkflows_install_bam_sort_stats_samtools(self): - """Test installing a subworkflow - bam_sort_stats_samtools""" - assert self.subworkflow_install.install("bam_sort_stats_samtools") is not False - subworkflow_path = os.path.join(self.subworkflow_install.dir, "subworkflows", "nf-core", "bam_sort_stats_samtools") - sub_subworkflow_path = os.path.join(self.subworkflow_install.dir, "subworkflows", "nf-core", "bam_stats_samtools") - samtools_index_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index") - samtools_sort_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "sort") - samtools_stats_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "stats") - samtools_idxstats_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "idxstats") - samtools_flagstat_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "flagstat") - assert os.path.exists(subworkflow_path) - assert os.path.exists(sub_subworkflow_path) - assert os.path.exists(samtools_index_path) - assert os.path.exists(samtools_sort_path) - assert os.path.exists(samtools_stats_path) - assert 
os.path.exists(samtools_idxstats_path) - assert os.path.exists(samtools_flagstat_path) - - -def test_subworkflows_install_bam_sort_stats_samtools_twice(self): - """Test installing a subworkflow - bam_sort_stats_samtools already there""" - self.subworkflow_install.install("bam_sort_stats_samtools") - assert self.subworkflow_install.install("bam_sort_stats_samtools") is False - - -def test_subworkflows_install_from_gitlab(self): - """Test installing a subworkflow from GitLab""" - assert self.subworkflow_install_gitlab.install("bam_stats_samtools") is True - # Verify that the branch entry was added correctly - modules_json = ModulesJson(self.pipeline_dir) - assert ( - modules_json.get_component_branch(self.component_type, "bam_stats_samtools", GITLAB_URL, GITLAB_REPO) - == GITLAB_SUBWORKFLOWS_BRANCH - ) - - -def test_subworkflows_install_different_branch_fail(self): - """Test installing a subworkflow from a different branch""" - install_obj = SubworkflowInstall(self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH) - # The bam_stats_samtools subworkflow does not exists in the branch-test branch - assert install_obj.install("bam_stats_samtools") is False - - -def test_subworkflows_install_tracking(self): - """Test installing a subworkflow and finding the correct entries in installed_by section of modules.json""" - self.subworkflow_install.install("bam_sort_stats_samtools") - - # Verify that the installed_by entry was added correctly - modules_json = ModulesJson(self.pipeline_dir) - mod_json = modules_json.get_modules_json() - assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"][ - "bam_sort_stats_samtools" - ]["installed_by"] == ["subworkflows"] - assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"]["bam_stats_samtools"][ - "installed_by" - ] == ["bam_sort_stats_samtools"] - assert 
mod_json["repos"]["https://github.com/nf-core/modules.git"]["modules"]["nf-core"]["samtools/stats"][ - "installed_by" - ] == ["bam_stats_samtools"] - assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["modules"]["nf-core"]["samtools/sort"][ - "installed_by" - ] == ["bam_sort_stats_samtools"] - - # Clean directory - self.subworkflow_remove.remove("bam_sort_stats_samtools") - - -def test_subworkflows_install_tracking_added_already_installed(self): - """Test installing a subworkflow and finding the correct entries in installed_by section of modules.json""" - self.subworkflow_install.install("bam_sort_stats_samtools") - self.subworkflow_install.install("bam_stats_samtools") - - # Verify that the installed_by entry was added correctly - modules_json = ModulesJson(self.pipeline_dir) - mod_json = modules_json.get_modules_json() - assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"][ - "bam_sort_stats_samtools" - ]["installed_by"] == ["subworkflows"] - assert sorted( - mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"]["bam_stats_samtools"][ - "installed_by" - ] - ) == sorted(["bam_sort_stats_samtools", "subworkflows"]) - - # Clean directory - self.subworkflow_remove.remove("bam_sort_stats_samtools") - self.subworkflow_remove.remove("bam_stats_samtools") - - -def test_subworkflows_install_tracking_added_super_subworkflow(self): - """Test installing a subworkflow and finding the correct entries in installed_by section of modules.json""" - self.subworkflow_install.install("bam_stats_samtools") - self.subworkflow_install.install("bam_sort_stats_samtools") - - # Verify that the installed_by entry was added correctly - modules_json = ModulesJson(self.pipeline_dir) - mod_json = modules_json.get_modules_json() - assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"][ - "bam_sort_stats_samtools" - ]["installed_by"] == ["subworkflows"] - assert sorted( 
- mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"]["bam_stats_samtools"][ - "installed_by" - ] - ) == sorted(["subworkflows", "bam_sort_stats_samtools"]) - - -def test_subworkflows_install_alternate_remote(self): - """Test installing a module from a different remote with the same organization path""" - install_obj = SubworkflowInstall( - self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH - ) - # Install a subworkflow from GitLab which is also installed from GitHub with the same org_path - with pytest.raises(Exception) as excinfo: - install_obj.install("fastqc") - assert "Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value) diff --git a/tests/subworkflows/lint.py b/tests/subworkflows/lint.py deleted file mode 100644 index 540f421ad..000000000 --- a/tests/subworkflows/lint.py +++ /dev/null @@ -1,403 +0,0 @@ -import json -import shutil -from pathlib import Path - -import pytest - -import nf_core.subworkflows - -from ..utils import GITLAB_SUBWORKFLOWS_BRANCH, GITLAB_URL - - -def test_subworkflows_lint(self): - """Test linting the fastq_align_bowtie2 subworkflow""" - self.subworkflow_install.install("fastq_align_bowtie2") - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir) - subworkflow_lint.lint(print_results=False, subworkflow="fastq_align_bowtie2") - assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) >= 0 - - -def test_subworkflows_lint_empty(self): - """Test linting a pipeline with no subworkflows installed""" - self.subworkflow_remove.remove("utils_nextflow_pipeline", force=True) - self.subworkflow_remove.remove("utils_nfcore_pipeline", force=True) - self.subworkflow_remove.remove("utils_nfvalidation_plugin", force=True) - with pytest.raises(LookupError): - 
nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir) - - -def test_subworkflows_lint_new_subworkflow(self): - """lint a new subworkflow""" - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) - subworkflow_lint.lint(print_results=True, all_subworkflows=True) - assert len(subworkflow_lint.failed) == 0 - - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) >= 0 - - -def test_subworkflows_lint_no_gitlab(self): - """Test linting a pipeline with no subworkflows installed""" - with pytest.raises(LookupError): - nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir, remote_url=GITLAB_URL) - - -def test_subworkflows_lint_gitlab_subworkflows(self): - """Lint subworkflows from a different remote""" - self.subworkflow_install_gitlab.install("bam_stats_samtools") - subworkflow_lint = nf_core.subworkflows.SubworkflowLint( - dir=self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH - ) - subworkflow_lint.lint(print_results=False, all_subworkflows=True) - assert len(subworkflow_lint.failed) == 0 - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) >= 0 - - -def test_subworkflows_lint_multiple_remotes(self): - """Lint subworkflows from a different remote""" - self.subworkflow_install_gitlab.install("bam_stats_samtools") - self.subworkflow_install.install("fastq_align_bowtie2") - subworkflow_lint = nf_core.subworkflows.SubworkflowLint( - dir=self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH - ) - subworkflow_lint.lint(print_results=False, all_subworkflows=True) - assert len(subworkflow_lint.failed) == 0 - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) >= 0 - - -def test_subworkflows_lint_snapshot_file(self): - """Test linting a subworkflow with a snapshot file""" - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) - subworkflow_lint.lint(print_results=False, 
subworkflow="test_subworkflow") - assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) >= 0 - - -def test_subworkflows_lint_snapshot_file_missing_fail(self): - """Test linting a subworkflow with a snapshot file missing, which should fail""" - Path( - self.nfcore_modules, - "subworkflows", - "nf-core", - "test_subworkflow", - "tests", - "main.nf.test.snap", - ).unlink() - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) - subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") - Path( - self.nfcore_modules, - "subworkflows", - "nf-core", - "test_subworkflow", - "tests", - "main.nf.test.snap", - ).touch() - assert len(subworkflow_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) >= 0 - - -def test_subworkflows_lint_snapshot_file_not_needed(self): - """Test linting a subworkflow which doesn't need a snapshot file by removing the snapshot keyword in the main.nf.test file""" - with open( - Path( - self.nfcore_modules, - "subworkflows", - "nf-core", - "test_subworkflow", - "tests", - "main.nf.test", - ) - ) as fh: - content = fh.read() - new_content = content.replace("snapshot(", "snap (") - with open( - Path( - self.nfcore_modules, - "subworkflows", - "nf-core", - "test_subworkflow", - "tests", - "main.nf.test", - ), - "w", - ) as fh: - fh.write(new_content) - - Path( - self.nfcore_modules, - "subworkflows", - "nf-core", - "test_subworkflow", - "tests", - "main.nf.test.snap", - ).unlink() - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) - subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") - Path( - self.nfcore_modules, - "subworkflows", - "nf-core", - "test_subworkflow", - "tests", - "main.nf.test.snap", - 
).touch() - assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) >= 0 - - -def test_subworkflows_lint_less_than_two_modules_warning(self): - """Test linting a subworkflow with less than two modules""" - self.subworkflow_install.install("bam_stats_samtools") - # Remove two modules - with open( - Path( - self.pipeline_dir, - "subworkflows", - "nf-core", - "bam_stats_samtools", - "main.nf", - ) - ) as fh: - content = fh.read() - new_content = content.replace( - "include { SAMTOOLS_IDXSTATS } from '../../../modules/nf-core/samtools/idxstats/main'", - "", - ) - new_content = new_content.replace( - "include { SAMTOOLS_FLAGSTAT } from '../../../modules/nf-core/samtools/flagstat/main'", - "", - ) - with open( - Path( - self.pipeline_dir, - "subworkflows", - "nf-core", - "bam_stats_samtools", - "main.nf", - ), - "w", - ) as fh: - fh.write(new_content) - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir) - subworkflow_lint.lint(print_results=False, subworkflow="bam_stats_samtools") - assert len(subworkflow_lint.failed) >= 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) > 0 - assert subworkflow_lint.warned[0].lint_test == "main_nf_include" - # cleanup - self.subworkflow_remove.remove("bam_stats_samtools", force=True) - - -def test_subworkflows_lint_include_multiple_alias(self): - """Test linting a subworkflow with multiple include methods""" - self.subworkflow_install.install("bam_stats_samtools") - with open( - Path( - self.pipeline_dir, - "subworkflows", - "nf-core", - "bam_stats_samtools", - "main.nf", - ) - ) as fh: - content = fh.read() - new_content = content.replace("SAMTOOLS_STATS", "SAMTOOLS_STATS_1") - new_content = new_content.replace( - "include { SAMTOOLS_STATS_1 ", - "include { SAMTOOLS_STATS 
as SAMTOOLS_STATS_1; SAMTOOLS_STATS as SAMTOOLS_STATS_2 ", - ) - with open( - Path( - self.pipeline_dir, - "subworkflows", - "nf-core", - "bam_stats_samtools", - "main.nf", - ), - "w", - ) as fh: - fh.write(new_content) - - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir) - subworkflow_lint.lint(print_results=False, subworkflow="bam_stats_samtools") - assert len(subworkflow_lint.failed) >= 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) == 2 - assert any( - [ - x.message == "Included component 'SAMTOOLS_STATS_1' versions are added in main.nf" - for x in subworkflow_lint.passed - ] - ) - assert any([x.message == "Included component 'SAMTOOLS_STATS_1' used in main.nf" for x in subworkflow_lint.passed]) - assert any( - [x.message == "Included component 'SAMTOOLS_STATS_2' not used in main.nf" for x in subworkflow_lint.warned] - ) - - # cleanup - self.subworkflow_remove.remove("bam_stats_samtools", force=True) - - -def test_subworkflows_lint_capitalization_fail(self): - """Test linting a subworkflow with a capitalization fail""" - self.subworkflow_install.install("bam_stats_samtools") - # change workflow name to lowercase - with open( - Path( - self.pipeline_dir, - "subworkflows", - "nf-core", - "bam_stats_samtools", - "main.nf", - ) - ) as fh: - content = fh.read() - new_content = content.replace("workflow BAM_STATS_SAMTOOLS {", "workflow bam_stats_samtools {") - with open( - Path( - self.pipeline_dir, - "subworkflows", - "nf-core", - "bam_stats_samtools", - "main.nf", - ), - "w", - ) as fh: - fh.write(new_content) - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir) - subworkflow_lint.lint(print_results=False, subworkflow="bam_stats_samtools") - assert len(subworkflow_lint.failed) >= 1, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" - assert len(subworkflow_lint.passed) > 0 - assert 
len(subworkflow_lint.warned) >= 0 - assert any([x.lint_test == "workflow_capitals" for x in subworkflow_lint.failed]) - - # cleanup - self.subworkflow_remove.remove("bam_stats_samtools", force=True) - - -def test_subworkflows_absent_version(self): - """Test linting a nf-test module if the versions is absent in the snapshot file `""" - snap_file = Path( - self.nfcore_modules, - "subworkflows", - "nf-core", - "test_subworkflow", - "tests", - "main.nf.test.snap", - ) - with open(snap_file) as fh: - content = fh.read() - new_content = content.replace("versions", "foo") - with open(snap_file, "w") as fh: - fh.write(new_content) - - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) - subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") - assert len(subworkflow_lint.failed) == 0 - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) >= 0, f"Linting warned with {[x.__dict__ for x in subworkflow_lint.warned]}" - assert any([x.lint_test == "test_snap_versions" for x in subworkflow_lint.warned]) - - # cleanup - with open(snap_file, "w") as fh: - fh.write(content) - - -def test_subworkflows_missing_test_dir(self): - """Test linting a nf-test subworkflow if the tests directory is missing""" - test_dir = Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests") - test_dir_copy = shutil.copytree(test_dir, test_dir.parent / "tests_copy") - shutil.rmtree(test_dir) - - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) - subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") - assert len(subworkflow_lint.failed) == 0 - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) >= 0, f"Linting warned with {[x.__dict__ for x in subworkflow_lint.warned]}" - assert any([x.lint_test == "test_dir_versions" for x in subworkflow_lint.warned]) - - # cleanup - shutil.copytree(test_dir_copy, test_dir) - - -def 
test_subworkflows_missing_main_nf(self): - """Test linting a nf-test subworkflow if the main.nf file is missing""" - main_nf = Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "main.nf") - main_nf_copy = shutil.copy(main_nf, main_nf.parent / "main_nf_copy") - main_nf.unlink() - - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) - subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") - assert len(subworkflow_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) >= 0 - assert subworkflow_lint.failed[0].lint_test == "main_nf_exists" - - # cleanup - shutil.copy(main_nf_copy, main_nf) - - -def test_subworkflows_empty_file_in_snapshot(self): - """Test linting a nf-test subworkflow with an empty file sha sum in the test snapshot, which should make it fail (if it is not a stub)""" - snap_file = Path( - self.nfcore_modules, - "subworkflows", - "nf-core", - "test_subworkflow", - "tests", - "main.nf.test.snap", - ) - snap = json.load(snap_file.open()) - content = snap_file.read_text() - snap["my test"]["content"][0]["0"] = "test:md5,d41d8cd98f00b204e9800998ecf8427e" - - with open(snap_file, "w") as fh: - json.dump(snap, fh) - - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) - subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") - assert len(subworkflow_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) >= 0 - assert subworkflow_lint.failed[0].lint_test == "test_snap_md5sum" - - # reset the file - with open(snap_file, "w") as fh: - fh.write(content) - - -def test_subworkflows_empty_file_in_stub_snapshot(self): - """Test linting a nf-test subworkflow with an empty file sha sum in the stub test snapshot, which should 
make it not fail""" - snap_file = Path( - self.nfcore_modules, - "subworkflows", - "nf-core", - "test_subworkflow", - "tests", - "main.nf.test.snap", - ) - snap = json.load(snap_file.open()) - content = snap_file.read_text() - snap["my_test_stub"] = {"content": [{"0": "test:md5,d41d8cd98f00b204e9800998ecf8427e", "versions": {}}]} - - with open(snap_file, "w") as fh: - json.dump(snap, fh) - - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) - subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") - assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) >= 0 - assert any(x.lint_test == "test_snap_md5sum" for x in subworkflow_lint.passed) - - # reset the file - with open(snap_file, "w") as fh: - fh.write(content) diff --git a/tests/subworkflows/list.py b/tests/subworkflows/list.py deleted file mode 100644 index c65999d42..000000000 --- a/tests/subworkflows/list.py +++ /dev/null @@ -1,49 +0,0 @@ -from rich.console import Console - -import nf_core.subworkflows - -from ..utils import GITLAB_SUBWORKFLOWS_BRANCH, GITLAB_URL - - -def test_subworkflows_list_remote(self): - """Test listing available subworkflows""" - subworkflows_list = nf_core.subworkflows.SubworkflowList(None, remote=True) - listed_subworkflows = subworkflows_list.list_components() - console = Console(record=True) - console.print(listed_subworkflows) - output = console.export_text() - assert "bam_stats" in output - - -def test_subworkflows_list_remote_gitlab(self): - """Test listing the subworkflows in the remote gitlab repo""" - subworkflows_list = nf_core.subworkflows.SubworkflowList( - None, remote=True, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH - ) - listed_subworkflows = subworkflows_list.list_components() - console = Console(record=True) - console.print(listed_subworkflows) - output = 
console.export_text() - assert "bam_stats" in output - - -def test_subworkflows_install_and_list_subworkflows(self): - """Test listing locally installed subworkflows""" - self.subworkflow_install.install("bam_sort_stats_samtools") - subworkflows_list = nf_core.subworkflows.SubworkflowList(self.pipeline_dir, remote=False) - listed_subworkflows = subworkflows_list.list_components() - console = Console(record=True) - console.print(listed_subworkflows) - output = console.export_text() - assert "bam_stats" in output - - -def test_subworkflows_install_gitlab_and_list_subworkflows(self): - """Test listing locally installed subworkflows""" - self.subworkflow_install_gitlab.install("bam_sort_stats_samtools") - subworkflows_list = nf_core.subworkflows.SubworkflowList(self.pipeline_dir, remote=False) - listed_subworkflows = subworkflows_list.list_components() - console = Console(record=True) - console.print(listed_subworkflows) - output = console.export_text() - assert "bam_stats" in output diff --git a/tests/subworkflows/remove.py b/tests/subworkflows/remove.py deleted file mode 100644 index c6a3b9845..000000000 --- a/tests/subworkflows/remove.py +++ /dev/null @@ -1,100 +0,0 @@ -from pathlib import Path - -from nf_core.modules.modules_json import ModulesJson - - -def test_subworkflows_remove_uninstalled_subworkflow(self): - """Test removing subworkflow without installing it""" - assert self.subworkflow_remove.remove("bam_sort_stats_samtools") is False - - -def test_subworkflows_remove_subworkflow(self): - """Test removing subworkflow and all it's dependencies after installing it""" - self.subworkflow_install.install("bam_sort_stats_samtools") - - subworkflow_path = Path(self.subworkflow_install.dir, "subworkflows", "nf-core") - bam_sort_stats_samtools_path = Path(subworkflow_path, "bam_sort_stats_samtools") - bam_stats_samtools_path = Path(subworkflow_path, "bam_stats_samtools") - samtools_index_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", 
"index") - ModulesJson(self.pipeline_dir) - mod_json_before = ModulesJson(self.pipeline_dir).get_modules_json() - assert self.subworkflow_remove.remove("bam_sort_stats_samtools") - mod_json_after = ModulesJson(self.pipeline_dir).get_modules_json() - assert Path.exists(bam_sort_stats_samtools_path) is False - assert Path.exists(bam_stats_samtools_path) is False - assert Path.exists(samtools_index_path) is False - assert mod_json_before != mod_json_after - # assert subworkflows key is removed from modules.json - assert ( - "bam_sort_stats_samtools" - not in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"].keys() - ) - assert "samtools/index" not in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["modules"].keys() - - -def test_subworkflows_remove_subworkflow_keep_installed_module(self): - """Test removing subworkflow and all it's dependencies after installing it, except for a separately installed module""" - self.subworkflow_install.install("bam_sort_stats_samtools") - self.mods_install.install("samtools/index") - - subworkflow_path = Path(self.subworkflow_install.dir, "subworkflows", "nf-core") - bam_sort_stats_samtools_path = Path(subworkflow_path, "bam_sort_stats_samtools") - bam_stats_samtools_path = Path(subworkflow_path, "bam_stats_samtools") - samtools_index_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index") - - mod_json_before = ModulesJson(self.pipeline_dir).get_modules_json() - assert self.subworkflow_remove.remove("bam_sort_stats_samtools") - mod_json_after = ModulesJson(self.pipeline_dir).get_modules_json() - - assert Path.exists(bam_sort_stats_samtools_path) is False - assert Path.exists(bam_stats_samtools_path) is False - assert Path.exists(samtools_index_path) is True - assert mod_json_before != mod_json_after - # assert subworkflows key is removed from modules.json - assert ( - "bam_sort_stats_samtools" - not in 
mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"].keys() - ) - assert ( - "samtools/index" - in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["modules"]["nf-core"].keys() - ) - - -def test_subworkflows_remove_one_of_two_subworkflow(self): - """Test removing subworkflow and all it's dependencies after installing it""" - self.subworkflow_install.install("bam_sort_stats_samtools") - self.subworkflow_install.install("bam_stats_samtools") - subworkflow_path = Path(self.subworkflow_install.dir, "subworkflows", "nf-core") - bam_sort_stats_samtools_path = Path(subworkflow_path, "bam_sort_stats_samtools") - bam_stats_samtools_path = Path(subworkflow_path, "bam_stats_samtools") - samtools_index_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index") - samtools_stats_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "stats") - - assert self.subworkflow_remove.remove("bam_sort_stats_samtools") - - assert Path.exists(subworkflow_path) is True - assert Path.exists(bam_sort_stats_samtools_path) is False - assert Path.exists(bam_stats_samtools_path) is True - assert Path.exists(samtools_index_path) is False - assert Path.exists(samtools_stats_path) is True - self.subworkflow_remove.remove("bam_stats_samtools") - - -def test_subworkflows_remove_included_subworkflow(self): - """Test removing subworkflow which is installed by another subworkflow and all it's dependencies.""" - self.subworkflow_install.install("bam_sort_stats_samtools") - subworkflow_path = Path(self.subworkflow_install.dir, "subworkflows", "nf-core") - bam_sort_stats_samtools_path = Path(subworkflow_path, "bam_sort_stats_samtools") - bam_stats_samtools_path = Path(subworkflow_path, "bam_stats_samtools") - samtools_index_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index") - samtools_stats_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "stats") - - 
assert self.subworkflow_remove.remove("bam_stats_samtools") is False - - assert Path.exists(subworkflow_path) is True - assert Path.exists(bam_sort_stats_samtools_path) is True - assert Path.exists(bam_stats_samtools_path) is True - assert Path.exists(samtools_index_path) is True - assert Path.exists(samtools_stats_path) is True - self.subworkflow_remove.remove("bam_sort_stats_samtools") diff --git a/tests/subworkflows/test_create.py b/tests/subworkflows/test_create.py new file mode 100644 index 000000000..48cb48226 --- /dev/null +++ b/tests/subworkflows/test_create.py @@ -0,0 +1,109 @@ +import shutil +from pathlib import Path +from unittest import mock + +import pytest +import yaml +from git.repo import Repo + +import nf_core.subworkflows +from tests.utils import GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH, GITLAB_URL + +from ..test_subworkflows import TestSubworkflows + + +class TestSubworkflowsCreate(TestSubworkflows): + def test_subworkflows_create_succeed(self): + """Succeed at creating a subworkflow from the template inside a pipeline""" + subworkflow_create = nf_core.subworkflows.SubworkflowCreate( + self.pipeline_dir, "test_subworkflow_local", "@author", True + ) + subworkflow_create.create() + assert Path(self.pipeline_dir, "subworkflows", "local", "test_subworkflow_local.nf").exists() + + def test_subworkflows_create_fail_exists(self): + """Fail at creating the same subworkflow twice""" + subworkflow_create = nf_core.subworkflows.SubworkflowCreate( + self.pipeline_dir, "test_subworkflow2", "@author", False + ) + subworkflow_create.create() + with pytest.raises(UserWarning) as excinfo: + subworkflow_create.create() + assert "Subworkflow file exists already" in str(excinfo.value) + + def test_subworkflows_create_nfcore_modules(self): + """Create a subworkflow in nf-core/modules clone""" + subworkflow_create = nf_core.subworkflows.SubworkflowCreate( + self.nfcore_modules, "test_subworkflow", "@author", force=True + ) + subworkflow_create.create() + assert 
Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "main.nf").exists() + + assert Path( + self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test" + ).exists() + + @mock.patch("rich.prompt.Confirm.ask") + def test_subworkflows_migrate(self, mock_rich_ask): + """Create a subworkflow with the --migrate-pytest option to convert pytest to nf-test""" + pytest_dir = Path(self.nfcore_modules, "tests", "subworkflows", "nf-core", "bam_stats_samtools") + subworkflow_dir = Path(self.nfcore_modules, "subworkflows", "nf-core", "bam_stats_samtools") + + # Clone modules repo with pytests + shutil.rmtree(self.nfcore_modules) + Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH) + with open(subworkflow_dir / "main.nf") as fh: + old_main_nf = fh.read() + with open(subworkflow_dir / "meta.yml") as fh: + old_meta_yml = fh.read() + + # Create a subworkflow with --migrate-pytest + mock_rich_ask.return_value = True + subworkflow_create = nf_core.subworkflows.SubworkflowCreate( + self.nfcore_modules, "bam_stats_samtools", migrate_pytest=True + ) + subworkflow_create.create() + + with open(subworkflow_dir / "main.nf") as fh: + new_main_nf = fh.read() + with open(subworkflow_dir / "meta.yml") as fh: + new_meta_yml = fh.read() + nextflow_config = subworkflow_dir / "tests" / "nextflow.config" + + # Check that old files have been copied to the new module + assert old_main_nf == new_main_nf + assert old_meta_yml == new_meta_yml + assert nextflow_config.is_file() + + # Check that pytest folder is deleted + assert not pytest_dir.is_dir() + + # Check that pytest_modules.yml is updated + with open(Path(self.nfcore_modules, "tests", "config", "pytest_modules.yml")) as fh: + modules_yml = yaml.safe_load(fh) + assert "subworkflows/bam_stats_samtools" not in modules_yml.keys() + + @mock.patch("rich.prompt.Confirm.ask") + def test_subworkflows_migrate_no_delete(self, mock_rich_ask): + """Create a 
subworkflow with the --migrate-pytest option to convert pytest to nf-test. + Test that pytest directory is not deleted.""" + pytest_dir = Path(self.nfcore_modules, "tests", "subworkflows", "nf-core", "bam_stats_samtools") + + # Clone modules repo with pytests + shutil.rmtree(self.nfcore_modules) + Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH) + + # Create a module with --migrate-pytest + mock_rich_ask.return_value = False + module_create = nf_core.subworkflows.SubworkflowCreate( + self.nfcore_modules, "bam_stats_samtools", migrate_pytest=True + ) + module_create.create() + + # Check that pytest folder is not deleted + assert pytest_dir.is_dir() + + # Check that pytest_modules.yml is updated + with open(Path(self.nfcore_modules, "tests", "config", "pytest_modules.yml")) as fh: + modules_yml = yaml.safe_load(fh) + assert "subworkflows/bam_stats_samtools" not in modules_yml.keys() diff --git a/tests/subworkflows/test_info.py b/tests/subworkflows/test_info.py new file mode 100644 index 000000000..cf0f49271 --- /dev/null +++ b/tests/subworkflows/test_info.py @@ -0,0 +1,63 @@ +from rich.console import Console + +import nf_core.subworkflows + +from ..test_subworkflows import TestSubworkflows +from ..utils import GITLAB_SUBWORKFLOWS_BRANCH, GITLAB_URL + + +class TestSubworkflowsInfo(TestSubworkflows): + def test_subworkflows_info_remote(self): + """Test getting info about a remote subworkflow""" + mods_info = nf_core.subworkflows.SubworkflowInfo(self.pipeline_dir, "bam_sort_stats_samtools") + mods_info_output = mods_info.get_component_info() + console = Console(record=True) + console.print(mods_info_output) + output = console.export_text() + + assert "Subworkflow: bam_sort_stats_samtools" in output + assert "Inputs" in output + assert "Outputs" in output + + def test_subworkflows_info_remote_gitlab(self): + """Test getting info about a subworkflow in the remote gitlab repo""" + mods_info = nf_core.subworkflows.SubworkflowInfo( 
+ self.pipeline_dir, "bam_sort_stats_samtools", remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH + ) + mods_info_output = mods_info.get_component_info() + console = Console(record=True) + console.print(mods_info_output) + output = console.export_text() + + assert "Subworkflow: bam_sort_stats_samtools" in output + assert "Inputs" in output + assert "Outputs" in output + assert "--git-remote" in output + + def test_subworkflows_info_local(self): + """Test getting info about a locally installed subworkflow""" + self.subworkflow_install.install("bam_sort_stats_samtools") + mods_info = nf_core.subworkflows.SubworkflowInfo(self.pipeline_dir, "bam_sort_stats_samtools") + mods_info.local = True + mods_info_output = mods_info.get_component_info() + console = Console(record=True) + console.print(mods_info_output) + output = console.export_text() + + assert "Subworkflow: bam_sort_stats_samtools" in output + assert "Inputs" in output + assert "Outputs" in output + + def test_subworkflows_info_in_modules_repo(self): + """Test getting info about a locally subworkflow in the modules repo""" + self.subworkflow_install.install("bam_sort_stats_samtools") + mods_info = nf_core.subworkflows.SubworkflowInfo(self.nfcore_modules, "bam_sort_stats_samtools") + mods_info.local = True + mods_info_output = mods_info.get_component_info() + console = Console(record=True) + console.print(mods_info_output) + output = console.export_text() + + assert "Subworkflow: bam_sort_stats_samtools" in output + assert "Inputs" in output + assert "Outputs" in output diff --git a/tests/subworkflows/test_install.py b/tests/subworkflows/test_install.py new file mode 100644 index 000000000..e0b2fc1ab --- /dev/null +++ b/tests/subworkflows/test_install.py @@ -0,0 +1,157 @@ +import os +from pathlib import Path + +import pytest + +from nf_core.modules.modules_json import ModulesJson +from nf_core.subworkflows.install import SubworkflowInstall + +from ..test_subworkflows import TestSubworkflows +from ..utils 
import ( + GITLAB_BRANCH_TEST_BRANCH, + GITLAB_REPO, + GITLAB_SUBWORKFLOWS_BRANCH, + GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH, + GITLAB_URL, + with_temporary_folder, +) + + +class TestSubworkflowsInstall(TestSubworkflows): + def test_subworkflow_install_nopipeline(self): + """Test installing a subworkflow - no pipeline given""" + assert self.subworkflow_install.dir is not None + self.subworkflow_install.dir = "" + assert self.subworkflow_install.install("foo") is False + + @with_temporary_folder + def test_subworkflows_install_emptypipeline(self, tmpdir): + """Test installing a subworkflow - empty dir given""" + + Path(tmpdir, "nf-core-pipe").mkdir(exist_ok=True) + self.subworkflow_install.dir = os.path.join(tmpdir, "nf-core-pipe") + with pytest.raises(UserWarning) as excinfo: + self.subworkflow_install.install("foo") + assert "Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value) + + def test_subworkflows_install_nosubworkflow(self): + """Test installing a subworkflow - unrecognised subworkflow given""" + assert self.subworkflow_install.install("foo") is False + + def test_subworkflows_install_bam_sort_stats_samtools(self): + """Test installing a subworkflow - bam_sort_stats_samtools""" + assert self.subworkflow_install.install("bam_sort_stats_samtools") is not False + subworkflow_path = os.path.join( + self.subworkflow_install.dir, "subworkflows", "nf-core", "bam_sort_stats_samtools" + ) + sub_subworkflow_path = os.path.join( + self.subworkflow_install.dir, "subworkflows", "nf-core", "bam_stats_samtools" + ) + samtools_index_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index") + samtools_sort_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "sort") + samtools_stats_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "stats") + samtools_idxstats_path = os.path.join( + self.subworkflow_install.dir, "modules", "nf-core", "samtools", "idxstats" 
+ ) + samtools_flagstat_path = os.path.join( + self.subworkflow_install.dir, "modules", "nf-core", "samtools", "flagstat" + ) + assert os.path.exists(subworkflow_path) + assert os.path.exists(sub_subworkflow_path) + assert os.path.exists(samtools_index_path) + assert os.path.exists(samtools_sort_path) + assert os.path.exists(samtools_stats_path) + assert os.path.exists(samtools_idxstats_path) + assert os.path.exists(samtools_flagstat_path) + + def test_subworkflows_install_bam_sort_stats_samtools_twice(self): + """Test installing a subworkflow - bam_sort_stats_samtools already there""" + self.subworkflow_install.install("bam_sort_stats_samtools") + assert self.subworkflow_install.install("bam_sort_stats_samtools") is False + + def test_subworkflows_install_from_gitlab(self): + """Test installing a subworkflow from GitLab""" + assert self.subworkflow_install_gitlab.install("bam_stats_samtools") is True + # Verify that the branch entry was added correctly + modules_json = ModulesJson(self.pipeline_dir) + assert ( + modules_json.get_component_branch(self.component_type, "bam_stats_samtools", GITLAB_URL, GITLAB_REPO) + == GITLAB_SUBWORKFLOWS_BRANCH + ) + + def test_subworkflows_install_different_branch_fail(self): + """Test installing a subworkflow from a different branch""" + install_obj = SubworkflowInstall(self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH) + # The bam_stats_samtools subworkflow does not exists in the branch-test branch + assert install_obj.install("bam_stats_samtools") is False + + def test_subworkflows_install_tracking(self): + """Test installing a subworkflow and finding the correct entries in installed_by section of modules.json""" + self.subworkflow_install.install("bam_sort_stats_samtools") + + # Verify that the installed_by entry was added correctly + modules_json = ModulesJson(self.pipeline_dir) + mod_json = modules_json.get_modules_json() + assert 
mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"][ + "bam_sort_stats_samtools" + ]["installed_by"] == ["subworkflows"] + assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"][ + "bam_stats_samtools" + ]["installed_by"] == ["bam_sort_stats_samtools"] + assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["modules"]["nf-core"]["samtools/stats"][ + "installed_by" + ] == ["bam_stats_samtools"] + assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["modules"]["nf-core"]["samtools/sort"][ + "installed_by" + ] == ["bam_sort_stats_samtools"] + + # Clean directory + self.subworkflow_remove.remove("bam_sort_stats_samtools") + + def test_subworkflows_install_tracking_added_already_installed(self): + """Test installing a subworkflow and finding the correct entries in installed_by section of modules.json""" + self.subworkflow_install.install("bam_sort_stats_samtools") + self.subworkflow_install.install("bam_stats_samtools") + + # Verify that the installed_by entry was added correctly + modules_json = ModulesJson(self.pipeline_dir) + mod_json = modules_json.get_modules_json() + assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"][ + "bam_sort_stats_samtools" + ]["installed_by"] == ["subworkflows"] + assert sorted( + mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"][ + "bam_stats_samtools" + ]["installed_by"] + ) == sorted(["bam_sort_stats_samtools", "subworkflows"]) + + # Clean directory + self.subworkflow_remove.remove("bam_sort_stats_samtools") + self.subworkflow_remove.remove("bam_stats_samtools") + + def test_subworkflows_install_tracking_added_super_subworkflow(self): + """Test installing a subworkflow and finding the correct entries in installed_by section of modules.json""" + self.subworkflow_install.install("bam_stats_samtools") + self.subworkflow_install.install("bam_sort_stats_samtools") + 
+ # Verify that the installed_by entry was added correctly + modules_json = ModulesJson(self.pipeline_dir) + mod_json = modules_json.get_modules_json() + assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"][ + "bam_sort_stats_samtools" + ]["installed_by"] == ["subworkflows"] + assert sorted( + mod_json["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"]["nf-core"][ + "bam_stats_samtools" + ]["installed_by"] + ) == sorted(["subworkflows", "bam_sort_stats_samtools"]) + + def test_subworkflows_install_alternate_remote(self): + """Test installing a module from a different remote with the same organization path""" + install_obj = SubworkflowInstall( + self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH + ) + # Install a subworkflow from GitLab which is also installed from GitHub with the same org_path + with pytest.raises(Exception) as excinfo: + install_obj.install("fastqc") + assert "Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value) diff --git a/tests/subworkflows/test_lint.py b/tests/subworkflows/test_lint.py new file mode 100644 index 000000000..f8c9bedbf --- /dev/null +++ b/tests/subworkflows/test_lint.py @@ -0,0 +1,391 @@ +import json +import shutil +from pathlib import Path + +import pytest + +import nf_core.subworkflows + +from ..test_subworkflows import TestSubworkflows +from ..utils import GITLAB_SUBWORKFLOWS_BRANCH, GITLAB_URL + + +class TestSubworkflowsLint(TestSubworkflows): + def test_subworkflows_lint(self): + """Test linting the fastq_align_bowtie2 subworkflow""" + self.subworkflow_install.install("fastq_align_bowtie2") + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir) + subworkflow_lint.lint(print_results=False, subworkflow="fastq_align_bowtie2") + assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" + assert len(subworkflow_lint.passed) > 0 + assert 
len(subworkflow_lint.warned) >= 0 + + def test_subworkflows_lint_empty(self): + """Test linting a pipeline with no subworkflows installed""" + self.subworkflow_remove.remove("utils_nextflow_pipeline", force=True) + self.subworkflow_remove.remove("utils_nfcore_pipeline", force=True) + self.subworkflow_remove.remove("utils_nfvalidation_plugin", force=True) + with pytest.raises(LookupError): + nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir) + + def test_subworkflows_lint_new_subworkflow(self): + """lint a new subworkflow""" + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) + subworkflow_lint.lint(print_results=True, all_subworkflows=True) + assert len(subworkflow_lint.failed) == 0 + + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) >= 0 + + def test_subworkflows_lint_no_gitlab(self): + """Test linting a pipeline with no subworkflows installed""" + with pytest.raises(LookupError): + nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir, remote_url=GITLAB_URL) + + def test_subworkflows_lint_gitlab_subworkflows(self): + """Lint subworkflows from a different remote""" + self.subworkflow_install_gitlab.install("bam_stats_samtools") + subworkflow_lint = nf_core.subworkflows.SubworkflowLint( + dir=self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH + ) + subworkflow_lint.lint(print_results=False, all_subworkflows=True) + assert len(subworkflow_lint.failed) == 0 + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) >= 0 + + def test_subworkflows_lint_multiple_remotes(self): + """Lint subworkflows from a different remote""" + self.subworkflow_install_gitlab.install("bam_stats_samtools") + self.subworkflow_install.install("fastq_align_bowtie2") + subworkflow_lint = nf_core.subworkflows.SubworkflowLint( + dir=self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH + ) + subworkflow_lint.lint(print_results=False, 
all_subworkflows=True) + assert len(subworkflow_lint.failed) == 0 + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) >= 0 + + def test_subworkflows_lint_snapshot_file(self): + """Test linting a subworkflow with a snapshot file""" + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) + subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") + assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) >= 0 + + def test_subworkflows_lint_snapshot_file_missing_fail(self): + """Test linting a subworkflow with a snapshot file missing, which should fail""" + Path( + self.nfcore_modules, + "subworkflows", + "nf-core", + "test_subworkflow", + "tests", + "main.nf.test.snap", + ).unlink() + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) + subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") + Path( + self.nfcore_modules, + "subworkflows", + "nf-core", + "test_subworkflow", + "tests", + "main.nf.test.snap", + ).touch() + assert len(subworkflow_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) >= 0 + + def test_subworkflows_lint_snapshot_file_not_needed(self): + """Test linting a subworkflow which doesn't need a snapshot file by removing the snapshot keyword in the main.nf.test file""" + with open( + Path( + self.nfcore_modules, + "subworkflows", + "nf-core", + "test_subworkflow", + "tests", + "main.nf.test", + ) + ) as fh: + content = fh.read() + new_content = content.replace("snapshot(", "snap (") + with open( + Path( + self.nfcore_modules, + "subworkflows", + "nf-core", + "test_subworkflow", + "tests", + "main.nf.test", + ), + "w", + ) as fh: + fh.write(new_content) + + Path( + 
self.nfcore_modules, + "subworkflows", + "nf-core", + "test_subworkflow", + "tests", + "main.nf.test.snap", + ).unlink() + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) + subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") + Path( + self.nfcore_modules, + "subworkflows", + "nf-core", + "test_subworkflow", + "tests", + "main.nf.test.snap", + ).touch() + assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) >= 0 + + def test_subworkflows_lint_less_than_two_modules_warning(self): + """Test linting a subworkflow with less than two modules""" + self.subworkflow_install.install("bam_stats_samtools") + # Remove two modules + with open( + Path( + self.pipeline_dir, + "subworkflows", + "nf-core", + "bam_stats_samtools", + "main.nf", + ) + ) as fh: + content = fh.read() + new_content = content.replace( + "include { SAMTOOLS_IDXSTATS } from '../../../modules/nf-core/samtools/idxstats/main'", + "", + ) + new_content = new_content.replace( + "include { SAMTOOLS_FLAGSTAT } from '../../../modules/nf-core/samtools/flagstat/main'", + "", + ) + with open( + Path( + self.pipeline_dir, + "subworkflows", + "nf-core", + "bam_stats_samtools", + "main.nf", + ), + "w", + ) as fh: + fh.write(new_content) + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir) + subworkflow_lint.lint(print_results=False, subworkflow="bam_stats_samtools") + assert len(subworkflow_lint.failed) >= 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) > 0 + assert subworkflow_lint.warned[0].lint_test == "main_nf_include" + # cleanup + self.subworkflow_remove.remove("bam_stats_samtools", force=True) + + def test_subworkflows_lint_include_multiple_alias(self): + """Test linting a subworkflow with 
multiple include methods""" + self.subworkflow_install.install("bam_stats_samtools") + with open( + Path( + self.pipeline_dir, + "subworkflows", + "nf-core", + "bam_stats_samtools", + "main.nf", + ) + ) as fh: + content = fh.read() + new_content = content.replace("SAMTOOLS_STATS", "SAMTOOLS_STATS_1") + new_content = new_content.replace( + "include { SAMTOOLS_STATS_1 ", + "include { SAMTOOLS_STATS as SAMTOOLS_STATS_1; SAMTOOLS_STATS as SAMTOOLS_STATS_2 ", + ) + with open( + Path( + self.pipeline_dir, + "subworkflows", + "nf-core", + "bam_stats_samtools", + "main.nf", + ), + "w", + ) as fh: + fh.write(new_content) + + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir) + subworkflow_lint.lint(print_results=False, subworkflow="bam_stats_samtools") + assert len(subworkflow_lint.failed) >= 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) == 2 + assert any( + [ + x.message == "Included component 'SAMTOOLS_STATS_1' versions are added in main.nf" + for x in subworkflow_lint.passed + ] + ) + assert any( + [x.message == "Included component 'SAMTOOLS_STATS_1' used in main.nf" for x in subworkflow_lint.passed] + ) + assert any( + [x.message == "Included component 'SAMTOOLS_STATS_2' not used in main.nf" for x in subworkflow_lint.warned] + ) + + # cleanup + self.subworkflow_remove.remove("bam_stats_samtools", force=True) + + def test_subworkflows_lint_capitalization_fail(self): + """Test linting a subworkflow with a capitalization fail""" + self.subworkflow_install.install("bam_stats_samtools") + # change workflow name to lowercase + with open( + Path( + self.pipeline_dir, + "subworkflows", + "nf-core", + "bam_stats_samtools", + "main.nf", + ) + ) as fh: + content = fh.read() + new_content = content.replace("workflow BAM_STATS_SAMTOOLS {", "workflow bam_stats_samtools {") + with open( + Path( + self.pipeline_dir, + "subworkflows", + "nf-core", + 
"bam_stats_samtools", + "main.nf", + ), + "w", + ) as fh: + fh.write(new_content) + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir) + subworkflow_lint.lint(print_results=False, subworkflow="bam_stats_samtools") + assert len(subworkflow_lint.failed) >= 1, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) >= 0 + assert any([x.lint_test == "workflow_capitals" for x in subworkflow_lint.failed]) + + # cleanup + self.subworkflow_remove.remove("bam_stats_samtools", force=True) + + def test_subworkflows_absent_version(self): + """Test linting a nf-test module if the versions is absent in the snapshot file `""" + snap_file = Path( + self.nfcore_modules, + "subworkflows", + "nf-core", + "test_subworkflow", + "tests", + "main.nf.test.snap", + ) + with open(snap_file) as fh: + content = fh.read() + new_content = content.replace("versions", "foo") + with open(snap_file, "w") as fh: + fh.write(new_content) + + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) + subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") + assert len(subworkflow_lint.failed) == 0 + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) >= 0, f"Linting warned with {[x.__dict__ for x in subworkflow_lint.warned]}" + assert any([x.lint_test == "test_snap_versions" for x in subworkflow_lint.warned]) + + # cleanup + with open(snap_file, "w") as fh: + fh.write(content) + + def test_subworkflows_missing_test_dir(self): + """Test linting a nf-test subworkflow if the tests directory is missing""" + test_dir = Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests") + test_dir_copy = shutil.copytree(test_dir, test_dir.parent / "tests_copy") + shutil.rmtree(test_dir) + + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) + 
subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") + assert len(subworkflow_lint.failed) == 0 + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) >= 0, f"Linting warned with {[x.__dict__ for x in subworkflow_lint.warned]}" + assert any([x.lint_test == "test_dir_versions" for x in subworkflow_lint.warned]) + + # cleanup + shutil.copytree(test_dir_copy, test_dir) + + def test_subworkflows_missing_main_nf(self): + """Test linting a nf-test subworkflow if the main.nf file is missing""" + main_nf = Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "main.nf") + main_nf_copy = shutil.copy(main_nf, main_nf.parent / "main_nf_copy") + main_nf.unlink() + + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) + subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") + assert len(subworkflow_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) >= 0 + assert subworkflow_lint.failed[0].lint_test == "main_nf_exists" + + # cleanup + shutil.copy(main_nf_copy, main_nf) + + def test_subworkflows_empty_file_in_snapshot(self): + """Test linting a nf-test subworkflow with an empty file sha sum in the test snapshot, which should make it fail (if it is not a stub)""" + snap_file = Path( + self.nfcore_modules, + "subworkflows", + "nf-core", + "test_subworkflow", + "tests", + "main.nf.test.snap", + ) + snap = json.load(snap_file.open()) + content = snap_file.read_text() + snap["my test"]["content"][0]["0"] = "test:md5,d41d8cd98f00b204e9800998ecf8427e" + + with open(snap_file, "w") as fh: + json.dump(snap, fh) + + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) + subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") + assert len(subworkflow_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in 
subworkflow_lint.failed]}" + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) >= 0 + assert subworkflow_lint.failed[0].lint_test == "test_snap_md5sum" + + # reset the file + with open(snap_file, "w") as fh: + fh.write(content) + + def test_subworkflows_empty_file_in_stub_snapshot(self): + """Test linting a nf-test subworkflow with an empty file sha sum in the stub test snapshot, which should make it not fail""" + snap_file = Path( + self.nfcore_modules, + "subworkflows", + "nf-core", + "test_subworkflow", + "tests", + "main.nf.test.snap", + ) + snap = json.load(snap_file.open()) + content = snap_file.read_text() + snap["my_test_stub"] = {"content": [{"0": "test:md5,d41d8cd98f00b204e9800998ecf8427e", "versions": {}}]} + + with open(snap_file, "w") as fh: + json.dump(snap, fh) + + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) + subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") + assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) >= 0 + assert any(x.lint_test == "test_snap_md5sum" for x in subworkflow_lint.passed) + + # reset the file + with open(snap_file, "w") as fh: + fh.write(content) diff --git a/tests/subworkflows/test_list.py b/tests/subworkflows/test_list.py new file mode 100644 index 000000000..5e4e6feb0 --- /dev/null +++ b/tests/subworkflows/test_list.py @@ -0,0 +1,48 @@ +from rich.console import Console + +import nf_core.subworkflows + +from ..test_subworkflows import TestSubworkflows +from ..utils import GITLAB_SUBWORKFLOWS_BRANCH, GITLAB_URL + + +class TestSubworkflowsList(TestSubworkflows): + def test_subworkflows_list_remote(self): + """Test listing available subworkflows""" + subworkflows_list = nf_core.subworkflows.SubworkflowList(None, remote=True) + listed_subworkflows = subworkflows_list.list_components() + console = 
Console(record=True) + console.print(listed_subworkflows) + output = console.export_text() + assert "bam_stats" in output + + def test_subworkflows_list_remote_gitlab(self): + """Test listing the subworkflows in the remote gitlab repo""" + subworkflows_list = nf_core.subworkflows.SubworkflowList( + None, remote=True, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH + ) + listed_subworkflows = subworkflows_list.list_components() + console = Console(record=True) + console.print(listed_subworkflows) + output = console.export_text() + assert "bam_stats" in output + + def test_subworkflows_install_and_list_subworkflows(self): + """Test listing locally installed subworkflows""" + self.subworkflow_install.install("bam_sort_stats_samtools") + subworkflows_list = nf_core.subworkflows.SubworkflowList(self.pipeline_dir, remote=False) + listed_subworkflows = subworkflows_list.list_components() + console = Console(record=True) + console.print(listed_subworkflows) + output = console.export_text() + assert "bam_stats" in output + + def test_subworkflows_install_gitlab_and_list_subworkflows(self): + """Test listing locally installed subworkflows""" + self.subworkflow_install_gitlab.install("bam_sort_stats_samtools") + subworkflows_list = nf_core.subworkflows.SubworkflowList(self.pipeline_dir, remote=False) + listed_subworkflows = subworkflows_list.list_components() + console = Console(record=True) + console.print(listed_subworkflows) + output = console.export_text() + assert "bam_stats" in output diff --git a/tests/subworkflows/test_remove.py b/tests/subworkflows/test_remove.py new file mode 100644 index 000000000..61c016b23 --- /dev/null +++ b/tests/subworkflows/test_remove.py @@ -0,0 +1,101 @@ +from pathlib import Path + +from nf_core.modules.modules_json import ModulesJson + +from ..test_subworkflows import TestSubworkflows + + +class TestSubworkflowsRemove(TestSubworkflows): + def test_subworkflows_remove_uninstalled_subworkflow(self): + """Test removing subworkflow 
without installing it""" + assert self.subworkflow_remove.remove("bam_sort_stats_samtools") is False + + def test_subworkflows_remove_subworkflow(self): + """Test removing subworkflow and all it's dependencies after installing it""" + self.subworkflow_install.install("bam_sort_stats_samtools") + + subworkflow_path = Path(self.subworkflow_install.dir, "subworkflows", "nf-core") + bam_sort_stats_samtools_path = Path(subworkflow_path, "bam_sort_stats_samtools") + bam_stats_samtools_path = Path(subworkflow_path, "bam_stats_samtools") + samtools_index_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index") + ModulesJson(self.pipeline_dir) + mod_json_before = ModulesJson(self.pipeline_dir).get_modules_json() + assert self.subworkflow_remove.remove("bam_sort_stats_samtools") + mod_json_after = ModulesJson(self.pipeline_dir).get_modules_json() + assert Path.exists(bam_sort_stats_samtools_path) is False + assert Path.exists(bam_stats_samtools_path) is False + assert Path.exists(samtools_index_path) is False + assert mod_json_before != mod_json_after + # assert subworkflows key is removed from modules.json + assert ( + "bam_sort_stats_samtools" + not in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"].keys() + ) + assert ( + "samtools/index" not in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["modules"].keys() + ) + + def test_subworkflows_remove_subworkflow_keep_installed_module(self): + """Test removing subworkflow and all it's dependencies after installing it, except for a separately installed module""" + self.subworkflow_install.install("bam_sort_stats_samtools") + self.mods_install.install("samtools/index") + + subworkflow_path = Path(self.subworkflow_install.dir, "subworkflows", "nf-core") + bam_sort_stats_samtools_path = Path(subworkflow_path, "bam_sort_stats_samtools") + bam_stats_samtools_path = Path(subworkflow_path, "bam_stats_samtools") + samtools_index_path = 
Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index") + + mod_json_before = ModulesJson(self.pipeline_dir).get_modules_json() + assert self.subworkflow_remove.remove("bam_sort_stats_samtools") + mod_json_after = ModulesJson(self.pipeline_dir).get_modules_json() + + assert Path.exists(bam_sort_stats_samtools_path) is False + assert Path.exists(bam_stats_samtools_path) is False + assert Path.exists(samtools_index_path) is True + assert mod_json_before != mod_json_after + # assert subworkflows key is removed from modules.json + assert ( + "bam_sort_stats_samtools" + not in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"].keys() + ) + assert ( + "samtools/index" + in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["modules"]["nf-core"].keys() + ) + + def test_subworkflows_remove_one_of_two_subworkflow(self): + """Test removing subworkflow and all it's dependencies after installing it""" + self.subworkflow_install.install("bam_sort_stats_samtools") + self.subworkflow_install.install("bam_stats_samtools") + subworkflow_path = Path(self.subworkflow_install.dir, "subworkflows", "nf-core") + bam_sort_stats_samtools_path = Path(subworkflow_path, "bam_sort_stats_samtools") + bam_stats_samtools_path = Path(subworkflow_path, "bam_stats_samtools") + samtools_index_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index") + samtools_stats_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "stats") + + assert self.subworkflow_remove.remove("bam_sort_stats_samtools") + + assert Path.exists(subworkflow_path) is True + assert Path.exists(bam_sort_stats_samtools_path) is False + assert Path.exists(bam_stats_samtools_path) is True + assert Path.exists(samtools_index_path) is False + assert Path.exists(samtools_stats_path) is True + self.subworkflow_remove.remove("bam_stats_samtools") + + def test_subworkflows_remove_included_subworkflow(self): + """Test 
removing subworkflow which is installed by another subworkflow and all it's dependencies.""" + self.subworkflow_install.install("bam_sort_stats_samtools") + subworkflow_path = Path(self.subworkflow_install.dir, "subworkflows", "nf-core") + bam_sort_stats_samtools_path = Path(subworkflow_path, "bam_sort_stats_samtools") + bam_stats_samtools_path = Path(subworkflow_path, "bam_stats_samtools") + samtools_index_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index") + samtools_stats_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "stats") + + assert self.subworkflow_remove.remove("bam_stats_samtools") is False + + assert Path.exists(subworkflow_path) is True + assert Path.exists(bam_sort_stats_samtools_path) is True + assert Path.exists(bam_stats_samtools_path) is True + assert Path.exists(samtools_index_path) is True + assert Path.exists(samtools_stats_path) is True + self.subworkflow_remove.remove("bam_sort_stats_samtools") diff --git a/tests/subworkflows/test_update.py b/tests/subworkflows/test_update.py new file mode 100644 index 000000000..d3b243357 --- /dev/null +++ b/tests/subworkflows/test_update.py @@ -0,0 +1,370 @@ +import logging +import shutil +import tempfile +from pathlib import Path +from unittest import mock + +import questionary +import yaml + +import nf_core.utils +from nf_core.modules.modules_json import ModulesJson +from nf_core.modules.modules_repo import NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE +from nf_core.modules.update import ModuleUpdate +from nf_core.subworkflows.update import SubworkflowUpdate + +from ..test_subworkflows import TestSubworkflows +from ..utils import OLD_SUBWORKFLOWS_SHA, cmp_component + + +class TestSubworkflowsUpdate(TestSubworkflows): + def test_install_and_update(self): + """Installs a subworkflow in the pipeline and updates it (no change)""" + self.subworkflow_install.install("bam_stats_samtools") + update_obj = SubworkflowUpdate(self.pipeline_dir, 
show_diff=False) + + # Copy the sw files and check that they are unaffected by the update + tmpdir = Path(tempfile.TemporaryDirectory().name) + sw_path = Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME, "bam_stats_samtools") + shutil.copytree(sw_path, tmpdir) + + assert update_obj.update("bam_stats_samtools") is True + assert cmp_component(tmpdir, sw_path) is True + + def test_install_at_hash_and_update(self): + """Installs an old version of a subworkflow in the pipeline and updates it""" + assert self.subworkflow_install_old.install("fastq_align_bowtie2") + update_obj = SubworkflowUpdate(self.pipeline_dir, show_diff=False, update_deps=True) + old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + + # Copy the sw files and check that they are affected by the update + tmpdir = Path(tempfile.TemporaryDirectory().name) + + sw_path = Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME, "fastq_align_bowtie2") + shutil.copytree(sw_path, tmpdir) + + assert update_obj.update("fastq_align_bowtie2") is True + assert cmp_component(tmpdir, sw_path) is False + + # Check that the modules.json is correctly updated + mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + # Get the up-to-date git_sha for the sw from the ModulesRepo object + assert ( + old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ + "git_sha" + ] + != mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ + "git_sha" + ] + ) + + # Mock questionary answer: update components + @mock.patch.object(questionary.Question, "unsafe_ask", return_value=True) + def test_install_at_hash_and_update_limit_output(self, mock_prompt): + """Installs an old version of a subworkflow in the pipeline and updates it with limit_output=True""" + self.caplog.set_level(logging.INFO) + assert self.subworkflow_install_old.install("fastq_align_bowtie2") + + update_obj = 
SubworkflowUpdate(self.pipeline_dir, show_diff=True, update_deps=True, limit_output=True) + + assert update_obj.update("fastq_align_bowtie2") + + # Check changes not shown for non-.nf files + assert "Changes in 'fastq_align_bowtie2/meta.yml' but not shown" in self.caplog.text + assert "Changes in 'bam_sort_stats_samtools/meta.yml' but not shown" in self.caplog.text + assert "Changes in 'bam_stats_samtools/meta.yml' but not shown" in self.caplog.text + assert "Changes in 'samtools/flagstat/meta.yml' but not shown" in self.caplog.text + # Check changes only shown for main.nf files + assert "Changes in 'fastq_align_bowtie2/main.nf'" in self.caplog.text + for line in self.caplog.text.split("\n"): + if line.startswith("---"): + assert line.endswith("main.nf") + + def test_install_at_hash_and_update_and_save_diff_to_file(self): + """Installs an old version of a sw in the pipeline and updates it. Save differences to a file.""" + assert self.subworkflow_install_old.install("fastq_align_bowtie2") + patch_path = Path(self.pipeline_dir, "fastq_align_bowtie2.patch") + update_obj = SubworkflowUpdate(self.pipeline_dir, save_diff_fn=patch_path, update_deps=True) + + # Copy the sw files and check that they are affected by the update + tmpdir = Path(tempfile.TemporaryDirectory().name) + + sw_path = Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME, "fastq_align_bowtie2") + shutil.copytree(sw_path, tmpdir) + + assert update_obj.update("fastq_align_bowtie2") is True + assert cmp_component(tmpdir, sw_path) is True + + with open(patch_path) as fh: + line = fh.readline() + assert line.startswith( + "Changes in module 'nf-core/fastq_align_bowtie2' between (f3c078809a2513f1c95de14f6633fe1f03572fdb) and" + ) + + def test_install_at_hash_and_update_and_save_diff_limit_output(self): + """Installs an old version of a sw in the pipeline and updates it. 
Save differences to a file.""" + # Install old version of fastq_align_bowtie2 + self.subworkflow_install_old.install("fastq_align_bowtie2") + patch_path = Path(self.pipeline_dir, "fastq_align_bowtie2.patch") + # Update saving the differences to a patch file and with `limit_output` + update_obj = SubworkflowUpdate(self.pipeline_dir, save_diff_fn=patch_path, update_deps=True, limit_output=True) + assert update_obj.update("fastq_align_bowtie2") + + # Check that the patch file was created + assert patch_path.exists(), f"Patch file was not created at {patch_path}" + + # Read the contents of the patch file + with open(patch_path) as fh: + content = fh.read() + # Check changes not shown for non-.nf files + assert "Changes in 'fastq_align_bowtie2/meta.yml' but not shown" in content + assert "Changes in 'bam_sort_stats_samtools/meta.yml' but not shown" in content + assert "Changes in 'bam_stats_samtools/meta.yml' but not shown" in content + assert "Changes in 'samtools/flagstat/meta.yml' but not shown" in content + # Check changes only shown for main.nf files + assert "Changes in 'fastq_align_bowtie2/main.nf'" in content + for line in content: + if line.startswith("---"): + assert line.endswith("main.nf") + + def test_update_all(self): + """Updates all subworkflows present in the pipeline""" + # Install subworkflows fastq_align_bowtie2, bam_sort_stats_samtools, bam_stats_samtools + self.subworkflow_install.install("fastq_align_bowtie2") + # Update all subworkflows + update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=True, show_diff=False) + assert update_obj.update() is True + + # We must reload the modules.json to get the updated version + mod_json_obj = ModulesJson(self.pipeline_dir) + mod_json = mod_json_obj.get_modules_json() + # Loop through all subworkflows and check that they are updated (according to the modules.json file) + for sw in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]: + correct_git_sha = 
list(update_obj.modules_repo.get_component_git_log(sw, "subworkflows", depth=1))[0][ + "git_sha" + ] + current_git_sha = mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw][ + "git_sha" + ] + assert correct_git_sha == current_git_sha + + def test_update_with_config_fixed_version(self): + """Try updating when there are entries in the .nf-core.yml""" + # Install subworkflow at the latest version + assert self.subworkflow_install.install("fastq_align_bowtie2") + + # Fix the subworkflow version in the .nf-core.yml to an old version + update_config = {NF_CORE_MODULES_REMOTE: {NF_CORE_MODULES_NAME: {"fastq_align_bowtie2": OLD_SUBWORKFLOWS_SHA}}} + config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) + tools_config["update"] = update_config + with open(Path(self.pipeline_dir, config_fn), "w") as f: + yaml.dump(tools_config, f) + + # Update all subworkflows in the pipeline + update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=True, show_diff=False) + assert update_obj.update() is True + + # Check that the git sha for fastq_align_bowtie2 is correctly downgraded + mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + assert "fastq_align_bowtie2" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME] + assert ( + "git_sha" + in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"] + ) + assert ( + mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ + "git_sha" + ] + == OLD_SUBWORKFLOWS_SHA + ) + + def test_update_with_config_dont_update(self): + """Try updating when sw is to be ignored""" + # Install an old version of fastq_align_bowtie2 + self.subworkflow_install_old.install("fastq_align_bowtie2") + + # Set the fastq_align_bowtie2 field to no update in the .nf-core.yml + update_config = {NF_CORE_MODULES_REMOTE: {NF_CORE_MODULES_NAME: {"fastq_align_bowtie2": False}}} + 
config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) + tools_config["update"] = update_config + with open(Path(self.pipeline_dir, config_fn), "w") as f: + yaml.dump(tools_config, f) + + # Update all modules in the pipeline + update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=True, show_diff=False) + assert update_obj.update() is True + + # Check that the git sha for fastq_align_bowtie2 is correctly downgraded + mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + assert "fastq_align_bowtie2" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME] + assert ( + "git_sha" + in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"] + ) + assert ( + mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ + "git_sha" + ] + == OLD_SUBWORKFLOWS_SHA + ) + + def test_update_with_config_fix_all(self): + """Fix the version of all nf-core subworkflows""" + # Install subworkflow at the latest version + assert self.subworkflow_install.install("fastq_align_bowtie2") + + # Fix the version of all nf-core subworkflows in the .nf-core.yml to an old version + update_config = {NF_CORE_MODULES_REMOTE: OLD_SUBWORKFLOWS_SHA} + config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) + tools_config["update"] = update_config + with open(Path(self.pipeline_dir, config_fn), "w") as f: + yaml.dump(tools_config, f) + + # Update fastq_align_bowtie2 + update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=False, update_deps=True, show_diff=False) + assert update_obj.update("fastq_align_bowtie2") is True + + # Check that the git sha for fastq_align_bowtie2 is correctly downgraded + mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + assert ( + "git_sha" + in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"] + ) + assert ( + 
mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ + "git_sha" + ] + == OLD_SUBWORKFLOWS_SHA + ) + + def test_update_with_config_no_updates(self): + """Don't update any nf-core subworkflows""" + # Install an old version of fastq_align_bowtie2 + self.subworkflow_install_old.install("fastq_align_bowtie2") + old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + + # Set all repository updates to False + update_config = {NF_CORE_MODULES_REMOTE: False} + config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) + tools_config["update"] = update_config + with open(Path(self.pipeline_dir, config_fn), "w") as f: + yaml.dump(tools_config, f) + + # Update all subworkflows in the pipeline + update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=True, show_diff=False) + assert update_obj.update() is True + + # Check that the git sha for fastq_align_bowtie2 is correctly downgraded and none of the subworkflows has changed + mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + for sw in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]: + assert "git_sha" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw] + assert ( + mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw]["git_sha"] + == old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw]["git_sha"] + ) + + def test_update_all_linked_components_from_subworkflow(self): + """Update a subworkflow and all modules and subworkflows used on it""" + # Install an old version of fastq_align_bowtie2 + self.subworkflow_install_old.install("fastq_align_bowtie2") + old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + + # Copy the sw files and check that they are affected by the update + tmpdir = Path(tempfile.TemporaryDirectory().name) + subworkflows_path = Path(self.pipeline_dir, "subworkflows", 
NF_CORE_MODULES_NAME) + modules_path = Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME) + shutil.copytree(subworkflows_path, Path(tmpdir, "subworkflows")) + shutil.copytree(modules_path, Path(tmpdir, "modules")) + + # Update fastq_align_bowtie2 and all modules and subworkflows used by that + update_obj = SubworkflowUpdate(self.pipeline_dir, update_deps=True, show_diff=False) + assert update_obj.update("fastq_align_bowtie2") is True + + mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + # Loop through all modules and subworkflows used in fastq_align_bowtie2 + # check that they are updated (according to the modules.json file) + for sw in ["fastq_align_bowtie2", "bam_sort_stats_samtools", "bam_stats_samtools"]: + assert ( + old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw]["git_sha"] + != mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw]["git_sha"] + ) + for mod in [ + "bowtie2/align", + "samtools/index", + "samtools/sort", + "samtools/flagstat", + "samtools/idxstats", + "samtools/stats", + ]: + assert ( + old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"] + != mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"] + ) + # Check that the subworkflow files are updated + assert ( + cmp_component( + Path(tmpdir, "subworkflows", "fastq_align_bowtie2"), Path(subworkflows_path, "fastq_align_bowtie2") + ) + is False + ) + + def test_update_all_subworkflows_from_module(self): + """Update a module and all subworkflows that use this module""" + # Install an old version of fastq_align_bowtie2 and thus all modules used by it (bowtie2/align) + self.subworkflow_install_old.install("fastq_align_bowtie2") + old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + + # Copy the sw files and check that they are affected by the update + tmpdir = Path(tempfile.TemporaryDirectory().name) + sw_path = 
Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME, "fastq_align_bowtie2") + shutil.copytree(sw_path, Path(tmpdir, "fastq_align_bowtie2")) + + # Update bowtie2/align and all subworkflows using it + update_obj = ModuleUpdate(self.pipeline_dir, update_deps=True, show_diff=False) + assert update_obj.update("bowtie2/align") is True + + mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + # Check that bowtie2/align and fastq_align_bowtie2 are updated + assert ( + old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ + "git_sha" + ] + != mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ + "git_sha" + ] + ) + assert cmp_component(Path(tmpdir, "fastq_align_bowtie2"), sw_path) is False + assert ( + old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME]["bowtie2/align"]["git_sha"] + != mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME]["bowtie2/align"]["git_sha"] + ) + + def test_update_change_of_included_modules(self): + """Update a subworkflow which has a module change in the new version.""" + # Install an old version of vcf_annotate_ensemblvep with tabix/bgziptabix and without tabix/tabix + self.subworkflow_install_module_change.install("vcf_annotate_ensemblvep") + old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + + # Check that tabix/bgziptabix is there + assert "tabix/bgziptabix" in old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] + assert Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "tabix/bgziptabix").is_dir() + # Check that tabix/tabix is not there + assert "tabix/tabix" not in old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] + assert not Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "tabix/tabix").is_dir() + + # Update vcf_annotate_ensemblvep without tabix/bgziptabix and with tabix/tabix + 
update_obj = SubworkflowUpdate(self.pipeline_dir, update_deps=True, show_diff=False) + assert update_obj.update("vcf_annotate_ensemblvep") is True + + mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + + # Check that tabix/bgziptabix is not there + assert "tabix/bgziptabix" not in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] + assert not Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "tabix/bgziptabix").is_dir() + # Check that tabix/tabix is there + assert "tabix/tabix" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] + assert Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "tabix/tabix").is_dir() + # Check that ensemblevep is not there but instead we have ensemblevep/vep (due to a file re-naming) + assert "ensemblvep" not in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] + assert "ensemblvep/vep" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] + assert Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "ensemblvep/vep").is_dir() diff --git a/tests/subworkflows/update.py b/tests/subworkflows/update.py deleted file mode 100644 index 42ed716b1..000000000 --- a/tests/subworkflows/update.py +++ /dev/null @@ -1,376 +0,0 @@ -import logging -import shutil -import tempfile -from pathlib import Path -from unittest import mock - -import questionary -import yaml - -import nf_core.utils -from nf_core.modules.modules_json import ModulesJson -from nf_core.modules.modules_repo import NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE -from nf_core.modules.update import ModuleUpdate -from nf_core.subworkflows.update import SubworkflowUpdate - -from ..utils import OLD_SUBWORKFLOWS_SHA, cmp_component - - -def test_install_and_update(self): - """Installs a subworkflow in the pipeline and updates it (no change)""" - self.subworkflow_install.install("bam_stats_samtools") - update_obj = SubworkflowUpdate(self.pipeline_dir, show_diff=False) - 
- # Copy the sw files and check that they are unaffected by the update - tmpdir = Path(tempfile.TemporaryDirectory().name) - sw_path = Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME, "bam_stats_samtools") - shutil.copytree(sw_path, tmpdir) - - assert update_obj.update("bam_stats_samtools") is True - assert cmp_component(tmpdir, sw_path) is True - - -def test_install_at_hash_and_update(self): - """Installs an old version of a subworkflow in the pipeline and updates it""" - assert self.subworkflow_install_old.install("fastq_align_bowtie2") - update_obj = SubworkflowUpdate(self.pipeline_dir, show_diff=False, update_deps=True) - old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - - # Copy the sw files and check that they are affected by the update - tmpdir = Path(tempfile.TemporaryDirectory().name) - - sw_path = Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME, "fastq_align_bowtie2") - shutil.copytree(sw_path, tmpdir) - - assert update_obj.update("fastq_align_bowtie2") is True - assert cmp_component(tmpdir, sw_path) is False - - # Check that the modules.json is correctly updated - mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - # Get the up-to-date git_sha for the sw from the ModulesRepo object - assert ( - old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ - "git_sha" - ] - != mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ - "git_sha" - ] - ) - - -# Mock questionary answer: update components -@mock.patch.object(questionary.Question, "unsafe_ask", return_value=True) -def test_install_at_hash_and_update_limit_output(self, mock_prompt): - """Installs an old version of a subworkflow in the pipeline and updates it with limit_output=True""" - self.caplog.set_level(logging.INFO) - assert self.subworkflow_install_old.install("fastq_align_bowtie2") - - update_obj = SubworkflowUpdate(self.pipeline_dir, 
show_diff=True, update_deps=True, limit_output=True) - - assert update_obj.update("fastq_align_bowtie2") - - # Check changes not shown for non-.nf files - assert "Changes in 'fastq_align_bowtie2/meta.yml' but not shown" in self.caplog.text - assert "Changes in 'bam_sort_stats_samtools/meta.yml' but not shown" in self.caplog.text - assert "Changes in 'bam_stats_samtools/meta.yml' but not shown" in self.caplog.text - assert "Changes in 'samtools/flagstat/meta.yml' but not shown" in self.caplog.text - # Check changes only shown for main.nf files - assert "Changes in 'fastq_align_bowtie2/main.nf'" in self.caplog.text - for line in self.caplog.text.split("\n"): - if line.startswith("---"): - assert line.endswith("main.nf") - - -def test_install_at_hash_and_update_and_save_diff_to_file(self): - """Installs an old version of a sw in the pipeline and updates it. Save differences to a file.""" - assert self.subworkflow_install_old.install("fastq_align_bowtie2") - patch_path = Path(self.pipeline_dir, "fastq_align_bowtie2.patch") - update_obj = SubworkflowUpdate(self.pipeline_dir, save_diff_fn=patch_path, update_deps=True) - - # Copy the sw files and check that they are affected by the update - tmpdir = Path(tempfile.TemporaryDirectory().name) - - sw_path = Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME, "fastq_align_bowtie2") - shutil.copytree(sw_path, tmpdir) - - assert update_obj.update("fastq_align_bowtie2") is True - assert cmp_component(tmpdir, sw_path) is True - - with open(patch_path) as fh: - line = fh.readline() - assert line.startswith( - "Changes in module 'nf-core/fastq_align_bowtie2' between (f3c078809a2513f1c95de14f6633fe1f03572fdb) and" - ) - - -def test_install_at_hash_and_update_and_save_diff_limit_output(self): - """Installs an old version of a sw in the pipeline and updates it. 
Save differences to a file.""" - # Install old version of fastq_align_bowtie2 - self.subworkflow_install_old.install("fastq_align_bowtie2") - patch_path = Path(self.pipeline_dir, "fastq_align_bowtie2.patch") - # Update saving the differences to a patch file and with `limit_output` - update_obj = SubworkflowUpdate(self.pipeline_dir, save_diff_fn=patch_path, update_deps=True, limit_output=True) - assert update_obj.update("fastq_align_bowtie2") - - # Check that the patch file was created - assert patch_path.exists(), f"Patch file was not created at {patch_path}" - - # Read the contents of the patch file - with open(patch_path) as fh: - content = fh.read() - # Check changes not shown for non-.nf files - assert "Changes in 'fastq_align_bowtie2/meta.yml' but not shown" in content - assert "Changes in 'bam_sort_stats_samtools/meta.yml' but not shown" in content - assert "Changes in 'bam_stats_samtools/meta.yml' but not shown" in content - assert "Changes in 'samtools/flagstat/meta.yml' but not shown" in content - # Check changes only shown for main.nf files - assert "Changes in 'fastq_align_bowtie2/main.nf'" in content - for line in content: - if line.startswith("---"): - assert line.endswith("main.nf") - - -def test_update_all(self): - """Updates all subworkflows present in the pipeline""" - # Install subworkflows fastq_align_bowtie2, bam_sort_stats_samtools, bam_stats_samtools - self.subworkflow_install.install("fastq_align_bowtie2") - # Update all subworkflows - update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=True, show_diff=False) - assert update_obj.update() is True - - # We must reload the modules.json to get the updated version - mod_json_obj = ModulesJson(self.pipeline_dir) - mod_json = mod_json_obj.get_modules_json() - # Loop through all subworkflows and check that they are updated (according to the modules.json file) - for sw in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]: - correct_git_sha = 
list(update_obj.modules_repo.get_component_git_log(sw, "subworkflows", depth=1))[0]["git_sha"] - current_git_sha = mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw]["git_sha"] - assert correct_git_sha == current_git_sha - - -def test_update_with_config_fixed_version(self): - """Try updating when there are entries in the .nf-core.yml""" - # Install subworkflow at the latest version - assert self.subworkflow_install.install("fastq_align_bowtie2") - - # Fix the subworkflow version in the .nf-core.yml to an old version - update_config = {NF_CORE_MODULES_REMOTE: {NF_CORE_MODULES_NAME: {"fastq_align_bowtie2": OLD_SUBWORKFLOWS_SHA}}} - config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - tools_config["update"] = update_config - with open(Path(self.pipeline_dir, config_fn), "w") as f: - yaml.dump(tools_config, f) - - # Update all subworkflows in the pipeline - update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=True, show_diff=False) - assert update_obj.update() is True - - # Check that the git sha for fastq_align_bowtie2 is correctly downgraded - mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - assert "fastq_align_bowtie2" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME] - assert ( - "git_sha" - in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"] - ) - assert ( - mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ - "git_sha" - ] - == OLD_SUBWORKFLOWS_SHA - ) - - -def test_update_with_config_dont_update(self): - """Try updating when sw is to be ignored""" - # Install an old version of fastq_align_bowtie2 - self.subworkflow_install_old.install("fastq_align_bowtie2") - - # Set the fastq_align_bowtie2 field to no update in the .nf-core.yml - update_config = {NF_CORE_MODULES_REMOTE: {NF_CORE_MODULES_NAME: {"fastq_align_bowtie2": False}}} - config_fn, 
tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - tools_config["update"] = update_config - with open(Path(self.pipeline_dir, config_fn), "w") as f: - yaml.dump(tools_config, f) - - # Update all modules in the pipeline - update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=True, show_diff=False) - assert update_obj.update() is True - - # Check that the git sha for fastq_align_bowtie2 is correctly downgraded - mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - assert "fastq_align_bowtie2" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME] - assert ( - "git_sha" - in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"] - ) - assert ( - mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ - "git_sha" - ] - == OLD_SUBWORKFLOWS_SHA - ) - - -def test_update_with_config_fix_all(self): - """Fix the version of all nf-core subworkflows""" - # Install subworkflow at the latest version - assert self.subworkflow_install.install("fastq_align_bowtie2") - - # Fix the version of all nf-core subworkflows in the .nf-core.yml to an old version - update_config = {NF_CORE_MODULES_REMOTE: OLD_SUBWORKFLOWS_SHA} - config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - tools_config["update"] = update_config - with open(Path(self.pipeline_dir, config_fn), "w") as f: - yaml.dump(tools_config, f) - - # Update fastq_align_bowtie2 - update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=False, update_deps=True, show_diff=False) - assert update_obj.update("fastq_align_bowtie2") is True - - # Check that the git sha for fastq_align_bowtie2 is correctly downgraded - mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - assert ( - "git_sha" - in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"] - ) - assert ( - 
mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ - "git_sha" - ] - == OLD_SUBWORKFLOWS_SHA - ) - - -def test_update_with_config_no_updates(self): - """Don't update any nf-core subworkflows""" - # Install an old version of fastq_align_bowtie2 - self.subworkflow_install_old.install("fastq_align_bowtie2") - old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - - # Set all repository updates to False - update_config = {NF_CORE_MODULES_REMOTE: False} - config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - tools_config["update"] = update_config - with open(Path(self.pipeline_dir, config_fn), "w") as f: - yaml.dump(tools_config, f) - - # Update all subworkflows in the pipeline - update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=True, show_diff=False) - assert update_obj.update() is True - - # Check that the git sha for fastq_align_bowtie2 is correctly downgraded and none of the subworkflows has changed - mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - for sw in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]: - assert "git_sha" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw] - assert ( - mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw]["git_sha"] - == old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw]["git_sha"] - ) - - -def test_update_all_linked_components_from_subworkflow(self): - """Update a subworkflow and all modules and subworkflows used on it""" - # Install an old version of fastq_align_bowtie2 - self.subworkflow_install_old.install("fastq_align_bowtie2") - old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - - # Copy the sw files and check that they are affected by the update - tmpdir = Path(tempfile.TemporaryDirectory().name) - subworkflows_path = Path(self.pipeline_dir, "subworkflows", 
NF_CORE_MODULES_NAME) - modules_path = Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME) - shutil.copytree(subworkflows_path, Path(tmpdir, "subworkflows")) - shutil.copytree(modules_path, Path(tmpdir, "modules")) - - # Update fastq_align_bowtie2 and all modules and subworkflows used by that - update_obj = SubworkflowUpdate(self.pipeline_dir, update_deps=True, show_diff=False) - assert update_obj.update("fastq_align_bowtie2") is True - - mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - # Loop through all modules and subworkflows used in fastq_align_bowtie2 - # check that they are updated (according to the modules.json file) - for sw in ["fastq_align_bowtie2", "bam_sort_stats_samtools", "bam_stats_samtools"]: - assert ( - old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw]["git_sha"] - != mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME][sw]["git_sha"] - ) - for mod in [ - "bowtie2/align", - "samtools/index", - "samtools/sort", - "samtools/flagstat", - "samtools/idxstats", - "samtools/stats", - ]: - assert ( - old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"] - != mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"] - ) - # Check that the subworkflow files are updated - assert ( - cmp_component( - Path(tmpdir, "subworkflows", "fastq_align_bowtie2"), Path(subworkflows_path, "fastq_align_bowtie2") - ) - is False - ) - - -def test_update_all_subworkflows_from_module(self): - """Update a module and all subworkflows that use this module""" - # Install an old version of fastq_align_bowtie2 and thus all modules used by it (bowtie2/align) - self.subworkflow_install_old.install("fastq_align_bowtie2") - old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - - # Copy the sw files and check that they are affected by the update - tmpdir = Path(tempfile.TemporaryDirectory().name) - sw_path = 
Path(self.pipeline_dir, "subworkflows", NF_CORE_MODULES_NAME, "fastq_align_bowtie2") - shutil.copytree(sw_path, Path(tmpdir, "fastq_align_bowtie2")) - - # Update bowtie2/align and all subworkflows using it - update_obj = ModuleUpdate(self.pipeline_dir, update_deps=True, show_diff=False) - assert update_obj.update("bowtie2/align") is True - - mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - # Check that bowtie2/align and fastq_align_bowtie2 are updated - assert ( - old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ - "git_sha" - ] - != mod_json["repos"][NF_CORE_MODULES_REMOTE]["subworkflows"][NF_CORE_MODULES_NAME]["fastq_align_bowtie2"][ - "git_sha" - ] - ) - assert cmp_component(Path(tmpdir, "fastq_align_bowtie2"), sw_path) is False - assert ( - old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME]["bowtie2/align"]["git_sha"] - != mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME]["bowtie2/align"]["git_sha"] - ) - - -def test_update_change_of_included_modules(self): - """Update a subworkflow which has a module change in the new version.""" - # Install an old version of vcf_annotate_ensemblvep with tabix/bgziptabix and without tabix/tabix - self.subworkflow_install_module_change.install("vcf_annotate_ensemblvep") - old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - - # Check that tabix/bgziptabix is there - assert "tabix/bgziptabix" in old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] - assert Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "tabix/bgziptabix").is_dir() - # Check that tabix/tabix is not there - assert "tabix/tabix" not in old_mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] - assert not Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "tabix/tabix").is_dir() - - # Update vcf_annotate_ensemblvep without tabix/bgziptabix and with tabix/tabix - 
update_obj = SubworkflowUpdate(self.pipeline_dir, update_deps=True, show_diff=False) - assert update_obj.update("vcf_annotate_ensemblvep") is True - - mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - - # Check that tabix/bgziptabix is not there - assert "tabix/bgziptabix" not in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] - assert not Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "tabix/bgziptabix").is_dir() - # Check that tabix/tabix is there - assert "tabix/tabix" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] - assert Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "tabix/tabix").is_dir() - # Check that ensemblevep is not there but instead we have ensemblevep/vep (due to a file re-naming) - assert "ensemblvep" not in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] - assert "ensemblvep/vep" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] - assert Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "ensemblvep/vep").is_dir() diff --git a/tests/test_subworkflows.py b/tests/test_subworkflows.py index 17bc678ca..af581fc9a 100644 --- a/tests/test_subworkflows.py +++ b/tests/test_subworkflows.py @@ -119,77 +119,34 @@ def tearDown(self): def _use_caplog(self, caplog): self.caplog = caplog - ################################################ - # Test of the individual subworkflow commands. 
# - ################################################ - - from .subworkflows.create import ( # type: ignore[misc] - test_subworkflows_create_fail_exists, - test_subworkflows_create_nfcore_modules, - test_subworkflows_create_succeed, - test_subworkflows_migrate, - test_subworkflows_migrate_no_delete, - ) - from .subworkflows.info import ( # type: ignore[misc] - test_subworkflows_info_in_modules_repo, - test_subworkflows_info_local, - test_subworkflows_info_remote, - test_subworkflows_info_remote_gitlab, - ) - from .subworkflows.install import ( # type: ignore[misc] - test_subworkflow_install_nopipeline, - test_subworkflows_install_alternate_remote, - test_subworkflows_install_bam_sort_stats_samtools, - test_subworkflows_install_bam_sort_stats_samtools_twice, - test_subworkflows_install_different_branch_fail, - test_subworkflows_install_emptypipeline, - test_subworkflows_install_from_gitlab, - test_subworkflows_install_nosubworkflow, - test_subworkflows_install_tracking, - test_subworkflows_install_tracking_added_already_installed, - test_subworkflows_install_tracking_added_super_subworkflow, - ) - from .subworkflows.lint import ( # type: ignore[misc] - test_subworkflows_absent_version, - test_subworkflows_empty_file_in_snapshot, - test_subworkflows_empty_file_in_stub_snapshot, - test_subworkflows_lint, - test_subworkflows_lint_capitalization_fail, - test_subworkflows_lint_empty, - test_subworkflows_lint_gitlab_subworkflows, - test_subworkflows_lint_include_multiple_alias, - test_subworkflows_lint_less_than_two_modules_warning, - test_subworkflows_lint_multiple_remotes, - test_subworkflows_lint_new_subworkflow, - test_subworkflows_lint_no_gitlab, - test_subworkflows_lint_snapshot_file, - test_subworkflows_lint_snapshot_file_missing_fail, - test_subworkflows_lint_snapshot_file_not_needed, - ) - from .subworkflows.list import ( # type: ignore[misc] - test_subworkflows_install_and_list_subworkflows, - test_subworkflows_install_gitlab_and_list_subworkflows, - 
test_subworkflows_list_remote, - test_subworkflows_list_remote_gitlab, - ) - from .subworkflows.remove import ( # type: ignore[misc] - test_subworkflows_remove_included_subworkflow, - test_subworkflows_remove_one_of_two_subworkflow, - test_subworkflows_remove_subworkflow, - test_subworkflows_remove_subworkflow_keep_installed_module, - ) - from .subworkflows.update import ( # type: ignore[misc] - test_install_and_update, - test_install_at_hash_and_update, - test_install_at_hash_and_update_and_save_diff_limit_output, - test_install_at_hash_and_update_and_save_diff_to_file, - test_install_at_hash_and_update_limit_output, - test_update_all, - test_update_all_linked_components_from_subworkflow, - test_update_all_subworkflows_from_module, - test_update_change_of_included_modules, - test_update_with_config_dont_update, - test_update_with_config_fix_all, - test_update_with_config_fixed_version, - test_update_with_config_no_updates, - ) + # ################################################ + # # Test of the individual subworkflow commands. 
# + # ################################################ + + # from .subworkflows.list import ( # type: ignore[misc] + # test_subworkflows_install_and_list_subworkflows, + # test_subworkflows_install_gitlab_and_list_subworkflows, + # test_subworkflows_list_remote, + # test_subworkflows_list_remote_gitlab, + # ) + # from .subworkflows.remove import ( # type: ignore[misc] + # test_subworkflows_remove_included_subworkflow, + # test_subworkflows_remove_one_of_two_subworkflow, + # test_subworkflows_remove_subworkflow, + # test_subworkflows_remove_subworkflow_keep_installed_module, + # ) + # from .subworkflows.update import ( # type: ignore[misc] + # test_install_and_update, + # test_install_at_hash_and_update, + # test_install_at_hash_and_update_and_save_diff_limit_output, + # test_install_at_hash_and_update_and_save_diff_to_file, + # test_install_at_hash_and_update_limit_output, + # test_update_all, + # test_update_all_linked_components_from_subworkflow, + # test_update_all_subworkflows_from_module, + # test_update_change_of_included_modules, + # test_update_with_config_dont_update, + # test_update_with_config_fix_all, + # test_update_with_config_fixed_version, + # test_update_with_config_no_updates, + # ) From c00623dda555b708dca2fc7e90d62a5aaa65d6c4 Mon Sep 17 00:00:00 2001 From: laurencekuhl Date: Wed, 17 Jul 2024 16:20:43 +0200 Subject: [PATCH 324/737] Remove release announcement for non nf-core pipelines --- nf_core/pipelines/create/create.py | 1 + 1 file changed, 1 insertion(+) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index bdbbca646..f9ba2ff7f 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -92,6 +92,7 @@ def __init__( "CODE_OF_CONDUCT.md", ".github/workflows/awsfulltest.yml", ".github/workflows/awstest.yml", + ".github/workflows/release-announcements.yml" ], } # Get list of files we're skipping with the supplied skip keys From c71dac8e6e2217324a417b9b58c60716102b1ae5 Mon Sep 
17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Tue, 16 Jul 2024 09:54:59 +0000 Subject: [PATCH 325/737] Update python:3.12-slim Docker digest to f11725a --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 7178b6526..8943b7062 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.12-slim@sha256:d5f16749562233aa4bd26538771d76bf0dfd0a0ea7ea8771985e267451397ae4 +FROM python:3.12-slim@sha256:f11725aba18c19664a408902103365eaf8013823ffc56270f921d1dc78a198cb LABEL authors="phil.ewels@seqera.io,erik.danielsson@scilifelab.se" \ description="Docker image containing requirements for nf-core/tools" From 33a4b67479000a458774b68c107a6c5794bcfec5 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Wed, 17 Jul 2024 13:50:05 +0000 Subject: [PATCH 326/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 823e000be..fba54a953 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -50,6 +50,7 @@ - update api docs to new structure ([#3054](https://github.com/nf-core/tools/pull/3054)) - Update to pytest v8 and move it to dev dependencies ([#3058](https://github.com/nf-core/tools/pull/3058)) - handle new jsonschema error type ([#3061](https://github.com/nf-core/tools/pull/3061)) +- Update python:3.12-slim Docker digest to f11725a ([#3071](https://github.com/nf-core/tools/pull/3071)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From 4e7be452d4fb16f8949a6bde0de70271bb5977a8 Mon Sep 17 00:00:00 2001 From: laurencekuhl Date: Wed, 17 Jul 2024 16:28:01 +0200 Subject: [PATCH 327/737] Ran pre commit --- nf_core/pipelines/create/create.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index f9ba2ff7f..27ca8ac84 100644 --- 
a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -92,7 +92,7 @@ def __init__( "CODE_OF_CONDUCT.md", ".github/workflows/awsfulltest.yml", ".github/workflows/awstest.yml", - ".github/workflows/release-announcements.yml" + ".github/workflows/release-announcements.yml", ], } # Get list of files we're skipping with the supplied skip keys From 77f9dd73d1e4a755406e70af4ad5bdc462eaa1ba Mon Sep 17 00:00:00 2001 From: laurencekuhl Date: Wed, 17 Jul 2024 16:32:30 +0200 Subject: [PATCH 328/737] update changelog --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index fba54a953..8cda88788 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,6 +8,8 @@ - Run awsfulltest on PRs to `master` with two PR approvals ([#3042](https://github.com/nf-core/tools/pull/3042)) - Remove deprecated syntax ([#3046](https://github.com/nf-core/tools/pull/3046)) - Use filename in code block for `params.yml` ([#3055](https://github.com/nf-core/tools/pull/3055)) +- Use filename in code block for `params.yml` ([#3055](https://github.com/nf-core/tools/pull/3055)) +- Remove release announcement for non nf-core pipelines ([#3072](https://github.com/nf-core/tools/pull/3072)) ### Linting From 789a7841af08b9f621043e8701a79c39a8c9877e Mon Sep 17 00:00:00 2001 From: laurencekuhl Date: Wed, 17 Jul 2024 17:01:15 +0200 Subject: [PATCH 329/737] Fix number of arguments for pipelines_create --- nf_core/__main__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index f33c63e87..192c7cd13 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -2181,14 +2181,14 @@ def command_lint( help="The name of the GitHub organisation where the pipeline will be hosted (default: nf-core)", ) @click.pass_context -def command_create(ctx, name, description, author, version, force, outdir, template_yaml, plain, organisation): +def command_create(ctx, name, description, author, version, 
force, outdir, template_yaml, organisation): """ Use `nf-core pipelines create` instead. """ log.warning( "The `[magenta]nf-core create[/]` command is deprecated. Use `[magenta]nf-core pipelines create[/]` instead." ) - pipelines_create(ctx, name, description, author, version, force, outdir, template_yaml, plain, organisation) + pipelines_create(ctx, name, description, author, version, force, outdir, template_yaml, organisation) # Main script is being run - launch the CLI From 2f66b482c0945099d8bdca78dbffb96810482525 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Wed, 17 Jul 2024 15:05:41 +0000 Subject: [PATCH 330/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index fba54a953..b3f1688e7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -51,6 +51,7 @@ - Update to pytest v8 and move it to dev dependencies ([#3058](https://github.com/nf-core/tools/pull/3058)) - handle new jsonschema error type ([#3061](https://github.com/nf-core/tools/pull/3061)) - Update python:3.12-slim Docker digest to f11725a ([#3071](https://github.com/nf-core/tools/pull/3071)) +- Fix number of arguments for pipelines_create within the command_create function ([#3074](https://github.com/nf-core/tools/pull/3074)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From f7be79d6d599918af49513fb41bd5943df9b7e60 Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 18 Jul 2024 10:12:10 +0200 Subject: [PATCH 331/737] migrate modules tests to new structure --- nf_core/components/components_command.py | 4 +- nf_core/modules/bump_versions.py | 3 +- tests/modules/bump_versions.py | 50 -- tests/modules/create.py | 165 ----- tests/modules/info.py | 63 -- tests/modules/install.py | 95 --- tests/modules/lint.py | 892 ----------------------- tests/modules/list.py | 134 ---- tests/modules/modules_json.py | 253 ------- tests/modules/patch.py | 360 --------- 
tests/modules/remove.py | 22 - tests/modules/test_bump_versions.py | 50 ++ tests/modules/test_create.py | 164 +++++ tests/modules/test_info.py | 62 ++ tests/modules/test_install.py | 90 +++ tests/modules/test_lint.py | 865 ++++++++++++++++++++++ tests/modules/test_list.py | 126 ++++ tests/modules/test_modules_json.py | 245 +++++++ tests/modules/test_patch.py | 365 ++++++++++ tests/modules/test_remove.py | 26 + tests/modules/test_update.py | 435 +++++++++++ tests/modules/update.py | 444 ----------- tests/test_modules.py | 128 ---- 23 files changed, 2432 insertions(+), 2609 deletions(-) delete mode 100644 tests/modules/bump_versions.py delete mode 100644 tests/modules/create.py delete mode 100644 tests/modules/info.py delete mode 100644 tests/modules/install.py delete mode 100644 tests/modules/lint.py delete mode 100644 tests/modules/list.py delete mode 100644 tests/modules/modules_json.py delete mode 100644 tests/modules/patch.py delete mode 100644 tests/modules/remove.py create mode 100644 tests/modules/test_bump_versions.py create mode 100644 tests/modules/test_create.py create mode 100644 tests/modules/test_info.py create mode 100644 tests/modules/test_install.py create mode 100644 tests/modules/test_lint.py create mode 100644 tests/modules/test_list.py create mode 100644 tests/modules/test_modules_json.py create mode 100644 tests/modules/test_patch.py create mode 100644 tests/modules/test_remove.py create mode 100644 tests/modules/test_update.py delete mode 100644 tests/modules/update.py diff --git a/nf_core/components/components_command.py b/nf_core/components/components_command.py index 4df67639e..aa1dccc0d 100644 --- a/nf_core/components/components_command.py +++ b/nf_core/components/components_command.py @@ -22,7 +22,7 @@ class ComponentCommand: def __init__( self, component_type: str, - dir: str, + dir: Union[str, Path], remote_url: Optional[str] = None, branch: Optional[str] = None, no_pull: bool = False, @@ -33,7 +33,7 @@ def __init__( Initialise the 
ComponentClass object """ self.component_type = component_type - self.dir = dir + self.dir = Path(dir) if dir else None self.modules_repo = ModulesRepo(remote_url, branch, no_pull, hide_progress) self.hide_progress = hide_progress self.no_prompts = no_prompts diff --git a/nf_core/modules/bump_versions.py b/nf_core/modules/bump_versions.py index fae379307..1b94d5910 100644 --- a/nf_core/modules/bump_versions.py +++ b/nf_core/modules/bump_versions.py @@ -6,6 +6,7 @@ import logging import os import re +from pathlib import Path from typing import Any, Dict, List, Optional, Tuple, Union import questionary @@ -30,7 +31,7 @@ class ModuleVersionBumper(ComponentCommand): # type: ignore[misc] def __init__( self, - pipeline_dir: str, + pipeline_dir: Union[str, Path], remote_url: Optional[str] = None, branch: Optional[str] = None, no_pull: bool = False, diff --git a/tests/modules/bump_versions.py b/tests/modules/bump_versions.py deleted file mode 100644 index ce8c6dbe1..000000000 --- a/tests/modules/bump_versions.py +++ /dev/null @@ -1,50 +0,0 @@ -import os -import re - -import pytest - -import nf_core.modules -from nf_core.modules.modules_utils import ModuleExceptionError - - -def test_modules_bump_versions_single_module(self): - """Test updating a single module""" - # Change the bpipe/test version to an older version - env_yml_path = os.path.join(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml") - with open(env_yml_path) as fh: - content = fh.read() - new_content = re.sub(r"bioconda::star=\d.\d.\d\D?", r"bioconda::star=2.6.1d", content) - with open(env_yml_path, "w") as fh: - fh.write(new_content) - version_bumper = nf_core.modules.ModuleVersionBumper(pipeline_dir=self.nfcore_modules) - version_bumper.bump_versions(module="bpipe/test") - assert len(version_bumper.failed) == 0 - - -def test_modules_bump_versions_all_modules(self): - """Test updating all modules""" - version_bumper = 
nf_core.modules.ModuleVersionBumper(pipeline_dir=self.nfcore_modules) - version_bumper.bump_versions(all_modules=True) - assert len(version_bumper.failed) == 0 - - -def test_modules_bump_versions_fail(self): - """Fail updating a module with wrong name""" - version_bumper = nf_core.modules.ModuleVersionBumper(pipeline_dir=self.nfcore_modules) - with pytest.raises(ModuleExceptionError) as excinfo: - version_bumper.bump_versions(module="no/module") - assert "Could not find the specified module:" in str(excinfo.value) - - -def test_modules_bump_versions_fail_unknown_version(self): - """Fail because of an unknown version""" - # Change the bpipe/test version to an older version - env_yml_path = os.path.join(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml") - with open(env_yml_path) as fh: - content = fh.read() - new_content = re.sub(r"bioconda::bpipe=\d.\d.\d\D?", r"bioconda::bpipe=xxx", content) - with open(env_yml_path, "w") as fh: - fh.write(new_content) - version_bumper = nf_core.modules.ModuleVersionBumper(pipeline_dir=self.nfcore_modules) - version_bumper.bump_versions(module="bpipe/test") - assert "Conda package had unknown version" in version_bumper.failed[0][0] diff --git a/tests/modules/create.py b/tests/modules/create.py deleted file mode 100644 index cf39621f0..000000000 --- a/tests/modules/create.py +++ /dev/null @@ -1,165 +0,0 @@ -import os -import shutil -from pathlib import Path -from unittest import mock - -import pytest -import requests_cache -import responses -import yaml -from git.repo import Repo - -import nf_core.modules -from tests.utils import ( - GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH, - GITLAB_URL, - mock_anaconda_api_calls, - mock_biocontainers_api_calls, -) - - -def test_modules_create_succeed(self): - """Succeed at creating the TrimGalore! 
module""" - with responses.RequestsMock() as rsps: - mock_anaconda_api_calls(rsps, "trim-galore", "0.6.7") - mock_biocontainers_api_calls(rsps, "trim-galore", "0.6.7") - module_create = nf_core.modules.ModuleCreate( - self.pipeline_dir, "trimgalore", "@author", "process_single", True, True, conda_name="trim-galore" - ) - with requests_cache.disabled(): - module_create.create() - assert os.path.exists(os.path.join(self.pipeline_dir, "modules", "local", "trimgalore.nf")) - - -def test_modules_create_fail_exists(self): - """Fail at creating the same module twice""" - with responses.RequestsMock() as rsps: - mock_anaconda_api_calls(rsps, "trim-galore", "0.6.7") - mock_biocontainers_api_calls(rsps, "trim-galore", "0.6.7") - module_create = nf_core.modules.ModuleCreate( - self.pipeline_dir, "trimgalore", "@author", "process_single", False, False, conda_name="trim-galore" - ) - with requests_cache.disabled(): - module_create.create() - with pytest.raises(UserWarning) as excinfo: - with requests_cache.disabled(): - module_create.create() - assert "Module file exists already" in str(excinfo.value) - - -def test_modules_create_nfcore_modules(self): - """Create a module in nf-core/modules clone""" - with responses.RequestsMock() as rsps: - mock_anaconda_api_calls(rsps, "fastqc", "0.11.9") - mock_biocontainers_api_calls(rsps, "fastqc", "0.11.9") - module_create = nf_core.modules.ModuleCreate( - self.nfcore_modules, "fastqc", "@author", "process_low", False, False - ) - with requests_cache.disabled(): - module_create.create() - assert os.path.exists(os.path.join(self.nfcore_modules, "modules", "nf-core", "fastqc", "main.nf")) - assert os.path.exists(os.path.join(self.nfcore_modules, "modules", "nf-core", "fastqc", "tests", "main.nf.test")) - - -def test_modules_create_nfcore_modules_subtool(self): - """Create a tool/subtool module in a nf-core/modules clone""" - with responses.RequestsMock() as rsps: - mock_anaconda_api_calls(rsps, "star", "2.8.10a") - 
mock_biocontainers_api_calls(rsps, "star", "2.8.10a") - module_create = nf_core.modules.ModuleCreate( - self.nfcore_modules, "star/index", "@author", "process_medium", False, False - ) - with requests_cache.disabled(): - module_create.create() - assert os.path.exists(os.path.join(self.nfcore_modules, "modules", "nf-core", "star", "index", "main.nf")) - assert os.path.exists( - os.path.join(self.nfcore_modules, "modules", "nf-core", "star", "index", "tests", "main.nf.test") - ) - - -@mock.patch("rich.prompt.Confirm.ask") -def test_modules_migrate(self, mock_rich_ask): - """Create a module with the --migrate-pytest option to convert pytest to nf-test""" - pytest_dir = Path(self.nfcore_modules, "tests", "modules", "nf-core", "samtools", "sort") - module_dir = Path(self.nfcore_modules, "modules", "nf-core", "samtools", "sort") - - # Clone modules repo with pytests - shutil.rmtree(self.nfcore_modules) - Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH) - with open(module_dir / "main.nf") as fh: - old_main_nf = fh.read() - with open(module_dir / "meta.yml") as fh: - old_meta_yml = fh.read() - - # Create a module with --migrate-pytest - mock_rich_ask.return_value = True - module_create = nf_core.modules.ModuleCreate(self.nfcore_modules, "samtools/sort", migrate_pytest=True) - module_create.create() - - with open(module_dir / "main.nf") as fh: - new_main_nf = fh.read() - with open(module_dir / "meta.yml") as fh: - new_meta_yml = fh.read() - nextflow_config = module_dir / "tests" / "nextflow.config" - - # Check that old files have been copied to the new module - assert old_main_nf == new_main_nf - assert old_meta_yml == new_meta_yml - assert nextflow_config.is_file() - - # Check that pytest folder is deleted - assert not pytest_dir.is_dir() - - # Check that pytest_modules.yml is updated - with open(Path(self.nfcore_modules, "tests", "config", "pytest_modules.yml")) as fh: - modules_yml = yaml.safe_load(fh) - assert "samtools/sort" 
not in modules_yml.keys() - - -@mock.patch("rich.prompt.Confirm.ask") -def test_modules_migrate_no_delete(self, mock_rich_ask): - """Create a module with the --migrate-pytest option to convert pytest to nf-test. - Test that pytest directory is not deleted.""" - pytest_dir = Path(self.nfcore_modules, "tests", "modules", "nf-core", "samtools", "sort") - - # Clone modules repo with pytests - shutil.rmtree(self.nfcore_modules) - Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH) - - # Create a module with --migrate-pytest - mock_rich_ask.return_value = False - module_create = nf_core.modules.ModuleCreate(self.nfcore_modules, "samtools/sort", migrate_pytest=True) - module_create.create() - - # Check that pytest folder is not deleted - assert pytest_dir.is_dir() - - # Check that pytest_modules.yml is updated - with open(Path(self.nfcore_modules, "tests", "config", "pytest_modules.yml")) as fh: - modules_yml = yaml.safe_load(fh) - assert "samtools/sort" not in modules_yml.keys() - - -@mock.patch("rich.prompt.Confirm.ask") -def test_modules_migrate_symlink(self, mock_rich_ask): - """Create a module with the --migrate-pytest option to convert pytest with symlinks to nf-test. 
- Test that the symlink is deleted and the file is copied.""" - - pytest_dir = Path(self.nfcore_modules, "tests", "modules", "nf-core", "samtools", "sort") - module_dir = Path(self.nfcore_modules, "modules", "nf-core", "samtools", "sort") - - # Clone modules repo with pytests - shutil.rmtree(self.nfcore_modules) - Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH) - - # Create a symlinked file in the pytest directory - symlink_file = pytest_dir / "symlink_file.txt" - symlink_file.symlink_to(module_dir / "main.nf") - - # Create a module with --migrate-pytest - mock_rich_ask.return_value = True - module_create = nf_core.modules.ModuleCreate(self.nfcore_modules, "samtools/sort", migrate_pytest=True) - module_create.create() - - # Check that symlink is deleted - assert not symlink_file.is_symlink() diff --git a/tests/modules/info.py b/tests/modules/info.py deleted file mode 100644 index 2dbd48b24..000000000 --- a/tests/modules/info.py +++ /dev/null @@ -1,63 +0,0 @@ -from rich.console import Console - -import nf_core.modules - -from ..utils import GITLAB_DEFAULT_BRANCH, GITLAB_URL - - -def test_modules_info_remote(self): - """Test getting info about a remote module""" - mods_info = nf_core.modules.ModuleInfo(self.pipeline_dir, "fastqc") - mods_info_output = mods_info.get_component_info() - console = Console(record=True) - console.print(mods_info_output) - output = console.export_text() - - assert "Module: fastqc" in output - assert "Inputs" in output - assert "Outputs" in output - - -def test_modules_info_remote_gitlab(self): - """Test getting info about a module in the remote gitlab repo""" - mods_info = nf_core.modules.ModuleInfo( - self.pipeline_dir, "fastqc", remote_url=GITLAB_URL, branch=GITLAB_DEFAULT_BRANCH - ) - mods_info_output = mods_info.get_component_info() - console = Console(record=True) - console.print(mods_info_output) - output = console.export_text() - - assert "Module: fastqc" in output - assert "Inputs" in 
output - assert "Outputs" in output - assert "--git-remote" in output - - -def test_modules_info_local(self): - """Test getting info about a locally installed module""" - self.mods_install.install("trimgalore") - mods_info = nf_core.modules.ModuleInfo(self.pipeline_dir, "trimgalore") - mods_info_output = mods_info.get_component_info() - console = Console(record=True) - console.print(mods_info_output) - output = console.export_text() - - assert "Module: trimgalore" in output - assert "Inputs" in output - assert "Outputs" in output - assert "Location" in output - - -def test_modules_info_in_modules_repo(self): - """Test getting info about a module in the modules repo""" - mods_info = nf_core.modules.ModuleInfo(self.nfcore_modules, "fastqc") - mods_info.local = True - mods_info_output = mods_info.get_component_info() - console = Console(record=True) - console.print(mods_info_output) - output = console.export_text() - - assert "Module: fastqc" in output - assert "Inputs" in output - assert "Outputs" in output diff --git a/tests/modules/install.py b/tests/modules/install.py deleted file mode 100644 index deca31204..000000000 --- a/tests/modules/install.py +++ /dev/null @@ -1,95 +0,0 @@ -import os - -import pytest - -from nf_core.modules.install import ModuleInstall -from nf_core.modules.modules_json import ModulesJson - -from ..utils import ( - GITLAB_BRANCH_ORG_PATH_BRANCH, - GITLAB_BRANCH_TEST_BRANCH, - GITLAB_REPO, - GITLAB_URL, - with_temporary_folder, -) - - -def test_modules_install_nopipeline(self): - """Test installing a module - no pipeline given""" - self.mods_install.dir = None - assert self.mods_install.install("foo") is False - - -@with_temporary_folder -def test_modules_install_emptypipeline(self, tmpdir): - """Test installing a module - empty dir given""" - os.mkdir(os.path.join(tmpdir, "nf-core-pipe")) - self.mods_install.dir = os.path.join(tmpdir, "nf-core-pipe") - with pytest.raises(UserWarning) as excinfo: - self.mods_install.install("foo") - assert 
"Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value) - - -def test_modules_install_nomodule(self): - """Test installing a module - unrecognised module given""" - assert self.mods_install.install("foo") is False - - -def test_modules_install_trimgalore(self): - """Test installing a module - TrimGalore!""" - assert self.mods_install.install("trimgalore") is not False - module_path = os.path.join(self.mods_install.dir, "modules", "nf-core", "trimgalore") - assert os.path.exists(module_path) - - -def test_modules_install_trimgalore_twice(self): - """Test installing a module - TrimGalore! already there""" - self.mods_install.install("trimgalore") - assert self.mods_install.install("trimgalore") is True - - -def test_modules_install_from_gitlab(self): - """Test installing a module from GitLab""" - assert self.mods_install_gitlab.install("fastqc") is True - - -def test_modules_install_different_branch_fail(self): - """Test installing a module from a different branch""" - install_obj = ModuleInstall(self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH) - # The FastQC module does not exists in the branch-test branch - assert install_obj.install("fastqc") is False - - -def test_modules_install_different_branch_succeed(self): - """Test installing a module from a different branch""" - install_obj = ModuleInstall(self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH) - # The fastp module does exists in the branch-test branch - assert install_obj.install("fastp") is True - - # Verify that the branch entry was added correctly - modules_json = ModulesJson(self.pipeline_dir) - assert ( - modules_json.get_component_branch(self.component_type, "fastp", GITLAB_URL, GITLAB_REPO) - == GITLAB_BRANCH_TEST_BRANCH - ) - - -def test_modules_install_tracking(self): - """Test installing a module and finding 'modules' in the installed_by section of modules.json""" - self.mods_install.install("trimgalore") - - # Verify that the 
installed_by entry was added correctly - modules_json = ModulesJson(self.pipeline_dir) - mod_json = modules_json.get_modules_json() - assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["modules"]["nf-core"]["trimgalore"][ - "installed_by" - ] == ["modules"] - - -def test_modules_install_alternate_remote(self): - """Test installing a module from a different remote with the same organization path""" - install_obj = ModuleInstall(self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_ORG_PATH_BRANCH) - # Install fastqc from GitLab which is also installed from GitHub with the same org_path - with pytest.raises(Exception) as excinfo: - install_obj.install("fastqc") - assert "Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value) diff --git a/tests/modules/lint.py b/tests/modules/lint.py deleted file mode 100644 index e1a4e27ff..000000000 --- a/tests/modules/lint.py +++ /dev/null @@ -1,892 +0,0 @@ -import json -from pathlib import Path - -import pytest -import yaml -from git.repo import Repo - -import nf_core.modules -from nf_core.modules.lint import main_nf -from nf_core.utils import set_wd - -from ..utils import GITLAB_NFTEST_BRANCH, GITLAB_URL -from .patch import BISMARK_ALIGN, CORRECT_SHA, PATCH_BRANCH, REPO_NAME, modify_main_nf - - -def setup_patch(pipeline_dir: str, modify_module: bool): - install_obj = nf_core.modules.ModuleInstall( - pipeline_dir, - prompt=False, - force=False, - remote_url=GITLAB_URL, - branch=PATCH_BRANCH, - sha=CORRECT_SHA, - ) - - # Install the module - install_obj.install(BISMARK_ALIGN) - - if modify_module: - # Modify the module - module_path = Path(pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) - modify_main_nf(module_path / "main.nf") - - -def test_modules_lint_trimgalore(self): - """Test linting the TrimGalore! 
module""" - self.mods_install.install("trimgalore") - module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir) - module_lint.lint(print_results=False, module="trimgalore") - assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) > 0 - assert len(module_lint.warned) >= 0 - - -def test_modules_lint_empty(self): - """Test linting a pipeline with no modules installed""" - self.mods_remove.remove("fastqc", force=True) - self.mods_remove.remove("multiqc", force=True) - with pytest.raises(LookupError): - nf_core.modules.ModuleLint(dir=self.pipeline_dir) - - -def test_modules_lint_new_modules(self): - """lint a new module""" - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, all_modules=True) - assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) > 0 - assert len(module_lint.warned) >= 0 - - -def test_modules_lint_no_gitlab(self): - """Test linting a pipeline with no modules installed""" - self.mods_remove.remove("fastqc", force=True) - self.mods_remove.remove("multiqc", force=True) - with pytest.raises(LookupError): - nf_core.modules.ModuleLint(dir=self.pipeline_dir, remote_url=GITLAB_URL) - - -def test_modules_lint_gitlab_modules(self): - """Lint modules from a different remote""" - self.mods_install_gitlab.install("fastqc") - self.mods_install_gitlab.install("multiqc") - module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir, remote_url=GITLAB_URL) - module_lint.lint(print_results=False, all_modules=True) - assert len(module_lint.failed) == 2 - assert len(module_lint.passed) > 0 - assert len(module_lint.warned) >= 0 - - -def test_modules_lint_multiple_remotes(self): - """Lint modules from a different remote""" - self.mods_install_gitlab.install("multiqc") - module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir, 
remote_url=GITLAB_URL) - module_lint.lint(print_results=False, all_modules=True) - assert len(module_lint.failed) == 1 - assert len(module_lint.passed) > 0 - assert len(module_lint.warned) >= 0 - - -def test_modules_lint_registry(self): - """Test linting the samtools module and alternative registry""" - self.mods_install.install("samtools") - module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir, registry="public.ecr.aws") - module_lint.lint(print_results=False, module="samtools") - assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) > 0 - assert len(module_lint.warned) >= 0 - module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir) - module_lint.lint(print_results=False, module="samtools") - assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) > 0 - assert len(module_lint.warned) >= 0 - - -def test_modules_lint_patched_modules(self): - """ - Test creating a patch file and applying it to a new version of the the files - """ - setup_patch(self.pipeline_dir, True) - - # Create a patch file - patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) - patch_obj.patch(BISMARK_ALIGN) - - # change temporarily working directory to the pipeline directory - # to avoid error from try_apply_patch() during linting - with set_wd(self.pipeline_dir): - module_lint = nf_core.modules.ModuleLint( - dir=self.pipeline_dir, - remote_url=GITLAB_URL, - branch=PATCH_BRANCH, - hide_progress=True, - ) - module_lint.lint( - print_results=False, - all_modules=True, - ) - - assert len(module_lint.failed) == 1 - assert len(module_lint.passed) > 0 - assert len(module_lint.warned) >= 0 - - -# A skeleton object with the passed/warned/failed list attrs -# Use this in place of a ModuleLint object to test behaviour of -# linting methods which don't need the full setup -class MockModuleLint: - 
def __init__(self): - self.passed = [] - self.warned = [] - self.failed = [] - - self.main_nf = "main_nf" - - -PROCESS_LABEL_GOOD = ( - """ - label 'process_high' - cpus 12 - """, - 1, - 0, - 0, -) -PROCESS_LABEL_NON_ALPHANUMERIC = ( - """ - label 'a:label:with:colons' - cpus 12 - """, - 0, - 2, - 0, -) -PROCESS_LABEL_GOOD_CONFLICTING = ( - """ - label 'process_high' - label 'process_low' - cpus 12 - """, - 0, - 1, - 0, -) -PROCESS_LABEL_GOOD_DUPLICATES = ( - """ - label 'process_high' - label 'process_high' - cpus 12 - """, - 0, - 2, - 0, -) -PROCESS_LABEL_GOOD_AND_NONSTANDARD = ( - """ - label 'process_high' - label 'process_extra_label' - cpus 12 - """, - 1, - 1, - 0, -) -PROCESS_LABEL_NONSTANDARD = ( - """ - label 'process_extra_label' - cpus 12 - """, - 0, - 2, - 0, -) -PROCESS_LABEL_NONSTANDARD_DUPLICATES = ( - """ - label process_extra_label - label process_extra_label - cpus 12 - """, - 0, - 3, - 0, -) -PROCESS_LABEL_NONE_FOUND = ( - """ - cpus 12 - """, - 0, - 1, - 0, -) - -PROCESS_LABEL_TEST_CASES = [ - PROCESS_LABEL_GOOD, - PROCESS_LABEL_NON_ALPHANUMERIC, - PROCESS_LABEL_GOOD_CONFLICTING, - PROCESS_LABEL_GOOD_DUPLICATES, - PROCESS_LABEL_GOOD_AND_NONSTANDARD, - PROCESS_LABEL_NONSTANDARD, - PROCESS_LABEL_NONSTANDARD_DUPLICATES, - PROCESS_LABEL_NONE_FOUND, -] - - -def test_modules_lint_check_process_labels(self): - for test_case in PROCESS_LABEL_TEST_CASES: - process, passed, warned, failed = test_case - mocked_ModuleLint = MockModuleLint() - main_nf.check_process_labels(mocked_ModuleLint, process.splitlines()) - assert len(mocked_ModuleLint.passed) == passed - assert len(mocked_ModuleLint.warned) == warned - assert len(mocked_ModuleLint.failed) == failed - - -# Test cases for linting the container definitions - -CONTAINER_SINGLE_GOOD = ( - "Single-line container definition should pass", - """ - container "quay.io/nf-core/gatk:4.4.0.0" //Biocontainers is missing a package - """, - 2, # passed - 0, # warned - 0, # failed -) - -CONTAINER_TWO_LINKS_GOOD = ( - 
"Multi-line container definition should pass", - """ - container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/gatk4:4.4.0.0--py36hdfd78af_0': - 'biocontainers/gatk4:4.4.0.0--py36hdfd78af_0' }" - """, - 6, - 0, - 0, -) - -CONTAINER_WITH_SPACE_BAD = ( - "Space in container URL should fail", - """ - container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/gatk4:4.4.0.0--py36hdfd78af_0 ': - 'biocontainers/gatk4:4.4.0.0--py36hdfd78af_0' }" - """, - 5, - 0, - 1, -) - -CONTAINER_MULTIPLE_DBLQUOTES_BAD = ( - "Incorrect quoting of container string should fail", - """ - container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/gatk4:4.4.0.0--py36hdfd78af_0 ': - "biocontainers/gatk4:4.4.0.0--py36hdfd78af_0" }" - """, - 4, - 0, - 1, -) - -CONTAINER_TEST_CASES = [ - CONTAINER_SINGLE_GOOD, - CONTAINER_TWO_LINKS_GOOD, - CONTAINER_WITH_SPACE_BAD, - CONTAINER_MULTIPLE_DBLQUOTES_BAD, -] - - -def test_modules_lint_check_url(self): - for test_case in CONTAINER_TEST_CASES: - test, process, passed, warned, failed = test_case - mocked_ModuleLint = MockModuleLint() - for line in process.splitlines(): - if line.strip(): - main_nf.check_container_link_line(mocked_ModuleLint, line, registry="quay.io") - - assert ( - len(mocked_ModuleLint.passed) == passed - ), f"{test}: Expected {passed} PASS, got {len(mocked_ModuleLint.passed)}." - assert ( - len(mocked_ModuleLint.warned) == warned - ), f"{test}: Expected {warned} WARN, got {len(mocked_ModuleLint.warned)}." - assert ( - len(mocked_ModuleLint.failed) == failed - ), f"{test}: Expected {failed} FAIL, got {len(mocked_ModuleLint.failed)}." 
- - -def test_modules_lint_snapshot_file(self): - """Test linting a module with a snapshot file""" - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) > 0 - assert len(module_lint.warned) >= 0 - - -def test_modules_lint_snapshot_file_missing_fail(self): - """Test linting a module with a snapshot file missing, which should fail""" - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "tests", - "main.nf.test.snap", - ).unlink() - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "tests", - "main.nf.test.snap", - ).touch() - assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) > 0 - assert len(module_lint.warned) >= 0 - assert module_lint.failed[0].lint_test == "test_snapshot_exists" - - -def test_modules_lint_snapshot_file_not_needed(self): - """Test linting a module which doesn't need a snapshot file by removing the snapshot keyword in the main.nf.test file""" - with open( - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "tests", - "main.nf.test", - ) - ) as fh: - content = fh.read() - new_content = content.replace("snapshot(", "snap (") - with open( - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "tests", - "main.nf.test", - ), - "w", - ) as fh: - fh.write(new_content) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) 
> 0 - assert len(module_lint.warned) >= 0 - - -def test_modules_environment_yml_file_doesnt_exists(self): - """Test linting a module with an environment.yml file""" - Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml").rename( - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "environment.yml.bak", - ) - ) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "environment.yml.bak", - ).rename( - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "environment.yml", - ) - ) - assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) > 0 - assert len(module_lint.warned) >= 0 - assert module_lint.failed[0].lint_test == "environment_yml_exists" - - -def test_modules_environment_yml_file_sorted_correctly(self): - """Test linting a module with a correctly sorted environment.yml file""" - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) > 0 - assert len(module_lint.warned) >= 0 - - -def test_modules_environment_yml_file_sorted_incorrectly(self): - """Test linting a module with an incorrectly sorted environment.yml file""" - with open( - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "environment.yml", - ) - ) as fh: - yaml_content = yaml.safe_load(fh) - # Add a new dependency to the environment.yml file and reverse the order - yaml_content["dependencies"].append("z") - yaml_content["dependencies"].reverse() - yaml_content = yaml.dump(yaml_content) - with open( - Path( - self.nfcore_modules, - "modules", - 
"nf-core", - "bpipe", - "test", - "environment.yml", - ), - "w", - ) as fh: - fh.write(yaml_content) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - # we fix the sorting on the fly, so this should pass - assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) > 0 - assert len(module_lint.warned) >= 0 - - -def test_modules_environment_yml_file_not_array(self): - """Test linting a module with an incorrectly formatted environment.yml file""" - with open( - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "environment.yml", - ) - ) as fh: - yaml_content = yaml.safe_load(fh) - yaml_content["dependencies"] = "z" - with open( - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "environment.yml", - ), - "w", - ) as fh: - fh.write(yaml.dump(yaml_content)) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) > 0 - assert len(module_lint.warned) >= 0 - assert module_lint.failed[0].lint_test == "environment_yml_valid" - - -def test_modules_environment_yml_file_name_mismatch(self): - """Test linting a module with a different name in the environment.yml file""" - with open( - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "environment.yml", - ) - ) as fh: - yaml_content = yaml.safe_load(fh) - yaml_content["name"] = "bpipe-test" - with open( - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "environment.yml", - ), - "w", - ) as fh: - fh.write(yaml.dump(yaml_content)) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") 
- # reset changes - yaml_content["name"] = "bpipe_test" - with open( - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "environment.yml", - ), - "w", - ) as fh: - fh.write(yaml.dump(yaml_content)) - - assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) > 0 - assert len(module_lint.warned) >= 0 - assert module_lint.failed[0].lint_test == "environment_yml_name" - - -def test_modules_meta_yml_incorrect_licence_field(self): - """Test linting a module with an incorrect Licence field in meta.yml""" - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml")) as fh: - meta_yml = yaml.safe_load(fh) - meta_yml["tools"][0]["bpipe"]["licence"] = "[MIT]" - with open( - Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml"), - "w", - ) as fh: - fh.write(yaml.dump(meta_yml)) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - - # reset changes - meta_yml["tools"][0]["bpipe"]["licence"] = ["MIT"] - with open( - Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml"), - "w", - ) as fh: - fh.write(yaml.dump(meta_yml)) - - assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) >= 0 - assert len(module_lint.warned) >= 0 - assert module_lint.failed[0].lint_test == "meta_yml_valid" - - -def test_modules_meta_yml_input_mismatch(self): - """Test linting a module with an extra entry in input fields in meta.yml compared to module.input""" - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf")) as fh: - main_nf = fh.read() - main_nf_new = main_nf.replace("path bam", "path bai") - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: - fh.write(main_nf_new) - module_lint = 
nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: - fh.write(main_nf) - assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) >= 0 - assert len(module_lint.warned) == 2 - lint_tests = [x.lint_test for x in module_lint.warned] - # check that it is there twice: - assert lint_tests.count("meta_input_meta_only") == 1 - assert lint_tests.count("meta_input_main_only") == 1 - - -def test_modules_meta_yml_output_mismatch(self): - """Test linting a module with an extra entry in output fields in meta.yml compared to module.output""" - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf")) as fh: - main_nf = fh.read() - main_nf_new = main_nf.replace("emit: bam", "emit: bai") - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: - fh.write(main_nf_new) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: - fh.write(main_nf) - assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) >= 0 - assert len(module_lint.warned) == 2 - lint_tests = [x.lint_test for x in module_lint.warned] - # check that it is there twice: - assert lint_tests.count("meta_output_meta_only") == 1 - assert lint_tests.count("meta_output_main_only") == 1 - - -def test_modules_meta_yml_incorrect_name(self): - """Test linting a module with an incorrect name in meta.yml""" - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml")) as fh: - meta_yml = yaml.safe_load(fh) - meta_yml["name"] = "bpipe/test" - # need 
to make the same change to the environment.yml file - with open( - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "environment.yml", - ) - ) as fh: - environment_yml = yaml.safe_load(fh) - environment_yml["name"] = "bpipe/test" - with open( - Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml"), - "w", - ) as fh: - fh.write(yaml.dump(meta_yml)) - with open( - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "environment.yml", - ), - "w", - ) as fh: - fh.write(yaml.dump(environment_yml)) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - - # reset changes - meta_yml["name"] = "bpipe_test" - with open( - Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml"), - "w", - ) as fh: - fh.write(yaml.dump(meta_yml)) - environment_yml["name"] = "bpipe_test" - with open( - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "environment.yml", - ), - "w", - ) as fh: - fh.write(yaml.dump(environment_yml)) - - assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) >= 0 - assert len(module_lint.warned) >= 0 - assert module_lint.failed[0].lint_test == "meta_name" - - -def test_modules_missing_test_dir(self): - """Test linting a module with a missing test directory""" - Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests").rename( - Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests.bak") - ) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests.bak").rename( - Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests") - ) - assert len(module_lint.failed) == 1, f"Linting failed with 
{[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) >= 0 - assert len(module_lint.warned) >= 0 - assert module_lint.failed[0].lint_test == "test_dir_exists" - - -def test_modules_missing_test_main_nf(self): - """Test linting a module with a missing test/main.nf file""" - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "tests", - "main.nf.test", - ).rename( - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "tests", - "main.nf.test.bak", - ) - ) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "tests", - "main.nf.test.bak", - ).rename( - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "tests", - "main.nf.test", - ) - ) - assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) >= 0 - assert len(module_lint.warned) >= 0 - assert module_lint.failed[0].lint_test == "test_main_nf_exists" - - -def test_modules_unused_pytest_files(self): - """Test linting a nf-test module with files still present in `tests/modules/`""" - Path(self.nfcore_modules, "tests", "modules", "bpipe", "test").mkdir(parents=True, exist_ok=True) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - Path(self.nfcore_modules, "tests", "modules", "bpipe", "test").rmdir() - assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) >= 0 - assert len(module_lint.warned) >= 0 - assert module_lint.failed[0].lint_test == "test_old_test_dir" - - -def test_nftest_failing_linting(self): - """Test linting a module which includes other modules in nf-test tests. 
- Linting tests""" - # Clone modules repo with testing modules - tmp_dir = self.nfcore_modules.parent - self.nfcore_modules = Path(tmp_dir, "modules-test") - Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_NFTEST_BRANCH) - - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="kallisto/quant") - - assert len(module_lint.failed) == 3, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) >= 0 - assert len(module_lint.warned) >= 0 - assert module_lint.failed[0].lint_test == "environment_yml_valid" - assert module_lint.failed[1].lint_test == "meta_yml_valid" - assert module_lint.failed[2].lint_test == "test_main_tags" - assert "kallisto/index" in module_lint.failed[2].message - - -def test_modules_absent_version(self): - """Test linting a nf-test module if the versions is absent in the snapshot file `""" - with open( - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "tests", - "main.nf.test.snap", - ) - ) as fh: - content = fh.read() - new_content = content.replace("versions", "foo") - with open( - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "tests", - "main.nf.test.snap", - ), - "w", - ) as fh: - fh.write(new_content) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - with open( - Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "tests", - "main.nf.test.snap", - ), - "w", - ) as fh: - fh.write(content) - assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) >= 0 - assert len(module_lint.warned) >= 0 - assert module_lint.failed[0].lint_test == "test_snap_versions" - - -def test_modules_empty_file_in_snapshot(self): - """Test linting a nf-test module with an empty file sha sum in the test snapshot, 
which should make it fail (if it is not a stub)""" - snap_file = Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "tests", - "main.nf.test.snap", - ) - snap = json.load(snap_file.open()) - content = snap_file.read_text() - snap["my test"]["content"][0]["0"] = "test:md5,d41d8cd98f00b204e9800998ecf8427e" - - with open(snap_file, "w") as fh: - json.dump(snap, fh) - - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) > 0 - assert len(module_lint.warned) >= 0 - assert module_lint.failed[0].lint_test == "test_snap_md5sum" - - # reset the file - with open(snap_file, "w") as fh: - fh.write(content) - - -def test_modules_empty_file_in_stub_snapshot(self): - """Test linting a nf-test module with an empty file sha sum in the stub test snapshot, which should make it not fail""" - snap_file = Path( - self.nfcore_modules, - "modules", - "nf-core", - "bpipe", - "test", - "tests", - "main.nf.test.snap", - ) - snap = json.load(snap_file.open()) - content = snap_file.read_text() - snap["my_test_stub"] = {"content": [{"0": "test:md5,d41d8cd98f00b204e9800998ecf8427e", "versions": {}}]} - - with open(snap_file, "w") as fh: - json.dump(snap, fh) - - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) > 0 - assert len(module_lint.warned) >= 0 - assert any(x.lint_test == "test_snap_md5sum" for x in module_lint.passed) - - # reset the file - with open(snap_file, "w") as fh: - fh.write(content) diff --git a/tests/modules/list.py b/tests/modules/list.py deleted file mode 100644 index 3cb00a84d..000000000 --- a/tests/modules/list.py +++ 
/dev/null @@ -1,134 +0,0 @@ -import json -from pathlib import Path - -import yaml -from rich.console import Console - -import nf_core.modules - -from ..utils import GITLAB_DEFAULT_BRANCH, GITLAB_URL - - -def test_modules_list_remote(self): - """Test listing available modules""" - mods_list = nf_core.modules.ModuleList(None, remote=True) - listed_mods = mods_list.list_components() - console = Console(record=True) - console.print(listed_mods) - output = console.export_text() - assert "fastqc" in output - - -def test_modules_list_remote_gitlab(self): - """Test listing the modules in the remote gitlab repo""" - mods_list = nf_core.modules.ModuleList(None, remote=True, remote_url=GITLAB_URL, branch=GITLAB_DEFAULT_BRANCH) - listed_mods = mods_list.list_components() - console = Console(record=True) - console.print(listed_mods) - output = console.export_text() - assert "fastqc" in output - - -def test_modules_list_pipeline(self): - """Test listing locally installed modules""" - mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False) - listed_mods = mods_list.list_components() - console = Console(record=True) - console.print(listed_mods) - output = console.export_text() - assert "fastqc" in output - assert "multiqc" in output - - -def test_modules_install_and_list_pipeline(self): - """Test listing locally installed modules""" - self.mods_install.install("trimgalore") - mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False) - listed_mods = mods_list.list_components() - console = Console(record=True) - console.print(listed_mods) - output = console.export_text() - assert "trimgalore" in output - - -def test_modules_install_gitlab_and_list_pipeline(self): - """Test listing locally installed modules""" - self.mods_install_gitlab.install("fastqc") - mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False) - listed_mods = mods_list.list_components() - console = Console(record=True) - console.print(listed_mods) - output = 
console.export_text() - assert "fastqc" in output - - -def test_modules_list_local_json(self): - """Test listing locally installed modules as JSON""" - mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False) - listed_mods = mods_list.list_components(print_json=True) - listed_mods = json.loads(listed_mods) - assert "fastqc" in listed_mods - assert "multiqc" in listed_mods - - -def test_modules_list_remote_json(self): - """Test listing available modules as JSON""" - mods_list = nf_core.modules.ModuleList(None, remote=True) - listed_mods = mods_list.list_components(print_json=True) - listed_mods = json.loads(listed_mods) - assert "fastqc" in listed_mods - assert "multiqc" in listed_mods - - -def test_modules_list_with_one_keyword(self): - """Test listing available modules with one keyword""" - mods_list = nf_core.modules.ModuleList(None, remote=True) - listed_mods = mods_list.list_components(keywords=["qc"]) - console = Console(record=True) - console.print(listed_mods) - output = console.export_text() - assert "multiqc" in output - - -def test_modules_list_with_keywords(self): - """Test listing available modules with multiple keywords""" - mods_list = nf_core.modules.ModuleList(None, remote=True) - listed_mods = mods_list.list_components(keywords=["fastq", "qc"]) - console = Console(record=True) - console.print(listed_mods) - output = console.export_text() - assert "fastqc" in output - - -def test_modules_list_with_unused_keyword(self): - """Test listing available modules with an unused keyword""" - mods_list = nf_core.modules.ModuleList(None, remote=True) - with self.assertLogs(level="INFO") as log: - listed_mods = mods_list.list_components(keywords=["you_will_never_find_me"]) - self.assertIn("No available", log.output[0]) - # expect empty list - assert listed_mods == "" - - -def test_modules_list_in_wrong_repo_fail(self): - """Test listing available modules in a non-pipeline repo""" - # modify repotype in .nf-core.yml - with 
open(Path(self.pipeline_dir, ".nf-core.yml")) as fh: - nf_core_yml = yaml.safe_load(fh) - nf_core_yml_orig = nf_core_yml.copy() - nf_core_yml["repository_type"] = "modules" - nf_core_yml["org_path"] = "nf-core" - - print(nf_core_yml) - with open(Path(self.pipeline_dir, ".nf-core.yml"), "w") as fh: - yaml.safe_dump(nf_core_yml, fh) - # expect error logged - with self.assertLogs(level="ERROR") as log: - mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False) - listed_mods = mods_list.list_components() - self.assertIn("must be run from a pipeline directory", log.output[0]) - # expect empty list - assert listed_mods == "" - # restore .nf-core.yml - with open(Path(self.pipeline_dir, ".nf-core.yml"), "w") as fh: - yaml.safe_dump(nf_core_yml_orig, fh) diff --git a/tests/modules/modules_json.py b/tests/modules/modules_json.py deleted file mode 100644 index e0100adfb..000000000 --- a/tests/modules/modules_json.py +++ /dev/null @@ -1,253 +0,0 @@ -import copy -import json -import shutil -from pathlib import Path - -from nf_core.modules.modules_json import ModulesJson -from nf_core.modules.modules_repo import ( - NF_CORE_MODULES_DEFAULT_BRANCH, - NF_CORE_MODULES_NAME, - NF_CORE_MODULES_REMOTE, - ModulesRepo, -) -from nf_core.modules.patch import ModulePatch - - -def test_get_modules_json(self): - """Checks that the get_modules_json function returns the correct result""" - mod_json_path = Path(self.pipeline_dir, "modules.json") - with open(mod_json_path) as fh: - try: - mod_json_sb = json.load(fh) - except json.JSONDecodeError as e: - raise UserWarning(f"Unable to load JSON file '{mod_json_path}' due to error {e}") - - mod_json_obj = ModulesJson(self.pipeline_dir) - mod_json = mod_json_obj.get_modules_json() - - # Check that the modules.json hasn't changed - assert mod_json == mod_json_sb - - -def test_mod_json_update(self): - """Checks whether the update function works properly""" - mod_json_obj = ModulesJson(self.pipeline_dir) - # Update the modules.json file 
- mod_repo_obj = ModulesRepo() - mod_json_obj.update("modules", mod_repo_obj, "MODULE_NAME", "GIT_SHA", "modules", write_file=False) - mod_json = mod_json_obj.get_modules_json() - assert "MODULE_NAME" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"] - assert "git_sha" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["MODULE_NAME"] - assert "GIT_SHA" == mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["MODULE_NAME"]["git_sha"] - assert ( - NF_CORE_MODULES_DEFAULT_BRANCH - == mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["MODULE_NAME"]["branch"] - ) - assert "modules" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["MODULE_NAME"]["installed_by"] - - -def test_mod_json_create(self): - """Test creating a modules.json file from scratch""" - mod_json_path = Path(self.pipeline_dir, "modules.json") - # Remove the existing modules.json file - mod_json_path.unlink() - - # Create the new modules.json file - # (There are no prompts as long as there are only nf-core modules) - ModulesJson(self.pipeline_dir).create() - - # Check that the file exists - assert (mod_json_path).exists() - - # Get the contents of the file - mod_json_obj = ModulesJson(self.pipeline_dir) - mod_json = mod_json_obj.get_modules_json() - - mods = ["fastqc", "multiqc"] - for mod in mods: - assert mod in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"] - assert "git_sha" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"][mod] - assert "branch" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"][mod] - - -def modify_main_nf(path): - """Modify a file to test patch creation""" - with open(path) as fh: - lines = fh.readlines() - # Modify $meta.id to $meta.single_end - lines[1] = ' tag "$meta.single_end"\n' - with open(path, "w") as fh: - fh.writelines(lines) - - -def test_mod_json_create_with_patch(self): - """Test creating a modules.json file from scratch when there are patched 
modules""" - mod_json_path = Path(self.pipeline_dir, "modules.json") - - # Modify the module - module_path = Path(self.pipeline_dir, "modules", "nf-core", "fastqc") - modify_main_nf(module_path / "main.nf") - - # Try creating a patch file - patch_obj = ModulePatch(self.pipeline_dir, NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_DEFAULT_BRANCH) - patch_obj.patch("fastqc") - - # Remove the existing modules.json file - mod_json_path.unlink() - - # Create the new modules.json file - ModulesJson(self.pipeline_dir).create() - - # Check that the file exists - assert mod_json_path.is_file() - - # Get the contents of the file - mod_json_obj = ModulesJson(self.pipeline_dir) - mod_json = mod_json_obj.get_modules_json() - - # Check that fastqc is in the file - assert "fastqc" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"] - assert "git_sha" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["fastqc"] - assert "branch" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["fastqc"] - - # Check that fastqc/main.nf maintains the changes - with open(module_path / "main.nf") as fh: - lines = fh.readlines() - assert lines[1] == ' tag "$meta.single_end"\n' - - -def test_mod_json_up_to_date(self): - """ - Checks if the modules.json file is up to date - when no changes have been made to the pipeline - """ - mod_json_obj = ModulesJson(self.pipeline_dir) - mod_json_before = mod_json_obj.get_modules_json() - mod_json_obj.check_up_to_date() - mod_json_after = mod_json_obj.get_modules_json() - - # Check that the modules.json hasn't changed - assert mod_json_before == mod_json_after - - -def test_mod_json_up_to_date_module_removed(self): - """ - Reinstall a module that has an entry in the modules.json - but is missing in the pipeline - """ - # Remove the fastqc module - fastqc_path = Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "fastqc") - shutil.rmtree(fastqc_path) - - # Check that the modules.json file is up to date, and reinstall the 
module - mod_json_obj = ModulesJson(self.pipeline_dir) - mod_json_obj.check_up_to_date() - - # Check that the module has been reinstalled - files = ["main.nf", "meta.yml"] - assert fastqc_path.exists() - for f in files: - assert Path(fastqc_path, f).exists() - - -def test_mod_json_up_to_date_reinstall_fails(self): - """ - Try reinstalling a module where the git_sha is invalid - """ - mod_json_obj = ModulesJson(self.pipeline_dir) - - # Update the fastqc module entry to an invalid git_sha - mod_json_obj.update("modules", ModulesRepo(), "fastqc", "INVALID_GIT_SHA", "modules", write_file=True) - - # Remove the fastqc module - fastqc_path = Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "fastqc") - shutil.rmtree(fastqc_path) - - # Check that the modules.json file is up to date, and remove the fastqc module entry - mod_json_obj.check_up_to_date() - mod_json = mod_json_obj.get_modules_json() - - # Check that the module has been removed from the modules.json - assert "fastqc" not in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"] - - -def test_mod_json_repo_present(self): - """Tests the repo_present function""" - mod_json_obj = ModulesJson(self.pipeline_dir) - - assert mod_json_obj.repo_present(NF_CORE_MODULES_REMOTE) is True - assert mod_json_obj.repo_present("INVALID_REPO") is False - - -def test_mod_json_module_present(self): - """Tests the module_present function""" - mod_json_obj = ModulesJson(self.pipeline_dir) - - assert mod_json_obj.module_present("fastqc", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME) is True - assert mod_json_obj.module_present("INVALID_MODULE", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME) is False - assert mod_json_obj.module_present("fastqc", "INVALID_REPO", "INVALID_DIR") is False - assert mod_json_obj.module_present("INVALID_MODULE", "INVALID_REPO", "INVALID_DIR") is False - - -def test_mod_json_get_module_version(self): - """Test the get_module_version function""" - mod_json_obj = ModulesJson(self.pipeline_dir) - 
mod_json = mod_json_obj.get_modules_json() - assert ( - mod_json_obj.get_module_version("fastqc", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME) - == mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["fastqc"]["git_sha"] - ) - assert mod_json_obj.get_module_version("INVALID_MODULE", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME) is None - - -def test_mod_json_dump(self): - """Tests the dump function""" - mod_json_obj = ModulesJson(self.pipeline_dir) - mod_json = mod_json_obj.get_modules_json() - # Remove the modules.json file - mod_json_path = Path(self.pipeline_dir, "modules.json") - mod_json_path.unlink() - - # Check that the dump function creates the file - mod_json_obj.dump() - assert mod_json_path.exists() - - # Check that the dump function writes the correct content - with open(mod_json_path) as f: - try: - mod_json_new = json.load(f) - except json.JSONDecodeError as e: - raise UserWarning(f"Unable to load JSON file '{mod_json_path}' due to error {e}") - assert mod_json == mod_json_new - - -def test_mod_json_with_empty_modules_value(self): - # Load module.json and remove the modules entry - mod_json_obj = ModulesJson(self.pipeline_dir) - mod_json_obj.create() # Create modules.json explicitly to get correct module sha - mod_json_orig = mod_json_obj.get_modules_json() - mod_json = copy.deepcopy(mod_json_orig) - mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"] = "" - # save the altered module.json and load it again to check if it will fix itself - mod_json_obj.modules_json = mod_json - mod_json_obj.dump() - mod_json_obj_new = ModulesJson(self.pipeline_dir) - mod_json_obj_new.check_up_to_date() - mod_json_new = mod_json_obj_new.get_modules_json() - assert mod_json_orig == mod_json_new - - -def test_mod_json_with_missing_modules_entry(self): - # Load module.json and remove the modules entry - mod_json_obj = ModulesJson(self.pipeline_dir) - mod_json_obj.create() # Create modules.json explicitly to get correct module sha - mod_json_orig = 
mod_json_obj.get_modules_json() - mod_json = copy.deepcopy(mod_json_orig) - mod_json["repos"][NF_CORE_MODULES_REMOTE].pop("modules") - # save the altered module.json and load it again to check if it will fix itself - mod_json_obj.modules_json = mod_json - mod_json_obj.dump() - mod_json_obj_new = ModulesJson(self.pipeline_dir) - mod_json_obj_new.check_up_to_date() - mod_json_new = mod_json_obj_new.get_modules_json() - assert mod_json_orig == mod_json_new diff --git a/tests/modules/patch.py b/tests/modules/patch.py deleted file mode 100644 index 513ea8a43..000000000 --- a/tests/modules/patch.py +++ /dev/null @@ -1,360 +0,0 @@ -import os -import tempfile -from pathlib import Path -from unittest import mock - -import pytest - -import nf_core.components.components_command -import nf_core.modules - -from ..utils import GITLAB_URL - -""" -Test the 'nf-core modules patch' command - -Uses a branch (patch-tester) in the GitLab nf-core/modules-test repo when -testing if the update commands works correctly with patch files -""" - -ORG_SHA = "002623ccc88a3b0cb302c7d8f13792a95354d9f2" -CORRECT_SHA = "1dff30bfca2d98eb7ac7b09269a15e822451d99f" -SUCCEED_SHA = "ba15c20c032c549d77c5773659f19c2927daf48e" -FAIL_SHA = "67b642d4471c4005220a342cad3818d5ba2b5a73" -BISMARK_ALIGN = "bismark/align" -REPO_NAME = "nf-core-test" -PATCH_BRANCH = "patch-tester" -REPO_URL = "https://gitlab.com/nf-core/modules-test.git" - - -def setup_patch(pipeline_dir, modify_module): - install_obj = nf_core.modules.ModuleInstall( - pipeline_dir, prompt=False, force=False, remote_url=GITLAB_URL, branch=PATCH_BRANCH, sha=ORG_SHA - ) - - # Install the module - install_obj.install(BISMARK_ALIGN) - - if modify_module: - # Modify the module - module_path = Path(pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) - modify_main_nf(module_path / "main.nf") - - -def modify_main_nf(path): - """Modify a file to test patch creation""" - with open(path) as fh: - lines = fh.readlines() - # We want a patch file that looks 
something like: - # - tuple val(meta), path(reads) - # - path index - # + tuple val(meta), path(reads), path(index) - for line_index in range(len(lines)): - if lines[line_index] == " tuple val(meta), path(reads)\n": - lines[line_index] = " tuple val(meta), path(reads), path(index)\n" - elif lines[line_index] == " path index\n": - to_pop = line_index - lines.pop(to_pop) - with open(path, "w") as fh: - fh.writelines(lines) - - -def test_create_patch_no_change(self): - """Test creating a patch when there is no change to the module""" - setup_patch(self.pipeline_dir, False) - - # Try creating a patch file - patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) - with pytest.raises(UserWarning): - patch_obj.patch(BISMARK_ALIGN) - - module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) - - # Check that no patch file has been added to the directory - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml"} - - # Check the 'modules.json' contains no patch file for the module - modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) is None - - -def test_create_patch_change(self): - """Test creating a patch when there is a change to the module""" - setup_patch(self.pipeline_dir, True) - - # Try creating a patch file - patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) - patch_obj.patch(BISMARK_ALIGN) - - module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) - - patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" - # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} - - # Check the 'modules.json' contains a patch file for the module - modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, 
REPO_NAME) == Path( - "modules", REPO_NAME, BISMARK_ALIGN, patch_fn - ) - - # Check that the correct lines are in the patch file - with open(module_path / patch_fn) as fh: - patch_lines = fh.readlines() - module_relpath = module_path.relative_to(self.pipeline_dir) - assert f"--- {module_relpath / 'main.nf'}\n" in patch_lines, module_relpath / "main.nf" - assert f"+++ {module_relpath / 'main.nf'}\n" in patch_lines - assert "- tuple val(meta), path(reads)\n" in patch_lines - assert "- path index\n" in patch_lines - assert "+ tuple val(meta), path(reads), path(index)\n" in patch_lines - - -def test_create_patch_try_apply_successful(self): - """ - Test creating a patch file and applying it to a new version of the the files - """ - - setup_patch(self.pipeline_dir, True) - module_relpath = Path("modules", REPO_NAME, BISMARK_ALIGN) - module_path = Path(self.pipeline_dir, module_relpath) - - # Try creating a patch file - patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) - patch_obj.patch(BISMARK_ALIGN) - - patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" - # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} - - # Check the 'modules.json' contains a patch file for the module - modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( - "modules", REPO_NAME, BISMARK_ALIGN, patch_fn - ) - - update_obj = nf_core.modules.ModuleUpdate( - self.pipeline_dir, sha=SUCCEED_SHA, remote_url=GITLAB_URL, branch=PATCH_BRANCH - ) - # Install the new files - install_dir = Path(tempfile.mkdtemp()) - update_obj.install_component_files(BISMARK_ALIGN, SUCCEED_SHA, update_obj.modules_repo, install_dir) - - # Try applying the patch - module_install_dir = install_dir / BISMARK_ALIGN - patch_relpath = module_relpath / patch_fn - assert 
update_obj.try_apply_patch(BISMARK_ALIGN, REPO_NAME, patch_relpath, module_path, module_install_dir) is True - - # Move the files from the temporary directory - update_obj.move_files_from_tmp_dir(BISMARK_ALIGN, install_dir, REPO_NAME, SUCCEED_SHA) - - # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} - - # Check the 'modules.json' contains a patch file for the module - modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( - "modules", REPO_NAME, BISMARK_ALIGN, patch_fn - ) - - # Check that the correct lines are in the patch file - with open(module_path / patch_fn) as fh: - patch_lines = fh.readlines() - module_relpath = module_path.relative_to(self.pipeline_dir) - assert f"--- {module_relpath / 'main.nf'}\n" in patch_lines - assert f"+++ {module_relpath / 'main.nf'}\n" in patch_lines - assert "- tuple val(meta), path(reads)\n" in patch_lines - assert "- path index\n" in patch_lines - assert "+ tuple val(meta), path(reads), path(index)\n" in patch_lines - - # Check that 'main.nf' is updated correctly - with open(module_path / "main.nf") as fh: - main_nf_lines = fh.readlines() - # These lines should have been removed by the patch - assert " tuple val(meta), path(reads)\n" not in main_nf_lines - assert " path index\n" not in main_nf_lines - # This line should have been added - assert " tuple val(meta), path(reads), path(index)\n" in main_nf_lines - - -def test_create_patch_try_apply_failed(self): - """ - Test creating a patch file and applying it to a new version of the the files - """ - - setup_patch(self.pipeline_dir, True) - module_relpath = Path("modules", REPO_NAME, BISMARK_ALIGN) - module_path = Path(self.pipeline_dir, module_relpath) - - # Try creating a patch file - patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) - 
patch_obj.patch(BISMARK_ALIGN) - - patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" - # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} - - # Check the 'modules.json' contains a patch file for the module - modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( - "modules", REPO_NAME, BISMARK_ALIGN, patch_fn - ) - - update_obj = nf_core.modules.ModuleUpdate( - self.pipeline_dir, sha=FAIL_SHA, remote_url=GITLAB_URL, branch=PATCH_BRANCH - ) - # Install the new files - install_dir = Path(tempfile.mkdtemp()) - update_obj.install_component_files(BISMARK_ALIGN, FAIL_SHA, update_obj.modules_repo, install_dir) - - # Try applying the patch - module_install_dir = install_dir / BISMARK_ALIGN - patch_path = module_relpath / patch_fn - assert update_obj.try_apply_patch(BISMARK_ALIGN, REPO_NAME, patch_path, module_path, module_install_dir) is False - - -def test_create_patch_update_success(self): - """ - Test creating a patch file and the updating the module - - Should have the same effect as 'test_create_patch_try_apply_successful' - but uses higher level api - """ - - setup_patch(self.pipeline_dir, True) - module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) - - # Try creating a patch file - patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) - patch_obj.patch(BISMARK_ALIGN) - - patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" - # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} - - # Check the 'modules.json' contains a patch file for the module - modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, GITLAB_URL, REPO_NAME) == Path( - "modules", 
REPO_NAME, BISMARK_ALIGN, patch_fn - ) - - # Update the module - update_obj = nf_core.modules.ModuleUpdate( - self.pipeline_dir, - sha=SUCCEED_SHA, - show_diff=False, - update_deps=True, - remote_url=GITLAB_URL, - branch=PATCH_BRANCH, - ) - assert update_obj.update(BISMARK_ALIGN) - - # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} - - # Check the 'modules.json' contains a patch file for the module - modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, GITLAB_URL, REPO_NAME) == Path( - "modules", REPO_NAME, BISMARK_ALIGN, patch_fn - ), modules_json_obj.get_patch_fn(BISMARK_ALIGN, GITLAB_URL, REPO_NAME) - - # Check that the correct lines are in the patch file - with open(module_path / patch_fn) as fh: - patch_lines = fh.readlines() - module_relpath = module_path.relative_to(self.pipeline_dir) - assert f"--- {module_relpath / 'main.nf'}\n" in patch_lines - assert f"+++ {module_relpath / 'main.nf'}\n" in patch_lines - assert "- tuple val(meta), path(reads)\n" in patch_lines - assert "- path index\n" in patch_lines - assert "+ tuple val(meta), path(reads), path(index)\n" in patch_lines - - # Check that 'main.nf' is updated correctly - with open(module_path / "main.nf") as fh: - main_nf_lines = fh.readlines() - # These lines should have been removed by the patch - assert " tuple val(meta), path(reads)\n" not in main_nf_lines - assert " path index\n" not in main_nf_lines - # This line should have been added - assert " tuple val(meta), path(reads), path(index)\n" in main_nf_lines - - -def test_create_patch_update_fail(self): - """ - Test creating a patch file and updating a module when there is a diff conflict - """ - - setup_patch(self.pipeline_dir, True) - module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) - - # Try creating a patch file - patch_obj = 
nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) - patch_obj.patch(BISMARK_ALIGN) - - patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" - # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} - - # Check the 'modules.json' contains a patch file for the module - modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( - "modules", REPO_NAME, BISMARK_ALIGN, patch_fn - ) - - # Save the file contents for downstream comparison - with open(module_path / patch_fn) as fh: - patch_contents = fh.read() - - update_obj = nf_core.modules.ModuleUpdate( - self.pipeline_dir, sha=FAIL_SHA, show_diff=False, update_deps=True, remote_url=GITLAB_URL, branch=PATCH_BRANCH - ) - update_obj.update(BISMARK_ALIGN) - - # Check that the installed files have not been affected by the attempted patch - temp_dir = Path(tempfile.mkdtemp()) - nf_core.components.components_command.ComponentCommand( - "modules", self.pipeline_dir, GITLAB_URL, PATCH_BRANCH - ).install_component_files(BISMARK_ALIGN, FAIL_SHA, update_obj.modules_repo, temp_dir) - - temp_module_dir = temp_dir / BISMARK_ALIGN - for file in os.listdir(temp_module_dir): - assert file in os.listdir(module_path) - with open(module_path / file) as fh: - installed = fh.read() - with open(temp_module_dir / file) as fh: - shouldbe = fh.read() - assert installed == shouldbe - - # Check that the patch file is unaffected - with open(module_path / patch_fn) as fh: - new_patch_contents = fh.read() - assert patch_contents == new_patch_contents - - -def test_remove_patch(self): - """Test creating a patch when there is no change to the module""" - setup_patch(self.pipeline_dir, True) - - # Try creating a patch file - patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) - patch_obj.patch(BISMARK_ALIGN) - - 
module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) - - # Check that a patch file with the correct name has been created - patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} - - # Check the 'modules.json' contains a patch file for the module - modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( - "modules", REPO_NAME, BISMARK_ALIGN, patch_fn - ) - - with mock.patch.object(nf_core.components.patch.questionary, "confirm") as mock_questionary: - mock_questionary.unsafe_ask.return_value = True - patch_obj.remove(BISMARK_ALIGN) - # Check that the diff file has been removed - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml"} - - # Check that the 'modules.json' entry has been removed - modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) is None diff --git a/tests/modules/remove.py b/tests/modules/remove.py deleted file mode 100644 index e412fd35a..000000000 --- a/tests/modules/remove.py +++ /dev/null @@ -1,22 +0,0 @@ -import os - - -def test_modules_remove_trimgalore(self): - """Test removing TrimGalore! module after installing it""" - self.mods_install.install("trimgalore") - module_path = os.path.join(self.mods_install.dir, "modules", "nf-core", "modules", "trimgalore") - assert self.mods_remove.remove("trimgalore") - assert os.path.exists(module_path) is False - - -def test_modules_remove_trimgalore_uninstalled(self): - """Test removing TrimGalore! 
module without installing it""" - assert self.mods_remove.remove("trimgalore") is False - - -def test_modules_remove_multiqc_from_gitlab(self): - """Test removing multiqc module after installing it from an alternative source""" - self.mods_install_gitlab.install("multiqc") - module_path = os.path.join(self.mods_install_gitlab.dir, "modules", "nf-core-test", "multiqc") - assert self.mods_remove_gitlab.remove("multiqc", force=True) - assert os.path.exists(module_path) is False diff --git a/tests/modules/test_bump_versions.py b/tests/modules/test_bump_versions.py new file mode 100644 index 000000000..29e030668 --- /dev/null +++ b/tests/modules/test_bump_versions.py @@ -0,0 +1,50 @@ +import os +import re + +import pytest + +import nf_core.modules +from nf_core.modules.modules_utils import ModuleExceptionError + +from ..test_modules import TestModules + + +class TestModulesBumpVersions(TestModules): + def test_modules_bump_versions_single_module(self): + """Test updating a single module""" + # Change the bpipe/test version to an older version + env_yml_path = os.path.join(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml") + with open(env_yml_path) as fh: + content = fh.read() + new_content = re.sub(r"bioconda::star=\d.\d.\d\D?", r"bioconda::star=2.6.1d", content) + with open(env_yml_path, "w") as fh: + fh.write(new_content) + version_bumper = nf_core.modules.ModuleVersionBumper(pipeline_dir=self.nfcore_modules) + version_bumper.bump_versions(module="bpipe/test") + assert len(version_bumper.failed) == 0 + + def test_modules_bump_versions_all_modules(self): + """Test updating all modules""" + version_bumper = nf_core.modules.ModuleVersionBumper(pipeline_dir=self.nfcore_modules) + version_bumper.bump_versions(all_modules=True) + assert len(version_bumper.failed) == 0 + + def test_modules_bump_versions_fail(self): + """Fail updating a module with wrong name""" + version_bumper = 
nf_core.modules.ModuleVersionBumper(pipeline_dir=self.nfcore_modules) + with pytest.raises(ModuleExceptionError) as excinfo: + version_bumper.bump_versions(module="no/module") + assert "Could not find the specified module:" in str(excinfo.value) + + def test_modules_bump_versions_fail_unknown_version(self): + """Fail because of an unknown version""" + # Change the bpipe/test version to an older version + env_yml_path = os.path.join(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml") + with open(env_yml_path) as fh: + content = fh.read() + new_content = re.sub(r"bioconda::bpipe=\d.\d.\d\D?", r"bioconda::bpipe=xxx", content) + with open(env_yml_path, "w") as fh: + fh.write(new_content) + version_bumper = nf_core.modules.ModuleVersionBumper(pipeline_dir=self.nfcore_modules) + version_bumper.bump_versions(module="bpipe/test") + assert "Conda package had unknown version" in version_bumper.failed[0][0] diff --git a/tests/modules/test_create.py b/tests/modules/test_create.py new file mode 100644 index 000000000..c84750183 --- /dev/null +++ b/tests/modules/test_create.py @@ -0,0 +1,164 @@ +import os +import shutil +from pathlib import Path +from unittest import mock + +import pytest +import requests_cache +import responses +import yaml +from git.repo import Repo + +import nf_core.modules +from tests.utils import ( + GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH, + GITLAB_URL, + mock_anaconda_api_calls, + mock_biocontainers_api_calls, +) + +from ..test_modules import TestModules + + +class TestModulesCreate(TestModules): + def test_modules_create_succeed(self): + """Succeed at creating the TrimGalore! 
module""" + with responses.RequestsMock() as rsps: + mock_anaconda_api_calls(rsps, "trim-galore", "0.6.7") + mock_biocontainers_api_calls(rsps, "trim-galore", "0.6.7") + module_create = nf_core.modules.ModuleCreate( + self.pipeline_dir, "trimgalore", "@author", "process_single", True, True, conda_name="trim-galore" + ) + with requests_cache.disabled(): + module_create.create() + assert os.path.exists(os.path.join(self.pipeline_dir, "modules", "local", "trimgalore.nf")) + + def test_modules_create_fail_exists(self): + """Fail at creating the same module twice""" + with responses.RequestsMock() as rsps: + mock_anaconda_api_calls(rsps, "trim-galore", "0.6.7") + mock_biocontainers_api_calls(rsps, "trim-galore", "0.6.7") + module_create = nf_core.modules.ModuleCreate( + self.pipeline_dir, "trimgalore", "@author", "process_single", False, False, conda_name="trim-galore" + ) + with requests_cache.disabled(): + module_create.create() + with pytest.raises(UserWarning) as excinfo: + with requests_cache.disabled(): + module_create.create() + assert "Module file exists already" in str(excinfo.value) + + def test_modules_create_nfcore_modules(self): + """Create a module in nf-core/modules clone""" + with responses.RequestsMock() as rsps: + mock_anaconda_api_calls(rsps, "fastqc", "0.11.9") + mock_biocontainers_api_calls(rsps, "fastqc", "0.11.9") + module_create = nf_core.modules.ModuleCreate( + self.nfcore_modules, "fastqc", "@author", "process_low", False, False + ) + with requests_cache.disabled(): + module_create.create() + assert os.path.exists(os.path.join(self.nfcore_modules, "modules", "nf-core", "fastqc", "main.nf")) + assert os.path.exists( + os.path.join(self.nfcore_modules, "modules", "nf-core", "fastqc", "tests", "main.nf.test") + ) + + def test_modules_create_nfcore_modules_subtool(self): + """Create a tool/subtool module in a nf-core/modules clone""" + with responses.RequestsMock() as rsps: + mock_anaconda_api_calls(rsps, "star", "2.8.10a") + 
mock_biocontainers_api_calls(rsps, "star", "2.8.10a") + module_create = nf_core.modules.ModuleCreate( + self.nfcore_modules, "star/index", "@author", "process_medium", False, False + ) + with requests_cache.disabled(): + module_create.create() + assert os.path.exists(os.path.join(self.nfcore_modules, "modules", "nf-core", "star", "index", "main.nf")) + assert os.path.exists( + os.path.join(self.nfcore_modules, "modules", "nf-core", "star", "index", "tests", "main.nf.test") + ) + + @mock.patch("rich.prompt.Confirm.ask") + def test_modules_migrate(self, mock_rich_ask): + """Create a module with the --migrate-pytest option to convert pytest to nf-test""" + pytest_dir = Path(self.nfcore_modules, "tests", "modules", "nf-core", "samtools", "sort") + module_dir = Path(self.nfcore_modules, "modules", "nf-core", "samtools", "sort") + + # Clone modules repo with pytests + shutil.rmtree(self.nfcore_modules) + Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH) + with open(module_dir / "main.nf") as fh: + old_main_nf = fh.read() + with open(module_dir / "meta.yml") as fh: + old_meta_yml = fh.read() + + # Create a module with --migrate-pytest + mock_rich_ask.return_value = True + module_create = nf_core.modules.ModuleCreate(self.nfcore_modules, "samtools/sort", migrate_pytest=True) + module_create.create() + + with open(module_dir / "main.nf") as fh: + new_main_nf = fh.read() + with open(module_dir / "meta.yml") as fh: + new_meta_yml = fh.read() + nextflow_config = module_dir / "tests" / "nextflow.config" + + # Check that old files have been copied to the new module + assert old_main_nf == new_main_nf + assert old_meta_yml == new_meta_yml + assert nextflow_config.is_file() + + # Check that pytest folder is deleted + assert not pytest_dir.is_dir() + + # Check that pytest_modules.yml is updated + with open(Path(self.nfcore_modules, "tests", "config", "pytest_modules.yml")) as fh: + modules_yml = yaml.safe_load(fh) + assert "samtools/sort" 
not in modules_yml.keys() + + @mock.patch("rich.prompt.Confirm.ask") + def test_modules_migrate_no_delete(self, mock_rich_ask): + """Create a module with the --migrate-pytest option to convert pytest to nf-test. + Test that pytest directory is not deleted.""" + pytest_dir = Path(self.nfcore_modules, "tests", "modules", "nf-core", "samtools", "sort") + + # Clone modules repo with pytests + shutil.rmtree(self.nfcore_modules) + Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH) + + # Create a module with --migrate-pytest + mock_rich_ask.return_value = False + module_create = nf_core.modules.ModuleCreate(self.nfcore_modules, "samtools/sort", migrate_pytest=True) + module_create.create() + + # Check that pytest folder is not deleted + assert pytest_dir.is_dir() + + # Check that pytest_modules.yml is updated + with open(Path(self.nfcore_modules, "tests", "config", "pytest_modules.yml")) as fh: + modules_yml = yaml.safe_load(fh) + assert "samtools/sort" not in modules_yml.keys() + + @mock.patch("rich.prompt.Confirm.ask") + def test_modules_migrate_symlink(self, mock_rich_ask): + """Create a module with the --migrate-pytest option to convert pytest with symlinks to nf-test. 
+ Test that the symlink is deleted and the file is copied.""" + + pytest_dir = Path(self.nfcore_modules, "tests", "modules", "nf-core", "samtools", "sort") + module_dir = Path(self.nfcore_modules, "modules", "nf-core", "samtools", "sort") + + # Clone modules repo with pytests + shutil.rmtree(self.nfcore_modules) + Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH) + + # Create a symlinked file in the pytest directory + symlink_file = pytest_dir / "symlink_file.txt" + symlink_file.symlink_to(module_dir / "main.nf") + + # Create a module with --migrate-pytest + mock_rich_ask.return_value = True + module_create = nf_core.modules.ModuleCreate(self.nfcore_modules, "samtools/sort", migrate_pytest=True) + module_create.create() + + # Check that symlink is deleted + assert not symlink_file.is_symlink() diff --git a/tests/modules/test_info.py b/tests/modules/test_info.py new file mode 100644 index 000000000..890685404 --- /dev/null +++ b/tests/modules/test_info.py @@ -0,0 +1,62 @@ +from rich.console import Console + +import nf_core.modules + +from ..test_modules import TestModules +from ..utils import GITLAB_DEFAULT_BRANCH, GITLAB_URL + + +class TestModulesCreate(TestModules): + def test_modules_info_remote(self): + """Test getting info about a remote module""" + mods_info = nf_core.modules.ModuleInfo(self.pipeline_dir, "fastqc") + mods_info_output = mods_info.get_component_info() + console = Console(record=True) + console.print(mods_info_output) + output = console.export_text() + + assert "Module: fastqc" in output + assert "Inputs" in output + assert "Outputs" in output + + def test_modules_info_remote_gitlab(self): + """Test getting info about a module in the remote gitlab repo""" + mods_info = nf_core.modules.ModuleInfo( + self.pipeline_dir, "fastqc", remote_url=GITLAB_URL, branch=GITLAB_DEFAULT_BRANCH + ) + mods_info_output = mods_info.get_component_info() + console = Console(record=True) + console.print(mods_info_output) + 
output = console.export_text() + + assert "Module: fastqc" in output + assert "Inputs" in output + assert "Outputs" in output + assert "--git-remote" in output + + def test_modules_info_local(self): + """Test getting info about a locally installed module""" + self.mods_install.install("trimgalore") + mods_info = nf_core.modules.ModuleInfo(self.pipeline_dir, "trimgalore") + mods_info_output = mods_info.get_component_info() + console = Console(record=True) + console.print(mods_info_output) + output = console.export_text() + + assert "Module: trimgalore" in output + assert "Inputs" in output + assert "Outputs" in output + assert "Location" in output + + def test_modules_info_in_modules_repo(self): + """Test getting info about a module in the modules repo""" + mods_info = nf_core.modules.ModuleInfo(self.nfcore_modules, "fastqc") + mods_info.local = True + mods_info_output = mods_info.get_component_info() + console = Console(record=True) + console.print(mods_info_output) + output = console.export_text() + + assert "Module: fastqc" in output + assert "Inputs" in output + assert "Outputs" in output diff --git a/tests/modules/test_install.py b/tests/modules/test_install.py new file mode 100644 index 000000000..b90f01ee6 --- /dev/null +++ b/tests/modules/test_install.py @@ -0,0 +1,90 @@ +import os +from pathlib import Path + +import pytest + +from nf_core.modules.install import ModuleInstall +from nf_core.modules.modules_json import ModulesJson + +from ..test_modules import TestModules +from ..utils import ( + GITLAB_BRANCH_ORG_PATH_BRANCH, + GITLAB_BRANCH_TEST_BRANCH, + GITLAB_REPO, + GITLAB_URL, + with_temporary_folder, +) + + +class TestModulesCreate(TestModules): + def test_modules_install_nopipeline(self): + """Test installing a module - no pipeline given""" + self.mods_install.dir = None + assert self.mods_install.install("foo") is False + + @with_temporary_folder + def test_modules_install_emptypipeline(self, tmpdir): + """Test installing a module - empty dir 
given""" + os.mkdir(os.path.join(tmpdir, "nf-core-pipe")) + self.mods_install.dir = os.path.join(tmpdir, "nf-core-pipe") + with pytest.raises(UserWarning) as excinfo: + self.mods_install.install("foo") + assert "Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value) + + def test_modules_install_nomodule(self): + """Test installing a module - unrecognised module given""" + assert self.mods_install.install("foo") is False + + def test_modules_install_trimgalore(self): + """Test installing a module - TrimGalore!""" + assert self.mods_install.install("trimgalore") is not False + assert self.mods_install.dir is not None + module_path = Path(self.mods_install.dir, "modules", "nf-core", "trimgalore") + assert os.path.exists(module_path) + + def test_modules_install_trimgalore_twice(self): + """Test installing a module - TrimGalore! already there""" + self.mods_install.install("trimgalore") + assert self.mods_install.install("trimgalore") is True + + def test_modules_install_from_gitlab(self): + """Test installing a module from GitLab""" + assert self.mods_install_gitlab.install("fastqc") is True + + def test_modules_install_different_branch_fail(self): + """Test installing a module from a different branch""" + install_obj = ModuleInstall(self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH) + # The FastQC module does not exists in the branch-test branch + assert install_obj.install("fastqc") is False + + def test_modules_install_different_branch_succeed(self): + """Test installing a module from a different branch""" + install_obj = ModuleInstall(self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH) + # The fastp module does exists in the branch-test branch + assert install_obj.install("fastp") is True + + # Verify that the branch entry was added correctly + modules_json = ModulesJson(self.pipeline_dir) + assert ( + modules_json.get_component_branch(self.component_type, "fastp", GITLAB_URL, GITLAB_REPO) + == 
GITLAB_BRANCH_TEST_BRANCH + ) + + def test_modules_install_tracking(self): + """Test installing a module and finding 'modules' in the installed_by section of modules.json""" + self.mods_install.install("trimgalore") + + # Verify that the installed_by entry was added correctly + modules_json = ModulesJson(self.pipeline_dir) + mod_json = modules_json.get_modules_json() + assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["modules"]["nf-core"]["trimgalore"][ + "installed_by" + ] == ["modules"] + + def test_modules_install_alternate_remote(self): + """Test installing a module from a different remote with the same organization path""" + install_obj = ModuleInstall(self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_ORG_PATH_BRANCH) + # Install fastqc from GitLab which is also installed from GitHub with the same org_path + with pytest.raises(Exception) as excinfo: + install_obj.install("fastqc") + assert "Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value) diff --git a/tests/modules/test_lint.py b/tests/modules/test_lint.py new file mode 100644 index 000000000..6448916ac --- /dev/null +++ b/tests/modules/test_lint.py @@ -0,0 +1,865 @@ +import json +from pathlib import Path +from typing import Union + +import pytest +import yaml +from git.repo import Repo + +import nf_core.modules +from nf_core.modules.lint import main_nf +from nf_core.utils import set_wd + +from ..test_modules import TestModules +from ..utils import GITLAB_NFTEST_BRANCH, GITLAB_URL +from .test_patch import BISMARK_ALIGN, CORRECT_SHA, PATCH_BRANCH, REPO_NAME, modify_main_nf + +PROCESS_LABEL_GOOD = ( + """ + label 'process_high' + cpus 12 + """, + 1, + 0, + 0, +) +PROCESS_LABEL_NON_ALPHANUMERIC = ( + """ + label 'a:label:with:colons' + cpus 12 + """, + 0, + 2, + 0, +) +PROCESS_LABEL_GOOD_CONFLICTING = ( + """ + label 'process_high' + label 'process_low' + cpus 12 + """, + 0, + 1, + 0, +) +PROCESS_LABEL_GOOD_DUPLICATES = ( + """ + label 'process_high' + 
label 'process_high' + cpus 12 + """, + 0, + 2, + 0, +) +PROCESS_LABEL_GOOD_AND_NONSTANDARD = ( + """ + label 'process_high' + label 'process_extra_label' + cpus 12 + """, + 1, + 1, + 0, +) +PROCESS_LABEL_NONSTANDARD = ( + """ + label 'process_extra_label' + cpus 12 + """, + 0, + 2, + 0, +) +PROCESS_LABEL_NONSTANDARD_DUPLICATES = ( + """ + label process_extra_label + label process_extra_label + cpus 12 + """, + 0, + 3, + 0, +) +PROCESS_LABEL_NONE_FOUND = ( + """ + cpus 12 + """, + 0, + 1, + 0, +) + +PROCESS_LABEL_TEST_CASES = [ + PROCESS_LABEL_GOOD, + PROCESS_LABEL_NON_ALPHANUMERIC, + PROCESS_LABEL_GOOD_CONFLICTING, + PROCESS_LABEL_GOOD_DUPLICATES, + PROCESS_LABEL_GOOD_AND_NONSTANDARD, + PROCESS_LABEL_NONSTANDARD, + PROCESS_LABEL_NONSTANDARD_DUPLICATES, + PROCESS_LABEL_NONE_FOUND, +] + + +# Test cases for linting the container definitions + +CONTAINER_SINGLE_GOOD = ( + "Single-line container definition should pass", + """ + container "quay.io/nf-core/gatk:4.4.0.0" //Biocontainers is missing a package + """, + 2, # passed + 0, # warned + 0, # failed +) + +CONTAINER_TWO_LINKS_GOOD = ( + "Multi-line container definition should pass", + """ + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.4.0.0--py36hdfd78af_0': + 'biocontainers/gatk4:4.4.0.0--py36hdfd78af_0' }" + """, + 6, + 0, + 0, +) + +CONTAINER_WITH_SPACE_BAD = ( + "Space in container URL should fail", + """ + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.4.0.0--py36hdfd78af_0 ': + 'biocontainers/gatk4:4.4.0.0--py36hdfd78af_0' }" + """, + 5, + 0, + 1, +) + +CONTAINER_MULTIPLE_DBLQUOTES_BAD = ( + "Incorrect quoting of container string should fail", + """ + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/gatk4:4.4.0.0--py36hdfd78af_0 ': + "biocontainers/gatk4:4.4.0.0--py36hdfd78af_0" }" + """, + 4, + 0, + 1, +) + +CONTAINER_TEST_CASES = [ + CONTAINER_SINGLE_GOOD, + CONTAINER_TWO_LINKS_GOOD, + CONTAINER_WITH_SPACE_BAD, + CONTAINER_MULTIPLE_DBLQUOTES_BAD, +] + + +class TestModulesCreate(TestModules): + def _setup_patch(self, pipeline_dir: Union[str, Path], modify_module: bool): + install_obj = nf_core.modules.ModuleInstall( + pipeline_dir, + prompt=False, + force=False, + remote_url=GITLAB_URL, + branch=PATCH_BRANCH, + sha=CORRECT_SHA, + ) + + # Install the module + install_obj.install(BISMARK_ALIGN) + + if modify_module: + # Modify the module + module_path = Path(pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) + modify_main_nf(module_path / "main.nf") + + def test_modules_lint_trimgalore(self): + """Test linting the TrimGalore! module""" + self.mods_install.install("trimgalore") + module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir) + module_lint.lint(print_results=False, module="trimgalore") + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + + def test_modules_lint_empty(self): + """Test linting a pipeline with no modules installed""" + self.mods_remove.remove("fastqc", force=True) + self.mods_remove.remove("multiqc", force=True) + with pytest.raises(LookupError): + nf_core.modules.ModuleLint(dir=self.pipeline_dir) + + def test_modules_lint_new_modules(self): + """lint a new module""" + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, all_modules=True) + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + + def test_modules_lint_no_gitlab(self): + """Test linting a pipeline with no modules 
installed""" + self.mods_remove.remove("fastqc", force=True) + self.mods_remove.remove("multiqc", force=True) + with pytest.raises(LookupError): + nf_core.modules.ModuleLint(dir=self.pipeline_dir, remote_url=GITLAB_URL) + + def test_modules_lint_gitlab_modules(self): + """Lint modules from a different remote""" + self.mods_install_gitlab.install("fastqc") + self.mods_install_gitlab.install("multiqc") + module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir, remote_url=GITLAB_URL) + module_lint.lint(print_results=False, all_modules=True) + assert len(module_lint.failed) == 2 + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + + def test_modules_lint_multiple_remotes(self): + """Lint modules from a different remote""" + self.mods_install_gitlab.install("multiqc") + module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir, remote_url=GITLAB_URL) + module_lint.lint(print_results=False, all_modules=True) + assert len(module_lint.failed) == 1 + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + + def test_modules_lint_registry(self): + """Test linting the samtools module and alternative registry""" + self.mods_install.install("samtools") + module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir, registry="public.ecr.aws") + module_lint.lint(print_results=False, module="samtools") + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir) + module_lint.lint(print_results=False, module="samtools") + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + + def test_modules_lint_patched_modules(self): + """ + Test creating a patch file and applying it to a new version of the the files + """ + 
self._setup_patch(str(self.pipeline_dir), True) + + # Create a patch file + patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) + patch_obj.patch(BISMARK_ALIGN) + + # change temporarily working directory to the pipeline directory + # to avoid error from try_apply_patch() during linting + with set_wd(self.pipeline_dir): + module_lint = nf_core.modules.ModuleLint( + dir=self.pipeline_dir, + remote_url=GITLAB_URL, + branch=PATCH_BRANCH, + hide_progress=True, + ) + module_lint.lint( + print_results=False, + all_modules=True, + ) + + assert len(module_lint.failed) == 1 + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + + def test_modules_lint_check_process_labels(self): + for test_case in PROCESS_LABEL_TEST_CASES: + process, passed, warned, failed = test_case + mocked_ModuleLint = MockModuleLint() + main_nf.check_process_labels(mocked_ModuleLint, process.splitlines()) + assert len(mocked_ModuleLint.passed) == passed + assert len(mocked_ModuleLint.warned) == warned + assert len(mocked_ModuleLint.failed) == failed + + def test_modules_lint_check_url(self): + for test_case in CONTAINER_TEST_CASES: + test, process, passed, warned, failed = test_case + mocked_ModuleLint = MockModuleLint() + for line in process.splitlines(): + if line.strip(): + main_nf.check_container_link_line(mocked_ModuleLint, line, registry="quay.io") + + assert ( + len(mocked_ModuleLint.passed) == passed + ), f"{test}: Expected {passed} PASS, got {len(mocked_ModuleLint.passed)}." + assert ( + len(mocked_ModuleLint.warned) == warned + ), f"{test}: Expected {warned} WARN, got {len(mocked_ModuleLint.warned)}." + assert ( + len(mocked_ModuleLint.failed) == failed + ), f"{test}: Expected {failed} FAIL, got {len(mocked_ModuleLint.failed)}." 
+ + def test_modules_lint_snapshot_file(self): + """Test linting a module with a snapshot file""" + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + + def test_modules_lint_snapshot_file_missing_fail(self): + """Test linting a module with a snapshot file missing, which should fail""" + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test.snap", + ).unlink() + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test.snap", + ).touch() + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "test_snapshot_exists" + + def test_modules_lint_snapshot_file_not_needed(self): + """Test linting a module which doesn't need a snapshot file by removing the snapshot keyword in the main.nf.test file""" + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test", + ) + ) as fh: + content = fh.read() + new_content = content.replace("snapshot(", "snap (") + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test", + ), + "w", + ) as fh: + fh.write(new_content) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 
0 + assert len(module_lint.warned) >= 0 + + def test_modules_environment_yml_file_doesnt_exists(self): + """Test linting a module with an environment.yml file""" + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml").rename( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml.bak", + ) + ) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml.bak", + ).rename( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml", + ) + ) + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "environment_yml_exists" + + def test_modules_environment_yml_file_sorted_correctly(self): + """Test linting a module with a correctly sorted environment.yml file""" + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + + def test_modules_environment_yml_file_sorted_incorrectly(self): + """Test linting a module with an incorrectly sorted environment.yml file""" + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml", + ) + ) as fh: + yaml_content = yaml.safe_load(fh) + # Add a new dependency to the environment.yml file and reverse the order + yaml_content["dependencies"].append("z") + yaml_content["dependencies"].reverse() + yaml_content = yaml.dump(yaml_content) + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + 
"bpipe", + "test", + "environment.yml", + ), + "w", + ) as fh: + fh.write(yaml_content) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + # we fix the sorting on the fly, so this should pass + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + + def test_modules_environment_yml_file_not_array(self): + """Test linting a module with an incorrectly formatted environment.yml file""" + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml", + ) + ) as fh: + yaml_content = yaml.safe_load(fh) + yaml_content["dependencies"] = "z" + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml", + ), + "w", + ) as fh: + fh.write(yaml.dump(yaml_content)) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "environment_yml_valid" + + def test_modules_environment_yml_file_name_mismatch(self): + """Test linting a module with a different name in the environment.yml file""" + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml", + ) + ) as fh: + yaml_content = yaml.safe_load(fh) + yaml_content["name"] = "bpipe-test" + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml", + ), + "w", + ) as fh: + fh.write(yaml.dump(yaml_content)) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + # reset 
changes + yaml_content["name"] = "bpipe_test" + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml", + ), + "w", + ) as fh: + fh.write(yaml.dump(yaml_content)) + + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "environment_yml_name" + + def test_modules_meta_yml_incorrect_licence_field(self): + """Test linting a module with an incorrect Licence field in meta.yml""" + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml")) as fh: + meta_yml = yaml.safe_load(fh) + meta_yml["tools"][0]["bpipe"]["licence"] = "[MIT]" + with open( + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml"), + "w", + ) as fh: + fh.write(yaml.dump(meta_yml)) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + + # reset changes + meta_yml["tools"][0]["bpipe"]["licence"] = ["MIT"] + with open( + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml"), + "w", + ) as fh: + fh.write(yaml.dump(meta_yml)) + + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) >= 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "meta_yml_valid" + + def test_modules_meta_yml_input_mismatch(self): + """Test linting a module with an extra entry in input fields in meta.yml compared to module.input""" + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf")) as fh: + main_nf = fh.read() + main_nf_new = main_nf.replace("path bam", "path bai") + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: + fh.write(main_nf_new) + module_lint = 
nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: + fh.write(main_nf) + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) >= 0 + assert len(module_lint.warned) == 2 + lint_tests = [x.lint_test for x in module_lint.warned] + # check that it is there twice: + assert lint_tests.count("meta_input_meta_only") == 1 + assert lint_tests.count("meta_input_main_only") == 1 + + def test_modules_meta_yml_output_mismatch(self): + """Test linting a module with an extra entry in output fields in meta.yml compared to module.output""" + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf")) as fh: + main_nf = fh.read() + main_nf_new = main_nf.replace("emit: bam", "emit: bai") + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: + fh.write(main_nf_new) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: + fh.write(main_nf) + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) >= 0 + assert len(module_lint.warned) == 2 + lint_tests = [x.lint_test for x in module_lint.warned] + # check that it is there twice: + assert lint_tests.count("meta_output_meta_only") == 1 + assert lint_tests.count("meta_output_main_only") == 1 + + def test_modules_meta_yml_incorrect_name(self): + """Test linting a module with an incorrect name in meta.yml""" + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml")) as fh: + meta_yml = yaml.safe_load(fh) + meta_yml["name"] = "bpipe/test" + # need 
to make the same change to the environment.yml file + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml", + ) + ) as fh: + environment_yml = yaml.safe_load(fh) + environment_yml["name"] = "bpipe/test" + with open( + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml"), + "w", + ) as fh: + fh.write(yaml.dump(meta_yml)) + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml", + ), + "w", + ) as fh: + fh.write(yaml.dump(environment_yml)) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + + # reset changes + meta_yml["name"] = "bpipe_test" + with open( + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml"), + "w", + ) as fh: + fh.write(yaml.dump(meta_yml)) + environment_yml["name"] = "bpipe_test" + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "environment.yml", + ), + "w", + ) as fh: + fh.write(yaml.dump(environment_yml)) + + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) >= 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "meta_name" + + def test_modules_missing_test_dir(self): + """Test linting a module with a missing test directory""" + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests").rename( + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests.bak") + ) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests.bak").rename( + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests") + ) + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ 
for x in module_lint.failed]}" + assert len(module_lint.passed) >= 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "test_dir_exists" + + def test_modules_missing_test_main_nf(self): + """Test linting a module with a missing test/main.nf file""" + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test", + ).rename( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test.bak", + ) + ) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test.bak", + ).rename( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test", + ) + ) + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) >= 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "test_main_nf_exists" + + def test_modules_unused_pytest_files(self): + """Test linting a nf-test module with files still present in `tests/modules/`""" + Path(self.nfcore_modules, "tests", "modules", "bpipe", "test").mkdir(parents=True, exist_ok=True) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + Path(self.nfcore_modules, "tests", "modules", "bpipe", "test").rmdir() + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) >= 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "test_old_test_dir" + + def test_nftest_failing_linting(self): + """Test linting a module which includes other modules in nf-test tests. 
+ Linting tests""" + # Clone modules repo with testing modules + tmp_dir = self.nfcore_modules.parent + self.nfcore_modules = Path(tmp_dir, "modules-test") + Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_NFTEST_BRANCH) + + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="kallisto/quant") + + assert len(module_lint.failed) == 3, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) >= 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "environment_yml_valid" + assert module_lint.failed[1].lint_test == "meta_yml_valid" + assert module_lint.failed[2].lint_test == "test_main_tags" + assert "kallisto/index" in module_lint.failed[2].message + + def test_modules_absent_version(self): + """Test linting a nf-test module if the versions is absent in the snapshot file `""" + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test.snap", + ) + ) as fh: + content = fh.read() + new_content = content.replace("versions", "foo") + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test.snap", + ), + "w", + ) as fh: + fh.write(new_content) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + with open( + Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test.snap", + ), + "w", + ) as fh: + fh.write(content) + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) >= 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "test_snap_versions" + + def test_modules_empty_file_in_snapshot(self): + """Test linting a nf-test module with an empty file sha sum in the test snapshot, 
which should make it fail (if it is not a stub)""" + snap_file = Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test.snap", + ) + snap = json.load(snap_file.open()) + content = snap_file.read_text() + snap["my test"]["content"][0]["0"] = "test:md5,d41d8cd98f00b204e9800998ecf8427e" + + with open(snap_file, "w") as fh: + json.dump(snap, fh) + + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "test_snap_md5sum" + + # reset the file + with open(snap_file, "w") as fh: + fh.write(content) + + def test_modules_empty_file_in_stub_snapshot(self): + """Test linting a nf-test module with an empty file sha sum in the stub test snapshot, which should make it not fail""" + snap_file = Path( + self.nfcore_modules, + "modules", + "nf-core", + "bpipe", + "test", + "tests", + "main.nf.test.snap", + ) + snap = json.load(snap_file.open()) + content = snap_file.read_text() + snap["my_test_stub"] = {"content": [{"0": "test:md5,d41d8cd98f00b204e9800998ecf8427e", "versions": {}}]} + + with open(snap_file, "w") as fh: + json.dump(snap, fh) + + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + assert any(x.lint_test == "test_snap_md5sum" for x in module_lint.passed) + + # reset the file + with open(snap_file, "w") as fh: + fh.write(content) + + +# A skeleton object with the passed/warned/failed list attrs +# Use this in place of a ModuleLint object to test behaviour of +# linting methods 
which don't need the full setup +class MockModuleLint: + def __init__(self): + self.passed = [] + self.warned = [] + self.failed = [] + + self.main_nf = "main_nf" diff --git a/tests/modules/test_list.py b/tests/modules/test_list.py new file mode 100644 index 000000000..81484cc8f --- /dev/null +++ b/tests/modules/test_list.py @@ -0,0 +1,126 @@ +import json +from pathlib import Path + +import yaml +from rich.console import Console + +import nf_core.modules + +from ..test_modules import TestModules +from ..utils import GITLAB_DEFAULT_BRANCH, GITLAB_URL + + +class TestModulesCreate(TestModules): + def test_modules_list_remote(self): + """Test listing available modules""" + mods_list = nf_core.modules.ModuleList(None, remote=True) + listed_mods = mods_list.list_components() + console = Console(record=True) + console.print(listed_mods) + output = console.export_text() + assert "fastqc" in output + + def test_modules_list_remote_gitlab(self): + """Test listing the modules in the remote gitlab repo""" + mods_list = nf_core.modules.ModuleList(None, remote=True, remote_url=GITLAB_URL, branch=GITLAB_DEFAULT_BRANCH) + listed_mods = mods_list.list_components() + console = Console(record=True) + console.print(listed_mods) + output = console.export_text() + assert "fastqc" in output + + def test_modules_list_pipeline(self): + """Test listing locally installed modules""" + mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False) + listed_mods = mods_list.list_components() + console = Console(record=True) + console.print(listed_mods) + output = console.export_text() + assert "fastqc" in output + assert "multiqc" in output + + def test_modules_install_and_list_pipeline(self): + """Test listing locally installed modules""" + self.mods_install.install("trimgalore") + mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False) + listed_mods = mods_list.list_components() + console = Console(record=True) + console.print(listed_mods) + output = 
console.export_text() + assert "trimgalore" in output + + def test_modules_install_gitlab_and_list_pipeline(self): + """Test listing locally installed modules""" + self.mods_install_gitlab.install("fastqc") + mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False) + listed_mods = mods_list.list_components() + console = Console(record=True) + console.print(listed_mods) + output = console.export_text() + assert "fastqc" in output + + def test_modules_list_local_json(self): + """Test listing locally installed modules as JSON""" + mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False) + listed_mods = mods_list.list_components(print_json=True) + listed_mods = json.loads(listed_mods) + assert "fastqc" in listed_mods + assert "multiqc" in listed_mods + + def test_modules_list_remote_json(self): + """Test listing available modules as JSON""" + mods_list = nf_core.modules.ModuleList(None, remote=True) + listed_mods = mods_list.list_components(print_json=True) + listed_mods = json.loads(listed_mods) + assert "fastqc" in listed_mods + assert "multiqc" in listed_mods + + def test_modules_list_with_one_keyword(self): + """Test listing available modules with one keyword""" + mods_list = nf_core.modules.ModuleList(None, remote=True) + listed_mods = mods_list.list_components(keywords=["qc"]) + console = Console(record=True) + console.print(listed_mods) + output = console.export_text() + assert "multiqc" in output + + def test_modules_list_with_keywords(self): + """Test listing available modules with multiple keywords""" + mods_list = nf_core.modules.ModuleList(None, remote=True) + listed_mods = mods_list.list_components(keywords=["fastq", "qc"]) + console = Console(record=True) + console.print(listed_mods) + output = console.export_text() + assert "fastqc" in output + + def test_modules_list_with_unused_keyword(self): + """Test listing available modules with an unused keyword""" + mods_list = nf_core.modules.ModuleList(None, remote=True) + with 
self.assertLogs(level="INFO") as log: + listed_mods = mods_list.list_components(keywords=["you_will_never_find_me"]) + self.assertIn("No available", log.output[0]) + # expect empty list + assert listed_mods == "" + + def test_modules_list_in_wrong_repo_fail(self): + """Test listing available modules in a non-pipeline repo""" + # modify repotype in .nf-core.yml + with open(Path(self.pipeline_dir, ".nf-core.yml")) as fh: + nf_core_yml = yaml.safe_load(fh) + nf_core_yml_orig = nf_core_yml.copy() + nf_core_yml["repository_type"] = "modules" + nf_core_yml["org_path"] = "nf-core" + + print(nf_core_yml) + with open(Path(self.pipeline_dir, ".nf-core.yml"), "w") as fh: + yaml.safe_dump(nf_core_yml, fh) + # expect error logged + with self.assertLogs(level="ERROR") as log: + mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False) + listed_mods = mods_list.list_components() + self.assertIn("must be run from a pipeline directory", log.output[0]) + # expect empty list + assert listed_mods == "" + # restore .nf-core.yml + with open(Path(self.pipeline_dir, ".nf-core.yml"), "w") as fh: + yaml.safe_dump(nf_core_yml_orig, fh) diff --git a/tests/modules/test_modules_json.py b/tests/modules/test_modules_json.py new file mode 100644 index 000000000..319b5ad65 --- /dev/null +++ b/tests/modules/test_modules_json.py @@ -0,0 +1,245 @@ +import copy +import json +import shutil +from pathlib import Path + +from nf_core.modules.modules_json import ModulesJson +from nf_core.modules.modules_repo import ( + NF_CORE_MODULES_DEFAULT_BRANCH, + NF_CORE_MODULES_NAME, + NF_CORE_MODULES_REMOTE, + ModulesRepo, +) +from nf_core.modules.patch import ModulePatch + +from ..test_modules import TestModules + + +class TestModulesCreate(TestModules): + def test_get_modules_json(self): + """Checks that the get_modules_json function returns the correct result""" + mod_json_path = Path(self.pipeline_dir, "modules.json") + with open(mod_json_path) as fh: + try: + mod_json_sb = json.load(fh) + except 
json.JSONDecodeError as e: + raise UserWarning(f"Unable to load JSON file '{mod_json_path}' due to error {e}") + + mod_json_obj = ModulesJson(self.pipeline_dir) + mod_json = mod_json_obj.get_modules_json() + + # Check that the modules.json hasn't changed + assert mod_json == mod_json_sb + + def test_mod_json_update(self): + """Checks whether the update function works properly""" + mod_json_obj = ModulesJson(self.pipeline_dir) + # Update the modules.json file + mod_repo_obj = ModulesRepo() + mod_json_obj.update("modules", mod_repo_obj, "MODULE_NAME", "GIT_SHA", "modules", write_file=False) + mod_json = mod_json_obj.get_modules_json() + assert "MODULE_NAME" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"] + assert "git_sha" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["MODULE_NAME"] + assert "GIT_SHA" == mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["MODULE_NAME"]["git_sha"] + assert ( + NF_CORE_MODULES_DEFAULT_BRANCH + == mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["MODULE_NAME"]["branch"] + ) + assert ( + "modules" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["MODULE_NAME"]["installed_by"] + ) + + def test_mod_json_create(self): + """Test creating a modules.json file from scratch""" + mod_json_path = Path(self.pipeline_dir, "modules.json") + # Remove the existing modules.json file + mod_json_path.unlink() + + # Create the new modules.json file + # (There are no prompts as long as there are only nf-core modules) + ModulesJson(self.pipeline_dir).create() + + # Check that the file exists + assert (mod_json_path).exists() + + # Get the contents of the file + mod_json_obj = ModulesJson(self.pipeline_dir) + mod_json = mod_json_obj.get_modules_json() + + mods = ["fastqc", "multiqc"] + for mod in mods: + assert mod in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"] + assert "git_sha" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"][mod] + 
assert "branch" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"][mod] + + def _modify_main_nf(self, path): + """Modify a file to test patch creation""" + with open(path) as fh: + lines = fh.readlines() + # Modify $meta.id to $meta.single_end + lines[1] = ' tag "$meta.single_end"\n' + with open(path, "w") as fh: + fh.writelines(lines) + + def test_mod_json_create_with_patch(self): + """Test creating a modules.json file from scratch when there are patched modules""" + mod_json_path = Path(self.pipeline_dir, "modules.json") + + # Modify the module + module_path = Path(self.pipeline_dir, "modules", "nf-core", "fastqc") + self._modify_main_nf(module_path / "main.nf") + + # Try creating a patch file + patch_obj = ModulePatch(self.pipeline_dir, NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_DEFAULT_BRANCH) + patch_obj.patch("fastqc") + + # Remove the existing modules.json file + mod_json_path.unlink() + + # Create the new modules.json file + ModulesJson(self.pipeline_dir).create() + + # Check that the file exists + assert mod_json_path.is_file() + + # Get the contents of the file + mod_json_obj = ModulesJson(self.pipeline_dir) + mod_json = mod_json_obj.get_modules_json() + + # Check that fastqc is in the file + assert "fastqc" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"] + assert "git_sha" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["fastqc"] + assert "branch" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["fastqc"] + + # Check that fastqc/main.nf maintains the changes + with open(module_path / "main.nf") as fh: + lines = fh.readlines() + assert lines[1] == ' tag "$meta.single_end"\n' + + def test_mod_json_up_to_date(self): + """ + Checks if the modules.json file is up to date + when no changes have been made to the pipeline + """ + mod_json_obj = ModulesJson(self.pipeline_dir) + mod_json_before = mod_json_obj.get_modules_json() + mod_json_obj.check_up_to_date() + mod_json_after = 
mod_json_obj.get_modules_json() + + # Check that the modules.json hasn't changed + assert mod_json_before == mod_json_after + + def test_mod_json_up_to_date_module_removed(self): + """ + Reinstall a module that has an entry in the modules.json + but is missing in the pipeline + """ + # Remove the fastqc module + fastqc_path = Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "fastqc") + shutil.rmtree(fastqc_path) + + # Check that the modules.json file is up to date, and reinstall the module + mod_json_obj = ModulesJson(self.pipeline_dir) + mod_json_obj.check_up_to_date() + + # Check that the module has been reinstalled + files = ["main.nf", "meta.yml"] + assert fastqc_path.exists() + for f in files: + assert Path(fastqc_path, f).exists() + + def test_mod_json_up_to_date_reinstall_fails(self): + """ + Try reinstalling a module where the git_sha is invalid + """ + mod_json_obj = ModulesJson(self.pipeline_dir) + + # Update the fastqc module entry to an invalid git_sha + mod_json_obj.update("modules", ModulesRepo(), "fastqc", "INVALID_GIT_SHA", "modules", write_file=True) + + # Remove the fastqc module + fastqc_path = Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "fastqc") + shutil.rmtree(fastqc_path) + + # Check that the modules.json file is up to date, and remove the fastqc module entry + mod_json_obj.check_up_to_date() + mod_json = mod_json_obj.get_modules_json() + + # Check that the module has been removed from the modules.json + assert "fastqc" not in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"] + + def test_mod_json_repo_present(self): + """Tests the repo_present function""" + mod_json_obj = ModulesJson(self.pipeline_dir) + + assert mod_json_obj.repo_present(NF_CORE_MODULES_REMOTE) is True + assert mod_json_obj.repo_present("INVALID_REPO") is False + + def test_mod_json_module_present(self): + """Tests the module_present function""" + mod_json_obj = ModulesJson(self.pipeline_dir) + + assert 
mod_json_obj.module_present("fastqc", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME) is True + assert mod_json_obj.module_present("INVALID_MODULE", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME) is False + assert mod_json_obj.module_present("fastqc", "INVALID_REPO", "INVALID_DIR") is False + assert mod_json_obj.module_present("INVALID_MODULE", "INVALID_REPO", "INVALID_DIR") is False + + def test_mod_json_get_module_version(self): + """Test the get_module_version function""" + mod_json_obj = ModulesJson(self.pipeline_dir) + mod_json = mod_json_obj.get_modules_json() + assert ( + mod_json_obj.get_module_version("fastqc", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME) + == mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["fastqc"]["git_sha"] + ) + assert mod_json_obj.get_module_version("INVALID_MODULE", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME) is None + + def test_mod_json_dump(self): + """Tests the dump function""" + mod_json_obj = ModulesJson(self.pipeline_dir) + mod_json = mod_json_obj.get_modules_json() + # Remove the modules.json file + mod_json_path = Path(self.pipeline_dir, "modules.json") + mod_json_path.unlink() + + # Check that the dump function creates the file + mod_json_obj.dump() + assert mod_json_path.exists() + + # Check that the dump function writes the correct content + with open(mod_json_path) as f: + try: + mod_json_new = json.load(f) + except json.JSONDecodeError as e: + raise UserWarning(f"Unable to load JSON file '{mod_json_path}' due to error {e}") + assert mod_json == mod_json_new + + def test_mod_json_with_empty_modules_value(self): + # Load module.json and remove the modules entry + mod_json_obj = ModulesJson(self.pipeline_dir) + mod_json_obj.create() # Create modules.json explicitly to get correct module sha + mod_json_orig = mod_json_obj.get_modules_json() + mod_json = copy.deepcopy(mod_json_orig) + mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"] = "" + # save the altered module.json and load it again to check if it 
will fix itself + mod_json_obj.modules_json = mod_json + mod_json_obj.dump() + mod_json_obj_new = ModulesJson(self.pipeline_dir) + mod_json_obj_new.check_up_to_date() + mod_json_new = mod_json_obj_new.get_modules_json() + assert mod_json_orig == mod_json_new + + def test_mod_json_with_missing_modules_entry(self): + # Load module.json and remove the modules entry + mod_json_obj = ModulesJson(self.pipeline_dir) + mod_json_obj.create() # Create modules.json explicitly to get correct module sha + mod_json_orig = mod_json_obj.get_modules_json() + mod_json = copy.deepcopy(mod_json_orig) + mod_json["repos"][NF_CORE_MODULES_REMOTE].pop("modules") + # save the altered module.json and load it again to check if it will fix itself + mod_json_obj.modules_json = mod_json + mod_json_obj.dump() + mod_json_obj_new = ModulesJson(self.pipeline_dir) + mod_json_obj_new.check_up_to_date() + mod_json_new = mod_json_obj_new.get_modules_json() + assert mod_json_orig == mod_json_new diff --git a/tests/modules/test_patch.py b/tests/modules/test_patch.py new file mode 100644 index 000000000..230bb1ce8 --- /dev/null +++ b/tests/modules/test_patch.py @@ -0,0 +1,365 @@ +import os +import tempfile +from pathlib import Path +from unittest import mock + +import pytest + +import nf_core.components.components_command +import nf_core.modules + +from ..test_modules import TestModules +from ..utils import GITLAB_URL + +""" +Test the 'nf-core modules patch' command + +Uses a branch (patch-tester) in the GitLab nf-core/modules-test repo when +testing if the update commands works correctly with patch files +""" + +ORG_SHA = "002623ccc88a3b0cb302c7d8f13792a95354d9f2" +CORRECT_SHA = "1dff30bfca2d98eb7ac7b09269a15e822451d99f" +SUCCEED_SHA = "ba15c20c032c549d77c5773659f19c2927daf48e" +FAIL_SHA = "67b642d4471c4005220a342cad3818d5ba2b5a73" +BISMARK_ALIGN = "bismark/align" +REPO_NAME = "nf-core-test" +PATCH_BRANCH = "patch-tester" +REPO_URL = "https://gitlab.com/nf-core/modules-test.git" + + +def 
setup_patch(pipeline_dir, modify_module): + install_obj = nf_core.modules.ModuleInstall( + pipeline_dir, prompt=False, force=False, remote_url=GITLAB_URL, branch=PATCH_BRANCH, sha=ORG_SHA + ) + + # Install the module + install_obj.install(BISMARK_ALIGN) + + if modify_module: + # Modify the module + module_path = Path(pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) + modify_main_nf(module_path / "main.nf") + + +def modify_main_nf(path): + """Modify a file to test patch creation""" + with open(path) as fh: + lines = fh.readlines() + # We want a patch file that looks something like: + # - tuple val(meta), path(reads) + # - path index + # + tuple val(meta), path(reads), path(index) + for line_index in range(len(lines)): + if lines[line_index] == " tuple val(meta), path(reads)\n": + lines[line_index] = " tuple val(meta), path(reads), path(index)\n" + elif lines[line_index] == " path index\n": + to_pop = line_index + lines.pop(to_pop) + with open(path, "w") as fh: + fh.writelines(lines) + + +class TestModulesCreate(TestModules): + def test_create_patch_no_change(self): + """Test creating a patch when there is no change to the module""" + setup_patch(self.pipeline_dir, False) + + # Try creating a patch file + patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) + with pytest.raises(UserWarning): + patch_obj.patch(BISMARK_ALIGN) + + module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) + + # Check that no patch file has been added to the directory + assert set(os.listdir(module_path)) == {"main.nf", "meta.yml"} + + # Check the 'modules.json' contains no patch file for the module + modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) + assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) is None + + def test_create_patch_change(self): + """Test creating a patch when there is a change to the module""" + setup_patch(self.pipeline_dir, True) + + # Try creating a patch file + 
patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) + patch_obj.patch(BISMARK_ALIGN) + + module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) + + patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" + # Check that a patch file with the correct name has been created + assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} + + # Check the 'modules.json' contains a patch file for the module + modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) + assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + "modules", REPO_NAME, BISMARK_ALIGN, patch_fn + ) + + # Check that the correct lines are in the patch file + with open(module_path / patch_fn) as fh: + patch_lines = fh.readlines() + module_relpath = module_path.relative_to(self.pipeline_dir) + assert f"--- {module_relpath / 'main.nf'}\n" in patch_lines, module_relpath / "main.nf" + assert f"+++ {module_relpath / 'main.nf'}\n" in patch_lines + assert "- tuple val(meta), path(reads)\n" in patch_lines + assert "- path index\n" in patch_lines + assert "+ tuple val(meta), path(reads), path(index)\n" in patch_lines + + def test_create_patch_try_apply_successful(self): + """ + Test creating a patch file and applying it to a new version of the files + """ + + setup_patch(self.pipeline_dir, True) + module_relpath = Path("modules", REPO_NAME, BISMARK_ALIGN) + module_path = Path(self.pipeline_dir, module_relpath) + + # Try creating a patch file + patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) + patch_obj.patch(BISMARK_ALIGN) + + patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" + # Check that a patch file with the correct name has been created + assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} + + # Check the 'modules.json' contains a patch file for the module + modules_json_obj = 
nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) + assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + "modules", REPO_NAME, BISMARK_ALIGN, patch_fn + ) + + update_obj = nf_core.modules.ModuleUpdate( + self.pipeline_dir, sha=SUCCEED_SHA, remote_url=GITLAB_URL, branch=PATCH_BRANCH + ) + # Install the new files + install_dir = Path(tempfile.mkdtemp()) + update_obj.install_component_files(BISMARK_ALIGN, SUCCEED_SHA, update_obj.modules_repo, install_dir) + + # Try applying the patch + module_install_dir = install_dir / BISMARK_ALIGN + patch_relpath = module_relpath / patch_fn + assert ( + update_obj.try_apply_patch(BISMARK_ALIGN, REPO_NAME, patch_relpath, module_path, module_install_dir) is True + ) + + # Move the files from the temporary directory + update_obj.move_files_from_tmp_dir(BISMARK_ALIGN, install_dir, REPO_NAME, SUCCEED_SHA) + + # Check that a patch file with the correct name has been created + assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} + + # Check the 'modules.json' contains a patch file for the module + modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) + assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + "modules", REPO_NAME, BISMARK_ALIGN, patch_fn + ) + + # Check that the correct lines are in the patch file + with open(module_path / patch_fn) as fh: + patch_lines = fh.readlines() + module_relpath = module_path.relative_to(self.pipeline_dir) + assert f"--- {module_relpath / 'main.nf'}\n" in patch_lines + assert f"+++ {module_relpath / 'main.nf'}\n" in patch_lines + assert "- tuple val(meta), path(reads)\n" in patch_lines + assert "- path index\n" in patch_lines + assert "+ tuple val(meta), path(reads), path(index)\n" in patch_lines + + # Check that 'main.nf' is updated correctly + with open(module_path / "main.nf") as fh: + main_nf_lines = fh.readlines() + # These lines should have been removed by the patch + assert " 
tuple val(meta), path(reads)\n" not in main_nf_lines + assert " path index\n" not in main_nf_lines + # This line should have been added + assert " tuple val(meta), path(reads), path(index)\n" in main_nf_lines + + def test_create_patch_try_apply_failed(self): + """ + Test creating a patch file and applying it to a new version of the files + """ + + setup_patch(self.pipeline_dir, True) + module_relpath = Path("modules", REPO_NAME, BISMARK_ALIGN) + module_path = Path(self.pipeline_dir, module_relpath) + + # Try creating a patch file + patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) + patch_obj.patch(BISMARK_ALIGN) + + patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" + # Check that a patch file with the correct name has been created + assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} + + # Check the 'modules.json' contains a patch file for the module + modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) + assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + "modules", REPO_NAME, BISMARK_ALIGN, patch_fn + ) + + update_obj = nf_core.modules.ModuleUpdate( + self.pipeline_dir, sha=FAIL_SHA, remote_url=GITLAB_URL, branch=PATCH_BRANCH + ) + # Install the new files + install_dir = Path(tempfile.mkdtemp()) + update_obj.install_component_files(BISMARK_ALIGN, FAIL_SHA, update_obj.modules_repo, install_dir) + + # Try applying the patch + module_install_dir = install_dir / BISMARK_ALIGN + patch_path = module_relpath / patch_fn + assert ( + update_obj.try_apply_patch(BISMARK_ALIGN, REPO_NAME, patch_path, module_path, module_install_dir) is False + ) + + def test_create_patch_update_success(self): + """ + Test creating a patch file and then updating the module + + Should have the same effect as 'test_create_patch_try_apply_successful' + but uses higher level api + """ + + setup_patch(self.pipeline_dir, True) + module_path = Path(self.pipeline_dir, 
"modules", REPO_NAME, BISMARK_ALIGN) + + # Try creating a patch file + patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) + patch_obj.patch(BISMARK_ALIGN) + + patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" + # Check that a patch file with the correct name has been created + assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} + + # Check the 'modules.json' contains a patch file for the module + modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) + assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, GITLAB_URL, REPO_NAME) == Path( + "modules", REPO_NAME, BISMARK_ALIGN, patch_fn + ) + + # Update the module + update_obj = nf_core.modules.ModuleUpdate( + self.pipeline_dir, + sha=SUCCEED_SHA, + show_diff=False, + update_deps=True, + remote_url=GITLAB_URL, + branch=PATCH_BRANCH, + ) + assert update_obj.update(BISMARK_ALIGN) + + # Check that a patch file with the correct name has been created + assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} + + # Check the 'modules.json' contains a patch file for the module + modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) + assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, GITLAB_URL, REPO_NAME) == Path( + "modules", REPO_NAME, BISMARK_ALIGN, patch_fn + ), modules_json_obj.get_patch_fn(BISMARK_ALIGN, GITLAB_URL, REPO_NAME) + + # Check that the correct lines are in the patch file + with open(module_path / patch_fn) as fh: + patch_lines = fh.readlines() + module_relpath = module_path.relative_to(self.pipeline_dir) + assert f"--- {module_relpath / 'main.nf'}\n" in patch_lines + assert f"+++ {module_relpath / 'main.nf'}\n" in patch_lines + assert "- tuple val(meta), path(reads)\n" in patch_lines + assert "- path index\n" in patch_lines + assert "+ tuple val(meta), path(reads), path(index)\n" in patch_lines + + # Check that 'main.nf' is updated correctly + with open(module_path / "main.nf") as 
fh: + main_nf_lines = fh.readlines() + # These lines should have been removed by the patch + assert " tuple val(meta), path(reads)\n" not in main_nf_lines + assert " path index\n" not in main_nf_lines + # This line should have been added + assert " tuple val(meta), path(reads), path(index)\n" in main_nf_lines + + def test_create_patch_update_fail(self): + """ + Test creating a patch file and updating a module when there is a diff conflict + """ + + setup_patch(self.pipeline_dir, True) + module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) + + # Try creating a patch file + patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) + patch_obj.patch(BISMARK_ALIGN) + + patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" + # Check that a patch file with the correct name has been created + assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} + + # Check the 'modules.json' contains a patch file for the module + modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) + assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + "modules", REPO_NAME, BISMARK_ALIGN, patch_fn + ) + + # Save the file contents for downstream comparison + with open(module_path / patch_fn) as fh: + patch_contents = fh.read() + + update_obj = nf_core.modules.ModuleUpdate( + self.pipeline_dir, + sha=FAIL_SHA, + show_diff=False, + update_deps=True, + remote_url=GITLAB_URL, + branch=PATCH_BRANCH, + ) + update_obj.update(BISMARK_ALIGN) + + # Check that the installed files have not been affected by the attempted patch + temp_dir = Path(tempfile.mkdtemp()) + nf_core.components.components_command.ComponentCommand( + "modules", self.pipeline_dir, GITLAB_URL, PATCH_BRANCH + ).install_component_files(BISMARK_ALIGN, FAIL_SHA, update_obj.modules_repo, temp_dir) + + temp_module_dir = temp_dir / BISMARK_ALIGN + for file in os.listdir(temp_module_dir): + assert file in os.listdir(module_path) + 
with open(module_path / file) as fh: + installed = fh.read() + with open(temp_module_dir / file) as fh: + shouldbe = fh.read() + assert installed == shouldbe + + # Check that the patch file is unaffected + with open(module_path / patch_fn) as fh: + new_patch_contents = fh.read() + assert patch_contents == new_patch_contents + + def test_remove_patch(self): + """Test removing a patch file after creating one""" + setup_patch(self.pipeline_dir, True) + + # Try creating a patch file + patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) + patch_obj.patch(BISMARK_ALIGN) + + module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) + + # Check that a patch file with the correct name has been created + patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" + assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} + + # Check the 'modules.json' contains a patch file for the module + modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) + assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + "modules", REPO_NAME, BISMARK_ALIGN, patch_fn + ) + + with mock.patch.object(nf_core.components.patch.questionary, "confirm") as mock_questionary: + mock_questionary.unsafe_ask.return_value = True + patch_obj.remove(BISMARK_ALIGN) + # Check that the diff file has been removed + assert set(os.listdir(module_path)) == {"main.nf", "meta.yml"} + + # Check that the 'modules.json' entry has been removed + modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) + assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) is None diff --git a/tests/modules/test_remove.py b/tests/modules/test_remove.py new file mode 100644 index 000000000..a80c8b098 --- /dev/null +++ b/tests/modules/test_remove.py @@ -0,0 +1,26 @@ +import os +from pathlib import Path + +from ..test_modules import TestModules + + +class 
TestModulesRemove(TestModules): + def test_modules_remove_trimgalore(self): + """Test removing TrimGalore! module after installing it""" + self.mods_install.install("trimgalore") + assert self.mods_install.dir is not None + module_path = Path(self.mods_install.dir, "modules", "nf-core", "modules", "trimgalore") + assert self.mods_remove.remove("trimgalore") + assert os.path.exists(module_path) is False + + def test_modules_remove_trimgalore_uninstalled(self): + """Test removing TrimGalore! module without installing it""" + assert self.mods_remove.remove("trimgalore") is False + + def test_modules_remove_multiqc_from_gitlab(self): + """Test removing multiqc module after installing it from an alternative source""" + self.mods_install_gitlab.install("multiqc") + assert self.mods_install.dir is not None + module_path = Path(self.mods_install_gitlab.dir, "modules", "nf-core-test", "multiqc") + assert self.mods_remove_gitlab.remove("multiqc", force=True) + assert os.path.exists(module_path) is False diff --git a/tests/modules/test_update.py b/tests/modules/test_update.py new file mode 100644 index 000000000..a33aac377 --- /dev/null +++ b/tests/modules/test_update.py @@ -0,0 +1,435 @@ +import logging +import shutil +import tempfile +from pathlib import Path +from unittest import mock + +import questionary +import yaml + +import nf_core.utils +from nf_core.modules.install import ModuleInstall +from nf_core.modules.modules_json import ModulesJson +from nf_core.modules.modules_repo import NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE +from nf_core.modules.patch import ModulePatch +from nf_core.modules.update import ModuleUpdate + +from ..test_modules import TestModules +from ..utils import ( + GITLAB_BRANCH_TEST_BRANCH, + GITLAB_BRANCH_TEST_NEW_SHA, + GITLAB_BRANCH_TEST_OLD_SHA, + GITLAB_DEFAULT_BRANCH, + GITLAB_REPO, + GITLAB_URL, + OLD_TRIMGALORE_BRANCH, + OLD_TRIMGALORE_SHA, + cmp_component, +) + + +class TestModulesInstall(TestModules): + def 
test_install_and_update(self): + """Installs a module in the pipeline and updates it (no change)""" + self.mods_install.install("trimgalore") + update_obj = ModuleUpdate(self.pipeline_dir, show_diff=False) + + # Copy the module files and check that they are unaffected by the update + tmpdir = Path(tempfile.TemporaryDirectory().name) + trimgalore_tmpdir = tmpdir / "trimgalore" + trimgalore_path = Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "trimgalore") + shutil.copytree(trimgalore_path, trimgalore_tmpdir) + + assert update_obj.update("trimgalore") is True + assert cmp_component(trimgalore_tmpdir, trimgalore_path) is True + + def test_install_at_hash_and_update(self): + """Installs an old version of a module in the pipeline and updates it""" + assert self.mods_install_old.install("trimgalore") + update_obj = ModuleUpdate( + self.pipeline_dir, show_diff=False, update_deps=True, remote_url=GITLAB_URL, branch=OLD_TRIMGALORE_BRANCH + ) + + # Copy the module files and check that they are affected by the update + tmpdir = Path(tempfile.TemporaryDirectory().name) + trimgalore_tmpdir = tmpdir / "trimgalore" + trimgalore_path = Path(self.pipeline_dir, "modules", GITLAB_REPO, "trimgalore") + shutil.copytree(trimgalore_path, trimgalore_tmpdir) + + assert update_obj.update("trimgalore") is True + assert cmp_component(trimgalore_tmpdir, trimgalore_path) is False + + # Check that the modules.json is correctly updated + mod_json_obj = ModulesJson(self.pipeline_dir) + mod_json = mod_json_obj.get_modules_json() + # Get the up-to-date git_sha for the module from the ModulesRepo object + correct_git_sha = update_obj.modules_repo.get_latest_component_version("trimgalore", "modules") + current_git_sha = mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"]["git_sha"] + assert correct_git_sha == current_git_sha + + # Mock questionary answer: do not update module, only show diffs + @mock.patch.object(questionary.Question, "unsafe_ask", return_value=True) + def 
test_install_at_hash_and_update_limit_output(self, mock_prompt): + """Installs an old version of a module in the pipeline and updates it with limited output reporting""" + self.caplog.set_level(logging.INFO) + assert self.mods_install_old.install("trimgalore") + + update_obj = ModuleUpdate( + self.pipeline_dir, + show_diff=True, + update_deps=True, + remote_url=GITLAB_URL, + branch=OLD_TRIMGALORE_BRANCH, + limit_output=True, + ) + assert update_obj.update("trimgalore") + + # Check changes not shown for non-.nf files + assert "Changes in 'trimgalore/meta.yml' but not shown" in self.caplog.text + # Check changes shown for .nf files + assert "Changes in 'trimgalore/main.nf'" in self.caplog.text + for line in self.caplog.text.split("\n"): + if line.startswith("---"): + assert line.endswith("main.nf") + + def test_install_at_hash_and_update_and_save_diff_to_file(self): + """Installs an old version of a module in the pipeline and updates it""" + self.mods_install_old.install("trimgalore") + patch_path = Path(self.pipeline_dir, "trimgalore.patch") + update_obj = ModuleUpdate( + self.pipeline_dir, + save_diff_fn=patch_path, + sha=OLD_TRIMGALORE_SHA, + remote_url=GITLAB_URL, + branch=OLD_TRIMGALORE_BRANCH, + ) + + # Copy the module files and check that they are affected by the update + tmpdir = Path(tempfile.TemporaryDirectory().name) + trimgalore_tmpdir = tmpdir / "trimgalore" + trimgalore_path = Path(self.pipeline_dir, "modules", GITLAB_REPO, "trimgalore") + shutil.copytree(trimgalore_path, trimgalore_tmpdir) + + assert update_obj.update("trimgalore") is True + assert cmp_component(trimgalore_tmpdir, trimgalore_path) is True + + # TODO: Apply the patch to the module + + def test_install_at_hash_and_update_and_save_diff_to_file_limit_output(self): + """Installs an old version of a module in the pipeline and updates it""" + # Install old version of trimgalore + self.mods_install_old.install("trimgalore") + patch_path = Path(self.pipeline_dir, "trimgalore.patch") + # Update 
saving the differences to a patch file and with `limit_output` + update_obj = ModuleUpdate( + self.pipeline_dir, + save_diff_fn=patch_path, + remote_url=GITLAB_URL, + branch=OLD_TRIMGALORE_BRANCH, + limit_output=True, + ) + assert update_obj.update("trimgalore") + + # Check that the patch file was created + assert patch_path.exists(), f"Patch file was not created at {patch_path}" + + # Read the contents of the patch file + with open(patch_path) as fh: + patch_content = fh.read() + # Check changes not shown for non-.nf files + assert "Changes in 'trimgalore/meta.yml' but not shown" in patch_content + # Check changes only shown for main.nf + assert "Changes in 'trimgalore/main.nf'" in patch_content + for line in patch_content: + if line.startswith("---"): + assert line.endswith("main.nf") + + def test_update_all(self): + """Updates all modules present in the pipeline""" + update_obj = ModuleUpdate(self.pipeline_dir, update_all=True, show_diff=False) + # Get the current modules.json + assert update_obj.update() is True + + # We must reload the modules.json to get the updated version + mod_json_obj = ModulesJson(self.pipeline_dir) + mod_json = mod_json_obj.get_modules_json() + # Loop through all modules and check that they are updated (according to the modules.json file) + for mod in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME]: + correct_git_sha = list(update_obj.modules_repo.get_component_git_log(mod, "modules", depth=1))[0]["git_sha"] + current_git_sha = mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"] + assert correct_git_sha == current_git_sha + + def test_update_with_config_fixed_version(self): + """Try updating when there are entries in the .nf-core.yml""" + # Install trimgalore at the latest version + assert self.mods_install_trimgalore.install("trimgalore") + + # Fix the trimgalore version in the .nf-core.yml to an old version + update_config = {GITLAB_URL: {GITLAB_REPO: {"trimgalore": 
OLD_TRIMGALORE_SHA}}} + config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) + tools_config["update"] = update_config + with open(Path(self.pipeline_dir, config_fn), "w") as f: + yaml.dump(tools_config, f) + + # Update all modules in the pipeline + update_obj = ModuleUpdate( + self.pipeline_dir, update_all=True, show_diff=False, remote_url=GITLAB_URL, branch=OLD_TRIMGALORE_BRANCH + ) + assert update_obj.update() is True + + # Check that the git sha for trimgalore is correctly downgraded + mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + assert "trimgalore" in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO] + assert "git_sha" in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"] + assert mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"]["git_sha"] == OLD_TRIMGALORE_SHA + + def test_update_with_config_dont_update(self): + """Try updating when module is to be ignored""" + # Install an old version of trimgalore + self.mods_install_old.install("trimgalore") + + # Set the trimgalore field to no update in the .nf-core.yml + update_config = {GITLAB_URL: {GITLAB_REPO: {"trimgalore": False}}} + config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) + tools_config["update"] = update_config + with open(Path(self.pipeline_dir, config_fn), "w") as f: + yaml.dump(tools_config, f) + + # Update all modules in the pipeline + update_obj = ModuleUpdate( + self.pipeline_dir, + update_all=True, + show_diff=False, + sha=OLD_TRIMGALORE_SHA, + remote_url=GITLAB_URL, + branch=OLD_TRIMGALORE_BRANCH, + ) + assert update_obj.update() is True + + # Check that the git sha for trimgalore is correctly downgraded + mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + assert "trimgalore" in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO] + assert "git_sha" in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"] + assert 
mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"]["git_sha"] == OLD_TRIMGALORE_SHA + + def test_update_with_config_fix_all(self): + """Fix the version of all nf-core modules""" + self.mods_install_trimgalore.install("trimgalore") + + # Fix the version of all nf-core modules in the .nf-core.yml to an old version + update_config = {GITLAB_URL: OLD_TRIMGALORE_SHA} + config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) + tools_config["update"] = update_config + with open(Path(self.pipeline_dir, config_fn), "w") as f: + yaml.dump(tools_config, f) + + # Update all modules in the pipeline + update_obj = ModuleUpdate( + self.pipeline_dir, update_all=True, show_diff=False, remote_url=GITLAB_URL, branch=OLD_TRIMGALORE_BRANCH + ) + assert update_obj.update() is True + + # Check that the git sha for trimgalore is correctly downgraded + mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + assert "git_sha" in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"] + assert mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"]["git_sha"] == OLD_TRIMGALORE_SHA + + def test_update_with_config_no_updates(self): + """Don't update any nf-core modules""" + assert self.mods_install_old.install("trimgalore") + old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + + # Fix the version of all nf-core modules in the .nf-core.yml to an old version + update_config = {GITLAB_URL: False} + config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) + tools_config["update"] = update_config + with open(Path(self.pipeline_dir, config_fn), "w") as f: + yaml.dump(tools_config, f) + + # Update all modules in the pipeline + update_obj = ModuleUpdate( + self.pipeline_dir, + update_all=True, + show_diff=False, + sha=OLD_TRIMGALORE_SHA, + remote_url=GITLAB_URL, + branch=OLD_TRIMGALORE_BRANCH, + ) + assert update_obj.update() is True + + # Check that the git sha for trimgalore is correctly downgraded 
and none of the modules has changed + mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + for module in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]: + assert "git_sha" in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO][module] + assert ( + mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO][module]["git_sha"] + == old_mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO][module]["git_sha"] + ) + + def test_update_different_branch_single_module(self): + """Try updating a module in a specific branch""" + install_obj = ModuleInstall( + self.pipeline_dir, + prompt=False, + force=False, + remote_url=GITLAB_URL, + branch=GITLAB_BRANCH_TEST_BRANCH, + sha=GITLAB_BRANCH_TEST_OLD_SHA, + ) + assert install_obj.install("fastp") + + update_obj = ModuleUpdate( + self.pipeline_dir, + update_deps=True, + remote_url=GITLAB_URL, + branch=GITLAB_BRANCH_TEST_BRANCH, + show_diff=False, + ) + update_obj.update("fastp") + + # Verify that the branch entry was updated correctly + modules_json = ModulesJson(self.pipeline_dir) + assert ( + modules_json.get_component_branch(self.component_type, "fastp", GITLAB_URL, GITLAB_REPO) + == GITLAB_BRANCH_TEST_BRANCH + ) + assert modules_json.get_module_version("fastp", GITLAB_URL, GITLAB_REPO) == GITLAB_BRANCH_TEST_NEW_SHA + + def test_update_different_branch_mixed_modules_main(self): + """Try updating all modules where MultiQC is installed from main branch""" + # Install fastp + assert self.mods_install_gitlab_old.install("fastp") + + # Install MultiQC from gitlab default branch + assert self.mods_install_gitlab.install("multiqc") + + # Try updating + update_obj = ModuleUpdate(self.pipeline_dir, update_all=True, show_diff=False) + assert update_obj.update() is True + + modules_json = ModulesJson(self.pipeline_dir) + # Verify that the branch entry was updated correctly + assert ( + modules_json.get_component_branch(self.component_type, "fastp", GITLAB_URL, GITLAB_REPO) + == GITLAB_BRANCH_TEST_BRANCH + ) + assert 
modules_json.get_module_version("fastp", GITLAB_URL, GITLAB_REPO) == GITLAB_BRANCH_TEST_NEW_SHA + # MultiQC is present in both branches but should've been updated using the 'main' branch + assert ( + modules_json.get_component_branch(self.component_type, "multiqc", GITLAB_URL, GITLAB_REPO) + == GITLAB_DEFAULT_BRANCH + ) + + def test_update_different_branch_mix_modules_branch_test(self): + """Try updating all modules where MultiQC is installed from branch-test branch""" + # Install multiqc from the branch-test branch + assert self.mods_install_gitlab_old.install( + "multiqc" + ) # Force as the same module is installed from github nf-core modules repo + modules_json = ModulesJson(self.pipeline_dir) + update_obj = ModuleUpdate( + self.pipeline_dir, + update_all=True, + show_diff=False, + remote_url=GITLAB_URL, + branch=GITLAB_BRANCH_TEST_BRANCH, + sha=GITLAB_BRANCH_TEST_NEW_SHA, + ) + assert update_obj.update() + + assert ( + modules_json.get_component_branch(self.component_type, "multiqc", GITLAB_URL, GITLAB_REPO) + == GITLAB_BRANCH_TEST_BRANCH + ) + assert modules_json.get_module_version("multiqc", GITLAB_URL, GITLAB_REPO) == GITLAB_BRANCH_TEST_NEW_SHA + + # Mock questionary answer: do not update module, only show diffs + @mock.patch.object(questionary.Question, "unsafe_ask", return_value=False) + def test_update_only_show_differences(self, mock_prompt): + """Try updating all modules showing differences. + Only show diffs, don't actually save any updated files. 
+ Check that the sha in modules.json is not changed.""" + + # Update modules to a fixed old SHA + update_old = ModuleUpdate( + self.pipeline_dir, update_all=True, show_diff=False, sha="5e34754d42cd2d5d248ca8673c0a53cdf5624905" + ) + update_old.update() + + tmpdir = Path(tempfile.TemporaryDirectory().name) + shutil.copytree(Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME), tmpdir) + + update_obj = ModuleUpdate(self.pipeline_dir, update_all=True, show_diff=True) + assert ModuleUpdate(self.pipeline_dir, update_all=True, show_diff=True).update() + + mod_json = ModulesJson(self.pipeline_dir).get_modules_json() + # Loop through all modules and check that they are NOT updated (according to the modules.json file) + # A module that can be updated but shouldn't is fastqc + # Module multiqc is already up to date so don't check + mod = "fastqc" + non_updated_git_sha = list(update_obj.modules_repo.get_component_git_log(mod, "modules", depth=1))[0]["git_sha"] + current_git_sha = mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"] + assert non_updated_git_sha != current_git_sha + assert cmp_component(Path(tmpdir, mod), Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, mod)) is True + + # Mock questionary answer: do not update module, only show diffs + @mock.patch.object(questionary.Question, "unsafe_ask", return_value=False) + def test_update_only_show_differences_when_patch(self, mock_prompt): + """Try updating all modules showing differences when there's a patched module. + Don't update some of them. 
+ Check that the sha in modules.json is not changed.""" + modules_json = ModulesJson(self.pipeline_dir) + update_obj = ModuleUpdate(self.pipeline_dir, update_all=True, show_diff=True) + + # Update modules to a fixed old SHA + update_old = ModuleUpdate( + self.pipeline_dir, update_all=True, show_diff=False, sha="5e34754d42cd2d5d248ca8673c0a53cdf5624905" + ) + assert update_old.update() + + # Modify fastqc module, it will have a patch which will be applied during update + # We modify fastqc because it's one of the modules that can be updated and there's another one before it (custom/dumpsoftwareversions) + module_path = Path(self.pipeline_dir, "modules", "nf-core", "fastqc") + main_path = Path(module_path, "main.nf") + with open(main_path) as fh: + lines = fh.readlines() + for line_index in range(len(lines)): + if lines[line_index] == " label 'process_medium'\n": + lines[line_index] = " label 'process_low'\n" + break + with open(main_path, "w") as fh: + fh.writelines(lines) + # Create a patch file + patch_obj = ModulePatch(self.pipeline_dir) + patch_obj.patch("fastqc") + # Check that a patch file with the correct name has been created + assert "fastqc.diff" in [f.name for f in module_path.glob("*.diff")] + + # Update all modules + assert update_obj.update() is True + + mod_json = modules_json.get_modules_json() + # Loop through all modules and check that they are NOT updated (according to the modules.json file) + # A module that can be updated but shouldn't is fastqc + # Module multiqc is already up to date so don't check + mod = "fastqc" + correct_git_sha = list(update_obj.modules_repo.get_component_git_log(mod, "modules", depth=1))[0]["git_sha"] + current_git_sha = mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"] + assert correct_git_sha != current_git_sha + + def test_update_module_with_extra_config_file(self): + """Try updating a module with a config file""" + # Install the module + assert 
self.mods_install.install("trimgalore") + # Add a nextflow_test.config file to the module + trimgalore_path = Path(self.pipeline_dir, "modules", "nf-core", "trimgalore") + Path(trimgalore_path, "nextflow_test.config").touch() + with open(Path(trimgalore_path, "nextflow_test.config"), "w") as fh: + fh.write("params.my_param = 'my_value'\n") + # Update the module + update_obj = ModuleUpdate(self.pipeline_dir, show_diff=False) + assert update_obj.update("trimgalore") + # Check that the nextflow_test.config file is still there + assert Path(trimgalore_path, "nextflow_test.config").exists() + with open(Path(trimgalore_path, "nextflow_test.config")) as fh: + assert "params.my_param = 'my_value'" in fh.read() diff --git a/tests/modules/update.py b/tests/modules/update.py deleted file mode 100644 index e02b058fb..000000000 --- a/tests/modules/update.py +++ /dev/null @@ -1,444 +0,0 @@ -import logging -import shutil -import tempfile -from pathlib import Path -from unittest import mock - -import questionary -import yaml - -import nf_core.utils -from nf_core.modules.install import ModuleInstall -from nf_core.modules.modules_json import ModulesJson -from nf_core.modules.modules_repo import NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE -from nf_core.modules.patch import ModulePatch -from nf_core.modules.update import ModuleUpdate - -from ..utils import ( - GITLAB_BRANCH_TEST_BRANCH, - GITLAB_BRANCH_TEST_NEW_SHA, - GITLAB_BRANCH_TEST_OLD_SHA, - GITLAB_DEFAULT_BRANCH, - GITLAB_REPO, - GITLAB_URL, - OLD_TRIMGALORE_BRANCH, - OLD_TRIMGALORE_SHA, - cmp_component, -) - - -def test_install_and_update(self): - """Installs a module in the pipeline and updates it (no change)""" - self.mods_install.install("trimgalore") - update_obj = ModuleUpdate(self.pipeline_dir, show_diff=False) - - # Copy the module files and check that they are unaffected by the update - tmpdir = Path(tempfile.TemporaryDirectory().name) - trimgalore_tmpdir = tmpdir / "trimgalore" - trimgalore_path = 
Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "trimgalore") - shutil.copytree(trimgalore_path, trimgalore_tmpdir) - - assert update_obj.update("trimgalore") is True - assert cmp_component(trimgalore_tmpdir, trimgalore_path) is True - - -def test_install_at_hash_and_update(self): - """Installs an old version of a module in the pipeline and updates it""" - assert self.mods_install_old.install("trimgalore") - update_obj = ModuleUpdate( - self.pipeline_dir, show_diff=False, update_deps=True, remote_url=GITLAB_URL, branch=OLD_TRIMGALORE_BRANCH - ) - - # Copy the module files and check that they are affected by the update - tmpdir = Path(tempfile.TemporaryDirectory().name) - trimgalore_tmpdir = tmpdir / "trimgalore" - trimgalore_path = Path(self.pipeline_dir, "modules", GITLAB_REPO, "trimgalore") - shutil.copytree(trimgalore_path, trimgalore_tmpdir) - - assert update_obj.update("trimgalore") is True - assert cmp_component(trimgalore_tmpdir, trimgalore_path) is False - - # Check that the modules.json is correctly updated - mod_json_obj = ModulesJson(self.pipeline_dir) - mod_json = mod_json_obj.get_modules_json() - # Get the up-to-date git_sha for the module from the ModulesRepo object - correct_git_sha = update_obj.modules_repo.get_latest_component_version("trimgalore", "modules") - current_git_sha = mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"]["git_sha"] - assert correct_git_sha == current_git_sha - - -# Mock questionary answer: do not update module, only show diffs -@mock.patch.object(questionary.Question, "unsafe_ask", return_value=True) -def test_install_at_hash_and_update_limit_output(self, mock_prompt): - """Installs an old version of a module in the pipeline and updates it with limited output reporting""" - self.caplog.set_level(logging.INFO) - assert self.mods_install_old.install("trimgalore") - - update_obj = ModuleUpdate( - self.pipeline_dir, - show_diff=True, - update_deps=True, - remote_url=GITLAB_URL, - 
branch=OLD_TRIMGALORE_BRANCH, - limit_output=True, - ) - assert update_obj.update("trimgalore") - - # Check changes not shown for non-.nf files - assert "Changes in 'trimgalore/meta.yml' but not shown" in self.caplog.text - # Check changes shown for .nf files - assert "Changes in 'trimgalore/main.nf'" in self.caplog.text - for line in self.caplog.text.split("\n"): - if line.startswith("---"): - assert line.endswith("main.nf") - - -def test_install_at_hash_and_update_and_save_diff_to_file(self): - """Installs an old version of a module in the pipeline and updates it""" - self.mods_install_old.install("trimgalore") - patch_path = Path(self.pipeline_dir, "trimgalore.patch") - update_obj = ModuleUpdate( - self.pipeline_dir, - save_diff_fn=patch_path, - sha=OLD_TRIMGALORE_SHA, - remote_url=GITLAB_URL, - branch=OLD_TRIMGALORE_BRANCH, - ) - - # Copy the module files and check that they are affected by the update - tmpdir = Path(tempfile.TemporaryDirectory().name) - trimgalore_tmpdir = tmpdir / "trimgalore" - trimgalore_path = Path(self.pipeline_dir, "modules", GITLAB_REPO, "trimgalore") - shutil.copytree(trimgalore_path, trimgalore_tmpdir) - - assert update_obj.update("trimgalore") is True - assert cmp_component(trimgalore_tmpdir, trimgalore_path) is True - - # TODO: Apply the patch to the module - - -def test_install_at_hash_and_update_and_save_diff_to_file_limit_output(self): - """Installs an old version of a module in the pipeline and updates it""" - # Install old version of trimgalore - self.mods_install_old.install("trimgalore") - patch_path = Path(self.pipeline_dir, "trimgalore.patch") - # Update saving the differences to a patch file and with `limit_output` - update_obj = ModuleUpdate( - self.pipeline_dir, - save_diff_fn=patch_path, - remote_url=GITLAB_URL, - branch=OLD_TRIMGALORE_BRANCH, - limit_output=True, - ) - assert update_obj.update("trimgalore") - - # Check that the patch file was created - assert patch_path.exists(), f"Patch file was not created at 
{patch_path}" - - # Read the contents of the patch file - with open(patch_path) as fh: - patch_content = fh.read() - # Check changes not shown for non-.nf files - assert "Changes in 'trimgalore/meta.yml' but not shown" in patch_content - # Check changes only shown for main.nf - assert "Changes in 'trimgalore/main.nf'" in patch_content - for line in patch_content: - if line.startswith("---"): - assert line.endswith("main.nf") - - -def test_update_all(self): - """Updates all modules present in the pipeline""" - update_obj = ModuleUpdate(self.pipeline_dir, update_all=True, show_diff=False) - # Get the current modules.json - assert update_obj.update() is True - - # We must reload the modules.json to get the updated version - mod_json_obj = ModulesJson(self.pipeline_dir) - mod_json = mod_json_obj.get_modules_json() - # Loop through all modules and check that they are updated (according to the modules.json file) - for mod in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME]: - correct_git_sha = list(update_obj.modules_repo.get_component_git_log(mod, "modules", depth=1))[0]["git_sha"] - current_git_sha = mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"] - assert correct_git_sha == current_git_sha - - -def test_update_with_config_fixed_version(self): - """Try updating when there are entries in the .nf-core.yml""" - # Install trimgalore at the latest version - assert self.mods_install_trimgalore.install("trimgalore") - - # Fix the trimgalore version in the .nf-core.yml to an old version - update_config = {GITLAB_URL: {GITLAB_REPO: {"trimgalore": OLD_TRIMGALORE_SHA}}} - config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - tools_config["update"] = update_config - with open(Path(self.pipeline_dir, config_fn), "w") as f: - yaml.dump(tools_config, f) - - # Update all modules in the pipeline - update_obj = ModuleUpdate( - self.pipeline_dir, update_all=True, show_diff=False, 
remote_url=GITLAB_URL, branch=OLD_TRIMGALORE_BRANCH - ) - assert update_obj.update() is True - - # Check that the git sha for trimgalore is correctly downgraded - mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - assert "trimgalore" in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO] - assert "git_sha" in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"] - assert mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"]["git_sha"] == OLD_TRIMGALORE_SHA - - -def test_update_with_config_dont_update(self): - """Try updating when module is to be ignored""" - # Install an old version of trimgalore - self.mods_install_old.install("trimgalore") - - # Set the trimgalore field to no update in the .nf-core.yml - update_config = {GITLAB_URL: {GITLAB_REPO: {"trimgalore": False}}} - config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - tools_config["update"] = update_config - with open(Path(self.pipeline_dir, config_fn), "w") as f: - yaml.dump(tools_config, f) - - # Update all modules in the pipeline - update_obj = ModuleUpdate( - self.pipeline_dir, - update_all=True, - show_diff=False, - sha=OLD_TRIMGALORE_SHA, - remote_url=GITLAB_URL, - branch=OLD_TRIMGALORE_BRANCH, - ) - assert update_obj.update() is True - - # Check that the git sha for trimgalore is correctly downgraded - mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - assert "trimgalore" in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO] - assert "git_sha" in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"] - assert mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"]["git_sha"] == OLD_TRIMGALORE_SHA - - -def test_update_with_config_fix_all(self): - """Fix the version of all nf-core modules""" - self.mods_install_trimgalore.install("trimgalore") - - # Fix the version of all nf-core modules in the .nf-core.yml to an old version - update_config = {GITLAB_URL: OLD_TRIMGALORE_SHA} - config_fn, 
tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - tools_config["update"] = update_config - with open(Path(self.pipeline_dir, config_fn), "w") as f: - yaml.dump(tools_config, f) - - # Update all modules in the pipeline - update_obj = ModuleUpdate( - self.pipeline_dir, update_all=True, show_diff=False, remote_url=GITLAB_URL, branch=OLD_TRIMGALORE_BRANCH - ) - assert update_obj.update() is True - - # Check that the git sha for trimgalore is correctly downgraded - mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - assert "git_sha" in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"] - assert mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]["trimgalore"]["git_sha"] == OLD_TRIMGALORE_SHA - - -def test_update_with_config_no_updates(self): - """Don't update any nf-core modules""" - assert self.mods_install_old.install("trimgalore") - old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - - # Fix the version of all nf-core modules in the .nf-core.yml to an old version - update_config = {GITLAB_URL: False} - config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - tools_config["update"] = update_config - with open(Path(self.pipeline_dir, config_fn), "w") as f: - yaml.dump(tools_config, f) - - # Update all modules in the pipeline - update_obj = ModuleUpdate( - self.pipeline_dir, - update_all=True, - show_diff=False, - sha=OLD_TRIMGALORE_SHA, - remote_url=GITLAB_URL, - branch=OLD_TRIMGALORE_BRANCH, - ) - assert update_obj.update() is True - - # Check that the git sha for trimgalore is correctly downgraded and none of the modules has changed - mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - for module in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO]: - assert "git_sha" in mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO][module] - assert ( - mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO][module]["git_sha"] - == 
old_mod_json["repos"][GITLAB_URL]["modules"][GITLAB_REPO][module]["git_sha"] - ) - - -def test_update_different_branch_single_module(self): - """Try updating a module in a specific branch""" - install_obj = ModuleInstall( - self.pipeline_dir, - prompt=False, - force=False, - remote_url=GITLAB_URL, - branch=GITLAB_BRANCH_TEST_BRANCH, - sha=GITLAB_BRANCH_TEST_OLD_SHA, - ) - assert install_obj.install("fastp") - - update_obj = ModuleUpdate( - self.pipeline_dir, update_deps=True, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH, show_diff=False - ) - update_obj.update("fastp") - - # Verify that the branch entry was updated correctly - modules_json = ModulesJson(self.pipeline_dir) - assert ( - modules_json.get_component_branch(self.component_type, "fastp", GITLAB_URL, GITLAB_REPO) - == GITLAB_BRANCH_TEST_BRANCH - ) - assert modules_json.get_module_version("fastp", GITLAB_URL, GITLAB_REPO) == GITLAB_BRANCH_TEST_NEW_SHA - - -def test_update_different_branch_mixed_modules_main(self): - """Try updating all modules where MultiQC is installed from main branch""" - # Install fastp - assert self.mods_install_gitlab_old.install("fastp") - - # Install MultiQC from gitlab default branch - assert self.mods_install_gitlab.install("multiqc") - - # Try updating - update_obj = ModuleUpdate(self.pipeline_dir, update_all=True, show_diff=False) - assert update_obj.update() is True - - modules_json = ModulesJson(self.pipeline_dir) - # Verify that the branch entry was updated correctly - assert ( - modules_json.get_component_branch(self.component_type, "fastp", GITLAB_URL, GITLAB_REPO) - == GITLAB_BRANCH_TEST_BRANCH - ) - assert modules_json.get_module_version("fastp", GITLAB_URL, GITLAB_REPO) == GITLAB_BRANCH_TEST_NEW_SHA - # MultiQC is present in both branches but should've been updated using the 'main' branch - assert ( - modules_json.get_component_branch(self.component_type, "multiqc", GITLAB_URL, GITLAB_REPO) - == GITLAB_DEFAULT_BRANCH - ) - - -def 
test_update_different_branch_mix_modules_branch_test(self): - """Try updating all modules where MultiQC is installed from branch-test branch""" - # Install multiqc from the branch-test branch - assert self.mods_install_gitlab_old.install( - "multiqc" - ) # Force as the same module is installed from github nf-core modules repo - modules_json = ModulesJson(self.pipeline_dir) - update_obj = ModuleUpdate( - self.pipeline_dir, - update_all=True, - show_diff=False, - remote_url=GITLAB_URL, - branch=GITLAB_BRANCH_TEST_BRANCH, - sha=GITLAB_BRANCH_TEST_NEW_SHA, - ) - assert update_obj.update() - - assert ( - modules_json.get_component_branch(self.component_type, "multiqc", GITLAB_URL, GITLAB_REPO) - == GITLAB_BRANCH_TEST_BRANCH - ) - assert modules_json.get_module_version("multiqc", GITLAB_URL, GITLAB_REPO) == GITLAB_BRANCH_TEST_NEW_SHA - - -# Mock questionary answer: do not update module, only show diffs -@mock.patch.object(questionary.Question, "unsafe_ask", return_value=False) -def test_update_only_show_differences(self, mock_prompt): - """Try updating all modules showing differences. - Only show diffs, don't actually save any updated files. 
- Check that the sha in modules.json is not changed.""" - - # Update modules to a fixed old SHA - update_old = ModuleUpdate( - self.pipeline_dir, update_all=True, show_diff=False, sha="5e34754d42cd2d5d248ca8673c0a53cdf5624905" - ) - update_old.update() - - tmpdir = Path(tempfile.TemporaryDirectory().name) - shutil.copytree(Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME), tmpdir) - - update_obj = ModuleUpdate(self.pipeline_dir, update_all=True, show_diff=True) - assert ModuleUpdate(self.pipeline_dir, update_all=True, show_diff=True).update() - - mod_json = ModulesJson(self.pipeline_dir).get_modules_json() - # Loop through all modules and check that they are NOT updated (according to the modules.json file) - # A module that can be updated but shouldn't is fastqc - # Module multiqc is already up to date so don't check - mod = "fastqc" - non_updated_git_sha = list(update_obj.modules_repo.get_component_git_log(mod, "modules", depth=1))[0]["git_sha"] - current_git_sha = mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"] - assert non_updated_git_sha != current_git_sha - assert cmp_component(Path(tmpdir, mod), Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, mod)) is True - - -# Mock questionary answer: do not update module, only show diffs -@mock.patch.object(questionary.Question, "unsafe_ask", return_value=False) -def test_update_only_show_differences_when_patch(self, mock_prompt): - """Try updating all modules showing differences when there's a patched module. - Don't update some of them. 
- Check that the sha in modules.json is not changed.""" - modules_json = ModulesJson(self.pipeline_dir) - update_obj = ModuleUpdate(self.pipeline_dir, update_all=True, show_diff=True) - - # Update modules to a fixed old SHA - update_old = ModuleUpdate( - self.pipeline_dir, update_all=True, show_diff=False, sha="5e34754d42cd2d5d248ca8673c0a53cdf5624905" - ) - assert update_old.update() - - # Modify fastqc module, it will have a patch which will be applied during update - # We modify fastqc because it's one of the modules that can be updated and there's another one before it (custom/dumpsoftwareversions) - module_path = Path(self.pipeline_dir, "modules", "nf-core", "fastqc") - main_path = Path(module_path, "main.nf") - with open(main_path) as fh: - lines = fh.readlines() - for line_index in range(len(lines)): - if lines[line_index] == " label 'process_medium'\n": - lines[line_index] = " label 'process_low'\n" - break - with open(main_path, "w") as fh: - fh.writelines(lines) - # Create a patch file - patch_obj = ModulePatch(self.pipeline_dir) - patch_obj.patch("fastqc") - # Check that a patch file with the correct name has been created - assert "fastqc.diff" in [f.name for f in module_path.glob("*.diff")] - - # Update all modules - assert update_obj.update() is True - - mod_json = modules_json.get_modules_json() - # Loop through all modules and check that they are NOT updated (according to the modules.json file) - # A module that can be updated but shouldn't is fastqc - # Module multiqc is already up to date so don't check - mod = "fastqc" - correct_git_sha = list(update_obj.modules_repo.get_component_git_log(mod, "modules", depth=1))[0]["git_sha"] - current_git_sha = mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"] - assert correct_git_sha != current_git_sha - - -def test_update_module_with_extra_config_file(self): - """Try updating a module with a config file""" - # Install the module - assert 
self.mods_install.install("trimgalore") - # Add a nextflow_test.config file to the module - trimgalore_path = Path(self.pipeline_dir, "modules", "nf-core", "trimgalore") - Path(trimgalore_path, "nextflow_test.config").touch() - with open(Path(trimgalore_path, "nextflow_test.config"), "w") as fh: - fh.write("params.my_param = 'my_value'\n") - # Update the module - update_obj = ModuleUpdate(self.pipeline_dir, show_diff=False) - assert update_obj.update("trimgalore") - # Check that the nextflow_test.config file is still there - assert Path(trimgalore_path, "nextflow_test.config").exists() - with open(Path(trimgalore_path, "nextflow_test.config")) as fh: - assert "params.my_param = 'my_value'" in fh.read() diff --git a/tests/test_modules.py b/tests/test_modules.py index 6e601ce7a..13bf32f97 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -160,131 +160,3 @@ def test_modulesrepo_class(self): @pytest.fixture(autouse=True) def _use_caplog(self, caplog): self.caplog = caplog - - ############################################ - # Test of the individual modules commands. 
# - ############################################ - - from .modules.bump_versions import ( # type: ignore[misc] - test_modules_bump_versions_all_modules, - test_modules_bump_versions_fail, - test_modules_bump_versions_fail_unknown_version, - test_modules_bump_versions_single_module, - ) - from .modules.create import ( # type: ignore[misc] - test_modules_create_fail_exists, - test_modules_create_nfcore_modules, - test_modules_create_nfcore_modules_subtool, - test_modules_create_succeed, - test_modules_migrate, - test_modules_migrate_no_delete, - test_modules_migrate_symlink, - ) - from .modules.info import ( # type: ignore[misc] - test_modules_info_in_modules_repo, - test_modules_info_local, - test_modules_info_remote, - test_modules_info_remote_gitlab, - ) - from .modules.install import ( # type: ignore[misc] - test_modules_install_alternate_remote, - test_modules_install_different_branch_fail, - test_modules_install_different_branch_succeed, - test_modules_install_emptypipeline, - test_modules_install_from_gitlab, - test_modules_install_nomodule, - test_modules_install_nopipeline, - test_modules_install_tracking, - test_modules_install_trimgalore, - test_modules_install_trimgalore_twice, - ) - from .modules.lint import ( # type: ignore[misc] - test_modules_absent_version, - test_modules_empty_file_in_snapshot, - test_modules_empty_file_in_stub_snapshot, - test_modules_environment_yml_file_doesnt_exists, - test_modules_environment_yml_file_name_mismatch, - test_modules_environment_yml_file_not_array, - test_modules_environment_yml_file_sorted_correctly, - test_modules_environment_yml_file_sorted_incorrectly, - test_modules_lint_check_process_labels, - test_modules_lint_check_url, - test_modules_lint_empty, - test_modules_lint_gitlab_modules, - test_modules_lint_multiple_remotes, - test_modules_lint_new_modules, - test_modules_lint_no_gitlab, - test_modules_lint_patched_modules, - test_modules_lint_snapshot_file, - test_modules_lint_snapshot_file_missing_fail, - 
test_modules_lint_snapshot_file_not_needed, - test_modules_lint_trimgalore, - test_modules_meta_yml_incorrect_licence_field, - test_modules_meta_yml_incorrect_name, - test_modules_meta_yml_input_mismatch, - test_modules_meta_yml_output_mismatch, - test_modules_missing_test_dir, - test_modules_missing_test_main_nf, - test_modules_unused_pytest_files, - test_nftest_failing_linting, - ) - from .modules.list import ( # type: ignore[misc] - test_modules_install_and_list_pipeline, - test_modules_install_gitlab_and_list_pipeline, - test_modules_list_in_wrong_repo_fail, - test_modules_list_local_json, - test_modules_list_pipeline, - test_modules_list_remote, - test_modules_list_remote_gitlab, - test_modules_list_remote_json, - test_modules_list_with_keywords, - test_modules_list_with_one_keyword, - test_modules_list_with_unused_keyword, - ) - from .modules.modules_json import ( # type: ignore[misc] - test_get_modules_json, - test_mod_json_create, - test_mod_json_create_with_patch, - test_mod_json_dump, - test_mod_json_get_module_version, - test_mod_json_module_present, - test_mod_json_repo_present, - test_mod_json_up_to_date, - test_mod_json_up_to_date_module_removed, - test_mod_json_up_to_date_reinstall_fails, - test_mod_json_update, - test_mod_json_with_empty_modules_value, - test_mod_json_with_missing_modules_entry, - ) - from .modules.patch import ( # type: ignore[misc] - test_create_patch_change, - test_create_patch_no_change, - test_create_patch_try_apply_failed, - test_create_patch_try_apply_successful, - test_create_patch_update_fail, - test_create_patch_update_success, - test_remove_patch, - ) - from .modules.remove import ( # type: ignore[misc] - test_modules_remove_multiqc_from_gitlab, - test_modules_remove_trimgalore, - test_modules_remove_trimgalore_uninstalled, - ) - from .modules.update import ( # type: ignore[misc] - test_install_and_update, - test_install_at_hash_and_update, - test_install_at_hash_and_update_and_save_diff_to_file, - 
test_install_at_hash_and_update_and_save_diff_to_file_limit_output, - test_install_at_hash_and_update_limit_output, - test_update_all, - test_update_different_branch_mix_modules_branch_test, - test_update_different_branch_mixed_modules_main, - test_update_different_branch_single_module, - test_update_module_with_extra_config_file, - test_update_only_show_differences, - test_update_only_show_differences_when_patch, - test_update_with_config_dont_update, - test_update_with_config_fix_all, - test_update_with_config_fixed_version, - test_update_with_config_no_updates, - ) From 772ff5244af03351e35584a28154a94818f2cd5f Mon Sep 17 00:00:00 2001 From: LaurenceKuhl Date: Thu, 18 Jul 2024 11:25:23 +0200 Subject: [PATCH 332/737] Update CHANGELOG.md MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Júlia Mir Pedrol --- CHANGELOG.md | 1 - 1 file changed, 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8cda88788..b80a63c6a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,7 +8,6 @@ - Run awsfulltest on PRs to `master` with two PR approvals ([#3042](https://github.com/nf-core/tools/pull/3042)) - Remove deprecated syntax ([#3046](https://github.com/nf-core/tools/pull/3046)) - Use filename in code block for `params.yml` ([#3055](https://github.com/nf-core/tools/pull/3055)) -- Use filename in code block for `params.yml` ([#3055](https://github.com/nf-core/tools/pull/3055)) - Remove release announcement for non nf-core pipelines ([#3072](https://github.com/nf-core/tools/pull/3072)) ### Linting From d95f7f8631f0605319f8a80d4ab2199b08e6fea4 Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 18 Jul 2024 10:14:54 +0200 Subject: [PATCH 333/737] avoid naming collision with variable name `dir` --- nf_core/commands_pipelines.py | 8 ++-- nf_core/components/components_command.py | 52 +++++++++++------------ nf_core/components/components_test.py | 4 +- nf_core/components/install.py | 16 +++---- 
nf_core/components/lint/__init__.py | 54 +++++++++++++----------- nf_core/components/update.py | 49 ++++++++++++--------- nf_core/modules/lint/__init__.py | 9 ++-- nf_core/modules/modules_json.py | 18 ++++---- nf_core/pipelines/create_logo.py | 14 +++--- 9 files changed, 121 insertions(+), 103 deletions(-) diff --git a/nf_core/commands_pipelines.py b/nf_core/commands_pipelines.py index 3f569bfe3..deb1f691a 100644 --- a/nf_core/commands_pipelines.py +++ b/nf_core/commands_pipelines.py @@ -308,7 +308,7 @@ def pipelines_sync(ctx, dir, from_branch, pull_request, github_repository, usern # nf-core pipelines create-logo -def pipelines_create_logo(logo_text, dir, name, theme, width, format, force): +def pipelines_create_logo(logo_text, directory, name, theme, width, format, force): """ Generate a logo with the nf-core logo template. @@ -317,9 +317,9 @@ def pipelines_create_logo(logo_text, dir, name, theme, width, format, force): from nf_core.pipelines.create_logo import create_logo try: - if dir == ".": - dir = Path.cwd() - logo_path = create_logo(logo_text, dir, name, theme, width, format, force) + if directory == ".": + directory = Path.cwd() + logo_path = create_logo(logo_text, directory, name, theme, width, format, force) # Print path to logo relative to current working directory try: logo_path = Path(logo_path).relative_to(Path.cwd()) diff --git a/nf_core/components/components_command.py b/nf_core/components/components_command.py index aa1dccc0d..bf80b4611 100644 --- a/nf_core/components/components_command.py +++ b/nf_core/components/components_command.py @@ -22,7 +22,7 @@ class ComponentCommand: def __init__( self, component_type: str, - dir: Union[str, Path], + directory: Union[str, Path], remote_url: Optional[str] = None, branch: Optional[str] = None, no_pull: bool = False, @@ -33,7 +33,7 @@ def __init__( Initialise the ComponentClass object """ self.component_type = component_type - self.dir = Path(dir) if dir else None + self.directory = Path(directory) if 
directory else None self.modules_repo = ModulesRepo(remote_url, branch, no_pull, hide_progress) self.hide_progress = hide_progress self.no_prompts = no_prompts @@ -49,8 +49,8 @@ def _configure_repo_and_paths(self, nf_dir_req: bool = True) -> None: """ try: - if self.dir: - self.dir, self.repo_type, self.org = get_repo_info(self.dir, use_prompt=not self.no_prompts) + if self.directory: + self.directory, self.repo_type, self.org = get_repo_info(self.directory, use_prompt=not self.no_prompts) else: self.repo_type = None self.org = "" @@ -68,7 +68,7 @@ def get_local_components(self) -> List[str]: """ Get the local modules/subworkflows in a pipeline """ - local_component_dir = Path(self.dir, self.component_type, "local") + local_component_dir = Path(self.directory, self.component_type, "local") return [ str(path.relative_to(local_component_dir)) for path in local_component_dir.iterdir() if path.suffix == ".nf" ] @@ -78,9 +78,9 @@ def get_components_clone_modules(self) -> List[str]: Get the modules/subworkflows repository available in a clone of nf-core/modules """ if self.component_type == "modules": - component_base_path = Path(self.dir, self.default_modules_path) + component_base_path = Path(self.directory, self.default_modules_path) elif self.component_type == "subworkflows": - component_base_path = Path(self.dir, self.default_subworkflows_path) + component_base_path = Path(self.directory, self.default_subworkflows_path) return [ str(Path(dir).relative_to(component_base_path)) for dir, _, files in os.walk(component_base_path) @@ -91,23 +91,23 @@ def has_valid_directory(self) -> bool: """Check that we were given a pipeline or clone of nf-core/modules""" if self.repo_type == "modules": return True - if self.dir is None or not os.path.exists(self.dir): - log.error(f"Could not find directory: {self.dir}") + if self.directory is None or not os.path.exists(self.directory): + log.error(f"Could not find directory: {self.directory}") return False - main_nf = 
os.path.join(self.dir, "main.nf") - nf_config = os.path.join(self.dir, "nextflow.config") + main_nf = os.path.join(self.directory, "main.nf") + nf_config = os.path.join(self.directory, "nextflow.config") if not os.path.exists(main_nf) and not os.path.exists(nf_config): - if Path(self.dir).resolve().parts[-1].startswith("nf-core"): - raise UserWarning(f"Could not find a 'main.nf' or 'nextflow.config' file in '{self.dir}'") - log.warning(f"Could not find a 'main.nf' or 'nextflow.config' file in '{self.dir}'") + if Path(self.directory).resolve().parts[-1].startswith("nf-core"): + raise UserWarning(f"Could not find a 'main.nf' or 'nextflow.config' file in '{self.directory}'") + log.warning(f"Could not find a 'main.nf' or 'nextflow.config' file in '{self.directory}'") return True def has_modules_file(self) -> None: """Checks whether a module.json file has been created and creates one if it is missing""" - modules_json_path = os.path.join(self.dir, "modules.json") + modules_json_path = os.path.join(self.directory, "modules.json") if not os.path.exists(modules_json_path): log.info("Creating missing 'module.json' file.") - ModulesJson(self.dir).create() + ModulesJson(self.directory).create() def clear_component_dir(self, component_name: str, component_dir: str) -> bool: """ @@ -122,7 +122,7 @@ def clear_component_dir(self, component_name: str, component_dir: str) -> bool: try: shutil.rmtree(component_dir) # remove all empty directories - for dir_path, dir_names, filenames in os.walk(self.dir, topdown=False): + for dir_path, dir_names, filenames in os.walk(self.directory, topdown=False): if not dir_names and not filenames: try: os.rmdir(dir_path) @@ -147,7 +147,7 @@ def components_from_repo(self, install_dir: str) -> List[str]: Returns: [str]: The names of the modules/subworkflows """ - repo_dir = Path(self.dir, self.component_type, install_dir) + repo_dir = Path(self.directory, self.component_type, install_dir) if not repo_dir.exists(): raise LookupError(f"Nothing 
installed from {install_dir} in pipeline") @@ -180,7 +180,7 @@ def load_lint_config(self) -> None: Add parsed config to the `self.lint_config` class attribute. """ - _, tools_config = nf_core.utils.load_tools_config(self.dir) + _, tools_config = nf_core.utils.load_tools_config(self.directory) self.lint_config = tools_config.get("lint", {}) def check_modules_structure(self) -> None: @@ -193,9 +193,9 @@ def check_modules_structure(self) -> None: """ if self.repo_type == "pipeline": wrong_location_modules: List[Path] = [] - for directory, _, files in os.walk(Path(self.dir, "modules")): + for directory, _, files in os.walk(Path(self.directory, "modules")): if "main.nf" in files: - module_path = Path(directory).relative_to(Path(self.dir, "modules")) + module_path = Path(directory).relative_to(Path(self.directory, "modules")) parts = module_path.parts # Check that there are modules installed directly under the 'modules' directory if parts[1] == "modules": @@ -215,9 +215,9 @@ def check_modules_structure(self) -> None: wrong_dir = Path(modules_dir, module) shutil.move(str(wrong_dir), str(correct_dir)) log.info(f"Moved {wrong_dir} to {correct_dir}.") - shutil.rmtree(Path(self.dir, "modules", self.modules_repo.repo_path, "modules")) + shutil.rmtree(Path(self.directory, "modules", self.modules_repo.repo_path, "modules")) # Regenerate modules.json file - modules_json = ModulesJson(self.dir) + modules_json = ModulesJson(self.directory) modules_json.check_up_to_date() def check_patch_paths(self, patch_path: Path, module_name: str) -> None: @@ -243,12 +243,12 @@ def check_patch_paths(self, patch_path: Path, module_name: str) -> None: for line in lines: fh.write(line) # Update path in modules.json if the file is in the correct format - modules_json = ModulesJson(self.dir) + modules_json = ModulesJson(self.directory) modules_json.load() if modules_json.has_git_url_and_modules() and modules_json.modules_json is not None: 
modules_json.modules_json["repos"][self.modules_repo.remote_url]["modules"][ self.modules_repo.repo_path - ][module_name]["patch"] = str(patch_path.relative_to(Path(self.dir).resolve())) + ][module_name]["patch"] = str(patch_path.relative_to(Path(self.directory).resolve())) modules_json.dump() def check_if_in_include_stmts(self, component_path: str) -> Dict[str, List[Dict[str, Union[int, str]]]]: @@ -262,7 +262,7 @@ def check_if_in_include_stmts(self, component_path: str) -> Dict[str, List[Dict[ """ include_stmts: Dict[str, List[Dict[str, Union[int, str]]]] = {} if self.repo_type == "pipeline": - workflow_files = Path(self.dir, "workflows").glob("*.nf") + workflow_files = Path(self.directory, "workflows").glob("*.nf") for workflow_file in workflow_files: with open(workflow_file) as fh: # Check if component path is in the file using mmap diff --git a/nf_core/components/components_test.py b/nf_core/components/components_test.py index 9b81f54f0..f9b891004 100644 --- a/nf_core/components/components_test.py +++ b/nf_core/components/components_test.py @@ -93,7 +93,7 @@ def run(self) -> None: os.environ["NFT_DIFF_ARGS"] = ( "--line-numbers --expand-tabs=2" # taken from https://code.askimed.com/nf-test/docs/assertions/snapshots/#snapshot-differences ) - with nf_core.utils.set_wd(Path(self.dir)): + with nf_core.utils.set_wd(Path(self.directory)): self.check_snapshot_stability() if len(self.errors) > 0: errors = "\n - ".join(self.errors) @@ -126,7 +126,7 @@ def check_inputs(self) -> None: self.component_dir = Path(self.component_type, self.modules_repo.repo_path, *self.component_name.split("/")) # First, sanity check that the module directory exists - if not Path(self.dir, self.component_dir).is_dir(): + if not Path(self.directory, self.component_dir).is_dir(): raise UserWarning( f"Cannot find directory '{self.component_dir}'.{' Should be TOOL/SUBTOOL or TOOL' if self.component_type == 'modules' else ''}" ) diff --git a/nf_core/components/install.py 
b/nf_core/components/install.py index 6385ee409..dddc3f93e 100644 --- a/nf_core/components/install.py +++ b/nf_core/components/install.py @@ -54,7 +54,7 @@ def install(self, component, silent=False): self.check_modules_structure() # Verify that 'modules.json' is consistent with the installed modules and subworkflows - modules_json = ModulesJson(self.dir) + modules_json = ModulesJson(self.directory) if not silent: modules_json.check_up_to_date() @@ -79,7 +79,7 @@ def install(self, component, silent=False): ) # Set the install folder based on the repository name - install_folder = Path(self.dir, self.component_type, self.modules_repo.repo_path) + install_folder = Path(self.directory, self.component_type, self.modules_repo.repo_path) # Compute the component directory component_dir = Path(install_folder, component) @@ -134,14 +134,14 @@ def install(self, component, silent=False): log.info(f"Use the following statement to include this {self.component_type[:-1]}:") Console().print( Syntax( - f"include {{ {component_name} }} from '../{Path(install_folder, component).relative_to(self.dir)}/main'", + f"include {{ {component_name} }} from '../{Path(install_folder, component).relative_to(self.directory)}/main'", "groovy", theme="ansi_dark", padding=1, ) ) if self.component_type == "subworkflows": - subworkflow_config = Path(install_folder, component, "nextflow.config").relative_to(self.dir) + subworkflow_config = Path(install_folder, component, "nextflow.config").relative_to(self.directory) if os.path.isfile(subworkflow_config): log.info("Add the following config statement to use this subworkflow:") Console().print( @@ -261,9 +261,9 @@ def clean_modules_json(self, component, modules_repo, modules_json): Remove installed version of module/subworkflow from modules.json """ for repo_url, repo_content in modules_json.modules_json["repos"].items(): - for dir, dir_components in repo_content[self.component_type].items(): + for directory, dir_components in 
repo_content[self.component_type].items(): for name, component_values in dir_components.items(): - if name == component and dir == modules_repo.repo_path: + if name == component and directory == modules_repo.repo_path: repo_to_remove = repo_url log.debug( f"Removing {self.component_type[:-1]} '{modules_repo.repo_path}/{component}' from repo '{repo_to_remove}' from modules.json." @@ -285,7 +285,7 @@ def check_alternate_remotes(self, modules_json): modules_json.load() for repo_url, repo_content in modules_json.modules_json.get("repos", dict()).items(): for component_type in repo_content: - for dir in repo_content.get(component_type, dict()).keys(): - if dir == self.modules_repo.repo_path and repo_url != self.modules_repo.remote_url: + for directory in repo_content.get(component_type, dict()).keys(): + if directory == self.modules_repo.repo_path and repo_url != self.modules_repo.remote_url: return True return False diff --git a/nf_core/components/lint/__init__.py b/nf_core/components/lint/__init__.py index 6d47f1e7a..7dd39bd90 100644 --- a/nf_core/components/lint/__init__.py +++ b/nf_core/components/lint/__init__.py @@ -7,6 +7,7 @@ import operator import os from pathlib import Path +from typing import List, Optional, Union import rich.box import rich.console @@ -53,18 +54,18 @@ class ComponentLint(ComponentCommand): def __init__( self, - component_type, - dir, - fail_warned=False, - remote_url=None, - branch=None, - no_pull=False, - registry=None, - hide_progress=False, + component_type: str, + directory: Union[str, Path], + fail_warned: bool = False, + remote_url: Optional[str] = None, + branch: Optional[str] = None, + no_pull: bool = False, + registry: Optional[str] = None, + hide_progress: bool = False, ): super().__init__( component_type, - dir=dir, + directory=directory, remote_url=remote_url, branch=branch, no_pull=no_pull, @@ -72,18 +73,18 @@ def __init__( ) self.fail_warned = fail_warned - self.passed = [] - self.warned = [] - self.failed = [] + self.passed: 
List[str] = [] + self.warned: List[str] = [] + self.failed: List[str] = [] if self.component_type == "modules": self.lint_tests = self.get_all_module_lint_tests(self.repo_type == "pipeline") else: self.lint_tests = self.get_all_subworkflow_lint_tests(self.repo_type == "pipeline") if self.repo_type == "pipeline": - modules_json = ModulesJson(self.dir) + modules_json = ModulesJson(self.directory) modules_json.check_up_to_date() - self.all_remote_components = [] + self.all_remote_components: List[NFCoreComponent] = [] for repo_url, components in modules_json.get_all_components(self.component_type).items(): if remote_url is not None and remote_url != repo_url: continue @@ -92,9 +93,9 @@ def __init__( NFCoreComponent( comp, repo_url, - Path(self.dir, self.component_type, org, comp), + Path(self.directory, self.component_type, org, comp), self.repo_type, - Path(self.dir), + Path(self.directory), self.component_type, ) ) @@ -102,7 +103,7 @@ def __init__( raise LookupError( f"No {self.component_type} from {self.modules_repo.remote_url} installed in pipeline." 
) - local_component_dir = Path(self.dir, self.component_type, "local") + local_component_dir = Path(self.directory, self.component_type, "local") self.all_local_components = [] if local_component_dir.exists(): self.all_local_components = [ @@ -111,20 +112,20 @@ def __init__( None, Path(local_component_dir, comp), self.repo_type, - Path(self.dir), + Path(self.directory), self.component_type, remote_component=False, ) for comp in self.get_local_components() ] - self.config = nf_core.utils.fetch_wf_config(Path(self.dir), cache_config=True) + self.config = nf_core.utils.fetch_wf_config(Path(self.directory), cache_config=True) else: component_dir = Path( - self.dir, + self.directory, self.default_modules_path if self.component_type == "modules" else self.default_subworkflows_path, ) self.all_remote_components = [ - NFCoreComponent(m, None, component_dir / m, self.repo_type, Path(self.dir), self.component_type) + NFCoreComponent(m, None, component_dir / m, self.repo_type, Path(self.directory), self.component_type) for m in self.get_components_clone_modules() ] self.all_local_components = [] @@ -132,7 +133,9 @@ def __init__( raise LookupError(f"No {self.component_type} in '{self.component_type}' directory") # This could be better, perhaps glob for all nextflow.config files in? 
- self.config = nf_core.utils.fetch_wf_config(Path(self.dir).joinpath("tests", "config"), cache_config=True) + self.config = nf_core.utils.fetch_wf_config( + Path(self.directory).joinpath("tests", "config"), cache_config=True + ) if registry is None: self.registry = self.config.get("docker.registry", "quay.io") @@ -143,6 +146,9 @@ def __init__( self.lint_config = None self.modules_json = None + def __repr__(self) -> str: + return f"ComponentLint({self.component_type}, {self.directory})" + @staticmethod def get_all_module_lint_tests(is_pipeline): if is_pipeline: @@ -168,7 +174,7 @@ def get_all_subworkflow_lint_tests(is_pipeline): def set_up_pipeline_files(self): self.load_lint_config() - self.modules_json = ModulesJson(self.dir) + self.modules_json = ModulesJson(self.directory) self.modules_json.load() # Only continue if a lint config has been loaded @@ -243,7 +249,7 @@ def format_result(test_results, table): module_name = lint_result.component_name # Make the filename clickable to open in VSCode - file_path = os.path.relpath(lint_result.file_path, self.dir) + file_path = os.path.relpath(lint_result.file_path, self.directory) file_path_link = f"[link=vscode://file/{os.path.abspath(file_path)}]{file_path}[/link]" table.add_row( diff --git a/nf_core/components/update.py b/nf_core/components/update.py index 1e31e5627..9b24b6c0c 100644 --- a/nf_core/components/update.py +++ b/nf_core/components/update.py @@ -51,7 +51,7 @@ def __init__( self.update_deps = update_deps self.component = None self.update_config = None - self.modules_json = ModulesJson(self.dir) + self.modules_json = ModulesJson(self.directory) self.branch = branch def _parameter_checks(self): @@ -96,9 +96,12 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr if updated is None: updated = [] - _, tool_config = nf_core.utils.load_tools_config(self.dir) + _, tool_config = nf_core.utils.load_tools_config(self.directory) self.update_config = tool_config.get("update", {}) + if 
self.update_config is None: + raise UserWarning("Could not find '.nf-core.yml' file in pipeline directory") + self._parameter_checks() # Check modules directory structure @@ -171,7 +174,7 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr component_install_dir = install_tmp_dir / component # Compute the component directory - component_dir = os.path.join(self.dir, self.component_type, modules_repo.repo_path, component) + component_dir = Path(self.directory, self.component_type, modules_repo.repo_path, component) if sha is not None: version = sha @@ -318,7 +321,7 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr self.save_diff_fn, old_modules_json, self.modules_json.get_modules_json(), - Path(self.dir, "modules.json"), + Path(self.directory, "modules.json"), ) if exit_value and not silent: log.info( @@ -479,7 +482,9 @@ def get_all_components_info(self, branch=None): # Loop through all the modules/subworkflows in the pipeline # and check if they have an entry in the '.nf-core.yml' file for repo_name, components in self.modules_json.get_all_components(self.component_type).items(): - if repo_name not in self.update_config or self.update_config[repo_name] is True: + if isinstance(self.update_config, dict) and ( + repo_name not in self.update_config or self.update_config[repo_name] is True + ): # There aren't restrictions for the repository in .nf-core.yml file components_info[repo_name] = {} for component_dir, component in components: @@ -503,7 +508,7 @@ def get_all_components_info(self, branch=None): ), ) ] - elif isinstance(self.update_config[repo_name], dict): + elif isinstance(self.update_config, dict) and isinstance(self.update_config[repo_name], dict): # If it is a dict, then there are entries for individual components or component directories for component_dir in set([dir for dir, _ in components]): if isinstance(self.update_config[repo_name][component_dir], str): @@ -535,8 +540,8 @@ def 
get_all_components_info(self, branch=None): if self.sha is not None: overridden_repos.append(repo_name) elif self.update_config[repo_name][component_dir] is False: - for dir, component in components: - if dir == component_dir: + for directory, component in components: + if directory == component_dir: skipped_components.append(f"{component_dir}/{components}") elif isinstance(self.update_config[repo_name][component_dir], dict): # If it's a dict, there are entries for individual components @@ -596,7 +601,7 @@ def get_all_components_info(self, branch=None): raise UserWarning( f"{self.component_type[:-1].title()} '{component}' in '{component_dir}' has an invalid entry in '.nf-core.yml'" ) - elif isinstance(self.update_config[repo_name], str): + elif isinstance(self.update_config, dict) and isinstance(self.update_config[repo_name], str): # If a string is given it is the commit SHA to which we should update to custom_sha = self.update_config[repo_name] components_info[repo_name] = {} @@ -623,8 +628,10 @@ def get_all_components_info(self, branch=None): ] if self.sha is not None: overridden_repos.append(repo_name) - elif self.update_config[repo_name] is False: + elif isinstance(self.update_config, dict) and self.update_config[repo_name] is False: skipped_repos.append(repo_name) + elif not isinstance(self.update_config, dict): + raise UserWarning("`.nf-core.yml` is not correctly formatted.") else: raise UserWarning(f"Repo '{repo_name}' has an invalid entry in '.nf-core.yml'") @@ -712,8 +719,10 @@ def setup_diff_file(self, check_diff_exist=True): self.save_diff_fn = questionary.path( "Enter the filename: ", style=nf_core.utils.nfcore_question_style ).unsafe_ask() - - self.save_diff_fn = Path(self.save_diff_fn) + if self.save_diff_fn is not None: + self.save_diff_fn = Path(self.save_diff_fn) + else: + raise UserWarning("No filename provided for saving the diff file") if not check_diff_exist: # This guarantees that the file exists after calling the function @@ -744,7 +753,7 @@ 
def move_files_from_tmp_dir(self, component: str, install_folder: str, repo_path """ temp_component_dir = Path(install_folder, component) files = [file_path for file_path in temp_component_dir.rglob("*") if file_path.is_file()] - pipeline_path = Path(self.dir, self.component_type, repo_path, component) + pipeline_path = Path(self.directory, self.component_type, repo_path, component) if pipeline_path.exists(): pipeline_files = [f.name for f in pipeline_path.iterdir() if f.is_file()] @@ -795,7 +804,7 @@ def try_apply_patch( component_fullname = str(Path(repo_path, component)) log.info(f"Found patch for {self.component_type[:-1]} '{component_fullname}'. Trying to apply it to new files") - patch_path = Path(self.dir / patch_relpath) + patch_path = Path(self.directory / patch_relpath) component_relpath = Path(self.component_type, repo_path, component) # Check that paths in patch file are updated @@ -928,29 +937,31 @@ def update_linked_components( def manage_changes_in_linked_components(self, component, modules_to_update, subworkflows_to_update): """Check for linked components added or removed in the new subworkflow version""" if self.component_type == "subworkflows": - subworkflow_directory = Path(self.dir, self.component_type, self.modules_repo.repo_path, component) + subworkflow_directory = Path(self.directory, self.component_type, self.modules_repo.repo_path, component) included_modules, included_subworkflows = get_components_to_install(subworkflow_directory) # If a module/subworkflow has been removed from the subworkflow for module in modules_to_update: if module not in included_modules: log.info(f"Removing module '{module}' which is not included in '{component}' anymore.") - remove_module_object = ComponentRemove("modules", self.dir) + remove_module_object = ComponentRemove("modules", self.directory) remove_module_object.remove(module, removed_by=component) for subworkflow in subworkflows_to_update: if subworkflow not in included_subworkflows: log.info(f"Removing 
subworkflow '{subworkflow}' which is not included in '{component}' anymore.") - remove_subworkflow_object = ComponentRemove("subworkflows", self.dir) + remove_subworkflow_object = ComponentRemove("subworkflows", self.directory) remove_subworkflow_object.remove(subworkflow, removed_by=component) # If a new module/subworkflow is included in the subworklfow and wasn't included before for module in included_modules: if module not in modules_to_update: log.info(f"Installing newly included module '{module}' for '{component}'") - install_module_object = ComponentInstall(self.dir, "modules", installed_by=component) + install_module_object = ComponentInstall(self.directory, "modules", installed_by=component) install_module_object.install(module, silent=True) for subworkflow in included_subworkflows: if subworkflow not in subworkflows_to_update: log.info(f"Installing newly included subworkflow '{subworkflow}' for '{component}'") - install_subworkflow_object = ComponentInstall(self.dir, "subworkflows", installed_by=component) + install_subworkflow_object = ComponentInstall( + self.directory, "subworkflows", installed_by=component + ) install_subworkflow_object.install(subworkflow, silent=True) def _change_component_type(self, new_component_type): diff --git a/nf_core/modules/lint/__init__.py b/nf_core/modules/lint/__init__.py index b780144ef..90d39104d 100644 --- a/nf_core/modules/lint/__init__.py +++ b/nf_core/modules/lint/__init__.py @@ -11,6 +11,7 @@ import questionary import rich +import rich.progress import nf_core.modules.modules_utils import nf_core.utils @@ -39,7 +40,7 @@ class ModuleLint(ComponentLint): def __init__( self, - dir, + directory, fail_warned=False, remote_url=None, branch=None, @@ -49,7 +50,7 @@ def __init__( ): super().__init__( component_type="modules", - dir=dir, + directory=directory, fail_warned=fail_warned, remote_url=remote_url, branch=branch, @@ -127,9 +128,9 @@ def lint( remote_modules = self.all_remote_components if self.repo_type == "modules": 
- log.info(f"Linting modules repo: [magenta]'{self.dir}'") + log.info(f"Linting modules repo: [magenta]'{self.directory}'") else: - log.info(f"Linting pipeline: [magenta]'{self.dir}'") + log.info(f"Linting pipeline: [magenta]'{self.directory}'") if module: log.info(f"Linting module: [magenta]'{module}'") diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 2c2f1a32c..63c356a79 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -39,10 +39,10 @@ def __init__(self, pipeline_dir: Union[str, Path]): Args: pipeline_dir (str): The pipeline directory """ - self.dir = Path(pipeline_dir) - self.modules_dir = self.dir / "modules" - self.subworkflows_dir = self.dir / "subworkflows" - self.modules_json_path = self.dir / "modules.json" + self.directory = Path(pipeline_dir) + self.modules_dir = self.directory / "modules" + self.subworkflows_dir = self.directory / "subworkflows" + self.modules_json_path = self.directory / "modules.json" self.modules_json = None self.pipeline_modules = None self.pipeline_subworkflows = None @@ -63,7 +63,7 @@ def create(self): Raises: UserWarning: If the creation fails """ - pipeline_config = nf_core.utils.fetch_wf_config(self.dir) + pipeline_config = nf_core.utils.fetch_wf_config(self.directory) pipeline_name = pipeline_config.get("manifest.name", "") pipeline_url = pipeline_config.get("manifest.homePage", "") new_modules_json = {"name": pipeline_name.strip("'"), "homePage": pipeline_url.strip("'"), "repos": {}} @@ -72,7 +72,7 @@ def create(self): if rich.prompt.Confirm.ask( "[bold][blue]?[/] Can't find a ./modules directory. 
Would you like me to create one?", default=True ): - log.info(f"Creating ./modules directory in '{self.dir}'") + log.info(f"Creating ./modules directory in '{self.directory}'") self.modules_dir.mkdir() else: raise UserWarning("Cannot proceed without a ./modules directory.") @@ -153,7 +153,7 @@ def get_pipeline_module_repositories(self, component_type, directory, repos=None # The function might rename some directories, keep track of them renamed_dirs = {} # Check if there are any untracked repositories - dirs_not_covered = self.dir_tree_uncovered(directory, [Path(ModulesRepo(url).repo_path) for url in repos]) + dirs_not_covered = self.directory_tree_uncovered(directory, [Path(ModulesRepo(url).repo_path) for url in repos]) if len(dirs_not_covered) > 0: log.info(f"Found custom {component_type[:-1]} repositories when creating 'modules.json'") # Loop until all directories in the base directory are covered by a remote @@ -203,7 +203,7 @@ def get_pipeline_module_repositories(self, component_type, directory, repos=None if component_type not in repos[nrepo_remote]: repos[nrepo_remote][component_type] = {} repos[nrepo_remote][component_type][nrepo_name] = {} - dirs_not_covered = self.dir_tree_uncovered( + dirs_not_covered = self.directory_tree_uncovered( directory, [Path(name) for url in repos for name in repos[url][component_type]] ) @@ -816,7 +816,7 @@ def try_apply_patch_reverse(self, module, repo_name, patch_relpath, module_dir): LookupError: If patch was not applied """ module_fullname = str(Path(repo_name, module)) - patch_path = Path(self.dir / patch_relpath) + patch_path = Path(self.directory / patch_relpath) try: new_files = ModulesDiffer.try_apply_patch(module, repo_name, patch_path, module_dir, reverse=True) diff --git a/nf_core/pipelines/create_logo.py b/nf_core/pipelines/create_logo.py index 0643d2e29..f49e98e93 100644 --- a/nf_core/pipelines/create_logo.py +++ b/nf_core/pipelines/create_logo.py @@ -12,7 +12,7 @@ def create_logo( text: str, - dir: Union[Path, 
str], + directory: Union[Path, str], filename: str = "", theme: str = "light", width: int = 2300, @@ -22,10 +22,10 @@ def create_logo( """Create a logo for a pipeline.""" if not text: raise UserWarning("Please provide the name of the text to put on the logo.") - dir = Path(dir) - if not dir.is_dir(): - log.debug(f"Creating directory {dir}") - dir.mkdir(parents=True, exist_ok=True) + directory = Path(directory) + if not directory.is_dir(): + log.debug(f"Creating directory {directory}") + directory.mkdir(parents=True, exist_ok=True) assets = Path(nf_core.__file__).parent / "assets/logo" if format == "svg": @@ -43,7 +43,7 @@ def create_logo( # save the svg logo_filename = f"nf-core-{text}_logo_{theme}.svg" if not filename else filename logo_filename = f"{logo_filename}.svg" if not logo_filename.lower().endswith(".svg") else logo_filename - logo_path = Path(dir, logo_filename) + logo_path = Path(directory, logo_filename) with open(logo_path, "w") as fh: fh.write(svg) @@ -51,7 +51,7 @@ def create_logo( logo_filename = f"nf-core-{text}_logo_{theme}.png" if not filename else filename logo_filename = f"{logo_filename}.png" if not logo_filename.lower().endswith(".png") else logo_filename cache_name = f"nf-core-{text}_logo_{theme}_{width}.png" - logo_path = Path(dir, logo_filename) + logo_path = Path(directory, logo_filename) # Check if we haven't already created this logo if logo_path.is_file() and not force: From 089bb672b13265d40b5cd5cc7e96146943288402 Mon Sep 17 00:00:00 2001 From: laurencekuhl Date: Thu, 18 Jul 2024 16:31:08 +0200 Subject: [PATCH 334/737] Fix the command_create by putting the plain param again --- nf_core/__main__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 192c7cd13..189218996 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -2181,7 +2181,7 @@ def command_lint( help="The name of the GitHub organisation where the pipeline will be hosted (default: nf-core)", ) 
@click.pass_context -def command_create(ctx, name, description, author, version, force, outdir, template_yaml, organisation): +def command_create(ctx, name, description, author, version, force, outdir, template_yaml, plain, organisation): """ Use `nf-core pipelines create` instead. """ From dcb1dc0b6802d39494389bfb401d75647b201e83 Mon Sep 17 00:00:00 2001 From: laurencekuhl Date: Thu, 18 Jul 2024 16:41:38 +0200 Subject: [PATCH 335/737] Automatically add a default version in pipelines_create --- nf_core/__main__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 189218996..152814c9c 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -2188,7 +2188,7 @@ def command_create(ctx, name, description, author, version, force, outdir, templ log.warning( "The `[magenta]nf-core create[/]` command is deprecated. Use `[magenta]nf-core pipelines create[/]` instead." ) - pipelines_create(ctx, name, description, author, version, force, outdir, template_yaml, organisation) + pipelines_create(ctx, name, description, author, version="1.0.0dev", force, outdir, template_yaml, organisation) # Main script is being run - launch the CLI From 9dc96b8768e4a289cb6784924c7f29e07ed4a39c Mon Sep 17 00:00:00 2001 From: laurencekuhl Date: Thu, 18 Jul 2024 17:03:45 +0200 Subject: [PATCH 336/737] Add default version --- nf_core/__main__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 152814c9c..cb99bdedc 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -2169,7 +2169,7 @@ def command_lint( ) @click.option("-d", "--description", type=str, help="A short description of your pipeline") @click.option("-a", "--author", type=str, help="Name of the main author(s)") -@click.option("--version", type=str, help="The initial version number to use") +@click.option("--version", type=str, default="1.0.0dev", help="The initial version number to use") 
@click.option("-f", "--force", is_flag=True, default=False, help="Overwrite output directory if it already exists") @click.option("-o", "--outdir", help="Output directory for new pipeline (default: pipeline name)") @click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") @@ -2188,7 +2188,7 @@ def command_create(ctx, name, description, author, version, force, outdir, templ log.warning( "The `[magenta]nf-core create[/]` command is deprecated. Use `[magenta]nf-core pipelines create[/]` instead." ) - pipelines_create(ctx, name, description, author, version="1.0.0dev", force, outdir, template_yaml, organisation) + pipelines_create(ctx, name, description, author, force, outdir, template_yaml, organisation) # Main script is being run - launch the CLI From 60973a19a2381f89b11ff6c408c9f150ff6a1d55 Mon Sep 17 00:00:00 2001 From: mashehu Date: Fri, 19 Jul 2024 06:45:58 +0200 Subject: [PATCH 337/737] fix ALL the mypy errors --- nf_core/commands_pipelines.py | 2 +- nf_core/components/components_command.py | 10 +- nf_core/components/components_utils.py | 8 +- nf_core/components/create.py | 2 +- nf_core/components/info.py | 66 +++++---- nf_core/components/install.py | 42 +++--- nf_core/components/lint/__init__.py | 38 +++--- nf_core/components/list.py | 10 +- nf_core/components/nfcore_component.py | 51 ++++--- nf_core/components/patch.py | 4 + nf_core/components/remove.py | 19 ++- nf_core/modules/bump_versions.py | 12 +- nf_core/modules/lint/__init__.py | 60 ++++++--- nf_core/modules/lint/environment_yml.py | 6 +- nf_core/modules/lint/main_nf.py | 21 ++- nf_core/modules/lint/meta_yml.py | 6 +- nf_core/modules/modules_differ.py | 16 ++- nf_core/modules/modules_json.py | 126 ++++++++++++------ nf_core/modules/modules_repo.py | 9 +- nf_core/modules/modules_utils.py | 12 +- .../subworkflows/lint/subworkflow_tests.py | 7 +- nf_core/synced_repo.py | 58 +++++--- nf_core/utils.py | 16 ++- 23 files changed, 381 insertions(+), 220 deletions(-) diff --git 
a/nf_core/commands_pipelines.py b/nf_core/commands_pipelines.py index deb1f691a..432a36aae 100644 --- a/nf_core/commands_pipelines.py +++ b/nf_core/commands_pipelines.py @@ -284,7 +284,7 @@ def pipelines_sync(ctx, dir, from_branch, pull_request, github_repository, usern Sync a pipeline [cyan i]TEMPLATE[/] branch with the nf-core template. To keep nf-core pipelines up to date with improvements in the main - template, we use a method of synchronisation that uses a special + template, we use a method of w that uses a special git branch called [cyan i]TEMPLATE[/]. This command updates the [cyan i]TEMPLATE[/] branch with the latest version of diff --git a/nf_core/components/components_command.py b/nf_core/components/components_command.py index bf80b4611..13a6fed33 100644 --- a/nf_core/components/components_command.py +++ b/nf_core/components/components_command.py @@ -33,7 +33,7 @@ def __init__( Initialise the ComponentClass object """ self.component_type = component_type - self.directory = Path(directory) if directory else None + self.directory = Path(directory) self.modules_repo = ModulesRepo(remote_url, branch, no_pull, hide_progress) self.hide_progress = hide_progress self.no_prompts = no_prompts @@ -109,13 +109,13 @@ def has_modules_file(self) -> None: log.info("Creating missing 'module.json' file.") ModulesJson(self.directory).create() - def clear_component_dir(self, component_name: str, component_dir: str) -> bool: + def clear_component_dir(self, component_name: str, component_dir: Union[str, Path]) -> bool: """ Removes all files in the module/subworkflow directory Args: component_name (str): The name of the module/subworkflow - component_dir (str): The path to the module/subworkflow in the module repository + component_dir (str, Path): The path to the module/subworkflow """ @@ -156,7 +156,7 @@ def components_from_repo(self, install_dir: str) -> List[str]: ] def install_component_files( - self, component_name: str, component_version: str, modules_repo: 
ModulesRepo, install_dir: str + self, component_name: str, component_version: str, modules_repo: ModulesRepo, install_dir: Union[str, Path] ) -> bool: """ Installs a module/subworkflow into the given directory @@ -165,7 +165,7 @@ def install_component_files( component_name (str): The name of the module/subworkflow component_version (str): Git SHA for the version of the module/subworkflow to be installed modules_repo (ModulesRepo): A correctly configured ModulesRepo object - install_dir (str): The path to where the module/subworkflow should be installed (should be the 'modules/' or 'subworkflows/' dir of the pipeline) + install_dir (str, Path): The path to where the module/subworkflow should be installed (should be the 'modules/' or 'subworkflows/' dir of the pipeline) Returns: (bool): Whether the operation was successful of not diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index 01650a643..32f6c0fc1 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -1,7 +1,7 @@ import logging import re from pathlib import Path -from typing import List, Optional, Tuple +from typing import List, Optional, Tuple, Union import questionary import rich.prompt @@ -12,7 +12,7 @@ log = logging.getLogger(__name__) -def get_repo_info(directory: str, use_prompt: Optional[bool] = True) -> Tuple[str, Optional[str], str]: +def get_repo_info(directory: Path, use_prompt: Optional[bool] = True) -> Tuple[Path, Optional[str], str]: """ Determine whether this is a pipeline repository or a clone of nf-core/modules @@ -23,7 +23,7 @@ def get_repo_info(directory: str, use_prompt: Optional[bool] = True) -> Tuple[st raise UserWarning(f"Could not find directory: {directory}") # Try to find the root directory - base_dir: str = nf_core.utils.determine_base_dir(directory) + base_dir: Path = nf_core.utils.determine_base_dir(directory) # Figure out the repository type from the .nf-core.yml config file if we can 
config_fn, tools_config = nf_core.utils.load_tools_config(base_dir) @@ -132,7 +132,7 @@ def prompt_component_version_sha( return git_sha -def get_components_to_install(subworkflow_dir: str) -> Tuple[List[str], List[str]]: +def get_components_to_install(subworkflow_dir: Union[str, Path]) -> Tuple[List[str], List[str]]: """ Parse the subworkflow main.nf file to retrieve all imported modules and subworkflows. """ diff --git a/nf_core/components/create.py b/nf_core/components/create.py index 5d6c411bd..532c2a46d 100644 --- a/nf_core/components/create.py +++ b/nf_core/components/create.py @@ -29,7 +29,7 @@ class ComponentCreate(ComponentCommand): def __init__( self, component_type: str, - directory: str = ".", + directory: Path = Path("."), component: str = "", author: Optional[str] = None, process_label: Optional[str] = None, diff --git a/nf_core/components/info.py b/nf_core/components/info.py index 54fc0004d..8597875af 100644 --- a/nf_core/components/info.py +++ b/nf_core/components/info.py @@ -1,6 +1,7 @@ import logging import os from pathlib import Path +from typing import Dict, Optional, Union import questionary import yaml @@ -57,25 +58,26 @@ class ComponentInfo(ComponentCommand): def __init__( self, - component_type, - pipeline_dir, - component_name, - remote_url=None, - branch=None, - no_pull=False, + component_type: str, + pipeline_dir: Union[str, Path], + component_name: str, + remote_url: Optional[str] = None, + branch: Optional[str] = None, + no_pull: bool = False, ): super().__init__(component_type, pipeline_dir, remote_url, branch, no_pull) - self.meta = None - self.local_path = None - self.remote_location = None - self.local = None + self.meta: Optional[Dict] = None + self.local_path: Optional[Path] = None + self.remote_location: Optional[str] = None + self.local: bool = False + self.modules_json: Optional[ModulesJson] = None if self.repo_type == "pipeline": # Check modules directory structure if self.component_type == "modules": 
self.check_modules_structure() # Check modules.json up to date - self.modules_json = ModulesJson(self.dir) + self.modules_json = ModulesJson(self.directory) self.modules_json.check_up_to_date() else: self.modules_json = None @@ -95,6 +97,7 @@ def init_mod_name(self, component): Args: module: str: Module name to check """ + assert self.modules_json is not None # mypy if component is None: self.local = questionary.confirm( f"Is the {self.component_type[:-1]} locally installed?", style=nf_core.utils.nfcore_question_style @@ -103,12 +106,12 @@ def init_mod_name(self, component): if self.repo_type == "modules": components = self.get_components_clone_modules() else: - components = self.modules_json.get_all_components(self.component_type).get( - self.modules_repo.remote_url, {} + all_components = self.modules_json.get_all_components(self.component_type).get( + self.modules_repo.remote_url, [] ) components = [ component if directory == self.modules_repo.repo_path else f"{directory}/{component}" - for directory, component in components + for directory, component in all_components ] if not components: raise UserWarning( @@ -133,11 +136,13 @@ def init_mod_name(self, component): if self.repo_type == "pipeline": # check if the module is locally installed local_paths = self.modules_json.get_all_components(self.component_type).get( - self.modules_repo.remote_url, {} - ) + self.modules_repo.remote_url + ) # type: ignore + if local_paths is None: + raise LookupError(f"No {self.component_type[:-1]} installed from {self.modules_repo.remote_url}") for directory, comp in local_paths: if comp == component: - component_base_path = Path(self.dir, self.component_type) + component_base_path = Path(self.directory, self.component_type) self.local_path = Path(component_base_path, directory, component) break if self.local_path: @@ -166,20 +171,22 @@ def get_local_yaml(self): """Attempt to get the meta.yml file from a locally installed module/subworkflow. 
Returns: - dict or bool: Parsed meta.yml found, False otherwise + Optional[dict]: Parsed meta.yml if found, None otherwise """ + assert self.modules_json is not None # mypy if self.repo_type == "pipeline": # Try to find and load the meta.yml file - component_base_path = Path(self.dir, self.component_type) + component_base_path = Path(self.directory, self.component_type) # Check that we have any modules/subworkflows installed from this repo components = self.modules_json.get_all_components(self.component_type).get(self.modules_repo.remote_url) - component_names = [component for _, component in components] if components is None: raise LookupError(f"No {self.component_type[:-1]} installed from {self.modules_repo.remote_url}") + component_names = [component for _, component in components] + if self.component in component_names: - install_dir = [dir for dir, module in components if module == self.component][0] + install_dir = [directory for directory, module in components if module == self.component][0] comp_dir = Path(component_base_path, install_dir, self.component) meta_fn = Path(comp_dir, "meta.yml") if meta_fn.exists(): @@ -190,7 +197,7 @@ def get_local_yaml(self): log.debug(f"{self.component_type[:-1].title()} '{self.component}' meta.yml not found locally") else: - component_base_path = Path(self.dir, self.component_type, self.org) + component_base_path = Path(self.directory, self.component_type, self.org) if self.component in os.listdir(component_base_path): comp_dir = Path(component_base_path, self.component) meta_fn = Path(comp_dir, "meta.yml") @@ -203,7 +210,7 @@ def get_local_yaml(self): return None - def get_remote_yaml(self): + def get_remote_yaml(self) -> Optional[dict]: """Attempt to get the meta.yml file from a remote repo. 
Returns: @@ -211,11 +218,11 @@ def get_remote_yaml(self): """ # Check if our requested module/subworkflow is there if self.component not in self.modules_repo.get_avail_components(self.component_type): - return False + return None file_contents = self.modules_repo.get_meta_yml(self.component_type, self.component) if file_contents is None: - return False + return None self.remote_location = self.modules_repo.remote_url return yaml.safe_load(file_contents) @@ -242,7 +249,8 @@ def generate_component_info_help(self): "\n" ) ) - + if self.meta is None: + raise UserWarning("No meta.yml file found") if self.meta.get("tools"): tools_strings = [] for tool in self.meta["tools"]: @@ -307,21 +315,21 @@ def generate_component_info_help(self): # Print include statement if self.local_path: - install_folder = Path(self.dir, self.component_type, self.modules_repo.repo_path) + install_folder = Path(self.directory, self.component_type, self.modules_repo.repo_path) component_name = "_".join(self.component.upper().split("/")) renderables.append( Text.from_markup(f"\n [blue]Use the following statement to include this {self.component_type[:-1]}:") ) renderables.append( Syntax( - f"include {{ {component_name} }} from '../{Path(install_folder, self.component).relative_to(self.dir)}/main'", + f"include {{ {component_name} }} from '../{Path(install_folder, self.component).relative_to(self.directory)}/main'", "groovy", theme="ansi_dark", padding=1, ) ) if self.component_type == "subworkflows": - subworkflow_config = Path(install_folder, self.component, "nextflow.config").relative_to(self.dir) + subworkflow_config = Path(install_folder, self.component, "nextflow.config").relative_to(self.directory) if os.path.isfile(subworkflow_config): renderables.append( Text.from_markup("\n [blue]Add the following config statement to use this subworkflow:") diff --git a/nf_core/components/install.py b/nf_core/components/install.py index dddc3f93e..e6c31b3cb 100644 --- a/nf_core/components/install.py +++ 
b/nf_core/components/install.py @@ -1,11 +1,13 @@ import logging import os from pathlib import Path +from typing import List, Optional, Union import questionary from rich.console import Console from rich.syntax import Syntax +import nf_core.components import nf_core.modules.modules_utils import nf_core.utils from nf_core.components.components_command import ComponentCommand @@ -22,26 +24,26 @@ class ComponentInstall(ComponentCommand): def __init__( self, - pipeline_dir, - component_type, - force=False, - prompt=False, - sha=None, - remote_url=None, - branch=None, - no_pull=False, - installed_by=False, + pipeline_dir: Union[str, Path], + component_type: str, + force: bool = False, + prompt: bool = False, + sha: Optional[str] = None, + remote_url: Optional[str] = None, + branch: Optional[str] = None, + no_pull: bool = False, + installed_by: Optional[List[str]] = None, ): super().__init__(component_type, pipeline_dir, remote_url, branch, no_pull) self.force = force self.prompt = prompt self.sha = sha - if installed_by: + if installed_by is not None: self.installed_by = installed_by else: - self.installed_by = self.component_type + self.installed_by = [self.component_type] - def install(self, component, silent=False): + def install(self, component: str, silent: bool = False) -> bool: if self.repo_type == "modules": log.error(f"You cannot install a {component} in a clone of nf-core/modules") return False @@ -67,8 +69,11 @@ def install(self, component, silent=False): # Verify SHA if not self.modules_repo.verify_sha(self.prompt, self.sha): return False + if self.modules_repo is None: + return False # Check and verify component name + component = self.collect_and_verify_name(component, self.modules_repo) if not component: return False @@ -156,19 +161,21 @@ def install_included_components(self, subworkflow_dir): modules_to_install, subworkflows_to_install = get_components_to_install(subworkflow_dir) for s_install in subworkflows_to_install: original_installed = 
self.installed_by - self.installed_by = Path(subworkflow_dir).parts[-1] + self.installed_by = [Path(subworkflow_dir).parts[-1]] self.install(s_install, silent=True) self.installed_by = original_installed for m_install in modules_to_install: original_component_type = self.component_type self.component_type = "modules" original_installed = self.installed_by - self.installed_by = Path(subworkflow_dir).parts[-1] + self.installed_by = [Path(subworkflow_dir).parts[-1]] self.install(m_install, silent=True) self.component_type = original_component_type self.installed_by = original_installed - def collect_and_verify_name(self, component, modules_repo): + def collect_and_verify_name( + self, component: Optional[str], modules_repo: nf_core.modules.modules_repo.ModulesRepo + ) -> str: """ Collect component name. Check that the supplied name is an available module/subworkflow. @@ -180,18 +187,19 @@ def collect_and_verify_name(self, component, modules_repo): style=nf_core.utils.nfcore_question_style, ).unsafe_ask() + if component is None: + return "" + # Check that the supplied name is an available module/subworkflow if component and component not in modules_repo.get_avail_components(self.component_type, commit=self.sha): log.error( f"{self.component_type[:-1].title()} '{component}' not found in list of available {self.component_type}." 
) log.info(f"Use the command 'nf-core {self.component_type} list' to view available software") - return False if not modules_repo.component_exists(component, self.component_type, commit=self.sha): warn_msg = f"{self.component_type[:-1].title()} '{component}' not found in remote '{modules_repo.remote_url}' ({modules_repo.branch})" log.warning(warn_msg) - return False return component diff --git a/nf_core/components/lint/__init__.py b/nf_core/components/lint/__init__.py index 7dd39bd90..2cd59dc48 100644 --- a/nf_core/components/lint/__init__.py +++ b/nf_core/components/lint/__init__.py @@ -7,7 +7,7 @@ import operator import os from pathlib import Path -from typing import List, Optional, Union +from typing import List, Optional, Tuple, Union import rich.box import rich.console @@ -37,12 +37,12 @@ class LintExceptionError(Exception): class LintResult: """An object to hold the results of a lint test""" - def __init__(self, component, lint_test, message, file_path): + def __init__(self, component: NFCoreComponent, lint_test: str, message: str, file_path: Path): self.component = component self.lint_test = lint_test self.message = message self.file_path = file_path - self.component_name = component.component_name + self.component_name: str = component.component_name @rich.repr.auto @@ -73,9 +73,9 @@ def __init__( ) self.fail_warned = fail_warned - self.passed: List[str] = [] - self.warned: List[str] = [] - self.failed: List[str] = [] + self.passed: List[LintResult] = [] + self.warned: List[LintResult] = [] + self.failed: List[LintResult] = [] if self.component_type == "modules": self.lint_tests = self.get_all_module_lint_tests(self.repo_type == "pipeline") else: @@ -88,17 +88,21 @@ def __init__( for repo_url, components in modules_json.get_all_components(self.component_type).items(): if remote_url is not None and remote_url != repo_url: continue - for org, comp in components: - self.all_remote_components.append( - NFCoreComponent( - comp, - repo_url, - Path(self.directory, 
self.component_type, org, comp), - self.repo_type, - Path(self.directory), - self.component_type, - ) + if isinstance(components, str): + raise LookupError( + f"Error parsing modules.json: {components}. " f"Please check the file for errors or try again." ) + org, comp = components + self.all_remote_components.append( + NFCoreComponent( + comp, + repo_url, + Path(self.directory, self.component_type, org, comp), + self.repo_type, + Path(self.directory), + self.component_type, + ) + ) if not self.all_remote_components: raise LookupError( f"No {self.component_type} from {self.modules_repo.remote_url} installed in pipeline." @@ -119,7 +123,7 @@ def __init__( for comp in self.get_local_components() ] self.config = nf_core.utils.fetch_wf_config(Path(self.directory), cache_config=True) - else: + elif self.repo_type == "modules": component_dir = Path( self.directory, self.default_modules_path if self.component_type == "modules" else self.default_subworkflows_path, diff --git a/nf_core/components/list.py b/nf_core/components/list.py index f5f2744e1..b24732e5c 100644 --- a/nf_core/components/list.py +++ b/nf_core/components/list.py @@ -1,6 +1,6 @@ import json import logging -from typing import Dict, List, Optional, Tuple, Union, cast +from typing import Dict, List, Optional, Union, cast import rich.table @@ -87,18 +87,18 @@ def pattern_msg(keywords: List[str]) -> str: return "" # Verify that 'modules.json' is consistent with the installed modules - modules_json: ModulesJson = ModulesJson(self.dir) + modules_json: ModulesJson = ModulesJson(self.directory) modules_json.check_up_to_date() # Filter by keywords - repos_with_comps: Dict[str, List[Tuple[str, str]]] = { + repos_with_comps = { repo_url: [comp for comp in components if all(k in comp[1] for k in keywords)] for repo_url, components in modules_json.get_all_components(self.component_type).items() } # Nothing found if sum(map(len, repos_with_comps)) == 0: - log.info(f"No nf-core {self.component_type} found in 
'{self.dir}'{pattern_msg(keywords)}") + log.info(f"No nf-core {self.component_type} found in '{self.directory}'{pattern_msg(keywords)}") return "" table.add_column("Repository") @@ -160,5 +160,5 @@ def pattern_msg(keywords: List[str]) -> str: f"{pattern_msg(keywords)}:\n" ) else: - log.info(f"{self.component_type.capitalize()} installed in '{self.dir}'{pattern_msg(keywords)}:\n") + log.info(f"{self.component_type.capitalize()} installed in '{self.directory}'{pattern_msg(keywords)}:\n") return table diff --git a/nf_core/components/nfcore_component.py b/nf_core/components/nfcore_component.py index 5d0baf63d..143312222 100644 --- a/nf_core/components/nfcore_component.py +++ b/nf_core/components/nfcore_component.py @@ -5,7 +5,7 @@ import logging import re from pathlib import Path -from typing import Union +from typing import List, Optional, Tuple, Union log = logging.getLogger(__name__) @@ -18,13 +18,13 @@ class NFCoreComponent: def __init__( self, - component_name, - repo_url, - component_dir, - repo_type, - base_dir, - component_type, - remote_component=True, + component_name: str, + repo_url: Optional[str], + component_dir: Path, + repo_type: str, + base_dir: Path, + component_type: str, + remote_component: bool = True, ): """ Initialize the object @@ -46,21 +46,21 @@ def __init__( self.component_dir = component_dir self.repo_type = repo_type self.base_dir = base_dir - self.passed = [] - self.warned = [] - self.failed = [] - self.inputs = [] - self.outputs = [] - self.has_meta = False - self.git_sha = None - self.is_patched = False + self.passed: List[Tuple[str, str, Path]] = [] + self.warned: List[Tuple[str, str, Path]] = [] + self.failed: List[Tuple[str, str, Path]] = [] + self.inputs: List[str] = [] + self.outputs: List[str] = [] + self.has_meta: bool = False + self.git_sha: Optional[str] = None + self.is_patched: bool = False if remote_component: # Initialize the important files - self.main_nf = Path(self.component_dir, "main.nf") - self.meta_yml = 
Path(self.component_dir, "meta.yml") + self.main_nf: Path = Path(self.component_dir, "main.nf") + self.meta_yml: Optional[Path] = Path(self.component_dir, "meta.yml") self.process_name = "" - self.environment_yml = Path(self.component_dir, "environment.yml") + self.environment_yml: Optional[Path] = Path(self.component_dir, "environment.yml") repo_dir = self.component_dir.parts[: self.component_dir.parts.index(self.component_name.split("/")[0])][-1] self.org = repo_dir @@ -79,8 +79,8 @@ def __init__( self.component_name = self.component_dir.stem # These attributes are only used by nf-core modules # so just initialize them to None - self.meta_yml = "" - self.environment_yml = "" + self.meta_yml = None + self.environment_yml = None self.test_dir = None self.test_yml = None self.test_main_nf = None @@ -155,10 +155,10 @@ def _get_included_components_in_chained_tests(self, main_nf_test: Union[Path, st included_components.append(component) return included_components - def get_inputs_from_main_nf(self): + def get_inputs_from_main_nf(self) -> None: """Collect all inputs from the main.nf file.""" - inputs = [] - with open(self.main_nf) as f: + inputs: List[str] = [] + with open(str(self.main_nf)) as f: data = f.read() # get input values from main.nf after "input:", which can be formatted as tuple val(foo) path(bar) or val foo or val bar or path bar or path foo # regex matches: @@ -171,7 +171,6 @@ def get_inputs_from_main_nf(self): # don't match anything inside comments or after "output:" if "input:" not in data: log.debug(f"Could not find any inputs in {self.main_nf}") - return inputs input_data = data.split("input:")[1].split("output:")[0] regex = r"(val|path)\s*(\(([^)]+)\)|\s*([^)\s,]+))" matches = re.finditer(regex, input_data, re.MULTILINE) @@ -187,7 +186,7 @@ def get_inputs_from_main_nf(self): def get_outputs_from_main_nf(self): outputs = [] - with open(self.main_nf) as f: + with open(str(self.main_nf)) as f: data = f.read() # get output values from main.nf after 
"output:". the names are always after "emit:" if "output:" not in data: diff --git a/nf_core/components/patch.py b/nf_core/components/patch.py index 55d574745..5b29c152d 100644 --- a/nf_core/components/patch.py +++ b/nf_core/components/patch.py @@ -30,6 +30,10 @@ def _parameter_checks(self, component): raise UserWarning("The command was not run in a valid pipeline directory.") components = self.modules_json.get_all_components(self.component_type).get(self.modules_repo.remote_url) + if components is None: + raise UserWarning( + f"No {self.component_type[:-1]}s found in the 'modules.json' file for the remote '{self.modules_repo.remote_url}'" + ) component_names = [component for _, component in components] if component is not None and component not in component_names: diff --git a/nf_core/components/remove.py b/nf_core/components/remove.py index 8d884db6c..c2c584391 100644 --- a/nf_core/components/remove.py +++ b/nf_core/components/remove.py @@ -58,10 +58,10 @@ def remove(self, component, removed_by=None, removed_components=None, force=Fals removed_components = [] # Get the module/subworkflow directory - component_dir = Path(self.dir, self.component_type, repo_path, component) + component_dir = Path(self.directory, self.component_type, repo_path, component) # Load the modules.json file - modules_json = ModulesJson(self.dir) + modules_json = ModulesJson(self.directory) modules_json.load() # Verify that the module/subworkflow is actually installed @@ -98,9 +98,16 @@ def remove(self, component, removed_by=None, removed_components=None, force=Fals for file, stmts in include_stmts.items(): renderables = [] for stmt in stmts: + # check that the line number is integer + if not isinstance(stmt["line_number"], int): + log.error( + f"Could not parse line number '{stmt['line_number']}' in '{file}'. Please report this issue." 
+ ) + continue + renderables.append( Syntax( - stmt["line"], + str(stmt["line"]), "groovy", theme="ansi_dark", line_numbers=True, @@ -123,7 +130,7 @@ def remove(self, component, removed_by=None, removed_components=None, force=Fals style=nf_core.utils.nfcore_question_style, ).unsafe_ask(): # add the component back to modules.json - if not ComponentInstall(self.dir, self.component_type, force=True).install( + if not ComponentInstall(self.directory, self.component_type, force=True).install( component, silent=True ): log.warning( @@ -133,7 +140,9 @@ def remove(self, component, removed_by=None, removed_components=None, force=Fals return removed # Remove the component files of all entries removed from modules.json removed = ( - True if self.clear_component_dir(component, Path(self.dir, removed_component_dir)) or removed else False + True + if self.clear_component_dir(component, Path(self.directory, removed_component_dir)) or removed + else False ) removed_components.append(component) diff --git a/nf_core/modules/bump_versions.py b/nf_core/modules/bump_versions.py index 1b94d5910..fb0dc7d50 100644 --- a/nf_core/modules/bump_versions.py +++ b/nf_core/modules/bump_versions.py @@ -76,10 +76,10 @@ def bump_versions( ) # Get list of all modules - _, nfcore_modules = nf_core.modules.modules_utils.get_installed_modules(self.dir) + _, nfcore_modules = nf_core.modules.modules_utils.get_installed_modules(self.directory) # Load the .nf-core.yml config - _, self.tools_config = nf_core.utils.load_tools_config(self.dir) + _, self.tools_config = nf_core.utils.load_tools_config(self.directory) # Prompt for module or all if module is None and not all_modules: @@ -179,7 +179,7 @@ def bump_module_version(self, module: NFCoreComponent) -> bool: except (LookupError, ValueError): self.failed.append( ( - f"Conda version not specified correctly: {module.main_nf.relative_to(self.dir)}", + f"Conda version not specified correctly: {Path(module.main_nf).relative_to(self.directory)}", 
module.component_name, ) ) @@ -245,12 +245,12 @@ def bump_module_version(self, module: NFCoreComponent) -> bool: fh.write(content) # change version in environment.yml - with open(module.environment_yml) as fh: + with open(str(module.environment_yml)) as fh: env_yml = yaml.safe_load(fh) env_yml["dependencies"][0] = re.sub( bioconda_packages[0], f"bioconda::{bioconda_tool_name}={last_ver}", env_yml["dependencies"][0] ) - with open(module.environment_yml, "w") as fh: + with open(str(module.environment_yml), "w") as fh: yaml.dump(env_yml, fh, default_flow_style=False, Dumper=custom_yaml_dumper()) self.updated.append( @@ -272,7 +272,7 @@ def get_bioconda_version(self, module: NFCoreComponent) -> List[str]: # Check whether file exists and load it bioconda_packages = [] try: - with open(module.environment_yml) as fh: + with open(str(module.environment_yml)) as fh: env_yml = yaml.safe_load(fh) bioconda_packages = env_yml.get("dependencies", []) except FileNotFoundError: diff --git a/nf_core/modules/lint/__init__.py b/nf_core/modules/lint/__init__.py index 90d39104d..2b10b4df5 100644 --- a/nf_core/modules/lint/__init__.py +++ b/nf_core/modules/lint/__init__.py @@ -8,18 +8,33 @@ import logging import os +from pathlib import Path +from typing import List, Optional, Union import questionary import rich import rich.progress +import nf_core.components +import nf_core.components.nfcore_component import nf_core.modules.modules_utils import nf_core.utils from nf_core.components.lint import ComponentLint, LintExceptionError, LintResult +from nf_core.components.nfcore_component import NFCoreComponent from nf_core.pipelines.lint_utils import console log = logging.getLogger(__name__) +from .environment_yml import environment_yml +from .main_nf import main_nf +from .meta_yml import meta_yml +from .module_changes import module_changes +from .module_deprecations import module_deprecations +from .module_patch import module_patch +from .module_tests import module_tests +from .module_todos 
import module_todos +from .module_version import module_version + class ModuleLint(ComponentLint): """ @@ -28,25 +43,25 @@ class ModuleLint(ComponentLint): """ # Import lint functions - from .environment_yml import environment_yml # type: ignore[misc] - from .main_nf import main_nf # type: ignore[misc] - from .meta_yml import meta_yml # type: ignore[misc] - from .module_changes import module_changes # type: ignore[misc] - from .module_deprecations import module_deprecations # type: ignore[misc] - from .module_patch import module_patch # type: ignore[misc] - from .module_tests import module_tests # type: ignore[misc] - from .module_todos import module_todos # type: ignore[misc] - from .module_version import module_version # type: ignore[misc] + environment_yml = environment_yml + main_nf = main_nf + meta_yml = meta_yml + module_changes = module_changes + module_deprecations = module_deprecations + module_patch = module_patch + module_tests = module_tests + module_todos = module_todos + module_version = module_version def __init__( self, - directory, - fail_warned=False, - remote_url=None, - branch=None, - no_pull=False, - registry=None, - hide_progress=False, + directory: Union[str, Path], + fail_warned: bool = False, + remote_url: Optional[str] = None, + branch: Optional[str] = None, + no_pull: bool = False, + registry: Optional[str] = None, + hide_progress: bool = False, ): super().__init__( component_type="modules", @@ -155,7 +170,9 @@ def lint( self._print_results(show_passed=show_passed, sort_by=sort_by) self.print_summary() - def lint_modules(self, modules, registry="quay.io", local=False, fix_version=False): + def lint_modules( + self, modules: List[NFCoreComponent], registry: str = "quay.io", local: bool = False, fix_version: bool = False + ) -> None: """ Lint a list of modules @@ -185,7 +202,14 @@ def lint_modules(self, modules, registry="quay.io", local=False, fix_version=Fal progress_bar.update(lint_progress, advance=1, test_name=mod.component_name) 
self.lint_module(mod, progress_bar, registry=registry, local=local, fix_version=fix_version) - def lint_module(self, mod, progress_bar, registry, local=False, fix_version=False): + def lint_module( + self, + mod: NFCoreComponent, + progress_bar: rich.progress.Progress, + registry: str, + local: bool = False, + fix_version: bool = False, + ): """ Perform linting on one module diff --git a/nf_core/modules/lint/environment_yml.py b/nf_core/modules/lint/environment_yml.py index e10ef1761..e34b9d585 100644 --- a/nf_core/modules/lint/environment_yml.py +++ b/nf_core/modules/lint/environment_yml.py @@ -5,7 +5,7 @@ import yaml from jsonschema import exceptions, validators -from nf_core.components.lint import ComponentLint +from nf_core.components.lint import ComponentLint, LintExceptionError from nf_core.components.nfcore_component import NFCoreComponent from nf_core.utils import custom_yaml_dumper @@ -22,8 +22,10 @@ def environment_yml(module_lint_object: ComponentLint, module: NFCoreComponent) """ env_yml = None # load the environment.yml file + if module.environment_yml is None: + raise LintExceptionError("Module does not have an `environment.yml` file") try: - with open(Path(module.component_dir, "environment.yml")) as fh: + with open(str(module.environment_yml)) as fh: env_yml = yaml.safe_load(fh) module.passed.append(("environment_yml_exists", "Module's `environment.yml` exists", module.environment_yml)) diff --git a/nf_core/modules/lint/main_nf.py b/nf_core/modules/lint/main_nf.py index 81308ba5c..985a92fa1 100644 --- a/nf_core/modules/lint/main_nf.py +++ b/nf_core/modules/lint/main_nf.py @@ -6,19 +6,24 @@ import re import sqlite3 from pathlib import Path +from typing import List, Tuple from urllib.parse import urlparse, urlunparse import requests import yaml +from rich.progress import Progress import nf_core import nf_core.modules.modules_utils +from nf_core.components.nfcore_component import NFCoreComponent from nf_core.modules.modules_differ import ModulesDiffer 
log = logging.getLogger(__name__) -def main_nf(module_lint_object, module, fix_version, registry, progress_bar): +def main_nf( + module_lint_object, module: NFCoreComponent, fix_version: bool, registry: str, progress_bar: Progress +) -> Tuple[List[str], List[str]]: """ Lint a ``main.nf`` module file @@ -43,7 +48,7 @@ def main_nf(module_lint_object, module, fix_version, registry, progress_bar): # Check if we have a patch file affecting the 'main.nf' file # otherwise read the lines directly from the module - lines = None + lines: List[str] = [] if module.is_patched: lines = ModulesDiffer.try_apply_patch( module.component_name, @@ -51,8 +56,9 @@ def main_nf(module_lint_object, module, fix_version, registry, progress_bar): module.patch_path, Path(module.component_dir).relative_to(module.base_dir), reverse=True, - ).get("main.nf") - if lines is None: + ).get("main.nf", [""]) + + if not lines: try: # Check whether file exists and load it with open(module.main_nf) as fh: @@ -60,10 +66,13 @@ def main_nf(module_lint_object, module, fix_version, registry, progress_bar): module.passed.append(("main_nf_exists", "Module file exists", module.main_nf)) except FileNotFoundError: module.failed.append(("main_nf_exists", "Module file does not exist", module.main_nf)) - return deprecated_i = ["initOptions", "saveFiles", "getSoftwareName", "getProcessName", "publishDir"] - lines_j = "\n".join(lines) + if lines is not None: + lines_j = "\n".join(lines) + else: + lines_j = "" + for i in deprecated_i: if i in lines_j: module.failed.append( diff --git a/nf_core/modules/lint/meta_yml.py b/nf_core/modules/lint/meta_yml.py index 4c036713c..32110b713 100644 --- a/nf_core/modules/lint/meta_yml.py +++ b/nf_core/modules/lint/meta_yml.py @@ -4,7 +4,7 @@ import yaml from jsonschema import exceptions, validators -from nf_core.components.lint import ComponentLint +from nf_core.components.lint import ComponentLint, LintExceptionError from nf_core.components.nfcore_component import NFCoreComponent from 
nf_core.modules.modules_differ import ModulesDiffer @@ -53,9 +53,11 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None ).get("meta.yml") if lines is not None: meta_yaml = yaml.safe_load("".join(lines)) + if module.meta_yml is None: + raise LintExceptionError("Module does not have a `meta.yml` file") if meta_yaml is None: try: - with open(module.meta_yml) as fh: + with open(str(module.meta_yml)) as fh: meta_yaml = yaml.safe_load(fh) module.passed.append(("meta_yml_exists", "Module `meta.yml` exists", module.meta_yml)) except FileNotFoundError: diff --git a/nf_core/modules/modules_differ.py b/nf_core/modules/modules_differ.py index e79554f2b..36d927f08 100644 --- a/nf_core/modules/modules_differ.py +++ b/nf_core/modules/modules_differ.py @@ -4,6 +4,7 @@ import logging import os from pathlib import Path +from typing import List, Union from rich.console import Console from rich.syntax import Syntax @@ -295,7 +296,7 @@ def print_diff( console.print(Syntax("".join(diff), "diff", theme="ansi_dark", padding=1)) @staticmethod - def per_file_patch(patch_fn): + def per_file_patch(patch_fn: Union[str, Path]) -> dict[str, List[str]]: """ Splits a patch file for several files into one patch per file. @@ -306,12 +307,12 @@ def per_file_patch(patch_fn): dict[str, str]: A dictionary indexed by the filenames with the file patches as values """ - with open(patch_fn) as fh: + with open(str(patch_fn)) as fh: lines = fh.readlines() patches = {} i = 0 - patch_lines = [] + patch_lines: list[str] = [] key = "preamble" while i < len(lines): line = lines[i] @@ -391,12 +392,12 @@ def try_apply_single_patch(file_lines, patch, reverse=False): """ Tries to apply a patch to a modified file. Since the line numbers in the patch does not agree if the file is modified, the old and new - lines in the patch are reconstructed and then we look for the old lines + lines inpatch are reconstructed and then we look for the old lines in the modified file. 
If all hunk in the patch are found in the new file it is updated with the new lines from the patch file. Args: - new_fn (str | Path): Path to the modified file + file_lines ([str]): The lines of the file to be patched patch (str | Path): (Outdated) patch for the file reverse (bool): Apply the patch in reverse @@ -450,7 +451,9 @@ def try_apply_single_patch(file_lines, patch, reverse=False): return patched_new_lines @staticmethod - def try_apply_patch(module, repo_path, patch_path, module_dir, reverse=False): + def try_apply_patch( + module: str, repo_path: Union[str, Path], patch_path: Union[str, Path], module_dir: Path, reverse: bool = False + ) -> dict[str, List[str]]: """ Try applying a full patch file to a module @@ -459,6 +462,7 @@ def try_apply_patch(module, repo_path, patch_path, module_dir, reverse=False): repo_path (str): Name of the repository where the module resides patch_path (str): The absolute path to the patch file to be applied module_dir (Path): The directory containing the module + reverse (bool): Apply the patch in reverse Returns: dict[str, str]: A dictionary with file paths (relative to the pipeline dir) diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 63c356a79..faeb84fc3 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -6,7 +6,7 @@ import shutil import tempfile from pathlib import Path -from typing import Union +from typing import Any, List, Optional, Tuple, Union import git import questionary @@ -153,7 +153,7 @@ def get_pipeline_module_repositories(self, component_type, directory, repos=None # The function might rename some directories, keep track of them renamed_dirs = {} # Check if there are any untracked repositories - dirs_not_covered = self.directory_tree_uncovered(directory, [Path(ModulesRepo(url).repo_path) for url in repos]) + dirs_not_covered = self.dir_tree_uncovered(directory, [Path(ModulesRepo(url).repo_path) for url in repos]) if len(dirs_not_covered) > 0: 
log.info(f"Found custom {component_type[:-1]} repositories when creating 'modules.json'") # Loop until all directories in the base directory are covered by a remote @@ -203,7 +203,7 @@ def get_pipeline_module_repositories(self, component_type, directory, repos=None if component_type not in repos[nrepo_remote]: repos[nrepo_remote][component_type] = {} repos[nrepo_remote][component_type][nrepo_name] = {} - dirs_not_covered = self.directory_tree_uncovered( + dirs_not_covered = self.dir_tree_uncovered( directory, [Path(name) for url in repos for name in repos[url][component_type]] ) @@ -244,7 +244,9 @@ def dir_tree_uncovered(self, components_directory, repos): depth += 1 return dirs_not_covered - def determine_branches_and_shas(self, component_type, install_dir, remote_url, components): + def determine_branches_and_shas( + self, component_type: str, install_dir: Union[str, Path], remote_url: str, components: List[Path] + ) -> dict[Path, dict[str, Any]]: """ Determines what branch and commit sha each module/subworkflow in the pipeline belongs to @@ -265,6 +267,8 @@ def determine_branches_and_shas(self, component_type, install_dir, remote_url, c repo_path = self.modules_dir / install_dir elif component_type == "subworkflows": repo_path = self.subworkflows_dir / install_dir + else: + raise ValueError(f"Unknown component type '{component_type}'") # Get the branches present in the repository, as well as the default branch available_branches = ModulesRepo.get_remote_branches(remote_url) sb_local = [] @@ -282,16 +286,16 @@ def determine_branches_and_shas(self, component_type, install_dir, remote_url, c if patch_file.is_file(): temp_module_dir = self.try_apply_patch_reverse(component, install_dir, patch_file, component_path) correct_commit_sha = self.find_correct_commit_sha( - component_type, component, temp_module_dir, modules_repo + component_type, str(component), temp_module_dir, modules_repo ) else: correct_commit_sha = self.find_correct_commit_sha( - component_type, 
component, component_path, modules_repo + component_type, str(component), component_path, modules_repo ) if correct_commit_sha is None: # Check in the old path correct_commit_sha = self.find_correct_commit_sha( - component_type, component, repo_path / component_type / component, modules_repo + component_type, str(component), repo_path / component_type / component, modules_repo ) if correct_commit_sha is None: log.info( @@ -334,7 +338,7 @@ def determine_branches_and_shas(self, component_type, install_dir, remote_url, c # Clean up the modules/subworkflows we were unable to find the sha for for component in sb_local: log.debug(f"Moving {component_type[:-1]} '{Path(install_dir, component)}' to 'local' directory") - self.move_component_to_local(component_type, component, install_dir) + self.move_component_to_local(component_type, component, str(install_dir)) for component in dead_components: log.debug(f"Removing {component_type[:-1]} {Path(install_dir, component)}'") @@ -342,7 +346,13 @@ def determine_branches_and_shas(self, component_type, install_dir, remote_url, c return repo_entry - def find_correct_commit_sha(self, component_type, component_name, component_path, modules_repo): + def find_correct_commit_sha( + self, + component_type: str, + component_name: Union[str, Path], + component_path: Union[str, Path], + modules_repo: ModulesRepo, + ) -> Optional[str]: """ Returns the SHA for the latest commit where the local files are identical to the remote files Args: @@ -370,24 +380,27 @@ def find_correct_commit_sha(self, component_type, component_name, component_path return commit_sha return None - def move_component_to_local(self, component_type, component, repo_name): + def move_component_to_local(self, component_type: str, component: Union[str, Path], repo_name: str): """ Move a module/subworkflow to the 'local' directory Args: - component (str): The name of the module/subworkflow + component_type (str): The type of component, either 'modules' or 'subworkflows' + 
component (Union[str,Path]): The name of the module/subworkflow repo_name (str): The name of the repository the module resides in """ if component_type == "modules": directory = self.modules_dir elif component_type == "subworkflows": directory = self.subworkflows_dir + else: + raise ValueError(f"Unknown component type '{component_type}'") current_path = directory / repo_name / component local_dir = directory / "local" if not local_dir.exists(): local_dir.mkdir() - to_name = component + to_name = str(component) # Check if there is already a subdirectory with the name while (local_dir / to_name).exists(): # Add a time suffix to the path to make it unique @@ -395,7 +408,7 @@ def move_component_to_local(self, component_type, component, repo_name): to_name += f"-{datetime.datetime.now().strftime('%y%m%d%H%M%S')}" shutil.move(current_path, local_dir / to_name) - def unsynced_components(self): + def unsynced_components(self) -> Tuple[List[Path], List[Path], dict]: """ Compute the difference between the modules/subworkflows in the directory and the modules/subworkflows in the 'modules.json' file. 
This is done by looking at all @@ -406,6 +419,7 @@ def unsynced_components(self): by the modules.json file, and modules/subworkflows in the modules.json where the installation directory is missing """ + assert self.modules_json is not None # mypy # Add all modules from modules.json to missing_installation missing_installation = copy.deepcopy(self.modules_json["repos"]) # Obtain the path of all installed modules @@ -429,14 +443,27 @@ def unsynced_components(self): return untracked_dirs_modules, untracked_dirs_subworkflows, missing_installation - def parse_dirs(self, dirs, missing_installation, component_type): + def parse_dirs(self, dirs: List[Path], missing_installation: dict, component_type: str) -> Tuple[List[Path], dict]: + """ + Parse directories and check if they are tracked in the modules.json file + + Args: + dirs ([ Path ]): List of directories to check + missing_installation (dict): Dictionary with the modules.json entries + component_type (str): The type of component, either 'modules' or 'subworkflows' + + Returns: + (untracked_dirs ([ Path ]), missing_installation (dict)): List of directories that are not tracked + by the modules.json file, and the updated missing_installation dictionary + """ + untracked_dirs = [] for dir_ in dirs: # Check if the module/subworkflows directory exists in modules.json install_dir = dir_.parts[0] - component = str(Path(*dir_.parts[1:])) + component = Path(*dir_.parts[1:]) component_in_file = False - git_url = None + git_url = "" for repo in missing_installation: if component_type in missing_installation[repo]: if install_dir in missing_installation[repo][component_type]: @@ -453,9 +480,7 @@ def parse_dirs(self, dirs, missing_installation, component_type): # Check if the entry has a git sha and branch before removing components_dict = module_repo[component_type][install_dir] if "git_sha" not in components_dict[component] or "branch" not in components_dict[component]: - self.determine_branches_and_shas( - component_type, 
component, git_url, module_repo["base_path"], [component] - ) + self.determine_branches_and_shas(component_type, component, git_url, [component]) # Remove the module/subworkflow from modules/subworkflows without installation module_repo[component_type][install_dir].pop(component) if len(module_repo[component_type][install_dir]) == 0: @@ -470,13 +495,14 @@ def parse_dirs(self, dirs, missing_installation, component_type): return untracked_dirs, missing_installation - def has_git_url_and_modules(self): + def has_git_url_and_modules(self) -> bool: """ Check that all repo entries in the modules.json has a git url and a modules dict entry Returns: (bool): True if they are found for all repos, False otherwise """ + assert self.modules_json is not None # mypy for repo_url, repo_entry in self.modules_json.get("repos", {}).items(): if "modules" not in repo_entry: if "subworkflows" in repo_entry: @@ -538,7 +564,7 @@ def reinstall_repo(self, install_dir, remote_url, module_entries): failed_to_install.append(module) return failed_to_install - def check_up_to_date(self): + def check_up_to_date(self) -> bool: """ Checks whether the modules and subworkflows installed in the directory are consistent with the entries in the 'modules.json' file and vice versa. 
@@ -558,6 +584,8 @@ def check_up_to_date(self): self.load() if not self.has_git_url_and_modules(): raise UserWarning + + assert self.modules_json is not None # mypy # check that all "installed_by" entries are lists and not strings # [these strings come from an older dev version, so this check can probably be removed in a future release] for _, repo_entry in self.modules_json.get("repos", {}).items(): @@ -601,7 +629,7 @@ def check_up_to_date(self): if len(subworkflows_missing_from_modules_json) > 0: dump_modules_json = True self.resolve_missing_from_modules_json(subworkflows_missing_from_modules_json, "subworkflows") - + assert self.modules_json is not None # mypy # If the "installed_by" value is not present for modules/subworkflows, add it. for repo, repo_content in self.modules_json["repos"].items(): for component_type, dir_content in repo_content.items(): @@ -626,8 +654,9 @@ def check_up_to_date(self): if dump_modules_json: self.dump(run_prettier=True) + return True - def load(self): + def load(self) -> None: """ Loads the modules.json file into the variable 'modules_json' @@ -648,14 +677,14 @@ def load(self): def update( self, - component_type, - modules_repo, - component_name, - component_version, - installed_by, - installed_by_log=None, - write_file=True, - ): + component_type: str, + modules_repo: ModulesRepo, + component_name: str, + component_version: str, + installed_by: Optional[List[str]], + installed_by_log: Optional[List[str]] = None, + write_file: bool = True, + ) -> bool: """ Updates the 'module.json' file with new module/subworkflow info @@ -675,9 +704,11 @@ def update( if self.modules_json is None: self.load() + assert self.modules_json is not None # mypy repo_name = modules_repo.repo_path remote_url = modules_repo.remote_url branch = modules_repo.branch + if remote_url not in self.modules_json["repos"]: self.modules_json["repos"][remote_url] = {component_type: {repo_name: {}}} if component_type not in self.modules_json["repos"][remote_url]: @@ 
-757,6 +788,8 @@ def add_patch_entry(self, module_name, repo_url, install_dir, patch_filename, wr """ if self.modules_json is None: self.load() + assert self.modules_json is not None # mypy + if repo_url not in self.modules_json["repos"]: raise LookupError(f"Repo '{repo_url}' not present in 'modules.json'") if module_name not in self.modules_json["repos"][repo_url]["modules"][install_dir]: @@ -768,6 +801,8 @@ def add_patch_entry(self, module_name, repo_url, install_dir, patch_filename, wr def remove_patch_entry(self, module_name, repo_url, install_dir, write_file=True): if self.modules_json is None: self.load() + assert self.modules_json is not None # mypy + try: del self.modules_json["repos"][repo_url]["modules"][install_dir][module_name]["patch"] except KeyError: @@ -789,6 +824,7 @@ def get_patch_fn(self, module_name, repo_url, install_dir): """ if self.modules_json is None: self.load() + assert self.modules_json is not None # mypy path = ( self.modules_json["repos"] .get(repo_url, {}) @@ -845,6 +881,8 @@ def repo_present(self, repo_name): """ if self.modules_json is None: self.load() + assert self.modules_json is not None # mypy + return repo_name in self.modules_json.get("repos", {}) def module_present(self, module_name, repo_url, install_dir): @@ -859,6 +897,7 @@ def module_present(self, module_name, repo_url, install_dir): """ if self.modules_json is None: self.load() + assert self.modules_json is not None # mypy return module_name in self.modules_json.get("repos", {}).get(repo_url, {}).get("modules", {}).get( install_dir, {} ) @@ -872,8 +911,8 @@ def get_modules_json(self) -> dict: """ if self.modules_json is None: self.load() - - return copy.deepcopy(self.modules_json) # type: ignore + assert self.modules_json is not None # mypy + return copy.deepcopy(self.modules_json) def get_component_version(self, component_type, component_name, repo_url, install_dir): """ @@ -889,6 +928,7 @@ def get_component_version(self, component_type, component_name, repo_url, 
instal """ if self.modules_json is None: self.load() + assert self.modules_json is not None # mypy return ( self.modules_json.get("repos", {}) .get(repo_url, {}) @@ -912,6 +952,7 @@ def get_module_version(self, module_name, repo_url, install_dir): """ if self.modules_json is None: self.load() + assert self.modules_json is not None # mypy return ( self.modules_json.get("repos", {}) .get(repo_url, {}) @@ -935,6 +976,7 @@ def get_subworkflow_version(self, subworkflow_name, repo_url, install_dir): """ if self.modules_json is None: self.load() + assert self.modules_json is not None # mypy return ( self.modules_json.get("repos", {}) .get(repo_url, {}) @@ -944,7 +986,7 @@ def get_subworkflow_version(self, subworkflow_name, repo_url, install_dir): .get("git_sha", None) ) - def get_all_components(self, component_type): + def get_all_components(self, component_type: str) -> dict[str, Tuple[(str, str)]]: """ Retrieves all pipeline modules/subworkflows that are reported in the modules.json @@ -954,6 +996,8 @@ def get_all_components(self, component_type): """ if self.modules_json is None: self.load() + assert self.modules_json is not None # mypy + if self.pipeline_components is None: self.pipeline_components = {} for repo, repo_entry in self.modules_json.get("repos", {}).items(): @@ -987,6 +1031,7 @@ def get_dependent_components( if self.modules_json is None: self.load() + assert self.modules_json is not None # mypy component_types = ["modules"] if component_type == "modules" else ["modules", "subworkflows"] # Find all components that have an entry of install by of a given component, recursively call this function for subworkflows for type in component_types: @@ -1016,10 +1061,11 @@ def get_installed_by_entries(self, component_type, name): """ if self.modules_json is None: self.load() + assert self.modules_json is not None # mypy installed_by_entries = {} - for repo_url, repo_entry in self.modules_json.get("repos", {}).items(): + for _, repo_entry in 
self.modules_json.get("repos", {}).items(): if component_type in repo_entry: - for install_dir, components in repo_entry[component_type].items(): + for _, components in repo_entry[component_type].items(): if name in components: installed_by_entries = components[name]["installed_by"] break @@ -1037,6 +1083,7 @@ def get_component_branch(self, component_type, component, repo_url, install_dir) """ if self.modules_json is None: self.load() + assert self.modules_json is not None # mypy branch = ( self.modules_json["repos"] .get(repo_url, {}) @@ -1096,7 +1143,8 @@ def resolve_missing_installation(self, missing_installation, component_type): log.info( f"Was unable to reinstall some {component_type}. Removing 'modules.json' entries: {', '.join(uninstallable_components)}" ) - + if self.modules_json is None: + raise UserWarning("No modules.json file found") for (repo_url, install_dir), component_entries in remove_from_mod_json.items(): for component in component_entries: self.modules_json["repos"][repo_url][component_type][install_dir].pop(component) @@ -1113,7 +1161,7 @@ def resolve_missing_from_modules_json(self, missing_from_modules_json, component log.info( f"Recomputing commit SHAs for {component_type} which were missing from 'modules.json': {', '.join(format_missing)}" ) - + assert self.modules_json is not None # mypy # Get the remotes we are missing tracked_repos = {repo_url: (repo_entry) for repo_url, repo_entry in self.modules_json["repos"].items()} repos, _ = self.get_pipeline_module_repositories(component_type, self.modules_dir, tracked_repos) @@ -1186,7 +1234,7 @@ def recreate_dependencies(self, repo, org, subworkflow): sw_path = Path(self.subworkflows_dir, org, subworkflow) dep_mods, dep_subwfs = get_components_to_install(sw_path) - + assert self.modules_json is not None # mypy for dep_mod in dep_mods: installed_by = self.modules_json["repos"][repo]["modules"][org][dep_mod]["installed_by"] if installed_by == ["modules"]: diff --git 
a/nf_core/modules/modules_repo.py b/nf_core/modules/modules_repo.py index 969492027..b345dfe8b 100644 --- a/nf_core/modules/modules_repo.py +++ b/nf_core/modules/modules_repo.py @@ -1,6 +1,7 @@ import logging import os import shutil +from typing import Optional import git import rich @@ -35,7 +36,13 @@ class ModulesRepo(SyncedRepo): local_repo_statuses = {} no_pull_global = False - def __init__(self, remote_url=None, branch=None, no_pull=False, hide_progress=False): + def __init__( + self, + remote_url: Optional[str] = None, + branch: Optional[str] = None, + no_pull: bool = False, + hide_progress: bool = False, + ): """ Initializes the object and clones the git repository if it is not already present """ diff --git a/nf_core/modules/modules_utils.py b/nf_core/modules/modules_utils.py index 6796de41e..ecfe5f24e 100644 --- a/nf_core/modules/modules_utils.py +++ b/nf_core/modules/modules_utils.py @@ -36,7 +36,7 @@ def repo_full_name_from_remote(remote_url: str) -> str: return path -def get_installed_modules(dir: str, repo_type="modules") -> Tuple[List[str], List[NFCoreComponent]]: +def get_installed_modules(directory: Path, repo_type="modules") -> Tuple[List[str], List[NFCoreComponent]]: """ Make a list of all modules installed in this repository @@ -52,15 +52,15 @@ def get_installed_modules(dir: str, repo_type="modules") -> Tuple[List[str], Lis # initialize lists local_modules: List[str] = [] nfcore_modules_names: List[str] = [] - local_modules_dir: Optional[str] = None - nfcore_modules_dir = os.path.join(dir, "modules", "nf-core") + local_modules_dir: Optional[Path] = None + nfcore_modules_dir = Path(directory, "modules", "nf-core") # Get local modules if repo_type == "pipeline": - local_modules_dir = os.path.join(dir, "modules", "local") + local_modules_dir = Path(directory, "modules", "local") # Filter local modules - if os.path.exists(local_modules_dir): + if local_modules_dir.exists(): local_modules = os.listdir(local_modules_dir) local_modules = sorted([x for x 
in local_modules if x.endswith(".nf")]) @@ -89,7 +89,7 @@ def get_installed_modules(dir: str, repo_type="modules") -> Tuple[List[str], Lis "nf-core/modules", Path(nfcore_modules_dir, m), repo_type=repo_type, - base_dir=Path(dir), + base_dir=directory, component_type="modules", ) for m in nfcore_modules_names diff --git a/nf_core/subworkflows/lint/subworkflow_tests.py b/nf_core/subworkflows/lint/subworkflow_tests.py index 601f351fd..af3933474 100644 --- a/nf_core/subworkflows/lint/subworkflow_tests.py +++ b/nf_core/subworkflows/lint/subworkflow_tests.py @@ -202,9 +202,12 @@ def subworkflow_tests(_, subworkflow: NFCoreComponent): f"subworkflows_{org_alphabet}", ] included_components = [] - if subworkflow.main_nf.is_file(): + if subworkflow.main_nf is not None and Path(subworkflow.main_nf).is_file(): included_components = subworkflow._get_included_components(subworkflow.main_nf) - chained_components_tags = subworkflow._get_included_components_in_chained_tests(subworkflow.nftest_main_nf) + if subworkflow.nftest_main_nf is not None and subworkflow.nftest_main_nf.is_file(): + chained_components_tags = subworkflow._get_included_components_in_chained_tests( + subworkflow.nftest_main_nf + ) log.debug(f"Included components: {included_components}") log.debug(f"Required tags: {required_tags}") log.debug(f"Included components for chained nf-tests: {chained_components_tags}") diff --git a/nf_core/synced_repo.py b/nf_core/synced_repo.py index 4d6a3f6a4..4b69d4af8 100644 --- a/nf_core/synced_repo.py +++ b/nf_core/synced_repo.py @@ -4,7 +4,7 @@ import shutil from configparser import NoOptionError, NoSectionError from pathlib import Path -from typing import Dict +from typing import Dict, Optional, Union import git from git.exc import GitCommandError @@ -51,9 +51,14 @@ def update(self, op_code, cur_count, max_count=None, message=""): """ if not self.progress_bar.tasks[self.tid].started: self.progress_bar.start_task(self.tid) - self.progress_bar.update( - self.tid, total=max_count, 
completed=cur_count, state=f"{cur_count / max_count * 100:.1f}%" - ) + if cur_count is not None and max_count is not None: + cur_count = float(cur_count) + max_count = float(max_count) + state = f"{cur_count / max_count * 100:.1f}%" + else: + state = "Unknown" + + self.progress_bar.update(self.tid, total=max_count, completed=cur_count, state=state) class SyncedRepo: @@ -139,6 +144,9 @@ def __init__(self, remote_url=None, branch=None, no_pull=False, hide_progress=Fa self.avail_module_names = None + def setup_local_repo(self, remote_url, branch, hide_progress): + pass + def verify_sha(self, prompt, sha): """ Verify that 'sha' and 'prompt' arguments are not provided together. @@ -258,7 +266,7 @@ def component_exists(self, component_name, component_type, checkout=True, commit """ return component_name in self.get_avail_components(component_type, checkout=checkout, commit=commit) - def get_component_dir(self, component_name, component_type): + def get_component_dir(self, component_name: str, component_type: str) -> Path: """ Returns the file path of a module/subworkflow directory in the repo. Does not verify that the path exists. 
@@ -269,11 +277,15 @@ def get_component_dir(self, component_name, component_type): component_path (str): The path of the module/subworkflow in the local copy of the repository """ if component_type == "modules": - return os.path.join(self.modules_dir, component_name) + return Path(self.modules_dir, component_name) elif component_type == "subworkflows": - return os.path.join(self.subworkflows_dir, component_name) + return Path(self.subworkflows_dir, component_name) + else: + raise ValueError(f"Invalid component type: {component_type}") - def install_component(self, component_name, install_dir, commit, component_type): + def install_component( + self, component_name: str, install_dir: Union[str, Path], commit: str, component_type: str + ) -> bool: """ Install the module/subworkflow files into a pipeline at the given commit @@ -281,6 +293,7 @@ def install_component(self, component_name, install_dir, commit, component_type) component_name (str): The name of the module/subworkflow install_dir (str): The path where the module/subworkflow should be installed commit (str): The git SHA for the version of the module/subworkflow to be installed + component_type (str): Either 'modules' or 'subworkflows' Returns: (bool): Whether the operation was successful or not @@ -332,6 +345,8 @@ def component_files_identical(self, component_name, base_path, commit, component return files_identical def ensure_git_user_config(self, default_name: str, default_email: str) -> None: + if self.repo is None: + raise ValueError("Repository not initialized") try: with self.repo.config_reader() as git_config: user_name = git_config.get_value("user", "name", default=None) @@ -346,7 +361,7 @@ def ensure_git_user_config(self, default_name: str, default_email: str) -> None: if not user_email: git_config.set_value("user", "email", default_email) - def get_component_git_log(self, component_name, component_type, depth=None): + def get_component_git_log(self, component_name: Union[str, Path], component_type: 
str, depth: Optional[int] = None): """ Fetches the commit history the of requested module/subworkflow since a given date. The default value is not arbitrary - it is the last time the structure of the nf-core/modules repository was had an @@ -358,19 +373,26 @@ def get_component_git_log(self, component_name, component_type, depth=None): Returns: ( dict ): Iterator of commit SHAs and associated (truncated) message """ + + if self.repo is None: + raise ValueError("Repository not initialized") self.checkout_branch() - component_path = os.path.join(component_type, self.repo_path, component_name) + component_path = Path(component_type, self.repo_path, component_name) + commits_new = self.repo.iter_commits(max_count=depth, paths=component_path) - commits_new = [ - {"git_sha": commit.hexsha, "trunc_message": commit.message.partition("\n")[0]} for commit in commits_new - ] + if not commits_new: + raise ValueError(f"Could not find any commits for '{component_name}' in '{self.remote_url}'") + else: + commits_new = [ + {"git_sha": commit.hexsha, "trunc_message": commit.message.splitlines()[0]} for commit in commits_new + ] commits_old = [] if component_type == "modules": # Grab commits also from previous modules structure - component_path = os.path.join("modules", component_name) + component_path = Path("modules", component_name) commits_old = self.repo.iter_commits(max_count=depth, paths=component_path) commits_old = [ - {"git_sha": commit.hexsha, "trunc_message": commit.message.partition("\n")[0]} for commit in commits_old + {"git_sha": commit.hexsha, "trunc_message": commit.message.splitlines()[0]} for commit in commits_old ] commits = iter(commits_new + commits_old) return commits @@ -385,6 +407,8 @@ def sha_exists_on_branch(self, sha): """ Verifies that a given commit sha exists on the branch """ + if self.repo is None: + raise ValueError("Repository not initialized") self.checkout_branch() return sha in (commit.hexsha for commit in self.repo.iter_commits()) @@ -399,10 
+423,12 @@ def get_commit_info(self, sha): Raises: LookupError: If the search for the commit fails """ + if self.repo is None: + raise ValueError("Repository not initialized") self.checkout_branch() for commit in self.repo.iter_commits(): if commit.hexsha == sha: - message = commit.message.partition("\n")[0] + message = commit.message.splitlines()[0] date_obj = commit.committed_datetime date = str(date_obj.date()) return message, date diff --git a/nf_core/utils.py b/nf_core/utils.py index 0cd812cb0..d1e9ccfe9 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -22,11 +22,12 @@ from typing import Dict, Generator, List, Optional, Tuple, Union import git -import prompt_toolkit +import prompt_toolkit.styles import questionary -import requests +import requests.auth import requests_cache import rich +import rich.markup import yaml from packaging.version import Version from rich.live import Live @@ -524,8 +525,9 @@ def __call__(self, r): self.auth_mode = f"gh CLI config: {gh_cli_config['github.com']['user']}" except Exception: ex_type, ex_value, _ = sys.exc_info() - output = rich.markup.escape(f"{ex_type.__name__}: {ex_value}") - log.debug(f"Couldn't auto-auth with GitHub CLI auth from '{gh_cli_config_fn}': [red]{output}") + if ex_type is not None: + output = rich.markup.escape(f"{ex_type.__name__}: {ex_value}") + log.debug(f"Couldn't auto-auth with GitHub CLI auth from '{gh_cli_config_fn}': [red]{output}") # Default auth if we have a GitHub Token (eg. 
GitHub Actions CI) if os.environ.get("GITHUB_TOKEN") is not None and self.auth is None: @@ -804,6 +806,8 @@ def get_tag_date(tag_date): singularity_image = all_singularity[k]["image"] current_date = date docker_image_name = docker_image["image_name"].lstrip("quay.io/") + if singularity_image is None: + raise LookupError(f"Could not find singularity container for {package}") return docker_image_name, singularity_image["image_name"] except TypeError: raise LookupError(f"Could not find docker or singularity container for {package}") @@ -1072,7 +1076,7 @@ def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Path, dict]: return config_fn, tools_config -def determine_base_dir(directory="."): +def determine_base_dir(directory: Union[Path, str] = ".") -> Path: base_dir = start_dir = Path(directory).absolute() # Only iterate up the tree if the start dir doesn't have a config while not get_first_available_path(base_dir, CONFIG_PATHS) and base_dir != base_dir.parent: @@ -1080,7 +1084,7 @@ def determine_base_dir(directory="."): config_fn = get_first_available_path(base_dir, CONFIG_PATHS) if config_fn: break - return directory if (base_dir == start_dir or str(base_dir) == base_dir.root) else base_dir + return Path(directory) if (base_dir == start_dir or str(base_dir) == base_dir.root) else base_dir def get_first_available_path(directory: Union[Path, str], paths: List[str]) -> Union[Path, None]: From 69d59a81c3b23a583857c1f2d03db04cdd3f0da9 Mon Sep 17 00:00:00 2001 From: LaurenceKuhl Date: Fri, 19 Jul 2024 08:44:03 +0200 Subject: [PATCH 338/737] Update nf_core/__main__.py MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Júlia Mir Pedrol --- nf_core/__main__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index cb99bdedc..3da153ee1 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -2188,7 +2188,7 @@ def command_create(ctx, name, 
description, author, version, force, outdir, templ log.warning( "The `[magenta]nf-core create[/]` command is deprecated. Use `[magenta]nf-core pipelines create[/]` instead." ) - pipelines_create(ctx, name, description, author, force, outdir, template_yaml, organisation) + pipelines_create(ctx, name, description, author, version, force, outdir, template_yaml, organisation) # Main script is being run - launch the CLI From 6a4ff6133f7efbe74f321c20436635d778166d14 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Matthias=20H=C3=B6rtenhuber?= Date: Mon, 22 Jul 2024 09:40:06 +0200 Subject: [PATCH 339/737] Update .pre-commit-config.yaml --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 872bd61e0..fc2011cb9 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.5.2 + rev: v0.5.4 hooks: - id: ruff # linter args: [--fix, --exit-non-zero-on-fix] # sort imports and fix From 61898dc14663fa3e81f78fdd5a62756217de7dc5 Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 22 Jul 2024 11:11:55 +0200 Subject: [PATCH 340/737] fix rich imports, remove boolean type for `installed_by`, fix more occurences of `dir` --- nf_core/components/create.py | 1 + nf_core/components/patch.py | 10 +-- nf_core/modules/install.py | 2 +- nf_core/modules/modules_repo.py | 1 + nf_core/modules/patch.py | 2 +- nf_core/pipelines/create/create.py | 6 +- nf_core/subworkflows/install.py | 2 +- nf_core/subworkflows/lint/__init__.py | 8 +-- .../subworkflows/lint/subworkflow_version.py | 2 +- tests/modules/test_install.py | 8 +-- tests/modules/test_lint.py | 62 +++++++++---------- tests/modules/test_remove.py | 8 +-- tests/subworkflows/test_install.py | 45 ++++++-------- tests/subworkflows/test_lint.py | 40 ++++++------ tests/subworkflows/test_remove.py | 20 +++--- 15 files changed, 109 insertions(+), 108 deletions(-) diff --git 
a/nf_core/components/create.py b/nf_core/components/create.py index 532c2a46d..413790099 100644 --- a/nf_core/components/create.py +++ b/nf_core/components/create.py @@ -14,6 +14,7 @@ import jinja2 import questionary import rich +import rich.prompt import yaml from packaging.version import parse as parse_version diff --git a/nf_core/components/patch.py b/nf_core/components/patch.py index 5b29c152d..0332a1fe4 100644 --- a/nf_core/components/patch.py +++ b/nf_core/components/patch.py @@ -15,7 +15,7 @@ class ComponentPatch(ComponentCommand): - def __init__(self, pipeline_dir, component_type, remote_url=None, branch=None, no_pull=False, installed_by=False): + def __init__(self, pipeline_dir, component_type, remote_url=None, branch=None, no_pull=False, installed_by=None): super().__init__(component_type, pipeline_dir, remote_url, branch, no_pull) self.modules_json = ModulesJson(pipeline_dir) @@ -88,8 +88,8 @@ def patch(self, component=None): patch_filename = f"{component.replace('/', '-')}.diff" component_relpath = Path(self.component_type, component_dir, component) patch_relpath = Path(component_relpath, patch_filename) - component_current_dir = Path(self.dir, component_relpath) - patch_path = Path(self.dir, patch_relpath) + component_current_dir = Path(self.directory, component_relpath) + patch_path = Path(self.directory, patch_relpath) if patch_path.exists(): remove = questionary.confirm( @@ -189,8 +189,8 @@ def remove(self, component): patch_filename = f"{component.replace('/', '-')}.diff" component_relpath = Path(self.component_type, component_dir, component) patch_relpath = Path(component_relpath, patch_filename) - patch_path = Path(self.dir, patch_relpath) - component_path = Path(self.dir, component_relpath) + patch_path = Path(self.directory, patch_relpath) + component_path = Path(self.directory, component_relpath) if patch_path.exists(): remove = questionary.confirm( diff --git a/nf_core/modules/install.py b/nf_core/modules/install.py index e1755cee9..7055abe49 
100644 --- a/nf_core/modules/install.py +++ b/nf_core/modules/install.py @@ -11,7 +11,7 @@ def __init__( remote_url=None, branch=None, no_pull=False, - installed_by=False, + installed_by=None, ): super().__init__( pipeline_dir, diff --git a/nf_core/modules/modules_repo.py b/nf_core/modules/modules_repo.py index b345dfe8b..a979ae45b 100644 --- a/nf_core/modules/modules_repo.py +++ b/nf_core/modules/modules_repo.py @@ -6,6 +6,7 @@ import git import rich import rich.progress +import rich.prompt from git.exc import GitCommandError, InvalidGitRepositoryError import nf_core.modules.modules_json diff --git a/nf_core/modules/patch.py b/nf_core/modules/patch.py index b4e86f2d1..bbad0d428 100644 --- a/nf_core/modules/patch.py +++ b/nf_core/modules/patch.py @@ -6,5 +6,5 @@ class ModulePatch(ComponentPatch): - def __init__(self, pipeline_dir, remote_url=None, branch=None, no_pull=False, installed_by=False): + def __init__(self, pipeline_dir, remote_url=None, branch=None, no_pull=False, installed_by=None): super().__init__(pipeline_dir, "modules", remote_url, branch, no_pull, installed_by) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index c5af95669..aecba9423 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -512,12 +512,14 @@ def fix_linting(self): def make_pipeline_logo(self): """Fetch a logo for the new pipeline from the nf-core website""" email_logo_path = Path(self.outdir) / "assets" - create_logo(text=self.jinja_params["short_name"], dir=email_logo_path, theme="light", force=bool(self.force)) + create_logo( + text=self.jinja_params["short_name"], directory=email_logo_path, theme="light", force=bool(self.force) + ) for theme in ["dark", "light"]: readme_logo_path = Path(self.outdir) / "docs" / "images" create_logo( text=self.jinja_params["short_name"], - dir=readme_logo_path, + directory=readme_logo_path, width=600, theme=theme, force=bool(self.force), diff --git 
a/nf_core/subworkflows/install.py b/nf_core/subworkflows/install.py index 6c5cfb12b..70a6b0afa 100644 --- a/nf_core/subworkflows/install.py +++ b/nf_core/subworkflows/install.py @@ -11,7 +11,7 @@ def __init__( remote_url=None, branch=None, no_pull=False, - installed_by=False, + installed_by=None, ): super().__init__( pipeline_dir, diff --git a/nf_core/subworkflows/lint/__init__.py b/nf_core/subworkflows/lint/__init__.py index a3cacf295..a07371088 100644 --- a/nf_core/subworkflows/lint/__init__.py +++ b/nf_core/subworkflows/lint/__init__.py @@ -36,7 +36,7 @@ class SubworkflowLint(ComponentLint): def __init__( self, - dir, + directory, fail_warned=False, remote_url=None, branch=None, @@ -46,7 +46,7 @@ def __init__( ): super().__init__( component_type="subworkflows", - dir=dir, + directory=directory, fail_warned=fail_warned, remote_url=remote_url, branch=branch, @@ -122,9 +122,9 @@ def lint( remote_subworkflows = self.all_remote_components if self.repo_type == "modules": - log.info(f"Linting modules repo: [magenta]'{self.dir}'") + log.info(f"Linting modules repo: [magenta]'{self.directory}'") else: - log.info(f"Linting pipeline: [magenta]'{self.dir}'") + log.info(f"Linting pipeline: [magenta]'{self.directory}'") if subworkflow: log.info(f"Linting subworkflow: [magenta]'{subworkflow}'") diff --git a/nf_core/subworkflows/lint/subworkflow_version.py b/nf_core/subworkflows/lint/subworkflow_version.py index 5801abd88..1acb95e77 100644 --- a/nf_core/subworkflows/lint/subworkflow_version.py +++ b/nf_core/subworkflows/lint/subworkflow_version.py @@ -21,7 +21,7 @@ def subworkflow_version(subworkflow_lint_object, subworkflow): newer version of the subworkflow available. 
""" - modules_json_path = Path(subworkflow_lint_object.dir, "modules.json") + modules_json_path = Path(subworkflow_lint_object.directory, "modules.json") # Verify that a git_sha exists in the `modules.json` file for this module version = subworkflow_lint_object.modules_json.get_subworkflow_version( subworkflow.component_name, subworkflow.repo_url, subworkflow.org diff --git a/tests/modules/test_install.py b/tests/modules/test_install.py index b90f01ee6..cfdaac47e 100644 --- a/tests/modules/test_install.py +++ b/tests/modules/test_install.py @@ -19,14 +19,14 @@ class TestModulesCreate(TestModules): def test_modules_install_nopipeline(self): """Test installing a module - no pipeline given""" - self.mods_install.dir = None + self.mods_install.directory = None assert self.mods_install.install("foo") is False @with_temporary_folder def test_modules_install_emptypipeline(self, tmpdir): """Test installing a module - empty dir given""" os.mkdir(os.path.join(tmpdir, "nf-core-pipe")) - self.mods_install.dir = os.path.join(tmpdir, "nf-core-pipe") + self.mods_install.directory = os.path.join(tmpdir, "nf-core-pipe") with pytest.raises(UserWarning) as excinfo: self.mods_install.install("foo") assert "Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value) @@ -38,8 +38,8 @@ def test_modules_install_nomodule(self): def test_modules_install_trimgalore(self): """Test installing a module - TrimGalore!""" assert self.mods_install.install("trimgalore") is not False - assert self.mods_install.dir is not None - module_path = Path(self.mods_install.dir, "modules", "nf-core", "trimgalore") + assert self.mods_install.directory is not None + module_path = Path(self.mods_install.directory, "modules", "nf-core", "trimgalore") assert os.path.exists(module_path) def test_modules_install_trimgalore_twice(self): diff --git a/tests/modules/test_lint.py b/tests/modules/test_lint.py index 6448916ac..fc9871db2 100644 --- a/tests/modules/test_lint.py +++ b/tests/modules/test_lint.py 
@@ -180,7 +180,7 @@ def _setup_patch(self, pipeline_dir: Union[str, Path], modify_module: bool): def test_modules_lint_trimgalore(self): """Test linting the TrimGalore! module""" self.mods_install.install("trimgalore") - module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir) + module_lint = nf_core.modules.ModuleLint(directory=self.pipeline_dir) module_lint.lint(print_results=False, module="trimgalore") assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 @@ -191,11 +191,11 @@ def test_modules_lint_empty(self): self.mods_remove.remove("fastqc", force=True) self.mods_remove.remove("multiqc", force=True) with pytest.raises(LookupError): - nf_core.modules.ModuleLint(dir=self.pipeline_dir) + nf_core.modules.ModuleLint(directory=self.pipeline_dir) def test_modules_lint_new_modules(self): """lint a new module""" - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, all_modules=True) assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 @@ -206,13 +206,13 @@ def test_modules_lint_no_gitlab(self): self.mods_remove.remove("fastqc", force=True) self.mods_remove.remove("multiqc", force=True) with pytest.raises(LookupError): - nf_core.modules.ModuleLint(dir=self.pipeline_dir, remote_url=GITLAB_URL) + nf_core.modules.ModuleLint(directory=self.pipeline_dir, remote_url=GITLAB_URL) def test_modules_lint_gitlab_modules(self): """Lint modules from a different remote""" self.mods_install_gitlab.install("fastqc") self.mods_install_gitlab.install("multiqc") - module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir, remote_url=GITLAB_URL) + module_lint = nf_core.modules.ModuleLint(directory=self.pipeline_dir, remote_url=GITLAB_URL) module_lint.lint(print_results=False, 
all_modules=True) assert len(module_lint.failed) == 2 assert len(module_lint.passed) > 0 @@ -221,7 +221,7 @@ def test_modules_lint_gitlab_modules(self): def test_modules_lint_multiple_remotes(self): """Lint modules from a different remote""" self.mods_install_gitlab.install("multiqc") - module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir, remote_url=GITLAB_URL) + module_lint = nf_core.modules.ModuleLint(directory=self.pipeline_dir, remote_url=GITLAB_URL) module_lint.lint(print_results=False, all_modules=True) assert len(module_lint.failed) == 1 assert len(module_lint.passed) > 0 @@ -229,14 +229,14 @@ def test_modules_lint_multiple_remotes(self): def test_modules_lint_registry(self): """Test linting the samtools module and alternative registry""" - self.mods_install.install("samtools") - module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir, registry="public.ecr.aws") - module_lint.lint(print_results=False, module="samtools") + assert self.mods_install.install("samtools/sort") + module_lint = nf_core.modules.ModuleLint(directory=self.pipeline_dir, registry="public.ecr.aws") + module_lint.lint(print_results=False, module="samtools/sort") assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 assert len(module_lint.warned) >= 0 - module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir) - module_lint.lint(print_results=False, module="samtools") + module_lint = nf_core.modules.ModuleLint(directory=self.pipeline_dir) + module_lint.lint(print_results=False, module="samtools/sort") assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 assert len(module_lint.warned) >= 0 @@ -255,7 +255,7 @@ def test_modules_lint_patched_modules(self): # to avoid error from try_apply_patch() during linting with set_wd(self.pipeline_dir): module_lint = nf_core.modules.ModuleLint( - 
dir=self.pipeline_dir, + directory=self.pipeline_dir, remote_url=GITLAB_URL, branch=PATCH_BRANCH, hide_progress=True, @@ -298,7 +298,7 @@ def test_modules_lint_check_url(self): def test_modules_lint_snapshot_file(self): """Test linting a module with a snapshot file""" - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 @@ -315,7 +315,7 @@ def test_modules_lint_snapshot_file_missing_fail(self): "tests", "main.nf.test.snap", ).unlink() - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") Path( self.nfcore_modules, @@ -359,7 +359,7 @@ def test_modules_lint_snapshot_file_not_needed(self): "w", ) as fh: fh.write(new_content) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 @@ -377,7 +377,7 @@ def test_modules_environment_yml_file_doesnt_exists(self): "environment.yml.bak", ) ) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") Path( self.nfcore_modules, @@ -403,7 +403,7 @@ def test_modules_environment_yml_file_doesnt_exists(self): def test_modules_environment_yml_file_sorted_correctly(self): """Test linting a module with a correctly sorted environment.yml file""" - module_lint = 
nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 @@ -438,7 +438,7 @@ def test_modules_environment_yml_file_sorted_incorrectly(self): "w", ) as fh: fh.write(yaml_content) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") # we fix the sorting on the fly, so this should pass assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" @@ -471,7 +471,7 @@ def test_modules_environment_yml_file_not_array(self): "w", ) as fh: fh.write(yaml.dump(yaml_content)) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 @@ -504,7 +504,7 @@ def test_modules_environment_yml_file_name_mismatch(self): "w", ) as fh: fh.write(yaml.dump(yaml_content)) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") # reset changes yaml_content["name"] = "bpipe_test" @@ -536,7 +536,7 @@ def test_modules_meta_yml_incorrect_licence_field(self): "w", ) as fh: fh.write(yaml.dump(meta_yml)) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") # reset changes @@ -559,7 +559,7 
@@ def test_modules_meta_yml_input_mismatch(self): main_nf_new = main_nf.replace("path bam", "path bai") with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: fh.write(main_nf_new) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: fh.write(main_nf) @@ -578,7 +578,7 @@ def test_modules_meta_yml_output_mismatch(self): main_nf_new = main_nf.replace("emit: bam", "emit: bai") with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: fh.write(main_nf_new) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: fh.write(main_nf) @@ -625,7 +625,7 @@ def test_modules_meta_yml_incorrect_name(self): "w", ) as fh: fh.write(yaml.dump(environment_yml)) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") # reset changes @@ -659,7 +659,7 @@ def test_modules_missing_test_dir(self): Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests").rename( Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests.bak") ) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests.bak").rename( Path(self.nfcore_modules, "modules", 
"nf-core", "bpipe", "test", "tests") @@ -690,7 +690,7 @@ def test_modules_missing_test_main_nf(self): "main.nf.test.bak", ) ) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") Path( self.nfcore_modules, @@ -719,7 +719,7 @@ def test_modules_missing_test_main_nf(self): def test_modules_unused_pytest_files(self): """Test linting a nf-test module with files still present in `tests/modules/`""" Path(self.nfcore_modules, "tests", "modules", "bpipe", "test").mkdir(parents=True, exist_ok=True) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") Path(self.nfcore_modules, "tests", "modules", "bpipe", "test").rmdir() assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" @@ -735,7 +735,7 @@ def test_nftest_failing_linting(self): self.nfcore_modules = Path(tmp_dir, "modules-test") Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_NFTEST_BRANCH) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="kallisto/quant") assert len(module_lint.failed) == 3, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" @@ -774,7 +774,7 @@ def test_modules_absent_version(self): "w", ) as fh: fh.write(new_content) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") with open( Path( @@ -812,7 +812,7 @@ def test_modules_empty_file_in_snapshot(self): with open(snap_file, "w") as fh: json.dump(snap, fh) - module_lint = 
nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 @@ -841,7 +841,7 @@ def test_modules_empty_file_in_stub_snapshot(self): with open(snap_file, "w") as fh: json.dump(snap, fh) - module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 diff --git a/tests/modules/test_remove.py b/tests/modules/test_remove.py index a80c8b098..2caece7ce 100644 --- a/tests/modules/test_remove.py +++ b/tests/modules/test_remove.py @@ -8,8 +8,8 @@ class TestModulesRemove(TestModules): def test_modules_remove_trimgalore(self): """Test removing TrimGalore! 
module after installing it""" self.mods_install.install("trimgalore") - assert self.mods_install.dir is not None - module_path = Path(self.mods_install.dir, "modules", "nf-core", "modules", "trimgalore") + assert self.mods_install.directory is not None + module_path = Path(self.mods_install.directory, "modules", "nf-core", "modules", "trimgalore") assert self.mods_remove.remove("trimgalore") assert os.path.exists(module_path) is False @@ -20,7 +20,7 @@ def test_modules_remove_trimgalore_uninstalled(self): def test_modules_remove_multiqc_from_gitlab(self): """Test removing multiqc module after installing it from an alternative source""" self.mods_install_gitlab.install("multiqc") - assert self.mods_install.dir is not None - module_path = Path(self.mods_install_gitlab.dir, "modules", "nf-core-test", "multiqc") + assert self.mods_install.directory is not None + module_path = Path(self.mods_install_gitlab.directory, "modules", "nf-core-test", "multiqc") assert self.mods_remove_gitlab.remove("multiqc", force=True) assert os.path.exists(module_path) is False diff --git a/tests/subworkflows/test_install.py b/tests/subworkflows/test_install.py index e0b2fc1ab..af1ad9241 100644 --- a/tests/subworkflows/test_install.py +++ b/tests/subworkflows/test_install.py @@ -1,4 +1,3 @@ -import os from pathlib import Path import pytest @@ -20,8 +19,8 @@ class TestSubworkflowsInstall(TestSubworkflows): def test_subworkflow_install_nopipeline(self): """Test installing a subworkflow - no pipeline given""" - assert self.subworkflow_install.dir is not None - self.subworkflow_install.dir = "" + assert self.subworkflow_install.directory is not None + self.subworkflow_install.directory = Path("non_existent_dir") assert self.subworkflow_install.install("foo") is False @with_temporary_folder @@ -29,7 +28,7 @@ def test_subworkflows_install_emptypipeline(self, tmpdir): """Test installing a subworkflow - empty dir given""" Path(tmpdir, "nf-core-pipe").mkdir(exist_ok=True) - 
self.subworkflow_install.dir = os.path.join(tmpdir, "nf-core-pipe") + self.subworkflow_install.directory = Path(tmpdir, "nf-core-pipe") with pytest.raises(UserWarning) as excinfo: self.subworkflow_install.install("foo") assert "Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value) @@ -41,28 +40,22 @@ def test_subworkflows_install_nosubworkflow(self): def test_subworkflows_install_bam_sort_stats_samtools(self): """Test installing a subworkflow - bam_sort_stats_samtools""" assert self.subworkflow_install.install("bam_sort_stats_samtools") is not False - subworkflow_path = os.path.join( - self.subworkflow_install.dir, "subworkflows", "nf-core", "bam_sort_stats_samtools" + subworkflow_path = Path( + self.subworkflow_install.directory, "subworkflows", "nf-core", "bam_sort_stats_samtools" ) - sub_subworkflow_path = os.path.join( - self.subworkflow_install.dir, "subworkflows", "nf-core", "bam_stats_samtools" - ) - samtools_index_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index") - samtools_sort_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "sort") - samtools_stats_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "stats") - samtools_idxstats_path = os.path.join( - self.subworkflow_install.dir, "modules", "nf-core", "samtools", "idxstats" - ) - samtools_flagstat_path = os.path.join( - self.subworkflow_install.dir, "modules", "nf-core", "samtools", "flagstat" - ) - assert os.path.exists(subworkflow_path) - assert os.path.exists(sub_subworkflow_path) - assert os.path.exists(samtools_index_path) - assert os.path.exists(samtools_sort_path) - assert os.path.exists(samtools_stats_path) - assert os.path.exists(samtools_idxstats_path) - assert os.path.exists(samtools_flagstat_path) + sub_subworkflow_path = Path(self.subworkflow_install.directory, "subworkflows", "nf-core", "bam_stats_samtools") + samtools_index_path = 
Path(self.subworkflow_install.directory, "modules", "nf-core", "samtools", "index") + samtools_sort_path = Path(self.subworkflow_install.directory, "modules", "nf-core", "samtools", "sort") + samtools_stats_path = Path(self.subworkflow_install.directory, "modules", "nf-core", "samtools", "stats") + samtools_idxstats_path = Path(self.subworkflow_install.directory, "modules", "nf-core", "samtools", "idxstats") + samtools_flagstat_path = Path(self.subworkflow_install.directory, "modules", "nf-core", "samtools", "flagstat") + assert subworkflow_path.exists() + assert sub_subworkflow_path.exists() + assert samtools_index_path.exists() + assert samtools_sort_path.exists() + assert samtools_stats_path.exists() + assert samtools_idxstats_path.exists() + assert samtools_flagstat_path.exists() def test_subworkflows_install_bam_sort_stats_samtools_twice(self): """Test installing a subworkflow - bam_sort_stats_samtools already there""" @@ -87,7 +80,7 @@ def test_subworkflows_install_different_branch_fail(self): def test_subworkflows_install_tracking(self): """Test installing a subworkflow and finding the correct entries in installed_by section of modules.json""" - self.subworkflow_install.install("bam_sort_stats_samtools") + assert self.subworkflow_install.install("bam_sort_stats_samtools") # Verify that the installed_by entry was added correctly modules_json = ModulesJson(self.pipeline_dir) diff --git a/tests/subworkflows/test_lint.py b/tests/subworkflows/test_lint.py index f8c9bedbf..38bcc2b2c 100644 --- a/tests/subworkflows/test_lint.py +++ b/tests/subworkflows/test_lint.py @@ -14,7 +14,7 @@ class TestSubworkflowsLint(TestSubworkflows): def test_subworkflows_lint(self): """Test linting the fastq_align_bowtie2 subworkflow""" self.subworkflow_install.install("fastq_align_bowtie2") - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir) + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.pipeline_dir) 
subworkflow_lint.lint(print_results=False, subworkflow="fastq_align_bowtie2") assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" assert len(subworkflow_lint.passed) > 0 @@ -26,11 +26,11 @@ def test_subworkflows_lint_empty(self): self.subworkflow_remove.remove("utils_nfcore_pipeline", force=True) self.subworkflow_remove.remove("utils_nfvalidation_plugin", force=True) with pytest.raises(LookupError): - nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir) + nf_core.subworkflows.SubworkflowLint(directory=self.pipeline_dir) def test_subworkflows_lint_new_subworkflow(self): """lint a new subworkflow""" - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.nfcore_modules) subworkflow_lint.lint(print_results=True, all_subworkflows=True) assert len(subworkflow_lint.failed) == 0 @@ -40,13 +40,13 @@ def test_subworkflows_lint_new_subworkflow(self): def test_subworkflows_lint_no_gitlab(self): """Test linting a pipeline with no subworkflows installed""" with pytest.raises(LookupError): - nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir, remote_url=GITLAB_URL) + nf_core.subworkflows.SubworkflowLint(directory=self.pipeline_dir, remote_url=GITLAB_URL) def test_subworkflows_lint_gitlab_subworkflows(self): """Lint subworkflows from a different remote""" self.subworkflow_install_gitlab.install("bam_stats_samtools") subworkflow_lint = nf_core.subworkflows.SubworkflowLint( - dir=self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH + directory=self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH ) subworkflow_lint.lint(print_results=False, all_subworkflows=True) assert len(subworkflow_lint.failed) == 0 @@ -58,7 +58,7 @@ def test_subworkflows_lint_multiple_remotes(self): self.subworkflow_install_gitlab.install("bam_stats_samtools") 
self.subworkflow_install.install("fastq_align_bowtie2") subworkflow_lint = nf_core.subworkflows.SubworkflowLint( - dir=self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH + directory=self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH ) subworkflow_lint.lint(print_results=False, all_subworkflows=True) assert len(subworkflow_lint.failed) == 0 @@ -67,7 +67,7 @@ def test_subworkflows_lint_multiple_remotes(self): def test_subworkflows_lint_snapshot_file(self): """Test linting a subworkflow with a snapshot file""" - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.nfcore_modules) subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" assert len(subworkflow_lint.passed) > 0 @@ -83,7 +83,7 @@ def test_subworkflows_lint_snapshot_file_missing_fail(self): "tests", "main.nf.test.snap", ).unlink() - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.nfcore_modules) subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") Path( self.nfcore_modules, @@ -132,7 +132,7 @@ def test_subworkflows_lint_snapshot_file_not_needed(self): "tests", "main.nf.test.snap", ).unlink() - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.nfcore_modules) subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") Path( self.nfcore_modules, @@ -179,7 +179,7 @@ def test_subworkflows_lint_less_than_two_modules_warning(self): "w", ) as fh: fh.write(new_content) - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir) + subworkflow_lint = 
nf_core.subworkflows.SubworkflowLint(directory=self.pipeline_dir) subworkflow_lint.lint(print_results=False, subworkflow="bam_stats_samtools") assert len(subworkflow_lint.failed) >= 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" assert len(subworkflow_lint.passed) > 0 @@ -218,7 +218,7 @@ def test_subworkflows_lint_include_multiple_alias(self): ) as fh: fh.write(new_content) - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir) + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.pipeline_dir) subworkflow_lint.lint(print_results=False, subworkflow="bam_stats_samtools") assert len(subworkflow_lint.failed) >= 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" assert len(subworkflow_lint.passed) > 0 @@ -265,7 +265,7 @@ def test_subworkflows_lint_capitalization_fail(self): "w", ) as fh: fh.write(new_content) - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir) + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.pipeline_dir) subworkflow_lint.lint(print_results=False, subworkflow="bam_stats_samtools") assert len(subworkflow_lint.failed) >= 1, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" assert len(subworkflow_lint.passed) > 0 @@ -276,7 +276,7 @@ def test_subworkflows_lint_capitalization_fail(self): self.subworkflow_remove.remove("bam_stats_samtools", force=True) def test_subworkflows_absent_version(self): - """Test linting a nf-test module if the versions is absent in the snapshot file `""" + """Test linting a nf-test subworkflow if the versions is absent in the snapshot file `""" snap_file = Path( self.nfcore_modules, "subworkflows", @@ -291,7 +291,11 @@ def test_subworkflows_absent_version(self): with open(snap_file, "w") as fh: fh.write(new_content) - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) + import ipdb + + ipdb.set_trace() + + subworkflow_lint = 
nf_core.subworkflows.SubworkflowLint(directory=self.nfcore_modules) subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") assert len(subworkflow_lint.failed) == 0 assert len(subworkflow_lint.passed) > 0 @@ -308,7 +312,7 @@ def test_subworkflows_missing_test_dir(self): test_dir_copy = shutil.copytree(test_dir, test_dir.parent / "tests_copy") shutil.rmtree(test_dir) - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.nfcore_modules) subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") assert len(subworkflow_lint.failed) == 0 assert len(subworkflow_lint.passed) > 0 @@ -324,7 +328,7 @@ def test_subworkflows_missing_main_nf(self): main_nf_copy = shutil.copy(main_nf, main_nf.parent / "main_nf_copy") main_nf.unlink() - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.nfcore_modules) subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") assert len(subworkflow_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" assert len(subworkflow_lint.passed) > 0 @@ -351,7 +355,7 @@ def test_subworkflows_empty_file_in_snapshot(self): with open(snap_file, "w") as fh: json.dump(snap, fh) - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.nfcore_modules) subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") assert len(subworkflow_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" assert len(subworkflow_lint.passed) > 0 @@ -379,7 +383,7 @@ def test_subworkflows_empty_file_in_stub_snapshot(self): with open(snap_file, "w") as fh: json.dump(snap, fh) - subworkflow_lint = 
nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.nfcore_modules) subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" assert len(subworkflow_lint.passed) > 0 diff --git a/tests/subworkflows/test_remove.py b/tests/subworkflows/test_remove.py index 61c016b23..bad5a2ddb 100644 --- a/tests/subworkflows/test_remove.py +++ b/tests/subworkflows/test_remove.py @@ -14,10 +14,10 @@ def test_subworkflows_remove_subworkflow(self): """Test removing subworkflow and all it's dependencies after installing it""" self.subworkflow_install.install("bam_sort_stats_samtools") - subworkflow_path = Path(self.subworkflow_install.dir, "subworkflows", "nf-core") + subworkflow_path = Path(self.subworkflow_install.directory, "subworkflows", "nf-core") bam_sort_stats_samtools_path = Path(subworkflow_path, "bam_sort_stats_samtools") bam_stats_samtools_path = Path(subworkflow_path, "bam_stats_samtools") - samtools_index_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index") + samtools_index_path = Path(self.subworkflow_install.directory, "modules", "nf-core", "samtools", "index") ModulesJson(self.pipeline_dir) mod_json_before = ModulesJson(self.pipeline_dir).get_modules_json() assert self.subworkflow_remove.remove("bam_sort_stats_samtools") @@ -40,10 +40,10 @@ def test_subworkflows_remove_subworkflow_keep_installed_module(self): self.subworkflow_install.install("bam_sort_stats_samtools") self.mods_install.install("samtools/index") - subworkflow_path = Path(self.subworkflow_install.dir, "subworkflows", "nf-core") + subworkflow_path = Path(self.subworkflow_install.directory, "subworkflows", "nf-core") bam_sort_stats_samtools_path = Path(subworkflow_path, "bam_sort_stats_samtools") bam_stats_samtools_path = Path(subworkflow_path, "bam_stats_samtools") - 
samtools_index_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index") + samtools_index_path = Path(self.subworkflow_install.directory, "modules", "nf-core", "samtools", "index") mod_json_before = ModulesJson(self.pipeline_dir).get_modules_json() assert self.subworkflow_remove.remove("bam_sort_stats_samtools") @@ -67,11 +67,11 @@ def test_subworkflows_remove_one_of_two_subworkflow(self): """Test removing subworkflow and all it's dependencies after installing it""" self.subworkflow_install.install("bam_sort_stats_samtools") self.subworkflow_install.install("bam_stats_samtools") - subworkflow_path = Path(self.subworkflow_install.dir, "subworkflows", "nf-core") + subworkflow_path = Path(self.subworkflow_install.directory, "subworkflows", "nf-core") bam_sort_stats_samtools_path = Path(subworkflow_path, "bam_sort_stats_samtools") bam_stats_samtools_path = Path(subworkflow_path, "bam_stats_samtools") - samtools_index_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index") - samtools_stats_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "stats") + samtools_index_path = Path(self.subworkflow_install.directory, "modules", "nf-core", "samtools", "index") + samtools_stats_path = Path(self.subworkflow_install.directory, "modules", "nf-core", "samtools", "stats") assert self.subworkflow_remove.remove("bam_sort_stats_samtools") @@ -85,11 +85,11 @@ def test_subworkflows_remove_one_of_two_subworkflow(self): def test_subworkflows_remove_included_subworkflow(self): """Test removing subworkflow which is installed by another subworkflow and all it's dependencies.""" self.subworkflow_install.install("bam_sort_stats_samtools") - subworkflow_path = Path(self.subworkflow_install.dir, "subworkflows", "nf-core") + subworkflow_path = Path(self.subworkflow_install.directory, "subworkflows", "nf-core") bam_sort_stats_samtools_path = Path(subworkflow_path, "bam_sort_stats_samtools") bam_stats_samtools_path = 
Path(subworkflow_path, "bam_stats_samtools") - samtools_index_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index") - samtools_stats_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "stats") + samtools_index_path = Path(self.subworkflow_install.directory, "modules", "nf-core", "samtools", "index") + samtools_stats_path = Path(self.subworkflow_install.directory, "modules", "nf-core", "samtools", "stats") assert self.subworkflow_remove.remove("bam_stats_samtools") is False From 47229ff0bc9a30f8b5ee95e7b308d5445ad7339a Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 22 Jul 2024 12:15:33 +0200 Subject: [PATCH 341/737] fix types and tests --- nf_core/components/components_command.py | 1 + nf_core/components/components_utils.py | 4 +- nf_core/components/info.py | 8 +- nf_core/components/install.py | 9 +- nf_core/components/lint/__init__.py | 32 +++--- nf_core/components/list.py | 4 +- nf_core/components/nfcore_component.py | 6 +- nf_core/modules/lint/module_version.py | 8 +- nf_core/modules/modules_json.py | 123 +++++++++++++---------- nf_core/modules/modules_repo.py | 5 +- nf_core/synced_repo.py | 63 ++++++------ nf_core/utils.py | 52 +++++++--- 12 files changed, 185 insertions(+), 130 deletions(-) diff --git a/nf_core/components/components_command.py b/nf_core/components/components_command.py index 13a6fed33..8d200021c 100644 --- a/nf_core/components/components_command.py +++ b/nf_core/components/components_command.py @@ -38,6 +38,7 @@ def __init__( self.hide_progress = hide_progress self.no_prompts = no_prompts self._configure_repo_and_paths() + self.repo_type: Optional[str] = None def _configure_repo_and_paths(self, nf_dir_req: bool = True) -> None: """ diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index 32f6c0fc1..743efd386 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -27,7 +27,7 @@ def 
get_repo_info(directory: Path, use_prompt: Optional[bool] = True) -> Tuple[P # Figure out the repository type from the .nf-core.yml config file if we can config_fn, tools_config = nf_core.utils.load_tools_config(base_dir) - repo_type: Optional[str] = tools_config.get("repository_type", None) + repo_type = tools_config.get("repository_type", None) # If not set, prompt the user if not repo_type and use_prompt: @@ -101,7 +101,7 @@ def prompt_component_version_sha( git_sha = "" page_nbr = 1 - all_commits = modules_repo.get_component_git_log(component_name, component_type) + all_commits = iter(modules_repo.get_component_git_log(component_name, component_type)) next_page_commits = [next(all_commits, None) for _ in range(10)] next_page_commits = [commit for commit in next_page_commits if commit is not None] diff --git a/nf_core/components/info.py b/nf_core/components/info.py index 8597875af..d07486f63 100644 --- a/nf_core/components/info.py +++ b/nf_core/components/info.py @@ -97,7 +97,6 @@ def init_mod_name(self, component): Args: module: str: Module name to check """ - assert self.modules_json is not None # mypy if component is None: self.local = questionary.confirm( f"Is the {self.component_type[:-1]} locally installed?", style=nf_core.utils.nfcore_question_style @@ -105,7 +104,8 @@ def init_mod_name(self, component): if self.local: if self.repo_type == "modules": components = self.get_components_clone_modules() - else: + elif self.repo_type == "pipeline": + assert self.modules_json is not None # mypy all_components = self.modules_json.get_all_components(self.component_type).get( self.modules_repo.remote_url, [] ) @@ -117,6 +117,8 @@ def init_mod_name(self, component): raise UserWarning( f"No {self.component_type[:-1]} installed from '{self.modules_repo.remote_url}'" ) + else: + raise UserWarning("Unknown repository type") else: components = self.modules_repo.get_avail_components(self.component_type) components.sort() @@ -174,8 +176,8 @@ def get_local_yaml(self): 
Optional[dict]: Parsed meta.yml if found, None otherwise """ - assert self.modules_json is not None # mypy if self.repo_type == "pipeline": + assert self.modules_json is not None # mypy # Try to find and load the meta.yml file component_base_path = Path(self.directory, self.component_type) # Check that we have any modules/subworkflows installed from this repo diff --git a/nf_core/components/install.py b/nf_core/components/install.py index e6c31b3cb..aa8aac81e 100644 --- a/nf_core/components/install.py +++ b/nf_core/components/install.py @@ -100,8 +100,11 @@ def install(self, component: str, silent: bool = False) -> bool: modules_json.load() modules_json.update(self.component_type, self.modules_repo, component, current_version, self.installed_by) return False - - version = self.get_version(component, self.sha, self.prompt, current_version, self.modules_repo) + try: + version = self.get_version(component, self.sha, self.prompt, current_version, self.modules_repo) + except UserWarning as e: + log.error(e) + return False if not version: return False @@ -174,7 +177,7 @@ def install_included_components(self, subworkflow_dir): self.installed_by = original_installed def collect_and_verify_name( - self, component: Optional[str], modules_repo: nf_core.modules.modules_repo.ModulesRepo + self, component: Optional[str], modules_repo: "nf_core.modules.modules_repo.ModulesRepo" ) -> str: """ Collect component name. diff --git a/nf_core/components/lint/__init__.py b/nf_core/components/lint/__init__.py index 2cd59dc48..ada3ee30c 100644 --- a/nf_core/components/lint/__init__.py +++ b/nf_core/components/lint/__init__.py @@ -92,17 +92,17 @@ def __init__( raise LookupError( f"Error parsing modules.json: {components}. " f"Please check the file for errors or try again." 
) - org, comp = components - self.all_remote_components.append( - NFCoreComponent( - comp, - repo_url, - Path(self.directory, self.component_type, org, comp), - self.repo_type, - Path(self.directory), - self.component_type, + for org, comp in components: + self.all_remote_components.append( + NFCoreComponent( + comp, + repo_url, + Path(self.directory, self.component_type, org, comp), + self.repo_type, + Path(self.directory), + self.component_type, + ) ) - ) if not self.all_remote_components: raise LookupError( f"No {self.component_type} from {self.modules_repo.remote_url} installed in pipeline." @@ -123,7 +123,7 @@ def __init__( for comp in self.get_local_components() ] self.config = nf_core.utils.fetch_wf_config(Path(self.directory), cache_config=True) - elif self.repo_type == "modules": + else: component_dir = Path( self.directory, self.default_modules_path if self.component_type == "modules" else self.default_subworkflows_path, @@ -141,11 +141,11 @@ def __init__( Path(self.directory).joinpath("tests", "config"), cache_config=True ) - if registry is None: - self.registry = self.config.get("docker.registry", "quay.io") - else: - self.registry = registry - log.debug(f"Registry set to {self.registry}") + if registry is None: + self.registry = self.config.get("docker.registry", "quay.io") + else: + self.registry = registry + log.debug(f"Registry set to {self.registry}") self.lint_config = None self.modules_json = None diff --git a/nf_core/components/list.py b/nf_core/components/list.py index b24732e5c..67468b4a5 100644 --- a/nf_core/components/list.py +++ b/nf_core/components/list.py @@ -5,7 +5,7 @@ import rich.table from nf_core.components.components_command import ComponentCommand -from nf_core.modules.modules_json import ModulesJson +from nf_core.modules.modules_json import ModulesJson, ModulesJsonModuleEntry from nf_core.modules.modules_repo import ModulesRepo log = logging.getLogger(__name__) @@ -110,7 +110,7 @@ def pattern_msg(keywords: List[str]) -> str: 
modules_json_file = modules_json.modules_json for repo_url, component_with_dir in sorted(repos_with_comps.items()): - repo_entry: Dict[str, Dict[str, Dict[str, Dict[str, Union[str, List[str]]]]]] + repo_entry: Dict[str, Dict[str, Dict[str, ModulesJsonModuleEntry]]] if modules_json_file is None: log.warning(f"Modules JSON file '{modules_json.modules_json_path}' is missing. ") continue diff --git a/nf_core/components/nfcore_component.py b/nf_core/components/nfcore_component.py index 143312222..0c63141c7 100644 --- a/nf_core/components/nfcore_component.py +++ b/nf_core/components/nfcore_component.py @@ -21,7 +21,7 @@ def __init__( component_name: str, repo_url: Optional[str], component_dir: Path, - repo_type: str, + repo_type: Optional[str], base_dir: Path, component_type: str, remote_component: bool = True, @@ -54,6 +54,7 @@ def __init__( self.has_meta: bool = False self.git_sha: Optional[str] = None self.is_patched: bool = False + self.branch: Optional[str] = None if remote_component: # Initialize the important files @@ -85,6 +86,9 @@ def __init__( self.test_yml = None self.test_main_nf = None + def __repr__(self) -> str: + return f"" + def _get_main_nf_tags(self, test_main_nf: Union[Path, str]): """Collect all tags from the main.nf.test file.""" tags = [] diff --git a/nf_core/modules/lint/module_version.py b/nf_core/modules/lint/module_version.py index d08658f5d..d0ef17a44 100644 --- a/nf_core/modules/lint/module_version.py +++ b/nf_core/modules/lint/module_version.py @@ -8,11 +8,12 @@ import nf_core import nf_core.modules.modules_repo import nf_core.modules.modules_utils +from nf_core.modules.modules_utils import NFCoreComponent log = logging.getLogger(__name__) -def module_version(module_lint_object, module): +def module_version(module_lint_object: "nf_core.modules.lint.ModuleLint", module: "NFCoreComponent"): """ Verifies that the module has a version specified in the ``modules.json`` file @@ -20,8 +21,9 @@ def module_version(module_lint_object, module): 
containing a commit SHA. If that is true, it verifies that there are no newer version of the module available. """ - - modules_json_path = Path(module_lint_object.dir, "modules.json") + assert module_lint_object.modules_json is not None # mypy + assert module.repo_url is not None # mypy + modules_json_path = Path(module_lint_object.directory, "modules.json") # Verify that a git_sha exists in the `modules.json` file for this module version = module_lint_object.modules_json.get_module_version(module.component_name, module.repo_url, module.org) if version is None: diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index faeb84fc3..c0e41d1b4 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -6,7 +6,7 @@ import shutil import tempfile from pathlib import Path -from typing import Any, List, Optional, Tuple, Union +from typing import Dict, List, NotRequired, Optional, Tuple, TypedDict, Union import git import questionary @@ -27,6 +27,19 @@ log = logging.getLogger(__name__) +class ModulesJsonModuleEntry(TypedDict): + branch: str + git_sha: str + installed_by: List[str] + patch: NotRequired[str] + + +class ModulesJsonType(TypedDict): + name: str + homePage: str + repos: Dict[str, Dict[str, Dict[str, Dict[str, ModulesJsonModuleEntry]]]] + + class ModulesJson: """ An object for handling a 'modules.json' file in a pipeline @@ -43,10 +56,10 @@ def __init__(self, pipeline_dir: Union[str, Path]): self.modules_dir = self.directory / "modules" self.subworkflows_dir = self.directory / "subworkflows" self.modules_json_path = self.directory / "modules.json" - self.modules_json = None + self.modules_json: Optional[ModulesJsonType] = None self.pipeline_modules = None self.pipeline_subworkflows = None - self.pipeline_components = None + self.pipeline_components: Optional[Dict[str, List[Tuple[str, str]]]] = None def __str__(self): if self.modules_json is None: @@ -56,7 +69,7 @@ def __str__(self): def __repr__(self): return 
self.__str__() - def create(self): + def create(self) -> None: """ Creates the modules.json file from the modules and subworkflows installed in the pipeline directory @@ -66,7 +79,7 @@ def create(self): pipeline_config = nf_core.utils.fetch_wf_config(self.directory) pipeline_name = pipeline_config.get("manifest.name", "") pipeline_url = pipeline_config.get("manifest.homePage", "") - new_modules_json = {"name": pipeline_name.strip("'"), "homePage": pipeline_url.strip("'"), "repos": {}} + new_modules_json = ModulesJsonType(name=pipeline_name, homePage=pipeline_url, repos={}) if not self.modules_dir.exists(): if rich.prompt.Confirm.ask( @@ -79,7 +92,6 @@ def create(self): # Get repositories repos, _ = self.get_pipeline_module_repositories("modules", self.modules_dir) - # Get all module/subworkflow names in the repos repo_module_names = self.get_component_names_from_repo(repos, self.modules_dir) repo_subworkflow_names = self.get_component_names_from_repo(repos, self.subworkflows_dir) @@ -105,7 +117,9 @@ def create(self): self.modules_json = new_modules_json self.dump() - def get_component_names_from_repo(self, repos, directory): + def get_component_names_from_repo( + self, repos: Dict[str, Dict[str, Dict[str, Dict[str, Dict[str, str | List[str]]]]]], directory: Path + ) -> List[Tuple[str, List[str], str]]: """ Get component names from repositories in a pipeline. 
@@ -122,8 +136,8 @@ def get_component_names_from_repo(self, repos, directory): components = ( repo_url, [ - str(Path(component_name).relative_to(directory / modules_repo.repo_path)) - for component_name, _, file_names in os.walk(directory / modules_repo.repo_path) + str(component_name.relative_to(directory / modules_repo.repo_path)) + for component_name, _, file_names in Path.walk(directory / modules_repo.repo_path) if "main.nf" in file_names ], modules_repo.repo_path, @@ -131,7 +145,9 @@ def get_component_names_from_repo(self, repos, directory): names.append(components) return names - def get_pipeline_module_repositories(self, component_type, directory, repos=None): + def get_pipeline_module_repositories( + self, component_type: str, directory: Path, repos: Optional[Dict] = None + ) -> Tuple[Dict[str, Dict[str, Dict[str, Dict[str, Dict[str, Union[str, List[str]]]]]]], Dict[Path, Path]]: """ Finds all module repositories in the modules and subworkflows directory. Ignores the local modules/subworkflows. 
@@ -153,6 +169,7 @@ def get_pipeline_module_repositories(self, component_type, directory, repos=None # The function might rename some directories, keep track of them renamed_dirs = {} # Check if there are any untracked repositories + dirs_not_covered = self.dir_tree_uncovered(directory, [Path(ModulesRepo(url).repo_path) for url in repos]) if len(dirs_not_covered) > 0: log.info(f"Found custom {component_type[:-1]} repositories when creating 'modules.json'") @@ -245,8 +262,12 @@ def dir_tree_uncovered(self, components_directory, repos): return dirs_not_covered def determine_branches_and_shas( - self, component_type: str, install_dir: Union[str, Path], remote_url: str, components: List[Path] - ) -> dict[Path, dict[str, Any]]: + self, + component_type: str, + install_dir: Union[str, Path], + remote_url: str, + components: List[str], + ) -> Dict[str, ModulesJsonModuleEntry]: """ Determines what branch and commit sha each module/subworkflow in the pipeline belongs to @@ -273,10 +294,10 @@ def determine_branches_and_shas( available_branches = ModulesRepo.get_remote_branches(remote_url) sb_local = [] dead_components = [] - repo_entry = {} + repo_entry: Dict[str, ModulesJsonModuleEntry] = {} for component in sorted(components): modules_repo = default_modules_repo - component_path = repo_path / component + component_path = Path(repo_path, component) correct_commit_sha = None tried_branches = {default_modules_repo.branch} found_sha = False @@ -286,16 +307,16 @@ def determine_branches_and_shas( if patch_file.is_file(): temp_module_dir = self.try_apply_patch_reverse(component, install_dir, patch_file, component_path) correct_commit_sha = self.find_correct_commit_sha( - component_type, str(component), temp_module_dir, modules_repo + component_type, component, temp_module_dir, modules_repo ) else: correct_commit_sha = self.find_correct_commit_sha( - component_type, str(component), component_path, modules_repo + component_type, component, component_path, modules_repo ) if 
correct_commit_sha is None: # Check in the old path correct_commit_sha = self.find_correct_commit_sha( - component_type, str(component), repo_path / component_type / component, modules_repo + component_type, component, repo_path / component_type / component, modules_repo ) if correct_commit_sha is None: log.info( @@ -328,7 +349,7 @@ def determine_branches_and_shas( else: found_sha = True break - if found_sha: + if found_sha and correct_commit_sha is not None: repo_entry[component] = { "branch": modules_repo.branch, "git_sha": correct_commit_sha, @@ -337,7 +358,7 @@ def determine_branches_and_shas( # Clean up the modules/subworkflows we were unable to find the sha for for component in sb_local: - log.debug(f"Moving {component_type[:-1]} '{Path(install_dir, component)}' to 'local' directory") + log.debug(f"Moving {component_type[:-1]} '{Path(install_dir, str(component))}' to 'local' directory") self.move_component_to_local(component_type, component, str(install_dir)) for component in dead_components: @@ -408,14 +429,14 @@ def move_component_to_local(self, component_type: str, component: Union[str, Pat to_name += f"-{datetime.datetime.now().strftime('%y%m%d%H%M%S')}" shutil.move(current_path, local_dir / to_name) - def unsynced_components(self) -> Tuple[List[Path], List[Path], dict]: + def unsynced_components(self) -> Tuple[List[str], List[str], dict]: """ Compute the difference between the modules/subworkflows in the directory and the modules/subworkflows in the 'modules.json' file. 
This is done by looking at all directories containing a 'main.nf' file Returns: - (untrack_dirs ([ Path ]), missing_installation (dict)): Directories that are not tracked + (untrack_dirs ([ str ]), missing_installation (dict)): Directories that are not tracked by the modules.json file, and modules/subworkflows in the modules.json where the installation directory is missing """ @@ -443,7 +464,7 @@ def unsynced_components(self) -> Tuple[List[Path], List[Path], dict]: return untracked_dirs_modules, untracked_dirs_subworkflows, missing_installation - def parse_dirs(self, dirs: List[Path], missing_installation: dict, component_type: str) -> Tuple[List[Path], dict]: + def parse_dirs(self, dirs: List[Path], missing_installation: Dict, component_type: str) -> Tuple[List[str], Dict]: """ Parse directories and check if they are tracked in the modules.json file @@ -461,9 +482,10 @@ def parse_dirs(self, dirs: List[Path], missing_installation: dict, component_typ for dir_ in dirs: # Check if the module/subworkflows directory exists in modules.json install_dir = dir_.parts[0] - component = Path(*dir_.parts[1:]) + component = "/".join(dir_.parts[1:]) component_in_file = False git_url = "" + for repo in missing_installation: if component_type in missing_installation[repo]: if install_dir in missing_installation[repo][component_type]: @@ -564,7 +586,7 @@ def reinstall_repo(self, install_dir, remote_url, module_entries): failed_to_install.append(module) return failed_to_install - def check_up_to_date(self) -> bool: + def check_up_to_date(self): """ Checks whether the modules and subworkflows installed in the directory are consistent with the entries in the 'modules.json' file and vice versa. 
@@ -715,14 +737,12 @@ def update( self.modules_json["repos"][remote_url][component_type] = {repo_name: {}} repo_component_entry = self.modules_json["repos"][remote_url][component_type][repo_name] if component_name not in repo_component_entry: - repo_component_entry[component_name] = {} + repo_component_entry[component_name] = {"branch": "", "git_sha": "", "installed_by": []} repo_component_entry[component_name]["git_sha"] = component_version repo_component_entry[component_name]["branch"] = branch try: if installed_by not in repo_component_entry[component_name]["installed_by"] and installed_by is not None: - repo_component_entry[component_name]["installed_by"].append(installed_by) - except KeyError: - repo_component_entry[component_name]["installed_by"] = [installed_by] + repo_component_entry[component_name]["installed_by"] += installed_by finally: new_installed_by = repo_component_entry[component_name]["installed_by"] + list(installed_by_log) repo_component_entry[component_name]["installed_by"] = sorted([*set(new_installed_by)]) @@ -912,7 +932,7 @@ def get_modules_json(self) -> dict: if self.modules_json is None: self.load() assert self.modules_json is not None # mypy - return copy.deepcopy(self.modules_json) + return copy.deepcopy(self.modules_json) # type: ignore def get_component_version(self, component_type, component_name, repo_url, install_dir): """ @@ -938,7 +958,7 @@ def get_component_version(self, component_type, component_name, repo_url, instal .get("git_sha", None) ) - def get_module_version(self, module_name, repo_url, install_dir): + def get_module_version(self, module_name: str, repo_url: str, install_dir: str) -> Optional[str]: """ Returns the version of a module @@ -953,14 +973,11 @@ def get_module_version(self, module_name, repo_url, install_dir): if self.modules_json is None: self.load() assert self.modules_json is not None # mypy - return ( - self.modules_json.get("repos", {}) - .get(repo_url, {}) - .get("modules", {}) - .get(install_dir, {}) - 
.get(module_name, {}) - .get("git_sha", None) - ) + try: + sha = self.modules_json["repos"][repo_url]["modules"][install_dir][module_name]["git_sha"] + except KeyError: + sha = None + return sha def get_subworkflow_version(self, subworkflow_name, repo_url, install_dir): """ @@ -986,7 +1003,7 @@ def get_subworkflow_version(self, subworkflow_name, repo_url, install_dir): .get("git_sha", None) ) - def get_all_components(self, component_type: str) -> dict[str, Tuple[(str, str)]]: + def get_all_components(self, component_type: str) -> dict[str, List[Tuple[(str, str)]]]: """ Retrieves all pipeline modules/subworkflows that are reported in the modules.json @@ -1002,8 +1019,8 @@ def get_all_components(self, component_type: str) -> dict[str, Tuple[(str, str)] self.pipeline_components = {} for repo, repo_entry in self.modules_json.get("repos", {}).items(): if component_type in repo_entry: - for dir, components in repo_entry[component_type].items(): - self.pipeline_components[repo] = [(dir, m) for m in components] + for directory, components in repo_entry[component_type].items(): + self.pipeline_components[repo] = [(directory, m) for m in components] return self.pipeline_components @@ -1072,7 +1089,9 @@ def get_installed_by_entries(self, component_type, name): return installed_by_entries - def get_component_branch(self, component_type, component, repo_url, install_dir): + def get_component_branch( + self, component_type: str, component: Union[str, Path], repo_url: str, install_dir: str + ) -> str: """ Gets the branch from which the module/subworkflow was installed @@ -1084,14 +1103,10 @@ def get_component_branch(self, component_type, component, repo_url, install_dir) if self.modules_json is None: self.load() assert self.modules_json is not None # mypy - branch = ( - self.modules_json["repos"] - .get(repo_url, {}) - .get(component_type, {}) - .get(install_dir, {}) - .get(component, {}) - .get("branch") - ) + try: + branch = 
self.modules_json["repos"][repo_url][component_type][install_dir][str(component)]["branch"] + except (KeyError, TypeError): + branch = None if branch is None: raise LookupError( f"Could not find branch information for component '{Path(install_dir, component)}'." @@ -1112,7 +1127,7 @@ def dump(self, run_prettier: bool = False) -> None: with open(self.modules_json_path, "w") as fh: json.dump(self.modules_json, fh, indent=4) - def resolve_missing_installation(self, missing_installation, component_type): + def resolve_missing_installation(self, missing_installation: Dict, component_type: str) -> None: missing_but_in_mod_json = [ f"'{component_type}/{install_dir}/{component}'" for repo_url, contents in missing_installation.items() @@ -1168,7 +1183,7 @@ def resolve_missing_from_modules_json(self, missing_from_modules_json, component # Get tuples of components that miss installation and their install directory def components_with_repos(): - for dir in missing_from_modules_json: + for directory in missing_from_modules_json: for repo_url in repos: modules_repo = ModulesRepo(repo_url) paths_in_directory = [] @@ -1178,12 +1193,12 @@ def components_with_repos(): ) for dir_name, _, _ in os.walk(repo_url_path): if component_type == "modules": - if len(Path(dir).parts) > 1: # The module name is TOOL/SUBTOOL + if len(Path(directory).parts) > 1: # The module name is TOOL/SUBTOOL paths_in_directory.append(str(Path(*Path(dir_name).parts[-2:]))) pass paths_in_directory.append(Path(dir_name).parts[-1]) if dir in paths_in_directory: - yield (modules_repo.repo_path, dir) + yield (modules_repo.repo_path, directory) # Add all components into a dictionary with install directories repos_with_components = {} diff --git a/nf_core/modules/modules_repo.py b/nf_core/modules/modules_repo.py index a979ae45b..daa7b5981 100644 --- a/nf_core/modules/modules_repo.py +++ b/nf_core/modules/modules_repo.py @@ -1,6 +1,7 @@ import logging import os import shutil +from pathlib import Path from typing import 
Optional import git @@ -72,8 +73,8 @@ def __init__( self.verify_branch() # Convenience variable - self.modules_dir = os.path.join(self.local_repo_dir, "modules", self.repo_path) - self.subworkflows_dir = os.path.join(self.local_repo_dir, "subworkflows", self.repo_path) + self.modules_dir = Path(self.local_repo_dir, "modules", self.repo_path) + self.subworkflows_dir = Path(self.local_repo_dir, "subworkflows", self.repo_path) self.avail_module_names = None diff --git a/nf_core/synced_repo.py b/nf_core/synced_repo.py index 4b69d4af8..33e7f0a54 100644 --- a/nf_core/synced_repo.py +++ b/nf_core/synced_repo.py @@ -4,7 +4,7 @@ import shutil from configparser import NoOptionError, NoSectionError from pathlib import Path -from typing import Dict, Optional, Union +from typing import Dict, Iterable, Optional, Union import git from git.exc import GitCommandError @@ -121,28 +121,32 @@ def __init__(self, remote_url=None, branch=None, no_pull=False, hide_progress=Fa remote_url = NF_CORE_MODULES_REMOTE self.remote_url = remote_url + self.fullname = None + self.local_repo_dir = None self.repo = None # TODO: SyncedRepo doesn't have this method and both the ModulesRepo and # the WorkflowRepo define their own including custom init methods. This needs # fixing. 
self.setup_local_repo(remote_url, branch, hide_progress) + if self.local_repo_dir is None: + raise ValueError("Repository not initialized") + else: + config_fn, repo_config = load_tools_config(self.local_repo_dir) + try: + self.repo_path = repo_config["org_path"] + except KeyError: + raise UserWarning(f"'org_path' key not present in {config_fn.name}") - config_fn, repo_config = load_tools_config(self.local_repo_dir) - try: - self.repo_path = repo_config["org_path"] - except KeyError: - raise UserWarning(f"'org_path' key not present in {config_fn.name}") - - # Verify that the repo seems to be correctly configured - if self.repo_path != NF_CORE_MODULES_NAME or self.branch: - self.verify_branch() + # Verify that the repo seems to be correctly configured + if self.repo_path != NF_CORE_MODULES_NAME or self.branch: + self.verify_branch() - # Convenience variable - self.modules_dir = os.path.join(self.local_repo_dir, "modules", self.repo_path) - self.subworkflows_dir = os.path.join(self.local_repo_dir, "subworkflows", self.repo_path) + # Convenience variable + self.modules_dir = Path(self.local_repo_dir, "modules", self.repo_path) + self.subworkflows_dir = Path(self.local_repo_dir, "subworkflows", self.repo_path) - self.avail_module_names = None + self.avail_module_names = None def setup_local_repo(self, remote_url, branch, hide_progress): pass @@ -361,7 +365,9 @@ def ensure_git_user_config(self, default_name: str, default_email: str) -> None: if not user_email: git_config.set_value("user", "email", default_email) - def get_component_git_log(self, component_name: Union[str, Path], component_type: str, depth: Optional[int] = None): + def get_component_git_log( + self, component_name: Union[str, Path], component_type: str, depth: Optional[int] = None + ) -> Iterable[Dict[str, str]]: """ Fetches the commit history the of requested module/subworkflow since a given date. 
The default value is not arbitrary - it is the last time the structure of the nf-core/modules repository was had an @@ -373,35 +379,32 @@ def get_component_git_log(self, component_name: Union[str, Path], component_type Returns: ( dict ): Iterator of commit SHAs and associated (truncated) message """ - if self.repo is None: raise ValueError("Repository not initialized") self.checkout_branch() component_path = Path(component_type, self.repo_path, component_name) - commits_new = self.repo.iter_commits(max_count=depth, paths=component_path) - if not commits_new: - raise ValueError(f"Could not find any commits for '{component_name}' in '{self.remote_url}'") - else: - commits_new = [ - {"git_sha": commit.hexsha, "trunc_message": commit.message.splitlines()[0]} for commit in commits_new - ] - commits_old = [] + commits_new_iter = self.repo.iter_commits(max_count=depth, paths=component_path) + commits_old_iter = [] if component_type == "modules": # Grab commits also from previous modules structure - component_path = Path("modules", component_name) - commits_old = self.repo.iter_commits(max_count=depth, paths=component_path) - commits_old = [ - {"git_sha": commit.hexsha, "trunc_message": commit.message.splitlines()[0]} for commit in commits_old - ] + old_component_path = Path("modules", component_name) + commits_old_iter = self.repo.iter_commits(max_count=depth, paths=old_component_path) + + commits_old = [{"git_sha": commit.hexsha, "trunc_message": commit.message} for commit in commits_old_iter] + commits_new = [{"git_sha": commit.hexsha, "trunc_message": commit.message} for commit in commits_new_iter] commits = iter(commits_new + commits_old) + return commits def get_latest_component_version(self, component_name, component_type): """ Returns the latest commit in the repository """ - return list(self.get_component_git_log(component_name, component_type, depth=1))[0]["git_sha"] + try: + return list(self.get_component_git_log(component_name, component_type, 
depth=1))[0]["git_sha"] + except UserWarning: + return None def sha_exists_on_branch(self, sha): """ diff --git a/nf_core/utils.py b/nf_core/utils.py index d1e9ccfe9..4d0566b1a 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -30,6 +30,7 @@ import rich.markup import yaml from packaging.version import Version +from pydantic import BaseModel, ValidationError from rich.live import Live from rich.spinner import Spinner @@ -175,9 +176,9 @@ def _load_conda_environment(self) -> bool: log.debug("No conda `environment.yml` file found.") return False - def _fp(self, fn): + def _fp(self, fn: Union[str, Path]) -> Path: """Convenience function to get full path to a file in the pipeline""" - return os.path.join(self.wf_path, fn) + return Path(self.wf_path, fn) def list_files(self) -> List[Path]: """Get a list of all files in the pipeline""" @@ -191,9 +192,8 @@ def list_files(self) -> List[Path]: files.append(full_fn) else: log.debug(f"`git ls-files` returned '{full_fn}' but could not open it!") - except subprocess.CalledProcessError as e: + except subprocess.CalledProcessError: # Failed, so probably not initialised as a git repository - just a list of all files - log.debug(f"Couldn't call 'git ls-files': {e}") files = [] for file_path in self.wf_path.rglob("*"): if file_path.is_file(): @@ -1041,7 +1041,26 @@ def get_repo_releases_branches(pipeline, wfs): DEPRECATED_CONFIG_PATHS = [".nf-core-lint.yml", ".nf-core-lint.yaml"] -def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Path, dict]: +class NFCoreTemplateConfig(BaseModel): + org: str + name: str + description: str + author: str + version: Optional[str] + force: Optional[bool] + outdir: Optional[str] + skip_features: Optional[list] + is_nfcore: Optional[bool] + + +class NFCoreYamlConfig(BaseModel): + nf_core_version: str + repository_type: str + org_path: str + template: NFCoreTemplateConfig + + +def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Path, NFCoreYamlConfig]: """ Parse the 
nf-core.yml configuration file @@ -1059,21 +1078,26 @@ def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Path, dict]: if config_fn is None: depr_path = get_first_available_path(directory, DEPRECATED_CONFIG_PATHS) if depr_path: - log.error( - f"Deprecated `{depr_path.name}` file found! The file will not be loaded. " - f"Please rename the file to `{CONFIG_PATHS[0]}`." + raise AssertionError( + f"Deprecated `{depr_path.name}` file found! Please rename the file to `{CONFIG_PATHS[0]}`." ) else: - log.debug(f"No tools config file found: {CONFIG_PATHS[0]}") - return Path(directory, CONFIG_PATHS[0]), {} - - with open(config_fn) as fh: + raise AssertionError(f"Could not find a config file in the directory '{directory}'") + with open(str(config_fn)) as fh: tools_config = yaml.safe_load(fh) + # If the file is empty - tools_config = tools_config or {} + if tools_config is None: + raise AssertionError(f"Config file '{config_fn}' is empty") + + # Check for required fields + try: + nf_core_yaml_config = NFCoreYamlConfig(**tools_config) + except ValidationError as e: + raise AssertionError(f"Config file '{config_fn}' is invalid: {e}") log.debug("Using config file: %s", config_fn) - return config_fn, tools_config + return config_fn, nf_core_yaml_config def determine_base_dir(directory: Union[Path, str] = ".") -> Path: From b2336adaf288e3c64d02c28fb7e3e58cd22bd4cc Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 23 Jul 2024 14:24:21 +0200 Subject: [PATCH 342/737] remove remaining mention of 'defaults' --- nf_core/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/utils.py b/nf_core/utils.py index 48d1c3ca3..d5bf328b5 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -635,7 +635,7 @@ def anaconda_package(dep, dep_channels=None): """ if dep_channels is None: - dep_channels = ["conda-forge", "bioconda", "defaults"] + dep_channels = ["conda-forge", "bioconda"] # Check if each dependency is the latest available version if "=" in 
dep: From 297735e06df52450a6a133dade10ad45a3f1799e Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 24 Jul 2024 10:46:34 +0200 Subject: [PATCH 343/737] add --fix option to fix the meta.yml of subworkflows --- nf_core/__main__.py | 5 +- nf_core/commands_subworkflows.py | 3 +- nf_core/components/nfcore_component.py | 142 +++++++++++++++---------- nf_core/subworkflows/lint/__init__.py | 55 +++++++++- nf_core/subworkflows/lint/meta_yml.py | 6 ++ tests/subworkflows/lint.py | 9 ++ tests/test_subworkflows.py | 1 + 7 files changed, 162 insertions(+), 59 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 1dd92d6b8..dd800ad81 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -1431,11 +1431,12 @@ def command_subworkflows_list_local(ctx, keywords, json, dir): # pylint: disabl help="Sort lint output by subworkflow or test name.", show_default=True, ) -def command_subworkflows_lint(ctx, subworkflow, dir, registry, key, all, fail_warned, local, passed, sort_by): +@click.option("--fix", is_flag=True, help="Fix all linting tests if possible.") +def command_subworkflows_lint(ctx, subworkflow, dir, registry, key, all, fail_warned, local, passed, sort_by, fix): """ Lint one or more subworkflows in a directory. 
""" - subworkflows_lint(ctx, subworkflow, dir, registry, key, all, fail_warned, local, passed, sort_by) + subworkflows_lint(ctx, subworkflow, dir, registry, key, all, fail_warned, local, passed, sort_by, fix) # nf-core subworkflows info diff --git a/nf_core/commands_subworkflows.py b/nf_core/commands_subworkflows.py index cc1a544ec..70a0c94ed 100644 --- a/nf_core/commands_subworkflows.py +++ b/nf_core/commands_subworkflows.py @@ -102,7 +102,7 @@ def subworkflows_list_local(ctx, keywords, json, dir): # pylint: disable=redefi sys.exit(1) -def subworkflows_lint(ctx, subworkflow, dir, registry, key, all, fail_warned, local, passed, sort_by): +def subworkflows_lint(ctx, subworkflow, dir, registry, key, all, fail_warned, local, passed, sort_by, fix): """ Lint one or more subworkflows in a directory. @@ -119,6 +119,7 @@ def subworkflows_lint(ctx, subworkflow, dir, registry, key, all, fail_warned, lo subworkflow_lint = SubworkflowLint( dir, fail_warned=fail_warned, + fix=fix, registry=ctx.params["registry"], remote_url=ctx.obj["modules_repo_url"], branch=ctx.obj["modules_repo_branch"], diff --git a/nf_core/components/nfcore_component.py b/nf_core/components/nfcore_component.py index 3e5a68648..db817db90 100644 --- a/nf_core/components/nfcore_component.py +++ b/nf_core/components/nfcore_component.py @@ -41,6 +41,7 @@ def __init__( remote_component (bool): Whether the module is to be treated as a nf-core or local component """ + self.component_type = component_type self.component_name = component_name self.repo_url = repo_url self.component_dir = component_dir @@ -155,67 +156,98 @@ def _get_included_components_in_chained_tests(self, main_nf_test: Union[Path, st included_components.append(component) return included_components - def get_inputs_from_main_nf(self): + def get_inputs_from_main_nf(self) -> None: """Collect all inputs from the main.nf file.""" inputs = [] with open(self.main_nf) as f: data = f.read() - # get input values from main.nf after "input:", which can be 
formatted as tuple val(foo) path(bar) or val foo or val bar or path bar or path foo - # regex matches: - # val(foo) - # path(bar) - # val foo - # val bar - # path bar - # path foo - # don't match anything inside comments or after "output:" - if "input:" not in data: - log.debug(f"Could not find any inputs in {self.main_nf}") - return inputs - input_data = data.split("input:")[1].split("output:")[0] - for line in input_data.split("\n"): - channel_elements = [] - regex = r"(val|path)\s*(\(([^)]+)\)|\s*([^)\s,]+))" - matches = re.finditer(regex, line) - for _, match in enumerate(matches, start=1): - input_val = None - if match.group(3): - input_val = match.group(3).split(",")[0] # handle `files, stageAs: "inputs/*"` cases - elif match.group(4): - input_val = match.group(4).split(",")[0] # handle `files, stageAs: "inputs/*"` cases - if input_val: - channel_elements.append({input_val: {}}) - if len(channel_elements) > 0: - inputs.append(channel_elements) - log.debug(f"Found {len(inputs)} inputs in {self.main_nf}") - self.inputs = inputs + if self.component_type == "modules": + # get input values from main.nf after "input:", which can be formatted as tuple val(foo) path(bar) or val foo or val bar or path bar or path foo + # regex matches: + # val(foo) + # path(bar) + # val foo + # val bar + # path bar + # path foo + # don't match anything inside comments or after "output:" + if "input:" not in data: + log.debug(f"Could not find any inputs in {self.main_nf}") + return inputs + input_data = data.split("input:")[1].split("output:")[0] + for line in input_data.split("\n"): + channel_elements = [] + regex = r"(val|path)\s*(\(([^)]+)\)|\s*([^)\s,]+))" + matches = re.finditer(regex, line) + for _, match in enumerate(matches, start=1): + input_val = None + if match.group(3): + input_val = match.group(3).split(",")[0] # handle `files, stageAs: "inputs/*"` cases + elif match.group(4): + input_val = match.group(4).split(",")[0] # handle `files, stageAs: "inputs/*"` cases + if 
input_val: + channel_elements.append({input_val: {}}) + if len(channel_elements) > 0: + inputs.append(channel_elements) + log.debug(f"Found {len(inputs)} inputs in {self.main_nf}") + self.inputs = inputs + elif self.component_type == "subworkflows": + # get input values from main.nf after "take:" + if "take:" not in data: + log.debug(f"Could not find any inputs in {self.main_nf}") + return inputs + # get all lines between "take" and "main" or "emit" + input_data = data.split("take:")[1].split("main:")[0].split("emit:")[0] + for line in input_data.split("\n"): + try: + inputs.append(line.split()[0]) + except IndexError: + # Empty lines + pass + log.debug(f"Found {len(inputs)} inputs in {self.main_nf}") + self.inputs = inputs def get_outputs_from_main_nf(self): outputs = [] with open(self.main_nf) as f: data = f.read() - # get output values from main.nf after "output:". the names are always after "emit:" - if "output:" not in data: - log.debug(f"Could not find any outputs in {self.main_nf}") - return outputs - output_data = data.split("output:")[1].split("when:")[0] - regex_emit = r"emit:\s*([^)\s,]+)" - regex_elements = r"(val|path|env|stdout)\s*(\(([^)]+)\)|\s*([^)\s,]+))" - for line in output_data.split("\n"): - match_emit = re.search(regex_emit, line) - matches_elements = re.finditer(regex_elements, line) - if not match_emit: - continue - output_channel = {match_emit.group(1): []} - for _, match_element in enumerate(matches_elements, start=1): - output_val = None - if match_element.group(3): - output_val = match_element.group(3) - elif match_element.group(4): - output_val = match_element.group(4) - if output_val: - output_val = output_val.strip("'").strip('"') # remove quotes - output_channel[match_emit.group(1)].append({output_val: {}}) - outputs.append(output_channel) - log.debug(f"Found {len(outputs)} outputs in {self.main_nf}") - self.outputs = outputs + if self.component_type == "modules": + # get output values from main.nf after "output:". 
the names are always after "emit:" + if "output:" not in data: + log.debug(f"Could not find any outputs in {self.main_nf}") + return outputs + output_data = data.split("output:")[1].split("when:")[0] + regex_emit = r"emit:\s*([^)\s,]+)" + regex_elements = r"(val|path|env|stdout)\s*(\(([^)]+)\)|\s*([^)\s,]+))" + for line in output_data.split("\n"): + match_emit = re.search(regex_emit, line) + matches_elements = re.finditer(regex_elements, line) + if not match_emit: + continue + output_channel = {match_emit.group(1): []} + for _, match_element in enumerate(matches_elements, start=1): + output_val = None + if match_element.group(3): + output_val = match_element.group(3) + elif match_element.group(4): + output_val = match_element.group(4) + if output_val: + output_val = output_val.strip("'").strip('"') # remove quotes + output_channel[match_emit.group(1)].append({output_val: {}}) + outputs.append(output_channel) + log.debug(f"Found {len(outputs)} outputs in {self.main_nf}") + self.outputs = outputs + elif self.component_type == "subworkflows": + # get output values from main.nf after "emit:". Can be named outputs or not. 
+ if "emit:" not in data: + log.debug(f"Could not find any outputs in {self.main_nf}") + return outputs + output_data = data.split("emit:")[1].split("}")[0] + for line in output_data.split("\n"): + try: + outputs.append(line.split("=")[0].split()[0]) + except IndexError: + # Empty lines + pass + log.debug(f"Found {len(outputs)} outputs in {self.main_nf}") + self.outputs = outputs diff --git a/nf_core/subworkflows/lint/__init__.py b/nf_core/subworkflows/lint/__init__.py index a3cacf295..e0911e5bd 100644 --- a/nf_core/subworkflows/lint/__init__.py +++ b/nf_core/subworkflows/lint/__init__.py @@ -11,11 +11,12 @@ import questionary import rich +import ruamel.yaml import nf_core.modules.modules_utils import nf_core.utils from nf_core.components.lint import ComponentLint, LintExceptionError, LintResult -from nf_core.pipelines.lint_utils import console +from nf_core.pipelines.lint_utils import console, run_prettier_on_file log = logging.getLogger(__name__) @@ -38,6 +39,7 @@ def __init__( self, dir, fail_warned=False, + fix=False, remote_url=None, branch=None, no_pull=False, @@ -48,6 +50,7 @@ def __init__( component_type="subworkflows", dir=dir, fail_warned=fail_warned, + fix=fix, remote_url=remote_url, branch=branch, no_pull=no_pull, @@ -207,6 +210,10 @@ def lint_subworkflow(self, swf, progress_bar, registry, local=False): # Otherwise run all the lint tests else: + # Update meta.yml file if requested + if self.fix: + self.update_meta_yml_file(swf) + if self.repo_type == "pipeline" and self.modules_json: # Set correct sha version = self.modules_json.get_subworkflow_version(swf.component_name, swf.repo_url, swf.org) @@ -223,3 +230,49 @@ def lint_subworkflow(self, swf, progress_bar, registry, local=False): self.failed += warned self.failed += [LintResult(swf, *s) for s in swf.failed] + + + def update_meta_yml_file(self, swf): + """ + Update the meta.yml file with the correct inputs and outputs + """ + yaml = ruamel.yaml.YAML() + yaml.preserve_quotes = True + 
yaml.indent(mapping=2, sequence=2, offset=0) + + # Read meta.yml + with open(swf.meta_yml) as fh: + meta_yaml = yaml.load(fh) + meta_yaml_corrected = meta_yaml.copy() + # Obtain inputs and outputs from main.nf + swf.get_inputs_from_main_nf() + swf.get_outputs_from_main_nf() + + # Compare inputs and add them if missing + if "input" in meta_yaml: + # Delete inputs from meta.yml which are not present in main.nf + meta_yaml_corrected["input"] = [input for input in meta_yaml["input"] if list(input.keys())[0] in swf.inputs] + # Obtain inputs from main.nf missing in meta.yml + inputs_correct = [list(input.keys())[0] for input in meta_yaml_corrected["input"] if list(input.keys())[0] in swf.inputs] + inputs_missing = [input for input in swf.inputs if input not in inputs_correct] + # Add missing inputs to meta.yml + for missing_input in inputs_missing: + meta_yaml_corrected["input"].append({missing_input: {"description": ""}}) + + if "output" in meta_yaml: + # Delete outputs from meta.yml which are not present in main.nf + meta_yaml_corrected["output"] = [output for output in meta_yaml["output"] if list(output.keys())[0] in swf.outputs] + # Obtain output from main.nf missing in meta.yml + outputs_correct = [list(output.keys())[0] for output in meta_yaml_corrected["output"] if list(output.keys())[0] in swf.outputs] + outputs_missing = [output for output in swf.outputs if output not in outputs_correct] + # Add missing outputs to meta.yml + for missing_output in outputs_missing: + meta_yaml_corrected["output"].append({missing_output: {"description": ""}}) + + # Write corrected meta.yml to file + with open(swf.meta_yml, "w") as fh: + log.info(f"Updating {swf.meta_yml}") + yaml.dump(meta_yaml_corrected, fh) + run_prettier_on_file(fh.name) + + diff --git a/nf_core/subworkflows/lint/meta_yml.py b/nf_core/subworkflows/lint/meta_yml.py index 24e75eddb..633061e24 100644 --- a/nf_core/subworkflows/lint/meta_yml.py +++ b/nf_core/subworkflows/lint/meta_yml.py @@ -1,4 +1,5 @@ import json 
+import logging from pathlib import Path import jsonschema.validators @@ -6,6 +7,7 @@ import nf_core.components.components_utils +log = logging.getLogger(__name__) def meta_yml(subworkflow_lint_object, subworkflow): """ @@ -65,6 +67,8 @@ def meta_yml(subworkflow_lint_object, subworkflow): subworkflow.passed.append(("meta_input", f"`{input}` specified", subworkflow.meta_yml)) else: subworkflow.failed.append(("meta_input", f"`{input}` missing in `meta.yml`", subworkflow.meta_yml)) + else: + log.debug(f"No inputs specified in subworkflow `main.nf`: {subworkflow.component_name}") if "output" in meta_yaml: meta_output = [list(x.keys())[0] for x in meta_yaml["output"]] @@ -75,6 +79,8 @@ def meta_yml(subworkflow_lint_object, subworkflow): subworkflow.failed.append( ("meta_output", f"`{output}` missing in `meta.yml`", subworkflow.meta_yml) ) + else: + log.debug(f"No outputs specified in subworkflow `main.nf`: {subworkflow.component_name}") # confirm that the name matches the process name in main.nf if meta_yaml["name"].upper() == subworkflow.workflow_name: diff --git a/tests/subworkflows/lint.py b/tests/subworkflows/lint.py index 540f421ad..35e2c6b51 100644 --- a/tests/subworkflows/lint.py +++ b/tests/subworkflows/lint.py @@ -38,6 +38,15 @@ def test_subworkflows_lint_new_subworkflow(self): assert len(subworkflow_lint.warned) >= 0 +def test_subworkflows_lint_update_meta_yml(self): + """update the meta.yml of a subworkflow""" + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules, fix=True) + subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") + assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" + assert len(subworkflow_lint.passed) > 0 + assert len(subworkflow_lint.warned) >= 0 + + def test_subworkflows_lint_no_gitlab(self): """Test linting a pipeline with no subworkflows installed""" with pytest.raises(LookupError): diff --git a/tests/test_subworkflows.py 
b/tests/test_subworkflows.py index 786ba5383..cb8bcf7a5 100644 --- a/tests/test_subworkflows.py +++ b/tests/test_subworkflows.py @@ -159,6 +159,7 @@ def tearDown(self): test_subworkflows_lint_snapshot_file, test_subworkflows_lint_snapshot_file_missing_fail, test_subworkflows_lint_snapshot_file_not_needed, + test_subworkflows_lint_update_meta_yml, ) from .subworkflows.list import ( # type: ignore[misc] test_subworkflows_install_and_list_subworkflows, From a79a4f0e0c4ad3546c39d22ef46ce74e9bc25de8 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 24 Jul 2024 12:12:17 +0200 Subject: [PATCH 344/737] update changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 16db104fc..e25e8f894 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -17,6 +17,7 @@ ### Components - The `modules_nfcore` tag in the `main.nf.test` file of modules/subworkflows now displays the organization name in custom modules repositories ([#3005](https://github.com/nf-core/tools/pull/3005)) +- Add option `--fix` to update the `meta.yml` file of subworkflows ([#3077](https://github.com/nf-core/tools/pull/3077)) ### General From 2d2be3877fe04dbc4ddb80cf525be0db13083740 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 24 Jul 2024 16:03:33 +0200 Subject: [PATCH 345/737] fix typing and linting --- nf_core/components/nfcore_component.py | 11 +++++------ nf_core/subworkflows/lint/__init__.py | 21 ++++++++++++++------- nf_core/subworkflows/lint/meta_yml.py | 1 + 3 files changed, 20 insertions(+), 13 deletions(-) diff --git a/nf_core/components/nfcore_component.py b/nf_core/components/nfcore_component.py index db817db90..39a4ba51f 100644 --- a/nf_core/components/nfcore_component.py +++ b/nf_core/components/nfcore_component.py @@ -158,7 +158,7 @@ def _get_included_components_in_chained_tests(self, main_nf_test: Union[Path, st def get_inputs_from_main_nf(self) -> None: """Collect all inputs from the main.nf file.""" - inputs = [] + inputs: list[list | str] = 
[] with open(self.main_nf) as f: data = f.read() if self.component_type == "modules": @@ -173,10 +173,10 @@ def get_inputs_from_main_nf(self) -> None: # don't match anything inside comments or after "output:" if "input:" not in data: log.debug(f"Could not find any inputs in {self.main_nf}") - return inputs + return input_data = data.split("input:")[1].split("output:")[0] for line in input_data.split("\n"): - channel_elements = [] + channel_elements: list[dict] = [] regex = r"(val|path)\s*(\(([^)]+)\)|\s*([^)\s,]+))" matches = re.finditer(regex, line) for _, match in enumerate(matches, start=1): @@ -195,15 +195,14 @@ def get_inputs_from_main_nf(self) -> None: # get input values from main.nf after "take:" if "take:" not in data: log.debug(f"Could not find any inputs in {self.main_nf}") - return inputs + return # get all lines between "take" and "main" or "emit" input_data = data.split("take:")[1].split("main:")[0].split("emit:")[0] for line in input_data.split("\n"): try: inputs.append(line.split()[0]) except IndexError: - # Empty lines - pass + pass # Empty lines log.debug(f"Found {len(inputs)} inputs in {self.main_nf}") self.inputs = inputs diff --git a/nf_core/subworkflows/lint/__init__.py b/nf_core/subworkflows/lint/__init__.py index e0911e5bd..22025d2a0 100644 --- a/nf_core/subworkflows/lint/__init__.py +++ b/nf_core/subworkflows/lint/__init__.py @@ -231,7 +231,6 @@ def lint_subworkflow(self, swf, progress_bar, registry, local=False): self.failed += [LintResult(swf, *s) for s in swf.failed] - def update_meta_yml_file(self, swf): """ Update the meta.yml file with the correct inputs and outputs @@ -251,9 +250,13 @@ def update_meta_yml_file(self, swf): # Compare inputs and add them if missing if "input" in meta_yaml: # Delete inputs from meta.yml which are not present in main.nf - meta_yaml_corrected["input"] = [input for input in meta_yaml["input"] if list(input.keys())[0] in swf.inputs] + meta_yaml_corrected["input"] = [ + input for input in meta_yaml["input"] if 
list(input.keys())[0] in swf.inputs + ] # Obtain inputs from main.nf missing in meta.yml - inputs_correct = [list(input.keys())[0] for input in meta_yaml_corrected["input"] if list(input.keys())[0] in swf.inputs] + inputs_correct = [ + list(input.keys())[0] for input in meta_yaml_corrected["input"] if list(input.keys())[0] in swf.inputs + ] inputs_missing = [input for input in swf.inputs if input not in inputs_correct] # Add missing inputs to meta.yml for missing_input in inputs_missing: @@ -261,9 +264,15 @@ def update_meta_yml_file(self, swf): if "output" in meta_yaml: # Delete outputs from meta.yml which are not present in main.nf - meta_yaml_corrected["output"] = [output for output in meta_yaml["output"] if list(output.keys())[0] in swf.outputs] + meta_yaml_corrected["output"] = [ + output for output in meta_yaml["output"] if list(output.keys())[0] in swf.outputs + ] # Obtain output from main.nf missing in meta.yml - outputs_correct = [list(output.keys())[0] for output in meta_yaml_corrected["output"] if list(output.keys())[0] in swf.outputs] + outputs_correct = [ + list(output.keys())[0] + for output in meta_yaml_corrected["output"] + if list(output.keys())[0] in swf.outputs + ] outputs_missing = [output for output in swf.outputs if output not in outputs_correct] # Add missing inputs to meta.yml for missing_output in outputs_missing: @@ -274,5 +283,3 @@ def update_meta_yml_file(self, swf): log.info(f"Updating {swf.meta_yml}") yaml.dump(meta_yaml_corrected, fh) run_prettier_on_file(fh.name) - - diff --git a/nf_core/subworkflows/lint/meta_yml.py b/nf_core/subworkflows/lint/meta_yml.py index 633061e24..be282bc45 100644 --- a/nf_core/subworkflows/lint/meta_yml.py +++ b/nf_core/subworkflows/lint/meta_yml.py @@ -9,6 +9,7 @@ log = logging.getLogger(__name__) + def meta_yml(subworkflow_lint_object, subworkflow): """ Lint a ``meta.yml`` file From ff1a49520f69d228acead56fc232760ded321fe3 Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 24 Jul 2024 14:21:12 +0200 
Subject: [PATCH 346/737] avoid circular import due to modules_json being importetd --- nf_core/modules/__init__.py | 12 ----- nf_core/modules/lint/module_version.py | 5 +- tests/modules/test_bump_versions.py | 10 ++-- tests/modules/test_create.py | 16 +++--- tests/modules/test_info.py | 10 ++-- tests/modules/test_install.py | 6 +-- tests/modules/test_lint.py | 69 +++++++++++++------------- tests/modules/test_list.py | 26 +++++----- tests/modules/test_modules_json.py | 4 +- tests/modules/test_patch.py | 29 ++++++----- tests/modules/test_update.py | 20 +++++--- tests/test_subworkflows.py | 43 +--------------- 12 files changed, 105 insertions(+), 145 deletions(-) diff --git a/nf_core/modules/__init__.py b/nf_core/modules/__init__.py index 6be871ece..e69de29bb 100644 --- a/nf_core/modules/__init__.py +++ b/nf_core/modules/__init__.py @@ -1,12 +0,0 @@ -from .bump_versions import ModuleVersionBumper -from .create import ModuleCreate -from .info import ModuleInfo -from .install import ModuleInstall -from .lint import ModuleLint -from .list import ModuleList -from .modules_json import ModulesJson -from .modules_repo import ModulesRepo -from .modules_utils import ModuleExceptionError -from .patch import ModulePatch -from .remove import ModuleRemove -from .update import ModuleUpdate diff --git a/nf_core/modules/lint/module_version.py b/nf_core/modules/lint/module_version.py index d0ef17a44..207d5e941 100644 --- a/nf_core/modules/lint/module_version.py +++ b/nf_core/modules/lint/module_version.py @@ -6,6 +6,7 @@ from pathlib import Path import nf_core +import nf_core.modules.lint import nf_core.modules.modules_repo import nf_core.modules.modules_utils from nf_core.modules.modules_utils import NFCoreComponent @@ -40,8 +41,8 @@ def module_version(module_lint_object: "nf_core.modules.lint.ModuleLint", module ) modules_repo = nf_core.modules.modules_repo.ModulesRepo(remote_url=module.repo_url, branch=module.branch) - module_git_log = 
modules_repo.get_component_git_log(module.component_name, "modules") - if version == next(module_git_log)["git_sha"]: + module_git_log = list(modules_repo.get_component_git_log(module.component_name, "modules")) + if version == module_git_log[0]["git_sha"]: module.passed.append(("module_version", "Module is the latest version", module.component_dir)) else: module.warned.append(("module_version", "New version available", module.component_dir)) diff --git a/tests/modules/test_bump_versions.py b/tests/modules/test_bump_versions.py index 29e030668..d46b8747c 100644 --- a/tests/modules/test_bump_versions.py +++ b/tests/modules/test_bump_versions.py @@ -3,7 +3,7 @@ import pytest -import nf_core.modules +import nf_core.modules.bump_versions from nf_core.modules.modules_utils import ModuleExceptionError from ..test_modules import TestModules @@ -19,19 +19,19 @@ def test_modules_bump_versions_single_module(self): new_content = re.sub(r"bioconda::star=\d.\d.\d\D?", r"bioconda::star=2.6.1d", content) with open(env_yml_path, "w") as fh: fh.write(new_content) - version_bumper = nf_core.modules.ModuleVersionBumper(pipeline_dir=self.nfcore_modules) + version_bumper = nf_core.modules.bump_versions.ModuleVersionBumper(pipeline_dir=self.nfcore_modules) version_bumper.bump_versions(module="bpipe/test") assert len(version_bumper.failed) == 0 def test_modules_bump_versions_all_modules(self): """Test updating all modules""" - version_bumper = nf_core.modules.ModuleVersionBumper(pipeline_dir=self.nfcore_modules) + version_bumper = nf_core.modules.bump_versions.ModuleVersionBumper(pipeline_dir=self.nfcore_modules) version_bumper.bump_versions(all_modules=True) assert len(version_bumper.failed) == 0 def test_modules_bump_versions_fail(self): """Fail updating a module with wrong name""" - version_bumper = nf_core.modules.ModuleVersionBumper(pipeline_dir=self.nfcore_modules) + version_bumper = nf_core.modules.bump_versions.ModuleVersionBumper(pipeline_dir=self.nfcore_modules) with 
pytest.raises(ModuleExceptionError) as excinfo: version_bumper.bump_versions(module="no/module") assert "Could not find the specified module:" in str(excinfo.value) @@ -45,6 +45,6 @@ def test_modules_bump_versions_fail_unknown_version(self): new_content = re.sub(r"bioconda::bpipe=\d.\d.\d\D?", r"bioconda::bpipe=xxx", content) with open(env_yml_path, "w") as fh: fh.write(new_content) - version_bumper = nf_core.modules.ModuleVersionBumper(pipeline_dir=self.nfcore_modules) + version_bumper = nf_core.modules.bump_versions.ModuleVersionBumper(pipeline_dir=self.nfcore_modules) version_bumper.bump_versions(module="bpipe/test") assert "Conda package had unknown version" in version_bumper.failed[0][0] diff --git a/tests/modules/test_create.py b/tests/modules/test_create.py index c84750183..219f86999 100644 --- a/tests/modules/test_create.py +++ b/tests/modules/test_create.py @@ -9,7 +9,7 @@ import yaml from git.repo import Repo -import nf_core.modules +import nf_core.modules.create from tests.utils import ( GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH, GITLAB_URL, @@ -26,7 +26,7 @@ def test_modules_create_succeed(self): with responses.RequestsMock() as rsps: mock_anaconda_api_calls(rsps, "trim-galore", "0.6.7") mock_biocontainers_api_calls(rsps, "trim-galore", "0.6.7") - module_create = nf_core.modules.ModuleCreate( + module_create = nf_core.modules.create.ModuleCreate( self.pipeline_dir, "trimgalore", "@author", "process_single", True, True, conda_name="trim-galore" ) with requests_cache.disabled(): @@ -38,7 +38,7 @@ def test_modules_create_fail_exists(self): with responses.RequestsMock() as rsps: mock_anaconda_api_calls(rsps, "trim-galore", "0.6.7") mock_biocontainers_api_calls(rsps, "trim-galore", "0.6.7") - module_create = nf_core.modules.ModuleCreate( + module_create = nf_core.modules.create.ModuleCreate( self.pipeline_dir, "trimgalore", "@author", "process_single", False, False, conda_name="trim-galore" ) with requests_cache.disabled(): @@ -53,7 +53,7 @@ def 
test_modules_create_nfcore_modules(self): with responses.RequestsMock() as rsps: mock_anaconda_api_calls(rsps, "fastqc", "0.11.9") mock_biocontainers_api_calls(rsps, "fastqc", "0.11.9") - module_create = nf_core.modules.ModuleCreate( + module_create = nf_core.modules.create.ModuleCreate( self.nfcore_modules, "fastqc", "@author", "process_low", False, False ) with requests_cache.disabled(): @@ -68,7 +68,7 @@ def test_modules_create_nfcore_modules_subtool(self): with responses.RequestsMock() as rsps: mock_anaconda_api_calls(rsps, "star", "2.8.10a") mock_biocontainers_api_calls(rsps, "star", "2.8.10a") - module_create = nf_core.modules.ModuleCreate( + module_create = nf_core.modules.create.ModuleCreate( self.nfcore_modules, "star/index", "@author", "process_medium", False, False ) with requests_cache.disabled(): @@ -94,7 +94,7 @@ def test_modules_migrate(self, mock_rich_ask): # Create a module with --migrate-pytest mock_rich_ask.return_value = True - module_create = nf_core.modules.ModuleCreate(self.nfcore_modules, "samtools/sort", migrate_pytest=True) + module_create = nf_core.modules.create.ModuleCreate(self.nfcore_modules, "samtools/sort", migrate_pytest=True) module_create.create() with open(module_dir / "main.nf") as fh: @@ -128,7 +128,7 @@ def test_modules_migrate_no_delete(self, mock_rich_ask): # Create a module with --migrate-pytest mock_rich_ask.return_value = False - module_create = nf_core.modules.ModuleCreate(self.nfcore_modules, "samtools/sort", migrate_pytest=True) + module_create = nf_core.modules.create.ModuleCreate(self.nfcore_modules, "samtools/sort", migrate_pytest=True) module_create.create() # Check that pytest folder is not deleted @@ -157,7 +157,7 @@ def test_modules_migrate_symlink(self, mock_rich_ask): # Create a module with --migrate-pytest mock_rich_ask.return_value = True - module_create = nf_core.modules.ModuleCreate(self.nfcore_modules, "samtools/sort", migrate_pytest=True) + module_create = 
nf_core.modules.create.ModuleCreate(self.nfcore_modules, "samtools/sort", migrate_pytest=True) module_create.create() # Check that symlink is deleted diff --git a/tests/modules/test_info.py b/tests/modules/test_info.py index 890685404..8e60bed31 100644 --- a/tests/modules/test_info.py +++ b/tests/modules/test_info.py @@ -1,6 +1,6 @@ from rich.console import Console -import nf_core.modules +import nf_core.modules.info from ..test_modules import TestModules from ..utils import GITLAB_DEFAULT_BRANCH, GITLAB_URL @@ -9,7 +9,7 @@ class TestModulesCreate(TestModules): def test_modules_info_remote(self): """Test getting info about a remote module""" - mods_info = nf_core.modules.ModuleInfo(self.pipeline_dir, "fastqc") + mods_info = nf_core.modules.info.ModuleInfo(self.pipeline_dir, "fastqc") mods_info_output = mods_info.get_component_info() console = Console(record=True) console.print(mods_info_output) @@ -21,7 +21,7 @@ def test_modules_info_remote(self): def test_modules_info_remote_gitlab(self): """Test getting info about a module in the remote gitlab repo""" - mods_info = nf_core.modules.ModuleInfo( + mods_info = nf_core.modules.info.ModuleInfo( self.pipeline_dir, "fastqc", remote_url=GITLAB_URL, branch=GITLAB_DEFAULT_BRANCH ) mods_info_output = mods_info.get_component_info() @@ -37,7 +37,7 @@ def test_modules_info_remote_gitlab(self): def test_modules_info_local(self): """Test getting info about a locally installed module""" self.mods_install.install("trimgalore") - mods_info = nf_core.modules.ModuleInfo(self.pipeline_dir, "trimgalore") + mods_info = nf_core.modules.info.ModuleInfo(self.pipeline_dir, "trimgalore") mods_info_output = mods_info.get_component_info() console = Console(record=True) console.print(mods_info_output) @@ -50,7 +50,7 @@ def test_modules_info_local(self): def test_modules_info_in_modules_repo(self): """Test getting info about a module in the modules repo""" - mods_info = nf_core.modules.ModuleInfo(self.nfcore_modules, "fastqc") + mods_info = 
nf_core.modules.info.ModuleInfo(self.nfcore_modules, "fastqc") mods_info.local = True mods_info_output = mods_info.get_component_info() console = Console(record=True) diff --git a/tests/modules/test_install.py b/tests/modules/test_install.py index cfdaac47e..8f7ac0a1d 100644 --- a/tests/modules/test_install.py +++ b/tests/modules/test_install.py @@ -19,14 +19,14 @@ class TestModulesCreate(TestModules): def test_modules_install_nopipeline(self): """Test installing a module - no pipeline given""" - self.mods_install.directory = None + self.pipeline_dir = None assert self.mods_install.install("foo") is False @with_temporary_folder def test_modules_install_emptypipeline(self, tmpdir): """Test installing a module - empty dir given""" - os.mkdir(os.path.join(tmpdir, "nf-core-pipe")) - self.mods_install.directory = os.path.join(tmpdir, "nf-core-pipe") + Path(tmpdir, "nf-core-pipe").mkdir() + self.mods_install.directory = Path(tmpdir, "nf-core-pipe") with pytest.raises(UserWarning) as excinfo: self.mods_install.install("foo") assert "Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value) diff --git a/tests/modules/test_lint.py b/tests/modules/test_lint.py index fc9871db2..dfe288a6e 100644 --- a/tests/modules/test_lint.py +++ b/tests/modules/test_lint.py @@ -6,8 +6,9 @@ import yaml from git.repo import Repo -import nf_core.modules -from nf_core.modules.lint import main_nf +import nf_core.modules.lint +import nf_core.modules.patch +from nf_core.modules.lint.main_nf import check_container_link_line, check_process_labels from nf_core.utils import set_wd from ..test_modules import TestModules @@ -160,7 +161,7 @@ class TestModulesCreate(TestModules): def _setup_patch(self, pipeline_dir: Union[str, Path], modify_module: bool): - install_obj = nf_core.modules.ModuleInstall( + install_obj = nf_core.modules.install.ModuleInstall( pipeline_dir, prompt=False, force=False, @@ -180,7 +181,7 @@ def _setup_patch(self, pipeline_dir: Union[str, Path], modify_module: 
bool): def test_modules_lint_trimgalore(self): """Test linting the TrimGalore! module""" self.mods_install.install("trimgalore") - module_lint = nf_core.modules.ModuleLint(directory=self.pipeline_dir) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.pipeline_dir) module_lint.lint(print_results=False, module="trimgalore") assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 @@ -191,11 +192,11 @@ def test_modules_lint_empty(self): self.mods_remove.remove("fastqc", force=True) self.mods_remove.remove("multiqc", force=True) with pytest.raises(LookupError): - nf_core.modules.ModuleLint(directory=self.pipeline_dir) + nf_core.modules.lint.ModuleLint(directory=self.pipeline_dir) def test_modules_lint_new_modules(self): """lint a new module""" - module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, all_modules=True) assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 @@ -206,13 +207,13 @@ def test_modules_lint_no_gitlab(self): self.mods_remove.remove("fastqc", force=True) self.mods_remove.remove("multiqc", force=True) with pytest.raises(LookupError): - nf_core.modules.ModuleLint(directory=self.pipeline_dir, remote_url=GITLAB_URL) + nf_core.modules.lint.ModuleLint(directory=self.pipeline_dir, remote_url=GITLAB_URL) def test_modules_lint_gitlab_modules(self): """Lint modules from a different remote""" self.mods_install_gitlab.install("fastqc") self.mods_install_gitlab.install("multiqc") - module_lint = nf_core.modules.ModuleLint(directory=self.pipeline_dir, remote_url=GITLAB_URL) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.pipeline_dir, remote_url=GITLAB_URL) module_lint.lint(print_results=False, all_modules=True) assert 
len(module_lint.failed) == 2 assert len(module_lint.passed) > 0 @@ -221,7 +222,7 @@ def test_modules_lint_gitlab_modules(self): def test_modules_lint_multiple_remotes(self): """Lint modules from a different remote""" self.mods_install_gitlab.install("multiqc") - module_lint = nf_core.modules.ModuleLint(directory=self.pipeline_dir, remote_url=GITLAB_URL) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.pipeline_dir, remote_url=GITLAB_URL) module_lint.lint(print_results=False, all_modules=True) assert len(module_lint.failed) == 1 assert len(module_lint.passed) > 0 @@ -230,12 +231,12 @@ def test_modules_lint_multiple_remotes(self): def test_modules_lint_registry(self): """Test linting the samtools module and alternative registry""" assert self.mods_install.install("samtools/sort") - module_lint = nf_core.modules.ModuleLint(directory=self.pipeline_dir, registry="public.ecr.aws") + module_lint = nf_core.modules.lint.ModuleLint(directory=self.pipeline_dir, registry="public.ecr.aws") module_lint.lint(print_results=False, module="samtools/sort") assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 assert len(module_lint.warned) >= 0 - module_lint = nf_core.modules.ModuleLint(directory=self.pipeline_dir) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.pipeline_dir) module_lint.lint(print_results=False, module="samtools/sort") assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 @@ -248,13 +249,13 @@ def test_modules_lint_patched_modules(self): self._setup_patch(str(self.pipeline_dir), True) # Create a patch file - patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) + patch_obj = nf_core.modules.patch.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) patch_obj.patch(BISMARK_ALIGN) # change temporarily working directory to the pipeline 
directory # to avoid error from try_apply_patch() during linting with set_wd(self.pipeline_dir): - module_lint = nf_core.modules.ModuleLint( + module_lint = nf_core.modules.lint.ModuleLint( directory=self.pipeline_dir, remote_url=GITLAB_URL, branch=PATCH_BRANCH, @@ -273,7 +274,7 @@ def test_modules_lint_check_process_labels(self): for test_case in PROCESS_LABEL_TEST_CASES: process, passed, warned, failed = test_case mocked_ModuleLint = MockModuleLint() - main_nf.check_process_labels(mocked_ModuleLint, process.splitlines()) + check_process_labels(mocked_ModuleLint, process.splitlines()) assert len(mocked_ModuleLint.passed) == passed assert len(mocked_ModuleLint.warned) == warned assert len(mocked_ModuleLint.failed) == failed @@ -284,7 +285,7 @@ def test_modules_lint_check_url(self): mocked_ModuleLint = MockModuleLint() for line in process.splitlines(): if line.strip(): - main_nf.check_container_link_line(mocked_ModuleLint, line, registry="quay.io") + check_container_link_line(mocked_ModuleLint, line, registry="quay.io") assert ( len(mocked_ModuleLint.passed) == passed @@ -298,7 +299,7 @@ def test_modules_lint_check_url(self): def test_modules_lint_snapshot_file(self): """Test linting a module with a snapshot file""" - module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 @@ -315,7 +316,7 @@ def test_modules_lint_snapshot_file_missing_fail(self): "tests", "main.nf.test.snap", ).unlink() - module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") Path( self.nfcore_modules, @@ -359,7 +360,7 @@ def 
test_modules_lint_snapshot_file_not_needed(self): "w", ) as fh: fh.write(new_content) - module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 @@ -377,7 +378,7 @@ def test_modules_environment_yml_file_doesnt_exists(self): "environment.yml.bak", ) ) - module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") Path( self.nfcore_modules, @@ -403,7 +404,7 @@ def test_modules_environment_yml_file_doesnt_exists(self): def test_modules_environment_yml_file_sorted_correctly(self): """Test linting a module with a correctly sorted environment.yml file""" - module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 @@ -438,7 +439,7 @@ def test_modules_environment_yml_file_sorted_incorrectly(self): "w", ) as fh: fh.write(yaml_content) - module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") # we fix the sorting on the fly, so this should pass assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" @@ -471,7 +472,7 @@ def test_modules_environment_yml_file_not_array(self): "w", ) as fh: fh.write(yaml.dump(yaml_content)) - module_lint = 
nf_core.modules.ModuleLint(directory=self.nfcore_modules) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 @@ -504,7 +505,7 @@ def test_modules_environment_yml_file_name_mismatch(self): "w", ) as fh: fh.write(yaml.dump(yaml_content)) - module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") # reset changes yaml_content["name"] = "bpipe_test" @@ -536,7 +537,7 @@ def test_modules_meta_yml_incorrect_licence_field(self): "w", ) as fh: fh.write(yaml.dump(meta_yml)) - module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") # reset changes @@ -559,7 +560,7 @@ def test_modules_meta_yml_input_mismatch(self): main_nf_new = main_nf.replace("path bam", "path bai") with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: fh.write(main_nf_new) - module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: fh.write(main_nf) @@ -578,7 +579,7 @@ def test_modules_meta_yml_output_mismatch(self): main_nf_new = main_nf.replace("emit: bam", "emit: bai") with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: fh.write(main_nf_new) - module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) + module_lint = 
nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: fh.write(main_nf) @@ -625,7 +626,7 @@ def test_modules_meta_yml_incorrect_name(self): "w", ) as fh: fh.write(yaml.dump(environment_yml)) - module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") # reset changes @@ -659,7 +660,7 @@ def test_modules_missing_test_dir(self): Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests").rename( Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests.bak") ) - module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests.bak").rename( Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests") @@ -690,7 +691,7 @@ def test_modules_missing_test_main_nf(self): "main.nf.test.bak", ) ) - module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") Path( self.nfcore_modules, @@ -719,7 +720,7 @@ def test_modules_missing_test_main_nf(self): def test_modules_unused_pytest_files(self): """Test linting a nf-test module with files still present in `tests/modules/`""" Path(self.nfcore_modules, "tests", "modules", "bpipe", "test").mkdir(parents=True, exist_ok=True) - module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, 
module="bpipe/test") Path(self.nfcore_modules, "tests", "modules", "bpipe", "test").rmdir() assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" @@ -735,7 +736,7 @@ def test_nftest_failing_linting(self): self.nfcore_modules = Path(tmp_dir, "modules-test") Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_NFTEST_BRANCH) - module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="kallisto/quant") assert len(module_lint.failed) == 3, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" @@ -774,7 +775,7 @@ def test_modules_absent_version(self): "w", ) as fh: fh.write(new_content) - module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") with open( Path( @@ -812,7 +813,7 @@ def test_modules_empty_file_in_snapshot(self): with open(snap_file, "w") as fh: json.dump(snap, fh) - module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 @@ -841,7 +842,7 @@ def test_modules_empty_file_in_stub_snapshot(self): with open(snap_file, "w") as fh: json.dump(snap, fh) - module_lint = nf_core.modules.ModuleLint(directory=self.nfcore_modules) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) module_lint.lint(print_results=False, module="bpipe/test") assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 diff --git 
a/tests/modules/test_list.py b/tests/modules/test_list.py index 81484cc8f..fdbb61f69 100644 --- a/tests/modules/test_list.py +++ b/tests/modules/test_list.py @@ -4,7 +4,7 @@ import yaml from rich.console import Console -import nf_core.modules +import nf_core.modules.list from ..test_modules import TestModules from ..utils import GITLAB_DEFAULT_BRANCH, GITLAB_URL @@ -13,7 +13,7 @@ class TestModulesCreate(TestModules): def test_modules_list_remote(self): """Test listing available modules""" - mods_list = nf_core.modules.ModuleList(None, remote=True) + mods_list = nf_core.modules.list.ModuleList(None, remote=True) listed_mods = mods_list.list_components() console = Console(record=True) console.print(listed_mods) @@ -22,7 +22,9 @@ def test_modules_list_remote(self): def test_modules_list_remote_gitlab(self): """Test listing the modules in the remote gitlab repo""" - mods_list = nf_core.modules.ModuleList(None, remote=True, remote_url=GITLAB_URL, branch=GITLAB_DEFAULT_BRANCH) + mods_list = nf_core.modules.list.ModuleList( + None, remote=True, remote_url=GITLAB_URL, branch=GITLAB_DEFAULT_BRANCH + ) listed_mods = mods_list.list_components() console = Console(record=True) console.print(listed_mods) @@ -31,7 +33,7 @@ def test_modules_list_remote_gitlab(self): def test_modules_list_pipeline(self): """Test listing locally installed modules""" - mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False) + mods_list = nf_core.modules.list.ModuleList(self.pipeline_dir, remote=False) listed_mods = mods_list.list_components() console = Console(record=True) console.print(listed_mods) @@ -42,7 +44,7 @@ def test_modules_list_pipeline(self): def test_modules_install_and_list_pipeline(self): """Test listing locally installed modules""" self.mods_install.install("trimgalore") - mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False) + mods_list = nf_core.modules.list.ModuleList(self.pipeline_dir, remote=False) listed_mods = mods_list.list_components() 
console = Console(record=True) console.print(listed_mods) @@ -52,7 +54,7 @@ def test_modules_install_and_list_pipeline(self): def test_modules_install_gitlab_and_list_pipeline(self): """Test listing locally installed modules""" self.mods_install_gitlab.install("fastqc") - mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False) + mods_list = nf_core.modules.list.ModuleList(self.pipeline_dir, remote=False) listed_mods = mods_list.list_components() console = Console(record=True) console.print(listed_mods) @@ -61,7 +63,7 @@ def test_modules_install_gitlab_and_list_pipeline(self): def test_modules_list_local_json(self): """Test listing locally installed modules as JSON""" - mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False) + mods_list = nf_core.modules.list.ModuleList(self.pipeline_dir, remote=False) listed_mods = mods_list.list_components(print_json=True) listed_mods = json.loads(listed_mods) assert "fastqc" in listed_mods @@ -69,7 +71,7 @@ def test_modules_list_local_json(self): def test_modules_list_remote_json(self): """Test listing available modules as JSON""" - mods_list = nf_core.modules.ModuleList(None, remote=True) + mods_list = nf_core.modules.list.ModuleList(None, remote=True) listed_mods = mods_list.list_components(print_json=True) listed_mods = json.loads(listed_mods) assert "fastqc" in listed_mods @@ -77,7 +79,7 @@ def test_modules_list_remote_json(self): def test_modules_list_with_one_keyword(self): """Test listing available modules with one keyword""" - mods_list = nf_core.modules.ModuleList(None, remote=True) + mods_list = nf_core.modules.list.ModuleList(None, remote=True) listed_mods = mods_list.list_components(keywords=["qc"]) console = Console(record=True) console.print(listed_mods) @@ -86,7 +88,7 @@ def test_modules_list_with_one_keyword(self): def test_modules_list_with_keywords(self): """Test listing available modules with multiple keywords""" - mods_list = nf_core.modules.ModuleList(None, remote=True) + 
mods_list = nf_core.modules.list.ModuleList(None, remote=True) listed_mods = mods_list.list_components(keywords=["fastq", "qc"]) console = Console(record=True) console.print(listed_mods) @@ -95,7 +97,7 @@ def test_modules_list_with_keywords(self): def test_modules_list_with_unused_keyword(self): """Test listing available modules with an unused keyword""" - mods_list = nf_core.modules.ModuleList(None, remote=True) + mods_list = nf_core.modules.list.ModuleList(None, remote=True) with self.assertLogs(level="INFO") as log: listed_mods = mods_list.list_components(keywords=["you_will_never_find_me"]) self.assertIn("No available", log.output[0]) @@ -116,7 +118,7 @@ def test_modules_list_in_wrong_repo_fail(self): yaml.safe_dump(nf_core_yml, fh) # expect error logged with self.assertLogs(level="ERROR") as log: - mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False) + mods_list = nf_core.modules.list.ModuleList(self.pipeline_dir, remote=False) listed_mods = mods_list.list_components() self.assertIn("must be run from a pipeline directory", log.output[0]) # expect empty list diff --git a/tests/modules/test_modules_json.py b/tests/modules/test_modules_json.py index 319b5ad65..2ab058fa7 100644 --- a/tests/modules/test_modules_json.py +++ b/tests/modules/test_modules_json.py @@ -36,7 +36,7 @@ def test_mod_json_update(self): mod_json_obj = ModulesJson(self.pipeline_dir) # Update the modules.json file mod_repo_obj = ModulesRepo() - mod_json_obj.update("modules", mod_repo_obj, "MODULE_NAME", "GIT_SHA", "modules", write_file=False) + mod_json_obj.update("modules", mod_repo_obj, "MODULE_NAME", "GIT_SHA", ["modules"], write_file=False) mod_json = mod_json_obj.get_modules_json() assert "MODULE_NAME" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"] assert "git_sha" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["MODULE_NAME"] @@ -155,7 +155,7 @@ def test_mod_json_up_to_date_reinstall_fails(self): mod_json_obj = 
ModulesJson(self.pipeline_dir) # Update the fastqc module entry to an invalid git_sha - mod_json_obj.update("modules", ModulesRepo(), "fastqc", "INVALID_GIT_SHA", "modules", write_file=True) + mod_json_obj.update("modules", ModulesRepo(), "fastqc", "INVALID_GIT_SHA", ["modules"], write_file=True) # Remove the fastqc module fastqc_path = Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "fastqc") diff --git a/tests/modules/test_patch.py b/tests/modules/test_patch.py index 230bb1ce8..c3eb94d37 100644 --- a/tests/modules/test_patch.py +++ b/tests/modules/test_patch.py @@ -6,7 +6,10 @@ import pytest import nf_core.components.components_command -import nf_core.modules +import nf_core.components.patch +import nf_core.modules.modules_json +import nf_core.modules.patch +import nf_core.modules.update from ..test_modules import TestModules from ..utils import GITLAB_URL @@ -29,7 +32,7 @@ def setup_patch(pipeline_dir, modify_module): - install_obj = nf_core.modules.ModuleInstall( + install_obj = nf_core.modules.install.ModuleInstall( pipeline_dir, prompt=False, force=False, remote_url=GITLAB_URL, branch=PATCH_BRANCH, sha=ORG_SHA ) @@ -66,7 +69,7 @@ def test_create_patch_no_change(self): setup_patch(self.pipeline_dir, False) # Try creating a patch file - patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) + patch_obj = nf_core.modules.patch.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) with pytest.raises(UserWarning): patch_obj.patch(BISMARK_ALIGN) @@ -84,7 +87,7 @@ def test_create_patch_change(self): setup_patch(self.pipeline_dir, True) # Try creating a patch file - patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) + patch_obj = nf_core.modules.patch.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) patch_obj.patch(BISMARK_ALIGN) module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) @@ -119,7 +122,7 @@ def test_create_patch_try_apply_successful(self): module_path 
= Path(self.pipeline_dir, module_relpath) # Try creating a patch file - patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) + patch_obj = nf_core.modules.patch.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) patch_obj.patch(BISMARK_ALIGN) patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" @@ -132,7 +135,7 @@ def test_create_patch_try_apply_successful(self): "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) - update_obj = nf_core.modules.ModuleUpdate( + update_obj = nf_core.modules.update.ModuleUpdate( self.pipeline_dir, sha=SUCCEED_SHA, remote_url=GITLAB_URL, branch=PATCH_BRANCH ) # Install the new files @@ -187,7 +190,7 @@ def test_create_patch_try_apply_failed(self): module_path = Path(self.pipeline_dir, module_relpath) # Try creating a patch file - patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) + patch_obj = nf_core.modules.patch.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) patch_obj.patch(BISMARK_ALIGN) patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" @@ -200,7 +203,7 @@ def test_create_patch_try_apply_failed(self): "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) - update_obj = nf_core.modules.ModuleUpdate( + update_obj = nf_core.modules.update.ModuleUpdate( self.pipeline_dir, sha=FAIL_SHA, remote_url=GITLAB_URL, branch=PATCH_BRANCH ) # Install the new files @@ -226,7 +229,7 @@ def test_create_patch_update_success(self): module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) # Try creating a patch file - patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) + patch_obj = nf_core.modules.patch.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) patch_obj.patch(BISMARK_ALIGN) patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" @@ -240,7 +243,7 @@ def test_create_patch_update_success(self): ) # Update the module - update_obj = nf_core.modules.ModuleUpdate( + update_obj = 
nf_core.modules.update.ModuleUpdate( self.pipeline_dir, sha=SUCCEED_SHA, show_diff=False, @@ -287,7 +290,7 @@ def test_create_patch_update_fail(self): module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) # Try creating a patch file - patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) + patch_obj = nf_core.modules.patch.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) patch_obj.patch(BISMARK_ALIGN) patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" @@ -304,7 +307,7 @@ def test_create_patch_update_fail(self): with open(module_path / patch_fn) as fh: patch_contents = fh.read() - update_obj = nf_core.modules.ModuleUpdate( + update_obj = nf_core.modules.update.ModuleUpdate( self.pipeline_dir, sha=FAIL_SHA, show_diff=False, @@ -339,7 +342,7 @@ def test_remove_patch(self): setup_patch(self.pipeline_dir, True) # Try creating a patch file - patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) + patch_obj = nf_core.modules.patch.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) patch_obj.patch(BISMARK_ALIGN) module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) diff --git a/tests/modules/test_update.py b/tests/modules/test_update.py index a33aac377..1f81eab48 100644 --- a/tests/modules/test_update.py +++ b/tests/modules/test_update.py @@ -167,9 +167,10 @@ def test_update_with_config_fixed_version(self): # Fix the trimgalore version in the .nf-core.yml to an old version update_config = {GITLAB_URL: {GITLAB_REPO: {"trimgalore": OLD_TRIMGALORE_SHA}}} config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - tools_config["update"] = update_config + setattr(tools_config, "update", update_config) + assert config_fn is not None and tools_config is not None # mypy with open(Path(self.pipeline_dir, config_fn), "w") as f: - yaml.dump(tools_config, f) + yaml.dump(tools_config.model_dump(), f) # Update all modules in the pipeline 
update_obj = ModuleUpdate( @@ -191,9 +192,10 @@ def test_update_with_config_dont_update(self): # Set the trimgalore field to no update in the .nf-core.yml update_config = {GITLAB_URL: {GITLAB_REPO: {"trimgalore": False}}} config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - tools_config["update"] = update_config + setattr(tools_config, "update", update_config) + assert config_fn is not None and tools_config is not None # mypy with open(Path(self.pipeline_dir, config_fn), "w") as f: - yaml.dump(tools_config, f) + yaml.dump(tools_config.model_dump(), f) # Update all modules in the pipeline update_obj = ModuleUpdate( @@ -219,9 +221,10 @@ def test_update_with_config_fix_all(self): # Fix the version of all nf-core modules in the .nf-core.yml to an old version update_config = {GITLAB_URL: OLD_TRIMGALORE_SHA} config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - tools_config["update"] = update_config + setattr(tools_config, "update", update_config) + assert config_fn is not None and tools_config is not None # mypy with open(Path(self.pipeline_dir, config_fn), "w") as f: - yaml.dump(tools_config, f) + yaml.dump(tools_config.model_dump(), f) # Update all modules in the pipeline update_obj = ModuleUpdate( @@ -242,9 +245,10 @@ def test_update_with_config_no_updates(self): # Fix the version of all nf-core modules in the .nf-core.yml to an old version update_config = {GITLAB_URL: False} config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - tools_config["update"] = update_config + setattr(tools_config, "update", update_config) + assert config_fn is not None and tools_config is not None # mypy with open(Path(self.pipeline_dir, config_fn), "w") as f: - yaml.dump(tools_config, f) + yaml.dump(tools_config.model_dump(), f) # Update all modules in the pipeline update_obj = ModuleUpdate( diff --git a/tests/test_subworkflows.py b/tests/test_subworkflows.py index af581fc9a..7c18ab0a2 100644 --- 
a/tests/test_subworkflows.py +++ b/tests/test_subworkflows.py @@ -1,14 +1,13 @@ """Tests covering the subworkflows commands""" import json -import os -import shutil import unittest from pathlib import Path import pytest import nf_core.modules +import nf_core.modules.install import nf_core.pipelines.create.create import nf_core.subworkflows @@ -104,49 +103,11 @@ def setUp(self): force=False, sha="8c343b3c8a0925949783dc547666007c245c235b", ) - self.mods_install = nf_core.modules.ModuleInstall(self.pipeline_dir, prompt=False, force=True) + self.mods_install = nf_core.modules.install.ModuleInstall(self.pipeline_dir, prompt=False, force=True) # Set up remove objects self.subworkflow_remove = nf_core.subworkflows.SubworkflowRemove(self.pipeline_dir) - def tearDown(self): - """Clean up temporary files and folders""" - - if os.path.exists(self.tmp_dir): - shutil.rmtree(self.tmp_dir) - @pytest.fixture(autouse=True) def _use_caplog(self, caplog): self.caplog = caplog - - # ################################################ - # # Test of the individual subworkflow commands. 
# - # ################################################ - - # from .subworkflows.list import ( # type: ignore[misc] - # test_subworkflows_install_and_list_subworkflows, - # test_subworkflows_install_gitlab_and_list_subworkflows, - # test_subworkflows_list_remote, - # test_subworkflows_list_remote_gitlab, - # ) - # from .subworkflows.remove import ( # type: ignore[misc] - # test_subworkflows_remove_included_subworkflow, - # test_subworkflows_remove_one_of_two_subworkflow, - # test_subworkflows_remove_subworkflow, - # test_subworkflows_remove_subworkflow_keep_installed_module, - # ) - # from .subworkflows.update import ( # type: ignore[misc] - # test_install_and_update, - # test_install_at_hash_and_update, - # test_install_at_hash_and_update_and_save_diff_limit_output, - # test_install_at_hash_and_update_and_save_diff_to_file, - # test_install_at_hash_and_update_limit_output, - # test_update_all, - # test_update_all_linked_components_from_subworkflow, - # test_update_all_subworkflows_from_module, - # test_update_change_of_included_modules, - # test_update_with_config_dont_update, - # test_update_with_config_fix_all, - # test_update_with_config_fixed_version, - # test_update_with_config_no_updates, - # ) From 0cf5f8b2f6f36d7904d95368c2ae2964bcbc09af Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 25 Jul 2024 09:08:04 +0200 Subject: [PATCH 347/737] remove broken _repr_ (breaks because not all parameters are intilaized) --- nf_core/components/lint/__init__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/nf_core/components/lint/__init__.py b/nf_core/components/lint/__init__.py index ada3ee30c..f5ce3ddb7 100644 --- a/nf_core/components/lint/__init__.py +++ b/nf_core/components/lint/__init__.py @@ -45,7 +45,6 @@ def __init__(self, component: NFCoreComponent, lint_test: str, message: str, fil self.component_name: str = component.component_name -@rich.repr.auto class ComponentLint(ComponentCommand): """ An object for linting modules and subworkflows either in a clone of 
the 'nf-core/modules' From 879e808b37f5bf7ed4e15aaef111ba025779a08f Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 25 Jul 2024 13:27:14 +0200 Subject: [PATCH 348/737] add types --- nf_core/components/components_command.py | 1 - nf_core/components/components_utils.py | 16 +- nf_core/components/create.py | 21 +-- nf_core/components/info.py | 14 +- nf_core/components/install.py | 4 + nf_core/components/lint/__init__.py | 12 +- nf_core/components/nfcore_component.py | 1 + nf_core/modules/bump_versions.py | 10 +- nf_core/modules/modules_json.py | 7 +- nf_core/pipelines/create/create.py | 50 +++--- nf_core/pipelines/create/utils.py | 20 +-- nf_core/pipelines/lint/__init__.py | 24 +-- nf_core/pipelines/lint/modules_json.py | 9 +- nf_core/subworkflows/lint/main_nf.py | 14 +- nf_core/synced_repo.py | 33 ++-- tests/pipelines/test_sync.py | 186 ++++++++++------------- tests/subworkflows/test_lint.py | 42 +++-- 17 files changed, 232 insertions(+), 232 deletions(-) diff --git a/nf_core/components/components_command.py b/nf_core/components/components_command.py index 8d200021c..13a6fed33 100644 --- a/nf_core/components/components_command.py +++ b/nf_core/components/components_command.py @@ -38,7 +38,6 @@ def __init__( self.hide_progress = hide_progress self.no_prompts = no_prompts self._configure_repo_and_paths() - self.repo_type: Optional[str] = None def _configure_repo_and_paths(self, nf_dir_req: bool = True) -> None: """ diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index 743efd386..e4e2ff092 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -27,7 +27,9 @@ def get_repo_info(directory: Path, use_prompt: Optional[bool] = True) -> Tuple[P # Figure out the repository type from the .nf-core.yml config file if we can config_fn, tools_config = nf_core.utils.load_tools_config(base_dir) - repo_type = tools_config.get("repository_type", None) + if config_fn is None: + raise 
UserWarning(f"Could not find a config file in directory: {base_dir}") + repo_type = getattr(tools_config, "repository_type", None) or None # If not set, prompt the user if not repo_type and use_prompt: @@ -55,13 +57,11 @@ def get_repo_info(directory: Path, use_prompt: Optional[bool] = True) -> Tuple[P # Check if it's a valid answer if repo_type not in ["pipeline", "modules"]: raise UserWarning(f"Invalid repository type: '{repo_type}'") - + org: str = "" # Check for org if modules repo - if repo_type == "pipeline": - org = "" - elif repo_type == "modules": - org = tools_config.get("org_path", None) - if org is None: + if repo_type == "modules": + org = getattr(tools_config, "org_path", "") or "" + if org == "": log.warning("Organisation path not defined in %s [key: org_path]", config_fn.name) org = questionary.text( "What is the organisation path under which modules and subworkflows are stored?", @@ -70,7 +70,7 @@ def get_repo_info(directory: Path, use_prompt: Optional[bool] = True) -> Tuple[P ).unsafe_ask() log.info("To avoid this prompt in the future, add the 'org_path' key to a root '%s' file.", config_fn.name) if rich.prompt.Confirm.ask("[bold][blue]?[/] Would you like me to add this config now?", default=True): - with open(config_fn, "a+") as fh: + with open(str(config_fn), "a+") as fh: fh.write(f"org_path: {org}\n") log.info(f"Config added to '{config_fn.name}'") diff --git a/nf_core/components/create.py b/nf_core/components/create.py index 413790099..fdcf402b4 100644 --- a/nf_core/components/create.py +++ b/nf_core/components/create.py @@ -62,7 +62,7 @@ def __init__( self.not_empty_template = not empty_template self.migrate_pytest = migrate_pytest - def create(self): + def create(self) -> bool: """ Create a new DSL2 module or subworkflow from the nf-core template. 
@@ -106,12 +106,12 @@ def create(self): ``` """ - if self.component_type == "modules": # Check modules directory structure self.check_modules_structure() # Check whether the given directory is a nf-core pipeline or a clone of nf-core/modules + log.info(f"Repository type: [blue]{self.repo_type}") if self.directory != ".": log.info(f"Base directory: '{self.directory}'") @@ -161,7 +161,7 @@ def create(self): self.org_alphabet = not_alphabet.sub("", self.org) # Create component template with jinja2 - self._render_template() + assert self._render_template() log.info(f"Created component template: '{self.component_name}'") if self.migrate_pytest: @@ -171,7 +171,9 @@ def create(self): self._print_and_delete_pytest_files() new_files = [str(path) for path in self.file_paths.values()] + log.info("Created following files:\n " + "\n ".join(new_files)) + return True def _get_bioconda_tool(self): """ @@ -265,7 +267,7 @@ def _get_module_structure_components(self): default=True, ) - def _render_template(self): + def _render_template(self) -> Optional[bool]: """ Create new module/subworkflow files with Jinja2. 
""" @@ -288,7 +290,7 @@ def _render_template(self): # Write output to the target file log.debug(f"Writing output to: '{dest_fn}'") dest_fn.parent.mkdir(exist_ok=True, parents=True) - with open(dest_fn, "w") as fh: + with open(str(dest_fn), "w") as fh: log.debug(f"Writing output to: '{dest_fn}'") fh.write(rendered_output) @@ -297,6 +299,7 @@ def _render_template(self): Path(nf_core.__file__).parent / f"{self.component_type[:-1]}-template" / template_fn ).stat() dest_fn.chmod(template_stat.st_mode) + return True def _collect_name_prompt(self): """ @@ -340,7 +343,7 @@ def _collect_name_prompt(self): elif self.component_type == "subworkflows": self.component = rich.prompt.Prompt.ask("[violet]Name of subworkflow").strip() - def _get_component_dirs(self): + def _get_component_dirs(self) -> Dict[str, Path]: """Given a directory and a tool/subtool or subworkflow, set the file paths and check if they already exist Returns dict: keys are relative paths to template files, vals are target paths. @@ -372,9 +375,8 @@ def _get_component_dirs(self): # Set file paths file_paths["main.nf"] = component_file - if self.repo_type == "modules": + elif self.repo_type == "modules": component_dir = Path(self.directory, self.component_type, self.org, self.component_dir) - # Check if module/subworkflow directories exist already if component_dir.exists() and not self.force_overwrite and not self.migrate_pytest: raise UserWarning( @@ -403,7 +405,6 @@ def _get_component_dirs(self): raise UserWarning( f"Module subtool '{tool_glob[0]}' exists already, cannot make tool '{self.component_name}'" ) - # Set file paths # For modules - can be tool/ or tool/subtool/ so can't do in template directory structure file_paths["main.nf"] = component_dir / "main.nf" @@ -411,6 +412,8 @@ def _get_component_dirs(self): if self.component_type == "modules": file_paths["environment.yml"] = component_dir / "environment.yml" file_paths["tests/main.nf.test.j2"] = component_dir / "tests" / "main.nf.test" + else: + raise 
ValueError("`repo_type` not set correctly") return file_paths diff --git a/nf_core/components/info.py b/nf_core/components/info.py index d07486f63..a296fcacc 100644 --- a/nf_core/components/info.py +++ b/nf_core/components/info.py @@ -136,17 +136,17 @@ def init_mod_name(self, component): ).unsafe_ask() else: if self.repo_type == "pipeline": + assert self.modules_json is not None # mypy # check if the module is locally installed local_paths = self.modules_json.get_all_components(self.component_type).get( self.modules_repo.remote_url ) # type: ignore - if local_paths is None: - raise LookupError(f"No {self.component_type[:-1]} installed from {self.modules_repo.remote_url}") - for directory, comp in local_paths: - if comp == component: - component_base_path = Path(self.directory, self.component_type) - self.local_path = Path(component_base_path, directory, component) - break + if local_paths is not None: + for directory, comp in local_paths: + if comp == component: + component_base_path = Path(self.directory, self.component_type) + self.local_path = Path(component_base_path, directory, component) + break if self.local_path: self.local = True diff --git a/nf_core/components/install.py b/nf_core/components/install.py index aa8aac81e..8f4791328 100644 --- a/nf_core/components/install.py +++ b/nf_core/components/install.py @@ -69,8 +69,12 @@ def install(self, component: str, silent: bool = False) -> bool: # Verify SHA if not self.modules_repo.verify_sha(self.prompt, self.sha): return False + + # verify self.modules_repo entries: if self.modules_repo is None: return False + if self.modules_repo.repo_path is None: + return False # Check and verify component name diff --git a/nf_core/components/lint/__init__.py b/nf_core/components/lint/__init__.py index f5ce3ddb7..e2475ef62 100644 --- a/nf_core/components/lint/__init__.py +++ b/nf_core/components/lint/__init__.py @@ -75,6 +75,11 @@ def __init__( self.passed: List[LintResult] = [] self.warned: List[LintResult] = [] 
self.failed: List[LintResult] = [] + self.all_local_components: List[NFCoreComponent] = [] + + self.lint_config = None + self.modules_json = None + if self.component_type == "modules": self.lint_tests = self.get_all_module_lint_tests(self.repo_type == "pipeline") else: @@ -107,7 +112,7 @@ def __init__( f"No {self.component_type} from {self.modules_repo.remote_url} installed in pipeline." ) local_component_dir = Path(self.directory, self.component_type, "local") - self.all_local_components = [] + if local_component_dir.exists(): self.all_local_components = [ NFCoreComponent( @@ -122,7 +127,7 @@ def __init__( for comp in self.get_local_components() ] self.config = nf_core.utils.fetch_wf_config(Path(self.directory), cache_config=True) - else: + elif self.repo_type == "modules": component_dir = Path( self.directory, self.default_modules_path if self.component_type == "modules" else self.default_subworkflows_path, @@ -146,9 +151,6 @@ def __init__( self.registry = registry log.debug(f"Registry set to {self.registry}") - self.lint_config = None - self.modules_json = None - def __repr__(self) -> str: return f"ComponentLint({self.component_type}, {self.directory})" diff --git a/nf_core/components/nfcore_component.py b/nf_core/components/nfcore_component.py index 0c63141c7..129871d68 100644 --- a/nf_core/components/nfcore_component.py +++ b/nf_core/components/nfcore_component.py @@ -55,6 +55,7 @@ def __init__( self.git_sha: Optional[str] = None self.is_patched: bool = False self.branch: Optional[str] = None + self.workflow_name: Optional[str] = None if remote_component: # Initialize the important files diff --git a/nf_core/modules/bump_versions.py b/nf_core/modules/bump_versions.py index fb0dc7d50..2d8854e3c 100644 --- a/nf_core/modules/bump_versions.py +++ b/nf_core/modules/bump_versions.py @@ -7,7 +7,7 @@ import os import re from pathlib import Path -from typing import Any, Dict, List, Optional, Tuple, Union +from typing import List, Optional, Tuple, Union import 
questionary import yaml @@ -22,13 +22,13 @@ import nf_core.utils from nf_core.components.components_command import ComponentCommand from nf_core.components.nfcore_component import NFCoreComponent -from nf_core.utils import custom_yaml_dumper, rich_force_colors +from nf_core.utils import NFCoreYamlConfig, custom_yaml_dumper, rich_force_colors from nf_core.utils import plural_s as _s log = logging.getLogger(__name__) -class ModuleVersionBumper(ComponentCommand): # type: ignore[misc] +class ModuleVersionBumper(ComponentCommand): def __init__( self, pipeline_dir: Union[str, Path], @@ -43,7 +43,7 @@ def __init__( self.failed: List[Tuple[str, str]] = [] self.ignored: List[Tuple[str, str]] = [] self.show_up_to_date: Optional[bool] = None - self.tools_config: Dict[str, Any] = {} + self.tools_config: Optional[NFCoreYamlConfig] def bump_versions( self, module: Union[str, None] = None, all_modules: bool = False, show_uptodate: bool = False @@ -160,7 +160,7 @@ def bump_module_version(self, module: NFCoreComponent) -> bool: return False # Don't update if blocked in blacklist - self.bump_versions_config = self.tools_config.get("bump-versions", {}) + self.bump_versions_config = getattr(self.tools_config, "bump-versions", {}) or {} if module.component_name in self.bump_versions_config: config_version = self.bump_versions_config[module.component_name] if not config_version: diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index c0e41d1b4..02ce6fa5a 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -922,7 +922,7 @@ def module_present(self, module_name, repo_url, install_dir): install_dir, {} ) - def get_modules_json(self) -> dict: + def get_modules_json(self) -> ModulesJsonType: """ Returns a copy of the loaded modules.json @@ -932,7 +932,7 @@ def get_modules_json(self) -> dict: if self.modules_json is None: self.load() assert self.modules_json is not None # mypy - return copy.deepcopy(self.modules_json) # type: 
ignore + return copy.deepcopy(self.modules_json) def get_component_version(self, component_type, component_name, repo_url, install_dir): """ @@ -1182,6 +1182,7 @@ def resolve_missing_from_modules_json(self, missing_from_modules_json, component repos, _ = self.get_pipeline_module_repositories(component_type, self.modules_dir, tracked_repos) # Get tuples of components that miss installation and their install directory + def components_with_repos(): for directory in missing_from_modules_json: for repo_url in repos: @@ -1197,7 +1198,7 @@ def components_with_repos(): paths_in_directory.append(str(Path(*Path(dir_name).parts[-2:]))) pass paths_in_directory.append(Path(dir_name).parts[-1]) - if dir in paths_in_directory: + if directory in paths_in_directory: yield (modules_repo.repo_path, directory) # Add all components into a dictionary with install directories diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index aecba9423..b7d86c5bd 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -8,9 +8,10 @@ import re import shutil from pathlib import Path -from typing import Optional, Union +from typing import Dict, List, Optional, Union import git +import git.config import jinja2 import yaml @@ -61,12 +62,15 @@ def __init__( self.config = template_config elif from_config_file: # Try reading config file - _, config_yml = nf_core.utils.load_tools_config(outdir if outdir else ".") - # Obtain a CreateConfig object from `.nf-core.yml` config file - if "template" in config_yml: - self.config = CreateConfig(**config_yml["template"]) - else: - raise UserWarning("The template configuration was not provided in '.nf-core.yml'.") + try: + _, config_yml = nf_core.utils.load_tools_config(outdir if outdir else Path().cwd()) + # Obtain a CreateConfig object from `.nf-core.yml` config file + if config_yml is not None and getattr(config_yml, "template", None) is not None: + self.config = 
CreateConfig(**config_yml["template"]) + else: + raise UserWarning("The template configuration was not provided in '.nf-core.yml'.") + except (FileNotFoundError, UserWarning): + log.debug("The '.nf-core.yml' configuration file was not found.") elif (name and description and author) or ( template_config and (isinstance(template_config, str) or isinstance(template_config, Path)) ): @@ -191,7 +195,10 @@ def obtain_jinja_params_dict(self, features_to_skip, pipeline_dir): skip_paths (list): List of template areas which contain paths to skip. """ # Try reading config file - _, config_yml = nf_core.utils.load_tools_config(pipeline_dir) + try: + _, config_yml = nf_core.utils.load_tools_config(pipeline_dir) + except UserWarning: + config_yml = None # Define the different template areas, and what actions to take for each # if they are skipped @@ -230,13 +237,13 @@ def obtain_jinja_params_dict(self, features_to_skip, pipeline_dir): jinja_params["name_docker"] = jinja_params["name"].replace(jinja_params["org"], jinja_params["prefix_nodash"]) jinja_params["logo_light"] = f"{jinja_params['name_noslash']}_logo_light.png" jinja_params["logo_dark"] = f"{jinja_params['name_noslash']}_logo_dark.png" - - if ( - "lint" in config_yml - and "nextflow_config" in config_yml["lint"] - and "manifest.name" in config_yml["lint"]["nextflow_config"] - ): - return jinja_params, skip_paths + if config_yml is not None: + if ( + hasattr(config_yml, "lint") + and hasattr(config_yml["lint"], "nextflow_config") + and hasattr(config_yml["lint"]["nextflow_config"], "manifest.name") + ): + return jinja_params, skip_paths # Check that the pipeline name matches the requirements if not re.match(r"^[a-z]+$", jinja_params["short_name"]): @@ -417,7 +424,7 @@ def fix_linting(self): """ # Create a lint config short_name = self.jinja_params["short_name"] - lint_config = { + lint_config: Dict[str, List[str]] = { "files_exist": [ "CODE_OF_CONDUCT.md", f"assets/nf-core-{short_name}_logo_light.png", @@ -503,9 +510,10 
@@ def fix_linting(self): # Add the lint content to the preexisting nf-core config config_fn, nf_core_yml = nf_core.utils.load_tools_config(self.outdir) - nf_core_yml["lint"] = lint_config - with open(self.outdir / config_fn, "w") as fh: - yaml.dump(nf_core_yml, fh, default_flow_style=False, sort_keys=False) + if config_fn is not None and nf_core_yml is not None: + nf_core_yml.lint = lint_config + with open(self.outdir / config_fn, "w") as fh: + yaml.dump(nf_core_yml.model_dump(), fh, default_flow_style=False, sort_keys=False) run_prettier_on_file(os.path.join(self.outdir, config_fn)) @@ -531,9 +539,9 @@ def git_init_pipeline(self): Raises: UserWarning: if Git default branch is set to 'dev' or 'TEMPLATE'. """ - default_branch = self.default_branch + default_branch: Optional[str] = self.default_branch try: - default_branch = default_branch or git.config.GitConfigParser().get_value("init", "defaultBranch") + default_branch = default_branch or str(git.config.GitConfigParser().get_value("init", "defaultBranch")) except configparser.Error: log.debug("Could not read init.defaultBranch") if default_branch in ["dev", "TEMPLATE"]: diff --git a/nf_core/pipelines/create/utils.py b/nf_core/pipelines/create/utils.py index c15d61e26..88994c936 100644 --- a/nf_core/pipelines/create/utils.py +++ b/nf_core/pipelines/create/utils.py @@ -3,9 +3,9 @@ from contextvars import ContextVar from logging import LogRecord from pathlib import Path -from typing import Any, Dict, Iterator, Optional, Union +from typing import Any, Dict, Iterator, Union -from pydantic import BaseModel, ConfigDict, ValidationError, ValidationInfo, field_validator +from pydantic import ConfigDict, ValidationError, ValidationInfo, field_validator from rich.logging import RichHandler from textual import on from textual._context import active_app @@ -16,6 +16,8 @@ from textual.widget import Widget from textual.widgets import Button, Input, Markdown, RichLog, Static, Switch +from nf_core.utils import 
NFCoreTemplateConfig + # Use ContextVar to define a context on the model initialization _init_context_var: ContextVar = ContextVar("_init_context_var", default={}) @@ -33,19 +35,9 @@ def init_context(value: Dict[str, Any]) -> Iterator[None]: NFCORE_PIPELINE_GLOBAL: bool = True -class CreateConfig(BaseModel): +class CreateConfig(NFCoreTemplateConfig): """Pydantic model for the nf-core create config.""" - org: Optional[str] = None - name: Optional[str] = None - description: Optional[str] = None - author: Optional[str] = None - version: Optional[str] = None - force: Optional[bool] = True - outdir: Optional[str] = None - skip_features: Optional[list] = None - is_nfcore: Optional[bool] = None - model_config = ConfigDict(extra="allow") def __init__(self, /, **data: Any) -> None: @@ -103,7 +95,7 @@ class TextInput(Static): and validation messages. """ - def __init__(self, field_id, placeholder, description, default=None, password=None, **kwargs) -> None: + def __init__(self, field_id, placeholder, description, default="", password=False, **kwargs) -> None: """Initialise the widget with our values. Pass on kwargs upstream for standard usage.""" diff --git a/nf_core/pipelines/lint/__init__.py b/nf_core/pipelines/lint/__init__.py index 93f652370..ed833d321 100644 --- a/nf_core/pipelines/lint/__init__.py +++ b/nf_core/pipelines/lint/__init__.py @@ -151,15 +151,15 @@ def _get_all_lint_tests(release_mode): "nfcore_yml", ] + (["version_consistency"] if release_mode else []) - def _load(self): + def _load(self) -> bool: """Load information about the pipeline into the PipelineLint object""" # Load everything using the parent object super()._load() # Load lint object specific stuff - self._load_lint_config() + return self._load_lint_config() - def _load_lint_config(self): + def _load_lint_config(self) -> bool: """Parse a pipeline lint config file. 
Load the '.nf-core.yml' config file and extract @@ -168,14 +168,19 @@ def _load_lint_config(self): Add parsed config to the `self.lint_config` class attribute. """ _, tools_config = nf_core.utils.load_tools_config(self.wf_path) - self.lint_config = tools_config.get("lint", {}) + self.lint_config = getattr(tools_config, "lint", {}) or {} + is_correct = True # Check if we have any keys that don't match lint test names - for k in self.lint_config: - if k not in self.lint_tests: - log.warning(f"Found unrecognised test name '{k}' in pipeline lint config") + if self.lint_config is not None: + for k in self.lint_config: + if k not in self.lint_tests: + log.warning(f"Found unrecognised test name '{k}' in pipeline lint config") + is_correct = False - def _lint_pipeline(self): + return is_correct + + def _lint_pipeline(self) -> None: """Main linting function. Takes the pipeline directory as the primary input and iterates through @@ -240,7 +245,8 @@ def _lint_pipeline(self): "Running lint checks", total=len(self.lint_tests), test_name=self.lint_tests[0] ) for test_name in self.lint_tests: - if self.lint_config.get(test_name, {}) is False: + lint_test = self.lint_config.get(test_name, {}) if self.lint_config is not None else {} + if lint_test is False: log.debug(f"Skipping lint test '{test_name}'") self.ignored.append((test_name, test_name)) continue diff --git a/nf_core/pipelines/lint/modules_json.py b/nf_core/pipelines/lint/modules_json.py index dd0a59d55..5ce205403 100644 --- a/nf_core/pipelines/lint/modules_json.py +++ b/nf_core/pipelines/lint/modules_json.py @@ -1,9 +1,10 @@ from pathlib import Path +from typing import Dict, List -from nf_core.modules.modules_json import ModulesJson +from nf_core.modules.modules_json import ModulesJson, ModulesJsonType -def modules_json(self): +def modules_json(self) -> Dict[str, List[str]]: """Make sure all modules described in the ``modules.json`` file are actually installed Every module installed from ``nf-core/modules`` must have an 
entry in the ``modules.json`` file @@ -18,10 +19,10 @@ def modules_json(self): # Load pipeline modules and modules.json _modules_json = ModulesJson(self.wf_path) _modules_json.load() - modules_json_dict = _modules_json.modules_json + modules_json_dict: ModulesJsonType | None = _modules_json.modules_json modules_dir = Path(self.wf_path, "modules") - if _modules_json: + if _modules_json and modules_json_dict is not None: all_modules_passed = True for repo in modules_json_dict["repos"].keys(): diff --git a/nf_core/subworkflows/lint/main_nf.py b/nf_core/subworkflows/lint/main_nf.py index c73559502..edca32bf3 100644 --- a/nf_core/subworkflows/lint/main_nf.py +++ b/nf_core/subworkflows/lint/main_nf.py @@ -4,12 +4,14 @@ import logging import re -from typing import List +from typing import List, Tuple + +from nf_core.components.nfcore_component import NFCoreComponent log = logging.getLogger(__name__) -def main_nf(_, subworkflow): +def main_nf(_, subworkflow: NFCoreComponent) -> Tuple[List[str], List[str]]: """ Lint a ``main.nf`` subworkflow file @@ -25,8 +27,8 @@ def main_nf(_, subworkflow): * The subworkflow emits a software version """ - inputs = [] - outputs = [] + inputs: List[str] = [] + outputs: List[str] = [] # Read the lines directly from the subworkflow lines = None @@ -38,7 +40,7 @@ def main_nf(_, subworkflow): subworkflow.passed.append(("main_nf_exists", "Subworkflow file exists", subworkflow.main_nf)) except FileNotFoundError: subworkflow.failed.append(("main_nf_exists", "Subworkflow file does not exist", subworkflow.main_nf)) - return + return inputs, outputs # Go through subworkflow main.nf file and switch state according to current section # Perform section-specific linting @@ -199,7 +201,7 @@ def check_subworkflow_section(self, lines: List[str]) -> List[str]: return includes -def check_workflow_section(self, lines): +def check_workflow_section(self, lines: List[str]) -> None: """Lint the workflow definition of a subworkflow before Specifically checks that 
the name is all capital letters diff --git a/nf_core/synced_repo.py b/nf_core/synced_repo.py index 33e7f0a54..8af0ee9a0 100644 --- a/nf_core/synced_repo.py +++ b/nf_core/synced_repo.py @@ -4,7 +4,7 @@ import shutil from configparser import NoOptionError, NoSectionError from pathlib import Path -from typing import Dict, Iterable, Optional, Union +from typing import Dict, Iterable, List, Optional, Union import git from git.exc import GitCommandError @@ -129,14 +129,16 @@ def __init__(self, remote_url=None, branch=None, no_pull=False, hide_progress=Fa # the WorkflowRepo define their own including custom init methods. This needs # fixing. self.setup_local_repo(remote_url, branch, hide_progress) + if self.local_repo_dir is None: raise ValueError("Repository not initialized") else: config_fn, repo_config = load_tools_config(self.local_repo_dir) - try: - self.repo_path = repo_config["org_path"] - except KeyError: - raise UserWarning(f"'org_path' key not present in {config_fn.name}") + if config_fn is not None and repo_config is not None: + try: + self.repo_path = repo_config.org_path + except KeyError: + raise UserWarning(f"'org_path' key not present in {config_fn.name}") # Verify that the repo seems to be correctly configured if self.repo_path != NF_CORE_MODULES_NAME or self.branch: @@ -148,6 +150,9 @@ def __init__(self, remote_url=None, branch=None, no_pull=False, hide_progress=Fa self.avail_module_names = None + def __repr__(self) -> str: + return f"SyncedRepo({self.remote_url}, {self.branch})" + def setup_local_repo(self, remote_url, branch, hide_progress): pass @@ -402,8 +407,12 @@ def get_latest_component_version(self, component_name, component_type): Returns the latest commit in the repository """ try: - return list(self.get_component_git_log(component_name, component_type, depth=1))[0]["git_sha"] - except UserWarning: + git_logs = list(self.get_component_git_log(component_name, component_type, depth=1)) + if not git_logs: + return None + return 
git_logs[0]["git_sha"] + except Exception as e: + log.debug(f"Could not get latest version of {component_name}: {e}") return None def sha_exists_on_branch(self, sha): @@ -437,7 +446,9 @@ def get_commit_info(self, sha): return message, date raise LookupError(f"Commit '{sha}' not found in the '{self.remote_url}'") - def get_avail_components(self, component_type, checkout=True, commit=None): + def get_avail_components( + self, component_type: str, checkout: bool = True, commit: Optional[str] = None + ) -> List[str]: """ Gets the names of the modules/subworkflows in the repository. They are detected by checking which directories have a 'main.nf' file @@ -456,9 +467,9 @@ def get_avail_components(self, component_type, checkout=True, commit=None): directory = self.subworkflows_dir # Module/Subworkflow directories are characterized by having a 'main.nf' file avail_component_names = [ - os.path.relpath(dirpath, start=directory) - for dirpath, _, file_names in os.walk(directory) - if "main.nf" in file_names + str(Path(dirpath).relative_to(directory)) + for dirpath, _, files in Path.walk(directory) + if "main.nf" in files ] return avail_component_names diff --git a/tests/pipelines/test_sync.py b/tests/pipelines/test_sync.py index d7b73c7ff..b6955e671 100644 --- a/tests/pipelines/test_sync.py +++ b/tests/pipelines/test_sync.py @@ -2,50 +2,104 @@ import json import os -import shutil -import tempfile -import unittest from pathlib import Path +from typing import Dict, List, Union from unittest import mock import git import pytest +import yaml import nf_core.pipelines.create.create import nf_core.pipelines.sync +from nf_core.utils import NFCoreYamlConfig +from ..test_pipelines import TestPipelines from ..utils import with_temporary_folder -class TestModules(unittest.TestCase): +class MockResponse: + def __init__(self, data: Union[Dict, List[Dict]], status_code: int, url: str): + self.url: str = url + self.status_code: int = status_code + self.from_cache: bool = False + 
self.reason: str = "Mocked response" + self.data: Union[Dict, List[Dict]] = data + self.content: str = json.dumps(data) + self.headers: Dict[str, str] = {"content-encoding": "test", "connection": "fake"} + + def json(self): + return self.data + + +def mocked_requests_get(url) -> MockResponse: + """Helper function to emulate POST requests responses from the web""" + + url_template = "https://api.github.com/repos/{}/response/" + if url == Path(url_template.format("no_existing_pr"), "pulls?head=TEMPLATE&base=None"): + return MockResponse([], 200, url) + if url == Path(url_template.format("list_prs"), "pulls"): + response_data = [ + { + "state": "closed", + "head": {"ref": "nf-core-template-merge-2"}, + "base": {"ref": "master"}, + "html_url": "pr_url", + } + ] + [ + { + "state": "open", + "head": {"ref": f"nf-core-template-merge-{branch_no}"}, + "base": {"ref": "master"}, + "html_url": "pr_url", + } + for branch_no in range(3, 7) + ] + return MockResponse(response_data, 200, url) + + return MockResponse([{"html_url": url}], 404, url) + + +def mocked_requests_patch(url: str, data: str, **kwargs) -> MockResponse: + """Helper function to emulate POST requests responses from the web""" + + if url == "url_to_update_pr": + return MockResponse({"html_url": "great_success"}, 200, url) + # convert data to dict + response = json.loads(data) + response["patch_url"] = url + return MockResponse(response, 404, url) + + +def mocked_requests_post(url, **kwargs): + """Helper function to emulate POST requests responses from the web""" + + if url == "https://api.github.com/repos/no_existing_pr/response/pulls": + return MockResponse({"html_url": "great_success"}, 201, url) + + return MockResponse({}, 404, url) + + +class TestModules(TestPipelines): """Class for modules tests""" def setUp(self): - """Create a new pipeline to test""" - self.tmp_dir = tempfile.mkdtemp() - self.pipeline_dir = os.path.join(self.tmp_dir, "testpipeline") - default_branch = "master" - self.create_obj = 
nf_core.pipelines.create.create.PipelineCreate( - "testing", - "test pipeline", - "tester", - outdir=self.pipeline_dir, - default_branch=default_branch, - ) - self.create_obj.init_pipeline() - self.remote_path = os.path.join(self.tmp_dir, "remote_repo") + super().setUp() + self.remote_path = Path(self.tmp_dir, "remote_repo") self.remote_repo = git.Repo.init(self.remote_path, bare=True) if self.remote_repo.active_branch.name != "master": - self.remote_repo.active_branch.rename(default_branch) - - def tearDown(self): - if os.path.exists(self.tmp_dir): - shutil.rmtree(self.tmp_dir) + self.remote_repo.active_branch.rename("master") @with_temporary_folder - def test_inspect_sync_dir_notgit(self, tmp_dir): + def test_inspect_sync_dir_notgit(self, tmp_dir: str): """Try syncing an empty directory""" + nf_core_yml_path = Path(tmp_dir, ".nf-core.yml") + nf_core_yml = NFCoreYamlConfig(repository_type="pipeline") + + with open(nf_core_yml_path, "w") as fh: + yaml.dump(nf_core_yml.model_dump(), fh) + psync = nf_core.pipelines.sync.PipelineSync(tmp_dir) with pytest.raises(nf_core.pipelines.sync.SyncExceptionError) as exc_info: psync.inspect_sync_dir() @@ -227,88 +281,6 @@ def test_push_merge_branch_without_create_branch(self): psync.push_merge_branch() assert exc_info.value.args[0].startswith(f"Could not push branch '{psync.merge_branch}'") - def mocked_requests_get(url, **kwargs): - """Helper function to emulate POST requests responses from the web""" - - class MockResponse: - def __init__(self, data, status_code): - self.url = kwargs.get("url") - self.status_code = status_code - self.from_cache = False - self.reason = "Mocked response" - self.data = data - self.content = json.dumps(data) - self.headers = {"content-encoding": "test", "connection": "fake"} - - def json(self): - return self.data - - url_template = "https://api.github.com/repos/{}/response/" - if url == os.path.join(url_template.format("no_existing_pr"), "pulls?head=TEMPLATE&base=None"): - response_data = [] - 
return MockResponse(response_data, 200) - if url == os.path.join(url_template.format("list_prs"), "pulls"): - response_data = [ - { - "state": "closed", - "head": {"ref": "nf-core-template-merge-2"}, - "base": {"ref": "master"}, - "html_url": "pr_url", - } - ] + [ - { - "state": "open", - "head": {"ref": f"nf-core-template-merge-{branch_no}"}, - "base": {"ref": "master"}, - "html_url": "pr_url", - } - for branch_no in range(3, 7) - ] - return MockResponse(response_data, 200) - - return MockResponse({"html_url": url}, 404) - - def mocked_requests_patch(url, **kwargs): - """Helper function to emulate POST requests responses from the web""" - - class MockResponse: - def __init__(self, data, status_code): - self.url = kwargs.get("url") - self.status_code = status_code - self.from_cache = False - self.reason = "Mocked" - self.content = json.dumps(data) - self.headers = {"content-encoding": "test", "connection": "fake"} - - if url == "url_to_update_pr": - response_data = {"html_url": "great_success"} - return MockResponse(response_data, 200) - - return MockResponse({"patch_url": url}, 404) - - def mocked_requests_post(url, **kwargs): - """Helper function to emulate POST requests responses from the web""" - - class MockResponse: - def __init__(self, data, status_code): - self.url = kwargs.get("url") - self.status_code = status_code - self.from_cache = False - self.reason = "Mocked" - self.data = data - self.content = json.dumps(data) - self.headers = {"content-encoding": "test", "connection": "fake"} - - def json(self): - return self.data - - if url == "https://api.github.com/repos/no_existing_pr/response/pulls": - response_data = {"html_url": "great_success"} - return MockResponse(response_data, 201) - - response_data = {} - return MockResponse(response_data, 404) - @mock.patch("nf_core.utils.gh_api.get", side_effect=mocked_requests_get) @mock.patch("nf_core.utils.gh_api.post", side_effect=mocked_requests_post) def test_make_pull_request_success(self, mock_post, 
mock_get): @@ -354,7 +326,7 @@ def test_close_open_template_merge_prs(self, mock_get): prs = mock_get(f"https://api.github.com/repos/{psync.gh_repo}/pulls").data for pr in prs: - if pr["state"] == "open": + if pr.get("state", None) == "open": mock_close_open_pr.assert_any_call(pr) @mock.patch("nf_core.utils.gh_api.post", side_effect=mocked_requests_post) @@ -368,7 +340,7 @@ def test_close_open_pr(self, mock_patch, mock_post): psync.gh_username = "bad_url" psync.gh_repo = "bad_url/response" os.environ["GITHUB_AUTH_TOKEN"] = "test" - pr = { + pr: Dict[str, Union[str, Dict[str, str]]] = { "state": "open", "head": {"ref": "nf-core-template-merge-3"}, "base": {"ref": "master"}, diff --git a/tests/subworkflows/test_lint.py b/tests/subworkflows/test_lint.py index 38bcc2b2c..269300870 100644 --- a/tests/subworkflows/test_lint.py +++ b/tests/subworkflows/test_lint.py @@ -291,10 +291,6 @@ def test_subworkflows_absent_version(self): with open(snap_file, "w") as fh: fh.write(new_content) - import ipdb - - ipdb.set_trace() - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.nfcore_modules) subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") assert len(subworkflow_lint.failed) == 0 @@ -312,31 +308,33 @@ def test_subworkflows_missing_test_dir(self): test_dir_copy = shutil.copytree(test_dir, test_dir.parent / "tests_copy") shutil.rmtree(test_dir) - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.nfcore_modules) + subworkflow_lint = nf_core.subworkflows.SubworkflowLint(self.nfcore_modules) subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") - assert len(subworkflow_lint.failed) == 0 + assert len(subworkflow_lint.failed) == 1 assert len(subworkflow_lint.passed) > 0 assert len(subworkflow_lint.warned) >= 0, f"Linting warned with {[x.__dict__ for x in subworkflow_lint.warned]}" - assert any([x.lint_test == "test_dir_versions" for x in subworkflow_lint.warned]) + assert any([x.lint_test == 
"test_dir_exists" for x in subworkflow_lint.failed]) # cleanup shutil.copytree(test_dir_copy, test_dir) - def test_subworkflows_missing_main_nf(self): - """Test linting a nf-test subworkflow if the main.nf file is missing""" - main_nf = Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "main.nf") - main_nf_copy = shutil.copy(main_nf, main_nf.parent / "main_nf_copy") - main_nf.unlink() - - subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.nfcore_modules) - subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") - assert len(subworkflow_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" - assert len(subworkflow_lint.passed) > 0 - assert len(subworkflow_lint.warned) >= 0 - assert subworkflow_lint.failed[0].lint_test == "main_nf_exists" - - # cleanup - shutil.copy(main_nf_copy, main_nf) + # There are many steps before the actual main_nf linting where we rely on the main_nf file to exist, so this test is not possible for now + # def test_subworkflows_missing_main_nf(self): + # """Test linting a nf-test subworkflow if the main.nf file is missing""" + + # subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.nfcore_modules) + # main_nf = Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "main.nf") + # main_nf_copy = shutil.copy(main_nf, main_nf.parent / "main_nf_copy") + # main_nf.unlink() + # subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") + # assert len(subworkflow_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" + # assert len(subworkflow_lint.passed) > 0 + # assert len(subworkflow_lint.warned) >= 0 + # assert subworkflow_lint.failed[0].lint_test == "main_nf_exists" + + # # cleanup + # shutil.copy(main_nf_copy, main_nf) + # shutil.rmtree(Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow_backup")) def 
test_subworkflows_empty_file_in_snapshot(self): """Test linting a nf-test subworkflow with an empty file sha sum in the test snapshot, which should make it fail (if it is not a stub)""" From 178146f75baf6322e36a752c234c628dde9953c5 Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 25 Jul 2024 13:34:26 +0200 Subject: [PATCH 349/737] fix export of pydantic models --- nf_core/components/components_command.py | 5 +- nf_core/components/update.py | 52 +++++----- nf_core/modules/modules_repo.py | 1 + nf_core/pipelines/create/create.py | 15 +-- nf_core/pipelines/lint_utils.py | 10 +- nf_core/utils.py | 50 ++++++---- tests/pipelines/lint/test_nextflow_config.py | 15 ++- tests/subworkflows/test_update.py | 20 ++-- tests/test_modules.py | 44 ++++----- tests/test_utils.py | 99 +++++++------------- tests/utils.py | 29 +++++- 11 files changed, 182 insertions(+), 158 deletions(-) diff --git a/nf_core/components/components_command.py b/nf_core/components/components_command.py index 13a6fed33..a6b46d1b6 100644 --- a/nf_core/components/components_command.py +++ b/nf_core/components/components_command.py @@ -181,7 +181,10 @@ def load_lint_config(self) -> None: Add parsed config to the `self.lint_config` class attribute. 
""" _, tools_config = nf_core.utils.load_tools_config(self.directory) - self.lint_config = tools_config.get("lint", {}) + if tools_config is None: + raise UserWarning("Could not load `.nf-core.yml` file.") + else: + self.lint_config = tools_config.get("lint", {}) def check_modules_structure(self) -> None: """ diff --git a/nf_core/components/update.py b/nf_core/components/update.py index 9b24b6c0c..eb15f976b 100644 --- a/nf_core/components/update.py +++ b/nf_core/components/update.py @@ -97,11 +97,7 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr updated = [] _, tool_config = nf_core.utils.load_tools_config(self.directory) - self.update_config = tool_config.get("update", {}) - - if self.update_config is None: - raise UserWarning("Could not find '.nf-core.yml' file in pipeline directory") - + self.update_config = getattr(tool_config, "update", {}) or {} self._parameter_checks() # Check modules directory structure @@ -396,27 +392,26 @@ def get_single_component_info(self, component): sha = self.sha config_entry = None - if self.update_config is None: - raise UserWarning("Could not find '.nf-core.yml' file in pipeline directory") - if any( - [ - entry.count("/") == 1 - and (entry.endswith("modules") or entry.endswith("subworkflows")) - and not (entry.endswith(".git") or entry.endswith(".git/")) - for entry in self.update_config.keys() - ] - ): - raise UserWarning( - "Your '.nf-core.yml' file format is outdated. 
" - "The format should be of the form:\n" - "update:\n :\n :\n :" - ) - if isinstance(self.update_config.get(self.modules_repo.remote_url, {}), str): - # If the repo entry is a string, it's the sha to update to - config_entry = self.update_config.get(self.modules_repo.remote_url, {}) - elif component in self.update_config.get(self.modules_repo.remote_url, {}).get(install_dir, {}): - # If the component to update is in .nf-core.yml config file - config_entry = self.update_config[self.modules_repo.remote_url][install_dir].get(component) + if self.update_config is not None: + if any( + [ + entry.count("/") == 1 + and (entry.endswith("modules") or entry.endswith("subworkflows")) + and not (entry.endswith(".git") or entry.endswith(".git/")) + for entry in self.update_config.keys() + ] + ): + raise UserWarning( + "Your '.nf-core.yml' file format is outdated. " + "The format should be of the form:\n" + "update:\n :\n :\n :" + ) + if isinstance(self.update_config.get(self.modules_repo.remote_url, {}), str): + # If the repo entry is a string, it's the sha to update to + config_entry = self.update_config.get(self.modules_repo.remote_url, {}) + elif component in self.update_config.get(self.modules_repo.remote_url, {}).get(install_dir, {}): + # If the component to update is in .nf-core.yml config file + config_entry = self.update_config[self.modules_repo.remote_url][install_dir].get(component) if config_entry is not None and config_entry is not True: if config_entry is False: log.warn( @@ -481,6 +476,7 @@ def get_all_components_info(self, branch=None): components_info = {} # Loop through all the modules/subworkflows in the pipeline # and check if they have an entry in the '.nf-core.yml' file + for repo_name, components in self.modules_json.get_all_components(self.component_type).items(): if isinstance(self.update_config, dict) and ( repo_name not in self.update_config or self.update_config[repo_name] is True @@ -630,10 +626,8 @@ def get_all_components_info(self, branch=None): 
overridden_repos.append(repo_name) elif isinstance(self.update_config, dict) and self.update_config[repo_name] is False: skipped_repos.append(repo_name) - elif not isinstance(self.update_config, dict): - raise UserWarning("`.nf-core.yml` is not correctly formatted.") else: - raise UserWarning(f"Repo '{repo_name}' has an invalid entry in '.nf-core.yml'") + log.debug(f"no update config for {repo_name} in `.nf-core.yml`") if skipped_repos: skipped_str = "', '".join(skipped_repos) diff --git a/nf_core/modules/modules_repo.py b/nf_core/modules/modules_repo.py index daa7b5981..7d576d4ae 100644 --- a/nf_core/modules/modules_repo.py +++ b/nf_core/modules/modules_repo.py @@ -63,6 +63,7 @@ def __init__( self.setup_local_repo(remote_url, branch, hide_progress) config_fn, repo_config = load_tools_config(self.local_repo_dir) + assert config_fn is not None and repo_config is not None # mypy try: self.repo_path = repo_config["org_path"] except KeyError: diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index b7d86c5bd..deeb5554a 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -376,13 +376,14 @@ def render_template(self): if self.config: config_fn, config_yml = nf_core.utils.load_tools_config(self.outdir) - with open(config_fn, "w") as fh: - config_yml.update(template=self.config.model_dump()) - # convert posix path to string for yaml dump - config_yml["template"]["outdir"] = str(config_yml["template"]["outdir"]) - yaml.safe_dump(config_yml, fh) - log.debug(f"Dumping pipeline template yml to pipeline config file '{config_fn.name}'") - run_prettier_on_file(self.outdir / config_fn) + if config_fn is not None and config_yml is not None: + with open(str(config_fn), "w") as fh: + config_yml.template = self.config.model_dump() + # convert posix path to string for yaml dump + config_yml["template"]["outdir"] = str(config_yml["template"]["outdir"]) + yaml.safe_dump(config_yml.model_dump(), fh) + 
log.debug(f"Dumping pipeline template yml to pipeline config file '{config_fn.name}'") + run_prettier_on_file(self.outdir / config_fn) def update_nextflow_schema(self): """ diff --git a/nf_core/pipelines/lint_utils.py b/nf_core/pipelines/lint_utils.py index 4ccf79007..ccab76295 100644 --- a/nf_core/pipelines/lint_utils.py +++ b/nf_core/pipelines/lint_utils.py @@ -110,9 +110,13 @@ def ignore_file(lint_name: str, file_path: Path, dir_path: Path) -> List[List[st passed: List[str] = [] failed: List[str] = [] ignored: List[str] = [] - _, lint_conf = nf_core.utils.load_tools_config(dir_path) - lint_conf = lint_conf.get("lint", {}) - ignore_entry: List[str] | bool = lint_conf.get(lint_name, []) + _, pipeline_conf = nf_core.utils.load_tools_config(dir_path) + lint_conf = getattr(pipeline_conf, "lint", None) or None + + if lint_conf is None: + ignore_entry: List[str] = [] + else: + ignore_entry = lint_conf.get(lint_name, []) full_path = dir_path / file_path # Return a failed status if we can't find the file if not full_path.is_file(): diff --git a/nf_core/utils.py b/nf_core/utils.py index 4d0566b1a..6794cf04f 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -19,7 +19,7 @@ import time from contextlib import contextmanager from pathlib import Path -from typing import Dict, Generator, List, Optional, Tuple, Union +from typing import Any, Dict, Generator, List, Optional, Tuple, Union import git import prompt_toolkit.styles @@ -1042,25 +1042,37 @@ def get_repo_releases_branches(pipeline, wfs): class NFCoreTemplateConfig(BaseModel): - org: str - name: str - description: str - author: str - version: Optional[str] - force: Optional[bool] - outdir: Optional[str] - skip_features: Optional[list] - is_nfcore: Optional[bool] + org: Optional[str] = None + name: Optional[str] = None + description: Optional[str] = None + author: Optional[str] = None + version: Optional[str] = None + force: Optional[bool] = None + outdir: Optional[str] = None + skip_features: Optional[list] = None + 
is_nfcore: Optional[bool] = None + + +LintConfigType = Optional[Dict[str, Union[List[str], List[Dict[str, List[str]]], bool]]] class NFCoreYamlConfig(BaseModel): - nf_core_version: str repository_type: str - org_path: str - template: NFCoreTemplateConfig + nf_core_version: Optional[str] = None + org_path: Optional[str] = None + lint: LintConfigType = None + template: Optional[NFCoreTemplateConfig] = None + bump_version: Optional[Dict[str, bool]] = None + update: Optional[Dict[str, Union[str, bool, Dict[str, Union[str, Dict[str, Union[str, bool]]]]]]] = None + + def __getitem__(self, item: str) -> Any: + return getattr(self, item) + + def get(self, item: str, default: Any = None) -> Any: + return getattr(self, item, default) -def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Path, NFCoreYamlConfig]: +def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Optional[Path], Optional[NFCoreYamlConfig]]: """ Parse the nf-core.yml configuration file @@ -1078,11 +1090,12 @@ def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Path, NFCoreYa if config_fn is None: depr_path = get_first_available_path(directory, DEPRECATED_CONFIG_PATHS) if depr_path: - raise AssertionError( + raise UserWarning( f"Deprecated `{depr_path.name}` file found! Please rename the file to `{CONFIG_PATHS[0]}`." 
) else: - raise AssertionError(f"Could not find a config file in the directory '{directory}'") + log.debug(f"Could not find a config file in the directory '{directory}'") + return Path(directory, CONFIG_PATHS[0]), None with open(str(config_fn)) as fh: tools_config = yaml.safe_load(fh) @@ -1094,7 +1107,10 @@ def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Path, NFCoreYa try: nf_core_yaml_config = NFCoreYamlConfig(**tools_config) except ValidationError as e: - raise AssertionError(f"Config file '{config_fn}' is invalid: {e}") + error_message = f"Config file '{config_fn}' is invalid" + for error in e.errors(): + error_message += f"\n{error['loc'][0]}: {error['msg']}" + raise AssertionError(error_message) log.debug("Using config file: %s", config_fn) return config_fn, nf_core_yaml_config diff --git a/tests/pipelines/lint/test_nextflow_config.py b/tests/pipelines/lint/test_nextflow_config.py index 01173aec3..3cc935545 100644 --- a/tests/pipelines/lint/test_nextflow_config.py +++ b/tests/pipelines/lint/test_nextflow_config.py @@ -2,8 +2,11 @@ import re from pathlib import Path +import yaml + import nf_core.pipelines.create.create import nf_core.pipelines.lint +from nf_core.utils import NFCoreYamlConfig from ..test_lint import TestLint @@ -124,11 +127,13 @@ def test_allow_params_reference_in_main_nf(self): def test_default_values_ignored(self): """Test ignoring linting of default values.""" # Add max_cpus to the ignore list - nf_core_yml = Path(self.new_pipeline) / ".nf-core.yml" - with open(nf_core_yml, "w") as f: - f.write( - "repository_type: pipeline\nlint:\n nextflow_config:\n - config_defaults:\n - params.max_cpus\n" - ) + nf_core_yml_path = Path(self.new_pipeline) / ".nf-core.yml" + nf_core_yml = NFCoreYamlConfig( + repository_type="pipeline", lint={"nextflow_config": [{"config_defaults": ["params.max_cpus"]}]} + ) + with open(nf_core_yml_path, "w") as f: + yaml.dump(nf_core_yml.model_dump(), f) + lint_obj = 
nf_core.pipelines.lint.PipelineLint(self.new_pipeline) lint_obj.load_pipeline_config() lint_obj._load_lint_config() diff --git a/tests/subworkflows/test_update.py b/tests/subworkflows/test_update.py index d3b243357..7b17a621b 100644 --- a/tests/subworkflows/test_update.py +++ b/tests/subworkflows/test_update.py @@ -156,9 +156,10 @@ def test_update_with_config_fixed_version(self): # Fix the subworkflow version in the .nf-core.yml to an old version update_config = {NF_CORE_MODULES_REMOTE: {NF_CORE_MODULES_NAME: {"fastq_align_bowtie2": OLD_SUBWORKFLOWS_SHA}}} config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - tools_config["update"] = update_config + setattr(tools_config, "update", update_config) + assert config_fn is not None and tools_config is not None # mypy with open(Path(self.pipeline_dir, config_fn), "w") as f: - yaml.dump(tools_config, f) + yaml.dump(tools_config.model_dump(), f) # Update all subworkflows in the pipeline update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=True, show_diff=False) @@ -186,9 +187,10 @@ def test_update_with_config_dont_update(self): # Set the fastq_align_bowtie2 field to no update in the .nf-core.yml update_config = {NF_CORE_MODULES_REMOTE: {NF_CORE_MODULES_NAME: {"fastq_align_bowtie2": False}}} config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - tools_config["update"] = update_config + setattr(tools_config, "update", update_config) + assert config_fn is not None and tools_config is not None # mypy with open(Path(self.pipeline_dir, config_fn), "w") as f: - yaml.dump(tools_config, f) + yaml.dump(tools_config.model_dump(), f) # Update all modules in the pipeline update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=True, show_diff=False) @@ -216,9 +218,10 @@ def test_update_with_config_fix_all(self): # Fix the version of all nf-core subworkflows in the .nf-core.yml to an old version update_config = {NF_CORE_MODULES_REMOTE: OLD_SUBWORKFLOWS_SHA} config_fn, 
tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - tools_config["update"] = update_config + setattr(tools_config, "update", update_config) + assert config_fn is not None and tools_config is not None # mypy with open(Path(self.pipeline_dir, config_fn), "w") as f: - yaml.dump(tools_config, f) + yaml.dump(tools_config.model_dump(), f) # Update fastq_align_bowtie2 update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=False, update_deps=True, show_diff=False) @@ -246,9 +249,10 @@ def test_update_with_config_no_updates(self): # Set all repository updates to False update_config = {NF_CORE_MODULES_REMOTE: False} config_fn, tools_config = nf_core.utils.load_tools_config(self.pipeline_dir) - tools_config["update"] = update_config + setattr(tools_config, "update", update_config) + assert config_fn is not None and tools_config is not None # mypy with open(Path(self.pipeline_dir, config_fn), "w") as f: - yaml.dump(tools_config, f) + yaml.dump(tools_config.model_dump(), f) # Update all subworkflows in the pipeline update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=True, show_diff=False) diff --git a/tests/test_modules.py b/tests/test_modules.py index 13bf32f97..9ce74fd4e 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -1,8 +1,6 @@ """Tests covering the modules commands""" import json -import os -import shutil import unittest from pathlib import Path @@ -12,7 +10,13 @@ import yaml import nf_core.modules +import nf_core.modules.create +import nf_core.modules.install +import nf_core.modules.modules_repo +import nf_core.modules.remove import nf_core.pipelines.create.create +from nf_core import __version__ +from nf_core.utils import NFCoreYamlConfig from .utils import ( GITLAB_BRANCH_TEST_BRANCH, @@ -34,24 +38,28 @@ def create_modules_repo_dummy(tmp_dir): Path(root_dir, "modules", "nf-core").mkdir(parents=True) Path(root_dir, "tests", "modules", "nf-core").mkdir(parents=True) Path(root_dir, "tests", "config").mkdir(parents=True) + + 
nf_core_yml = NFCoreYamlConfig(nf_core_version=__version__, repository_type="modules", org_path="nf-core") with open(Path(root_dir, ".nf-core.yml"), "w") as fh: - fh.writelines(["repository_type: modules", "\n", "org_path: nf-core", "\n"]) + yaml.dump(nf_core_yml.model_dump(), fh) # mock biocontainers and anaconda response with responses.RequestsMock() as rsps: mock_anaconda_api_calls(rsps, "bpipe", "0.9.12--hdfd78af_0") mock_biocontainers_api_calls(rsps, "bpipe", "0.9.12--hdfd78af_0") # bpipe is a valid package on bioconda that is very unlikely to ever be added to nf-core/modules - module_create = nf_core.modules.ModuleCreate(root_dir, "bpipe/test", "@author", "process_single", False, False) + module_create = nf_core.modules.create.ModuleCreate( + root_dir, "bpipe/test", "@author", "process_single", False, False + ) with requests_cache.disabled(): - module_create.create() + assert module_create.create() # Remove doi from meta.yml which makes lint fail meta_yml_path = Path(root_dir, "modules", "nf-core", "bpipe", "test", "meta.yml") - with open(meta_yml_path) as fh: + with open(str(meta_yml_path)) as fh: meta_yml = yaml.safe_load(fh) del meta_yml["tools"][0]["bpipe"]["doi"] - with open(meta_yml_path, "w") as fh: + with open(str(meta_yml_path), "w") as fh: yaml.dump(meta_yml, fh) # Add dummy content to main.nf.test.snap test_snap_path = Path(root_dir, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test.snap") @@ -102,8 +110,8 @@ def setUp(self): self.tmp_dir, self.template_dir, self.pipeline_name, self.pipeline_dir = create_tmp_pipeline() # Set up install objects - self.mods_install = nf_core.modules.ModuleInstall(self.pipeline_dir, prompt=False, force=True) - self.mods_install_old = nf_core.modules.ModuleInstall( + self.mods_install = nf_core.modules.install.ModuleInstall(self.pipeline_dir, prompt=False, force=True) + self.mods_install_old = nf_core.modules.install.ModuleInstall( self.pipeline_dir, prompt=False, force=False, @@ -111,21 +119,21 @@ def 
setUp(self): remote_url=GITLAB_URL, branch=OLD_TRIMGALORE_BRANCH, ) - self.mods_install_trimgalore = nf_core.modules.ModuleInstall( + self.mods_install_trimgalore = nf_core.modules.install.ModuleInstall( self.pipeline_dir, prompt=False, force=False, remote_url=GITLAB_URL, branch=OLD_TRIMGALORE_BRANCH, ) - self.mods_install_gitlab = nf_core.modules.ModuleInstall( + self.mods_install_gitlab = nf_core.modules.install.ModuleInstall( self.pipeline_dir, prompt=False, force=False, remote_url=GITLAB_URL, branch=GITLAB_DEFAULT_BRANCH, ) - self.mods_install_gitlab_old = nf_core.modules.ModuleInstall( + self.mods_install_gitlab_old = nf_core.modules.install.ModuleInstall( self.pipeline_dir, prompt=False, force=False, @@ -135,8 +143,8 @@ def setUp(self): ) # Set up remove objects - self.mods_remove = nf_core.modules.ModuleRemove(self.pipeline_dir) - self.mods_remove_gitlab = nf_core.modules.ModuleRemove( + self.mods_remove = nf_core.modules.remove.ModuleRemove(self.pipeline_dir) + self.mods_remove_gitlab = nf_core.modules.remove.ModuleRemove( self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_DEFAULT_BRANCH, @@ -145,15 +153,9 @@ def setUp(self): # Set up the nf-core/modules repo dummy self.nfcore_modules = create_modules_repo_dummy(self.tmp_dir) - def tearDown(self): - """Clean up temporary files and folders""" - - if os.path.exists(self.tmp_dir): - shutil.rmtree(self.tmp_dir) - def test_modulesrepo_class(self): """Initialise a modules repo object""" - modrepo = nf_core.modules.ModulesRepo() + modrepo = nf_core.modules.modules_repo.ModulesRepo() assert modrepo.repo_path == "nf-core" assert modrepo.branch == "master" diff --git a/tests/test_utils.py b/tests/test_utils.py index 860cba5ba..0d012716a 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,9 +1,6 @@ """Tests covering for utility functions.""" import os -import shutil -import tempfile -import unittest from pathlib import Path from unittest import mock @@ -14,6 +11,7 @@ import nf_core.pipelines.list 
import nf_core.utils +from .test_pipelines import TestPipelines from .utils import with_temporary_folder TEST_DATA_DIR = Path(__file__).parent / "data" @@ -28,31 +26,9 @@ def test_strip_ansi_codes(): assert stripped == "ls examplefile.zip" -class TestUtils(unittest.TestCase): +class TestUtils(TestPipelines): """Class for utils tests""" - def setUp(self): - """Function that runs at start of tests for common resources - - Use nf_core.pipelines.create() to make a pipeline that we can use for testing - """ - self.tmp_dir = tempfile.mkdtemp() - self.test_pipeline_dir = os.path.join(self.tmp_dir, "nf-core-testpipeline") - self.create_obj = nf_core.pipelines.create.create.PipelineCreate( - "testpipeline", - "This is a test pipeline", - "Test McTestFace", - no_git=True, - outdir=self.test_pipeline_dir, - ) - self.create_obj.init_pipeline() - # Base Pipeline object on this directory - self.pipeline_obj = nf_core.utils.Pipeline(self.test_pipeline_dir) - - def tearDown(self): - if os.path.exists(self.tmp_dir): - shutil.rmtree(self.tmp_dir) - def test_check_if_outdated_1(self): current_version = "1.0" remote_version = "2.0" @@ -110,7 +86,7 @@ def testload_pipeline_config(self): def test_list_files_git(self): """Test listing pipeline files using `git ls`""" files = self.pipeline_obj.list_files() - assert Path(self.test_pipeline_dir, "main.nf") in files + assert Path(self.pipeline_dir, "main.nf") in files @with_temporary_folder def test_list_files_no_git(self, tmpdir): @@ -193,46 +169,39 @@ def test_get_repo_releases_branches_not_exists_slash(self): with pytest.raises(AssertionError): nf_core.utils.get_repo_releases_branches("made-up/pipeline", wfs) - -def test_validate_file_md5(): - # MD5(test) = d8e8fca2dc0f896fd7cb4cb0031ba249 - test_file = TEST_DATA_DIR / "test.txt" - test_file_md5 = "d8e8fca2dc0f896fd7cb4cb0031ba249" - different_md5 = "9e7b964750cf0bb08ee960fce356b6d6" - non_hex_string = "s" - assert nf_core.utils.validate_file_md5(test_file, test_file_md5) - with 
pytest.raises(IOError): - nf_core.utils.validate_file_md5(test_file, different_md5) - with pytest.raises(ValueError): - nf_core.utils.validate_file_md5(test_file, non_hex_string) - - -def test_nested_setitem(): - d = {"a": {"b": {"c": "value"}}} - nf_core.utils.nested_setitem(d, ["a", "b", "c"], "value new") - assert d["a"]["b"]["c"] == "value new" - assert d == {"a": {"b": {"c": "value new"}}} - - -def test_nested_delitem(): - d = {"a": {"b": {"c": "value"}}} - nf_core.utils.nested_delitem(d, ["a", "b", "c"]) - assert "c" not in d["a"]["b"] - assert d == {"a": {"b": {}}} - - -def test_set_wd(): - with tempfile.TemporaryDirectory() as tmpdirname: - with nf_core.utils.set_wd(tmpdirname): + def test_validate_file_md5(self): + # MD5(test) = d8e8fca2dc0f896fd7cb4cb0031ba249 + test_file = TEST_DATA_DIR / "test.txt" + test_file_md5 = "d8e8fca2dc0f896fd7cb4cb0031ba249" + different_md5 = "9e7b964750cf0bb08ee960fce356b6d6" + non_hex_string = "s" + assert nf_core.utils.validate_file_md5(test_file, test_file_md5) + with pytest.raises(IOError): + nf_core.utils.validate_file_md5(test_file, different_md5) + with pytest.raises(ValueError): + nf_core.utils.validate_file_md5(test_file, non_hex_string) + + def test_nested_setitem(self): + d = {"a": {"b": {"c": "value"}}} + nf_core.utils.nested_setitem(d, ["a", "b", "c"], "value new") + assert d["a"]["b"]["c"] == "value new" + assert d == {"a": {"b": {"c": "value new"}}} + + def test_nested_delitem(self): + d = {"a": {"b": {"c": "value"}}} + nf_core.utils.nested_delitem(d, ["a", "b", "c"]) + assert "c" not in d["a"]["b"] + assert d == {"a": {"b": {}}} + + def test_set_wd(self): + with nf_core.utils.set_wd(self.tmp_dir): context_wd = Path().resolve() - assert context_wd == Path(tmpdirname).resolve() + assert context_wd == Path(self.tmp_dir).resolve() assert context_wd != Path().resolve() - -def test_set_wd_revert_on_raise(): - wd_before_context = Path().resolve() - with tempfile.TemporaryDirectory() as tmpdirname: + def 
test_set_wd_revert_on_raise(self): + wd_before_context = Path().resolve() with pytest.raises(Exception): - with nf_core.utils.set_wd(tmpdirname): + with nf_core.utils.set_wd(self.tmp_dir): raise Exception - assert wd_before_context == Path().resolve() + assert wd_before_context == Path().resolve() diff --git a/tests/utils.py b/tests/utils.py index 90c4ae041..1d5a8a115 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -9,9 +9,12 @@ from typing import Any, Callable, Tuple import responses +import yaml import nf_core.modules import nf_core.pipelines.create.create +from nf_core import __version__ +from nf_core.utils import NFCoreTemplateConfig, NFCoreYamlConfig TEST_DATA_DIR = Path(__file__).parent / "data" OLD_TRIMGALORE_SHA = "9b7a3bdefeaad5d42324aa7dd50f87bea1b04386" @@ -95,7 +98,7 @@ def mock_biocontainers_api_calls(rsps: responses.RequestsMock, module: str, vers rsps.get(biocontainers_api_url, json=biocontainers_mock, status=200) -def create_tmp_pipeline() -> Tuple[Path, Path, str, Path]: +def create_tmp_pipeline(no_git: bool = False) -> Tuple[Path, Path, str, Path]: """Create a new Pipeline for testing""" tmp_dir = Path(tempfile.TemporaryDirectory().name) @@ -103,9 +106,31 @@ def create_tmp_pipeline() -> Tuple[Path, Path, str, Path]: template_dir = root_repo_dir / "nf_core" / "pipeline-template" pipeline_name = "mypipeline" pipeline_dir = tmp_dir / pipeline_name + pipeline_dir.mkdir(parents=True) + + nf_core_yml = NFCoreYamlConfig( + nf_core_version=__version__, + repository_type="modules", + org_path="nf-core", + lint=None, + template=NFCoreTemplateConfig( + name="mypipeline", + author="me", + description="it is mine", + org="nf-core", + version=None, + force=True, + is_nfcore=None, + skip_features=None, + outdir=None, + ), + bump_version=None, + ) + with open(str(Path(pipeline_dir, ".nf-core.yml")), "w") as fh: + yaml.dump(nf_core_yml.model_dump(), fh) nf_core.pipelines.create.create.PipelineCreate( - pipeline_name, "it is mine", "me", no_git=False, 
outdir=pipeline_dir + pipeline_name, "it is mine", "me", no_git=no_git, outdir=pipeline_dir, force=True ).init_pipeline() # return values to instance variables for later use in test methods From d0a968ceb3ffe85e87303cc0d63838dbbc6a71b9 Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 25 Jul 2024 13:35:06 +0200 Subject: [PATCH 350/737] fix tests --- nf_core/modules/lint/__init__.py | 4 +- nf_core/pipelines/lint/files_exist.py | 2 +- nf_core/pipelines/lint/nfcore_yml.py | 3 +- nf_core/pipelines/lint/schema_description.py | 2 +- nf_core/pipelines/lint/template_strings.py | 2 +- nf_core/pipelines/sync.py | 45 ++++++++++--------- .../subworkflows/lint/subworkflow_tests.py | 11 +++-- tests/modules/test_modules_json.py | 2 +- tests/pipelines/test_lint.py | 4 +- 9 files changed, 39 insertions(+), 36 deletions(-) diff --git a/nf_core/modules/lint/__init__.py b/nf_core/modules/lint/__init__.py index 2b10b4df5..cea75d8f2 100644 --- a/nf_core/modules/lint/__init__.py +++ b/nf_core/modules/lint/__init__.py @@ -228,7 +228,7 @@ def lint_module( # TODO: consider unifying modules and subworkflows lint_module() function and add it to the ComponentLint class # Only check the main script in case of a local module if local: - self.main_nf(mod, fix_version, self.registry, progress_bar) + self.main_nf(mod, fix_version, registry, progress_bar) self.passed += [LintResult(mod, *m) for m in mod.passed] warned = [LintResult(mod, *m) for m in (mod.warned + mod.failed)] if not self.fail_warned: @@ -245,7 +245,7 @@ def lint_module( for test_name in self.lint_tests: if test_name == "main_nf": - getattr(self, test_name)(mod, fix_version, self.registry, progress_bar) + getattr(self, test_name)(mod, fix_version, registry, progress_bar) else: getattr(self, test_name)(mod) diff --git a/nf_core/pipelines/lint/files_exist.py b/nf_core/pipelines/lint/files_exist.py index edad62aab..ad0605dcf 100644 --- a/nf_core/pipelines/lint/files_exist.py +++ b/nf_core/pipelines/lint/files_exist.py @@ -205,7 +205,7 
@@ def files_exist(self) -> Dict[str, List[str]]: ] # Remove files that should be ignored according to the linting config - ignore_files = self.lint_config.get("files_exist", []) + ignore_files = self.lint_config.get("files_exist", []) if self.lint_config is not None else [] def pf(file_path: Union[str, Path]) -> Path: return Path(self.wf_path, file_path) diff --git a/nf_core/pipelines/lint/nfcore_yml.py b/nf_core/pipelines/lint/nfcore_yml.py index f23b2f1a8..e0d5fb200 100644 --- a/nf_core/pipelines/lint/nfcore_yml.py +++ b/nf_core/pipelines/lint/nfcore_yml.py @@ -27,8 +27,7 @@ def nfcore_yml(self) -> Dict[str, List[str]]: ignored: List[str] = [] # Remove field that should be ignored according to the linting config - ignore_configs = self.lint_config.get(".nf-core", []) - + ignore_configs = self.lint_config.get(".nf-core", []) if self.lint_config is not None else [] try: with open(Path(self.wf_path, ".nf-core.yml")) as fh: content = fh.read() diff --git a/nf_core/pipelines/lint/schema_description.py b/nf_core/pipelines/lint/schema_description.py index 82165b6e7..d617e4094 100644 --- a/nf_core/pipelines/lint/schema_description.py +++ b/nf_core/pipelines/lint/schema_description.py @@ -24,7 +24,7 @@ def schema_description(self): self.schema_obj.load_lint_schema() # Get parameters that should be ignored according to the linting config - ignore_params = self.lint_config.get("schema_description", []) + ignore_params = self.lint_config.get("schema_description", []) if self.lint_config is not None else [] # Get ungrouped params if "properties" in self.schema_obj.schema.keys(): diff --git a/nf_core/pipelines/lint/template_strings.py b/nf_core/pipelines/lint/template_strings.py index 90c47203f..37a1f64da 100644 --- a/nf_core/pipelines/lint/template_strings.py +++ b/nf_core/pipelines/lint/template_strings.py @@ -38,7 +38,7 @@ def template_strings(self): failed = [] ignored = [] # Files that should be ignored according to the linting config - ignore_files = 
self.lint_config.get("template_strings", []) + ignore_files = self.lint_config.get("template_strings", []) if self.lint_config is not None else [] files = self.list_files() # Loop through files, searching for string diff --git a/nf_core/pipelines/sync.py b/nf_core/pipelines/sync.py index a309fa8c3..b1da99a62 100644 --- a/nf_core/pipelines/sync.py +++ b/nf_core/pipelines/sync.py @@ -10,6 +10,7 @@ import git import questionary import requests +import requests.auth import requests_cache import rich import yaml @@ -86,23 +87,23 @@ def __init__( self.pr_url = "" self.config_yml_path, self.config_yml = nf_core.utils.load_tools_config(self.pipeline_dir) - + assert self.config_yml_path is not None # mypy # Throw deprecation warning if template_yaml_path is set if template_yaml_path is not None: log.warning( f"The `template_yaml_path` argument is deprecated. Saving pipeline creation settings in .nf-core.yml instead. Please remove {template_yaml_path} file." ) - if "template" in self.config_yml: + if getattr(self.config_yml, "template", None) is not None: overwrite_template = questionary.confirm( f"A template section already exists in '{self.config_yml_path}'. Do you want to overwrite?", style=nf_core.utils.nfcore_question_style, default=False, ).unsafe_ask() - if overwrite_template or "template" not in self.config_yml: + if overwrite_template or getattr(self.config_yml, "template", None) is None: with open(template_yaml_path) as f: - self.config_yml["template"] = yaml.safe_load(f) + self.config_yml.template = yaml.safe_load(f) with open(self.config_yml_path, "w") as fh: - yaml.safe_dump(self.config_yml, fh) + yaml.safe_dump(self.config_yml.model_dump(), fh) log.info(f"Saved pipeline creation settings to '{self.config_yml_path}'") raise SystemExit( f"Please commit your changes and delete the {template_yaml_path} file. Then run the sync command again." 
@@ -259,11 +260,12 @@ def make_template_pipeline(self): # Only show error messages from pipeline creation logging.getLogger("nf_core.pipelines.create").setLevel(logging.ERROR) - + assert self.config_yml_path is not None + assert self.config_yml is not None # Re-write the template yaml info from .nf-core.yml config - if "template" in self.config_yml: + if getattr(self.config_yml, "template", None) is not None: with open(self.config_yml_path, "w") as config_path: - yaml.safe_dump(self.config_yml, config_path) + yaml.safe_dump(self.config_yml.model_dump(), config_path) try: nf_core.pipelines.create.create.PipelineCreate( @@ -411,21 +413,24 @@ def close_open_template_merge_prs(self): return False for pr in list_prs_json: - log.debug(f"Looking at PR from '{pr['head']['ref']}': {pr['html_url']}") - # Ignore closed PRs - if pr["state"] != "open": - log.debug(f"Ignoring PR as state not open ({pr['state']}): {pr['html_url']}") - continue + if isinstance(pr, int): + log.debug(f"Incorrect PR format: {pr}") + else: + log.debug(f"Looking at PR from '{pr['head']['ref']}': {pr['html_url']}") + # Ignore closed PRs + if pr["state"] != "open": + log.debug(f"Ignoring PR as state not open ({pr['state']}): {pr['html_url']}") + continue - # Don't close the new PR that we just opened - if pr["head"]["ref"] == self.merge_branch: - continue + # Don't close the new PR that we just opened + if pr["head"]["ref"] == self.merge_branch: + continue - # PR is from an automated branch and goes to our target base - if pr["head"]["ref"].startswith("nf-core-template-merge-") and pr["base"]["ref"] == self.from_branch: - self.close_open_pr(pr) + # PR is from an automated branch and goes to our target base + if pr["head"]["ref"].startswith("nf-core-template-merge-") and pr["base"]["ref"] == self.from_branch: + self.close_open_pr(pr) - def close_open_pr(self, pr): + def close_open_pr(self, pr) -> bool: """Given a PR API response, add a comment and close.""" log.debug(f"Attempting to close PR: 
'{pr['html_url']}'") diff --git a/nf_core/subworkflows/lint/subworkflow_tests.py b/nf_core/subworkflows/lint/subworkflow_tests.py index af3933474..7ca825f04 100644 --- a/nf_core/subworkflows/lint/subworkflow_tests.py +++ b/nf_core/subworkflows/lint/subworkflow_tests.py @@ -27,14 +27,14 @@ def subworkflow_tests(_, subworkflow: NFCoreComponent): repo_dir = subworkflow.component_dir.parts[ : subworkflow.component_dir.parts.index(subworkflow.component_name.split("/")[0]) ][-1] - test_dir = Path( + pytest_dir = Path( subworkflow.base_dir, "tests", "subworkflows", repo_dir, subworkflow.component_name, ) - pytest_main_nf = Path(test_dir, "main.nf") + pytest_main_nf = Path(pytest_dir, "main.nf") is_pytest = pytest_main_nf.is_file() log.debug(f"{pytest_main_nf} is pytest: {is_pytest}") if subworkflow.nftest_testdir.is_dir(): @@ -265,8 +265,7 @@ def subworkflow_tests(_, subworkflow: NFCoreComponent): # Check that the old test directory does not exist if not is_pytest: - old_test_dir = Path(subworkflow.base_dir, "tests", "subworkflows", subworkflow.component_name) - if old_test_dir.is_dir(): - subworkflow.failed.append(("test_old_test_dir", "old test directory exists", old_test_dir)) + if pytest_dir.is_dir(): + subworkflow.failed.append(("test_old_test_dir", "old test directory exists", pytest_dir)) else: - subworkflow.passed.append(("test_old_test_dir", "old test directory does not exist", old_test_dir)) + subworkflow.passed.append(("test_old_test_dir", "old test directory does not exist", pytest_dir)) diff --git a/tests/modules/test_modules_json.py b/tests/modules/test_modules_json.py index 2ab058fa7..845f9c3e3 100644 --- a/tests/modules/test_modules_json.py +++ b/tests/modules/test_modules_json.py @@ -220,7 +220,7 @@ def test_mod_json_with_empty_modules_value(self): mod_json_obj.create() # Create modules.json explicitly to get correct module sha mod_json_orig = mod_json_obj.get_modules_json() mod_json = copy.deepcopy(mod_json_orig) - 
mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"] = "" + mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"] = {} # save the altered module.json and load it again to check if it will fix itself mod_json_obj.modules_json = mod_json mod_json_obj.dump() diff --git a/tests/pipelines/test_lint.py b/tests/pipelines/test_lint.py index ab8bcf6b2..9ca29d249 100644 --- a/tests/pipelines/test_lint.py +++ b/tests/pipelines/test_lint.py @@ -47,7 +47,7 @@ def test_init_pipeline_lint(self): def test_load_lint_config_not_found(self): """Try to load a linting config file that doesn't exist""" - self.lint_obj._load_lint_config() + assert self.lint_obj._load_lint_config() assert self.lint_obj.lint_config == {} def test_load_lint_config_ignore_all_tests(self): @@ -58,7 +58,7 @@ def test_load_lint_config_ignore_all_tests(self): lint_obj = nf_core.pipelines.lint.PipelineLint(new_pipeline) # Make a config file listing all test names - config_dict = {"lint": {test_name: False for test_name in lint_obj.lint_tests}} + config_dict = {"repository_type": "pipeline", "lint": {test_name: False for test_name in lint_obj.lint_tests}} with open(Path(new_pipeline, ".nf-core.yml"), "w") as fh: yaml.dump(config_dict, fh) From 33f461aa5a3a888dd0914796c99b3201b2a06f3e Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 25 Jul 2024 13:35:26 +0200 Subject: [PATCH 351/737] migrate to pathlib --- nf_core/modules/modules_json.py | 2 +- nf_core/pipelines/create/create.py | 2 +- tests/pipelines/test_create.py | 11 +++++++---- 3 files changed, 9 insertions(+), 6 deletions(-) diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 02ce6fa5a..01785c6d6 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -1192,7 +1192,7 @@ def components_with_repos(): self.modules_dir, modules_repo.repo_path, ) - for dir_name, _, _ in os.walk(repo_url_path): + for dir_name, _, _ in Path.walk(repo_url_path): if component_type == "modules": if 
len(Path(directory).parts) > 1: # The module name is TOOL/SUBTOOL paths_in_directory.append(str(Path(*Path(dir_name).parts[-2:]))) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index deeb5554a..f032db7f6 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -516,7 +516,7 @@ def fix_linting(self): with open(self.outdir / config_fn, "w") as fh: yaml.dump(nf_core_yml.model_dump(), fh, default_flow_style=False, sort_keys=False) - run_prettier_on_file(os.path.join(self.outdir, config_fn)) + run_prettier_on_file(Path(self.outdir, config_fn)) def make_pipeline_logo(self): """Fetch a logo for the new pipeline from the nf-core website""" diff --git a/tests/pipelines/test_create.py b/tests/pipelines/test_create.py index bb27445e7..35cff5cab 100644 --- a/tests/pipelines/test_create.py +++ b/tests/pipelines/test_create.py @@ -2,6 +2,7 @@ import os import unittest +from pathlib import Path import git import yaml @@ -22,7 +23,8 @@ def setUp(self): self.pipeline_version = "1.0.0" self.default_branch = "default" - def test_pipeline_creation(self): + @with_temporary_folder + def test_pipeline_creation(self, tmp_path): pipeline = nf_core.pipelines.create.create.PipelineCreate( name=self.pipeline_name, description=self.pipeline_description, @@ -30,6 +32,7 @@ def test_pipeline_creation(self): version=self.pipeline_version, no_git=False, force=True, + outdir=tmp_path, default_branch=self.default_branch, ) @@ -51,10 +54,10 @@ def test_pipeline_creation_initiation(self, tmp_path): default_branch=self.default_branch, ) pipeline.init_pipeline() - assert os.path.isdir(os.path.join(pipeline.outdir, ".git")) + assert Path(pipeline.outdir, ".git").is_dir() assert f" {self.default_branch}\n" in git.Repo.init(pipeline.outdir).git.branch() - assert not os.path.exists(os.path.join(pipeline.outdir, "pipeline_template.yml")) - with open(os.path.join(pipeline.outdir, ".nf-core.yml")) as fh: + assert not Path(pipeline.outdir, 
"pipeline_template.yml").exists() + with open(Path(pipeline.outdir, ".nf-core.yml")) as fh: assert "template" in fh.read() @with_temporary_folder From 1473611b9025ec4221e91fdbe0709723c59962b8 Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 25 Jul 2024 13:35:44 +0200 Subject: [PATCH 352/737] change import strategy for lint tests --- nf_core/subworkflows/lint/__init__.py | 21 ++++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/nf_core/subworkflows/lint/__init__.py b/nf_core/subworkflows/lint/__init__.py index a07371088..b366ddfb5 100644 --- a/nf_core/subworkflows/lint/__init__.py +++ b/nf_core/subworkflows/lint/__init__.py @@ -19,6 +19,14 @@ log = logging.getLogger(__name__) +# Import lint functions +from .main_nf import main_nf # type: ignore[misc] +from .meta_yml import meta_yml # type: ignore[misc] +from .subworkflow_changes import subworkflow_changes # type: ignore[misc] +from .subworkflow_tests import subworkflow_tests # type: ignore[misc] +from .subworkflow_todos import subworkflow_todos # type: ignore[misc] +from .subworkflow_version import subworkflow_version # type: ignore[misc] + class SubworkflowLint(ComponentLint): """ @@ -26,13 +34,12 @@ class SubworkflowLint(ComponentLint): repository or in any nf-core pipeline directory """ - # Import lint functions - from .main_nf import main_nf # type: ignore[misc] - from .meta_yml import meta_yml # type: ignore[misc] - from .subworkflow_changes import subworkflow_changes # type: ignore[misc] - from .subworkflow_tests import subworkflow_tests # type: ignore[misc] - from .subworkflow_todos import subworkflow_todos # type: ignore[misc] - from .subworkflow_version import subworkflow_version # type: ignore[misc] + main_nf = main_nf + meta_yml = meta_yml + subworkflow_changes = subworkflow_changes + subworkflow_tests = subworkflow_tests + subworkflow_todos = subworkflow_todos + subworkflow_version = subworkflow_version def __init__( self, From 9dc61d61059bb2cb2b2bed4c7001251b01e863df Mon 
Sep 17 00:00:00 2001 From: mashehu Date: Thu, 25 Jul 2024 13:37:36 +0200 Subject: [PATCH 353/737] migrate to pathlib --- nf_core/components/components_command.py | 40 +++++++++++-------- nf_core/components/list.py | 3 +- nf_core/modules/modules_repo.py | 10 +++-- nf_core/pipelines/lint/actions_awsfulltest.py | 9 +++-- nf_core/pipelines/lint/actions_awstest.py | 6 +-- nf_core/pipelines/lint/actions_ci.py | 6 +-- .../lint/actions_schema_validation.py | 7 ++-- nf_core/pipelines/lint/files_unchanged.py | 8 ++-- nf_core/pipelines/lint/merge_markers.py | 32 +++++++-------- nf_core/pipelines/lint/modules_structure.py | 3 +- nf_core/pipelines/lint/nextflow_config.py | 19 ++++----- nf_core/pipelines/lint/pipeline_todos.py | 20 +++++----- nf_core/pipelines/lint/readme.py | 6 +-- 13 files changed, 90 insertions(+), 79 deletions(-) diff --git a/nf_core/components/components_command.py b/nf_core/components/components_command.py index a6b46d1b6..4440dc32a 100644 --- a/nf_core/components/components_command.py +++ b/nf_core/components/components_command.py @@ -1,6 +1,5 @@ import logging import mmap -import os import shutil from pathlib import Path from typing import Dict, List, Optional, Union @@ -22,7 +21,9 @@ class ComponentCommand: def __init__( self, component_type: str, - directory: Union[str, Path], + directory: Union[ + str, Path + ], # TODO: This is actually None sometimes (e.g. 
in test_modules_list_remote), need to rewrite the logic here to handle these cases elegantly, for example setting a default path remote_url: Optional[str] = None, branch: Optional[str] = None, no_pull: bool = False, @@ -33,7 +34,7 @@ def __init__( Initialise the ComponentClass object """ self.component_type = component_type - self.directory = Path(directory) + self.directory = directory self.modules_repo = ModulesRepo(remote_url, branch, no_pull, hide_progress) self.hide_progress = hide_progress self.no_prompts = no_prompts @@ -47,18 +48,21 @@ def _configure_repo_and_paths(self, nf_dir_req: bool = True) -> None: Args: nf_dir_req (bool, optional): Whether this command requires being run in the nf-core modules repo or a nf-core pipeline repository. Defaults to True. """ - try: if self.directory: - self.directory, self.repo_type, self.org = get_repo_info(self.directory, use_prompt=not self.no_prompts) + self.directory, self.repo_type, self.org = get_repo_info( + Path(self.directory), use_prompt=not self.no_prompts + ) else: self.repo_type = None self.org = "" + except UserWarning: if nf_dir_req: raise self.repo_type = None self.org = "" + self.default_modules_path = Path("modules", self.org) self.default_tests_path = Path("tests", "modules", self.org) self.default_subworkflows_path = Path("subworkflows", self.org) @@ -82,8 +86,8 @@ def get_components_clone_modules(self) -> List[str]: elif self.component_type == "subworkflows": component_base_path = Path(self.directory, self.default_subworkflows_path) return [ - str(Path(dir).relative_to(component_base_path)) - for dir, _, files in os.walk(component_base_path) + str(Path(directory).relative_to(component_base_path)) + for directory, _, files in Path.walk(component_base_path) if "main.nf" in files ] @@ -91,12 +95,12 @@ def has_valid_directory(self) -> bool: """Check that we were given a pipeline or clone of nf-core/modules""" if self.repo_type == "modules": return True - if self.directory is None or not 
os.path.exists(self.directory): + if self.directory is None or not Path(self.directory).exists(): log.error(f"Could not find directory: {self.directory}") return False - main_nf = os.path.join(self.directory, "main.nf") - nf_config = os.path.join(self.directory, "nextflow.config") - if not os.path.exists(main_nf) and not os.path.exists(nf_config): + main_nf = Path(self.directory, "main.nf") + nf_config = Path(self.directory, "nextflow.config") + if not main_nf.exists() and not nf_config.exists(): if Path(self.directory).resolve().parts[-1].startswith("nf-core"): raise UserWarning(f"Could not find a 'main.nf' or 'nextflow.config' file in '{self.directory}'") log.warning(f"Could not find a 'main.nf' or 'nextflow.config' file in '{self.directory}'") @@ -104,8 +108,8 @@ def has_valid_directory(self) -> bool: def has_modules_file(self) -> None: """Checks whether a module.json file has been created and creates one if it is missing""" - modules_json_path = os.path.join(self.directory, "modules.json") - if not os.path.exists(modules_json_path): + modules_json_path = Path(self.directory, "modules.json") + if not modules_json_path.exists(): log.info("Creating missing 'module.json' file.") ModulesJson(self.directory).create() @@ -122,10 +126,10 @@ def clear_component_dir(self, component_name: str, component_dir: Union[str, Pat try: shutil.rmtree(component_dir) # remove all empty directories - for dir_path, dir_names, filenames in os.walk(self.directory, topdown=False): + for dir_path, dir_names, filenames in Path.walk(Path(self.directory), top_down=False): if not dir_names and not filenames: try: - os.rmdir(dir_path) + dir_path.rmdir() except OSError: pass else: @@ -152,7 +156,9 @@ def components_from_repo(self, install_dir: str) -> List[str]: raise LookupError(f"Nothing installed from {install_dir} in pipeline") return [ - str(Path(dir_path).relative_to(repo_dir)) for dir_path, _, files in os.walk(repo_dir) if "main.nf" in files + str(Path(dir_path).relative_to(repo_dir)) + 
for dir_path, _, files in Path.walk(repo_dir) + if "main.nf" in files ] def install_component_files( @@ -196,7 +202,7 @@ def check_modules_structure(self) -> None: """ if self.repo_type == "pipeline": wrong_location_modules: List[Path] = [] - for directory, _, files in os.walk(Path(self.directory, "modules")): + for directory, _, files in Path.walk(Path(self.directory, "modules")): if "main.nf" in files: module_path = Path(directory).relative_to(Path(self.directory, "modules")) parts = module_path.parts diff --git a/nf_core/components/list.py b/nf_core/components/list.py index 67468b4a5..0a6b65446 100644 --- a/nf_core/components/list.py +++ b/nf_core/components/list.py @@ -1,5 +1,6 @@ import json import logging +from pathlib import Path from typing import Dict, List, Optional, Union, cast import rich.table @@ -15,7 +16,7 @@ class ComponentList(ComponentCommand): def __init__( self, component_type: str, - pipeline_dir: str, + pipeline_dir: Union[str, Path], remote: bool = True, remote_url: Optional[str] = None, branch: Optional[str] = None, diff --git a/nf_core/modules/modules_repo.py b/nf_core/modules/modules_repo.py index 7d576d4ae..bc92072e3 100644 --- a/nf_core/modules/modules_repo.py +++ b/nf_core/modules/modules_repo.py @@ -63,16 +63,18 @@ def __init__( self.setup_local_repo(remote_url, branch, hide_progress) config_fn, repo_config = load_tools_config(self.local_repo_dir) - assert config_fn is not None and repo_config is not None # mypy + if config_fn is None or repo_config is None: + raise UserWarning(f"Could not find a configuration file in {self.local_repo_dir}") try: - self.repo_path = repo_config["org_path"] + self.repo_path = repo_config.org_path except KeyError: raise UserWarning(f"'org_path' key not present in {config_fn.name}") # Verify that the repo seems to be correctly configured if self.repo_path != NF_CORE_MODULES_NAME or self.branch: self.verify_branch() - + if self.repo_path is None: + raise UserWarning(f"Could not find the org_path in the 
configuration file: {config_fn.name}") # Convenience variable self.modules_dir = Path(self.local_repo_dir, "modules", self.repo_path) self.subworkflows_dir = Path(self.local_repo_dir, "subworkflows", self.repo_path) @@ -96,7 +98,7 @@ def setup_local_repo(self, remote, branch, hide_progress=True, in_cache=False): branch (str): name of branch to use Sets self.repo """ - self.local_repo_dir = os.path.join(NFCORE_DIR if not in_cache else NFCORE_CACHE_DIR, self.fullname) + self.local_repo_dir = Path(NFCORE_DIR if not in_cache else NFCORE_CACHE_DIR, self.fullname) try: if not os.path.exists(self.local_repo_dir): try: diff --git a/nf_core/pipelines/lint/actions_awsfulltest.py b/nf_core/pipelines/lint/actions_awsfulltest.py index 4cf3bece2..7ea167f6c 100644 --- a/nf_core/pipelines/lint/actions_awsfulltest.py +++ b/nf_core/pipelines/lint/actions_awsfulltest.py @@ -1,9 +1,10 @@ -import os +from pathlib import Path +from typing import Dict, List import yaml -def actions_awsfulltest(self): +def actions_awsfulltest(self) -> Dict[str, List[str]]: """Checks the GitHub Actions awsfulltest is valid. In addition to small test datasets run on GitHub Actions, we provide the possibility of testing the pipeline on full size datasets on AWS. @@ -29,8 +30,8 @@ def actions_awsfulltest(self): warned = [] failed = [] - fn = os.path.join(self.wf_path, ".github", "workflows", "awsfulltest.yml") - if os.path.isfile(fn): + fn = Path(self.wf_path, ".github", "workflows", "awsfulltest.yml") + if fn.is_file(): try: with open(fn) as fh: wf = yaml.safe_load(fh) diff --git a/nf_core/pipelines/lint/actions_awstest.py b/nf_core/pipelines/lint/actions_awstest.py index 7c5599894..7e4c0fc49 100644 --- a/nf_core/pipelines/lint/actions_awstest.py +++ b/nf_core/pipelines/lint/actions_awstest.py @@ -1,4 +1,4 @@ -import os +from pathlib import Path import yaml @@ -22,8 +22,8 @@ def actions_awstest(self): * Must be turned on for ``workflow_dispatch``. 
""" - fn = os.path.join(self.wf_path, ".github", "workflows", "awstest.yml") - if not os.path.isfile(fn): + fn = Path(self.wf_path, ".github", "workflows", "awstest.yml") + if not fn.is_file(): return {"ignored": [f"'awstest.yml' workflow not found: `{fn}`"]} try: diff --git a/nf_core/pipelines/lint/actions_ci.py b/nf_core/pipelines/lint/actions_ci.py index a3e7d54b6..74f433ef8 100644 --- a/nf_core/pipelines/lint/actions_ci.py +++ b/nf_core/pipelines/lint/actions_ci.py @@ -1,4 +1,4 @@ -import os +from pathlib import Path import yaml @@ -40,10 +40,10 @@ def actions_ci(self): """ passed = [] failed = [] - fn = os.path.join(self.wf_path, ".github", "workflows", "ci.yml") + fn = Path(self.wf_path, ".github", "workflows", "ci.yml") # Return an ignored status if we can't find the file - if not os.path.isfile(fn): + if not fn.is_file(): return {"ignored": ["'.github/workflows/ci.yml' not found"]} try: diff --git a/nf_core/pipelines/lint/actions_schema_validation.py b/nf_core/pipelines/lint/actions_schema_validation.py index b4be42b54..a057d8058 100644 --- a/nf_core/pipelines/lint/actions_schema_validation.py +++ b/nf_core/pipelines/lint/actions_schema_validation.py @@ -1,6 +1,5 @@ -import glob import logging -import os +from pathlib import Path from typing import Any, Dict, List import jsonschema @@ -26,7 +25,7 @@ def actions_schema_validation(self) -> Dict[str, List[str]]: logging.getLogger("nf_core.pipelines.schema").setLevel(logging.ERROR) # Get all workflow files - action_workflows = glob.glob(os.path.join(self.wf_path, ".github/workflows/*.y*ml")) + action_workflows = list(Path(self.wf_path).glob(".github/workflows/*.y*ml")) # Load the GitHub workflow schema r = requests.get("https://json.schemastore.org/github-workflow", allow_redirects=True) @@ -40,7 +39,7 @@ def actions_schema_validation(self) -> Dict[str, List[str]]: # Validate all workflows against the schema for wf_path in action_workflows: - wf = os.path.basename(wf_path) + wf = wf_path.name # load workflow 
try: diff --git a/nf_core/pipelines/lint/files_unchanged.py b/nf_core/pipelines/lint/files_unchanged.py index bafd8aec7..300b3674b 100644 --- a/nf_core/pipelines/lint/files_unchanged.py +++ b/nf_core/pipelines/lint/files_unchanged.py @@ -112,7 +112,8 @@ def files_unchanged(self) -> Dict[str, Union[List[str], bool]]: logging.getLogger("nf_core.pipelines.create").setLevel(logging.ERROR) # Generate a new pipeline with nf-core create that we can compare to - tmp_dir = tempfile.mkdtemp() + tmp_dir = Path(tempfile.TemporaryDirectory().name) + tmp_dir.mkdir(parents=True) # Create a template.yaml file for the pipeline creation template_yaml = { @@ -123,10 +124,11 @@ def files_unchanged(self) -> Dict[str, Union[List[str], bool]]: } template_yaml_path = Path(tmp_dir, "template.yaml") + with open(template_yaml_path, "w") as fh: yaml.dump(template_yaml, fh, default_flow_style=False) - test_pipeline_dir = os.path.join(tmp_dir, f"{prefix}-{short_name}") + test_pipeline_dir = Path(tmp_dir, f"{prefix}-{short_name}") create_obj = nf_core.pipelines.create.create.PipelineCreate( None, None, None, no_git=True, outdir=test_pipeline_dir, template_config=template_yaml_path ) @@ -141,7 +143,7 @@ def _tf(file_path: Union[str, Path]) -> Path: """Helper function - get file path for template file""" return Path(test_pipeline_dir, file_path) - ignore_files = self.lint_config.get("files_unchanged", []) + ignore_files = self.lint_config.get("files_unchanged", []) if self.lint_config is not None else [] # Files that must be completely unchanged from template for files in files_exact: diff --git a/nf_core/pipelines/lint/merge_markers.py b/nf_core/pipelines/lint/merge_markers.py index d57b63fd1..208c9f4bc 100644 --- a/nf_core/pipelines/lint/merge_markers.py +++ b/nf_core/pipelines/lint/merge_markers.py @@ -1,6 +1,6 @@ import fnmatch import logging -import os +from pathlib import Path import nf_core.utils @@ -35,36 +35,36 @@ def merge_markers(self): failed = [] ignored = [] - ignored_config = 
self.lint_config.get("merge_markers", []) + ignored_config = self.lint_config.get("merge_markers", []) if self.lint_config is not None else [] ignore = [".git"] - if os.path.isfile(os.path.join(self.wf_path, ".gitignore")): - with open(os.path.join(self.wf_path, ".gitignore"), encoding="latin1") as fh: + if Path(self.wf_path, ".gitignore").is_file(): + with open(Path(self.wf_path, ".gitignore"), encoding="latin1") as fh: for line in fh: - ignore.append(os.path.basename(line.strip().rstrip("/"))) - for root, dirs, files in os.walk(self.wf_path, topdown=True): + ignore.append(Path(line.strip().rstrip("/")).name) + for root, dirs, files in Path.walk(self.wf_path, top_down=True): # Ignore files for i_base in ignore: - i = os.path.join(root, i_base) - dirs[:] = [d for d in dirs if not fnmatch.fnmatch(os.path.join(root, d), i)] - files[:] = [f for f in files if not fnmatch.fnmatch(os.path.join(root, f), i)] + i = str(Path(root, i_base)) + dirs[:] = [d for d in dirs if not fnmatch.fnmatch(str(Path(root, d)), i)] + files[:] = [f for f in files if not fnmatch.fnmatch(str(Path(root, f)), i)] for fname in files: # File ignored in config - if os.path.relpath(os.path.join(root, fname), self.wf_path) in ignored_config: - ignored.append(f"Ignoring file `{os.path.join(root, fname)}`") + if str(Path(root, fname).relative_to(self.wf_path)) in ignored_config: + ignored.append(f"Ignoring file `{Path(root, fname)}`") continue # Skip binary files - if nf_core.utils.is_file_binary(os.path.join(root, fname)): + if nf_core.utils.is_file_binary(Path(root, fname)): continue try: - with open(os.path.join(root, fname), encoding="latin1") as fh: + with open(Path(root, fname), encoding="latin1") as fh: for line in fh: if ">>>>>>>" in line: - failed.append(f"Merge marker '>>>>>>>' in `{os.path.join(root, fname)}`: {line[:30]}") + failed.append(f"Merge marker '>>>>>>>' in `{Path(root, fname)}`: {line[:30]}") if "<<<<<<<" in line: - failed.append(f"Merge marker '<<<<<<<' in `{os.path.join(root, 
fname)}`: {line[:30]}") + failed.append(f"Merge marker '<<<<<<<' in `{Path(root, fname)}`: {line[:30]}") except FileNotFoundError: - log.debug(f"Could not open file {os.path.join(root, fname)} in merge_markers lint test") + log.debug(f"Could not open file {Path(root, fname)} in merge_markers lint test") if len(failed) == 0: passed.append("No merge markers found in pipeline files") return {"passed": passed, "failed": failed, "ignored": ignored} diff --git a/nf_core/pipelines/lint/modules_structure.py b/nf_core/pipelines/lint/modules_structure.py index 9d9b4c9fc..fd29942ed 100644 --- a/nf_core/pipelines/lint/modules_structure.py +++ b/nf_core/pipelines/lint/modules_structure.py @@ -1,5 +1,4 @@ import logging -import os from pathlib import Path log = logging.getLogger(__name__) @@ -20,7 +19,7 @@ def modules_structure(self): modules/nf-core/modules/TOOL/SUBTOOL """ wrong_location_modules = [] - for directory, _, files in os.walk(Path(self.wf_path, "modules")): + for directory, _, files in Path.walk(Path(self.wf_path, "modules")): if "main.nf" in files: module_path = Path(directory).relative_to(Path(self.wf_path, "modules")) parts = module_path.parts diff --git a/nf_core/pipelines/lint/nextflow_config.py b/nf_core/pipelines/lint/nextflow_config.py index f62100a70..96323af94 100644 --- a/nf_core/pipelines/lint/nextflow_config.py +++ b/nf_core/pipelines/lint/nextflow_config.py @@ -1,14 +1,14 @@ import logging -import os import re from pathlib import Path +from typing import Dict, List, Optional, Union from nf_core.pipelines.schema import PipelineSchema log = logging.getLogger(__name__) -def nextflow_config(self): +def nextflow_config(self) -> Dict[str, List[str]]: """Checks the pipeline configuration for required variables. 
All nf-core pipelines are required to be configured with a minimal set of variable @@ -173,7 +173,7 @@ def nextflow_config(self): ] # Remove field that should be ignored according to the linting config - ignore_configs = self.lint_config.get("nextflow_config", []) + ignore_configs = self.lint_config.get("nextflow_config", []) if self.lint_config is not None else [] for cfs in config_fail: for cf in cfs: @@ -205,12 +205,13 @@ def nextflow_config(self): failed.append(f"Config variable (incorrectly) found: {self._wrap_quotes(cf)}") # Check and warn if the process configuration is done with deprecated syntax + process_with_deprecated_syntax = list( set( [ - re.search(r"^(process\.\$.*?)\.+.*$", ck).group(1) + match.group(1) for ck in self.nf_config.keys() - if re.match(r"^(process\.\$.*?)\.+.*$", ck) + if (match := re.match(r"^(process\.\$.*?)\.+.*$", ck)) is not None ] ) ) @@ -313,7 +314,7 @@ def nextflow_config(self): r'System.err.println("WARNING: Could not load nf-core/config profiles: ${params.custom_config_base}/nfcore_custom.config")', r"}", ] - path = os.path.join(self.wf_path, "nextflow.config") + path = Path(self.wf_path, "nextflow.config") i = 0 with open(path) as f: for line in f: @@ -335,7 +336,7 @@ def nextflow_config(self): ) # Check for the availability of the "test" configuration profile by parsing nextflow.config - with open(os.path.join(self.wf_path, "nextflow.config")) as f: + with open(Path(self.wf_path, "nextflow.config")) as f: content = f.read() # Remove comments @@ -379,8 +380,8 @@ def nextflow_config(self): if param in ignore_defaults: ignored.append(f"Config default ignored: {param}") elif param in self.nf_config.keys(): - config_default = None - schema_default = None + config_default: Optional[Union[str, float, int]] = None + schema_default: Optional[Union[str, float, int]] = None if schema.schema_types[param_name] == "boolean": schema_default = str(schema.schema_defaults[param_name]).lower() config_default = 
str(self.nf_config[param]).lower() diff --git a/nf_core/pipelines/lint/pipeline_todos.py b/nf_core/pipelines/lint/pipeline_todos.py index ba6ec7915..530d85376 100644 --- a/nf_core/pipelines/lint/pipeline_todos.py +++ b/nf_core/pipelines/lint/pipeline_todos.py @@ -1,6 +1,6 @@ import fnmatch import logging -import os +from pathlib import Path log = logging.getLogger(__name__) @@ -39,19 +39,19 @@ def pipeline_todos(self, root_dir=None): root_dir = self.wf_path ignore = [".git"] - if os.path.isfile(os.path.join(root_dir, ".gitignore")): - with open(os.path.join(root_dir, ".gitignore"), encoding="latin1") as fh: + if Path(root_dir, ".gitignore").is_file(): + with open(Path(root_dir, ".gitignore"), encoding="latin1") as fh: for line in fh: - ignore.append(os.path.basename(line.strip().rstrip("/"))) - for root, dirs, files in os.walk(root_dir, topdown=True): + ignore.append(Path(line.strip().rstrip("/")).name) + for root, dirs, files in Path.walk(root_dir, top_down=True): # Ignore files for i_base in ignore: - i = os.path.join(root, i_base) - dirs[:] = [d for d in dirs if not fnmatch.fnmatch(os.path.join(root, d), i)] - files[:] = [f for f in files if not fnmatch.fnmatch(os.path.join(root, f), i)] + i = str(Path(root, i_base)) + dirs[:] = [d for d in dirs if not fnmatch.fnmatch(str(Path(root, d)), i)] + files[:] = [f for f in files if not fnmatch.fnmatch(str(Path(root, f)), i)] for fname in files: try: - with open(os.path.join(root, fname), encoding="latin1") as fh: + with open(Path(root, fname), encoding="latin1") as fh: for line in fh: if "TODO nf-core" in line: line = ( @@ -63,7 +63,7 @@ def pipeline_todos(self, root_dir=None): .strip() ) warned.append(f"TODO string in `{fname}`: _{line}_") - file_paths.append(os.path.join(root, fname)) + file_paths.append(Path(root, fname)) except FileNotFoundError: log.debug(f"Could not open file {fname} in pipeline_todos lint test") diff --git a/nf_core/pipelines/lint/readme.py b/nf_core/pipelines/lint/readme.py index 
cade9ca3e..4c1624369 100644 --- a/nf_core/pipelines/lint/readme.py +++ b/nf_core/pipelines/lint/readme.py @@ -1,5 +1,5 @@ -import os import re +from pathlib import Path def readme(self): @@ -29,9 +29,9 @@ def readme(self): failed = [] # Remove field that should be ignored according to the linting config - ignore_configs = self.lint_config.get("readme", []) + ignore_configs = self.lint_config.get("readme", []) if self.lint_config is not None else [] - with open(os.path.join(self.wf_path, "README.md")) as fh: + with open(Path(self.wf_path, "README.md")) as fh: content = fh.read() if "nextflow_badge" not in ignore_configs: From 0a3cb5ffaea2026f55e6291f1f4f1ea73dc3cc4a Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 25 Jul 2024 14:09:35 +0200 Subject: [PATCH 354/737] fix mypy linting --- nf_core/modules/bump_versions.py | 2 +- nf_core/pipelines/create/create.py | 13 +++++++------ tests/pipelines/test_sync.py | 2 +- 3 files changed, 9 insertions(+), 8 deletions(-) diff --git a/nf_core/modules/bump_versions.py b/nf_core/modules/bump_versions.py index 2d8854e3c..6546cccc9 100644 --- a/nf_core/modules/bump_versions.py +++ b/nf_core/modules/bump_versions.py @@ -76,7 +76,7 @@ def bump_versions( ) # Get list of all modules - _, nfcore_modules = nf_core.modules.modules_utils.get_installed_modules(self.directory) + _, nfcore_modules = nf_core.modules.modules_utils.get_installed_modules(Path(self.directory)) # Load the .nf-core.yml config _, self.tools_config = nf_core.utils.load_tools_config(self.directory) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index f032db7f6..be07c0c29 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -8,7 +8,7 @@ import re import shutil from pathlib import Path -from typing import Dict, List, Optional, Union +from typing import Dict, List, Optional, Union, cast import git import git.config @@ -21,6 +21,7 @@ from nf_core.pipelines.create.utils import CreateConfig from 
nf_core.pipelines.create_logo import create_logo from nf_core.pipelines.lint_utils import run_prettier_on_file +from nf_core.utils import LintConfigType log = logging.getLogger(__name__) @@ -57,7 +58,7 @@ def __init__( from_config_file: bool = False, default_branch: Optional[str] = None, is_interactive: bool = False, - ): + ) -> None: if isinstance(template_config, CreateConfig): self.config = template_config elif from_config_file: @@ -418,13 +419,13 @@ def remove_nf_core_in_bug_report_template(self): run_prettier_on_file(bug_report_path) - def fix_linting(self): + def fix_linting(self) -> None: """ Updates the .nf-core.yml with linting configurations for a customized pipeline. """ # Create a lint config - short_name = self.jinja_params["short_name"] + short_name: str = self.jinja_params["short_name"] lint_config: Dict[str, List[str]] = { "files_exist": [ "CODE_OF_CONDUCT.md", @@ -512,7 +513,7 @@ def fix_linting(self): # Add the lint content to the preexisting nf-core config config_fn, nf_core_yml = nf_core.utils.load_tools_config(self.outdir) if config_fn is not None and nf_core_yml is not None: - nf_core_yml.lint = lint_config + nf_core_yml.lint = cast(LintConfigType, lint_config) with open(self.outdir / config_fn, "w") as fh: yaml.dump(nf_core_yml.model_dump(), fh, default_flow_style=False, sort_keys=False) @@ -534,7 +535,7 @@ def make_pipeline_logo(self): force=bool(self.force), ) - def git_init_pipeline(self): + def git_init_pipeline(self) -> None: """Initialises the new pipeline as a Git repository and submits first commit. 
Raises: diff --git a/tests/pipelines/test_sync.py b/tests/pipelines/test_sync.py index b6955e671..ffbe75510 100644 --- a/tests/pipelines/test_sync.py +++ b/tests/pipelines/test_sync.py @@ -331,7 +331,7 @@ def test_close_open_template_merge_prs(self, mock_get): @mock.patch("nf_core.utils.gh_api.post", side_effect=mocked_requests_post) @mock.patch("nf_core.utils.gh_api.patch", side_effect=mocked_requests_patch) - def test_close_open_pr(self, mock_patch, mock_post): + def test_close_open_pr(self, mock_patch, mock_post) -> None: psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() From b7b45dfab771a2a84ed05d368719f204133ec8a0 Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 25 Jul 2024 14:09:47 +0200 Subject: [PATCH 355/737] fix circular import --- nf_core/__main__.py | 2 +- nf_core/components/components_utils.py | 16 +++++++++++++--- nf_core/components/info.py | 2 +- nf_core/components/install.py | 2 +- nf_core/modules/modules_json.py | 8 ++------ nf_core/modules/modules_repo.py | 6 +----- nf_core/synced_repo.py | 9 ++++----- tests/modules/test_modules_json.py | 4 ++-- tests/modules/test_update.py | 2 +- tests/subworkflows/test_update.py | 2 +- 10 files changed, 27 insertions(+), 26 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index f33c63e87..b86f8f4ba 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -52,7 +52,7 @@ subworkflows_test, subworkflows_update, ) -from nf_core.modules.modules_repo import NF_CORE_MODULES_REMOTE +from nf_core.components.components_utils import NF_CORE_MODULES_REMOTE from nf_core.pipelines.download import DownloadError from nf_core.utils import check_if_outdated, nfcore_logo, rich_force_colors, setup_nfcore_dir diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index e4e2ff092..4e9c0ac60 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -1,16 +1,23 @@ import 
logging import re from pathlib import Path -from typing import List, Optional, Tuple, Union +from typing import TYPE_CHECKING, List, Optional, Tuple, Union import questionary import rich.prompt +if TYPE_CHECKING: + from nf_core.modules.modules_repo import ModulesRepo + import nf_core.utils -from nf_core.modules.modules_repo import ModulesRepo log = logging.getLogger(__name__) +# Constants for the nf-core/modules repo used throughout the module files +NF_CORE_MODULES_NAME = "nf-core" +NF_CORE_MODULES_REMOTE = "https://github.com/nf-core/modules.git" +NF_CORE_MODULES_DEFAULT_BRANCH = "master" + def get_repo_info(directory: Path, use_prompt: Optional[bool] = True) -> Tuple[Path, Optional[str], str]: """ @@ -82,7 +89,10 @@ def get_repo_info(directory: Path, use_prompt: Optional[bool] = True) -> Tuple[P def prompt_component_version_sha( - component_name: str, component_type: str, modules_repo: ModulesRepo, installed_sha: Optional[str] = None + component_name: str, + component_type: str, + modules_repo: "ModulesRepo", + installed_sha: Optional[str] = None, ) -> str: """ Creates an interactive questionary prompt for selecting the module/subworkflow version diff --git a/nf_core/components/info.py b/nf_core/components/info.py index a296fcacc..55a95593f 100644 --- a/nf_core/components/info.py +++ b/nf_core/components/info.py @@ -15,8 +15,8 @@ import nf_core.utils from nf_core.components.components_command import ComponentCommand +from nf_core.components.components_utils import NF_CORE_MODULES_REMOTE from nf_core.modules.modules_json import ModulesJson -from nf_core.modules.modules_repo import NF_CORE_MODULES_REMOTE log = logging.getLogger(__name__) diff --git a/nf_core/components/install.py b/nf_core/components/install.py index 8f4791328..f2849f85b 100644 --- a/nf_core/components/install.py +++ b/nf_core/components/install.py @@ -12,11 +12,11 @@ import nf_core.utils from nf_core.components.components_command import ComponentCommand from nf_core.components.components_utils 
import ( + NF_CORE_MODULES_NAME, get_components_to_install, prompt_component_version_sha, ) from nf_core.modules.modules_json import ModulesJson -from nf_core.modules.modules_repo import NF_CORE_MODULES_NAME log = logging.getLogger(__name__) diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 01785c6d6..e9b4aa102 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -14,12 +14,8 @@ from git.exc import GitCommandError import nf_core.utils -from nf_core.components.components_utils import get_components_to_install -from nf_core.modules.modules_repo import ( - NF_CORE_MODULES_NAME, - NF_CORE_MODULES_REMOTE, - ModulesRepo, -) +from nf_core.components.components_utils import NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE, get_components_to_install +from nf_core.modules.modules_repo import ModulesRepo from nf_core.pipelines.lint_utils import dump_json_with_prettier from .modules_differ import ModulesDiffer diff --git a/nf_core/modules/modules_repo.py b/nf_core/modules/modules_repo.py index bc92072e3..5b5020548 100644 --- a/nf_core/modules/modules_repo.py +++ b/nf_core/modules/modules_repo.py @@ -12,16 +12,12 @@ import nf_core.modules.modules_json import nf_core.modules.modules_utils +from nf_core.components.components_utils import NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE from nf_core.synced_repo import RemoteProgressbar, SyncedRepo from nf_core.utils import NFCORE_CACHE_DIR, NFCORE_DIR, load_tools_config log = logging.getLogger(__name__) -# Constants for the nf-core/modules repo used throughout the module files -NF_CORE_MODULES_NAME = "nf-core" -NF_CORE_MODULES_REMOTE = "https://github.com/nf-core/modules.git" -NF_CORE_MODULES_DEFAULT_BRANCH = "master" - class ModulesRepo(SyncedRepo): """ diff --git a/nf_core/synced_repo.py b/nf_core/synced_repo.py index 8af0ee9a0..8efdd0e48 100644 --- a/nf_core/synced_repo.py +++ b/nf_core/synced_repo.py @@ -9,15 +9,14 @@ import git from git.exc import GitCommandError 
+from nf_core.components.components_utils import ( + NF_CORE_MODULES_NAME, + NF_CORE_MODULES_REMOTE, +) from nf_core.utils import load_tools_config log = logging.getLogger(__name__) -# Constants for the nf-core/modules repo used throughout the module files -NF_CORE_MODULES_NAME = "nf-core" -NF_CORE_MODULES_REMOTE = "https://github.com/nf-core/modules.git" -NF_CORE_MODULES_DEFAULT_BRANCH = "master" - class RemoteProgressbar(git.RemoteProgress): """ diff --git a/tests/modules/test_modules_json.py b/tests/modules/test_modules_json.py index 845f9c3e3..b2cac99e6 100644 --- a/tests/modules/test_modules_json.py +++ b/tests/modules/test_modules_json.py @@ -3,13 +3,13 @@ import shutil from pathlib import Path -from nf_core.modules.modules_json import ModulesJson -from nf_core.modules.modules_repo import ( +from nf_core.components.components_utils import ( NF_CORE_MODULES_DEFAULT_BRANCH, NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE, ModulesRepo, ) +from nf_core.modules.modules_json import ModulesJson from nf_core.modules.patch import ModulePatch from ..test_modules import TestModules diff --git a/tests/modules/test_update.py b/tests/modules/test_update.py index 1f81eab48..6c8eacc66 100644 --- a/tests/modules/test_update.py +++ b/tests/modules/test_update.py @@ -8,9 +8,9 @@ import yaml import nf_core.utils +from nf_core.components.components_utils import NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE from nf_core.modules.install import ModuleInstall from nf_core.modules.modules_json import ModulesJson -from nf_core.modules.modules_repo import NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE from nf_core.modules.patch import ModulePatch from nf_core.modules.update import ModuleUpdate diff --git a/tests/subworkflows/test_update.py b/tests/subworkflows/test_update.py index 7b17a621b..153038cd1 100644 --- a/tests/subworkflows/test_update.py +++ b/tests/subworkflows/test_update.py @@ -8,8 +8,8 @@ import yaml import nf_core.utils +from nf_core.components.components_utils import 
NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE from nf_core.modules.modules_json import ModulesJson -from nf_core.modules.modules_repo import NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE from nf_core.modules.update import ModuleUpdate from nf_core.subworkflows.update import SubworkflowUpdate From c6227a7cac8a909e54a46423a9626159ec6589b8 Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 25 Jul 2024 14:17:18 +0200 Subject: [PATCH 356/737] fix module command imports --- nf_core/commands_modules.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/nf_core/commands_modules.py b/nf_core/commands_modules.py index 3d96d332b..b93bd7bcb 100644 --- a/nf_core/commands_modules.py +++ b/nf_core/commands_modules.py @@ -13,7 +13,7 @@ def modules_list_remote(ctx, keywords, json): """ List modules in a remote GitHub repo [dim i](e.g [link=https://github.com/nf-core/modules]nf-core/modules[/])[/]. """ - from nf_core.modules import ModuleList + from nf_core.modules.list import ModuleList try: module_list = ModuleList( @@ -33,7 +33,7 @@ def modules_list_local(ctx, keywords, json, dir): # pylint: disable=redefined-b """ List modules installed locally in a pipeline """ - from nf_core.modules import ModuleList + from nf_core.modules.list import ModuleList try: module_list = ModuleList( @@ -55,7 +55,7 @@ def modules_install(ctx, tool, dir, prompt, force, sha): Fetches and installs module files from a remote repo e.g. nf-core/modules. """ - from nf_core.modules import ModuleInstall + from nf_core.modules.install import ModuleInstall try: module_install = ModuleInstall( @@ -93,7 +93,7 @@ def modules_update( Fetches and updates module files from a remote repo e.g. nf-core/modules. 
""" - from nf_core.modules import ModuleUpdate + from nf_core.modules.update import ModuleUpdate try: module_install = ModuleUpdate( @@ -125,7 +125,7 @@ def modules_patch(ctx, tool, dir, remove): Checks if a module has been modified locally and creates a patch file describing how the module has changed from the remote version """ - from nf_core.modules import ModulePatch + from nf_core.modules.patch import ModulePatch try: module_patch = ModulePatch( @@ -147,7 +147,7 @@ def modules_remove(ctx, dir, tool): """ Remove a module from a pipeline. """ - from nf_core.modules import ModuleRemove + from nf_core.modules.remove import ModuleRemove try: module_remove = ModuleRemove( @@ -194,7 +194,7 @@ def modules_create( elif no_meta: has_meta = False - from nf_core.modules import ModuleCreate + from nf_core.modules.create import ModuleCreate # Run function try: @@ -257,7 +257,7 @@ def modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed, nf-core/modules repository. """ from nf_core.components.lint import LintExceptionError - from nf_core.modules import ModuleLint + from nf_core.modules.lint import ModuleLint try: module_lint = ModuleLint( @@ -302,7 +302,7 @@ def modules_info(ctx, tool, dir): will print this usage info. If not, usage from the remote modules repo will be shown. 
""" - from nf_core.modules import ModuleInfo + from nf_core.modules.info import ModuleInfo try: module_info = ModuleInfo( From 847c683d22a44cb20129afb4b63bf94d529badb9 Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 25 Jul 2024 14:20:10 +0200 Subject: [PATCH 357/737] fix: cannot import name 'NotRequired' --- requirements-dev.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements-dev.txt b/requirements-dev.txt index 82087edcb..aa43ee3fe 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -14,6 +14,7 @@ types-Markdown types-PyYAML types-requests types-setuptools +typing_extensions >=4.0.0 pytest-asyncio pytest-textual-snapshot==0.4.0 pytest-workflow>=2.0.0 From e7ac7812422e1d76fc67898c72058b6decb23322 Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 25 Jul 2024 16:12:12 +0200 Subject: [PATCH 358/737] more dir-> directory conversions --- nf_core/__main__.py | 135 ++++++++++++++++++++-------------- nf_core/commands_pipelines.py | 29 ++++---- 2 files changed, 95 insertions(+), 69 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index b86f8f4ba..91af97958 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -126,7 +126,7 @@ # because they are actually preliminary, but intended program terminations. 
# (Custom exceptions are cleaner than `sys.exit(1)`, which we used before) def selective_traceback_hook(exctype, value, traceback): - if exctype in {DownloadError}: # extend set as needed + if exctype in {DownloadError, UserWarning}: # extend set as needed log.error(value) else: # print the colored traceback for all other exceptions with rich as usual @@ -278,6 +278,7 @@ def command_pipelines_create(ctx, name, description, author, version, force, out @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory [dim]\[default: current working directory][/]", @@ -331,7 +332,7 @@ def command_pipelines_create(ctx, name, description, author, version, force, out @click.pass_context def command_pipelines_lint( ctx, - dir, + directory, release, fix, key, @@ -345,7 +346,7 @@ def command_pipelines_lint( """ Check pipeline code against nf-core guidelines. """ - pipelines_lint(ctx, dir, release, fix, key, show_passed, fail_ignored, fail_warned, markdown, json, sort_by) + pipelines_lint(ctx, directory, release, fix, key, show_passed, fail_ignored, fail_warned, markdown, json, sort_by) # nf-core pipelines download @@ -584,6 +585,7 @@ def command_pipelines_list(ctx, keywords, sort, json, show_archived): @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. 
[dim]\[default: current working directory][/]", @@ -610,11 +612,13 @@ def command_pipelines_list(ctx, keywords, sort, json, show_archived): @click.option("-g", "--github-repository", type=str, help="GitHub PR: target repository.") @click.option("-u", "--username", type=str, help="GitHub PR: auth username.") @click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") -def command_pipelines_sync(ctx, dir, from_branch, pull_request, github_repository, username, template_yaml, force_pr): +def command_pipelines_sync( + ctx, directory, from_branch, pull_request, github_repository, username, template_yaml, force_pr +): """ Sync a pipeline [cyan i]TEMPLATE[/] branch with the nf-core template. """ - pipelines_sync(ctx, dir, from_branch, pull_request, github_repository, username, template_yaml, force_pr) + pipelines_sync(ctx, directory, from_branch, pull_request, github_repository, username, template_yaml, force_pr) # nf-core pipelines bump-version @@ -624,6 +628,7 @@ def command_pipelines_sync(ctx, dir, from_branch, pull_request, github_repositor @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. [dim]\[default: current working directory][/]", @@ -635,11 +640,11 @@ def command_pipelines_sync(ctx, dir, from_branch, pull_request, github_repositor default=False, help="Bump required nextflow version instead of pipeline version", ) -def command_pipelines_bump_version(ctx, new_version, dir, nextflow): +def command_pipelines_bump_version(ctx, new_version, directory, nextflow): """ Update nf-core pipeline version number with `nf-core pipelines bump-version`. 
""" - pipelines_bump_version(ctx, new_version, dir, nextflow) + pipelines_bump_version(ctx, new_version, directory, nextflow) # nf-core pipelines create-logo @@ -680,11 +685,11 @@ def command_pipelines_bump_version(ctx, new_version, dir, nextflow): default=False, help="Overwrite any files if they already exist", ) -def command_pipelines_create_logo(logo_text, dir, name, theme, width, format, force): +def command_pipelines_create_logo(logo_text, directory, name, theme, width, format, force): """ Generate a logo with the nf-core logo template. """ - pipelines_create_logo(logo_text, dir, name, theme, width, format, force) + pipelines_create_logo(logo_text, directory, name, theme, width, format, force) # nf-core pipelines schema subcommands @@ -715,6 +720,7 @@ def command_pipelines_schema_validate(pipeline, params): @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. [dim]\[default: current working directory][/]", @@ -735,11 +741,11 @@ def command_pipelines_schema_validate(pipeline, params): default="https://nf-co.re/pipeline_schema_builder", help="Customise the builder URL (for development work)", ) -def command_pipelines_schema_build(dir, no_prompts, web_only, url): +def command_pipelines_schema_build(directory, no_prompts, web_only, url): """ Interactively build a pipeline schema from Nextflow params. """ - pipelines_schema_build(dir, no_prompts, web_only, url) + pipelines_schema_build(directory, no_prompts, web_only, url) # nf-core pipelines schema lint @@ -864,15 +870,16 @@ def command_modules_list_remote(ctx, keywords, json): @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. 
[dim]\[default: Current working directory][/]", ) -def command_modules_list_local(ctx, keywords, json, dir): # pylint: disable=redefined-builtin +def command_modules_list_local(ctx, keywords, json, directory): # pylint: disable=redefined-builtin """ List modules installed locally in a pipeline """ - modules_list_local(ctx, keywords, json, dir) + modules_list_local(ctx, keywords, json, directory) # nf-core modules install @@ -882,6 +889,7 @@ def command_modules_list_local(ctx, keywords, json, dir): # pylint: disable=red @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. [dim]\[default: current working directory][/]", @@ -901,11 +909,11 @@ def command_modules_list_local(ctx, keywords, json, dir): # pylint: disable=red help="Force reinstallation of module if it already exists", ) @click.option("-s", "--sha", type=str, metavar="", help="Install module at commit SHA") -def command_modules_install(ctx, tool, dir, prompt, force, sha): +def command_modules_install(ctx, tool, directory, prompt, force, sha): """ Install DSL2 modules within a pipeline. """ - modules_install(ctx, tool, dir, prompt, force, sha) + modules_install(ctx, tool, directory, prompt, force, sha) # nf-core modules update @@ -992,16 +1000,17 @@ def command_modules_update( @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. 
[dim]\[default: current working directory][/]", ) @click.option("-r", "--remove", is_flag=True, default=False) -def command_modules_patch(ctx, tool, dir, remove): +def command_modules_patch(ctx, tool, directory, remove): """ Create a patch file for minor changes in a module """ - modules_patch(ctx, tool, dir, remove) + modules_patch(ctx, tool, directory, remove) # nf-core modules remove @@ -1011,15 +1020,16 @@ def command_modules_patch(ctx, tool, dir, remove): @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. [dim]\[default: current working directory][/]", ) -def command_modules_remove(ctx, dir, tool): +def command_modules_remove(ctx, directory, tool): """ Remove a module from a pipeline. """ - modules_remove(ctx, dir, tool) + modules_remove(ctx, directory, tool) # nf-core modules create @@ -1092,7 +1102,7 @@ def command_modules_remove(ctx, dir, tool): def command_modules_create( ctx, tool, - dir, + directory, author, label, meta, @@ -1109,7 +1119,7 @@ def command_modules_create( modules_create( ctx, tool, - dir, + directory, author, label, meta, @@ -1129,6 +1139,7 @@ def command_modules_create( @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", metavar="", @@ -1154,11 +1165,11 @@ def command_modules_create( default=None, help="Run tests with a specific profile", ) -def command_modules_test(ctx, tool, dir, no_prompts, update, once, profile): +def command_modules_test(ctx, tool, directory, no_prompts, update, once, profile): """ Run nf-test for a module. 
""" - modules_test(ctx, tool, dir, no_prompts, update, once, profile) + modules_test(ctx, tool, directory, no_prompts, update, once, profile) # nf-core modules lint @@ -1168,6 +1179,7 @@ def command_modules_test(ctx, tool, dir, no_prompts, update, once, profile): @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", metavar="", @@ -1204,11 +1216,11 @@ def command_modules_test(ctx, tool, dir, no_prompts, update, once, profile): is_flag=True, help="Fix the module version if a newer version is available", ) -def command_modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed, sort_by, fix_version): +def command_modules_lint(ctx, tool, directory, registry, key, all, fail_warned, local, passed, sort_by, fix_version): """ Lint one or more modules in a directory. """ - modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed, sort_by, fix_version) + modules_lint(ctx, tool, directory, registry, key, all, fail_warned, local, passed, sort_by, fix_version) # nf-core modules info @@ -1218,15 +1230,16 @@ def command_modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. [dim]\[default: Current working directory][/]", ) -def command_modules_info(ctx, tool, dir): +def command_modules_info(ctx, tool, directory): """ Show developer usage information about a given module. 
""" - modules_info(ctx, tool, dir) + modules_info(ctx, tool, directory) # nf-core modules bump-versions @@ -1236,18 +1249,19 @@ def command_modules_info(ctx, tool, dir): @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", metavar="", ) @click.option("-a", "--all", is_flag=True, help="Run on all modules") @click.option("-s", "--show-all", is_flag=True, help="Show up-to-date modules in results too") -def command_modules_bump_versions(ctx, tool, dir, all, show_all): +def command_modules_bump_versions(ctx, tool, directory, all, show_all): """ Bump versions for one or more modules in a clone of the nf-core/modules repo. """ - modules_bump_versions(ctx, tool, dir, all, show_all) + modules_bump_versions(ctx, tool, directory, all, show_all) # nf-core subworkflows click command @@ -1313,11 +1327,11 @@ def subworkflows(ctx, git_remote, branch, no_pull): default=False, help="Migrate a module with pytest tests to nf-test", ) -def command_subworkflows_create(ctx, subworkflow, dir, author, force, migrate_pytest): +def command_subworkflows_create(ctx, subworkflow, directory, author, force, migrate_pytest): """ Create a new subworkflow from the nf-core template. """ - subworkflows_create(ctx, subworkflow, dir, author, force, migrate_pytest) + subworkflows_create(ctx, subworkflow, directory, author, force, migrate_pytest) # nf-core subworkflows test @@ -1327,6 +1341,7 @@ def command_subworkflows_create(ctx, subworkflow, dir, author, force, migrate_py @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", metavar="", @@ -1352,11 +1367,11 @@ def command_subworkflows_create(ctx, subworkflow, dir, author, force, migrate_py default=None, help="Run tests with a specific profile", ) -def command_subworkflows_test(ctx, subworkflow, dir, no_prompts, update, once, profile): +def command_subworkflows_test(ctx, subworkflow, directory, no_prompts, update, once, profile): """ Run nf-test for a subworkflow. 
""" - subworkflows_test(ctx, subworkflow, dir, no_prompts, update, once, profile) + subworkflows_test(ctx, subworkflow, directory, no_prompts, update, once, profile) # nf-core subworkflows list subcommands @@ -1389,15 +1404,16 @@ def command_subworkflows_list_remote(ctx, keywords, json): @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. [dim]\[default: Current working directory][/]", ) -def command_subworkflows_list_local(ctx, keywords, json, dir): # pylint: disable=redefined-builtin +def command_subworkflows_list_local(ctx, keywords, json, directory): # pylint: disable=redefined-builtin """ List subworkflows installed locally in a pipeline """ - subworkflows_list_local(ctx, keywords, json, dir) + subworkflows_list_local(ctx, keywords, json, directory) # nf-core subworkflows lint @@ -1407,6 +1423,7 @@ def command_subworkflows_list_local(ctx, keywords, json, dir): # pylint: disabl @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", metavar="", @@ -1438,11 +1455,11 @@ def command_subworkflows_list_local(ctx, keywords, json, dir): # pylint: disabl help="Sort lint output by subworkflow or test name.", show_default=True, ) -def command_subworkflows_lint(ctx, subworkflow, dir, registry, key, all, fail_warned, local, passed, sort_by): +def command_subworkflows_lint(ctx, subworkflow, directory, registry, key, all, fail_warned, local, passed, sort_by): """ Lint one or more subworkflows in a directory. """ - subworkflows_lint(ctx, subworkflow, dir, registry, key, all, fail_warned, local, passed, sort_by) + subworkflows_lint(ctx, subworkflow, directory, registry, key, all, fail_warned, local, passed, sort_by) # nf-core subworkflows info @@ -1452,15 +1469,16 @@ def command_subworkflows_lint(ctx, subworkflow, dir, registry, key, all, fail_wa @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. 
[dim]\[default: Current working directory][/]", ) -def command_subworkflows_info(ctx, subworkflow, dir): +def command_subworkflows_info(ctx, subworkflow, directory): """ Show developer usage information about a given subworkflow. """ - subworkflows_info(ctx, subworkflow, dir) + subworkflows_info(ctx, subworkflow, directory) # nf-core subworkflows install @@ -1470,6 +1488,7 @@ def command_subworkflows_info(ctx, subworkflow, dir): @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. [dim]\[default: current working directory][/]", @@ -1495,11 +1514,11 @@ def command_subworkflows_info(ctx, subworkflow, dir): metavar="", help="Install subworkflow at commit SHA", ) -def command_subworkflows_install(ctx, subworkflow, dir, prompt, force, sha): +def command_subworkflows_install(ctx, subworkflow, directory, prompt, force, sha): """ Install DSL2 subworkflow within a pipeline. """ - subworkflows_install(ctx, subworkflow, dir, prompt, force, sha) + subworkflows_install(ctx, subworkflow, directory, prompt, force, sha) # nf-core subworkflows remove @@ -1509,15 +1528,16 @@ def command_subworkflows_install(ctx, subworkflow, dir, prompt, force, sha): @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. [dim]\[default: current working directory][/]", ) -def command_subworkflows_remove(ctx, dir, subworkflow): +def command_subworkflows_remove(ctx, directory, subworkflow): """ Remove a subworkflow from a pipeline. """ - subworkflows_remove(ctx, dir, subworkflow) + subworkflows_remove(ctx, directory, subworkflow) # nf-core subworkflows update @@ -1527,6 +1547,7 @@ def command_subworkflows_remove(ctx, dir, subworkflow): @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. 
[dim]\[default: current working directory][/]", @@ -1587,7 +1608,7 @@ def command_subworkflows_remove(ctx, dir, subworkflow): def command_subworkflows_update( ctx, subworkflow, - dir, + directory, force, prompt, sha, @@ -1601,7 +1622,7 @@ def command_subworkflows_update( Update DSL2 subworkflow within a pipeline. """ subworkflows_update( - ctx, subworkflow, dir, force, prompt, sha, install_all, preview, save_diff, update_deps, limit_output + ctx, subworkflow, directory, force, prompt, sha, install_all, preview, save_diff, update_deps, limit_output ) @@ -1636,6 +1657,7 @@ def command_schema_validate(pipeline, params): @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. [dim]\[default: current working directory][/]", @@ -1656,14 +1678,14 @@ def command_schema_validate(pipeline, params): default="https://nf-co.re/pipeline_schema_builder", help="Customise the builder URL (for development work)", ) -def command_schema_build(dir, no_prompts, web_only, url): +def command_schema_build(directory, no_prompts, web_only, url): """ Use `nf-core pipelines schema build` instead. """ log.warning( "The `[magenta]nf-core schema build[/]` command is deprecated. Use `[magenta]nf-core pipelines schema build[/]` instead." ) - pipelines_schema_build(dir, no_prompts, web_only, url) + pipelines_schema_build(directory, no_prompts, web_only, url) # nf-core schema lint (deprecated) @@ -1764,14 +1786,14 @@ def command_schema_docs(schema_path, output, format, force, columns): default=False, help="Overwrite any files if they already exist", ) -def command_create_logo(logo_text, dir, name, theme, width, format, force): +def command_create_logo(logo_text, directory, name, theme, width, format, force): """ Use `nf-core pipelines create-logo` instead. """ log.warning( "The `[magenta]nf-core create-logo[/]` command is deprecated. Use `[magenta]nf-core pipelines screate-logo[/]` instead." 
) - pipelines_create_logo(logo_text, dir, name, theme, width, format, force) + pipelines_create_logo(logo_text, directory, name, theme, width, format, force) # nf-core sync (deprecated) @@ -1779,6 +1801,7 @@ def command_create_logo(logo_text, dir, name, theme, width, format, force): @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. [dim]\[default: current working directory][/]", @@ -1805,14 +1828,14 @@ def command_create_logo(logo_text, dir, name, theme, width, format, force): @click.option("-g", "--github-repository", type=str, help="GitHub PR: target repository.") @click.option("-u", "--username", type=str, help="GitHub PR: auth username.") @click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") -def command_sync(dir, from_branch, pull_request, github_repository, username, template_yaml, force_pr): +def command_sync(directory, from_branch, pull_request, github_repository, username, template_yaml, force_pr): """ Use `nf-core pipelines sync` instead. """ log.warning( "The `[magenta]nf-core sync[/]` command is deprecated. Use `[magenta]nf-core pipelines sync[/]` instead." ) - pipelines_sync(dir, from_branch, pull_request, github_repository, username, template_yaml, force_pr) + pipelines_sync(directory, from_branch, pull_request, github_repository, username, template_yaml, force_pr) # nf-core bump-version (deprecated) @@ -1822,6 +1845,7 @@ def command_sync(dir, from_branch, pull_request, github_repository, username, te @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory. 
[dim]\[default: current working directory][/]", @@ -1833,14 +1857,14 @@ def command_sync(dir, from_branch, pull_request, github_repository, username, te default=False, help="Bump required nextflow version instead of pipeline version", ) -def command_bump_version(ctx, new_version, dir, nextflow): +def command_bump_version(ctx, new_version, directory, nextflow): """ Use `nf-core pipelines bump-version` instead. """ log.warning( "The `[magenta]nf-core bump-version[/]` command is deprecated. Use `[magenta]nf-core pipelines bump-version[/]` instead." ) - pipelines_bump_version(ctx, new_version, dir, nextflow) + pipelines_bump_version(ctx, new_version, directory, nextflow) # nf-core list (deprecated) @@ -2086,6 +2110,7 @@ def command_download( @click.option( "-d", "--dir", + "directory", type=click.Path(exists=True), default=".", help=r"Pipeline directory [dim]\[default: current working directory][/]", @@ -2139,7 +2164,7 @@ def command_download( @click.pass_context def command_lint( ctx, - dir, + directory, release, fix, key, @@ -2156,7 +2181,7 @@ def command_lint( log.warning( "The `[magenta]nf-core lint[/]` command is deprecated. Use `[magenta]nf-core pipelines lint[/]` instead." ) - pipelines_lint(ctx, dir, release, fix, key, show_passed, fail_ignored, fail_warned, markdown, json, sort_by) + pipelines_lint(ctx, directory, release, fix, key, show_passed, fail_ignored, fail_warned, markdown, json, sort_by) # nf-core create (deprecated) diff --git a/nf_core/commands_pipelines.py b/nf_core/commands_pipelines.py index 432a36aae..c586534b2 100644 --- a/nf_core/commands_pipelines.py +++ b/nf_core/commands_pipelines.py @@ -60,7 +60,7 @@ def pipelines_create(ctx, name, description, author, version, force, outdir, tem # nf-core pipelines bump-version -def pipelines_bump_version(ctx, new_version, dir, nextflow): +def pipelines_bump_version(ctx, new_version, directory, nextflow): """ Update nf-core pipeline version number. 
@@ -78,10 +78,10 @@ def pipelines_bump_version(ctx, new_version, dir, nextflow): try: # Check if pipeline directory contains necessary files - is_pipeline_directory(dir) + is_pipeline_directory(directory) # Make a pipeline object and load config etc - pipeline_obj = Pipeline(dir) + pipeline_obj = Pipeline(directory) pipeline_obj._load() # Bump the pipeline version number @@ -97,7 +97,7 @@ def pipelines_bump_version(ctx, new_version, dir, nextflow): # nf-core pipelines lint def pipelines_lint( ctx, - dir, + directory, release, fix, key, @@ -123,7 +123,7 @@ def pipelines_lint( # Check if pipeline directory is a pipeline try: - is_pipeline_directory(dir) + is_pipeline_directory(directory) except UserWarning as e: log.error(e) sys.exit(1) @@ -131,7 +131,7 @@ def pipelines_lint( # Run the lint tests! try: lint_obj, module_lint_obj, subworkflow_lint_obj = run_linting( - dir, + directory, release, fix, key, @@ -279,7 +279,7 @@ def pipelines_list(ctx, keywords, sort, json, show_archived): # nf-core pipelines sync -def pipelines_sync(ctx, dir, from_branch, pull_request, github_repository, username, template_yaml, force_pr): +def pipelines_sync(ctx, directory, from_branch, pull_request, github_repository, username, template_yaml, force_pr): """ Sync a pipeline [cyan i]TEMPLATE[/] branch with the nf-core template. 
@@ -295,12 +295,13 @@ def pipelines_sync(ctx, dir, from_branch, pull_request, github_repository, usern from nf_core.pipelines.sync import PipelineSync, PullRequestExceptionError, SyncExceptionError from nf_core.utils import is_pipeline_directory - # Check if pipeline directory contains necessary files - is_pipeline_directory(dir) - - # Sync the given pipeline dir - sync_obj = PipelineSync(dir, from_branch, pull_request, github_repository, username, template_yaml, force_pr) try: + # Check if pipeline directory contains necessary files + is_pipeline_directory(directory) + # Sync the given pipeline dir + sync_obj = PipelineSync( + directory, from_branch, pull_request, github_repository, username, template_yaml, force_pr + ) sync_obj.sync() except (SyncExceptionError, PullRequestExceptionError) as e: log.error(e) @@ -360,7 +361,7 @@ def pipelines_schema_validate(pipeline, params): # nf-core pipelines schema build -def pipelines_schema_build(dir, no_prompts, web_only, url): +def pipelines_schema_build(directory, no_prompts, web_only, url): """ Interactively build a pipeline schema from Nextflow params. 
@@ -376,7 +377,7 @@ def pipelines_schema_build(dir, no_prompts, web_only, url): try: schema_obj = PipelineSchema() - if schema_obj.build_schema(dir, no_prompts, web_only, url) is False: + if schema_obj.build_schema(directory, no_prompts, web_only, url) is False: sys.exit(1) except (UserWarning, AssertionError) as e: log.error(e) From 5a8ae1240e646413d3c483cb8d67f8d1581495cb Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 25 Jul 2024 16:18:50 +0200 Subject: [PATCH 359/737] fix pydantic warnings --- nf_core/pipelines/create/create.py | 28 ++++++++++++---------------- nf_core/pipelines/sync.py | 2 +- nf_core/utils.py | 8 ++++++-- 3 files changed, 19 insertions(+), 19 deletions(-) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index be07c0c29..a2df9aeef 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -21,7 +21,7 @@ from nf_core.pipelines.create.utils import CreateConfig from nf_core.pipelines.create_logo import create_logo from nf_core.pipelines.lint_utils import run_prettier_on_file -from nf_core.utils import LintConfigType +from nf_core.utils import LintConfigType, NFCoreTemplateConfig log = logging.getLogger(__name__) @@ -111,7 +111,7 @@ def __init__( self.is_interactive = is_interactive self.force = self.config.force if self.config.outdir is None: - self.config.outdir = os.getcwd() + self.config.outdir = str(Path.cwd()) if self.config.outdir == ".": self.outdir = Path(self.config.outdir, self.jinja_params["name_noslash"]).absolute() else: @@ -289,13 +289,13 @@ def render_template(self): log.info("Use -f / --force to overwrite existing files") raise UserWarning(f"Output directory '{self.outdir}' exists!") else: - os.makedirs(self.outdir) + self.outdir.mkdir(parents=True, exist_ok=True) # Run jinja2 for each file in the template folder env = jinja2.Environment( loader=jinja2.PackageLoader("nf_core", "pipeline-template"), keep_trailing_newline=True ) - template_dir = 
os.path.join(os.path.dirname(nf_core.__file__), "pipeline-template") + template_dir = Path(nf_core.__file__).parent / "pipeline-template" object_attrs = self.jinja_params object_attrs["nf_core_version"] = nf_core.__version__ @@ -310,26 +310,24 @@ def render_template(self): } # Set the paths to skip according to customization - for template_fn_path_obj in template_files: - template_fn_path = str(template_fn_path_obj) - + for template_fn_path in template_files: # Skip files that are in the self.skip_paths list for skip_path in self.skip_paths: - if os.path.relpath(template_fn_path, template_dir).startswith(skip_path): + if str(template_fn_path.relative_to(template_dir)).startswith(skip_path): break else: - if os.path.isdir(template_fn_path): + if template_fn_path.is_dir(): continue - if any([s in template_fn_path for s in ignore_strs]): + if any([s in str(template_fn_path) for s in ignore_strs]): log.debug(f"Ignoring '{template_fn_path}' in jinja2 template creation") continue # Set up vars and directories - template_fn = os.path.relpath(template_fn_path, template_dir) + template_fn = template_fn_path.relative_to(template_dir) output_path = self.outdir / template_fn if template_fn in rename_files: output_path = self.outdir / rename_files[template_fn] - os.makedirs(os.path.dirname(output_path), exist_ok=True) + output_path.parent.mkdir(parents=True, exist_ok=True) try: # Just copy binary files @@ -338,7 +336,7 @@ def render_template(self): # Got this far - render the template log.debug(f"Rendering template file: '{template_fn}'") - j_template = env.get_template(template_fn) + j_template = env.get_template(str(template_fn)) rendered_output = j_template.render(object_attrs) # Write to the pipeline output file @@ -379,9 +377,7 @@ def render_template(self): config_fn, config_yml = nf_core.utils.load_tools_config(self.outdir) if config_fn is not None and config_yml is not None: with open(str(config_fn), "w") as fh: - config_yml.template = self.config.model_dump() - # 
convert posix path to string for yaml dump - config_yml["template"]["outdir"] = str(config_yml["template"]["outdir"]) + config_yml.template = NFCoreTemplateConfig(**self.config.model_dump()) yaml.safe_dump(config_yml.model_dump(), fh) log.debug(f"Dumping pipeline template yml to pipeline config file '{config_fn.name}'") run_prettier_on_file(self.outdir / config_fn) diff --git a/nf_core/pipelines/sync.py b/nf_core/pipelines/sync.py index b1da99a62..d044be84e 100644 --- a/nf_core/pipelines/sync.py +++ b/nf_core/pipelines/sync.py @@ -274,7 +274,7 @@ def make_template_pipeline(self): version=self.wf_config["manifest.version"].strip('"').strip("'"), no_git=True, force=True, - outdir=self.pipeline_dir, + outdir=str(self.pipeline_dir), author=self.wf_config["manifest.author"].strip('"').strip("'"), ).init_pipeline() except Exception as err: diff --git a/nf_core/utils.py b/nf_core/utils.py index 6794cf04f..0beb8c736 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1052,6 +1052,12 @@ class NFCoreTemplateConfig(BaseModel): skip_features: Optional[list] = None is_nfcore: Optional[bool] = None + def __getitem__(self, item: str) -> Any: + return getattr(self, item) + + def get(self, item: str, default: Any = None) -> Any: + return getattr(self, item, default) + LintConfigType = Optional[Dict[str, Union[List[str], List[Dict[str, List[str]]], bool]]] @@ -1086,7 +1092,6 @@ def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Optional[Path] tools_config = {} config_fn = get_first_available_path(directory, CONFIG_PATHS) - if config_fn is None: depr_path = get_first_available_path(directory, DEPRECATED_CONFIG_PATHS) if depr_path: @@ -1102,7 +1107,6 @@ def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Optional[Path] # If the file is empty if tools_config is None: raise AssertionError(f"Config file '{config_fn}' is empty") - # Check for required fields try: nf_core_yaml_config = NFCoreYamlConfig(**tools_config) From 
ac1ea23974df7182c02a2619a2363010590c1b23 Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 25 Jul 2024 16:30:53 +0200 Subject: [PATCH 360/737] fix notrequired not found --- nf_core/modules/modules_json.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index e9b4aa102..f99227174 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -6,12 +6,13 @@ import shutil import tempfile from pathlib import Path -from typing import Dict, List, NotRequired, Optional, Tuple, TypedDict, Union +from typing import Dict, List, Optional, Tuple, Union import git import questionary import rich.prompt from git.exc import GitCommandError +from typing_extensions import NotRequired, TypedDict # for py<3.11 import nf_core.utils from nf_core.components.components_utils import NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE, get_components_to_install From 79727c0b430f495468011233ec3b761d275a7be7 Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 25 Jul 2024 17:28:53 +0200 Subject: [PATCH 361/737] fix pydantic type --- nf_core/pipelines/create/create.py | 1 + 1 file changed, 1 insertion(+) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index a2df9aeef..127b1f607 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -378,6 +378,7 @@ def render_template(self): if config_fn is not None and config_yml is not None: with open(str(config_fn), "w") as fh: config_yml.template = NFCoreTemplateConfig(**self.config.model_dump()) + config_yml.template.outdir = str(config_yml.template.outdir) yaml.safe_dump(config_yml.model_dump(), fh) log.debug(f"Dumping pipeline template yml to pipeline config file '{config_fn.name}'") run_prettier_on_file(self.outdir / config_fn) From ee67c5fdd7f764f1f16f10f28e75f6f1f45941f5 Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 25 Jul 2024 17:31:34 +0200 Subject: [PATCH 362/737] fix 
before model creation --- nf_core/pipelines/create/create.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 127b1f607..acbe7efd5 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -377,8 +377,9 @@ def render_template(self): config_fn, config_yml = nf_core.utils.load_tools_config(self.outdir) if config_fn is not None and config_yml is not None: with open(str(config_fn), "w") as fh: + self.config.outdir = str(self.config.outdir) config_yml.template = NFCoreTemplateConfig(**self.config.model_dump()) - config_yml.template.outdir = str(config_yml.template.outdir) + yaml.safe_dump(config_yml.model_dump(), fh) log.debug(f"Dumping pipeline template yml to pipeline config file '{config_fn.name}'") run_prettier_on_file(self.outdir / config_fn) From dae348e4de483f5b24f92bcc41ddb1b843f688bf Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 25 Jul 2024 17:33:33 +0200 Subject: [PATCH 363/737] fix modules repo import error --- tests/modules/test_modules_json.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/modules/test_modules_json.py b/tests/modules/test_modules_json.py index b2cac99e6..0368c146c 100644 --- a/tests/modules/test_modules_json.py +++ b/tests/modules/test_modules_json.py @@ -7,9 +7,9 @@ NF_CORE_MODULES_DEFAULT_BRANCH, NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE, - ModulesRepo, ) from nf_core.modules.modules_json import ModulesJson +from nf_core.modules.modules_repo import ModulesRepo from nf_core.modules.patch import ModulePatch from ..test_modules import TestModules From 9ed6cdcc28d4be17175ae5a4a98f5278949c3db9 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 24 Jul 2024 15:55:30 +0000 Subject: [PATCH 364/737] Update python:3.12-slim Docker digest to 740d94a --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/Dockerfile b/Dockerfile index 8943b7062..c88abcb1c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.12-slim@sha256:f11725aba18c19664a408902103365eaf8013823ffc56270f921d1dc78a198cb +FROM python:3.12-slim@sha256:740d94a19218c8dd584b92f804b1158f85b0d241e5215ea26ed2dcade2b9d138 LABEL authors="phil.ewels@seqera.io,erik.danielsson@scilifelab.se" \ description="Docker image containing requirements for nf-core/tools" From 7b4d135569320507bdbfb99ff459aa4d195d9357 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Thu, 25 Jul 2024 17:51:32 +0000 Subject: [PATCH 365/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index b407f5349..b27dce4b5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -53,6 +53,7 @@ - handle new jsonschema error type ([#3061](https://github.com/nf-core/tools/pull/3061)) - Update python:3.12-slim Docker digest to f11725a ([#3071](https://github.com/nf-core/tools/pull/3071)) - Fix number of arguments for pipelines_create within the command_create function ([#3074](https://github.com/nf-core/tools/pull/3074)) +- Update python:3.12-slim Docker digest to 740d94a ([#3079](https://github.com/nf-core/tools/pull/3079)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From fcc859ef3da2d369c5a5fc7e031d8dcd03d5a72b Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 25 Jul 2024 19:45:20 +0000 Subject: [PATCH 366/737] Update pre-commit hook pre-commit/mirrors-mypy to v1.11.0 --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index fc2011cb9..63202e517 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -19,7 +19,7 @@ repos: alias: ec - repo: https://github.com/pre-commit/mirrors-mypy - rev: "v1.10.1" + rev: "v1.11.0" hooks: - id: mypy 
additional_dependencies: From baa1a3c27c9ae947db118f22ba7a3e0b746f9dfd Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 25 Jul 2024 20:04:49 +0000 Subject: [PATCH 367/737] Update gitpod/workspace-base Docker digest to f189a41 --- nf_core/gitpod/gitpod.Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/gitpod/gitpod.Dockerfile b/nf_core/gitpod/gitpod.Dockerfile index 2f458dfc2..fd0e8bb79 100644 --- a/nf_core/gitpod/gitpod.Dockerfile +++ b/nf_core/gitpod/gitpod.Dockerfile @@ -1,7 +1,7 @@ # Test build locally before making a PR # docker build -t gitpod:test -f nf_core/gitpod/gitpod.Dockerfile . -FROM gitpod/workspace-base@sha256:0f3822450f94084f6a62db4a4282d895591f6a55632dc044fe496f98cb79e75c +FROM gitpod/workspace-base@sha256:f189a4195c3861365356f9c1b438ab26fd88e1ff46ce2843afc62861fc982e0c USER root From b53a4b82436f0313fa5c0896e9edd869c3043084 Mon Sep 17 00:00:00 2001 From: mashehu Date: Fri, 26 Jul 2024 09:30:22 +0200 Subject: [PATCH 368/737] fix incorrect type name --- nf_core/modules/modules_differ.py | 6 +++--- nf_core/modules/modules_json.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/nf_core/modules/modules_differ.py b/nf_core/modules/modules_differ.py index 36d927f08..e310b3bf8 100644 --- a/nf_core/modules/modules_differ.py +++ b/nf_core/modules/modules_differ.py @@ -4,7 +4,7 @@ import logging import os from pathlib import Path -from typing import List, Union +from typing import Dict, List, Union from rich.console import Console from rich.syntax import Syntax @@ -296,7 +296,7 @@ def print_diff( console.print(Syntax("".join(diff), "diff", theme="ansi_dark", padding=1)) @staticmethod - def per_file_patch(patch_fn: Union[str, Path]) -> dict[str, List[str]]: + def per_file_patch(patch_fn: Union[str, Path]) -> Dict[str, List[str]]: """ Splits a patch file for several files into one patch per file. 
@@ -453,7 +453,7 @@ def try_apply_single_patch(file_lines, patch, reverse=False): @staticmethod def try_apply_patch( module: str, repo_path: Union[str, Path], patch_path: Union[str, Path], module_dir: Path, reverse: bool = False - ) -> dict[str, List[str]]: + ) -> Dict[str, List[str]]: """ Try applying a full patch file to a module diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index f99227174..4f3578b02 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -1000,7 +1000,7 @@ def get_subworkflow_version(self, subworkflow_name, repo_url, install_dir): .get("git_sha", None) ) - def get_all_components(self, component_type: str) -> dict[str, List[Tuple[(str, str)]]]: + def get_all_components(self, component_type: str) -> Dict[str, List[Tuple[(str, str)]]]: """ Retrieves all pipeline modules/subworkflows that are reported in the modules.json From bdbdd17a24c8aee70339f22d0bc0994c2f65fe4e Mon Sep 17 00:00:00 2001 From: mashehu Date: Fri, 26 Jul 2024 11:50:17 +0200 Subject: [PATCH 369/737] fix types --- nf_core/pipelines/create/create.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index acbe7efd5..5e57d27c3 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -81,8 +81,11 @@ def __init__( else: raise UserWarning("The template configuration was not provided.") + if self.config.outdir is None: + self.config.outdir = str(Path.cwd()) + self.jinja_params, skip_paths = self.obtain_jinja_params_dict( - self.config.skip_features or [], self.config.outdir + self.config.skip_features or [], Path(self.config.outdir) ) skippable_paths = { @@ -110,8 +113,7 @@ def __init__( self.default_branch = default_branch self.is_interactive = is_interactive self.force = self.config.force - if self.config.outdir is None: - self.config.outdir = str(Path.cwd()) + if self.config.outdir == ".": 
self.outdir = Path(self.config.outdir, self.jinja_params["name_noslash"]).absolute() else: @@ -184,7 +186,7 @@ def update_config(self, organisation, version, force, outdir): if self.config.is_nfcore is None: self.config.is_nfcore = self.config.org == "nf-core" - def obtain_jinja_params_dict(self, features_to_skip, pipeline_dir): + def obtain_jinja_params_dict(self, features_to_skip: List[str], pipeline_dir: Union[str, Path]): """Creates a dictionary of parameters for the new pipeline. Args: @@ -379,7 +381,6 @@ def render_template(self): with open(str(config_fn), "w") as fh: self.config.outdir = str(self.config.outdir) config_yml.template = NFCoreTemplateConfig(**self.config.model_dump()) - yaml.safe_dump(config_yml.model_dump(), fh) log.debug(f"Dumping pipeline template yml to pipeline config file '{config_fn.name}'") run_prettier_on_file(self.outdir / config_fn) From 98c3fb0178a4548fcab1db5055cedd1586f0c915 Mon Sep 17 00:00:00 2001 From: mashehu Date: Fri, 26 Jul 2024 11:54:07 +0200 Subject: [PATCH 370/737] fix type definition for older python version --- nf_core/modules/modules_json.py | 2 +- nf_core/pipelines/lint/modules_json.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 4f3578b02..9f17add6a 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -115,7 +115,7 @@ def create(self) -> None: self.dump() def get_component_names_from_repo( - self, repos: Dict[str, Dict[str, Dict[str, Dict[str, Dict[str, str | List[str]]]]]], directory: Path + self, repos: Dict[str, Dict[str, Dict[str, Dict[str, Dict[str, Union[str, List[str]]]]]]], directory: Path ) -> List[Tuple[str, List[str], str]]: """ Get component names from repositories in a pipeline. 
diff --git a/nf_core/pipelines/lint/modules_json.py b/nf_core/pipelines/lint/modules_json.py index 5ce205403..2b7c26684 100644 --- a/nf_core/pipelines/lint/modules_json.py +++ b/nf_core/pipelines/lint/modules_json.py @@ -1,5 +1,5 @@ from pathlib import Path -from typing import Dict, List +from typing import Dict, List, Union from nf_core.modules.modules_json import ModulesJson, ModulesJsonType @@ -19,7 +19,7 @@ def modules_json(self) -> Dict[str, List[str]]: # Load pipeline modules and modules.json _modules_json = ModulesJson(self.wf_path) _modules_json.load() - modules_json_dict: ModulesJsonType | None = _modules_json.modules_json + modules_json_dict: Union[ModulesJsonType, None] = _modules_json.modules_json modules_dir = Path(self.wf_path, "modules") if _modules_json and modules_json_dict is not None: From 7b883eb1f01bac9249428a3821c9a4454dc715ca Mon Sep 17 00:00:00 2001 From: mashehu Date: Fri, 26 Jul 2024 12:11:41 +0200 Subject: [PATCH 371/737] fix incorrect types and missing cli option names --- nf_core/__main__.py | 6 +++--- nf_core/pipelines/create/create.py | 9 +++++---- nf_core/pipelines/sync.py | 21 +++++++++++---------- 3 files changed, 19 insertions(+), 17 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 91af97958..d0aad63e8 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -1036,7 +1036,7 @@ def command_modules_remove(ctx, directory, tool): @modules.command("create") @click.pass_context @click.argument("tool", type=str, required=False, metavar=" or ") -@click.option("-d", "--dir", type=click.Path(exists=True), default=".", metavar="") +@click.option("-d", "--dir", "directory", type=click.Path(exists=True), default=".", metavar="") @click.option( "-a", "--author", @@ -1306,7 +1306,7 @@ def subworkflows(ctx, git_remote, branch, no_pull): @subworkflows.command("create") @click.pass_context @click.argument("subworkflow", type=str, required=False, metavar="subworkflow name") -@click.option("-d", "--dir", 
type=click.Path(exists=True), default=".", metavar="") +@click.option("-d", "--dir", "directory", type=click.Path(exists=True), default=".", metavar="") @click.option( "-a", "--author", @@ -1751,7 +1751,7 @@ def command_schema_docs(schema_path, output, format, force, columns): # nf-core create-logo (deprecated) @nf_core_cli.command("create-logo", deprecated=True, hidden=True) @click.argument("logo-text", metavar="") -@click.option("-d", "--dir", type=click.Path(), default=".", help="Directory to save the logo in.") +@click.option("-d", "--dir", "directory", type=click.Path(), default=".", help="Directory to save the logo in.") @click.option( "-n", "--name", diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 5e57d27c3..15eed46dc 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -278,7 +278,7 @@ def init_pipeline(self): "https://nf-co.re/docs/tutorials/adding_a_pipeline/overview#join-the-community[/link]" ) - def render_template(self): + def render_template(self) -> None: """Runs Jinja to create a new nf-core pipeline.""" log.info(f"Creating new pipeline: '{self.name}'") @@ -306,7 +306,7 @@ def render_template(self): template_files += list(Path(template_dir).glob("*")) ignore_strs = [".pyc", "__pycache__", ".pyo", ".pyd", ".DS_Store", ".egg"] short_name = self.jinja_params["short_name"] - rename_files = { + rename_files: Dict[str, str] = { "workflows/pipeline.nf": f"workflows/{short_name}.nf", "subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf": f"subworkflows/local/utils_nfcore_{short_name}_pipeline/main.nf", } @@ -327,8 +327,9 @@ def render_template(self): # Set up vars and directories template_fn = template_fn_path.relative_to(template_dir) output_path = self.outdir / template_fn - if template_fn in rename_files: - output_path = self.outdir / rename_files[template_fn] + + if str(template_fn) in rename_files: + output_path = self.outdir / rename_files[str(template_fn)] 
output_path.parent.mkdir(parents=True, exist_ok=True) try: diff --git a/nf_core/pipelines/sync.py b/nf_core/pipelines/sync.py index d044be84e..741bdd751 100644 --- a/nf_core/pipelines/sync.py +++ b/nf_core/pipelines/sync.py @@ -6,6 +6,7 @@ import re import shutil from pathlib import Path +from typing import Dict, Optional, Union import git import questionary @@ -61,24 +62,24 @@ class PipelineSync: def __init__( self, - pipeline_dir, - from_branch=None, - make_pr=False, - gh_repo=None, - gh_username=None, - template_yaml_path=None, - force_pr=False, + pipeline_dir: Union[str, Path], + from_branch: Optional[str] = None, + make_pr: bool = False, + gh_repo: Optional[str] = None, + gh_username: Optional[str] = None, + template_yaml_path: Optional[str] = None, + force_pr: bool = False, ): """Initialise syncing object""" - self.pipeline_dir = Path(pipeline_dir).resolve() + self.pipeline_dir: Path = Path(pipeline_dir).resolve() self.from_branch = from_branch self.original_branch = None self.original_merge_branch = f"nf-core-template-merge-{nf_core.__version__}" self.merge_branch = self.original_merge_branch self.made_changes = False self.make_pr = make_pr - self.gh_pr_returned_data = {} + self.gh_pr_returned_data: Dict = {} self.required_config_vars = ["manifest.name", "manifest.description", "manifest.version", "manifest.author"] self.force_pr = force_pr @@ -87,7 +88,7 @@ def __init__( self.pr_url = "" self.config_yml_path, self.config_yml = nf_core.utils.load_tools_config(self.pipeline_dir) - assert self.config_yml_path is not None # mypy + assert self.config_yml_path is not None and self.config_yml is not None # mypy # Throw deprecation warning if template_yaml_path is set if template_yaml_path is not None: log.warning( From 0fe3910c54251b005a0cbc493de556d865310c1e Mon Sep 17 00:00:00 2001 From: mashehu Date: Fri, 26 Jul 2024 12:28:30 +0200 Subject: [PATCH 372/737] fix create-logo cli command --- nf_core/__main__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) 
diff --git a/nf_core/__main__.py b/nf_core/__main__.py index d0aad63e8..11ab574cc 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -650,7 +650,7 @@ def command_pipelines_bump_version(ctx, new_version, directory, nextflow): # nf-core pipelines create-logo @pipelines.command("create-logo") @click.argument("logo-text", metavar="") -@click.option("-d", "--dir", type=click.Path(), default=".", help="Directory to save the logo in.") +@click.option("-d", "--dir", "directory", type=click.Path(), default=".", help="Directory to save the logo in.") @click.option( "-n", "--name", From ce325366e04551b46d109b572e95320c7dc39023 Mon Sep 17 00:00:00 2001 From: mashehu Date: Fri, 26 Jul 2024 14:47:53 +0200 Subject: [PATCH 373/737] add types, include review comments, add pydantic mypy plugin --- .pre-commit-config.yaml | 1 + mypy.ini | 1 + nf_core/components/components_command.py | 6 +++++- nf_core/components/components_utils.py | 2 +- nf_core/components/info.py | 2 +- nf_core/components/lint/__init__.py | 5 +++-- 6 files changed, 12 insertions(+), 5 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index bcf7ff65c..f763fa665 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -28,3 +28,4 @@ repos: - types-jsonschema - types-Markdown - types-setuptools + - pydantic diff --git a/mypy.ini b/mypy.ini index c48aa5884..5a9522316 100644 --- a/mypy.ini +++ b/mypy.ini @@ -1,3 +1,4 @@ [mypy] warn_unused_configs = True ignore_missing_imports = true +plugins = pydantic.mypy diff --git a/nf_core/components/components_command.py b/nf_core/components/components_command.py index 4440dc32a..91e14d060 100644 --- a/nf_core/components/components_command.py +++ b/nf_core/components/components_command.py @@ -254,7 +254,11 @@ def check_patch_paths(self, patch_path: Path, module_name: str) -> None: # Update path in modules.json if the file is in the correct format modules_json = ModulesJson(self.directory) modules_json.load() - if 
modules_json.has_git_url_and_modules() and modules_json.modules_json is not None: + if ( + modules_json.has_git_url_and_modules() + and self.modules_repo.repo_path is not None + and modules_json.modules_json is not None + ): modules_json.modules_json["repos"][self.modules_repo.remote_url]["modules"][ self.modules_repo.repo_path ][module_name]["patch"] = str(patch_path.relative_to(Path(self.directory).resolve())) diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index 4e9c0ac60..3d64dc1bb 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -77,7 +77,7 @@ def get_repo_info(directory: Path, use_prompt: Optional[bool] = True) -> Tuple[P ).unsafe_ask() log.info("To avoid this prompt in the future, add the 'org_path' key to a root '%s' file.", config_fn.name) if rich.prompt.Confirm.ask("[bold][blue]?[/] Would you like me to add this config now?", default=True): - with open(str(config_fn), "a+") as fh: + with open(config_fn, "a+") as fh: fh.write(f"org_path: {org}\n") log.info(f"Config added to '{config_fn.name}'") diff --git a/nf_core/components/info.py b/nf_core/components/info.py index 55a95593f..726586b5b 100644 --- a/nf_core/components/info.py +++ b/nf_core/components/info.py @@ -107,7 +107,7 @@ def init_mod_name(self, component): elif self.repo_type == "pipeline": assert self.modules_json is not None # mypy all_components = self.modules_json.get_all_components(self.component_type).get( - self.modules_repo.remote_url, [] + self.modules_repo.remote_url, {} ) components = [ component if directory == self.modules_repo.repo_path else f"{directory}/{component}" diff --git a/nf_core/components/lint/__init__.py b/nf_core/components/lint/__init__.py index e2475ef62..ddf5e1e16 100644 --- a/nf_core/components/lint/__init__.py +++ b/nf_core/components/lint/__init__.py @@ -22,6 +22,7 @@ from nf_core.components.nfcore_component import NFCoreComponent from nf_core.modules.modules_json import 
ModulesJson from nf_core.pipelines.lint_utils import console +from nf_core.utils import LintConfigType from nf_core.utils import plural_s as _s log = logging.getLogger(__name__) @@ -77,8 +78,8 @@ def __init__( self.failed: List[LintResult] = [] self.all_local_components: List[NFCoreComponent] = [] - self.lint_config = None - self.modules_json = None + self.lint_config: Optional[LintConfigType] = None + self.modules_json: Optional[ModulesJson] = None if self.component_type == "modules": self.lint_tests = self.get_all_module_lint_tests(self.repo_type == "pipeline") From 8eeaf28402013b4f54af693f80836de92c8d2cf7 Mon Sep 17 00:00:00 2001 From: mashehu Date: Fri, 26 Jul 2024 14:50:35 +0200 Subject: [PATCH 374/737] fix mypy error --- nf_core/modules/lint/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/modules/lint/__init__.py b/nf_core/modules/lint/__init__.py index cea75d8f2..fcf2d7d06 100644 --- a/nf_core/modules/lint/__init__.py +++ b/nf_core/modules/lint/__init__.py @@ -238,7 +238,7 @@ def lint_module( # Otherwise run all the lint tests else: - if self.repo_type == "pipeline" and self.modules_json: + if self.repo_type == "pipeline" and self.modules_json and mod.repo_url: # Set correct sha version = self.modules_json.get_module_version(mod.component_name, mod.repo_url, mod.org) mod.git_sha = version From 95de96b7bddf192cd5ef3ea75218236ec73e02c4 Mon Sep 17 00:00:00 2001 From: mashehu Date: Fri, 26 Jul 2024 15:27:31 +0200 Subject: [PATCH 375/737] allow dashes in pipeilne short name (why didn't this fail before?) 
--- nf_core/pipelines/create/create.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 15eed46dc..dcb6d2c99 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -249,7 +249,7 @@ def obtain_jinja_params_dict(self, features_to_skip: List[str], pipeline_dir: Un return jinja_params, skip_paths # Check that the pipeline name matches the requirements - if not re.match(r"^[a-z]+$", jinja_params["short_name"]): + if not re.match(r"^[a-z-]+$", jinja_params["short_name"]): if jinja_params["is_nfcore"]: raise UserWarning("[red]Invalid workflow name: must be lowercase without punctuation.") else: From 1b6f231ebc41665a7cad108fe4fdbc7b38d218f0 Mon Sep 17 00:00:00 2001 From: mashehu Date: Fri, 26 Jul 2024 15:27:59 +0200 Subject: [PATCH 376/737] Revert "allow dashes in pipeilne short name (why didn't this fail before?)" This reverts commit 95de96b7bddf192cd5ef3ea75218236ec73e02c4. 
--- nf_core/pipelines/create/create.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index dcb6d2c99..15eed46dc 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -249,7 +249,7 @@ def obtain_jinja_params_dict(self, features_to_skip: List[str], pipeline_dir: Un return jinja_params, skip_paths # Check that the pipeline name matches the requirements - if not re.match(r"^[a-z-]+$", jinja_params["short_name"]): + if not re.match(r"^[a-z]+$", jinja_params["short_name"]): if jinja_params["is_nfcore"]: raise UserWarning("[red]Invalid workflow name: must be lowercase without punctuation.") else: From 5186ac71523c0fe16978c3857325365f02d1093b Mon Sep 17 00:00:00 2001 From: mashehu Date: Fri, 26 Jul 2024 16:25:41 +0200 Subject: [PATCH 377/737] use path.walk correctly --- nf_core/components/components_command.py | 8 ++++---- nf_core/modules/modules_json.py | 4 ++-- nf_core/pipelines/lint/merge_markers.py | 2 +- nf_core/pipelines/lint/modules_structure.py | 2 +- nf_core/pipelines/lint/pipeline_todos.py | 2 +- nf_core/synced_repo.py | 2 +- 6 files changed, 10 insertions(+), 10 deletions(-) diff --git a/nf_core/components/components_command.py b/nf_core/components/components_command.py index 91e14d060..5cde00ef6 100644 --- a/nf_core/components/components_command.py +++ b/nf_core/components/components_command.py @@ -87,7 +87,7 @@ def get_components_clone_modules(self) -> List[str]: component_base_path = Path(self.directory, self.default_subworkflows_path) return [ str(Path(directory).relative_to(component_base_path)) - for directory, _, files in Path.walk(component_base_path) + for directory, _, files in Path(component_base_path).walk() if "main.nf" in files ] @@ -126,7 +126,7 @@ def clear_component_dir(self, component_name: str, component_dir: Union[str, Pat try: shutil.rmtree(component_dir) # remove all empty directories - for dir_path, dir_names, 
filenames in Path.walk(Path(self.directory), top_down=False): + for dir_path, dir_names, filenames in Path(self.directory).walk(top_down=False): if not dir_names and not filenames: try: dir_path.rmdir() @@ -157,7 +157,7 @@ def components_from_repo(self, install_dir: str) -> List[str]: return [ str(Path(dir_path).relative_to(repo_dir)) - for dir_path, _, files in Path.walk(repo_dir) + for dir_path, _, files in Path(repo_dir).walk() if "main.nf" in files ] @@ -202,7 +202,7 @@ def check_modules_structure(self) -> None: """ if self.repo_type == "pipeline": wrong_location_modules: List[Path] = [] - for directory, _, files in Path.walk(Path(self.directory, "modules")): + for directory, _, files in Path(self.directory, "modules").walk(): if "main.nf" in files: module_path = Path(directory).relative_to(Path(self.directory, "modules")) parts = module_path.parts diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 9f17add6a..3f4fb46f2 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -134,7 +134,7 @@ def get_component_names_from_repo( repo_url, [ str(component_name.relative_to(directory / modules_repo.repo_path)) - for component_name, _, file_names in Path.walk(directory / modules_repo.repo_path) + for component_name, _, file_names in Path(directory / modules_repo.repo_path).walk() if "main.nf" in file_names ], modules_repo.repo_path, @@ -1189,7 +1189,7 @@ def components_with_repos(): self.modules_dir, modules_repo.repo_path, ) - for dir_name, _, _ in Path.walk(repo_url_path): + for dir_name, _, _ in repo_url_path.walk(): if component_type == "modules": if len(Path(directory).parts) > 1: # The module name is TOOL/SUBTOOL paths_in_directory.append(str(Path(*Path(dir_name).parts[-2:]))) diff --git a/nf_core/pipelines/lint/merge_markers.py b/nf_core/pipelines/lint/merge_markers.py index 208c9f4bc..4e2f73fb8 100644 --- a/nf_core/pipelines/lint/merge_markers.py +++ b/nf_core/pipelines/lint/merge_markers.py @@ 
-42,7 +42,7 @@ def merge_markers(self): with open(Path(self.wf_path, ".gitignore"), encoding="latin1") as fh: for line in fh: ignore.append(Path(line.strip().rstrip("/")).name) - for root, dirs, files in Path.walk(self.wf_path, top_down=True): + for root, dirs, files in Path(self.wf_path).walk(top_down=True): # Ignore files for i_base in ignore: i = str(Path(root, i_base)) diff --git a/nf_core/pipelines/lint/modules_structure.py b/nf_core/pipelines/lint/modules_structure.py index fd29942ed..34cdc8956 100644 --- a/nf_core/pipelines/lint/modules_structure.py +++ b/nf_core/pipelines/lint/modules_structure.py @@ -19,7 +19,7 @@ def modules_structure(self): modules/nf-core/modules/TOOL/SUBTOOL """ wrong_location_modules = [] - for directory, _, files in Path.walk(Path(self.wf_path, "modules")): + for directory, _, files in Path(self.wf_path, "modules").walk(): if "main.nf" in files: module_path = Path(directory).relative_to(Path(self.wf_path, "modules")) parts = module_path.parts diff --git a/nf_core/pipelines/lint/pipeline_todos.py b/nf_core/pipelines/lint/pipeline_todos.py index 530d85376..0a21d0d05 100644 --- a/nf_core/pipelines/lint/pipeline_todos.py +++ b/nf_core/pipelines/lint/pipeline_todos.py @@ -43,7 +43,7 @@ def pipeline_todos(self, root_dir=None): with open(Path(root_dir, ".gitignore"), encoding="latin1") as fh: for line in fh: ignore.append(Path(line.strip().rstrip("/")).name) - for root, dirs, files in Path.walk(root_dir, top_down=True): + for root, dirs, files in Path(root_dir).walk(top_down=True): # # Ignore files for i_base in ignore: i = str(Path(root, i_base)) diff --git a/nf_core/synced_repo.py b/nf_core/synced_repo.py index 8efdd0e48..22cdcd157 100644 --- a/nf_core/synced_repo.py +++ b/nf_core/synced_repo.py @@ -467,7 +467,7 @@ def get_avail_components( # Module/Subworkflow directories are characterized by having a 'main.nf' file avail_component_names = [ str(Path(dirpath).relative_to(directory)) - for dirpath, _, files in Path.walk(directory) + for 
dirpath, _, files in Path(directory).walk() if "main.nf" in files ] return avail_component_names From 873398727b6dd8e6a1046f6a91c45bad8fd26825 Mon Sep 17 00:00:00 2001 From: mashehu Date: Fri, 26 Jul 2024 16:37:19 +0200 Subject: [PATCH 378/737] Revert "use path.walk correctly" This reverts commit 5186ac71523c0fe16978c3857325365f02d1093b. --- nf_core/components/components_command.py | 8 ++++---- nf_core/modules/modules_json.py | 4 ++-- nf_core/pipelines/lint/merge_markers.py | 2 +- nf_core/pipelines/lint/modules_structure.py | 2 +- nf_core/pipelines/lint/pipeline_todos.py | 2 +- nf_core/synced_repo.py | 2 +- 6 files changed, 10 insertions(+), 10 deletions(-) diff --git a/nf_core/components/components_command.py b/nf_core/components/components_command.py index 5cde00ef6..91e14d060 100644 --- a/nf_core/components/components_command.py +++ b/nf_core/components/components_command.py @@ -87,7 +87,7 @@ def get_components_clone_modules(self) -> List[str]: component_base_path = Path(self.directory, self.default_subworkflows_path) return [ str(Path(directory).relative_to(component_base_path)) - for directory, _, files in Path(component_base_path).walk() + for directory, _, files in Path.walk(component_base_path) if "main.nf" in files ] @@ -126,7 +126,7 @@ def clear_component_dir(self, component_name: str, component_dir: Union[str, Pat try: shutil.rmtree(component_dir) # remove all empty directories - for dir_path, dir_names, filenames in Path(self.directory).walk(top_down=False): + for dir_path, dir_names, filenames in Path.walk(Path(self.directory), top_down=False): if not dir_names and not filenames: try: dir_path.rmdir() @@ -157,7 +157,7 @@ def components_from_repo(self, install_dir: str) -> List[str]: return [ str(Path(dir_path).relative_to(repo_dir)) - for dir_path, _, files in Path(repo_dir).walk() + for dir_path, _, files in Path.walk(repo_dir) if "main.nf" in files ] @@ -202,7 +202,7 @@ def check_modules_structure(self) -> None: """ if self.repo_type == "pipeline": 
wrong_location_modules: List[Path] = [] - for directory, _, files in Path(self.directory, "modules").walk(): + for directory, _, files in Path.walk(Path(self.directory, "modules")): if "main.nf" in files: module_path = Path(directory).relative_to(Path(self.directory, "modules")) parts = module_path.parts diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 3f4fb46f2..9f17add6a 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -134,7 +134,7 @@ def get_component_names_from_repo( repo_url, [ str(component_name.relative_to(directory / modules_repo.repo_path)) - for component_name, _, file_names in Path(directory / modules_repo.repo_path).walk() + for component_name, _, file_names in Path.walk(directory / modules_repo.repo_path) if "main.nf" in file_names ], modules_repo.repo_path, @@ -1189,7 +1189,7 @@ def components_with_repos(): self.modules_dir, modules_repo.repo_path, ) - for dir_name, _, _ in repo_url_path.walk(): + for dir_name, _, _ in Path.walk(repo_url_path): if component_type == "modules": if len(Path(directory).parts) > 1: # The module name is TOOL/SUBTOOL paths_in_directory.append(str(Path(*Path(dir_name).parts[-2:]))) diff --git a/nf_core/pipelines/lint/merge_markers.py b/nf_core/pipelines/lint/merge_markers.py index 4e2f73fb8..208c9f4bc 100644 --- a/nf_core/pipelines/lint/merge_markers.py +++ b/nf_core/pipelines/lint/merge_markers.py @@ -42,7 +42,7 @@ def merge_markers(self): with open(Path(self.wf_path, ".gitignore"), encoding="latin1") as fh: for line in fh: ignore.append(Path(line.strip().rstrip("/")).name) - for root, dirs, files in Path(self.wf_path).walk(top_down=True): + for root, dirs, files in Path.walk(self.wf_path, top_down=True): # Ignore files for i_base in ignore: i = str(Path(root, i_base)) diff --git a/nf_core/pipelines/lint/modules_structure.py b/nf_core/pipelines/lint/modules_structure.py index 34cdc8956..fd29942ed 100644 --- a/nf_core/pipelines/lint/modules_structure.py 
+++ b/nf_core/pipelines/lint/modules_structure.py @@ -19,7 +19,7 @@ def modules_structure(self): modules/nf-core/modules/TOOL/SUBTOOL """ wrong_location_modules = [] - for directory, _, files in Path(self.wf_path, "modules").walk(): + for directory, _, files in Path.walk(Path(self.wf_path, "modules")): if "main.nf" in files: module_path = Path(directory).relative_to(Path(self.wf_path, "modules")) parts = module_path.parts diff --git a/nf_core/pipelines/lint/pipeline_todos.py b/nf_core/pipelines/lint/pipeline_todos.py index 0a21d0d05..530d85376 100644 --- a/nf_core/pipelines/lint/pipeline_todos.py +++ b/nf_core/pipelines/lint/pipeline_todos.py @@ -43,7 +43,7 @@ def pipeline_todos(self, root_dir=None): with open(Path(root_dir, ".gitignore"), encoding="latin1") as fh: for line in fh: ignore.append(Path(line.strip().rstrip("/")).name) - for root, dirs, files in Path(root_dir).walk(top_down=True): # + for root, dirs, files in Path.walk(root_dir, top_down=True): # Ignore files for i_base in ignore: i = str(Path(root, i_base)) diff --git a/nf_core/synced_repo.py b/nf_core/synced_repo.py index 22cdcd157..8efdd0e48 100644 --- a/nf_core/synced_repo.py +++ b/nf_core/synced_repo.py @@ -467,7 +467,7 @@ def get_avail_components( # Module/Subworkflow directories are characterized by having a 'main.nf' file avail_component_names = [ str(Path(dirpath).relative_to(directory)) - for dirpath, _, files in Path(directory).walk() + for dirpath, _, files in Path.walk(directory) if "main.nf" in files ] return avail_component_names From 1679594f5a52c0926f7f40800fcf43c445f8515a Mon Sep 17 00:00:00 2001 From: mashehu Date: Fri, 26 Jul 2024 16:41:59 +0200 Subject: [PATCH 379/737] switch back to os.walk() --- nf_core/components/components_command.py | 13 ++++++------- nf_core/modules/modules_json.py | 4 ++-- nf_core/pipelines/lint/merge_markers.py | 3 ++- nf_core/pipelines/lint/modules_structure.py | 3 ++- nf_core/pipelines/lint/pipeline_todos.py | 3 ++- nf_core/synced_repo.py | 4 +--- 6 files 
changed, 15 insertions(+), 15 deletions(-) diff --git a/nf_core/components/components_command.py b/nf_core/components/components_command.py index 91e14d060..ada8b532e 100644 --- a/nf_core/components/components_command.py +++ b/nf_core/components/components_command.py @@ -1,5 +1,6 @@ import logging import mmap +import os import shutil from pathlib import Path from typing import Dict, List, Optional, Union @@ -87,7 +88,7 @@ def get_components_clone_modules(self) -> List[str]: component_base_path = Path(self.directory, self.default_subworkflows_path) return [ str(Path(directory).relative_to(component_base_path)) - for directory, _, files in Path.walk(component_base_path) + for directory, _, files in os.walk(component_base_path) if "main.nf" in files ] @@ -126,10 +127,10 @@ def clear_component_dir(self, component_name: str, component_dir: Union[str, Pat try: shutil.rmtree(component_dir) # remove all empty directories - for dir_path, dir_names, filenames in Path.walk(Path(self.directory), top_down=False): + for dir_path, dir_names, filenames in os.walk(Path(self.directory), topdown=False): if not dir_names and not filenames: try: - dir_path.rmdir() + Path(dir_path).rmdir() except OSError: pass else: @@ -156,9 +157,7 @@ def components_from_repo(self, install_dir: str) -> List[str]: raise LookupError(f"Nothing installed from {install_dir} in pipeline") return [ - str(Path(dir_path).relative_to(repo_dir)) - for dir_path, _, files in Path.walk(repo_dir) - if "main.nf" in files + str(Path(dir_path).relative_to(repo_dir)) for dir_path, _, files in os.walk(repo_dir) if "main.nf" in files ] def install_component_files( @@ -202,7 +201,7 @@ def check_modules_structure(self) -> None: """ if self.repo_type == "pipeline": wrong_location_modules: List[Path] = [] - for directory, _, files in Path.walk(Path(self.directory, "modules")): + for directory, _, files in os.walk(Path(self.directory, "modules")): if "main.nf" in files: module_path = 
Path(directory).relative_to(Path(self.directory, "modules")) parts = module_path.parts diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 9f17add6a..4f86bc830 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -134,7 +134,7 @@ def get_component_names_from_repo( repo_url, [ str(component_name.relative_to(directory / modules_repo.repo_path)) - for component_name, _, file_names in Path.walk(directory / modules_repo.repo_path) + for component_name, _, file_names in os.walk(directory / modules_repo.repo_path) if "main.nf" in file_names ], modules_repo.repo_path, @@ -1189,7 +1189,7 @@ def components_with_repos(): self.modules_dir, modules_repo.repo_path, ) - for dir_name, _, _ in Path.walk(repo_url_path): + for dir_name, _, _ in os.walk(repo_url_path): if component_type == "modules": if len(Path(directory).parts) > 1: # The module name is TOOL/SUBTOOL paths_in_directory.append(str(Path(*Path(dir_name).parts[-2:]))) diff --git a/nf_core/pipelines/lint/merge_markers.py b/nf_core/pipelines/lint/merge_markers.py index 208c9f4bc..1c3d70a76 100644 --- a/nf_core/pipelines/lint/merge_markers.py +++ b/nf_core/pipelines/lint/merge_markers.py @@ -1,5 +1,6 @@ import fnmatch import logging +import os from pathlib import Path import nf_core.utils @@ -42,7 +43,7 @@ def merge_markers(self): with open(Path(self.wf_path, ".gitignore"), encoding="latin1") as fh: for line in fh: ignore.append(Path(line.strip().rstrip("/")).name) - for root, dirs, files in Path.walk(self.wf_path, top_down=True): + for root, dirs, files in os.walk(self.wf_path, topdown=True): # Ignore files for i_base in ignore: i = str(Path(root, i_base)) diff --git a/nf_core/pipelines/lint/modules_structure.py b/nf_core/pipelines/lint/modules_structure.py index fd29942ed..9d9b4c9fc 100644 --- a/nf_core/pipelines/lint/modules_structure.py +++ b/nf_core/pipelines/lint/modules_structure.py @@ -1,4 +1,5 @@ import logging +import os from pathlib import Path log 
= logging.getLogger(__name__) @@ -19,7 +20,7 @@ def modules_structure(self): modules/nf-core/modules/TOOL/SUBTOOL """ wrong_location_modules = [] - for directory, _, files in Path.walk(Path(self.wf_path, "modules")): + for directory, _, files in os.walk(Path(self.wf_path, "modules")): if "main.nf" in files: module_path = Path(directory).relative_to(Path(self.wf_path, "modules")) parts = module_path.parts diff --git a/nf_core/pipelines/lint/pipeline_todos.py b/nf_core/pipelines/lint/pipeline_todos.py index 530d85376..0535069f9 100644 --- a/nf_core/pipelines/lint/pipeline_todos.py +++ b/nf_core/pipelines/lint/pipeline_todos.py @@ -1,5 +1,6 @@ import fnmatch import logging +import os from pathlib import Path log = logging.getLogger(__name__) @@ -43,7 +44,7 @@ def pipeline_todos(self, root_dir=None): with open(Path(root_dir, ".gitignore"), encoding="latin1") as fh: for line in fh: ignore.append(Path(line.strip().rstrip("/")).name) - for root, dirs, files in Path.walk(root_dir, top_down=True): + for root, dirs, files in os.walk(root_dir, topdown=True): # Ignore files for i_base in ignore: i = str(Path(root, i_base)) diff --git a/nf_core/synced_repo.py b/nf_core/synced_repo.py index 8efdd0e48..e2a76ccae 100644 --- a/nf_core/synced_repo.py +++ b/nf_core/synced_repo.py @@ -466,9 +466,7 @@ def get_avail_components( directory = self.subworkflows_dir # Module/Subworkflow directories are characterized by having a 'main.nf' file avail_component_names = [ - str(Path(dirpath).relative_to(directory)) - for dirpath, _, files in Path.walk(directory) - if "main.nf" in files + str(Path(dirpath).relative_to(directory)) for dirpath, _, files in os.walk(directory) if "main.nf" in files ] return avail_component_names From 3c0433e68fc33be6dd35dfbb88b3989e103b6ad6 Mon Sep 17 00:00:00 2001 From: mashehu Date: Fri, 26 Jul 2024 22:43:37 +0200 Subject: [PATCH 380/737] fix sync --- nf_core/modules/modules_json.py | 2 +- nf_core/pipelines/create/create.py | 5 ++--- nf_core/pipelines/sync.py | 7 
++----- nf_core/utils.py | 2 ++ 4 files changed, 7 insertions(+), 9 deletions(-) diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 4f86bc830..39d70b7d7 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -133,7 +133,7 @@ def get_component_names_from_repo( components = ( repo_url, [ - str(component_name.relative_to(directory / modules_repo.repo_path)) + str(Path(component_name).relative_to(directory / modules_repo.repo_path)) for component_name, _, file_names in os.walk(directory / modules_repo.repo_path) if "main.nf" in file_names ], diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 15eed46dc..9b64b5344 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -53,7 +53,7 @@ def __init__( no_git: bool = False, force: bool = False, outdir: Optional[Union[Path, str]] = None, - template_config: Optional[Union[str, CreateConfig, Path]] = None, + template_config: Optional[CreateConfig] = None, organisation: str = "nf-core", from_config_file: bool = False, default_branch: Optional[str] = None, @@ -67,7 +67,7 @@ def __init__( _, config_yml = nf_core.utils.load_tools_config(outdir if outdir else Path().cwd()) # Obtain a CreateConfig object from `.nf-core.yml` config file if config_yml is not None and getattr(config_yml, "template", None) is not None: - self.config = CreateConfig(**config_yml["template"]) + self.config = CreateConfig(**config_yml["template"].model_dump()) else: raise UserWarning("The template configuration was not provided in '.nf-core.yml'.") except (FileNotFoundError, UserWarning): @@ -261,7 +261,6 @@ def obtain_jinja_params_dict(self, features_to_skip: List[str], pipeline_dir: Un def init_pipeline(self): """Creates the nf-core pipeline.""" - # Make the new pipeline self.render_template() diff --git a/nf_core/pipelines/sync.py b/nf_core/pipelines/sync.py index 741bdd751..efc7212b4 100644 --- 
a/nf_core/pipelines/sync.py +++ b/nf_core/pipelines/sync.py @@ -270,13 +270,10 @@ def make_template_pipeline(self): try: nf_core.pipelines.create.create.PipelineCreate( - name=self.wf_config["manifest.name"].strip('"').strip("'"), - description=self.wf_config["manifest.description"].strip('"').strip("'"), - version=self.wf_config["manifest.version"].strip('"').strip("'"), + outdir=str(self.pipeline_dir), + from_config_file=True, no_git=True, force=True, - outdir=str(self.pipeline_dir), - author=self.wf_config["manifest.author"].strip('"').strip("'"), ).init_pipeline() except Exception as err: # Reset to where you were to prevent git getting messed up. diff --git a/nf_core/utils.py b/nf_core/utils.py index 0beb8c736..44eafca3c 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1053,6 +1053,8 @@ class NFCoreTemplateConfig(BaseModel): is_nfcore: Optional[bool] = None def __getitem__(self, item: str) -> Any: + if self is None: + return None return getattr(self, item) def get(self, item: str, default: Any = None) -> Any: From 4d6930be2da27623f5e1644785cb6bcf1a994d74 Mon Sep 17 00:00:00 2001 From: mashehu Date: Fri, 26 Jul 2024 22:46:57 +0200 Subject: [PATCH 381/737] fix types --- nf_core/pipelines/create/create.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 9b64b5344..d5c230e04 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -53,7 +53,7 @@ def __init__( no_git: bool = False, force: bool = False, outdir: Optional[Union[Path, str]] = None, - template_config: Optional[CreateConfig] = None, + template_config: Optional[Union[CreateConfig, str, Path]] = None, organisation: str = "nf-core", from_config_file: bool = False, default_branch: Optional[str] = None, From 26b1cd13c36bc2d1234b8f97e6141bf9c0c82c5c Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 29 Jul 2024 11:44:02 +0200 Subject: [PATCH 382/737] set force true in sync to 
create a template file --- nf_core/pipelines/sync.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/nf_core/pipelines/sync.py b/nf_core/pipelines/sync.py index efc7212b4..fced35dc2 100644 --- a/nf_core/pipelines/sync.py +++ b/nf_core/pipelines/sync.py @@ -263,8 +263,12 @@ def make_template_pipeline(self): logging.getLogger("nf_core.pipelines.create").setLevel(logging.ERROR) assert self.config_yml_path is not None assert self.config_yml is not None + # Re-write the template yaml info from .nf-core.yml config - if getattr(self.config_yml, "template", None) is not None: + if self.config_yml.template is not None: + # Set force true in config to overwrite existing files + + self.config_yml.template.force = True with open(self.config_yml_path, "w") as config_path: yaml.safe_dump(self.config_yml.model_dump(), config_path) @@ -275,6 +279,14 @@ def make_template_pipeline(self): no_git=True, force=True, ).init_pipeline() + + # set force to false to avoid overwriting files in the future + if self.config_yml.template is not None: + # Set force true in config to overwrite existing files + self.config_yml.template.force = False + with open(self.config_yml_path, "w") as config_path: + yaml.safe_dump(self.config_yml.model_dump(), config_path) + except Exception as err: # Reset to where you were to prevent git getting messed up. 
self.repo.git.reset("--hard") From 59a3eb38c2f7603ed63c73ea6c1589a9b73a95c1 Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 29 Jul 2024 12:33:14 +0200 Subject: [PATCH 383/737] fix missing initialization in download.py --- nf_core/pipelines/download.py | 10 ++++++---- nf_core/utils.py | 14 ++++++++------ 2 files changed, 14 insertions(+), 10 deletions(-) diff --git a/nf_core/pipelines/download.py b/nf_core/pipelines/download.py index 797909636..704fe91b2 100644 --- a/nf_core/pipelines/download.py +++ b/nf_core/pipelines/download.py @@ -11,7 +11,7 @@ import textwrap from datetime import datetime from pathlib import Path -from typing import List, Optional, Tuple +from typing import Any, Dict, List, Optional, Tuple from zipfile import ZipFile import git @@ -24,6 +24,7 @@ from packaging.version import Version import nf_core +import nf_core.modules.modules_utils import nf_core.pipelines.list import nf_core.utils from nf_core.synced_repo import RemoteProgressbar, SyncedRepo @@ -131,6 +132,7 @@ def __init__( self.compress_type = compress_type self.force = force self.platform = platform + self.fullname: Optional[str] = None # if flag is not specified, do not assume deliberate choice and prompt config inclusion interactively. # this implies that non-interactive "no" choice is only possible implicitly (e.g. with --platform or if prompt is suppressed by !stderr.is_interactive). # only alternative would have been to make it a parameter with argument, e.g. -d="yes" or -d="no". 
@@ -161,8 +163,8 @@ def __init__( # allows to specify a container library / registry or a respective mirror to download images from self.parallel_downloads = parallel_downloads - self.wf_revisions = {} - self.wf_branches = {} + self.wf_revisions = [] + self.wf_branches: Dict[str, Any] = {} self.wf_sha = {} self.wf_download_url = {} self.nf_config = {} @@ -339,7 +341,7 @@ def prompt_pipeline_name(self): stderr.print("Specify the name of a nf-core pipeline or a GitHub repository name (user/repo).") self.pipeline = nf_core.utils.prompt_remote_pipeline_name(self.wfs) - def prompt_revision(self): + def prompt_revision(self) -> None: """ Prompt for pipeline revision / branch Prompt user for revision tag if '--revision' was not set diff --git a/nf_core/utils.py b/nf_core/utils.py index 44eafca3c..80324fc9a 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -903,7 +903,9 @@ def prompt_remote_pipeline_name(wfs): raise AssertionError(f"Not able to find pipeline '{pipeline}'") -def prompt_pipeline_release_branch(wf_releases, wf_branches, multiple=False): +def prompt_pipeline_release_branch( + wf_releases: List[Dict[str, Any]], wf_branches: Dict[str, Any], multiple: bool = False +) -> tuple[Any, list[str]]: """Prompt for pipeline release / branch Args: @@ -912,18 +914,18 @@ def prompt_pipeline_release_branch(wf_releases, wf_branches, multiple=False): multiple (bool): Allow selection of multiple releases & branches (for Seqera Platform) Returns: - choice (str): Selected release / branch name + choice (questionary.Choice or bool): Selected release / branch or False if no releases / branches available """ # Prompt user for release tag, tag_set will contain all available. 
- choices = [] - tag_set = [] + choices: List[questionary.Choice] = [] + tag_set: List[str] = [] # Releases if len(wf_releases) > 0: for tag in map(lambda release: release.get("tag_name"), wf_releases): tag_display = [("fg:ansiblue", f"{tag} "), ("class:choice-default", "[release]")] choices.append(questionary.Choice(title=tag_display, value=tag)) - tag_set.append(tag) + tag_set.append(str(tag)) # Branches for branch in wf_branches.keys(): @@ -932,7 +934,7 @@ def prompt_pipeline_release_branch(wf_releases, wf_branches, multiple=False): tag_set.append(branch) if len(choices) == 0: - return False + return [], [] if multiple: return ( From fec536fd1ece3ce042a7fa32c9f05a7ddaa6ad06 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Matthias=20H=C3=B6rtenhuber?= Date: Mon, 29 Jul 2024 13:35:40 +0200 Subject: [PATCH 384/737] Apply suggestions from code review --- CHANGELOG.md | 2 +- nf_core/commands_pipelines.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2c47c1b88..15e92781c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,7 +13,7 @@ - Fix linting fail on nfcore_external_java_deps if nf_schema is used ([#2976](https://github.com/nf-core/tools/pull/2976)) - Conda module linting: Include package name in log file ([#3014](https://github.com/nf-core/tools/pull/3014)) -- Rrestructure pipeline tests and move pipeline linting into subfolder ([#3070](https://github.com/nf-core/tools/pull/3070)) +- Restructure pipeline tests and move pipeline linting into subfolder ([#3070](https://github.com/nf-core/tools/pull/3070)) ### Download diff --git a/nf_core/commands_pipelines.py b/nf_core/commands_pipelines.py index c586534b2..4b6fa75f3 100644 --- a/nf_core/commands_pipelines.py +++ b/nf_core/commands_pipelines.py @@ -284,7 +284,7 @@ def pipelines_sync(ctx, directory, from_branch, pull_request, github_repository, Sync a pipeline [cyan i]TEMPLATE[/] branch with the nf-core template. 
To keep nf-core pipelines up to date with improvements in the main - template, we use a method of w that uses a special + template, we use a method of synchronisation that uses a special git branch called [cyan i]TEMPLATE[/]. This command updates the [cyan i]TEMPLATE[/] branch with the latest version of From 7f03f9e807c537c448f75d69002625b0768b4931 Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 29 Jul 2024 14:12:24 +0200 Subject: [PATCH 385/737] fix mypy warnings --- nf_core/pipelines/create/utils.py | 12 +++++++++--- nf_core/pipelines/create_logo.py | 6 +++--- 2 files changed, 12 insertions(+), 6 deletions(-) diff --git a/nf_core/pipelines/create/utils.py b/nf_core/pipelines/create/utils.py index 88994c936..e62ef328e 100644 --- a/nf_core/pipelines/create/utils.py +++ b/nf_core/pipelines/create/utils.py @@ -124,10 +124,16 @@ def compose(self) -> ComposeResult: @on(Input.Submitted) def show_invalid_reasons(self, event: Union[Input.Changed, Input.Submitted]) -> None: """Validate the text input and show errors if invalid.""" - if not event.validation_result.is_valid: - self.query_one(".validation_msg").update("\n".join(event.validation_result.failure_descriptions)) + val_msg = self.query_one(".validation_msg") + if not isinstance(val_msg, Static): + raise ValueError("Validation message not found.") + + if event.validation_result is not None and not event.validation_result.is_valid: + # check that val_msg is instance of Static + if isinstance(val_msg, Static): + val_msg.update("\n".join(event.validation_result.failure_descriptions)) else: - self.query_one(".validation_msg").update("") + val_msg.update("") class ValidateConfig(Validator): diff --git a/nf_core/pipelines/create_logo.py b/nf_core/pipelines/create_logo.py index f49e98e93..6619b910b 100644 --- a/nf_core/pipelines/create_logo.py +++ b/nf_core/pipelines/create_logo.py @@ -1,6 +1,6 @@ import logging from pathlib import Path -from typing import Union +from typing import Optional, Union from PIL import Image, 
ImageDraw, ImageFont @@ -59,11 +59,11 @@ def create_logo( return logo_path # cache file cache_path = Path(NFCORE_CACHE_DIR, "logo", cache_name) - img = None + img: Optional[Image.Image] = None if cache_path.is_file(): log.debug(f"Logo already exists in cache at: {cache_path}. Reusing this file.") img = Image.open(str(cache_path)) - if not img: + if img is None: log.debug(f"Creating logo for {text}") # make sure the figure fits the text From a4c829fb909f060f3c5d35486a7b293c2161757f Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 29 Jul 2024 12:40:04 +0200 Subject: [PATCH 386/737] add option to skip code linters --- .../create-test-lint-wf-template.yml | 5 + nf_core/pipeline-template/.gitpod.yml | 3 +- nf_core/pipelines/create/create.py | 8 + nf_core/pipelines/create/custompipeline.py | 16 + tests/__snapshots__/test_create_app.ambr | 511 +++++++++--------- tests/data/pipeline_create_template_skip.yml | 1 + tests/test_create.py | 14 +- 7 files changed, 296 insertions(+), 262 deletions(-) diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index b823ca72d..cfa0a5007 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -41,6 +41,7 @@ jobs: - "template_skip_github_badges.yml" - "template_skip_igenomes.yml" - "template_skip_ci.yml" + - "template_skip_code_linters.yml" runner: # use the runner given by the input if it is dispatched manually, run on github if it is a rerun or on self-hosted by default - ${{ github.event.inputs.runners || github.run_number > 1 && 'ubuntu-latest' || 'self-hosted' }} @@ -102,6 +103,10 @@ jobs: run: | printf "org: my-prefix\nskip: nf_core_configs" > create-test-lint-wf/template_skip_nf_core_configs.yml + - name: Create template skip code_linters + run: | + printf "org: my-prefix\nskip: code_linters" > create-test-lint-wf/template_skip_code_linters.yml + # Create a pipeline from the template - name: create a 
pipeline from the template ${{ matrix.TEMPLATE }} run: | diff --git a/nf_core/pipeline-template/.gitpod.yml b/nf_core/pipeline-template/.gitpod.yml index 105a1821a..30e85ed97 100644 --- a/nf_core/pipeline-template/.gitpod.yml +++ b/nf_core/pipeline-template/.gitpod.yml @@ -10,8 +10,9 @@ tasks: vscode: extensions: # based on nf-core.nf-core-extensionpack + #{%- if code_linters -%} - esbenp.prettier-vscode # Markdown/CommonMark linting and style checking for Visual Studio Code - - EditorConfig.EditorConfig # override user/workspace settings with settings found in .editorconfig files + - EditorConfig.EditorConfig # override user/workspace settings with settings found in .editorconfig files{% endif %} - Gruntfuggly.todo-tree # Display TODO and FIXME in a tree view in the activity bar - mechatroner.rainbow-csv # Highlight columns in csv files in different colors # - nextflow.nextflow # Nextflow syntax highlighting diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 27ca8ac84..42e4a6ad7 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -94,6 +94,13 @@ def __init__( ".github/workflows/awstest.yml", ".github/workflows/release-announcements.yml", ], + "code_linters": [ + ".editorconfig", + ".pre-commit-config.yaml", + ".prettierignore", + ".prettierrc.yml", + ".github/workflows/fix-linting.yml", + ], } # Get list of files we're skipping with the supplied skip keys self.skip_paths = set(sp for k in skip_paths for sp in skippable_paths[k]) @@ -202,6 +209,7 @@ def obtain_jinja_params_dict(self, features_to_skip, pipeline_dir): "github_badges": {"file": False, "content": True}, "igenomes": {"file": True, "content": True}, "nf_core_configs": {"file": False, "content": True}, + "code_linters": {"file": True, "content": True}, } # Set the parameters for the jinja template diff --git a/nf_core/pipelines/create/custompipeline.py b/nf_core/pipelines/create/custompipeline.py index 7d460db65..7a94ab228 
100644 --- a/nf_core/pipelines/create/custompipeline.py +++ b/nf_core/pipelines/create/custompipeline.py @@ -41,6 +41,16 @@ For more information about nf-core configuration profiles, see the [nf-core/configs repository](https://github.com/nf-core/configs) """ +markdown_code_linters = """ +Pipelines include code linters, these linters will check the formatting of your code. +Linters will check JSON, YAML, Python and others. +The available code linters are: + +- pre-commit (https://pre-commit.com/): used to run all code-linters on every PR and on ever commit if you run `pre-commit install` to install it in your local repository. +- editor-config (https://github.com/editorconfig-checker/editorconfig-checker): checks rules such as indentation or trailing spaces. +- prettier (https://github.com/prettier/prettier): enforces a consistent style (indentation, quoting, line length, etc). +""" + class CustomPipeline(Screen): """Select if the pipeline will use genomic data.""" @@ -80,6 +90,12 @@ def compose(self) -> ComposeResult: "The pipeline will include configuration profiles containing custom parameters requried to run nf-core pipelines at different institutions", "nf_core_configs", ), + PipelineFeature( + markdown_code_linters, + "Use code linters", + "The pipeline will include code linters and CI tests to lint your code: pre-commit, editor-config and prettier.", + "code_linters", + ), classes="features-container", ) yield Center( diff --git a/tests/__snapshots__/test_create_app.ambr b/tests/__snapshots__/test_create_app.ambr index 2ad077258..5e5b005de 100644 --- a/tests/__snapshots__/test_create_app.ambr +++ b/tests/__snapshots__/test_create_app.ambr @@ -851,257 +851,257 @@ font-weight: 700; } - .terminal-2971485804-matrix { + .terminal-2778615119-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2971485804-title { + .terminal-2778615119-title { font-size: 18px; font-weight: bold; 
font-family: arial; } - .terminal-2971485804-r1 { fill: #c5c8c6 } - .terminal-2971485804-r2 { fill: #e3e3e3 } - .terminal-2971485804-r3 { fill: #989898 } - .terminal-2971485804-r4 { fill: #e1e1e1 } - .terminal-2971485804-r5 { fill: #4ebf71;font-weight: bold } - .terminal-2971485804-r6 { fill: #1e1e1e } - .terminal-2971485804-r7 { fill: #0178d4 } - .terminal-2971485804-r8 { fill: #454a50 } - .terminal-2971485804-r9 { fill: #e2e2e2 } - .terminal-2971485804-r10 { fill: #808080 } - .terminal-2971485804-r11 { fill: #e2e3e3;font-weight: bold } - .terminal-2971485804-r12 { fill: #000000 } - .terminal-2971485804-r13 { fill: #e4e4e4 } - .terminal-2971485804-r14 { fill: #14191f } - .terminal-2971485804-r15 { fill: #507bb3 } - .terminal-2971485804-r16 { fill: #dde6ed;font-weight: bold } - .terminal-2971485804-r17 { fill: #001541 } - .terminal-2971485804-r18 { fill: #7ae998 } - .terminal-2971485804-r19 { fill: #0a180e;font-weight: bold } - .terminal-2971485804-r20 { fill: #008139 } - .terminal-2971485804-r21 { fill: #fea62b;font-weight: bold } - .terminal-2971485804-r22 { fill: #a7a9ab } - .terminal-2971485804-r23 { fill: #e2e3e3 } + .terminal-2778615119-r1 { fill: #c5c8c6 } + .terminal-2778615119-r2 { fill: #e3e3e3 } + .terminal-2778615119-r3 { fill: #989898 } + .terminal-2778615119-r4 { fill: #e1e1e1 } + .terminal-2778615119-r5 { fill: #4ebf71;font-weight: bold } + .terminal-2778615119-r6 { fill: #1e1e1e } + .terminal-2778615119-r7 { fill: #0178d4 } + .terminal-2778615119-r8 { fill: #454a50 } + .terminal-2778615119-r9 { fill: #e2e2e2 } + .terminal-2778615119-r10 { fill: #808080 } + .terminal-2778615119-r11 { fill: #e2e3e3;font-weight: bold } + .terminal-2778615119-r12 { fill: #000000 } + .terminal-2778615119-r13 { fill: #e4e4e4 } + .terminal-2778615119-r14 { fill: #14191f } + .terminal-2778615119-r15 { fill: #507bb3 } + .terminal-2778615119-r16 { fill: #dde6ed;font-weight: bold } + .terminal-2778615119-r17 { fill: #001541 } + .terminal-2778615119-r18 { fill: #7ae998 } + 
.terminal-2778615119-r19 { fill: #0a180e;font-weight: bold } + .terminal-2778615119-r20 { fill: #008139 } + .terminal-2778615119-r21 { fill: #fea62b;font-weight: bold } + .terminal-2778615119-r22 { fill: #a7a9ab } + .terminal-2778615119-r23 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Template features - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use reference The pipeline will  Hide help  - ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - use a copy of the  - most common  - reference genome  - files from  - iGenomes - - - Nf-core pipelines are configured to use a copy of the most common  - reference genome files. - - By selecting this option, your pipeline will include a configuration - file specifying the paths to these files. - - The required code to use these files will also be included in the  - template. When the pipeline user provides an appropriate genome key, - the pipeline will automatically download the required reference ▂▂ - files. 
- - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github CI The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        testsinclude several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - GitHub actions for - Continuous  - Integration (CI)  - testing▄▄ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github badgesThe README.md file Show help  - ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - will include  - GitHub badges - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Continue  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode  q Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Template features + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use reference The pipeline will  Hide help  + ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + use a copy of the  + most common  + reference genome  + files from  + iGenomes + + + Nf-core pipelines are configured to use a copy of the most common  + reference genome files. + + By selecting this option, your pipeline will include a configuration + file specifying the paths to these files. + + The required code to use these files will also be included in the  + template. When the pipeline user provides an appropriate genome key, + the pipeline will automatically download the required reference ▂▂ + files. 
+ + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github CI The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        testsinclude several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + GitHub actions for + Continuous  + Integration (CI)  + testing + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github badgesThe README.md file Show help  + ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + will include  + GitHub badges + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Back  Continue  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  d Toggle dark mode  q Quit  @@ -2233,254 +2233,255 @@ font-weight: 700; } - .terminal-1445899181-matrix { + .terminal-763408100-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1445899181-title { + .terminal-763408100-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1445899181-r1 { fill: #c5c8c6 } - .terminal-1445899181-r2 { fill: #e3e3e3 } - .terminal-1445899181-r3 { fill: #989898 } - .terminal-1445899181-r4 { fill: #e1e1e1 } - .terminal-1445899181-r5 { fill: #4ebf71;font-weight: bold } - .terminal-1445899181-r6 { fill: #1e1e1e } - .terminal-1445899181-r7 { fill: #507bb3 } - .terminal-1445899181-r8 { fill: #e2e2e2 } - .terminal-1445899181-r9 { fill: #808080 } - .terminal-1445899181-r10 { fill: #dde6ed;font-weight: bold } - .terminal-1445899181-r11 { fill: #001541 } - .terminal-1445899181-r12 { fill: #454a50 } - .terminal-1445899181-r13 { fill: #7ae998 } - .terminal-1445899181-r14 { fill: #e2e3e3;font-weight: bold } - .terminal-1445899181-r15 { fill: #0a180e;font-weight: bold } - .terminal-1445899181-r16 { fill: #000000 } - .terminal-1445899181-r17 { fill: #008139 } - .terminal-1445899181-r18 { fill: #fea62b;font-weight: bold } - .terminal-1445899181-r19 { fill: #a7a9ab } - .terminal-1445899181-r20 { fill: #e2e3e3 } + .terminal-763408100-r1 { fill: #c5c8c6 } + .terminal-763408100-r2 { fill: #e3e3e3 } + .terminal-763408100-r3 { fill: #989898 } + .terminal-763408100-r4 { 
fill: #e1e1e1 } + .terminal-763408100-r5 { fill: #4ebf71;font-weight: bold } + .terminal-763408100-r6 { fill: #1e1e1e } + .terminal-763408100-r7 { fill: #507bb3 } + .terminal-763408100-r8 { fill: #e2e2e2 } + .terminal-763408100-r9 { fill: #808080 } + .terminal-763408100-r10 { fill: #dde6ed;font-weight: bold } + .terminal-763408100-r11 { fill: #001541 } + .terminal-763408100-r12 { fill: #14191f } + .terminal-763408100-r13 { fill: #454a50 } + .terminal-763408100-r14 { fill: #7ae998 } + .terminal-763408100-r15 { fill: #e2e3e3;font-weight: bold } + .terminal-763408100-r16 { fill: #0a180e;font-weight: bold } + .terminal-763408100-r17 { fill: #000000 } + .terminal-763408100-r18 { fill: #008139 } + .terminal-763408100-r19 { fill: #fea62b;font-weight: bold } + .terminal-763408100-r20 { fill: #a7a9ab } + .terminal-763408100-r21 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Template features - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use reference The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - use a copy of the  - most common  - reference genome  - files from iGenomes - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github CI tests        The pipeline will  Show help  - ▁▁▁▁▁▁▁▁include several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - GitHub actions for  - Continuous  - Integration (CI)  - testing - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github badgesThe README.md file  Show help  - ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - will include GitHub - badges - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add configuration The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        filesinclude ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - configuration  - profiles containing - custom parameters  - requried to run  - 
nf-core pipelines  - at different  - institutions - - - - - - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Continue  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode  q Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Template features + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use reference The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + use a copy of the  + most common  + reference genome  + files from  + iGenomes + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github CI The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        testsinclude several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + GitHub actions for + Continuous  + Integration (CI)  + testing + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github badgesThe README.md file Show help  + ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + will include  + GitHub badges + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add configuration         The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        filesinclude ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + configuration  + profiles  + containing custom  + parameters  + requried to run  + nf-core pipelines  + at different ▁▁ + institutions + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use code lintersThe pipeline will  Show help  + ▁▁▁▁▁▁▁▁include code ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Back  Continue  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  d Toggle dark mode  q Quit  diff --git a/tests/data/pipeline_create_template_skip.yml b/tests/data/pipeline_create_template_skip.yml index ed498cb73..d9ef405c5 100644 --- a/tests/data/pipeline_create_template_skip.yml +++ b/tests/data/pipeline_create_template_skip.yml @@ -11,3 +11,4 @@ skip_features: - github_badges - igenomes - nf_core_configs + - code_linters diff --git a/tests/test_create.py b/tests/test_create.py index 313b6f535..896a8e7ad 100644 --- a/tests/test_create.py +++ b/tests/test_create.py @@ -106,14 +106,16 @@ def test_pipeline_creation_with_yml_skip(self, tmp_path): pipeline.init_pipeline() # 
Check pipeline template yml has been dumped to `.nf-core.yml` and matches input - assert not os.path.exists(os.path.join(pipeline.outdir, "pipeline_template.yml")) - assert os.path.exists(os.path.join(pipeline.outdir, ".nf-core.yml")) - with open(os.path.join(pipeline.outdir, ".nf-core.yml")) as fh: + assert not os.path.exists(Path(pipeline.outdir / "pipeline_template.yml")) + assert os.path.exists(Path(pipeline.outdir / ".nf-core.yml")) + with open(Path(pipeline.outdir / ".nf-core.yml")) as fh: nfcore_yml = yaml.safe_load(fh) assert "template" in nfcore_yml assert yaml.safe_load(PIPELINE_TEMPLATE_YML.read_text()).items() <= nfcore_yml["template"].items() # Check that some of the skipped files are not present - assert not os.path.exists(os.path.join(pipeline.outdir, "CODE_OF_CONDUCT.md")) - assert not os.path.exists(os.path.join(pipeline.outdir, ".github")) - assert not os.path.exists(os.path.join(pipeline.outdir, "conf", "igenomes.config")) + assert not os.path.exists(Path(pipeline.outdir / "CODE_OF_CONDUCT.md")) + assert not os.path.exists(Path(pipeline.outdir / ".github")) + assert not os.path.exists(Path(pipeline.outdir / "conf" / "igenomes.config")) + assert not os.path.exists(Path(pipeline.outdir / ".editorconfig")) + assert not os.path.exists(Path(pipeline.outdir / ".editorconfig")) From d2020f511c0f5992b6919e6832d3bcd5cd3bfe75 Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 29 Jul 2024 14:19:36 +0200 Subject: [PATCH 387/737] set default value for self.directory and more dir->directory switches --- nf_core/commands_modules.py | 38 ++++++++++++------------ nf_core/commands_subworkflows.py | 34 ++++++++++----------- nf_core/components/components_command.py | 12 ++++---- nf_core/components/list.py | 4 ++- nf_core/modules/list.py | 11 ++++++- nf_core/subworkflows/list.py | 11 ++++++- tests/modules/test_list.py | 18 +++++------ tests/subworkflows/test_list.py | 2 +- 8 files changed, 73 insertions(+), 57 deletions(-) diff --git a/nf_core/commands_modules.py 
b/nf_core/commands_modules.py index b93bd7bcb..a88930505 100644 --- a/nf_core/commands_modules.py +++ b/nf_core/commands_modules.py @@ -17,7 +17,7 @@ def modules_list_remote(ctx, keywords, json): try: module_list = ModuleList( - None, + ".", True, ctx.obj["modules_repo_url"], ctx.obj["modules_repo_branch"], @@ -29,7 +29,7 @@ def modules_list_remote(ctx, keywords, json): sys.exit(1) -def modules_list_local(ctx, keywords, json, dir): # pylint: disable=redefined-builtin +def modules_list_local(ctx, keywords, json, directory): # pylint: disable=redefined-builtin """ List modules installed locally in a pipeline """ @@ -37,7 +37,7 @@ def modules_list_local(ctx, keywords, json, dir): # pylint: disable=redefined-b try: module_list = ModuleList( - dir, + directory, False, ctx.obj["modules_repo_url"], ctx.obj["modules_repo_branch"], @@ -49,7 +49,7 @@ def modules_list_local(ctx, keywords, json, dir): # pylint: disable=redefined-b sys.exit(1) -def modules_install(ctx, tool, dir, prompt, force, sha): +def modules_install(ctx, tool, directory, prompt, force, sha): """ Install DSL2 modules within a pipeline. @@ -59,7 +59,7 @@ def modules_install(ctx, tool, dir, prompt, force, sha): try: module_install = ModuleInstall( - dir, + directory, force, prompt, sha, @@ -118,7 +118,7 @@ def modules_update( sys.exit(1) -def modules_patch(ctx, tool, dir, remove): +def modules_patch(ctx, tool, directory, remove): """ Create a patch file for minor changes in a module @@ -129,7 +129,7 @@ def modules_patch(ctx, tool, dir, remove): try: module_patch = ModulePatch( - dir, + directory, ctx.obj["modules_repo_url"], ctx.obj["modules_repo_branch"], ctx.obj["modules_repo_no_pull"], @@ -143,7 +143,7 @@ def modules_patch(ctx, tool, dir, remove): sys.exit(1) -def modules_remove(ctx, dir, tool): +def modules_remove(ctx, directory, tool): """ Remove a module from a pipeline. 
""" @@ -151,7 +151,7 @@ def modules_remove(ctx, dir, tool): try: module_remove = ModuleRemove( - dir, + directory, ctx.obj["modules_repo_url"], ctx.obj["modules_repo_branch"], ctx.obj["modules_repo_no_pull"], @@ -165,7 +165,7 @@ def modules_remove(ctx, dir, tool): def modules_create( ctx, tool, - dir, + directory, author, label, meta, @@ -199,7 +199,7 @@ def modules_create( # Run function try: module_create = ModuleCreate( - dir, + directory, tool, author, label, @@ -219,7 +219,7 @@ def modules_create( sys.exit(1) -def modules_test(ctx, tool, dir, no_prompts, update, once, profile): +def modules_test(ctx, tool, directory, no_prompts, update, once, profile): """ Run nf-test for a module. @@ -231,7 +231,7 @@ def modules_test(ctx, tool, dir, no_prompts, update, once, profile): module_tester = ComponentsTest( component_type="modules", component_name=tool, - directory=dir, + directory=directory, no_prompts=no_prompts, update=update, once=once, @@ -246,7 +246,7 @@ def modules_test(ctx, tool, dir, no_prompts, update, once, profile): sys.exit(1) -def modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed, sort_by, fix_version): +def modules_lint(ctx, tool, directory, registry, key, all, fail_warned, local, passed, sort_by, fix_version): """ Lint one or more modules in a directory. @@ -261,7 +261,7 @@ def modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed, try: module_lint = ModuleLint( - dir, + directory, fail_warned=fail_warned, registry=ctx.params["registry"], remote_url=ctx.obj["modules_repo_url"], @@ -290,7 +290,7 @@ def modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed, sys.exit(1) -def modules_info(ctx, tool, dir): +def modules_info(ctx, tool, directory): """ Show developer usage information about a given module. 
@@ -306,7 +306,7 @@ def modules_info(ctx, tool, dir): try: module_info = ModuleInfo( - dir, + directory, tool, ctx.obj["modules_repo_url"], ctx.obj["modules_repo_branch"], @@ -318,7 +318,7 @@ def modules_info(ctx, tool, dir): sys.exit(1) -def modules_bump_versions(ctx, tool, dir, all, show_all): +def modules_bump_versions(ctx, tool, directory, all, show_all): """ Bump versions for one or more modules in a clone of the nf-core/modules repo. @@ -328,7 +328,7 @@ def modules_bump_versions(ctx, tool, dir, all, show_all): try: version_bumper = ModuleVersionBumper( - dir, + directory, ctx.obj["modules_repo_url"], ctx.obj["modules_repo_branch"], ctx.obj["modules_repo_no_pull"], diff --git a/nf_core/commands_subworkflows.py b/nf_core/commands_subworkflows.py index a3abce3f8..36fd998a6 100644 --- a/nf_core/commands_subworkflows.py +++ b/nf_core/commands_subworkflows.py @@ -10,7 +10,7 @@ stdout = rich.console.Console(force_terminal=rich_force_colors()) -def subworkflows_create(ctx, subworkflow, dir, author, force, migrate_pytest): +def subworkflows_create(ctx, subworkflow, directory, author, force, migrate_pytest): """ Create a new subworkflow from the nf-core template. @@ -24,7 +24,7 @@ def subworkflows_create(ctx, subworkflow, dir, author, force, migrate_pytest): # Run function try: - subworkflow_create = SubworkflowCreate(dir, subworkflow, author, force, migrate_pytest) + subworkflow_create = SubworkflowCreate(directory, subworkflow, author, force, migrate_pytest) subworkflow_create.create() except UserWarning as e: log.critical(e) @@ -34,7 +34,7 @@ def subworkflows_create(ctx, subworkflow, dir, author, force, migrate_pytest): sys.exit(1) -def subworkflows_test(ctx, subworkflow, dir, no_prompts, update, once, profile): +def subworkflows_test(ctx, subworkflow, directory, no_prompts, update, once, profile): """ Run nf-test for a subworkflow. 
@@ -46,7 +46,7 @@ def subworkflows_test(ctx, subworkflow, dir, no_prompts, update, once, profile): sw_tester = ComponentsTest( component_type="subworkflows", component_name=subworkflow, - directory=dir, + directory=directory, no_prompts=no_prompts, update=update, once=once, @@ -69,7 +69,7 @@ def subworkflows_list_remote(ctx, keywords, json): try: subworkflow_list = SubworkflowList( - None, + ".", True, ctx.obj["modules_repo_url"], ctx.obj["modules_repo_branch"], @@ -82,7 +82,7 @@ def subworkflows_list_remote(ctx, keywords, json): sys.exit(1) -def subworkflows_list_local(ctx, keywords, json, dir): # pylint: disable=redefined-builtin +def subworkflows_list_local(ctx, keywords, json, directory): # pylint: disable=redefined-builtin """ List subworkflows installed locally in a pipeline """ @@ -90,7 +90,7 @@ def subworkflows_list_local(ctx, keywords, json, dir): # pylint: disable=redefi try: subworkflow_list = SubworkflowList( - dir, + directory, False, ctx.obj["modules_repo_url"], ctx.obj["modules_repo_branch"], @@ -102,7 +102,7 @@ def subworkflows_list_local(ctx, keywords, json, dir): # pylint: disable=redefi sys.exit(1) -def subworkflows_lint(ctx, subworkflow, dir, registry, key, all, fail_warned, local, passed, sort_by): +def subworkflows_lint(ctx, subworkflow, directory, registry, key, all, fail_warned, local, passed, sort_by): """ Lint one or more subworkflows in a directory. @@ -117,7 +117,7 @@ def subworkflows_lint(ctx, subworkflow, dir, registry, key, all, fail_warned, lo try: subworkflow_lint = SubworkflowLint( - dir, + directory, fail_warned=fail_warned, registry=ctx.params["registry"], remote_url=ctx.obj["modules_repo_url"], @@ -145,7 +145,7 @@ def subworkflows_lint(ctx, subworkflow, dir, registry, key, all, fail_warned, lo sys.exit(1) -def subworkflows_info(ctx, subworkflow, dir): +def subworkflows_info(ctx, subworkflow, directory): """ Show developer usage information about a given subworkflow. 
@@ -161,7 +161,7 @@ def subworkflows_info(ctx, subworkflow, dir): try: subworkflow_info = SubworkflowInfo( - dir, + directory, subworkflow, ctx.obj["modules_repo_url"], ctx.obj["modules_repo_branch"], @@ -173,7 +173,7 @@ def subworkflows_info(ctx, subworkflow, dir): sys.exit(1) -def subworkflows_install(ctx, subworkflow, dir, prompt, force, sha): +def subworkflows_install(ctx, subworkflow, directory, prompt, force, sha): """ Install DSL2 subworkflow within a pipeline. @@ -183,7 +183,7 @@ def subworkflows_install(ctx, subworkflow, dir, prompt, force, sha): try: subworkflow_install = SubworkflowInstall( - dir, + directory, force, prompt, sha, @@ -199,7 +199,7 @@ def subworkflows_install(ctx, subworkflow, dir, prompt, force, sha): sys.exit(1) -def subworkflows_remove(ctx, dir, subworkflow): +def subworkflows_remove(ctx, directory, subworkflow): """ Remove a subworkflow from a pipeline. """ @@ -207,7 +207,7 @@ def subworkflows_remove(ctx, dir, subworkflow): try: module_remove = SubworkflowRemove( - dir, + directory, ctx.obj["modules_repo_url"], ctx.obj["modules_repo_branch"], ctx.obj["modules_repo_no_pull"], @@ -221,7 +221,7 @@ def subworkflows_remove(ctx, dir, subworkflow): def subworkflows_update( ctx, subworkflow, - dir, + directory, force, prompt, sha, @@ -240,7 +240,7 @@ def subworkflows_update( try: subworkflow_install = SubworkflowUpdate( - dir, + directory, force, prompt, sha, diff --git a/nf_core/components/components_command.py b/nf_core/components/components_command.py index ada8b532e..69f067f8f 100644 --- a/nf_core/components/components_command.py +++ b/nf_core/components/components_command.py @@ -22,9 +22,7 @@ class ComponentCommand: def __init__( self, component_type: str, - directory: Union[ - str, Path - ], # TODO: This is actually None sometimes (e.g. 
in test_modules_list_remote), need to rewrite the logic here to handle these cases elegantly, for example setting a default path + directory: Union[str, Path] = ".", remote_url: Optional[str] = None, branch: Optional[str] = None, no_pull: bool = False, @@ -34,11 +32,11 @@ def __init__( """ Initialise the ComponentClass object """ - self.component_type = component_type - self.directory = directory + self.component_type: str = component_type + self.directory: Path = Path(directory) self.modules_repo = ModulesRepo(remote_url, branch, no_pull, hide_progress) - self.hide_progress = hide_progress - self.no_prompts = no_prompts + self.hide_progress: bool = hide_progress + self.no_prompts: bool = no_prompts self._configure_repo_and_paths() def _configure_repo_and_paths(self, nf_dir_req: bool = True) -> None: diff --git a/nf_core/components/list.py b/nf_core/components/list.py index 0a6b65446..ded035c89 100644 --- a/nf_core/components/list.py +++ b/nf_core/components/list.py @@ -25,7 +25,9 @@ def __init__( super().__init__(component_type, pipeline_dir, remote_url, branch, no_pull) self.remote = remote - def list_components(self, keywords: Optional[List[str]] = None, print_json=False) -> Union[rich.table.Table, str]: + def list_components( + self, keywords: Optional[List[str]] = None, print_json: bool = False + ) -> Union[rich.table.Table, str]: keywords = keywords or [] """ Get available modules/subworkflows names from GitHub tree for repo diff --git a/nf_core/modules/list.py b/nf_core/modules/list.py index c7dc943f9..68da570f6 100644 --- a/nf_core/modules/list.py +++ b/nf_core/modules/list.py @@ -1,4 +1,6 @@ import logging +from pathlib import Path +from typing import Optional, Union from nf_core.components.list import ComponentList @@ -6,5 +8,12 @@ class ModuleList(ComponentList): - def __init__(self, pipeline_dir, remote=True, remote_url=None, branch=None, no_pull=False): + def __init__( + self, + pipeline_dir: Union[str, Path] = ".", + remote: bool = True, + remote_url: 
Optional[str] = None, + branch: Optional[str] = None, + no_pull: bool = False, + ): super().__init__("modules", pipeline_dir, remote, remote_url, branch, no_pull) diff --git a/nf_core/subworkflows/list.py b/nf_core/subworkflows/list.py index ddf144ee0..9e84d6cbe 100644 --- a/nf_core/subworkflows/list.py +++ b/nf_core/subworkflows/list.py @@ -1,4 +1,6 @@ import logging +from pathlib import Path +from typing import Optional, Union from nf_core.components.list import ComponentList @@ -6,5 +8,12 @@ class SubworkflowList(ComponentList): - def __init__(self, pipeline_dir, remote=True, remote_url=None, branch=None, no_pull=False): + def __init__( + self, + pipeline_dir: Union[str, Path] = ".", + remote: bool = True, + remote_url: Optional[str] = None, + branch: Optional[str] = None, + no_pull: bool = False, + ) -> None: super().__init__("subworkflows", pipeline_dir, remote, remote_url, branch, no_pull) diff --git a/tests/modules/test_list.py b/tests/modules/test_list.py index fdbb61f69..a170f6d6f 100644 --- a/tests/modules/test_list.py +++ b/tests/modules/test_list.py @@ -13,7 +13,7 @@ class TestModulesCreate(TestModules): def test_modules_list_remote(self): """Test listing available modules""" - mods_list = nf_core.modules.list.ModuleList(None, remote=True) + mods_list = nf_core.modules.list.ModuleList(remote=True) listed_mods = mods_list.list_components() console = Console(record=True) console.print(listed_mods) @@ -22,9 +22,7 @@ def test_modules_list_remote(self): def test_modules_list_remote_gitlab(self): """Test listing the modules in the remote gitlab repo""" - mods_list = nf_core.modules.list.ModuleList( - None, remote=True, remote_url=GITLAB_URL, branch=GITLAB_DEFAULT_BRANCH - ) + mods_list = nf_core.modules.list.ModuleList(remote=True, remote_url=GITLAB_URL, branch=GITLAB_DEFAULT_BRANCH) listed_mods = mods_list.list_components() console = Console(record=True) console.print(listed_mods) @@ -64,22 +62,22 @@ def test_modules_install_gitlab_and_list_pipeline(self): 
def test_modules_list_local_json(self): """Test listing locally installed modules as JSON""" mods_list = nf_core.modules.list.ModuleList(self.pipeline_dir, remote=False) - listed_mods = mods_list.list_components(print_json=True) + listed_mods = str(mods_list.list_components(print_json=True)) listed_mods = json.loads(listed_mods) assert "fastqc" in listed_mods assert "multiqc" in listed_mods def test_modules_list_remote_json(self): """Test listing available modules as JSON""" - mods_list = nf_core.modules.list.ModuleList(None, remote=True) - listed_mods = mods_list.list_components(print_json=True) + mods_list = nf_core.modules.list.ModuleList(remote=True) + listed_mods: str = str(mods_list.list_components(print_json=True)) listed_mods = json.loads(listed_mods) assert "fastqc" in listed_mods assert "multiqc" in listed_mods def test_modules_list_with_one_keyword(self): """Test listing available modules with one keyword""" - mods_list = nf_core.modules.list.ModuleList(None, remote=True) + mods_list = nf_core.modules.list.ModuleList(remote=True) listed_mods = mods_list.list_components(keywords=["qc"]) console = Console(record=True) console.print(listed_mods) @@ -88,7 +86,7 @@ def test_modules_list_with_one_keyword(self): def test_modules_list_with_keywords(self): """Test listing available modules with multiple keywords""" - mods_list = nf_core.modules.list.ModuleList(None, remote=True) + mods_list = nf_core.modules.list.ModuleList(remote=True) listed_mods = mods_list.list_components(keywords=["fastq", "qc"]) console = Console(record=True) console.print(listed_mods) @@ -97,7 +95,7 @@ def test_modules_list_with_keywords(self): def test_modules_list_with_unused_keyword(self): """Test listing available modules with an unused keyword""" - mods_list = nf_core.modules.list.ModuleList(None, remote=True) + mods_list = nf_core.modules.list.ModuleList(remote=True) with self.assertLogs(level="INFO") as log: listed_mods = 
mods_list.list_components(keywords=["you_will_never_find_me"]) self.assertIn("No available", log.output[0]) diff --git a/tests/subworkflows/test_list.py b/tests/subworkflows/test_list.py index 5e4e6feb0..aa1c4de7a 100644 --- a/tests/subworkflows/test_list.py +++ b/tests/subworkflows/test_list.py @@ -9,7 +9,7 @@ class TestSubworkflowsList(TestSubworkflows): def test_subworkflows_list_remote(self): """Test listing available subworkflows""" - subworkflows_list = nf_core.subworkflows.SubworkflowList(None, remote=True) + subworkflows_list = nf_core.subworkflows.SubworkflowList(remote=True) listed_subworkflows = subworkflows_list.list_components() console = Console(record=True) console.print(listed_subworkflows) From ebdb3981fec56235822f07d19dca7bc2c206e7ee Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 29 Jul 2024 14:26:37 +0200 Subject: [PATCH 388/737] remove unnecessary checks --- nf_core/components/components_command.py | 2 +- nf_core/components/components_utils.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/components/components_command.py b/nf_core/components/components_command.py index 69f067f8f..7d43b387f 100644 --- a/nf_core/components/components_command.py +++ b/nf_core/components/components_command.py @@ -94,7 +94,7 @@ def has_valid_directory(self) -> bool: """Check that we were given a pipeline or clone of nf-core/modules""" if self.repo_type == "modules": return True - if self.directory is None or not Path(self.directory).exists(): + if not Path(self.directory).exists(): log.error(f"Could not find directory: {self.directory}") return False main_nf = Path(self.directory, "main.nf") diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index 3d64dc1bb..632ae7b2a 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -26,7 +26,7 @@ def get_repo_info(directory: Path, use_prompt: Optional[bool] = True) -> Tuple[P """ # Verify that the pipeline 
dir exists - if directory is None or not Path(directory).is_dir(): + if not Path(directory).is_dir(): raise UserWarning(f"Could not find directory: {directory}") # Try to find the root directory From 15a9071886fda734d182a2ae10a2dd78074c3d48 Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 29 Jul 2024 14:33:20 +0200 Subject: [PATCH 389/737] remove unnecessary Path conversions --- nf_core/components/components_command.py | 12 +++++------- nf_core/components/components_test.py | 2 +- nf_core/components/lint/__init__.py | 12 +++++------- nf_core/modules/bump_versions.py | 2 +- 4 files changed, 12 insertions(+), 16 deletions(-) diff --git a/nf_core/components/components_command.py b/nf_core/components/components_command.py index 7d43b387f..4e739fd6b 100644 --- a/nf_core/components/components_command.py +++ b/nf_core/components/components_command.py @@ -49,9 +49,7 @@ def _configure_repo_and_paths(self, nf_dir_req: bool = True) -> None: """ try: if self.directory: - self.directory, self.repo_type, self.org = get_repo_info( - Path(self.directory), use_prompt=not self.no_prompts - ) + self.directory, self.repo_type, self.org = get_repo_info(self.directory, use_prompt=not self.no_prompts) else: self.repo_type = None self.org = "" @@ -94,13 +92,13 @@ def has_valid_directory(self) -> bool: """Check that we were given a pipeline or clone of nf-core/modules""" if self.repo_type == "modules": return True - if not Path(self.directory).exists(): + if not self.directory.exists(): log.error(f"Could not find directory: {self.directory}") return False main_nf = Path(self.directory, "main.nf") nf_config = Path(self.directory, "nextflow.config") if not main_nf.exists() and not nf_config.exists(): - if Path(self.directory).resolve().parts[-1].startswith("nf-core"): + if self.directory.resolve().parts[-1].startswith("nf-core"): raise UserWarning(f"Could not find a 'main.nf' or 'nextflow.config' file in '{self.directory}'") log.warning(f"Could not find a 'main.nf' or 'nextflow.config' file 
in '{self.directory}'") return True @@ -125,7 +123,7 @@ def clear_component_dir(self, component_name: str, component_dir: Union[str, Pat try: shutil.rmtree(component_dir) # remove all empty directories - for dir_path, dir_names, filenames in os.walk(Path(self.directory), topdown=False): + for dir_path, dir_names, filenames in os.walk(self.directory, topdown=False): if not dir_names and not filenames: try: Path(dir_path).rmdir() @@ -258,7 +256,7 @@ def check_patch_paths(self, patch_path: Path, module_name: str) -> None: ): modules_json.modules_json["repos"][self.modules_repo.remote_url]["modules"][ self.modules_repo.repo_path - ][module_name]["patch"] = str(patch_path.relative_to(Path(self.directory).resolve())) + ][module_name]["patch"] = str(patch_path.relative_to(self.directory.resolve())) modules_json.dump() def check_if_in_include_stmts(self, component_path: str) -> Dict[str, List[Dict[str, Union[int, str]]]]: diff --git a/nf_core/components/components_test.py b/nf_core/components/components_test.py index f9b891004..57c0034ba 100644 --- a/nf_core/components/components_test.py +++ b/nf_core/components/components_test.py @@ -93,7 +93,7 @@ def run(self) -> None: os.environ["NFT_DIFF_ARGS"] = ( "--line-numbers --expand-tabs=2" # taken from https://code.askimed.com/nf-test/docs/assertions/snapshots/#snapshot-differences ) - with nf_core.utils.set_wd(Path(self.directory)): + with nf_core.utils.set_wd(self.directory): self.check_snapshot_stability() if len(self.errors) > 0: errors = "\n - ".join(self.errors) diff --git a/nf_core/components/lint/__init__.py b/nf_core/components/lint/__init__.py index ddf5e1e16..be6b225a0 100644 --- a/nf_core/components/lint/__init__.py +++ b/nf_core/components/lint/__init__.py @@ -104,7 +104,7 @@ def __init__( repo_url, Path(self.directory, self.component_type, org, comp), self.repo_type, - Path(self.directory), + self.directory, self.component_type, ) ) @@ -121,20 +121,20 @@ def __init__( None, Path(local_component_dir, comp), 
self.repo_type, - Path(self.directory), + self.directory, self.component_type, remote_component=False, ) for comp in self.get_local_components() ] - self.config = nf_core.utils.fetch_wf_config(Path(self.directory), cache_config=True) + self.config = nf_core.utils.fetch_wf_config(self.directory, cache_config=True) elif self.repo_type == "modules": component_dir = Path( self.directory, self.default_modules_path if self.component_type == "modules" else self.default_subworkflows_path, ) self.all_remote_components = [ - NFCoreComponent(m, None, component_dir / m, self.repo_type, Path(self.directory), self.component_type) + NFCoreComponent(m, None, component_dir / m, self.repo_type, self.directory, self.component_type) for m in self.get_components_clone_modules() ] self.all_local_components = [] @@ -142,9 +142,7 @@ def __init__( raise LookupError(f"No {self.component_type} in '{self.component_type}' directory") # This could be better, perhaps glob for all nextflow.config files in? - self.config = nf_core.utils.fetch_wf_config( - Path(self.directory).joinpath("tests", "config"), cache_config=True - ) + self.config = nf_core.utils.fetch_wf_config(self.directory / "tests" / "config", cache_config=True) if registry is None: self.registry = self.config.get("docker.registry", "quay.io") diff --git a/nf_core/modules/bump_versions.py b/nf_core/modules/bump_versions.py index 6546cccc9..2d8854e3c 100644 --- a/nf_core/modules/bump_versions.py +++ b/nf_core/modules/bump_versions.py @@ -76,7 +76,7 @@ def bump_versions( ) # Get list of all modules - _, nfcore_modules = nf_core.modules.modules_utils.get_installed_modules(Path(self.directory)) + _, nfcore_modules = nf_core.modules.modules_utils.get_installed_modules(self.directory) # Load the .nf-core.yml config _, self.tools_config = nf_core.utils.load_tools_config(self.directory) From fa00b1b61e0315177bee09c3d2e8f486cd4ae86e Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 29 Jul 2024 14:42:13 +0200 Subject: [PATCH 390/737] fix types 
for 3.8 --- nf_core/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/utils.py b/nf_core/utils.py index 80324fc9a..86ed09555 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -905,7 +905,7 @@ def prompt_remote_pipeline_name(wfs): def prompt_pipeline_release_branch( wf_releases: List[Dict[str, Any]], wf_branches: Dict[str, Any], multiple: bool = False -) -> tuple[Any, list[str]]: +) -> Tuple[Any, List[str]]: """Prompt for pipeline release / branch Args: From e7f06bea17e9c29bb40a1b7f8e54818ab308a264 Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 29 Jul 2024 14:46:10 +0200 Subject: [PATCH 391/737] limit `component` to type string --- nf_core/components/install.py | 9 +++++---- nf_core/modules/modules_json.py | 14 ++++++-------- 2 files changed, 11 insertions(+), 12 deletions(-) diff --git a/nf_core/components/install.py b/nf_core/components/install.py index f2849f85b..9b42ba77d 100644 --- a/nf_core/components/install.py +++ b/nf_core/components/install.py @@ -1,7 +1,10 @@ import logging import os from pathlib import Path -from typing import List, Optional, Union +from typing import TYPE_CHECKING, List, Optional, Union + +if TYPE_CHECKING: + from nf_core.modules.modules_repo import ModulesRepo import questionary from rich.console import Console @@ -180,9 +183,7 @@ def install_included_components(self, subworkflow_dir): self.component_type = original_component_type self.installed_by = original_installed - def collect_and_verify_name( - self, component: Optional[str], modules_repo: "nf_core.modules.modules_repo.ModulesRepo" - ) -> str: + def collect_and_verify_name(self, component: Optional[str], modules_repo: ModulesRepo) -> str: """ Collect component name. Check that the supplied name is an available module/subworkflow. 
diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 39d70b7d7..ff4922dc0 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -355,7 +355,7 @@ def determine_branches_and_shas( # Clean up the modules/subworkflows we were unable to find the sha for for component in sb_local: - log.debug(f"Moving {component_type[:-1]} '{Path(install_dir, str(component))}' to 'local' directory") + log.debug(f"Moving {component_type[:-1]} '{Path(install_dir, component)}' to 'local' directory") self.move_component_to_local(component_type, component, str(install_dir)) for component in dead_components: @@ -398,13 +398,13 @@ def find_correct_commit_sha( return commit_sha return None - def move_component_to_local(self, component_type: str, component: Union[str, Path], repo_name: str): + def move_component_to_local(self, component_type: str, component: str, repo_name: str): """ Move a module/subworkflow to the 'local' directory Args: component_type (str): The type of component, either 'modules' or 'subworkflows' - component (Union[str,Path]): The name of the module/subworkflow + component (str): The name of the module/subworkflow repo_name (str): The name of the repository the module resides in """ if component_type == "modules": @@ -418,7 +418,7 @@ def move_component_to_local(self, component_type: str, component: Union[str, Pat if not local_dir.exists(): local_dir.mkdir() - to_name = str(component) + to_name = component # Check if there is already a subdirectory with the name while (local_dir / to_name).exists(): # Add a time suffix to the path to make it unique @@ -1086,9 +1086,7 @@ def get_installed_by_entries(self, component_type, name): return installed_by_entries - def get_component_branch( - self, component_type: str, component: Union[str, Path], repo_url: str, install_dir: str - ) -> str: + def get_component_branch(self, component_type: str, component: str, repo_url: str, install_dir: str) -> str: """ Gets the branch 
from which the module/subworkflow was installed @@ -1101,7 +1099,7 @@ def get_component_branch( self.load() assert self.modules_json is not None # mypy try: - branch = self.modules_json["repos"][repo_url][component_type][install_dir][str(component)]["branch"] + branch = self.modules_json["repos"][repo_url][component_type][install_dir][component]["branch"] except (KeyError, TypeError): branch = None if branch is None: From ae7d912369630daaf2e43ddcd636bdb8d8cfb2d4 Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 29 Jul 2024 14:59:04 +0200 Subject: [PATCH 392/737] handle more type warnings --- nf_core/modules/modules_json.py | 10 ++++++++-- nf_core/modules/modules_repo.py | 2 +- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index ff4922dc0..19649a57c 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -42,7 +42,7 @@ class ModulesJson: An object for handling a 'modules.json' file in a pipeline """ - def __init__(self, pipeline_dir: Union[str, Path]): + def __init__(self, pipeline_dir: Union[str, Path]) -> None: """ Initialise the object. @@ -130,6 +130,10 @@ def get_component_names_from_repo( names = [] for repo_url in repos: modules_repo = ModulesRepo(repo_url) + if modules_repo is None: + raise UserWarning(f"Could not find module repository for '{repo_url}' in '{directory}'") + if modules_repo.repo_path is None: + raise UserWarning(f"Could not find module repository path for '{repo_url}' in '{directory}'") components = ( repo_url, [ @@ -195,6 +199,8 @@ def get_pipeline_module_repositories( # Verify that there is a directory corresponding the remote nrepo_name = ModulesRepo(nrepo_remote).repo_path + if nrepo_name is None: + raise UserWarning(f"Could not find the repository name for '{nrepo_remote}'") if not (directory / nrepo_name).exists(): log.info( "The provided remote does not seem to correspond to a local directory. 
" @@ -416,7 +422,7 @@ def move_component_to_local(self, component_type: str, component: str, repo_name current_path = directory / repo_name / component local_dir = directory / "local" if not local_dir.exists(): - local_dir.mkdir() + local_dir.mkdir(parents=True) to_name = component # Check if there is already a subdirectory with the name diff --git a/nf_core/modules/modules_repo.py b/nf_core/modules/modules_repo.py index 5b5020548..357fc49cc 100644 --- a/nf_core/modules/modules_repo.py +++ b/nf_core/modules/modules_repo.py @@ -40,7 +40,7 @@ def __init__( branch: Optional[str] = None, no_pull: bool = False, hide_progress: bool = False, - ): + ) -> None: """ Initializes the object and clones the git repository if it is not already present """ From d7587dc1577602ab26d53dd34a438aedfb4c8a1e Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 29 Jul 2024 16:18:32 +0200 Subject: [PATCH 393/737] fix import errors, handle outdir as string --- nf_core/components/info.py | 17 +++++++++-------- nf_core/components/install.py | 10 +++++----- nf_core/pipelines/create/create.py | 3 +-- nf_core/utils.py | 12 ++++++++++-- 4 files changed, 25 insertions(+), 17 deletions(-) diff --git a/nf_core/components/info.py b/nf_core/components/info.py index 726586b5b..98f8be527 100644 --- a/nf_core/components/info.py +++ b/nf_core/components/info.py @@ -1,7 +1,7 @@ import logging import os from pathlib import Path -from typing import Dict, Optional, Union +from typing import Dict, List, Optional, Tuple, Union import questionary import yaml @@ -83,14 +83,14 @@ def __init__( self.modules_json = None self.component = self.init_mod_name(component_name) - def _configure_repo_and_paths(self, nf_dir_req=False): + def _configure_repo_and_paths(self, nf_dir_req=False) -> None: """ Override the default with nf_dir_req set to False to allow info to be run from anywhere and still return remote info """ return super()._configure_repo_and_paths(nf_dir_req) - def init_mod_name(self, component): + def 
init_mod_name(self, component: Optional[str]) -> str: """ Makes sure that we have a module/subworkflow name before proceeding. @@ -106,9 +106,10 @@ def init_mod_name(self, component): components = self.get_components_clone_modules() elif self.repo_type == "pipeline": assert self.modules_json is not None # mypy - all_components = self.modules_json.get_all_components(self.component_type).get( - self.modules_repo.remote_url, {} - ) + all_components: List[Tuple[str, str]] = self.modules_json.get_all_components( + self.component_type + ).get(self.modules_repo.remote_url, []) + components = [ component if directory == self.modules_repo.repo_path else f"{directory}/{component}" for directory, component in all_components @@ -169,7 +170,7 @@ def get_component_info(self): return self.generate_component_info_help() - def get_local_yaml(self): + def get_local_yaml(self) -> Optional[Dict]: """Attempt to get the meta.yml file from a locally installed module/subworkflow. Returns: @@ -316,7 +317,7 @@ def generate_component_info_help(self): ) # Print include statement - if self.local_path: + if self.local_path and self.modules_repo.repo_path is not None: install_folder = Path(self.directory, self.component_type, self.modules_repo.repo_path) component_name = "_".join(self.component.upper().split("/")) renderables.append( diff --git a/nf_core/components/install.py b/nf_core/components/install.py index 9b42ba77d..a7be737bf 100644 --- a/nf_core/components/install.py +++ b/nf_core/components/install.py @@ -1,16 +1,14 @@ import logging import os from pathlib import Path -from typing import TYPE_CHECKING, List, Optional, Union - -if TYPE_CHECKING: - from nf_core.modules.modules_repo import ModulesRepo +from typing import List, Optional, Union import questionary from rich.console import Console from rich.syntax import Syntax import nf_core.components +import nf_core.modules.modules_repo import nf_core.modules.modules_utils import nf_core.utils from nf_core.components.components_command 
import ComponentCommand @@ -183,7 +181,9 @@ def install_included_components(self, subworkflow_dir): self.component_type = original_component_type self.installed_by = original_installed - def collect_and_verify_name(self, component: Optional[str], modules_repo: ModulesRepo) -> str: + def collect_and_verify_name( + self, component: Optional[str], modules_repo: "nf_core.modules.modules_repo.ModulesRepo" + ) -> str: """ Collect component name. Check that the supplied name is an available module/subworkflow. diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index d5c230e04..899507847 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -83,9 +83,8 @@ def __init__( if self.config.outdir is None: self.config.outdir = str(Path.cwd()) - self.jinja_params, skip_paths = self.obtain_jinja_params_dict( - self.config.skip_features or [], Path(self.config.outdir) + self.config.skip_features or [], str(self.config.outdir) ) skippable_paths = { diff --git a/nf_core/utils.py b/nf_core/utils.py index 86ed09555..c1cc7c130 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -30,7 +30,7 @@ import rich.markup import yaml from packaging.version import Version -from pydantic import BaseModel, ValidationError +from pydantic import BaseModel, ValidationError, field_validator from rich.live import Live from rich.spinner import Spinner @@ -1050,10 +1050,18 @@ class NFCoreTemplateConfig(BaseModel): author: Optional[str] = None version: Optional[str] = None force: Optional[bool] = None - outdir: Optional[str] = None + outdir: Optional[Union[str, Path]] = None skip_features: Optional[list] = None is_nfcore: Optional[bool] = None + # convert outdir to str + @field_validator("outdir") + @classmethod + def outdir_to_str(cls, v: Optional[Union[str, Path]]) -> Optional[str]: + if v is not None: + v = str(v) + return v + def __getitem__(self, item: str) -> Any: if self is None: return None From 
a4cf066e3a041be2b05fa67c661f408644bd5327 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Mon, 29 Jul 2024 16:41:19 +0200 Subject: [PATCH 394/737] Apply suggestions from code review MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Matthias Hörtenhuber --- nf_core/pipelines/create/custompipeline.py | 4 ++-- tests/test_create.py | 1 - 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/nf_core/pipelines/create/custompipeline.py b/nf_core/pipelines/create/custompipeline.py index 7a94ab228..0925fb5ba 100644 --- a/nf_core/pipelines/create/custompipeline.py +++ b/nf_core/pipelines/create/custompipeline.py @@ -42,8 +42,8 @@ """ markdown_code_linters = """ -Pipelines include code linters, these linters will check the formatting of your code. -Linters will check JSON, YAML, Python and others. +Pipelines include code linters to check the formatting of your code in order to harmonize code styles between developers. +Linters will check all non-ignored files, e.g., JSON, YAML, Nextlow or Python files in your repository. The available code linters are: - pre-commit (https://pre-commit.com/): used to run all code-linters on every PR and on ever commit if you run `pre-commit install` to install it in your local repository. 
diff --git a/tests/test_create.py b/tests/test_create.py index 896a8e7ad..b4ac89c7d 100644 --- a/tests/test_create.py +++ b/tests/test_create.py @@ -118,4 +118,3 @@ def test_pipeline_creation_with_yml_skip(self, tmp_path): assert not os.path.exists(Path(pipeline.outdir / ".github")) assert not os.path.exists(Path(pipeline.outdir / "conf" / "igenomes.config")) assert not os.path.exists(Path(pipeline.outdir / ".editorconfig")) - assert not os.path.exists(Path(pipeline.outdir / ".editorconfig")) From 419242b931ff228fee0babf63d50313b2fbd6ee9 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 29 Jul 2024 16:46:46 +0200 Subject: [PATCH 395/737] use pathlib instead of os.path --- tests/test_create.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/tests/test_create.py b/tests/test_create.py index b4ac89c7d..920a0f475 100644 --- a/tests/test_create.py +++ b/tests/test_create.py @@ -106,15 +106,15 @@ def test_pipeline_creation_with_yml_skip(self, tmp_path): pipeline.init_pipeline() # Check pipeline template yml has been dumped to `.nf-core.yml` and matches input - assert not os.path.exists(Path(pipeline.outdir / "pipeline_template.yml")) - assert os.path.exists(Path(pipeline.outdir / ".nf-core.yml")) - with open(Path(pipeline.outdir / ".nf-core.yml")) as fh: + assert not (pipeline.outdir / "pipeline_template.yml").exists() + assert (pipeline.outdir / ".nf-core.yml").exists() + with open(pipeline.outdir / ".nf-core.yml") as fh: nfcore_yml = yaml.safe_load(fh) assert "template" in nfcore_yml assert yaml.safe_load(PIPELINE_TEMPLATE_YML.read_text()).items() <= nfcore_yml["template"].items() # Check that some of the skipped files are not present - assert not os.path.exists(Path(pipeline.outdir / "CODE_OF_CONDUCT.md")) - assert not os.path.exists(Path(pipeline.outdir / ".github")) - assert not os.path.exists(Path(pipeline.outdir / "conf" / "igenomes.config")) - assert not os.path.exists(Path(pipeline.outdir / ".editorconfig")) + assert not 
(pipeline.outdir / "CODE_OF_CONDUCT.md").exists() + assert not (pipeline.outdir / ".github").exists() + assert not (pipeline.outdir / "conf" / "igenomes.config").exists() + assert not (pipeline.outdir / ".editorconfig").exists() From f5e865086bda88e28523f0757bf4e71366b228ad Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 29 Jul 2024 16:48:44 +0200 Subject: [PATCH 396/737] update chagelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index b27dce4b5..8eae70a31 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,6 +9,7 @@ - Remove deprecated syntax ([#3046](https://github.com/nf-core/tools/pull/3046)) - Use filename in code block for `params.yml` ([#3055](https://github.com/nf-core/tools/pull/3055)) - Remove release announcement for non nf-core pipelines ([#3072](https://github.com/nf-core/tools/pull/3072)) +- add option to exclude code linters for custom pipeline template ([#3084](https://github.com/nf-core/tools/pull/3084)) ### Linting From 5979b9d1f4ee8fa157eb1798ec3a7226238f00cb Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 29 Jul 2024 17:24:52 +0200 Subject: [PATCH 397/737] add error message and resolve circular import --- nf_core/components/components_utils.py | 2 +- nf_core/components/install.py | 13 +++++++++---- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index 632ae7b2a..9289d9000 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -63,7 +63,7 @@ def get_repo_info(directory: Path, use_prompt: Optional[bool] = True) -> Tuple[P # Check if it's a valid answer if repo_type not in ["pipeline", "modules"]: - raise UserWarning(f"Invalid repository type: '{repo_type}'") + raise UserWarning(f"Invalid repository type: '{repo_type}', must be 'pipeline' or 'modules'") org: str = "" # Check for org if modules repo if repo_type == "modules": diff --git 
a/nf_core/components/install.py b/nf_core/components/install.py index a7be737bf..6130a4cf7 100644 --- a/nf_core/components/install.py +++ b/nf_core/components/install.py @@ -8,7 +8,6 @@ from rich.syntax import Syntax import nf_core.components -import nf_core.modules.modules_repo import nf_core.modules.modules_utils import nf_core.utils from nf_core.components.components_command import ComponentCommand @@ -69,12 +68,18 @@ def install(self, component: str, silent: bool = False) -> bool: # Verify SHA if not self.modules_repo.verify_sha(self.prompt, self.sha): + err_msg = f"SHA '{self.sha}' is not a valid commit SHA for the repository '{self.modules_repo.remote_url}'" + log.error(err_msg) return False # verify self.modules_repo entries: if self.modules_repo is None: + err_msg = "Could not find a valid modules repository." + log.error(err_msg) return False if self.modules_repo.repo_path is None: + err_msg = "Could not find a valid modules repository path." + log.error(err_msg) return False # Check and verify component name @@ -200,10 +205,10 @@ def collect_and_verify_name( # Check that the supplied name is an available module/subworkflow if component and component not in modules_repo.get_avail_components(self.component_type, commit=self.sha): - log.error( - f"{self.component_type[:-1].title()} '{component}' not found in list of available {self.component_type}." 
- ) log.info(f"Use the command 'nf-core {self.component_type} list' to view available software") + raise SystemError( + f"{self.component_type[:-1].title()} '{component}' not found in available {self.component_type}" + ) if not modules_repo.component_exists(component, self.component_type, commit=self.sha): warn_msg = f"{self.component_type[:-1].title()} '{component}' not found in remote '{modules_repo.remote_url}' ({modules_repo.branch})" From 24a3ece90f28b9efe1336a5768b55dc6dc62b184 Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 29 Jul 2024 17:45:45 +0200 Subject: [PATCH 398/737] actually test installation with a correct subworkflow --- tests/subworkflows/test_install.py | 46 ++++++++++++++++-------------- 1 file changed, 25 insertions(+), 21 deletions(-) diff --git a/tests/subworkflows/test_install.py b/tests/subworkflows/test_install.py index af1ad9241..e6ba9439a 100644 --- a/tests/subworkflows/test_install.py +++ b/tests/subworkflows/test_install.py @@ -17,26 +17,6 @@ class TestSubworkflowsInstall(TestSubworkflows): - def test_subworkflow_install_nopipeline(self): - """Test installing a subworkflow - no pipeline given""" - assert self.subworkflow_install.directory is not None - self.subworkflow_install.directory = Path("non_existent_dir") - assert self.subworkflow_install.install("foo") is False - - @with_temporary_folder - def test_subworkflows_install_emptypipeline(self, tmpdir): - """Test installing a subworkflow - empty dir given""" - - Path(tmpdir, "nf-core-pipe").mkdir(exist_ok=True) - self.subworkflow_install.directory = Path(tmpdir, "nf-core-pipe") - with pytest.raises(UserWarning) as excinfo: - self.subworkflow_install.install("foo") - assert "Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value) - - def test_subworkflows_install_nosubworkflow(self): - """Test installing a subworkflow - unrecognised subworkflow given""" - assert self.subworkflow_install.install("foo") is False - def 
test_subworkflows_install_bam_sort_stats_samtools(self): """Test installing a subworkflow - bam_sort_stats_samtools""" assert self.subworkflow_install.install("bam_sort_stats_samtools") is not False @@ -57,6 +37,28 @@ def test_subworkflows_install_bam_sort_stats_samtools(self): assert samtools_idxstats_path.exists() assert samtools_flagstat_path.exists() + def test_subworkflow_install_nopipeline(self): + """Test installing a subworkflow - no pipeline given""" + assert self.subworkflow_install.directory is not None + self.subworkflow_install.directory = Path("non_existent_dir") + assert self.subworkflow_install.install("bam_stats_samtools") is False + + @with_temporary_folder + def test_subworkflows_install_emptypipeline(self, tmpdir): + """Test installing a subworkflow - empty dir given""" + + Path(tmpdir, "nf-core-pipe").mkdir(exist_ok=True) + self.subworkflow_install.directory = Path(tmpdir, "nf-core-pipe") + with pytest.raises(UserWarning) as excinfo: + self.subworkflow_install.install("bam_stats_samtools") + assert "Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value) + + def test_subworkflows_install_nosubworkflow(self): + """Test installing a subworkflow - unrecognised subworkflow given""" + with pytest.raises(SystemError) as excinfo: + self.subworkflow_install.install("foo") + assert "Subworkflow 'foo' not found in available subworkflows" in str(excinfo.value) + def test_subworkflows_install_bam_sort_stats_samtools_twice(self): """Test installing a subworkflow - bam_sort_stats_samtools already there""" self.subworkflow_install.install("bam_sort_stats_samtools") @@ -76,7 +78,9 @@ def test_subworkflows_install_different_branch_fail(self): """Test installing a subworkflow from a different branch""" install_obj = SubworkflowInstall(self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH) # The bam_stats_samtools subworkflow does not exists in the branch-test branch - assert install_obj.install("bam_stats_samtools") is 
False + with pytest.raises(Exception) as excinfo: + install_obj.install("bam_stats_samtools") + assert "Subworkflow 'bam_stats_samtools' not found in available subworkflows" in str(excinfo.value) def test_subworkflows_install_tracking(self): """Test installing a subworkflow and finding the correct entries in installed_by section of modules.json""" From 05def5932e12de9aa8fa2d07d763a54cd552038c Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 29 Jul 2024 22:21:13 +0200 Subject: [PATCH 399/737] fix list and info command --- nf_core/components/components_command.py | 10 ++++------ nf_core/components/lint/__init__.py | 21 +++++++++++++++------ nf_core/components/list.py | 2 +- nf_core/modules/bump_versions.py | 13 ++++++++----- nf_core/subworkflows/list.py | 2 +- nf_core/utils.py | 2 ++ tests/subworkflows/test_list.py | 2 +- 7 files changed, 32 insertions(+), 20 deletions(-) diff --git a/nf_core/components/components_command.py b/nf_core/components/components_command.py index 4e739fd6b..bff246738 100644 --- a/nf_core/components/components_command.py +++ b/nf_core/components/components_command.py @@ -37,6 +37,8 @@ def __init__( self.modules_repo = ModulesRepo(remote_url, branch, no_pull, hide_progress) self.hide_progress: bool = hide_progress self.no_prompts: bool = no_prompts + self.repo_type: Optional[str] = None + self.org: str = "" self._configure_repo_and_paths() def _configure_repo_and_paths(self, nf_dir_req: bool = True) -> None: @@ -50,15 +52,11 @@ def _configure_repo_and_paths(self, nf_dir_req: bool = True) -> None: try: if self.directory: self.directory, self.repo_type, self.org = get_repo_info(self.directory, use_prompt=not self.no_prompts) - else: - self.repo_type = None - self.org = "" - except UserWarning: if nf_dir_req: raise - self.repo_type = None - self.org = "" + except FileNotFoundError: + raise self.default_modules_path = Path("modules", self.org) self.default_tests_path = Path("tests", "modules", self.org) diff --git 
a/nf_core/components/lint/__init__.py b/nf_core/components/lint/__init__.py index be6b225a0..d0f668536 100644 --- a/nf_core/components/lint/__init__.py +++ b/nf_core/components/lint/__init__.py @@ -86,6 +86,11 @@ def __init__( else: self.lint_tests = self.get_all_subworkflow_lint_tests(self.repo_type == "pipeline") + if self.repo_type is None: + raise LookupError( + "Could not determine repository type. Please check the repository type in the nf-core.yml" + ) + if self.repo_type == "pipeline": modules_json = ModulesJson(self.directory) modules_json.check_up_to_date() @@ -128,6 +133,8 @@ def __init__( for comp in self.get_local_components() ] self.config = nf_core.utils.fetch_wf_config(self.directory, cache_config=True) + self._set_registry(registry) + elif self.repo_type == "modules": component_dir = Path( self.directory, @@ -143,16 +150,18 @@ def __init__( # This could be better, perhaps glob for all nextflow.config files in? self.config = nf_core.utils.fetch_wf_config(self.directory / "tests" / "config", cache_config=True) - - if registry is None: - self.registry = self.config.get("docker.registry", "quay.io") - else: - self.registry = registry - log.debug(f"Registry set to {self.registry}") + self._set_registry(registry) def __repr__(self) -> str: return f"ComponentLint({self.component_type}, {self.directory})" + def _set_registry(self, registry) -> None: + if registry is None: + self.registry = self.config.get("docker.registry", "quay.io") + else: + self.registry = registry + log.debug(f"Registry set to {self.registry}") + @staticmethod def get_all_module_lint_tests(is_pipeline): if is_pipeline: diff --git a/nf_core/components/list.py b/nf_core/components/list.py index ded035c89..65a28db71 100644 --- a/nf_core/components/list.py +++ b/nf_core/components/list.py @@ -16,7 +16,7 @@ class ComponentList(ComponentCommand): def __init__( self, component_type: str, - pipeline_dir: Union[str, Path], + pipeline_dir: Union[str, Path] = ".", remote: bool = True, 
remote_url: Optional[str] = None, branch: Optional[str] = None, diff --git a/nf_core/modules/bump_versions.py b/nf_core/modules/bump_versions.py index 2d8854e3c..d98eac7cd 100644 --- a/nf_core/modules/bump_versions.py +++ b/nf_core/modules/bump_versions.py @@ -245,12 +245,15 @@ def bump_module_version(self, module: NFCoreComponent) -> bool: fh.write(content) # change version in environment.yml - with open(str(module.environment_yml)) as fh: + if not module.environment_yml: + log.error(f"Could not read `environment.yml` of {module.component_name} module.") + return False + with open(module.environment_yml) as fh: env_yml = yaml.safe_load(fh) env_yml["dependencies"][0] = re.sub( bioconda_packages[0], f"bioconda::{bioconda_tool_name}={last_ver}", env_yml["dependencies"][0] ) - with open(str(module.environment_yml), "w") as fh: + with open(module.environment_yml, "w") as fh: yaml.dump(env_yml, fh, default_flow_style=False, Dumper=custom_yaml_dumper()) self.updated.append( @@ -271,11 +274,11 @@ def get_bioconda_version(self, module: NFCoreComponent) -> List[str]: """ # Check whether file exists and load it bioconda_packages = [] - try: - with open(str(module.environment_yml)) as fh: + if module.environment_yml is not None and module.environment_yml.exists(): + with open(module.environment_yml) as fh: env_yml = yaml.safe_load(fh) bioconda_packages = env_yml.get("dependencies", []) - except FileNotFoundError: + else: log.error(f"Could not read `environment.yml` of {module.component_name} module.") return bioconda_packages diff --git a/nf_core/subworkflows/list.py b/nf_core/subworkflows/list.py index 9e84d6cbe..5f849a1f9 100644 --- a/nf_core/subworkflows/list.py +++ b/nf_core/subworkflows/list.py @@ -10,7 +10,7 @@ class SubworkflowList(ComponentList): def __init__( self, - pipeline_dir: Union[str, Path] = ".", + pipeline_dir: Union[str, Path], remote: bool = True, remote_url: Optional[str] = None, branch: Optional[str] = None, diff --git a/nf_core/utils.py 
b/nf_core/utils.py index c1cc7c130..a95eea4b6 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1113,6 +1113,8 @@ def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Optional[Path] else: log.debug(f"Could not find a config file in the directory '{directory}'") return Path(directory, CONFIG_PATHS[0]), None + if not Path(config_fn).is_file(): + raise FileNotFoundError(f"No `.nf-core.yml` file found in the directory '{directory}'") with open(str(config_fn)) as fh: tools_config = yaml.safe_load(fh) diff --git a/tests/subworkflows/test_list.py b/tests/subworkflows/test_list.py index aa1c4de7a..1ae8f5fff 100644 --- a/tests/subworkflows/test_list.py +++ b/tests/subworkflows/test_list.py @@ -19,7 +19,7 @@ def test_subworkflows_list_remote(self): def test_subworkflows_list_remote_gitlab(self): """Test listing the subworkflows in the remote gitlab repo""" subworkflows_list = nf_core.subworkflows.SubworkflowList( - None, remote=True, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH + remote=True, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH ) listed_subworkflows = subworkflows_list.list_components() console = Console(record=True) From cce51e3cb06c2e9fbb320ffde026572327eac052 Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 29 Jul 2024 22:35:58 +0200 Subject: [PATCH 400/737] add migrate_pytest option to `nf-core test` command --- nf_core/__main__.py | 20 ++++++++++--- nf_core/commands_modules.py | 50 +++++++++++++++++++++----------- nf_core/commands_subworkflows.py | 39 +++++++++++++------------ 3 files changed, 70 insertions(+), 39 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 3da153ee1..00ae8e2a1 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -1154,11 +1154,17 @@ def command_modules_create( default=None, help="Run tests with a specific profile", ) -def command_modules_test(ctx, tool, dir, no_prompts, update, once, profile): +@click.option( + "--migrate-pytest", + is_flag=True, + 
default=False, + help="Migrate a module with pytest tests to nf-test", +) +def command_modules_test(ctx, tool, dir, no_prompts, update, once, profile, migrate_pytest): """ Run nf-test for a module. """ - modules_test(ctx, tool, dir, no_prompts, update, once, profile) + modules_test(ctx, tool, dir, no_prompts, update, once, profile, migrate_pytest) # nf-core modules lint @@ -1352,11 +1358,17 @@ def command_subworkflows_create(ctx, subworkflow, dir, author, force, migrate_py default=None, help="Run tests with a specific profile", ) -def command_subworkflows_test(ctx, subworkflow, dir, no_prompts, update, once, profile): +@click.option( + "--migrate-pytest", + is_flag=True, + default=False, + help="Migrate a subworkflow with pytest tests to nf-test", +) +def command_subworkflows_test(ctx, subworkflow, dir, no_prompts, update, once, profile, migrate_pytest): """ Run nf-test for a subworkflow. """ - subworkflows_test(ctx, subworkflow, dir, no_prompts, update, once, profile) + subworkflows_test(ctx, subworkflow, dir, no_prompts, update, once, profile, migrate_pytest) # nf-core subworkflows list subcommands diff --git a/nf_core/commands_modules.py b/nf_core/commands_modules.py index 3d96d332b..c65c42d41 100644 --- a/nf_core/commands_modules.py +++ b/nf_core/commands_modules.py @@ -219,7 +219,7 @@ def modules_create( sys.exit(1) -def modules_test(ctx, tool, dir, no_prompts, update, once, profile): +def modules_test(ctx, tool, dir, no_prompts, update, once, profile, migrate_pytest): """ Run nf-test for a module. 
@@ -227,23 +227,39 @@ def modules_test(ctx, tool, dir, no_prompts, update, once, profile): """ from nf_core.components.components_test import ComponentsTest - try: - module_tester = ComponentsTest( - component_type="modules", - component_name=tool, - directory=dir, - no_prompts=no_prompts, - update=update, - once=once, - remote_url=ctx.obj["modules_repo_url"], - branch=ctx.obj["modules_repo_branch"], - verbose=ctx.obj["verbose"], - profile=profile, + if migrate_pytest: + modules_create( + ctx, + tool, + dir, + author="", + label="", + meta=True, + no_meta=False, + force=False, + conda_name=None, + conda_package_version=None, + empty_template=False, + migrate_pytest=migrate_pytest, ) - module_tester.run() - except (UserWarning, LookupError) as e: - log.critical(e) - sys.exit(1) + else: + try: + module_tester = ComponentsTest( + component_type="modules", + component_name=tool, + directory=dir, + no_prompts=no_prompts, + update=update, + once=once, + remote_url=ctx.obj["modules_repo_url"], + branch=ctx.obj["modules_repo_branch"], + verbose=ctx.obj["verbose"], + profile=profile, + ) + module_tester.run() + except (UserWarning, LookupError) as e: + log.critical(e) + sys.exit(1) def modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed, sort_by, fix_version): diff --git a/nf_core/commands_subworkflows.py b/nf_core/commands_subworkflows.py index a3abce3f8..8c3be2bb4 100644 --- a/nf_core/commands_subworkflows.py +++ b/nf_core/commands_subworkflows.py @@ -34,7 +34,7 @@ def subworkflows_create(ctx, subworkflow, dir, author, force, migrate_pytest): sys.exit(1) -def subworkflows_test(ctx, subworkflow, dir, no_prompts, update, once, profile): +def subworkflows_test(ctx, subworkflow, dir, no_prompts, update, once, profile, migrate_pytest): """ Run nf-test for a subworkflow. 
@@ -42,23 +42,26 @@ def subworkflows_test(ctx, subworkflow, dir, no_prompts, update, once, profile): """ from nf_core.components.components_test import ComponentsTest - try: - sw_tester = ComponentsTest( - component_type="subworkflows", - component_name=subworkflow, - directory=dir, - no_prompts=no_prompts, - update=update, - once=once, - remote_url=ctx.obj["modules_repo_url"], - branch=ctx.obj["modules_repo_branch"], - verbose=ctx.obj["verbose"], - profile=profile, - ) - sw_tester.run() - except (UserWarning, LookupError) as e: - log.critical(e) - sys.exit(1) + if migrate_pytest: + subworkflows_create(ctx, subworkflow, dir, None, False, True) + else: + try: + sw_tester = ComponentsTest( + component_type="subworkflows", + component_name=subworkflow, + directory=dir, + no_prompts=no_prompts, + update=update, + once=once, + remote_url=ctx.obj["modules_repo_url"], + branch=ctx.obj["modules_repo_branch"], + verbose=ctx.obj["verbose"], + profile=profile, + ) + sw_tester.run() + except (UserWarning, LookupError) as e: + log.critical(e) + sys.exit(1) def subworkflows_list_remote(ctx, keywords, json): From 640ce03ab4f687e744f2631ba11971db576d7135 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Mon, 29 Jul 2024 20:39:02 +0000 Subject: [PATCH 401/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index b27dce4b5..49a58ff6b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -54,6 +54,7 @@ - Update python:3.12-slim Docker digest to f11725a ([#3071](https://github.com/nf-core/tools/pull/3071)) - Fix number of arguments for pipelines_create within the command_create function ([#3074](https://github.com/nf-core/tools/pull/3074)) - Update python:3.12-slim Docker digest to 740d94a ([#3079](https://github.com/nf-core/tools/pull/3079)) +- Add `--migrate-pytest` option to `nf-core modules test` and `nf-core subworkflows test` commands ([#3085](https://github.com/nf-core/tools/pull/3085)) ## [v2.14.1 - Tantalum Toad - 
Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From 2d4d0799d9827797c82059d187a16932f08c8af2 Mon Sep 17 00:00:00 2001 From: Usman Rashid Date: Tue, 30 Jul 2024 18:43:15 +1200 Subject: [PATCH 402/737] Added process_high_memory to create/lint list --- nf_core/components/create.py | 1 + nf_core/modules/lint/main_nf.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/nf_core/components/create.py b/nf_core/components/create.py index 5d6c411bd..bae648792 100644 --- a/nf_core/components/create.py +++ b/nf_core/components/create.py @@ -236,6 +236,7 @@ def _get_module_structure_components(self): "process_medium", "process_high", "process_long", + "process_high_memory", ] if self.process_label is None: log.info( diff --git a/nf_core/modules/lint/main_nf.py b/nf_core/modules/lint/main_nf.py index 81308ba5c..5477ee9db 100644 --- a/nf_core/modules/lint/main_nf.py +++ b/nf_core/modules/lint/main_nf.py @@ -423,7 +423,7 @@ def check_process_section(self, lines, registry, fix_version, progress_bar): def check_process_labels(self, lines): - correct_process_labels = ["process_single", "process_low", "process_medium", "process_high", "process_long"] + correct_process_labels = ["process_single", "process_low", "process_medium", "process_high", "process_long", "process_high_memory"] all_labels = [line.strip() for line in lines if line.lstrip().startswith("label ")] bad_labels = [] good_labels = [] From 423af01f9bd41b644280668a31b229cace850406 Mon Sep 17 00:00:00 2001 From: Usman Rashid Date: Tue, 30 Jul 2024 18:48:51 +1200 Subject: [PATCH 403/737] Updated changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index b27dce4b5..a56abe0ac 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,6 +14,7 @@ - Fix linting fail on nfcore_external_java_deps if nf_schema is used ([#2976](https://github.com/nf-core/tools/pull/2976)) - Conda module linting: Include package name in log file 
([#3014](https://github.com/nf-core/tools/pull/3014)) +- Fix module linting warning for process_high_memory ([#3086](https://github.com/nf-core/tools/issues/3086)) ### Download From c18d9cdbb3071dad0c221bd07e299f0909263149 Mon Sep 17 00:00:00 2001 From: Usman Rashid Date: Tue, 30 Jul 2024 18:51:48 +1200 Subject: [PATCH 404/737] Fixed linting --- nf_core/modules/lint/main_nf.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/nf_core/modules/lint/main_nf.py b/nf_core/modules/lint/main_nf.py index 5477ee9db..883d2bb3e 100644 --- a/nf_core/modules/lint/main_nf.py +++ b/nf_core/modules/lint/main_nf.py @@ -423,7 +423,14 @@ def check_process_section(self, lines, registry, fix_version, progress_bar): def check_process_labels(self, lines): - correct_process_labels = ["process_single", "process_low", "process_medium", "process_high", "process_long", "process_high_memory"] + correct_process_labels = [ + "process_single", + "process_low", + "process_medium", + "process_high", + "process_long", + "process_high_memory", + ] all_labels = [line.strip() for line in lines if line.lstrip().startswith("label ")] bad_labels = [] good_labels = [] From 8de38715468b084a00a9627c89c5e1ec77bf52f4 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 30 Jul 2024 09:21:01 +0200 Subject: [PATCH 405/737] exclude click from rich traceback --- nf_core/pipelines/create/__init__.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/nf_core/pipelines/create/__init__.py b/nf_core/pipelines/create/__init__.py index 56e25bf1d..1dd9902fb 100644 --- a/nf_core/pipelines/create/__init__.py +++ b/nf_core/pipelines/create/__init__.py @@ -2,6 +2,7 @@ import logging +import click from textual.app import App from textual.widgets import Button @@ -23,6 +24,7 @@ show_time=False, show_path=False, markup=True, + tracebacks_suppress=[click], ) logging.basicConfig( level="INFO", From 8fe4be6520995812a8d819b3cfe213055d8f6864 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 30 Jul 2024 10:16:43 
+0200 Subject: [PATCH 406/737] give nicer message on install failure, fix install test --- nf_core/__main__.py | 2 +- nf_core/components/install.py | 21 +++++++++++++++++---- tests/modules/test_install.py | 4 +++- 3 files changed, 21 insertions(+), 6 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 11ab574cc..1e5cb210a 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -126,7 +126,7 @@ # because they are actually preliminary, but intended program terminations. # (Custom exceptions are cleaner than `sys.exit(1)`, which we used before) def selective_traceback_hook(exctype, value, traceback): - if exctype in {DownloadError, UserWarning}: # extend set as needed + if exctype in {DownloadError, UserWarning, ValueError}: # extend set as needed log.error(value) else: # print the colored traceback for all other exceptions with rich as usual diff --git a/nf_core/components/install.py b/nf_core/components/install.py index 6130a4cf7..5bdcd1ebd 100644 --- a/nf_core/components/install.py +++ b/nf_core/components/install.py @@ -4,7 +4,10 @@ from typing import List, Optional, Union import questionary +from rich import print from rich.console import Console +from rich.markdown import Markdown +from rich.panel import Panel from rich.syntax import Syntax import nf_core.components @@ -160,7 +163,7 @@ def install(self, component: str, silent: bool = False) -> bool: ) if self.component_type == "subworkflows": subworkflow_config = Path(install_folder, component, "nextflow.config").relative_to(self.directory) - if os.path.isfile(subworkflow_config): + if subworkflow_config.is_file(): log.info("Add the following config statement to use this subworkflow:") Console().print( Syntax(f"includeConfig '{subworkflow_config}'", "groovy", theme="ansi_dark", padding=1) @@ -205,11 +208,21 @@ def collect_and_verify_name( # Check that the supplied name is an available module/subworkflow if component and component not in 
modules_repo.get_avail_components(self.component_type, commit=self.sha): - log.info(f"Use the command 'nf-core {self.component_type} list' to view available software") - raise SystemError( - f"{self.component_type[:-1].title()} '{component}' not found in available {self.component_type}" + log.error(f"{self.component_type[:-1].title()} '{component}' not found in available {self.component_type}") + print( + Panel( + Markdown( + f"Use the command `nf-core {self.component_type} list` to view available {self.component_type}." + ), + title="info", + title_align="left", + style="blue", + padding=1, + ) ) + raise ValueError + if not modules_repo.component_exists(component, self.component_type, commit=self.sha): warn_msg = f"{self.component_type[:-1].title()} '{component}' not found in remote '{modules_repo.remote_url}' ({modules_repo.branch})" log.warning(warn_msg) diff --git a/tests/modules/test_install.py b/tests/modules/test_install.py index 8f7ac0a1d..5f45d1b5b 100644 --- a/tests/modules/test_install.py +++ b/tests/modules/test_install.py @@ -55,7 +55,9 @@ def test_modules_install_different_branch_fail(self): """Test installing a module from a different branch""" install_obj = ModuleInstall(self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH) # The FastQC module does not exists in the branch-test branch - assert install_obj.install("fastqc") is False + with pytest.raises(Exception) as excinfo: + install_obj.install("fastqc") + assert "Module 'fastqc' not found in available module" in str(excinfo.value) def test_modules_install_different_branch_succeed(self): """Test installing a module from a different branch""" From 6bc7361043ffe2e34685383a8b5af6fef6f4b36f Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 30 Jul 2024 10:48:56 +0200 Subject: [PATCH 407/737] use self.registry --- nf_core/modules/lint/__init__.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/nf_core/modules/lint/__init__.py 
b/nf_core/modules/lint/__init__.py index fcf2d7d06..b75f7e757 100644 --- a/nf_core/modules/lint/__init__.py +++ b/nf_core/modules/lint/__init__.py @@ -200,13 +200,12 @@ def lint_modules( for mod in modules: progress_bar.update(lint_progress, advance=1, test_name=mod.component_name) - self.lint_module(mod, progress_bar, registry=registry, local=local, fix_version=fix_version) + self.lint_module(mod, progress_bar, local=local, fix_version=fix_version) def lint_module( self, mod: NFCoreComponent, progress_bar: rich.progress.Progress, - registry: str, local: bool = False, fix_version: bool = False, ): @@ -228,7 +227,7 @@ def lint_module( # TODO: consider unifying modules and subworkflows lint_module() function and add it to the ComponentLint class # Only check the main script in case of a local module if local: - self.main_nf(mod, fix_version, registry, progress_bar) + self.main_nf(mod, fix_version, self.registry, progress_bar) self.passed += [LintResult(mod, *m) for m in mod.passed] warned = [LintResult(mod, *m) for m in (mod.warned + mod.failed)] if not self.fail_warned: @@ -245,7 +244,7 @@ def lint_module( for test_name in self.lint_tests: if test_name == "main_nf": - getattr(self, test_name)(mod, fix_version, registry, progress_bar) + getattr(self, test_name)(mod, fix_version, self.registry, progress_bar) else: getattr(self, test_name)(mod) From e4717cccccce91402535bd9ff49a619ee81ff323 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 30 Jul 2024 10:53:39 +0200 Subject: [PATCH 408/737] move subworkflows main_nf linting closer to the modules version --- nf_core/subworkflows/lint/main_nf.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/nf_core/subworkflows/lint/main_nf.py b/nf_core/subworkflows/lint/main_nf.py index edca32bf3..3ad3f3486 100644 --- a/nf_core/subworkflows/lint/main_nf.py +++ b/nf_core/subworkflows/lint/main_nf.py @@ -31,8 +31,9 @@ def main_nf(_, subworkflow: NFCoreComponent) -> Tuple[List[str], List[str]]: outputs: List[str] = 
[] # Read the lines directly from the subworkflow - lines = None - if lines is None: + lines: List[str] = [] + + if len(lines) == 0: try: # Check whether file exists and load it with open(subworkflow.main_nf) as fh: From b022862f0c1bce3dbc0188f4aa3135e2b8a71812 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 30 Jul 2024 14:49:54 +0200 Subject: [PATCH 409/737] fix handling of missing dir in list command --- nf_core/components/components_command.py | 2 ++ nf_core/components/list.py | 11 ++++++++++- 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/nf_core/components/components_command.py b/nf_core/components/components_command.py index bff246738..f25fb33a6 100644 --- a/nf_core/components/components_command.py +++ b/nf_core/components/components_command.py @@ -51,6 +51,8 @@ def _configure_repo_and_paths(self, nf_dir_req: bool = True) -> None: """ try: if self.directory: + if self.directory == Path(".") and not nf_dir_req: + self.no_prompts = True self.directory, self.repo_type, self.org = get_repo_info(self.directory, use_prompt=not self.no_prompts) except UserWarning: if nf_dir_req: diff --git a/nf_core/components/list.py b/nf_core/components/list.py index 65a28db71..05a8f7112 100644 --- a/nf_core/components/list.py +++ b/nf_core/components/list.py @@ -22,8 +22,17 @@ def __init__( branch: Optional[str] = None, no_pull: bool = False, ) -> None: - super().__init__(component_type, pipeline_dir, remote_url, branch, no_pull) self.remote = remote + super().__init__(component_type, pipeline_dir, remote_url, branch, no_pull) + + def _configure_repo_and_paths(self, nf_dir_req=True) -> None: + """ + Override the default with nf_dir_req set to False to allow + info to be run from anywhere and still return remote info + """ + if self.remote: + nf_dir_req = False + return super()._configure_repo_and_paths(nf_dir_req) def list_components( self, keywords: Optional[List[str]] = None, print_json: bool = False From e9bf654aec84fc749b915e5e834e45ec2d035deb Mon Sep 17 
00:00:00 2001 From: mashehu Date: Tue, 30 Jul 2024 14:50:52 +0200 Subject: [PATCH 410/737] remove unnecessary string conversion --- nf_core/components/create.py | 2 +- nf_core/components/nfcore_component.py | 4 ++-- nf_core/modules/lint/environment_yml.py | 2 +- nf_core/modules/lint/meta_yml.py | 4 ++-- nf_core/modules/lint/module_tests.py | 1 + nf_core/modules/modules_differ.py | 2 +- nf_core/pipelines/create_logo.py | 4 ++-- nf_core/utils.py | 2 +- 8 files changed, 11 insertions(+), 10 deletions(-) diff --git a/nf_core/components/create.py b/nf_core/components/create.py index fdcf402b4..c9af6d1ad 100644 --- a/nf_core/components/create.py +++ b/nf_core/components/create.py @@ -290,7 +290,7 @@ def _render_template(self) -> Optional[bool]: # Write output to the target file log.debug(f"Writing output to: '{dest_fn}'") dest_fn.parent.mkdir(exist_ok=True, parents=True) - with open(str(dest_fn), "w") as fh: + with open(dest_fn, "w") as fh: log.debug(f"Writing output to: '{dest_fn}'") fh.write(rendered_output) diff --git a/nf_core/components/nfcore_component.py b/nf_core/components/nfcore_component.py index 129871d68..db3196be9 100644 --- a/nf_core/components/nfcore_component.py +++ b/nf_core/components/nfcore_component.py @@ -163,7 +163,7 @@ def _get_included_components_in_chained_tests(self, main_nf_test: Union[Path, st def get_inputs_from_main_nf(self) -> None: """Collect all inputs from the main.nf file.""" inputs: List[str] = [] - with open(str(self.main_nf)) as f: + with open(self.main_nf) as f: data = f.read() # get input values from main.nf after "input:", which can be formatted as tuple val(foo) path(bar) or val foo or val bar or path bar or path foo # regex matches: @@ -191,7 +191,7 @@ def get_inputs_from_main_nf(self) -> None: def get_outputs_from_main_nf(self): outputs = [] - with open(str(self.main_nf)) as f: + with open(self.main_nf) as f: data = f.read() # get output values from main.nf after "output:". 
the names are always after "emit:" if "output:" not in data: diff --git a/nf_core/modules/lint/environment_yml.py b/nf_core/modules/lint/environment_yml.py index e34b9d585..341b9cd73 100644 --- a/nf_core/modules/lint/environment_yml.py +++ b/nf_core/modules/lint/environment_yml.py @@ -25,7 +25,7 @@ def environment_yml(module_lint_object: ComponentLint, module: NFCoreComponent) if module.environment_yml is None: raise LintExceptionError("Module does not have an `environment.yml` file") try: - with open(str(module.environment_yml)) as fh: + with open(module.environment_yml) as fh: env_yml = yaml.safe_load(fh) module.passed.append(("environment_yml_exists", "Module's `environment.yml` exists", module.environment_yml)) diff --git a/nf_core/modules/lint/meta_yml.py b/nf_core/modules/lint/meta_yml.py index 32110b713..4a0ef6e01 100644 --- a/nf_core/modules/lint/meta_yml.py +++ b/nf_core/modules/lint/meta_yml.py @@ -43,7 +43,7 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None module.get_outputs_from_main_nf() # Check if we have a patch file, get original file in that case meta_yaml = None - if module.is_patched: + if module.is_patched and module_lint_object.modules_repo.repo_path is not None: lines = ModulesDiffer.try_apply_patch( module.component_name, module_lint_object.modules_repo.repo_path, @@ -57,7 +57,7 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None raise LintExceptionError("Module does not have a `meta.yml` file") if meta_yaml is None: try: - with open(str(module.meta_yml)) as fh: + with open(module.meta_yml) as fh: meta_yaml = yaml.safe_load(fh) module.passed.append(("meta_yml_exists", "Module `meta.yml` exists", module.meta_yml)) except FileNotFoundError: diff --git a/nf_core/modules/lint/module_tests.py b/nf_core/modules/lint/module_tests.py index 9301db81e..6722c1212 100644 --- a/nf_core/modules/lint/module_tests.py +++ b/nf_core/modules/lint/module_tests.py @@ -73,6 +73,7 @@ def 
module_tests(_, module: NFCoreComponent): with open(module.nftest_main_nf) as fh: if "snapshot(" in fh.read(): snap_file = module.nftest_testdir / "main.nf.test.snap" + if snap_file.is_file(): module.passed.append( ( diff --git a/nf_core/modules/modules_differ.py b/nf_core/modules/modules_differ.py index e310b3bf8..f6b334eb6 100644 --- a/nf_core/modules/modules_differ.py +++ b/nf_core/modules/modules_differ.py @@ -307,7 +307,7 @@ def per_file_patch(patch_fn: Union[str, Path]) -> Dict[str, List[str]]: dict[str, str]: A dictionary indexed by the filenames with the file patches as values """ - with open(str(patch_fn)) as fh: + with open(patch_fn) as fh: lines = fh.readlines() patches = {} diff --git a/nf_core/pipelines/create_logo.py b/nf_core/pipelines/create_logo.py index 6619b910b..c54d8f208 100644 --- a/nf_core/pipelines/create_logo.py +++ b/nf_core/pipelines/create_logo.py @@ -62,7 +62,7 @@ def create_logo( img: Optional[Image.Image] = None if cache_path.is_file(): log.debug(f"Logo already exists in cache at: {cache_path}. 
Reusing this file.") - img = Image.open(str(cache_path)) + img = Image.open(cache_path) if img is None: log.debug(f"Creating logo for {text}") @@ -81,7 +81,7 @@ def create_logo( template_fn = "nf-core-repo-logo-base-darkbg.png" template_path = assets / template_fn - img = Image.open(str(template_path)) + img = Image.open(template_path) # get the height of the template image height = img.size[1] diff --git a/nf_core/utils.py b/nf_core/utils.py index a95eea4b6..c12ec191f 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1115,7 +1115,7 @@ def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Optional[Path] return Path(directory, CONFIG_PATHS[0]), None if not Path(config_fn).is_file(): raise FileNotFoundError(f"No `.nf-core.yml` file found in the directory '{directory}'") - with open(str(config_fn)) as fh: + with open(config_fn) as fh: tools_config = yaml.safe_load(fh) # If the file is empty From d4c06cb17fce07eedf00b2104e5be6fc5fc5548c Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 30 Jul 2024 14:51:05 +0200 Subject: [PATCH 411/737] fix tests --- nf_core/modules/lint/main_nf.py | 9 +++++---- nf_core/pipelines/refgenie.py | 14 +++++++++----- tests/modules/test_install.py | 15 ++++++--------- tests/modules/test_list.py | 6 +++--- tests/pipelines/test_params_file.py | 18 +++++++++--------- tests/subworkflows/test_install.py | 5 +++-- 6 files changed, 35 insertions(+), 32 deletions(-) diff --git a/nf_core/modules/lint/main_nf.py b/nf_core/modules/lint/main_nf.py index 985a92fa1..358d6a75a 100644 --- a/nf_core/modules/lint/main_nf.py +++ b/nf_core/modules/lint/main_nf.py @@ -43,8 +43,8 @@ def main_nf( of ``software`` and ``prefix`` """ - inputs = [] - outputs = [] + inputs: List[str] = [] + outputs: List[str] = [] # Check if we have a patch file affecting the 'main.nf' file # otherwise read the lines directly from the module @@ -58,7 +58,7 @@ def main_nf( reverse=True, ).get("main.nf", [""]) - if not lines: + if len(lines) == 0: try: # Check whether 
file exists and load it with open(module.main_nf) as fh: @@ -66,9 +66,10 @@ def main_nf( module.passed.append(("main_nf_exists", "Module file exists", module.main_nf)) except FileNotFoundError: module.failed.append(("main_nf_exists", "Module file does not exist", module.main_nf)) + raise FileNotFoundError(f"Module file does not exist: {module.main_nf}") deprecated_i = ["initOptions", "saveFiles", "getSoftwareName", "getProcessName", "publishDir"] - if lines is not None: + if len(lines) > 0: lines_j = "\n".join(lines) else: lines_j = "" diff --git a/nf_core/pipelines/refgenie.py b/nf_core/pipelines/refgenie.py index 19ef4b512..f2eb09f33 100644 --- a/nf_core/pipelines/refgenie.py +++ b/nf_core/pipelines/refgenie.py @@ -181,14 +181,18 @@ def update_config(rgc): log.info("Could not determine path to 'refgenie_genomes.config' file.") return False + if refgenie_genomes_config_file is None: + log.info("Could not determine path to 'refgenie_genomes.config' file.") + return False + # Save the updated genome config - try: - with open(str(refgenie_genomes_config_file), "w+") as fh: - fh.write(refgenie_genomes) - log.info(f"Updated nf-core genomes config: {refgenie_genomes_config_file}") - except FileNotFoundError: + if not Path(refgenie_genomes_config_file).is_file(): log.warning(f"Could not write to {refgenie_genomes_config_file}") return False + else: + with open(refgenie_genomes_config_file, "w+") as fh: + fh.write(refgenie_genomes) + log.info(f"Updated nf-core genomes config: {refgenie_genomes_config_file}") # Add include statement to NXF_HOME/config if nxf_home: diff --git a/tests/modules/test_install.py b/tests/modules/test_install.py index 5f45d1b5b..92d30a494 100644 --- a/tests/modules/test_install.py +++ b/tests/modules/test_install.py @@ -1,4 +1,3 @@ -import os from pathlib import Path import pytest @@ -17,30 +16,28 @@ class TestModulesCreate(TestModules): - def test_modules_install_nopipeline(self): - """Test installing a module - no pipeline given""" - 
self.pipeline_dir = None - assert self.mods_install.install("foo") is False - @with_temporary_folder def test_modules_install_emptypipeline(self, tmpdir): """Test installing a module - empty dir given""" Path(tmpdir, "nf-core-pipe").mkdir() self.mods_install.directory = Path(tmpdir, "nf-core-pipe") with pytest.raises(UserWarning) as excinfo: - self.mods_install.install("foo") + self.mods_install.install("fastp") assert "Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value) def test_modules_install_nomodule(self): """Test installing a module - unrecognised module given""" - assert self.mods_install.install("foo") is False + with pytest.raises(ValueError) as excinfo: + self.mods_install.install("foo") + assert excinfo.typename == "ValueError" + assert "Module 'foo' not found in available modules" in self.caplog.text def test_modules_install_trimgalore(self): """Test installing a module - TrimGalore!""" assert self.mods_install.install("trimgalore") is not False assert self.mods_install.directory is not None module_path = Path(self.mods_install.directory, "modules", "nf-core", "trimgalore") - assert os.path.exists(module_path) + assert module_path.exists() def test_modules_install_trimgalore_twice(self): """Test installing a module - TrimGalore! 
already there""" diff --git a/tests/modules/test_list.py b/tests/modules/test_list.py index a170f6d6f..282f044c1 100644 --- a/tests/modules/test_list.py +++ b/tests/modules/test_list.py @@ -10,10 +10,10 @@ from ..utils import GITLAB_DEFAULT_BRANCH, GITLAB_URL -class TestModulesCreate(TestModules): +class TestModulesList(TestModules): def test_modules_list_remote(self): """Test listing available modules""" - mods_list = nf_core.modules.list.ModuleList(remote=True) + mods_list = nf_core.modules.list.ModuleList() listed_mods = mods_list.list_components() console = Console(record=True) console.print(listed_mods) @@ -22,7 +22,7 @@ def test_modules_list_remote(self): def test_modules_list_remote_gitlab(self): """Test listing the modules in the remote gitlab repo""" - mods_list = nf_core.modules.list.ModuleList(remote=True, remote_url=GITLAB_URL, branch=GITLAB_DEFAULT_BRANCH) + mods_list = nf_core.modules.list.ModuleList(remote_url=GITLAB_URL, branch=GITLAB_DEFAULT_BRANCH) listed_mods = mods_list.list_components() console = Console(record=True) console.print(listed_mods) diff --git a/tests/pipelines/test_params_file.py b/tests/pipelines/test_params_file.py index 7e3e4b4f4..22a6182ac 100644 --- a/tests/pipelines/test_params_file.py +++ b/tests/pipelines/test_params_file.py @@ -20,15 +20,15 @@ def setup_class(cls): # Create a test pipeline in temp directory cls.tmp_dir = tempfile.mkdtemp() - cls.template_dir = os.path.join(cls.tmp_dir, "wf") + cls.template_dir = Path(cls.tmp_dir, "wf") create_obj = nf_core.pipelines.create.create.PipelineCreate( "testpipeline", "a description", "Me", outdir=cls.template_dir, no_git=True ) create_obj.init_pipeline() - cls.template_schema = os.path.join(cls.template_dir, "nextflow_schema.json") + cls.template_schema = Path(cls.template_dir, "nextflow_schema.json") cls.params_template_builder = ParamsFileBuilder(cls.template_dir) - cls.invalid_template_schema = os.path.join(cls.template_dir, "nextflow_schema_invalid.json") + 
cls.invalid_template_schema = Path(cls.template_dir, "nextflow_schema_invalid.json") # Remove the allOf section to make the schema invalid with open(cls.template_schema) as fh: @@ -40,14 +40,14 @@ def setup_class(cls): @classmethod def teardown_class(cls): - if os.path.exists(cls.tmp_dir): + if Path(cls.tmp_dir).exists(): shutil.rmtree(cls.tmp_dir) def test_build_template(self): - outfile = os.path.join(self.tmp_dir, "params-file.yml") - self.params_template_builder.write_params_file(outfile) + outfile = Path(self.tmp_dir, "params-file.yml") + self.params_template_builder.write_params_file(str(outfile)) - assert os.path.exists(outfile) + assert outfile.exists() with open(outfile) as fh: out = fh.read() @@ -56,9 +56,9 @@ def test_build_template(self): def test_build_template_invalid_schema(self, caplog): """Build a schema from a template""" - outfile = os.path.join(self.tmp_dir, "params-file-invalid.yml") + outfile = Path(self.tmp_dir, "params-file-invalid.yml") builder = ParamsFileBuilder(self.invalid_template_schema) - res = builder.write_params_file(outfile) + res = builder.write_params_file(str(outfile)) assert res is False assert "Pipeline schema file is invalid" in caplog.text diff --git a/tests/subworkflows/test_install.py b/tests/subworkflows/test_install.py index e6ba9439a..00ba88841 100644 --- a/tests/subworkflows/test_install.py +++ b/tests/subworkflows/test_install.py @@ -55,9 +55,10 @@ def test_subworkflows_install_emptypipeline(self, tmpdir): def test_subworkflows_install_nosubworkflow(self): """Test installing a subworkflow - unrecognised subworkflow given""" - with pytest.raises(SystemError) as excinfo: + with pytest.raises(ValueError) as excinfo: self.subworkflow_install.install("foo") - assert "Subworkflow 'foo' not found in available subworkflows" in str(excinfo.value) + assert excinfo.typename == "ValueError" + assert "Subworkflow 'foo' not found in available subworkflows" in self.caplog.text def 
test_subworkflows_install_bam_sort_stats_samtools_twice(self): """Test installing a subworkflow - bam_sort_stats_samtools already there""" From d57c815b83213154179be3b3e390557a6404d3b5 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 30 Jul 2024 15:30:46 +0200 Subject: [PATCH 412/737] add type hints to utils functions --- nf_core/components/update.py | 4 +-- nf_core/modules/modules_json.py | 2 +- nf_core/pipelines/download.py | 2 +- nf_core/utils.py | 48 ++++++++++++++++----------------- tests/modules/test_list.py | 2 +- 5 files changed, 29 insertions(+), 29 deletions(-) diff --git a/nf_core/components/update.py b/nf_core/components/update.py index eb15f976b..3e4694adc 100644 --- a/nf_core/components/update.py +++ b/nf_core/components/update.py @@ -755,7 +755,7 @@ def move_files_from_tmp_dir(self, component: str, install_folder: str, repo_path config_files = [f for f in pipeline_files if str(f).endswith(".config")] for config_file in config_files: log.debug(f"Moving '{component}/{config_file}' to updated component") - shutil.move(pipeline_path / config_file, temp_component_dir / config_file) + shutil.move(str(pipeline_path / config_file), temp_component_dir / config_file) files.append(temp_component_dir / config_file) else: @@ -772,7 +772,7 @@ def move_files_from_tmp_dir(self, component: str, install_folder: str, repo_path log.debug(f"Moving '{file}' to updated component") dest = Path(pipeline_path, file) dest.parent.mkdir(parents=True, exist_ok=True) - shutil.move(path, dest) + shutil.move(str(path), dest) log.info(f"Updating '{repo_path}/{component}'") log.debug(f"Updating {self.component_type[:-1]} '{component}' to {new_version} from {repo_path}") diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 19649a57c..536589d81 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -430,7 +430,7 @@ def move_component_to_local(self, component_type: str, component: str, repo_name # Add a time suffix to the 
path to make it unique # (do it again and again if it didn't work out...) to_name += f"-{datetime.datetime.now().strftime('%y%m%d%H%M%S')}" - shutil.move(current_path, local_dir / to_name) + shutil.move(str(current_path), local_dir / to_name) def unsynced_components(self) -> Tuple[List[str], List[str], dict]: """ diff --git a/nf_core/pipelines/download.py b/nf_core/pipelines/download.py index 704fe91b2..97453b127 100644 --- a/nf_core/pipelines/download.py +++ b/nf_core/pipelines/download.py @@ -1388,7 +1388,7 @@ def singularity_pull_image( # where the output of 'singularity pull' is first generated before being copied to the NXF_SINGULARITY_CACHDIR. # if not defined by the Singularity administrators, then use the temporary directory to avoid storing the images in the work directory. if os.environ.get("SINGULARITY_CACHEDIR") is None: - os.environ["SINGULARITY_CACHEDIR"] = NFCORE_CACHE_DIR + os.environ["SINGULARITY_CACHEDIR"] = str(NFCORE_CACHE_DIR) # Sometimes, container still contain an explicit library specification, which # resulted in attempted pulls e.g. 
from docker://quay.io/quay.io/qiime2/core:2022.11 diff --git a/nf_core/utils.py b/nf_core/utils.py index c12ec191f..ea850a5d2 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -19,7 +19,7 @@ import time from contextlib import contextmanager from pathlib import Path -from typing import Any, Dict, Generator, List, Optional, Tuple, Union +from typing import Any, Callable, Dict, Generator, List, Optional, Tuple, Union import git import prompt_toolkit.styles @@ -66,11 +66,11 @@ ] ) -NFCORE_CACHE_DIR = os.path.join( - os.environ.get("XDG_CACHE_HOME", os.path.join(os.getenv("HOME") or "", ".cache")), +NFCORE_CACHE_DIR = Path( + os.environ.get("XDG_CACHE_HOME", Path(os.getenv("HOME") or "", ".cache")), "nfcore", ) -NFCORE_DIR = os.path.join(os.environ.get("XDG_CONFIG_HOME", os.path.join(os.getenv("HOME") or "", ".config")), "nfcore") +NFCORE_DIR = Path(os.environ.get("XDG_CONFIG_HOME", os.path.join(os.getenv("HOME") or "", ".config")), "nfcore") def fetch_remote_version(source_url): @@ -358,17 +358,17 @@ def run_cmd(executable: str, cmd: str) -> Union[Tuple[bytes, bytes], None]: ) -def setup_nfcore_dir(): +def setup_nfcore_dir() -> bool: """Creates a directory for files that need to be kept between sessions Currently only used for keeping local copies of modules repos """ - if not os.path.exists(NFCORE_DIR): - os.makedirs(NFCORE_DIR) - return True + if not NFCORE_DIR.exists(): + NFCORE_DIR.mkdir(parents=True) + return True -def setup_requests_cachedir() -> dict: +def setup_requests_cachedir() -> Dict[str, Union[Path, datetime.timedelta, str]]: """Sets up local caching for faster remote HTTP requests. Caching directory will be set up in the user's home directory under @@ -377,10 +377,10 @@ def setup_requests_cachedir() -> dict: Uses requests_cache monkey patching. Also returns the config dict so that we can use the same setup with a Session. 
""" - pyversion = ".".join(str(v) for v in sys.version_info[0:3]) - cachedir = setup_nfcore_cachedir(f"cache_{pyversion}") - config = { - "cache_name": os.path.join(cachedir, "github_info"), + pyversion: str = ".".join(str(v) for v in sys.version_info[0:3]) + cachedir: Path = setup_nfcore_cachedir(f"cache_{pyversion}") + config: Dict[str, Union[Path, datetime.timedelta, str]] = { + "cache_name": Path(cachedir, "github_info"), "expire_after": datetime.timedelta(hours=1), "backend": "sqlite", } @@ -403,7 +403,7 @@ def setup_nfcore_cachedir(cache_fn: Union[str, Path]) -> Path: return cachedir -def wait_cli_function(poll_func, refresh_per_second=20): +def wait_cli_function(poll_func: Callable[[], bool], refresh_per_second: int = 20) -> None: """ Display a command-line spinner while calling a function repeatedly. @@ -427,7 +427,7 @@ def wait_cli_function(poll_func, refresh_per_second=20): raise AssertionError("Cancelled!") -def poll_nfcore_web_api(api_url, post_data=None): +def poll_nfcore_web_api(api_url: str, post_data: Optional[Dict] = None) -> Dict: """ Poll the nf-core website API @@ -448,7 +448,7 @@ def poll_nfcore_web_api(api_url, post_data=None): raise AssertionError(f"Could not connect to URL: {api_url}") else: if response.status_code != 200 and response.status_code != 301: - log.debug(f"Response content:\n{response.content}") + log.debug(f"Response content:\n{response.content.decode()}") raise AssertionError( f"Could not access remote API results: {api_url} (HTML {response.status_code} Error)" ) @@ -460,7 +460,7 @@ def poll_nfcore_web_api(api_url, post_data=None): if "status" not in web_response: raise AssertionError() except (json.decoder.JSONDecodeError, AssertionError, TypeError): - log.debug(f"Response content:\n{response.content}") + log.debug(f"Response content:\n{response.content.decode()}") raise AssertionError( f"nf-core website API results response not recognised: {api_url}\n " "See verbose log for full response" @@ -476,14 +476,14 @@ class 
GitHubAPISession(requests_cache.CachedSession): such as automatically setting up GitHub authentication if we can. """ - def __init__(self): # pylint: disable=super-init-not-called - self.auth_mode = None - self.return_ok = [200, 201] - self.return_retry = [403] - self.return_unauthorised = [401] - self.has_init = False + def __init__(self) -> None: + self.auth_mode: Optional[str] = None + self.return_ok: List[int] = [200, 201] + self.return_retry: List[int] = [403] + self.return_unauthorised: List[int] = [401] + self.has_init: bool = False - def lazy_init(self): + def lazy_init(self) -> None: """ Initialise the object. diff --git a/tests/modules/test_list.py b/tests/modules/test_list.py index 282f044c1..3e92a33ab 100644 --- a/tests/modules/test_list.py +++ b/tests/modules/test_list.py @@ -67,7 +67,7 @@ def test_modules_list_local_json(self): assert "fastqc" in listed_mods assert "multiqc" in listed_mods - def test_modules_list_remote_json(self): + def test_modules_list_remote_json(self) -> None: """Test listing available modules as JSON""" mods_list = nf_core.modules.list.ModuleList(remote=True) listed_mods: str = str(mods_list.list_components(print_json=True)) From 495e05acc406b64e1d21e008aed92b443c5fb40e Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 30 Jul 2024 15:46:39 +0200 Subject: [PATCH 413/737] fix refgenie tests --- nf_core/pipelines/refgenie.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/nf_core/pipelines/refgenie.py b/nf_core/pipelines/refgenie.py index f2eb09f33..426ca5eb7 100644 --- a/nf_core/pipelines/refgenie.py +++ b/nf_core/pipelines/refgenie.py @@ -186,13 +186,13 @@ def update_config(rgc): return False # Save the updated genome config - if not Path(refgenie_genomes_config_file).is_file(): - log.warning(f"Could not write to {refgenie_genomes_config_file}") - return False - else: + try: with open(refgenie_genomes_config_file, "w+") as fh: fh.write(refgenie_genomes) log.info(f"Updated nf-core genomes config: 
{refgenie_genomes_config_file}") + except FileNotFoundError: + log.info(f"Could not write to {refgenie_genomes_config_file}") + return False # Add include statement to NXF_HOME/config if nxf_home: From 4c07bdfef5542de71954dbaedba6293e71bfcc9d Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 30 Jul 2024 15:57:15 +0200 Subject: [PATCH 414/737] fix tests --- nf_core/subworkflows/list.py | 2 +- nf_core/utils.py | 10 ++++++++-- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/nf_core/subworkflows/list.py b/nf_core/subworkflows/list.py index 5f849a1f9..9e84d6cbe 100644 --- a/nf_core/subworkflows/list.py +++ b/nf_core/subworkflows/list.py @@ -10,7 +10,7 @@ class SubworkflowList(ComponentList): def __init__( self, - pipeline_dir: Union[str, Path], + pipeline_dir: Union[str, Path] = ".", remote: bool = True, remote_url: Optional[str] = None, branch: Optional[str] = None, diff --git a/nf_core/utils.py b/nf_core/utils.py index ea850a5d2..48b7eae29 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -448,7 +448,10 @@ def poll_nfcore_web_api(api_url: str, post_data: Optional[Dict] = None) -> Dict: raise AssertionError(f"Could not connect to URL: {api_url}") else: if response.status_code != 200 and response.status_code != 301: - log.debug(f"Response content:\n{response.content.decode()}") + response_content = response.content + if isinstance(response_content, bytes): + response_content = response_content.decode() + log.debug(f"Response content:\n{response_content}") raise AssertionError( f"Could not access remote API results: {api_url} (HTML {response.status_code} Error)" ) @@ -460,7 +463,10 @@ def poll_nfcore_web_api(api_url: str, post_data: Optional[Dict] = None) -> Dict: if "status" not in web_response: raise AssertionError() except (json.decoder.JSONDecodeError, AssertionError, TypeError): - log.debug(f"Response content:\n{response.content.decode()}") + response_content = response.content + if isinstance(response_content, bytes): + response_content = 
response_content.decode() + log.debug(f"Response content:\n{response_content}") raise AssertionError( f"nf-core website API results response not recognised: {api_url}\n " "See verbose log for full response" From c9f86ca5e841cdda6660803ade89919b538efc92 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 1 Aug 2024 00:15:29 +0000 Subject: [PATCH 415/737] Update pre-commit hook pre-commit/mirrors-mypy to v1.11.1 --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 63202e517..879bc15c0 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -19,7 +19,7 @@ repos: alias: ec - repo: https://github.com/pre-commit/mirrors-mypy - rev: "v1.11.0" + rev: "v1.11.1" hooks: - id: mypy additional_dependencies: From 6f0345f445e97ade328b32b1c059788898ec208a Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Thu, 1 Aug 2024 00:16:33 +0000 Subject: [PATCH 416/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index a56abe0ac..ec033ff1e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -55,6 +55,7 @@ - Update python:3.12-slim Docker digest to f11725a ([#3071](https://github.com/nf-core/tools/pull/3071)) - Fix number of arguments for pipelines_create within the command_create function ([#3074](https://github.com/nf-core/tools/pull/3074)) - Update python:3.12-slim Docker digest to 740d94a ([#3079](https://github.com/nf-core/tools/pull/3079)) +- Update pre-commit hook pre-commit/mirrors-mypy to v1.11.1 ([#3091](https://github.com/nf-core/tools/pull/3091)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From 38ea56804aad6a0bd4492b55e164af42f1bac758 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 2 Aug 2024 10:10:19 +0200 Subject: [PATCH 417/737] allow numbers in custom pipeline name --- 
nf_core/pipelines/create/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipelines/create/utils.py b/nf_core/pipelines/create/utils.py index c15d61e26..a360b5f3a 100644 --- a/nf_core/pipelines/create/utils.py +++ b/nf_core/pipelines/create/utils.py @@ -65,7 +65,7 @@ def name_nospecialchars(cls, v: str, info: ValidationInfo) -> str: if not re.match(r"^[a-z]+$", v): raise ValueError("Must be lowercase without punctuation.") else: - if not re.match(r"^[a-zA-Z-_]+$", v): + if not re.match(r"^[a-zA-Z-_0-9]+$", v): raise ValueError("Must not contain special characters. Only '-' or '_' are allowed.") return v From 8a781b8bcb3f5bc40e67819ee3e03fe774e14a9a Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Fri, 2 Aug 2024 08:12:16 +0000 Subject: [PATCH 418/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index ec033ff1e..b6cd12d55 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -56,6 +56,7 @@ - Fix number of arguments for pipelines_create within the command_create function ([#3074](https://github.com/nf-core/tools/pull/3074)) - Update python:3.12-slim Docker digest to 740d94a ([#3079](https://github.com/nf-core/tools/pull/3079)) - Update pre-commit hook pre-commit/mirrors-mypy to v1.11.1 ([#3091](https://github.com/nf-core/tools/pull/3091)) +- Pipelines: allow numbers in custom pipeline name ([#3094](https://github.com/nf-core/tools/pull/3094)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From 7ea871858fbcb263c65de4527fa4644f0ee007fb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Matthias=20H=C3=B6rtenhuber?= Date: Fri, 2 Aug 2024 10:24:58 +0200 Subject: [PATCH 419/737] Update CHANGELOG.md MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Júlia Mir Pedrol --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md 
b/CHANGELOG.md index 49a58ff6b..12001d56e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -54,7 +54,7 @@ - Update python:3.12-slim Docker digest to f11725a ([#3071](https://github.com/nf-core/tools/pull/3071)) - Fix number of arguments for pipelines_create within the command_create function ([#3074](https://github.com/nf-core/tools/pull/3074)) - Update python:3.12-slim Docker digest to 740d94a ([#3079](https://github.com/nf-core/tools/pull/3079)) -- Add `--migrate_pytest` option to `nf-core test` command ([#3085](https://github.com/nf-core/tools/pull/3085)) +- Add `--migrate_pytest` option to `nf-core test` command ([#3085](https://github.com/nf-core/tools/pull/3085)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From cbee6d06bb0fbaa2a76d6e7f7c2a842e0a81ec54 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 2 Aug 2024 12:03:07 +0200 Subject: [PATCH 420/737] allow module lables between double quotes --- nf_core/modules/lint/main_nf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/modules/lint/main_nf.py b/nf_core/modules/lint/main_nf.py index 883d2bb3e..2762dd22d 100644 --- a/nf_core/modules/lint/main_nf.py +++ b/nf_core/modules/lint/main_nf.py @@ -437,7 +437,7 @@ def check_process_labels(self, lines): if len(all_labels) > 0: for label in all_labels: try: - label = re.match(r"^label\s+'?([a-zA-Z0-9_-]+)'?$", label).group(1) + label = re.match(r"^label\s+'?\"?([a-zA-Z0-9_-]+)'?\"?$", label).group(1) except AttributeError: self.warned.append( ( From 086302e209fa56fd869aae3590e7d1bae780c3fa Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 2 Aug 2024 12:48:00 +0200 Subject: [PATCH 421/737] allow multiline module input tuple --- nf_core/modules/lint/main_nf.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/nf_core/modules/lint/main_nf.py b/nf_core/modules/lint/main_nf.py index 883d2bb3e..ed3d93e2c 100644 --- a/nf_core/modules/lint/main_nf.py +++ 
b/nf_core/modules/lint/main_nf.py @@ -81,6 +81,7 @@ def main_nf(module_lint_object, module, fix_version, registry, progress_bar): script_lines = [] shell_lines = [] when_lines = [] + lines = iter(lines) for line in lines: if re.search(r"^\s*process\s*\w*\s*{", line) and state == "module": state = "process" @@ -104,6 +105,13 @@ def main_nf(module_lint_object, module, fix_version, registry, progress_bar): if state == "process" and not _is_empty(line): process_lines.append(line) if state == "input" and not _is_empty(line): + # allow multiline tuples + if "tuple" in line and line.count("(") <= 1: + joint_tuple = line + while re.sub(r"\s", "", line) != ")": + joint_tuple = joint_tuple + line + line = next(lines) + line = joint_tuple inputs.extend(_parse_input(module, line)) if state == "output" and not _is_empty(line): outputs += _parse_output(module, line) From 5b7425368135301c53f664262133bf84981cbdd9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Fri, 2 Aug 2024 15:34:20 +0200 Subject: [PATCH 422/737] Update nf_core/pipelines/create/utils.py Co-authored-by: Adam Talbot <12817534+adamrtalbot@users.noreply.github.com> --- nf_core/pipelines/create/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipelines/create/utils.py b/nf_core/pipelines/create/utils.py index a360b5f3a..c387960c3 100644 --- a/nf_core/pipelines/create/utils.py +++ b/nf_core/pipelines/create/utils.py @@ -65,7 +65,7 @@ def name_nospecialchars(cls, v: str, info: ValidationInfo) -> str: if not re.match(r"^[a-z]+$", v): raise ValueError("Must be lowercase without punctuation.") else: - if not re.match(r"^[a-zA-Z-_0-9]+$", v): + if not re.match(r"^[-\w]+$", v): raise ValueError("Must not contain special characters. 
Only '-' or '_' are allowed.") return v From 4f755380032ce01d6b1222036ae68172976e40f5 Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 5 Aug 2024 08:27:26 +0200 Subject: [PATCH 423/737] update ruff --- .github/workflows/changelog.py | 9 +++++---- .pre-commit-config.yaml | 2 +- nf_core/modules/modules_differ.py | 2 +- 3 files changed, 7 insertions(+), 6 deletions(-) diff --git a/.github/workflows/changelog.py b/.github/workflows/changelog.py index 471665e4b..24130e65c 100644 --- a/.github/workflows/changelog.py +++ b/.github/workflows/changelog.py @@ -1,3 +1,4 @@ +#! /usr/bin/env python3 """ Taken from https://github.com/MultiQC/MultiQC/blob/main/.github/workflows/changelog.py and updated for nf-core @@ -18,7 +19,7 @@ import re import sys from pathlib import Path -from typing import List +from typing import List, Tuple REPO_URL = "https://github.com/nf-core/tools" @@ -32,7 +33,7 @@ assert pr_number, pr_number # Trim the PR number added when GitHub squashes commits, e.g. "Template: Updated (#2026)" -pr_title = pr_title.removesuffix(f" (#{pr_number})") +pr_title = pr_title.removesuffix(f" (#{pr_number})") # type: ignore changelog_path = workspace_path / "CHANGELOG.md" @@ -50,7 +51,7 @@ sys.exit(0) -def _determine_change_type(pr_title) -> tuple[str, str]: +def _determine_change_type(pr_title) -> Tuple[str, str]: """ Determine the type of the PR: Template, Download, Linting, Modules, Subworkflows, or General Returns a tuple of the section name and the module info. @@ -85,7 +86,7 @@ def _determine_change_type(pr_title) -> tuple[str, str]: pr_link = f"([#{pr_number}]({REPO_URL}/pull/{pr_number}))" # Handle manual changelog entries through comments. 
-if comment := comment.removeprefix("@nf-core-bot changelog").strip(): +if comment := comment.removeprefix("@nf-core-bot changelog").strip(): # type: ignore print(f"Adding manual changelog entry: {comment}") pr_title = comment new_lines = [ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f763fa665..c83cb5a4b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.5.1 + rev: v0.5.6 hooks: - id: ruff # linter args: [--fix, --exit-non-zero-on-fix] # sort imports and fix diff --git a/nf_core/modules/modules_differ.py b/nf_core/modules/modules_differ.py index f6b334eb6..b6d7f0d0f 100644 --- a/nf_core/modules/modules_differ.py +++ b/nf_core/modules/modules_differ.py @@ -312,7 +312,7 @@ def per_file_patch(patch_fn: Union[str, Path]) -> Dict[str, List[str]]: patches = {} i = 0 - patch_lines: list[str] = [] + patch_lines: List[str] = [] key = "preamble" while i < len(lines): line = lines[i] From f4323602f8481460aea48c973c22e6b79dde6fae Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 5 Aug 2024 13:13:33 +0200 Subject: [PATCH 424/737] add option to exclude gitpod for custom pipeline template --- .github/workflows/create-test-lint-wf-template.yml | 5 +++++ nf_core/pipeline-template/nextflow.config | 2 ++ nf_core/pipelines/create/create.py | 2 ++ nf_core/pipelines/create/custompipeline.py | 13 +++++++++++++ tests/data/pipeline_create_template_skip.yml | 1 + 5 files changed, 23 insertions(+) diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index cfa0a5007..8a893ce9f 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -42,6 +42,7 @@ jobs: - "template_skip_igenomes.yml" - "template_skip_ci.yml" - "template_skip_code_linters.yml" + - "template_skip_gitpod.yml" runner: # use the runner given by the input if it is dispatched 
manually, run on github if it is a rerun or on self-hosted by default - ${{ github.event.inputs.runners || github.run_number > 1 && 'ubuntu-latest' || 'self-hosted' }} @@ -107,6 +108,10 @@ jobs: run: | printf "org: my-prefix\nskip: code_linters" > create-test-lint-wf/template_skip_code_linters.yml + - name: Create template skip gitpod + run: | + printf "org: my-prefix\nskip: gitpod" > create-test-lint-wf/template_skip_gitpod.yml + # Create a pipeline from the template - name: create a pipeline from the template ${{ matrix.TEMPLATE }} run: | diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index 2e6a56b00..8a6ca6b32 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -178,11 +178,13 @@ profiles { wave.freeze = true wave.strategy = 'conda,container' } + {%- if gitpod %} gitpod { executor.name = 'local' executor.cpus = 4 executor.memory = 8.GB } + {%- endif %} test { includeConfig 'conf/test.config' } test_full { includeConfig 'conf/test_full.config' } } diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 42e4a6ad7..8f279df19 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -101,6 +101,7 @@ def __init__( ".prettierrc.yml", ".github/workflows/fix-linting.yml", ], + "gitpod": [".gitpod.yml"], } # Get list of files we're skipping with the supplied skip keys self.skip_paths = set(sp for k in skip_paths for sp in skippable_paths[k]) @@ -210,6 +211,7 @@ def obtain_jinja_params_dict(self, features_to_skip, pipeline_dir): "igenomes": {"file": True, "content": True}, "nf_core_configs": {"file": False, "content": True}, "code_linters": {"file": True, "content": True}, + "gitpod": {"file": True, "content": True}, } # Set the parameters for the jinja template diff --git a/nf_core/pipelines/create/custompipeline.py b/nf_core/pipelines/create/custompipeline.py index 0925fb5ba..e83904218 100644 --- 
a/nf_core/pipelines/create/custompipeline.py +++ b/nf_core/pipelines/create/custompipeline.py @@ -51,6 +51,13 @@ - prettier (https://github.com/prettier/prettier): enforces a consistent style (indentation, quoting, line length, etc). """ +markdown_gitpod = """ +Gitpod (https://www.gitpod.io/) provides standardized and automated development environments. + +Including this to your pipeline will provide an environment with the latest version of nf-core/tools installed and all its requirements. +This is useful to have all the tools ready for pipeline development. +""" + class CustomPipeline(Screen): """Select if the pipeline will use genomic data.""" @@ -96,6 +103,12 @@ def compose(self) -> ComposeResult: "The pipeline will include code linters and CI tests to lint your code: pre-commit, editor-config and prettier.", "code_linters", ), + PipelineFeature( + markdown_gitpod, + "Include a gitpod environment", + "Include the configuration required to use Gitpod.", + "gitpod", + ), classes="features-container", ) yield Center( diff --git a/tests/data/pipeline_create_template_skip.yml b/tests/data/pipeline_create_template_skip.yml index d9ef405c5..c18d810cd 100644 --- a/tests/data/pipeline_create_template_skip.yml +++ b/tests/data/pipeline_create_template_skip.yml @@ -12,3 +12,4 @@ skip_features: - igenomes - nf_core_configs - code_linters + - gitpod From 997468d81855989fdd95c04e38955ee594bf6335 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Mon, 5 Aug 2024 11:15:26 +0000 Subject: [PATCH 425/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index aea91cb51..df11c6416 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,7 @@ - Use filename in code block for `params.yml` ([#3055](https://github.com/nf-core/tools/pull/3055)) - Remove release announcement for non nf-core pipelines ([#3072](https://github.com/nf-core/tools/pull/3072)) - add option to exclude code linters for custom pipeline 
template ([#3084](https://github.com/nf-core/tools/pull/3084)) +- add option to exclude gitpod for custom pipeline template ([#3100](https://github.com/nf-core/tools/pull/3100)) ### Linting From 51f37561bf9555202803c23494959f8cec7e1ae3 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 5 Aug 2024 15:27:13 +0200 Subject: [PATCH 426/737] add option to exclude citations for custom pipeline template --- .../create-test-lint-wf-template.yml | 5 + nf_core/pipeline-template/nextflow.config | 3 +- .../pipeline-template/nextflow_schema.json | 4 +- .../pipeline-template/workflows/pipeline.nf | 4 + nf_core/pipelines/create/create.py | 2 + nf_core/pipelines/create/custompipeline.py | 13 + tests/__snapshots__/test_create_app.ambr | 511 +++++++++--------- tests/data/pipeline_create_template_skip.yml | 1 + 8 files changed, 284 insertions(+), 259 deletions(-) diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index cfa0a5007..818c20616 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -42,6 +42,7 @@ jobs: - "template_skip_igenomes.yml" - "template_skip_ci.yml" - "template_skip_code_linters.yml" + - "template_skip_citations.yml" runner: # use the runner given by the input if it is dispatched manually, run on github if it is a rerun or on self-hosted by default - ${{ github.event.inputs.runners || github.run_number > 1 && 'ubuntu-latest' || 'self-hosted' }} @@ -107,6 +108,10 @@ jobs: run: | printf "org: my-prefix\nskip: code_linters" > create-test-lint-wf/template_skip_code_linters.yml + - name: Create template skip citations + run: | + printf "org: my-prefix\nskip: citations" > create-test-lint-wf/template_skip_citations.yml + # Create a pipeline from the template - name: create a pipeline from the template ${{ matrix.TEMPLATE }} run: | diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index 
2e6a56b00..f622205a1 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -25,7 +25,8 @@ params { multiqc_title = null multiqc_logo = null max_multiqc_email_size = '25.MB' - multiqc_methods_description = null + {%- if citations %} + multiqc_methods_description = null{% endif %} // Boilerplate options outdir = null diff --git a/nf_core/pipeline-template/nextflow_schema.json b/nf_core/pipeline-template/nextflow_schema.json index 18bad71b7..0bd444367 100644 --- a/nf_core/pipeline-template/nextflow_schema.json +++ b/nf_core/pipeline-template/nextflow_schema.json @@ -234,12 +234,12 @@ "description": "Custom logo file to supply to MultiQC. File name must also be set in the MultiQC config file", "fa_icon": "fas fa-image", "hidden": true - }, + },{% if citations %} "multiqc_methods_description": { "type": "string", "description": "Custom MultiQC yaml file containing HTML including a methods description.", "fa_icon": "fas fa-cog" - }, + },{% endif %} "validate_params": { "type": "boolean", "description": "Boolean whether to validate parameters against the schema at runtime", diff --git a/nf_core/pipeline-template/workflows/pipeline.nf b/nf_core/pipeline-template/workflows/pipeline.nf index de0f21fe3..765b97c68 100644 --- a/nf_core/pipeline-template/workflows/pipeline.nf +++ b/nf_core/pipeline-template/workflows/pipeline.nf @@ -63,21 +63,25 @@ workflow {{ short_name|upper }} { workflow, parameters_schema: "nextflow_schema.json") ch_workflow_summary = Channel.value(paramsSummaryMultiqc(summary_params)) + {%- if citations %} ch_multiqc_custom_methods_description = params.multiqc_methods_description ? 
file(params.multiqc_methods_description, checkIfExists: true) : file("$projectDir/assets/methods_description_template.yml", checkIfExists: true) ch_methods_description = Channel.value( methodsDescriptionText(ch_multiqc_custom_methods_description)) + {%- endif %} ch_multiqc_files = ch_multiqc_files.mix( ch_workflow_summary.collectFile(name: 'workflow_summary_mqc.yaml')) ch_multiqc_files = ch_multiqc_files.mix(ch_collated_versions) + {%- if citations %} ch_multiqc_files = ch_multiqc_files.mix( ch_methods_description.collectFile( name: 'methods_description_mqc.yaml', sort: true ) ) + {%- endif %} MULTIQC ( ch_multiqc_files.collect(), diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 42e4a6ad7..86ca1763f 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -101,6 +101,7 @@ def __init__( ".prettierrc.yml", ".github/workflows/fix-linting.yml", ], + "citations": ["CITATIONS.md", "assets/methods_description_template.yml"], } # Get list of files we're skipping with the supplied skip keys self.skip_paths = set(sp for k in skip_paths for sp in skippable_paths[k]) @@ -210,6 +211,7 @@ def obtain_jinja_params_dict(self, features_to_skip, pipeline_dir): "igenomes": {"file": True, "content": True}, "nf_core_configs": {"file": False, "content": True}, "code_linters": {"file": True, "content": True}, + "citations": {"file": True, "content": True}, } # Set the parameters for the jinja template diff --git a/nf_core/pipelines/create/custompipeline.py b/nf_core/pipelines/create/custompipeline.py index 0925fb5ba..1dd613484 100644 --- a/nf_core/pipelines/create/custompipeline.py +++ b/nf_core/pipelines/create/custompipeline.py @@ -51,6 +51,13 @@ - prettier (https://github.com/prettier/prettier): enforces a consistent style (indentation, quoting, line length, etc). 
""" +markdown_citations = """ +If adding citations, the pipeline template will contain a `CITATIONS.md` file to add the citations of all tools used in the pipeline. + +Additionally, it will include a YAML file (`assets/methods_description_template.yml`) to describe a Materials & Methods section describing the tools used in the pieline, +and the logics to add this section to the output MultiQC report (if the report is generated). +""" + class CustomPipeline(Screen): """Select if the pipeline will use genomic data.""" @@ -96,6 +103,12 @@ def compose(self) -> ComposeResult: "The pipeline will include code linters and CI tests to lint your code: pre-commit, editor-config and prettier.", "code_linters", ), + PipelineFeature( + markdown_citations, + "Include citations", + "Include a CITATIONS.md file to add pipeline tools citations.", + "citations", + ), classes="features-container", ) yield Center( diff --git a/tests/__snapshots__/test_create_app.ambr b/tests/__snapshots__/test_create_app.ambr index 5e5b005de..531ea098b 100644 --- a/tests/__snapshots__/test_create_app.ambr +++ b/tests/__snapshots__/test_create_app.ambr @@ -851,257 +851,257 @@ font-weight: 700; } - .terminal-2778615119-matrix { + .terminal-4194849765-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2778615119-title { + .terminal-4194849765-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2778615119-r1 { fill: #c5c8c6 } - .terminal-2778615119-r2 { fill: #e3e3e3 } - .terminal-2778615119-r3 { fill: #989898 } - .terminal-2778615119-r4 { fill: #e1e1e1 } - .terminal-2778615119-r5 { fill: #4ebf71;font-weight: bold } - .terminal-2778615119-r6 { fill: #1e1e1e } - .terminal-2778615119-r7 { fill: #0178d4 } - .terminal-2778615119-r8 { fill: #454a50 } - .terminal-2778615119-r9 { fill: #e2e2e2 } - .terminal-2778615119-r10 { fill: #808080 } - .terminal-2778615119-r11 { fill: #e2e3e3;font-weight: bold } - 
.terminal-2778615119-r12 { fill: #000000 } - .terminal-2778615119-r13 { fill: #e4e4e4 } - .terminal-2778615119-r14 { fill: #14191f } - .terminal-2778615119-r15 { fill: #507bb3 } - .terminal-2778615119-r16 { fill: #dde6ed;font-weight: bold } - .terminal-2778615119-r17 { fill: #001541 } - .terminal-2778615119-r18 { fill: #7ae998 } - .terminal-2778615119-r19 { fill: #0a180e;font-weight: bold } - .terminal-2778615119-r20 { fill: #008139 } - .terminal-2778615119-r21 { fill: #fea62b;font-weight: bold } - .terminal-2778615119-r22 { fill: #a7a9ab } - .terminal-2778615119-r23 { fill: #e2e3e3 } + .terminal-4194849765-r1 { fill: #c5c8c6 } + .terminal-4194849765-r2 { fill: #e3e3e3 } + .terminal-4194849765-r3 { fill: #989898 } + .terminal-4194849765-r4 { fill: #e1e1e1 } + .terminal-4194849765-r5 { fill: #4ebf71;font-weight: bold } + .terminal-4194849765-r6 { fill: #1e1e1e } + .terminal-4194849765-r7 { fill: #0178d4 } + .terminal-4194849765-r8 { fill: #454a50 } + .terminal-4194849765-r9 { fill: #e2e2e2 } + .terminal-4194849765-r10 { fill: #808080 } + .terminal-4194849765-r11 { fill: #e2e3e3;font-weight: bold } + .terminal-4194849765-r12 { fill: #000000 } + .terminal-4194849765-r13 { fill: #e4e4e4 } + .terminal-4194849765-r14 { fill: #14191f } + .terminal-4194849765-r15 { fill: #507bb3 } + .terminal-4194849765-r16 { fill: #dde6ed;font-weight: bold } + .terminal-4194849765-r17 { fill: #001541 } + .terminal-4194849765-r18 { fill: #7ae998 } + .terminal-4194849765-r19 { fill: #0a180e;font-weight: bold } + .terminal-4194849765-r20 { fill: #008139 } + .terminal-4194849765-r21 { fill: #fea62b;font-weight: bold } + .terminal-4194849765-r22 { fill: #a7a9ab } + .terminal-4194849765-r23 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the 
nf-core pipeline template - - - Template features - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use reference The pipeline will  Hide help  - ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - use a copy of the  - most common  - reference genome  - files from  - iGenomes - - - Nf-core pipelines are configured to use a copy of the most common  - reference genome files. - - By selecting this option, your pipeline will include a configuration - file specifying the paths to these files. - - The required code to use these files will also be included in the  - template. When the pipeline user provides an appropriate genome key, - the pipeline will automatically download the required reference ▂▂ - files. - - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github CI The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        testsinclude several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - GitHub actions for - Continuous  - Integration (CI)  - testing - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github badgesThe README.md file Show help  - ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - will include  - GitHub badges - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Continue  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode  q Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Template features + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use reference The pipeline will  Hide help  + ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + use a copy of the  + most common  + reference genome  + files from  + iGenomes + + + Nf-core pipelines are configured to use a copy of the most common  + reference genome files. + + By selecting this option, your pipeline will include a configuration + file specifying the paths to these files. + + The required code to use these files will also be included in the  + template. When the pipeline user provides an appropriate genome key, + the pipeline will automatically download the required reference ▂▂ + files. 
+ + + ▃▃ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github CI The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        testsinclude several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + GitHub actions for + Continuous  + Integration (CI)  + testing + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github badgesThe README.md file Show help  + ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + will include  + GitHub badges + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Back  Continue  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  d Toggle dark mode  q Quit  @@ -2233,255 +2233,254 @@ font-weight: 700; } - .terminal-763408100-matrix { + .terminal-423998432-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-763408100-title { + .terminal-423998432-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-763408100-r1 { fill: #c5c8c6 } - .terminal-763408100-r2 { fill: #e3e3e3 } - .terminal-763408100-r3 { fill: #989898 } - .terminal-763408100-r4 { fill: #e1e1e1 } - .terminal-763408100-r5 { fill: #4ebf71;font-weight: bold } - .terminal-763408100-r6 { fill: #1e1e1e } - .terminal-763408100-r7 { fill: #507bb3 } - .terminal-763408100-r8 { fill: #e2e2e2 } - .terminal-763408100-r9 { fill: #808080 } - .terminal-763408100-r10 { fill: #dde6ed;font-weight: bold } - .terminal-763408100-r11 { fill: #001541 } - .terminal-763408100-r12 { fill: #14191f } - .terminal-763408100-r13 { fill: #454a50 } - .terminal-763408100-r14 { fill: #7ae998 } - .terminal-763408100-r15 { fill: #e2e3e3;font-weight: bold } - .terminal-763408100-r16 { fill: #0a180e;font-weight: bold } - .terminal-763408100-r17 { fill: #000000 } - .terminal-763408100-r18 { fill: #008139 } - .terminal-763408100-r19 { fill: #fea62b;font-weight: bold } - .terminal-763408100-r20 { fill: #a7a9ab } - .terminal-763408100-r21 { fill: #e2e3e3 } + .terminal-423998432-r1 { fill: #c5c8c6 } + .terminal-423998432-r2 { fill: #e3e3e3 } + .terminal-423998432-r3 { fill: #989898 } + 
.terminal-423998432-r4 { fill: #e1e1e1 } + .terminal-423998432-r5 { fill: #4ebf71;font-weight: bold } + .terminal-423998432-r6 { fill: #1e1e1e } + .terminal-423998432-r7 { fill: #507bb3 } + .terminal-423998432-r8 { fill: #e2e2e2 } + .terminal-423998432-r9 { fill: #808080 } + .terminal-423998432-r10 { fill: #dde6ed;font-weight: bold } + .terminal-423998432-r11 { fill: #001541 } + .terminal-423998432-r12 { fill: #454a50 } + .terminal-423998432-r13 { fill: #7ae998 } + .terminal-423998432-r14 { fill: #e2e3e3;font-weight: bold } + .terminal-423998432-r15 { fill: #0a180e;font-weight: bold } + .terminal-423998432-r16 { fill: #000000 } + .terminal-423998432-r17 { fill: #008139 } + .terminal-423998432-r18 { fill: #fea62b;font-weight: bold } + .terminal-423998432-r19 { fill: #a7a9ab } + .terminal-423998432-r20 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Template features - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use reference The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - use a copy of the  - most common  - reference genome  - files from  - iGenomes - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github CI The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        testsinclude several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - GitHub actions for - Continuous  - Integration (CI)  - testing - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github badgesThe README.md file Show help  - ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - will include  - GitHub badges - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add configuration         The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        filesinclude ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - configuration  - profiles  - containing custom  - parameters  - requried to run  - nf-core 
pipelines  - at different ▁▁ - institutions - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use code lintersThe pipeline will  Show help  - ▁▁▁▁▁▁▁▁include code ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Continue  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode  q Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Template features + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use reference The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + use a copy of the  + most common  + reference genome  + files from  + iGenomes + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github CI The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        testsinclude several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + GitHub actions for + Continuous  + Integration (CI)  + testing + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github badgesThe README.md file Show help  + ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + will include  + GitHub badges + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add configuration         The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        filesinclude ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + configuration  + profiles  + containing custom  + parameters  + requried to run  + nf-core pipelines  + at different  + institutions + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use code lintersThe pipeline will  Show help  + ▁▁▁▁▁▁▁▁include code ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Back  Continue  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  d Toggle dark mode  q Quit  diff --git a/tests/data/pipeline_create_template_skip.yml b/tests/data/pipeline_create_template_skip.yml index d9ef405c5..06e01cc7f 100644 --- a/tests/data/pipeline_create_template_skip.yml +++ b/tests/data/pipeline_create_template_skip.yml @@ -12,3 +12,4 @@ skip_features: - igenomes - nf_core_configs - code_linters + - citations From 74bd318ef0785108f122bc1f14f4d9d668f57b35 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Mon, 5 Aug 2024 13:29:41 +0000 Subject: [PATCH 427/737] 
[automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index aea91cb51..a723bd328 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,7 @@ - Use filename in code block for `params.yml` ([#3055](https://github.com/nf-core/tools/pull/3055)) - Remove release announcement for non nf-core pipelines ([#3072](https://github.com/nf-core/tools/pull/3072)) - add option to exclude code linters for custom pipeline template ([#3084](https://github.com/nf-core/tools/pull/3084)) +- add option to exclude citations for custom pipeline template ([#3101](https://github.com/nf-core/tools/pull/3101)) ### Linting From ed4554d7c4a37341a034bb227c4a02953a88ce05 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 5 Aug 2024 15:32:04 +0200 Subject: [PATCH 428/737] update snapshots --- tests/__snapshots__/test_create_app.ambr | 511 +++++++++++------------ 1 file changed, 255 insertions(+), 256 deletions(-) diff --git a/tests/__snapshots__/test_create_app.ambr b/tests/__snapshots__/test_create_app.ambr index 5e5b005de..531ea098b 100644 --- a/tests/__snapshots__/test_create_app.ambr +++ b/tests/__snapshots__/test_create_app.ambr @@ -851,257 +851,257 @@ font-weight: 700; } - .terminal-2778615119-matrix { + .terminal-4194849765-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2778615119-title { + .terminal-4194849765-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2778615119-r1 { fill: #c5c8c6 } - .terminal-2778615119-r2 { fill: #e3e3e3 } - .terminal-2778615119-r3 { fill: #989898 } - .terminal-2778615119-r4 { fill: #e1e1e1 } - .terminal-2778615119-r5 { fill: #4ebf71;font-weight: bold } - .terminal-2778615119-r6 { fill: #1e1e1e } - .terminal-2778615119-r7 { fill: #0178d4 } - .terminal-2778615119-r8 { fill: #454a50 } - .terminal-2778615119-r9 { fill: #e2e2e2 } - .terminal-2778615119-r10 { fill: #808080 } - 
.terminal-2778615119-r11 { fill: #e2e3e3;font-weight: bold } - .terminal-2778615119-r12 { fill: #000000 } - .terminal-2778615119-r13 { fill: #e4e4e4 } - .terminal-2778615119-r14 { fill: #14191f } - .terminal-2778615119-r15 { fill: #507bb3 } - .terminal-2778615119-r16 { fill: #dde6ed;font-weight: bold } - .terminal-2778615119-r17 { fill: #001541 } - .terminal-2778615119-r18 { fill: #7ae998 } - .terminal-2778615119-r19 { fill: #0a180e;font-weight: bold } - .terminal-2778615119-r20 { fill: #008139 } - .terminal-2778615119-r21 { fill: #fea62b;font-weight: bold } - .terminal-2778615119-r22 { fill: #a7a9ab } - .terminal-2778615119-r23 { fill: #e2e3e3 } + .terminal-4194849765-r1 { fill: #c5c8c6 } + .terminal-4194849765-r2 { fill: #e3e3e3 } + .terminal-4194849765-r3 { fill: #989898 } + .terminal-4194849765-r4 { fill: #e1e1e1 } + .terminal-4194849765-r5 { fill: #4ebf71;font-weight: bold } + .terminal-4194849765-r6 { fill: #1e1e1e } + .terminal-4194849765-r7 { fill: #0178d4 } + .terminal-4194849765-r8 { fill: #454a50 } + .terminal-4194849765-r9 { fill: #e2e2e2 } + .terminal-4194849765-r10 { fill: #808080 } + .terminal-4194849765-r11 { fill: #e2e3e3;font-weight: bold } + .terminal-4194849765-r12 { fill: #000000 } + .terminal-4194849765-r13 { fill: #e4e4e4 } + .terminal-4194849765-r14 { fill: #14191f } + .terminal-4194849765-r15 { fill: #507bb3 } + .terminal-4194849765-r16 { fill: #dde6ed;font-weight: bold } + .terminal-4194849765-r17 { fill: #001541 } + .terminal-4194849765-r18 { fill: #7ae998 } + .terminal-4194849765-r19 { fill: #0a180e;font-weight: bold } + .terminal-4194849765-r20 { fill: #008139 } + .terminal-4194849765-r21 { fill: #fea62b;font-weight: bold } + .terminal-4194849765-r22 { fill: #a7a9ab } + .terminal-4194849765-r23 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core 
create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Template features - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use reference The pipeline will  Hide help  - ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - use a copy of the  - most common  - reference genome  - files from  - iGenomes - - - Nf-core pipelines are configured to use a copy of the most common  - reference genome files. - - By selecting this option, your pipeline will include a configuration - file specifying the paths to these files. - - The required code to use these files will also be included in the  - template. When the pipeline user provides an appropriate genome key, - the pipeline will automatically download the required reference ▂▂ - files. - - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github CI The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        testsinclude several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - GitHub actions for - Continuous  - Integration (CI)  - testing - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github badgesThe README.md file Show help  - ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - will include  - GitHub badges - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Continue  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode  q Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Template features + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use reference The pipeline will  Hide help  + ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + use a copy of the  + most common  + reference genome  + files from  + iGenomes + + + Nf-core pipelines are configured to use a copy of the most common  + reference genome files. + + By selecting this option, your pipeline will include a configuration + file specifying the paths to these files. + + The required code to use these files will also be included in the  + template. 
When the pipeline user provides an appropriate genome key, + the pipeline will automatically download the required reference ▂▂ + files. + + + ▃▃ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github CI The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        testsinclude several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + GitHub actions for + Continuous  + Integration (CI)  + testing + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github badgesThe README.md file Show help  + ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + will include  + GitHub badges + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Back  Continue  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  d Toggle dark mode  q Quit  @@ -2233,255 +2233,254 @@ font-weight: 700; } - .terminal-763408100-matrix { + .terminal-423998432-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-763408100-title { + .terminal-423998432-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-763408100-r1 { fill: #c5c8c6 } - .terminal-763408100-r2 { fill: #e3e3e3 } - .terminal-763408100-r3 { fill: #989898 } - .terminal-763408100-r4 { fill: #e1e1e1 } - .terminal-763408100-r5 { fill: #4ebf71;font-weight: bold } - .terminal-763408100-r6 { fill: #1e1e1e } - .terminal-763408100-r7 { fill: #507bb3 } - .terminal-763408100-r8 { fill: #e2e2e2 } - .terminal-763408100-r9 { fill: #808080 } - .terminal-763408100-r10 { fill: #dde6ed;font-weight: bold } - .terminal-763408100-r11 { fill: #001541 } - .terminal-763408100-r12 { fill: #14191f } - .terminal-763408100-r13 { fill: #454a50 } - .terminal-763408100-r14 { fill: #7ae998 } - .terminal-763408100-r15 { fill: #e2e3e3;font-weight: bold } - .terminal-763408100-r16 { fill: #0a180e;font-weight: bold } - .terminal-763408100-r17 { fill: #000000 } - .terminal-763408100-r18 { fill: #008139 } - .terminal-763408100-r19 { fill: #fea62b;font-weight: bold } - .terminal-763408100-r20 { fill: #a7a9ab } - .terminal-763408100-r21 { fill: 
#e2e3e3 } + .terminal-423998432-r1 { fill: #c5c8c6 } + .terminal-423998432-r2 { fill: #e3e3e3 } + .terminal-423998432-r3 { fill: #989898 } + .terminal-423998432-r4 { fill: #e1e1e1 } + .terminal-423998432-r5 { fill: #4ebf71;font-weight: bold } + .terminal-423998432-r6 { fill: #1e1e1e } + .terminal-423998432-r7 { fill: #507bb3 } + .terminal-423998432-r8 { fill: #e2e2e2 } + .terminal-423998432-r9 { fill: #808080 } + .terminal-423998432-r10 { fill: #dde6ed;font-weight: bold } + .terminal-423998432-r11 { fill: #001541 } + .terminal-423998432-r12 { fill: #454a50 } + .terminal-423998432-r13 { fill: #7ae998 } + .terminal-423998432-r14 { fill: #e2e3e3;font-weight: bold } + .terminal-423998432-r15 { fill: #0a180e;font-weight: bold } + .terminal-423998432-r16 { fill: #000000 } + .terminal-423998432-r17 { fill: #008139 } + .terminal-423998432-r18 { fill: #fea62b;font-weight: bold } + .terminal-423998432-r19 { fill: #a7a9ab } + .terminal-423998432-r20 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Template features - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use reference The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - use a copy of the  - most common  - reference genome  - files from  - iGenomes - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github CI The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        testsinclude several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - GitHub actions for - Continuous  - Integration (CI)  - testing - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github badgesThe README.md file Show help  - ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - will include  - GitHub badges - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add configuration         The pipeline will  Show help  - 
▁▁▁▁▁▁▁▁        filesinclude ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - configuration  - profiles  - containing custom  - parameters  - requried to run  - nf-core pipelines  - at different ▁▁ - institutions - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use code lintersThe pipeline will  Show help  - ▁▁▁▁▁▁▁▁include code ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Continue  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode  q Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Template features + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use reference The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + use a copy of the  + most common  + reference genome  + files from  + iGenomes + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github CI The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        testsinclude several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + GitHub actions for + Continuous  + Integration (CI)  + testing + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github badgesThe README.md file Show help  + ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + will include  + GitHub badges + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add configuration         The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        filesinclude ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + configuration  + profiles  + containing custom  + parameters  + requried to run  + nf-core pipelines  + at different  + institutions + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use code lintersThe pipeline will  Show help  + ▁▁▁▁▁▁▁▁include code ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Back  Continue  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  d Toggle dark mode  q Quit  From 9182530d2a4356fa2924798b5aa2025fed793dd0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Mon, 5 Aug 2024 16:13:37 +0200 Subject: [PATCH 429/737] Apply suggestions from code review MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Matthias Hörtenhuber --- 
nf_core/pipelines/create/custompipeline.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/pipelines/create/custompipeline.py b/nf_core/pipelines/create/custompipeline.py index 1dd613484..d5bcf04bc 100644 --- a/nf_core/pipelines/create/custompipeline.py +++ b/nf_core/pipelines/create/custompipeline.py @@ -54,7 +54,7 @@ markdown_citations = """ If adding citations, the pipeline template will contain a `CITATIONS.md` file to add the citations of all tools used in the pipeline. -Additionally, it will include a YAML file (`assets/methods_description_template.yml`) to describe a Materials & Methods section describing the tools used in the pieline, +Additionally, it will include a YAML file (`assets/methods_description_template.yml`) to add a Materials & Methods section describing the tools used in the pieline, and the logics to add this section to the output MultiQC report (if the report is generated). """ @@ -106,7 +106,7 @@ def compose(self) -> ComposeResult: PipelineFeature( markdown_citations, "Include citations", - "Include a CITATIONS.md file to add pipeline tools citations.", + "Include pipeline tools citations in CITATIONS.md and a method description in the MultiQC report (if enabled).", "citations", ), classes="features-container", From b0d07609507ef60ba79179e810b0cac1f308108a Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 5 Aug 2024 16:23:54 +0200 Subject: [PATCH 430/737] exclude content of CITATIONS.md instead of the whole file --- nf_core/pipeline-template/CITATIONS.md | 3 +++ nf_core/pipeline-template/README.md | 4 ++-- .../subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf | 2 ++ nf_core/pipeline-template/workflows/pipeline.nf | 2 +- nf_core/pipelines/create/create.py | 2 +- 5 files changed, 9 insertions(+), 4 deletions(-) diff --git a/nf_core/pipeline-template/CITATIONS.md b/nf_core/pipeline-template/CITATIONS.md index 6edf8f620..c0c072927 100644 --- a/nf_core/pipeline-template/CITATIONS.md +++ 
b/nf_core/pipeline-template/CITATIONS.md @@ -8,6 +8,8 @@ > Di Tommaso P, Chatzou M, Floden EW, Barja PP, Palumbo E, Notredame C. Nextflow enables reproducible computational workflows. Nat Biotechnol. 2017 Apr 11;35(4):316-319. doi: 10.1038/nbt.3820. PubMed PMID: 28398311. +{%- if citations %} + ## Pipeline tools - [FastQC](https://www.bioinformatics.babraham.ac.uk/projects/fastqc/) @@ -39,3 +41,4 @@ - [Singularity](https://pubmed.ncbi.nlm.nih.gov/28494014/) > Kurtzer GM, Sochat V, Bauer MW. Singularity: Scientific containers for mobility of compute. PLoS One. 2017 May 11;12(5):e0177459. doi: 10.1371/journal.pone.0177459. eCollection 2017. PubMed PMID: 28494014; PubMed Central PMCID: PMC5426675. + > {%- endif %} diff --git a/nf_core/pipeline-template/README.md b/nf_core/pipeline-template/README.md index feece399b..4efa88d5f 100644 --- a/nf_core/pipeline-template/README.md +++ b/nf_core/pipeline-template/README.md @@ -116,10 +116,10 @@ For further information or help, don't hesitate to get in touch on the [Slack `# - +{% if citations %} An extensive list of references for the tools used by the pipeline can be found in the [`CITATIONS.md`](CITATIONS.md) file. 
- +{% endif %} {% if is_nfcore -%} You can cite the `nf-core` publication as follows: diff --git a/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf index a4bfb9f8b..0c2e1d8b0 100644 --- a/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf +++ b/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf @@ -207,6 +207,7 @@ def genomeExistsError() { } {%- endif %} +{%- if citations %} // // Generate methods description for MultiQC // @@ -270,3 +271,4 @@ def methodsDescriptionText(mqc_methods_yaml) { return description_html.toString() } +{% endif %} diff --git a/nf_core/pipeline-template/workflows/pipeline.nf b/nf_core/pipeline-template/workflows/pipeline.nf index 765b97c68..8ce3637e4 100644 --- a/nf_core/pipeline-template/workflows/pipeline.nf +++ b/nf_core/pipeline-template/workflows/pipeline.nf @@ -9,7 +9,7 @@ include { MULTIQC } from '../modules/nf-core/multiqc/main' include { paramsSummaryMap } from 'plugin/nf-validation' include { paramsSummaryMultiqc } from '../subworkflows/nf-core/utils_nfcore_pipeline' include { softwareVersionsToYAML } from '../subworkflows/nf-core/utils_nfcore_pipeline' -include { methodsDescriptionText } from '../subworkflows/local/utils_nfcore_{{ short_name }}_pipeline' +{% if citations %}include { methodsDescriptionText } from '../subworkflows/local/utils_nfcore_{{ short_name }}_pipeline'{% endif %} /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 86ca1763f..3adeec60b 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -101,7 +101,7 @@ def __init__( ".prettierrc.yml", ".github/workflows/fix-linting.yml", ], - "citations": ["CITATIONS.md", "assets/methods_description_template.yml"], + 
"citations": ["assets/methods_description_template.yml"], } # Get list of files we're skipping with the supplied skip keys self.skip_paths = set(sp for k in skip_paths for sp in skippable_paths[k]) From 138ecb793ab57f34b7fb124a81df04512f0b36fd Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 5 Aug 2024 16:58:50 +0200 Subject: [PATCH 431/737] update snapshot --- tests/__snapshots__/test_create_app.ambr | 511 ++++++++++++----------- 1 file changed, 256 insertions(+), 255 deletions(-) diff --git a/tests/__snapshots__/test_create_app.ambr b/tests/__snapshots__/test_create_app.ambr index 531ea098b..940a87583 100644 --- a/tests/__snapshots__/test_create_app.ambr +++ b/tests/__snapshots__/test_create_app.ambr @@ -851,257 +851,257 @@ font-weight: 700; } - .terminal-4194849765-matrix { + .terminal-2433570258-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-4194849765-title { + .terminal-2433570258-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-4194849765-r1 { fill: #c5c8c6 } - .terminal-4194849765-r2 { fill: #e3e3e3 } - .terminal-4194849765-r3 { fill: #989898 } - .terminal-4194849765-r4 { fill: #e1e1e1 } - .terminal-4194849765-r5 { fill: #4ebf71;font-weight: bold } - .terminal-4194849765-r6 { fill: #1e1e1e } - .terminal-4194849765-r7 { fill: #0178d4 } - .terminal-4194849765-r8 { fill: #454a50 } - .terminal-4194849765-r9 { fill: #e2e2e2 } - .terminal-4194849765-r10 { fill: #808080 } - .terminal-4194849765-r11 { fill: #e2e3e3;font-weight: bold } - .terminal-4194849765-r12 { fill: #000000 } - .terminal-4194849765-r13 { fill: #e4e4e4 } - .terminal-4194849765-r14 { fill: #14191f } - .terminal-4194849765-r15 { fill: #507bb3 } - .terminal-4194849765-r16 { fill: #dde6ed;font-weight: bold } - .terminal-4194849765-r17 { fill: #001541 } - .terminal-4194849765-r18 { fill: #7ae998 } - .terminal-4194849765-r19 { fill: #0a180e;font-weight: bold } - .terminal-4194849765-r20 
{ fill: #008139 } - .terminal-4194849765-r21 { fill: #fea62b;font-weight: bold } - .terminal-4194849765-r22 { fill: #a7a9ab } - .terminal-4194849765-r23 { fill: #e2e3e3 } + .terminal-2433570258-r1 { fill: #c5c8c6 } + .terminal-2433570258-r2 { fill: #e3e3e3 } + .terminal-2433570258-r3 { fill: #989898 } + .terminal-2433570258-r4 { fill: #e1e1e1 } + .terminal-2433570258-r5 { fill: #4ebf71;font-weight: bold } + .terminal-2433570258-r6 { fill: #1e1e1e } + .terminal-2433570258-r7 { fill: #0178d4 } + .terminal-2433570258-r8 { fill: #454a50 } + .terminal-2433570258-r9 { fill: #e2e2e2 } + .terminal-2433570258-r10 { fill: #808080 } + .terminal-2433570258-r11 { fill: #e2e3e3;font-weight: bold } + .terminal-2433570258-r12 { fill: #000000 } + .terminal-2433570258-r13 { fill: #e4e4e4 } + .terminal-2433570258-r14 { fill: #14191f } + .terminal-2433570258-r15 { fill: #507bb3 } + .terminal-2433570258-r16 { fill: #dde6ed;font-weight: bold } + .terminal-2433570258-r17 { fill: #001541 } + .terminal-2433570258-r18 { fill: #7ae998 } + .terminal-2433570258-r19 { fill: #0a180e;font-weight: bold } + .terminal-2433570258-r20 { fill: #008139 } + .terminal-2433570258-r21 { fill: #fea62b;font-weight: bold } + .terminal-2433570258-r22 { fill: #a7a9ab } + .terminal-2433570258-r23 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Template features - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use reference The pipeline will  Hide help  - ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - use a copy of the  - most common  - reference genome  - files from  - iGenomes - - - Nf-core pipelines are configured to use a copy of the most common  - reference genome files. 
- - By selecting this option, your pipeline will include a configuration - file specifying the paths to these files. - - The required code to use these files will also be included in the  - template. When the pipeline user provides an appropriate genome key, - the pipeline will automatically download the required reference ▂▂ - files. - - - ▃▃ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github CI The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        testsinclude several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - GitHub actions for - Continuous  - Integration (CI)  - testing - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github badgesThe README.md file Show help  - ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - will include  - GitHub badges - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Continue  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode  q Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Template features + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use reference The pipeline will  Hide help  + ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + use a copy of the  + most common  + reference genome  + files from  + iGenomes + + + Nf-core pipelines are configured to use a copy of the most common  + reference genome files. + + By selecting this option, your pipeline will include a configuration + file specifying the paths to these files. + + The required code to use these files will also be included in the  + template. When the pipeline user provides an appropriate genome key, + the pipeline will automatically download the required reference ▂▂ + files. 
+ + ▃▃ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github CI The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        testsinclude several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + GitHub actions for + Continuous  + Integration (CI)  + testing + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github badgesThe README.md file Show help  + ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + will include  + GitHub badges + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Back  Continue  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  d Toggle dark mode  q Quit  @@ -2233,254 +2233,255 @@ font-weight: 700; } - .terminal-423998432-matrix { + .terminal-1630976143-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-423998432-title { + .terminal-1630976143-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-423998432-r1 { fill: #c5c8c6 } - .terminal-423998432-r2 { fill: #e3e3e3 } - .terminal-423998432-r3 { fill: #989898 } - .terminal-423998432-r4 { fill: #e1e1e1 } - .terminal-423998432-r5 { fill: #4ebf71;font-weight: bold } - .terminal-423998432-r6 { fill: #1e1e1e } - .terminal-423998432-r7 { fill: #507bb3 } - .terminal-423998432-r8 { fill: #e2e2e2 } - .terminal-423998432-r9 { fill: #808080 } - .terminal-423998432-r10 { fill: #dde6ed;font-weight: bold } - .terminal-423998432-r11 { fill: #001541 } - .terminal-423998432-r12 { fill: #454a50 } - .terminal-423998432-r13 { fill: #7ae998 } - .terminal-423998432-r14 { fill: #e2e3e3;font-weight: bold } - .terminal-423998432-r15 { fill: #0a180e;font-weight: bold } - .terminal-423998432-r16 { fill: #000000 } - .terminal-423998432-r17 { fill: #008139 } - .terminal-423998432-r18 { fill: #fea62b;font-weight: bold } - .terminal-423998432-r19 { fill: #a7a9ab } - .terminal-423998432-r20 { fill: #e2e3e3 } + .terminal-1630976143-r1 { fill: #c5c8c6 } + .terminal-1630976143-r2 { fill: #e3e3e3 } + .terminal-1630976143-r3 { fill: #989898 } + .terminal-1630976143-r4 { fill: #e1e1e1 
} + .terminal-1630976143-r5 { fill: #4ebf71;font-weight: bold } + .terminal-1630976143-r6 { fill: #1e1e1e } + .terminal-1630976143-r7 { fill: #507bb3 } + .terminal-1630976143-r8 { fill: #e2e2e2 } + .terminal-1630976143-r9 { fill: #808080 } + .terminal-1630976143-r10 { fill: #dde6ed;font-weight: bold } + .terminal-1630976143-r11 { fill: #001541 } + .terminal-1630976143-r12 { fill: #14191f } + .terminal-1630976143-r13 { fill: #454a50 } + .terminal-1630976143-r14 { fill: #7ae998 } + .terminal-1630976143-r15 { fill: #e2e3e3;font-weight: bold } + .terminal-1630976143-r16 { fill: #0a180e;font-weight: bold } + .terminal-1630976143-r17 { fill: #000000 } + .terminal-1630976143-r18 { fill: #008139 } + .terminal-1630976143-r19 { fill: #fea62b;font-weight: bold } + .terminal-1630976143-r20 { fill: #a7a9ab } + .terminal-1630976143-r21 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Template features - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use reference The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - use a copy of the  - most common  - reference genome  - files from  - iGenomes - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github CI The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        testsinclude several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - GitHub actions for - Continuous  - Integration (CI)  - testing - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github badgesThe README.md file Show help  - ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - will include  - GitHub badges - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add configuration         The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        filesinclude ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - configuration  - profiles  - containing custom  - parameters  - requried 
to run  - nf-core pipelines  - at different  - institutions - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use code lintersThe pipeline will  Show help  - ▁▁▁▁▁▁▁▁include code ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Continue  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode  q Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Template features + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use reference The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + use a copy of the  + most common  + reference genome  + files from  + iGenomes + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github CI The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        testsinclude several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + GitHub actions for + Continuous  + Integration (CI)  + testing + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github badgesThe README.md file Show help  + ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + will include  + GitHub badges + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add configuration         The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        filesinclude ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + configuration  + profiles ▆▆ + containing custom  + parameters  + requried to run  + nf-core pipelines  + at different  + institutions + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use code lintersThe pipeline will  Show help  + ▁▁▁▁▁▁▁▁include code ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Back  Continue  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  d Toggle dark mode  q Quit  From d94d1d065001885e2a4b58bf55d1cd5fb352f5b9 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 6 Aug 2024 10:28:23 +0200 Subject: [PATCH 432/737] add bot action to update textual snapshots --- .github/workflows/regenerate-snapshots.yml | 92 ++++++++++++++++++++++ CONTRIBUTING.md | 21 +++++ 2 files changed, 113 insertions(+) create mode 100644 .github/workflows/regenerate-snapshots.yml diff --git a/.github/workflows/regenerate-snapshots.yml 
b/.github/workflows/regenerate-snapshots.yml new file mode 100644 index 000000000..d05505a95 --- /dev/null +++ b/.github/workflows/regenerate-snapshots.yml @@ -0,0 +1,92 @@ +name: Regenerate Textual snapshots from a comment +on: + issue_comment: + types: [created] + +jobs: + regenerate-snapshots: + # Only run if comment is on a PR with the main repo, and if it contains the magic keywords + if: > + contains(github.event.comment.html_url, '/pull/') && + contains(github.event.comment.body, '@nf-core-bot regenerate snapshots') && + github.repository == 'nf-core/tools' + runs-on: ubuntu-latest + steps: + # Use the @nf-core-bot token to check out so we can push later + - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + with: + token: ${{ secrets.nf_core_bot_auth_token }} + + # indication that the command is running + - name: React on comment + uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4 + with: + comment-id: ${{ github.event.comment.id }} + reactions: eyes + + # Action runs on the issue comment, so we don't get the PR by default + # Use the gh cli to check out the PR + - name: Checkout Pull Request + run: gh pr checkout ${{ github.event.issue.number }} + env: + GITHUB_TOKEN: ${{ secrets.nf_core_bot_auth_token }} + + # Install dependencies and run pytest + - name: Set up Python + uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 + with: + python-version: "3.12" + + - name: Install dependencies + run: | + python -m pip install --upgrade pip -r requirements-dev.txt + pip install -e . 
+ + - name: Run pytest to regenerate snapshots + id: pytest + run: | + python3 -m pytest tests/test_create_app.py --snapshot-update --color=yes --cov --durations=0 + continue-on-error: true + + # indication that the run has finished + - name: react if finished succesfully + if: steps.pytest.outcome == 'success' + uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4 + with: + comment-id: ${{ github.event.comment.id }} + reactions: "+1" + + - name: Commit & push changes + id: commit-and-push + if: steps.pytest.outcome == 'failure' + run: | + git config user.email "core@nf-co.re" + git config user.name "nf-core-bot" + git config push.default upstream + git add . + git status + git commit -m "[automated] Update Textual snapshots" + git push + + - name: react if snapshots were updated + id: react-if-updated + if: steps.commit-and-push.outcome == 'success' + uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4 + with: + comment-id: ${{ github.event.comment.id }} + reactions: hooray + + - name: react if snapshots were not updated + if: steps.commit-and-push.outcome == 'failure' + uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4 + with: + comment-id: ${{ github.event.comment.id }} + reactions: confused + + - name: react if snapshots were not updated + if: steps.commit-and-push.outcome == 'failure' + uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4 + with: + issue-number: ${{ github.event.issue.number }} + body: | + @${{ github.actor }} I tried to update the snapshots, but it didn't work. Please update them manually. 
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 2b2dfc1be..cc81fa3f4 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -125,3 +125,24 @@ To get started: Devcontainer specs: - [DevContainer config](.devcontainer/devcontainer.json) + +## nf-core-bot + +nf-core has a bot which you can use to perform certain actions on a PR. + +- Fix linting: + +If the linting tests is failing on a PR to nf-core/tools, you can post a comment with the magic words `@nf-core-bot fix linting`. The bot will try to fix the linting, push to your branch, and react to the comment if the fix was successful or not. + +- Update the `CHANGELOG.md`: + +The nf-core-bot runs automatically on every PR updating the `CHANGELOG.md` if it was not updated. It will add the new change using the title of your PR. +If the action didn't run automatically, or you want to provide a different title, you can post a comment with `@nf-core-bot changelog`, optionally followed by the description that you want to add to the changelog. + +- Update Textual snapshots: + +If the Textual snapshots (run by `tests/test_crate_app.py`) fail, an HTML report is generated and uploaded as an artifact. +If you are sure that these changes are correct, you can automatically update the snapshots form the PR by posting a comment with the magic words `@nf-core-bot regenerate snapshots`. + +> [!WARNING] +> Please always check this report to make sure that the changes are expected. 
From c76a2ba95adef683e8fa197258472cf6deb36b8d Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Tue, 6 Aug 2024 08:31:24 +0000 Subject: [PATCH 433/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index aea91cb51..013e8a5d4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -58,6 +58,7 @@ - Update python:3.12-slim Docker digest to 740d94a ([#3079](https://github.com/nf-core/tools/pull/3079)) - Update pre-commit hook pre-commit/mirrors-mypy to v1.11.1 ([#3091](https://github.com/nf-core/tools/pull/3091)) - Pipelines: allow numbers in custom pipeline name ([#3094](https://github.com/nf-core/tools/pull/3094)) +- Add bot action to update textual snapshots and write bot documentation ([#3102](https://github.com/nf-core/tools/pull/3102)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From d07f3ffbc0a490a7813897ab14f969a079cef70d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Tue, 6 Aug 2024 10:49:42 +0200 Subject: [PATCH 434/737] Apply suggestions from code review MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Matthias Hörtenhuber --- .github/workflows/regenerate-snapshots.yml | 5 +++-- CONTRIBUTING.md | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/regenerate-snapshots.yml b/.github/workflows/regenerate-snapshots.yml index d05505a95..8fcc9b6f1 100644 --- a/.github/workflows/regenerate-snapshots.yml +++ b/.github/workflows/regenerate-snapshots.yml @@ -8,7 +8,7 @@ jobs: # Only run if comment is on a PR with the main repo, and if it contains the magic keywords if: > contains(github.event.comment.html_url, '/pull/') && - contains(github.event.comment.body, '@nf-core-bot regenerate snapshots') && + contains(github.event.comment.body, '@nf-core-bot update snapshots') && github.repository == 'nf-core/tools' runs-on: 
ubuntu-latest steps: @@ -36,6 +36,7 @@ jobs: uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 with: python-version: "3.12" + cache: "pip" - name: Install dependencies run: | @@ -45,7 +46,7 @@ jobs: - name: Run pytest to regenerate snapshots id: pytest run: | - python3 -m pytest tests/test_create_app.py --snapshot-update --color=yes --cov --durations=0 + python3 -m pytest tests/test_create_app.py --snapshot-update --color=yes --durations=0 continue-on-error: true # indication that the run has finished diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index cc81fa3f4..3ad9e5724 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -145,4 +145,4 @@ If the Textual snapshots (run by `tests/test_crate_app.py`) fail, an HTML report If you are sure that these changes are correct, you can automatically update the snapshots form the PR by posting a comment with the magic words `@nf-core-bot regenerate snapshots`. > [!WARNING] -> Please always check this report to make sure that the changes are expected. +> Please always check the HTML report to make sure that the changes are expected. 
From 346344f55d47d8bd20fd7344eaff799079f6b3cc Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 6 Aug 2024 10:54:39 +0200 Subject: [PATCH 435/737] rename regenerae-snapshots to update-textual-snapshots --- ...shots.yml => update-textual-snapshots.yml} | 6 ++--- CONTRIBUTING.md | 22 ++++++++++++++++--- 2 files changed, 22 insertions(+), 6 deletions(-) rename .github/workflows/{regenerate-snapshots.yml => update-textual-snapshots.yml} (96%) diff --git a/.github/workflows/regenerate-snapshots.yml b/.github/workflows/update-textual-snapshots.yml similarity index 96% rename from .github/workflows/regenerate-snapshots.yml rename to .github/workflows/update-textual-snapshots.yml index 8fcc9b6f1..fb936762f 100644 --- a/.github/workflows/regenerate-snapshots.yml +++ b/.github/workflows/update-textual-snapshots.yml @@ -1,10 +1,10 @@ -name: Regenerate Textual snapshots from a comment +name: Update Textual snapshots from a comment on: issue_comment: types: [created] jobs: - regenerate-snapshots: + update-snapshots: # Only run if comment is on a PR with the main repo, and if it contains the magic keywords if: > contains(github.event.comment.html_url, '/pull/') && @@ -43,7 +43,7 @@ jobs: python -m pip install --upgrade pip -r requirements-dev.txt pip install -e . - - name: Run pytest to regenerate snapshots + - name: Run pytest to update snapshots id: pytest run: | python3 -m pytest tests/test_create_app.py --snapshot-update --color=yes --durations=0 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 3ad9e5724..f9773296c 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -132,17 +132,33 @@ nf-core has a bot which you can use to perform certain actions on a PR. - Fix linting: -If the linting tests is failing on a PR to nf-core/tools, you can post a comment with the magic words `@nf-core-bot fix linting`. The bot will try to fix the linting, push to your branch, and react to the comment if the fix was successful or not. 
+If the linting tests is failing on a PR to nf-core/tools, you can post a comment with the magic words: + +``` +@nf-core-bot fix linting +``` + +The bot will try to fix the linting, push to your branch, and react to the comment when it starts running (👀) and if the fix was successful (👍🏻) or not (😕). - Update the `CHANGELOG.md`: The nf-core-bot runs automatically on every PR updating the `CHANGELOG.md` if it was not updated. It will add the new change using the title of your PR. -If the action didn't run automatically, or you want to provide a different title, you can post a comment with `@nf-core-bot changelog`, optionally followed by the description that you want to add to the changelog. +If the action didn't run automatically, or you want to provide a different title, you can post a comment with: + +``` +@nf-core-bot changelog +``` + +Optionally followed by the description that you want to add to the changelog. - Update Textual snapshots: If the Textual snapshots (run by `tests/test_crate_app.py`) fail, an HTML report is generated and uploaded as an artifact. -If you are sure that these changes are correct, you can automatically update the snapshots form the PR by posting a comment with the magic words `@nf-core-bot regenerate snapshots`. +If you are sure that these changes are correct, you can automatically update the snapshots form the PR by posting a comment with the magic words: + +``` +@nf-core-bot update snapshots +``` > [!WARNING] > Please always check the HTML report to make sure that the changes are expected. 
From 6144d0f1ab8b9440623bfa586de61c75ee240569 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 6 Aug 2024 14:48:57 +0200 Subject: [PATCH 436/737] add option to exclude multiqc from pipeline template --- .../create-test-lint-wf-template.yml | 5 ++ .prettierignore | 2 + .../pipeline-template/.github/CONTRIBUTING.md | 2 + nf_core/pipeline-template/CITATIONS.md | 5 +- nf_core/pipeline-template/README.md | 2 +- .../assets/sendmail_template.txt | 2 + nf_core/pipeline-template/conf/modules.config | 2 + nf_core/pipeline-template/docs/output.md | 8 ++- nf_core/pipeline-template/docs/usage.md | 2 +- nf_core/pipeline-template/main.nf | 6 +- nf_core/pipeline-template/modules.json | 3 +- nf_core/pipeline-template/nextflow.config | 2 + .../pipeline-template/nextflow_schema.json | 7 ++- .../utils_nfcore_pipeline_pipeline/main.nf | 9 ++- nf_core/pipeline-template/tower.yml | 2 + .../pipeline-template/workflows/pipeline.nf | 15 ++--- nf_core/pipelines/create/create.py | 62 ++++++++++++------- nf_core/pipelines/create/custompipeline.py | 8 ++- nf_core/pipelines/create/nfcorepipeline.py | 8 ++- nf_core/pipelines/create/utils.py | 6 ++ 20 files changed, 111 insertions(+), 47 deletions(-) diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index cfa0a5007..0bc32d8e4 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -42,6 +42,7 @@ jobs: - "template_skip_igenomes.yml" - "template_skip_ci.yml" - "template_skip_code_linters.yml" + - "template_skip_multiqc.yml" runner: # use the runner given by the input if it is dispatched manually, run on github if it is a rerun or on self-hosted by default - ${{ github.event.inputs.runners || github.run_number > 1 && 'ubuntu-latest' || 'self-hosted' }} @@ -107,6 +108,10 @@ jobs: run: | printf "org: my-prefix\nskip: code_linters" > create-test-lint-wf/template_skip_code_linters.yml + - name: Create template skip 
multiqc + run: | + printf "org: my-prefix\nskip: multiqc" > create-test-lint-wf/template_skip_multiqc.yml + # Create a pipeline from the template - name: create a pipeline from the template ${{ matrix.TEMPLATE }} run: | diff --git a/.prettierignore b/.prettierignore index 2a445d487..059007ab0 100644 --- a/.prettierignore +++ b/.prettierignore @@ -5,6 +5,8 @@ docs/api/_build testing nf_core/module-template/meta.yml nf_core/pipeline-template/nextflow_schema.json +nf_core/pipeline-template/modules.json +nf_core/pipeline-template/tower.yml # don't run on things handled by ruff *.py *.pyc diff --git a/nf_core/pipeline-template/.github/CONTRIBUTING.md b/nf_core/pipeline-template/.github/CONTRIBUTING.md index e22a66425..2d6ecc174 100644 --- a/nf_core/pipeline-template/.github/CONTRIBUTING.md +++ b/nf_core/pipeline-template/.github/CONTRIBUTING.md @@ -87,8 +87,10 @@ If you wish to contribute a new step, please use the following coding standards: 6. Add sanity checks and validation for all relevant parameters. 7. Perform local tests to validate that the new code works as expected. 8. If applicable, add a new test command in `.github/workflow/ci.yml`. + {%- if multiqc %} 9. Update MultiQC config `assets/multiqc_config.yml` so relevant suffixes, file name clean up and module plots are in the appropriate order. If applicable, add a [MultiQC](https://https://multiqc.info/) module. 10. Add a description of the output files and if relevant any appropriate images from the MultiQC report to `docs/output.md`. + {% endif %} ### Default values diff --git a/nf_core/pipeline-template/CITATIONS.md b/nf_core/pipeline-template/CITATIONS.md index 6edf8f620..22f5da4e7 100644 --- a/nf_core/pipeline-template/CITATIONS.md +++ b/nf_core/pipeline-template/CITATIONS.md @@ -14,9 +14,10 @@ > Andrews, S. (2010). FastQC: A Quality Control Tool for High Throughput Sequence Data [Online]. 
-- [MultiQC](https://pubmed.ncbi.nlm.nih.gov/27312411/) +{% if multiqc %}- [MultiQC](https://pubmed.ncbi.nlm.nih.gov/27312411/) - > Ewels P, Magnusson M, Lundin S, Käller M. MultiQC: summarize analysis results for multiple tools and samples in a single report. Bioinformatics. 2016 Oct 1;32(19):3047-8. doi: 10.1093/bioinformatics/btw354. Epub 2016 Jun 16. PubMed PMID: 27312411; PubMed Central PMCID: PMC5039924. +> Ewels P, Magnusson M, Lundin S, Käller M. MultiQC: summarize analysis results for multiple tools and samples in a single report. Bioinformatics. 2016 Oct 1;32(19):3047-8. doi: 10.1093/bioinformatics/btw354. Epub 2016 Jun 16. PubMed PMID: 27312411; PubMed Central PMCID: PMC5039924. +> {%- endif %} ## Software packaging/containerisation tools diff --git a/nf_core/pipeline-template/README.md b/nf_core/pipeline-template/README.md index feece399b..a9c913d6e 100644 --- a/nf_core/pipeline-template/README.md +++ b/nf_core/pipeline-template/README.md @@ -45,7 +45,7 @@ 1. Read QC ([`FastQC`](https://www.bioinformatics.babraham.ac.uk/projects/fastqc/)) -2. Present QC for raw reads ([`MultiQC`](http://multiqc.info/)) + {% if multiqc %}2. Present QC for raw reads ([`MultiQC`](http://multiqc.info/)){% endif %} ## Usage diff --git a/nf_core/pipeline-template/assets/sendmail_template.txt b/nf_core/pipeline-template/assets/sendmail_template.txt index 3e59cd2d6..5257815f7 100644 --- a/nf_core/pipeline-template/assets/sendmail_template.txt +++ b/nf_core/pipeline-template/assets/sendmail_template.txt @@ -26,6 +26,7 @@ Content-Disposition: inline; filename="{{ name_noslash }}_logo_light.png" join( '\n' ) %> <% +{%- if multiqc %} if (mqcFile){ def mqcFileObj = new File("$mqcFile") if (mqcFileObj.length() < mqcMaxSize){ @@ -48,6 +49,7 @@ ${mqcFileObj. 
join( '\n' )} """ }} +{%- endif %} %> --nfcoremimeboundary-- diff --git a/nf_core/pipeline-template/conf/modules.config b/nf_core/pipeline-template/conf/modules.config index d203d2b6e..84972d8e2 100644 --- a/nf_core/pipeline-template/conf/modules.config +++ b/nf_core/pipeline-template/conf/modules.config @@ -22,6 +22,7 @@ process { ext.args = '--quiet' } + {%- if multiqc %} withName: 'MULTIQC' { ext.args = { params.multiqc_title ? "--title \"$params.multiqc_title\"" : '' } publishDir = [ @@ -30,5 +31,6 @@ process { saveAs: { filename -> filename.equals('versions.yml') ? null : filename } ] } + {%- endif %} } diff --git a/nf_core/pipeline-template/docs/output.md b/nf_core/pipeline-template/docs/output.md index 53b0e242e..e27ff9959 100644 --- a/nf_core/pipeline-template/docs/output.md +++ b/nf_core/pipeline-template/docs/output.md @@ -2,7 +2,7 @@ ## Introduction -This document describes the output produced by the pipeline. Most of the plots are taken from the MultiQC report, which summarises results at the end of the pipeline. +This document describes the output produced by the pipeline. {% if multiqc %}Most of the plots are taken from the MultiQC report, which summarises results at the end of the pipeline.{% endif %} The directories listed below will be created in the results directory after the pipeline has finished. All paths are relative to the top-level results directory. 
@@ -13,7 +13,7 @@ The directories listed below will be created in the results directory after the The pipeline is built using [Nextflow](https://www.nextflow.io/) and processes data using the following steps: - [FastQC](#fastqc) - Raw read QC -- [MultiQC](#multiqc) - Aggregate report describing results and QC from the whole pipeline + {% if multiqc %}- [MultiQC](#multiqc) - Aggregate report describing results and QC from the whole pipeline{% endif %} - [Pipeline information](#pipeline-information) - Report metrics generated during the workflow execution ### FastQC @@ -36,8 +36,9 @@ The pipeline is built using [Nextflow](https://www.nextflow.io/) and processes d ![MultiQC - FastQC adapter content plot](images/mqc_fastqc_adapter.png) :::note -The FastQC plots displayed in the MultiQC report shows _untrimmed_ reads. They may contain adapter sequence and potentially regions with low quality. +The FastQC plots {% if multiqc %}displayed in the MultiQC report{% endif %} shows _untrimmed_ reads. They may contain adapter sequence and potentially regions with low quality. ::: +{% if multiqc %} ### MultiQC @@ -54,6 +55,7 @@ The FastQC plots displayed in the MultiQC report shows _untrimmed_ reads. They m [MultiQC](http://multiqc.info) is a visualization tool that generates a single HTML report summarising all samples in your project. Most of the pipeline QC results are visualised in the report and further statistics are available in the report data directory. Results generated by MultiQC collate pipeline QC from supported tools e.g. FastQC. The pipeline has special steps which also allow the software versions to be reported in the MultiQC output for future traceability. For more information about how to use MultiQC reports, see . 
+{% endif %} ### Pipeline information diff --git a/nf_core/pipeline-template/docs/usage.md b/nf_core/pipeline-template/docs/usage.md index bf637219f..1dc033eea 100644 --- a/nf_core/pipeline-template/docs/usage.md +++ b/nf_core/pipeline-template/docs/usage.md @@ -114,7 +114,7 @@ It is a good idea to specify a pipeline version when running the pipeline on you First, go to the [{{ name }} releases page](https://github.com/{{ name }}/releases) and find the latest pipeline version - numeric only (eg. `1.3.1`). Then specify this when running the pipeline with `-r` (one hyphen) - eg. `-r 1.3.1`. Of course, you can switch to another version by changing the number after the `-r` flag. -This version number will be logged in reports when you run the pipeline, so that you'll know what you used when you look back in the future. For example, at the bottom of the MultiQC reports. +This version number will be logged in reports when you run the pipeline, so that you'll know what you used when you look back in the future. {% if multiqc %}For example, at the bottom of the MultiQC reports.{% endif %} To further assist in reproducbility, you can use share and re-use [parameter files](#running-the-pipeline) to repeat pipeline runs with the same settings without having to write out a command with every single parameter. 
diff --git a/nf_core/pipeline-template/main.nf b/nf_core/pipeline-template/main.nf index c13a0d24e..fddfc5489 100644 --- a/nf_core/pipeline-template/main.nf +++ b/nf_core/pipeline-template/main.nf @@ -56,10 +56,10 @@ workflow {{ prefix_nodash|upper }}_{{ short_name|upper }} { {{ short_name|upper }} ( samplesheet ) - +{%- if multiqc %} emit: multiqc_report = {{ short_name|upper }}.out.multiqc_report // channel: /path/to/multiqc_report.html - +{%- endif %} } /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -101,7 +101,7 @@ workflow { params.outdir, params.monochrome_logs, params.hook_url, - {{ prefix_nodash|upper }}_{{ short_name|upper }}.out.multiqc_report + {%- if multiqc %}{{ prefix_nodash|upper }}_{{ short_name|upper }}.out.multiqc_report{% endif %} ) } diff --git a/nf_core/pipeline-template/modules.json b/nf_core/pipeline-template/modules.json index 9137c5967..eb9391b29 100644 --- a/nf_core/pipeline-template/modules.json +++ b/nf_core/pipeline-template/modules.json @@ -9,12 +9,13 @@ "branch": "master", "git_sha": "285a50500f9e02578d90b3ce6382ea3c30216acd", "installed_by": ["modules"] - }, + }{%- if multiqc %}, "multiqc": { "branch": "master", "git_sha": "b7ebe95761cd389603f9cc0e0dc384c0f663815a", "installed_by": ["modules"] } + {%- endif %} } }, "subworkflows": { diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index 2e6a56b00..07c04d7b4 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -20,12 +20,14 @@ params { igenomes_ignore = false {%- endif %} + {%- if multiqc %} // MultiQC options multiqc_config = null multiqc_title = null multiqc_logo = null max_multiqc_email_size = '25.MB' multiqc_methods_description = null + {%- endif %} // Boilerplate options outdir = null diff --git a/nf_core/pipeline-template/nextflow_schema.json b/nf_core/pipeline-template/nextflow_schema.json index 18bad71b7..1815f57ef 100644 --- 
a/nf_core/pipeline-template/nextflow_schema.json +++ b/nf_core/pipeline-template/nextflow_schema.json @@ -35,12 +35,13 @@ "fa_icon": "fas fa-envelope", "help_text": "Set this parameter to your e-mail address to get a summary e-mail with details of the run sent to you when the workflow exits. If set in your user config file (`~/.nextflow/config`) then you don't need to specify this on the command line for every run.", "pattern": "^([a-zA-Z0-9_\\-\\.]+)@([a-zA-Z0-9_\\-\\.]+)\\.([a-zA-Z]{2,5})$" - }, + }{% if multiqc %}, "multiqc_title": { "type": "string", "description": "MultiQC report title. Printed as page header, used for filename if not otherwise specified.", "fa_icon": "fas fa-file-signature" } + {% endif %} } }, {%- if igenomes %} @@ -201,6 +202,7 @@ "fa_icon": "fas fa-remove-format", "hidden": true }, + {%- if multiqc %} "max_multiqc_email_size": { "type": "string", "description": "File size limit when attaching MultiQC reports to summary emails.", @@ -209,6 +211,7 @@ "fa_icon": "fas fa-file-upload", "hidden": true }, + {% endif %} "monochrome_logs": { "type": "boolean", "description": "Do not use coloured log outputs.", @@ -222,6 +225,7 @@ "help_text": "Incoming hook URL for messaging service. 
Currently, MS Teams and Slack are supported.", "hidden": true }, + {%- if multiqc %} "multiqc_config": { "type": "string", "format": "file-path", @@ -240,6 +244,7 @@ "description": "Custom MultiQC yaml file containing HTML including a methods description.", "fa_icon": "fas fa-cog" }, + {%- endif %} "validate_params": { "type": "boolean", "description": "Boolean whether to validate parameters against the schema at runtime", diff --git a/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf index a4bfb9f8b..7c5b7b221 100644 --- a/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf +++ b/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf @@ -123,7 +123,7 @@ workflow PIPELINE_COMPLETION { outdir // path: Path to output directory where results will be published monochrome_logs // boolean: Disable ANSI colour codes in log output hook_url // string: hook URL for notifications - multiqc_report // string: Path to MultiQC report + {% if multiqc %}multiqc_report // string: Path to MultiQC report{% endif %} main: @@ -134,7 +134,11 @@ workflow PIPELINE_COMPLETION { // workflow.onComplete { if (email || email_on_fail) { + {%- if multiqc %} completionEmail(summary_params, email, email_on_fail, plaintext_email, outdir, monochrome_logs, multiqc_report.toList()) + {%- else %} + completionEmail(summary_params, email, email_on_fail, plaintext_email, outdir, monochrome_logs, []) + {%- endif %} } completionSummary(monochrome_logs) @@ -206,7 +210,7 @@ def genomeExistsError() { } } {%- endif %} - +{%- if multiqc %} // // Generate methods description for MultiQC // @@ -270,3 +274,4 @@ def methodsDescriptionText(mqc_methods_yaml) { return description_html.toString() } +{% endif %} diff --git a/nf_core/pipeline-template/tower.yml b/nf_core/pipeline-template/tower.yml index 787aedfe9..2ddbef770 100644 --- 
a/nf_core/pipeline-template/tower.yml +++ b/nf_core/pipeline-template/tower.yml @@ -1,5 +1,7 @@ reports: + {%- if multiqc %} multiqc_report.html: display: "MultiQC HTML report" + {%- endif %} samplesheet.csv: display: "Auto-created samplesheet with collated metadata and FASTQ paths" diff --git a/nf_core/pipeline-template/workflows/pipeline.nf b/nf_core/pipeline-template/workflows/pipeline.nf index de0f21fe3..eb56cb923 100644 --- a/nf_core/pipeline-template/workflows/pipeline.nf +++ b/nf_core/pipeline-template/workflows/pipeline.nf @@ -5,11 +5,11 @@ */ include { FASTQC } from '../modules/nf-core/fastqc/main' -include { MULTIQC } from '../modules/nf-core/multiqc/main' +{% if multiqc %}include { MULTIQC } from '../modules/nf-core/multiqc/main'{% endif %} include { paramsSummaryMap } from 'plugin/nf-validation' -include { paramsSummaryMultiqc } from '../subworkflows/nf-core/utils_nfcore_pipeline' +{% if multiqc %}include { paramsSummaryMultiqc } from '../subworkflows/nf-core/utils_nfcore_pipeline'{% endif %} include { softwareVersionsToYAML } from '../subworkflows/nf-core/utils_nfcore_pipeline' -include { methodsDescriptionText } from '../subworkflows/local/utils_nfcore_{{ short_name }}_pipeline' +{% if multiqc %}include { methodsDescriptionText } from '../subworkflows/local/utils_nfcore_{{ short_name }}_pipeline'{% endif %} /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -25,7 +25,7 @@ workflow {{ short_name|upper }} { main: ch_versions = Channel.empty() - ch_multiqc_files = Channel.empty() + {% if multiqc %}ch_multiqc_files = Channel.empty(){% endif %} // // MODULE: Run FastQC @@ -33,7 +33,7 @@ workflow {{ short_name|upper }} { FASTQC ( ch_samplesheet ) - ch_multiqc_files = ch_multiqc_files.mix(FASTQC.out.zip.collect{it[1]}) + {% if multiqc %}ch_multiqc_files = ch_multiqc_files.mix(FASTQC.out.zip.collect{it[1]}){% endif %} ch_versions = ch_versions.mix(FASTQC.out.versions.first()) // @@ -47,6 +47,7 @@ workflow {{ 
short_name|upper }} { newLine: true ).set { ch_collated_versions } +{% if multiqc %} // // MODULE: MultiQC // @@ -85,9 +86,9 @@ workflow {{ short_name|upper }} { ch_multiqc_custom_config.toList(), ch_multiqc_logo.toList() ) - +{% endif %} emit: - multiqc_report = MULTIQC.out.report.toList() // channel: /path/to/multiqc_report.html + {%- if multiqc %}multiqc_report = MULTIQC.out.report.toList() // channel: /path/to/multiqc_report.html{% endif %} versions = ch_versions // channel: [ path(versions.yml) ] } diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 42e4a6ad7..386ddd3bd 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -101,6 +101,11 @@ def __init__( ".prettierrc.yml", ".github/workflows/fix-linting.yml", ], + "multiqc": [ + "assets/multiqc_config.yml", + "assets/methods_description_template.yml", + "modules/nf-core/multiqc/", + ], } # Get list of files we're skipping with the supplied skip keys self.skip_paths = set(sp for k in skip_paths for sp in skippable_paths[k]) @@ -210,6 +215,7 @@ def obtain_jinja_params_dict(self, features_to_skip, pipeline_dir): "igenomes": {"file": True, "content": True}, "nf_core_configs": {"file": False, "content": True}, "code_linters": {"file": True, "content": True}, + "multiqc": {"file": True, "content": True}, } # Set the parameters for the jinja template @@ -374,8 +380,8 @@ def render_template(self): # in the github bug report template self.remove_nf_core_in_bug_report_template() - # Update the .nf-core.yml with linting configurations - self.fix_linting() + # Update the .nf-core.yml with linting configurations + self.fix_linting() if self.config: config_fn, config_yml = nf_core.utils.load_tools_config(self.outdir) @@ -425,28 +431,31 @@ def fix_linting(self): """ # Create a lint config short_name = self.jinja_params["short_name"] - lint_config = { - "files_exist": [ - "CODE_OF_CONDUCT.md", - f"assets/nf-core-{short_name}_logo_light.png", - 
f"docs/images/nf-core-{short_name}_logo_light.png", - f"docs/images/nf-core-{short_name}_logo_dark.png", - ".github/ISSUE_TEMPLATE/config.yml", - ".github/workflows/awstest.yml", - ".github/workflows/awsfulltest.yml", - ], - "files_unchanged": [ - "CODE_OF_CONDUCT.md", - f"assets/nf-core-{short_name}_logo_light.png", - f"docs/images/nf-core-{short_name}_logo_light.png", - f"docs/images/nf-core-{short_name}_logo_dark.png", - ], - "nextflow_config": [ - "manifest.name", - "manifest.homePage", - ], - "multiqc_config": ["report_comment"], - } + if not self.config.is_nfcore: + lint_config = { + "files_exist": [ + "CODE_OF_CONDUCT.md", + f"assets/nf-core-{short_name}_logo_light.png", + f"docs/images/nf-core-{short_name}_logo_light.png", + f"docs/images/nf-core-{short_name}_logo_dark.png", + ".github/ISSUE_TEMPLATE/config.yml", + ".github/workflows/awstest.yml", + ".github/workflows/awsfulltest.yml", + ], + "files_unchanged": [ + "CODE_OF_CONDUCT.md", + f"assets/nf-core-{short_name}_logo_light.png", + f"docs/images/nf-core-{short_name}_logo_light.png", + f"docs/images/nf-core-{short_name}_logo_dark.png", + ], + "nextflow_config": [ + "manifest.name", + "manifest.homePage", + ], + "multiqc_config": ["report_comment"], + } + else: + lint_config = {"files_exist": [], "files_unchanged": []} # Add GitHub hosting specific configurations if not self.jinja_params["github"]: @@ -505,6 +514,11 @@ def fix_linting(self): if not self.jinja_params["github_badges"] or not self.jinja_params["github"]: lint_config["readme"] = ["nextflow_badge"] + # Add multiqc specific configurations + if not self.jinja_params["multiqc"]: + lint_config["files_unchanged"].extend([".github/CONTRIBUTING.md", "assets/sendmail_template.txt"]) + lint_config["multiqc_config"] = ["report_comment"] + # If the pipeline is not nf-core if not self.config.is_nfcore: lint_config["files_unchanged"].extend([".github/ISSUE_TEMPLATE/bug_report.yml"]) diff --git a/nf_core/pipelines/create/custompipeline.py 
b/nf_core/pipelines/create/custompipeline.py index 0925fb5ba..0ddd6533b 100644 --- a/nf_core/pipelines/create/custompipeline.py +++ b/nf_core/pipelines/create/custompipeline.py @@ -6,7 +6,7 @@ from textual.screen import Screen from textual.widgets import Button, Footer, Header, Markdown, Switch -from nf_core.pipelines.create.utils import PipelineFeature, markdown_genomes +from nf_core.pipelines.create.utils import PipelineFeature, markdown_genomes, markdown_multiqc markdown_ci = """ Nf-core provides a set of Continuous Integration (CI) tests for Github. @@ -96,6 +96,12 @@ def compose(self) -> ComposeResult: "The pipeline will include code linters and CI tests to lint your code: pre-commit, editor-config and prettier.", "code_linters", ), + PipelineFeature( + markdown_multiqc, + "Use multiqc", + "The pipeline will include the MultiQC module which generates an HTML report for quality control.", + "multiqc", + ), classes="features-container", ) yield Center( diff --git a/nf_core/pipelines/create/nfcorepipeline.py b/nf_core/pipelines/create/nfcorepipeline.py index 49cc1f8f8..8319cb044 100644 --- a/nf_core/pipelines/create/nfcorepipeline.py +++ b/nf_core/pipelines/create/nfcorepipeline.py @@ -6,7 +6,7 @@ from textual.screen import Screen from textual.widgets import Button, Footer, Header, Markdown, Switch -from nf_core.pipelines.create.utils import PipelineFeature, markdown_genomes +from nf_core.pipelines.create.utils import PipelineFeature, markdown_genomes, markdown_multiqc class NfcorePipeline(Screen): @@ -29,6 +29,12 @@ def compose(self) -> ComposeResult: "The pipeline will be configured to use a copy of the most common reference genome files from iGenomes", "igenomes", ), + PipelineFeature( + markdown_multiqc, + "Use multiqc", + "The pipeline will include the MultiQC module which generates an HTML report for quality control.", + "multiqc", + ), classes="features-container", ) yield Center( diff --git a/nf_core/pipelines/create/utils.py 
b/nf_core/pipelines/create/utils.py index c387960c3..43094ab53 100644 --- a/nf_core/pipelines/create/utils.py +++ b/nf_core/pipelines/create/utils.py @@ -259,3 +259,9 @@ def remove_hide_class(app, widget_id: str) -> None: For more information about reference genomes in nf-core pipelines, see the [nf-core docs](https://nf-co.re/docs/usage/reference_genomes). """ + +markdown_multiqc = """ +MultiQC is a visualization tool that generates a single HTML report summarising all samples in your project. Most of the pipeline quality control results can be visualised in the report and further statistics are available in the report data directory. + +The pipeline will include the MultiQC module and will have special steps which also allow the software versions to be reported in the MultiQC output for future traceability. For more information about how to use MultiQC reports, see http://multiqc.info. +""" From dcc4214f65009f9a94a6a3dd70543567d7016ccd Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Tue, 6 Aug 2024 12:50:57 +0000 Subject: [PATCH 437/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index aea91cb51..5256acf3d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,7 @@ - Use filename in code block for `params.yml` ([#3055](https://github.com/nf-core/tools/pull/3055)) - Remove release announcement for non nf-core pipelines ([#3072](https://github.com/nf-core/tools/pull/3072)) - add option to exclude code linters for custom pipeline template ([#3084](https://github.com/nf-core/tools/pull/3084)) +- add option to exclude multiqc from pipeline template ([#3103](https://github.com/nf-core/tools/pull/3103)) ### Linting From a2d6b447f4bb0f11d184d576602dcaf964ccd9b9 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 7 Aug 2024 08:31:00 +0200 Subject: [PATCH 438/737] fix tests --- nf_core/pipelines/create/create.py | 13 +- tests/__snapshots__/test_create_app.ambr | 764 
+++++++++++------------ 2 files changed, 392 insertions(+), 385 deletions(-) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 386ddd3bd..86e104aa5 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -455,7 +455,7 @@ def fix_linting(self): "multiqc_config": ["report_comment"], } else: - lint_config = {"files_exist": [], "files_unchanged": []} + lint_config = {} # Add GitHub hosting specific configurations if not self.jinja_params["github"]: @@ -516,8 +516,15 @@ def fix_linting(self): # Add multiqc specific configurations if not self.jinja_params["multiqc"]: - lint_config["files_unchanged"].extend([".github/CONTRIBUTING.md", "assets/sendmail_template.txt"]) - lint_config["multiqc_config"] = ["report_comment"] + try: + lint_config["files_unchanged"].extend([".github/CONTRIBUTING.md", "assets/sendmail_template.txt"]) + except KeyError: + lint_config["files_unchanged"] = [".github/CONTRIBUTING.md", "assets/sendmail_template.txt"] + try: + lint_config["files_exist"].extend(["assets/multiqc_config.yml"]) + except KeyError: + lint_config["files_exist"] = ["assets/multiqc_config.yml"] + lint_config["multiqc_config"] = False # If the pipeline is not nf-core if not self.config.is_nfcore: diff --git a/tests/__snapshots__/test_create_app.ambr b/tests/__snapshots__/test_create_app.ambr index 5e5b005de..6c93216be 100644 --- a/tests/__snapshots__/test_create_app.ambr +++ b/tests/__snapshots__/test_create_app.ambr @@ -851,257 +851,257 @@ font-weight: 700; } - .terminal-2778615119-matrix { + .terminal-4132131606-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2778615119-title { + .terminal-4132131606-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2778615119-r1 { fill: #c5c8c6 } - .terminal-2778615119-r2 { fill: #e3e3e3 } - .terminal-2778615119-r3 { fill: #989898 } - 
.terminal-2778615119-r4 { fill: #e1e1e1 } - .terminal-2778615119-r5 { fill: #4ebf71;font-weight: bold } - .terminal-2778615119-r6 { fill: #1e1e1e } - .terminal-2778615119-r7 { fill: #0178d4 } - .terminal-2778615119-r8 { fill: #454a50 } - .terminal-2778615119-r9 { fill: #e2e2e2 } - .terminal-2778615119-r10 { fill: #808080 } - .terminal-2778615119-r11 { fill: #e2e3e3;font-weight: bold } - .terminal-2778615119-r12 { fill: #000000 } - .terminal-2778615119-r13 { fill: #e4e4e4 } - .terminal-2778615119-r14 { fill: #14191f } - .terminal-2778615119-r15 { fill: #507bb3 } - .terminal-2778615119-r16 { fill: #dde6ed;font-weight: bold } - .terminal-2778615119-r17 { fill: #001541 } - .terminal-2778615119-r18 { fill: #7ae998 } - .terminal-2778615119-r19 { fill: #0a180e;font-weight: bold } - .terminal-2778615119-r20 { fill: #008139 } - .terminal-2778615119-r21 { fill: #fea62b;font-weight: bold } - .terminal-2778615119-r22 { fill: #a7a9ab } - .terminal-2778615119-r23 { fill: #e2e3e3 } + .terminal-4132131606-r1 { fill: #c5c8c6 } + .terminal-4132131606-r2 { fill: #e3e3e3 } + .terminal-4132131606-r3 { fill: #989898 } + .terminal-4132131606-r4 { fill: #e1e1e1 } + .terminal-4132131606-r5 { fill: #4ebf71;font-weight: bold } + .terminal-4132131606-r6 { fill: #1e1e1e } + .terminal-4132131606-r7 { fill: #0178d4 } + .terminal-4132131606-r8 { fill: #454a50 } + .terminal-4132131606-r9 { fill: #e2e2e2 } + .terminal-4132131606-r10 { fill: #808080 } + .terminal-4132131606-r11 { fill: #e2e3e3;font-weight: bold } + .terminal-4132131606-r12 { fill: #000000 } + .terminal-4132131606-r13 { fill: #e4e4e4 } + .terminal-4132131606-r14 { fill: #14191f } + .terminal-4132131606-r15 { fill: #507bb3 } + .terminal-4132131606-r16 { fill: #dde6ed;font-weight: bold } + .terminal-4132131606-r17 { fill: #001541 } + .terminal-4132131606-r18 { fill: #7ae998 } + .terminal-4132131606-r19 { fill: #0a180e;font-weight: bold } + .terminal-4132131606-r20 { fill: #008139 } + .terminal-4132131606-r21 { fill: 
#fea62b;font-weight: bold } + .terminal-4132131606-r22 { fill: #a7a9ab } + .terminal-4132131606-r23 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Template features - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use reference The pipeline will  Hide help  - ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - use a copy of the  - most common  - reference genome  - files from  - iGenomes - - - Nf-core pipelines are configured to use a copy of the most common  - reference genome files. - - By selecting this option, your pipeline will include a configuration - file specifying the paths to these files. - - The required code to use these files will also be included in the  - template. When the pipeline user provides an appropriate genome key, - the pipeline will automatically download the required reference ▂▂ - files. 
- - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github CI The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        testsinclude several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - GitHub actions for - Continuous  - Integration (CI)  - testing - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github badgesThe README.md file Show help  - ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - will include  - GitHub badges - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Continue  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode  q Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Template features + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use reference The pipeline will  Hide help  + ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + use a copy of the  + most common  + reference genome  + files from  + iGenomes + + + Nf-core pipelines are configured to use a copy of the most common  + reference genome files. + + By selecting this option, your pipeline will include a configuration + file specifying the paths to these files. + + The required code to use these files will also be included in the  + template. When the pipeline user provides an appropriate genome key, + the pipeline will automatically download the required reference ▂▂ + files. 
+ + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github CI The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        testsinclude several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + GitHub actions for + Continuous  + Integration (CI)  + testing + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github badgesThe README.md file Show help  + ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + will include  + GitHub badges + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Back  Continue  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  d Toggle dark mode  q Quit  @@ -2233,255 +2233,255 @@ font-weight: 700; } - .terminal-763408100-matrix { + .terminal-2850535557-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-763408100-title { + .terminal-2850535557-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-763408100-r1 { fill: #c5c8c6 } - .terminal-763408100-r2 { fill: #e3e3e3 } - .terminal-763408100-r3 { fill: #989898 } - .terminal-763408100-r4 { fill: #e1e1e1 } - .terminal-763408100-r5 { fill: #4ebf71;font-weight: bold } - .terminal-763408100-r6 { fill: #1e1e1e } - .terminal-763408100-r7 { fill: #507bb3 } - .terminal-763408100-r8 { fill: #e2e2e2 } - .terminal-763408100-r9 { fill: #808080 } - .terminal-763408100-r10 { fill: #dde6ed;font-weight: bold } - .terminal-763408100-r11 { fill: #001541 } - .terminal-763408100-r12 { fill: #14191f } - .terminal-763408100-r13 { fill: #454a50 } - .terminal-763408100-r14 { fill: #7ae998 } - .terminal-763408100-r15 { fill: #e2e3e3;font-weight: bold } - .terminal-763408100-r16 { fill: #0a180e;font-weight: bold } - .terminal-763408100-r17 { fill: #000000 } - .terminal-763408100-r18 { fill: #008139 } - .terminal-763408100-r19 { fill: #fea62b;font-weight: bold } - .terminal-763408100-r20 { fill: #a7a9ab } - .terminal-763408100-r21 { fill: #e2e3e3 } + .terminal-2850535557-r1 { fill: #c5c8c6 } + .terminal-2850535557-r2 { fill: #e3e3e3 } + .terminal-2850535557-r3 { fill: #989898 } 
+ .terminal-2850535557-r4 { fill: #e1e1e1 } + .terminal-2850535557-r5 { fill: #4ebf71;font-weight: bold } + .terminal-2850535557-r6 { fill: #1e1e1e } + .terminal-2850535557-r7 { fill: #507bb3 } + .terminal-2850535557-r8 { fill: #e2e2e2 } + .terminal-2850535557-r9 { fill: #808080 } + .terminal-2850535557-r10 { fill: #dde6ed;font-weight: bold } + .terminal-2850535557-r11 { fill: #001541 } + .terminal-2850535557-r12 { fill: #14191f } + .terminal-2850535557-r13 { fill: #454a50 } + .terminal-2850535557-r14 { fill: #7ae998 } + .terminal-2850535557-r15 { fill: #e2e3e3;font-weight: bold } + .terminal-2850535557-r16 { fill: #0a180e;font-weight: bold } + .terminal-2850535557-r17 { fill: #000000 } + .terminal-2850535557-r18 { fill: #008139 } + .terminal-2850535557-r19 { fill: #fea62b;font-weight: bold } + .terminal-2850535557-r20 { fill: #a7a9ab } + .terminal-2850535557-r21 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Template features - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use reference The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - use a copy of the  - most common  - reference genome  - files from  - iGenomes - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github CI The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        testsinclude several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - GitHub actions for - Continuous  - Integration (CI)  - testing - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github badgesThe README.md file Show help  - ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - will include  - GitHub badges - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add configuration         The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        filesinclude ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - configuration  - profiles  - 
containing custom  - parameters  - requried to run  - nf-core pipelines  - at different ▁▁ - institutions - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use code lintersThe pipeline will  Show help  - ▁▁▁▁▁▁▁▁include code ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Continue  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode  q Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Template features + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use reference The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + use a copy of the  + most common  + reference genome  + files from  + iGenomes + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github CI The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        testsinclude several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + GitHub actions for + Continuous  + Integration (CI)  + testing + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github badgesThe README.md file Show help  + ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + will include  + GitHub badges + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add configuration         The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        filesinclude ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + configuration  + profiles ▁▁ + containing custom  + parameters  + requried to run  + nf-core pipelines  + at different  + institutions + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use code lintersThe pipeline will  Show help  + ▁▁▁▁▁▁▁▁include code ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Back  Continue  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  d Toggle dark mode  q Quit  @@ -2511,254 +2511,254 @@ font-weight: 700; } - .terminal-388991162-matrix { + .terminal-157696122-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-388991162-title { + .terminal-157696122-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-388991162-r1 { fill: #c5c8c6 } - .terminal-388991162-r2 { fill: 
#e3e3e3 } - .terminal-388991162-r3 { fill: #989898 } - .terminal-388991162-r4 { fill: #e1e1e1 } - .terminal-388991162-r5 { fill: #4ebf71;font-weight: bold } - .terminal-388991162-r6 { fill: #1e1e1e } - .terminal-388991162-r7 { fill: #507bb3 } - .terminal-388991162-r8 { fill: #e2e2e2 } - .terminal-388991162-r9 { fill: #808080 } - .terminal-388991162-r10 { fill: #dde6ed;font-weight: bold } - .terminal-388991162-r11 { fill: #001541 } - .terminal-388991162-r12 { fill: #454a50 } - .terminal-388991162-r13 { fill: #7ae998 } - .terminal-388991162-r14 { fill: #e2e3e3;font-weight: bold } - .terminal-388991162-r15 { fill: #0a180e;font-weight: bold } - .terminal-388991162-r16 { fill: #000000 } - .terminal-388991162-r17 { fill: #008139 } - .terminal-388991162-r18 { fill: #fea62b;font-weight: bold } - .terminal-388991162-r19 { fill: #a7a9ab } - .terminal-388991162-r20 { fill: #e2e3e3 } + .terminal-157696122-r1 { fill: #c5c8c6 } + .terminal-157696122-r2 { fill: #e3e3e3 } + .terminal-157696122-r3 { fill: #989898 } + .terminal-157696122-r4 { fill: #e1e1e1 } + .terminal-157696122-r5 { fill: #4ebf71;font-weight: bold } + .terminal-157696122-r6 { fill: #1e1e1e } + .terminal-157696122-r7 { fill: #507bb3 } + .terminal-157696122-r8 { fill: #e2e2e2 } + .terminal-157696122-r9 { fill: #808080 } + .terminal-157696122-r10 { fill: #dde6ed;font-weight: bold } + .terminal-157696122-r11 { fill: #001541 } + .terminal-157696122-r12 { fill: #454a50 } + .terminal-157696122-r13 { fill: #7ae998 } + .terminal-157696122-r14 { fill: #e2e3e3;font-weight: bold } + .terminal-157696122-r15 { fill: #0a180e;font-weight: bold } + .terminal-157696122-r16 { fill: #000000 } + .terminal-157696122-r17 { fill: #008139 } + .terminal-157696122-r18 { fill: #fea62b;font-weight: bold } + .terminal-157696122-r19 { fill: #a7a9ab } + .terminal-157696122-r20 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - 
+ - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Template features - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use reference The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - use a copy of the  - most common  - reference genome  - files from iGenomes - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Continue  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode  q Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Template features + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use reference The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + use a copy of the  + most common  + reference genome  + files from iGenomes + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use multiqcThe pipeline will  Show help  + ▁▁▁▁▁▁▁▁include the MultiQC▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + module which  + generates an HTML  + report for quality  + control. 
+ + + + + + + + + + + + + + + + + + + + + + + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Back  Continue  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  d Toggle dark mode  q Quit  From 88a354fa73a657570496fd9cc6343d7e75aeaba7 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 7 Aug 2024 09:29:48 +0200 Subject: [PATCH 439/737] update textual snapshots --- tests/__snapshots__/test_create_app.ambr | 511 ++++++++++++----------- 1 file changed, 256 insertions(+), 255 deletions(-) diff --git a/tests/__snapshots__/test_create_app.ambr b/tests/__snapshots__/test_create_app.ambr index ed597e46b..57e4258ef 100644 --- a/tests/__snapshots__/test_create_app.ambr +++ b/tests/__snapshots__/test_create_app.ambr @@ -851,257 +851,257 @@ font-weight: 700; } - .terminal-4194849765-matrix { + .terminal-3843971496-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-4194849765-title { + .terminal-3843971496-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-4194849765-r1 { fill: #c5c8c6 } - .terminal-4194849765-r2 { fill: #e3e3e3 } - .terminal-4194849765-r3 { fill: #989898 } - .terminal-4194849765-r4 { fill: #e1e1e1 } - .terminal-4194849765-r5 { fill: #4ebf71;font-weight: bold } - .terminal-4194849765-r6 { fill: #1e1e1e } - .terminal-4194849765-r7 { fill: #0178d4 } - .terminal-4194849765-r8 { fill: #454a50 } - .terminal-4194849765-r9 { fill: #e2e2e2 } - .terminal-4194849765-r10 { fill: #808080 } - .terminal-4194849765-r11 { fill: #e2e3e3;font-weight: bold } - .terminal-4194849765-r12 { fill: #000000 } - .terminal-4194849765-r13 { fill: #e4e4e4 } - .terminal-4194849765-r14 { fill: #14191f } - .terminal-4194849765-r15 { fill: #507bb3 } - .terminal-4194849765-r16 { fill: #dde6ed;font-weight: bold } - .terminal-4194849765-r17 { fill: #001541 } - .terminal-4194849765-r18 { fill: #7ae998 } - .terminal-4194849765-r19 { fill: #0a180e;font-weight: bold } - .terminal-4194849765-r20 { fill: #008139 } 
- .terminal-4194849765-r21 { fill: #fea62b;font-weight: bold } - .terminal-4194849765-r22 { fill: #a7a9ab } - .terminal-4194849765-r23 { fill: #e2e3e3 } + .terminal-3843971496-r1 { fill: #c5c8c6 } + .terminal-3843971496-r2 { fill: #e3e3e3 } + .terminal-3843971496-r3 { fill: #989898 } + .terminal-3843971496-r4 { fill: #e1e1e1 } + .terminal-3843971496-r5 { fill: #4ebf71;font-weight: bold } + .terminal-3843971496-r6 { fill: #1e1e1e } + .terminal-3843971496-r7 { fill: #0178d4 } + .terminal-3843971496-r8 { fill: #454a50 } + .terminal-3843971496-r9 { fill: #e2e2e2 } + .terminal-3843971496-r10 { fill: #808080 } + .terminal-3843971496-r11 { fill: #e2e3e3;font-weight: bold } + .terminal-3843971496-r12 { fill: #000000 } + .terminal-3843971496-r13 { fill: #e4e4e4 } + .terminal-3843971496-r14 { fill: #14191f } + .terminal-3843971496-r15 { fill: #507bb3 } + .terminal-3843971496-r16 { fill: #dde6ed;font-weight: bold } + .terminal-3843971496-r17 { fill: #001541 } + .terminal-3843971496-r18 { fill: #7ae998 } + .terminal-3843971496-r19 { fill: #0a180e;font-weight: bold } + .terminal-3843971496-r20 { fill: #008139 } + .terminal-3843971496-r21 { fill: #fea62b;font-weight: bold } + .terminal-3843971496-r22 { fill: #a7a9ab } + .terminal-3843971496-r23 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Template features - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use reference The pipeline will  Hide help  - ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - use a copy of the  - most common  - reference genome  - files from  - iGenomes - - - Nf-core pipelines are configured to use a copy of the most common  - reference genome files. 
- - By selecting this option, your pipeline will include a configuration - file specifying the paths to these files. - - The required code to use these files will also be included in the  - template. When the pipeline user provides an appropriate genome key, - the pipeline will automatically download the required reference ▂▂ - files. - - - ▃▃ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github CI The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        testsinclude several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - GitHub actions for - Continuous  - Integration (CI)  - testing - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github badgesThe README.md file Show help  - ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - will include  - GitHub badges - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Continue  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode  q Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Template features + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use reference The pipeline will  Hide help  + ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + use a copy of the  + most common  + reference genome  + files from  + iGenomes + + + Nf-core pipelines are configured to use a copy of the most common  + reference genome files. + + By selecting this option, your pipeline will include a configuration + file specifying the paths to these files. + + The required code to use these files will also be included in the  + template. 
When the pipeline user provides an appropriate genome key, + the pipeline will automatically download the required reference ▂▂ + files.▁▁ + + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github CI The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        testsinclude several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + GitHub actions for + Continuous  + Integration (CI)  + testing + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github badgesThe README.md file Show help  + ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + will include  + GitHub badges + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Back  Continue  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  d Toggle dark mode  q Quit  @@ -2233,254 +2233,255 @@ font-weight: 700; } - .terminal-423998432-matrix { + .terminal-1616690770-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-423998432-title { + .terminal-1616690770-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-423998432-r1 { fill: #c5c8c6 } - .terminal-423998432-r2 { fill: #e3e3e3 } - .terminal-423998432-r3 { fill: #989898 } - .terminal-423998432-r4 { fill: #e1e1e1 } - .terminal-423998432-r5 { fill: #4ebf71;font-weight: bold } - .terminal-423998432-r6 { fill: #1e1e1e } - .terminal-423998432-r7 { fill: #507bb3 } - .terminal-423998432-r8 { fill: #e2e2e2 } - .terminal-423998432-r9 { fill: #808080 } - .terminal-423998432-r10 { fill: #dde6ed;font-weight: bold } - .terminal-423998432-r11 { fill: #001541 } - .terminal-423998432-r12 { fill: #454a50 } - .terminal-423998432-r13 { fill: #7ae998 } - .terminal-423998432-r14 { fill: #e2e3e3;font-weight: bold } - .terminal-423998432-r15 { fill: #0a180e;font-weight: bold } - .terminal-423998432-r16 { fill: #000000 } - .terminal-423998432-r17 { fill: #008139 } - .terminal-423998432-r18 { fill: #fea62b;font-weight: bold } - .terminal-423998432-r19 { fill: #a7a9ab } - .terminal-423998432-r20 { fill: #e2e3e3 } + .terminal-1616690770-r1 { fill: 
#c5c8c6 } + .terminal-1616690770-r2 { fill: #e3e3e3 } + .terminal-1616690770-r3 { fill: #989898 } + .terminal-1616690770-r4 { fill: #e1e1e1 } + .terminal-1616690770-r5 { fill: #4ebf71;font-weight: bold } + .terminal-1616690770-r6 { fill: #1e1e1e } + .terminal-1616690770-r7 { fill: #507bb3 } + .terminal-1616690770-r8 { fill: #e2e2e2 } + .terminal-1616690770-r9 { fill: #808080 } + .terminal-1616690770-r10 { fill: #dde6ed;font-weight: bold } + .terminal-1616690770-r11 { fill: #001541 } + .terminal-1616690770-r12 { fill: #14191f } + .terminal-1616690770-r13 { fill: #454a50 } + .terminal-1616690770-r14 { fill: #7ae998 } + .terminal-1616690770-r15 { fill: #e2e3e3;font-weight: bold } + .terminal-1616690770-r16 { fill: #0a180e;font-weight: bold } + .terminal-1616690770-r17 { fill: #000000 } + .terminal-1616690770-r18 { fill: #008139 } + .terminal-1616690770-r19 { fill: #fea62b;font-weight: bold } + .terminal-1616690770-r20 { fill: #a7a9ab } + .terminal-1616690770-r21 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Template features - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use reference The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - use a copy of the  - most common  - reference genome  - files from  - iGenomes - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github CI The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        testsinclude several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - GitHub actions for - Continuous  - Integration (CI)  - testing - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github badgesThe README.md file Show help  - ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - will include  - GitHub badges - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add configuration         The pipeline 
will  Show help  - ▁▁▁▁▁▁▁▁        filesinclude ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - configuration  - profiles  - containing custom  - parameters  - requried to run  - nf-core pipelines  - at different  - institutions - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use code lintersThe pipeline will  Show help  - ▁▁▁▁▁▁▁▁include code ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Continue  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode  q Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Template features + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use reference The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + use a copy of the  + most common  + reference genome  + files from  + iGenomes + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github CI The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        testsinclude several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + GitHub actions for + Continuous  + Integration (CI)  + testing + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github badgesThe README.md file Show help  + ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + will include  + GitHub badges + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add configuration         The pipeline will  Show help ▄▄ + ▁▁▁▁▁▁▁▁        filesinclude ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + configuration  + profiles  + containing custom  + parameters  + requried to run  + nf-core pipelines  + at different  + institutions + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use code lintersThe pipeline will  Show help  + ▁▁▁▁▁▁▁▁include code ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Back  Continue  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  d Toggle dark mode  q Quit  From 3c1b1b07bf57aa5ab36fc60d0deb96859053057e Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 7 Aug 2024 10:01:49 +0200 Subject: [PATCH 440/737] add multiqc potion to test data --- tests/data/pipeline_create_template_skip.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/data/pipeline_create_template_skip.yml 
b/tests/data/pipeline_create_template_skip.yml index 1431114c0..7ae5bba8c 100644 --- a/tests/data/pipeline_create_template_skip.yml +++ b/tests/data/pipeline_create_template_skip.yml @@ -14,3 +14,4 @@ skip_features: - code_linters - citations - gitpod + - multiqc From 2fcfd0e7237ce6c624ce538d902c2c14c6d0dcab Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 7 Aug 2024 10:12:23 +0200 Subject: [PATCH 441/737] update textual snapshots --- tests/__snapshots__/test_create_app.ambr | 512 +++++++++++------------ 1 file changed, 256 insertions(+), 256 deletions(-) diff --git a/tests/__snapshots__/test_create_app.ambr b/tests/__snapshots__/test_create_app.ambr index 51ff46d68..9aad37cc9 100644 --- a/tests/__snapshots__/test_create_app.ambr +++ b/tests/__snapshots__/test_create_app.ambr @@ -851,257 +851,257 @@ font-weight: 700; } - .terminal-2778615119-matrix { + .terminal-2285198722-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2778615119-title { + .terminal-2285198722-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2778615119-r1 { fill: #c5c8c6 } - .terminal-2778615119-r2 { fill: #e3e3e3 } - .terminal-2778615119-r3 { fill: #989898 } - .terminal-2778615119-r4 { fill: #e1e1e1 } - .terminal-2778615119-r5 { fill: #4ebf71;font-weight: bold } - .terminal-2778615119-r6 { fill: #1e1e1e } - .terminal-2778615119-r7 { fill: #0178d4 } - .terminal-2778615119-r8 { fill: #454a50 } - .terminal-2778615119-r9 { fill: #e2e2e2 } - .terminal-2778615119-r10 { fill: #808080 } - .terminal-2778615119-r11 { fill: #e2e3e3;font-weight: bold } - .terminal-2778615119-r12 { fill: #000000 } - .terminal-2778615119-r13 { fill: #e4e4e4 } - .terminal-2778615119-r14 { fill: #14191f } - .terminal-2778615119-r15 { fill: #507bb3 } - .terminal-2778615119-r16 { fill: #dde6ed;font-weight: bold } - .terminal-2778615119-r17 { fill: #001541 } - .terminal-2778615119-r18 { fill: #7ae998 } - 
.terminal-2778615119-r19 { fill: #0a180e;font-weight: bold } - .terminal-2778615119-r20 { fill: #008139 } - .terminal-2778615119-r21 { fill: #fea62b;font-weight: bold } - .terminal-2778615119-r22 { fill: #a7a9ab } - .terminal-2778615119-r23 { fill: #e2e3e3 } + .terminal-2285198722-r1 { fill: #c5c8c6 } + .terminal-2285198722-r2 { fill: #e3e3e3 } + .terminal-2285198722-r3 { fill: #989898 } + .terminal-2285198722-r4 { fill: #e1e1e1 } + .terminal-2285198722-r5 { fill: #4ebf71;font-weight: bold } + .terminal-2285198722-r6 { fill: #1e1e1e } + .terminal-2285198722-r7 { fill: #0178d4 } + .terminal-2285198722-r8 { fill: #454a50 } + .terminal-2285198722-r9 { fill: #e2e2e2 } + .terminal-2285198722-r10 { fill: #808080 } + .terminal-2285198722-r11 { fill: #e2e3e3;font-weight: bold } + .terminal-2285198722-r12 { fill: #000000 } + .terminal-2285198722-r13 { fill: #e4e4e4 } + .terminal-2285198722-r14 { fill: #14191f } + .terminal-2285198722-r15 { fill: #507bb3 } + .terminal-2285198722-r16 { fill: #dde6ed;font-weight: bold } + .terminal-2285198722-r17 { fill: #001541 } + .terminal-2285198722-r18 { fill: #7ae998 } + .terminal-2285198722-r19 { fill: #0a180e;font-weight: bold } + .terminal-2285198722-r20 { fill: #008139 } + .terminal-2285198722-r21 { fill: #fea62b;font-weight: bold } + .terminal-2285198722-r22 { fill: #a7a9ab } + .terminal-2285198722-r23 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Template features - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use reference The pipeline will  Hide help  - ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - use a copy of the  - most common  - reference genome  - files from  - iGenomes - - - Nf-core pipelines are configured to use a copy of the 
most common  - reference genome files. - - By selecting this option, your pipeline will include a configuration - file specifying the paths to these files. - - The required code to use these files will also be included in the  - template. When the pipeline user provides an appropriate genome key, - the pipeline will automatically download the required reference ▂▂ - files. - - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github CI The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        testsinclude several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - GitHub actions for - Continuous  - Integration (CI)  - testing - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github badgesThe README.md file Show help  - ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - will include  - GitHub badges - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Continue  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode  q Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Template features + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use reference The pipeline will  Hide help  + ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + use a copy of the  + most common  + reference genome  + files from  + iGenomes + + + Nf-core pipelines are configured to use a copy of the most common  + reference genome files. + + By selecting this option, your pipeline will include a configuration + file specifying the paths to these files. + + The required code to use these files will also be included in the  + template. When the pipeline user provides an appropriate genome key,▁▁ + the pipeline will automatically download the required reference ▂▂ + files. 
+ + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github CI The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        testsinclude several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + GitHub actions for + Continuous  + Integration (CI)  + testing + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github badgesThe README.md file Show help  + ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + will include  + GitHub badges + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Back  Continue  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  d Toggle dark mode  q Quit  @@ -2233,255 +2233,255 @@ font-weight: 700; } - .terminal-763408100-matrix { + .terminal-3802907671-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-763408100-title { + .terminal-3802907671-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-763408100-r1 { fill: #c5c8c6 } - .terminal-763408100-r2 { fill: #e3e3e3 } - .terminal-763408100-r3 { fill: #989898 } - .terminal-763408100-r4 { fill: #e1e1e1 } - .terminal-763408100-r5 { fill: #4ebf71;font-weight: bold } - .terminal-763408100-r6 { fill: #1e1e1e } - .terminal-763408100-r7 { fill: #507bb3 } - .terminal-763408100-r8 { fill: #e2e2e2 } - .terminal-763408100-r9 { fill: #808080 } - .terminal-763408100-r10 { fill: #dde6ed;font-weight: bold } - .terminal-763408100-r11 { fill: #001541 } - .terminal-763408100-r12 { fill: #14191f } - .terminal-763408100-r13 { fill: #454a50 } - .terminal-763408100-r14 { fill: #7ae998 } - .terminal-763408100-r15 { fill: #e2e3e3;font-weight: bold } - .terminal-763408100-r16 { fill: #0a180e;font-weight: bold } - .terminal-763408100-r17 { fill: #000000 } - .terminal-763408100-r18 { fill: #008139 } - .terminal-763408100-r19 { fill: #fea62b;font-weight: bold } - .terminal-763408100-r20 { fill: #a7a9ab } - .terminal-763408100-r21 { fill: #e2e3e3 } + .terminal-3802907671-r1 { fill: #c5c8c6 } + .terminal-3802907671-r2 { fill: #e3e3e3 } + .terminal-3802907671-r3 { fill: #989898 } 
+ .terminal-3802907671-r4 { fill: #e1e1e1 } + .terminal-3802907671-r5 { fill: #4ebf71;font-weight: bold } + .terminal-3802907671-r6 { fill: #1e1e1e } + .terminal-3802907671-r7 { fill: #507bb3 } + .terminal-3802907671-r8 { fill: #e2e2e2 } + .terminal-3802907671-r9 { fill: #808080 } + .terminal-3802907671-r10 { fill: #dde6ed;font-weight: bold } + .terminal-3802907671-r11 { fill: #001541 } + .terminal-3802907671-r12 { fill: #14191f } + .terminal-3802907671-r13 { fill: #454a50 } + .terminal-3802907671-r14 { fill: #7ae998 } + .terminal-3802907671-r15 { fill: #e2e3e3;font-weight: bold } + .terminal-3802907671-r16 { fill: #0a180e;font-weight: bold } + .terminal-3802907671-r17 { fill: #000000 } + .terminal-3802907671-r18 { fill: #008139 } + .terminal-3802907671-r19 { fill: #fea62b;font-weight: bold } + .terminal-3802907671-r20 { fill: #a7a9ab } + .terminal-3802907671-r21 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Template features - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use reference The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - use a copy of the  - most common  - reference genome  - files from  - iGenomes - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github CI The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        testsinclude several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - GitHub actions for - Continuous  - Integration (CI)  - testing - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github badgesThe README.md file Show help  - ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - will include  - GitHub badges - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add configuration         The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        filesinclude ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - configuration  - profiles  - 
containing custom  - parameters  - requried to run  - nf-core pipelines  - at different ▁▁ - institutions - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use code lintersThe pipeline will  Show help  - ▁▁▁▁▁▁▁▁include code ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Continue  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode  q Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Template features + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use reference The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + use a copy of the  + most common  + reference genome  + files from  + iGenomes + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github CI The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        testsinclude several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + GitHub actions for + Continuous  + Integration (CI)  + testing + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github badgesThe README.md file Show help  + ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + will include  + GitHub badges▃▃ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add configuration         The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        filesinclude ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + configuration  + profiles  + containing custom  + parameters  + requried to run  + nf-core pipelines  + at different  + institutions + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use code lintersThe pipeline will  Show help  + ▁▁▁▁▁▁▁▁include code ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Back  Continue  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  d Toggle dark mode  q Quit  From 57ed19ae36e40e74dda75f18c008a606f79073af Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 7 Aug 2024 10:46:27 +0200 Subject: [PATCH 442/737] run tests also after pytest-migrate --- nf_core/commands_modules.py | 35 +++++++++++++++++------------------ 1 file changed, 17 insertions(+), 18 deletions(-) diff --git a/nf_core/commands_modules.py b/nf_core/commands_modules.py index c65c42d41..a34e9a519 100644 --- 
a/nf_core/commands_modules.py +++ b/nf_core/commands_modules.py @@ -242,24 +242,23 @@ def modules_test(ctx, tool, dir, no_prompts, update, once, profile, migrate_pyte empty_template=False, migrate_pytest=migrate_pytest, ) - else: - try: - module_tester = ComponentsTest( - component_type="modules", - component_name=tool, - directory=dir, - no_prompts=no_prompts, - update=update, - once=once, - remote_url=ctx.obj["modules_repo_url"], - branch=ctx.obj["modules_repo_branch"], - verbose=ctx.obj["verbose"], - profile=profile, - ) - module_tester.run() - except (UserWarning, LookupError) as e: - log.critical(e) - sys.exit(1) + try: + module_tester = ComponentsTest( + component_type="modules", + component_name=tool, + directory=dir, + no_prompts=no_prompts, + update=update, + once=once, + remote_url=ctx.obj["modules_repo_url"], + branch=ctx.obj["modules_repo_branch"], + verbose=ctx.obj["verbose"], + profile=profile, + ) + module_tester.run() + except (UserWarning, LookupError) as e: + log.critical(e) + sys.exit(1) def modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed, sort_by, fix_version): From baae14c03e4feec6ef77caf32c610079dcad419f Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 7 Aug 2024 10:47:39 +0200 Subject: [PATCH 443/737] and the same for subworkflows --- nf_core/commands_subworkflows.py | 35 ++++++++++++++++---------------- 1 file changed, 17 insertions(+), 18 deletions(-) diff --git a/nf_core/commands_subworkflows.py b/nf_core/commands_subworkflows.py index 8c3be2bb4..8b2db3578 100644 --- a/nf_core/commands_subworkflows.py +++ b/nf_core/commands_subworkflows.py @@ -44,24 +44,23 @@ def subworkflows_test(ctx, subworkflow, dir, no_prompts, update, once, profile, if migrate_pytest: subworkflows_create(ctx, subworkflow, dir, None, False, True) - else: - try: - sw_tester = ComponentsTest( - component_type="subworkflows", - component_name=subworkflow, - directory=dir, - no_prompts=no_prompts, - update=update, - once=once, - 
remote_url=ctx.obj["modules_repo_url"], - branch=ctx.obj["modules_repo_branch"], - verbose=ctx.obj["verbose"], - profile=profile, - ) - sw_tester.run() - except (UserWarning, LookupError) as e: - log.critical(e) - sys.exit(1) + try: + sw_tester = ComponentsTest( + component_type="subworkflows", + component_name=subworkflow, + directory=dir, + no_prompts=no_prompts, + update=update, + once=once, + remote_url=ctx.obj["modules_repo_url"], + branch=ctx.obj["modules_repo_branch"], + verbose=ctx.obj["verbose"], + profile=profile, + ) + sw_tester.run() + except (UserWarning, LookupError) as e: + log.critical(e) + sys.exit(1) def subworkflows_list_remote(ctx, keywords, json): From f9eb2a58d7ad231cd14f971d86b57558d3e67912 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 7 Aug 2024 11:15:59 +0200 Subject: [PATCH 444/737] remove multiqc images from the template docs --- .../docs/images/mqc_fastqc_adapter.png | Bin 23458 -> 0 bytes .../docs/images/mqc_fastqc_counts.png | Bin 33918 -> 0 bytes .../docs/images/mqc_fastqc_quality.png | Bin 55769 -> 0 bytes nf_core/pipeline-template/docs/output.md | 11 +---------- 4 files changed, 1 insertion(+), 10 deletions(-) delete mode 100755 nf_core/pipeline-template/docs/images/mqc_fastqc_adapter.png delete mode 100755 nf_core/pipeline-template/docs/images/mqc_fastqc_counts.png delete mode 100755 nf_core/pipeline-template/docs/images/mqc_fastqc_quality.png diff --git a/nf_core/pipeline-template/docs/images/mqc_fastqc_adapter.png b/nf_core/pipeline-template/docs/images/mqc_fastqc_adapter.png deleted file mode 100755 index 361d0e47acfb424dea1f326590d1eb2f6dfa26b5..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 23458 zcmeFZ2UJtryD!S#x<#o93es(Ww4k)maRbte0-+a?-g^xY-3myTE`8G_KvA54)F1tn})nJ5u%TA4Y;^!^{48eL_}p#q-Umo0M|F1 z74+PQh^X8N|9_jcWbq~ zzn+tZC9B75nKdz=gQ8wo9GJ$P{D~3knlI_`-PRhCw34f1oYDLr^;oEbgxa#A^J%*2 
z>FfDE*(~JzKFs$t_oeLz))qDU?s}%Q?7b~3Y;lUi^Oy-2@3g?joA4Wkgb6-2=ih*jub)~7yZ`T=L=Z`B`{1jhkB-iSjea94&Eo9A zxN59pv1p_}RO1>EC^q}Z2)ZI;b7JV_x4lMr=Bker2+EK;8~!;JO7re*@ZkDmoV878S*N^yX(F@U1yqt?Is3nnV>7}#(5pk`V3C) zWhB8;CwWIwsVIjH+`<9=YA(j&3DgQdFOOGU~*`36wNC&QDv8> zr?h2PQgnHkp&t^S)q^K!68h~`$PjZW&-Wns;Zlw$M2sc z1xR!u{m|Kih*|Hht#M@eOMM#8O*={^6b9k5B5^eBsrnhVHD7XZ5BWO&F?q(>Y=QFl z`f>yQ9NCoxZCH-1F{#mz_j{QeyY~4h*VeyYZ#S@Z(Pnb7G=ud!RW)5svqM*&GI_za zzn;8LkOTT?``1Ygt6w!2;5arK*o5k15cdIJnMg)IQhF_zVK%!ma$z&jL zZt>Q{!PqKl^`Qw?nJUOEm@@qX(y(TwSJ~dqW&M@7-N4Wk_wC4izx(xJMrmNjsl$XR zCyK&INt}7@FzNAbbg-nW)sJ>3->I1+2~YdlPsaS}^X-H0GR_CEsw`PGjpq`uX}8VP zJ)HC34>D(z{KR9;E&z=@?@q_|I{NPOj~g>w!$gR?Tlu~F+L$Mk%}xQEm+{&T(5zkH zacVy0k3w!T9r*p2sgX@V;^+PfUYUrEde07XSV=KSDbkIZU!j!Rk3MQV=h-!y@kWVB zdYkmu^fiU~pp#ixe4hBEMx7^LdHa z_L*14aVIHtrsR)SO?=&kQS&JR#^AVvln=P=bUXEIy$QB&!s34znCV@y(C%j9V=}SU zoYLHn+-Lalm0$-=QQ}a(+2dR*{DPF+)J4y!ukiA_T%dF zVKEk;c?LWheG#A5{A20}CKjMw5G%2}cT5@Oce=wqdobHC70=kY7}dxt3diH9(Zcwr zCabx8yObHQ@#e_wjl%wp8s_!Wvxe5f-Duin@obgt>qOcqN$$@{X^C_rEDh3fmM;|X z$zu4;D`{YRbaJ?o!KkazII&|th9v5MG2Mao$ytOHtW+wo;XJJdtLuGjg;d020qT++ zpD}e&o?SeKSqR`}4`OdkWNC7K)Wltn zbwBrWGM;bBGm8uP_RiqfwvDD1f+uRX>b=nTH9Y%vpg{ka0e*E>%<+3!G3#s*-1D>q zHg~1@BT52a*L>mVcP>6y*0iX8@!3tDFJLE+sRlnU(cl``hF`0Q>e4i6P8|wKmqIqI zoY+a0V*Bib0`F9nG#sR(8$^!IWLR)cE8@7XZTN%L-ucJ{9yijy)w5Pom%XG7V<^PX z$Z$U82w0qgcGmld-O6*e)?pm$g@!6`Pps5SPKccjDf(|vX9zcLs7t!7cyyckZI#R* z#lj(HqfVeqyZ+Va{)>65sAb3IQ%a{9W^_F!5!;w=XD}ZUHFH$8=Xjw+VE)s$q(nt> zE2^aDYki5`e73RQ=DxaBNZ6CK?XKCv@V}=y(g?YHnFaHfXnl}Lo;36@?471W;&#Se z>pE*@M{Y?CevLG8il9#HXG#W3>;o$1``EYBY5i<;JlBqj2M8Y2!+6bPj1(S_bOksY z<34UQE;=Z>KiL``pYd}5fpOOT)GJQnXfNiAc5wgJ>F|$Eqw&D*Vmz+#mM0oFD^`-^ zB~SXe{T+5hd$gnKd7Afo9cy&Lii@syPDFDK)^V{iWEAEO@?xzx1bd`ta z;$(vG+=i3~9|D=GX%f~<>eOVjy~-yRAhLf2dR8V<@M_`C^ev(yOTg{uf=L3uyDb-w z&)l7KXS_HTo87BxI}fXF{ge&5p&IHk9M1}eNAwqw)`eZSOPFhqjS70{hyE@C{oSN$ zam*`-UH3RF-RWEP`^Su1q#n_J{AncekkV4m7YITf%QHBo60h@pk4N4O}hhf%rxuIZGiQpprVMal%h7?8+cY#L>pYnx6v!EnuIgInW` 
z)w!NuTp;fz9md^}*x@K9+`^2LO*bZp1^?BG#iS@(4i%AB6YP023T8Eb?M5K7ElSpe z9-wA22Mm}VwDkmECLd*}a=7bCf(}@SHs6UBe)Xvk(+hQ^^unj5JBeo$=><{4PBI%P z4_9XQ=XnE``;1Daa6f`~rGwNj9{YXY)eIw3G90Ip+QEWg0%?g=i$UHuQ?Qc0OR0!w zv?BvlQa!QMyI*IP!0>goBt$xo2^hlD&wRp?$=}}#?q~Yw z{**_|5&yL*Epz|4V#SJjg-lNaIx_{sCL3R=_VH&_;oOn5J2P=h!0enu-i%FAZ- zw`Hm*u6N*}&A7pAqr>-?%0(lveb{r8>hpDmex?Yo*8!-%1?YV0R~VEPBFp>)ba=mv+2(#>WEy0yxHZX=Cr2 zKmew%=^>HsD3BtRR*#H!@!TTGcI&fHrVh)P&|X;>)OHML+uWDn(dlsDjXa;5uBM$r zdt!r~ig?5iGbx!GpH+kdG8k0%;~)Q#0L6wFROJ}^Z%DvO3x#yNk13^&ccd&l)BP9h zD5cU-qZg-rV3Sg&?)`x}cI3`zw#zq{-eN4pNf(+?QuOG4oZ7zMGSVqOUe>`u=GfKM z{xPCciJFw9%Pk+uDSoormR&c=fS#hGOk=RGUtizBOoY^8P(>!Si|I9i=1ZCQbcc)5 zgE6UED;+b$4u&#dhZjdXwO3tpG0QaQwXrLOx5YP#TOaS@FP!h|G!z!Pbv?hTp0eQL zoUsiv4d@*Ck#ID9-ua|zPbQepcC4a>>9-bJApd()Wg%}hj#%A4pO-q{jIJ$f-SL7- zo&=keG_jhq$Ty4e|J^l6j6TQ=W)|~&Ei6gRn<{*^cFG*tS19#kHpMD7Y;wb~!3_%X zS_-3NQoGiWCX!M-Id;Nsg7oSi4VJ=Hi{bYNfjnmTq?IyK@@&_uacfb&8h@DIe70-Q zZ^KaT(4UX*vf7@A7CY;P!IVGIuXPRIe^&71Z1EyHO5&^=jUUKHF+h&m!4!dOA+!Ed zfA#uQ&p6vD7|O8(?5`bf8^gK)6p`>+$c*yG?Sw29;OD+tp}kDD9augDAEXWbSVoie zpHF1Wj8lWfIZ}mx%(2XREqF9!{fNd&iurAaoQDMCSNo!vRHE8wH%QLLZf9u;ADqnxOaAD#VE%Yg z?Gb?EmGbY}a0|vSZPlF3z6;Kf669Bf%h zlSGiY-}E4LFurm_CJN)(*l?=uX);o&R&qLuzENz?9I%S&YQ2>rVhx#c!hbvWLL!CI zA8mXM$zjnnJ#Me@-99}hjxCE!w8|9w{SBlj%Miq#dvS5GHP!DxO$sDx^4PF^#`;A! zb=bZ1pyj{R#9h$r7svB$QlJqeF1cp*ubT12UZ!deKFG%1N<@S2x&2UtqsVz zn=gF&$D4i3x7&vdoa#^cS?bQuP69OpspVPxm*%@DSWf!NG`o`y^R~o1Hvta;#!r%i zvEB~Jsi~sJ7Y35P!bf?OQin->fAk+TpU$Ow1st|l9|i2rrOneBP3&aDyoUj3K{a7! 
zOYpnJyYD#nr4GNJ;@$ce2dSN=eS7f-VptzM(|Ek^ze)mPVrpAEgrFs3mL>f(ZwriH zCZ65HdO0|W@2<+v9t?J=-4U9>bvM@@Ew4uVZy@c^Ovw9`k|$!+CTAn(u#4kC7TVTB zXuy#d+GC@RIMaPyp|Y2jS%RJkktCracCaLqfs^i^XFqK#3z+d}n02*VDF&My)vp)lNzWx<< zGB7hEAH?7_joYR?>+&+JIas*%Oiux%kr*X*B=8N8Ulowx0MkRK?pR)K1F_m8>dSe54 z)48k>#|F!OV#yOs7xQNQ@1iun5pl;py{tx+o044?r{W2O{f}3r{#QS#4bf(|f9R3y#6*0YY) z5Ey{M`dj)yHl)B{sdmvti^b0IE5xFx%jJM&5w69;`PGy0vGk2ztSW|5H3~zhXO?mn z+4mo>;Y7=4&gC}HifyMO`#70u3H6;0|| z!l=0lP|zVF`bfxm{%i98943^7y4Iz};Z9F$oY3iUI*FIsYa=o=nS^d`;3?*wDxi&| z=?oqs6uDcd1e_e5z7M5q(+I^PilSRE(T6%z<=U8%sq63V!wELY9Rj%#Y@2Y+TEJ8(f_Kh0ih?l6E6~wDl3~?-5%7>d{ zKs0XHUeORoi5+U#M{kE!Ae%|)^dabh1DsJI9N~LVXp*8$XlOfc6J+Cc?}SM zsc3N~L7hzcpXn2>b(_YN=J*C0N}$f_NINTiV!~L}nA{wn^XfBogd5hu!G?*THg^mF zFJm@9m{X~X3t5{7 z#lWIO++R8;BTByGl7U;fz|JBB^*4R|bLvm18x;DF*U`=kyxbH2nD*RIH5AWfJ4^5o z&Nr;*|NreNKo$fUI5}~n#Xcbjr0T-7MV;wZXA(QPt^`x;=ZK)5^`AFgQM?7ry_(Tm z0|EhWs&cYJW?|uvc3af(tfuyDf$28~R=HOa#}3Edru##Wwm0a$Vnk=_8+eQ; zfyq+GVt0Twr^QS*HtI+&&>_<%-Gq-!{iQr-3LYn-6bqW0VW)>%iat!2IP)Jd+LgnS zgI+jJ-I9HMJ8Z*$2FjwK1T0RpF%U`&x)S{3HqRJ z5^;r?VoA(k7*aP@tzB`O5Y26jv#x54xNH;E`KzzLxC)FEnQ<}IR#w*>9sq|zFzZq< zdM1%ynXvcLfZ{Xm=l(Op?=XGV8`BwRiQ%@@A-GnjD+y3K zN2Pm011b!s`3368%P&MapW-PDulXKfpeyRXNjN`lKKgC%CplwE#GrRw#0FE#Q4>R+ z23B4CmO%uy8Y@;F$hCHU6+oJ}_cKgm|4Amr{$`38ue-?+GX1T!hd$w@x=z{w30Z*W za@$MLl^=f#*oR+8(&a&`E@Bj{{1O;DPjj$g9U7~{m*?^Tj}Rrc^wc=(SycXVT?bW{ zUus*6{74fo{nOh@zQyv0g{)t}Qekl*>KXQYCI9m2jqge|&Ntj{V?gLs*_GkeODYhf zW39Q1L1~vk+#E^S!nCyO&z9Wh}2=K}`9#{=`j&)^}8=U|lz}DqgAteVsos){s zDhK`>&pK%cVuhO7tPu7@Y4|yXAdHs!(uKDuLL@i$Okc6Gs;2456Br??ZNZiONAe!~ zvY5w1(C)E9fRmpWgWU2Su0u6~9{@wIm<-lha;uuEN>&C^FJ#^|oopkg``l#i0&{OX z%rI6Q>l^9J++K19D;HrFU#V9o0M`MBTT#-(q&A{|n-`T~CgAFET=$E_&pIQTPE;J#&nrwf2N^I*d zH)ev~7d=Sy8<@syK<`PFvNtyfa#8^JceG^ua^o%!fl6R&j--jGkz8wS`EgfEZouOD zr97H059Dj(#$*$-!UQLvb92wS40!wJc!4K~lq-K2h2rXunCs?SjQERnvv9Fs?tF;y zWUTcQ&PtDMbsUY6_&np`UGMS0ZZIhnDh~p{`Bryj7XS~*R}%z6 zUO^hJn$_-CW(;$)hHu0ej1BNqv^o%*D2gR6zUvCZyw)ddNB6JE$;okhf7PEEz|dRN 
z$sP&o`MU(L_I8mDW33;)3!U*;HRm$zVV%%zaDn^*Qj~RdWdFNb;^fRhnF&{oeY-tv zq$p~pZw)Ls$EWKsEZubtx_9bpdCfsjdy*<8_Io8VtCIC+8kk@Qxdti>xnu}nRYJ-y zp8$3YP7u;u+YlPQ2`o_>S?mpXvd0-x!Z3=}>ceWDg*e)+#wQLE)Uwhneo z;*y`VfoY<#lwT^k4BP(ytfI;M`FoYsedi}L{1V|Ho}ciBs=`@vtgnieHdpWz%Vyy$ zlnn?k0KJWOnlJD9>6y64*X=G{lyl&%pV8Uo&>tXw%1za!6*YYVB$jR$Y0XhB#1mVx zvjd8N4X~{Dd&28RVEkCw9TLN9*Ng!?9F88l2Bl)w%7!97mtx5(Qx%1u6h+$OGa4#qGGGI{Pj4d)5yg8F4O2sfu61u0uM}?$_nH8=0St?`ogZ@1LAr@*uC4Z9(|dIQ z?OH<_%?PD56K*Kty@PQT;W#)tazY~|I7-aq)tQ($$#Q?{gEbJwJK3mnk)|l>XgmJQ z_POHzee+4NEWu0i0zUFmLTF(zvD3B%sp1_F7 z<|O7{-oZ2>t9k~zX0MDQ(4&(YZ#~baV{$ah?o_K1p$Ad`PAvgtuhW(xO{@bMjNb>Y z-k>lsDx?xX;x5*9RSpJe~BwLtb79%{p~+JTs5HZ&#({u>j3kAOLx*Y zW{7^+`OD%vhcxVW39F$jZ;I@H`3X?>Wwt@269f1o{V4-t-|dX4x7L3j zUHltoa@jqToWvn&=0CF%6%D0h50m^)qaXkRMC&Owv8iG~$}1PBgld3nBE#Rg(5)8n zga7!2@yjoBBoF_e3M$ongy7N1L_hT@!LUaCXX6QLZFKcq1r;;Z$sca}zfwaCji7PcbfW7H9p`7Eh$-j*7-=%{5f&}TidFWiMr=NYvc}Q@gh_z)<;^d&F zd@za3ugvK(BbprUX|)`Rk0&+6)#sm5S8a7;dzrqn*f)iXpvW$BVu6u)bR+ywtGne@B61Om=Q)yvb`45S}|LKt&5@)wSOfk;LhZ^UofjlQz0h zm)>a9f&40n$;-ndr=xntY3nOFGmA5POfiIsfgTzT*Cl zU{P;It;qo}n}IeEA1&?GRONCJp3=_!ce2$kKRZonNV+tS_uFPWzeS zhqSPws(Jp?TsgNT7yGtphSz=h2-}y#HTWNE#@LHFs^pseT#RfN*P8yLUm`jG1N5s* zfU25qv2akmjD=Q`s4SJxi@i`xIOCdT5B%W6wj1Fz8)Kuv*iB`}b^(em~z zz4~VcUB9M5@W}s3-SOWXu+*?)Al7p)Bw?jh8_#s)>lYp{{b%_vCY00=iC@I3$FcpY zYuOjg948l-C~}cDxL!%j&X1(H6ZC7U5?oVLQ<)zh*qg)k6HdNPB;PQcbVRXucl7>@ zE`Ga=^8RPrIRE!3E#e-v8MTy%%a1yk_k{s|V-=5ML7(Mg#S@LA3;rEyjF&X1w*^R&VJ>2%B@{=W9BD)oa@0!_Gl{G8Oe+Vki1QQWd~<<~Et zEV_YlJ=t8VXv>#L|FKXIJ)GZ1(d6xUoSPZVFOzMhM$6tgyhWq=@}=HzWm&b4o8R}L zQd7<0PV(LqaHYNNcXtTN4rc2ov$)VeRm&}XS-vamGB^G4tspa#HrPa5#22^pb?s&W zS%!p!fba6R+WLMjkeUo!qpKob}#cMpU4(`C+U6R8i>qlJ&Hbh52enW<`FmyjlhwlfIlxyu$Pg z3uS-Qau7K~%A$hBFocIe2<$LBIbEI!uddh9(JX=++R9aM|DO2#5*qKh#Zq^~O40f6 z0#s@~v{DPy=4^A}ieKe(Idu22Ex4~>p=#u?w_Lx>bHE@Z4Dh%iKrDJj2IJ+qNDIxj&WPRXRSaNz$JyFkpFK#gLAB6G;4KKql{+5w z{2yWKln-fjDCc()q_W&mmIx?JvpXPb{)hR&ok40*!M7lC!&?b|=efwVb@r0;FeD2( 
z*x!h~5OA8DEVr>6PS6o_oYt+7HY+d${lh@ruB?hP=`vq;@uLNGIb%@~*X54+`NY0- z35nZLFQArwtL~;t?sb(T6k;wi@v0FFLV}%b1@;p|R%u%8ROV= zRWO3*fG33>>}We#nQ5Vk3gY2ODY5fL+-E@ zvWG%=(;1n3UEEjqSDn9V_C*FMSXjR{uYKa`>$>D#@FacqRX4qmy{)y4&Gf)@V_BVr zvNEa@r<%e5HW?jhEb!SY6v|~N%22Y0992I>~ud8In`Lf`QStH3E)x@G=`2&AraN&V){PF%a=v)Pu{I zuQ7a;TZAlAgDiVUO+`B+z-8%M0kCiylcazP7I(w|^h*D4Sn6R#-jd7ZMN@iJo=6v2GyL zo;~Df{e7CCta*U4B1pD0lfi=EwI3CTf2}#(`mwSD-u-%XLU(&V?BTG?P-Fx}R5*E5 zcvSdpxqh`s3e`yRJ6%Efp|NYd2}SjJ)h@$9391YRLSU!qq4E=W9yx#}_KqRcG)(~r z!+&i&OckDJQ2El}fI8mdeCHPcJ2=byp-dT&ZFDzLuqc{lvh)^vKB2 zL}g}~j~QUN0Fo{!0BTTKwrDjx#j6KVb>MsCz=!G& z0?uz!q)+3>Q|KAM0zy>+^zjMt4}XE)t2HIfc*Tmi?$;KdI7B#Aw9_O-Zg>98L}4}% zna0Es9syWr5+f5RGVqawtNUt}*r|Zy#6ay+mEGaSGMmMOW%88u6mXzDD_wlGT6!zy zpLOrO442P{0J&IYJjqwrVrEF87ZDTT<9iz5xv)C#pUTTj+d73+z7GI`Ehx*q&zxS(F>^b?4*udLeSbU~XBKKi_PI+| z`R!s3tpv7gX^R3~Cce0vX(P9@UCS)XwG6mNX_eM`6X(`UW>OMp*nTlrcUU?`gCzDr zKR0P?yj9z#ME0=e!>GupM|%&t{Qcx)sN)wVzW*5E>yxt5g6NEc!GR+F(!Nysd6n&^ zN?K|Q@t>y$%H^ z1}}eMB%-GY`CK5%Pj}AkUNRem1zBUE6y}0KA;6;dZu&VyB`KCwPfdQ5Xri>Osl*$@qxi zNUlL!r3OOxC4C`xXPqL4Ec)b`ajpfaw12E4xMZ6=Yyb-WN0LL2RUzLj zAKS$6X%>ekm|3yQ$#-`3N8ah|B+0f4bxDc4nfJcHZ{dlBeXYRL5bY2afSAF|vcc%G!HPxGS8==1)_U|T zNvWWGt}f~OGmCtqW8>q3f@5Go0Rce)p>g@dgop$3UUF3))$Wn6gRX7M3GQ}?tC)i6 z5#2fg?U#)GsvTF-;w zY-Nw9hPGMC9F9(W5F-PUEmiuS(F06nlcE{I)}b=%A7_~A6cEH$BClS~DB|X6Z*IT2 zIpOX|#S?qiLR2Osk#^=DtNG&ym+&FR*Kv8P<@ep!ZLZtJSjcEO2t@V!3dE-*!yhNO z<`xWq;JT2z{)iLD9MQ;&^p<*B%Gv z9;zH_>TGtlGO@9MT_xDkFS4=QaZA)){{?|_B)8Hw-q)H3IPzKPiHM2|2?0GNX^+EI zRf5>q`4yE?GgaPuK8|(quyuVfv-aF(wlXs_w}4}Na=7tnIA2P*pcwxEhcBp%Q-6rI3Rc0j@jnbz>h=|(@M6C7U>fx%lJG+#q2Q4af?@H7>c`6Fw&JpwfW1WFvJ!J#H z%4DH$Nww@r6h6K-1K$M;1QOi8g)GMGRywKGssy2=E7s%k;ESt|W)#O-pRtb)vf8-D zxR2gI3De!E>)xMZTl>m(C!Tx|_c}u7mC!FmY~hT4&*t)mO76L0VQ$Zm)=+l7>+9FH zfQZjFC%h{enbPhuNz~lx(beZsjm#JG@8B$iw_cTSX-?0fRc}lkFJafCcF=wqJsUd8 zMn~$&N!wK2xp3mXuom2=TlzBdg~W^u`*x0IxUuITUpwpCCpIqO47DsRfB}i?8mn+k zO?VOK*oa)bFN6F7oN04eyGiZR6q#;01`nk`g-ro<5USFo8#dEMz{N 
z)FLtwpl>inBl;{0syyqD<@D`l$#Jfl)EJHXIv_2TJFdCbB1tJq2^~2}iq9XvxA^o{ zn0YLREmF;vJ(gM2^u>gGlpZOM>hd=@e@%v3L4CC$gdajz11>;t>9B37u4gN+c2EaN z7N{PzCO`Ov_B8QVS#5&Tgk_TYRF@xdXvUjab#=&lP?prpL~g4|3*W;OC@JF8+0RZoP6YS5=9t%X5j<@=9s zJZx5j1kEdx-027b#7vEm4TRT9soiaOv=y$Y#MT=^nhP%|fDdU^7Ez#Ft2I{)2fQ7` zW7SkW?%wkBWnL)w_~|{}hkUWMk@uEt@uS1%?(3-dK@CnX)?b$25^pIgnsh^HS!eiB z?gK|C)llrf;ga;b^r9EOF`p3yYRe*y*MIBz1Bd-qR8TlBdJn2ur@`?phF`DfaY8;D zCwmvCvRQoWVlI$tetKk}o?MNTX9H3!Y@C`PXWV>S%$VZ{%|p4jHr#UH_Ryyow;{{;KtygLxrG7(#ca)wTYK z-Y0sN6h;=V$f!GPone8y(zPnL+1N>PyLSs(y=`1y*FQ1lR8e`3s=cW#m$+c=3)Tb3 zN7!8_R~a%Ek8tTvTN6~|O}BoxmiKrt8Mkh0)vSD{hV=%yVvnL*%!|m2!23pSnTfsT zwQ-^GnI8{pLlWXKtGU!5h-Pk2LFIGB{oj=);~!Nlji{=PmP~Mqtb8I%bKzXfV~y`v zhZpp~H7qb%5D%?Sa5$&Vmvl)54qk6v;W{B~UlL4_ z81zf;L5bb3SJPuc^~%Ua_>tB)$VLK>FZvy&b%*eB+g)qdbU(k_R*eJS(gX< zJxL0apH$ji6sKDr)n`3{aNlN^Qwkhtd8DRdnV96&?L&8b5Co{7; zvmmb;3CdwVs8W1GMY~|zn1^&RO1t0hBt(ULtGJTf^IAMxRpD7HU;6{ij?XXdjHv`a zw9!c(a5cYpR_vk~eKYL+k6gM+5023LHvMEY_p}y=4k&Q!!C<*zC^2Ia3C3Ji zL1sbM+*p_j602gKXP|mF$s?~%_vnUv zj52~Vd_MWnLq+!(*+*-Lw~%K)_w>^_onjFhcBsl-1z4eAVzf$ZoD9yB+;Sysedi;%NXg8B1{e-#F_eG|zvUc4YC2OlIpARjmdsP@u05 zr*U3jsq00uHQh{r5KWSeeT?KjD!)FjzCJInzFM??L^jL9NcW`?Lr-^4X;Bzlu&Q?y z02M)ULBT=3$s#1Y9wAzg8-+0n||g$cI`eH$?LAzF9rpS6h3c^3UB*o~o`&^2bx~YDhrzULrno%G+^r zq3*RFmK+#R^m@8?svWLq){v0z;Az zxet5`c$dkiO>9f|6fbU>MAIx-Kjc(r4SckyK$1&9Ug3)mVCA8Y1>GV0bcjayWKU?1 z;d6`Ui1G&YLMmdtb&4SB(ffffFqD_1Okq%F3-y=7Xr$+V_G^RS{QgC zXKOBBq9L5K2Qnz3y##l~^f-q^dVo0JTO6ysmtjFF?tQ4=Mh9FhB)1vUcK2(Quo8ja4+LSJ)Y<8ba zuA}O{%Nltg%FD9=r+$Zri;I)XEgq8j;?A9Ap0;b5j5DIM+@eRt2of>UaXBan>ZY7* zVXIJgT25e+vU`n3vm9;wD-XX>S5Izts;k7?q0ifUbXFZ ztu890yFSO?daUUr!gp4FD4cm`X`a_ImZ)oY+O^`2sgS=Z-sfHvxbI807yFk_pf??D z)@elHpxFmUW>0G7ey-bx)DpdGO}*NS(z-#}PYqNxLg1@YN}fvhUtBLqKc+GUT;OW% zO_B<`R#rcqET`udx*1pLFro0I)_p#G&G^C(J)_;ph87-;WP@^*-yrWnJiD`bUJP4q znYR1%sd_A6GDQ|qpc%2A)KEGs;Y;857S{2jmRaCehP?GUgH%@%HTz-B?uYLBrVgP} zH@h;%V${F6+&AJkBG1T_xqmSr-oU0c++uF-EFD zir8XIv!Ke#t=O)W|8PyRa?ZUc=)2$4uI5;dauysN?Iuy7nk&-rwtj_ 
zbqWwtQli>QcMkpbLD<<#ef^2AtKAu7XV^+t%ng>C+4%Wb9$F58#E^h`#n9f!Ps zj#E`k*Ev&FK`3R|?l*-YBQmL)w`1e~thLbiWK69X#vg3g_b_#aGcF(hyvqEk72SD; zu~^e}9oE2m94b1C2NhicobMMlg}U1!FA|mJle8de9Xe&=-H(MvA(68kA0+z|@_;-# z&(b*W+h^U$FizY_L_j1L?db`Rywq|kJ8nKA;QjfTaq4P?Nw-t8PTt*s02E}f>sbOX zogFNsq@})oI`S|>iHp=g?5*Ri>{ zfB@dk5v}dqihux<=+%{)tOw&-*p;K#;k0?3?5LDv#-^~Bshk-i29xz)oSMVH0{UfE_@k=$Td6mLADmA5HCS>H;8Elg7$zuRGQ_PzI@ zO7f{m&I)ngat~(Q!A^05yQ_P6@m+rB1*YFo4Y=~o+^59v4+%;&=jKhGbUydp4sH`1 zy;I`gK$wj(W`yp3Yj2)F9^2eqVW8uZJUv^BWHR7|G0X^Vuta6p*nh6WK_UPW?g|4H zCB73}#_XrDiYLG?L;{a;A`xflU$&e61X|e>FFS;FXT~~Nej^;8D;T+(JOGZ)-YCl! zDic2c`~DhIAgQ(OXEkNRICxKJ<<&$(86$}P>l1x?yCEt=imFk`Pe$TW&4$L37fnx4(%*=smL>0uH114m_}1+sdfuU!A0Zqzr@~p)h_Rae)3fnObHlP6C?me#TrO zCzi%;E6iC);zLiV*o22GEXIF{NL2tM-wS{K&aCtKGNF+iOQ+JaXYw|H4%FRB?7R&T z1KbAY2p!11zb8icU0Q6TPkZCL#ztpG;uZYw`xg!FyJfa%ZgI;OhQyI`fsLCle_S+t z4uqjjj%#Gy0#Ipt92R{W{euP*jXIOxh~qaUFM9L1FgE=XM~3_=Bba|6C*-;_c4HdFiehcxh0 z3i5W02=DV{(OsRR{NTp{O}%1D0O?=QOrHWG;?)^(Uyagt?*2oVuw0Pnoh8{=0EzL^H|PjFP(dF&|L7WETT0GcVgY_ zx1oq}^k1#{aimB=*)HzvnsDIHm*|-4-oMfmwO_ThrZR-9o)Q(i2K8OOn)fj<5|I>i zrMN-NYx$b70)BeTtJLb1l@(5>DzdL{44E$Db`c|6v{j8rk`njaT(d`!Q+zvdV+~uc zwOi(`abOznKOr4><!y3?&Pn`#_&3l#Gef?)=p3_f^Ui;vfzaAOR#H0C- zC_m1^677NRcZrEQlhb%^AG}2eIicl$V9+BoV;Y&B{w1=n5~3`>l3tCJ_iei91O5sJ zlfRNrKdWsWxAWWhrxQmbuci*ftO7n7Oc}WO%lj>uVaUiDKPF^(#js~|dl-WEB(b%;R&%wBZo4s*Feg>11~T!zk!KqRO#H>GQupBCvQnt=r+5tC~|_jcwZextGmQ=bxnE*pJAI!;`6FR9y=}o5@Ho683hnm=2#mq1!K9 z;~t#M?%xqQa&ju$A*O`A5Y;)3bM=^-yRtSfb`+m*&?NHD1^&k_^1V`zUUp zBQjO}+aSl}wx4UqTg2FEd)wQlHv^*CRVd!3FhGRo(ku4))jpO12ugP&rZjKiwWfRW zYw>!=HK|cBWxk2w*r^o8&xo`u5~q#7C$1%JvzI7GnjkBxN}y~)MsK5FzthqT)I+i9 zLQUJe#tLyOp$}IIr$A@HkBqga9H3%Ak12)kQ{#!2%+*+9#70XhbyV%2UkvY~D0|mM zOicCza3cpNf8-DDqMQ{MkW2mhk21pBOx#yO@k>+nz1ZeIc+LzQXaBES&Mc^@EREx+ zqiBmVE)B9tyJ8C(1%!qWVxu&JY>L`J5QAF>)IcL^2uZMMRMdci4TdEsixgYJCJ-=e z(Lp2&ix5o$VGm(RSON)Tn;Yzh>4%xBd6>6bx9&ano^!tXf8ROv|DAg`e-7-iRZ8cm z=ml-2W49d)ss}v#)i{V&<{UK+J~DWlkr^ixT(|EP4_lGEv+7l6mX7 
z`rnoA>yKLGlLdp#ymRS3uTeX~bc`pDe>eR8u{uRKGM^xch?2hX5Bxxz6(kXw^chB# z#7h9KbJ}H`x6PI{mOk`b>sfNpaaH^>y|DfmqK}?)K;U6OD{UDN0WtzaUnVZ#(spqZ zVUr8UHtKKJjt*vN1d8xgpq!jad2C3(uDSb@6AQqAzw;SdN2f_9m=Y%6(PT^t2e zg=!ibR|V#v11NDo)>*m?5o>hTQnM~G5obZpgu!tGj(YQzF70x0uAV}pwc8nXX9bNO zbd)kXD!8@U4%A|o<87&s*`|`dnky@hr;;ZAo2~Bu2g7qn%3zfDbCVL7wu5 zo6Tn~<`BAK((ct9AG1D;F6BcA^^r>vEU%LrOxsOA%-~5M z#X&|sFPm7+R$g01eYw6pxAtP}a&bw{TPi%16;?Qf0?g2_F$#<3}XnXEmOcm0X z!{Mfdfq*I2fU-a1TZs929@5Rg{4M{z@?9Cko|M^ReIRLnw|jnGRaL}G1ibFOa|A7s z+co|6Dsuoxs)B@lW!!Fy@jnb5RF(!^gPXPin?1IG|04fYi3yRqp(DWls)4f1ZERc>4-}4==@QsXQg#VCX`Pjnxeb({{Mj4zJ&j-1gzqTJ&ZexJiN=qXShYkaMiouM$* zihdgSA>BBh>UG8sz{fP)%#B>6)ZZ=Zve3ylD#}%J_s_FUjp|p?zS5nme$D^s9D%?1 zd2a%1f&hF>jr5)w_Qg&=>>L|+n_ZGJ{}HuB-aWy6I|{a6W`Hnb;cfm6{HJ~AA5ZV+ zO^P4X_D8eT5KMzCi0L0n3XE^`Xqp2~J~>=whP^9u!!3KaNy^5JOLz)Qwu7R8tf2ks zjisRN+T82EvVNsTX1X}xJ+r&E1Ana8Qpn2QD&fVB#c4QXwtxn8H8-fA^k_PfU1K3X z>IqazcZf<=_}R)j8P@aQ7;I*x%o;+#m133p4|1XdRsx)DWgq8qRCq~o16CxrvV~U` z$2#Ub_snsmq87&UH8fBu1S$k8W-@S#nO1mvLoQ#oa#qzo1j5WsbiT7n#x9E6xctup zJJ%*Op$=MhR$JZqbv_dwGf|=jmqw4H=Qe2mw@dI%LXLx+E_G`7=_yvYv(qNF3xrZR3f^9WzweTrZ7WqEQ>&+*-xiy?FBw3-ZWJN4Th}bQmbtp<+ZqlYjQPJ zzNJfa4MuhJC8X&CS?MdFHTA9?=isQw$nkr*(2+Po!G*E?U$K}~)F4_CUzSe8@O3kZ^Er5IyP;Rw( z35J!UL`-m9!A;qPy7nr*dZ@-uSCrN8P)B_V9{n(?zi#F`+gKxs#*j zIH*Icy{ipTSyFy2@?sB~?5qc-cE2IAHt=n!gOV&jwpC}hxH_Kx% ztE2W0xmBmGr@cJg0cyO-?r1X(kr9xzu3+5V>1YzBtuK6Ra+RToix@7>2?<#qlBORE zbPI%~d_ybB0wTJa@)1vVt^ENOxF^N8TUJ5l82Ua|j9w5GM!ns$6;8y2MsryfV`-qN zEznw|%v2>{C)I{qY-dkz`?}Fkw&fQ zBN#PretyOeaJs1{;WawCpt=$SI;XBPp7InnGa1cDG>a+B>Gj%*6DIE9rWl)H8{q`X zVd*sdD=SM1z|Vy6zDVL-OqDUa_)7$Y%8SwTNc$fK$`(EpOnd?|qD%^KF$$pzZLs>; zv5g|58uwUn(Y{xXl&jn#G4$KyOX%KD$tr1&*MWVUnx;mKg3#9O_l|8-Q|n3o{>>eu z!`5^oYumbF>)9rC1!*L0!jnc)RWy#I)ou2c_^7-jK29i+|GW6{gJ3&?o*?PGQU4@` z$7-B=gU6FGBh1l6I?5Y{G*rvYh!1zuM?w70^DH5@`^PXicUM2_WGwV*Cy$rqr&KUs z;}joZDc2XLy+|3^isfRqI4kTS5mliCSf3Z_X+6tS(ggtRztKx~?*aru3zmUEkLmby!sE-ZloZO_Y`t>6Y$Ly1P@lk?ycSK)R&6OFD*7$sq=57)m6D?#^$`jN9!w 
z$Ftw}yzlq@^{wmjQf8PnYd!0E?%(f@$3O)+@w>P1Z=s-|+?A9NQ9?mM?L$Gi>i)-7 z;FZH#{oBA_R~(hZpP`gM2$z8$uA4oTeTsro7IypWIV$k;%@-1yjwmP?PVhfhrcFuQ zP*C1rN{T#HanoBrM|UIK_dfItqc6S?i^K#wb=ab?`wf!gEn-xkev5WY+aryTcai40c^)|>K>E+ec<8oTH!6Jvz?Pot=)BPAz*Z5>N7QUnkVti;^*btsSu9JUB@m~FS*n@cgXc6=9G3|4JYC@2aKBbRSEYonlO za7Xp=p9IuQxwVwM&PZnCJ#%x~OjH`hZAy4prD3VfDMm6~t%mQtl1`0vY z*HSSM%jBKyrWm|{+j6?LEI}Y3GvqKEDtH)kdJrmQRpWguolR0j=(SSeI_c4Jel05F zE(*$y81yR2r!Hccg3dmurS^Q(HErm&J9Lcb19agHm=hjsYU3Xc8JP81a5~KKILPL7JFyC z^*y&LQk#x%OoY^&&%X9NV8Xxp!e{Yo1&Fv(yp%lKzl_l9%%8x6n5Y`}aGHU!@%d=C z%jwtMQ?X)wPTTQXsI6($fxrBiWKUnp@$!V6r|EpIV72dz`))g5bBFxBNjs7q0h_?| z+eB8$4^{il7xeGQr?`&Hv+-V>O$Tf^Z*KOwdfAV%mO|c1H&BWl2sj+taB>rPpM2Ks zBTjfYnw03!%t6XgR&N&9DCQ*5^#-(%(Jz$S5s>P!v_TB(teM{aHrGek#kJFI=zD-| zcF#h8!oH(eZMS`5FU^Vlw!V6P zQzEMlGS7gS9xjcGDfav+vr-4~BAJaDGUC(`T{j2v{X^#xw?pNF?_27&6{QB-d@81T z-jvQ!gz*74P}1rns(}HmjXUJydQr5B-n6IgyBo%&<#RShWtQss{dV*2*RaN!muBb} zZBwb|QQl@PVS=EU>8^+Z)QZ_ATzx_hx8TNFo3PrwHnftOgs4nG#~VdD!^6)nyJlbO z60GZ^q1Vss__}XBJROZK>0Z}AUiyRIlw@c7XzjF`2{syyG6|e@>Q88&&ncr@ zyL*nFhnc(7S6a{Y@q4H*1@~P-uU$@Y??fFAT^^bIgMnpt^lYt6P)Fa+jKb4p zZ?a(y9I-9h^0XbT>Ehd`CI8bVkHh_97f{nGrvBL(!@$zC_yMt0=!XydN3CR@_mZc# zzSR&{_SqO)=z+GUr^3#2Z|8}7`RJTNUqcfKh?g2YU$bK6U3AHNE#Iz@u-ounY9?{0 z-hv)})tBIH+I?|E1_`mA!fP^WBqy3Y4a;XR(;wR(FXiVP^nw}5Q*d-Ej6L8FeIGK` z%;B=&-IU%>;#5Q2qwWxVl-YB)%VX;np!}q(Hrr5%~#e840K*K^J zXcHTx3)+WF6rWzaCOLOne!#;jc)rSiKz3TfJ8HH{jDli7`g34i??`x8>?ZHGakeMr ztT#S{d9E&*&kEl+Jr9sDc9uJ{rKTST%iDCs3SLZK9zkHq@v^LBWkl&IM4ozkJwiOb zFJ@BFr3c!#LQ)h73OTLoo<_E(o`IQKgW`QBL8B`n1TD=mdM|4BpF!RqRe0{f z!}sj9;oIzeC<8$;nc#j@&rR`xcC?El2&4SX+3Fm*)tPOw4vf0Cqe0)YKCS5&Gt~@r zw0Ch`M8b9}Ac`y5Jh^pQ;}Om0p;gUQhyK-E=%sI<`?H{G4fJCE8Bg0~Yw`eyyzlZ$ z0{*b26E)cV%nm-^VM5cm%T8daTZY4zIv?Z-=4^S0c1e}bT|tl0Q2xF!2)*JqxoqPu zzwg1BW^PPsEACOnTf)3YM2VZz=W7+7O@!6*ZcbkFflHf{n<}Jb=R0k%wKvp8K{95! 
z$pt;c_|DCr`-q29D}0Jo1$0`sIRo}!YjT$oixKNbi+kz)J?`?l;~g>YNifUW=0DG- zYBrDfcnL$m0;t6Onbp&hY^G8DV;IwC;Q3l8RRB%qZ4@Cjcp0VdUOW2yl8X4`m3NTNM5AZhNpzK~ z&uW>?=+MOHR+1U}-QJq1&EjV(W>ck82ABBmrymA;NF&-Rd0H%aM(Q(##X91M6JK1h zncX~}GIHf%?%Gl(hQdac_|HqCK*lo7_1hODTyeKpJCZ``dDdph+Zf*EjY@iNgKfUEl!h{(dmX0U zNbz!;kR{sBr3x_OwFRwzHcMjq+Qd^|;_NSb_QkcJeIirtLHIsFi9?W?mw5}-ntn@w zp8ke;z?rkP`_|2xrp?dKrxG{l6MPoj=vB_NSmHOjeCA(FV=LXNeov;i7%CAVc28G9 z@mmb6hyFD8B|rL1Rd%Mk%g!+s02W^9s-9O+^623Mj%Ds*tiBicI(O9ew4&MLXpmsU z^r71~MeXK;ldWsM2Wu6V=byFJqzATP#3zt}Dvptv`red+?eANkC&_Tz^}X6lIz4QT z=4|gqkA#pk4_}<`Z8htj)rv+ko*pr928n7rCSsBi*6(HW;cM+m29P2} z!v`B^9BA)Z01N_^hi#`)S9UH|+jgs0bD&Dk5vERZb3*!ZH>T|x0ZVYP*VcijfX(_@ zUGo`;5LO${U%N>I@>!{7n%wXrt*M;e83%!iq%TYl2Q6T%O|_HmG6MnCTs1}_o}a12 zmX_+frrnPAIVWAZxGn5czTuRDpLn{lWgd>$xrCl&94NcW4WeSC4<8m=z>K0w~a56+P1wDksK7nRmdn4Ee zq=bJC5eDh$Rl;@wG!s7z9W8A>EKEHl7uX-2KHbtCX+rmz6ZCCyq+AJ}JL=rJ9XaG> zc0_4LFR^}Nqu(@GPlJ{U<%~RiBSj!!U+O(`X~9)oy?SiFzO8#ni7%Pq)>~AwwRPmE ze_7!j-)1dPzAo*;;{0NBCUkzAQ$uN$Dg)j2qs!sZXqAq8_glj4a-dQO+U3WY9(o@K zpZe4dRjqQ`o(k4zxSoPv&Q{9ykqo5Z$7Yp)1U;p{WA(VZs*`H@nl$cjcABq(>)V z4s?5N_!w`pHsiSp$B%E%>iSm8TTbt6;YQAcua^$WT|6m2^lZuSvvmlU-t|Yju5Ca5Cb>mVJixq34`PMiwUGtt}AZ4}nLGr6Kod{&6Y zL23K+JOusXTZFb&$KkZ^W+s%0(kz*mg_oJfTo7q5DSX1X@*xE5(7!Q*j*vk2PPuCYwgK zvyhqQUV+>`k?(d+J}#z)d*3Qfo3=a9DO}4r_BxH4XV_0)Gl?0IWpq%Yub)OOVcJzs z@5FQn_}c7jruw>Kr>!mumWzMqYjm9{gbh+4*yAQFA z`s72sHv3!!_uuPgnCw$EZFA~3wt-&mR~@(I9$pBYf-i)lQkcnfn=dui!fKp`f=qMf zGFt>Mv~3KG=W#P_DMC)VM_j%4>g6vMd$p@|Mu$n8G62@#JE88MO+eyvu>Dd0q4p}r z*_wDCKkHd0uK2x1i}li`xrDIGkxl>2S{v!n?{=e@WS*C+Df7D1Zgah99)mCAHRME+#PX!(3lN1tyq=wT z4A#BN&r~(!hl?8D-(8q?pbPBoHJJs7`@|k~muzS?`<%BY3SNMFYl-# zSpNE*;$dCwjgys>^i6)kf_KLvz&kOo>VZ$g4^g2h;ERF7FZdOpHo%Xx4-x>mh95zJ z|G&Qk*S3oEGcz-Fb#*srb?`S+5oBUZl{ ztFc@4{$KCIbmON+V<1@XIkP&EV_d%Z0;RhHk5Kd@szVHg4sn+t6ke?YtZ=e*eNt@7uFX{LH`VP z^yuQ?DeNfC5hYr{6eFhO_!#y4>pYskSNdV*DC%HvK6rS&(8|h66ttI=%Cy&vI|72Om90UCr7>1mT5s8(#7L*CZeotBrN>eyyZ1y+y3kbcz4m? 
z-vfEW9v<~|b#Ecyu9c+N*w~Yk;0f+g-I}NLF)?J~p&BI4_yh!^1j|KeVf%`?#l^Cf zv(LTd?p?oHTwI)S7k&r8o%W^hPxSYbLb=HYu?J!Y7IGNu8gRMHF{b0PPqda(o9krR zfCnMf6Qi!TJs-u~PfeG_a3P`Xb)Ooz&ok_V>L=2FGr426Yed6D4eK>rI!RThXoL4Z zf2^+%$BEOJta5P6g<@7tw5Ju^!y9>3s}{sORA`w4DiS%(2m&pAJtZrv1$}_V7~jip zOlV{Z8)9#aa}htS_B@PZG!k5PB|W?gp&jRqcTImZWJBXR1eZCp-`6w51l2PLP|JP? zM$46ErF!W+LZau+=Gv}Q_oJR`^%63KCl{3lVv+O3mipCrU+{*qhztYzH!4Ls@KlV9 zp08Tsu#;Of1_r<4-;nw|U0ANUrWLkt`PuyYD>oUUo_8iJG~f_f*>(A;6&+44G*3=T zbFcz(rmCcU8N}ho36_>(W3DtVOQVP$Bs#|Z* zzeLHps63DlHS0g@i0LH|%|vN`Za4Nohl=1@0dJZp$=57}*hGUn2NtW5n!(AZ*Vktm zgb#drNEu4r#HCy(|6t@_DQD^g*UbT-8!9iDXT%o1zFtNZxGX%fxzTzQd37vPC2Qk_ zLtZd{996+m**lZV_Ps!9M#nrmp<4kB0ZJL(mKp;pt304=i3{bIYumgICnbo}q3k%= zLnN_OI8Z6hEj$$h`9sW&(#zf|)4A$uDQX)jgtU_L@|SfKiabuqpk*}sBu(z^6IGS& zVGu<$C;=?*AyPZ`c)55`TYzyxjnXG3D*#(2~YjfQBB=%Uc-N3od4ttKbpexVfi(dnjDP% zP)qx|aoO*D;_YcU(mOdDB9Dz$&}67?NX@m<*)uSEN{rrkFB&Lw@4G-`4dPsWuNcfI zBg&^zY{;aN#>#Us4ou&w3Nr6q^XFxvA=R`H4b%#FA1tlnsitVzCpKBH6?-hTqo#US zQmfRH!n0Ebx<;b*87&`E?4wSGru(E;y7_a1h~btRvq^RYgfcZD<`*=R~q$@dq?Wh%Bt%nbs1AI*a|w7 zm4RUOm;mts1-ZOP?fOaDIt19VbY`!y%b%Z7U9MYY0PibYEos;ZqDp-qD5jY%RU%k0 zf0A~;2pBOERR`qNsA0f|6F7vJ;leEZz{33b5<`tt32|_%Q`uU$a6!E)&g$#u&Sqis zjAgY}3tMtkROU4yPgRMY6rtJ|V;SYC56ie}1|EoFyY{CaiW}OyGFQ=o36(tAJ@tw6 ztvs04Ll0~YH<)zWeFiq4Z4e~I?>kj@U+>ZbVPZ^wLel_o!6A8pQE#O`*m*xGm2yt|-dK zogz9zqRwH56>=3Xpz*o*i)8CNc^iH>-a=8&G;LookL4Cin=-g;U{(gya0yHQBN*#V z-+9Djl$3?2p?)jnMYMI&ZTFvgu1Ol6gztlRnVYgu4ydv7d6NiN4Eq)WX+7u-$D5hG zzejcxt`LNOA>B-m&f|^isE63nL>{UhSZ^hY8QNd z%9wY=@rL0}Gm4O^7DVQ;35b6}ESjs#M4n=;_g0~g;S$;%PlI=3#T5TN(1vIx?RG|& ze?9D=$d!>9Kz$#HT;vNmrq7>$K4ItKfesHZloYtZd!?*Cneqz4G95ori}yN13AMYs zw@=c+oYS`n+4=%iskM8R1uwzArwQi34YnZPTKkws->Nji~nkb z-JKxW#*N=)Wo1kCrt}!YlB73}wlQU8L+;+ai|AZCw&yw$6A}pUS40VjfesufM~jO% zJXCarj#^q;E2~VlFdf&a8)YhLd6BDOKe4HUJCHUYvD(XAw|k|Uvh3E)k+~7JUI;{P zbwQ};*;OQkIPt1B?M0N7QYl{P~Z32{(ltt)fva$`&O@I;js25et z^u|d}?fNZ&B|_gU27y1YynqVGMFqIb!0}1ymy(7o9!I`}yT|?LvRaAB@yV_=Xo%l4 
zc?lGXp&^M;o&Jqo$9=ST3k1{%9j8m#E;|&?kFc>5r;=f58-FfQ9GaYLD5&n?feBtL zqZQx9J?999Xtt42MeV`4%QxS zvSxn6oF~cKdM|UzA~2LWuf6@t$S}R7#DE7TE~@8b%&SIqlZvq_;??0-{jI3mA9y}I z=r&f0BuGqvrgGJCXGuOdyt*1G`gG9nz;-B{QxrMhhcmV+MZ?;@M`Fm{VbG+f?v6~q zn|1Z3w}^WEF8(a3T?nOX;hQhz#`u9l?S!oJvOxp}ol}Vpn3zN12FD^2R@LN#~aAA#Z%DCzEEK4h?B5E47AWNEtgHd_*&qz=gnKjQADb(QFEGm z=k_MMV*S*9_G1JV*GIwaek=EA`_b5Fq8BLfUVB69jYkY&0#7~Ny2Beu93_J3W-B$N zeR`OMwW!P{pnPjYKU$V>TTNAmijMm<|E2)R3pki=YaH0gq}I-}1f1N+deP}gO##jI zr;x2Gsn8DMs(8O+7&a3z=t_b2I)M>89E!MRKTF4dtw7I%e^Y_L8MHScesK~fXOvdL z`=2Ozb0TD9L-K^B?@HSb5*`W#=Sp!`IlRVIIznnIDh(#t4B%IkuaXtBaMNNuZPnMb z>gxG@b3a8e0FAuo#Ut0rE=Zo?x_hqjEly%-I#sJMF)*P+#$m_aMjrpI_IxdZd-zaW zGc`q9xfmU*O%H4Pguzr9TjZp60LB_Y5@O>;=?#C+5|j%@{;B>rwE^`fWpT_*B#5rR za!?D|4jL=|Re#)ZjA4XA0c+?@7 zrL9%1YoxjaPml%ZLv8RuCq9{T0U2^&Cu3QoB*ty~svl6uS&zTQ^{lWSmUmzUI0I`G zH4RXH$_lev+b9b73#qHj$ZT~Py1gje3k&?oi$@zH`Hd-UTq2oFK&+{qbykpzK|3{Q zB@Ob#(f>ppxZ7+8%_td4ch)l=2>hNm9J8jV&3Mf@_XB6hV@W+xIl8U?E~wpsh}$8n zv9YnNOtCV;7EmmztE&-O1T#B3_8-@^w6zfs-W)|GpTh51otY_I=_rvyH~gVG`u0F< z5TcwEJhbSh5Q2VxE%X^!-=$wG7rrN50kSc`k*4*V2KYBG*~?`NETlx4Ygux6eYqg` zZ1q&@Lt=9A?dxj8(VB*NzL$mj&g>cX{XG!KjjJyc5`ulwSSp|J@`?jgA~CVBShvbj zwHQeqI61YowaxZJ5kEa|d_Fwf&pobc2|I(9Is;!59O8&^{H>A~UK5h8)H~E#bO(%7 z71>&06own{+sY2Et*uq+-D{;K2P(=U3|8D{W;Ie&CeR$DD&e}f)DI{*i;Jd6fydDB z%gKw8zgWun$ukL#+w$k;=Hx&pCRSJS z7UIDkZ9wVOYpidSA>oeuv^__akbqBsk1v9##B&{Cob2qJY(v2ud_Vyj931TJWdLfV z8mzLia%fcD09lwTb%t!V#iwvcqA9n5(vvA=yYON#_RlsZ534sy@DzM`j+{*Rz-0R1 zh@or!v&7~_A{)eyk$}!zc1e*j9Dh(HxYmnS2 zQ?TOqoZ+2SHlA=}foXlWR3%eEZScKDL5yHfaK5hOVmP#L{B%b`chJ+qwbBmc>buNx z5aoj#$vGD3UQxcaCugdTD8y0-6G)(9oV+V>Vq(T`rTEv1l(+=1Nbhl&{ZmF_ z%pZ4@l_tyRMfXl^JQIk1AraetCnEB?X9k#F@@By6NbZfeRO*SSr;(G6pvUn6js2L2 z^_XXkn#*wVj$e^_4L8NQJTu76fiJj8u*7?Eza&)LEAw_IN0vR2%Af*hI`-BQ|-sIu32GbNaWR!8W# z(^e18lCO$alRw7TJbpcCPsf`XR0T_xqnUK0FIFk$$ER@Y44ftz1ZBF6J;!ZUZFwp@ z(J1m+D_5$d%9X#Gt9MzRlGFW3fC!h!5R#C@(EP6}mRH|`b?R-&TlvSRtcdGQ%fJ$- z77Y{wt#4CZm_4n=d~o`o6fe-5t_%@MG$sGvHWgjoZV{Y1uvitC!9`TPX-tCpIJbYN{& 
zxKz6lvqs8lQ4!_EZDx-XA6ap^ml(rgL;Jc(kdfQOFf#U54)Wom=4)zbeDnzk4RvvL zt}CQXQC{QlHdUIAu^XhvpC!YsqTDz;d*x%k6LNSJt=G{In^tspzRzdJ*H;%VP!+W2 z3SeJ+!Oh4h(-99Pw6L?Yv$n>v$x2K~DJd?tv9iLnag&jiMZNlRWJC>t-JA2^D6_tl z^`)iz>x7ZZQtUYl3$H4(U%_jW---y-;b!>%f=Yd@j~%v=HN?g!>L|8INKQ_EDfE-U zTy#c|0Tm^`un@B_d}FCUlYxPux3?EboLXB&00%-D(@sMZC_hD`^MHm2@FpZ)DN>B0 zy*2O#ILvPW)}*Z`DP{MP+uZ{KUF%tE0P!Qnmil%U1D)yfryl#om;!>Ojprp}Sco^G z(E-hDa0FxNVqY$m#H3NzJGU&Q8A*;7-Z)~!Fdim}3@WwEVjj%=p?7=W%jBB1?xT+d z{%o|EfKjuaB;@TKqC%!dI<+=wU2O8B{yuk>OCIKQlH)+QFad+y&V_2*wkfE|b9Nh( zIsi!=7R}H_Z5O+^I7$Sv22GIho?vb+DH zJP6)BFnqZ)?mN;%hrh7QnpziCncZrC1I~ef=N9u9yERF!25LrxL^Gonyj(03v50h! zf6BQRZ>TD_7`|e=Dz)BfdMD`i@YBr|oxKkrXYyE=ImB6nu=Cc+7##W_O-*@^wcHgl zyh8zrqkyU-qNd>OTIX~KexxXJWvF19VwhyV5iVyloo5Y2`YfM!Xti09UN5ic1$l+Z3$%;>iTx!rb0 zULiG>g|rJ?byj@y33+{3zf&#nGG-MrT*_i!F-RHBhZoo~KrJ$1Fx)-ir~nwgo`;!Q z5#l#@-E`3!h0yS9#HP$_e=X8n7AOD zg^kMw-{3pMo77am+Wy6SH4i&4Ec+>N*E3`X)7JSQh2N(!li3Q8L7+hgnp615{MiP1 zHL#zx)Qz*UvlrqQ^*o>>=-xLOOMNQW@6ri!2U(>p{lEdJYE2fz89qVi=EyTW+zU zR>$w{Baxi7K>9eBVOu2xOPZchP5(Y%8FtSqTu}~p_zH-&_uevjA=h7;PW12BY}Z1$ z3l1wF?C*aG=tNwKU-@U53^uu#$-KwQWqZm**gXO*5mDp!s}S!hm`G^jC}${&26Y&A z_W>GtDdpRtXAuAEh<9nPTS#+Au|aKc?KJhK;k?*@>r38`E5!g7H=s_gf1!Je#&~j3 zOCF!FqT*+-^NAWr$pMFg?LXM~1wm%;ewq~j9)%^Y70p-%n;4^|>?G0#pRMzcn~ujW zgn#Z)O`Pjx?%}kjJez`mz-~P6W*y8iqwE>rd|!PjWMx%oPB!(A-t-S85)L|kufnUN zX#lTU-5mP2`&=??rI#I6tCMcAHTtXptNIP9#dBMiYR3B-s=|gJ0wLS8E^=v2O=1NP z3d3z(Y^z7g3)Cv%Yvm(PE@Xv(hl&6h7+6lKS1oko?0W^--mdWW6H)WHtH zqena(0y+4QqT_Fuhe=z5r={)Lm_;gy(N1O6c-`*q#sT~Rprp}TXfE>^1em^ z@ZuQlS6JF)dAM=;7+>@Ycc9k`C=mi=fXog2_$^WE;;~`&_aKY#(XAu|Xwm?$@w?cH zm$F1GZ3Rg^q{CAqG0?zXJQ-a)X?EYk{`1B2-dbgwZ|ro1btIzv72A5W9xd!w8ZM zfhDYjv{3U57gDQR|Ea2K<~(``s9Q9%^9nyc?F9UmQ?L?UiFu7iBVR^?jZDx%KL67) z7BHU5@JoZrG$|wlNb7nMMg2>m#c34GARf!YKrU1i{VaxHn*O}UZAR0W=nr38(wB(1 z9z1#d2jUWs$ZWu3@Fx5_!(%&UKzzGH^&0WmP&BUoS%X{e>AXL>LZ&&;mVVFSN6!+j z+xz9qt9>gcr^>>@Ze7*wB*PjD`@r&suA0Xok`clMS`CBPy?sne0hH){>kQiOs&4f*+X>FIii<^3Tg 
z#n#p~9Z?~(v$LC0AmEHIJh1vzj(6FQXOlz(xYptM9uhOZlAr6?`IlCEr28dcIP-LL zoSmITkcp2JX)3FC4AO#tvaFS=pO~14^dtfUZ?3jzDl13*(1|Fu_5WB-Dk_5fNgm*C z`OhSc{f(t^W=9XmC2W3~+p1!B*M$&itpNT@caWw=xSsdwo4!6PyXIAEczzW)gt$p< zG?{G}UT)}b?j0+ROprydSpH=&Pbk$-)-&W@l`SRVWl~f9h%f1Ywq1+;vUp+sl}Ug3 zer@=L6*88L-G$C)SZ5PNA?(>uDW4Sy55SRPauXINCgw z3`mG1^w{^1$_CZqYQ!y-QC!7s^u07KtHO_Ei$S)$ewJTkGKzjtNVH8{`|HW!_|kkP zGM;kBZ61iOfcYBcKOr?s1!ka+X6?9Rk(~5Sqv2M!+~4;Gu{09!42cvM_mIiWdJcom z^cPng;}I7u6i;_qnXMhIWiJY9TUmIpU}L0IDZhR*C`J-)7GBRhR(n-;yWs<=YA9eS6R?za z39lg~N7|b|+lL44!Q4Zf23!wi^!6@35dUJ5KDGfvxPvQn-9+Qa$$UOZ#5&pMy%sR@ z8vz_o@Q_MbaT~7`ag78RA%Z6-KI*9J zdk=3+U5c^=8UKe`GftW@f}3YNvZ-rD7S&s_+VIdQ{P@+*{Efr;^Q9kE($d;@CPI1F z5IYiQE$A!2z6&iS@8G68detTm4m4N}qdG%oYo_(s1s>zaEd2276sQm@1fUc3>FG@+ zp%5_8aoDd6<@@{J04O?7hxl7(h_0&*ru08l*k70f*yrzxrEusY4Frs56ICC;4QHC^LBg3uSO9cY?v)Fk{Rve4!L zIh|cfrhD932NcF)3`VmyM#wcjS$_T%A)Qm*fi4piK zNG%{dRY^vB&qq}ox7X-PXfGaT_BTq3h=O@zLPlyHW;iPKEFtw9g}ec2Z85`x%CuH% zAf+M{GB!YYy{_!t_@<6wH;-;7o`+UkeG539QTjzk_nVy*Zsbx4S8xD?=TQpfRe~PE zzzl0wx`MrYQdS(rfCk4`-^4gk1*g47muU8QIs zbl)W83cI?bw!0NMAzS5@zP71;k+-;YFc(o4^rd`yu`to0Yl%Z%892f4{75|UZgeM- z5q9d+jMxBjilqc(mGD_)mbHpQTt!vk`pVRCte>R9+7=~oH*5(x10G5-+mv-`51ZFy zbqtu@sdJKLO%89%wpLSO4I5ag0Q}R0e34y(;YhJS9&su=B#NQ}&R$!FwfZ`c7~J>+ z*C=l^KhH35S!yU{J<6cwRfbaDeegE1vQB(?TXq_e%VT&k5}EpsyeT}Odqv(#e}WNSLsXX|#4qM^5(OCX zv0;GRx4ym}5)zUT;sp3DRaI3sHZ~b|!+=b)(4((VC@maT&XW1uch<%$h=_r=(pqJ+(64TIjLi_UZ7fNiR_W; z>c*i^oPpsDQ99}sQO8zVF_p3r;=PjUJVH&c3 ztXlM}{=d>lkVy9ckz)RtX2_IcL_DD1Bsczw{lOr8pb13v^D7sEmPg8^B zu+-4tv2m-LI*y{CzP@3S%2lo5;T=xI+Dl7%fwUo){=}==4{E7Lha~3I@Lc`PV7F6lk0Dch*+& zLTjd`-XfCK71T6fA~P5v@ zwe}q)3=_{C|8D*ox=44fnHIz_`t7I(Sp-j)TCQfe%Z!yhoXf$Q%pzBcNqXOcDoVBZ zfwVX(j`Lb)cauBf8`Bb^^`I;m6}hMsrq|pbUbAeC-^kXGO!RcfD>FW6O^Vr6Pt_TL8bS*QSUbok1spKPn97(M zu`f@B3AS`5iDa>)>{qi0zbb3KCl1a-u z`W2{TSOklXmq1zlJ*FNo0<}+Bu?=G|CXauD>a#7X=oMW%Zydm|;bIMpEH~lg<}$N~ zIJ(K+@b=Y-l<94J8hRU#0@*Nj$^H`^eGf!YB@#WOiD%|*6!CvCV*YN4{NI2+9Ygpk zN;3?vR$(2$Awhbdm7+>PzrT=s?3)zTiIzJB*IeiB 
ze1%82N*XPlz0-g!_pAL{cG-%Gia`(VpRwo~fz)EnikyxsA zfiE#JTHH&z>;n%vj+nw=>s)sb6B8cTz^?fCsPSavW@_r_w9n}Hd*nVRKZj>XX=$o? zdU-dqs79Rn7f@8F$#$x9)|Nv}&=YjgE21}yIuB(p{Exzf_k;k z@|I*~`Sei{ovr|#!+zqSYAj%HWj*tCCQW4eSsW5ep2sepN89 zc8}AB`%lfQ>t%j^X0sQ<67;*}&_UEJ4pquW@K$8wp&|Jbn*XwjvQ=u@fIxMX0T3=Q zwgAG>8k3rv$Y^%RdudRn_r#PgB7eXW92q%j?*f^<(;uE?pfNQb#plPIS8(n7muwf~ zendM75555+qcUQ{i%>S8aiV5Ao~g=A;qWiY>Jd6ftV?&k*J}Tg-z_rq7?7zdg^Pk+ zs4(vfN~u_vXv};##Y{{TPQbEf`p5`25(ffo3M)7n1#I31$r=c3RmmQZ(SDyk{o$d~ zE zP~2h+p&5sT(E2>ry&!a>$>>*!(IN$rQTDZIeyxP8SZysRVW(Iab} zWu98km0)kVV2Txmyb1|rpl!vdTJ6TaW?3RtxicccWo~{gB^Z<$cqWVpfnW2W4emEW z(B;&;w(r1>5|^BgND2qcJs(%`AK?5+{+~Nfr3Gu&@nM(!4KL|W@AScWH;PI)@5WK1#JpZVwXm|XGO!w}s#Fnb+wUDa8fC;f$y3QckY`UL7=2`i?%yvE*DGCSWCqz=|Hr_5R5yxxG)E9x0Ig zF$Bn#KVz|_g@8-;r+=3Y_;*1F--_39QAW0x7J&!rC7|lSY!(qx4WyW@^3$aId#e3^ z&!qdEevXj!H->BEj?Nkm4nP0|LzI8P*~sZpjIC3PoD$^vSO}o4%kD0Y1i9Eu#5=MZ zV)IevQmWUK0=Wh3^;4=N?9$uGQ8B~ZK-ge^-$@SGRnr_FA5~RV$f&1zxLPvtD7Nc9 zGF!k!r3epuwK(2oYGkETOXtzS;mY>re+*v>Lg3oD(3xN)1S9AOkl99p%J25PDANqv zF#oTZdhLsRBF$gh-vS)?|A2*}kdQZ_^cg^QY-L~zqk9xC5FtCoV9AUvd$GdupbAjr zDA(_=W=sLQ>Nx)->DIRQER58zWRQLa2o(rW9rPj>`f%3& z3~7zmB?z9(D{!SU^B^8Z8cVbeG^4{AJalq{RXl@w0yA6T83JsCqqnmQBdBeUAaoCUQCy4(yz%qwVj~CIj|`+;wBz z2&LRXuaWDz!XMKH>_r6j3MR-88QK@jYw->mfidcCdNhMF&oXcvC7f9aGJcqrGXH%5 z?mg6j9Ndh_;wwBu5{oV+fLMr57l?r<_+tf(I>rt0i2KQtV!wU+_DE@ee}72{qw8=Ge2VrekHh((m8dC;yac0QM;ZTR;%GrGWi}$&nE;n6Zho9I#i~$S4!x zsvvi=Sn<~Z0>Xd2Veda>?q*see=&DJx`Wr9pB@=X?VIVdRi=k?Mu;tYlmaLHVSEQ; zHKJs8$XykPsqkCU{!3@5NTCkjDuIOvrj~VmFNta49ZpFDwd1X*vJdLUDorE`Tb7#E z(h)gGsMd7BMSVAQ?Pzm-l?UC+EH05gMv)+g!?lv0-o}O4$$;)_zz#tJ6NJneO;#|k zcV|I|Vw5k9DheyOY33$9Mh_`_20)v=C3&+19$1cH^-^67btEHpCk9sJ-lXw_$W%O3XhRC$M_ZTzqZTW1rMQrh;#tCrYJsL`$&n$ zV4xJnZ7Q*9ES8HLx@R$8Wikv7DY?15J5Q3iSH+tqInTZtJxF(@Hj)Vf_SH$wzPQkY zM_dg*Fh*Yy2&9J(r@+O%%eHY z{fdsKWLh=Vfau|*|J=&_@HZh0A!rggMZJi1)D#fHxR<{&l99~e@sAxG$|s7wMSWi| z9tkE~EN9v75A&HX>u6%YcL(y_KQ@JhI03PIKF~5#=u9;Mdjb&2 zi+Mx%rZ4$^ZUMO@uKuwxgo8W0o;-TlSj@aXgMlE)8II+=K4)&q%8tUqjR+KA=I5W9 
zoP34=2Vjq{H-B;zJPl~NXbfnLh%9|aPtW^(?vMCCT;2vigC~KJ7yJ+G-D9s~ zHhJvs>WP?|3OInj0&IYB>cw6c5LEa5nqr}8Wb>!asOlgcr%h2)cJ3`M$J}5NfeJ!4 z!v7|;#uMad=D5uRtAbso<_Ni)t^R&<7%=$2rJF&L^7A#@#+%ALHXB)iF0SDJly{zC zO{H7kcg9g%ac%cTYalgN&8m;+>7;sRAQzKcsL! z9pdSp-)^vD46y^}ZSo8jw7~|G+H&sxaLztL2KDbbZ0?mi)ClgWC9UwIH- z17CgkS`JW8#g)EVwxU^5+l4f*{DI-wYZ4s7KrOL2cH>;^Xnc(=#Kr}~2eBT{{rL|d z+T{I0lC7_u7L1*@nrq^;#*J{QMywSe;GdeohQ!z2&9Usb4zV2je%+=8FuN-Wo4osyaw zOG%I|3KuP~O(nBoAZKvJ6A99jOgB+t0cj4+Lo|*^>p>a>K0)hdeQ;2Wa;}St#?YC# zjqH^IvcbLR39D`;M=8&11eM|>vtMMy>F8U)yuzWf&YxuZ`#?v2-hm>X!;}?Q@tB8` z!fOmsT#}Re+TGXCMhEnH$C*(=;_j?TzK#I@Ha!F&iI-)cfvO?E8!?-H!PX~Qs5H>v`6bfxFdo14N~kp_>vNA47z9PSn7%X5y^mcq};(@5$Yu`t-EWoV}Nke?`&98vC<*d=66R>Ot`8# z&|CP-8zazRrzcgs{y+q9pK1zgX=wp%_ij|<3-f&wm;7*oWDp6(W09gQ^?%W3)zQ`@ zzb#zM(6}c2hLvGwM~6Y$Vc`5p7&xHw=!*Y~s(2_abuNrPxCD|&3ZLl?0n1h_W93W6 zFEtnb*4Fnm5r3wf;R3RsCNFa5`GaNrx3MNj=_*sq%2s7biEbNm29*0`N+J z?>wQ`W|IhmA&~T7V>k%FP@5# zIm6X<<~=8J)gLm7G<$|s_klLm>pVM&mt!%X>V{ z8OkVf2)fqC1ux?`7>>0(P8yDl9eONSW-J802x>U_D7SKUVN8OdWk4J=8-pFp!QLzd zQ%7n6R@!8d(e^m}AW)q8#|XNO65@Hx-2Y3)5!FR3g(cfI~Sf_55# z2s+Q)#^7fO;5k~N$-(_(>659=$+0#FiLsZUhdqwx`I<~ zHJ^Q!4_~#&g-4JXVg8$PBEVpu$lIAT^{I`@OmXtS5TUWE%kBwo!4fhe^S4{{(awhkNpg=`Jfxt7In5W3@)d7Pu!C9DL?p53ulWm`KA<$hwy zq|f8_?1?44Zy54Vm(HE2uSTB_I+peknNFArf~kp+JZ9*00w|{PTT3>oo<;tUdKP;E zy3bp;%Lhlg%MoWZ%*s8ohb!q*bw_O%fZ<+mo_x_QS2Ig97-(r{b~x1dX;w(Ahb3P@ zhB;Alm@+MXF1aLp@Qm?jd?)fPdg$v)W)C_WnY`pBO^y}|gCZsZQvLGB&i0}7jVtQ4 zJF#^&B;?E?-DxY9y?KP`1a+kHKbQ(h?p5%cI-ETT&0w^qwUaaj4qjZ2f1|$t&3}D0 z=~Qp!^=;k*bN=5r0H|vh{?%{)sc*Hc?H`6{zFYe$%gej})i-mCY?U-p=O-g_;x;c1 z`5Tfk0{;XE5c;eAZ%apj{E;*OJV&qN{r!zUqns`1R*`?yMtRU__9FUccfm@=5%t>o z?GxnE^u3F+rkLTd{Cg(8CbL<;l{g`}i)|vBn-57K zgG0xIe}6tAb`OVR+#5H$A-{lbmRKc1&N^fc4GkH!=M5*buiqLGE^I;Tj{?kcbTdyxjot~Y4)i{T@hjy<+1ZtZ6PrYMk#S__K>z!*sk7$GKuvkx z?Djz=T;wW-XPZA})EM)jR{O|pP}9628^AQ~KT|3*P(rZ--w8P$(%*a3&ZNbbSHVA= zSSGuu62hoS|SV#5o~d8Ie%3Kn`pAEv$wGmycK$6 ze2tBqH2Gep-~V1)3x<$uYp13^YwHA1TXQJD*?-6^4+O%+rmG?xOed7*-k1l0A%y=; 
zo+&mm`J)$+vXlK+AJ>@J-q3;xcxli~dtfOboSmlY92GpecZHh?CF9sl(lAfhRNWWM zS%{$~_s|hk3?4am*~o(9T@QU=P`KarDm_!i*_LDL%FD<{HfKPzgzMUSJ74=1`@zxV z$zvx=tug__=U0JRc+R9+5pkQ|S1`rD&hp@UF6ZZePd%IOY?4w>Go}>l*@NnwtOf?l zNfmKVC=2@BGUqJ4=s;c|>1}a3!>md^EtYnIogbdvoH@It#ZV)P(E0qw*=GJP)G$AF zNo#UDhNK1p>`?3tho8JH$#>;i7FThZyp{;Wn8=TSgW-^4?RQ#+;u0n4ORbwuGN?V& zW*`w|wo(VHzF8mtAtkMN&W-w^n(tU5k-g#!ov#Xj2@Cn>({ds{Y)Z@PWUO1W*0RWrMHS< znBh&n?wo%r=RcECC0y5m1D&HcJ|^j#>#_g;G++H4`2p&|1&=PJPlJSdw(L1z3E~^1 zeF2=%`h77B`~ZyTCXt=x*T*ByS<{=XHUM5n7UgQL)Z)5`>Yjm-b_L13+3FNOZ{DL` zN~Q*m$Ayp(+}AlOWUh8LBO~K{aslYufSv+iH+}-SC^;|1)(1xG0n+WW|Ji(Gz9$%e zKS#nT0^CdknSN%p)XG8T=afjZ8w<3PWlG=~KQOWyC_OpwKK>PIY5DNrYbq-WF88}D z=%5>{>1wlm&Gt2LAjGU0B^}<~|2DW|_Mct+|NU>}{s0=fkxOzeVt898QykPk8WzyC zN)(a`?^2$3WL45|84$tLP3Fx&)eG4o=bgqD%<~KP!{u4iFP#)~J`LgE7=y)&f*=9#d);a7Q8)-D$BoJ^VS zw)A8ajO299nwOo#LNTv>@nxfy+|-&&Y|Juq+c=H=RaWNdxL^ExT-==3J-$u%NR<0|q1J2|-=;+~ zZvV89e1rUh!wxsG3>03jkj!n}M;a9p+h!V#*OkUI-{2e1C3qKF))`H`pwXSmRZI8m zN!63M$~>)KK?NJ27VWY*W zQ)DezvXGXox+lf_XG3Y=;j-Q;AX9Fpc3lBjt^GyOe9CK!=1*F6+I%S)mnNLzBgdiW z5wRFv3J(0jCurDdnG4<#Se5veK#DPYDG#lEbGMmv-sbX81BaIQ6tv<-UF~T@P{n4x zdqIkQA zOodNJUK(13$SPhA9L3h7bd3rL{ z1}>QfUr6?f$HV>3vIIu>u_zfUYk3sixQ{=dyjyP)*-<>Rl-WpN;Dk@-#=pbd%1u;3 zI}77;buE^c4VC9g#%G%EG`Ky6xkT|SFxAOSJyz1}vVNK+j@;#k@1UGcsw;Np7(&b#e*M}=eAT-#<-voHLR(k94qFB!M`88NHLy&+9NzwOjvB}Dc^j3w*(SZ! 
z$>r%KIZ-I3PZ}Bm!Q#}d$##p4_|J~8xGT$(l(aiTeGJQ`=l@vfn_jb#F&cHx#281d zTV%aw&vzZvj?=#Pz9;X6=dy%dptg@S3bVx_!D5ioU43vZt5prXDPW-JTi^nY1 zduhn)cB})E7hrmc9eMY`%JodPjoov$CC*+P+7*}y&>@`DE7s{&`FQyYe25|qj*sh9 z`FJE?gKs#H-I-fS?fs&SLeXwLh5ls;$cD%L*3U**Whf>~YD1+`W=9V*;xM(IzwO*e z5MUNS69f8NQ{#1e#Q3Xh6%5qWu9#MPj#Ad)f=maFvUlyYhEMJz?Iq`e5U>r05PT={ zY;$ziZ&6YieT26!PTJ8DTg}E9DJf`ZDi)aZ|ImzJ-&8H8OCe&{N{F(&_|`l68AV9K z`~xF-A~F}$=&>=4Ma;DphRLhaC{9z&_a8s{jIhivFePR;dFWJ_8IM9Zz|%DwRQ82> zCe+sOMnYGIms+(lz9Zl|Sa;r}br;K=ZJ0JD-|iR3+2yX$xlGI`GTSN8mrKM~RL|3X zG_wFXTFzjlE>t6VXMfQK`6U;3x__y~qE~{gTXQ!hR#rM?njmwN_Z2jIP4C2BjheDf zalH&D&klP1KAXgJF~~+CJg&m&o}=_;*qPijdrEQ7hcGCywgBAV$TK6Sw>h7P=gNk% z#D$2sT8pYK`jcq*lw`tuvb?1HFJMKX*X<@bK2UUBR@ee3AC=bTM_FA2tCz0^D~h8n zsy7B*rI`Q5Y|MjxWxFU%rvEqlmp#5&#T3nOLuCGlU_i;MYLE!O`|@%;cLx>55t=*F z+@g(5+4YKAzx8%8V?-)@s_?{a?dL(3TLtE+C1+^cG50=E0P$`2?F%HXIh1-29v^_q zj9;xJ(r~x;A_M8}__gSs*rOSlQn#wL2)l6EuZJJqaCQs}m^$LnQyPn6@6YLprz!j< za9!FrVMslV2|VmfHJ*7mA}bAvQj!Ffw$~> z+aXTVb@q9_-aO<6ux|$DeWb~l;!U;xqWp%Qmg{M48sE^Bb!>@J1j0( znVzA#l=qu0x16mf!IOJL2%$BYL0u9h^BQ-RcTXNbY{Pokw}^jmrd{%i+D;ioXf6as zeF*`8h>S;x7i0qNZ0&Y*sA!Z2-$70HnrdRKelU?9)CqTQaP-o)kaPj?`n$1??|{_* zOkn+g^jmK&{duW1DX6-u<$$m5@lp(vzdVKw=p6S*o}D;aAgjr-;;Zedm*W?oavRyS zkxd4}w%V0#mO$C&k|hZk>BpO`iZ^Preg+8VGqsXjpc#<!dv!hWLF=PxZdsvP zxxdjp(oJ3Btv>~>HJNW8_X1;AW_8enh_2;GL)Qg_}dl$aoik?y6oCZzkgwBS*tGN zWq+e*&En@~`5T(W>VhE4hw~R=61r!`UueU#prxGCMG;es6dM89yOkjb&yJZH7VozX zVLHwAe~4XeGZPTi^}Wh17IOhOGCjMjKw)u&4C%B{QR?7qyNcjq6a!|;a;*%xrrnoE z1R+Y;N?E#XR^d2E!kOh_OiW#%WJ2jY=zV-3Pk?Y)SxRfFw#Qd8OgD#7X&simU$O}k ztavikwkFOkJb}D(UL+LR{l9Tfa<9Xskn%CEpK<|yb z%cMqs@~)iOIKvItCbOF!ze=7RLYtlAbcCqF6C_>QTRWvKC+4o)xaId{{bn_ZG!=^P zQXiZ4>vslir3*HSg}h)<98;`<#-iudnoVrEV}&l}KBd$H)By4W%;gCtY2xILTO{(G z9V!@4%}`SUgPL-~&e%&+$%f&=yG0(qIrl{3NbXKur)g?Kp-3=zf>Z9a=H_d(DS zW{09il11yfqvVbxD5jM)p55zRGO=cs@-E$WRZAkyq?Qj)jt)IJ23P}UGJhzH4yw0n zFTkb~RtJjie>}l_V9)#iXa|Ts%no$j^;Rcysx-s_n7VHaF)|0PPY_l2Cx4I&vp#G{p!F-iaeM|p}i^0f+VJ;eAR^MA{7~hUf+n)w> 
zh%sR>=|pTNdh`MV6sAw#d=>!&pErXCTY{uBricm=D+SU5939lkdQBS;liLVrnqB$~ zzKbZf-|0#iTIkJ|ml#9Ku;9lgs3Jh!{H34?MzMCMmKb@AaslO7un~1lx=N72_QfSF-e(t>6VS4+W?n1q(M(FE1yW)@S&9g@Z(#V-pv60ZT`MAxOH1}X9w(ma~ltK zkz#Rj)1Mh_edt51gJ#ui4Qe}LO7xfO^nbb8e|5bktt7}8veHbS7PmFrPDwMYzg#oD z{Lwx7k}B9bM2~mY!bil`bjC!SAJR1_Dk+ZHH)|V*jx}sXbcqXgjzbeuA6Y9<>z#z+ z7MqccdbWm3uQA?w{w!jxr?2)TC@k+@Q$y0t3O?O=FdV#OyJ8_AAnBj9XV8gf_yQd@ z%R_=3DvPA=X_y+F`_&ig=$vy}g}w=g!@oUhZ<;9NF6$rY)g8RbvX5A=)2Uuc{bJ)| z3R4)pNbC2EX-CC2v$4V$QHj`DHBOdY4wP0&XB&K^m@Lrevl@k5ZUhYnzRMnI_(uU_ z@tD_)%qc|;D#R?BLMOi&*m64}_$~f?P?)!mPk2_=r-6aW%F3{tgnpmdy~IoCj9N^lB3VLA*FFw0(l*lnVV+3&PuyJ2b3Y6J5D3U-^fXYjp#seSEaJ3C4sJw-vVrNw4Te&sQ3yZO^Uu;)9 zAkoki_0WebPq)Mm zw+dv!g$ix$!6Ns)bY*BcT7ZM_{lF+b{i`78Eb8@*2I$7x&9J_L``(FQCsZ~pt=&-8 zG3lSxqc|&->?wL5IhbRcDU0iflJtJaQj!lH%($2=@U{waSqxXb4(*mqoC)0Kv$IT_ zH42b{pfk^m2oIPrpCCrr%~aU;QZ;NEUyZo=Q;d*}OY7w|xnBguX2i_6SF^j4cVcUC zv0Jt5!Qceh(W-p@r{;o=&uqS_n}>nW4lJtR_ALgm8xVgJ41(Ks+NeR zFZ%UML6MR>1F+!~eh~zeOWoDxRGOcFEhzbap?;!mA_I)N(-f*5Wa#spDGU z3Fh>CdOyuNEHay*mGr@ibE_<_HH|RnnIE%xeQVGbp`_E%d85PA&_le>1J6Q4qFrlO z!Jy`liFaRU{Z2CxW_RXVTxvObOq4^VXYFw!B#RgsBjQ~TIFn&jR?QX;zqz@Wl1F1YlWBeEWsWBJj=nNkCOvK(k4cYPWYD_ot+aYV;7X+7 zI7P6x_gGy+_g3`nI=j7Lw=`%1U8VKSmuoph_9!QjQ8bFKc-wOX<~lSTM5Q+9W4wZ7mwpdC{~$5n#h%3)AK*U6)o} zdv&9DlP<~!DQE7Cq`u!{4>sRzV+;O50eO70dc@yf?>A4@&M&v|J)0Wz{s=8dMZ5Sli6wZCTqbg1 z?BgTW7>b_5IMlM(w#gCOTmjKko*bhE9Ko4htrr(dK@$AH!&{6=he+0th5;bg-KOZ98*t1i7d(5%nP=ag3FOAMZl+T8U$4nc->{a?L;C>flNRi zplitg`cJtJq_-!%{+56LU%uB5P9$3L+j40a9^aH9M%4`By43^kv@=3>r~GEIdz;(n zz;r8t0AeUIenpCf&ek_ zno^0AIi3)fg&{*e~y@EJqFwi!ipU__DEJ#qQ-16{S z|DA|a*G?q5O0iV7i(~(D6kl4E{cEYy_BBE@==cV8lj#gjFUXbf@>n=b zEJMbnZqy}v!6f+6%(8<2Y$UwDAFi~=Q&>wt8FfXri$1iOoABPdws zqp4Fuq@c@$;J8b5){re~y#^Ji-qxefjCD`a#-j2dMgkCus)7Z(^5Cq6TAati zYguGLr0DXY_ihR{LPF?m(?y&>3v5>+k&z4QeFnt0fC_ghUBafT%Md?QuNKo zai}G~GY-WHamRcpCBiEB4Trm4q!Nr~*^ zn{_>80{RM3`+JWeo5c%fb2krHP5;I@y)#h8>^)rSvV5H%^C7XhAmhoBj5M!dO?hl$ zBhL6Wfz5breR5*QV5vhDWmnw!$bGnYcIl3ZV_e{T-vLP3{=%$yj=& 
z!hNZ)8~fzwbtamRjIC`6b?s-EeiS)RguQhYmDf~jz_070-W;*v0~f)4uGx0kp^UC( zaV1p7ZL9Avn-3J>yfU*yk<412vaUdwZ9eQmInrKOwXeEw=uU<1nQMO#CX6;7sFxUt z)8iQE_Z#0y9AJzaDR?kku5*h$-zv*Ogs2TwOZ{9C6Ukjz7SmxEw^}zuoBQPlZl9PuT?ut@#>I4jtKjOCkMqHdziOPd>sSE(3jidh}P9 z&>ODr9aGYG!0lOlqs;yTgX-HLYii(20Dr>&;*%fYezh diff --git a/nf_core/pipeline-template/docs/images/mqc_fastqc_quality.png b/nf_core/pipeline-template/docs/images/mqc_fastqc_quality.png deleted file mode 100755 index a4b89bf56ab2ba88cab87841916eb680a816deae..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 55769 zcmeFZRal$t)-Fn+z*nS{Vx>rm6qiDAOL2F1cMtAuDNvx0;#Q!zyE_zjcbDMqmSlzR zn{)pEI@tSUUwdu2)&Y>bJb7fuJ?=5a1EER^lGqq;F_4guu%)HMRFIHRN0E?_z5hZ+ zJaJ}X&O!Wm=At4gf>b&}x`%l4+)`Lx7zwEYjQMDcig^FRNlM!V3F)=#)7P^V3xFpQ z(!7JTn6R3s!6EcTteK|QPPjx@DDOv5T2*CXB}Z%z@|SP-DsObzPh`FaVcdV&m0)j; zcZ>LN@}*RhsyUw6to^1IV&KrBgSL*D84<+V=b92tLUGmkCzrla{Dr!*h^X~IGAQjM zyD9lfz=>mTe@ql{QdCq_QdAt=(BA&2YBUsY=dfzD{{p(Xxaz)h;YCF8?Ul%1e}5}@ zO@0yZuh)nND%kn8|Na%lH#NLM=KqYOnC|MbCw}whr}=*yP7H-Y`-r9qwQ2rq9Dz|0 zBdN65Kl4A$DgS>m=QkV7|7=EzGh^Yu&HaDh$NCi3wnS$c$@$FVUp#HFss7?l0LJ~{ z!`SL7tNPPP=8^Kq8)3(i@(qbit!IaRj$Duu3h(VXaI4Sdu3~_@H&ak|A1shtFJP;$ z&Ff|ziaT$FS{aiU@Te#m;Cp!+I*IbJ@XxAqIeeeH<$>FQ&-YdyTH@a_&X?%>7*prF zp2!e%;=M(CLssc(k6U1h(+Z6N7fk4b1$pU zx+k}@k}uu*?&UWT+g}Y#gV?3_XQkIe!hs%Suq9Q))|Tlh`Wr-J#)v6)bNt9IQZ-?zd%Hw*=ZrCzD^f-D3r^0KBi$+ip$`A6Mk<3rtrZFNxAf zKk90T99Gb#t7ndaGJ(*jcpaOR-2zFV|0MH`0H4>cX|8kH-A>yB@PzO5QPgAAeG<9~ z(7IdVikhJ^RFhx&6*~Cd*30U>;FKs>ES%nYuI$%8RM=1({ChUX}X7!Wu zAA=&In$O5ezi+pM8LtJ8`oW`oa28+E!&*f>9{W97;k4XXkIS^H4+UAGvZx7D{UOIK zH$}ZEkpj2NC%)GxA>My-R{)`xdTyO1fcg{J)!T^@lJhkw=vrQzj&$^Qa(I7Cu2xl- zg5af(2k=sEQGeBmBNF1c9B_MFCIG7eR|`T^)>Jws({-d$>S9rNoIs$o1qKW1U(s7gPai5(qrX(&Um zwy;AI@AZ}{%d9#&PBP>zwc8=%jgWWGH2jQp`DWYPw4k^T`^Nvelzg_m4tOygvshAx zSic)*_56B2$iwR{sdtKA-$NW8Cffewvz4#abf1JwCg*y2X*Lu~6edkmydt&um&!Yh;0Fgz!I z8S zXW#cIlDgIR7Kgd*mV>IL1+VdR*KujmVe6Bnrwi2`nyj5h(N`umHB#h26X 
zt}BBFa)TAfq5C^R?mPC5nk4!GljuO$+PG#|*B4a_2>^!?m-qb{I`I10^!40&Ah?Xo z5pt;rAZdrM_}>Q86li@(J8)D#f?(9Br`@U}FA1>Jx%%}~}bmH|q8K|Y!jaNAu?dYM~6 zRZJc^eBV;Y!Mnx?kn&2<<#2q|Pp)+P>ZBPmqA2KkX?Et2s&9LqBzZimIWVsmGYatA zRXt~RY=fjB;A5x~rSrZ2e#S!_7>vCGqC{9lj*|V8LTb}g!H@mpp{+Rn_v>x&(6H+J z7}nKf@B4Ld%Z-a7|M0=og<;D>XSx@Y&lV$4Ekin}o2SXK^<>^M{r+%K-I&?XE$nJSn(xJK4qrH|bnqfPU>4jm=e=x!oc#?Jke&g(g- zUucQtw<$SVY?d~P}!t-c2Lo8mx6d`@70 zvP5TBSUX%%C7-WOwciMN4WbKqP5B%ow3f{Z-jx6kgNKYV|^tpbL^<*qZ-A^30n?FBY*Hn_q~jp%0Mg-<>UCF!!;rL{!Y{b z*3Cv>f1?;licgf`G`bG-zLl-3R|wc#Q538g0z$S#C86oCbHSjNy?ANChiOIVH2rMI zG5nGlT3Axtm$CYA3AoOV^jpuMy|ROZ?T(T^1UI_*!$t2I@DM>^@!2%tQ*2Px;zGGh z02fo5-BK-N3cz|cST76mXYkO_egPK}#MwY7cUixalk{5k7n=LGIBj3hTJKhyeXzl~ zGo3fkBcT7$3Q6oSx65M@pbZ+YC;(b=HY>1%!!mZp6Fqznq0rpI#0pXZU|dVnIlk9-%u>~`h}VhYjz zmPod{6t5ndj-zKD=!WOo(!>9dq!*2ld8_8dca!LG1x9m|yPCUXkoxbbV)V`B^QlP* z2QLUMxOI2m3%(x6c>7K);Oa-%C(!K#N~N9Ef%3qRq9J)~x4KpV>itdW?%7A43LDIa z8X^^jrZk!ojDyDSMXww70zLApJntoe%=xcBD#D>RDy64nfaU_M6Z)d7V4v3O7+UfM zI23&xL2-PqOi$oj<6nQBorePGYWBHH+x}3PF;m>1({p~`Te}(*tYP8JcKw|ZaIa3W z5|KeaW+a1}*~V9jOh9(L$~YKYYcNd}*`l$FOU6yA(HR-(cSZ&9*~&v1R}oErionDF zkmE|SIb~(H=VJ$DZ4b&-CQ)fO@a_a4)*zSnmv493+6k&S(%z0p_QJ>psX^O_V9lhrb>BAr9 z#!w93wGILaXkvaRP39@H;n)|GB8ih{1e-l>kB{FBn1qGHL%+#NzbvY3$Xf&5Ir5z2 zPG9!I*3-qPiSN%$8O#PHBV)1VD}P1)O~7Dhj2?72@pBcduzphsN8H)`k=p3Wh%;_$ zOeXLMp7o@Qaw@rwstN}`?{)X08s5C`DQlRw*eDrX7{@P}7d8#NUz6uvKJSkcQF?Ne z6pViyWiT|=e=Doa?LjcWpUG)555Bnx)chgcgWJ97&2EQZf!xal z)p2nI02nbGF^RF>u>$hlk&33=WQ-^JoI>Si0u8 zV07Zbz#>r^qAXD{lBu!00RKml^p=Cv64=~UMF`M+kogAK za9tvbFb_5Czmu~*!Wcf7X4}nlOhFn>z@2UYs5e8zXiDYQ=Ox))S3>&zy2o(u2h5!JvYvSsLq$lAJ%%c;J%Lb@e5mEkCW z?eZ|Dux0i&Si?wGLD+e^#G`KKbCx{u6gsr?6jUM?pE*3wAGiPuHc1MIvY4|WVosn|)%172v_ zuJ9qyLTdW=-$|n#8!G@V$$7Z3oifYzxs!m`vv;S}RV*&e|L#YrvkJalcR(jP&|ivp zdX?VXKmoSP&tSH<4&P*Xc=vJz77}8-1B8!d0cW#BxWLd8o=iJfUfU`0+(QVsx$4{8 zM%dD+!cq1`U^-K(q~!|)T~eLAZia5FB+I+)`mCM=ATeKEa>FyeeU0P0N(2$?H5_a% z1c?1K;t}s!d86fx%Dsml&FIN>)%>u!tJSay-_BD*KV3b8rOY0MRDF}8&W3rMO8Cvd zq4No{`UQOiAyeW&=;8TZg&{D6<%2^Z 
z!|qE6iY8+BPguq9y#O>n~H+h-giBAsF%%~f&;2z zHSJ9+elB|j$&@GebI=dtreMMQ&ghri{%!G?7SS%=%2G0KqHH#RkD(za3ny=Hi$(=p zLGvS3B|d!WGOoC}J8#If=~Y0uQMxBB0Dao47Ri8W79ysyRyY66Fcmx+Tm-DB zhy25cx=95+#qc?ToUlOnSSf2{HM2o=*VzYQSjU+-RrVoQq-g{FF4Zg zE~D2d*8doXY~?Q)$%+d%R^R5T*Ja|j(efj$qMbfNU$|`D4f(?#^kdi{t)k*vJRUdL zlxcwb4m#}66CTp`2n9CPSQhv#x;!Mn5l~6yO6GGaT9+UCvj-#Cg^PfUgy(9?6bFXL zpNb`ZMW&HB#=RloUUl{4T*WAYN0#{>9S=giO>#Fy+5dV^K*r~FnE~_`y9;cG`R|Z< zoOm=C`0i!|j9q)!?A~%82Uz7BM!4{L-9s2&lDz;lp6G%f*Hh2|EjuF*ZTdWkb~fij z6_P^E5528|&KH1y9o-vpP$5xCn_I}+iK{MC;6&BY+8Fs=m!-n;b%SD?b{UHjMD=vl z=|HehRp36=l!l{Nb=j)%E)c-p>$yu+7f<0NCv?~F0Cqtaf)`7bVV&u>BhZse9N&i(A3$x{)K4e9C)`q;|M{`52%Ol-Fg#F@RhIVC{{nI!7gqddBASWD!btp-(BBw zy3b`l5s_nR2<)6q^Y+vd*eWbZ{zSIO{;S}l*pU8|lJn$|PvBuKUqx7+=-R09e`&ej zfx{|HP3Z%AGj5jsR!`dCO19@yQ~>yvW;*!(X7#4zWHpB}1(BEfJf?t!{10!5-z-JJ zQX-eGqE>l9_7%!}cZXT{YORv&H@6?!P^VBI%uu6V6=U2bfK z-nUhXzIRgAtSRD^1sRqBr@J>`*yP8cp7G0o-9a4q`1%ZFqkHR25(W(nc!>F8Rev?+ z2p#E#0X>$-*t{U__3WWm|LRC(^ku5R)_I#q+`)twhDXu$zH2tK)}SV;F#zE0@2 zg?0JR?v@D90Hrb{11&%10Dztc$r&o2>~^QX>Hg!vk;( z#!o$oW+d2aJ3E!HTRLmi#ku04&fiTkl>~TQ=DSMO6nU&V@0^f&T|`G#xX*^A`Jd~q zJ}%Ne)$q(Ccl0IwAN0|Wt_{zb<)PfG{R#-xbxpIXTB^TSg|zin6u zSh5q{v1O+fzBxjo@#?QW1SARF$04v2_)CFv*=aWK_yOuc#x(QJ=Ett;&FUqs;sfxq zCIB|&O^N=5HrZJJV02Sr(xjsQLk19jeTIiI@V|PQ~{$B-zwT*x3pGviT$60%8 zCF!>divF-$D){m87X$&aRcy6G_WdbycC+L(o9?%>1B5-W24q|AHU&J)RiTV0+o^D# zT@WW6EHpXfOd)pp&5q{s?`;3C`S)0Y*FJT?+vbC9;6s04-B?QK(}F_(bAgv9`a9z3 z6M28iWc~@r|2+7AU-9?vZT>GSHUD2*%^6Xwe{?i5`rX!MSZEWDhZAtQj+cwo7%6a? 
zSLc=zv`#AoZy(3i_dRGaga;nDKI!IPS|BN(j!XSr`)E`qYOKB0Wf*X2oba7V#{I5) zk=%1laIo%)G5j-l9>dPfyf>2it=GmbYZG{h1;(^o*K*Rh-V5gQHTu_th|#qnsfD#z z@N=S0eaEKKL8ivW8}}v!0nvu1qUJx#E)FXw=}JTjohk=?^dIb7E2n>IU)7z^yXKN5>F_agCUG}=!;#J&CZeBX*c`T6-#zh=YC zndemokzv74zo3(!G~OKC6xP?%!8h!~ZNg_vh8nM8JRn4`F)hCQXDep(R~_D}48xI{ zy4B6+;dRhGlsf5MLde2Kp_-kt&0xj4>3R zhquhEz2pj?@1^q#2>W9fj)Lo|e>Qu;f1NoyY^u>Q{MwRUOwH>_4=8z=h;cgr9=^=* z?xGoVzo&BQKig6XySlGE%#IRELH|3M`R8%$1||7_>z7ob{BH;Pi(>l!kOxD5aw~vz80WD^z{{}CSKKBaMsdz*X zg6)>mlPEl1p-B3iKpQu{PzB-uPdhWO{u5Cs7TY70bf2c^q^bito#+l%nrww;wH*q9 z9^AY$9%^s&xgT$p@9X{}TC>IZXEuYUIBot@Zd+L=dt8Ib>xM9s`UCq}w*sdfH-c>$0J>4`lZ*J!KJWf!Y{KJ18 zO*eu+eRMMb1qB7s`&Lme!UCS%p^vnj9Q2HvZ-t@@!T%j}87W(a>}+UdXigJcB$4Fw!o$e+tk>*3^i~SJOF4C(3^hQo`+k zUHc7b-*l>D~O}$@DWtwNsB+WB=I-1wY3B z)aL(26^f6bcMLQ!gU#$v8OoT`dO;}%ZkQ@+oL)F*{Gtk~zA0_h*@O(Wo!zyFkK)04I`B2uMsXC_I zU!z7c!RhYhJk8D~`gE!0=iP>pQ1&?a zB!)_?vR+2ekCH#{3X(;%F)T=$KuNw;e-z^P__rCKy7~zHo4Nd6PA>hsiCK;Rkg$~!x* z1oZ}mhF_&o*#{n_Gl6O4`E5MaZ`8*?L(y-2KH65;x&P}1M}c~Nt(r)Z&EUbuGWgb` zq7h*-WJ2sQ%Gao%mg#yU&%gCFZGLyHw3wSiqxS1=ra7 zhfVM<(E_q=xL(ERoMH|F6v6KtK8Lk~#`=qi2h8)gZN zpyUxJ+PA&F!GFW~&t>#~6y)_7(HpW8GA#0Jj)JnO8cp|o$d$>=w7`eLBf~3W4w@?I z3W{(h>8dd`6ru&FGa6{(H&J8WF#<6i9@Pa!~XE?j?N_|er(s~ zoQnPL+2qvYPfp!VWX_=|XJ`LT_K`)B)Hpg6`5Jj1h*XuWGaakV^^5GAL8 z1<+W`_)7+Y9;rgWz7UMAb3^H0$qF~P}9YX$|(l68N)eOTs+-Qe#c_pox#H>9Hd=PVCb?037 zc_zYv+uwJQsXssy&e|r6osX(3gtZO%F+;}1ED_{DN(OKVGEW(OEgOHy`z;Y7edqUg zys_WA|GWh3p==edvj;U(>@0s)K za$RXeodzH`gT9(d)4eY`^}kKtGx+twpn!(!VK&>E+`yXpuh(v|Wpi(xTH=d7h;v5M zR!OVLI0!YPL@|EdV)~92GWb13R$pt`GEOT?Qb3x8FL#*Qs?^3PjDp30bwiH;|K&TnmI{XS_VTuIA^Xnk) zsnw>~BEwGBj$xwjGp_8r=GxpTbLY>4v$JC!E~~?Hz8N?^Ndu^6cq%-o7f>+JKkXTPIu#nTp1%Bf8oJEn+~#k zN$lGfo=h(}gTm<=NmRx#HWubhurWa9!z_j0mirhQKozcX)o-MCKS+U+)JmbYr=O&@ zqxm_+j`#c2m5$2FzBZCB1j*|si#Xvy3^!Fg04#vUxMh?he_JB87X1Pu^@Js}Al%lvRC}tTS?07wM`*eC|2fyacbu0nu1^PZ>k4AuS6p2pa8h}3!lXb z7r_gjW1#8@siJi4P7|_X)OLVfrXKQ1D=O4MjItz#=B=8o?40SD-1vq-P6EOgSr>U~Z9S?C>u(HvJCbLw4qC 
ztop8mY8GXcZ~_~n((s%NJy11JVUEbad`sQH;>i#eZ%GutbswFi`1%Pt)KH$zcr%DNDbV>DfG#DbOi8HOuFJpN&gT2;Iw>eOv}O#o z4R?4w{O&%K5Vb8@eB}{yeS>?T6RABQWkJM`{;QZIfGnGhyGq@IV*-6knvpw|-p9>L z8_Al3s`00QS`2aOB3S!KJ6PoClJHk*^e<9Ad|2h$i@?&-W7MU;?%kal^yz-r<+G^1 z3ePEaFu4kt4B8S>_b4Tog*3~bz8YIp2aKD9eM`&~kMoKBWiRy9>3*ex{3JikcJ}Fb z%F|>X-1Il#2ykyN?PknmKS5VQ>R)oG6|@i!HKt@e_*{`e6InENts%!y^}F{k;`8W< zOrqN3znhy>Y9D=`Y^b~%VAL%YTfa)04G_FL@T75=u?EDHHkKYcahGyN8oqe$#fkN- zL8ZX;gEHG~1>0NUj1-Y$rY3Fo=O%*5W=W@_?&iwRXu`HWXo{>Xyp@Hhxe!iZ?z&aD z4#nffwZ_Qzzrns#X;7I)Zjo{zoMhLa+xqy$Lg_DE<4d}V4`)a2&!Cd8UrIb`$7hQ~ z=rk3pL_>uShe-#nDQLLow4nimpL(^LXX95){J{Vs+#}lAx7hhMZKMAmM z@F@}Uj3|<`r$;{V-DHE@vA-qpGrh)EZ5nLHWL(KsXXqLi6M2tSeldQ*-*^A#+2(TN zh$e0D&p8p<0o2}CZ?Hhg*9_EEM8poNPOG1Aa2MN4ah2O+F;TTtw>uGr!H)Gh>J2rH zXFLlZh85r9yE4=+UxGnHePi3;6^A7(&UUa7E_@yVU?4Y_-Fl<@d%Quv-C`T%DQ|3``&(L^MPUn-q&sCZ zIsW1CvgOQcUB>3?@6N76^$4n~f@AH|@$r9Ikk}0E6n$%+>4bIhw}NC?o0k^zHGQCq zxp%a2gBW2V&eD+hK-KcNgv_rD{9j9$3M3nTudV&qOyVhqdTQ*bNTlgAZR#YREPi=I zfkqQU1+uZ!r~ zapTZw$fVK7r9vJg-B@Ml62+w5DO-4xdbOHw%~CT+&0R2hKK6+*aN;}#xCcXC8`-rj z#;6lm-Bt>#;*zI)V_WakvCNkFRBe|M;i6nIt8_Sqf)GD$y4Ebet;_EQ-h36+-}Hwi z*G}Fgdp~G<3==(#xp-|EIBy&Mupf-xtXVY1eM0f9a^eqffibJ*| zFeh(6S1byR5ldEw}h82UX3!s5W0g3eUd%q+f2x+?Q9?AJ$OF(NzRM^O0ul)+F&srRw4rpP9NNM zC+6g5Exi}AgJU;t`_6WH(mrCoZ3b*c%ri})d9Ihd2^NoS7gwNk za5jd{cQ*6X&O$wBl|Mpu%G zfG|V3AiCEMp;(0hIdu;xI$DRF-Q+5CzoEklgGPL8%wa`qXo-C(ae{e2;oprIn(;Y@Rg$=FML#BVB8#k+Rsl+tItuyeq~L*%@f2v&d2@{8TD zM4U=vKs?;y0D1T4AlMAjt@pZ4y~b5b@2%c%N=e{S-}#nshr*)&pdIT`hWpYx&!zQe zjQd!}?*!y1TmKrsOhSFkV0&vQpSUeJ3^??Yn_vhJE!C@OqdrT8p(8U?oK zh4%j8J@{vmM&n5g*a{t_Z9=H#&%@^O?8k?dY_{BgDp+AGs7eel>=}gdqYj%0RVi$( zsT+LAc6Q%axVf$PzQhzC+57B3hfK@;tUU~41cfVo{!Kj}NUffe)J3ZeQ!*z(w z>Yf&dPaI1$fq6}(4-q#NuR(Tjuk+8QT?>!Z%}?WO-j#B?w@`gzPQ`$y$X_?XzFGTR zq4hP-)!S%(Z9A9kK-iSIk7=8q-+i=TuFWi-ym*_>eUoPt=U@$W&Du0xolIbxFcuds z4|Sb9PnETL$71WkID^fx}bZ->Qs>AzZ!# z)c%0bGRnt2(({R^w`7S zQ7`JPVihS~JElzLcg&Jdd}{iZFO;O*+4PfZg117qLHd0iCL@#g)Gf`g%DXKUr@=Yy zaQwqceMb;fi5;K|T|B 
z`ANT$P7xM#`E`EtzTje-z>i*~rOcq&w0y=+5+UNB=7_ZR+xavh$!gMiy9+D2V)I5) zXmTO4S339dDqho((|)vpY7L~`^o1fNL?K(C>SAW7+0tP}5O6WnD~RdrArPuwYBrFn z0t9YDTYbmUanM0m#&K`|H1tT-76<{b^1V|*ZWLDqsJ;U0k+kIi?txp3rqAApczcKB zo-dSweIHV#%4W#2=aTn${B1Sv+UK<<0kN}qKR$ZB4bCuBx0k6_9x~vVoKV+ z&(}WQ=Jfd5nXXxN3SCvQlpXd}JoI-|b2eC!WgJd}PGeu$0!A_7d^#zIInYxi2_?*Ae@&^G z$PDnH`PPs*7BM*M79tWQTA8;<+CjnjahNS z)TAw}dr@;mwFV9luiSC7%1XKG3xtoE5sB2~ygqfPHmK?D`3S&-UbuAZDCpu%&f(5$ zZ=tm6>C+h!4NRlD7~_9!xK|Rw7kh7$EdN8&O|Q*;*ZCaD z4jJd=S~Xv{DiBm!zi9n!b0}i$`%OoeZgb9z_M07f<{%w$=I`(F7_&6GM`$zITB8MB8N6Ln8`vU|&v^H% zzlI7CK3Iehb#r8caRv?DU*F)1A3F@2*T^{A{zQd`>S=|uUQsZ&KA$%6(}JuU$Osz{88r^rp+Wi2e{`0T9QV1?p4 za~L#5T~1-Vhe|5^Tiu~ICc2J`73V*Tefm#B~4=bveHUwyMjMBL|;cX%8)=8 zoFo#i&)!T+)w-21=sR3;km9s1*flcnP%RDC*F=Tm+O94aEg_pD%leF8vta2*Az+P5 zADCIRacf?WQ5yN&B7R1q%5=w5DPM1NI*8FkNSjOkOD-biO1n=>Yb5tgEnr6RP3U8p z5Y3K}dS=;@c)-P$KCeSaK>{xIyvtA`@hFg}FUHmS*FTS48)2aw_y`Ge$ znPdOp^4YsOOpB;eHiXpO*`L}sIyT{J3b~>{{`Hm*>q&-6fwqLN*}Hm*SJZr0npYDr z?=PMOu;BO2GP-?w@jR;0&XjsqFWugHNL(Ya_7gUH7>j4_c5%P9E#H1=OZjV-#{l0u_)~I>-0fUVyiYkdf9XWUa zM1Xd3e6i;hJ1jx+30m4J7u2Est`0T%J8*(f$K%%KjgCZsHvMO3bvqCnPh3H|?xQma z4rSbdWu=z(`9a-Vy*y?Xf&ekh=h1@{dte9L4d-_~uQ60YMb*`Oc8Afv+%Yp?VF6=U zBVxaZSM8}7nHB{T5Ec5;B(df4+%q?_-G3OE5S=3EkUl8VV4L_ckv;LF(c9jrKJ0u# zcUAY~BU|YBk+VVlfiscRFj_~_Mj8R6yWmfL^BTYEytrmUr|}&luY{yq2gBhj`^c5Z z^S(cSkrU0?2?&(}>)0c{^rSVWrQMSY%$yc?UR!hrcSNmq+0&B!svJ0?5C~GA8}c>6 zj3N{*t4OCfKpu_^evK+tV7fprL3p;sL9(|iBI7Pia)v6MwpCc}&x=Mz?g403Xl<e;viOll%5G z0F13z2bFa2Hzg%Djq*8s(f={4DAR z_VYbC*mT3k8^YwXI%jshm2GBx>{5ieUdx1_gq9OvdT$5b@dmgLq=((RU{ZK6<-f+T zm}DK>i(S6*_7hf2xOTX|1-7HO4%Lop@E&^79{! 
z@9zg?%&B$Nbb{u$4&`iUl7ECne{W^Zt*<`qAxIkdiPu5@9OKNSobC�)v~C(0C)c zgd3@mu<_@wnt>uVJydQ~oz|jKOy0;^`Z?+o2D0^+hp!@j_=nH5zG^AYBuV|wimv<8 zJ-BGiO^XI}T+0%OK+mPa+&L+!)PYa5H}wL${$XzJBCc;XV=Co{g^!)F^tz?jpNo4b zH_VuCMYaCaZVyd48bC?#x#Q0K4CK%<=X&Zv)V@IQ!g5ZVK?zTp+C(vj*rq zre0*ZTR%sn9`4BUqa`iQwuwP$!iTu9y z*^Aa8nvPt{NV`}cy5l$vTGknczicBgdPa#+$B~_lxB0^l39bW-wL`u?WXo>LbCrxs zHO}TPn@o1wSYvVPGZi62B3}9ADk9<9rEQFD-?ViCJHyk~ulRlQ*z07+ zmqT0+dAd*&o$#ah@3U!@BqPvJ}Ns=MjBuIqf9PCEedGznEA@4tG^@#xdHP z5}hhW*p9vTm8p^F2zoA2iJy%YoUT99TiNM^!6xPDkXY%@^R6F7n4GGx+4V!RemOu` z=Bso5M|O}5LA6BSOdLB#UmR7s1}UL!yoSsl_4aP{66T2X(LM*|9)bk2fjUQG@;XV5 za7g2iD)Klhxr?NUp}g%l7S(du@pSRzjsod24a*3J?<_x#8}8QdV|kf7grum zMHRS^M;MRa{Q64RKHpz0W`#~YUyQ#oG(l?D10Z|E)=~C)c9e1bRQzl_KE8L*d#S4H zGq*7)2eRPeh6YhjH3bvBj1tQl|SyY`C6lvas01T(9PNZJK6 zP3wxPDqmT-KbA4>ntJkBD=r{uh>P2dKe_5iem*i@&Qi7(JIJESfjBKGU&VlMgWXOZ z+grrgAg-ko&vt-qp3qk_{Jyj{S5C8tp_aWI-lcFeqdCorB>t+{;r}X*a{YZ_D7jsx@3ZLF5~Y0 zEmA^FHl-=O@oYTk=b{3)f#6wrVMR^aAFkWt`K!X;*hkOEJ}h?qih1@jUzl5Auc6L~ zxmKdYX`}A(wIiw@Nvhre3EN-J<9T?KI85Pa#lXhN0pxf~!g)YyRJC$%aOPVO z1|N}Vm(EBijEx+5zwlamO7S~iGl_`D(3_AYNv=Tp-B zLfLb!LWW&-P|dCrm$Sp?uU4-Z9Z(L)Y`Z^8vKv;BwSQutkP{9P7Ks==4@J%CYWj*9 zM}5&B_xX$_jmo8fH#TZaygRjP#vD;JIFLu_3CL=zp!gk|koyVmeEXBMat*taN>zb& zg&Kq-YKy~J*#7QCz^h^O!Y`}mn!;bvx)sw2>M`%V$C^-PmWPOs%LdR>R9a zjk<;fPnjUHaeQF}hq2MN56#UAxS3c@3Q9#gOvfR69IJ)f)#IIsnP!H1MzFJ+M~v3H zm2atRwZuz(u=p#QW$W$iOXDKnfSyYt`5~>Wm|Mz|({I|E$#NdL=fer>#3u1y5dSj4 zhbTlcNm<$ZXDm5+&{w;^Vnmq)aShdk!HJ)q1*3!J?c7eue z4Ayl-cd=DH3Kr87G6hlUw+4yt%YStriba0x#%6h8yWB{-wpg`bEXk>vAuT`8CMCZ= z-ET)=GS~U_weHAuj!N8$QxriRCC_$2*OZ)z1s7+y0Y=tKL9QtIwdQO;E))*V`;X)q z!yVh(pIlUb7qE?K#Tiudee6%#>#9!n7viM7$pyuCMEsl%le^k_Q@40@a~s%d)S`(E zEoa4Rt!`>1A*l{oFdqaZ%8$Gp!HH!0fyIoqj-0fBJZJCd=cuTUbI%~>YWI-?Xf_iU z;p(r4yd|!ntJP(HtQYRCvJmF3CM-fcN?4UOu~xNlO#K4l9UutOL;i*TcD40HZNfNZ z48=KpV`9#O&p~l1lqXnxeu_{R(_Fy18x?Do2vyIpfsMNi==h3*DeaW9KFeGKVIEUk zFA=1Sbsa>aOw&?cN(-LAsQGLQI*QKv_J(QxZW9@`w79A$t3iTm_8RU}= zPk1~jn1_ubHVP*Y=ty%DSKZCk_LL+S4BZt3ps?hcWV7U@v&+g|tce!uuT 
zoaf$auXWTi2^OKA6T^5VDK+&=LRZ zh}nwN4f|Wi2H;M29qxDsS1;ds?$L2%vs&=*`}(}x?fu@t5*h?7mkz7o7{o ziz|$({9mgQP|Q^QNr%LsNmqXDY%h(Z4D5=5G#s8mXc;bGXjqNhviHGjue>Uo%4SRF z*bqwj7Nod}m)P&L4UmIEG5T06`^F6ydHyGsz7w|bSdf}FmmV{OAIoAn zvSLZ+%SiQOM*3+%Bp+W1Lg$l}=r{Uk#**4isDECH=%jX5K&c!$Byp5BG?w8J;=YkIeXoqkj znKUFjOl-m^nECRn!;La!Lg$gJIgh_m;Fm}zxFr*;hzA!C9k~v(P>w8rpF(hXh1ovr zzA%Rm`6u4?vDUSNLT~;c9KJVF;WP;$)M+Y!vNGWDe8gda@!UuX;bF}B<-Nf*2T4sj z3>#r!`)cWpK08bL@-hHE@LQROyQGIdK{mv!k;3mAV~Y*& zSx9%5c6=H`R2c<5TZom~S)T3I8*R!KE9Z zGy!Hum?_Ifj#-ah^FhR$lt)QpLd z4Z=r(dZzP@l^;2su|VZMmnmOEH~2N&6&pO_5y1FY{2%~AEy}vnB0qX?;I+BeKcB&f z|5-n=5l=bT!BIq+;RyxX6beD)7x>UAtobc61SA?P_ozwGiB-Aj_c@!Lx0)r0&$Q*; z7-Q3p>Q8fJ@t8ETi=ab%YjAt}qA~>G@Vs;N-`I%rADs}msjm0>eWY*01Gn@It7Gr) zvfk|JHY~V9eI(H5^?}anqY4?%?)Xku8F<& z>_)a|3WD-J7>6{IyHJ7Ny`sr%kPEeFA5=8sz8I;*LW|uf$ijVCB$3K8y`x{FJORg-`CT zC}*oRScJZ^5!az4e_~k*L8Kie5o|%0U=n+}6MSoXJV^q{avZhx_N7Rh6~0qzf$Y&r zdu6)*)REIY#^T(0%7wuvlqQEMvE;#rG+58^o-`ukh`jLP##HQy1~6-E4c@rB3Pqh8 zDUnBX7mjDFaBO-{#bn&eWY$}&K#}-hW>rwhHS7<%)64c=7yoZj1-pKq1+iGlPBJuV zKWWI?fcdcbKl5WJrm2fffh~(~uvkVjp*vVr(~|$L=|8=URvWRpUf6Lsh5vzbQvm?> zx`zl(i*xr!4lxhdG3~Y`Q1gGiOqdro9<4s_DQ8>s)cb318F(RE9jSx=U_oa)!&<@6 zW>xI-V$Y4~$-l&cpIC)?eD<+JdcA$LeW$*9XCE(FnjzJSg_7=*jN^W1@WeUBcjDH4 zDPL7o!srDPfz9aXRG;qPXHjo@CM^=WfXt`E4qzoma*pJ40+uSL4biBj23qPqe)@#A-O+O882J9sS zx^ICqC-ENXg873a)hiL?Yz@}dc-2eO3P(wUqi2Mlig-`}Xn^2<>c-!c)nYA2ANpSM zuX$`hTok?gLtX^Ds38~f)saMV)hGjY49J#-6JXcd)fmPuT>MU&!;gXb^H(>&Zpei{ zD6$?;nhRf>Cl)J|l?%H+@7`H_THjT#q2NZFv}4$jI?{y^AFw)t(<3NOQOC{@uK$`a zoPZm>!1K=HBz(h-CC8)qCeFF)q=Y?4W0+Y>aYM_;Ck3GXj6bx#QiT@aGiN1BTVkl{ z$_soMv^o*z|IS*ibD=5ke1x4mH+90p^=6jL+vCqdmy>bpw>AThce8)=@3y`C^n)S` z2As*5mQq-ZofZMgl3aFv4EY~!kc=DVgPk4%_|XB9(t z&pkSvEgC-Fd2cJ<#I~D^+)wy<2|Dc}KteTsyumg~<4T`RTwO73uT1x6b7?Nz2m-zv zqyOe#?uynui^nat&s)saS#K051fD3HM8_dfRsv_4@!qD$rGwLBE5@Z2j9$ta(Iy%Q zyI?(ek&`*!o}zI)2_mMe+s^6{Ncvh8eAY-1@6{vYFcn>k8*Sfm zy$cr$g*55TbyE3$Y-}MsJmS0A>(>=$`3LA|Pq1!y36T*z%Y;3sBPxQ9<3LzLbMRC2 
z^lI6cc)`I^f-xhbbhyc!6GZwVIRv`9)wSdf+(mLG-yGJyMG40l%UHu-3#%X;qlpQ4 zI#_zNF=lp0{;4(>6BbnpqPK82Py0fT!H1JSM(`6+d>88_BgyPd;`e|gGv!)&v8f|h zKFe}=GlJEsk%FxPR7!jXRBNR>!wcL`rav1Gca&M6@ZFqE% z`4Mh^%VfTB>88(OnS}XjA%!~1TgzdO3p7|7|926;mpc4??7wq26+B<|^nJ2fDzywu zFo?l1EdtXHOpk5ff@z1DS-<$rG(ZFiXuFs|}Y34Kpxiz9w9v)SYh`Qlsa!LK_OFPk$W_-wQcU; zqnMAG5Q$Prs$WQkS8`znPLX==kuQ7CiAW{Rl1k9zUL&)gL2Ky%RI6%ljx`3Lym78HOG_r#NWZ`h;UmT; z8Q;NB(OjT-ypxw`C{7rz=Ah6?Ilf*d)0!r@p+-^-rj8xi z_6SQ&${Rp@207;QK;#<376gviKcGm_O;|y6$pBqF&Tj(sX+L)PBhju%zN5&)Py{q84S1 z!u8GCK6^gp(|xu;h?PPKnUh7Lmhp+RzfjWm!UtOhw9(KveIW^uIn_ z_4XfElclN`*ZUd3r=6|g_*_mCYn{^noi)emliSaY^fz<49-|%;zdlvkVbJWlK+ewK zY*{HA(P$@!lXVkSTpg#-w&~WQVm=nA@QV~tjbwOd-7zb2C?(IOw{6?D(sBB$ncUFf zOE(5xIKJ9Pt&il#NG9BsH`1^QjnQt{9LJsje&!xuc&TL(@ zAuXdsJ#S?ulhXa4ohB~W21ju2HEmn9;Ale><}Dj~ZAt1pw2jd+HpPP}W)J-w1RDseHl7A;l`H-f zBR?QsBau>#e*U!E>9Dp@ArRa{F&#eiGa?C9X0D*u+HD^SnppyBly#h5H*jF%%7=!sw59c9vD zehhfcSO<-^K!2XtS}}-6ld)lbeq<@ttMA$#^BVn6O>T$3LxpcObE-NtEn)SH3DAgsjf%Hy@L@o z>)9|}Njhf6u=~m;LtCH0meC4`1j`X@*Usz5Oj(WAi)jVKP9?vMg6!#`W_aJeyzA9E z8Et=&jhAK;rplBlx~kENNni)V)@4o#6iK~r3DI>TTeDky--t|0k4HK@%pgO9xQ%UD zyh!gX7B7xtM3{)5K!6}U%CGpooZ#bwfJBA8TNJ|w2h=#+HMy)2qAkKu)x~cv^MTR5 zgRFZprT~ARVEa$0VJl_teYh6S_m})2e(B2S7D%gA2}!UY_BEL%&Tpl&tiC2nrB;xd z>BKo49MIQG#xbHH@XVM6HDxXHxI_x8HLWh^aO2<0Q|I4KOH9SCksvdzy{{R;Q_qkt zt6QqxbuiwIc%>4LsbH_z77CuZ(N3Eh{Hjl*tq**sjUxsbL00hB%O`K$_t@x|s{n4T zNd=a$$ae5z7;Rcbu!eQO`0qOBG$j8>tyuBKRunfzdwqI*M)DkXw4BTY9#k;h5lpSc zQ`n|Bngm4zP!!TzK$%?Z-G;AmCHO7HG zJ4a(MJnx8jrjb>P`5nQ+l}d5)GCk*Icu;gi*^oOINvafMb|ZIakvKmN9Bc9!zuX@| z8c!6fcJBtgI}cj%Z*hu}cIGcMT*eEDaRt3viG8Pz`YPlFCsx%E3 ze|0qp+oBM@_a-zIsY9^~(nq26QCP#uvzBLITT-Fz1pxTVGcnL9>X6Hfuvh0pCi`ERa%Md2+UxG~gfM-;9Wc)ekf>K{tXe9Mtf!(RFbeqz0o?=Tkh6Nvrj3gQ`mk*o^N zm!-*o=#C|``9cYa3e9*JN%R@qkelPrEPd#e)szjS?u45l-g~tSiv;RefFk~@$ll69Yelw0B?`5LzC;tmCJSyx_+HqT%Gc-2 zhqa7V;q8X$f6QtH%hylOT@X$Mzo#h71A{SUK$?cZ-d!_6boCTtWx6T|zRb+Ik5lZx zC5dG%G$-g=G*YM6F_`aAlH>GIDIqE;_y7oJh498JT}+&LXR4d;+c`H(r3h&!=?z9x 
z4Q9TKSxmY$n+qmpaZ(L5^RA7HmY@KNAqINP#5>dVozR%cDNn*ch4az#C??EvxggEz zsSOE4zWxw3&F#htFngbgdsT{RM~3V7uK!%; zSN!T%2CcRzG~5cBOfItKldRJy+p^9QA@i?}dZ znE+cDmfM=j?ciR(FH$XL?toJf-0P#?``x(7+V%+5_T&Q}4ryu>>On>|O2>w&hEpt* z5)Q%Yc&uncx(~56ht=CiOPu^_jEY%zk8Kpx8pu5Vbwy1^yuRo6Z{#hTke{V6p)&Tv=g`ZHv@IDp| z9-YRIOoK7?Vhu_H48|kcl8_9){<@Y7i_RF`qbV6-7s>n$_Pk7Q+O8Ny@3HclM47Ac z6zq|t>*>*jzQ1Q3l^j2@k0ZK+I`N0qp{^YV!oBYzZE5 zSvR>;F(^9oMiSA@_%a>wFdl#lN12STlFn`{Qmaf}rDn#9RS6j!Q3~}X zj=UMxLXAIWT*~kt-mDJCc)Cpz=ibFBQnyK#3pFG)Am4l|0PbQn#eT`Vij|AEU5G%h z$?8@IdZ=eNwR^{eh9<;Pjkqg_&CZ`Hvor z^fGvd$l6WXOdtBDp6J#m__((+#YK7r9MVZZf^jwc^VldYv>MnCwxEHmjCA-@!jTj?aPs5l^liizJ(^&FE1FpZ{Ym2#`r~ z3$WnCaEA?+aPxO%`B{1|`gSd*Ka{eb%NZ?ZKVE^@Xr40xBKY^cL=YK*9#^7FK>)h( zQSI76fgkV{B@bpHxC!faVCy9_0+fD8)Zyl>Oz5wZTeI&x21V>$btPM->8wm90k^yf zdoyGD<+a&Jz#pF3h!1alyPUX(tHDr~S87UyD+l>$24NU?oQO9D4|DnM<<{P-5v z0EfE~)@KAjemmaKTCM0`k3tG8krF!R2_~LbrBR2%teCVPh=veVmQB9mWCw` zRBgo9P5Zjdo9INN96~`85TLimeAWEwn27-7gW?#U5e%o(cE$*1-b}L?*H}@0i!8#D z>Uo|PP&r6F`v|C&?si$#j^150fj%x~5ONvfry{1>s%V^z?BIVI6%;awoqIAAE+1r% zr%okZN!tCI+p9joS~>M{6SzZ;3?!2Dhs9X!)6EG?W`;1=K2r-_=(Wi~M!Bb|OgmT_ z`2VC)SopD@PttM9_!%^JN0ir>nt%q^UFnwBe^6%XTT+3YDSb?Ycreb%B%%D&Nya3+ z2w8xJsD7FRj?pAvgW`tTb`Y4^yWJDg1&-?3wn>%6BsC2_CNkshL&e|3s0g6 zCp}stZhun&7%~}K)l7`s*HIU=ZT@Ig^~ciyxVAo{|#log(TGcqhFz2n>YD}PfA{!SqL*%27i3L zVt~5xwo(|dpyWNbTT%Xq90l-OjX0{cQ19gm4a+43;MeNTZ=^*pQErF466HVSl3n+B>}KhjI4M{vNuAyFoXS1WABDQ=ro#C9LHsinW@c$u zat7*s0VfDf|5M;;M0)rQl0tU8yk)AY$&F5i9w5cuIvS^~N4`8Er&8j=LloSD zIB@a!n7j^ZL*-A|ES~z_uESM3XAG>{e-s_b5@Y`0H<8?2V(vtNLcG>P#L70QDc=)3S59YTUZanCyxMgJ9IkJd@Js*GAR@QbFvEkyRt*ihX00jFbI`A{T@Hi7a>$ z9dv>9Zj5Nb)QrZRk2L02K06WlI?fU!y<7-R6wIRSDQm0??g)lKHj%zN!@_9%(a0V@-q0Y8JIgQw0k zW7KL3JY)7Dk5n5?r)jU5j0mN7vF}HdGu<)aLXMCHNd@t)OBd>dOcSQhVqu3=2eTsJ zgNs889adQocnYQEJQ%-no23VQ4pIz4bPKzPwc4-DLBR#uam?%N00hJ1njr|mOjTE{ zuR*ca{PW6n35vM9iK!*t8#DOOToBZaHj4?8k)~387a3NBLhj#R<;uK?z!bpJAS{wMPPYv6QFvJ; z1pm(5kCd0#WeWoFpwEhy?MR{TpwFJvXUtWgmeSGOP~>%i;$uC8L4s7CRaGSMz)fV7 
zUH@X6>SJwD$y@wy2ft<@D9oe0{#fa=1O4+V;?Bu0XBj9@M&lTPmY1jKr%$u)t-%0H z3-xW%={G`|GW$M+@#1R2?cK`Es+e7a%3W&Y1={ajI{pp38a*BZf*cLMk@lcca%YXg zlb1((z53>tdl)5ewLO~{@W(aPGbV;*m_@yq z!qTY3JAN1dwSq6%J#P}Te0+5klVk5cW$!ppnl4pN5rBxnk}NjD;mr^O8WxI(tuyk`0_N-ZINriG=?|u0V*1~khV8VY1|dGfHsb!! z+(Ui-?Et=|dkl0Y1P6cph=LaS8TfA9T!yz?PpqW;y^36HLg)!o#r+qiEHMP~Vi977 z$7(}MP96Xy$AJ4j@)5S$ z2snd)MC1dM)y=FAI%aa~((I9!l;V~J2~%)Ps1pnWdtN_h)#4y1#Z|)Fy9R6MzFoTe zsG`5SF9Og>19#F$6A!2U5?$CmJUloKIWH2K!Pd!8Gl`-1B`tWbEj% zwiRkjD6ZDTM|sd?csJIOZSX&P3A_*kqq5%5i_x!yzuk!p2uJdXg!FMp@@_6aB7IoK zTfZ~n1_C0XsCgX-MJnqGCJnx&_GY%K+A@wwo}wu?zoJ5#%SCTshjddm*NlVOA60_o!t^8= zI0W__5IW`8Nk&UmI_i37>*#cFxlw+_lofMOq0LpPidbt%JRf+;51US0iZ2wkzhXBU z{sXo$ZRM!4y-fB)6GIa>mYK;(pHg%hKn`sr{vXS;Aw-_P)O1OwGV)Fmp4(3wz9Z;JL^LazLgBqs3c>31Ete zkvJ1G`mg2RFVoXBnbHFFXWG}DO5nA2ddz$^Q8rNcLw=sroH}ESu(vXg%7D4dr20c9 zVNbh2>kz^V5OkSK&mtMk#;7y~;;>bHPfBU~h1=K)Dez%9_oT_M9oq@hXPaCI-KAEa zu{h^qo^D~8_;yJU*(bQ2%Oy5pYPXS<8wW+^w*v_EnVFo=7Mxz0CO69%AvIkDua;ml zz0U!d&tone{&(zC2X!Ary4j(iv_c8}woL+hqX_34lAb%E5GR|RK3+PiU)tc&EO!lKt<)6Q?q{01?$TSpi z38`d+Wo9~JQFS7;L2m6=S4)!eGXEzn&)k-^*? 
zd1y`4oT}4%G%!z%}xCXHc>M$mhmTVAT336kckoBel%Bj z)&g8&jvAf@O!Xhv1y`%@vuHDzBU2eIKJHE-d^ihaG#+dinEZ??qTvKcSlIFl81&S% zoHEM=3Op{yn%GAlOe-^MQu7mA{UvC{^itXKzvVGn(In#i#7D#%-g`5-t%^txqr;ss zRa0U@3P+4G!CJk))@m4Yv!C;=t6-d2%gT=&k-LlU|HZLBjegiyu>*aHJ!<&T@twR$ z^k4HAr3$u8`D~&vUEwT~q%_-kU^k{QgYV^l6xU@aP~?)2R7Ni$;PRB>bq>wO4x z2Q47emNCk?Js?qGe-5jolGaEsMPNIPaN$dtXL$dp|N+K@#;;e$!}L;e9} z9|)HU8%z}N04-t!fy*cV-| z&}2yI^chFepYwSOh4h{7N6VIfD{fU8et0cv8q!pPWz}4dDhN9|6I4wEbU6S->l0aK z?`%!J%XqGI<%f9I^uH^v<41c29XWsR#SV7|oO?9xCy>;&NqxDJX*3)v0PF5mQe}Es z@{;McY=s=QsWN-j8l0i~VYxwu_RW_Ls(MO$M{F8D_^*6~WTdgNv!&mSpEEAgV7HKY zTz%Wg9D9(mFuZm&NL&x$k&5rqgW!Yx@a3u(zOIv;Ue;XgsP!R%QYvY);a(757zH9- zc4Ud;32BE97bj;-a`!?>KVi0llNL>XV{9ku{Qmt2^8w^JR*d2BdNFU}#jr1+?>tXidnE0BuK=S-> z=h>P=fbRnz5T;}T#2o|*n;igrz#sHq*Bq9%ys)H0F?pyPCv1_YM@pkxZGk0jT@WbQ z5KDokY=z2KTuDMU4aqZi^4=l86&mO^S~CWqFJ#i%2anIL^fydaUH znXJV@%IYSNofgsOQP}Cg&4d09K3VJd-5y#GZ}o0}XOvHnK&sdphlZ&~#{|6}+ePr)l?$_|NKwLRKN(BdZ3 zo#DJ@U=>sU752Y!1jPp&lbVL#t1ET51sA7t1e0$u;%X|Ct*=X&mew+NwOB)Prz=`#`&@WnIu3xwe)a~C4 zL3v7x3@n3V8V#$U@_G!`_`vmnCMluP{oO7rK%lLl3x8yU+u<%d=vI7RcD(rIYmub< zT~sKdn`Pe^#RKp{qrZlIH+Iz?rGH+&5V9Psbt{^s~I1Ml@4D2Us9a; zf4SJtwo@OBo~(qNojBF^%Gy!d?!UHHei#89mXzm%#QE2`WDj{{{~$+0LOqi*%6P%0 z%3*@i?u*OGyVk3B*A@ywsLuGBl2XYGDBy!kJtwQF*UaS`^K4pW=iof1FET}khs3Pk z`NJ&y!b>98;h~${_Too$)x{x$R6!8lWcpKg1iM0@TPL@5L~j{1C5nuVnU4R5xHDw3 zqy^a<2LKeQ&$;g-_YXS^u5A2l7-&=BGi7NvGn(RPbh&U4IM@v9x)hMm*~+kBFCBdP zu4W6LX$?j_MX-4Jo@9aOZxENUak7i;55J?NPMBy`KM7T5ki?o8-nY?+u$qaWER8=g zX0`0P5AGVR99*~Hw`{`*p!!-^knJK}Mz1=QZU%3}(R)yvgcrj?|fbhq#uk$67 zMp4}MhtDq#SrBar_6ynA{zL$l`8iMX#AmJRP2+R3}^5MRaqpmbj8GW4!Z$hLkza1`zr z@k1u&zx9zVlB`!`#B2Lg5tCAMDrTA+UfcW6Nk5kMr}E;uAB)ID3+Z}V$xKiXWLCGu zb&@@Pb=!WfDCLy2e{fUTg0SW%7c@zmHGmJkn5=1dILIl&6ZLKPV0MRz{m^T^tnU0UCMJ`aMmWMX6AQLqmL;?q?P zsbsx@f@LdX-&7D>Q*qjpw6tK(m1T$qYAVZXr#d;VCrG*3N1uYBJ$*>h8d-xGYpn=o zUXj?>QLCMN@Z(K7T^8!Pfq%bg=|gHJDV*VtQ|Rre}=?E(~;cSh>N0a!&!`UV$bA_ zrNERQ=kmQr#)YKfW1eZN?^ZaROvEf+Yg$8b;+I~$(Pc$u*9{X-G#3IEkEt*`$QSVIog6J# 
zA`y-Qp5M6VpbaKYFu}LMRK3jUvBOu0mF2z1`>m?1rp5!TB?KT<)b`${2^}{Z=Kap0 z{@V3UP2Cu&xngy8UO?MRAL3Ui;OO2=NV3gbgfYwkP86@NxCxSNd?D*Z;Zxl1p2TPq zrfV*YYx>zPG-*J6HTk{i<}%v5b&p^5)+`-ncA=7+ncNZE0?ZkE3V~-}!vX1E{LVMpgh3KmU##d}~-$~?0L z!|)PA9W6o#giPgsU|Bd3WY?@A&mz2kBdC8gH59E4D;y?C1g*@8X)44>)LvUB+KSRrZn=Pa@>glXfFN%iKv9F#NG)hABKjwmrQf`7$ zE^WH##}=w5_T5xu{lMbWSxb-&^K6pkh!Q&d0xdri^MFOgdH#*LE+|n)iWM|pweW{VTV9CFXr9w? zT@lQL5&`5YX#i=(c#8(v!80ed^u*m4}!_GKMeCmXy@wwvgds+K#6l{NU|Do5{(O1B!Z{bv(e>!|OAEauS zFeCzQ!T5<^)IA>Yesp68z2Lp{xE_t0@12s0l`&0uW2#aSd@}jt+iIPR$@|wAI{##s zO~&Eqz$0ku7AcgPbRy%=czUPh9_h?#Y7j1-_uwi+$vayFT~X+LPFx#MV3UgN7xq*W zdRE@0<>|@hX2qG>alJKa2Lf$fQ{-%T4DfS`J5Uf9P!LYt8I`KK-+Y^67+c?upqH?A zbu+jCX>IsTy&Mr$c#Z{Qw{IN)7_C$@ll$C^JjFaM4UaBV3d+sjB%0sMUs6dF*N}-xms`V{CaT%m*h#p@O z>BQbq6`f=qyyS0ry8-B=tf6jBpPis4XrLe+l{eb)ECZnKA49`I8v$CsCnT;z#CU*a z3rJ6pN9ZOU#7HD0wcJsit~-$nq-<+5xq1!z^C_`6szx(sQ!bfJfwoLDM^!hV!6YSJ z+0L#W|7eCMNd}#2)Rrn)R4P|t<_mHSDlSf8mDcyxcR%pilbomaJVaG_erwu*dH6n; zqfkc$7&t{y139)h%fUV|pyCnKR07)+)&mzNl~E!yFB_feQ(|~4lV8CVewB`IK~pJV z&M*5ev^{b(giYFsq`_n9ZtN>{C@9!j#P?p^RxU&>uHm3yb=kO%=F>&qmOf-m(WdU_ z|GyTDdlZ_dFE9Y<2rhwQ#LPA(L4NcFlH`}C(gvI9b*L6E0yhqi4ydqdDEI}QbYJ#w z6s3BOr4oJ1EEBU=s*~`r&>xDG?ao@fK z-5cUhSAgf=s%@m1wL)&1?g>1;v`GxC45skT;j)yN7-vDMotdI z3OSDKnsivlGMbhGKdZ2B)r5|NC4od58dXW%bW&>Fm^=Eey|!iZb?s;alW-ume{ME6 z^-@gBV6DY|joezuIF0uoWhvV7FGr*jd;7XXF#8r@)E{3E0EdqiKw}A+tfszOT1xAM zI@Yp=1WjEk8mu1Q_};EU1QG6i8p@7^)KpTH<|>_KzF@VKS?)}5?*^>Muh{Dbomv}C zZ)MM%Wl3xss_PQ69Hptk8=e64H@5$<)w6K{ka$v-q*jkReP%Hpze^vX@;;S^oiF#p zP^ZC<|BZbn$a_rk_ND!%!^nzsbP&HxMfr4&>`&zRfbmN4n7}mH0brX_P`(N#XNl#< zmlf3~Eab19m+!$p{M;v`C0hYbGa_hx+LXnSpxzr-XRM%bQN=*EL!~-s>=JoHgqoiD zmVUtXU2Q0#koE<;u(ea_d7+7=)KNo`nZe3H+js%Zapby%dzMdg8Q?dPc>0LC=XW%$ zA&94IY=F+HD-W#y=xdOp2alN6y9Fl0=p-sQ1-ZEslOzb)HC zFhk+y8%GUGuIY{$8=Ly=tk*N+t09D{jR&g)Q+MN9*#U%VFjBCoYKH{i_rn4lrfa>o z|Ip`>IH&N+O+v3&tywmNYXlqo#0uK=MYXTRWm&c7fih5AWF1K^{7`h}&tQ%WMSXlH zROqnOkl9@Ep_(hq0c+Lm%78cqD5!7Hhd0}Sm(MfNEQPfILeGVu3nP>A1{j(9C!*9% 
ze%Y-f92R*nz*5!ps^FtUL*f%R2QFQZ?qg>85EhKo2PkKZ?fG5MUQ(OS#3l1T7ru+F zj{*hHy1JjQSmy((?D|kgxB4pGy3VpoV$y(Rb%Ou@QQXk+LK+jk1>2b~=1%HZh4Dy`vziB=x^Yls~C#>020lv-;?LpQ~-2kH;EQQ~}+TdG)vi3@3};f$5i3CQ3^ zYuR*OoV=rykE7K;8F2*>kUmk|ppqG+Wg5r&D9;dTq!bzT=#>%e^-IZIqXezVLBrT& z@UWkNe@2~93z#=99oN6=eT_z!x91M{2FA`8&61U;EHu_+{`Z+zQ}A4Ix8FtM{{Ptf z%BU*4w@*+36#)eWk$R*XrKLqWr8}j&J5&UuyG!Xt>KwYeI}aeufkSuCMxXyXGi%M4 zS!>pOdOykWu6^(O>iAtNOJpgMtw<0u=ihwTrl^KTyoGbW!|`F5VD^;|{;*Ck`6BwK z;R!>C7GoQZuIm}L!o>aW6XTd5)NV}ssjS7%Bne6|c$O3=(!|DcO2obc5h<%vtQa7IKA^Y(eaz^nI_J}jXD6Qbc0+zw*m zGAIlpF_r2+duF^JU?lZXDB#CXv2-iSNV9zV=2n^iF}4MD^%w0|x+=}D5%*+(Z+p)n zGcHG)kIj}gk@-va5Iz_UmCi7B(sM-TG9gZ}QMBu+aG7*L>S^TK`ae}ldtf4`t3`*4 zS+Go=c!Y$kP>Ok=f!pk;I~OzWHnjn_M&IKy?9^)CuV?9YyHgdXu4(;7Bd5 zQBNYajdS@nDLd2>L`LZ_uqL%P^s?e#6x`!(UOu7E#8ZB2dT(B!9;#i)q>$wuuwA^h z1As!TH~iTQ%?dE+i+}q5Ts+rXiQ4Zbt;Os7rw1K@bJs%jRGxR}QP$xyB(hl|UGzI{ z_&}Bl{<|`5m=#psfJY=E?{IQ)LLo3%Td_LJuKal7>!>LA_aF(-0WAGk`b#2n8oQuR zBXSrK%_V)B-RXe|Lo6jl_-`$PR(VcOtlCKd8NuQV~m%VsU#5A;sxAif^%f2W!v zV6na%<#KXl>0(A?!t>d|Xs6GdrDS?=5%hQbgnWqO&}rE3oN3R2{281Vn#d2EoVz@B zFNsQTDcvkO^}5C)G@p3%M-UpQ=)qV!vgOej0_~u zxVm?()qPlQu+IR^jSYtx)EOOxcHyV4N>Mx8W1m86nCC2Aq}jL3u;Zzt0>tq%$*_Zg z&GV8S1T?JU?YpbxzgXO#7f|@|2zNjV06!N&KF*F8sq|(Fg7m&tlTDpz=v;hi6_F}?!{@{|?Ly{}xL_P%Q^5Mf!3Uv<6(a-(z0BoMwi+9SaqTkg#>?mqAtcx z7Vh2pH*2+T)_C~?zp_=^DTZ1|e#lm#W1_Vlgs`z7dTFc5)y!=)yBXI-q93sE$jN)W zci(K*?77VK`%s(xh#R+Q~3K z_SwGZ*lrDT=#Mw+#TV5Lh&{A|&l%X$hAv(%Jbc;)oh`WA`CHg`HO0zn^yJ?xXia%> zY$BfiLyFS#=9dCN5Pa)_=e%*kN9L;KaGTbp9fi%{(1NmOTlM$WOpd2na~su$2FzP8YrqpiD@lmitMf1)uah)UIlDowLgx;4CIVWA`=~L--eODx>>w0 zq42Eoza~BAJ$%bJ8Q@=ev~=X5hW6KsUuq+grCk-ylG{ChyStG|2W^?vp5IkS1!|R| zJSPJ+XDyG$!`L6Bm17Q=bH6bt)CN0vhdsU=$w}W%*ORs^itINANY8Cb2CVGrJspQ` zb)d7%O^4T_1pw(B^m`ENeE5N!-7XZc0m)L83yNq5Ii!L#^uAxITrXC#pbdEI`eu*v z#E0BJaTx@Uo~e9t8hIOS_`46)_Yv|b{mzas8ou{kUhRy)ro0!yLl7r4i6TRolRV}n zz-b$y`%$$Iokcs&O|=MfK(P&vM=x10xL%c2mnubaFlTN1%ctRr)FX*W-I!^U`wo+i zI-^egAkap=9LUdqa}}h(l>NB8Yf;Z7cl&ARwr@Ayo=ud*FQ^{V<~}t`@2c&7K7)kz 
zyBVdYim}v8y6~A}!9RB7>w@1h#(aCtmq=hdK;2j1FUGnr_YR@HWSDx=ZKq)<6Hr6Q_OlXKN8P8$@+TzJM)aIEAUWv3 zRqdt7&kapo0e$O~MVW5fCL9lD+K$`%mK__~j;r%g3SKioa1-)p~6CIl7WCx&<1X52k`&E#vUN_LjxZ=#tYs}e7C}f@Xbwd?wN6I)TQcH2O z@5phbWfo`MPTKAqrfOkfq9=v|)5=zU=+cfCgud1f%5fmbfuHk`W((P-W)v1iwI)-# zTTw^evY{)a)4mqLo2YoA7YM3Gxm#068=i-tQ=<$RvO;o68E$ctQBJ1Sa@yiRVIdk} zL=b9xV0Un+?$XP$2Q1o(0S4>|1Npxj?(l%Ge|wek#Dct)dyLE%#oYoGJE@PoZ|C<; z@)J&;GVmBE7WbN<@i=`{Eg{7Dbq{hzio)Y-6WX=!z)WCDZV)D?Ctnk;_MI}L>ZwtX zq3*g$rM9E=EZfxURP~agWyVx(C)$<#uvSu-H&`7L~=IWbY`erWU!GmxK~32z&7iUb+4*)M{62<(fbyUL}X z;gLm}Me|4C>eTss;;XQP>xoXUeV5lBizj>0%{g1R)I0IYWtBK63}X;0EhH7hLQ8V% z&Om<@Nl(RSGmZ4NM3d2HhT)ech{7#I(Uv79d#if5Ql5nb4U;ciMlm(CS+y)@o4N&_ z{#9|!`p$5O@O?)9JeGu3iqbtzYq7Wpi&>&;f(%-8*3}2kD_Px)daZ;a znk{{2M~%;IcIhlz@B$u?f|ir$Ee}Uwu6A6X!*;bG+>FQSp%Jg5dz~>OjdfER!Hgc2 zT^048Zs#3gx&VRG(F35LS%gfHvX}iqLC+*XDfZHS&(dK__!}bD{u5%5pkn z7n#LZcQwzs7b~;B)y6MFzNeECGlF>$ce|L_o+43@7eQsrt6(qxD|?McH8|!+ zi~&PUPFv{vaG(@l1+Ui{n-B=zCyWgUsRQv~->GuKGC1xZjYvO^bI=im)K{aT(C@qA z#}k2~RC=rwBn4zh)Cy?h$VQQ>9B05SnMGgDWEh*k-}&|hnc&GufLcy76!=D+pO()y zOV6e(>{dC4K*$4dzk9CM>Y`JxWx|WBFFz^D&<{W;$)#;>9HC)^Y0^bktoQ4W>w!j6(8#7d2(>HFoYbWxPa;=9VaWbohWgh0wIqJUyA;R;LdJ;Q%B>TbjyysI8lR36tBt z*F(=XO&(Q%$)4OFQXseJpCeeXN$>+qW61gL^>!B8eBL!fr#{c7gZUD!vgLgBYtI!S zXjja|Ll6cT2_qA}pijQTowea`BG`{%3k?X@5@b$NY`xD?3ST+0FjMxUZ$JJg8^G?S zw~Ia13HUvWu(o;x88d}GgT)xtGEhbJ3XN_Og2@`3`$~T3kNiRX{E+Q^ne~<{-`lqr z{HS=iS}K7}2@P4>3@Yq8rqv9HtLpvr)HJtwVkF;*rWtefVj9t?7M#iwaZ`?h@=sv4 zwfFU}Ei5Trm~;xVn}N$)fwy;pv`aaXfTUMiW{s*NVx5xmAPT3tJHUh9NSUd%+&HY# zxTMlL&3Kp3e3wt5wzgX|WBPF24sXDiDOohs$f4-v{q{2Yiuo^+g*TFgl8lZVV-vqJ z7Tfl^6QX?fo4Z#GSaGz9l`X#EdP{n1-QLt(U$$Iw`J@aC(U!xf4@(c%m)9e7zU!zC z4}7VdAlTeSKR)(VGCPJQzMyDAKe6#Rvp^scd|8b3jk6U-jeLDjbz0~5vRKWi&9lSw=8yHd5Ypk-r=N=*>&*L`*@5vnFxto1Bx7H98)pfdGR2n=eWjXGX?eq@pEG%q4pLag@G(l6N7amC4vea^al|i&J zo8DR}R@#f7i!z1mpj9l$6W7y3u_#7*Ctk;1O@MHwe38G#PD zXK4WD6J!+7$M8do`F=p4;H%MORtoN>AL4I6m)cIUrudR*Z*#v^Lk%)SC<6O8lf z=qF5psNO-g+DoF4qNl#1s1Lt+F2)K-O6F$0n}TiVFnd0FZQuw7DND&}`x&?2VW+be 
zzom_~X4GoV_&^Em=ntJ`SqcO3YRfQCKr@#(V3pLi*Rls#8-&yhpP@}JOnGZ{I=Vbv zd}nWmSOJEUkv$!{Z0u}J-TA?XZU4QlmL)iRbc%RTHQM_$e?g0-YfP9o(q!~+csQI$ zK)aoBALEJpAlRWN8Ja5%5zs;@9Z@%L=!8y9IRmRQ-hL{9+*0rKv)e7a!eJVPt$%h8 zvxlwXPV%n=toc+k6kgGB)4uzZ16)oi(Els1D|9?|dNg+I;Kvyr2u66}yDMNz{W9!-8T&0< z9`tLV5LKyQC`jb%NvOiU<7S9Zx%z-+2|nS_vTw@MU-zVdrvN5Yxqn*2m`yO0H5hc< zo?Mjk8+8TMg;C2?Dz5B1Aqd_vuUx41yZq#^ROedQSyiDr%6|oXUUOqQldf`eBe+=* z1TPO#@lWWV%VIh;asl>;g0>-AZY#M92GUD^P`#CM{+3l=v?B??h9y~ zMbgEK3L|ktg{6D<(H}cSKkutKzK<>;y{_P=omYFkncFbMmzW3essXsRB-@|bErFiYvPPVZ!)vc1PQ;Jo_0&@kl0D?z9*FXtQcPj ztMzyy*Xeb2Z>yFNa}rRlp@L4rW1|zNHFNrboj@s2ULkLv-tte{ciH$CTWz48mk9vt z>3;gh*>45~RB=G?or>l4@9C)bya_rZli4?X!4%^{8G0Xra}r?vb}LqHx4`-lEfi1u z*B0crsH33Mi*5^f(#Zkxv0M=zRWJ)NKuSM`p!~TuZ)JF-ZpEN_Mx$H@R^oUJwq&PF zXqpF@7wo>n&Vy0BRkahDEeT^h_1*B*3BF1nqd!9mt0btk=9%&sqL0g78^dK&I$Un0 z)}&%VO>sHP=(L831;_M%{%hVcQo`WDr-<*=OcL+ER{NuA&u}OEo}J0LFz=b4z>`&#jB*MLq2J&h!&9@o{VO zwYu({G*vbgPE=Qxu5zJ}!VmFiJOnOx$?15~i*MoiUoSoRKq;xb{iFVkFColaGzrqN z@>(D)dGes>A7c6{*LM4&*F#VDg(nJR*}x2?IR?4DvV@+1ON zfuGxXg4k8DO-p573F@$PwK^6%qc6$Ol*>RS%d^KeDH`{ncFrpoa#ww_LfVm-dbo)! 
zN}KX_*Qg-eJhvCZzLrP|Y|~@X&Xq*6>Jb)Mo#-kBQwo)OzFd&Ne^R?l_YJ8F!jZ!` z7u8U~7G8(S~@urM;F z7b4B;``hMIlP^ua4Uc16d>O9n8Jv5w0y1}`4c~8jHO&SJHBd24L8k6Hn4Rr{AV|=S3HYCloaak< z`wC}VdCjdWA7_6SXq0pqgE?Y@A$+F?N4>(LU#-ufDpwli9}@v=&6tBABSl$mx6eSm zYym_5K>|URD$7U9KPr9aJq8;WH-ac_UusZI!9EqfaS+c$7YR^V5$QyFWeg$jR{B*H z4a?hwrRGJqS|j>0NanjXQn4K*Pu6f{_|1i_xjrH?!!ws9Lj9w`_=A z@pXIADP9D)JMFL(*+HgIoweJ3Hw*{pgB4)VKkK zdwNC9X6lE|b^zGsSGab(>>#KT*`tn^kqRQ~OSE#1W7Bc^u#Qo{gLZI!WnNyALdg9t z=FQ>IVr*mnYCcH#iPx>m$foh}*%2;;9_(sg*SPIRPiq)yx{(?5Y%xorkii72G zv$3bKYY4;r{q~+Yw0drlXJiJaPo;(TrJ7Pe-(pJ?vLR0#;$v0IykGro{+7<-2}dv8m)YC4 zsesa{czQQjDu9Ldmh99J%9}1_5ulTe#mTnV;5*2{f=w9Wn*A+_xGPUfk`r4GB;`aEQkpd)ZSj8EYN`#wd6z05IlD;7Z|)jhM^WA ztus>Vv$o>r%7U#>)(htR(8rRRcRmV^{mk*()>Zd;3{J*--*OC~DdMH*YW91nUu$@P zY3I@%DnXG!TGKa7Q{{)wyDpS`Z@6vP-JITVZ3N>4f7*HIjIf4zi!W0YT*=5h%tP6G zevw9YYww^pMsHrTRb!24C}pXeA&L8W{u3Av1j!`P!q8dIANx%jT=QRzea8yLL-H7O zg)YnEQE+IX6Mv1Rr)9RV=|VQvMQ)BwUXCSh{`?g`#N!jE`E{jFp(jq8Z$-5dcG%X>nL1+YPd`8n>(p}-c@!<}9T(=L#1zT=fIv`13~G>80;F0BH6%20Ep=KO z0GZ3ZQBrTNe&fA}fKA)muLqLW{dQM!iR-v7NV5DEzKtTAdi(B*e^7KV$q>Wpkf7E| zb50UPwrE`>jhn@}gT7YNGlI_}pRK~_pY0h14X1m5V~>LQq1Za8oiPYIDa-f;sd#Y zcDUVzqhptwmjsumY>2I*T{fjxgzSjoa(m+-%2-VIR*7s=SYwXYpqp_z#WxF#s#Rd< zcmwlq{S(??Ak?uDAm$*K*I~PSOeW-Zb-SpbcjKMsE~&Ebf96|>O94G0T`GR?Co%9X zoT16tY0BM7k%kE`yzlA7YUZW8;uPL99k*HO?e?$6l$-oT9@^m_*(*^F_^g*M=v=>eI2o^n9%Pr5?lmlmp>E{s5Nj~x!};_dDqpH0koFDG0kXL zOWPnD#(!R|Bc>!zdfifZ0}bhnRv_su>9P?TJUn@xx&A&>MiT@u~uqLW{da5j3+G9YU>3JeCn1OS>p0UCopmL8 z3)Va5{Yq;o;M3uCTO0t}RY&%wMoh~Sh?-)n+8XMApiyATWal=`dP8w(gb=MsFVnoT zyPj>(f0(eoiiNac<1>?3RvTWUwe8gK{6LVn$3CVkXcye|KCU}O{9@BW9FhXOr@k92 z$DPX>kV3QT=cdV|v-k;`e6-VCJzeysOfh3f5$LtUOm+$KsZ4Lu_Fgr*(a(bkX&MW& z3X`J>3-`@I8^j(6nA*G)9+5S!viDxTQ!GibBAY}ZA^OYq_C2zqW>#B`MNA`9hJs>6 zU#L0`aR$>~az_kgNyiXVAFZ8m=*&88qt1<*S&_>P2MZ-82E|DJjZ|l5+vKpI>~DZ=Kxi@a-b-h5%ME5J4XTS`&6 zZoq&RFO}Z-dwWjt-9z>F7N3>6E$oEZazGU>9TTV+`7({1d45!fbtSnpsc-`1EC1JqGzR>|7byEk!PP2vt36DJ<{bj?GRJu-Ds4qfdx1-m^^NoE`-XN2CT6~CW{)68e>}wpg-DpXx=y;3)#Prr 
zT?F!FlC3wq&qTT@3`8Rb*LA=^E4-!hi~CT z-&zk1$K0(dGS9I03{T=eGr=1MEJS;SNgMh)qtDWPFfIo|U5w&fjHgyMTYI*0Nyn<)KQ&tm=LitCT53i%K7fgfu<3Wf@sP2)f1t* zMJYz^w2-9yd&E#<*)YPk4EL-j=I2 zp{YK3I)Bny-&{u7csL1VgBG)wR{T;j>y`KvU}i=5tm*Iwk>8Vs|k+7eXO0ndvY&uPPR?yvQV4#3s%v-inRcYoC_suE5G3pt*+;hn$H zUP&!JAzC@W8O-vFiXzLSiHW3@U7<~Gdgub%`9&4qzrIwxBv2PSJ4#?u0{uE{apj@^ zwyKYp7pg^U6s;-fMC;QXaLcvNuN{V!VA$VW)3C7H&`%$o-Qa4SnWgNZG4^B#^g0ut zjn39cPK=@ctIinZ5ArI+us~YqRc}Z!Az|An>^FQ%xd;7#SBo)ivT$l~WqmCManNy& zX!1q)K2z9gBHGiqbT7K^UU)55pY62%CMtnMS~}=~&pi<2&`+t-D*n-#X1^L0nkQw! zb=}{k;epXO=~*xa0J<2L;R#e!Vf_5JeritDJ6o3mvOmV@qkm+B$RL*Y(Z+oG&ktt0 z!_{P!Yjgjmtqh!X+v1vsVJO?@%x~+zt_O8)!%dXRBz58{{hr&O1_%#~T7aO2s(yX8a?l*)v6m#lqT zDX6HNHn|CZ(<7;KDvZ5H5jTh#YJi3sGuS)bd?jf66en(W8*X(PcwqNqP^(eFCnh*6 zTPHBZ-E|Qrpidq*m@tD~HB2F8`%H3BJbFCsI-{NhaRA*g6YSdgN)|x-^{*HH5P+?C zXp^t?t{mAd&k{X0TNMs_H#56kT>DZ#d#!^qWye=gyiIiR@haS)Jc=Ys#TFSR^5OQGeh)Gwp3p0MdYBY7OnJZB0jKGQeSC zNcN<0+8LknO^1iTe#OM*nFr4bb`@uxjKvZm|JCkK%VZ7$6i>!k;5rTAu5d?%tWw6g zt=b*h-Jd>Ijf09>^zqdp15Zd-73lirKx>XCbE{klcSS4ZxEBN8*+EP7Xz5`_o~eRT z)AET}A0FWCGV}k10K~FZJ_Q_g$1yj0=ygBu&-E{Ra{O+|K_d|j^yd7TjDFJYZ+ZGBG0$k9r!7sDI7{D8-G?mk-p+JcU(&G z!QapOtm(dwXu}N}8*Y{FzXUM-rn)=fsJwB2=TzUyXh3n%mz(fN+kMD+E(Qn=vw@_b zXUSDXb-Ch|af_yA;SXyiT;Uchm29$HX|4?HE?iDGljz24%o1`JV+~l9myD4}yx+nd z3^ zuvtE%$N_pOfkL z=U^?Ts`-NT6!z?2f>=qXit4W0OMHwt*u>A-_zk#3%QUpP9B zBT#hpp_x_2jrPJ%Ivy?Vj&@(IL-Bd{tf1qKqMf7lFrp{%Jwb`WtE+t|Ig?=_Ia$M_v!=(6YVI{W z?lmyvMz!}3U(ZU12zQTf2GZc!o@_f~#$m^Qs6{*?l}_b&u{r5$SpyXz%DuVOtz1u%iCx0XpHy*s>u=Yz`Y6ztlGP zP#8gf893Kf%1AwWn}P%>vHCu zf@Snh=Wv6Gv{AYLHTxA6XNW|G2x z!x&&kMEPoT@6`rN#ph?aBoag)jEutJ!t;w(!SOHfcwJSjB!YlIEXNbE`;bA0>S0?w zmkKe;k~(&RCoiGD&g>b>y(^pHzu03^`gwVRM(iSMDcq&>pS!aOSh?_U^TZM)bYX_9 z`gI(lzb)6N*|GVE!V2F$a&T6yCrUlRE!W2jPl_MF2r(QCGZ@6m2$wA;Z}@KiG||L5 z%-EXa@g2MvZ5HJiZdOs%&h-UJylPb|zsK({o#+u7W(qbx|D=>b9xu$p;Wal;s)DK1 zi;ir~>SVR`rtMQ8_t*}^^4_Er)l$#wv?)5-up0B+2|^fO+AEt1Xy?qV<@T1X=w{zz z!G|K`@y($20XwMgiMTG{06`lW;-NzRlTDCNpm0 
zYznetu>CM{(X4iP63P%pvt??2qFrEsXCB6xzDvohwz_BMMV@mMw+LGa&U5})TF}quF=FDk_9~}1H!*++63B)oqR6uKBMi^jtx;&0q5a!%L z)9^DTb;1vsL&x<&$PVTpN%3d5SJEldB#gCP80E0I$Lq3$t1l%fxT~ZboJi5zGZUeG|2~}-vVCAX*hvN3qS~h zMehJS4r3iR-s>y6={U6H#IM{Nr`onn?#G4`FVHx@ib%H?`4M6CT8L&(tUjK*zC9s^ zwL9Uwu6>!$@Z$YnKjs^P`2g;4vWiSmTX*Efw`#Mx=T;xLd#G(+eVQ)`dwpR`U1scG zw(e)=^Qjr@s>FmuLGt0WG$?y~_#a_58QE>5?L~HYMVAn#ql2w9xm=2gi0BT6MQ|yI zgEfP3OaJw>a0~Xs9(?euGxeL>h57pS4#)LVWd6DhtC?7aX_j;;joJpwIz}gf5`+;> z#v?nL4Iu}1VYv+PFA(Z(l)#gp+mdqM$bJZa{2}YQfjOR&ju{}8v_6cVtk+#RUx zmRN|<8#@_jD9!>gkYu-1!;2iXH^TJ)AW=cFD%=0_=v)A4&~UBK=7x*KzTxWD`<96@ zli-t<++b7ad?)edwFZ{6HJd224P7Ke6VDVK38^B%b87=}>u!J2pT-!Vm7eR~$y?8V z_`9Z)I2dn48VUM2G>0K(#3V10vBUt*Bdqq1B{I_I-u_AB1y?5c_CW{t@nBqE1gzfD ze0LeE^VaQRSDFJER#(hs3AZY~kAy@&IX8Z}cb~xfP{r!fd1034;B=DrxTtuRo#V7G zjn95x7Axhl{`TbD`-%yV^44PK+RUCCsZ@zrT#+WE;bNsttbk0i&TFH)(9t3QK6?)d zNyT_)V}E)wO!J~!<5-qYl7r1*!PR|ccJ+n`PWd^hz4F8oPJJdnfu!98X-05cRc5OB&^lXja+EC#W7c^H>wi%$U2Lz zfGaZBsW6t2p|r&a2}u_N4sUdBExCckdLM^Duadl9F;zUS>PtI6TDm>oufDzF=f9jA z@xAtDc0O{6KFUF>@+~x*i6rP!>Rm{)AZS)g@z^hr*Z}WrE^!Je+VbAd>%U!sT3{Z%lE!-mbJ#Mc^u55O4I@4XN(QPDEuWK0M`aec5DA4mo z$*M35&fy{omtLyG4rY@Rd1iWTd^X4$DG^)I$k@xZ<;yjFBoCC78yy1+T7-n_86kmYk+H5-72Z}ir-B<=&(2iZeqiNL;rD)B-+blaxpsISMKVzDcrX(p0r{mq0s9yb;o}a5Mf_L1wG4rdzcyi#FUt{Vlsj=)l?Y4FH=DHDf zP;%Ryy+Eve8zg(|wY;U}3^|T$WaW0Qb28ne!t1%c)P$e%U#2WvUOAt7?(5wCZn?c^ zEVr&>xgDN9GD6~jZHAIx>~%KYQmv<+abt;!YI~hWiF#iL6n8IqyPcOe8{baru2Ftr zk9>%PRF-Gno4w<{v*T%_I|pqjy;)EDetXP!AmDskKL=fy7@yO+UGiY%U#K&@zVba+ zFkTBKPP^`Hjl*nkg8x23M4YbipHT-|ms@E~W{31AA!`;$g^-(tQm9YFQSjG6Iin?2 z%38!ok&sj~HjmF0NCs78+0aP(mG}$257cVR^NOVjYMtk2N7Jsh<`cFWwhEY%krK-| z?mJkPacaxZtujhUMZfz)LTco^nxWoroJr3)yz3w%;pxR8TeZ8rr-(iZHaB0UrnsK} z(D`plC4O()8zIZ$h(-^!voco&S#RvxOkN$xeCiHTm+H(&VidL3Amg3Xg}sX0TXnfR zlYFtaGcA)lR-z>?MH~_NjcK2M5gj(e90RG4y-K$Hvjz%^*3fxtUnY{iG_}_r(-o!b zUv5Gcu2+j^ttB~-p^?EMHJD*0AQAx&!@c%%qqMl{<;rs$aM?NQ-0&|r z^yG-|#-`>TOoEvs(quYV2xGbcO!o$ok1^^S(=JtMFYI!>*s-4A7L=b%9A{sC*66Ox 
zW|-@DL_$J}h0j!!o-U$I+_pp|-3*r#q+PPfq1(jt0Sp>z@JdL(?s)=kM?&I)qbhbY zsEo$oI^O;M%tof*sgWPG(8yy3o`h7DP;`+jB)4`^su^%c&`3>>na817dn>v%55O;* zAk{hAYTt;`T*c(VtOD>qNF4RQ$pRvWKg2k=Qsl1y34~D5uTSj#CsNe0LX)^6~hn zT=`cFp75@pEvn27)RKMTcgrvQhs+-PZZ)uUZe}|)=6`VEXYMy5$dAzdJCNd7sGqZC3$#y8`^$&>> zX274XAfxfY6wHQgOk7}rA^PRHOC4YzKlQ+8#C-z5)t@nYy<%Y5naWm{vZZHI>g3Qe z>k5bTdXt?40?j11`ipsUI5Rj;AW0fJXTJ`)9Epjk9Eqt6hm27MEw93+gbKb&7P|dV zO`fTbhiJmtCw09VE}GH)y=XpY9lCHkUfTUiLPL3@BC?H6q4pHlKQT)qQbTx>2tw|u zftiT>3Ou0d>ntkj1*%m({tw9**xttKvX9+|R-f^M8zU{)=1NeEviRM%`i$A*vJjiu z+cOg2_t=t1H9u;(-OfHWy}2|XqVfGy`d@BaI z{-KzM;&=KC>1kvI3i#(A@;_$@h~4oV(&z9yMnXb*E&hk71tTGMzrK>RQ)@v5_Dg`ufZviPSX%1&>B?v&`<+Pgu47RqDZjZR`I_<_;2tLBUS2mlH#ZK3hD8pBMcE7? zE{0~O^GhGg!Gvj6^}u3o3-OWINo~ovJ7G6tQL~=Py<5wqr8Yeys}YI+g8;c#tgeXb zUFwko4WGSlKzfNpy*97Qo4+@=pKTIYXcDL?D^sp1^Vtl{k`}7^?@>F3bN>xf-KNc6W!Fa|*OeI{8D1d27rki`TN*e*RIUS}^Wt z>*C43`W0|&crRQ2;N$}5fnJSZtY*Hmv*>YZ@rpOi^jnSH&?Ez`Nsk&Cqqc2qsEq7n z9W}3cU6SF1Ca)LM)`4HFv`n%^;A|FMpj!&tG!93%W<9r6V%3+f#Et-k-DAJlx8=uG z;>9QCP1%malZ{T+e>qcmG*+aJxzgR*Hdn1C3s^hClLQcP$w;BT}X=w$Mm+Z%xTLvOmRww&?h!p7Y38yLZ8p60diT$X}+62y(V7n-P9fWSb zuNGAtMPY1Y1hqh@?Y4Et4>rUHmAvAxK4SaF-e`R*&4b!1nD?5w#xnY)1J3l`h3sIPwc+dzEWS7j zpCpA>hxfXjg9Mfc7U}J{vYc{iRlRkB0q2_D+u4_$JU)TN%|?PV*9Qh0T#pb?;_6x| zxR(%w@ZAY~Erj>_l+(5>%k2Wzw;o5_a2x8t`|VE7WmL9^*`5iRvdYn)h6SkKkrTb@ zC{e<}2X`uYajZXf%>awV6L8@F&K42Oc64^kl584>&(<+&kxEXSUNrR=A8%F2h*)Ya zL@^?(bWS35g%-Qj6W?;W9c>hA)g~r^ryx}+7dZ&e2>K~vJrBAp*cbG=GyWQ?OYyo`5ss3_VGD*ZV_mbtXwQTA6Jy zd#YnjpXy=ivEqzLKi5xNKz!y^ARGx%H3^Q-h8J#r*$?pTP@Q1iFOJy1Ki*-d!D8z} zu`XPAJvPKjY+b+6y*{us z4ptt$GOq2iidT{HUNXtFdy@^SK&SQgV*;W;ra`rP7vG99sA=_2eL5c|o@(-t1)X9{%$!Bf5wnAB<&)?;)41Iew<|Ie(j}@j>7L}M2>34Yp7#VrO%BV9;4+se zC*-d>V?i1`S5fWcR+T1?QslWOHougZmSvWeD5_m)mJlXd-A=>|o{Em=1!5f%&^0(| z)={ecFlCkmi#Rr5=-FmuEfI(v0*~W;Be!E+Ut*dVDye-ak;j?f!D0SDZ;<^^LV8pW zNIV_Hl>lG9Qk2mMEB?sC_8C6sNTYm0GtC}y6;_`h@2RC4v)A(F4 zPW?Se;W38>;0=uSn}ZFL!x9Y#?Zd&wNyU#L1Qh%gP}dQu;N!TUB1yM0-5Q6D+5Qe1 
z%yrtV6VBi#-%DO*@MgdtJ}mnQoGZ@C+ISC+g4j;cppHxfp$uJHNAFU6VvEU%g|G~`=rPM9as(*y&Vi++ENO&a$J#4ne8d41GsHj$DnvW2UN78N5gd-+ue zbL^3Y^v#JpEUIKDP3&eT-Ly=1aaXUjl&EtFRZJc1tN2K1u2#mnoRw%@>9Ag-)=0^! z+W~N>65{9(14=pB8giZ^)5VrmWE_IW0=A3Gbs^c^#Vt`j+iVVz|Ijzq+H9vi(@cX{ ztCpS}yyeiexEf={&oHFP*s$ULJ^k^Kl!tq)<`fd@4%-P50%>_(L#KNl-HA0 z+K)U(%AGBC1tD&nBE}b)okXFDO{ao;`FI4k%v$`*My6GlKFvp~?*_?E$7T9yZvnei zcFPwG+Q@TzzTKup;19^gjeZf9?8zV1OQhs}<(rEu>1m#b8PvGM82ipddp2j($s}<= za&t*%5sNl4yZqID&r&dZ$kIRPlY!uZM4V!V=RAOXBMDv+Yi_)pKZBX}SJpVxY z2tL|0A5|)uTqY3>Bc7`?SFy)&P|RXYjE>b*-u)r>HuHR;{w-!%X?srG^VwQI(?l6{kK>ZP3$Q+O^AzCBPCPjUZzLBo znE2u`)HHD*UmCZw7kyzQ*6Z02Ys%P(mD4$gf%NFJ?q2O$1WJiaC|+;>p852;j61iM zlkLT-Iy~^NZ~IxfM*pu*@c-Gp70?~OpVh5i_Hmkni;GXq(xT2RW~4!)<{?s{G;p;4 z(a1*&%#e&O=6BDP?&wtCztL$ptpP$Y?~5R#R;`oo;>|&B6AIGAoeLlS-nTR$yHrq- zM$7&*90iEg<);`iBO50B0<#gZ2#hRw+Ht=|j%Znx649H4#TEw|k0%e1VAOZd>3!Vl zejvB4`bl%()kofs#Vby?7+ermibluP_O1SSq|Y)@z{58e{e&3&N|C}p(@DbMq^m|q zr%1!*rF=@oA!+@~gIsRp-0*#=noE}H&nt;7RJvpCJmu{C^EuyDA`RTMlO;U@Sx&xz zB_9Y0YaN3V^==&$s(GSm0g;w_s6MDwlHhxk?rGzv~s}vT<7f6k#!$Pyr zN@9W*!bAxCi3kc~J7>dQ@tYjR?~|?3WkJ4E0WUGX)4>Y)bLE|{YM=t*$mzMfrltuFev!U8<`6GHijVw!)&De8So2^o7;`?4a>x1fhe|5@$d?j?;mO z+|(~{x8RSL$wDewZ$|2DD|z_bSftW43ntQgQ7Mp-%)bGeR>fi5vKWcaGcgsPA1L{*R_Z=pk5kU7ucPZ%>U!a{-r#U1D<447=)Na`FF~eFg%5S|*TatjGp@5B*BEU9R7%jwSX9z3V@IDVlbo(R76 zyC787atv<4HhaNH#YoC#_sodKJtXshyG4=NeQ2+5mHYH~UDdSa4Z9qn+1fMHggBux z&!4p0^5;KyG1kpj&u)SggqX~p7pBOBDZofDcI!9gq%0%HjHdhgeLiIj3mxXJnw08W zeb7V9`oF48Y?RqTrdz!pH?q`4(q-7ppWNCH%McCQnW-$OeuVUSO9kY~IDfG!Re#<5 zqMw1f_kuLVU@~AaAi^BW9qDtZSr**|AixJoFX?vpAervHm3h&^3`oB^?tJNcz5Fb( zn6@>Cn9<%fd{|L>w+|9iyYPe@eGpX#*UuC99Objq6NG-bPg zb=>|e%QL1(JTo?C4}-(3v|N*s*83bU`NuDj+Q%o^?< zncUo8ASQ_u0kymrgVYxoJ!9Xz6Bb^9t(SE8pJudq-Hr zd)39HpZH#qG+Nt}d7HqNeHeVO*svOZ!MDRQf`*9}zVD7tC4b-5 z_TrzMiiB-$uVoOX!cH@)n``I2ZW?b5=6-(|9`WZqJ#nxc%e9NBQvOavW;pF$ILz&U=hg#^G!(p`jrmEV7o+YyB(~ zLIp*<)@QL+jLhLYI0}u5p*yCiKFkxmIFcbL?0e#|y;&1%AxpAe8?sQp`nY6#PUF&O 
zpiPwjYNxy5l0+@>M3d!Dv=?^d^nBza8NQGGL5%1B*hcZV`7b0aukwwq0Er}f<#pt=s&-;&I!&RFpNhjn=13e}f^lf1lE%(44X zb1U%a%egOgr+NQsTe5Cd!kcfqC)X)0x9fUW|Ky_Er=lN^XUfL!o>g79(p~@AV&=?R~j!`T6hP`EI3K;1p0={86)cK~BzX=kN3X zf8?K(wPoXyS8o@W$5vFox|;I$(pzi0s`OQXOUiElVXy!Acx4*r?Z$TYbN>GWtNM@K zJIlPYRkyg-+HUWTOwXxzj%?fcDqiMhz>ljx949-=-i-Kh_1KBUKX&esw4a``^RJ>* zXwhtT%ei{n#FzEH|C;yZ>+$!u_x#*+`=L8{b9SH^9&27u3G_Gxqxe`L2UJtdxghk z&-wzDFvLvW{chK5u3{n6GSKKy!P&C6w^IFpbD0bcp^A{{2lcLh_DXj@ybtYvc^;(2 M)78&qol`;+0Fu7JivR!s diff --git a/nf_core/pipeline-template/docs/output.md b/nf_core/pipeline-template/docs/output.md index e27ff9959..edd48b83f 100644 --- a/nf_core/pipeline-template/docs/output.md +++ b/nf_core/pipeline-template/docs/output.md @@ -29,16 +29,7 @@ The pipeline is built using [Nextflow](https://www.nextflow.io/) and processes d [FastQC](http://www.bioinformatics.babraham.ac.uk/projects/fastqc/) gives general quality metrics about your sequenced reads. It provides information about the quality score distribution across your reads, per base sequence content (%A/T/G/C), adapter contamination and overrepresented sequences. For further reading and documentation see the [FastQC help pages](http://www.bioinformatics.babraham.ac.uk/projects/fastqc/Help/). -![MultiQC - FastQC sequence counts plot](images/mqc_fastqc_counts.png) - -![MultiQC - FastQC mean quality scores plot](images/mqc_fastqc_quality.png) - -![MultiQC - FastQC adapter content plot](images/mqc_fastqc_adapter.png) - -:::note -The FastQC plots {% if multiqc %}displayed in the MultiQC report{% endif %} shows _untrimmed_ reads. They may contain adapter sequence and potentially regions with low quality. 
-::: -{% if multiqc %} +{%- if multiqc %} ### MultiQC From 3507b15ee40383a7cccce3975e2d9e9a96266985 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 7 Aug 2024 11:56:01 +0200 Subject: [PATCH 445/737] add option to exclude codespaces from pipeline template --- .github/workflows/create-test-lint-wf-template.yml | 7 +++---- nf_core/pipeline-template/.github/CONTRIBUTING.md | 2 ++ nf_core/pipelines/create/create.py | 6 ++++++ nf_core/pipelines/create/custompipeline.py | 13 +++++++++++++ tests/data/pipeline_create_template_skip.yml | 1 + 5 files changed, 25 insertions(+), 4 deletions(-) diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index c4eca40fa..79c2ba6c8 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -44,6 +44,7 @@ jobs: - "template_skip_code_linters.yml" - "template_skip_citations.yml" - "template_skip_gitpod.yml" + - "template_skip_codespaces.yml" runner: # use the runner given by the input if it is dispatched manually, run on github if it is a rerun or on self-hosted by default - ${{ github.event.inputs.runners || github.run_number > 1 && 'ubuntu-latest' || 'self-hosted' }} @@ -117,11 +118,9 @@ jobs: run: | printf "org: my-prefix\nskip: gitpod" > create-test-lint-wf/template_skip_gitpod.yml - # Create a pipeline from the template - - name: create a pipeline from the template ${{ matrix.TEMPLATE }} + - name: Create template skip codespaces run: | - cd create-test-lint-wf - nf-core --log-file log.txt pipelines create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" --template-yaml ${{ matrix.TEMPLATE }} + printf "org: my-prefix\nskip: codespaces" > create-test-lint-wf/template_skip_codespaces.yml - name: run the pipeline run: | diff --git a/nf_core/pipeline-template/.github/CONTRIBUTING.md b/nf_core/pipeline-template/.github/CONTRIBUTING.md index e22a66425..3a9cd3bcb 100644 --- 
a/nf_core/pipeline-template/.github/CONTRIBUTING.md +++ b/nf_core/pipeline-template/.github/CONTRIBUTING.md @@ -116,6 +116,7 @@ If you are using a new feature from core Nextflow, you may bump the minimum requ ### Images and figures For overview images and other documents we follow the nf-core [style guidelines and examples](https://nf-co.re/developers/design_guidelines). +{%- if codespaces %} ## GitHub Codespaces @@ -131,3 +132,4 @@ To get started: Devcontainer specs: - [DevContainer config](.devcontainer/devcontainer.json) + {% endif %} diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 05a5804ee..225565dba 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -103,6 +103,7 @@ def __init__( ], "citations": ["assets/methods_description_template.yml"], "gitpod": [".gitpod.yml"], + "codespaces": [".devcontainer/devcontainer.json"], } # Get list of files we're skipping with the supplied skip keys self.skip_paths = set(sp for k in skip_paths for sp in skippable_paths[k]) @@ -214,6 +215,7 @@ def obtain_jinja_params_dict(self, features_to_skip, pipeline_dir): "code_linters": {"file": True, "content": True}, "citations": {"file": True, "content": True}, "gitpod": {"file": True, "content": True}, + "codespaces": {"file": True, "content": True}, } # Set the parameters for the jinja template @@ -509,6 +511,10 @@ def fix_linting(self): if not self.jinja_params["github_badges"] or not self.jinja_params["github"]: lint_config["readme"] = ["nextflow_badge"] + # Add codespaces specific configurations + if not self.jinja_params["codespaces"]: + lint_config["files_unchanged"].extend([".github/CONTRIBUTING.md"]) + # If the pipeline is not nf-core if not self.config.is_nfcore: lint_config["files_unchanged"].extend([".github/ISSUE_TEMPLATE/bug_report.yml"]) diff --git a/nf_core/pipelines/create/custompipeline.py b/nf_core/pipelines/create/custompipeline.py index 5f131a7d8..4c75ff279 100644 --- 
a/nf_core/pipelines/create/custompipeline.py +++ b/nf_core/pipelines/create/custompipeline.py @@ -65,6 +65,13 @@ This is useful to have all the tools ready for pipeline development. """ +markdown_codespaces = """ +The pipeline will include a devcontainer configuration. +The devcontainer will create a GitHub Codespaces for Nextflow development with nf-core/tools and Nextflow installed. + +Github Codespaces (https://github.com/features/codespaces) is an online developer environment that runs in your browser, complete with VSCode and a terminal. +""" + class CustomPipeline(Screen): """Select if the pipeline will use genomic data.""" @@ -122,6 +129,12 @@ def compose(self) -> ComposeResult: "Include the configuration required to use Gitpod.", "gitpod", ), + PipelineFeature( + markdown_codespaces, + "Include GitHub Codespaces", + "The pipeline will include a devcontainer configuration for GitHub Codespaces, providing a development environment with nf-core/tools and Nextflow installed.", + "codespaces", + ), classes="features-container", ) yield Center( diff --git a/tests/data/pipeline_create_template_skip.yml b/tests/data/pipeline_create_template_skip.yml index 1431114c0..29a336069 100644 --- a/tests/data/pipeline_create_template_skip.yml +++ b/tests/data/pipeline_create_template_skip.yml @@ -14,3 +14,4 @@ skip_features: - code_linters - citations - gitpod + - codespaces From 26769f2eb5568d56977023a88f142e0d28da7e0f Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 7 Aug 2024 12:44:24 +0200 Subject: [PATCH 446/737] add option to exclude changelog from custom pipeline template --- .github/workflows/create-test-lint-wf-template.yml | 5 +++++ nf_core/pipelines/create/create.py | 6 ++++++ nf_core/pipelines/create/custompipeline.py | 11 +++++++++++ tests/data/pipeline_create_template_skip.yml | 1 + 4 files changed, 23 insertions(+) diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index c4eca40fa..b4022d780 
100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -44,6 +44,7 @@ jobs: - "template_skip_code_linters.yml" - "template_skip_citations.yml" - "template_skip_gitpod.yml" + - "template_skip_changelog.yml" runner: # use the runner given by the input if it is dispatched manually, run on github if it is a rerun or on self-hosted by default - ${{ github.event.inputs.runners || github.run_number > 1 && 'ubuntu-latest' || 'self-hosted' }} @@ -117,6 +118,10 @@ jobs: run: | printf "org: my-prefix\nskip: gitpod" > create-test-lint-wf/template_skip_gitpod.yml + - name: Create template skip changelog + run: | + printf "org: my-prefix\nskip: changelog" > create-test-lint-wf/template_skip_changelog.yml + # Create a pipeline from the template - name: create a pipeline from the template ${{ matrix.TEMPLATE }} run: | diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 05a5804ee..27dc72943 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -103,6 +103,7 @@ def __init__( ], "citations": ["assets/methods_description_template.yml"], "gitpod": [".gitpod.yml"], + "changelog": ["CHANGELOG.md"], } # Get list of files we're skipping with the supplied skip keys self.skip_paths = set(sp for k in skip_paths for sp in skippable_paths[k]) @@ -214,6 +215,7 @@ def obtain_jinja_params_dict(self, features_to_skip, pipeline_dir): "code_linters": {"file": True, "content": True}, "citations": {"file": True, "content": True}, "gitpod": {"file": True, "content": True}, + "changelog": {"file": True, "content": False}, } # Set the parameters for the jinja template @@ -509,6 +511,10 @@ def fix_linting(self): if not self.jinja_params["github_badges"] or not self.jinja_params["github"]: lint_config["readme"] = ["nextflow_badge"] + # Add changelog specific configurations + if not self.jinja_params["changelog"]: + 
lint_config["files_exist"].extend(["CHANGELOG.md"]) + # If the pipeline is not nf-core if not self.config.is_nfcore: lint_config["files_unchanged"].extend([".github/ISSUE_TEMPLATE/bug_report.yml"]) diff --git a/nf_core/pipelines/create/custompipeline.py b/nf_core/pipelines/create/custompipeline.py index 5f131a7d8..c39c26507 100644 --- a/nf_core/pipelines/create/custompipeline.py +++ b/nf_core/pipelines/create/custompipeline.py @@ -65,6 +65,11 @@ This is useful to have all the tools ready for pipeline development. """ +markdown_changelog = """ +Having a `CHANGELOG.md` file in the pipeline root directory is useful to track the changes added to each version. +You can read more information on the recommended format here: https://keepachangelog.com/en/1.0.0/ +""" + class CustomPipeline(Screen): """Select if the pipeline will use genomic data.""" @@ -122,6 +127,12 @@ def compose(self) -> ComposeResult: "Include the configuration required to use Gitpod.", "gitpod", ), + PipelineFeature( + markdown_changelog, + "Add a changelog", + "Add a CHANGELOG.md file.", + "changelog", + ), classes="features-container", ) yield Center( diff --git a/tests/data/pipeline_create_template_skip.yml b/tests/data/pipeline_create_template_skip.yml index 1431114c0..ed0b5ef2a 100644 --- a/tests/data/pipeline_create_template_skip.yml +++ b/tests/data/pipeline_create_template_skip.yml @@ -14,3 +14,4 @@ skip_features: - code_linters - citations - gitpod + - changelog From 4d55d7a3382e85efc8be610a8ed3211969626206 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Wed, 7 Aug 2024 11:16:38 +0000 Subject: [PATCH 447/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 765507c4e..52654b5de 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,6 +12,7 @@ - add option to exclude code linters for custom pipeline template ([#3084](https://github.com/nf-core/tools/pull/3084)) - add option to exclude citations for custom pipeline 
template ([#3101](https://github.com/nf-core/tools/pull/3101)) - add option to exclude gitpod for custom pipeline template ([#3100](https://github.com/nf-core/tools/pull/3100)) +- add option to exclude changelog from custom pipeline template ([#3104](https://github.com/nf-core/tools/pull/3104)) ### Linting From c0bbca287e39709b35457aa1970f9c745c206c0a Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Wed, 7 Aug 2024 11:17:25 +0000 Subject: [PATCH 448/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 765507c4e..8e3dfdda5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,6 +12,7 @@ - add option to exclude code linters for custom pipeline template ([#3084](https://github.com/nf-core/tools/pull/3084)) - add option to exclude citations for custom pipeline template ([#3101](https://github.com/nf-core/tools/pull/3101)) - add option to exclude gitpod for custom pipeline template ([#3100](https://github.com/nf-core/tools/pull/3100)) +- add option to exclude codespaces from pipeline template ([#3105](https://github.com/nf-core/tools/pull/3105)) ### Linting From 4cf16be72aa114b5062a82abc0fe0dafcc4f1b61 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Wed, 7 Aug 2024 15:09:55 +0200 Subject: [PATCH 449/737] Update nf_core/pipelines/create/create.py MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Matthias Hörtenhuber --- nf_core/pipelines/create/create.py | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 908dd962d..d9051f416 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -520,14 +520,8 @@ def fix_linting(self): # Add multiqc specific configurations if not self.jinja_params["multiqc"]: - try: - lint_config["files_unchanged"].extend([".github/CONTRIBUTING.md", 
"assets/sendmail_template.txt"]) - except KeyError: - lint_config["files_unchanged"] = [".github/CONTRIBUTING.md", "assets/sendmail_template.txt"] - try: - lint_config["files_exist"].extend(["assets/multiqc_config.yml"]) - except KeyError: - lint_config["files_exist"] = ["assets/multiqc_config.yml"] + lint_config.setdefault("files_unchanged", []).extend([".github/CONTRIBUTING.md", "assets/sendmail_template.txt"]) +lint_config.setdefault("files_exist", []).extend(["assets/multiqc_config.yml"]) lint_config["multiqc_config"] = False # If the pipeline is not nf-core From f8d54635e853ecdd948a79d134bdea62c2dbb07a Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 7 Aug 2024 15:38:59 +0200 Subject: [PATCH 450/737] fix indentation --- nf_core/pipelines/create/create.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index d9051f416..684b13f83 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -520,8 +520,10 @@ def fix_linting(self): # Add multiqc specific configurations if not self.jinja_params["multiqc"]: - lint_config.setdefault("files_unchanged", []).extend([".github/CONTRIBUTING.md", "assets/sendmail_template.txt"]) -lint_config.setdefault("files_exist", []).extend(["assets/multiqc_config.yml"]) + lint_config.setdefault("files_unchanged", []).extend( + [".github/CONTRIBUTING.md", "assets/sendmail_template.txt"] + ) + lint_config.setdefault("files_exist", []).extend(["assets/multiqc_config.yml"]) lint_config["multiqc_config"] = False # If the pipeline is not nf-core From 8b196cd9a2601e9f05270bbd4bac629cc0b8c823 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 7 Aug 2024 16:12:04 +0200 Subject: [PATCH 451/737] update textual snapshots --- tests/__snapshots__/test_create_app.ambr | 512 +++++++++++------------ 1 file changed, 256 insertions(+), 256 deletions(-) diff --git a/tests/__snapshots__/test_create_app.ambr 
b/tests/__snapshots__/test_create_app.ambr index 9aad37cc9..231c038c6 100644 --- a/tests/__snapshots__/test_create_app.ambr +++ b/tests/__snapshots__/test_create_app.ambr @@ -851,257 +851,257 @@ font-weight: 700; } - .terminal-2285198722-matrix { + .terminal-1308777655-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2285198722-title { + .terminal-1308777655-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2285198722-r1 { fill: #c5c8c6 } - .terminal-2285198722-r2 { fill: #e3e3e3 } - .terminal-2285198722-r3 { fill: #989898 } - .terminal-2285198722-r4 { fill: #e1e1e1 } - .terminal-2285198722-r5 { fill: #4ebf71;font-weight: bold } - .terminal-2285198722-r6 { fill: #1e1e1e } - .terminal-2285198722-r7 { fill: #0178d4 } - .terminal-2285198722-r8 { fill: #454a50 } - .terminal-2285198722-r9 { fill: #e2e2e2 } - .terminal-2285198722-r10 { fill: #808080 } - .terminal-2285198722-r11 { fill: #e2e3e3;font-weight: bold } - .terminal-2285198722-r12 { fill: #000000 } - .terminal-2285198722-r13 { fill: #e4e4e4 } - .terminal-2285198722-r14 { fill: #14191f } - .terminal-2285198722-r15 { fill: #507bb3 } - .terminal-2285198722-r16 { fill: #dde6ed;font-weight: bold } - .terminal-2285198722-r17 { fill: #001541 } - .terminal-2285198722-r18 { fill: #7ae998 } - .terminal-2285198722-r19 { fill: #0a180e;font-weight: bold } - .terminal-2285198722-r20 { fill: #008139 } - .terminal-2285198722-r21 { fill: #fea62b;font-weight: bold } - .terminal-2285198722-r22 { fill: #a7a9ab } - .terminal-2285198722-r23 { fill: #e2e3e3 } + .terminal-1308777655-r1 { fill: #c5c8c6 } + .terminal-1308777655-r2 { fill: #e3e3e3 } + .terminal-1308777655-r3 { fill: #989898 } + .terminal-1308777655-r4 { fill: #e1e1e1 } + .terminal-1308777655-r5 { fill: #4ebf71;font-weight: bold } + .terminal-1308777655-r6 { fill: #1e1e1e } + .terminal-1308777655-r7 { fill: #0178d4 } + .terminal-1308777655-r8 { fill: #454a50 } + 
.terminal-1308777655-r9 { fill: #e2e2e2 } + .terminal-1308777655-r10 { fill: #808080 } + .terminal-1308777655-r11 { fill: #e2e3e3;font-weight: bold } + .terminal-1308777655-r12 { fill: #000000 } + .terminal-1308777655-r13 { fill: #e4e4e4 } + .terminal-1308777655-r14 { fill: #14191f } + .terminal-1308777655-r15 { fill: #507bb3 } + .terminal-1308777655-r16 { fill: #dde6ed;font-weight: bold } + .terminal-1308777655-r17 { fill: #001541 } + .terminal-1308777655-r18 { fill: #7ae998 } + .terminal-1308777655-r19 { fill: #0a180e;font-weight: bold } + .terminal-1308777655-r20 { fill: #008139 } + .terminal-1308777655-r21 { fill: #fea62b;font-weight: bold } + .terminal-1308777655-r22 { fill: #a7a9ab } + .terminal-1308777655-r23 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Template features - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use reference The pipeline will  Hide help  - ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - use a copy of the  - most common  - reference genome  - files from  - iGenomes - - - Nf-core pipelines are configured to use a copy of the most common  - reference genome files. - - By selecting this option, your pipeline will include a configuration - file specifying the paths to these files. - - The required code to use these files will also be included in the  - template. When the pipeline user provides an appropriate genome key,▁▁ - the pipeline will automatically download the required reference ▂▂ - files. 
- - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github CI The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        testsinclude several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - GitHub actions for - Continuous  - Integration (CI)  - testing - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github badgesThe README.md file Show help  - ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - will include  - GitHub badges - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Continue  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode  q Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Template features + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use reference The pipeline will  Hide help  + ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + use a copy of the  + most common  + reference genome  + files from  + iGenomes + + + Nf-core pipelines are configured to use a copy of the most common  + reference genome files. + + By selecting this option, your pipeline will include a configuration + file specifying the paths to these files. + + The required code to use these files will also be included in the  + template. When the pipeline user provides an appropriate genome key, + the pipeline will automatically download the required reference ▂▂ + files. 
+ + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github CI The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        testsinclude several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + GitHub actions for + Continuous  + Integration (CI)  + testing + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github badgesThe README.md file Show help  + ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + will include  + GitHub badges + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Back  Continue  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  d Toggle dark mode  q Quit  @@ -2233,255 +2233,255 @@ font-weight: 700; } - .terminal-3802907671-matrix { + .terminal-557041160-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3802907671-title { + .terminal-557041160-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3802907671-r1 { fill: #c5c8c6 } - .terminal-3802907671-r2 { fill: #e3e3e3 } - .terminal-3802907671-r3 { fill: #989898 } - .terminal-3802907671-r4 { fill: #e1e1e1 } - .terminal-3802907671-r5 { fill: #4ebf71;font-weight: bold } - .terminal-3802907671-r6 { fill: #1e1e1e } - .terminal-3802907671-r7 { fill: #507bb3 } - .terminal-3802907671-r8 { fill: #e2e2e2 } - .terminal-3802907671-r9 { fill: #808080 } - .terminal-3802907671-r10 { fill: #dde6ed;font-weight: bold } - .terminal-3802907671-r11 { fill: #001541 } - .terminal-3802907671-r12 { fill: #14191f } - .terminal-3802907671-r13 { fill: #454a50 } - .terminal-3802907671-r14 { fill: #7ae998 } - .terminal-3802907671-r15 { fill: #e2e3e3;font-weight: bold } - .terminal-3802907671-r16 { fill: #0a180e;font-weight: bold } - .terminal-3802907671-r17 { fill: #000000 } - .terminal-3802907671-r18 { fill: #008139 } - .terminal-3802907671-r19 { fill: #fea62b;font-weight: bold } - .terminal-3802907671-r20 { fill: #a7a9ab } - .terminal-3802907671-r21 { fill: #e2e3e3 } + .terminal-557041160-r1 { fill: #c5c8c6 } + .terminal-557041160-r2 { fill: #e3e3e3 } + .terminal-557041160-r3 
{ fill: #989898 } + .terminal-557041160-r4 { fill: #e1e1e1 } + .terminal-557041160-r5 { fill: #4ebf71;font-weight: bold } + .terminal-557041160-r6 { fill: #1e1e1e } + .terminal-557041160-r7 { fill: #507bb3 } + .terminal-557041160-r8 { fill: #e2e2e2 } + .terminal-557041160-r9 { fill: #808080 } + .terminal-557041160-r10 { fill: #dde6ed;font-weight: bold } + .terminal-557041160-r11 { fill: #001541 } + .terminal-557041160-r12 { fill: #14191f } + .terminal-557041160-r13 { fill: #454a50 } + .terminal-557041160-r14 { fill: #7ae998 } + .terminal-557041160-r15 { fill: #e2e3e3;font-weight: bold } + .terminal-557041160-r16 { fill: #0a180e;font-weight: bold } + .terminal-557041160-r17 { fill: #000000 } + .terminal-557041160-r18 { fill: #008139 } + .terminal-557041160-r19 { fill: #fea62b;font-weight: bold } + .terminal-557041160-r20 { fill: #a7a9ab } + .terminal-557041160-r21 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Template features - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use reference The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - use a copy of the  - most common  - reference genome  - files from  - iGenomes - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github CI The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        testsinclude several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - GitHub actions for - Continuous  - Integration (CI)  - testing - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github badgesThe README.md file Show help  - ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - will include  - GitHub badges▃▃ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add configuration         The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        filesinclude ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - configuration  - profiles  - 
containing custom  - parameters  - requried to run  - nf-core pipelines  - at different  - institutions - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use code lintersThe pipeline will  Show help  - ▁▁▁▁▁▁▁▁include code ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Continue  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode  q Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Template features + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use reference The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + use a copy of the  + most common  + reference genome  + files from  + iGenomes + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github CI The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        testsinclude several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + GitHub actions for + Continuous  + Integration (CI)  + testing + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github badgesThe README.md file Show help  + ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + will include ▅▅ + GitHub badges + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add configuration         The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        filesinclude ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + configuration  + profiles  + containing custom  + parameters  + requried to run  + nf-core pipelines  + at different  + institutions + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use code lintersThe pipeline will  Show help  + ▁▁▁▁▁▁▁▁include code ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Back  Continue  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  d Toggle dark mode  q Quit  From 1aec52881d806646565b336ce663bcf8366cad9f Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 7 Aug 2024 16:31:46 +0200 Subject: [PATCH 452/737] update textual snapshots --- tests/__snapshots__/test_create_app.ambr | 512 +++++++++++------------ 1 file changed, 256 insertions(+), 256 deletions(-) diff --git a/tests/__snapshots__/test_create_app.ambr b/tests/__snapshots__/test_create_app.ambr index 
231c038c6..f2c1c45c4 100644 --- a/tests/__snapshots__/test_create_app.ambr +++ b/tests/__snapshots__/test_create_app.ambr @@ -851,257 +851,257 @@ font-weight: 700; } - .terminal-1308777655-matrix { + .terminal-463758155-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1308777655-title { + .terminal-463758155-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1308777655-r1 { fill: #c5c8c6 } - .terminal-1308777655-r2 { fill: #e3e3e3 } - .terminal-1308777655-r3 { fill: #989898 } - .terminal-1308777655-r4 { fill: #e1e1e1 } - .terminal-1308777655-r5 { fill: #4ebf71;font-weight: bold } - .terminal-1308777655-r6 { fill: #1e1e1e } - .terminal-1308777655-r7 { fill: #0178d4 } - .terminal-1308777655-r8 { fill: #454a50 } - .terminal-1308777655-r9 { fill: #e2e2e2 } - .terminal-1308777655-r10 { fill: #808080 } - .terminal-1308777655-r11 { fill: #e2e3e3;font-weight: bold } - .terminal-1308777655-r12 { fill: #000000 } - .terminal-1308777655-r13 { fill: #e4e4e4 } - .terminal-1308777655-r14 { fill: #14191f } - .terminal-1308777655-r15 { fill: #507bb3 } - .terminal-1308777655-r16 { fill: #dde6ed;font-weight: bold } - .terminal-1308777655-r17 { fill: #001541 } - .terminal-1308777655-r18 { fill: #7ae998 } - .terminal-1308777655-r19 { fill: #0a180e;font-weight: bold } - .terminal-1308777655-r20 { fill: #008139 } - .terminal-1308777655-r21 { fill: #fea62b;font-weight: bold } - .terminal-1308777655-r22 { fill: #a7a9ab } - .terminal-1308777655-r23 { fill: #e2e3e3 } + .terminal-463758155-r1 { fill: #c5c8c6 } + .terminal-463758155-r2 { fill: #e3e3e3 } + .terminal-463758155-r3 { fill: #989898 } + .terminal-463758155-r4 { fill: #e1e1e1 } + .terminal-463758155-r5 { fill: #4ebf71;font-weight: bold } + .terminal-463758155-r6 { fill: #1e1e1e } + .terminal-463758155-r7 { fill: #0178d4 } + .terminal-463758155-r8 { fill: #454a50 } + .terminal-463758155-r9 { fill: #e2e2e2 } + 
.terminal-463758155-r10 { fill: #808080 } + .terminal-463758155-r11 { fill: #e2e3e3;font-weight: bold } + .terminal-463758155-r12 { fill: #000000 } + .terminal-463758155-r13 { fill: #e4e4e4 } + .terminal-463758155-r14 { fill: #14191f } + .terminal-463758155-r15 { fill: #507bb3 } + .terminal-463758155-r16 { fill: #dde6ed;font-weight: bold } + .terminal-463758155-r17 { fill: #001541 } + .terminal-463758155-r18 { fill: #7ae998 } + .terminal-463758155-r19 { fill: #0a180e;font-weight: bold } + .terminal-463758155-r20 { fill: #008139 } + .terminal-463758155-r21 { fill: #fea62b;font-weight: bold } + .terminal-463758155-r22 { fill: #a7a9ab } + .terminal-463758155-r23 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Template features - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use reference The pipeline will  Hide help  - ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - use a copy of the  - most common  - reference genome  - files from  - iGenomes - - - Nf-core pipelines are configured to use a copy of the most common  - reference genome files. - - By selecting this option, your pipeline will include a configuration - file specifying the paths to these files. - - The required code to use these files will also be included in the  - template. When the pipeline user provides an appropriate genome key, - the pipeline will automatically download the required reference ▂▂ - files. 
- - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github CI The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        testsinclude several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - GitHub actions for - Continuous  - Integration (CI)  - testing - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github badgesThe README.md file Show help  - ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - will include  - GitHub badges - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Continue  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode  q Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Template features + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use reference The pipeline will  Hide help  + ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + use a copy of the  + most common  + reference genome  + files from  + iGenomes + + + Nf-core pipelines are configured to use a copy of the most common  + reference genome files. + + By selecting this option, your pipeline will include a configuration + file specifying the paths to these files.▂▂ + + The required code to use these files will also be included in the  + template. When the pipeline user provides an appropriate genome key, + the pipeline will automatically download the required reference ▂▂ + files. 
+ + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github CI The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        testsinclude several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + GitHub actions for + Continuous  + Integration (CI)  + testing + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github badgesThe README.md file Show help  + ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + will include  + GitHub badges + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Back  Continue  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  d Toggle dark mode  q Quit  @@ -2233,255 +2233,255 @@ font-weight: 700; } - .terminal-557041160-matrix { + .terminal-2213530577-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-557041160-title { + .terminal-2213530577-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-557041160-r1 { fill: #c5c8c6 } - .terminal-557041160-r2 { fill: #e3e3e3 } - .terminal-557041160-r3 { fill: #989898 } - .terminal-557041160-r4 { fill: #e1e1e1 } - .terminal-557041160-r5 { fill: #4ebf71;font-weight: bold } - .terminal-557041160-r6 { fill: #1e1e1e } - .terminal-557041160-r7 { fill: #507bb3 } - .terminal-557041160-r8 { fill: #e2e2e2 } - .terminal-557041160-r9 { fill: #808080 } - .terminal-557041160-r10 { fill: #dde6ed;font-weight: bold } - .terminal-557041160-r11 { fill: #001541 } - .terminal-557041160-r12 { fill: #14191f } - .terminal-557041160-r13 { fill: #454a50 } - .terminal-557041160-r14 { fill: #7ae998 } - .terminal-557041160-r15 { fill: #e2e3e3;font-weight: bold } - .terminal-557041160-r16 { fill: #0a180e;font-weight: bold } - .terminal-557041160-r17 { fill: #000000 } - .terminal-557041160-r18 { fill: #008139 } - .terminal-557041160-r19 { fill: #fea62b;font-weight: bold } - .terminal-557041160-r20 { fill: #a7a9ab } - .terminal-557041160-r21 { fill: #e2e3e3 } + .terminal-2213530577-r1 { fill: #c5c8c6 } + .terminal-2213530577-r2 { fill: #e3e3e3 } + .terminal-2213530577-r3 { fill: #989898 } 
+ .terminal-2213530577-r4 { fill: #e1e1e1 } + .terminal-2213530577-r5 { fill: #4ebf71;font-weight: bold } + .terminal-2213530577-r6 { fill: #1e1e1e } + .terminal-2213530577-r7 { fill: #507bb3 } + .terminal-2213530577-r8 { fill: #e2e2e2 } + .terminal-2213530577-r9 { fill: #808080 } + .terminal-2213530577-r10 { fill: #dde6ed;font-weight: bold } + .terminal-2213530577-r11 { fill: #001541 } + .terminal-2213530577-r12 { fill: #14191f } + .terminal-2213530577-r13 { fill: #454a50 } + .terminal-2213530577-r14 { fill: #7ae998 } + .terminal-2213530577-r15 { fill: #e2e3e3;font-weight: bold } + .terminal-2213530577-r16 { fill: #0a180e;font-weight: bold } + .terminal-2213530577-r17 { fill: #000000 } + .terminal-2213530577-r18 { fill: #008139 } + .terminal-2213530577-r19 { fill: #fea62b;font-weight: bold } + .terminal-2213530577-r20 { fill: #a7a9ab } + .terminal-2213530577-r21 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Template features - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use reference The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - use a copy of the  - most common  - reference genome  - files from  - iGenomes - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github CI The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        testsinclude several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - GitHub actions for - Continuous  - Integration (CI)  - testing - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github badgesThe README.md file Show help  - ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - will include ▅▅ - GitHub badges - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add configuration         The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        filesinclude ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - configuration  - profiles  - 
containing custom  - parameters  - requried to run  - nf-core pipelines  - at different  - institutions - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use code lintersThe pipeline will  Show help  - ▁▁▁▁▁▁▁▁include code ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Continue  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode  q Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Template features + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use reference The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + use a copy of the  + most common  + reference genome  + files from  + iGenomes + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github CI The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        testsinclude several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + GitHub actions for + Continuous  + Integration (CI)  + testing + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▆▆ +         Add Github badgesThe README.md file Show help  + ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + will include  + GitHub badges + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add configuration         The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        filesinclude ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + configuration  + profiles  + containing custom  + parameters  + requried to run  + nf-core pipelines  + at different  + institutions + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use code lintersThe pipeline will  Show help  + ▁▁▁▁▁▁▁▁include code ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Back  Continue  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  d Toggle dark mode  q Quit  From 19590c3b8aefea9943ba8380e6ece16e9ccf1c67 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 7 Aug 2024 16:52:04 +0200 Subject: [PATCH 453/737] don't remove creating a pipeline to test it :no_mouth: --- .github/workflows/create-test-lint-wf-template.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml 
index f35016197..dbe143fda 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -132,6 +132,12 @@ jobs: run: | printf "org: my-prefix\nskip: codespaces" > create-test-lint-wf/template_skip_codespaces.yml + # Create a pipeline from the template + - name: create a pipeline from the template ${{ matrix.TEMPLATE }} + run: | + cd create-test-lint-wf + nf-core --log-file log.txt pipelines create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" --template-yaml ${{ matrix.TEMPLATE }} + - name: run the pipeline run: | cd create-test-lint-wf From 8acb9c4856cddac922536682a2873946ef8aa56b Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Thu, 8 Aug 2024 13:32:08 +0200 Subject: [PATCH 454/737] handle template features with a yaml file --- .../create-test-lint-wf-template.yml | 78 +++---- .prettierignore | 1 + nf_core/pipelines/create/create.py | 200 ++++------------ nf_core/pipelines/create/templatefeatures.yml | 213 ++++++++++++++++++ tests/data/pipeline_create_template_skip.yml | 13 +- tests/test_create.py | 18 +- 6 files changed, 297 insertions(+), 226 deletions(-) create mode 100644 nf_core/pipelines/create/templatefeatures.yml diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index dbe143fda..d4ff3fc55 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -31,31 +31,41 @@ env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} jobs: + install-yq: + name: Install yq + runs-on: ubuntu-latest + steps: + - name: 🏗 Set up yq + uses: frenck/action-setup-yq@v1 + + prepare-matrix: + name: Retrieve all template features + runs-on: ubuntu-latest + outputs: + all_features: ${{ steps.create_matrix.outputs.matrix }} + steps: + - name: checkout + uses: actions/checkout@v3 + - name: Create Matrix + id: create_matrix + run: echo "matrix=$(yq 'keys' ${{ github.workspace 
}}/nf_core/pipelines/create/templatefeatures.yml)" >> $GITHUB_OUTPUT + RunTestWorkflow: runs-on: ${{ matrix.runner }} env: NXF_ANSI_LOG: false strategy: matrix: - TEMPLATE: - - "template_skip_github_badges.yml" - - "template_skip_igenomes.yml" - - "template_skip_ci.yml" - - "template_skip_code_linters.yml" - - "template_skip_citations.yml" - - "template_skip_gitpod.yml" - - "template_skip_codespaces.yml" - - "template_skip_multiqc.yml" - - "template_skip_changelog.yml" + TEMPLATE: ${{ fromJSON(needs.prepare-matrix.outputs.all_features) }} runner: # use the runner given by the input if it is dispatched manually, run on github if it is a rerun or on self-hosted by default - ${{ github.event.inputs.runners || github.run_number > 1 && 'ubuntu-latest' || 'self-hosted' }} profile: ["self_hosted_runner"] include: - - TEMPLATE: "template_skip_all.yml" + - TEMPLATE: all runner: ubuntu-latest profile: "docker" - - TEMPLATE: "template_skip_nf_core_configs.yml" + - TEMPLATE: nf_core_configs runner: ubuntu-latest profile: "docker" fail-fast: false @@ -90,53 +100,17 @@ jobs: run: | mkdir create-test-lint-wf export NXF_WORK=$(pwd) - printf "org: my-prefix\nskip: ['ci', 'github_badges', 'igenomes', 'nf_core_configs']" > create-test-lint-wf/template_skip_all.yml - - - name: Create template skip github_badges - run: | - printf "org: my-prefix\nskip: github_badges" > create-test-lint-wf/template_skip_github_badges.yml - - - name: Create template skip igenomes - run: | - printf "org: my-prefix\nskip: igenomes" > create-test-lint-wf/template_skip_igenomes.yml - - - name: Create template skip ci - run: | - printf "org: my-prefix\nskip: ci" > create-test-lint-wf/template_skip_ci.yml - - - name: Create template skip nf_core_configs - run: | - printf "org: my-prefix\nskip: nf_core_configs" > create-test-lint-wf/template_skip_nf_core_configs.yml - - - name: Create template skip code_linters - run: | - printf "org: my-prefix\nskip: code_linters" > 
create-test-lint-wf/template_skip_code_linters.yml - - - name: Create template skip citations - run: | - printf "org: my-prefix\nskip: citations" > create-test-lint-wf/template_skip_citations.yml - - - name: Create template skip gitpod - run: | - printf "org: my-prefix\nskip: gitpod" > create-test-lint-wf/template_skip_gitpod.yml - - - name: Create template skip multiqc - run: | - printf "org: my-prefix\nskip: multiqc" > create-test-lint-wf/template_skip_multiqc.yml - - - name: Create template skip changelog - run: | - printf "org: my-prefix\nskip: changelog" > create-test-lint-wf/template_skip_changelog.yml + printf "org: my-prefix\nskip: ${{ needs.prepare-matrix.outputs.all_features }}" > create-test-lint-wf/template_skip_all.yml - - name: Create template skip codespaces + - name: Create template skip {{ matrix.TEMPLATE }} run: | - printf "org: my-prefix\nskip: codespaces" > create-test-lint-wf/template_skip_codespaces.yml + printf "org: my-prefix\nskip: {{ matrix.TEMPLATE }}" > create-test-lint-wf/template_skip_{{ matrix.TEMPLATE }}.yml # Create a pipeline from the template - name: create a pipeline from the template ${{ matrix.TEMPLATE }} run: | cd create-test-lint-wf - nf-core --log-file log.txt pipelines create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" --template-yaml ${{ matrix.TEMPLATE }} + nf-core --log-file log.txt pipelines create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" --template-yaml template_skip_${{ matrix.TEMPLATE }}.yml - name: run the pipeline run: | diff --git a/.prettierignore b/.prettierignore index 059007ab0..9387ee950 100644 --- a/.prettierignore +++ b/.prettierignore @@ -7,6 +7,7 @@ nf_core/module-template/meta.yml nf_core/pipeline-template/nextflow_schema.json nf_core/pipeline-template/modules.json nf_core/pipeline-template/tower.yml +tests/data/pipeline_create_template_skip.yml # don't run on things handled by ruff *.py *.pyc diff --git a/nf_core/pipelines/create/create.py 
b/nf_core/pipelines/create/create.py index b04b28c47..db0833c68 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -76,43 +76,26 @@ def __init__( else: raise UserWarning("The template configuration was not provided.") - self.jinja_params, skip_paths = self.obtain_jinja_params_dict( + # Read features yaml file + features_yml_path = Path(nf_core.__file__).parent / "pipelines" / "create" / "templatefeatures.yml" + with open(features_yml_path) as fh: + self.template_features_yml = yaml.safe_load(fh) + + self.jinja_params, self.skip_areas = self.obtain_jinja_params_dict( self.config.skip_features or [], self.config.outdir ) - skippable_paths = { - "github": [ - ".github/", - ".gitignore", - ], - "ci": [".github/workflows/"], - "igenomes": ["conf/igenomes.config"], - "is_nfcore": [ - ".github/ISSUE_TEMPLATE/config", - "CODE_OF_CONDUCT.md", - ".github/workflows/awsfulltest.yml", - ".github/workflows/awstest.yml", - ".github/workflows/release-announcements.yml", - ], - "code_linters": [ - ".editorconfig", - ".pre-commit-config.yaml", - ".prettierignore", - ".prettierrc.yml", - ".github/workflows/fix-linting.yml", - ], - "citations": ["assets/methods_description_template.yml"], - "gitpod": [".gitpod.yml"], - "codespaces": [".devcontainer/devcontainer.json"], - "multiqc": [ - "assets/multiqc_config.yml", - "assets/methods_description_template.yml", - "modules/nf-core/multiqc/", - ], - "changelog": ["CHANGELOG.md"], - } + # format strings in features yaml + short_name = self.jinja_params["short_name"] + env = jinja2.Environment(loader=jinja2.PackageLoader("nf_core", "pipelines"), keep_trailing_newline=True) + features_template = env.get_template( + str(features_yml_path.relative_to(Path(nf_core.__file__).parent / "pipelines")) + ) + rendered_features = features_template.render({"short_name": short_name}) + self.template_features_yml = yaml.safe_load(rendered_features) + # Get list of files we're skipping with the supplied skip keys - 
self.skip_paths = set(sp for k in skip_paths for sp in skippable_paths[k]) + self.skip_paths = set(sp for k in self.skip_areas for sp in self.template_features_yml[k]["skippable_paths"]) # Set convenience variables self.name = self.config.name @@ -205,43 +188,27 @@ def obtain_jinja_params_dict(self, features_to_skip, pipeline_dir): Returns: jinja_params (dict): Dictionary of template areas to skip with values true/false. - skip_paths (list): List of template areas which contain paths to skip. + skip_areas (list): List of template areas which contain paths to skip. """ # Try reading config file _, config_yml = nf_core.utils.load_tools_config(pipeline_dir) - # Define the different template areas, and what actions to take for each - # if they are skipped - template_areas = { - "github": {"file": True, "content": False}, - "ci": {"file": True, "content": False}, - "github_badges": {"file": False, "content": True}, - "igenomes": {"file": True, "content": True}, - "nf_core_configs": {"file": False, "content": True}, - "code_linters": {"file": True, "content": True}, - "citations": {"file": True, "content": True}, - "gitpod": {"file": True, "content": True}, - "codespaces": {"file": True, "content": True}, - "multiqc": {"file": True, "content": True}, - "changelog": {"file": True, "content": False}, - } - # Set the parameters for the jinja template jinja_params = self.config.model_dump() # Add template areas to jinja params and create list of areas with paths to skip - skip_paths = [] - for t_area in template_areas: + skip_areas = [] + for t_area in self.template_features_yml.keys(): if t_area in features_to_skip: - if template_areas[t_area]["file"]: - skip_paths.append(t_area) + if self.template_features_yml[t_area]["skippable_paths"]: + skip_areas.append(t_area) jinja_params[t_area] = False else: jinja_params[t_area] = True # Add is_nfcore as an area to skip for non-nf-core pipelines, to skip all nf-core files - if not jinja_params["is_nfcore"]: - 
skip_paths.append("is_nfcore") + if not self.config.is_nfcore: + skip_areas.append("is_nfcore") # Set the last parameters based on the ones provided jinja_params["short_name"] = ( @@ -259,7 +226,7 @@ def obtain_jinja_params_dict(self, features_to_skip, pipeline_dir): and "nextflow_config" in config_yml["lint"] and "manifest.name" in config_yml["lint"]["nextflow_config"] ): - return jinja_params, skip_paths + return jinja_params, skip_areas # Check that the pipeline name matches the requirements if not re.match(r"^[a-z]+$", jinja_params["short_name"]): @@ -270,7 +237,7 @@ def obtain_jinja_params_dict(self, features_to_skip, pipeline_dir): "Your workflow name is not lowercase without punctuation. This may cause Nextflow errors.\nConsider changing the name to avoid special characters." ) - return jinja_params, skip_paths + return jinja_params, skip_areas def init_pipeline(self): """Creates the nf-core pipeline.""" @@ -438,109 +405,20 @@ def fix_linting(self): for a customized pipeline. """ # Create a lint config - short_name = self.jinja_params["short_name"] - if not self.config.is_nfcore: - lint_config = { - "files_exist": [ - "CODE_OF_CONDUCT.md", - f"assets/nf-core-{short_name}_logo_light.png", - f"docs/images/nf-core-{short_name}_logo_light.png", - f"docs/images/nf-core-{short_name}_logo_dark.png", - ".github/ISSUE_TEMPLATE/config.yml", - ".github/workflows/awstest.yml", - ".github/workflows/awsfulltest.yml", - ], - "files_unchanged": [ - "CODE_OF_CONDUCT.md", - f"assets/nf-core-{short_name}_logo_light.png", - f"docs/images/nf-core-{short_name}_logo_light.png", - f"docs/images/nf-core-{short_name}_logo_dark.png", - ], - "nextflow_config": [ - "manifest.name", - "manifest.homePage", - ], - "multiqc_config": ["report_comment"], - } - else: - lint_config = {} - - # Add GitHub hosting specific configurations - if not self.jinja_params["github"]: - lint_config["files_exist"].extend( - [ - ".github/ISSUE_TEMPLATE/bug_report.yml", - 
".github/ISSUE_TEMPLATE/feature_request.yml", - ".github/PULL_REQUEST_TEMPLATE.md", - ".github/CONTRIBUTING.md", - ".github/.dockstore.yml", - ".gitignore", - ] - ) - lint_config["files_unchanged"].extend( - [ - ".github/ISSUE_TEMPLATE/bug_report.yml", - ".github/ISSUE_TEMPLATE/config.yml", - ".github/ISSUE_TEMPLATE/feature_request.yml", - ".github/PULL_REQUEST_TEMPLATE.md", - ".github/workflows/branch.yml", - ".github/workflows/linting_comment.yml", - ".github/workflows/linting.yml", - ".github/CONTRIBUTING.md", - ".github/.dockstore.yml", - ] - ) - - # Add CI specific configurations - if not self.jinja_params["ci"]: - lint_config["files_exist"].extend( - [ - ".github/workflows/branch.yml", - ".github/workflows/ci.yml", - ".github/workflows/linting_comment.yml", - ".github/workflows/linting.yml", - ] - ) - - # Add custom config specific configurations - if not self.jinja_params["nf_core_configs"]: - lint_config["files_exist"].extend(["conf/igenomes.config"]) - lint_config["nextflow_config"].extend( - [ - "process.cpus", - "process.memory", - "process.time", - "custom_config", - ] - ) - - # Add igenomes specific configurations - if not self.jinja_params["igenomes"]: - lint_config["files_exist"].extend(["conf/igenomes.config"]) - - # Add github badges specific configurations - if not self.jinja_params["github_badges"] or not self.jinja_params["github"]: - lint_config["readme"] = ["nextflow_badge"] - - # Add codespaces specific configurations - if not self.jinja_params["codespaces"]: - lint_config["files_unchanged"].extend([".github/CONTRIBUTING.md"]) - - # Add multiqc specific configurations - if not self.jinja_params["multiqc"]: - lint_config.setdefault("files_unchanged", []).extend( - [".github/CONTRIBUTING.md", "assets/sendmail_template.txt"] - ) - lint_config.setdefault("files_exist", []).extend(["assets/multiqc_config.yml"]) - lint_config["multiqc_config"] = False - - # Add changelog specific configurations - if not self.jinja_params["changelog"]: - 
lint_config["files_exist"].extend(["CHANGELOG.md"]) - - # If the pipeline is not nf-core - if not self.config.is_nfcore: - lint_config["files_unchanged"].extend([".github/ISSUE_TEMPLATE/bug_report.yml"]) + lint_config = {} + for area in self.skip_areas: + try: + for lint_test in self.template_features_yml[area]["linting"]: + if not lint_config[lint_test]: + pass + if self.template_features_yml[area]["linting"][lint_test]: + lint_config.setdefault(lint_test, []).extend( + self.template_features_yml[area]["linting"][lint_test] + ) + else: + lint_config[lint_test] = False + except KeyError: + pass # Areas without linting # Add the lint content to the preexisting nf-core config config_fn, nf_core_yml = nf_core.utils.load_tools_config(self.outdir) diff --git a/nf_core/pipelines/create/templatefeatures.yml b/nf_core/pipelines/create/templatefeatures.yml new file mode 100644 index 000000000..843203c7f --- /dev/null +++ b/nf_core/pipelines/create/templatefeatures.yml @@ -0,0 +1,213 @@ +github: + skippable_paths: + - ".github/" + - ".gitignore" + short_description: "" + description: "" + help: "" + linting: + files_exist: + - ".github/ISSUE_TEMPLATE/bug_report.yml" + - ".github/ISSUE_TEMPLATE/feature_request.yml" + - ".github/PULL_REQUEST_TEMPLATE.md" + - ".github/CONTRIBUTING.md" + - ".github/.dockstore.yml" + - ".gitignore" + files_unchanged: + - ".github/ISSUE_TEMPLATE/bug_report.yml" + - ".github/ISSUE_TEMPLATE/config.yml" + - ".github/ISSUE_TEMPLATE/feature_request.yml" + - ".github/PULL_REQUEST_TEMPLATE.md" + - ".github/workflows/branch.yml" + - ".github/workflows/linting_comment.yml" + - ".github/workflows/linting.yml" + - ".github/CONTRIBUTING.md" + - ".github/.dockstore.yml" + readme: + - "nextflow_badge" +ci: + skippable_paths: + - ".github/workflows/" + short_description: "Add Github CI tests" + description: "The pipeline will include several GitHub actions for Continuous Integration (CI) testing" + help: | + Nf-core provides a set of Continuous Integration (CI) 
tests for Github. + When you open a pull request (PR) on your pipeline repository, these tests will run automatically. + + There are different types of tests: + * Linting tests check that your code is formatted correctly and that it adheres to nf-core standards + For code linting they will use [prettier](https://prettier.io/). + * Pipeline tests run your pipeline on a small dataset to check that it works + These tests are run with a small test dataset on GitHub and a larger test dataset on AWS + * Marking old issues as stale + linting: + files_exist: + - ".github/workflows/branch.yml" + - ".github/workflows/ci.yml" + - ".github/workflows/linting_comment.yml" + - ".github/workflows/linting.yml" +igenomes: + skippable_paths: + - "conf/igenomes.config" + short_description: "Use reference genomes" + description: "The pipeline will be configured to use a copy of the most common reference genome files from iGenomes" + help: | + Nf-core pipelines are configured to use a copy of the most common reference genome files. + + By selecting this option, your pipeline will include a configuration file specifying the paths to these files. + + The required code to use these files will also be included in the template. + When the pipeline user provides an appropriate genome key, + the pipeline will automatically download the required reference files. + + For more information about reference genomes in nf-core pipelines, + see the [nf-core docs](https://nf-co.re/docs/usage/reference_genomes). 
+ linting: + files_exist: + - "conf/igenomes.config" +github_badges: + skippable_paths: False + short_description: "Add Github badges" + description: "The README.md file of the pipeline will include GitHub badges" + help: | + The pipeline `README.md` will include badges for: + * AWS CI Tests + * Zenodo DOI + * Nextflow + * Conda + * Docker + * Singularity + * Launching on Nextflow Tower + linting: + readme: + - "nextflow_badge" +nf_core_configs: + skippable_paths: False + short_description: "Add configuration files" + description: "The pipeline will include configuration profiles containing custom parameters requried to run nf-core pipelines at different institutions" + help: | + Nf-core has a repository with a collection of configuration profiles. + + Those config files define a set of parameters which are specific to compute environments at different Institutions. + They can be used within all nf-core pipelines. + If you are likely to be running nf-core pipelines regularly it is a good idea to use or create a custom config file for your organisation. 
+ + For more information about nf-core configuration profiles, see the [nf-core/configs repository](https://github.com/nf-core/configs) + linting: + files_exist: + - "conf/igenomes.config" + nextflow_config: + - "process.cpus" + - "process.memory" + - "process.time" + - "custom_config" +is_nfcore: + skippable_paths: + - ".github/ISSUE_TEMPLATE/config" + - "CODE_OF_CONDUCT.md" + - ".github/workflows/awsfulltest.yml" + - ".github/workflows/awstest.yml" + - ".github/workflows/release-announcements.yml" + short_description: "" + description: "" + help: "" + linting: + files_exist: + - "CODE_OF_CONDUCT.md" + - "assets/nf-core-{{short_name}}_logo_light.png" + - "docs/images/nf-core-{{short_name}}_logo_light.png" + - "docs/images/nf-core-{{short_name}}_logo_dark.png" + - ".github/ISSUE_TEMPLATE/config.yml" + - ".github/workflows/awstest.yml" + - ".github/workflows/awsfulltest.yml" + files_unchanged: + - "CODE_OF_CONDUCT.md" + - "assets/nf-core-{{short_name}}_logo_light.png" + - "docs/images/nf-core-{{short_name}}_logo_light.png" + - "docs/images/nf-core-{{short_name}}_logo_dark.png" + - ".github/ISSUE_TEMPLATE/bug_report.yml" + nextflow_config: + - "manifest.name" + - "manifest.homePage" + multiqc_config: + - "report_comment" +code_linters: + skippable_paths: + - ".editorconfig" + - ".pre-commit-config.yaml" + - ".prettierignore" + - ".prettierrc.yml" + - ".github/workflows/fix-linting.yml" + short_description: "Use code linters" + description: "The pipeline will include code linters and CI tests to lint your code: pre-commit, editor-config and prettier." + help: | + Pipelines include code linters to check the formatting of your code in order to harmonize code styles between developers. + Linters will check all non-ignored files, e.g., JSON, YAML, Nextlow or Python files in your repository. 
+ The available code linters are: + + - pre-commit (https://pre-commit.com/): used to run all code-linters on every PR and on ever commit if you run `pre-commit install` to install it in your local repository. + - editor-config (https://github.com/editorconfig-checker/editorconfig-checker): checks rules such as indentation or trailing spaces. + - prettier (https://github.com/prettier/prettier): enforces a consistent style (indentation, quoting, line length, etc). +citations: + skippable_paths: + - "assets/methods_description_template.yml" + short_description: "Include citations" + description: "Include pipeline tools citations in CITATIONS.md and a method description in the MultiQC report (if enabled)." + help: | + If adding citations, the pipeline template will contain a `CITATIONS.md` file to add the citations of all tools used in the pipeline. + + Additionally, it will include a YAML file (`assets/methods_description_template.yml`) to add a Materials & Methods section describing the tools used in the pieline, + and the logics to add this section to the output MultiQC report (if the report is generated). +gitpod: + skippable_paths: + - ".gitpod.yml" + short_description: "Include a gitpod environment" + description: "Include the configuration required to use Gitpod." + help: | + Gitpod (https://www.gitpod.io/) provides standardized and automated development environments. + + Including this to your pipeline will provide an environment with the latest version of nf-core/tools installed and all its requirements. + This is useful to have all the tools ready for pipeline development. +codespaces: + skippable_paths: + - ".devcontainer/devcontainer.json" + short_description: "Include GitHub Codespaces" + description: "The pipeline will include a devcontainer configuration for GitHub Codespaces, providing a development environment with nf-core/tools and Nextflow installed." + help: | + The pipeline will include a devcontainer configuration. 
+ The devcontainer will create a GitHub Codespaces for Nextflow development with nf-core/tools and Nextflow installed. + + Github Codespaces (https://github.com/features/codespaces) is an online developer environment that runs in your browser, complete with VSCode and a terminal. + linting: + files_unchanged: + - ".github/CONTRIBUTING.md" +multiqc: + skippable_paths: + - "assets/multiqc_config.yml" + - "assets/methods_description_template.yml" + - "modules/nf-core/multiqc/" + short_description: "Use multiqc" + description: "The pipeline will include the MultiQC module which generates an HTML report for quality control." + help: | + MultiQC is a visualization tool that generates a single HTML report summarising all samples in your project. Most of the pipeline quality control results can be visualised in the report and further statistics are available in the report data directory. + + The pipeline will include the MultiQC module and will have special steps which also allow the software versions to be reported in the MultiQC output for future traceability. For more information about how to use MultiQC reports, see http://multiqc.info. + linting: + files_unchanged: + - ".github/CONTRIBUTING.md" + - "assets/sendmail_template.txt" + files_exist: + - "assets/multiqc_config.yml" + multiqc_config: False +changelog: + skippable_paths: + - "CHANGELOG.md" + short_description: "Add a changelog" + description: "Add a CHANGELOG.md file." + help: | + Having a `CHANGELOG.md` file in the pipeline root directory is useful to track the changes added to each version. 
+ + You can read more information on the recommended format here: https://keepachangelog.com/en/1.0.0/ + linting: + files_exist: + - "CHANGELOG.md" diff --git a/tests/data/pipeline_create_template_skip.yml b/tests/data/pipeline_create_template_skip.yml index 21cdc0c3d..3ab8f69ec 100644 --- a/tests/data/pipeline_create_template_skip.yml +++ b/tests/data/pipeline_create_template_skip.yml @@ -5,15 +5,4 @@ version: 1.0.0 force: True org: testprefix is_nfcore: False -skip_features: - - github - - ci - - github_badges - - igenomes - - nf_core_configs - - code_linters - - citations - - gitpod - - codespaces - - multiqc - - changelog +skip_features: {{ all_features }} diff --git a/tests/test_create.py b/tests/test_create.py index 920a0f475..92f7f2a9d 100644 --- a/tests/test_create.py +++ b/tests/test_create.py @@ -5,6 +5,7 @@ from pathlib import Path import git +import jinja2 import yaml import nf_core.pipelines.create.create @@ -98,9 +99,24 @@ def test_pipeline_creation_initiation_customize_template(self, tmp_path): @with_temporary_folder def test_pipeline_creation_with_yml_skip(self, tmp_path): + # Update pipeline_create_template_skip.yml file + features_yml_path = Path(nf_core.__file__).parent / "pipelines" / "create" / "templatefeatures.yml" + with open(features_yml_path) as fh: + template_features_yml = yaml.safe_load(fh) + all_features = list(template_features_yml.keys()) + all_features.remove("is_nfcore") + env = jinja2.Environment(loader=jinja2.PackageLoader("tests", "data"), keep_trailing_newline=True) + skip_template = env.get_template( + str(PIPELINE_TEMPLATE_YML_SKIP.relative_to(Path(nf_core.__file__).parent.parent / "tests" / "data")) + ) + rendered_content = skip_template.render({"all_features": all_features}) + rendered_yaml = Path(tmp_path) / "pipeline_create_template_skip.yml" + with open(rendered_yaml, "w") as fh: + fh.write(rendered_content) + pipeline = nf_core.pipelines.create.create.PipelineCreate( outdir=tmp_path, - 
template_config=PIPELINE_TEMPLATE_YML_SKIP, + template_config=rendered_yaml, default_branch=self.default_branch, ) pipeline.init_pipeline() From 637317a3422f39d3f98fe2aafcc4efc6d0a88a80 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Thu, 8 Aug 2024 14:23:01 +0000 Subject: [PATCH 455/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 609c3aea0..c1e07f3bf 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -15,6 +15,7 @@ - add option to exclude codespaces from pipeline template ([#3105](https://github.com/nf-core/tools/pull/3105)) - add option to exclude multiqc from pipeline template ([#3103](https://github.com/nf-core/tools/pull/3103)) - add option to exclude changelog from custom pipeline template ([#3104](https://github.com/nf-core/tools/pull/3104)) +- handle template features with a yaml file ([#3108](https://github.com/nf-core/tools/pull/3108)) ### Linting From efff9eada0641bc4ee42cf73f2c6943182f00886 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 9 Aug 2024 10:30:55 +0200 Subject: [PATCH 456/737] use template features yaml file to generate textual layout --- nf_core/pipelines/create/__init__.py | 8 + nf_core/pipelines/create/custompipeline.py | 142 ++---------------- nf_core/pipelines/create/nfcorepipeline.py | 25 ++- nf_core/pipelines/create/templatefeatures.yml | 24 +++ nf_core/pipelines/create/utils.py | 21 --- 5 files changed, 50 insertions(+), 170 deletions(-) diff --git a/nf_core/pipelines/create/__init__.py b/nf_core/pipelines/create/__init__.py index 56e25bf1d..ae766b4e2 100644 --- a/nf_core/pipelines/create/__init__.py +++ b/nf_core/pipelines/create/__init__.py @@ -1,10 +1,13 @@ """A Textual app to create a pipeline.""" import logging +from pathlib import Path +import yaml from textual.app import App from textual.widgets import Button +import nf_core from nf_core.pipelines.create import utils from nf_core.pipelines.create.basicdetails import BasicDetails from 
nf_core.pipelines.create.custompipeline import CustomPipeline @@ -66,6 +69,11 @@ class PipelineCreateApp(App[utils.CreateConfig]): # Logging state LOGGING_STATE = None + # Template features + features_yml_path = Path(nf_core.__file__).parent / "pipelines" / "create" / "templatefeatures.yml" + with open(features_yml_path) as fh: + template_features_yml = yaml.safe_load(fh) + def on_mount(self) -> None: self.push_screen("welcome") diff --git a/nf_core/pipelines/create/custompipeline.py b/nf_core/pipelines/create/custompipeline.py index 96b6df506..fdb7a46d2 100644 --- a/nf_core/pipelines/create/custompipeline.py +++ b/nf_core/pipelines/create/custompipeline.py @@ -6,76 +6,7 @@ from textual.screen import Screen from textual.widgets import Button, Footer, Header, Markdown, Switch -from nf_core.pipelines.create.utils import PipelineFeature, markdown_genomes, markdown_multiqc - -markdown_ci = """ -Nf-core provides a set of Continuous Integration (CI) tests for Github. -When you open a pull request (PR) on your pipeline repository, these tests will run automatically. - -There are different types of tests: -* Linting tests check that your code is formatted correctly and that it adheres to nf-core standards - For code linting they will use [prettier](https://prettier.io/). -* Pipeline tests run your pipeline on a small dataset to check that it works - These tests are run with a small test dataset on GitHub and a larger test dataset on AWS -* Marking old issues as stale -""" - -markdown_badges = """ -The pipeline `README.md` will include badges for: -* AWS CI Tests -* Zenodo DOI -* Nextflow -* Conda -* Docker -* Singularity -* Launching on Nextflow Tower -""" - -markdown_configuration = """ -Nf-core has a repository with a collection of configuration profiles. - -Those config files define a set of parameters which are specific to compute environments at different Institutions. -They can be used within all nf-core pipelines. 
-If you are likely to be running nf-core pipelines regularly it is a good idea to use or create a custom config file for your organisation. - -For more information about nf-core configuration profiles, see the [nf-core/configs repository](https://github.com/nf-core/configs) -""" - -markdown_code_linters = """ -Pipelines include code linters to check the formatting of your code in order to harmonize code styles between developers. -Linters will check all non-ignored files, e.g., JSON, YAML, Nextlow or Python files in your repository. -The available code linters are: - -- pre-commit (https://pre-commit.com/): used to run all code-linters on every PR and on ever commit if you run `pre-commit install` to install it in your local repository. -- editor-config (https://github.com/editorconfig-checker/editorconfig-checker): checks rules such as indentation or trailing spaces. -- prettier (https://github.com/prettier/prettier): enforces a consistent style (indentation, quoting, line length, etc). -""" - -markdown_citations = """ -If adding citations, the pipeline template will contain a `CITATIONS.md` file to add the citations of all tools used in the pipeline. - -Additionally, it will include a YAML file (`assets/methods_description_template.yml`) to add a Materials & Methods section describing the tools used in the pieline, -and the logics to add this section to the output MultiQC report (if the report is generated). -""" - -markdown_gitpod = """ -Gitpod (https://www.gitpod.io/) provides standardized and automated development environments. - -Including this to your pipeline will provide an environment with the latest version of nf-core/tools installed and all its requirements. -This is useful to have all the tools ready for pipeline development. -""" - -markdown_codespaces = """ -The pipeline will include a devcontainer configuration. -The devcontainer will create a GitHub Codespaces for Nextflow development with nf-core/tools and Nextflow installed. 
- -Github Codespaces (https://github.com/features/codespaces) is an online developer environment that runs in your browser, complete with VSCode and a terminal. -""" - -markdown_changelog = """ -Having a `CHANGELOG.md` file in the pipeline root directory is useful to track the changes added to each version. -You can read more information on the recommended format here: https://keepachangelog.com/en/1.0.0/ -""" +from nf_core.pipelines.create.utils import PipelineFeature class CustomPipeline(Screen): @@ -91,75 +22,20 @@ def compose(self) -> ComposeResult: """ ) ) - yield ScrollableContainer( - PipelineFeature( - markdown_genomes, - "Use reference genomes", - "The pipeline will be configured to use a copy of the most common reference genome files from iGenomes", - "igenomes", - ), - PipelineFeature( - markdown_ci, - "Add Github CI tests", - "The pipeline will include several GitHub actions for Continuous Integration (CI) testing", - "ci", - ), - PipelineFeature( - markdown_badges, - "Add Github badges", - "The README.md file of the pipeline will include GitHub badges", - "github_badges", - ), - PipelineFeature( - markdown_configuration, - "Add configuration files", - "The pipeline will include configuration profiles containing custom parameters requried to run nf-core pipelines at different institutions", - "nf_core_configs", - ), - PipelineFeature( - markdown_code_linters, - "Use code linters", - "The pipeline will include code linters and CI tests to lint your code: pre-commit, editor-config and prettier.", - "code_linters", - ), - PipelineFeature( - markdown_citations, - "Include citations", - "Include pipeline tools citations in CITATIONS.md and a method description in the MultiQC report (if enabled).", - "citations", - ), - PipelineFeature( - markdown_gitpod, - "Include a gitpod environment", - "Include the configuration required to use Gitpod.", - "gitpod", - ), - PipelineFeature( - markdown_codespaces, - "Include GitHub Codespaces", - "The pipeline will include 
a devcontainer configuration for GitHub Codespaces, providing a development environment with nf-core/tools and Nextflow installed.", - "codespaces", - ), - PipelineFeature( - markdown_multiqc, - "Use multiqc", - "The pipeline will include the MultiQC module which generates an HTML report for quality control.", - "multiqc", - ), - PipelineFeature( - markdown_changelog, - "Add a changelog", - "Add a CHANGELOG.md file.", - "changelog", - ), - classes="features-container", - ) + yield ScrollableContainer(id="features") yield Center( Button("Back", id="back", variant="default"), Button("Continue", id="continue", variant="success"), classes="cta", ) + def on_mount(self) -> None: + for name, feature in self.parent.template_features_yml.items(): + if feature["custom_pipelines"]: + self.query_one("#features").mount( + PipelineFeature(feature["help"], feature["short_description"], feature["description"], name) + ) + @on(Button.Pressed, "#continue") def on_button_pressed(self, event: Button.Pressed) -> None: """Save fields to the config.""" diff --git a/nf_core/pipelines/create/nfcorepipeline.py b/nf_core/pipelines/create/nfcorepipeline.py index 8319cb044..b6a823169 100644 --- a/nf_core/pipelines/create/nfcorepipeline.py +++ b/nf_core/pipelines/create/nfcorepipeline.py @@ -6,7 +6,7 @@ from textual.screen import Screen from textual.widgets import Button, Footer, Header, Markdown, Switch -from nf_core.pipelines.create.utils import PipelineFeature, markdown_genomes, markdown_multiqc +from nf_core.pipelines.create.utils import PipelineFeature class NfcorePipeline(Screen): @@ -22,27 +22,20 @@ def compose(self) -> ComposeResult: """ ) ) - yield ScrollableContainer( - PipelineFeature( - markdown_genomes, - "Use reference genomes", - "The pipeline will be configured to use a copy of the most common reference genome files from iGenomes", - "igenomes", - ), - PipelineFeature( - markdown_multiqc, - "Use multiqc", - "The pipeline will include the MultiQC module which generates an HTML 
report for quality control.", - "multiqc", - ), - classes="features-container", - ) + yield ScrollableContainer(id="features") yield Center( Button("Back", id="back", variant="default"), Button("Continue", id="continue", variant="success"), classes="cta", ) + def on_mount(self) -> None: + for name, feature in self.parent.template_features_yml.items(): + if feature["nfcore_pipelines"]: + self.query_one("#features").mount( + PipelineFeature(feature["help"], feature["short_description"], feature["description"], name) + ) + @on(Button.Pressed, "#continue") def on_button_pressed(self, event: Button.Pressed) -> None: """Save fields to the config.""" diff --git a/nf_core/pipelines/create/templatefeatures.yml b/nf_core/pipelines/create/templatefeatures.yml index 843203c7f..a91887a89 100644 --- a/nf_core/pipelines/create/templatefeatures.yml +++ b/nf_core/pipelines/create/templatefeatures.yml @@ -25,6 +25,8 @@ github: - ".github/.dockstore.yml" readme: - "nextflow_badge" + nfcore_pipelines: False + custom_pipelines: False ci: skippable_paths: - ".github/workflows/" @@ -46,6 +48,8 @@ ci: - ".github/workflows/ci.yml" - ".github/workflows/linting_comment.yml" - ".github/workflows/linting.yml" + nfcore_pipelines: False + custom_pipelines: True igenomes: skippable_paths: - "conf/igenomes.config" @@ -65,6 +69,8 @@ igenomes: linting: files_exist: - "conf/igenomes.config" + nfcore_pipelines: True + custom_pipelines: True github_badges: skippable_paths: False short_description: "Add Github badges" @@ -81,6 +87,8 @@ github_badges: linting: readme: - "nextflow_badge" + nfcore_pipelines: False + custom_pipelines: True nf_core_configs: skippable_paths: False short_description: "Add configuration files" @@ -101,6 +109,8 @@ nf_core_configs: - "process.memory" - "process.time" - "custom_config" + nfcore_pipelines: False + custom_pipelines: True is_nfcore: skippable_paths: - ".github/ISSUE_TEMPLATE/config" @@ -131,6 +141,8 @@ is_nfcore: - "manifest.homePage" multiqc_config: - 
"report_comment" + nfcore_pipelines: False + custom_pipelines: False code_linters: skippable_paths: - ".editorconfig" @@ -148,6 +160,8 @@ code_linters: - pre-commit (https://pre-commit.com/): used to run all code-linters on every PR and on ever commit if you run `pre-commit install` to install it in your local repository. - editor-config (https://github.com/editorconfig-checker/editorconfig-checker): checks rules such as indentation or trailing spaces. - prettier (https://github.com/prettier/prettier): enforces a consistent style (indentation, quoting, line length, etc). + nfcore_pipelines: False + custom_pipelines: True citations: skippable_paths: - "assets/methods_description_template.yml" @@ -158,6 +172,8 @@ citations: Additionally, it will include a YAML file (`assets/methods_description_template.yml`) to add a Materials & Methods section describing the tools used in the pieline, and the logics to add this section to the output MultiQC report (if the report is generated). + nfcore_pipelines: False + custom_pipelines: True gitpod: skippable_paths: - ".gitpod.yml" @@ -168,6 +184,8 @@ gitpod: Including this to your pipeline will provide an environment with the latest version of nf-core/tools installed and all its requirements. This is useful to have all the tools ready for pipeline development. 
+ nfcore_pipelines: False + custom_pipelines: True codespaces: skippable_paths: - ".devcontainer/devcontainer.json" @@ -181,6 +199,8 @@ codespaces: linting: files_unchanged: - ".github/CONTRIBUTING.md" + nfcore_pipelines: False + custom_pipelines: True multiqc: skippable_paths: - "assets/multiqc_config.yml" @@ -199,6 +219,8 @@ multiqc: files_exist: - "assets/multiqc_config.yml" multiqc_config: False + nfcore_pipelines: True + custom_pipelines: True changelog: skippable_paths: - "CHANGELOG.md" @@ -211,3 +233,5 @@ changelog: linting: files_exist: - "CHANGELOG.md" + nfcore_pipelines: False + custom_pipelines: True diff --git a/nf_core/pipelines/create/utils.py b/nf_core/pipelines/create/utils.py index 43094ab53..dba498d18 100644 --- a/nf_core/pipelines/create/utils.py +++ b/nf_core/pipelines/create/utils.py @@ -244,24 +244,3 @@ def add_hide_class(app, widget_id: str) -> None: def remove_hide_class(app, widget_id: str) -> None: """Remove class 'hide' to a widget. Display widget.""" app.get_widget_by_id(widget_id).remove_class("hide") - - -## Markdown text to reuse in different screens -markdown_genomes = """ -Nf-core pipelines are configured to use a copy of the most common reference genome files. - -By selecting this option, your pipeline will include a configuration file specifying the paths to these files. - -The required code to use these files will also be included in the template. -When the pipeline user provides an appropriate genome key, -the pipeline will automatically download the required reference files. - -For more information about reference genomes in nf-core pipelines, -see the [nf-core docs](https://nf-co.re/docs/usage/reference_genomes). -""" - -markdown_multiqc = """ -MultiQC is a visualization tool that generates a single HTML report summarising all samples in your project. Most of the pipeline quality control results can be visualised in the report and further statistics are available in the report data directory. 
- -The pipeline will include the MultiQC module and will have special steps which also allow the software versions to be reported in the MultiQC output for future traceability. For more information about how to use MultiQC reports, see http://multiqc.info. -""" From 2fa7a033ac1a4d99563e63cdaf76611c223b5da8 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 9 Aug 2024 11:26:29 +0200 Subject: [PATCH 457/737] update textual snapshots --- tests/__snapshots__/test_create_app.ambr | 764 +++++++++++------------ 1 file changed, 382 insertions(+), 382 deletions(-) diff --git a/tests/__snapshots__/test_create_app.ambr b/tests/__snapshots__/test_create_app.ambr index f2c1c45c4..f4eb25508 100644 --- a/tests/__snapshots__/test_create_app.ambr +++ b/tests/__snapshots__/test_create_app.ambr @@ -851,257 +851,257 @@ font-weight: 700; } - .terminal-463758155-matrix { + .terminal-3220763577-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-463758155-title { + .terminal-3220763577-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-463758155-r1 { fill: #c5c8c6 } - .terminal-463758155-r2 { fill: #e3e3e3 } - .terminal-463758155-r3 { fill: #989898 } - .terminal-463758155-r4 { fill: #e1e1e1 } - .terminal-463758155-r5 { fill: #4ebf71;font-weight: bold } - .terminal-463758155-r6 { fill: #1e1e1e } - .terminal-463758155-r7 { fill: #0178d4 } - .terminal-463758155-r8 { fill: #454a50 } - .terminal-463758155-r9 { fill: #e2e2e2 } - .terminal-463758155-r10 { fill: #808080 } - .terminal-463758155-r11 { fill: #e2e3e3;font-weight: bold } - .terminal-463758155-r12 { fill: #000000 } - .terminal-463758155-r13 { fill: #e4e4e4 } - .terminal-463758155-r14 { fill: #14191f } - .terminal-463758155-r15 { fill: #507bb3 } - .terminal-463758155-r16 { fill: #dde6ed;font-weight: bold } - .terminal-463758155-r17 { fill: #001541 } - .terminal-463758155-r18 { fill: #7ae998 } - .terminal-463758155-r19 { fill: 
#0a180e;font-weight: bold } - .terminal-463758155-r20 { fill: #008139 } - .terminal-463758155-r21 { fill: #fea62b;font-weight: bold } - .terminal-463758155-r22 { fill: #a7a9ab } - .terminal-463758155-r23 { fill: #e2e3e3 } + .terminal-3220763577-r1 { fill: #c5c8c6 } + .terminal-3220763577-r2 { fill: #e3e3e3 } + .terminal-3220763577-r3 { fill: #989898 } + .terminal-3220763577-r4 { fill: #e1e1e1 } + .terminal-3220763577-r5 { fill: #4ebf71;font-weight: bold } + .terminal-3220763577-r6 { fill: #1e1e1e } + .terminal-3220763577-r7 { fill: #507bb3 } + .terminal-3220763577-r8 { fill: #e2e2e2 } + .terminal-3220763577-r9 { fill: #808080 } + .terminal-3220763577-r10 { fill: #dde6ed;font-weight: bold } + .terminal-3220763577-r11 { fill: #001541 } + .terminal-3220763577-r12 { fill: #0178d4 } + .terminal-3220763577-r13 { fill: #454a50 } + .terminal-3220763577-r14 { fill: #e2e3e3;font-weight: bold } + .terminal-3220763577-r15 { fill: #000000 } + .terminal-3220763577-r16 { fill: #e4e4e4 } + .terminal-3220763577-r17 { fill: #14191f } + .terminal-3220763577-r18 { fill: #7ae998 } + .terminal-3220763577-r19 { fill: #0a180e;font-weight: bold } + .terminal-3220763577-r20 { fill: #008139 } + .terminal-3220763577-r21 { fill: #fea62b;font-weight: bold } + .terminal-3220763577-r22 { fill: #a7a9ab } + .terminal-3220763577-r23 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Template features - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use reference The pipeline will  Hide help  - ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - use a copy of the  - most common  - reference genome  - files from  - iGenomes - - - Nf-core pipelines are configured to use a copy of the most common  - reference genome 
files. - - By selecting this option, your pipeline will include a configuration - file specifying the paths to these files.▂▂ - - The required code to use these files will also be included in the  - template. When the pipeline user provides an appropriate genome key, - the pipeline will automatically download the required reference ▂▂ - files. - - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github CI The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        testsinclude several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - GitHub actions for - Continuous  - Integration (CI)  - testing - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github badgesThe README.md file Show help  - ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - will include  - GitHub badges - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Continue  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode  q Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Template features + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github CI testsThe pipeline will  Show help  + ▁▁▁▁▁▁▁▁include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + actions for Continuous + Integration (CI)  + testing + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use reference genomesThe pipeline will be  Hide help  + ▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + copy of the most  + common reference  + genome files from  + iGenomes + + + Nf-core pipelines are configured to use a copy of the most common reference  + genome files.▂▂ + + By selecting this option, your pipeline will include a configuration file  + specifying the paths to these files. + + The required code to use these files will also be included in the template.  + When the pipeline user provides an appropriate genome key, the pipeline will + automatically download the required reference files. 
+ ▅▅ + For more information about reference genomes in nf-core pipelines, see the  + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github badgesThe README.md file of  Show help  + ▁▁▁▁▁▁▁▁the pipeline will ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + include GitHub badges + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add configuration The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        filesinclude configuration ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + profiles containing  + custom parameters  + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Back  Continue  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  d Toggle dark mode  q Quit  @@ -2233,255 +2233,255 @@ font-weight: 700; } - .terminal-2213530577-matrix { + .terminal-537214554-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2213530577-title { + .terminal-537214554-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2213530577-r1 { fill: #c5c8c6 } - .terminal-2213530577-r2 { fill: #e3e3e3 } - .terminal-2213530577-r3 { fill: #989898 } - .terminal-2213530577-r4 { fill: #e1e1e1 } - .terminal-2213530577-r5 { fill: #4ebf71;font-weight: bold } - .terminal-2213530577-r6 { fill: #1e1e1e } - .terminal-2213530577-r7 { fill: #507bb3 } - .terminal-2213530577-r8 { fill: #e2e2e2 } - .terminal-2213530577-r9 { fill: #808080 } - .terminal-2213530577-r10 { fill: #dde6ed;font-weight: bold } - .terminal-2213530577-r11 { fill: #001541 } - .terminal-2213530577-r12 { fill: #14191f } - .terminal-2213530577-r13 { fill: #454a50 } - .terminal-2213530577-r14 { fill: #7ae998 } - .terminal-2213530577-r15 { fill: #e2e3e3;font-weight: bold } - .terminal-2213530577-r16 { fill: #0a180e;font-weight: bold } - .terminal-2213530577-r17 { fill: #000000 } - .terminal-2213530577-r18 { fill: #008139 } - .terminal-2213530577-r19 { fill: #fea62b;font-weight: bold } - .terminal-2213530577-r20 { fill: #a7a9ab } - .terminal-2213530577-r21 { fill: #e2e3e3 } + .terminal-537214554-r1 { fill: #c5c8c6 } + .terminal-537214554-r2 { 
fill: #e3e3e3 } + .terminal-537214554-r3 { fill: #989898 } + .terminal-537214554-r4 { fill: #e1e1e1 } + .terminal-537214554-r5 { fill: #4ebf71;font-weight: bold } + .terminal-537214554-r6 { fill: #1e1e1e } + .terminal-537214554-r7 { fill: #507bb3 } + .terminal-537214554-r8 { fill: #e2e2e2 } + .terminal-537214554-r9 { fill: #808080 } + .terminal-537214554-r10 { fill: #dde6ed;font-weight: bold } + .terminal-537214554-r11 { fill: #001541 } + .terminal-537214554-r12 { fill: #14191f } + .terminal-537214554-r13 { fill: #454a50 } + .terminal-537214554-r14 { fill: #7ae998 } + .terminal-537214554-r15 { fill: #e2e3e3;font-weight: bold } + .terminal-537214554-r16 { fill: #0a180e;font-weight: bold } + .terminal-537214554-r17 { fill: #000000 } + .terminal-537214554-r18 { fill: #008139 } + .terminal-537214554-r19 { fill: #fea62b;font-weight: bold } + .terminal-537214554-r20 { fill: #a7a9ab } + .terminal-537214554-r21 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Template features - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use reference The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - use a copy of the  - most common  - reference genome  - files from  - iGenomes - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github CI The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        testsinclude several ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - GitHub actions for - Continuous  - Integration (CI)  - testing - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▆▆ -         Add Github badgesThe README.md file Show help  - ▁▁▁▁▁▁▁▁of the pipeline ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - will include  - GitHub badges - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add configuration         The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        filesinclude 
▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - configuration  - profiles  - containing custom  - parameters  - requried to run  - nf-core pipelines  - at different  - institutions - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use code lintersThe pipeline will  Show help  - ▁▁▁▁▁▁▁▁include code ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Continue  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode  q Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Template features + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github CI testsThe pipeline will  Show help  + ▁▁▁▁▁▁▁▁include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + actions for Continuous + Integration (CI)  + testing + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use reference genomesThe pipeline will be  Show help  + ▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + copy of the most  + common reference  + genome files from  + iGenomes + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github badgesThe README.md file of  Show help  + ▁▁▁▁▁▁▁▁the pipeline will ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + include GitHub badges + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add configuration The pipeline will  Show help ▇▇ + ▁▁▁▁▁▁▁▁        filesinclude configuration ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + profiles containing  + custom parameters  + requried to run  + nf-core pipelines at  + different institutions + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use code lintersThe pipeline will  Show help  + ▁▁▁▁▁▁▁▁include code linters ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + and CI tests to lint  + your code: pre-commit, + editor-config and  + prettier. 
+ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Include citationsInclude pipeline tools Show help  + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Back  Continue  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  d Toggle dark mode  q Quit  @@ -2511,254 +2511,254 @@ font-weight: 700; } - .terminal-157696122-matrix { + .terminal-1633351929-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-157696122-title { + .terminal-1633351929-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-157696122-r1 { fill: #c5c8c6 } - .terminal-157696122-r2 { fill: #e3e3e3 } - .terminal-157696122-r3 { fill: #989898 } - .terminal-157696122-r4 { fill: #e1e1e1 } - .terminal-157696122-r5 { fill: #4ebf71;font-weight: bold } - .terminal-157696122-r6 { fill: #1e1e1e } - .terminal-157696122-r7 { fill: #507bb3 } - .terminal-157696122-r8 { fill: #e2e2e2 } - .terminal-157696122-r9 { fill: #808080 } - .terminal-157696122-r10 { fill: #dde6ed;font-weight: bold } - .terminal-157696122-r11 { fill: #001541 } - .terminal-157696122-r12 { fill: #454a50 } - .terminal-157696122-r13 { fill: #7ae998 } - .terminal-157696122-r14 { fill: #e2e3e3;font-weight: bold } - .terminal-157696122-r15 { fill: #0a180e;font-weight: bold } - .terminal-157696122-r16 { fill: #000000 } - .terminal-157696122-r17 { fill: #008139 } - .terminal-157696122-r18 { fill: #fea62b;font-weight: bold } - .terminal-157696122-r19 { fill: #a7a9ab } - .terminal-157696122-r20 { fill: #e2e3e3 } + .terminal-1633351929-r1 { fill: #c5c8c6 } + .terminal-1633351929-r2 { fill: #e3e3e3 } + .terminal-1633351929-r3 { fill: #989898 } + .terminal-1633351929-r4 { fill: #e1e1e1 } + .terminal-1633351929-r5 { fill: #4ebf71;font-weight: bold } + .terminal-1633351929-r6 { fill: #1e1e1e } + .terminal-1633351929-r7 { fill: #507bb3 } + .terminal-1633351929-r8 { fill: #e2e2e2 } + .terminal-1633351929-r9 { fill: #808080 } + .terminal-1633351929-r10 { fill: #dde6ed;font-weight: bold } + 
.terminal-1633351929-r11 { fill: #001541 } + .terminal-1633351929-r12 { fill: #454a50 } + .terminal-1633351929-r13 { fill: #7ae998 } + .terminal-1633351929-r14 { fill: #e2e3e3;font-weight: bold } + .terminal-1633351929-r15 { fill: #0a180e;font-weight: bold } + .terminal-1633351929-r16 { fill: #000000 } + .terminal-1633351929-r17 { fill: #008139 } + .terminal-1633351929-r18 { fill: #fea62b;font-weight: bold } + .terminal-1633351929-r19 { fill: #a7a9ab } + .terminal-1633351929-r20 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Template features - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use reference The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        genomesbe configured to ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - use a copy of the  - most common  - reference genome  - files from iGenomes - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use multiqcThe pipeline will  Show help  - ▁▁▁▁▁▁▁▁include the MultiQC▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - module which  - generates an HTML  - report for quality  - control. - - - - - - - - - - - - - - - - - - - - - - - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Continue  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode  q Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Template features + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use reference genomesThe pipeline will be  Show help  + ▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + copy of the most common + reference genome files  + from iGenomes + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use multiqcThe pipeline will  Show help  + ▁▁▁▁▁▁▁▁include the MultiQC ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + module which generates  + an HTML report for  + quality control. 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Back  Continue  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  d Toggle dark mode  q Quit  From a009e68f594767a90cccd0df0e4fbbbceaf4a3f2 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 9 Aug 2024 11:37:18 +0200 Subject: [PATCH 458/737] try reading yaml keys with a list format --- .github/workflows/create-test-lint-wf-template.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index d4ff3fc55..15582951d 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -48,7 +48,7 @@ jobs: uses: actions/checkout@v3 - name: Create Matrix id: create_matrix - run: echo "matrix=$(yq 'keys' ${{ github.workspace }}/nf_core/pipelines/create/templatefeatures.yml)" >> $GITHUB_OUTPUT + run: echo "matrix=$(yq 'keys' ${{ github.workspace }}/nf_core/pipelines/create/templatefeatures.yml | awk '{print $2}' | paste -sd "," - | awk '{print "[" $0 "]"}')" >> $GITHUB_OUTPUT RunTestWorkflow: runs-on: ${{ matrix.runner }} From e36bd7fc6411b68c8b22b17aa783ae9680f4fb2c Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 12 Aug 2024 09:37:06 +0200 Subject: [PATCH 459/737] don't read from json on matrix creation --- .github/workflows/create-test-lint-wf-template.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index 15582951d..fd7a494d0 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -56,7 +56,7 @@ jobs: NXF_ANSI_LOG: false strategy: matrix: - TEMPLATE: ${{ fromJSON(needs.prepare-matrix.outputs.all_features) }} + TEMPLATE: ${{ needs.prepare-matrix.outputs.all_features }} runner: # use the runner given by the input if it is 
dispatched manually, run on github if it is a rerun or on self-hosted by default - ${{ github.event.inputs.runners || github.run_number > 1 && 'ubuntu-latest' || 'self-hosted' }} @@ -68,6 +68,9 @@ jobs: - TEMPLATE: nf_core_configs runner: ubuntu-latest profile: "docker" + exclude: + - TEMPLATE: github + - TEMPLATE: is_nfcore fail-fast: false steps: From d4d231444d9a342624b7fb8c2154ba07424bf34d Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 12 Aug 2024 10:22:26 +0200 Subject: [PATCH 460/737] reset lintconfigtype --- nf_core/utils.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/nf_core/utils.py b/nf_core/utils.py index 9b3e56da5..d0546a5c5 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -19,7 +19,7 @@ import time from contextlib import contextmanager from pathlib import Path -from typing import Any, Callable, Dict, Generator, List, Optional, Tuple, TypedDict, Union +from typing import Any, Callable, Dict, Generator, List, Optional, Tuple, Union import git import prompt_toolkit.styles @@ -1077,11 +1077,7 @@ def get(self, item: str, default: Any = None) -> Any: return getattr(self, item, default) -class LintConfigType(TypedDict): - files_exist: Optional[Union[List, List[str], List[Dict[str, List[str]]]]] - files_unchanged: Optional[Union[List[str], List[Dict[str, List[str]]]]] - nextflow_config: Optional[Union[List[str], List[Dict[str, List[str]]]]] - multiqc_config: Optional[Union[List[str], bool]] +LintConfigType = Optional[Dict[str, Union[List[str], List[Dict[str, List[str]]], bool]]] class NFCoreYamlConfig(BaseModel): From 78c64c719db446bf84f33137cbce80cd54ee37bc Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 13 Aug 2024 11:07:04 +0200 Subject: [PATCH 461/737] apply suggestions from code review by @mashehu --- .../create-test-lint-wf-template.yml | 9 ++---- nf_core/pipelines/create/create.py | 4 ++- nf_core/pipelines/create/custompipeline.py | 2 +- nf_core/pipelines/create/nfcorepipeline.py | 2 +- 
nf_core/pipelines/create/templatefeatures.yml | 28 +++++++++---------- 5 files changed, 21 insertions(+), 24 deletions(-) diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index fd7a494d0..bd661ce07 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -31,19 +31,14 @@ env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} jobs: - install-yq: - name: Install yq - runs-on: ubuntu-latest - steps: - - name: 🏗 Set up yq - uses: frenck/action-setup-yq@v1 - prepare-matrix: name: Retrieve all template features runs-on: ubuntu-latest outputs: all_features: ${{ steps.create_matrix.outputs.matrix }} steps: + - name: 🏗 Set up yq + uses: frenck/action-setup-yq@v1 - name: checkout uses: actions/checkout@v3 - name: Create Matrix diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 584a4673d..1df6cc3f4 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -185,7 +185,9 @@ def update_config(self, organisation, version, force, outdir): if self.config.is_nfcore is None: self.config.is_nfcore = self.config.org == "nf-core" - def obtain_jinja_params_dict(self, features_to_skip: List[str], pipeline_dir: Union[str, Path]): + def obtain_jinja_params_dict( + self, features_to_skip: List[str], pipeline_dir: Union[str, Path] + ) -> tuple[dict, list[str]]: """Creates a dictionary of parameters for the new pipeline. 
Args: diff --git a/nf_core/pipelines/create/custompipeline.py b/nf_core/pipelines/create/custompipeline.py index fdb7a46d2..5debcfee7 100644 --- a/nf_core/pipelines/create/custompipeline.py +++ b/nf_core/pipelines/create/custompipeline.py @@ -33,7 +33,7 @@ def on_mount(self) -> None: for name, feature in self.parent.template_features_yml.items(): if feature["custom_pipelines"]: self.query_one("#features").mount( - PipelineFeature(feature["help"], feature["short_description"], feature["description"], name) + PipelineFeature(feature["help_text"], feature["short_description"], feature["description"], name) ) @on(Button.Pressed, "#continue") diff --git a/nf_core/pipelines/create/nfcorepipeline.py b/nf_core/pipelines/create/nfcorepipeline.py index b6a823169..ebb986698 100644 --- a/nf_core/pipelines/create/nfcorepipeline.py +++ b/nf_core/pipelines/create/nfcorepipeline.py @@ -33,7 +33,7 @@ def on_mount(self) -> None: for name, feature in self.parent.template_features_yml.items(): if feature["nfcore_pipelines"]: self.query_one("#features").mount( - PipelineFeature(feature["help"], feature["short_description"], feature["description"], name) + PipelineFeature(feature["help_text"], feature["short_description"], feature["description"], name) ) @on(Button.Pressed, "#continue") diff --git a/nf_core/pipelines/create/templatefeatures.yml b/nf_core/pipelines/create/templatefeatures.yml index a91887a89..48ce20055 100644 --- a/nf_core/pipelines/create/templatefeatures.yml +++ b/nf_core/pipelines/create/templatefeatures.yml @@ -2,9 +2,9 @@ github: skippable_paths: - ".github/" - ".gitignore" - short_description: "" + short_description: "Skip the creation of a local Git repository." 
description: "" - help: "" + help_text: "" linting: files_exist: - ".github/ISSUE_TEMPLATE/bug_report.yml" @@ -32,7 +32,7 @@ ci: - ".github/workflows/" short_description: "Add Github CI tests" description: "The pipeline will include several GitHub actions for Continuous Integration (CI) testing" - help: | + help_text: | Nf-core provides a set of Continuous Integration (CI) tests for Github. When you open a pull request (PR) on your pipeline repository, these tests will run automatically. @@ -55,7 +55,7 @@ igenomes: - "conf/igenomes.config" short_description: "Use reference genomes" description: "The pipeline will be configured to use a copy of the most common reference genome files from iGenomes" - help: | + help_text: | Nf-core pipelines are configured to use a copy of the most common reference genome files. By selecting this option, your pipeline will include a configuration file specifying the paths to these files. @@ -75,7 +75,7 @@ github_badges: skippable_paths: False short_description: "Add Github badges" description: "The README.md file of the pipeline will include GitHub badges" - help: | + help_text: | The pipeline `README.md` will include badges for: * AWS CI Tests * Zenodo DOI @@ -93,7 +93,7 @@ nf_core_configs: skippable_paths: False short_description: "Add configuration files" description: "The pipeline will include configuration profiles containing custom parameters requried to run nf-core pipelines at different institutions" - help: | + help_text: | Nf-core has a repository with a collection of configuration profiles. Those config files define a set of parameters which are specific to compute environments at different Institutions. @@ -118,9 +118,9 @@ is_nfcore: - ".github/workflows/awsfulltest.yml" - ".github/workflows/awstest.yml" - ".github/workflows/release-announcements.yml" - short_description: "" + short_description: "A custom pipeline which won't be part of the nf-core organisation but be compatible with nf-core/tools." 
description: "" - help: "" + help_text: "" linting: files_exist: - "CODE_OF_CONDUCT.md" @@ -152,7 +152,7 @@ code_linters: - ".github/workflows/fix-linting.yml" short_description: "Use code linters" description: "The pipeline will include code linters and CI tests to lint your code: pre-commit, editor-config and prettier." - help: | + help_text: | Pipelines include code linters to check the formatting of your code in order to harmonize code styles between developers. Linters will check all non-ignored files, e.g., JSON, YAML, Nextlow or Python files in your repository. The available code linters are: @@ -167,7 +167,7 @@ citations: - "assets/methods_description_template.yml" short_description: "Include citations" description: "Include pipeline tools citations in CITATIONS.md and a method description in the MultiQC report (if enabled)." - help: | + help_text: | If adding citations, the pipeline template will contain a `CITATIONS.md` file to add the citations of all tools used in the pipeline. Additionally, it will include a YAML file (`assets/methods_description_template.yml`) to add a Materials & Methods section describing the tools used in the pieline, @@ -179,7 +179,7 @@ gitpod: - ".gitpod.yml" short_description: "Include a gitpod environment" description: "Include the configuration required to use Gitpod." - help: | + help_text: | Gitpod (https://www.gitpod.io/) provides standardized and automated development environments. Including this to your pipeline will provide an environment with the latest version of nf-core/tools installed and all its requirements. @@ -191,7 +191,7 @@ codespaces: - ".devcontainer/devcontainer.json" short_description: "Include GitHub Codespaces" description: "The pipeline will include a devcontainer configuration for GitHub Codespaces, providing a development environment with nf-core/tools and Nextflow installed." - help: | + help_text: | The pipeline will include a devcontainer configuration. 
The devcontainer will create a GitHub Codespaces for Nextflow development with nf-core/tools and Nextflow installed. @@ -208,7 +208,7 @@ multiqc: - "modules/nf-core/multiqc/" short_description: "Use multiqc" description: "The pipeline will include the MultiQC module which generates an HTML report for quality control." - help: | + help_text: | MultiQC is a visualization tool that generates a single HTML report summarising all samples in your project. Most of the pipeline quality control results can be visualised in the report and further statistics are available in the report data directory. The pipeline will include the MultiQC module and will have special steps which also allow the software versions to be reported in the MultiQC output for future traceability. For more information about how to use MultiQC reports, see http://multiqc.info. @@ -226,7 +226,7 @@ changelog: - "CHANGELOG.md" short_description: "Add a changelog" description: "Add a CHANGELOG.md file." - help: | + help_text: | Having a `CHANGELOG.md` file in the pipeline root directory is useful to track the changes added to each version. 
You can read more information on the recommended format here: https://keepachangelog.com/en/1.0.0/ From bf07bf24a31850250486000c708c7b6e31b60f01 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 13 Aug 2024 11:32:58 +0200 Subject: [PATCH 462/737] load feature yaml file with a utils function --- nf_core/pipelines/create/__init__.py | 4 +--- nf_core/pipelines/create/create.py | 6 ++---- nf_core/pipelines/create/utils.py | 11 +++++++++++ tests/pipelines/test_create.py | 5 ++--- 4 files changed, 16 insertions(+), 10 deletions(-) diff --git a/nf_core/pipelines/create/__init__.py b/nf_core/pipelines/create/__init__.py index 1774d0a01..8b0edf34c 100644 --- a/nf_core/pipelines/create/__init__.py +++ b/nf_core/pipelines/create/__init__.py @@ -72,9 +72,7 @@ class PipelineCreateApp(App[utils.CreateConfig]): LOGGING_STATE = None # Template features - features_yml_path = Path(nf_core.__file__).parent / "pipelines" / "create" / "templatefeatures.yml" - with open(features_yml_path) as fh: - template_features_yml = yaml.safe_load(fh) + template_features_yml = utils.load_features_yaml() def on_mount(self) -> None: self.push_screen("welcome") diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 1df6cc3f4..b3135435e 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -18,7 +18,7 @@ import nf_core import nf_core.pipelines.schema import nf_core.utils -from nf_core.pipelines.create.utils import CreateConfig +from nf_core.pipelines.create.utils import CreateConfig, features_yml_path, load_features_yaml from nf_core.pipelines.create_logo import create_logo from nf_core.pipelines.lint_utils import run_prettier_on_file from nf_core.utils import LintConfigType, NFCoreTemplateConfig @@ -82,9 +82,7 @@ def __init__( raise UserWarning("The template configuration was not provided.") # Read features yaml file - features_yml_path = Path(nf_core.__file__).parent / "pipelines" / "create" / "templatefeatures.yml" - with 
open(features_yml_path) as fh: - self.template_features_yml = yaml.safe_load(fh) + self.template_features_yml = load_features_yaml() if self.config.outdir is None: self.config.outdir = str(Path.cwd()) diff --git a/nf_core/pipelines/create/utils.py b/nf_core/pipelines/create/utils.py index 89cf0ab65..2e0015ed9 100644 --- a/nf_core/pipelines/create/utils.py +++ b/nf_core/pipelines/create/utils.py @@ -5,6 +5,7 @@ from pathlib import Path from typing import Any, Dict, Iterator, Union +import yaml from pydantic import ConfigDict, ValidationError, ValidationInfo, field_validator from rich.logging import RichHandler from textual import on @@ -16,6 +17,7 @@ from textual.widget import Widget from textual.widgets import Button, Input, Markdown, RichLog, Static, Switch +import nf_core from nf_core.utils import NFCoreTemplateConfig # Use ContextVar to define a context on the model initialization @@ -34,6 +36,9 @@ def init_context(value: Dict[str, Any]) -> Iterator[None]: # Define a global variable to store the pipeline type NFCORE_PIPELINE_GLOBAL: bool = True +# YAML file describing template features +features_yml_path = Path(nf_core.__file__).parent / "pipelines" / "create" / "templatefeatures.yml" + class CreateConfig(NFCoreTemplateConfig): """Pydantic model for the nf-core create config.""" @@ -242,3 +247,9 @@ def add_hide_class(app, widget_id: str) -> None: def remove_hide_class(app, widget_id: str) -> None: """Remove class 'hide' to a widget. 
Display widget.""" app.get_widget_by_id(widget_id).remove_class("hide") + + +def load_features_yaml() -> dict: + """Load the YAML file describing template features.""" + with open(features_yml_path) as fh: + return yaml.safe_load(fh) diff --git a/tests/pipelines/test_create.py b/tests/pipelines/test_create.py index b684a4669..13fd3b24c 100644 --- a/tests/pipelines/test_create.py +++ b/tests/pipelines/test_create.py @@ -9,6 +9,7 @@ import yaml import nf_core.pipelines.create.create +from nf_core.pipelines.create.utils import load_features_yaml from ..utils import TEST_DATA_DIR, with_temporary_folder @@ -101,9 +102,7 @@ def test_pipeline_creation_initiation_customize_template(self, tmp_path): @with_temporary_folder def test_pipeline_creation_with_yml_skip(self, tmp_path): # Update pipeline_create_template_skip.yml file - features_yml_path = Path(nf_core.__file__).parent / "pipelines" / "create" / "templatefeatures.yml" - with open(features_yml_path) as fh: - template_features_yml = yaml.safe_load(fh) + template_features_yml = load_features_yaml() all_features = list(template_features_yml.keys()) all_features.remove("is_nfcore") env = jinja2.Environment(loader=jinja2.PackageLoader("tests", "data"), keep_trailing_newline=True) From 2c2ec5db394128a947bc874461582e45711648d1 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 13 Aug 2024 11:51:26 +0200 Subject: [PATCH 463/737] set force to true when creating a pipeline from the app --- nf_core/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/utils.py b/nf_core/utils.py index d0546a5c5..96c16130c 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1055,7 +1055,7 @@ class NFCoreTemplateConfig(BaseModel): description: Optional[str] = None author: Optional[str] = None version: Optional[str] = None - force: Optional[bool] = None + force: Optional[bool] = True outdir: Optional[Union[str, Path]] = None skip_features: Optional[list] = None is_nfcore: Optional[bool] = None From 
2535b6fc136fc24bef77249f3fc755b344100b9c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Tue, 13 Aug 2024 12:16:51 +0200 Subject: [PATCH 464/737] Update nf_core/pipelines/create/utils.py MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Matthias Hörtenhuber --- nf_core/pipelines/create/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipelines/create/utils.py b/nf_core/pipelines/create/utils.py index 2e0015ed9..0b72c2bcf 100644 --- a/nf_core/pipelines/create/utils.py +++ b/nf_core/pipelines/create/utils.py @@ -249,7 +249,7 @@ def remove_hide_class(app, widget_id: str) -> None: app.get_widget_by_id(widget_id).remove_class("hide") -def load_features_yaml() -> dict: +def load_features_yaml() -> Dict: """Load the YAML file describing template features.""" with open(features_yml_path) as fh: return yaml.safe_load(fh) From 6af86cf39306c2bf3c9bd4d0f61057fdea24f590 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 13 Aug 2024 12:29:02 +0200 Subject: [PATCH 465/737] fix typing --- nf_core/pipelines/create/create.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index b3135435e..200507d8a 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -8,7 +8,7 @@ import re import shutil from pathlib import Path -from typing import Dict, List, Optional, Union, cast +from typing import Dict, List, Optional, Tuple, Union, cast import git import git.config @@ -185,7 +185,7 @@ def update_config(self, organisation, version, force, outdir): def obtain_jinja_params_dict( self, features_to_skip: List[str], pipeline_dir: Union[str, Path] - ) -> tuple[dict, list[str]]: + ) -> Tuple[Dict, List[str]]: """Creates a dictionary of parameters for the new pipeline. 
Args: From d6459cba0cf3e96e156350e541850afb61409e57 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 13 Aug 2024 17:39:35 +0200 Subject: [PATCH 466/737] add templatefeatures.yml to python package --- MANIFEST.in | 1 + 1 file changed, 1 insertion(+) diff --git a/MANIFEST.in b/MANIFEST.in index 68f115d97..2bec40380 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -10,3 +10,4 @@ include nf_core/assets/logo/nf-core-repo-logo-base-darkbg.png include nf_core/assets/logo/placeholder_logo.svg include nf_core/assets/logo/MavenPro-Bold.ttf include nf_core/pipelines/create/create.tcss +include nf_core/pipelines/create/templatefeatures.yml From f6505c2fea94057e66954f45b4c1ad6e37dc8a77 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Tue, 13 Aug 2024 15:41:13 +0000 Subject: [PATCH 467/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 398aee575..c7095e292 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,6 +16,7 @@ - add option to exclude multiqc from pipeline template ([#3103](https://github.com/nf-core/tools/pull/3103)) - add option to exclude changelog from custom pipeline template ([#3104](https://github.com/nf-core/tools/pull/3104)) - handle template features with a yaml file ([#3108](https://github.com/nf-core/tools/pull/3108)) +- add templatefeatures.yml to python package ([#3112](https://github.com/nf-core/tools/pull/3112)) ### Linting From afec8773b56cb813c4de59f882f0c056c9b4d114 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 13 Aug 2024 18:11:41 +0200 Subject: [PATCH 468/737] output matrix elements with double quotes --- .github/workflows/create-test-lint-wf-template.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index bd661ce07..0057bdb2d 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ 
b/.github/workflows/create-test-lint-wf-template.yml @@ -35,7 +35,7 @@ jobs: name: Retrieve all template features runs-on: ubuntu-latest outputs: - all_features: ${{ steps.create_matrix.outputs.matrix }} + all_features: ${{ fromJSON(steps.create_matrix.outputs.matrix) }} steps: - name: 🏗 Set up yq uses: frenck/action-setup-yq@v1 @@ -43,7 +43,7 @@ jobs: uses: actions/checkout@v3 - name: Create Matrix id: create_matrix - run: echo "matrix=$(yq 'keys' ${{ github.workspace }}/nf_core/pipelines/create/templatefeatures.yml | awk '{print $2}' | paste -sd "," - | awk '{print "[" $0 "]"}')" >> $GITHUB_OUTPUT + run: echo "matrix=$(yq 'keys' ${{ github.workspace }}/nf_core/pipelines/create/templatefeatures.yml | awk '{print "\""$2"\""}' | paste -sd "," - | awk '{print "[" $0 "]"}')" >> $GITHUB_OUTPUT RunTestWorkflow: runs-on: ${{ matrix.runner }} From c8188130153a4c4192714255b98ecd6fc08155bf Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 13 Aug 2024 18:18:20 +0200 Subject: [PATCH 469/737] don't use fromJSON --- .github/workflows/create-test-lint-wf-template.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index 0057bdb2d..0c97168f4 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -35,7 +35,7 @@ jobs: name: Retrieve all template features runs-on: ubuntu-latest outputs: - all_features: ${{ fromJSON(steps.create_matrix.outputs.matrix) }} + all_features: ${{ steps.create_matrix.outputs.matrix }} steps: - name: 🏗 Set up yq uses: frenck/action-setup-yq@v1 From aef4895bf7a1a5b50d855399d37635cc088d7abb Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 13 Aug 2024 18:23:23 +0200 Subject: [PATCH 470/737] try fromJSON when reading the matrix --- .github/workflows/create-test-lint-wf-template.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index 0c97168f4..c2d61c51c 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -51,7 +51,7 @@ jobs: NXF_ANSI_LOG: false strategy: matrix: - TEMPLATE: ${{ needs.prepare-matrix.outputs.all_features }} + TEMPLATE: ${{ fromJson(needs.prepare-matrix.outputs.all_features) }} runner: # use the runner given by the input if it is dispatched manually, run on github if it is a rerun or on self-hosted by default - ${{ github.event.inputs.runners || github.run_number > 1 && 'ubuntu-latest' || 'self-hosted' }} From 3464aa45a689e33a64bbf2ac3390772ae3b4d3d0 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 13 Aug 2024 18:34:47 +0200 Subject: [PATCH 471/737] surround whole matrix by single quotes --- .github/workflows/create-test-lint-wf-template.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index c2d61c51c..e097c0c82 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -43,7 +43,7 @@ jobs: uses: actions/checkout@v3 - name: Create Matrix id: create_matrix - run: echo "matrix=$(yq 'keys' ${{ github.workspace }}/nf_core/pipelines/create/templatefeatures.yml | awk '{print "\""$2"\""}' | paste -sd "," - | awk '{print "[" $0 "]"}')" >> $GITHUB_OUTPUT + run: echo "matrix=$(yq 'keys' ${{ github.workspace }}/nf_core/pipelines/create/templatefeatures.yml | awk '{print "\""$2"\""}' | paste -sd "," - | awk '{print "'\''[" $0 "]'\''"}')" >> $GITHUB_OUTPUT RunTestWorkflow: runs-on: ${{ matrix.runner }} From 5156d0129ae602af54291742cdd7d107bcb2c129 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 13 Aug 2024 18:42:24 +0200 Subject: [PATCH 472/737] escape double quotes to use a json raw string literal --- 
.github/workflows/create-test-lint-wf-template.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index e097c0c82..3d3726b82 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -43,7 +43,7 @@ jobs: uses: actions/checkout@v3 - name: Create Matrix id: create_matrix - run: echo "matrix=$(yq 'keys' ${{ github.workspace }}/nf_core/pipelines/create/templatefeatures.yml | awk '{print "\""$2"\""}' | paste -sd "," - | awk '{print "'\''[" $0 "]'\''"}')" >> $GITHUB_OUTPUT + run: echo "matrix=$(yq 'keys' ${{ github.workspace }}/nf_core/pipelines/create/templatefeatures.yml | awk '{print "\\""\""$2"\\""\""}' | paste -sd "," - | awk '{print "'\''[" $0 "]'\''"}')" >> $GITHUB_OUTPUT RunTestWorkflow: runs-on: ${{ matrix.runner }} @@ -51,7 +51,7 @@ jobs: NXF_ANSI_LOG: false strategy: matrix: - TEMPLATE: ${{ fromJson(needs.prepare-matrix.outputs.all_features) }} + TEMPLATE: ${{ fromJSON(needs.prepare-matrix.outputs.all_features) }} runner: # use the runner given by the input if it is dispatched manually, run on github if it is a rerun or on self-hosted by default - ${{ github.event.inputs.runners || github.run_number > 1 && 'ubuntu-latest' || 'self-hosted' }} From 07858624a4c97c209d5b002a699b1e6d89a6001b Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 14 Aug 2024 08:46:14 +0200 Subject: [PATCH 473/737] allow spaces at the betinning of include statements in components --- nf_core/components/components_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index 01650a643..dee4fbf6b 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -143,7 +143,7 @@ def get_components_to_install(subworkflow_dir: str) -> Tuple[List[str], List[str regex = 
re.compile( r"include(?: *{ *)([a-zA-Z\_0-9]*)(?: *as *)?(?:[a-zA-Z\_0-9]*)?(?: *})(?: *from *)(?:'|\")(.*)(?:'|\")" ) - match = regex.match(line) + match = regex.search(line) if match and len(match.groups()) == 2: name, link = match.groups() if link.startswith("../../../"): From dea1781dfeb402b99a0c1a209928db7c55ff7c5b Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 14 Aug 2024 08:49:56 +0200 Subject: [PATCH 474/737] try simpler yq command --- .github/workflows/create-test-lint-wf-template.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index 3d3726b82..cc7f272c6 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -43,7 +43,9 @@ jobs: uses: actions/checkout@v3 - name: Create Matrix id: create_matrix - run: echo "matrix=$(yq 'keys' ${{ github.workspace }}/nf_core/pipelines/create/templatefeatures.yml | awk '{print "\\""\""$2"\\""\""}' | paste -sd "," - | awk '{print "'\''[" $0 "]'\''"}')" >> $GITHUB_OUTPUT + run: | + echo "matrix=$(yq 'keys | tojson' ${{ github.workspace }}/nf_core/pipelines/create/templatefeatures.yml >> $GITHUB_OUTPUT + echo $GITHUB_OUTPUT RunTestWorkflow: runs-on: ${{ matrix.runner }} @@ -51,7 +53,7 @@ jobs: NXF_ANSI_LOG: false strategy: matrix: - TEMPLATE: ${{ fromJSON(needs.prepare-matrix.outputs.all_features) }} + TEMPLATE: ${{ needs.prepare-matrix.outputs.all_features }} runner: # use the runner given by the input if it is dispatched manually, run on github if it is a rerun or on self-hosted by default - ${{ github.event.inputs.runners || github.run_number > 1 && 'ubuntu-latest' || 'self-hosted' }} From d838c869602c6a08238b7bbc17abdc23046083d6 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Wed, 14 Aug 2024 06:51:20 +0000 Subject: [PATCH 475/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff 
--git a/CHANGELOG.md b/CHANGELOG.md index 609c3aea0..91e32374d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -64,6 +64,7 @@ - Update pre-commit hook pre-commit/mirrors-mypy to v1.11.1 ([#3091](https://github.com/nf-core/tools/pull/3091)) - Pipelines: allow numbers in custom pipeline name ([#3094](https://github.com/nf-core/tools/pull/3094)) - Add bot action to update textual snapshots and write bot documentation ([#3102](https://github.com/nf-core/tools/pull/3102)) +- Components: allow spaces at the betinning of include statements ([#3115](https://github.com/nf-core/tools/pull/3115)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From e19be1d13025b18273f494f86f39d67fc2ba1901 Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 14 Aug 2024 08:53:05 +0200 Subject: [PATCH 476/737] add missing parenthesis --- .github/workflows/create-test-lint-wf-template.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index cc7f272c6..e55f904c7 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -44,7 +44,7 @@ jobs: - name: Create Matrix id: create_matrix run: | - echo "matrix=$(yq 'keys | tojson' ${{ github.workspace }}/nf_core/pipelines/create/templatefeatures.yml >> $GITHUB_OUTPUT + echo "matrix=$(yq 'keys | tojson' ${{ github.workspace }}/nf_core/pipelines/create/templatefeatures.yml) >> $GITHUB_OUTPUT echo $GITHUB_OUTPUT RunTestWorkflow: From 7c6281f113cd3cac0a0b570e2c178f3036ae80e2 Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 14 Aug 2024 08:54:29 +0200 Subject: [PATCH 477/737] and missing closing quote --- .github/workflows/create-test-lint-wf-template.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml 
index e55f904c7..fd20b6793 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -44,7 +44,7 @@ jobs: - name: Create Matrix id: create_matrix run: | - echo "matrix=$(yq 'keys | tojson' ${{ github.workspace }}/nf_core/pipelines/create/templatefeatures.yml) >> $GITHUB_OUTPUT + echo "matrix=$(yq 'keys | tojson' ${{ github.workspace }}/nf_core/pipelines/create/templatefeatures.yml)" >> $GITHUB_OUTPUT echo $GITHUB_OUTPUT RunTestWorkflow: From 96c2b0a1a8ec5be01cac80c8f418ffacdbd63eda Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 14 Aug 2024 09:01:56 +0200 Subject: [PATCH 478/737] remove debugging --- .github/workflows/create-test-lint-wf-template.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index fd20b6793..311c6a058 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -45,7 +45,6 @@ jobs: id: create_matrix run: | echo "matrix=$(yq 'keys | tojson' ${{ github.workspace }}/nf_core/pipelines/create/templatefeatures.yml)" >> $GITHUB_OUTPUT - echo $GITHUB_OUTPUT RunTestWorkflow: runs-on: ${{ matrix.runner }} From 08888ffeea3ca0a1c1f2a607d4614b14a109887d Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 14 Aug 2024 09:06:12 +0200 Subject: [PATCH 479/737] more debugging --- .github/workflows/create-test-lint-wf-template.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index 311c6a058..d46d8f7d3 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -44,7 +44,9 @@ jobs: - name: Create Matrix id: create_matrix run: | - echo "matrix=$(yq 'keys | tojson' ${{ github.workspace }}/nf_core/pipelines/create/templatefeatures.yml)" >> 
$GITHUB_OUTPUT + echo "matrix=$(yq 'keys | tojson(0)' nf_core/pipelines/create/templatefeatures.yml)" + - name: Setup ssh session + uses: Warpbuilds/action-debugger@v1.3 RunTestWorkflow: runs-on: ${{ matrix.runner }} From b564d9f451c48888987d6e9ec24ef8cd49fd60b7 Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 14 Aug 2024 09:08:18 +0200 Subject: [PATCH 480/737] add output step back --- .github/workflows/create-test-lint-wf-template.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index d46d8f7d3..e835a2b30 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -44,7 +44,7 @@ jobs: - name: Create Matrix id: create_matrix run: | - echo "matrix=$(yq 'keys | tojson(0)' nf_core/pipelines/create/templatefeatures.yml)" + echo "matrix=$(yq 'keys | tojson(0)' nf_core/pipelines/create/templatefeatures.yml)" >> $GITHUB_OUTPUT - name: Setup ssh session uses: Warpbuilds/action-debugger@v1.3 From 237f888bdb3a3202291a34b03ea0ef2f9981079f Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 14 Aug 2024 09:11:31 +0200 Subject: [PATCH 481/737] add `needs` step --- .github/workflows/create-test-lint-wf-template.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index e835a2b30..4fa41a0d4 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -45,11 +45,10 @@ jobs: id: create_matrix run: | echo "matrix=$(yq 'keys | tojson(0)' nf_core/pipelines/create/templatefeatures.yml)" >> $GITHUB_OUTPUT - - name: Setup ssh session - uses: Warpbuilds/action-debugger@v1.3 RunTestWorkflow: runs-on: ${{ matrix.runner }} + needs: prepare-matrix env: NXF_ANSI_LOG: false strategy: From d81019f56ba0ebf9ea12c7ed72a80f408e650a51 
Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 14 Aug 2024 09:16:20 +0200 Subject: [PATCH 482/737] parse output step --- .github/workflows/create-test-lint-wf-template.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index 4fa41a0d4..71ea02539 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -53,7 +53,7 @@ jobs: NXF_ANSI_LOG: false strategy: matrix: - TEMPLATE: ${{ needs.prepare-matrix.outputs.all_features }} + TEMPLATE: ${{ fromJson(needs.prepare-matrix.outputs.all_features) }} runner: # use the runner given by the input if it is dispatched manually, run on github if it is a rerun or on self-hosted by default - ${{ github.event.inputs.runners || github.run_number > 1 && 'ubuntu-latest' || 'self-hosted' }} From c4a1961e33b23e9eb5f1972b0ab84301770974f7 Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 14 Aug 2024 09:18:57 +0200 Subject: [PATCH 483/737] add missing escape character --- .github/workflows/create-test-lint-wf-template.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index 71ea02539..29c393375 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -40,7 +40,7 @@ jobs: - name: 🏗 Set up yq uses: frenck/action-setup-yq@v1 - name: checkout - uses: actions/checkout@v3 + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 - name: Create Matrix id: create_matrix run: | @@ -102,9 +102,9 @@ jobs: export NXF_WORK=$(pwd) printf "org: my-prefix\nskip: ${{ needs.prepare-matrix.outputs.all_features }}" > create-test-lint-wf/template_skip_all.yml - - name: Create template skip {{ matrix.TEMPLATE }} + - name: Create template skip ${{ matrix.TEMPLATE }} run: 
| - printf "org: my-prefix\nskip: {{ matrix.TEMPLATE }}" > create-test-lint-wf/template_skip_{{ matrix.TEMPLATE }}.yml + printf "org: my-prefix\nskip: ${{ matrix.TEMPLATE }}" > create-test-lint-wf/template_skip_{{ matrix.TEMPLATE }}.yml # Create a pipeline from the template - name: create a pipeline from the template ${{ matrix.TEMPLATE }} From d401a40cc25419e50846d8b01abf0e66d36a5ec5 Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 14 Aug 2024 09:21:06 +0200 Subject: [PATCH 484/737] more escape characters --- .github/workflows/create-test-lint-wf-template.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index 29c393375..1c5189f54 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -104,7 +104,7 @@ jobs: - name: Create template skip ${{ matrix.TEMPLATE }} run: | - printf "org: my-prefix\nskip: ${{ matrix.TEMPLATE }}" > create-test-lint-wf/template_skip_{{ matrix.TEMPLATE }}.yml + printf "org: my-prefix\nskip: ${{ matrix.TEMPLATE }}" > create-test-lint-wf/template_skip_${{ matrix.TEMPLATE }}.yml # Create a pipeline from the template - name: create a pipeline from the template ${{ matrix.TEMPLATE }} From 5936e61cf4d44242167550419e5f4f001b4212e0 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 14 Aug 2024 10:08:16 +0200 Subject: [PATCH 485/737] fix creating pipeline from template skip_features --- .github/workflows/create-test-lint-wf-template.yml | 4 ++-- nf_core/pipelines/create/create.py | 2 -- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index 1c5189f54..c15783e67 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -100,11 +100,11 @@ jobs: run: | mkdir create-test-lint-wf 
export NXF_WORK=$(pwd) - printf "org: my-prefix\nskip: ${{ needs.prepare-matrix.outputs.all_features }}" > create-test-lint-wf/template_skip_all.yml + printf "org: my-prefix\nskip_features: ${{ needs.prepare-matrix.outputs.all_features }}" > create-test-lint-wf/template_skip_all.yml - name: Create template skip ${{ matrix.TEMPLATE }} run: | - printf "org: my-prefix\nskip: ${{ matrix.TEMPLATE }}" > create-test-lint-wf/template_skip_${{ matrix.TEMPLATE }}.yml + printf "org: my-prefix\nskip_features: [${{ matrix.TEMPLATE }}]" > create-test-lint-wf/template_skip_${{ matrix.TEMPLATE }}.yml # Create a pipeline from the template - name: create a pipeline from the template ${{ matrix.TEMPLATE }} diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 200507d8a..873a2ecb1 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -417,8 +417,6 @@ def fix_linting(self): for area in self.skip_areas: try: for lint_test in self.template_features_yml[area]["linting"]: - if not lint_config[lint_test]: - pass if self.template_features_yml[area]["linting"][lint_test]: lint_config.setdefault(lint_test, []).extend( self.template_features_yml[area]["linting"][lint_test] From 4941833dd1fae5720b4b10c3cdf63bb9dbdd3e6f Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 14 Aug 2024 10:23:48 +0200 Subject: [PATCH 486/737] handle linting tests false --- nf_core/pipelines/create/create.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 873a2ecb1..c015a43f7 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -417,12 +417,15 @@ def fix_linting(self): for area in self.skip_areas: try: for lint_test in self.template_features_yml[area]["linting"]: - if self.template_features_yml[area]["linting"][lint_test]: - lint_config.setdefault(lint_test, []).extend( - 
self.template_features_yml[area]["linting"][lint_test] - ) - else: - lint_config[lint_test] = False + try: + if self.template_features_yml[area]["linting"][lint_test]: + lint_config.setdefault(lint_test, []).extend( + self.template_features_yml[area]["linting"][lint_test] + ) + else: + lint_config[lint_test] = False + except AttributeError: + pass # When linting is False except KeyError: pass # Areas without linting From b474a049e7d4cc1cea8d7006e1a2aeb79b810dcc Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 14 Aug 2024 10:33:56 +0200 Subject: [PATCH 487/737] exclude running skip nf_core_configs with self_hosted_runner --- .github/workflows/create-test-lint-wf-template.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index c15783e67..6b916fb79 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -68,6 +68,9 @@ jobs: exclude: - TEMPLATE: github - TEMPLATE: is_nfcore + - TEMPLATE: nf_core_configs + runner: self-hosted + profile: "self_hosted_runner" fail-fast: false steps: From 442f2f0097226595730dd4e1c9af86696f6c16fb Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 14 Aug 2024 11:56:50 +0200 Subject: [PATCH 488/737] fix pipeline skipping nf_core_configs --- .prettierignore | 1 + .../.github/ISSUE_TEMPLATE/bug_report.yml | 2 + nf_core/pipeline-template/nextflow.config | 2 +- .../pipeline-template/nextflow_schema.json | 6 ++- nf_core/pipelines/create/create.py | 46 ++----------------- nf_core/pipelines/create/templatefeatures.yml | 2 + 6 files changed, 15 insertions(+), 44 deletions(-) diff --git a/.prettierignore b/.prettierignore index 9387ee950..cbe7274a4 100644 --- a/.prettierignore +++ b/.prettierignore @@ -7,6 +7,7 @@ nf_core/module-template/meta.yml nf_core/pipeline-template/nextflow_schema.json nf_core/pipeline-template/modules.json nf_core/pipeline-template/tower.yml 
+nf_core/pipeline-template/.github/ISSUE_TEMPLATE/bug_report.yml tests/data/pipeline_create_template_skip.yml # don't run on things handled by ruff *.py diff --git a/nf_core/pipeline-template/.github/ISSUE_TEMPLATE/bug_report.yml b/nf_core/pipeline-template/.github/ISSUE_TEMPLATE/bug_report.yml index 063690f29..412f5bd3b 100644 --- a/nf_core/pipeline-template/.github/ISSUE_TEMPLATE/bug_report.yml +++ b/nf_core/pipeline-template/.github/ISSUE_TEMPLATE/bug_report.yml @@ -2,6 +2,7 @@ name: Bug report description: Report something that is broken or incorrect labels: bug body: +{%- if is_nfcore %} - type: markdown attributes: value: | @@ -9,6 +10,7 @@ body: - [nf-core website: troubleshooting](https://nf-co.re/usage/troubleshooting) - [{{ name }} pipeline documentation](https://nf-co.re/{{ short_name }}/usage) +{%- endif %} - type: textarea id: description diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index 3a7f54480..0469d6cfe 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -41,11 +41,11 @@ params { version = false pipelines_testdata_base_path = 'https://raw.githubusercontent.com/nf-core/test-datasets/' + {%- if nf_core_configs %} // Config options config_profile_name = null config_profile_description = null - {%- if nf_core_configs %} custom_config_version = 'master' custom_config_base = "https://raw.githubusercontent.com/nf-core/configs/${params.custom_config_version}" config_profile_contact = null diff --git a/nf_core/pipeline-template/nextflow_schema.json b/nf_core/pipeline-template/nextflow_schema.json index 1c52e2298..4a1a22c3e 100644 --- a/nf_core/pipeline-template/nextflow_schema.json +++ b/nf_core/pipeline-template/nextflow_schema.json @@ -77,6 +77,7 @@ } }, {%- endif %} + {%- if nf_core_configs %} "institutional_config_options": { "title": "Institutional config options", "type": "object", @@ -125,6 +126,7 @@ } } }, + {%- endif %} 
"max_job_request_options": { "title": "Max job request options", "type": "object", @@ -289,9 +291,9 @@ {% if igenomes %}{ "$ref": "#/definitions/reference_genome_options" },{% endif %} - { + {% if nf_core_configs %}{ "$ref": "#/definitions/institutional_config_options" - }, + },{% endif %} { "$ref": "#/definitions/max_job_request_options" }, diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index c015a43f7..e3f8dc697 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -349,17 +349,9 @@ def render_template(self) -> None: template_stat = os.stat(template_fn_path) os.chmod(output_path, template_stat.st_mode) - # Remove all unused parameters in the nextflow schema - if not self.jinja_params["igenomes"] or not self.jinja_params["nf_core_configs"]: - self.update_nextflow_schema() if self.config.is_nfcore: # Make a logo and save it, if it is a nf-core pipeline self.make_pipeline_logo() - else: - if self.jinja_params["github"]: - # Remove field mentioning nf-core docs - # in the github bug report template - self.remove_nf_core_in_bug_report_template() # Update the .nf-core.yml with linting configurations self.fix_linting() @@ -374,38 +366,10 @@ def render_template(self) -> None: log.debug(f"Dumping pipeline template yml to pipeline config file '{config_fn.name}'") run_prettier_on_file(self.outdir / config_fn) - def update_nextflow_schema(self): - """ - Removes unused parameters from the nextflow schema. 
- """ - schema_path = self.outdir / "nextflow_schema.json" - - schema = nf_core.pipelines.schema.PipelineSchema() - schema.schema_filename = schema_path - schema.no_prompts = True - schema.load_schema() - schema.get_wf_params() - schema.remove_schema_notfound_configs() - schema.save_schema(suppress_logging=True) - run_prettier_on_file(schema_path) - - def remove_nf_core_in_bug_report_template(self): - """ - Remove the field mentioning nf-core documentation - in the github bug report template - """ - bug_report_path = self.outdir / ".github" / "ISSUE_TEMPLATE" / "bug_report.yml" - - with open(bug_report_path) as fh: - contents = yaml.load(fh, Loader=yaml.FullLoader) - - # Remove the first item in the body, which is the information about the docs - contents["body"].pop(0) - - with open(bug_report_path, "w") as fh: - yaml.dump(contents, fh, default_flow_style=False, sort_keys=False) - - run_prettier_on_file(bug_report_path) + # Run prettier on files + for file in self.outdir.iterdir(): + if file.is_file() and file.name.endswith(("yaml", "yml", "json")): + run_prettier_on_file(file) def fix_linting(self): """ @@ -414,7 +378,7 @@ def fix_linting(self): """ # Create a lint config lint_config = {} - for area in self.skip_areas: + for area in (self.config.skip_features or []) + self.skip_areas: try: for lint_test in self.template_features_yml[area]["linting"]: try: diff --git a/nf_core/pipelines/create/templatefeatures.yml b/nf_core/pipelines/create/templatefeatures.yml index 48ce20055..ac026c9e2 100644 --- a/nf_core/pipelines/create/templatefeatures.yml +++ b/nf_core/pipelines/create/templatefeatures.yml @@ -109,6 +109,8 @@ nf_core_configs: - "process.memory" - "process.time" - "custom_config" + - "params.custom_config_version" + - "params.custom_config_base" nfcore_pipelines: False custom_pipelines: True is_nfcore: From c3f669e704de117bff30eb569788669e0f8f26ce Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 14 Aug 2024 12:10:00 +0200 Subject: [PATCH 489/737] fix lint 
pipeline without code linters --- nf_core/pipelines/create/templatefeatures.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/nf_core/pipelines/create/templatefeatures.yml b/nf_core/pipelines/create/templatefeatures.yml index ac026c9e2..b97bf347a 100644 --- a/nf_core/pipelines/create/templatefeatures.yml +++ b/nf_core/pipelines/create/templatefeatures.yml @@ -162,6 +162,11 @@ code_linters: - pre-commit (https://pre-commit.com/): used to run all code-linters on every PR and on ever commit if you run `pre-commit install` to install it in your local repository. - editor-config (https://github.com/editorconfig-checker/editorconfig-checker): checks rules such as indentation or trailing spaces. - prettier (https://github.com/prettier/prettier): enforces a consistent style (indentation, quoting, line length, etc). + linting: + files_exist: + - ".editorconfig" + - ".prettierignore" + - ".prettierrc.yml" nfcore_pipelines: False custom_pipelines: True citations: From 6da1adc768e9032887422fd475c524b94e0b3bb2 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 14 Aug 2024 12:12:01 +0200 Subject: [PATCH 490/737] not run skip nf_core_configs with self_hosted_runner --- .github/workflows/create-test-lint-wf-template.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index 6b916fb79..fc6338ab4 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -69,7 +69,6 @@ jobs: - TEMPLATE: github - TEMPLATE: is_nfcore - TEMPLATE: nf_core_configs - runner: self-hosted profile: "self_hosted_runner" fail-fast: false From 598bd17acc9ebf949c3e56b057629e899c453e26 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 14 Aug 2024 12:20:20 +0200 Subject: [PATCH 491/737] create template with skip all only once --- .github/workflows/create-test-lint-wf-template.yml | 13 +++++++------ 1 file changed, 7 insertions(+), 6 
deletions(-) diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index fc6338ab4..70125a10f 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -98,15 +98,16 @@ jobs: version: latest-everything # Create template files - - name: Create template skip all (except github) + - name: Create template skip ${{ matrix.TEMPLATE }} run: | mkdir create-test-lint-wf export NXF_WORK=$(pwd) - printf "org: my-prefix\nskip_features: ${{ needs.prepare-matrix.outputs.all_features }}" > create-test-lint-wf/template_skip_all.yml - - - name: Create template skip ${{ matrix.TEMPLATE }} - run: | - printf "org: my-prefix\nskip_features: [${{ matrix.TEMPLATE }}]" > create-test-lint-wf/template_skip_${{ matrix.TEMPLATE }}.yml + if [ ${{ matrix.TEMPLATE }} == "all" ] + then + printf "org: my-prefix\nskip_features: ${{ needs.prepare-matrix.outputs.all_features }}" > create-test-lint-wf/template_skip_all.yml + else + printf "org: my-prefix\nskip_features: [${{ matrix.TEMPLATE }}]" > create-test-lint-wf/template_skip_${{ matrix.TEMPLATE }}.yml + fi # Create a pipeline from the template - name: create a pipeline from the template ${{ matrix.TEMPLATE }} From c038e81c303926f46cf52daa36ec09a9c95b39e5 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 14 Aug 2024 12:46:58 +0200 Subject: [PATCH 492/737] set is_nfcore false when needed --- nf_core/pipelines/create/create.py | 1 + 1 file changed, 1 insertion(+) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index e3f8dc697..dfdcf07a4 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -218,6 +218,7 @@ def obtain_jinja_params_dict( # Add is_nfcore as an area to skip for non-nf-core pipelines, to skip all nf-core files if not self.config.is_nfcore: skip_areas.append("is_nfcore") + jinja_params["is_nfcore"] = False # Set the last parameters 
based on the ones provided jinja_params["short_name"] = ( From 95a57cffa77e6b7a0918454acb04433c94cd5284 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 14 Aug 2024 13:33:09 +0200 Subject: [PATCH 493/737] run prettier on the pipeline output directory --- nf_core/pipelines/create/create.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index dfdcf07a4..05b04a542 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -368,9 +368,7 @@ def render_template(self) -> None: run_prettier_on_file(self.outdir / config_fn) # Run prettier on files - for file in self.outdir.iterdir(): - if file.is_file() and file.name.endswith(("yaml", "yml", "json")): - run_prettier_on_file(file) + run_prettier_on_file(self.outdir) def fix_linting(self): """ From a754fe0845f0e28ae0398625d8132a6047aa991e Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Wed, 14 Aug 2024 15:07:38 +0200 Subject: [PATCH 494/737] add schema checking for draft 2020-12 --- nf_core/pipelines/lint/schema_lint.py | 32 ++++++++++++++++++++++----- nf_core/pipelines/schema.py | 21 +++++++++++++----- requirements.txt | 2 +- 3 files changed, 44 insertions(+), 11 deletions(-) diff --git a/nf_core/pipelines/lint/schema_lint.py b/nf_core/pipelines/lint/schema_lint.py index 6786c5012..4007bf8fe 100644 --- a/nf_core/pipelines/lint/schema_lint.py +++ b/nf_core/pipelines/lint/schema_lint.py @@ -16,26 +16,26 @@ def schema_lint(self): The lint test checks the schema for the following: * Schema should be a valid JSON file - * Schema should adhere to `JSONSchema `_, Draft 7. + * Schema should adhere to `JSONSchema `_, Draft 7 or Draft 2020-12. 
* Parameters can be described in two places: * As ``properties`` in the top-level schema object - * As ``properties`` within subschemas listed in a top-level ``definitions`` objects + * As ``properties`` within subschemas listed in a top-level ``definitions``(draft 7) or ``$defs``(draft 2020-12) objects * The schema must describe at least one parameter * There must be no duplicate parameter IDs across the schema and definition subschema - * All subschema in ``definitions`` must be referenced in the top-level ``allOf`` key + * All subschema in ``definitions`` or ``$defs`` must be referenced in the top-level ``allOf`` key * The top-level ``allOf`` key must not describe any non-existent definitions * Default parameters in the schema must be valid * Core top-level schema attributes should exist and be set as follows: - * ``$schema``: ``https://json-schema.org/draft-07/schema`` + * ``$schema``: ``https://json-schema.org/draft-07/schema`` or ``https://json-schema.org/draft/2020-12/schema`` * ``$id``: URL to the raw schema file, eg. ``https://raw.githubusercontent.com/YOURPIPELINE/master/nextflow_schema.json`` * ``title``: ``YOURPIPELINE pipeline parameters`` * ``description``: The pipeline config ``manifest.description`` * That the ``input`` property is defined and has a mimetype. A list of common mimetypes can be found `here `_. - For example, an *extremely* minimal schema could look like this: + For example, an *extremely* minimal schema could look like this (draft 7): .. code-block:: json @@ -57,6 +57,28 @@ def schema_lint(self): "allOf": [{"$ref": "#/definitions/my_first_group"}] } + Or this (draft 2020-12): + + .. 
code-block:: json + + { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "https://raw.githubusercontent.com/YOURPIPELINE/master/nextflow_schema.json", + "title": "YOURPIPELINE pipeline parameters", + "description": "This pipeline is for testing", + "properties": { + "first_param": { "type": "string" } + }, + "$defs": { + "my_first_group": { + "properties": { + "second_param": { "type": "string" } + } + } + }, + "allOf": [{"$ref": "#/$defs/my_first_group"}] + } + .. tip:: You can check your pipeline schema without having to run the entire pipeline lint by running ``nf-core pipelines schema lint`` instead of ``nf-core pipelines lint`` """ diff --git a/nf_core/pipelines/schema.py b/nf_core/pipelines/schema.py index 7f562bff3..854bddccc 100644 --- a/nf_core/pipelines/schema.py +++ b/nf_core/pipelines/schema.py @@ -359,11 +359,22 @@ def validate_schema(self, schema=None): """ if schema is None: schema = self.schema - try: - jsonschema.Draft7Validator.check_schema(schema) - log.debug("JSON Schema Draft7 validated") - except jsonschema.exceptions.SchemaError as e: - raise AssertionError(f"Schema does not validate as Draft 7 JSON Schema:\n {e}") + + schema_draft = schema["$schema"] + if schema_draft == "https://json-schema.org/draft-07/schema": + try: + jsonschema.Draft7Validator.check_schema(schema) + log.debug("JSON Schema Draft7 validated") + except jsonschema.exceptions.SchemaError as e: + raise AssertionError(f"Schema does not validate as Draft 7 JSON Schema:\n {e}") + elif schema_draft == "https://json-schema.org/draft/2020-12/schema": + try: + jsonschema.Draft202012Validator.check_schema(schema) + log.debug("JSON Schema Draft2020-12 validated") + except jsonschema.exceptions.SchemaError as e: + raise AssertionError(f"Schema does not validate as Draft 2020-12 JSON Schema:\n {e}") + else: + raise AssertionError(f"Unsupported JSON schema draft detected: ${schema_draft}. 
Use draft 7 or 2020-12 instead.") param_keys = list(schema.get("properties", {}).keys()) num_params = len(param_keys) diff --git a/requirements.txt b/requirements.txt index fb658be2f..eba6460f0 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,7 +3,7 @@ filetype GitPython PyGithub jinja2 -jsonschema>=3.0 +jsonschema>=4.0 markdown>=3.3 packaging pillow From 464f16e982337699382e66e3b1da21af4f01dd7a Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Wed, 14 Aug 2024 15:26:52 +0200 Subject: [PATCH 495/737] update validate schema --- nf_core/pipelines/schema.py | 31 +++++++++++++++++++++++-------- 1 file changed, 23 insertions(+), 8 deletions(-) diff --git a/nf_core/pipelines/schema.py b/nf_core/pipelines/schema.py index 854bddccc..abdad719f 100644 --- a/nf_core/pipelines/schema.py +++ b/nf_core/pipelines/schema.py @@ -378,17 +378,32 @@ def validate_schema(self, schema=None): param_keys = list(schema.get("properties", {}).keys()) num_params = len(param_keys) - for d_key, d_schema in schema.get("definitions", {}).items(): + schema_defs = dict() + defs_notation = "" + if "$defs" in schema: + schema_defs = schema.get("$defs", {}).items() + defs_notation = "$defs" + elif "definitions" in schema: + schema_defs = schema.get("definitions", {}).items() + defs_notation = "definitions" + elif "defs" in schema: + # nf-schema v2.0.0 only supported defs. 
this has been changed to $defs in nf-schema v2.1.0 + # this line prevents the breakage of schemas created for v2.0.0 + schema_defs = schema.get("defs", {}).items() + defs_notation = "defs" + + for d_key, d_schema in schema_defs: # Check that this definition is mentioned in allOf if "allOf" not in schema: raise AssertionError("Schema has definitions, but no allOf key") in_allOf = False for allOf in schema.get("allOf", []): - if allOf["$ref"] == f"#/definitions/{d_key}": + if allOf["$ref"] == f"#/{defs_notation}/{d_key}": in_allOf = True if not in_allOf: - raise AssertionError(f"Definition subschema `{d_key}` not included in schema `allOf`") + raise AssertionError(f"Definition subschema `#{defs_notation}/{d_key}` not included in schema `allOf`") + # TODO add support for nested parameters for d_param_id in d_schema.get("properties", {}): # Check that we don't have any duplicate parameter IDs in different definitions if d_param_id in param_keys: @@ -398,11 +413,11 @@ def validate_schema(self, schema=None): # Check that everything in allOf exists for allOf in schema.get("allOf", []): - if "definitions" not in schema: - raise AssertionError("Schema has allOf, but no definitions") - def_key = allOf["$ref"][14:] - if def_key not in schema.get("definitions", {}): - raise AssertionError(f"Subschema `{def_key}` found in `allOf` but not `definitions`") + _, allOf_defs_notation, def_key = allOf["$ref"].split("/") # "#//" + if allOf_defs_notation not in schema: + raise AssertionError(f"Schema has allOf, but no {allOf_defs_notation}") + if def_key not in schema.get(allOf_defs_notation, {}): + raise AssertionError(f"Subschema `{def_key}` found in `allOf` but not `{allOf_defs_notation}`") # Check that the schema describes at least one parameter if num_params == 0: From 6658449cc892021782a22885c3db63c0215254b6 Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Wed, 14 Aug 2024 15:51:27 +0200 Subject: [PATCH 496/737] update more code to support draft 2020-12 --- 
nf_core/pipelines/schema.py | 55 ++++++++++++++++++++----------------- 1 file changed, 30 insertions(+), 25 deletions(-) diff --git a/nf_core/pipelines/schema.py b/nf_core/pipelines/schema.py index abdad719f..398783d5a 100644 --- a/nf_core/pipelines/schema.py +++ b/nf_core/pipelines/schema.py @@ -116,8 +116,26 @@ def load_schema(self): self.schema = json.load(fh) self.schema_defaults = {} self.schema_params = {} + if "$schema" not in self.schema: + raise AssertionError("Schema missing top-level `$schema` attribute") + self.schema_draft = self.schema["$schema"] + self.defs_notation = self.get_defs_notation() log.debug(f"JSON file loaded: {self.schema_filename}") + def get_defs_notation(self, schema=None): + if schema is None: + schema = self.schema + + if "$defs" in schema: + defs_notation = "$defs" + elif "definitions" in schema: + defs_notation = "definitions" + elif "defs" in schema: + # nf-schema v2.0.0 only supported defs. this has been changed to $defs in nf-schema v2.1.0 + # this line prevents the breakage of schemas created for v2.0.0 + defs_notation = "defs" + return defs_notation + def sanitise_param_default(self, param): """ Given a param, ensure that the default value is the correct variable type @@ -168,10 +186,11 @@ def get_schema_defaults(self) -> None: if param["default"] is not None: self.schema_defaults[p_key] = param["default"] + # TODO add support for nested parameters # Grouped schema properties in subschema definitions - for defn_name, definition in self.schema.get("definitions", {}).items(): + for defn_name, definition in self.schema.get(self.defs_notation, {}).items(): for p_key, param in definition.get("properties", {}).items(): - self.schema_params[p_key] = ("definitions", defn_name, "properties", p_key) + self.schema_params[p_key] = (self.defs_notation, defn_name, "properties", p_key) if "default" in param: param = self.sanitise_param_default(param) if param["default"] is not None: @@ -248,13 +267,14 @@ def validate_default_params(self): 
if self.schema is None: log.error("[red][✗] Pipeline schema not found") try: + # TODO add support for nested parameters # Make copy of schema and remove required flags schema_no_required = copy.deepcopy(self.schema) if "required" in schema_no_required: schema_no_required.pop("required") - for group_key, group in schema_no_required.get("definitions", {}).items(): + for group_key, group in schema_no_required.get(self.defs_notation, {}).items(): if "required" in group: - schema_no_required["definitions"][group_key].pop("required") + schema_no_required[self.defs_notation][group_key].pop("required") jsonschema.validate(self.schema_defaults, schema_no_required) except jsonschema.exceptions.ValidationError as e: raise AssertionError(f"Default parameters are invalid: {e.message}") @@ -360,6 +380,8 @@ def validate_schema(self, schema=None): if schema is None: schema = self.schema + if "$schema" not in schema: + raise AssertionError("Schema missing top-level `$schema` attribute") schema_draft = schema["$schema"] if schema_draft == "https://json-schema.org/draft-07/schema": try: @@ -374,25 +396,14 @@ def validate_schema(self, schema=None): except jsonschema.exceptions.SchemaError as e: raise AssertionError(f"Schema does not validate as Draft 2020-12 JSON Schema:\n {e}") else: - raise AssertionError(f"Unsupported JSON schema draft detected: ${schema_draft}. Use draft 7 or 2020-12 instead.") + raise AssertionError(f"Schema `$schema` should be `https://json-schema.org/draft/2020-12/schema` or `https://json-schema.org/draft-07/schema` \n Found `{schema_draft}`") param_keys = list(schema.get("properties", {}).keys()) num_params = len(param_keys) schema_defs = dict() - defs_notation = "" - if "$defs" in schema: - schema_defs = schema.get("$defs", {}).items() - defs_notation = "$defs" - elif "definitions" in schema: - schema_defs = schema.get("definitions", {}).items() - defs_notation = "definitions" - elif "defs" in schema: - # nf-schema v2.0.0 only supported defs. 
this has been changed to $defs in nf-schema v2.1.0 - # this line prevents the breakage of schemas created for v2.0.0 - schema_defs = schema.get("defs", {}).items() - defs_notation = "defs" + defs_notation = self.get_defs_notation(schema) - for d_key, d_schema in schema_defs: + for d_key, d_schema in schema.get(defs_notation, {}).items(): # Check that this definition is mentioned in allOf if "allOf" not in schema: raise AssertionError("Schema has definitions, but no allOf key") @@ -428,7 +439,7 @@ def validate_schema(self, schema=None): def validate_schema_title_description(self, schema=None): """ Extra validation command for linting. - Checks that the schema "$id", "title" and "description" attributes match the piipeline config. + Checks that the schema "$id", "title" and "description" attributes match the pipeline config. """ if schema is None: schema = self.schema @@ -436,12 +447,6 @@ def validate_schema_title_description(self, schema=None): log.debug("Pipeline schema not set - skipping validation of top-level attributes") return None - if "$schema" not in self.schema: - raise AssertionError("Schema missing top-level `$schema` attribute") - schema_attr = "http://json-schema.org/draft-07/schema" - if self.schema["$schema"] != schema_attr: - raise AssertionError(f"Schema `$schema` should be `{schema_attr}`\n Found `{self.schema['$schema']}`") - if self.pipeline_manifest == {}: self.get_wf_params() From 6ecde202d05baa2944f582f0b0835ea73765006b Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Wed, 14 Aug 2024 16:11:47 +0200 Subject: [PATCH 497/737] fix draft 7 schema using http --- nf_core/pipelines/schema.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipelines/schema.py b/nf_core/pipelines/schema.py index 398783d5a..e1931b900 100644 --- a/nf_core/pipelines/schema.py +++ b/nf_core/pipelines/schema.py @@ -383,7 +383,7 @@ def validate_schema(self, schema=None): if "$schema" not in schema: raise AssertionError("Schema missing 
top-level `$schema` attribute") schema_draft = schema["$schema"] - if schema_draft == "https://json-schema.org/draft-07/schema": + if schema_draft == "https://json-schema.org/draft-07/schema" or schema_draft == "http://json-schema.org/draft-07/schema": try: jsonschema.Draft7Validator.check_schema(schema) log.debug("JSON Schema Draft7 validated") From 36fad24c7eda672fce4a885fe2e7d5610b022a9c Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Wed, 14 Aug 2024 16:18:08 +0200 Subject: [PATCH 498/737] ruff --- nf_core/pipelines/schema.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/nf_core/pipelines/schema.py b/nf_core/pipelines/schema.py index e1931b900..188861fff 100644 --- a/nf_core/pipelines/schema.py +++ b/nf_core/pipelines/schema.py @@ -400,7 +400,6 @@ def validate_schema(self, schema=None): param_keys = list(schema.get("properties", {}).keys()) num_params = len(param_keys) - schema_defs = dict() defs_notation = self.get_defs_notation(schema) for d_key, d_schema in schema.get(defs_notation, {}).items(): @@ -424,11 +423,11 @@ def validate_schema(self, schema=None): # Check that everything in allOf exists for allOf in schema.get("allOf", []): - _, allOf_defs_notation, def_key = allOf["$ref"].split("/") # "#//" - if allOf_defs_notation not in schema: - raise AssertionError(f"Schema has allOf, but no {allOf_defs_notation}") - if def_key not in schema.get(allOf_defs_notation, {}): - raise AssertionError(f"Subschema `{def_key}` found in `allOf` but not `{allOf_defs_notation}`") + _, allof_defs_notation, def_key = allOf["$ref"].split("/") # "#//" + if allof_defs_notation not in schema: + raise AssertionError(f"Schema has allOf, but no {allof_defs_notation}") + if def_key not in schema.get(allof_defs_notation, {}): + raise AssertionError(f"Subschema `{def_key}` found in `allOf` but not `{allof_defs_notation}`") # Check that the schema describes at least one parameter if num_params == 0: From 6777694b20726f333dd8eae416917c3324c3e158 
Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Wed, 14 Aug 2024 16:33:00 +0200 Subject: [PATCH 499/737] fix tests + add draft 2020-12 as default draft --- nf_core/pipelines/schema.py | 2 ++ tests/pipelines/test_schema.py | 5 ++++- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/nf_core/pipelines/schema.py b/nf_core/pipelines/schema.py index 188861fff..1555a4198 100644 --- a/nf_core/pipelines/schema.py +++ b/nf_core/pipelines/schema.py @@ -46,6 +46,8 @@ def __init__(self): self.web_schema_build_url = "https://nf-co.re/pipeline_schema_builder" self.web_schema_build_web_url = None self.web_schema_build_api_url = None + self.schema_draft = "https://json-schema.org/draft/2020-12/schema" + self.defs_notation = "$defs" def get_schema_path( self, path: Union[str, Path], local_only: bool = False, revision: Union[str, None] = None diff --git a/tests/pipelines/test_schema.py b/tests/pipelines/test_schema.py index 633de3db6..777d959e6 100644 --- a/tests/pipelines/test_schema.py +++ b/tests/pipelines/test_schema.py @@ -175,6 +175,7 @@ def test_validate_schema_fail_duplicate_ids(self): Check that the schema validation fails when we have duplicate IDs in definition subschema """ self.schema_obj.schema = { + "$schema": "http://json-schema.org/draft-07/schema", "definitions": {"groupOne": {"properties": {"foo": {}}}, "groupTwo": {"properties": {"foo": {}}}}, "allOf": [{"$ref": "#/definitions/groupOne"}, {"$ref": "#/definitions/groupTwo"}], } @@ -187,18 +188,20 @@ def test_validate_schema_fail_missing_def(self): Check that the schema validation fails when we a definition in allOf is not in definitions """ self.schema_obj.schema = { + "$schema": "http://json-schema.org/draft-07/schema", "definitions": {"groupOne": {"properties": {"foo": {}}}, "groupTwo": {"properties": {"bar": {}}}}, "allOf": [{"$ref": "#/definitions/groupOne"}], } with pytest.raises(AssertionError) as exc_info: self.schema_obj.validate_schema(self.schema_obj.schema) - assert 
exc_info.value.args[0] == "Definition subschema `groupTwo` not included in schema `allOf`" + assert exc_info.value.args[0] == "Definition subschema `#/definitions/groupTwo` not included in schema `allOf`" def test_validate_schema_fail_unexpected_allof(self): """ Check that the schema validation fails when we an unrecognised definition is in allOf """ self.schema_obj.schema = { + "$schema": "http://json-schema.org/draft-07/schema", "definitions": {"groupOne": {"properties": {"foo": {}}}, "groupTwo": {"properties": {"bar": {}}}}, "allOf": [ {"$ref": "#/definitions/groupOne"}, From 0dfd15ded935054fc8d185b5fd36223505441ff7 Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Wed, 14 Aug 2024 16:34:01 +0200 Subject: [PATCH 500/737] fix typo --- nf_core/pipelines/schema.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipelines/schema.py b/nf_core/pipelines/schema.py index 1555a4198..ba5af51f9 100644 --- a/nf_core/pipelines/schema.py +++ b/nf_core/pipelines/schema.py @@ -413,7 +413,7 @@ def validate_schema(self, schema=None): if allOf["$ref"] == f"#/{defs_notation}/{d_key}": in_allOf = True if not in_allOf: - raise AssertionError(f"Definition subschema `#{defs_notation}/{d_key}` not included in schema `allOf`") + raise AssertionError(f"Definition subschema `#/{defs_notation}/{d_key}` not included in schema `allOf`") # TODO add support for nested parameters for d_param_id in d_schema.get("properties", {}): From 7f29f373a82ed1ada16b6b493f10d724a4b879cc Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Wed, 14 Aug 2024 16:37:06 +0200 Subject: [PATCH 501/737] ruff --- nf_core/pipelines/schema.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/nf_core/pipelines/schema.py b/nf_core/pipelines/schema.py index ba5af51f9..43058d4e6 100644 --- a/nf_core/pipelines/schema.py +++ b/nf_core/pipelines/schema.py @@ -119,7 +119,7 @@ def load_schema(self): self.schema_defaults = {} self.schema_params = {} if 
"$schema" not in self.schema: - raise AssertionError("Schema missing top-level `$schema` attribute") + raise AssertionError("Schema missing top-level `$schema` attribute") self.schema_draft = self.schema["$schema"] self.defs_notation = self.get_defs_notation() log.debug(f"JSON file loaded: {self.schema_filename}") @@ -385,7 +385,10 @@ def validate_schema(self, schema=None): if "$schema" not in schema: raise AssertionError("Schema missing top-level `$schema` attribute") schema_draft = schema["$schema"] - if schema_draft == "https://json-schema.org/draft-07/schema" or schema_draft == "http://json-schema.org/draft-07/schema": + if ( + schema_draft == "https://json-schema.org/draft-07/schema" + or schema_draft == "http://json-schema.org/draft-07/schema" + ): try: jsonschema.Draft7Validator.check_schema(schema) log.debug("JSON Schema Draft7 validated") @@ -398,7 +401,9 @@ def validate_schema(self, schema=None): except jsonschema.exceptions.SchemaError as e: raise AssertionError(f"Schema does not validate as Draft 2020-12 JSON Schema:\n {e}") else: - raise AssertionError(f"Schema `$schema` should be `https://json-schema.org/draft/2020-12/schema` or `https://json-schema.org/draft-07/schema` \n Found `{schema_draft}`") + raise AssertionError( + f"Schema `$schema` should be `https://json-schema.org/draft/2020-12/schema` or `https://json-schema.org/draft-07/schema` \n Found `{schema_draft}`" + ) param_keys = list(schema.get("properties", {}).keys()) num_params = len(param_keys) @@ -425,7 +430,7 @@ def validate_schema(self, schema=None): # Check that everything in allOf exists for allOf in schema.get("allOf", []): - _, allof_defs_notation, def_key = allOf["$ref"].split("/") # "#//" + _, allof_defs_notation, def_key = allOf["$ref"].split("/") # "#//" if allof_defs_notation not in schema: raise AssertionError(f"Schema has allOf, but no {allof_defs_notation}") if def_key not in schema.get(allof_defs_notation, {}): From 730c6779779d4e14512df25c043eb02efe6d1659 Mon Sep 17 00:00:00 
2001 From: Nicolas Vannieuwkerke Date: Wed, 14 Aug 2024 17:34:53 +0200 Subject: [PATCH 502/737] add checks for nf-validation and nf-schema plugin config --- nf_core/pipelines/lint/nextflow_config.py | 33 +++++++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/nf_core/pipelines/lint/nextflow_config.py b/nf_core/pipelines/lint/nextflow_config.py index 96323af94..8fe933d10 100644 --- a/nf_core/pipelines/lint/nextflow_config.py +++ b/nf_core/pipelines/lint/nextflow_config.py @@ -336,6 +336,7 @@ def nextflow_config(self) -> Dict[str, List[str]]: ) # Check for the availability of the "test" configuration profile by parsing nextflow.config + # Also check for the presence of nf-validation/nf-schema and check if they have pinned versions with open(Path(self.wf_path, "nextflow.config")) as f: content = f.read() @@ -363,6 +364,38 @@ def nextflow_config(self) -> Dict[str, List[str]]: else: failed.append("nextflow.config does not contain configuration profile `test`") + match_plugins = re.search(r"\bplugins\s*\{([^}]+)}", cleaned_content, re.MULTILINE) + if not match_plugins: + failed.append( + "nextflow.config does not contain `plugins` scope, but `nf-validation` or `nf-schema` plugins are required" + ) + else: + found_plugins = {} + for line in match_plugins.group(1).split("\n"): + cleaned_line = line.split("//")[0].strip().replace("\"", "'") + if "id" not in line: continue + match_line = re.search(r"\bid\s'([^']+)'", cleaned_line) + if not match_line: + failed.append(f"nextflow.config contains an invalid plugins identifier: {cleaned_line}") + continue + plugin = match_line.group(1) + name = plugin.split("@")[0] + version = "" + if "@" in plugin: + version = plugin.split("@")[1] + found_plugins[name] = version + + if len(found_plugins) == 0: + failed.append("nextflow.config contains an empty plugins scope") + elif "nf-validation" in found_plugins and "nf-schema" in found_plugins: + failed.append("nextflow.config contains both nf-validation and nf-schema") + elif 
"nf-validation" in found_plugins and found_plugins["nf-validation"] == "": + failed.append("nextflow.config contains an unpinned version of nf-validation") + elif "nf-schema" in found_plugins and found_plugins["nf-schema"] == "": + failed.append("nextflow.config contains an unpinned version of nf-schema") + elif "nf-validation" not in found_plugins and "nf-schema" not in found_plugins: + failed.append("nextflow.config does not contain `nf-validation` or `nf-schema` in the plugins scope") + # Check that the default values in nextflow.config match the default values defined in the nextflow_schema.json ignore_defaults = [] for item in ignore_configs: From 7fbbff7a412c64ef67417dd60a722d67f6d37647 Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Wed, 14 Aug 2024 17:39:10 +0200 Subject: [PATCH 503/737] linting fully works now! --- nf_core/pipelines/lint/schema_description.py | 5 +++-- nf_core/pipelines/schema.py | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/nf_core/pipelines/lint/schema_description.py b/nf_core/pipelines/lint/schema_description.py index d617e4094..b586cc524 100644 --- a/nf_core/pipelines/lint/schema_description.py +++ b/nf_core/pipelines/lint/schema_description.py @@ -36,8 +36,9 @@ def schema_description(self): warned.append(f"Ungrouped param in schema: `{up}`") # Iterate over groups and add warning for parameters without a description - for group_key in self.schema_obj.schema["definitions"].keys(): - group = self.schema_obj.schema["definitions"][group_key] + defs_notation = self.schema_obj.defs_notation + for group_key in self.schema_obj.schema[defs_notation].keys(): + group = self.schema_obj.schema[defs_notation][group_key] for param_key, param in group["properties"].items(): if param_key in ignore_params: ignored.append(f"Ignoring description check for param in schema: `{param_key}`") diff --git a/nf_core/pipelines/schema.py b/nf_core/pipelines/schema.py index 43058d4e6..c70332beb 100644 --- 
a/nf_core/pipelines/schema.py +++ b/nf_core/pipelines/schema.py @@ -203,7 +203,7 @@ def get_schema_types(self) -> None: for name, param in self.schema.get("properties", {}).items(): if "type" in param: self.schema_types[name] = param["type"] - for _, definition in self.schema.get("definitions", {}).items(): + for _, definition in self.schema.get(self.defs_notation, {}).items(): for name, param in definition.get("properties", {}).items(): if "type" in param: self.schema_types[name] = param["type"] From 1c69f9d84e5c342a54ed733c4497de53ee8c1cea Mon Sep 17 00:00:00 2001 From: laurencekuhl Date: Thu, 15 Aug 2024 15:37:21 +0200 Subject: [PATCH 504/737] Add a verbose click option --- nf_core/__main__.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 153b8b7e7..7da60180c 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -1136,6 +1136,13 @@ def command_modules_create( @modules.command("test") @click.pass_context @click.argument("tool", type=str, callback=normalize_case, required=False, metavar=" or ") +@click.option( + "-v", + "--verbose", + is_flag=True, + default=False, + help="Print verbose output to the console.", +) @click.option( "-d", "--dir", @@ -1171,11 +1178,11 @@ def command_modules_create( default=False, help="Migrate a module with pytest tests to nf-test", ) -def command_modules_test(ctx, tool, directory, no_prompts, update, once, profile, migrate_pytest): +def command_modules_test(ctx, tool, directory, no_prompts, update, once, profile, migrate_pytest, verbose): """ Run nf-test for a module. 
""" - modules_test(ctx, tool, directory, no_prompts, update, once, profile, migrate_pytest) + modules_test(ctx, tool, directory, no_prompts, update, once, profile, migrate_pytest, verbose) # nf-core modules lint From 9f6c1f4710ee10d094b0b9b1e011c891d62e5b8c Mon Sep 17 00:00:00 2001 From: laurencekuhl Date: Thu, 15 Aug 2024 16:44:24 +0200 Subject: [PATCH 505/737] Overwrite the verbose in ctx in case the user uses it --- nf_core/__main__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 7da60180c..64956e5f3 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -1182,7 +1182,8 @@ def command_modules_test(ctx, tool, directory, no_prompts, update, once, profile """ Run nf-test for a module. """ - modules_test(ctx, tool, directory, no_prompts, update, once, profile, migrate_pytest, verbose) + ctx.obj['verbose'] = verbose + modules_test(ctx, tool, directory, no_prompts, update, once, profile, migrate_pytest) # nf-core modules lint From 7d873b5a456e6a13f77c4a8a6ff8f947b5eafc04 Mon Sep 17 00:00:00 2001 From: laurencekuhl Date: Thu, 15 Aug 2024 16:44:46 +0200 Subject: [PATCH 506/737] Overwrite the verbose in ctx in case the user uses it --- nf_core/__main__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 64956e5f3..3b56bda66 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -1182,7 +1182,8 @@ def command_modules_test(ctx, tool, directory, no_prompts, update, once, profile """ Run nf-test for a module. 
""" - ctx.obj['verbose'] = verbose + if verbose: + ctx.obj['verbose'] = verbose modules_test(ctx, tool, directory, no_prompts, update, once, profile, migrate_pytest) From 22a18a4273bd4db1c2343a90742c1b5b9a3b62eb Mon Sep 17 00:00:00 2001 From: laurencekuhl Date: Thu, 15 Aug 2024 16:48:05 +0200 Subject: [PATCH 507/737] add matthias' suggestion woops --- nf_core/__main__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 3b56bda66..23930ccd3 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -1141,7 +1141,7 @@ def command_modules_create( "--verbose", is_flag=True, default=False, - help="Print verbose output to the console.", + help="Print verbose output to the console. Sets `--debug` inside the nf-test command.", ) @click.option( "-d", From 7aead16a057bc0e30344aec26d4e12d9512bfa11 Mon Sep 17 00:00:00 2001 From: laurencekuhl Date: Thu, 15 Aug 2024 17:26:25 +0200 Subject: [PATCH 508/737] Ran pre commit --- nf_core/__main__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 23930ccd3..0efea13ec 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -1183,7 +1183,7 @@ def command_modules_test(ctx, tool, directory, no_prompts, update, once, profile Run nf-test for a module. 
""" if verbose: - ctx.obj['verbose'] = verbose + ctx.obj["verbose"] = verbose modules_test(ctx, tool, directory, no_prompts, update, once, profile, migrate_pytest) From e5714d3cccdfce236c6c3ee3878282761d598be2 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Fri, 16 Aug 2024 04:56:51 +0000 Subject: [PATCH 509/737] Update pre-commit hook astral-sh/ruff-pre-commit to v0.6.0 --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 815e98027..4f08d8419 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.5.6 + rev: v0.6.0 hooks: - id: ruff # linter args: [--fix, --exit-non-zero-on-fix] # sort imports and fix From 5da0e06d4f7b55e1ec8851c85380b36dbad7919e Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Fri, 16 Aug 2024 04:57:40 +0000 Subject: [PATCH 510/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index bc78a1628..ddc346b9d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -69,6 +69,7 @@ - Pipelines: allow numbers in custom pipeline name ([#3094](https://github.com/nf-core/tools/pull/3094)) - Add bot action to update textual snapshots and write bot documentation ([#3102](https://github.com/nf-core/tools/pull/3102)) - Components: allow spaces at the betinning of include statements ([#3115](https://github.com/nf-core/tools/pull/3115)) +- Update pre-commit hook astral-sh/ruff-pre-commit to v0.6.0 ([#3122](https://github.com/nf-core/tools/pull/3122)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From 85461c5e47c7527e4879513e90c7150b19bb9013 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Fri, 16 Aug 2024 15:34:04 +0000 Subject: [PATCH 
511/737] Update python:3.12-slim Docker digest to 59c7332 --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index c88abcb1c..fb1a86793 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.12-slim@sha256:740d94a19218c8dd584b92f804b1158f85b0d241e5215ea26ed2dcade2b9d138 +FROM python:3.12-slim@sha256:59c7332a4a24373861c4a5f0eec2c92b87e3efeb8ddef011744ef9a751b1d11c LABEL authors="phil.ewels@seqera.io,erik.danielsson@scilifelab.se" \ description="Docker image containing requirements for nf-core/tools" From 129858342d0529ef59f1f04f7da0332fec038a1a Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Sat, 17 Aug 2024 16:36:57 +0000 Subject: [PATCH 512/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index ddc346b9d..0a450a672 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -70,6 +70,7 @@ - Add bot action to update textual snapshots and write bot documentation ([#3102](https://github.com/nf-core/tools/pull/3102)) - Components: allow spaces at the betinning of include statements ([#3115](https://github.com/nf-core/tools/pull/3115)) - Update pre-commit hook astral-sh/ruff-pre-commit to v0.6.0 ([#3122](https://github.com/nf-core/tools/pull/3122)) +- Update python:3.12-slim Docker digest to 59c7332 ([#3124](https://github.com/nf-core/tools/pull/3124)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From 0838d6bb040b8218493fad13c5fc33fa355b84af Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Mon, 19 Aug 2024 14:58:27 +0200 Subject: [PATCH 513/737] warning for nf-validation --- nf_core/pipelines/lint/nextflow_config.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/nf_core/pipelines/lint/nextflow_config.py b/nf_core/pipelines/lint/nextflow_config.py index 8fe933d10..84bb492cb 100644 --- a/nf_core/pipelines/lint/nextflow_config.py +++ 
b/nf_core/pipelines/lint/nextflow_config.py @@ -396,6 +396,9 @@ def nextflow_config(self) -> Dict[str, List[str]]: elif "nf-validation" not in found_plugins and "nf-schema" not in found_plugins: failed.append("nextflow.config does not contain `nf-validation` or `nf-schema` in the plugins scope") + if "nf-validation" in found_plugins: + warned.append("nf-validation has been detected in the pipeline. Please migrate to nf-schema: https://nextflow-io.github.io/nf-schema/latest/migration_guide/") + # Check that the default values in nextflow.config match the default values defined in the nextflow_schema.json ignore_defaults = [] for item in ignore_configs: From 8559c2d6a18624c5096957e6a941011c8725e580 Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Mon, 19 Aug 2024 16:54:38 +0200 Subject: [PATCH 514/737] fetch plugin on schema filename setting --- nf_core/pipelines/lint/nextflow_config.py | 1 + nf_core/pipelines/schema.py | 81 +++++++++++++++-------- 2 files changed, 54 insertions(+), 28 deletions(-) diff --git a/nf_core/pipelines/lint/nextflow_config.py b/nf_core/pipelines/lint/nextflow_config.py index 84bb492cb..a5b7cf325 100644 --- a/nf_core/pipelines/lint/nextflow_config.py +++ b/nf_core/pipelines/lint/nextflow_config.py @@ -364,6 +364,7 @@ def nextflow_config(self) -> Dict[str, List[str]]: else: failed.append("nextflow.config does not contain configuration profile `test`") + # Lint for nf-validation and nf-schema match_plugins = re.search(r"\bplugins\s*\{([^}]+)}", cleaned_content, re.MULTILINE) if not match_plugins: failed.append( diff --git a/nf_core/pipelines/schema.py b/nf_core/pipelines/schema.py index c70332beb..77f421724 100644 --- a/nf_core/pipelines/schema.py +++ b/nf_core/pipelines/schema.py @@ -5,6 +5,7 @@ import logging import tempfile import webbrowser +import re from pathlib import Path from typing import Union @@ -32,7 +33,7 @@ def __init__(self): self.schema = {} self.pipeline_dir = "" - self.schema_filename = "" + self._schema_filename = 
"" self.schema_defaults = {} self.schema_types = {} self.schema_params = {} @@ -46,8 +47,46 @@ def __init__(self): self.web_schema_build_url = "https://nf-co.re/pipeline_schema_builder" self.web_schema_build_web_url = None self.web_schema_build_api_url = None - self.schema_draft = "https://json-schema.org/draft/2020-12/schema" - self.defs_notation = "$defs" + self.validation_plugin = None + self.schema_draft = None + self.defs_notation = None + + # Update the validation plugin code everytime the schema gets changed + def set_schema_filename(self, schema: str) -> None: + self._schema_filename = schema + basepath = "/".join(str(schema).split("/")[:-1]) + config = f"{basepath}/nextflow.config" if basepath != "" else "nextflow.config" + self._update_validation_plugin_from_config(config) + + def get_schema_filename(self) -> str: + return self._schema_filename + + def del_schema_filename(self) -> None: + del self._schema_filename + + schema_filename = property(get_schema_filename, set_schema_filename, del_schema_filename) + + def _update_validation_plugin_from_config(self, config: str) -> None: + plugin = "nf-schema" + with open(Path(config)) as conf: + nf_schema_pattern = re.compile("id\s*[\"']nf-schema", re.MULTILINE) + nf_validation_pattern = re.compile("id\s*[\"']nf-validation", re.MULTILINE) + config_content = conf.read() + if re.search(nf_validation_pattern, config_content): + plugin = "nf-validation" + elif re.search(nf_schema_pattern, config_content): + plugin = "nf-schema" + else: + log.warning("Could not find nf-schema or nf-validation in the pipeline config. 
Defaulting to nf-schema") + + self.validation_plugin = plugin + # Previous versions of nf-schema used "defs", but it's advised to use "$defs" + if plugin == "nf-schema": + self.defs_notation = "$defs" + self.schema_draft = "https://json-schema.org/draft/2020-12/schema" + else: + self.defs_notation = "definitions" + self.schema_draft = "https://json-schema.org/draft-07/schema" def get_schema_path( self, path: Union[str, Path], local_only: bool = False, revision: Union[str, None] = None @@ -120,24 +159,8 @@ def load_schema(self): self.schema_params = {} if "$schema" not in self.schema: raise AssertionError("Schema missing top-level `$schema` attribute") - self.schema_draft = self.schema["$schema"] - self.defs_notation = self.get_defs_notation() log.debug(f"JSON file loaded: {self.schema_filename}") - def get_defs_notation(self, schema=None): - if schema is None: - schema = self.schema - - if "$defs" in schema: - defs_notation = "$defs" - elif "definitions" in schema: - defs_notation = "definitions" - elif "defs" in schema: - # nf-schema v2.0.0 only supported defs. 
this has been changed to $defs in nf-schema v2.1.0 - # this line prevents the breakage of schemas created for v2.0.0 - defs_notation = "defs" - return defs_notation - def sanitise_param_default(self, param): """ Given a param, ensure that the default value is the correct variable type @@ -385,16 +408,15 @@ def validate_schema(self, schema=None): if "$schema" not in schema: raise AssertionError("Schema missing top-level `$schema` attribute") schema_draft = schema["$schema"] - if ( - schema_draft == "https://json-schema.org/draft-07/schema" - or schema_draft == "http://json-schema.org/draft-07/schema" - ): + if self.schema_draft != schema_draft: + raise AssertionError(f"Schema is using the wrong draft: {schema_draft}, should be {self.schema_draft}") + if self.schema_draft == "https://json-schema.org/draft-07/schema": try: jsonschema.Draft7Validator.check_schema(schema) log.debug("JSON Schema Draft7 validated") except jsonschema.exceptions.SchemaError as e: raise AssertionError(f"Schema does not validate as Draft 7 JSON Schema:\n {e}") - elif schema_draft == "https://json-schema.org/draft/2020-12/schema": + elif self.schema_draft == "https://json-schema.org/draft/2020-12/schema": try: jsonschema.Draft202012Validator.check_schema(schema) log.debug("JSON Schema Draft2020-12 validated") @@ -407,18 +429,21 @@ def validate_schema(self, schema=None): param_keys = list(schema.get("properties", {}).keys()) num_params = len(param_keys) - defs_notation = self.get_defs_notation(schema) - for d_key, d_schema in schema.get(defs_notation, {}).items(): + print(self.defs_notation) + + for d_key, d_schema in schema.get(self.defs_notation, {}).items(): + print(d_key) + print(d_schema) # Check that this definition is mentioned in allOf if "allOf" not in schema: raise AssertionError("Schema has definitions, but no allOf key") in_allOf = False for allOf in schema.get("allOf", []): - if allOf["$ref"] == f"#/{defs_notation}/{d_key}": + if allOf["$ref"] == f"#/{self.defs_notation}/{d_key}": 
in_allOf = True if not in_allOf: - raise AssertionError(f"Definition subschema `#/{defs_notation}/{d_key}` not included in schema `allOf`") + raise AssertionError(f"Definition subschema `#/{self.defs_notation}/{d_key}` not included in schema `allOf`") # TODO add support for nested parameters for d_param_id in d_schema.get("properties", {}): From 0296f7d8fa6c5cc7a8dac62b450a8babcd77bab7 Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Mon, 19 Aug 2024 17:00:33 +0200 Subject: [PATCH 515/737] add a check for defs usage instead of $defs --- nf_core/pipelines/schema.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/nf_core/pipelines/schema.py b/nf_core/pipelines/schema.py index 77f421724..9c65f34fd 100644 --- a/nf_core/pipelines/schema.py +++ b/nf_core/pipelines/schema.py @@ -430,11 +430,11 @@ def validate_schema(self, schema=None): param_keys = list(schema.get("properties", {}).keys()) num_params = len(param_keys) - print(self.defs_notation) + # Add a small check for older nf-schema JSON schemas + if "defs" in schema: + raise AssertionError(f'Using "defs" for schema definitions is not supported. 
Please use {self.defs_notation} instead') for d_key, d_schema in schema.get(self.defs_notation, {}).items(): - print(d_key) - print(d_schema) # Check that this definition is mentioned in allOf if "allOf" not in schema: raise AssertionError("Schema has definitions, but no allOf key") From 0b8d04f89eb7f348bce87cc93f04d983e48e218f Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Mon, 19 Aug 2024 17:16:31 +0200 Subject: [PATCH 516/737] change all definitions to self.defs_notation --- nf_core/pipelines/schema.py | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/nf_core/pipelines/schema.py b/nf_core/pipelines/schema.py index 9c65f34fd..9db945751 100644 --- a/nf_core/pipelines/schema.py +++ b/nf_core/pipelines/schema.py @@ -235,7 +235,7 @@ def save_schema(self, suppress_logging=False): """Save a pipeline schema to a file""" # Write results to a JSON file num_params = len(self.schema.get("properties", {})) - num_params += sum(len(d.get("properties", {})) for d in self.schema.get("definitions", {}).values()) + num_params += sum(len(d.get("properties", {})) for d in self.schema.get(self.defs_notation, {}).values()) if not suppress_logging: log.info(f"Writing schema with {num_params} params: '{self.schema_filename}'") dump_json_with_prettier(self.schema_filename, self.schema) @@ -321,7 +321,7 @@ def validate_default_params(self): params_ignore = [] # Go over group keys - for group_key, group in schema_no_required.get("definitions", {}).items(): + for group_key, group in schema_no_required.get(self.defs_notation, {}).items(): group_properties = group.get("properties") for param in group_properties: if param in params_ignore: @@ -527,9 +527,9 @@ def check_for_input_mimetype(self): if "input" not in self.schema_params: raise LookupError("Parameter `input` not found in schema") # Check that the input parameter is defined in the right place - if "input" not in self.schema.get("definitions", {}).get("input_output_options", 
{}).get("properties", {}): + if "input" not in self.schema.get(self.defs_notation, {}).get("input_output_options", {}).get("properties", {}): raise LookupError("Parameter `input` is not defined in the correct subschema (input_output_options)") - input_entry = self.schema["definitions"]["input_output_options"]["properties"]["input"] + input_entry = self.schema[self.defs_notation]["input_output_options"]["properties"]["input"] if "mimetype" not in input_entry: return None mimetype = input_entry["mimetype"] @@ -581,7 +581,7 @@ def schema_to_markdown(self, columns): out = f"# {self.schema['title']}\n\n" out += f"{self.schema['description']}\n" # Grouped parameters - for definition in self.schema.get("definitions", {}).values(): + for definition in self.schema.get(self.defs_notation, {}).values(): out += f"\n## {definition.get('title', {})}\n\n" out += f"{definition.get('description', '')}\n\n" required = definition.get("required", []) @@ -763,15 +763,15 @@ def remove_schema_empty_definitions(self): """ # Identify and remove empty definitions from the schema empty_definitions = [] - for d_key, d_schema in list(self.schema.get("definitions", {}).items()): + for d_key, d_schema in list(self.schema.get(self.defs_notation, {}).items()): if not d_schema.get("properties"): - del self.schema["definitions"][d_key] + del self.schema[self.defs_notation][d_key] empty_definitions.append(d_key) log.warning(f"Removing empty group: '{d_key}'") # Remove "allOf" group with empty definitions from the schema for d_key in empty_definitions: - allOf = {"$ref": f"#/definitions/{d_key}"} + allOf = {"$ref": f"#/{self.defs_notation}/{d_key}"} if allOf in self.schema.get("allOf", []): self.schema["allOf"].remove(allOf) @@ -780,8 +780,8 @@ def remove_schema_empty_definitions(self): del self.schema["allOf"] # If we don't have anything left in "definitions", remove it - if self.schema.get("definitions") == {}: - del self.schema["definitions"] + if self.schema.get(self.defs_notation) == {}: + del 
self.schema[self.defs_notation] def remove_schema_notfound_configs(self): """ @@ -791,9 +791,9 @@ def remove_schema_notfound_configs(self): # Top-level properties self.schema, params_removed = self.remove_schema_notfound_configs_single_schema(self.schema) # Sub-schemas in definitions - for d_key, definition in self.schema.get("definitions", {}).items(): + for d_key, definition in self.schema.get(self.defs_notation, {}).items(): cleaned_schema, p_removed = self.remove_schema_notfound_configs_single_schema(definition) - self.schema["definitions"][d_key] = cleaned_schema + self.schema[self.defs_notation][d_key] = cleaned_schema params_removed.extend(p_removed) return params_removed From 544381310c6e5ac57664524f63105441246c34fd Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Mon, 19 Aug 2024 18:33:52 +0200 Subject: [PATCH 517/737] migrate template part 1 --- nf_core/pipeline-template/conf/colors.config | 61 ++++++++ nf_core/pipeline-template/nextflow.config | 44 ++++-- .../pipeline-template/nextflow_schema.json | 55 +++---- .../utils_nfcore_pipeline_pipeline/main.nf | 17 +- .../nf-core/utils_nfcore_pipeline/main.nf | 46 ------ .../nf-core/utils_nfvalidation_plugin/main.nf | 25 +-- .../tests/main.nf.test | 145 +----------------- .../tests/nextflow.config | 1 + .../pipeline-template/workflows/pipeline.nf | 2 +- 9 files changed, 130 insertions(+), 266 deletions(-) create mode 100644 nf_core/pipeline-template/conf/colors.config create mode 100644 nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/nextflow.config diff --git a/nf_core/pipeline-template/conf/colors.config b/nf_core/pipeline-template/conf/colors.config new file mode 100644 index 000000000..00f296326 --- /dev/null +++ b/nf_core/pipeline-template/conf/colors.config @@ -0,0 +1,61 @@ +// For now, there is no easy way to set monochromeLogs +colors { + // Reset / Meta + reset = "\033[0m" + bold = "\033[1m" + dim = "\033[2m" + underlined = "\033[4m" + blink = "\033[5m" + reverse 
= "\033[7m" + hidden = "\033[8m" + + // Regular Colors + black = "\033[0;30m" + red = "\033[0;31m" + green = "\033[0;32m" + yellow = "\033[0;33m" + blue = "\033[0;34m" + purple = "\033[0;35m" + cyan = "\033[0;36m" + white = "\033[0;37m" + + // Bold + bblack = "\033[1;30m" + bred = "\033[1;31m" + bgreen = "\033[1;32m" + byellow = "\033[1;33m" + bblue = "\033[1;34m" + bpurple = "\033[1;35m" + bcyan = "\033[1;36m" + bwhite = "\033[1;37m" + + // Underline + ublack = "\033[4;30m" + ured = "\033[4;31m" + ugreen = "\033[4;32m" + uyellow = "\033[4;33m" + ublue = "\033[4;34m" + upurple = "\033[4;35m" + ucyan = "\033[4;36m" + uwhite = "\033[4;37m" + + // High Intensity + iblack = "\033[0;90m" + ired = "\033[0;91m" + igreen = "\033[0;92m" + iyellow = "\033[0;93m" + iblue = "\033[0;94m" + ipurple = "\033[0;95m" + icyan = "\033[0;96m" + iwhite = "\033[0;97m" + + // Bold High Intensity + biblack = "\033[1;90m" + bired = "\033[1;91m" + bigreen = "\033[1;92m" + biyellow = "\033[1;93m" + biblue = "\033[1;94m" + bipurple = "\033[1;95m" + bicyan = "\033[1;96m" + biwhite = "\033[1;97m" +} \ No newline at end of file diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index 0469d6cfe..3d3841ba4 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -38,6 +38,8 @@ params { monochrome_logs = false hook_url = null help = false + helpFull = false + showHidden = false version = false pipelines_testdata_base_path = 'https://raw.githubusercontent.com/nf-core/test-datasets/' @@ -59,10 +61,6 @@ params { max_time = '240.h' // Schema validation default options - validationFailUnrecognisedParams = false - validationLenientMode = false - validationSchemaIgnoreParams = 'genomes,igenomes_base' - validationShowHiddenParams = false validate_params = true } @@ -200,11 +198,6 @@ podman.registry = 'quay.io' singularity.registry = 'quay.io' charliecloud.registry = 'quay.io' -// Nextflow plugins -plugins { - id 
'nf-validation@1.1.3' // Validation of pipeline parameters and creation of an input channel from a sample sheet -} - {% if igenomes -%} // Load igenomes.config if required if (!params.igenomes_ignore) { @@ -267,6 +260,39 @@ manifest { doi = '' } +// Nextflow plugins +plugins { + id 'nf-schema@2.1.0' // Validation of pipeline parameters and creation of an input channel from a sample sheet +} + +includeConfig "conf/colors.config" + +validation { + parametersSchema = "${projectDir}/nextflow_schema.json" + help { + enabled = true + command = "nextflow run $manifest.name -profile --input samplesheet.csv --outdir " + beforeText = """ +-${colors.dim}----------------------------------------------------${colors.reset}- + ${colors.green},--.${colors.black}/${colors.green},-.${colors.reset} +${colors.blue} ___ __ __ __ ___ ${colors.green}/,-._.--~\'${colors.reset} +${colors.blue} |\\ | |__ __ / ` / \\ |__) |__ ${colors.yellow}} {${colors.reset} +${colors.blue} | \\| | \\__, \\__/ | \\ |___ ${colors.green}\\`-._,-`-,${colors.reset} + ${colors.green}`._,._,\'${colors.reset} +${colors.purple} ${manifest.name} ${manifest.version}${colors.reset} +-${colors.dim}----------------------------------------------------${colors.reset}- +""" + afterText = """${manifest.doi ? "* The pipeline\n" : ""}${manifest.doi.tokenize(",").collect { " https://doi.org/${it.trim().replace('https://doi.org/','')}"}.join("\n")}${manifest.doi ? 
"\n" : ""} +* The nf-core framework + https://doi.org/10.1038/s41587-020-0439-x + +* Software dependencies + https://github.com/${manifest.name}/blob/master/CITATIONS.md +""" + } +} + + // Load modules.config for DSL2 module specific options includeConfig 'conf/modules.config' diff --git a/nf_core/pipeline-template/nextflow_schema.json b/nf_core/pipeline-template/nextflow_schema.json index 4a1a22c3e..14120bbab 100644 --- a/nf_core/pipeline-template/nextflow_schema.json +++ b/nf_core/pipeline-template/nextflow_schema.json @@ -1,10 +1,10 @@ { - "$schema": "http://json-schema.org/draft-07/schema", + "$schema": "https://json-schema.org/draft/2020-12/schema", "$id": "https://raw.githubusercontent.com/{{ name }}/master/nextflow_schema.json", "title": "{{ name }} pipeline parameters", "description": "{{ description }}", "type": "object", - "definitions": { + "$defs": { "input_output_options": { "title": "Input/output options", "type": "object", @@ -73,6 +73,20 @@ "fa_icon": "fas fa-ban", "hidden": true, "help_text": "Do not load `igenomes.config` when running the pipeline. You may choose this option if you observe clashes between custom parameters and those supplied in `igenomes.config`." 
+ }, + "igenomes_base": { + "type": "string", + "format": "directory-path", + "description": "The base path to the igenomes reference files", + "fa_icon": "fas fa-ban", + "hidden": true, + "default": "s3://ngi-igenomes/igenomes/" + }, + "genomes": { + "type": "object", + "description": "An object containing all reference data availabe in igenomes", + "fa_icon": "fas fa-ban", + "hidden": true } } }, @@ -169,12 +183,6 @@ "description": "Less common options for the pipeline, typically set in a config file.", "help_text": "These options are common to all nf-core pipelines and allow you to customise some of the core preferences for how the pipeline runs.\n\nTypically these options would be set in a Nextflow config file loaded for all pipeline runs, such as `~/.nextflow/config`.", "properties": { - "help": { - "type": "boolean", - "description": "Display help text.", - "fa_icon": "fas fa-question-circle", - "hidden": true - }, "version": { "type": "boolean", "description": "Display version and exit.", @@ -253,27 +261,6 @@ "fa_icon": "fas fa-check-square", "hidden": true }, - "validationShowHiddenParams": { - "type": "boolean", - "fa_icon": "far fa-eye-slash", - "description": "Show all params when using `--help`", - "hidden": true, - "help_text": "By default, parameters set as _hidden_ in the schema are not shown on the command line when a user runs with `--help`. Specifying this option will tell the pipeline to show all parameters." - }, - "validationFailUnrecognisedParams": { - "type": "boolean", - "fa_icon": "far fa-check-circle", - "description": "Validation of parameters fails when an unrecognised parameter is found.", - "hidden": true, - "help_text": "By default, when an unrecognised parameter is found, it returns a warinig." 
- }, - "validationLenientMode": { - "type": "boolean", - "fa_icon": "far fa-check-circle", - "description": "Validation of parameters in lenient more.", - "hidden": true, - "help_text": "Allows string values that are parseable as numbers or booleans. For further information see [JSONSchema docs](https://github.com/everit-org/json-schema#lenient-mode)." - }, "pipelines_testdata_base_path": { "type": "string", "fa_icon": "far fa-check-circle", @@ -286,19 +273,19 @@ }, "allOf": [ { - "$ref": "#/definitions/input_output_options" + "$ref": "#/$defs/input_output_options" }, {% if igenomes %}{ - "$ref": "#/definitions/reference_genome_options" + "$ref": "#/$defs/reference_genome_options" },{% endif %} {% if nf_core_configs %}{ - "$ref": "#/definitions/institutional_config_options" + "$ref": "#/$defs/institutional_config_options" },{% endif %} { - "$ref": "#/definitions/max_job_request_options" + "$ref": "#/$defs/max_job_request_options" }, { - "$ref": "#/definitions/generic_options" + "$ref": "#/$defs/generic_options" } ] } diff --git a/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf index 72c9be85e..c83d2ae0c 100644 --- a/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf +++ b/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf @@ -9,8 +9,8 @@ */ include { UTILS_NFVALIDATION_PLUGIN } from '../../nf-core/utils_nfvalidation_plugin' -include { paramsSummaryMap } from 'plugin/nf-validation' -include { fromSamplesheet } from 'plugin/nf-validation' +include { paramsSummaryMap } from 'plugin/nf-schema' +include { samplesheetToList } from 'plugin/nf-schema' include { UTILS_NEXTFLOW_PIPELINE } from '../../nf-core/utils_nextflow_pipeline' include { completionEmail } from '../../nf-core/utils_nfcore_pipeline' include { completionSummary } from '../../nf-core/utils_nfcore_pipeline' @@ -30,7 +30,6 
@@ workflow PIPELINE_INITIALISATION { take: version // boolean: Display version and exit - help // boolean: Display help text validate_params // boolean: Boolean whether to validate parameters against the schema at runtime monochrome_logs // boolean: Do not use coloured log outputs nextflow_cli_args // array: List of positional nextflow CLI args @@ -54,16 +53,8 @@ workflow PIPELINE_INITIALISATION { // // Validate parameters and generate parameter summary to stdout // - pre_help_text = nfCoreLogo(monochrome_logs) - post_help_text = '\n' + workflowCitation() + '\n' + dashedLine(monochrome_logs) - def String workflow_command = "nextflow run ${workflow.manifest.name} -profile --input samplesheet.csv --outdir " UTILS_NFVALIDATION_PLUGIN ( - help, - workflow_command, - pre_help_text, - post_help_text, - validate_params, - "nextflow_schema.json" + validate_params ) // @@ -84,7 +75,7 @@ workflow PIPELINE_INITIALISATION { // Create channel from input file provided through params.input // Channel - .fromSamplesheet("input") + .fromList(samplesheetToList(params.input, "${projectDir}/assets/schema_input.json")) .map { meta, fastq_1, fastq_2 -> if (!fastq_2) { diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf index 14558c392..a5dcd5f83 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf @@ -61,25 +61,6 @@ def checkProfileProvided(nextflow_cli_args) { } } -// -// Citation string for pipeline -// -def workflowCitation() { - def temp_doi_ref = "" - String[] manifest_doi = workflow.manifest.doi.tokenize(",") - // Using a loop to handle multiple DOIs - // Removing `https://doi.org/` to handle pipelines using DOIs vs DOI resolvers - // Removing ` ` since the manifest.doi is a string and not a proper list - for (String doi_ref: manifest_doi) 
temp_doi_ref += " https://doi.org/${doi_ref.replace('https://doi.org/', '').replace(' ', '')}\n" - return "If you use ${workflow.manifest.name} for your analysis please cite:\n\n" + - "* The pipeline\n" + - temp_doi_ref + "\n" + - "* The nf-core framework\n" + - " https://doi.org/10.1038/s41587-020-0439-x\n\n" + - "* Software dependencies\n" + - " https://github.com/${workflow.manifest.name}/blob/master/CITATIONS.md" -} - // // Generate workflow version string // @@ -157,33 +138,6 @@ def paramsSummaryMultiqc(summary_params) { return yaml_file_text } -// -// nf-core logo -// -def nfCoreLogo(monochrome_logs=true) { - Map colors = logColours(monochrome_logs) - String.format( - """\n - ${dashedLine(monochrome_logs)} - ${colors.green},--.${colors.black}/${colors.green},-.${colors.reset} - ${colors.blue} ___ __ __ __ ___ ${colors.green}/,-._.--~\'${colors.reset} - ${colors.blue} |\\ | |__ __ / ` / \\ |__) |__ ${colors.yellow}} {${colors.reset} - ${colors.blue} | \\| | \\__, \\__/ | \\ |___ ${colors.green}\\`-._,-`-,${colors.reset} - ${colors.green}`._,._,\'${colors.reset} - ${colors.purple} ${workflow.manifest.name} ${getWorkflowVersion()}${colors.reset} - ${dashedLine(monochrome_logs)} - """.stripIndent() - ) -} - -// -// Return dashed line -// -def dashedLine(monochrome_logs=true) { - Map colors = logColours(monochrome_logs) - return "-${colors.dim}----------------------------------------------------${colors.reset}-" -} - // // ANSII colours used for terminal logging // diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/main.nf b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/main.nf index 2585b65d1..453894a64 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/main.nf +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/main.nf @@ -8,9 +8,8 @@ ======================================================================================== */ -include { 
paramsHelp } from 'plugin/nf-validation' -include { paramsSummaryLog } from 'plugin/nf-validation' -include { validateParameters } from 'plugin/nf-validation' +include { paramsSummaryLog } from 'plugin/nf-schema' +include { validateParameters } from 'plugin/nf-schema' /* ======================================================================================== @@ -21,12 +20,7 @@ include { validateParameters } from 'plugin/nf-validation' workflow UTILS_NFVALIDATION_PLUGIN { take: - print_help // boolean: print help - workflow_command // string: default commmand used to run pipeline - pre_help_text // string: string to be printed before help text and summary log - post_help_text // string: string to be printed after help text and summary log validate_params // boolean: validate parameters - schema_filename // path: JSON schema file, null to use default value main: @@ -37,24 +31,11 @@ workflow UTILS_NFVALIDATION_PLUGIN { post_help_text = post_help_text ?: '' workflow_command = workflow_command ?: '' - // - // Print help message if needed - // - if (print_help) { - log.info pre_help_text + paramsHelp(workflow_command, parameters_schema: schema_filename) + post_help_text - System.exit(0) - } - - // - // Print parameter summary to stdout - // - log.info pre_help_text + paramsSummaryLog(workflow, parameters_schema: schema_filename) + post_help_text - // // Validate parameters relative to the parameter JSON schema // if (validate_params){ - validateParameters(parameters_schema: schema_filename) + validateParameters() } emit: diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/main.nf.test b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/main.nf.test index 5784a33f2..354b81754 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/main.nf.test +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/main.nf.test @@ -10,30 +10,21 @@ 
nextflow_workflow { tag "utils_nfvalidation_plugin" tag "subworkflows/utils_nfvalidation_plugin" + config "./nextflow.config" + test("Should run nothing") { when { params { - monochrome_logs = true test_data = '' } workflow { """ - help = false - workflow_command = null - pre_help_text = null - post_help_text = null validate_params = false - schema_filename = "$moduleTestDir/nextflow_schema.json" - input[0] = help - input[1] = workflow_command - input[2] = pre_help_text - input[3] = post_help_text - input[4] = validate_params - input[5] = schema_filename + input[0] = validate_params """ } } @@ -45,147 +36,19 @@ nextflow_workflow { } } - test("Should run help") { - - - when { - - params { - monochrome_logs = true - test_data = '' - } - workflow { - """ - help = true - workflow_command = null - pre_help_text = null - post_help_text = null - validate_params = false - schema_filename = "$moduleTestDir/nextflow_schema.json" - - input[0] = help - input[1] = workflow_command - input[2] = pre_help_text - input[3] = post_help_text - input[4] = validate_params - input[5] = schema_filename - """ - } - } - - then { - assertAll( - { assert workflow.success }, - { assert workflow.exitStatus == 0 }, - { assert workflow.stdout.any { it.contains('Input/output options') } }, - { assert workflow.stdout.any { it.contains('--outdir') } } - ) - } - } - - test("Should run help with command") { - - when { - - params { - monochrome_logs = true - test_data = '' - } - workflow { - """ - help = true - workflow_command = "nextflow run noorg/doesntexist" - pre_help_text = null - post_help_text = null - validate_params = false - schema_filename = "$moduleTestDir/nextflow_schema.json" - - input[0] = help - input[1] = workflow_command - input[2] = pre_help_text - input[3] = post_help_text - input[4] = validate_params - input[5] = schema_filename - """ - } - } - - then { - assertAll( - { assert workflow.success }, - { assert workflow.exitStatus == 0 }, - { assert workflow.stdout.any { 
it.contains('nextflow run noorg/doesntexist') } }, - { assert workflow.stdout.any { it.contains('Input/output options') } }, - { assert workflow.stdout.any { it.contains('--outdir') } } - ) - } - } - - test("Should run help with extra text") { - - - when { - - params { - monochrome_logs = true - test_data = '' - } - workflow { - """ - help = true - workflow_command = "nextflow run noorg/doesntexist" - pre_help_text = "pre-help-text" - post_help_text = "post-help-text" - validate_params = false - schema_filename = "$moduleTestDir/nextflow_schema.json" - - input[0] = help - input[1] = workflow_command - input[2] = pre_help_text - input[3] = post_help_text - input[4] = validate_params - input[5] = schema_filename - """ - } - } - - then { - assertAll( - { assert workflow.success }, - { assert workflow.exitStatus == 0 }, - { assert workflow.stdout.any { it.contains('pre-help-text') } }, - { assert workflow.stdout.any { it.contains('nextflow run noorg/doesntexist') } }, - { assert workflow.stdout.any { it.contains('Input/output options') } }, - { assert workflow.stdout.any { it.contains('--outdir') } }, - { assert workflow.stdout.any { it.contains('post-help-text') } } - ) - } - } - test("Should validate params") { when { params { - monochrome_logs = true test_data = '' outdir = 1 } workflow { """ - help = false - workflow_command = null - pre_help_text = null - post_help_text = null validate_params = true - schema_filename = "$moduleTestDir/nextflow_schema.json" - input[0] = help - input[1] = workflow_command - input[2] = pre_help_text - input[3] = post_help_text - input[4] = validate_params - input[5] = schema_filename + input[0] = validate_params """ } } diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/nextflow.config b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/nextflow.config new file mode 100644 index 000000000..8d047ed59 --- /dev/null +++ 
b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/nextflow.config @@ -0,0 +1 @@ +validation.monochromeLogs = true \ No newline at end of file diff --git a/nf_core/pipeline-template/workflows/pipeline.nf b/nf_core/pipeline-template/workflows/pipeline.nf index d98c392f0..4bb091321 100644 --- a/nf_core/pipeline-template/workflows/pipeline.nf +++ b/nf_core/pipeline-template/workflows/pipeline.nf @@ -6,7 +6,7 @@ include { FASTQC } from '../modules/nf-core/fastqc/main' {% if multiqc %}include { MULTIQC } from '../modules/nf-core/multiqc/main'{% endif %} -include { paramsSummaryMap } from 'plugin/nf-validation' +include { paramsSummaryMap } from 'plugin/nf-schema' {% if multiqc %}include { paramsSummaryMultiqc } from '../subworkflows/nf-core/utils_nfcore_pipeline'{% endif %} include { softwareVersionsToYAML } from '../subworkflows/nf-core/utils_nfcore_pipeline' {% if citations or multiqc %}include { methodsDescriptionText } from '../subworkflows/local/utils_nfcore_{{ short_name }}_pipeline'{% endif %} From 7931c7f658712e15d522531616c3e001849e338b Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 20 Aug 2024 12:43:06 +0200 Subject: [PATCH 518/737] add option to exclude license from pipeline template --- nf_core/pipelines/create/templatefeatures.yml | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/nf_core/pipelines/create/templatefeatures.yml b/nf_core/pipelines/create/templatefeatures.yml index b97bf347a..dc3f07271 100644 --- a/nf_core/pipelines/create/templatefeatures.yml +++ b/nf_core/pipelines/create/templatefeatures.yml @@ -242,3 +242,18 @@ changelog: - "CHANGELOG.md" nfcore_pipelines: False custom_pipelines: True +license: + skippable_paths: + - "LICENSE" + short_description: "Add a license" + description: "Add the MIT license file." + help_text: | + To protect the copyright of the pipeline, you can add a LICENSE file. + This option ads the MIT License. 
You can read the conditions here: https://opensource.org/license/MIT + linting: + files_exist: + - "LICENSE" + files_unchanged: + - "LICENSE" + nfcore_pipelines: False + custom_pipelines: True From 6ff0929c0956b148839c231133b440f5f0cd876a Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Tue, 20 Aug 2024 10:46:47 +0000 Subject: [PATCH 519/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0a450a672..b530f597b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -17,6 +17,7 @@ - add option to exclude changelog from custom pipeline template ([#3104](https://github.com/nf-core/tools/pull/3104)) - handle template features with a yaml file ([#3108](https://github.com/nf-core/tools/pull/3108)) - add templatefeatures.yml to python package ([#3112](https://github.com/nf-core/tools/pull/3112)) +- add option to exclude license from pipeline template ([#3125](https://github.com/nf-core/tools/pull/3125)) ### Linting From 78d469fd893a6e1c45aa59330378f213a994fdbf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Tue, 20 Aug 2024 12:56:04 +0200 Subject: [PATCH 520/737] Update nf_core/pipelines/create/templatefeatures.yml MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Matthias Hörtenhuber --- nf_core/pipelines/create/templatefeatures.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipelines/create/templatefeatures.yml b/nf_core/pipelines/create/templatefeatures.yml index dc3f07271..a8a6ce565 100644 --- a/nf_core/pipelines/create/templatefeatures.yml +++ b/nf_core/pipelines/create/templatefeatures.yml @@ -245,7 +245,7 @@ changelog: license: skippable_paths: - "LICENSE" - short_description: "Add a license" + short_description: "Add a license File" description: "Add the MIT license file." help_text: | To protect the copyright of the pipeline, you can add a LICENSE file. 
From 34565c1e12ce20cda823e4e92eed6c084501821f Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 20 Aug 2024 13:00:04 +0200 Subject: [PATCH 521/737] update textual snapshots --- .../__snapshots__/test_create_app.ambr | 512 +++++++++--------- 1 file changed, 256 insertions(+), 256 deletions(-) diff --git a/tests/pipelines/__snapshots__/test_create_app.ambr b/tests/pipelines/__snapshots__/test_create_app.ambr index f4eb25508..66c08faba 100644 --- a/tests/pipelines/__snapshots__/test_create_app.ambr +++ b/tests/pipelines/__snapshots__/test_create_app.ambr @@ -851,257 +851,257 @@ font-weight: 700; } - .terminal-3220763577-matrix { + .terminal-2278814444-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3220763577-title { + .terminal-2278814444-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3220763577-r1 { fill: #c5c8c6 } - .terminal-3220763577-r2 { fill: #e3e3e3 } - .terminal-3220763577-r3 { fill: #989898 } - .terminal-3220763577-r4 { fill: #e1e1e1 } - .terminal-3220763577-r5 { fill: #4ebf71;font-weight: bold } - .terminal-3220763577-r6 { fill: #1e1e1e } - .terminal-3220763577-r7 { fill: #507bb3 } - .terminal-3220763577-r8 { fill: #e2e2e2 } - .terminal-3220763577-r9 { fill: #808080 } - .terminal-3220763577-r10 { fill: #dde6ed;font-weight: bold } - .terminal-3220763577-r11 { fill: #001541 } - .terminal-3220763577-r12 { fill: #0178d4 } - .terminal-3220763577-r13 { fill: #454a50 } - .terminal-3220763577-r14 { fill: #e2e3e3;font-weight: bold } - .terminal-3220763577-r15 { fill: #000000 } - .terminal-3220763577-r16 { fill: #e4e4e4 } - .terminal-3220763577-r17 { fill: #14191f } - .terminal-3220763577-r18 { fill: #7ae998 } - .terminal-3220763577-r19 { fill: #0a180e;font-weight: bold } - .terminal-3220763577-r20 { fill: #008139 } - .terminal-3220763577-r21 { fill: #fea62b;font-weight: bold } - .terminal-3220763577-r22 { fill: #a7a9ab } - .terminal-3220763577-r23 { 
fill: #e2e3e3 } + .terminal-2278814444-r1 { fill: #c5c8c6 } + .terminal-2278814444-r2 { fill: #e3e3e3 } + .terminal-2278814444-r3 { fill: #989898 } + .terminal-2278814444-r4 { fill: #e1e1e1 } + .terminal-2278814444-r5 { fill: #4ebf71;font-weight: bold } + .terminal-2278814444-r6 { fill: #1e1e1e } + .terminal-2278814444-r7 { fill: #507bb3 } + .terminal-2278814444-r8 { fill: #e2e2e2 } + .terminal-2278814444-r9 { fill: #808080 } + .terminal-2278814444-r10 { fill: #dde6ed;font-weight: bold } + .terminal-2278814444-r11 { fill: #001541 } + .terminal-2278814444-r12 { fill: #0178d4 } + .terminal-2278814444-r13 { fill: #454a50 } + .terminal-2278814444-r14 { fill: #e2e3e3;font-weight: bold } + .terminal-2278814444-r15 { fill: #000000 } + .terminal-2278814444-r16 { fill: #e4e4e4 } + .terminal-2278814444-r17 { fill: #14191f } + .terminal-2278814444-r18 { fill: #7ae998 } + .terminal-2278814444-r19 { fill: #0a180e;font-weight: bold } + .terminal-2278814444-r20 { fill: #008139 } + .terminal-2278814444-r21 { fill: #fea62b;font-weight: bold } + .terminal-2278814444-r22 { fill: #a7a9ab } + .terminal-2278814444-r23 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Template features - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github CI testsThe pipeline will  Show help  - ▁▁▁▁▁▁▁▁include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - actions for Continuous - Integration (CI)  - testing - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use reference genomesThe pipeline will be  Hide help  - ▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - copy of the most  - common reference  - genome files from  - iGenomes - - - Nf-core pipelines are configured to use a copy of the most common reference  - genome files.▂▂ - - By selecting 
this option, your pipeline will include a configuration file  - specifying the paths to these files. - - The required code to use these files will also be included in the template.  - When the pipeline user provides an appropriate genome key, the pipeline will - automatically download the required reference files. - ▅▅ - For more information about reference genomes in nf-core pipelines, see the  - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github badgesThe README.md file of  Show help  - ▁▁▁▁▁▁▁▁the pipeline will ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - include GitHub badges - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add configuration The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        filesinclude configuration ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - profiles containing  - custom parameters  - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Continue  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode  q Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Template features + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github CI testsThe pipeline will  Show help  + ▁▁▁▁▁▁▁▁include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + actions for Continuous + Integration (CI)  + testing + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use reference genomesThe pipeline will be  Hide help  + ▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + copy of the most  + common reference  + genome files from  + iGenomes + + + Nf-core pipelines are configured to use a copy of the most common reference  + genome files. + + By selecting this option, your pipeline will include a configuration file  + specifying the paths to these files. + + The required code to use these files will also be included in the template.  + When the pipeline user provides an appropriate genome key, the pipeline will + automatically download the required reference files. 
+ ▅▅ + For more information about reference genomes in nf-core pipelines, see the  + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github badgesThe README.md file of  Show help  + ▁▁▁▁▁▁▁▁the pipeline will ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + include GitHub badges + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add configuration The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        filesinclude configuration ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + profiles containing  + custom parameters  + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Back  Continue  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  d Toggle dark mode  q Quit  @@ -2233,255 +2233,255 @@ font-weight: 700; } - .terminal-537214554-matrix { + .terminal-2282176583-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-537214554-title { + .terminal-2282176583-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-537214554-r1 { fill: #c5c8c6 } - .terminal-537214554-r2 { fill: #e3e3e3 } - .terminal-537214554-r3 { fill: #989898 } - .terminal-537214554-r4 { fill: #e1e1e1 } - .terminal-537214554-r5 { fill: #4ebf71;font-weight: bold } - .terminal-537214554-r6 { fill: #1e1e1e } - .terminal-537214554-r7 { fill: #507bb3 } - .terminal-537214554-r8 { fill: #e2e2e2 } - .terminal-537214554-r9 { fill: #808080 } - .terminal-537214554-r10 { fill: #dde6ed;font-weight: bold } - .terminal-537214554-r11 { fill: #001541 } - .terminal-537214554-r12 { fill: #14191f } - .terminal-537214554-r13 { fill: #454a50 } - .terminal-537214554-r14 { fill: #7ae998 } - .terminal-537214554-r15 { fill: #e2e3e3;font-weight: bold } - .terminal-537214554-r16 { fill: #0a180e;font-weight: bold } - .terminal-537214554-r17 { fill: #000000 } - .terminal-537214554-r18 { fill: #008139 } - .terminal-537214554-r19 { fill: #fea62b;font-weight: bold } - .terminal-537214554-r20 { fill: #a7a9ab } - .terminal-537214554-r21 { fill: #e2e3e3 } + .terminal-2282176583-r1 { fill: #c5c8c6 } + .terminal-2282176583-r2 { fill: #e3e3e3 } + 
.terminal-2282176583-r3 { fill: #989898 } + .terminal-2282176583-r4 { fill: #e1e1e1 } + .terminal-2282176583-r5 { fill: #4ebf71;font-weight: bold } + .terminal-2282176583-r6 { fill: #1e1e1e } + .terminal-2282176583-r7 { fill: #507bb3 } + .terminal-2282176583-r8 { fill: #e2e2e2 } + .terminal-2282176583-r9 { fill: #808080 } + .terminal-2282176583-r10 { fill: #dde6ed;font-weight: bold } + .terminal-2282176583-r11 { fill: #001541 } + .terminal-2282176583-r12 { fill: #14191f } + .terminal-2282176583-r13 { fill: #454a50 } + .terminal-2282176583-r14 { fill: #7ae998 } + .terminal-2282176583-r15 { fill: #e2e3e3;font-weight: bold } + .terminal-2282176583-r16 { fill: #0a180e;font-weight: bold } + .terminal-2282176583-r17 { fill: #000000 } + .terminal-2282176583-r18 { fill: #008139 } + .terminal-2282176583-r19 { fill: #fea62b;font-weight: bold } + .terminal-2282176583-r20 { fill: #a7a9ab } + .terminal-2282176583-r21 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Template features - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github CI testsThe pipeline will  Show help  - ▁▁▁▁▁▁▁▁include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - actions for Continuous - Integration (CI)  - testing - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use reference genomesThe pipeline will be  Show help  - ▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - copy of the most  - common reference  - genome files from  - iGenomes - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github badgesThe README.md file of  Show help  - ▁▁▁▁▁▁▁▁the pipeline will ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - include GitHub badges - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add configuration The pipeline will  Show help ▇▇ - ▁▁▁▁▁▁▁▁        filesinclude configuration ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - 
profiles containing  - custom parameters  - requried to run  - nf-core pipelines at  - different institutions - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use code lintersThe pipeline will  Show help  - ▁▁▁▁▁▁▁▁include code linters ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - and CI tests to lint  - your code: pre-commit, - editor-config and  - prettier. - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Include citationsInclude pipeline tools Show help  - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Continue  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode  q Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Template features + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github CI testsThe pipeline will  Show help  + ▁▁▁▁▁▁▁▁include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + actions for Continuous + Integration (CI)  + testing + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use reference genomesThe pipeline will be  Show help  + ▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + copy of the most  + common reference  + genome files from  + iGenomes + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github badgesThe README.md file of  Show help  + ▁▁▁▁▁▁▁▁the pipeline will ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + include GitHub badges + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▇▇ +         Add configuration The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        filesinclude configuration ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + profiles containing  + custom parameters  + requried to run  + nf-core pipelines at  + different institutions + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use code lintersThe pipeline will  Show help  + ▁▁▁▁▁▁▁▁include code linters ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + and CI tests to lint  + your code: pre-commit, + editor-config and  + prettier. 
+ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Include citationsInclude pipeline tools Show help  + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Back  Continue  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  d Toggle dark mode  q Quit  From ceb78a84c90d7955e903f3ffbfa26000d62557fb Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 20 Aug 2024 15:11:26 +0200 Subject: [PATCH 522/737] add option to exclude email from pipeline template --- nf_core/pipeline-template/.editorconfig | 2 + nf_core/pipeline-template/.prettierignore | 2 + nf_core/pipeline-template/docs/output.md | 2 +- nf_core/pipeline-template/main.nf | 4 +- nf_core/pipeline-template/nextflow.config | 2 + .../pipeline-template/nextflow_schema.json | 8 +- .../utils_nfcore_pipeline_pipeline/main.nf | 6 + nf_core/pipelines/create/templatefeatures.yml | 18 + .../__snapshots__/test_create_app.ambr | 510 +++++++++--------- 9 files changed, 293 insertions(+), 261 deletions(-) diff --git a/nf_core/pipeline-template/.editorconfig b/nf_core/pipeline-template/.editorconfig index 72dda289a..5efa58610 100644 --- a/nf_core/pipeline-template/.editorconfig +++ b/nf_core/pipeline-template/.editorconfig @@ -25,8 +25,10 @@ insert_final_newline = unset trim_trailing_whitespace = unset indent_style = unset +{%- if email %} [/assets/email*] indent_size = unset +{%- endif %} # ignore python and markdown [*.{py,md}] diff --git a/nf_core/pipeline-template/.prettierignore b/nf_core/pipeline-template/.prettierignore index 437d763d0..3b3d5ff08 100644 --- a/nf_core/pipeline-template/.prettierignore +++ b/nf_core/pipeline-template/.prettierignore @@ -1,4 +1,6 @@ +{%- if email %} email_template.html +{%- endif %} adaptivecard.json slackreport.json .nextflow* diff --git a/nf_core/pipeline-template/docs/output.md b/nf_core/pipeline-template/docs/output.md index edd48b83f..76195a682 100644 --- a/nf_core/pipeline-template/docs/output.md +++ b/nf_core/pipeline-template/docs/output.md @@ -55,7 +55,7 @@ Results generated by MultiQC collate pipeline QC from supported 
tools e.g. FastQ - `pipeline_info/` - Reports generated by Nextflow: `execution_report.html`, `execution_timeline.html`, `execution_trace.txt` and `pipeline_dag.dot`/`pipeline_dag.svg`. - - Reports generated by the pipeline: `pipeline_report.html`, `pipeline_report.txt` and `software_versions.yml`. The `pipeline_report*` files will only be present if the `--email` / `--email_on_fail` parameter's are used when running the pipeline. + {% if email %}- Reports generated by the pipeline: `pipeline_report.html`, `pipeline_report.txt` and `software_versions.yml`. The `pipeline_report*` files will only be present if the `--email` / `--email_on_fail` parameter's are used when running the pipeline. {% endif %} - Reformatted samplesheet files used as input to the pipeline: `samplesheet.valid.csv`. - Parameters used by the pipeline run: `params.json`. diff --git a/nf_core/pipeline-template/main.nf b/nf_core/pipeline-template/main.nf index fddfc5489..0cf688a7e 100644 --- a/nf_core/pipeline-template/main.nf +++ b/nf_core/pipeline-template/main.nf @@ -20,7 +20,7 @@ include { {{ short_name|upper }} } from './workflows/{{ short_name }}' include { PIPELINE_INITIALISATION } from './subworkflows/local/utils_nfcore_{{ short_name }}_pipeline' include { PIPELINE_COMPLETION } from './subworkflows/local/utils_nfcore_{{ short_name }}_pipeline' -{% if igenomes %} +{%- if igenomes %} include { getGenomeAttribute } from './subworkflows/local/utils_nfcore_{{ short_name }}_pipeline' /* @@ -95,9 +95,11 @@ workflow { // SUBWORKFLOW: Run completion tasks // PIPELINE_COMPLETION ( + {%- if email %} params.email, params.email_on_fail, params.plaintext_email, + {%- endif %} params.outdir, params.monochrome_logs, params.hook_url, diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index 0469d6cfe..daa1c5e36 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -32,9 +32,11 @@ params { // Boilerplate 
options outdir = null publish_dir_mode = 'copy' + {%- if email %} email = null email_on_fail = null plaintext_email = false + {%- endif %} monochrome_logs = false hook_url = null help = false diff --git a/nf_core/pipeline-template/nextflow_schema.json b/nf_core/pipeline-template/nextflow_schema.json index 4a1a22c3e..4a376330b 100644 --- a/nf_core/pipeline-template/nextflow_schema.json +++ b/nf_core/pipeline-template/nextflow_schema.json @@ -28,14 +28,14 @@ "format": "directory-path", "description": "The output directory where the results will be saved. You have to use absolute paths to storage on Cloud infrastructure.", "fa_icon": "fas fa-folder-open" - }, + }{% if email %}, "email": { "type": "string", "description": "Email address for completion summary.", "fa_icon": "fas fa-envelope", "help_text": "Set this parameter to your e-mail address to get a summary e-mail with details of the run sent to you when the workflow exits. If set in your user config file (`~/.nextflow/config`) then you don't need to specify this on the command line for every run.", "pattern": "^([a-zA-Z0-9_\\-\\.]+)@([a-zA-Z0-9_\\-\\.]+)\\.([a-zA-Z]{2,5})$" - }{% if multiqc %}, + }{% endif %}{% if multiqc %}, "multiqc_title": { "type": "string", "description": "MultiQC report title. 
Printed as page header, used for filename if not otherwise specified.", @@ -189,7 +189,7 @@ "fa_icon": "fas fa-copy", "enum": ["symlink", "rellink", "link", "copy", "copyNoFollow", "move"], "hidden": true - }, + },{% if email %} "email_on_fail": { "type": "string", "description": "Email address for completion summary, only when pipeline fails.", @@ -203,7 +203,7 @@ "description": "Send plain-text email instead of HTML.", "fa_icon": "fas fa-remove-format", "hidden": true - }, + },{% endif %} {%- if multiqc %} "max_multiqc_email_size": { "type": "string", diff --git a/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf index 72c9be85e..62ef73d65 100644 --- a/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf +++ b/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf @@ -12,7 +12,9 @@ include { UTILS_NFVALIDATION_PLUGIN } from '../../nf-core/utils_nfvalidation_plu include { paramsSummaryMap } from 'plugin/nf-validation' include { fromSamplesheet } from 'plugin/nf-validation' include { UTILS_NEXTFLOW_PIPELINE } from '../../nf-core/utils_nextflow_pipeline' +{%- if email %} include { completionEmail } from '../../nf-core/utils_nfcore_pipeline' +{%- endif %} include { completionSummary } from '../../nf-core/utils_nfcore_pipeline' include { dashedLine } from '../../nf-core/utils_nfcore_pipeline' include { nfCoreLogo } from '../../nf-core/utils_nfcore_pipeline' @@ -117,9 +119,11 @@ workflow PIPELINE_INITIALISATION { workflow PIPELINE_COMPLETION { take: + {%- if email %} email // string: email address email_on_fail // string: email address sent on pipeline failure plaintext_email // boolean: Send plain-text email instead of HTML + {% endif %} outdir // path: Path to output directory where results will be published monochrome_logs // boolean: Disable ANSI colour codes in log output hook_url // 
string: hook URL for notifications @@ -133,6 +137,7 @@ workflow PIPELINE_COMPLETION { // Completion email and summary // workflow.onComplete { + {%- if email %} if (email || email_on_fail) { {%- if multiqc %} completionEmail(summary_params, email, email_on_fail, plaintext_email, outdir, monochrome_logs, multiqc_report.toList()) @@ -140,6 +145,7 @@ workflow PIPELINE_COMPLETION { completionEmail(summary_params, email, email_on_fail, plaintext_email, outdir, monochrome_logs, []) {%- endif %} } + {%- endif %} completionSummary(monochrome_logs) diff --git a/nf_core/pipelines/create/templatefeatures.yml b/nf_core/pipelines/create/templatefeatures.yml index a8a6ce565..5dafd7e31 100644 --- a/nf_core/pipelines/create/templatefeatures.yml +++ b/nf_core/pipelines/create/templatefeatures.yml @@ -257,3 +257,21 @@ license: - "LICENSE" nfcore_pipelines: False custom_pipelines: True +email: + skippable_paths: + - "assets/email_template.html" + - "assets/sendmail_template.txt" + - "assets/email_template.txt" + short_description: "Enable email updates" + description: "Enable sending emails on pipeline completion." + help_text: | + Enable the option of sending an email which will include pipeline execution reports on pipeline completion. 
+ linting: + files_exist: + - "assets/email_template.html" + - "assets/sendmail_template.txt" + - "assets/email_template.txt" + files_unchanged: + - ".prettierignore" + nfcore_pipelines: False + custom_pipelines: True diff --git a/tests/pipelines/__snapshots__/test_create_app.ambr b/tests/pipelines/__snapshots__/test_create_app.ambr index 66c08faba..f2a057847 100644 --- a/tests/pipelines/__snapshots__/test_create_app.ambr +++ b/tests/pipelines/__snapshots__/test_create_app.ambr @@ -851,257 +851,257 @@ font-weight: 700; } - .terminal-2278814444-matrix { + .terminal-2220422576-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2278814444-title { + .terminal-2220422576-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2278814444-r1 { fill: #c5c8c6 } - .terminal-2278814444-r2 { fill: #e3e3e3 } - .terminal-2278814444-r3 { fill: #989898 } - .terminal-2278814444-r4 { fill: #e1e1e1 } - .terminal-2278814444-r5 { fill: #4ebf71;font-weight: bold } - .terminal-2278814444-r6 { fill: #1e1e1e } - .terminal-2278814444-r7 { fill: #507bb3 } - .terminal-2278814444-r8 { fill: #e2e2e2 } - .terminal-2278814444-r9 { fill: #808080 } - .terminal-2278814444-r10 { fill: #dde6ed;font-weight: bold } - .terminal-2278814444-r11 { fill: #001541 } - .terminal-2278814444-r12 { fill: #0178d4 } - .terminal-2278814444-r13 { fill: #454a50 } - .terminal-2278814444-r14 { fill: #e2e3e3;font-weight: bold } - .terminal-2278814444-r15 { fill: #000000 } - .terminal-2278814444-r16 { fill: #e4e4e4 } - .terminal-2278814444-r17 { fill: #14191f } - .terminal-2278814444-r18 { fill: #7ae998 } - .terminal-2278814444-r19 { fill: #0a180e;font-weight: bold } - .terminal-2278814444-r20 { fill: #008139 } - .terminal-2278814444-r21 { fill: #fea62b;font-weight: bold } - .terminal-2278814444-r22 { fill: #a7a9ab } - .terminal-2278814444-r23 { fill: #e2e3e3 } + .terminal-2220422576-r1 { fill: #c5c8c6 } + 
.terminal-2220422576-r2 { fill: #e3e3e3 } + .terminal-2220422576-r3 { fill: #989898 } + .terminal-2220422576-r4 { fill: #e1e1e1 } + .terminal-2220422576-r5 { fill: #4ebf71;font-weight: bold } + .terminal-2220422576-r6 { fill: #1e1e1e } + .terminal-2220422576-r7 { fill: #507bb3 } + .terminal-2220422576-r8 { fill: #e2e2e2 } + .terminal-2220422576-r9 { fill: #808080 } + .terminal-2220422576-r10 { fill: #dde6ed;font-weight: bold } + .terminal-2220422576-r11 { fill: #001541 } + .terminal-2220422576-r12 { fill: #0178d4 } + .terminal-2220422576-r13 { fill: #454a50 } + .terminal-2220422576-r14 { fill: #e2e3e3;font-weight: bold } + .terminal-2220422576-r15 { fill: #000000 } + .terminal-2220422576-r16 { fill: #e4e4e4 } + .terminal-2220422576-r17 { fill: #14191f } + .terminal-2220422576-r18 { fill: #7ae998 } + .terminal-2220422576-r19 { fill: #0a180e;font-weight: bold } + .terminal-2220422576-r20 { fill: #008139 } + .terminal-2220422576-r21 { fill: #fea62b;font-weight: bold } + .terminal-2220422576-r22 { fill: #a7a9ab } + .terminal-2220422576-r23 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - + - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Template features - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github CI testsThe pipeline will  Show help  - ▁▁▁▁▁▁▁▁include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - actions for Continuous - Integration (CI)  - testing - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use reference genomesThe pipeline will be  Hide help  - ▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - copy of the most  - common reference  - genome files from  - iGenomes - - - Nf-core pipelines are configured to use a copy of the most common reference  - genome files. 
- - By selecting this option, your pipeline will include a configuration file  - specifying the paths to these files. - - The required code to use these files will also be included in the template.  - When the pipeline user provides an appropriate genome key, the pipeline will - automatically download the required reference files. - ▅▅ - For more information about reference genomes in nf-core pipelines, see the  - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github badgesThe README.md file of  Show help  - ▁▁▁▁▁▁▁▁the pipeline will ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - include GitHub badges - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add configuration The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        filesinclude configuration ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - profiles containing  - custom parameters  - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Continue  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode  q Quit  + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Template features + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github CI testsThe pipeline will  Show help  + ▁▁▁▁▁▁▁▁include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + actions for Continuous + Integration (CI)  + testing + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use reference genomesThe pipeline will be  Hide help  + ▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + copy of the most  + common reference  + genome files from  + iGenomes + + + Nf-core pipelines are configured to use a copy of the most common reference ▇▇ + genome files. + + By selecting this option, your pipeline will include a configuration file  + specifying the paths to these files. + + The required code to use these files will also be included in the template.  + When the pipeline user provides an appropriate genome key, the pipeline will + automatically download the required reference files. 
+ ▅▅ + For more information about reference genomes in nf-core pipelines, see the  + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github badgesThe README.md file of  Show help  + ▁▁▁▁▁▁▁▁the pipeline will ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + include GitHub badges + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add configuration The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        filesinclude configuration ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + profiles containing  + custom parameters  + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Back  Continue  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  d Toggle dark mode  q Quit  @@ -2233,255 +2233,255 @@ font-weight: 700; } - .terminal-2282176583-matrix { + .terminal-1370375189-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2282176583-title { + .terminal-1370375189-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2282176583-r1 { fill: #c5c8c6 } - .terminal-2282176583-r2 { fill: #e3e3e3 } - .terminal-2282176583-r3 { fill: #989898 } - .terminal-2282176583-r4 { fill: #e1e1e1 } - .terminal-2282176583-r5 { fill: #4ebf71;font-weight: bold } - .terminal-2282176583-r6 { fill: #1e1e1e } - .terminal-2282176583-r7 { fill: #507bb3 } - .terminal-2282176583-r8 { fill: #e2e2e2 } - .terminal-2282176583-r9 { fill: #808080 } - .terminal-2282176583-r10 { fill: #dde6ed;font-weight: bold } - .terminal-2282176583-r11 { fill: #001541 } - .terminal-2282176583-r12 { fill: #14191f } - .terminal-2282176583-r13 { fill: #454a50 } - .terminal-2282176583-r14 { fill: #7ae998 } - .terminal-2282176583-r15 { fill: #e2e3e3;font-weight: bold } - .terminal-2282176583-r16 { fill: #0a180e;font-weight: bold } - .terminal-2282176583-r17 { fill: #000000 } - .terminal-2282176583-r18 { fill: #008139 } - .terminal-2282176583-r19 { fill: #fea62b;font-weight: bold } - .terminal-2282176583-r20 { fill: #a7a9ab } - .terminal-2282176583-r21 { fill: #e2e3e3 } + .terminal-1370375189-r1 { fill: #c5c8c6 } + .terminal-1370375189-r2 { 
fill: #e3e3e3 } + .terminal-1370375189-r3 { fill: #989898 } + .terminal-1370375189-r4 { fill: #e1e1e1 } + .terminal-1370375189-r5 { fill: #4ebf71;font-weight: bold } + .terminal-1370375189-r6 { fill: #1e1e1e } + .terminal-1370375189-r7 { fill: #507bb3 } + .terminal-1370375189-r8 { fill: #e2e2e2 } + .terminal-1370375189-r9 { fill: #808080 } + .terminal-1370375189-r10 { fill: #dde6ed;font-weight: bold } + .terminal-1370375189-r11 { fill: #001541 } + .terminal-1370375189-r12 { fill: #14191f } + .terminal-1370375189-r13 { fill: #454a50 } + .terminal-1370375189-r14 { fill: #7ae998 } + .terminal-1370375189-r15 { fill: #e2e3e3;font-weight: bold } + .terminal-1370375189-r16 { fill: #0a180e;font-weight: bold } + .terminal-1370375189-r17 { fill: #000000 } + .terminal-1370375189-r18 { fill: #008139 } + .terminal-1370375189-r19 { fill: #fea62b;font-weight: bold } + .terminal-1370375189-r20 { fill: #a7a9ab } + .terminal-1370375189-r21 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Template features - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github CI testsThe pipeline will  Show help  - ▁▁▁▁▁▁▁▁include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - actions for Continuous - Integration (CI)  - testing - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use reference genomesThe pipeline will be  Show help  - ▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - copy of the most  - common reference  - genome files from  - iGenomes - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github badgesThe README.md file of  Show help  - ▁▁▁▁▁▁▁▁the pipeline will ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - include GitHub badges - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▇▇ -         Add configuration The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        filesinclude configuration 
▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - profiles containing  - custom parameters  - requried to run  - nf-core pipelines at  - different institutions - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use code lintersThe pipeline will  Show help  - ▁▁▁▁▁▁▁▁include code linters ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - and CI tests to lint  - your code: pre-commit, - editor-config and  - prettier. - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Include citationsInclude pipeline tools Show help  - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Continue  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode  q Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Template features + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github CI testsThe pipeline will  Show help  + ▁▁▁▁▁▁▁▁include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + actions for Continuous + Integration (CI)  + testing + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use reference genomesThe pipeline will be  Show help  + ▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + copy of the most  + common reference  + genome files from  + iGenomes + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github badgesThe README.md file of  Show help  + ▁▁▁▁▁▁▁▁the pipeline will ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + include GitHub badges▁▁ + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add configuration The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        filesinclude configuration ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + profiles containing  + custom parameters  + requried to run  + nf-core pipelines at  + different institutions + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use code lintersThe pipeline will  Show help  + ▁▁▁▁▁▁▁▁include code linters ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + and CI tests to lint  + your code: pre-commit, + editor-config and  + prettier. 
+ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Include citationsInclude pipeline tools Show help  + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Back  Continue  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  d Toggle dark mode  q Quit  From 1390a6d5e2bb259c6004a845e212abe2b80ff167 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Tue, 20 Aug 2024 13:19:14 +0000 Subject: [PATCH 523/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index b530f597b..98fb92533 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -18,6 +18,7 @@ - handle template features with a yaml file ([#3108](https://github.com/nf-core/tools/pull/3108)) - add templatefeatures.yml to python package ([#3112](https://github.com/nf-core/tools/pull/3112)) - add option to exclude license from pipeline template ([#3125](https://github.com/nf-core/tools/pull/3125)) +- add option to exclude email from pipeline template ([#3126](https://github.com/nf-core/tools/pull/3126)) ### Linting From 5519c83d5b582af797ff4ae242fa98113bbe847a Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Tue, 20 Aug 2024 15:24:51 +0200 Subject: [PATCH 524/737] some small updates for the new release --- nf_core/pipeline-template/conf/colors.config | 61 ------------------- nf_core/pipeline-template/modules.json | 4 +- nf_core/pipeline-template/nextflow.config | 22 ++++--- .../nf-core/utils_nfschema_plugin/main.nf | 32 ++++++++++ .../nf-core/utils_nfschema_plugin/meta.yml | 22 +++++++ .../tests/main.nf.test | 27 ++++---- .../tests/nextflow.config | 8 +++ .../tests/nextflow_schema.json | 8 +-- .../nf-core/utils_nfvalidation_plugin/main.nf | 43 ------------- .../utils_nfvalidation_plugin/meta.yml | 44 ------------- .../tests/nextflow.config | 1 - .../utils_nfvalidation_plugin/tests/tags.yml | 2 - 12 files changed, 93 insertions(+), 181 deletions(-) delete mode 100644 nf_core/pipeline-template/conf/colors.config create mode 100644 
nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/main.nf create mode 100644 nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/meta.yml rename nf_core/pipeline-template/subworkflows/nf-core/{utils_nfvalidation_plugin => utils_nfschema_plugin}/tests/main.nf.test (62%) create mode 100644 nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow.config rename nf_core/pipeline-template/subworkflows/nf-core/{utils_nfvalidation_plugin => utils_nfschema_plugin}/tests/nextflow_schema.json (95%) delete mode 100644 nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/main.nf delete mode 100644 nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/meta.yml delete mode 100644 nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/nextflow.config delete mode 100644 nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/tags.yml diff --git a/nf_core/pipeline-template/conf/colors.config b/nf_core/pipeline-template/conf/colors.config deleted file mode 100644 index 00f296326..000000000 --- a/nf_core/pipeline-template/conf/colors.config +++ /dev/null @@ -1,61 +0,0 @@ -// For now, there is no easy way to set monochromeLogs -colors { - // Reset / Meta - reset = "\033[0m" - bold = "\033[1m" - dim = "\033[2m" - underlined = "\033[4m" - blink = "\033[5m" - reverse = "\033[7m" - hidden = "\033[8m" - - // Regular Colors - black = "\033[0;30m" - red = "\033[0;31m" - green = "\033[0;32m" - yellow = "\033[0;33m" - blue = "\033[0;34m" - purple = "\033[0;35m" - cyan = "\033[0;36m" - white = "\033[0;37m" - - // Bold - bblack = "\033[1;30m" - bred = "\033[1;31m" - bgreen = "\033[1;32m" - byellow = "\033[1;33m" - bblue = "\033[1;34m" - bpurple = "\033[1;35m" - bcyan = "\033[1;36m" - bwhite = "\033[1;37m" - - // Underline - ublack = "\033[4;30m" - ured = "\033[4;31m" - ugreen = "\033[4;32m" - uyellow = "\033[4;33m" - ublue = "\033[4;34m" - upurple = 
"\033[4;35m" - ucyan = "\033[4;36m" - uwhite = "\033[4;37m" - - // High Intensity - iblack = "\033[0;90m" - ired = "\033[0;91m" - igreen = "\033[0;92m" - iyellow = "\033[0;93m" - iblue = "\033[0;94m" - ipurple = "\033[0;95m" - icyan = "\033[0;96m" - iwhite = "\033[0;97m" - - // Bold High Intensity - biblack = "\033[1;90m" - bired = "\033[1;91m" - bigreen = "\033[1;92m" - biyellow = "\033[1;93m" - biblue = "\033[1;94m" - bipurple = "\033[1;95m" - bicyan = "\033[1;96m" - biwhite = "\033[1;97m" -} \ No newline at end of file diff --git a/nf_core/pipeline-template/modules.json b/nf_core/pipeline-template/modules.json index eb9391b29..1a3b96490 100644 --- a/nf_core/pipeline-template/modules.json +++ b/nf_core/pipeline-template/modules.json @@ -30,9 +30,9 @@ "git_sha": "92de218a329bfc9a9033116eb5f65fd270e72ba3", "installed_by": ["subworkflows"] }, - "utils_nfvalidation_plugin": { + "utils_nfschema_plugin": { "branch": "master", - "git_sha": "5caf7640a9ef1d18d765d55339be751bb0969dfa", + "git_sha": "a3e87febb28bd0461c22a917c5b2c1492053ef85", "installed_by": ["subworkflows"] } } diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index 3d3841ba4..346292c98 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -265,22 +265,20 @@ plugins { id 'nf-schema@2.1.0' // Validation of pipeline parameters and creation of an input channel from a sample sheet } -includeConfig "conf/colors.config" - validation { parametersSchema = "${projectDir}/nextflow_schema.json" help { enabled = true command = "nextflow run $manifest.name -profile --input samplesheet.csv --outdir " beforeText = """ --${colors.dim}----------------------------------------------------${colors.reset}- - ${colors.green},--.${colors.black}/${colors.green},-.${colors.reset} -${colors.blue} ___ __ __ __ ___ ${colors.green}/,-._.--~\'${colors.reset} -${colors.blue} |\\ | |__ __ / ` / \\ |__) |__ ${colors.yellow}} 
{${colors.reset} -${colors.blue} | \\| | \\__, \\__/ | \\ |___ ${colors.green}\\`-._,-`-,${colors.reset} - ${colors.green}`._,._,\'${colors.reset} -${colors.purple} ${manifest.name} ${manifest.version}${colors.reset} --${colors.dim}----------------------------------------------------${colors.reset}- +-\033[2m----------------------------------------------------\033[0m- + \033[0;32m,--.\033[0;30m/\033[0;32m,-.\033[0m +\033[0;34m ___ __ __ __ ___ \033[0;32m/,-._.--~\'\033[0m +\033[0;34m |\\ | |__ __ / ` / \\ |__) |__ \033[0;33m} {\033[0m +\033[0;34m | \\| | \\__, \\__/ | \\ |___ \033[0;32m\\`-._,-`-,\033[0m + \033[0;32m`._,._,\'\033[0m +\033[0;35m ${manifest.name} ${manifest.version}\033[0m +-\033[2m----------------------------------------------------\033[0m- """ afterText = """${manifest.doi ? "* The pipeline\n" : ""}${manifest.doi.tokenize(",").collect { " https://doi.org/${it.trim().replace('https://doi.org/','')}"}.join("\n")}${manifest.doi ? "\n" : ""} * The nf-core framework @@ -290,6 +288,10 @@ ${colors.purple} ${manifest.name} ${manifest.version}${colors.reset} https://github.com/${manifest.name}/blob/master/CITATIONS.md """ } + summary { + beforeText = validation.help.beforeText + afterText = validation.help.afterText + } } diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/main.nf b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/main.nf new file mode 100644 index 000000000..5d1dc7c92 --- /dev/null +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/main.nf @@ -0,0 +1,32 @@ +// +// Subworkflow that uses the nf-schema plugin to validate parameters and render the parameter summary +// + +include { paramsSummaryLog } from 'plugin/nf-schema' +include { validateParameters } from 'plugin/nf-schema' + +workflow UTILS_NFSCHEMA_PLUGIN { + + take: + validate_params // boolean: validate the parameters + + main: + + // + // Print parameter summary to stdout. 
This will display the parameters + // that differ from the default given in the JSON schema + // + log.info paramsSummaryLog(workflow) + + // + // Validate the parameters using nextflow_schema.json or the schema + // given via the validation.parametersSchema configuration option + // + if(validate_params) { + validateParameters() + } + + emit: + dummy_emit = true +} + diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/meta.yml b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/meta.yml new file mode 100644 index 000000000..90b8cb189 --- /dev/null +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/meta.yml @@ -0,0 +1,22 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json +name: "utils_nfschema_plugin" +description: Run nf-schema to validate parameters and create a summary of changed parameters +keywords: + - validation + - JSON schema + - plugin + - parameters + - summary +components: [] +input: + - validate_params: + type: boolean + description: Validate the parameters and error if invalid. 
+output: + - dummy_emit: + type: boolean + description: Dummy emit to make nf-core subworkflows lint happy +authors: + - "@nvnieuwk" +maintainers: + - "@nvnieuwk" diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/main.nf.test b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test similarity index 62% rename from nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/main.nf.test rename to nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test index 354b81754..703d3a9b8 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/main.nf.test +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test @@ -1,14 +1,14 @@ +// TODO nf-core: Once you have added the required tests, please run the following command to build this file: +// nf-core subworkflows test utils_nfschema_plugin nextflow_workflow { - name "Test Workflow UTILS_NFVALIDATION_PLUGIN" + name "Test Subworkflow UTILS_NFSCHEMA_PLUGIN" script "../main.nf" - workflow "UTILS_NFVALIDATION_PLUGIN" + workflow "UTILS_NFSCHEMA_PLUGIN" + tag "subworkflows" tag "subworkflows_nfcore" - tag "plugin/nf-validation" - tag "'plugin/nf-validation'" - tag "utils_nfvalidation_plugin" - tag "subworkflows/utils_nfvalidation_plugin" + tag "subworkflows/utils_nfschema_plugin" config "./nextflow.config" @@ -17,13 +17,12 @@ nextflow_workflow { when { params { - test_data = '' + test_data = '' } workflow { """ - validate_params = false - + validate_params = false input[0] = validate_params """ } @@ -41,13 +40,13 @@ nextflow_workflow { when { params { - test_data = '' - outdir = 1 + test_data = '' + outdir = 1 } + workflow { """ - validate_params = true - + validate_params = true input[0] = validate_params """ } @@ -56,7 +55,7 @@ nextflow_workflow { then { assertAll( { assert workflow.failed }, - { assert workflow.stdout.any { 
it.contains('ERROR ~ ERROR: Validation of pipeline parameters failed!') } } + { assert workflow.stdout.any { it.contains('ERROR ~ Validation of pipeline parameters failed!') } } ) } } diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow.config b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow.config new file mode 100644 index 000000000..0907ac58f --- /dev/null +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow.config @@ -0,0 +1,8 @@ +plugins { + id "nf-schema@2.1.0" +} + +validation { + parametersSchema = "${projectDir}/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow_schema.json" + monochromeLogs = true +} \ No newline at end of file diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/nextflow_schema.json b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow_schema.json similarity index 95% rename from nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/nextflow_schema.json rename to nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow_schema.json index 7626c1c93..331e0d2f4 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/nextflow_schema.json +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow_schema.json @@ -1,10 +1,10 @@ { - "$schema": "http://json-schema.org/draft-07/schema", + "$schema": "https://json-schema.org/draft/2020-12/schema", "$id": "https://raw.githubusercontent.com/./master/nextflow_schema.json", "title": ". 
pipeline parameters", "description": "", "type": "object", - "definitions": { + "$defs": { "input_output_options": { "title": "Input/output options", "type": "object", @@ -87,10 +87,10 @@ }, "allOf": [ { - "$ref": "#/definitions/input_output_options" + "$ref": "#/$defs/input_output_options" }, { - "$ref": "#/definitions/generic_options" + "$ref": "#/$defs/generic_options" } ] } diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/main.nf b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/main.nf deleted file mode 100644 index 453894a64..000000000 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/main.nf +++ /dev/null @@ -1,43 +0,0 @@ -// -// Subworkflow that uses the nf-validation plugin to render help text and parameter summary -// - -/* -======================================================================================== - IMPORT NF-VALIDATION PLUGIN -======================================================================================== -*/ - -include { paramsSummaryLog } from 'plugin/nf-schema' -include { validateParameters } from 'plugin/nf-schema' - -/* -======================================================================================== - SUBWORKFLOW DEFINITION -======================================================================================== -*/ - -workflow UTILS_NFVALIDATION_PLUGIN { - - take: - validate_params // boolean: validate parameters - - main: - - log.debug "Using schema file: ${schema_filename}" - - // Default values for strings - pre_help_text = pre_help_text ?: '' - post_help_text = post_help_text ?: '' - workflow_command = workflow_command ?: '' - - // - // Validate parameters relative to the parameter JSON schema - // - if (validate_params){ - validateParameters() - } - - emit: - dummy_emit = true -} diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/meta.yml 
b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/meta.yml deleted file mode 100644 index 3d4a6b04f..000000000 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/meta.yml +++ /dev/null @@ -1,44 +0,0 @@ -# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json -name: "UTILS_NFVALIDATION_PLUGIN" -description: Use nf-validation to initiate and validate a pipeline -keywords: - - utility - - pipeline - - initialise - - validation -components: [] -input: - - print_help: - type: boolean - description: | - Print help message and exit - - workflow_command: - type: string - description: | - The command to run the workflow e.g. "nextflow run main.nf" - - pre_help_text: - type: string - description: | - Text to print before the help message - - post_help_text: - type: string - description: | - Text to print after the help message - - validate_params: - type: boolean - description: | - Validate the parameters and error if invalid. - - schema_filename: - type: string - description: | - The filename of the schema to validate against. 
-output: - - dummy_emit: - type: boolean - description: | - Dummy emit to make nf-core subworkflows lint happy -authors: - - "@adamrtalbot" -maintainers: - - "@adamrtalbot" - - "@maxulysse" diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/nextflow.config b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/nextflow.config deleted file mode 100644 index 8d047ed59..000000000 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/nextflow.config +++ /dev/null @@ -1 +0,0 @@ -validation.monochromeLogs = true \ No newline at end of file diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/tags.yml b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/tags.yml deleted file mode 100644 index 60b1cfff4..000000000 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/tags.yml +++ /dev/null @@ -1,2 +0,0 @@ -subworkflows/utils_nfvalidation_plugin: - - subworkflows/nf-core/utils_nfvalidation_plugin/** From 171640b62dddcb81feddced4f674df88c47654ab Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Tue, 20 Aug 2024 16:13:59 +0200 Subject: [PATCH 525/737] some more small updates to the template --- nf_core/pipeline-template/nextflow.config | 2 +- .../subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index 346292c98..ef067dcfa 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -255,7 +255,7 @@ manifest { homePage = 'https://github.com/{{ name }}' description = """{{ description }}""" mainScript = 'main.nf' - nextflowVersion = '!>=23.04.0' + nextflowVersion = '!>=23.10.0' version = '{{ version }}' doi = '' } diff --git 
a/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf index c83d2ae0c..a994cfd58 100644 --- a/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf +++ b/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf @@ -8,7 +8,7 @@ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ -include { UTILS_NFVALIDATION_PLUGIN } from '../../nf-core/utils_nfvalidation_plugin' +include { UTILS_NFSCHEMA_PLUGIN } from '../../nf-core/utils_nfschema_plugin' include { paramsSummaryMap } from 'plugin/nf-schema' include { samplesheetToList } from 'plugin/nf-schema' include { UTILS_NEXTFLOW_PIPELINE } from '../../nf-core/utils_nextflow_pipeline' @@ -53,7 +53,7 @@ workflow PIPELINE_INITIALISATION { // // Validate parameters and generate parameter summary to stdout // - UTILS_NFVALIDATION_PLUGIN ( + UTILS_NFSCHEMA_PLUGIN ( validate_params ) From f72a2c325324dfc21d494d4135ae03da416ee709 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 20 Aug 2024 16:19:19 +0200 Subject: [PATCH 526/737] update pytest test_make_pipeline_schema --- tests/pipelines/test_launch.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/pipelines/test_launch.py b/tests/pipelines/test_launch.py index da7618d48..7c6e3d619 100644 --- a/tests/pipelines/test_launch.py +++ b/tests/pipelines/test_launch.py @@ -60,7 +60,7 @@ def test_make_pipeline_schema(self, tmp_path): Path(test_pipeline_dir, "nextflow_schema.json").unlink() self.launcher = nf_core.pipelines.launch.Launch(test_pipeline_dir, params_out=self.nf_params_fn) self.launcher.get_pipeline_schema() - assert len(self.launcher.schema_obj.schema["definitions"]["input_output_options"]["properties"]) > 2 + assert len(self.launcher.schema_obj.schema["definitions"]["input_output_options"]["properties"]) >= 2 assert 
self.launcher.schema_obj.schema["definitions"]["input_output_options"]["properties"]["outdir"] == { "type": "string", "format": "directory-path", From 249b3da383f17ab0beecfeb9e8346619edaf6776 Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Tue, 20 Aug 2024 16:59:22 +0200 Subject: [PATCH 527/737] update handling of ignored parameters --- .../.github/workflows/ci.yml | 2 +- nf_core/pipeline-template/README.md | 2 +- nf_core/pipelines/lint/nextflow_config.py | 15 +++++-------- nf_core/pipelines/schema.py | 22 ++++++++++++++----- 4 files changed, 23 insertions(+), 18 deletions(-) diff --git a/nf_core/pipeline-template/.github/workflows/ci.yml b/nf_core/pipeline-template/.github/workflows/ci.yml index 6b2547765..63fa99cec 100644 --- a/nf_core/pipeline-template/.github/workflows/ci.yml +++ b/nf_core/pipeline-template/.github/workflows/ci.yml @@ -24,7 +24,7 @@ jobs: strategy: matrix: NXF_VER: - - "23.04.0" + - "23.10.0" - "latest-everything" steps: - name: Check out pipeline code diff --git a/nf_core/pipeline-template/README.md b/nf_core/pipeline-template/README.md index 7718d2e5f..d31dee93f 100644 --- a/nf_core/pipeline-template/README.md +++ b/nf_core/pipeline-template/README.md @@ -16,7 +16,7 @@ [![Cite with Zenodo](http://img.shields.io/badge/DOI-10.5281/zenodo.XXXXXXX-1073c8?labelColor=000000)](https://doi.org/10.5281/zenodo.XXXXXXX) [![nf-test](https://img.shields.io/badge/unit_tests-nf--test-337ab7.svg)](https://www.nf-test.com) -[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A523.04.0-23aa62.svg)](https://www.nextflow.io/) +[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A523.10.0-23aa62.svg)](https://www.nextflow.io/) [![run with conda](http://img.shields.io/badge/run%20with-conda-3EB049?labelColor=000000&logo=anaconda)](https://docs.conda.io/en/latest/) [![run with docker](https://img.shields.io/badge/run%20with-docker-0db7ed?labelColor=000000&logo=docker)](https://www.docker.com/) [![run with 
singularity](https://img.shields.io/badge/run%20with-singularity-1d355c.svg?labelColor=000000)](https://sylabs.io/docs/) diff --git a/nf_core/pipelines/lint/nextflow_config.py b/nf_core/pipelines/lint/nextflow_config.py index a5b7cf325..150a89e10 100644 --- a/nf_core/pipelines/lint/nextflow_config.py +++ b/nf_core/pipelines/lint/nextflow_config.py @@ -65,14 +65,6 @@ def nextflow_config(self) -> Dict[str, List[str]]: * Should always be set to default value: ``https://raw.githubusercontent.com/nf-core/configs/${params.custom_config_version}`` - * ``params.validationShowHiddenParams`` - - * Determines whether boilerplate params are showed by schema. Set to ``false`` by default - - * ``params.validationSchemaIgnoreParams`` - - * A comma separated string of inputs the schema validation should ignore. - **The following variables throw warnings if missing:** * ``manifest.mainScript``: The filename of the main pipeline script (should be ``main.nf``) @@ -151,8 +143,11 @@ def nextflow_config(self) -> Dict[str, List[str]]: ["process.time"], ["params.outdir"], ["params.input"], - ["params.validationShowHiddenParams"], - ["params.validationSchemaIgnoreParams"], + ["validation.help.enabled"], + ["validation.help.beforeText"], + ["validation.help.afterText"], + ["validation.summary.beforeText"], + ["validation.summary.afterText"] ] # Throw a warning if these are missing config_warn = [ diff --git a/nf_core/pipelines/schema.py b/nf_core/pipelines/schema.py index 9db945751..78f431709 100644 --- a/nf_core/pipelines/schema.py +++ b/nf_core/pipelines/schema.py @@ -50,6 +50,7 @@ def __init__(self): self.validation_plugin = None self.schema_draft = None self.defs_notation = None + self.ignored_params = [] # Update the validation plugin code everytime the schema gets changed def set_schema_filename(self, schema: str) -> None: @@ -69,16 +70,26 @@ def del_schema_filename(self) -> None: def _update_validation_plugin_from_config(self, config: str) -> None: plugin = "nf-schema" with 
open(Path(config)) as conf: - nf_schema_pattern = re.compile("id\s*[\"']nf-schema", re.MULTILINE) - nf_validation_pattern = re.compile("id\s*[\"']nf-validation", re.MULTILINE) + nf_schema_pattern = re.compile(r"id\s*[\"']nf-schema", re.MULTILINE) + nf_validation_pattern = re.compile(r"id\s*[\"']nf-validation", re.MULTILINE) config_content = conf.read() if re.search(nf_validation_pattern, config_content): plugin = "nf-validation" + self.ignored_params = self.pipeline_params.get("validationSchemaIgnoreParams", "").strip("\"'").split(",") + self.ignored_params.append("validationSchemaIgnoreParams") elif re.search(nf_schema_pattern, config_content): plugin = "nf-schema" + ignored_params_pattern = re.compile(r"defaultIgnoreParams\s*=\s*\[([^\]]*)\]", re.MULTILINE) + ignored_params_match = re.search(ignored_params_pattern, config_content) + ignored_params = ["help", "helpFull", "showHidden"] # Help parameter should be ignored by default + if ignored_params_match and len(ignored_params_match.groups()) == 1: + ignored_params.extend(ignored_params_match.group(1).replace("\"", "").replace("'", '').replace(" ", "").split(",")) + self.ignored_params = ignored_params else: log.warning("Could not find nf-schema or nf-validation in the pipeline config. 
Defaulting to nf-schema") + + self.validation_plugin = plugin # Previous versions of nf-schema used "defs", but it's advised to use "$defs" if plugin == "nf-schema": @@ -845,13 +856,12 @@ def add_schema_found_configs(self): Update defaults if they have changed """ params_added = [] - params_ignore = self.pipeline_params.get("validationSchemaIgnoreParams", "").strip("\"'").split(",") - params_ignore.append("validationSchemaIgnoreParams") + for p_key, p_val in self.pipeline_params.items(): s_key = self.schema_params.get(p_key) # Check if key is in schema parameters # Key is in pipeline but not in schema or ignored from schema - if p_key not in self.schema_params and p_key not in params_ignore: + if p_key not in self.schema_params and p_key not in self.ignored_params: if ( self.no_prompts or self.schema_from_scratch @@ -884,7 +894,7 @@ def add_schema_found_configs(self): elif ( s_key and (p_key not in self.schema_defaults) - and (p_key not in params_ignore) + and (p_key not in self.ignored_params) and (p_def := self.build_schema_param(p_val).get("default")) ): if self.no_prompts or Confirm.ask( From af34d0718db98544a8da402952aea87ca778a60e Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Tue, 20 Aug 2024 17:50:32 +0200 Subject: [PATCH 528/737] update validation plugin fetching --- nf_core/pipeline-template/nextflow.config | 2 +- nf_core/pipelines/schema.py | 58 ++++++++++++----------- 2 files changed, 32 insertions(+), 28 deletions(-) diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index ef067dcfa..3a98c40f0 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -61,7 +61,7 @@ params { max_time = '240.h' // Schema validation default options - validate_params = true + validate_params = true } diff --git a/nf_core/pipelines/schema.py b/nf_core/pipelines/schema.py index 78f431709..e9c5556db 100644 --- a/nf_core/pipelines/schema.py +++ 
b/nf_core/pipelines/schema.py @@ -69,35 +69,45 @@ def del_schema_filename(self) -> None: def _update_validation_plugin_from_config(self, config: str) -> None: plugin = "nf-schema" - with open(Path(config)) as conf: - nf_schema_pattern = re.compile(r"id\s*[\"']nf-schema", re.MULTILINE) - nf_validation_pattern = re.compile(r"id\s*[\"']nf-validation", re.MULTILINE) - config_content = conf.read() - if re.search(nf_validation_pattern, config_content): - plugin = "nf-validation" - self.ignored_params = self.pipeline_params.get("validationSchemaIgnoreParams", "").strip("\"'").split(",") - self.ignored_params.append("validationSchemaIgnoreParams") - elif re.search(nf_schema_pattern, config_content): + conf = nf_core.utils.fetch_wf_config(Path(self.schema_filename).parent) + + plugins = str(conf.get("plugins", "")).strip("\"").strip("'").strip(" ").split(",") + plugin_found = False + for plugin_instance in plugins: + if "nf-schema" in plugin_instance: plugin = "nf-schema" - ignored_params_pattern = re.compile(r"defaultIgnoreParams\s*=\s*\[([^\]]*)\]", re.MULTILINE) - ignored_params_match = re.search(ignored_params_pattern, config_content) - ignored_params = ["help", "helpFull", "showHidden"] # Help parameter should be ignored by default - if ignored_params_match and len(ignored_params_match.groups()) == 1: - ignored_params.extend(ignored_params_match.group(1).replace("\"", "").replace("'", '').replace(" ", "").split(",")) - self.ignored_params = ignored_params - else: - log.warning("Could not find nf-schema or nf-validation in the pipeline config. Defaulting to nf-schema") - + plugin_found = True + break + elif "nf-validation" in plugin_instance: + plugin = "nf-validation" + plugin_found = True + break + + if not plugin_found: + log.warning("Could not find nf-schema or nf-validation in the pipeline config. 
Defaulting to nf-schema") + + if "nf-validation" in plugins: + plugin = "nf-validation" + elif "nf-schema" in plugins: + plugin = "nf-schema" - self.validation_plugin = plugin # Previous versions of nf-schema used "defs", but it's advised to use "$defs" if plugin == "nf-schema": self.defs_notation = "$defs" + ignored_params = ["help", "helpFull", "showHidden"] # Help parameter should be ignored by default + ignored_params_config = conf.get("validation", {}).get("defaultIgnoreParams", []) + if len(ignored_params_config) > 0: + ignored_params.extend(ignored_params_config) + self.ignored_params = ignored_params self.schema_draft = "https://json-schema.org/draft/2020-12/schema" + else: self.defs_notation = "definitions" self.schema_draft = "https://json-schema.org/draft-07/schema" + self.get_wf_params() + self.ignored_params = self.pipeline_params.get("validationSchemaIgnoreParams", "").strip("\"'").split(",") + self.ignored_params.append("validationSchemaIgnoreParams") def get_schema_path( self, path: Union[str, Path], local_only: bool = False, revision: Union[str, None] = None @@ -325,17 +335,11 @@ def validate_default_params(self): if self.pipeline_params == {}: self.get_wf_params() - # Collect parameters to ignore - if "validationSchemaIgnoreParams" in self.pipeline_params: - params_ignore = self.pipeline_params.get("validationSchemaIgnoreParams", "").strip("\"'").split(",") - else: - params_ignore = [] - # Go over group keys for group_key, group in schema_no_required.get(self.defs_notation, {}).items(): group_properties = group.get("properties") for param in group_properties: - if param in params_ignore: + if param in self.ignored_params: continue if param in self.pipeline_params: self.validate_config_default_parameter(param, group_properties[param], self.pipeline_params[param]) @@ -348,7 +352,7 @@ def validate_default_params(self): ungrouped_properties = self.schema.get("properties") if ungrouped_properties: for param in ungrouped_properties: - if param in 
params_ignore: + if param in self.ignored_params: continue if param in self.pipeline_params: self.validate_config_default_parameter( From e6381b71c67fcc1ecb18c8b3c6e796dd6df0dec7 Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Wed, 21 Aug 2024 10:59:31 +0200 Subject: [PATCH 529/737] update plugin linting --- nf_core/pipelines/lint/nextflow_config.py | 56 ++++++++--------------- 1 file changed, 20 insertions(+), 36 deletions(-) diff --git a/nf_core/pipelines/lint/nextflow_config.py b/nf_core/pipelines/lint/nextflow_config.py index 150a89e10..bd2f4f768 100644 --- a/nf_core/pipelines/lint/nextflow_config.py +++ b/nf_core/pipelines/lint/nextflow_config.py @@ -1,3 +1,4 @@ +import ast import logging import re from pathlib import Path @@ -330,6 +331,25 @@ def nextflow_config(self) -> Dict[str, List[str]]: ) ) + # Lint for plugins + config_plugins = ast.literal_eval(self.nf_config.get("plugins", "").strip("\"")) + found_plugins = [] + if len(config_plugins) == 0: + failed.append("nextflow.config contains an empty plugins scope") + for plugin in config_plugins: + if "@" not in plugin: + failed.append(f"Plugin '{plugin}' does not have a pinned version") + found_plugins.append(plugin.split("@")[0]) + + if "nf-validation" in found_plugins and "nf-schema" in found_plugins: + failed.append("nextflow.config contains both nf-validation and nf-schema") + if "nf-validation" not in found_plugins and "nf-schema" not in found_plugins: + failed.append("nextflow.config does not contain `nf-validation` or `nf-schema` in the plugins scope") + + if "nf-validation" in found_plugins: + warned.append("nf-validation has been detected in the pipeline. 
Please migrate to nf-schema: https://nextflow-io.github.io/nf-schema/latest/migration_guide/") + + # Check for the availability of the "test" configuration profile by parsing nextflow.config # Also check for the presence of nf-validation/nf-schema and check if they have pinned versions with open(Path(self.wf_path, "nextflow.config")) as f: @@ -359,42 +379,6 @@ def nextflow_config(self) -> Dict[str, List[str]]: else: failed.append("nextflow.config does not contain configuration profile `test`") - # Lint for nf-validation and nf-schema - match_plugins = re.search(r"\bplugins\s*\{([^}]+)}", cleaned_content, re.MULTILINE) - if not match_plugins: - failed.append( - "nextflow.config does not contain `plugins` scope, but `nf-validation` or `nf-schema` plugins are required" - ) - else: - found_plugins = {} - for line in match_plugins.group(1).split("\n"): - cleaned_line = line.split("//")[0].strip().replace("\"", "'") - if "id" not in line: continue - match_line = re.search(r"\bid\s'([^']+)'", cleaned_line) - if not match_line: - failed.append(f"nextflow.config contains an invalid plugins identifier: {cleaned_line}") - continue - plugin = match_line.group(1) - name = plugin.split("@")[0] - version = "" - if "@" in plugin: - version = plugin.split("@")[1] - found_plugins[name] = version - - if len(found_plugins) == 0: - failed.append("nextflow.config contains an empty plugins scope") - elif "nf-validation" in found_plugins and "nf-schema" in found_plugins: - failed.append("nextflow.config contains both nf-validation and nf-schema") - elif "nf-validation" in found_plugins and found_plugins["nf-validation"] == "": - failed.append("nextflow.config contains an unpinned version of nf-validation") - elif "nf-schema" in found_plugins and found_plugins["nf-schema"] == "": - failed.append("nextflow.config contains an unpinned version of nf-schema") - elif "nf-validation" not in found_plugins and "nf-schema" not in found_plugins: - failed.append("nextflow.config does not contain 
`nf-validation` or `nf-schema` in the plugins scope") - - if "nf-validation" in found_plugins: - warned.append("nf-validation has been detected in the pipeline. Please migrate to nf-schema: https://nextflow-io.github.io/nf-schema/latest/migration_guide/") - # Check that the default values in nextflow.config match the default values defined in the nextflow_schema.json ignore_defaults = [] for item in ignore_configs: From e66a732e7db6452fb12b0f311762b19bbe7f3418 Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Wed, 21 Aug 2024 11:01:16 +0200 Subject: [PATCH 530/737] ignore genomes for now --- nf_core/pipeline-template/nextflow.config | 2 +- nf_core/pipeline-template/nextflow_schema.json | 6 ------ 2 files changed, 1 insertion(+), 7 deletions(-) diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index 3a98c40f0..bb5567800 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -266,7 +266,7 @@ plugins { } validation { - parametersSchema = "${projectDir}/nextflow_schema.json" + defaultIgnoreParams = ["genomes"] help { enabled = true command = "nextflow run $manifest.name -profile --input samplesheet.csv --outdir " diff --git a/nf_core/pipeline-template/nextflow_schema.json b/nf_core/pipeline-template/nextflow_schema.json index 14120bbab..908d31017 100644 --- a/nf_core/pipeline-template/nextflow_schema.json +++ b/nf_core/pipeline-template/nextflow_schema.json @@ -81,12 +81,6 @@ "fa_icon": "fas fa-ban", "hidden": true, "default": "s3://ngi-igenomes/igenomes/" - }, - "genomes": { - "type": "object", - "description": "An object containing all reference data availabe in igenomes", - "fa_icon": "fas fa-ban", - "hidden": true } } }, From b6ae8d01dd440558cb3a2af821a04ea8fc89591a Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Wed, 21 Aug 2024 12:23:52 +0200 Subject: [PATCH 531/737] improve config linting for validation even more --- 
nf_core/pipelines/lint/nextflow_config.py | 59 ++++++++++++++--------- 1 file changed, 35 insertions(+), 24 deletions(-) diff --git a/nf_core/pipelines/lint/nextflow_config.py b/nf_core/pipelines/lint/nextflow_config.py index bd2f4f768..6e292deb0 100644 --- a/nf_core/pipelines/lint/nextflow_config.py +++ b/nf_core/pipelines/lint/nextflow_config.py @@ -144,11 +144,7 @@ def nextflow_config(self) -> Dict[str, List[str]]: ["process.time"], ["params.outdir"], ["params.input"], - ["validation.help.enabled"], - ["validation.help.beforeText"], - ["validation.help.afterText"], - ["validation.summary.beforeText"], - ["validation.summary.afterText"] + ["validation.help.enabled"] ] # Throw a warning if these are missing config_warn = [ @@ -157,6 +153,11 @@ def nextflow_config(self) -> Dict[str, List[str]]: ["trace.file"], ["report.file"], ["dag.file"], + ["validation.help.beforeText"], + ["validation.help.afterText"], + ["validation.help.command"], + ["validation.summary.beforeText"], + ["validation.summary.afterText"] ] # Old depreciated vars - fail if present config_fail_ifdefined = [ @@ -168,6 +169,35 @@ def nextflow_config(self) -> Dict[str, List[str]]: "params.enable_conda", ] + # Lint for plugins + config_plugins = ast.literal_eval(self.nf_config.get("plugins", "").strip("\"")) + found_plugins = [] + if len(config_plugins) == 0: + failed.append("nextflow.config contains an empty plugins scope") + for plugin in config_plugins: + if "@" not in plugin: + failed.append(f"Plugin '{plugin}' does not have a pinned version") + found_plugins.append(plugin.split("@")[0]) + + if "nf-validation" in found_plugins and "nf-schema" in found_plugins: + failed.append("nextflow.config contains both nf-validation and nf-schema") + if "nf-validation" not in found_plugins and "nf-schema" not in found_plugins: + failed.append("nextflow.config does not contain `nf-validation` or `nf-schema` in the plugins scope") + + if "nf-schema" in found_plugins: + if 
self.nf_config.get("validation.help.enabled", "false") == "false": + failed.append("The help message has not been enabled. Set the `validation.help.enabled` configuration option to `true` to enable help messages") + config_fail_ifdefined.extend([ + "params.validationFailUnrecognisedParams", + "params.validationLenientMode", + "params.validationSchemaIgnoreParams", + "params.validationShowHiddenParams" + ]) + + if "nf-validation" in found_plugins: + warned.append("nf-validation has been detected in the pipeline. Please migrate to nf-schema: https://nextflow-io.github.io/nf-schema/latest/migration_guide/") + + # Remove field that should be ignored according to the linting config ignore_configs = self.lint_config.get("nextflow_config", []) if self.lint_config is not None else [] @@ -331,25 +361,6 @@ def nextflow_config(self) -> Dict[str, List[str]]: ) ) - # Lint for plugins - config_plugins = ast.literal_eval(self.nf_config.get("plugins", "").strip("\"")) - found_plugins = [] - if len(config_plugins) == 0: - failed.append("nextflow.config contains an empty plugins scope") - for plugin in config_plugins: - if "@" not in plugin: - failed.append(f"Plugin '{plugin}' does not have a pinned version") - found_plugins.append(plugin.split("@")[0]) - - if "nf-validation" in found_plugins and "nf-schema" in found_plugins: - failed.append("nextflow.config contains both nf-validation and nf-schema") - if "nf-validation" not in found_plugins and "nf-schema" not in found_plugins: - failed.append("nextflow.config does not contain `nf-validation` or `nf-schema` in the plugins scope") - - if "nf-validation" in found_plugins: - warned.append("nf-validation has been detected in the pipeline. 
Please migrate to nf-schema: https://nextflow-io.github.io/nf-schema/latest/migration_guide/") - - # Check for the availability of the "test" configuration profile by parsing nextflow.config # Also check for the presence of nf-validation/nf-schema and check if they have pinned versions with open(Path(self.wf_path, "nextflow.config")) as f: From 1ede989be403e25f8b7485503d6a07bb6f20301d Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 21 Aug 2024 11:27:40 +0200 Subject: [PATCH 532/737] add option to exclude adaptivecard and slackreport from pipeline template --- nf_core/pipeline-template/.prettierignore | 4 +++ nf_core/pipeline-template/main.nf | 4 +-- .../utils_nfcore_pipeline_pipeline/main.nf | 6 ++++- nf_core/pipelines/create/templatefeatures.yml | 25 +++++++++++++++++++ 4 files changed, 36 insertions(+), 3 deletions(-) diff --git a/nf_core/pipeline-template/.prettierignore b/nf_core/pipeline-template/.prettierignore index 3b3d5ff08..c8e8ad9e1 100644 --- a/nf_core/pipeline-template/.prettierignore +++ b/nf_core/pipeline-template/.prettierignore @@ -1,8 +1,12 @@ {%- if email %} email_template.html {%- endif %} +{%- if adaptivecard %} adaptivecard.json +{%- endif %} +{%- if slackreport %} slackreport.json +{%- endif %} .nextflow* work/ data/ diff --git a/nf_core/pipeline-template/main.nf b/nf_core/pipeline-template/main.nf index 0cf688a7e..7002a9c2d 100644 --- a/nf_core/pipeline-template/main.nf +++ b/nf_core/pipeline-template/main.nf @@ -102,8 +102,8 @@ workflow { {%- endif %} params.outdir, params.monochrome_logs, - params.hook_url, - {%- if multiqc %}{{ prefix_nodash|upper }}_{{ short_name|upper }}.out.multiqc_report{% endif %} + {% if adaptivecard or slackreport %}params.hook_url,{% endif %} + {% if multiqc %}{{ prefix_nodash|upper }}_{{ short_name|upper }}.out.multiqc_report{% endif %} ) } diff --git a/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf 
b/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf index 62ef73d65..3db8f66cb 100644 --- a/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf +++ b/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf @@ -18,7 +18,9 @@ include { completionEmail } from '../../nf-core/utils_nfcore_pipeline' include { completionSummary } from '../../nf-core/utils_nfcore_pipeline' include { dashedLine } from '../../nf-core/utils_nfcore_pipeline' include { nfCoreLogo } from '../../nf-core/utils_nfcore_pipeline' +{%- if adaptivecard or slackreport %} include { imNotification } from '../../nf-core/utils_nfcore_pipeline' +{%- endif %} include { UTILS_NFCORE_PIPELINE } from '../../nf-core/utils_nfcore_pipeline' include { workflowCitation } from '../../nf-core/utils_nfcore_pipeline' @@ -126,7 +128,7 @@ workflow PIPELINE_COMPLETION { {% endif %} outdir // path: Path to output directory where results will be published monochrome_logs // boolean: Disable ANSI colour codes in log output - hook_url // string: hook URL for notifications + {% if adaptivecard or slackreport %}hook_url // string: hook URL for notifications{% endif %} {% if multiqc %}multiqc_report // string: Path to MultiQC report{% endif %} main: @@ -149,9 +151,11 @@ workflow PIPELINE_COMPLETION { completionSummary(monochrome_logs) + {%- if adaptivecard or slackreport %} if (hook_url) { imNotification(summary_params, hook_url) } + {%- endif %} } workflow.onError { diff --git a/nf_core/pipelines/create/templatefeatures.yml b/nf_core/pipelines/create/templatefeatures.yml index 5dafd7e31..ff56ee190 100644 --- a/nf_core/pipelines/create/templatefeatures.yml +++ b/nf_core/pipelines/create/templatefeatures.yml @@ -275,3 +275,28 @@ email: - ".prettierignore" nfcore_pipelines: False custom_pipelines: True +adaptivecard: + skippable_paths: + - "assets/adaptivecard.json" + short_description: "Add template for status messages" + description: 
"Enable pipeline status update messages through Microsoft Teams" + help_text: | + This adds an Adaptive Card, a snippet of user interface. + This Adaptive Card is used as a template for pipeline update messages and it is compatible with Microsoft Teams. + linting: + files_unchanged: + - ".prettierignore" + nfcore_pipelines: False + custom_pipelines: True +slackreport: + skippable_paths: + - "assets/slackreport.json" + short_description: "Add template for Slack status messages" + description: "Enable pipeline status update messages through Slack" + help_text: | + This adds a JSON template used as a template for pipeline update messages in Slack. + linting: + files_unchanged: + - ".prettierignore" + nfcore_pipelines: False + custom_pipelines: True From c9adbd5bff432f4370a6901e00a576a687cdc416 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 21 Aug 2024 13:58:07 +0200 Subject: [PATCH 533/737] update textual snapshot --- .../__snapshots__/test_create_app.ambr | 512 +++++++++--------- 1 file changed, 256 insertions(+), 256 deletions(-) diff --git a/tests/pipelines/__snapshots__/test_create_app.ambr b/tests/pipelines/__snapshots__/test_create_app.ambr index f2a057847..ee470c915 100644 --- a/tests/pipelines/__snapshots__/test_create_app.ambr +++ b/tests/pipelines/__snapshots__/test_create_app.ambr @@ -851,257 +851,257 @@ font-weight: 700; } - .terminal-2220422576-matrix { + .terminal-3293903238-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2220422576-title { + .terminal-3293903238-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2220422576-r1 { fill: #c5c8c6 } - .terminal-2220422576-r2 { fill: #e3e3e3 } - .terminal-2220422576-r3 { fill: #989898 } - .terminal-2220422576-r4 { fill: #e1e1e1 } - .terminal-2220422576-r5 { fill: #4ebf71;font-weight: bold } - .terminal-2220422576-r6 { fill: #1e1e1e } - .terminal-2220422576-r7 { fill: #507bb3 } - 
.terminal-2220422576-r8 { fill: #e2e2e2 } - .terminal-2220422576-r9 { fill: #808080 } - .terminal-2220422576-r10 { fill: #dde6ed;font-weight: bold } - .terminal-2220422576-r11 { fill: #001541 } - .terminal-2220422576-r12 { fill: #0178d4 } - .terminal-2220422576-r13 { fill: #454a50 } - .terminal-2220422576-r14 { fill: #e2e3e3;font-weight: bold } - .terminal-2220422576-r15 { fill: #000000 } - .terminal-2220422576-r16 { fill: #e4e4e4 } - .terminal-2220422576-r17 { fill: #14191f } - .terminal-2220422576-r18 { fill: #7ae998 } - .terminal-2220422576-r19 { fill: #0a180e;font-weight: bold } - .terminal-2220422576-r20 { fill: #008139 } - .terminal-2220422576-r21 { fill: #fea62b;font-weight: bold } - .terminal-2220422576-r22 { fill: #a7a9ab } - .terminal-2220422576-r23 { fill: #e2e3e3 } + .terminal-3293903238-r1 { fill: #c5c8c6 } + .terminal-3293903238-r2 { fill: #e3e3e3 } + .terminal-3293903238-r3 { fill: #989898 } + .terminal-3293903238-r4 { fill: #e1e1e1 } + .terminal-3293903238-r5 { fill: #4ebf71;font-weight: bold } + .terminal-3293903238-r6 { fill: #1e1e1e } + .terminal-3293903238-r7 { fill: #507bb3 } + .terminal-3293903238-r8 { fill: #e2e2e2 } + .terminal-3293903238-r9 { fill: #808080 } + .terminal-3293903238-r10 { fill: #dde6ed;font-weight: bold } + .terminal-3293903238-r11 { fill: #001541 } + .terminal-3293903238-r12 { fill: #0178d4 } + .terminal-3293903238-r13 { fill: #454a50 } + .terminal-3293903238-r14 { fill: #e2e3e3;font-weight: bold } + .terminal-3293903238-r15 { fill: #000000 } + .terminal-3293903238-r16 { fill: #14191f } + .terminal-3293903238-r17 { fill: #e4e4e4 } + .terminal-3293903238-r18 { fill: #7ae998 } + .terminal-3293903238-r19 { fill: #0a180e;font-weight: bold } + .terminal-3293903238-r20 { fill: #008139 } + .terminal-3293903238-r21 { fill: #fea62b;font-weight: bold } + .terminal-3293903238-r22 { fill: #a7a9ab } + .terminal-3293903238-r23 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + 
- + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Template features - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github CI testsThe pipeline will  Show help  - ▁▁▁▁▁▁▁▁include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - actions for Continuous - Integration (CI)  - testing - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use reference genomesThe pipeline will be  Hide help  - ▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - copy of the most  - common reference  - genome files from  - iGenomes - - - Nf-core pipelines are configured to use a copy of the most common reference ▇▇ - genome files. - - By selecting this option, your pipeline will include a configuration file  - specifying the paths to these files. - - The required code to use these files will also be included in the template.  - When the pipeline user provides an appropriate genome key, the pipeline will - automatically download the required reference files. 
- ▅▅ - For more information about reference genomes in nf-core pipelines, see the  - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github badgesThe README.md file of  Show help  - ▁▁▁▁▁▁▁▁the pipeline will ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - include GitHub badges - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add configuration The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        filesinclude configuration ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - profiles containing  - custom parameters  - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Continue  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode  q Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Template features + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github CI testsThe pipeline will  Show help  + ▁▁▁▁▁▁▁▁include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + actions for Continuous + Integration (CI)  + testing + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use reference genomesThe pipeline will be  Hide help  + ▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + copy of the most  + common reference  + genome files from  + iGenomes + ▅▅ + + Nf-core pipelines are configured to use a copy of the most common reference  + genome files. + + By selecting this option, your pipeline will include a configuration file  + specifying the paths to these files. + + The required code to use these files will also be included in the template.  + When the pipeline user provides an appropriate genome key, the pipeline will + automatically download the required reference files. 
+ ▅▅ + For more information about reference genomes in nf-core pipelines, see the  + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github badgesThe README.md file of  Show help  + ▁▁▁▁▁▁▁▁the pipeline will ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + include GitHub badges + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add configuration The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        filesinclude configuration ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + profiles containing  + custom parameters  + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Back  Continue  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  d Toggle dark mode  q Quit  @@ -2233,255 +2233,255 @@ font-weight: 700; } - .terminal-1370375189-matrix { + .terminal-2443134963-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1370375189-title { + .terminal-2443134963-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1370375189-r1 { fill: #c5c8c6 } - .terminal-1370375189-r2 { fill: #e3e3e3 } - .terminal-1370375189-r3 { fill: #989898 } - .terminal-1370375189-r4 { fill: #e1e1e1 } - .terminal-1370375189-r5 { fill: #4ebf71;font-weight: bold } - .terminal-1370375189-r6 { fill: #1e1e1e } - .terminal-1370375189-r7 { fill: #507bb3 } - .terminal-1370375189-r8 { fill: #e2e2e2 } - .terminal-1370375189-r9 { fill: #808080 } - .terminal-1370375189-r10 { fill: #dde6ed;font-weight: bold } - .terminal-1370375189-r11 { fill: #001541 } - .terminal-1370375189-r12 { fill: #14191f } - .terminal-1370375189-r13 { fill: #454a50 } - .terminal-1370375189-r14 { fill: #7ae998 } - .terminal-1370375189-r15 { fill: #e2e3e3;font-weight: bold } - .terminal-1370375189-r16 { fill: #0a180e;font-weight: bold } - .terminal-1370375189-r17 { fill: #000000 } - .terminal-1370375189-r18 { fill: #008139 } - .terminal-1370375189-r19 { fill: #fea62b;font-weight: bold } - .terminal-1370375189-r20 { fill: #a7a9ab } - .terminal-1370375189-r21 { fill: #e2e3e3 } + .terminal-2443134963-r1 { fill: #c5c8c6 } + .terminal-2443134963-r2 { 
fill: #e3e3e3 } + .terminal-2443134963-r3 { fill: #989898 } + .terminal-2443134963-r4 { fill: #e1e1e1 } + .terminal-2443134963-r5 { fill: #4ebf71;font-weight: bold } + .terminal-2443134963-r6 { fill: #1e1e1e } + .terminal-2443134963-r7 { fill: #507bb3 } + .terminal-2443134963-r8 { fill: #e2e2e2 } + .terminal-2443134963-r9 { fill: #808080 } + .terminal-2443134963-r10 { fill: #dde6ed;font-weight: bold } + .terminal-2443134963-r11 { fill: #001541 } + .terminal-2443134963-r12 { fill: #14191f } + .terminal-2443134963-r13 { fill: #454a50 } + .terminal-2443134963-r14 { fill: #7ae998 } + .terminal-2443134963-r15 { fill: #e2e3e3;font-weight: bold } + .terminal-2443134963-r16 { fill: #0a180e;font-weight: bold } + .terminal-2443134963-r17 { fill: #000000 } + .terminal-2443134963-r18 { fill: #008139 } + .terminal-2443134963-r19 { fill: #fea62b;font-weight: bold } + .terminal-2443134963-r20 { fill: #a7a9ab } + .terminal-2443134963-r21 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Template features - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github CI testsThe pipeline will  Show help  - ▁▁▁▁▁▁▁▁include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - actions for Continuous - Integration (CI)  - testing - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use reference genomesThe pipeline will be  Show help  - ▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - copy of the most  - common reference  - genome files from  - iGenomes - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github badgesThe README.md file of  Show help  - ▁▁▁▁▁▁▁▁the pipeline will ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - include GitHub badges▁▁ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add configuration The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        filesinclude configuration 
▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - profiles containing  - custom parameters  - requried to run  - nf-core pipelines at  - different institutions - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use code lintersThe pipeline will  Show help  - ▁▁▁▁▁▁▁▁include code linters ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - and CI tests to lint  - your code: pre-commit, - editor-config and  - prettier. - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Include citationsInclude pipeline tools Show help  - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Continue  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode  q Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Template features + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github CI testsThe pipeline will  Show help  + ▁▁▁▁▁▁▁▁include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + actions for Continuous + Integration (CI)  + testing + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use reference genomesThe pipeline will be  Show help  + ▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + copy of the most  + common reference  + genome files from  + iGenomes + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github badgesThe README.md file of  Show help ▃▃ + ▁▁▁▁▁▁▁▁the pipeline will ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + include GitHub badges + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add configuration The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        filesinclude configuration ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + profiles containing  + custom parameters  + requried to run  + nf-core pipelines at  + different institutions + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use code lintersThe pipeline will  Show help  + ▁▁▁▁▁▁▁▁include code linters ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + and CI tests to lint  + your code: pre-commit, + editor-config and  + prettier. 
+ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Include citationsInclude pipeline tools Show help  + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Back  Continue  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  d Toggle dark mode  q Quit  From 1be17370b30fe34adccb5b698d26145e05522e99 Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Wed, 21 Aug 2024 14:56:37 +0200 Subject: [PATCH 534/737] check plugin includes in pipeline --- nf_core/pipelines/lint/__init__.py | 3 ++ nf_core/pipelines/lint/nextflow_config.py | 4 ++- nf_core/pipelines/lint/plugin_includes.py | 38 +++++++++++++++++++++++ 3 files changed, 44 insertions(+), 1 deletion(-) create mode 100644 nf_core/pipelines/lint/plugin_includes.py diff --git a/nf_core/pipelines/lint/__init__.py b/nf_core/pipelines/lint/__init__.py index ed833d321..219cdd8f5 100644 --- a/nf_core/pipelines/lint/__init__.py +++ b/nf_core/pipelines/lint/__init__.py @@ -45,6 +45,7 @@ from .nfcore_yml import nfcore_yml from .pipeline_name_conventions import pipeline_name_conventions from .pipeline_todos import pipeline_todos +from .plugin_includes import plugin_includes from .readme import readme from .schema_description import schema_description from .schema_lint import schema_lint @@ -92,6 +93,7 @@ class PipelineLint(nf_core.utils.Pipeline): nfcore_yml = nfcore_yml pipeline_name_conventions = pipeline_name_conventions pipeline_todos = pipeline_todos + plugin_includes = plugin_includes readme = readme schema_description = schema_description schema_lint = schema_lint @@ -135,6 +137,7 @@ def _get_all_lint_tests(release_mode): "actions_awsfulltest", "readme", "pipeline_todos", + "plugin_includes", "pipeline_name_conventions", "template_strings", "schema_lint", diff --git a/nf_core/pipelines/lint/nextflow_config.py b/nf_core/pipelines/lint/nextflow_config.py index 6e292deb0..a735c5042 100644 --- a/nf_core/pipelines/lint/nextflow_config.py +++ b/nf_core/pipelines/lint/nextflow_config.py @@ -170,7 +170,7 @@ def nextflow_config(self) -> Dict[str, List[str]]: ] # 
Lint for plugins - config_plugins = ast.literal_eval(self.nf_config.get("plugins", "").strip("\"")) + config_plugins = ast.literal_eval(self.nf_config.get("plugins", "")) found_plugins = [] if len(config_plugins) == 0: failed.append("nextflow.config contains an empty plugins scope") @@ -185,6 +185,7 @@ def nextflow_config(self) -> Dict[str, List[str]]: failed.append("nextflow.config does not contain `nf-validation` or `nf-schema` in the plugins scope") if "nf-schema" in found_plugins: + passed.append("Found nf-schema plugin") if self.nf_config.get("validation.help.enabled", "false") == "false": failed.append("The help message has not been enabled. Set the `validation.help.enabled` configuration option to `true` to enable help messages") config_fail_ifdefined.extend([ @@ -195,6 +196,7 @@ def nextflow_config(self) -> Dict[str, List[str]]: ]) if "nf-validation" in found_plugins: + passed.append("Found nf-validation plugin") warned.append("nf-validation has been detected in the pipeline. Please migrate to nf-schema: https://nextflow-io.github.io/nf-schema/latest/migration_guide/") diff --git a/nf_core/pipelines/lint/plugin_includes.py b/nf_core/pipelines/lint/plugin_includes.py new file mode 100644 index 000000000..247c4e493 --- /dev/null +++ b/nf_core/pipelines/lint/plugin_includes.py @@ -0,0 +1,38 @@ +import ast +import glob +import logging +import re +from typing import Dict, List + +log = logging.getLogger(__name__) + + +def plugin_includes(self) -> Dict[str, List[str]]: + """Checks the include statements in the all *.nf files for plugin includes + + When nf-schema is used in an nf-core pipeline, the include statements of the plugin + functions have to use nf-schema instead of nf-validation and vice versa + """ + config_plugins = [plugin.split("@")[0] for plugin in ast.literal_eval(self.nf_config.get("plugins", ""))] + validation_plugin = "nf-validation" if "nf-validation" in config_plugins else "nf-schema" + + passed = [] + warned = [] + failed = [] + ignored = [] 
+ + plugin_include_pattern = re.compile(r"^include\s*{[^}]+}\s*from\s*[\"']plugin/([^\"']+)[\"']\s*$", re.MULTILINE) + workflow_files = [file for file in glob.glob(f"{self.wf_path}/**/*.nf", recursive=True) if not file.startswith("./modules/")] + test_passed = True + for file in workflow_files: + with open(file, "r") as of: + plugin_includes = re.findall(plugin_include_pattern, of.read()) + for include in plugin_includes: + if include not in ["nf-validation", "nf-schema"]: continue + if include != validation_plugin: + test_passed = False + failed.append(f"Found a `{include}` plugin import in `{file[2:]}`, but `{validation_plugin}` was used in `nextflow.config`") + + if test_passed: passed.append("No wrong validation plugin imports have been found") + + return {"passed": passed, "warned": warned, "failed": failed, "ignored": ignored} From f7137ca3c6e0240033cb56e690125cfbed54301b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Wed, 21 Aug 2024 16:44:58 +0200 Subject: [PATCH 535/737] Apply suggestions from code review --- nf_core/pipelines/create/templatefeatures.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/pipelines/create/templatefeatures.yml b/nf_core/pipelines/create/templatefeatures.yml index ff56ee190..6a5fac358 100644 --- a/nf_core/pipelines/create/templatefeatures.yml +++ b/nf_core/pipelines/create/templatefeatures.yml @@ -278,7 +278,7 @@ email: adaptivecard: skippable_paths: - "assets/adaptivecard.json" - short_description: "Add template for status messages" + short_description: "Support Microsoft Teams notifications" description: "Enable pipeline status update messages through Microsoft Teams" help_text: | This adds an Adaptive Card. A snippets of user interface. 
@@ -291,7 +291,7 @@ adaptivecard: slackreport: skippable_paths: - "assets/slackreport.json" - short_description: "Add template for slack status messages" + short_description: "Support Slack notifications" description: "Enable pipeline status update messages through Slack" help_text: | This adds an JSON template used as a template for pipeline update messages in Slack. From 2f1a4428dc73e15aea2fc001bec4df323700035e Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Thu, 22 Aug 2024 14:51:07 +0200 Subject: [PATCH 536/737] make nf-schema optional in the template --- .../assets/schema_input.json | 2 +- nf_core/pipeline-template/main.nf | 1 - nf_core/pipeline-template/modules.json | 4 +- nf_core/pipeline-template/nextflow.config | 9 ++-- .../pipeline-template/nextflow_schema.json | 3 +- .../utils_nfcore_pipeline_pipeline/main.nf | 18 +++++--- .../nf-core/utils_nfcore_pipeline/main.nf | 46 +++++++++++++++++++ .../pipeline-template/workflows/pipeline.nf | 2 +- nf_core/pipelines/create/templatefeatures.yml | 10 ++++ nf_core/pipelines/lint/nextflow_config.py | 20 ++++---- 10 files changed, 90 insertions(+), 25 deletions(-) diff --git a/nf_core/pipeline-template/assets/schema_input.json b/nf_core/pipeline-template/assets/schema_input.json index e76b95fa9..28a468ada 100644 --- a/nf_core/pipeline-template/assets/schema_input.json +++ b/nf_core/pipeline-template/assets/schema_input.json @@ -1,5 +1,5 @@ { - "$schema": "http://json-schema.org/draft-07/schema", + "$schema": "https://json-schema.org/draft/2020-12/schema", "$id": "https://raw.githubusercontent.com/{{ name }}/master/assets/schema_input.json", "title": "{{ name }} pipeline - params.input schema", "description": "Schema for the file provided with params.input", diff --git a/nf_core/pipeline-template/main.nf b/nf_core/pipeline-template/main.nf index fddfc5489..f644f52af 100644 --- a/nf_core/pipeline-template/main.nf +++ b/nf_core/pipeline-template/main.nf @@ -76,7 +76,6 @@ workflow { // PIPELINE_INITIALISATION ( 
params.version, - params.help, params.validate_params, params.monochrome_logs, args, diff --git a/nf_core/pipeline-template/modules.json b/nf_core/pipeline-template/modules.json index 1a3b96490..0a883b60b 100644 --- a/nf_core/pipeline-template/modules.json +++ b/nf_core/pipeline-template/modules.json @@ -29,12 +29,12 @@ "branch": "master", "git_sha": "92de218a329bfc9a9033116eb5f65fd270e72ba3", "installed_by": ["subworkflows"] - }, + }{% if nf_schema %}, "utils_nfschema_plugin": { "branch": "master", "git_sha": "a3e87febb28bd0461c22a917c5b2c1492053ef85", "installed_by": ["subworkflows"] - } + }{% endif %} } } } diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index bb5567800..4acc34e40 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -37,9 +37,9 @@ params { plaintext_email = false monochrome_logs = false hook_url = null - help = false + {% if nf_schema %}help = false helpFull = false - showHidden = false + showHidden = false{% endif %} version = false pipelines_testdata_base_path = 'https://raw.githubusercontent.com/nf-core/test-datasets/' @@ -260,13 +260,14 @@ manifest { doi = '' } +{% if nf_schema -%} // Nextflow plugins plugins { id 'nf-schema@2.1.0' // Validation of pipeline parameters and creation of an input channel from a sample sheet } validation { - defaultIgnoreParams = ["genomes"] + defaultIgnoreParams = ["genomes", "helpFull", "showHidden", "help-full", "show-hidden"] // The last 4 parameters are here because of a bug in nf-schema. 
This will be fixed in a later version help { enabled = true command = "nextflow run $manifest.name -profile --input samplesheet.csv --outdir " @@ -293,7 +294,7 @@ validation { afterText = validation.help.afterText } } - +{% endif -%} // Load modules.config for DSL2 module specific options includeConfig 'conf/modules.config' diff --git a/nf_core/pipeline-template/nextflow_schema.json b/nf_core/pipeline-template/nextflow_schema.json index 908d31017..c729c370b 100644 --- a/nf_core/pipeline-template/nextflow_schema.json +++ b/nf_core/pipeline-template/nextflow_schema.json @@ -40,8 +40,7 @@ "type": "string", "description": "MultiQC report title. Printed as page header, used for filename if not otherwise specified.", "fa_icon": "fas fa-file-signature" - } - {% endif %} + }{% endif %} } }, {%- if igenomes %} diff --git a/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf index a994cfd58..1fef6912b 100644 --- a/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf +++ b/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf @@ -8,17 +8,14 @@ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ -include { UTILS_NFSCHEMA_PLUGIN } from '../../nf-core/utils_nfschema_plugin' +{% if nf_schema %}include { UTILS_NFSCHEMA_PLUGIN } from '../../nf-core/utils_nfschema_plugin' include { paramsSummaryMap } from 'plugin/nf-schema' -include { samplesheetToList } from 'plugin/nf-schema' -include { UTILS_NEXTFLOW_PIPELINE } from '../../nf-core/utils_nextflow_pipeline' +include { samplesheetToList } from 'plugin/nf-schema'{% endif %} include { completionEmail } from '../../nf-core/utils_nfcore_pipeline' include { completionSummary } from '../../nf-core/utils_nfcore_pipeline' -include { dashedLine } from '../../nf-core/utils_nfcore_pipeline' -include { nfCoreLogo } from 
'../../nf-core/utils_nfcore_pipeline' include { imNotification } from '../../nf-core/utils_nfcore_pipeline' include { UTILS_NFCORE_PIPELINE } from '../../nf-core/utils_nfcore_pipeline' -include { workflowCitation } from '../../nf-core/utils_nfcore_pipeline' +include { UTILS_NEXTFLOW_PIPELINE } from '../../nf-core/utils_nextflow_pipeline' /* ======================================================================================== @@ -50,12 +47,14 @@ workflow PIPELINE_INITIALISATION { workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1 ) + {% if nf_schema %} // // Validate parameters and generate parameter summary to stdout // UTILS_NFSCHEMA_PLUGIN ( validate_params ) + {% endif %} // // Check config provided to the pipeline @@ -74,6 +73,7 @@ workflow PIPELINE_INITIALISATION { // // Create channel from input file provided through params.input // + {% if nf_schema %} Channel .fromList(samplesheetToList(params.input, "${projectDir}/assets/schema_input.json")) .map { @@ -93,6 +93,12 @@ workflow PIPELINE_INITIALISATION { return [ meta, fastqs.flatten() ] } .set { ch_samplesheet } + {% else %} + Channel + .fromPath(params.input) + .splitCsv(header: true, strip: true) + .set { ch_samplesheet } + {% endif %} emit: samplesheet = ch_samplesheet diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf index a5dcd5f83..14558c392 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf @@ -61,6 +61,25 @@ def checkProfileProvided(nextflow_cli_args) { } } +// +// Citation string for pipeline +// +def workflowCitation() { + def temp_doi_ref = "" + String[] manifest_doi = workflow.manifest.doi.tokenize(",") + // Using a loop to handle multiple DOIs + // Removing `https://doi.org/` to handle pipelines using DOIs vs DOI resolvers + // 
Removing ` ` since the manifest.doi is a string and not a proper list + for (String doi_ref: manifest_doi) temp_doi_ref += " https://doi.org/${doi_ref.replace('https://doi.org/', '').replace(' ', '')}\n" + return "If you use ${workflow.manifest.name} for your analysis please cite:\n\n" + + "* The pipeline\n" + + temp_doi_ref + "\n" + + "* The nf-core framework\n" + + " https://doi.org/10.1038/s41587-020-0439-x\n\n" + + "* Software dependencies\n" + + " https://github.com/${workflow.manifest.name}/blob/master/CITATIONS.md" +} + // // Generate workflow version string // @@ -138,6 +157,33 @@ def paramsSummaryMultiqc(summary_params) { return yaml_file_text } +// +// nf-core logo +// +def nfCoreLogo(monochrome_logs=true) { + Map colors = logColours(monochrome_logs) + String.format( + """\n + ${dashedLine(monochrome_logs)} + ${colors.green},--.${colors.black}/${colors.green},-.${colors.reset} + ${colors.blue} ___ __ __ __ ___ ${colors.green}/,-._.--~\'${colors.reset} + ${colors.blue} |\\ | |__ __ / ` / \\ |__) |__ ${colors.yellow}} {${colors.reset} + ${colors.blue} | \\| | \\__, \\__/ | \\ |___ ${colors.green}\\`-._,-`-,${colors.reset} + ${colors.green}`._,._,\'${colors.reset} + ${colors.purple} ${workflow.manifest.name} ${getWorkflowVersion()}${colors.reset} + ${dashedLine(monochrome_logs)} + """.stripIndent() + ) +} + +// +// Return dashed line +// +def dashedLine(monochrome_logs=true) { + Map colors = logColours(monochrome_logs) + return "-${colors.dim}----------------------------------------------------${colors.reset}-" +} + // // ANSII colours used for terminal logging // diff --git a/nf_core/pipeline-template/workflows/pipeline.nf b/nf_core/pipeline-template/workflows/pipeline.nf index 4bb091321..8d2773820 100644 --- a/nf_core/pipeline-template/workflows/pipeline.nf +++ b/nf_core/pipeline-template/workflows/pipeline.nf @@ -6,7 +6,7 @@ include { FASTQC } from '../modules/nf-core/fastqc/main' {% if multiqc %}include { MULTIQC } from 
'../modules/nf-core/multiqc/main'{% endif %} -include { paramsSummaryMap } from 'plugin/nf-schema' +{% if nf_schema %}include { paramsSummaryMap } from 'plugin/nf-schema'{% endif %} {% if multiqc %}include { paramsSummaryMultiqc } from '../subworkflows/nf-core/utils_nfcore_pipeline'{% endif %} include { softwareVersionsToYAML } from '../subworkflows/nf-core/utils_nfcore_pipeline' {% if citations or multiqc %}include { methodsDescriptionText } from '../subworkflows/local/utils_nfcore_{{ short_name }}_pipeline'{% endif %} diff --git a/nf_core/pipelines/create/templatefeatures.yml b/nf_core/pipelines/create/templatefeatures.yml index b97bf347a..7a24b3d91 100644 --- a/nf_core/pipelines/create/templatefeatures.yml +++ b/nf_core/pipelines/create/templatefeatures.yml @@ -242,3 +242,13 @@ changelog: - "CHANGELOG.md" nfcore_pipelines: False custom_pipelines: True +nf_schema: + skippable_paths: + - "subworkflows/nf-core/utils_nfschema_plugin" + short_description: "Use nf-schema for this pipeline." + help_text: | + nf-schema is used to validate input parameters based on a JSON schema. + It also provides helper functionality to create help messages, get a summary + of changed parameters and validate and convert a samplesheet to a channel. 
+ nfcore_pipelines: False + custom_pipelines: False diff --git a/nf_core/pipelines/lint/nextflow_config.py b/nf_core/pipelines/lint/nextflow_config.py index a735c5042..1dfdabaf1 100644 --- a/nf_core/pipelines/lint/nextflow_config.py +++ b/nf_core/pipelines/lint/nextflow_config.py @@ -143,8 +143,7 @@ def nextflow_config(self) -> Dict[str, List[str]]: ["process.memory"], ["process.time"], ["params.outdir"], - ["params.input"], - ["validation.help.enabled"] + ["params.input"] ] # Throw a warning if these are missing config_warn = [ @@ -152,12 +151,7 @@ def nextflow_config(self) -> Dict[str, List[str]]: ["timeline.file"], ["trace.file"], ["report.file"], - ["dag.file"], - ["validation.help.beforeText"], - ["validation.help.afterText"], - ["validation.help.command"], - ["validation.summary.beforeText"], - ["validation.summary.afterText"] + ["dag.file"] ] # Old depreciated vars - fail if present config_fail_ifdefined = [ @@ -188,6 +182,16 @@ def nextflow_config(self) -> Dict[str, List[str]]: passed.append("Found nf-schema plugin") if self.nf_config.get("validation.help.enabled", "false") == "false": failed.append("The help message has not been enabled. 
Set the `validation.help.enabled` configuration option to `true` to enable help messages") + config_fail.extend([ + ["validation.help.enabled"] + ]) + config_warn.extend([ + ["validation.help.beforeText"], + ["validation.help.afterText"], + ["validation.help.command"], + ["validation.summary.beforeText"], + ["validation.summary.afterText"] + ]) config_fail_ifdefined.extend([ "params.validationFailUnrecognisedParams", "params.validationLenientMode", From 364b54bde2994927d3124bfdf53d97bd1ecc6350 Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Thu, 22 Aug 2024 15:57:29 +0200 Subject: [PATCH 537/737] small fixes for template without nf-schema --- .../utils_nfcore_pipeline_pipeline/main.nf | 33 ++++++++++--------- .../pipeline-template/workflows/pipeline.nf | 6 ++-- nf_core/pipelines/lint/nextflow_config.py | 6 ++-- nf_core/pipelines/lint/plugin_includes.py | 2 +- 4 files changed, 25 insertions(+), 22 deletions(-) diff --git a/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf index d96f20354..0867a0a4c 100644 --- a/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf +++ b/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf @@ -77,9 +77,14 @@ workflow PIPELINE_INITIALISATION { // // Create channel from input file provided through params.input // - {% if nf_schema %} - Channel - .fromList(samplesheetToList(params.input, "${projectDir}/assets/schema_input.json")) + + Channel{% if nf_schema %} + .fromList(samplesheetToList(params.input, "${projectDir}/assets/schema_input.json")){% else %} + .fromPath(params.input) + .splitCsv(header: true, strip: true) + .map { row -> + [[id:row.sample], row.fastq_1, row.fastq_2] + }{% endif %} .map { meta, fastq_1, fastq_2 -> if (!fastq_2) { @@ -97,12 +102,6 @@ workflow PIPELINE_INITIALISATION { return [ meta, fastqs.flatten() ] } .set { 
ch_samplesheet } - {% else %} - Channel - .fromPath(params.input) - .splitCsv(header: true, strip: true) - .set { ch_samplesheet } - {% endif %} emit: samplesheet = ch_samplesheet @@ -130,19 +129,21 @@ workflow PIPELINE_COMPLETION { main: - summary_params = paramsSummaryMap(workflow, parameters_schema: "nextflow_schema.json") - // // Completion email and summary // workflow.onComplete { {%- if email %} if (email || email_on_fail) { - {%- if multiqc %} - completionEmail(summary_params, email, email_on_fail, plaintext_email, outdir, monochrome_logs, multiqc_report.toList()) - {%- else %} - completionEmail(summary_params, email, email_on_fail, plaintext_email, outdir, monochrome_logs, []) - {%- endif %} + completionEmail( + {% if nf_schema %}paramsSummaryMap(workflow, parameters_schema: "nextflow_schema.json"){% else %}{}{% endif %}, + email, + email_on_fail, + plaintext_email, + outdir, + monochrome_logs, + {% if multiqc %}multiqc_report.toList(){% else %}[]{% endif %} + ) } {%- endif %} diff --git a/nf_core/pipeline-template/workflows/pipeline.nf b/nf_core/pipeline-template/workflows/pipeline.nf index 8d2773820..ca5de36a6 100644 --- a/nf_core/pipeline-template/workflows/pipeline.nf +++ b/nf_core/pipeline-template/workflows/pipeline.nf @@ -60,9 +60,13 @@ workflow {{ short_name|upper }} { Channel.fromPath(params.multiqc_logo, checkIfExists: true) : Channel.empty() + {% if nf_schema %} summary_params = paramsSummaryMap( workflow, parameters_schema: "nextflow_schema.json") ch_workflow_summary = Channel.value(paramsSummaryMultiqc(summary_params)) + ch_multiqc_files = ch_multiqc_files.mix( + ch_workflow_summary.collectFile(name: 'workflow_summary_mqc.yaml')) + {% endif %} {%- if citations %} ch_multiqc_custom_methods_description = params.multiqc_methods_description ? 
@@ -72,8 +76,6 @@ workflow {{ short_name|upper }} { methodsDescriptionText(ch_multiqc_custom_methods_description)) {%- endif %} - ch_multiqc_files = ch_multiqc_files.mix( - ch_workflow_summary.collectFile(name: 'workflow_summary_mqc.yaml')) ch_multiqc_files = ch_multiqc_files.mix(ch_collated_versions) {%- if citations %} ch_multiqc_files = ch_multiqc_files.mix( diff --git a/nf_core/pipelines/lint/nextflow_config.py b/nf_core/pipelines/lint/nextflow_config.py index 1dfdabaf1..f5689813a 100644 --- a/nf_core/pipelines/lint/nextflow_config.py +++ b/nf_core/pipelines/lint/nextflow_config.py @@ -164,10 +164,10 @@ def nextflow_config(self) -> Dict[str, List[str]]: ] # Lint for plugins - config_plugins = ast.literal_eval(self.nf_config.get("plugins", "")) + config_plugins = ast.literal_eval(self.nf_config.get("plugins", "[]")) found_plugins = [] if len(config_plugins) == 0: - failed.append("nextflow.config contains an empty plugins scope") + warned.append("nextflow.config contains an empty plugins scope") for plugin in config_plugins: if "@" not in plugin: failed.append(f"Plugin '{plugin}' does not have a pinned version") @@ -176,7 +176,7 @@ def nextflow_config(self) -> Dict[str, List[str]]: if "nf-validation" in found_plugins and "nf-schema" in found_plugins: failed.append("nextflow.config contains both nf-validation and nf-schema") if "nf-validation" not in found_plugins and "nf-schema" not in found_plugins: - failed.append("nextflow.config does not contain `nf-validation` or `nf-schema` in the plugins scope") + warned.append("nextflow.config does not contain `nf-validation` or `nf-schema` in the plugins scope") if "nf-schema" in found_plugins: passed.append("Found nf-schema plugin") diff --git a/nf_core/pipelines/lint/plugin_includes.py b/nf_core/pipelines/lint/plugin_includes.py index 247c4e493..740c001a1 100644 --- a/nf_core/pipelines/lint/plugin_includes.py +++ b/nf_core/pipelines/lint/plugin_includes.py @@ -13,7 +13,7 @@ def plugin_includes(self) -> Dict[str, 
List[str]]: When nf-schema is used in an nf-core pipeline, the include statements of the plugin functions have to use nf-schema instead of nf-validation and vice versa """ - config_plugins = [plugin.split("@")[0] for plugin in ast.literal_eval(self.nf_config.get("plugins", ""))] + config_plugins = [plugin.split("@")[0] for plugin in ast.literal_eval(self.nf_config.get("plugins", "[]"))] validation_plugin = "nf-validation" if "nf-validation" in config_plugins else "nf-schema" passed = [] From 4e631384906d2c02ab537276f22ea47c3ceb438d Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Thu, 22 Aug 2024 16:02:58 +0200 Subject: [PATCH 538/737] bump nextflow version --- .github/workflows/create-lint-wf.yml | 2 +- .github/workflows/create-test-wf.yml | 2 +- .../nf-core/utils_nextflow_pipeline/tests/nextflow.config | 2 +- .../nf-core/utils_nfcore_pipeline/tests/nextflow.config | 2 +- nf_core/pipelines/lint/readme.py | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/create-lint-wf.yml b/.github/workflows/create-lint-wf.yml index 7e90febb7..f07b31e9d 100644 --- a/.github/workflows/create-lint-wf.yml +++ b/.github/workflows/create-lint-wf.yml @@ -38,7 +38,7 @@ jobs: strategy: matrix: NXF_VER: - - "23.04.0" + - "23.10.0" - "latest-everything" steps: - name: go to subdirectory and change nextflow workdir diff --git a/.github/workflows/create-test-wf.yml b/.github/workflows/create-test-wf.yml index a95a47745..56c6c822a 100644 --- a/.github/workflows/create-test-wf.yml +++ b/.github/workflows/create-test-wf.yml @@ -39,7 +39,7 @@ jobs: strategy: matrix: NXF_VER: - - "23.04.0" + - "23.10.0" - "latest-everything" steps: - name: go to working directory diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config index d0a926bf6..0fa4abaf4 100644 --- 
a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config @@ -3,7 +3,7 @@ manifest { author = """nf-core""" homePage = 'https://127.0.0.1' description = """Dummy pipeline""" - nextflowVersion = '!>=23.04.0' + nextflowVersion = '!>=23.10.0' version = '9.9.9' doi = 'https://doi.org/10.5281/zenodo.5070524' } diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config index d0a926bf6..0fa4abaf4 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config @@ -3,7 +3,7 @@ manifest { author = """nf-core""" homePage = 'https://127.0.0.1' description = """Dummy pipeline""" - nextflowVersion = '!>=23.04.0' + nextflowVersion = '!>=23.10.0' version = '9.9.9' doi = 'https://doi.org/10.5281/zenodo.5070524' } diff --git a/nf_core/pipelines/lint/readme.py b/nf_core/pipelines/lint/readme.py index 4c1624369..1c0910425 100644 --- a/nf_core/pipelines/lint/readme.py +++ b/nf_core/pipelines/lint/readme.py @@ -36,7 +36,7 @@ def readme(self): if "nextflow_badge" not in ignore_configs: # Check that there is a readme badge showing the minimum required version of Nextflow - # [![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A523.04.0-23aa62.svg)](https://www.nextflow.io/) + # [![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A523.10.0-23aa62.svg)](https://www.nextflow.io/) # and that it has the correct version nf_badge_re = r"\[!\[Nextflow\]\(https://img\.shields\.io/badge/nextflow%20DSL2-!?(?:%E2%89%A5|%3E%3D)([\d\.]+)-23aa62\.svg\)\]\(https://www\.nextflow\.io/\)" match = re.search(nf_badge_re, content) From 11efc5547be76f1a6412d049dbfcd10cd337c660 Mon 
Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Thu, 22 Aug 2024 16:08:41 +0200 Subject: [PATCH 539/737] update changelog --- CHANGELOG.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 98fb92533..b4fcfe864 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -19,6 +19,8 @@ - add templatefeatures.yml to python package ([#3112](https://github.com/nf-core/tools/pull/3112)) - add option to exclude license from pipeline template ([#3125](https://github.com/nf-core/tools/pull/3125)) - add option to exclude email from pipeline template ([#3126](https://github.com/nf-core/tools/pull/3126)) +- Use nf-schema instead of nf-validation ([#3116](https://github.com/nf-core/tools/pull/3116)) +- add option to exclude nf-schema from the template ([#3116](https://github.com/nf-core/tools/pull/3116)) ### Linting @@ -26,6 +28,10 @@ - Conda module linting: Include package name in log file ([#3014](https://github.com/nf-core/tools/pull/3014)) - Restructure pipeline tests and move pipeline linting into subfolder ([#3070](https://github.com/nf-core/tools/pull/3070)) - Fix module linting warning for process_high_memory ([#3086](https://github.com/nf-core/tools/issues/3086)) +- Linting will now fail when an unpinned plugin is used ([#3116](https://github.com/nf-core/tools/pull/3116)) +- Linting will now check if the schema is correct for the used validation plugin ([#3116])(https://github.com/nf-core/tools/pull/3116) +- Linting will now check the use of the right validation plugin include statements in the workflow scripts ([#3116])(https://github.com/nf-core/tools/pull/3116) +- Full linting for correct use of nf-schema and nf-validation ([#3116](https://github.com/nf-core/tools/pull/3116)) ### Download From 47c3fe324280a54d4cae4227ab7679ac488b1edb Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Thu, 22 Aug 2024 16:13:04 +0200 Subject: [PATCH 540/737] fix ruff linting --- nf_core/pipelines/lint/nextflow_config.py | 53 
+++++++++++------------ nf_core/pipelines/lint/plugin_includes.py | 24 ++++++---- nf_core/pipelines/schema.py | 17 +++++--- 3 files changed, 51 insertions(+), 43 deletions(-) diff --git a/nf_core/pipelines/lint/nextflow_config.py b/nf_core/pipelines/lint/nextflow_config.py index f5689813a..255042ce5 100644 --- a/nf_core/pipelines/lint/nextflow_config.py +++ b/nf_core/pipelines/lint/nextflow_config.py @@ -143,16 +143,10 @@ def nextflow_config(self) -> Dict[str, List[str]]: ["process.memory"], ["process.time"], ["params.outdir"], - ["params.input"] + ["params.input"], ] # Throw a warning if these are missing - config_warn = [ - ["manifest.mainScript"], - ["timeline.file"], - ["trace.file"], - ["report.file"], - ["dag.file"] - ] + config_warn = [["manifest.mainScript"], ["timeline.file"], ["trace.file"], ["report.file"], ["dag.file"]] # Old depreciated vars - fail if present config_fail_ifdefined = [ "params.nf_required_version", @@ -181,28 +175,33 @@ def nextflow_config(self) -> Dict[str, List[str]]: if "nf-schema" in found_plugins: passed.append("Found nf-schema plugin") if self.nf_config.get("validation.help.enabled", "false") == "false": - failed.append("The help message has not been enabled. Set the `validation.help.enabled` configuration option to `true` to enable help messages") - config_fail.extend([ - ["validation.help.enabled"] - ]) - config_warn.extend([ - ["validation.help.beforeText"], - ["validation.help.afterText"], - ["validation.help.command"], - ["validation.summary.beforeText"], - ["validation.summary.afterText"] - ]) - config_fail_ifdefined.extend([ - "params.validationFailUnrecognisedParams", - "params.validationLenientMode", - "params.validationSchemaIgnoreParams", - "params.validationShowHiddenParams" - ]) + failed.append( + "The help message has not been enabled. 
Set the `validation.help.enabled` configuration option to `true` to enable help messages" + ) + config_fail.extend([["validation.help.enabled"]]) + config_warn.extend( + [ + ["validation.help.beforeText"], + ["validation.help.afterText"], + ["validation.help.command"], + ["validation.summary.beforeText"], + ["validation.summary.afterText"], + ] + ) + config_fail_ifdefined.extend( + [ + "params.validationFailUnrecognisedParams", + "params.validationLenientMode", + "params.validationSchemaIgnoreParams", + "params.validationShowHiddenParams", + ] + ) if "nf-validation" in found_plugins: passed.append("Found nf-validation plugin") - warned.append("nf-validation has been detected in the pipeline. Please migrate to nf-schema: https://nextflow-io.github.io/nf-schema/latest/migration_guide/") - + warned.append( + "nf-validation has been detected in the pipeline. Please migrate to nf-schema: https://nextflow-io.github.io/nf-schema/latest/migration_guide/" + ) # Remove field that should be ignored according to the linting config ignore_configs = self.lint_config.get("nextflow_config", []) if self.lint_config is not None else [] diff --git a/nf_core/pipelines/lint/plugin_includes.py b/nf_core/pipelines/lint/plugin_includes.py index 740c001a1..6a57b1d28 100644 --- a/nf_core/pipelines/lint/plugin_includes.py +++ b/nf_core/pipelines/lint/plugin_includes.py @@ -16,23 +16,29 @@ def plugin_includes(self) -> Dict[str, List[str]]: config_plugins = [plugin.split("@")[0] for plugin in ast.literal_eval(self.nf_config.get("plugins", "[]"))] validation_plugin = "nf-validation" if "nf-validation" in config_plugins else "nf-schema" - passed = [] - warned = [] - failed = [] - ignored = [] + passed: list[str] = [] + warned: list[str] = [] + failed: list[str] = [] + ignored: list[str] = [] plugin_include_pattern = re.compile(r"^include\s*{[^}]+}\s*from\s*[\"']plugin/([^\"']+)[\"']\s*$", re.MULTILINE) - workflow_files = [file for file in glob.glob(f"{self.wf_path}/**/*.nf", recursive=True) if 
not file.startswith("./modules/")] + workflow_files = [ + file for file in glob.glob(f"{self.wf_path}/**/*.nf", recursive=True) if not file.startswith("./modules/") + ] test_passed = True for file in workflow_files: - with open(file, "r") as of: + with open(file) as of: plugin_includes = re.findall(plugin_include_pattern, of.read()) for include in plugin_includes: - if include not in ["nf-validation", "nf-schema"]: continue + if include not in ["nf-validation", "nf-schema"]: + continue if include != validation_plugin: test_passed = False - failed.append(f"Found a `{include}` plugin import in `{file[2:]}`, but `{validation_plugin}` was used in `nextflow.config`") + failed.append( + f"Found a `{include}` plugin import in `{file[2:]}`, but `{validation_plugin}` was used in `nextflow.config`" + ) - if test_passed: passed.append("No wrong validation plugin imports have been found") + if test_passed: + passed.append("No wrong validation plugin imports have been found") return {"passed": passed, "warned": warned, "failed": failed, "ignored": ignored} diff --git a/nf_core/pipelines/schema.py b/nf_core/pipelines/schema.py index e9c5556db..ccbc86645 100644 --- a/nf_core/pipelines/schema.py +++ b/nf_core/pipelines/schema.py @@ -5,7 +5,6 @@ import logging import tempfile import webbrowser -import re from pathlib import Path from typing import Union @@ -58,10 +57,10 @@ def set_schema_filename(self, schema: str) -> None: basepath = "/".join(str(schema).split("/")[:-1]) config = f"{basepath}/nextflow.config" if basepath != "" else "nextflow.config" self._update_validation_plugin_from_config(config) - + def get_schema_filename(self) -> str: return self._schema_filename - + def del_schema_filename(self) -> None: del self._schema_filename @@ -71,7 +70,7 @@ def _update_validation_plugin_from_config(self, config: str) -> None: plugin = "nf-schema" conf = nf_core.utils.fetch_wf_config(Path(self.schema_filename).parent) - plugins = str(conf.get("plugins", 
"")).strip("\"").strip("'").strip(" ").split(",") + plugins = str(conf.get("plugins", "")).strip('"').strip("'").strip(" ").split(",") plugin_found = False for plugin_instance in plugins: if "nf-schema" in plugin_instance: @@ -95,7 +94,7 @@ def _update_validation_plugin_from_config(self, config: str) -> None: # Previous versions of nf-schema used "defs", but it's advised to use "$defs" if plugin == "nf-schema": self.defs_notation = "$defs" - ignored_params = ["help", "helpFull", "showHidden"] # Help parameter should be ignored by default + ignored_params = ["help", "helpFull", "showHidden"] # Help parameter should be ignored by default ignored_params_config = conf.get("validation", {}).get("defaultIgnoreParams", []) if len(ignored_params_config) > 0: ignored_params.extend(ignored_params_config) @@ -447,7 +446,9 @@ def validate_schema(self, schema=None): # Add a small check for older nf-schema JSON schemas if "defs" in schema: - raise AssertionError(f'Using "defs" for schema definitions is not supported. Please use {self.defs_notation} instead') + raise AssertionError( + f'Using "defs" for schema definitions is not supported. 
Please use {self.defs_notation} instead' + ) for d_key, d_schema in schema.get(self.defs_notation, {}).items(): # Check that this definition is mentioned in allOf @@ -458,7 +459,9 @@ def validate_schema(self, schema=None): if allOf["$ref"] == f"#/{self.defs_notation}/{d_key}": in_allOf = True if not in_allOf: - raise AssertionError(f"Definition subschema `#/{self.defs_notation}/{d_key}` not included in schema `allOf`") + raise AssertionError( + f"Definition subschema `#/{self.defs_notation}/{d_key}` not included in schema `allOf`" + ) # TODO add support for nested parameters for d_param_id in d_schema.get("properties", {}): From 09bfca2663275c37173ce8018c858164974479f8 Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Thu, 22 Aug 2024 16:29:39 +0200 Subject: [PATCH 541/737] add a plugin includes test --- tests/pipelines/lint/test_plugin_includes.py | 23 ++++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 tests/pipelines/lint/test_plugin_includes.py diff --git a/tests/pipelines/lint/test_plugin_includes.py b/tests/pipelines/lint/test_plugin_includes.py new file mode 100644 index 000000000..21e6107c1 --- /dev/null +++ b/tests/pipelines/lint/test_plugin_includes.py @@ -0,0 +1,23 @@ +import nf_core.pipelines.lint + +from ..test_lint import TestLint + +class TestLintNextflowConfig(TestLint): + def setUp(self) -> None: + super().setUp() + self.new_pipeline = self._make_pipeline_copy() + + def test_default_values_match(self): + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + result = lint_obj.plugin_includes() + assert len(result["failed"]) == 0 + assert len(result["warned"]) == 0 + + def test_wrong_include(self): + test_path = self.new_pipeline / "test.nf" + with open(test_path, "w") as of: + of.write("include { paramsSummary } from 'plugin/nf-validation'\n") + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + result = lint_obj.plugin_includes() + assert len(result["failed"]) == 1 + assert 
len(result["warned"]) == 0 \ No newline at end of file From 44dbbf7a2b095144cb954bb1ed09578776979c92 Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Fri, 23 Aug 2024 09:50:51 +0200 Subject: [PATCH 542/737] update review comments --- nf_core/pipeline-template/nextflow.config | 7 ++++--- nf_core/pipelines/create/templatefeatures.yml | 7 ++++--- nf_core/pipelines/lint/nextflow_config.py | 1 - nf_core/pipelines/schema.py | 2 +- 4 files changed, 9 insertions(+), 8 deletions(-) diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index ad800cc55..3b3183ece 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -273,6 +273,7 @@ validation { help { enabled = true command = "nextflow run $manifest.name -profile --input samplesheet.csv --outdir " + {% if is_nfcore %} beforeText = """ -\033[2m----------------------------------------------------\033[0m- \033[0;32m,--.\033[0;30m/\033[0;32m,-.\033[0m @@ -289,12 +290,12 @@ validation { * Software dependencies https://github.com/${manifest.name}/blob/master/CITATIONS.md -""" - } +"""{% endif %} + }{% if is_nfcore %} summary { beforeText = validation.help.beforeText afterText = validation.help.afterText - } + }[% endif %] } {% endif -%} diff --git a/nf_core/pipelines/create/templatefeatures.yml b/nf_core/pipelines/create/templatefeatures.yml index 29b53eb0a..224bc3826 100644 --- a/nf_core/pipelines/create/templatefeatures.yml +++ b/nf_core/pipelines/create/templatefeatures.yml @@ -245,13 +245,14 @@ changelog: nf_schema: skippable_paths: - "subworkflows/nf-core/utils_nfschema_plugin" - short_description: "Use nf-schema for this pipeline." + short_description: "Use nf-schema" + description: "Use nf-schema for this pipeline." help_text: | nf-schema is used to validate input parameters based on a JSON schema. 
It also provides helper functionality to create help messages, get a summary of changed parameters and validate and convert a samplesheet to a channel. - nfcore_pipelines: False - custom_pipelines: False + nfcore_pipelines: True + custom_pipelines: True license: skippable_paths: - "LICENSE" diff --git a/nf_core/pipelines/lint/nextflow_config.py b/nf_core/pipelines/lint/nextflow_config.py index 255042ce5..71eb6f118 100644 --- a/nf_core/pipelines/lint/nextflow_config.py +++ b/nf_core/pipelines/lint/nextflow_config.py @@ -367,7 +367,6 @@ def nextflow_config(self) -> Dict[str, List[str]]: ) # Check for the availability of the "test" configuration profile by parsing nextflow.config - # Also check for the presence of nf-validation/nf-schema and check if they have pinned versions with open(Path(self.wf_path, "nextflow.config")) as f: content = f.read() diff --git a/nf_core/pipelines/schema.py b/nf_core/pipelines/schema.py index ccbc86645..04f5ca2ac 100644 --- a/nf_core/pipelines/schema.py +++ b/nf_core/pipelines/schema.py @@ -447,7 +447,7 @@ def validate_schema(self, schema=None): # Add a small check for older nf-schema JSON schemas if "defs" in schema: raise AssertionError( - f'Using "defs" for schema definitions is not supported. Please use {self.defs_notation} instead' + f'Using "defs" for schema definitions is not supported. 
Please use "{self.defs_notation}" instead' ) for d_key, d_schema in schema.get(self.defs_notation, {}).items(): From ea51814bc2b1549c14605ba66591e7043bb5e4b3 Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Fri, 23 Aug 2024 10:17:15 +0200 Subject: [PATCH 543/737] remove duplicate code --- nf_core/pipelines/schema.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/nf_core/pipelines/schema.py b/nf_core/pipelines/schema.py index 04f5ca2ac..dbe851415 100644 --- a/nf_core/pipelines/schema.py +++ b/nf_core/pipelines/schema.py @@ -85,11 +85,6 @@ def _update_validation_plugin_from_config(self, config: str) -> None: if not plugin_found: log.warning("Could not find nf-schema or nf-validation in the pipeline config. Defaulting to nf-schema") - if "nf-validation" in plugins: - plugin = "nf-validation" - elif "nf-schema" in plugins: - plugin = "nf-schema" - self.validation_plugin = plugin # Previous versions of nf-schema used "defs", but it's advised to use "$defs" if plugin == "nf-schema": From 113fa4226cbdb471607615cf9c45b61bcfbc55c5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Fri, 23 Aug 2024 10:06:12 +0000 Subject: [PATCH 544/737] update utils_nextflow_pipeline --- nf_core/pipeline-template/modules.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipeline-template/modules.json b/nf_core/pipeline-template/modules.json index eb9391b29..8b156083f 100644 --- a/nf_core/pipeline-template/modules.json +++ b/nf_core/pipeline-template/modules.json @@ -22,7 +22,7 @@ "nf-core": { "utils_nextflow_pipeline": { "branch": "master", - "git_sha": "5caf7640a9ef1d18d765d55339be751bb0969dfa", + "git_sha": "20c03aede5a80ff520a905cea1f8ca121b5bb661", "installed_by": ["subworkflows"] }, "utils_nfcore_pipeline": { From 195fc5168c9810d36f994996d588c219b72b9eef Mon Sep 17 00:00:00 2001 From: Mahesh Binzer-Panchal Date: Fri, 23 Aug 2024 11:35:43 +0000 Subject: [PATCH 545/737] Uncomment nextflow extension and align comments 
--- .gitpod.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.gitpod.yml b/.gitpod.yml index b2fbb7313..b93095c78 100644 --- a/.gitpod.yml +++ b/.gitpod.yml @@ -11,12 +11,12 @@ tasks: unset JAVA_TOOL_OPTIONS vscode: - extensions: # based on nf-core.nf-core-extensionpack + extensions: - esbenp.prettier-vscode # Markdown/CommonMark linting and style checking for Visual Studio Code - EditorConfig.EditorConfig # override user/workspace settings with settings found in .editorconfig files - Gruntfuggly.todo-tree # Display TODO and FIXME in a tree view in the activity bar - mechatroner.rainbow-csv # Highlight columns in csv files in different colors - # - nextflow.nextflow # Nextflow syntax highlighting + - nextflow.nextflow # Nextflow syntax highlighting - oderwat.indent-rainbow # Highlight indentation level - streetsidesoftware.code-spell-checker # Spelling checker for source code - charliermarsh.ruff # Code linter Ruff From 65f26b1455b5592ec301fb618464372b3a8280da Mon Sep 17 00:00:00 2001 From: Mahesh Binzer-Panchal Date: Fri, 23 Aug 2024 11:44:48 +0000 Subject: [PATCH 546/737] Add Digest comment and remove conda defaults channel --- nf_core/gitpod/gitpod.Dockerfile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/nf_core/gitpod/gitpod.Dockerfile b/nf_core/gitpod/gitpod.Dockerfile index fd0e8bb79..944222668 100644 --- a/nf_core/gitpod/gitpod.Dockerfile +++ b/nf_core/gitpod/gitpod.Dockerfile @@ -1,6 +1,7 @@ # Test build locally before making a PR # docker build -t gitpod:test -f nf_core/gitpod/gitpod.Dockerfile . +# See https://docs.renovatebot.com/docker/#digest-pinning for why a digest is used. 
FROM gitpod/workspace-base@sha256:f189a4195c3861365356f9c1b438ab26fd88e1ff46ce2843afc62861fc982e0c USER root @@ -40,9 +41,8 @@ RUN chown -R gitpod:gitpod /opt/conda /usr/src/nf_core # Change user to gitpod USER gitpod -# Install nextflow, nf-core, Mamba, and pytest-workflow -RUN conda config --add channels defaults && \ - conda config --add channels bioconda && \ +# Install nextflow, nf-core, nf-test, and other useful tools +RUN conda config --add channels bioconda && \ conda config --add channels conda-forge && \ conda config --set channel_priority strict && \ conda install --quiet --yes --name base \ From 506491f66e3dd612e726b508ab72e125fd8e31b8 Mon Sep 17 00:00:00 2001 From: Mahesh Binzer-Panchal Date: Fri, 23 Aug 2024 12:04:32 +0000 Subject: [PATCH 547/737] Blank JAVA_TOOL_OPTIONS --- nf_core/gitpod/gitpod.Dockerfile | 1 + 1 file changed, 1 insertion(+) diff --git a/nf_core/gitpod/gitpod.Dockerfile b/nf_core/gitpod/gitpod.Dockerfile index 944222668..849729966 100644 --- a/nf_core/gitpod/gitpod.Dockerfile +++ b/nf_core/gitpod/gitpod.Dockerfile @@ -63,3 +63,4 @@ RUN nextflow self-update && \ # Setup pdiff for nf-test diffs ENV NFT_DIFF="pdiff" ENV NFT_DIFF_ARGS="--line-numbers --expand-tabs=2" +ENV JAVA_TOOL_OPTIONS= From 411cd22da23380799e7535b7340a5a0e16510bd7 Mon Sep 17 00:00:00 2001 From: Mahesh Binzer-Panchal Date: Fri, 23 Aug 2024 12:07:25 +0000 Subject: [PATCH 548/737] Remove unset JAVA_TOOL_OPTIONS --- .gitpod.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/.gitpod.yml b/.gitpod.yml index b93095c78..efe193f35 100644 --- a/.gitpod.yml +++ b/.gitpod.yml @@ -6,9 +6,6 @@ tasks: python -m pip install -r requirements-dev.txt pre-commit install --install-hooks nextflow self-update - - name: unset JAVA_TOOL_OPTIONS - command: | - unset JAVA_TOOL_OPTIONS vscode: extensions: From bc6ecda3aec81e24b858b6e98e62eb30ad13dd0a Mon Sep 17 00:00:00 2001 From: Mahesh Binzer-Panchal Date: Fri, 23 Aug 2024 12:11:13 +0000 Subject: [PATCH 549/737] Remove unset 
JAVA_TOOL_OPTIONS from pipeline template --- nf_core/pipeline-template/.gitpod.yml | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/nf_core/pipeline-template/.gitpod.yml b/nf_core/pipeline-template/.gitpod.yml index 30e85ed97..5907fb59c 100644 --- a/nf_core/pipeline-template/.gitpod.yml +++ b/nf_core/pipeline-template/.gitpod.yml @@ -4,9 +4,6 @@ tasks: command: | pre-commit install --install-hooks nextflow self-update - - name: unset JAVA_TOOL_OPTIONS - command: | - unset JAVA_TOOL_OPTIONS vscode: extensions: # based on nf-core.nf-core-extensionpack @@ -15,7 +12,7 @@ vscode: - EditorConfig.EditorConfig # override user/workspace settings with settings found in .editorconfig files{% endif %} - Gruntfuggly.todo-tree # Display TODO and FIXME in a tree view in the activity bar - mechatroner.rainbow-csv # Highlight columns in csv files in different colors - # - nextflow.nextflow # Nextflow syntax highlighting + - nextflow.nextflow # Nextflow syntax highlighting - oderwat.indent-rainbow # Highlight indentation level - streetsidesoftware.code-spell-checker # Spelling checker for source code - charliermarsh.ruff # Code linter Ruff From d324760487b25de1f12027699099bc5d649eaf60 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Fri, 23 Aug 2024 12:14:18 +0000 Subject: [PATCH 550/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 98fb92533..2e13e82f5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -73,6 +73,7 @@ - Components: allow spaces at the betinning of include statements ([#3115](https://github.com/nf-core/tools/pull/3115)) - Update pre-commit hook astral-sh/ruff-pre-commit to v0.6.0 ([#3122](https://github.com/nf-core/tools/pull/3122)) - Update python:3.12-slim Docker digest to 59c7332 ([#3124](https://github.com/nf-core/tools/pull/3124)) +- Update gitpod ([#3136](https://github.com/nf-core/tools/pull/3136)) ## [v2.14.1 - Tantalum Toad - 
Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From 1bcb70fbfc3b17b3a73dec382a96a20d02108617 Mon Sep 17 00:00:00 2001 From: Mahesh Binzer-Panchal Date: Fri, 23 Aug 2024 14:26:26 +0200 Subject: [PATCH 551/737] Update CHANGELOG.md MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Matthias Hörtenhuber --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2e13e82f5..aca2e6f20 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -73,7 +73,7 @@ - Components: allow spaces at the betinning of include statements ([#3115](https://github.com/nf-core/tools/pull/3115)) - Update pre-commit hook astral-sh/ruff-pre-commit to v0.6.0 ([#3122](https://github.com/nf-core/tools/pull/3122)) - Update python:3.12-slim Docker digest to 59c7332 ([#3124](https://github.com/nf-core/tools/pull/3124)) -- Update gitpod ([#3136](https://github.com/nf-core/tools/pull/3136)) +- Update gitpod setup ([#3136](https://github.com/nf-core/tools/pull/3136)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] From 6e018ce6981548fb11ae5f34e3c476305695ad8f Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 23 Aug 2024 15:41:07 +0200 Subject: [PATCH 552/737] organise changelog and bump version to 3.0.0dev --- CHANGELOG.md | 55 +++++++++++++++++++++++++++------------------------- setup.py | 2 +- 2 files changed, 30 insertions(+), 27 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index aca2e6f20..42cb2a62a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,18 @@ # nf-core/tools: Changelog -## v2.14.2dev +## v3.0.0dev + +**Highlights** + +- Pipeline commands are renamed from nf-core to nf-core pipelines +- More customisation for pipeline templates +- A Text User Interface app when running nf-core pipelines create +- The pipeline template will come with the nf-schema plugin +- New command nf-core pipelines 
rocrate to create a Research Object (RO) crate for a pipeline +- `nf-core licences` command is deprecated. +- Pipeline linting will run with the used version of the template +- The structure of nf-core/tools pytests has been updated +- The structure of the API docs has been updated ### Template @@ -9,14 +21,13 @@ - Remove deprecated syntax ([#3046](https://github.com/nf-core/tools/pull/3046)) - Use filename in code block for `params.yml` ([#3055](https://github.com/nf-core/tools/pull/3055)) - Remove release announcement for non nf-core pipelines ([#3072](https://github.com/nf-core/tools/pull/3072)) +- handle template features with a yaml file ([#3108](https://github.com/nf-core/tools/pull/3108), [#3112](https://github.com/nf-core/tools/pull/3112)) - add option to exclude code linters for custom pipeline template ([#3084](https://github.com/nf-core/tools/pull/3084)) - add option to exclude citations for custom pipeline template ([#3101](https://github.com/nf-core/tools/pull/3101)) - add option to exclude gitpod for custom pipeline template ([#3100](https://github.com/nf-core/tools/pull/3100)) - add option to exclude codespaces from pipeline template ([#3105](https://github.com/nf-core/tools/pull/3105)) - add option to exclude multiqc from pipeline template ([#3103](https://github.com/nf-core/tools/pull/3103)) - add option to exclude changelog from custom pipeline template ([#3104](https://github.com/nf-core/tools/pull/3104)) -- handle template features with a yaml file ([#3108](https://github.com/nf-core/tools/pull/3108)) -- add templatefeatures.yml to python package ([#3112](https://github.com/nf-core/tools/pull/3112)) - add option to exclude license from pipeline template ([#3125](https://github.com/nf-core/tools/pull/3125)) - add option to exclude email from pipeline template ([#3126](https://github.com/nf-core/tools/pull/3126)) @@ -27,53 +38,45 @@ - Restructure pipeline tests and move pipeline linting into subfolder 
([#3070](https://github.com/nf-core/tools/pull/3070)) - Fix module linting warning for process_high_memory ([#3086](https://github.com/nf-core/tools/issues/3086)) -### Download +### Pipeline create command + +- Create: allow more special characters on the pipeline name for non-nf-core pipelines ([#3008](https://github.com/nf-core/tools/pull/3008)) +- Create: Mock git cretentials to generate stable textual snapshots ([#3007](https://github.com/nf-core/tools/pull/3007)) +- Create app: display input textbox with equally spaced grid ([#3038](https://github.com/nf-core/tools/pull/3038)) +- Pipelines: allow numbers in custom pipeline name ([#3094](https://github.com/nf-core/tools/pull/3094)) ### Components - The `modules_nfcore` tag in the `main.nf.test` file of modules/subworkflows now displays the organization name in custom modules repositories ([#3005](https://github.com/nf-core/tools/pull/3005)) +- Add `--migrate_pytest` option to `nf-core test` command ([#3085](https://github.com/nf-core/tools/pull/3085)) +- Components: allow spaces at the beginning of include statements ([#3115](https://github.com/nf-core/tools/pull/3115)) ### General -- Update pre-commit hook astral-sh/ruff-pre-commit to v0.4.4 ([#2974](https://github.com/nf-core/tools/pull/2974)) -- Update gitpod/workspace-base Docker digest to 92dd1bc ([#2982](https://github.com/nf-core/tools/pull/2982)) - Update output of generation script for API docs to new structure ([#2988](https://github.com/nf-core/tools/pull/2988)) - Add no clobber and put bash options on their own line ([#2991](https://github.com/nf-core/tools/pull/2991)) - update minimal textual version and snapshots ([#2998](https://github.com/nf-core/tools/pull/2998)) - move pipeline subcommands for v3.0 ([#2983](https://github.com/nf-core/tools/pull/2983)) - return directory if base_dir is the root directory ([#3003](https://github.com/nf-core/tools/pull/3003)) -- Update pre-commit hook astral-sh/ruff-pre-commit to v0.4.6 
([#3006](https://github.com/nf-core/tools/pull/3006)) -- Create: allow more special characters on the pipeline name for non-nf-core pipelines ([#3008](https://github.com/nf-core/tools/pull/3008)) - Remove nf-core licences command ([#3012](https://github.com/nf-core/tools/pull/3012)) - README - absolute image paths ([#3013](https://github.com/nf-core/tools/pull/3013)) -- Update pre-commit hook astral-sh/ruff-pre-commit to v0.4.7 ([#3015](https://github.com/nf-core/tools/pull/3015)) -- Update pre-commit hook astral-sh/ruff-pre-commit to v0.4.8 ([#3017](https://github.com/nf-core/tools/pull/3017)) -- Update python:3.12-slim Docker digest to e3ae8cf ([#3020](https://github.com/nf-core/tools/pull/3020)) -- Update python:3.12-slim Docker digest to 2fba8e7 ([#3023](https://github.com/nf-core/tools/pull/3023)) -- Update pre-commit hook astral-sh/ruff-pre-commit to v0.4.10 ([#3031](https://github.com/nf-core/tools/pull/3031)) - Add warning deprecation message to top-level commands ([#3036](https://github.com/nf-core/tools/pull/3036)) -- Create: Mock git cretentials to generate stable textual snapshots ([#3007](https://github.com/nf-core/tools/pull/3007)) - move pipeline commands to functions to avoid duplication ([#3039](https://github.com/nf-core/tools/pull/3039)) -- Create app: display input textbox with equally spaced grid ([#3038](https://github.com/nf-core/tools/pull/3038)) -- Update python:3.12-slim Docker digest to da2d7af ([#3041](https://github.com/nf-core/tools/pull/3041)) -- Update gitpod/workspace-base Docker digest to 0f38224 ([#3048](https://github.com/nf-core/tools/pull/3048)) - update output_dir for api docs to new website structure ([#3051](https://github.com/nf-core/tools/pull/3051)) -- Update pre-commit hook astral-sh/ruff-pre-commit to v0.5.1 ([#3052](https://github.com/nf-core/tools/pull/3052)) - Add `--limit-output` argument for modules/subworkflow update ([#3047](https://github.com/nf-core/tools/pull/3047)) - update api docs to new structure 
([#3054](https://github.com/nf-core/tools/pull/3054)) -- Update to pytest v8 and move it to dev dependencies ([#3058](https://github.com/nf-core/tools/pull/3058)) - handle new jsonschema error type ([#3061](https://github.com/nf-core/tools/pull/3061)) -- Update python:3.12-slim Docker digest to f11725a ([#3071](https://github.com/nf-core/tools/pull/3071)) - Fix number of arguments for pipelines_create within the command_create function ([#3074](https://github.com/nf-core/tools/pull/3074)) -- Update python:3.12-slim Docker digest to 740d94a ([#3079](https://github.com/nf-core/tools/pull/3079)) -- Add `--migrate_pytest` option to `nf-core test` command ([#3085](https://github.com/nf-core/tools/pull/3085)) -- Update pre-commit hook pre-commit/mirrors-mypy to v1.11.1 ([#3091](https://github.com/nf-core/tools/pull/3091)) -- Pipelines: allow numbers in custom pipeline name ([#3094](https://github.com/nf-core/tools/pull/3094)) - Add bot action to update textual snapshots and write bot documentation ([#3102](https://github.com/nf-core/tools/pull/3102)) -- Components: allow spaces at the betinning of include statements ([#3115](https://github.com/nf-core/tools/pull/3115)) +- Update gitpod setup ([#3136](https://github.com/nf-core/tools/pull/3136)) + +## Version updates + - Update pre-commit hook astral-sh/ruff-pre-commit to v0.6.0 ([#3122](https://github.com/nf-core/tools/pull/3122)) +- Update gitpod/workspace-base Docker digest to 92dd1bc ([#2982](https://github.com/nf-core/tools/pull/2982)) - Update python:3.12-slim Docker digest to 59c7332 ([#3124](https://github.com/nf-core/tools/pull/3124)) -- Update gitpod setup ([#3136](https://github.com/nf-core/tools/pull/3136)) +- Update pre-commit hook pre-commit/mirrors-mypy to v1.11.1 ([#3091](https://github.com/nf-core/tools/pull/3091)) +- Update to pytest v8 and move it to dev dependencies ([#3058](https://github.com/nf-core/tools/pull/3058)) ## [v2.14.1 - Tantalum Toad - 
Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] diff --git a/setup.py b/setup.py index 8f32daa90..45df29b8b 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import find_packages, setup -version = "2.14.2dev" +version = "3.0.0dev" with open("README.md") as f: readme = f.read() From 41b2676c8746e92960907c3263f296ea7f606d96 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 23 Aug 2024 15:52:40 +0200 Subject: [PATCH 553/737] more details! :D --- CHANGELOG.md | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 42cb2a62a..2a3e98f63 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,13 +4,13 @@ **Highlights** -- Pipeline commands are renamed from nf-core to nf-core pipelines -- More customisation for pipeline templates -- A Text User Interface app when running nf-core pipelines create -- The pipeline template will come with the nf-schema plugin -- New command nf-core pipelines rocrate to create a Research Object (RO) crate for a pipeline +- Pipeline commands are renamed from `nf-core ` to `nf-core pipelines ` to match the structure of modules and subworkflows commands. +- More customisation for pipeline templates. The template has been divided into features which can be skipped, making it easier to avoid annoying merge conflicts, for example, for pipeline which don't have a FastQC module. +- A new Text User Interface app when running nf-core pipelines create +- The pipeline template will come with the `nf-schema` plugin +- Pipeline linting will run with the used version of the template to minimise errors in opened PRs with every new tools release. +- New command `nf-core pipelines rocrate` to create a [Research Object (RO) crate](https://www.researchobject.org/ro-crate/about_ro_crate) for a pipeline - `nf-core licences` command is deprecated. 
-- Pipeline linting will run with the used version of the template - The structure of nf-core/tools pytests has been updated - The structure of the API docs has been updated From 8000e1f5c31e7903ff72bb2453b67ca8db262f67 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 23 Aug 2024 16:04:19 +0200 Subject: [PATCH 554/737] add suggestions from @mashehu --- CHANGELOG.md | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2a3e98f63..5590abc04 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,12 +4,12 @@ **Highlights** -- Pipeline commands are renamed from `nf-core ` to `nf-core pipelines ` to match the structure of modules and subworkflows commands. -- More customisation for pipeline templates. The template has been divided into features which can be skipped, making it easier to avoid annoying merge conflicts, for example, for pipeline which don't have a FastQC module. -- A new Text User Interface app when running nf-core pipelines create -- The pipeline template will come with the `nf-schema` plugin -- Pipeline linting will run with the used version of the template to minimise errors in opened PRs with every new tools release. -- New command `nf-core pipelines rocrate` to create a [Research Object (RO) crate](https://www.researchobject.org/ro-crate/about_ro_crate) for a pipeline +- Pipeline commands are renamed from `nf-core ` to `nf-core pipelines ` to follow the same command structure as modules and subworkflows commands. +- More customisation for pipeline templates. The template has been divided into features which can be skipped, e.g. you can create a new pipeline without any traces of FastQC in it. 
+- A new Text User Interface app when running `nf-core pipelines create` to help us guide you through the process better (no worries, you can still use the cli if you give all values as parameters) +- We replaced nf-validation with nf-schema in the pipeline template +- CI tests now lint with the nf-core tools version matching the template version of the pipeline, to minimise errors in opened PRs with every new tools release. +- New command `nf-core pipelines ro-crate` to create a [Research Object (RO) crate](https://www.researchobject.org/ro-crate/) for a pipeline - `nf-core licences` command is deprecated. - The structure of nf-core/tools pytests has been updated - The structure of the API docs has been updated From f2b282c696ebfff78ac4e328dca3d9a25b258ec4 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 21 Aug 2024 16:12:58 +0200 Subject: [PATCH 555/737] add option to exclude fastqc from pipeline template --- nf_core/pipeline-template/CITATIONS.md | 7 +++---- nf_core/pipeline-template/README.md | 4 ++-- nf_core/pipeline-template/conf/modules.config | 2 ++ nf_core/pipeline-template/docs/output.md | 9 ++++++--- nf_core/pipeline-template/modules.json | 3 ++- .../local/utils_nfcore_pipeline_pipeline/main.nf | 4 ++-- nf_core/pipeline-template/workflows/pipeline.nf | 4 +++- nf_core/pipelines/create/templatefeatures.yml | 10 ++++++++++ 8 files changed, 30 insertions(+), 13 deletions(-) diff --git a/nf_core/pipeline-template/CITATIONS.md b/nf_core/pipeline-template/CITATIONS.md index 7cf37e501..2373f1de7 100644 --- a/nf_core/pipeline-template/CITATIONS.md +++ b/nf_core/pipeline-template/CITATIONS.md @@ -12,11 +12,10 @@ ## Pipeline tools -- [FastQC](https://www.bioinformatics.babraham.ac.uk/projects/fastqc/) +{% if fastqc %}- [FastQC](https://www.bioinformatics.babraham.ac.uk/projects/fastqc/) - > Andrews, S. (2010). FastQC: A Quality Control Tool for High Throughput Sequence Data [Online]. 
- -{% if multiqc %}- [MultiQC](https://pubmed.ncbi.nlm.nih.gov/27312411/) +> Andrews, S. (2010). FastQC: A Quality Control Tool for High Throughput Sequence Data [Online]. +> {% endif %} > {% if multiqc %}- [MultiQC](https://pubmed.ncbi.nlm.nih.gov/27312411/) > Ewels P, Magnusson M, Lundin S, Käller M. MultiQC: summarize analysis results for multiple tools and samples in a single report. Bioinformatics. 2016 Oct 1;32(19):3047-8. doi: 10.1093/bioinformatics/btw354. Epub 2016 Jun 16. PubMed PMID: 27312411; PubMed Central PMCID: PMC5039924. > {%- endif %} diff --git a/nf_core/pipeline-template/README.md b/nf_core/pipeline-template/README.md index 7718d2e5f..a618f87bf 100644 --- a/nf_core/pipeline-template/README.md +++ b/nf_core/pipeline-template/README.md @@ -44,8 +44,8 @@ workflows use the "tube map" design for that. See https://nf-co.re/docs/contributing/design_guidelines#examples for examples. --> -1. Read QC ([`FastQC`](https://www.bioinformatics.babraham.ac.uk/projects/fastqc/)) - {% if multiqc %}2. Present QC for raw reads ([`MultiQC`](http://multiqc.info/)){% endif %} +{% if fastqc %}1. Read QC ([`FastQC`](https://www.bioinformatics.babraham.ac.uk/projects/fastqc/)){% endif %} +{% if multiqc %}2. Present QC for raw reads ([`MultiQC`](http://multiqc.info/)){% endif %} ## Usage diff --git a/nf_core/pipeline-template/conf/modules.config b/nf_core/pipeline-template/conf/modules.config index 84972d8e2..078142fad 100644 --- a/nf_core/pipeline-template/conf/modules.config +++ b/nf_core/pipeline-template/conf/modules.config @@ -18,9 +18,11 @@ process { saveAs: { filename -> filename.equals('versions.yml') ? 
null : filename } ] + {%- if fastqc %} withName: FASTQC { ext.args = '--quiet' } + {%- endif %} {%- if multiqc %} withName: 'MULTIQC' { diff --git a/nf_core/pipeline-template/docs/output.md b/nf_core/pipeline-template/docs/output.md index 76195a682..5e42d50cc 100644 --- a/nf_core/pipeline-template/docs/output.md +++ b/nf_core/pipeline-template/docs/output.md @@ -12,10 +12,13 @@ The directories listed below will be created in the results directory after the The pipeline is built using [Nextflow](https://www.nextflow.io/) and processes data using the following steps: -- [FastQC](#fastqc) - Raw read QC - {% if multiqc %}- [MultiQC](#multiqc) - Aggregate report describing results and QC from the whole pipeline{% endif %} +{% if fastqc %}- [FastQC](#fastqc) - Raw read QC{% endif %} +{% if multiqc %}- [MultiQC](#multiqc) - Aggregate report describing results and QC from the whole pipeline{% endif %} + - [Pipeline information](#pipeline-information) - Report metrics generated during the workflow execution +{%- if fastqc %} + ### FastQC
@@ -28,7 +31,7 @@ The pipeline is built using [Nextflow](https://www.nextflow.io/) and processes d
[FastQC](http://www.bioinformatics.babraham.ac.uk/projects/fastqc/) gives general quality metrics about your sequenced reads. It provides information about the quality score distribution across your reads, per base sequence content (%A/T/G/C), adapter contamination and overrepresented sequences. For further reading and documentation see the [FastQC help pages](http://www.bioinformatics.babraham.ac.uk/projects/fastqc/Help/). - +{%- endif %} {%- if multiqc %} ### MultiQC diff --git a/nf_core/pipeline-template/modules.json b/nf_core/pipeline-template/modules.json index 8b156083f..6ab68d0f2 100644 --- a/nf_core/pipeline-template/modules.json +++ b/nf_core/pipeline-template/modules.json @@ -5,11 +5,12 @@ "https://github.com/nf-core/modules.git": { "modules": { "nf-core": { + {%- if fastqc %} "fastqc": { "branch": "master", "git_sha": "285a50500f9e02578d90b3ce6382ea3c30216acd", "installed_by": ["modules"] - }{%- if multiqc %}, + }{% endif %}{%- if multiqc %}{% if fastqc %},{% endif %} "multiqc": { "branch": "master", "git_sha": "b7ebe95761cd389603f9cc0e0dc384c0f663815a", diff --git a/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf index 3db8f66cb..fe9e558cf 100644 --- a/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf +++ b/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf @@ -230,7 +230,7 @@ def toolCitationText() { // Uncomment function in methodsDescriptionText to render in MultiQC report def citation_text = [ "Tools used in the workflow included:", - "FastQC (Andrews 2010),", + {% if fastqc %}"FastQC (Andrews 2010),",{% endif %} "MultiQC (Ewels et al. 2016)", "." ].join(' ').trim() @@ -243,7 +243,7 @@ def toolBibliographyText() { // Can use ternary operators to dynamically construct based conditions, e.g. params["run_xyz"] ? "
  • Author (2023) Pub name, Journal, DOI
  • " : "", // Uncomment function in methodsDescriptionText to render in MultiQC report def reference_text = [ - "
  • Andrews S, (2010) FastQC, URL: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/).
  • ", + {% if fastqc %}"
  • Andrews S, (2010) FastQC, URL: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/).
  • ",{% endif %} "
  • Ewels, P., Magnusson, M., Lundin, S., & Käller, M. (2016). MultiQC: summarize analysis results for multiple tools and samples in a single report. Bioinformatics , 32(19), 3047–3048. doi: /10.1093/bioinformatics/btw354
  • " ].join(' ').trim() diff --git a/nf_core/pipeline-template/workflows/pipeline.nf b/nf_core/pipeline-template/workflows/pipeline.nf index d98c392f0..e15d32f97 100644 --- a/nf_core/pipeline-template/workflows/pipeline.nf +++ b/nf_core/pipeline-template/workflows/pipeline.nf @@ -4,7 +4,7 @@ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ -include { FASTQC } from '../modules/nf-core/fastqc/main' +{% if fastqc %}include { FASTQC } from '../modules/nf-core/fastqc/main'{% endif %} {% if multiqc %}include { MULTIQC } from '../modules/nf-core/multiqc/main'{% endif %} include { paramsSummaryMap } from 'plugin/nf-validation' {% if multiqc %}include { paramsSummaryMultiqc } from '../subworkflows/nf-core/utils_nfcore_pipeline'{% endif %} @@ -27,6 +27,7 @@ workflow {{ short_name|upper }} { ch_versions = Channel.empty() {% if multiqc %}ch_multiqc_files = Channel.empty(){% endif %} + {%- if fastqc %} // // MODULE: Run FastQC // @@ -35,6 +36,7 @@ workflow {{ short_name|upper }} { ) {% if multiqc %}ch_multiqc_files = ch_multiqc_files.mix(FASTQC.out.zip.collect{it[1]}){% endif %} ch_versions = ch_versions.mix(FASTQC.out.versions.first()) + {%- endif %} // // Collate and save software versions diff --git a/nf_core/pipelines/create/templatefeatures.yml b/nf_core/pipelines/create/templatefeatures.yml index 6a5fac358..9fb56d610 100644 --- a/nf_core/pipelines/create/templatefeatures.yml +++ b/nf_core/pipelines/create/templatefeatures.yml @@ -228,6 +228,16 @@ multiqc: multiqc_config: False nfcore_pipelines: True custom_pipelines: True +fastqc: + skippable_paths: + - "modules/nf-core/fastqc/" + short_description: "Use fastqc" + description: "The pipeline will include the FastQC module which performs quality control analysis of input FASTQ files." + help_text: | + FastQC is a tool which provides quality control checks on raw sequencing data. + The pipeline will include the FastQC module. 
+ nfcore_pipelines: True + custom_pipelines: True changelog: skippable_paths: - "CHANGELOG.md" From e4f685d90e48c9ba0d01312eeae4fb2238afb000 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Wed, 21 Aug 2024 14:34:56 +0000 Subject: [PATCH 556/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1527f5ba4..6415612bb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -30,6 +30,7 @@ - add option to exclude changelog from custom pipeline template ([#3104](https://github.com/nf-core/tools/pull/3104)) - add option to exclude license from pipeline template ([#3125](https://github.com/nf-core/tools/pull/3125)) - add option to exclude email from pipeline template ([#3126](https://github.com/nf-core/tools/pull/3126)) +- add option to exclude fastqc from pipeline template ([#3129](https://github.com/nf-core/tools/pull/3129)) ### Linting From 0e4a7d1393fbe944f39e6c38219c43d02831999a Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 26 Aug 2024 11:31:14 +0200 Subject: [PATCH 557/737] don't fail when no nf-core modules installed --- nf_core/components/lint/__init__.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/nf_core/components/lint/__init__.py b/nf_core/components/lint/__init__.py index d0f668536..0373170cf 100644 --- a/nf_core/components/lint/__init__.py +++ b/nf_core/components/lint/__init__.py @@ -114,9 +114,7 @@ def __init__( ) ) if not self.all_remote_components: - raise LookupError( - f"No {self.component_type} from {self.modules_repo.remote_url} installed in pipeline." 
- ) + log.debug(f"No {self.component_type} from {self.modules_repo.remote_url} installed in pipeline.") local_component_dir = Path(self.directory, self.component_type, "local") if local_component_dir.exists(): @@ -146,7 +144,7 @@ def __init__( ] self.all_local_components = [] if not self.all_remote_components: - raise LookupError(f"No {self.component_type} in '{self.component_type}' directory") + log.debug(f"No {self.component_type} in '{self.component_type}' directory") # This could be better, perhaps glob for all nextflow.config files in? self.config = nf_core.utils.fetch_wf_config(self.directory / "tests" / "config", cache_config=True) From 92618ad8be8362ba5bece5eb82ab44fc08012c21 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 26 Aug 2024 11:46:39 +0200 Subject: [PATCH 558/737] fix versions yaml file name --- nf_core/pipeline-template/workflows/pipeline.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipeline-template/workflows/pipeline.nf b/nf_core/pipeline-template/workflows/pipeline.nf index e15d32f97..8c797ede3 100644 --- a/nf_core/pipeline-template/workflows/pipeline.nf +++ b/nf_core/pipeline-template/workflows/pipeline.nf @@ -44,7 +44,7 @@ workflow {{ short_name|upper }} { softwareVersionsToYAML(ch_versions) .collectFile( storeDir: "${params.outdir}/pipeline_info", - name: 'nf_core_pipeline_software_mqc_versions.yml', + name: {% if is_nfcore %}'nf_core_' {% else %} '' {% endif %} + 'pipeline_software_' + {% if multiqc %} 'mqc_' {% else %} '' {% endif %} + 'versions.yml', sort: true, newLine: true ).set { ch_collated_versions } From d437582e632ad7bed1768a684eafdeba13cd16cf Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 26 Aug 2024 12:46:28 +0200 Subject: [PATCH 559/737] update bpipe and fix pytests --- nf_core/components/lint/__init__.py | 4 ++-- nf_core/modules/lint/__init__.py | 2 +- tests/modules/test_lint.py | 11 +++++------ tests/subworkflows/test_lint.py | 10 ++++------ tests/test_modules.py | 4 ++-- 5 files 
changed, 14 insertions(+), 17 deletions(-) diff --git a/nf_core/components/lint/__init__.py b/nf_core/components/lint/__init__.py index 0373170cf..c1b1f24cb 100644 --- a/nf_core/components/lint/__init__.py +++ b/nf_core/components/lint/__init__.py @@ -114,7 +114,7 @@ def __init__( ) ) if not self.all_remote_components: - log.debug(f"No {self.component_type} from {self.modules_repo.remote_url} installed in pipeline.") + log.warning(f"No {self.component_type} from {self.modules_repo.remote_url} installed in pipeline.") local_component_dir = Path(self.directory, self.component_type, "local") if local_component_dir.exists(): @@ -144,7 +144,7 @@ def __init__( ] self.all_local_components = [] if not self.all_remote_components: - log.debug(f"No {self.component_type} in '{self.component_type}' directory") + log.warning(f"No {self.component_type} in '{self.component_type}' directory") # This could be better, perhaps glob for all nextflow.config files in? self.config = nf_core.utils.fetch_wf_config(self.directory / "tests" / "config", cache_config=True) diff --git a/nf_core/modules/lint/__init__.py b/nf_core/modules/lint/__init__.py index b75f7e757..017b3965b 100644 --- a/nf_core/modules/lint/__init__.py +++ b/nf_core/modules/lint/__init__.py @@ -110,7 +110,7 @@ def lint( """ # TODO: consider unifying modules and subworkflows lint() function and add it to the ComponentLint class # Prompt for module or all - if module is None and not all_modules: + if module is None and not all_modules and len(self.all_remote_components) > 0: questions = [ { "type": "list", diff --git a/tests/modules/test_lint.py b/tests/modules/test_lint.py index dfe288a6e..cc7e0565e 100644 --- a/tests/modules/test_lint.py +++ b/tests/modules/test_lint.py @@ -2,7 +2,6 @@ from pathlib import Path from typing import Union -import pytest import yaml from git.repo import Repo @@ -191,8 +190,8 @@ def test_modules_lint_empty(self): """Test linting a pipeline with no modules installed""" 
self.mods_remove.remove("fastqc", force=True) self.mods_remove.remove("multiqc", force=True) - with pytest.raises(LookupError): - nf_core.modules.lint.ModuleLint(directory=self.pipeline_dir) + nf_core.modules.lint.ModuleLint(directory=self.pipeline_dir) + assert "No modules from https://github.com/nf-core/modules.git installed in pipeline" in self.caplog.text def test_modules_lint_new_modules(self): """lint a new module""" @@ -206,8 +205,8 @@ def test_modules_lint_no_gitlab(self): """Test linting a pipeline with no modules installed""" self.mods_remove.remove("fastqc", force=True) self.mods_remove.remove("multiqc", force=True) - with pytest.raises(LookupError): - nf_core.modules.lint.ModuleLint(directory=self.pipeline_dir, remote_url=GITLAB_URL) + nf_core.modules.lint.ModuleLint(directory=self.pipeline_dir, remote_url=GITLAB_URL) + assert f"No modules from {GITLAB_URL} installed in pipeline" in self.caplog.text def test_modules_lint_gitlab_modules(self): """Lint modules from a different remote""" @@ -566,7 +565,7 @@ def test_modules_meta_yml_input_mismatch(self): fh.write(main_nf) assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) >= 0 - assert len(module_lint.warned) == 2 + assert len(module_lint.warned) == 2, f"Linting warning with {[x.__dict__ for x in module_lint.warned]}" lint_tests = [x.lint_test for x in module_lint.warned] # check that it is there twice: assert lint_tests.count("meta_input_meta_only") == 1 diff --git a/tests/subworkflows/test_lint.py b/tests/subworkflows/test_lint.py index 269300870..c8b97fd0b 100644 --- a/tests/subworkflows/test_lint.py +++ b/tests/subworkflows/test_lint.py @@ -2,8 +2,6 @@ import shutil from pathlib import Path -import pytest - import nf_core.subworkflows from ..test_subworkflows import TestSubworkflows @@ -25,8 +23,8 @@ def test_subworkflows_lint_empty(self): self.subworkflow_remove.remove("utils_nextflow_pipeline", force=True) 
self.subworkflow_remove.remove("utils_nfcore_pipeline", force=True) self.subworkflow_remove.remove("utils_nfvalidation_plugin", force=True) - with pytest.raises(LookupError): - nf_core.subworkflows.SubworkflowLint(directory=self.pipeline_dir) + nf_core.subworkflows.SubworkflowLint(directory=self.pipeline_dir) + assert "No subworkflows from https://github.com/nf-core/modules.git installed in pipeline" in self.caplog.text def test_subworkflows_lint_new_subworkflow(self): """lint a new subworkflow""" @@ -39,8 +37,8 @@ def test_subworkflows_lint_new_subworkflow(self): def test_subworkflows_lint_no_gitlab(self): """Test linting a pipeline with no subworkflows installed""" - with pytest.raises(LookupError): - nf_core.subworkflows.SubworkflowLint(directory=self.pipeline_dir, remote_url=GITLAB_URL) + nf_core.subworkflows.SubworkflowLint(directory=self.pipeline_dir, remote_url=GITLAB_URL) + assert f"No subworkflows from {GITLAB_URL} installed in pipeline" in self.caplog.text def test_subworkflows_lint_gitlab_subworkflows(self): """Lint subworkflows from a different remote""" diff --git a/tests/test_modules.py b/tests/test_modules.py index 9ce74fd4e..0e1649717 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -44,8 +44,8 @@ def create_modules_repo_dummy(tmp_dir): yaml.dump(nf_core_yml.model_dump(), fh) # mock biocontainers and anaconda response with responses.RequestsMock() as rsps: - mock_anaconda_api_calls(rsps, "bpipe", "0.9.12--hdfd78af_0") - mock_biocontainers_api_calls(rsps, "bpipe", "0.9.12--hdfd78af_0") + mock_anaconda_api_calls(rsps, "bpipe", "0.9.13--hdfd78af_0") + mock_biocontainers_api_calls(rsps, "bpipe", "0.9.13--hdfd78af_0") # bpipe is a valid package on bioconda that is very unlikely to ever be added to nf-core/modules module_create = nf_core.modules.create.ModuleCreate( root_dir, "bpipe/test", "@author", "process_single", False, False From 9551cdc3467dca4b4cd1bc3d3bf735340b9ee7bf Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Mon, 26 Aug 2024 14:21:37 +0200 Subject: [PATCH 560/737] Apply suggestions from code review MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Matthias Hörtenhuber --- nf_core/pipeline-template/conf/modules.config | 2 +- .../subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/nf_core/pipeline-template/conf/modules.config b/nf_core/pipeline-template/conf/modules.config index 078142fad..35e861d9b 100644 --- a/nf_core/pipeline-template/conf/modules.config +++ b/nf_core/pipeline-template/conf/modules.config @@ -18,7 +18,7 @@ process { saveAs: { filename -> filename.equals('versions.yml') ? null : filename } ] - {%- if fastqc %} + {% if fastqc -%} withName: FASTQC { ext.args = '--quiet' } diff --git a/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf index fe9e558cf..b130b7f88 100644 --- a/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf +++ b/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf @@ -231,7 +231,7 @@ def toolCitationText() { def citation_text = [ "Tools used in the workflow included:", {% if fastqc %}"FastQC (Andrews 2010),",{% endif %} - "MultiQC (Ewels et al. 2016)", + {% if multiqc %}"MultiQC (Ewels et al. 2016)",{% endif %} "." ].join(' ').trim() @@ -244,7 +244,7 @@ def toolBibliographyText() { // Uncomment function in methodsDescriptionText to render in MultiQC report def reference_text = [ {% if fastqc %}"
  • Andrews S, (2010) FastQC, URL: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/).
  • ",{% endif %} - "
  • Ewels, P., Magnusson, M., Lundin, S., & Käller, M. (2016). MultiQC: summarize analysis results for multiple tools and samples in a single report. Bioinformatics , 32(19), 3047–3048. doi: /10.1093/bioinformatics/btw354
  • " + {% if multiqc %}"
  • Ewels, P., Magnusson, M., Lundin, S., & Käller, M. (2016). MultiQC: summarize analysis results for multiple tools and samples in a single report. Bioinformatics , 32(19), 3047–3048. doi: /10.1093/bioinformatics/btw354
  • "{% endif %} ].join(' ').trim() return reference_text From 9d29e9342b5258b8c08f495d49409761707493c9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Mon, 26 Aug 2024 12:25:34 +0000 Subject: [PATCH 561/737] update textual snapshots --- .../__snapshots__/test_create_app.ambr | 764 +++++++++--------- .../pytest-0/test_github_details0 | 1 - .../pytest-0/test_github_exit_message0 | 1 - .../pytest-0/test_github_question0 | 1 - .../pytest-1/test_github_details0 | 1 - .../pytest-1/test_github_exit_message0 | 1 - .../pytest-1/test_github_question0 | 1 - 7 files changed, 382 insertions(+), 388 deletions(-) delete mode 160000 tmp/pytest-of-gitpod/pytest-0/test_github_details0 delete mode 160000 tmp/pytest-of-gitpod/pytest-0/test_github_exit_message0 delete mode 160000 tmp/pytest-of-gitpod/pytest-0/test_github_question0 delete mode 160000 tmp/pytest-of-gitpod/pytest-1/test_github_details0 delete mode 160000 tmp/pytest-of-gitpod/pytest-1/test_github_exit_message0 delete mode 160000 tmp/pytest-of-gitpod/pytest-1/test_github_question0 diff --git a/tests/pipelines/__snapshots__/test_create_app.ambr b/tests/pipelines/__snapshots__/test_create_app.ambr index ee470c915..f5a19837b 100644 --- a/tests/pipelines/__snapshots__/test_create_app.ambr +++ b/tests/pipelines/__snapshots__/test_create_app.ambr @@ -851,257 +851,257 @@ font-weight: 700; } - .terminal-3293903238-matrix { + .terminal-1136262003-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3293903238-title { + .terminal-1136262003-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3293903238-r1 { fill: #c5c8c6 } - .terminal-3293903238-r2 { fill: #e3e3e3 } - .terminal-3293903238-r3 { fill: #989898 } - .terminal-3293903238-r4 { fill: #e1e1e1 } - .terminal-3293903238-r5 { fill: #4ebf71;font-weight: bold } - .terminal-3293903238-r6 { fill: #1e1e1e } - .terminal-3293903238-r7 { fill: #507bb3 } - 
.terminal-3293903238-r8 { fill: #e2e2e2 } - .terminal-3293903238-r9 { fill: #808080 } - .terminal-3293903238-r10 { fill: #dde6ed;font-weight: bold } - .terminal-3293903238-r11 { fill: #001541 } - .terminal-3293903238-r12 { fill: #0178d4 } - .terminal-3293903238-r13 { fill: #454a50 } - .terminal-3293903238-r14 { fill: #e2e3e3;font-weight: bold } - .terminal-3293903238-r15 { fill: #000000 } - .terminal-3293903238-r16 { fill: #14191f } - .terminal-3293903238-r17 { fill: #e4e4e4 } - .terminal-3293903238-r18 { fill: #7ae998 } - .terminal-3293903238-r19 { fill: #0a180e;font-weight: bold } - .terminal-3293903238-r20 { fill: #008139 } - .terminal-3293903238-r21 { fill: #fea62b;font-weight: bold } - .terminal-3293903238-r22 { fill: #a7a9ab } - .terminal-3293903238-r23 { fill: #e2e3e3 } + .terminal-1136262003-r1 { fill: #c5c8c6 } + .terminal-1136262003-r2 { fill: #e3e3e3 } + .terminal-1136262003-r3 { fill: #989898 } + .terminal-1136262003-r4 { fill: #e1e1e1 } + .terminal-1136262003-r5 { fill: #4ebf71;font-weight: bold } + .terminal-1136262003-r6 { fill: #1e1e1e } + .terminal-1136262003-r7 { fill: #507bb3 } + .terminal-1136262003-r8 { fill: #e2e2e2 } + .terminal-1136262003-r9 { fill: #808080 } + .terminal-1136262003-r10 { fill: #dde6ed;font-weight: bold } + .terminal-1136262003-r11 { fill: #001541 } + .terminal-1136262003-r12 { fill: #0178d4 } + .terminal-1136262003-r13 { fill: #454a50 } + .terminal-1136262003-r14 { fill: #e2e3e3;font-weight: bold } + .terminal-1136262003-r15 { fill: #000000 } + .terminal-1136262003-r16 { fill: #14191f } + .terminal-1136262003-r17 { fill: #e4e4e4 } + .terminal-1136262003-r18 { fill: #7ae998 } + .terminal-1136262003-r19 { fill: #0a180e;font-weight: bold } + .terminal-1136262003-r20 { fill: #008139 } + .terminal-1136262003-r21 { fill: #fea62b;font-weight: bold } + .terminal-1136262003-r22 { fill: #a7a9ab } + .terminal-1136262003-r23 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + 
- + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Template features - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github CI testsThe pipeline will  Show help  - ▁▁▁▁▁▁▁▁include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - actions for Continuous - Integration (CI)  - testing - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use reference genomesThe pipeline will be  Hide help  - ▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - copy of the most  - common reference  - genome files from  - iGenomes - ▅▅ - - Nf-core pipelines are configured to use a copy of the most common reference  - genome files. - - By selecting this option, your pipeline will include a configuration file  - specifying the paths to these files. - - The required code to use these files will also be included in the template.  - When the pipeline user provides an appropriate genome key, the pipeline will - automatically download the required reference files. 
- ▅▅ - For more information about reference genomes in nf-core pipelines, see the  - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github badgesThe README.md file of  Show help  - ▁▁▁▁▁▁▁▁the pipeline will ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - include GitHub badges - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add configuration The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        filesinclude configuration ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - profiles containing  - custom parameters  - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Continue  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode  q Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Template features + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github CI testsThe pipeline will  Show help  + ▁▁▁▁▁▁▁▁include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + actions for Continuous + Integration (CI)  + testing + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use reference genomesThe pipeline will be  Hide help  + ▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + copy of the most  + common reference  + genome files from  + iGenomes▅▅ + + + Nf-core pipelines are configured to use a copy of the most common reference  + genome files. + + By selecting this option, your pipeline will include a configuration file  + specifying the paths to these files. + + The required code to use these files will also be included in the template.  + When the pipeline user provides an appropriate genome key, the pipeline will + automatically download the required reference files. 
+ ▅▅ + For more information about reference genomes in nf-core pipelines, see the  + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github badgesThe README.md file of  Show help  + ▁▁▁▁▁▁▁▁the pipeline will ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + include GitHub badges + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add configuration The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        filesinclude configuration ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + profiles containing  + custom parameters  + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Back  Continue  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  d Toggle dark mode  q Quit  @@ -2233,255 +2233,255 @@ font-weight: 700; } - .terminal-2443134963-matrix { + .terminal-242174438-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2443134963-title { + .terminal-242174438-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2443134963-r1 { fill: #c5c8c6 } - .terminal-2443134963-r2 { fill: #e3e3e3 } - .terminal-2443134963-r3 { fill: #989898 } - .terminal-2443134963-r4 { fill: #e1e1e1 } - .terminal-2443134963-r5 { fill: #4ebf71;font-weight: bold } - .terminal-2443134963-r6 { fill: #1e1e1e } - .terminal-2443134963-r7 { fill: #507bb3 } - .terminal-2443134963-r8 { fill: #e2e2e2 } - .terminal-2443134963-r9 { fill: #808080 } - .terminal-2443134963-r10 { fill: #dde6ed;font-weight: bold } - .terminal-2443134963-r11 { fill: #001541 } - .terminal-2443134963-r12 { fill: #14191f } - .terminal-2443134963-r13 { fill: #454a50 } - .terminal-2443134963-r14 { fill: #7ae998 } - .terminal-2443134963-r15 { fill: #e2e3e3;font-weight: bold } - .terminal-2443134963-r16 { fill: #0a180e;font-weight: bold } - .terminal-2443134963-r17 { fill: #000000 } - .terminal-2443134963-r18 { fill: #008139 } - .terminal-2443134963-r19 { fill: #fea62b;font-weight: bold } - .terminal-2443134963-r20 { fill: #a7a9ab } - .terminal-2443134963-r21 { fill: #e2e3e3 } + .terminal-242174438-r1 { fill: #c5c8c6 } + .terminal-242174438-r2 { 
fill: #e3e3e3 } + .terminal-242174438-r3 { fill: #989898 } + .terminal-242174438-r4 { fill: #e1e1e1 } + .terminal-242174438-r5 { fill: #4ebf71;font-weight: bold } + .terminal-242174438-r6 { fill: #1e1e1e } + .terminal-242174438-r7 { fill: #507bb3 } + .terminal-242174438-r8 { fill: #e2e2e2 } + .terminal-242174438-r9 { fill: #808080 } + .terminal-242174438-r10 { fill: #dde6ed;font-weight: bold } + .terminal-242174438-r11 { fill: #001541 } + .terminal-242174438-r12 { fill: #14191f } + .terminal-242174438-r13 { fill: #454a50 } + .terminal-242174438-r14 { fill: #7ae998 } + .terminal-242174438-r15 { fill: #e2e3e3;font-weight: bold } + .terminal-242174438-r16 { fill: #0a180e;font-weight: bold } + .terminal-242174438-r17 { fill: #000000 } + .terminal-242174438-r18 { fill: #008139 } + .terminal-242174438-r19 { fill: #fea62b;font-weight: bold } + .terminal-242174438-r20 { fill: #a7a9ab } + .terminal-242174438-r21 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Template features - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github CI testsThe pipeline will  Show help  - ▁▁▁▁▁▁▁▁include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - actions for Continuous - Integration (CI)  - testing - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use reference genomesThe pipeline will be  Show help  - ▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - copy of the most  - common reference  - genome files from  - iGenomes - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github badgesThe README.md file of  Show help ▃▃ - ▁▁▁▁▁▁▁▁the pipeline will ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - include GitHub badges - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add configuration The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        filesinclude configuration ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - 
profiles containing  - custom parameters  - requried to run  - nf-core pipelines at  - different institutions - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use code lintersThe pipeline will  Show help  - ▁▁▁▁▁▁▁▁include code linters ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - and CI tests to lint  - your code: pre-commit, - editor-config and  - prettier. - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Include citationsInclude pipeline tools Show help  - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Continue  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode  q Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Template features + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github CI testsThe pipeline will  Show help  + ▁▁▁▁▁▁▁▁include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + actions for Continuous + Integration (CI)  + testing + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use reference genomesThe pipeline will be  Show help  + ▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + copy of the most  + common reference  + genome files from  + iGenomes + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▆▆ +         Add Github badgesThe README.md file of  Show help  + ▁▁▁▁▁▁▁▁the pipeline will ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + include GitHub badges + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add configuration The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        filesinclude configuration ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + profiles containing  + custom parameters  + requried to run  + nf-core pipelines at  + different institutions + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use code lintersThe pipeline will  Show help  + ▁▁▁▁▁▁▁▁include code linters ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + and CI tests to lint  + your code: pre-commit, + editor-config and  + prettier. 
+ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Include citationsInclude pipeline tools Show help  + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Back  Continue  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  d Toggle dark mode  q Quit  @@ -2511,254 +2511,254 @@ font-weight: 700; } - .terminal-1633351929-matrix { + .terminal-1052877418-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1633351929-title { + .terminal-1052877418-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1633351929-r1 { fill: #c5c8c6 } - .terminal-1633351929-r2 { fill: #e3e3e3 } - .terminal-1633351929-r3 { fill: #989898 } - .terminal-1633351929-r4 { fill: #e1e1e1 } - .terminal-1633351929-r5 { fill: #4ebf71;font-weight: bold } - .terminal-1633351929-r6 { fill: #1e1e1e } - .terminal-1633351929-r7 { fill: #507bb3 } - .terminal-1633351929-r8 { fill: #e2e2e2 } - .terminal-1633351929-r9 { fill: #808080 } - .terminal-1633351929-r10 { fill: #dde6ed;font-weight: bold } - .terminal-1633351929-r11 { fill: #001541 } - .terminal-1633351929-r12 { fill: #454a50 } - .terminal-1633351929-r13 { fill: #7ae998 } - .terminal-1633351929-r14 { fill: #e2e3e3;font-weight: bold } - .terminal-1633351929-r15 { fill: #0a180e;font-weight: bold } - .terminal-1633351929-r16 { fill: #000000 } - .terminal-1633351929-r17 { fill: #008139 } - .terminal-1633351929-r18 { fill: #fea62b;font-weight: bold } - .terminal-1633351929-r19 { fill: #a7a9ab } - .terminal-1633351929-r20 { fill: #e2e3e3 } + .terminal-1052877418-r1 { fill: #c5c8c6 } + .terminal-1052877418-r2 { fill: #e3e3e3 } + .terminal-1052877418-r3 { fill: #989898 } + .terminal-1052877418-r4 { fill: #e1e1e1 } + .terminal-1052877418-r5 { fill: #4ebf71;font-weight: bold } + .terminal-1052877418-r6 { fill: #1e1e1e } + .terminal-1052877418-r7 { fill: #507bb3 } + .terminal-1052877418-r8 { fill: #e2e2e2 } + .terminal-1052877418-r9 { fill: #808080 } + .terminal-1052877418-r10 { fill: 
#dde6ed;font-weight: bold } + .terminal-1052877418-r11 { fill: #001541 } + .terminal-1052877418-r12 { fill: #454a50 } + .terminal-1052877418-r13 { fill: #7ae998 } + .terminal-1052877418-r14 { fill: #e2e3e3;font-weight: bold } + .terminal-1052877418-r15 { fill: #0a180e;font-weight: bold } + .terminal-1052877418-r16 { fill: #000000 } + .terminal-1052877418-r17 { fill: #008139 } + .terminal-1052877418-r18 { fill: #fea62b;font-weight: bold } + .terminal-1052877418-r19 { fill: #a7a9ab } + .terminal-1052877418-r20 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Template features - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use reference genomesThe pipeline will be  Show help  - ▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - copy of the most common - reference genome files  - from iGenomes - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use multiqcThe pipeline will  Show help  - ▁▁▁▁▁▁▁▁include the MultiQC ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - module which generates  - an HTML report for  - quality control. - - - - - - - - - - - - - - - - - - - - - - - - - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Continue  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode  q Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Template features + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use reference genomesThe pipeline will be  Show help  + ▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + copy of the most common + reference genome files  + from iGenomes + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use multiqcThe pipeline will  Show help  + ▁▁▁▁▁▁▁▁include the MultiQC ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + module which generates  + an HTML report for  + quality control. 
+ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use fastqcThe pipeline will  Show help  + ▁▁▁▁▁▁▁▁include the FastQC ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + module which performs  + quality control  + analysis of input FASTQ + files. + + + + + + + + + + + + + + + + + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Back  Continue  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  d Toggle dark mode  q Quit  diff --git a/tmp/pytest-of-gitpod/pytest-0/test_github_details0 b/tmp/pytest-of-gitpod/pytest-0/test_github_details0 deleted file mode 160000 index ee82f320c..000000000 --- a/tmp/pytest-of-gitpod/pytest-0/test_github_details0 +++ /dev/null @@ -1 +0,0 @@ -Subproject commit ee82f320cb567b302e7328c1cacab94a98dae787 diff --git a/tmp/pytest-of-gitpod/pytest-0/test_github_exit_message0 b/tmp/pytest-of-gitpod/pytest-0/test_github_exit_message0 deleted file mode 160000 index f7fe48cf9..000000000 --- a/tmp/pytest-of-gitpod/pytest-0/test_github_exit_message0 +++ /dev/null @@ -1 +0,0 @@ -Subproject commit f7fe48cf9d00ab24581686a6d4226d2e9005c607 diff --git a/tmp/pytest-of-gitpod/pytest-0/test_github_question0 b/tmp/pytest-of-gitpod/pytest-0/test_github_question0 deleted file mode 160000 index dce3324ac..000000000 --- a/tmp/pytest-of-gitpod/pytest-0/test_github_question0 +++ /dev/null @@ -1 +0,0 @@ -Subproject commit dce3324acbbe32f905afae8553e042f39404b37e diff --git a/tmp/pytest-of-gitpod/pytest-1/test_github_details0 b/tmp/pytest-of-gitpod/pytest-1/test_github_details0 deleted file mode 160000 index e2638dca9..000000000 --- a/tmp/pytest-of-gitpod/pytest-1/test_github_details0 +++ /dev/null @@ -1 +0,0 @@ -Subproject commit e2638dca91b137a96b491008b1eeef2bfd791bec diff --git a/tmp/pytest-of-gitpod/pytest-1/test_github_exit_message0 b/tmp/pytest-of-gitpod/pytest-1/test_github_exit_message0 deleted file mode 160000 index 74cda800a..000000000 --- a/tmp/pytest-of-gitpod/pytest-1/test_github_exit_message0 +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 74cda800a86fd07e93bbf8e00e4890516e3d838d diff --git 
a/tmp/pytest-of-gitpod/pytest-1/test_github_question0 b/tmp/pytest-of-gitpod/pytest-1/test_github_question0 deleted file mode 160000 index 444ebc537..000000000 --- a/tmp/pytest-of-gitpod/pytest-1/test_github_question0 +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 444ebc5370bf91ea808f5d65c5788064f0480045 From 52da9e369598bc69ad356b25c38d6c09fed940cf Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Wed, 21 Aug 2024 15:05:24 +0000 Subject: [PATCH 562/737] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6415612bb..45d4fc18b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -31,6 +31,7 @@ - add option to exclude license from pipeline template ([#3125](https://github.com/nf-core/tools/pull/3125)) - add option to exclude email from pipeline template ([#3126](https://github.com/nf-core/tools/pull/3126)) - add option to exclude fastqc from pipeline template ([#3129](https://github.com/nf-core/tools/pull/3129)) +- add option to exclude documentation from pipeline template ([#3130](https://github.com/nf-core/tools/pull/3130)) ### Linting From a8cb9408d16ce47eda65baee2f73354b7566b72e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Mon, 26 Aug 2024 14:10:30 +0200 Subject: [PATCH 563/737] Apply suggestions from code review MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Matthias Hörtenhuber --- nf_core/pipelines/create/templatefeatures.yml | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/nf_core/pipelines/create/templatefeatures.yml b/nf_core/pipelines/create/templatefeatures.yml index 9fb56d610..ab5f26237 100644 --- a/nf_core/pipelines/create/templatefeatures.yml +++ b/nf_core/pipelines/create/templatefeatures.yml @@ -310,3 +310,23 @@ slackreport: - ".prettierignore" nfcore_pipelines: False custom_pipelines: True +documentation: + skippable_paths: + - "docs" + short_description: "Add 
documentation" + description: "Add documentation to the pipeline" + help_text: | + This will add documentation markdown files where you can describe your pipeline. + It includes: + - docs/README.md: A README file where you can describe the structure of your documentation. + - docs/output.md: A file where you can explain the output generated by the pipeline + - docs/usage.md: A file where you can explain the usage of the pipeline and its parameters. + + These files come with an exemplary documentation structure written. + linting: + files_exist: + - "docs/output.md" + - "docs/README.md" + - "docs/usage.md" + nfcore_pipelines: False + custom_pipelines: True From bcbed0567441c2e5d62f2525da1a2ebebc7d5a23 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 26 Aug 2024 15:55:44 +0200 Subject: [PATCH 564/737] update textual snapshots --- .../__snapshots__/test_create_app.ambr | 512 +++++++++--------- 1 file changed, 256 insertions(+), 256 deletions(-) diff --git a/tests/pipelines/__snapshots__/test_create_app.ambr b/tests/pipelines/__snapshots__/test_create_app.ambr index f5a19837b..0015728fd 100644 --- a/tests/pipelines/__snapshots__/test_create_app.ambr +++ b/tests/pipelines/__snapshots__/test_create_app.ambr @@ -851,257 +851,257 @@ font-weight: 700; } - .terminal-1136262003-matrix { + .terminal-3611359904-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1136262003-title { + .terminal-3611359904-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1136262003-r1 { fill: #c5c8c6 } - .terminal-1136262003-r2 { fill: #e3e3e3 } - .terminal-1136262003-r3 { fill: #989898 } - .terminal-1136262003-r4 { fill: #e1e1e1 } - .terminal-1136262003-r5 { fill: #4ebf71;font-weight: bold } - .terminal-1136262003-r6 { fill: #1e1e1e } - .terminal-1136262003-r7 { fill: #507bb3 } - .terminal-1136262003-r8 { fill: #e2e2e2 } - .terminal-1136262003-r9 { fill: #808080 } - 
.terminal-1136262003-r10 { fill: #dde6ed;font-weight: bold } - .terminal-1136262003-r11 { fill: #001541 } - .terminal-1136262003-r12 { fill: #0178d4 } - .terminal-1136262003-r13 { fill: #454a50 } - .terminal-1136262003-r14 { fill: #e2e3e3;font-weight: bold } - .terminal-1136262003-r15 { fill: #000000 } - .terminal-1136262003-r16 { fill: #14191f } - .terminal-1136262003-r17 { fill: #e4e4e4 } - .terminal-1136262003-r18 { fill: #7ae998 } - .terminal-1136262003-r19 { fill: #0a180e;font-weight: bold } - .terminal-1136262003-r20 { fill: #008139 } - .terminal-1136262003-r21 { fill: #fea62b;font-weight: bold } - .terminal-1136262003-r22 { fill: #a7a9ab } - .terminal-1136262003-r23 { fill: #e2e3e3 } + .terminal-3611359904-r1 { fill: #c5c8c6 } + .terminal-3611359904-r2 { fill: #e3e3e3 } + .terminal-3611359904-r3 { fill: #989898 } + .terminal-3611359904-r4 { fill: #e1e1e1 } + .terminal-3611359904-r5 { fill: #4ebf71;font-weight: bold } + .terminal-3611359904-r6 { fill: #1e1e1e } + .terminal-3611359904-r7 { fill: #507bb3 } + .terminal-3611359904-r8 { fill: #e2e2e2 } + .terminal-3611359904-r9 { fill: #808080 } + .terminal-3611359904-r10 { fill: #dde6ed;font-weight: bold } + .terminal-3611359904-r11 { fill: #001541 } + .terminal-3611359904-r12 { fill: #0178d4 } + .terminal-3611359904-r13 { fill: #454a50 } + .terminal-3611359904-r14 { fill: #e2e3e3;font-weight: bold } + .terminal-3611359904-r15 { fill: #000000 } + .terminal-3611359904-r16 { fill: #e4e4e4 } + .terminal-3611359904-r17 { fill: #14191f } + .terminal-3611359904-r18 { fill: #7ae998 } + .terminal-3611359904-r19 { fill: #0a180e;font-weight: bold } + .terminal-3611359904-r20 { fill: #008139 } + .terminal-3611359904-r21 { fill: #fea62b;font-weight: bold } + .terminal-3611359904-r22 { fill: #a7a9ab } + .terminal-3611359904-r23 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + 
- + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Template features - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github CI testsThe pipeline will  Show help  - ▁▁▁▁▁▁▁▁include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - actions for Continuous - Integration (CI)  - testing - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use reference genomesThe pipeline will be  Hide help  - ▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - copy of the most  - common reference  - genome files from  - iGenomes▅▅ - - - Nf-core pipelines are configured to use a copy of the most common reference  - genome files. - - By selecting this option, your pipeline will include a configuration file  - specifying the paths to these files. - - The required code to use these files will also be included in the template.  - When the pipeline user provides an appropriate genome key, the pipeline will - automatically download the required reference files. - ▅▅ - For more information about reference genomes in nf-core pipelines, see the  - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github badgesThe README.md file of  Show help  - ▁▁▁▁▁▁▁▁the pipeline will ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - include GitHub badges - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add configuration The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        filesinclude configuration ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - profiles containing  - custom parameters  - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Continue  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode  q Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Template features + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github CI testsThe pipeline will  Show help  + ▁▁▁▁▁▁▁▁include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + actions for Continuous + Integration (CI)  + testing + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use reference genomesThe pipeline will be  Hide help  + ▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + 
copy of the most  + common reference  + genome files from  + iGenomes + + + Nf-core pipelines are configured to use a copy of the most common reference  + genome files. + + By selecting this option, your pipeline will include a configuration file  + specifying the paths to these files. + + The required code to use these files will also be included in the template.  + When the pipeline user provides an appropriate genome key, the pipeline will + automatically download the required reference files. + ▅▅ + For more information about reference genomes in nf-core pipelines, see the  + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github badgesThe README.md file of  Show help  + ▁▁▁▁▁▁▁▁the pipeline will ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + include GitHub badges + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add configuration The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        filesinclude configuration ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + profiles containing  + custom parameters  + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Back  Continue  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  d Toggle dark mode  q Quit  @@ -2233,255 +2233,255 @@ font-weight: 700; } - .terminal-242174438-matrix { + .terminal-1661160397-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-242174438-title { + .terminal-1661160397-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-242174438-r1 { fill: #c5c8c6 } - .terminal-242174438-r2 { fill: #e3e3e3 } - .terminal-242174438-r3 { fill: #989898 } - .terminal-242174438-r4 { fill: #e1e1e1 } - .terminal-242174438-r5 { fill: #4ebf71;font-weight: bold } - .terminal-242174438-r6 { fill: #1e1e1e } - .terminal-242174438-r7 { fill: #507bb3 } - .terminal-242174438-r8 { fill: #e2e2e2 } - .terminal-242174438-r9 { fill: #808080 } - .terminal-242174438-r10 { fill: #dde6ed;font-weight: bold } - .terminal-242174438-r11 { fill: #001541 } - .terminal-242174438-r12 { fill: #14191f } - .terminal-242174438-r13 { fill: #454a50 } 
- .terminal-242174438-r14 { fill: #7ae998 } - .terminal-242174438-r15 { fill: #e2e3e3;font-weight: bold } - .terminal-242174438-r16 { fill: #0a180e;font-weight: bold } - .terminal-242174438-r17 { fill: #000000 } - .terminal-242174438-r18 { fill: #008139 } - .terminal-242174438-r19 { fill: #fea62b;font-weight: bold } - .terminal-242174438-r20 { fill: #a7a9ab } - .terminal-242174438-r21 { fill: #e2e3e3 } + .terminal-1661160397-r1 { fill: #c5c8c6 } + .terminal-1661160397-r2 { fill: #e3e3e3 } + .terminal-1661160397-r3 { fill: #989898 } + .terminal-1661160397-r4 { fill: #e1e1e1 } + .terminal-1661160397-r5 { fill: #4ebf71;font-weight: bold } + .terminal-1661160397-r6 { fill: #1e1e1e } + .terminal-1661160397-r7 { fill: #507bb3 } + .terminal-1661160397-r8 { fill: #e2e2e2 } + .terminal-1661160397-r9 { fill: #808080 } + .terminal-1661160397-r10 { fill: #dde6ed;font-weight: bold } + .terminal-1661160397-r11 { fill: #001541 } + .terminal-1661160397-r12 { fill: #14191f } + .terminal-1661160397-r13 { fill: #454a50 } + .terminal-1661160397-r14 { fill: #7ae998 } + .terminal-1661160397-r15 { fill: #e2e3e3;font-weight: bold } + .terminal-1661160397-r16 { fill: #0a180e;font-weight: bold } + .terminal-1661160397-r17 { fill: #000000 } + .terminal-1661160397-r18 { fill: #008139 } + .terminal-1661160397-r19 { fill: #fea62b;font-weight: bold } + .terminal-1661160397-r20 { fill: #a7a9ab } + .terminal-1661160397-r21 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core create + nf-core create - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Template features - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github CI testsThe pipeline will  Show help  - ▁▁▁▁▁▁▁▁include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - actions for Continuous - Integration (CI)  - testing - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -    
     Use reference genomesThe pipeline will be  Show help  - ▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - copy of the most  - common reference  - genome files from  - iGenomes - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▆▆ -         Add Github badgesThe README.md file of  Show help  - ▁▁▁▁▁▁▁▁the pipeline will ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - include GitHub badges - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add configuration The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        filesinclude configuration ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - profiles containing  - custom parameters  - requried to run  - nf-core pipelines at  - different institutions - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use code lintersThe pipeline will  Show help  - ▁▁▁▁▁▁▁▁include code linters ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - and CI tests to lint  - your code: pre-commit, - editor-config and  - prettier. - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Include citationsInclude pipeline tools Show help  - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Continue  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode  q Quit  + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + + Template features + + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github CI testsThe pipeline will  Show help  + ▁▁▁▁▁▁▁▁include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + actions for Continuous + Integration (CI)  + testing + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Use reference genomesThe pipeline will be  Show help  + ▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + copy of the most  + common reference  + genome files from  + iGenomes + ▃▃ + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add Github badgesThe README.md file of  Show help  + ▁▁▁▁▁▁▁▁the pipeline will ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + include GitHub badges + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Add configuration The pipeline will  Show help  + ▁▁▁▁▁▁▁▁        filesinclude configuration ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + profiles containing  + custom parameters  + requried to run  + nf-core pipelines at  + different institutions + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        
 Use code lintersThe pipeline will  Show help  + ▁▁▁▁▁▁▁▁include code linters ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + and CI tests to lint  + your code: pre-commit, + editor-config and  + prettier. + + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +         Include citationsInclude pipeline tools Show help  + ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +  Back  Continue  + ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +  d Toggle dark mode  q Quit  From d3d6345b5bc1519dd9426b66319043d05d0c2a85 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Thu, 22 Aug 2024 10:54:18 +0200 Subject: [PATCH 565/737] add option to exclude test configs from pipeline template --- .../create-test-lint-wf-template.yml | 1 + .../pipeline-template/.github/CONTRIBUTING.md | 7 ++++- .../.github/PULL_REQUEST_TEMPLATE.md | 2 ++ .../.github/workflows/download_pipeline.yml | 4 +-- nf_core/pipeline-template/README.md | 2 +- nf_core/pipeline-template/docs/usage.md | 3 ++ nf_core/pipeline-template/nextflow.config | 4 ++- .../pipeline-template/nextflow_schema.json | 4 +-- nf_core/pipelines/create/templatefeatures.yml | 28 +++++++++++++++++++ 9 files changed, 48 insertions(+), 7 deletions(-) diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index 70125a10f..46f55a4b6 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -70,6 +70,7 @@ jobs: - TEMPLATE: is_nfcore - TEMPLATE: nf_core_configs profile: "self_hosted_runner" + - TEMPLATE: test_config fail-fast: false steps: diff --git a/nf_core/pipeline-template/.github/CONTRIBUTING.md b/nf_core/pipeline-template/.github/CONTRIBUTING.md index 5a58501bb..f331d3867 100644 --- a/nf_core/pipeline-template/.github/CONTRIBUTING.md +++ b/nf_core/pipeline-template/.github/CONTRIBUTING.md @@ -30,16 +30,18 @@ If you're not used to this workflow with git, you can start with some [docs from ## Tests +{%- if test_config %} You have the option to test your changes locally by running the pipeline. 
For receiving warnings about process selectors and other `debug` information, it is recommended to use the debug profile. Execute all the tests with the following command: ```bash nf-test test --profile debug,test,docker --verbose ``` +{% endif %} When you create a pull request with changes, [GitHub Actions](https://github.com/features/actions) will run automatic tests. Typically, pull-requests are only fully reviewed when these tests are passing, though of course we can help out before then. -There are typically two types of tests that run: +{% if test_config %}There are typically two types of tests that run:{% endif %} ### Lint tests @@ -48,12 +50,15 @@ To enforce these and ensure that all pipelines stay in sync, we have developed a If any failures or warnings are encountered, please follow the listed URL for more documentation. +{%- if test_config %} + ### Pipeline tests Each `nf-core` pipeline should be set up with a minimal set of test-data. `GitHub Actions` then runs the pipeline on this data to ensure that it exits successfully. If there are any failures then the automated tests fail. These tests are run both with the latest available version of `Nextflow` and also the minimum required version that is stated in the pipeline code. +{%- endif %} ## Patch diff --git a/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md b/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md index dee23ccab..c96f2dd4c 100644 --- a/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md +++ b/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md @@ -20,8 +20,10 @@ Learn more about contributing: [CONTRIBUTING.md](https://github.com/{{ name }}/t - [ ] If necessary, also make a PR on the {{ name }} _branch_ on the [nf-core/test-datasets](https://github.com/nf-core/test-datasets) repository. {%- endif %} - [ ] Make sure your code lints (`nf-core pipelines lint`). + {%- if test_config %} - [ ] Ensure the test suite passes (`nextflow run . 
-profile test,docker --outdir `). - [ ] Check for unexpected warnings in debug mode (`nextflow run . -profile debug,test,docker --outdir `). + {%- endif %} - [ ] Usage Documentation in `docs/usage.md` is updated. - [ ] Output Documentation in `docs/output.md` is updated. - [ ] `CHANGELOG.md` is updated. diff --git a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml index 99a42d86d..e7a28e5ac 100644 --- a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml +++ b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml @@ -68,7 +68,7 @@ jobs: --download-configuration - name: Inspect download - run: tree ./${{ env.REPOTITLE_LOWERCASE }} + run: tree ./${{ env.REPOTITLE_LOWERCASE }}{% endraw %}{% if test_config %}{% raw %} - name: Run the downloaded pipeline (stub) id: stub_run_pipeline @@ -83,4 +83,4 @@ jobs: env: NXF_SINGULARITY_CACHEDIR: ./ NXF_SINGULARITY_HOME_MOUNT: true - run: nextflow run ./${{ env.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ env.REPO_BRANCH }}) -profile test,singularity --outdir ./results{% endraw %} + run: nextflow run ./${{ env.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ env.REPO_BRANCH }}) -profile test,singularity --outdir ./results{% endraw %}{% endif %} diff --git a/nf_core/pipeline-template/README.md b/nf_core/pipeline-template/README.md index a618f87bf..c8ed828d3 100644 --- a/nf_core/pipeline-template/README.md +++ b/nf_core/pipeline-template/README.md @@ -50,7 +50,7 @@ ## Usage > [!NOTE] -> If you are new to Nextflow and nf-core, please refer to [this page](https://nf-co.re/docs/usage/installation) on how to set-up Nextflow. Make sure to [test your setup](https://nf-co.re/docs/usage/introduction#how-to-run-a-pipeline) with `-profile test` before running the workflow on actual data. +> If you are new to Nextflow and nf-core, please refer to [this page](https://nf-co.re/docs/usage/installation) on how to set-up Nextflow. 
{% if test_config %}Make sure to [test your setup](https://nf-co.re/docs/usage/introduction#how-to-run-a-pipeline) with `-profile test` before running the workflow on actual data.{% endif %} - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - nf-core create - - - - - - - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Basic details - - - - - GitHub organisationWorkflow name - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - nf-corePipeline Name - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - A short description of your pipeline. - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Description - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - Name of the main author / authors - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Author(s) - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Next  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - -  d Toggle dark mode  q Quit  - - - - - ''' -# --- -# name: test_basic_details_nfcore - ''' - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - nf-core create - - - - - - - - - - nf-core create — Create a new 
pipeline with the nf-core pipeline template - - - Basic details - - - - - GitHub organisationWorkflow name - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - nf-core                                   Pipeline Name - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - A short description of your pipeline. - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Description - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - Name of the main author / authors - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Author(s) - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Next  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - -  d Toggle dark mode  q Quit  - - - - - ''' -# --- -# name: test_choose_type - ''' - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - nf-core create - - - - - - - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Choose pipeline type - - - - - Choose "nf-core" if:Choose "Custom" if: - - ● You want your pipeline to be part of the ● Your pipeline will never be part of  - nf-core communitynf-core - ● You think that there's an outside chance ● You want full control over all features  - that it ever could be part of nf-corethat are included from the template  - (including those that are mandatory for  - nf-core). 
- ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  nf-core  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Custom  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - What's the difference? - -   Choosing "nf-core" effectively pre-selects the following template features: - - ● GitHub Actions continuous-integration configuration files: - ▪ Pipeline test runs: Small-scale (GitHub) and large-scale (AWS) - ▪ Code formatting checks with Prettier - ▪ Auto-fix linting functionality using @nf-core-bot - ▪ Marking old issues as stale - ● Inclusion of shared nf-core configuration profiles - - - - - - - - - - - - - - -  d Toggle dark mode  q Quit  - - - - - ''' -# --- -# name: test_customisation_help - ''' - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - nf-core create - - - - - - - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Template features - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use a GitHub Create a GitHub  Show help  - ▁▁▁▁▁▁▁▁        repository.repository for the ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - pipeline. - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github CI testsThe pipeline will  Show help  - ▁▁▁▁▁▁▁▁include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - actions for Continuous - Integration (CI)  - testing▄▄ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use reference genomesThe pipeline will be  Hide help  - ▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - copy of the most  - common reference  - genome files from  - iGenomes - - - Nf-core pipelines are configured to use a copy of the most common reference  - genome files. - - By selecting this option, your pipeline will include a configuration file  - specifying the paths to these files. - - The required code to use these files will also be included in the template.  
- When the pipeline user provides an appropriate genome key, the pipeline will - automatically download the required reference files. - ▅▅ - For more information about reference genomes in nf-core pipelines, see the  - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github badgesThe README.md file of  Show help  - ▁▁▁▁▁▁▁▁the pipeline will ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - include GitHub badges - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Continue  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode  q Quit  - - - - - ''' -# --- -# name: test_final_details - ''' - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - nf-core create - - - - - - - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Final details - - - - - First version of the pipelinePath to the output directory where the  - pipeline will be created - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - 1.0.0dev.                                          
- ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Finish  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -  d Toggle dark mode  q Quit  - - - - - ''' -# --- -# name: test_github_details - ''' - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - nf-core create - - - - - - - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Create GitHub repository - -   Now that we have created a new pipeline locally, we can create a new GitHub repository and push    -   the code to it. - - - - - Your GitHub usernameYour GitHub personal access token▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - for login. Show  - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - GitHub username••••••••••••                   - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - The name of the organisation where the The name of the new GitHub repository - GitHub repo will be cretaed - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - nf-core                               mypipeline                             - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - ⚠️ You can't create a repository directly in the nf-core organisation. - Please create the pipeline repo to an organisation where you have access or use your user  - account. A core-team member will be able to transfer the repo to nf-core once the development - has started. - - 💡 Your GitHub user account will be used by default if nf-core is given as the org name. 
- - - ▔▔▔▔▔▔▔▔Private - Select to make the new GitHub repo private. - ▁▁▁▁▁▁▁▁ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Create GitHub repo  Finish without creating a repo  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - -  d Toggle dark mode  q Quit  - - - - - ''' -# --- -# name: test_github_exit_message - ''' - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - nf-core create - - - - - - - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - HowTo create a GitHub repository - - - -                                           ,--./,-. -           ___     __   __   __   ___     /,-._.--~\  -     |\ | |__  __ /  ` /  \ |__) |__         }  { -     | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                           `._,._,' - -   If you would like to create the GitHub repository later, you can do it manually by following  -   these steps: - -  1. Create a new GitHub repository -  2. Add the remote to your local repository: - - - cd <pipeline_directory> - git remote add origin git@github.com:<username>/<repo_name>.git - - -  3. Push the code to the remote: - - - git push --all origin - - - 💡 Note the --all flag: this is needed to push all branches to the remote. 
- - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Close  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - -  d Toggle dark mode  q Quit  - - - - - ''' -# --- -# name: test_github_question - ''' - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - nf-core create - - - - - - - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Create GitHub repository - - -   After creating the pipeline template locally, we can create a GitHub repository and push the  -   code to it. - -   Do you want to create a GitHub repository? - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Create GitHub repo  Finish without creating a repo  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -  d Toggle dark mode  q Quit  - - - - - ''' -# --- -# name: test_type_custom - ''' - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - nf-core create - - - - - - - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Template features - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use a GitHub Create a GitHub  Show help  - ▁▁▁▁▁▁▁▁        repository.repository for the ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - pipeline. 
- - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github CI testsThe pipeline will  Show help  - ▁▁▁▁▁▁▁▁include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - actions for Continuous - Integration (CI)  - testing - ▃▃ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use reference genomesThe pipeline will be  Show help  - ▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - copy of the most  - common reference  - genome files from  - iGenomes - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github badgesThe README.md file of  Show help  - ▁▁▁▁▁▁▁▁the pipeline will ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - include GitHub badges - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add configuration The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        filesinclude configuration ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - profiles containing  - custom parameters  - requried to run  - nf-core pipelines at  - different institutions - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use code lintersThe pipeline will  Show help  - ▁▁▁▁▁▁▁▁include code linters ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - and CI tests to lint  - your code: pre-commit, - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Continue  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode  q Quit  - - - - - ''' -# --- -# name: test_type_nfcore - ''' - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - nf-core create - - - - - - - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Template features - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use reference genomesThe pipeline will be  Show help  - ▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - copy of the most common - reference genome files  - from iGenomes - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use multiqcThe pipeline will  Show help  - ▁▁▁▁▁▁▁▁include the MultiQC ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - module which generates  - 
an HTML report for  - quality control. - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use fastqcThe pipeline will  Show help  - ▁▁▁▁▁▁▁▁include the FastQC ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - module which performs  - quality control  - analysis of input FASTQ - files. - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use nf-schemaUse the nf-schema  Show help  - ▁▁▁▁▁▁▁▁Nextflow plugin for ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - this pipeline. - - - - - - - - - - - - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Continue  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode  q Quit  - - - - - ''' -# --- -# name: test_type_nfcore_validation - ''' - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - nf-core create - - - - - - - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Basic details - - - - - GitHub organisationWorkflow name - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - nf-core                                   Pipeline Name - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - Value error, Must be lowercase without  - punctuation. - - - - A short description of your pipeline. - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Description - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - Value error, Cannot be left empty. - - - - Name of the main author / authors - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Author(s) - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - Value error, Cannot be left empty. 
- - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Next  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - -  d Toggle dark mode  q Quit  - - - - - ''' -# --- -# name: test_welcome - ''' - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - nf-core create - - - - - - - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - -                                           ,--./,-. -           ___     __   __   __   ___     /,-._.--~\  -     |\ | |__  __ /  ` /  \ |__) |__         }  { -     | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                           `._,._,' - - - - Welcome to the nf-core pipeline creation wizard - -   This app will help you create a new Nextflow pipeline from the nf-core/tools pipeline template. - -   The template helps anyone benefit from nf-core best practices, and is a requirement for nf-core    -   pipelines. - - 💡 If you want to add a pipeline to nf-core, please join on Slack and discuss your plans with - the community as early as possible; ideally before you start on your pipeline! See the  - nf-core guidelines and the #new-pipelines Slack channel for more information. - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Let's go!  
- ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - - - - -  d Toggle dark mode  q Quit  - - - - - ''' -# --- diff --git a/tests/pipelines/__snapshots__/test_create_app/test_basic_details_custom.svg b/tests/pipelines/__snapshots__/test_create_app/test_basic_details_custom.svg new file mode 100644 index 000000000..5c4a15831 --- /dev/null +++ b/tests/pipelines/__snapshots__/test_create_app/test_basic_details_custom.svg @@ -0,0 +1,271 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + nf-core create + + + + + + + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + +Basic details + + + + +GitHub organisationWorkflow name + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +nf-corePipeline Name +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + +A short description of your pipeline. 
+ +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +Description +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + +Name of the main author / authors + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +Author(s) +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Back  Next  +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + d Toggle dark mode  q Quit  + + + diff --git a/tests/pipelines/__snapshots__/test_create_app/test_basic_details_nfcore.svg b/tests/pipelines/__snapshots__/test_create_app/test_basic_details_nfcore.svg new file mode 100644 index 000000000..4445dc68a --- /dev/null +++ b/tests/pipelines/__snapshots__/test_create_app/test_basic_details_nfcore.svg @@ -0,0 +1,274 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + nf-core create + + + + + + + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + +Basic details + + + + +GitHub organisationWorkflow name + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +nf-core                                   Pipeline Name +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + +A short description of your pipeline. 
+ +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +Description +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + +Name of the main author / authors + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +Author(s) +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Back  Next  +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + d Toggle dark mode  q Quit  + + + diff --git a/tests/pipelines/__snapshots__/test_create_app/test_choose_type.svg b/tests/pipelines/__snapshots__/test_create_app/test_choose_type.svg new file mode 100644 index 000000000..f32d5271e --- /dev/null +++ b/tests/pipelines/__snapshots__/test_create_app/test_choose_type.svg @@ -0,0 +1,269 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + nf-core create + + + + + + + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + +Choose pipeline type + + + + +Choose "nf-core" if:Choose "Custom" if: + +● You want your pipeline to be part of the ● Your pipeline will never be part of  +nf-core communitynf-core +● You think that there's an outside chance ● You want full control over all features  +that it ever could be part of nf-corethat are included from the template  +(including those that are mandatory for  +nf-core). +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + nf-core  +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Custom  +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + +What's the difference? 
+ +  Choosing "nf-core" effectively pre-selects the following template features: + +● GitHub Actions continuous-integration configuration files: +▪ Pipeline test runs: Small-scale (GitHub) and large-scale (AWS) +▪ Code formatting checks with Prettier +▪ Auto-fix linting functionality using @nf-core-bot +▪ Marking old issues as stale +● Inclusion of shared nf-core configuration profiles + + + + + + + + + + + + + + + d Toggle dark mode  q Quit  + + + diff --git a/tests/pipelines/__snapshots__/test_create_app/test_customisation_help.svg b/tests/pipelines/__snapshots__/test_create_app/test_customisation_help.svg new file mode 100644 index 000000000..8969694cb --- /dev/null +++ b/tests/pipelines/__snapshots__/test_create_app/test_customisation_help.svg @@ -0,0 +1,275 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + nf-core create + + + + + + + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + +Template features + + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Use a GitHub Create a GitHub  Show help  +▁▁▁▁▁▁▁▁        repository.repository for the ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +pipeline. + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Add Github CI testsThe pipeline will  Show help  +▁▁▁▁▁▁▁▁include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +actions for Continuous +Integration (CI)  +testing▄▄ + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Use reference genomesThe pipeline will be  Hide help  +▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +copy of the most  +common reference  +genome files from  +iGenomes + + +Nf-core pipelines are configured to use a copy of the most common reference  +genome files. + +By selecting this option, your pipeline will include a configuration file  +specifying the paths to these files. 
+ +The required code to use these files will also be included in the template.  +When the pipeline user provides an appropriate genome key, the pipeline will +automatically download the required reference files. +▅▅ +For more information about reference genomes in nf-core pipelines, see the  + + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Add Github badgesThe README.md file of  Show help  +▁▁▁▁▁▁▁▁the pipeline will ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +include GitHub badges + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Back  Continue  +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + d Toggle dark mode  q Quit  + + + diff --git a/tests/pipelines/__snapshots__/test_create_app/test_final_details.svg b/tests/pipelines/__snapshots__/test_create_app/test_final_details.svg new file mode 100644 index 000000000..ddd0ff57e --- /dev/null +++ b/tests/pipelines/__snapshots__/test_create_app/test_final_details.svg @@ -0,0 +1,269 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + nf-core create + + + + + + + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + +Final details + + + + +First version of the pipelinePath to the output directory where the  +pipeline will be created +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +1.0.0dev.                                          
+▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Back  Finish  +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + d Toggle dark mode  q Quit  + + + diff --git a/tests/pipelines/__snapshots__/test_create_app/test_github_details.svg b/tests/pipelines/__snapshots__/test_create_app/test_github_details.svg new file mode 100644 index 000000000..3013b9961 --- /dev/null +++ b/tests/pipelines/__snapshots__/test_create_app/test_github_details.svg @@ -0,0 +1,276 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + nf-core create + + + + + + + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + +Create GitHub repository + +  Now that we have created a new pipeline locally, we can create a new GitHub repository and push    +  the code to it. + + + + +Your GitHub usernameYour GitHub personal access token▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +for login. Show  +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +GitHub username••••••••••••                   +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + +The name of the organisation where the The name of the new GitHub repository +GitHub repo will be cretaed +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +nf-core                               mypipeline                             +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + +⚠️ You can't create a repository directly in the nf-core organisation. 
+Please create the pipeline repo to an organisation where you have access or use your user  +account. A core-team member will be able to transfer the repo to nf-core once the development +has started. + +💡 Your GitHub user account will be used by default if nf-core is given as the org name. + + +▔▔▔▔▔▔▔▔Private +Select to make the new GitHub repo private. +▁▁▁▁▁▁▁▁ +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Back  Create GitHub repo  Finish without creating a repo  +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + d Toggle dark mode  q Quit  + + + diff --git a/tests/pipelines/__snapshots__/test_create_app/test_github_exit_message.svg b/tests/pipelines/__snapshots__/test_create_app/test_github_exit_message.svg new file mode 100644 index 000000000..3612a062c --- /dev/null +++ b/tests/pipelines/__snapshots__/test_create_app/test_github_exit_message.svg @@ -0,0 +1,272 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + nf-core create + + + + + + + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + +HowTo create a GitHub repository + + + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\  +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +  If you would like to create the GitHub repository later, you can do it manually by following  +  these steps: + + 1. Create a new GitHub repository + 2. Add the remote to your local repository: + + +cd <pipeline_directory> +git remote add origin git@github.com:<username>/<repo_name>.git + + + 3. 
Push the code to the remote: + + +git push --all origin + + +💡 Note the --all flag: this is needed to push all branches to the remote. + + + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Close  +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + d Toggle dark mode  q Quit  + + + diff --git a/tests/pipelines/__snapshots__/test_create_app/test_github_question.svg b/tests/pipelines/__snapshots__/test_create_app/test_github_question.svg new file mode 100644 index 000000000..a0ca3d70d --- /dev/null +++ b/tests/pipelines/__snapshots__/test_create_app/test_github_question.svg @@ -0,0 +1,265 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + nf-core create + + + + + + + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + +Create GitHub repository + + +  After creating the pipeline template locally, we can create a GitHub repository and push the  +  code to it. + +  Do you want to create a GitHub repository? 
+ + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Create GitHub repo  Finish without creating a repo  +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + d Toggle dark mode  q Quit  + + + diff --git a/tests/pipelines/__snapshots__/test_create_app/test_type_custom.svg b/tests/pipelines/__snapshots__/test_create_app/test_type_custom.svg new file mode 100644 index 000000000..7ff071efa --- /dev/null +++ b/tests/pipelines/__snapshots__/test_create_app/test_type_custom.svg @@ -0,0 +1,273 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + nf-core create + + + + + + + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + +Template features + + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Use a GitHub Create a GitHub  Show help  +▁▁▁▁▁▁▁▁        repository.repository for the ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +pipeline. 
+ +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Add Github CI testsThe pipeline will  Show help  +▁▁▁▁▁▁▁▁include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +actions for Continuous +Integration (CI)  +testing +▃▃ +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Use reference genomesThe pipeline will be  Show help  +▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +copy of the most  +common reference  +genome files from  +iGenomes + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Add Github badgesThe README.md file of  Show help  +▁▁▁▁▁▁▁▁the pipeline will ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +include GitHub badges + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Add configuration The pipeline will  Show help  +▁▁▁▁▁▁▁▁        filesinclude configuration ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +profiles containing  +custom parameters  +requried to run  +nf-core pipelines at  +different institutions + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Use code lintersThe pipeline will  Show help  +▁▁▁▁▁▁▁▁include code linters ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +and CI tests to lint  +your code: pre-commit, +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Back  Continue  +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + d Toggle dark mode  q Quit  + + + diff --git a/tests/pipelines/__snapshots__/test_create_app/test_type_nfcore.svg b/tests/pipelines/__snapshots__/test_create_app/test_type_nfcore.svg new file mode 100644 index 000000000..48b9b91e9 --- /dev/null +++ b/tests/pipelines/__snapshots__/test_create_app/test_type_nfcore.svg @@ -0,0 +1,272 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + nf-core create + + + + + + + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + +Template features + + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Use reference genomesThe pipeline will be  Show help  +▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +copy of 
the most common +reference genome files  +from iGenomes + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Use multiqcThe pipeline will  Show help  +▁▁▁▁▁▁▁▁include the MultiQC ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +module which generates  +an HTML report for  +quality control. + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Use fastqcThe pipeline will  Show help  +▁▁▁▁▁▁▁▁include the FastQC ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +module which performs  +quality control  +analysis of input FASTQ +files. + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Use nf-schemaUse the nf-schema  Show help  +▁▁▁▁▁▁▁▁Nextflow plugin for ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +this pipeline. + + + + + + + + + + + + + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Back  Continue  +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + d Toggle dark mode  q Quit  + + + diff --git a/tests/pipelines/__snapshots__/test_create_app/test_type_nfcore_validation.svg b/tests/pipelines/__snapshots__/test_create_app/test_type_nfcore_validation.svg new file mode 100644 index 000000000..7e55b2b0f --- /dev/null +++ b/tests/pipelines/__snapshots__/test_create_app/test_type_nfcore_validation.svg @@ -0,0 +1,273 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + nf-core create + + + + + + + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + + +Basic details + + + + +GitHub organisationWorkflow name + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +nf-core                                   Pipeline Name +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +Value error, Must be lowercase without  +punctuation. + + + +A short description of your pipeline. 
+ +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +Description +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +Value error, Cannot be left empty. + + + +Name of the main author / authors + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +Author(s) +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +Value error, Cannot be left empty. + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Back  Next  +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + d Toggle dark mode  q Quit  + + + diff --git a/tests/pipelines/__snapshots__/test_create_app/test_welcome.svg b/tests/pipelines/__snapshots__/test_create_app/test_welcome.svg new file mode 100644 index 000000000..2670307c2 --- /dev/null +++ b/tests/pipelines/__snapshots__/test_create_app/test_welcome.svg @@ -0,0 +1,271 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + nf-core create + + + + + + + + + + nf-core create — Create a new pipeline with the nf-core pipeline template + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\  +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + + + +Welcome to the nf-core pipeline creation wizard + +  This app will help you create a new Nextflow pipeline from the nf-core/tools pipeline template. + +  The template helps anyone benefit from nf-core best practices, and is a requirement for nf-core    +  pipelines. 
+ +💡 If you want to add a pipeline to nf-core, please join on Slack and discuss your plans with +the community as early as possible; ideally before you start on your pipeline! See the  +nf-core guidelines and the #new-pipelines Slack channel for more information. + + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Let's go!  +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + + + + + d Toggle dark mode  q Quit  + + + From 8d5f898f3114d1b029963af7ed60590b56288175 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 27 Sep 2024 11:51:12 +0200 Subject: [PATCH 687/737] remove code duplication and format components info command --- nf_core/components/info.py | 43 +++++++++++++++++++++++--------------- 1 file changed, 26 insertions(+), 17 deletions(-) diff --git a/nf_core/components/info.py b/nf_core/components/info.py index e4a58d161..f3e5bf617 100644 --- a/nf_core/components/info.py +++ b/nf_core/components/info.py @@ -229,6 +229,25 @@ def get_remote_yaml(self) -> Optional[dict]: self.remote_location = self.modules_repo.remote_url return yaml.safe_load(file_contents) + def generate_params_table(self, type) -> Table: + "Generate a rich table for inputs and outputs" + table = Table(expand=True, show_lines=True, box=box.MINIMAL_HEAVY_HEAD, padding=0) + table.add_column(f":inbox_tray: {type}") + table.add_column("Description") + if self.component_type == "modules": + table.add_column("Pattern", justify="right", style="green") + elif self.component_type == "subworkflows": + table.add_column("Structure", justify="right", style="green") + return table + + def get_channel_structure(self, structure: dict) -> str: + "Get the structure of a channel" + structure_str = "" + for key, info in structure.items(): + pattern = f" - {info['pattern']}" if info.get("pattern") else "" + structure_str += f"{key} ({info['type']}{pattern})" + return structure_str + def generate_component_info_help(self): """Take the parsed meta.yml and generate rich help. 
@@ -277,14 +296,9 @@ def generate_component_info_help(self): # Inputs if self.meta.get("input"): - inputs_table = Table(expand=True, show_lines=True, box=box.MINIMAL_HEAVY_HEAD, padding=0) - inputs_table.add_column(":inbox_tray: Inputs") - inputs_table.add_column("Description") - if self.component_type == "modules": - inputs_table.add_column("Pattern", justify="right", style="green") - elif self.component_type == "subworkflows": - inputs_table.add_column("Structure", justify="right", style="green") - for input in self.meta["input"]: + inputs_table = self.generate_params_table("Inputs") + for i, input in enumerate(self.meta["input"]): + inputs_table.add_row(f"[italic]input[{i}][/]", "", "") if self.component_type == "modules": for element in input: for key, info in element.items(): @@ -298,23 +312,18 @@ def generate_component_info_help(self): inputs_table.add_row( f"[orange1 on black] {key} [/][dim i]", Markdown(info["description"] if info["description"] else ""), - info.get("structure", ""), + self.get_channel_structure(info["structure"]) if info.get("structure") else "", ) renderables.append(inputs_table) # Outputs if self.meta.get("output"): - outputs_table = Table(expand=True, show_lines=True, box=box.MINIMAL_HEAVY_HEAD, padding=0) - outputs_table.add_column(":outbox_tray: Outputs") - outputs_table.add_column("Description") - if self.component_type == "modules": - inputs_table.add_column("Pattern", justify="right", style="green") - elif self.component_type == "subworkflows": - inputs_table.add_column("Structure", justify="right", style="green") + outputs_table = self.generate_params_table("Outputs") for output in self.meta["output"]: if self.component_type == "modules": for ch_name, elements in output.items(): + outputs_table.add_row(f"{ch_name}", "", "") for element in elements: for key, info in element.items(): outputs_table.add_row( @@ -327,7 +336,7 @@ def generate_component_info_help(self): outputs_table.add_row( f"[orange1 on black] {key} [/][dim i]", 
Markdown(info["description"] if info["description"] else ""), - info.get("structure", ""), + self.get_channel_structure(info["structure"]) if info.get("structure") else "", ) renderables.append(outputs_table) From efc11ba691b08b06df1e1631e9416de36a503a02 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 27 Sep 2024 16:35:00 +0200 Subject: [PATCH 688/737] add conf/igenomes_ignored.config to linting --- nf_core/pipelines/create/template_features.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/nf_core/pipelines/create/template_features.yml b/nf_core/pipelines/create/template_features.yml index c16642943..6f476cdfb 100644 --- a/nf_core/pipelines/create/template_features.yml +++ b/nf_core/pipelines/create/template_features.yml @@ -77,6 +77,7 @@ igenomes: linting: files_exist: - "conf/igenomes.config" + - "conf/igenomes_ignored.config" nfcore_pipelines: True custom_pipelines: True github_badges: From 27093939c2dec3d770a97ea17222ffbe7c2b6934 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 13 Sep 2024 16:46:16 +0200 Subject: [PATCH 689/737] remove try-catch blocks from nextflow.config --- CHANGELOG.md | 1 + nf_core/pipeline-template/nextflow.config | 13 +++---------- 2 files changed, 4 insertions(+), 10 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6a11aee51..b1c5463a9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -41,6 +41,7 @@ - Remove if/else block to include `igenomes.config` ([#3168](https://github.com/nf-core/tools/pull/3168)) - Replaces the old custom `check_max()` function with the Nextflow native `resourceLimits` directive ([#3037](https://github.com/nf-core/tools/pull/3037)) - Fixed release announcement hashtags for Mastodon ([#3099](https://github.com/nf-core/tools/pull/3176)) +- Remove try/catch blocks from `nextflow.config` ([#3167](https://github.com/nf-core/tools/pull/3167)) ### Linting diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index 39c352136..dc2aea0b7 100644 --- 
a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -186,18 +186,11 @@ profiles { {% if nf_core_configs -%} // Load nf-core custom profiles from different Institutions -try { - includeConfig "${params.custom_config_base}/nfcore_custom.config" -} catch (Exception e) { - System.err.println("WARNING: Could not load nf-core/config profiles: ${params.custom_config_base}/nfcore_custom.config") -} +includeConfig !System.getenv('NXF_OFFLINE') && params.custom_config_base ? "${params.custom_config_base}/nfcore_custom.config" : "/dev/null" // Load {{ name }} custom profiles from different institutions. -try { - includeConfig "${params.custom_config_base}/pipeline/{{ short_name }}.config" -} catch (Exception e) { - System.err.println("WARNING: Could not load nf-core/config/{{ short_name }} profiles: ${params.custom_config_base}/pipeline/{{ short_name }}.config") -} +// TODO nf-core: Optionally, you can add a pipeline-specific nf-core config at https://github.com/nf-core/configs +includeConfig !System.getenv('NXF_OFFLINE') && params.custom_config_base ? 
"${params.custom_config_base}/pipeline/{{ short_name }}.config" : "/dev/null" {% endif -%} // Set default registry for Apptainer, Docker, Podman, Charliecloud and Singularity independent of -profile From b1df038c263bd8893d7971e0cfe30dac746f86d1 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 16 Sep 2024 11:52:34 +0200 Subject: [PATCH 690/737] change the name of tmp pipeline to be consistent with all tests --- tests/utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/utils.py b/tests/utils.py index 6f4b73ccc..022b91227 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -113,7 +113,7 @@ def create_tmp_pipeline(no_git: bool = False) -> Tuple[Path, Path, str, Path]: tmp_dir = Path(tempfile.TemporaryDirectory().name) root_repo_dir = Path(__file__).resolve().parent.parent template_dir = root_repo_dir / "nf_core" / "pipeline-template" - pipeline_name = "mypipeline" + pipeline_name = "testpipeline" pipeline_dir = tmp_dir / pipeline_name pipeline_dir.mkdir(parents=True) @@ -123,7 +123,7 @@ def create_tmp_pipeline(no_git: bool = False) -> Tuple[Path, Path, str, Path]: org_path="nf-core", lint=None, template=NFCoreTemplateConfig( - name="mypipeline", + name="testpipeline", author="me", description="it is mine", org="nf-core", From 7e738eab0ed726ea21d007822a30e396a62c8ded Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 16 Sep 2024 12:19:46 +0200 Subject: [PATCH 691/737] update nextflow_config linting to include the new includeconfig line --- nf_core/pipelines/lint/nextflow_config.py | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/nf_core/pipelines/lint/nextflow_config.py b/nf_core/pipelines/lint/nextflow_config.py index 14e91cc60..dd45621bc 100644 --- a/nf_core/pipelines/lint/nextflow_config.py +++ b/nf_core/pipelines/lint/nextflow_config.py @@ -346,7 +346,7 @@ def nextflow_config(self) -> Dict[str, List[str]]: failed.append(f"Config `params.custom_config_base` is not set to 
`{custom_config_base}`") # Check that lines for loading custom profiles exist - lines = [ + old_lines = [ r"// Load nf-core custom profiles from different Institutions", r"try {", r'includeConfig "${params.custom_config_base}/nfcore_custom.config"', @@ -354,11 +354,19 @@ def nextflow_config(self) -> Dict[str, List[str]]: r'System.err.println("WARNING: Could not load nf-core/config profiles: ${params.custom_config_base}/nfcore_custom.config")', r"}", ] + lines = [ + r"// Load nf-core custom profiles from different Institutions", + r'''includeConfig !System.getenv('NXF_OFFLINE') && params.custom_config_base ? "${params.custom_config_base}/nfcore_custom.config" : "/dev/null"''', + ] path = Path(self.wf_path, "nextflow.config") i = 0 with open(path) as f: for line in f: - if lines[i] in line: + if old_lines[i] in line: + i += 1 + if i == len(old_lines): + break + elif lines[i] in line: i += 1 if i == len(lines): break @@ -366,6 +374,12 @@ def nextflow_config(self) -> Dict[str, List[str]]: i = 0 if i == len(lines): passed.append("Lines for loading custom profiles found") + elif i == len(old_lines): + failed.append( + "Old lines for loading custom profiles found. 
File should contain: ```groovy\n{}".format( + "\n".join(lines) + ) + ) else: lines[2] = f"\t{lines[2]}" lines[4] = f"\t{lines[4]}" From e46779b6b8709af658c43ed5533603db306231e1 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 30 Sep 2024 11:04:28 +0200 Subject: [PATCH 692/737] add test on pipeline release to check that custom configs are included --- nf_core/pipeline-template/nextflow.config | 2 +- nf_core/pipelines/lint/__init__.py | 4 ++- nf_core/pipelines/lint/included_configs.py | 37 ++++++++++++++++++++++ 3 files changed, 41 insertions(+), 2 deletions(-) create mode 100644 nf_core/pipelines/lint/included_configs.py diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index dc2aea0b7..4c816a2a2 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -190,7 +190,7 @@ includeConfig !System.getenv('NXF_OFFLINE') && params.custom_config_base ? "${pa // Load {{ name }} custom profiles from different institutions. // TODO nf-core: Optionally, you can add a pipeline-specific nf-core config at https://github.com/nf-core/configs -includeConfig !System.getenv('NXF_OFFLINE') && params.custom_config_base ? "${params.custom_config_base}/pipeline/{{ short_name }}.config" : "/dev/null" +// includeConfig !System.getenv('NXF_OFFLINE') && params.custom_config_base ? 
"${params.custom_config_base}/pipeline/{{ short_name }}.config" : "/dev/null" {% endif -%} // Set default registry for Apptainer, Docker, Podman, Charliecloud and Singularity independent of -profile diff --git a/nf_core/pipelines/lint/__init__.py b/nf_core/pipelines/lint/__init__.py index 6d27351b6..8cc7c37cb 100644 --- a/nf_core/pipelines/lint/__init__.py +++ b/nf_core/pipelines/lint/__init__.py @@ -37,6 +37,7 @@ from .configs import base_config, modules_config from .files_exist import files_exist from .files_unchanged import files_unchanged +from .included_configs import included_configs from .merge_markers import merge_markers from .modules_json import modules_json from .modules_structure import modules_structure @@ -101,6 +102,7 @@ class PipelineLint(nf_core.utils.Pipeline): system_exit = system_exit template_strings = template_strings version_consistency = version_consistency + included_configs = included_configs def __init__( self, wf_path, release_mode=False, fix=(), key=None, fail_ignored=False, fail_warned=False, hide_progress=False @@ -152,7 +154,7 @@ def _get_all_lint_tests(release_mode): "base_config", "modules_config", "nfcore_yml", - ] + (["version_consistency"] if release_mode else []) + ] + (["version_consistency", "included_configs"] if release_mode else []) def _load(self) -> bool: """Load information about the pipeline into the PipelineLint object""" diff --git a/nf_core/pipelines/lint/included_configs.py b/nf_core/pipelines/lint/included_configs.py new file mode 100644 index 000000000..9d4eb1f99 --- /dev/null +++ b/nf_core/pipelines/lint/included_configs.py @@ -0,0 +1,37 @@ +from pathlib import Path + + +def included_configs(self): + """Check that the pipeline nextflow.config includes the pipeline custom configs. + + If the include line is uncommented, the test passes. + If the include line is commented, the test fails. + If the include line is missing, the test warns. 
+ + Can be skipped by adding the following to the .nf-core.yml file: + lint: + included_configs: False + """ + passed = [] + failed = [] + warned = [] + + config_file = Path(self.wf_path / "nextflow.config") + + with open(config_file) as fh: + config = fh.read() + print(self.pipeline_name) + if ( + f"// includeConfig !System.getenv('NXF_OFFLINE') && params.custom_config_base ? \"${{params.custom_config_base}}/pipeline/{self.pipeline_name}.config\"" + in config + ): + failed.append("Pipeline config does not include custom configs. Please uncomment the includeConfig line.") + elif ( + "includeConfig !System.getenv('NXF_OFFLINE') && params.custom_config_base ? \"${{params.custom_config_base}}/pipeline/{self.pipeline_name}.config\"" + in config + ): + passed.append("Pipeline config includes custom configs.") + else: + warned.append("Pipeline config does not include custom configs. Please add the includeConfig line.") + + return {"passed": passed, "failed": failed, "warned": warned} From 70b231e9ee4eb1361f5bfb0cc47aec41bc73f849 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 30 Sep 2024 11:43:28 +0200 Subject: [PATCH 693/737] fix tests by uncommenting includeConfig statements --- .github/actions/create-lint-wf/action.yml | 6 ++++++ .github/workflows/create-lint-wf.yml | 5 +++++ .github/workflows/create-test-lint-wf-template.yml | 5 +++++ nf_core/pipelines/create/template_features.yml | 1 + nf_core/pipelines/lint/included_configs.py | 1 - 5 files changed, 17 insertions(+), 1 deletion(-) diff --git a/.github/actions/create-lint-wf/action.yml b/.github/actions/create-lint-wf/action.yml index 8760901db..7cc6f85d4 100644 --- a/.github/actions/create-lint-wf/action.yml +++ b/.github/actions/create-lint-wf/action.yml @@ -53,6 +53,12 @@ runs: run: find nf-core-testpipeline -type f -exec sed -i '/TODO nf-core:/d' {} \; working-directory: create-lint-wf + # Uncomment includeConfig statement + - name: uncomment include config + shell: bash + run: sed -i 's/\/\/ 
includeConfig/includeConfig/' nf-core-testpipeline/nextflow.config + working-directory: create-lint-wf + # Replace zenodo.XXXXXX to pass readme linting - name: replace zenodo.XXXXXX shell: bash diff --git a/.github/workflows/create-lint-wf.yml b/.github/workflows/create-lint-wf.yml index 1a3e283d0..190cd0117 100644 --- a/.github/workflows/create-lint-wf.yml +++ b/.github/workflows/create-lint-wf.yml @@ -78,6 +78,11 @@ jobs: run: find nf-core-testpipeline -type f -exec sed -i '/TODO nf-core:/d' {} \; working-directory: create-lint-wf + # Uncomment includeConfig statement + - name: uncomment include config + run: sed -i 's/\/\/ includeConfig/includeConfig/' nf-core-testpipeline/nextflow.config + working-directory: create-lint-wf + # Run the other nf-core commands - name: nf-core pipelines list run: nf-core --log-file log.txt pipelines list diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index 1fb521b4b..10aeccd29 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -137,6 +137,11 @@ jobs: run: find my-prefix-testpipeline -type f -exec sed -i '/TODO nf-core:/d' {} \; working-directory: create-test-lint-wf + # Uncomment includeConfig statement + - name: uncomment include config + run: sed -i 's/\/\/ includeConfig/includeConfig/' nf-core-testpipeline/nextflow.config + working-directory: create-lint-wf + # Replace zenodo.XXXXXX to pass readme linting - name: replace zenodo.XXXXXX run: find my-prefix-testpipeline -type f -exec sed -i 's/zenodo.XXXXXX/zenodo.123456/g' {} \; diff --git a/nf_core/pipelines/create/template_features.yml b/nf_core/pipelines/create/template_features.yml index 6f476cdfb..55bb3b0b3 100644 --- a/nf_core/pipelines/create/template_features.yml +++ b/nf_core/pipelines/create/template_features.yml @@ -120,6 +120,7 @@ nf_core_configs: - "custom_config" - "params.custom_config_version" - "params.custom_config_base" + 
included_configs: False nfcore_pipelines: False custom_pipelines: True is_nfcore: diff --git a/nf_core/pipelines/lint/included_configs.py b/nf_core/pipelines/lint/included_configs.py index 9d4eb1f99..6cfeb3f8a 100644 --- a/nf_core/pipelines/lint/included_configs.py +++ b/nf_core/pipelines/lint/included_configs.py @@ -20,7 +20,6 @@ def included_configs(self): with open(config_file) as fh: config = fh.read() - print(self.pipeline_name) if ( f"// includeConfig !System.getenv('NXF_OFFLINE') && params.custom_config_base ? \"${{params.custom_config_base}}/pipeline/{self.pipeline_name}.config\"" in config From 3a0a8ca33cc876bfb6299ded1b09f1b9cd4e4dcd Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 30 Sep 2024 11:56:51 +0200 Subject: [PATCH 694/737] fix sed commands --- .github/actions/create-lint-wf/action.yml | 2 +- .github/workflows/create-lint-wf.yml | 2 +- .github/workflows/create-test-lint-wf-template.yml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/actions/create-lint-wf/action.yml b/.github/actions/create-lint-wf/action.yml index 7cc6f85d4..ecd0eef87 100644 --- a/.github/actions/create-lint-wf/action.yml +++ b/.github/actions/create-lint-wf/action.yml @@ -56,7 +56,7 @@ runs: # Uncomment includeConfig statement - name: uncomment include config shell: bash - run: sed -i 's/\/\/ includeConfig/includeConfig/' nf-core-testpipeline/nextflow.config + run: find nf-core-testpipeline -type f -exec sed -i 's/\/\/ includeConfig/includeConfig/' {} \; working-directory: create-lint-wf # Replace zenodo.XXXXXX to pass readme linting diff --git a/.github/workflows/create-lint-wf.yml b/.github/workflows/create-lint-wf.yml index 190cd0117..e0b4c67cf 100644 --- a/.github/workflows/create-lint-wf.yml +++ b/.github/workflows/create-lint-wf.yml @@ -80,7 +80,7 @@ jobs: # Uncomment includeConfig statement - name: uncomment include config - run: sed -i 's/\/\/ includeConfig/includeConfig/' nf-core-testpipeline/nextflow.config + run: find 
nf-core-testpipeline -type f -exec sed -i 's/\/\/ includeConfig/includeConfig/' {} \; working-directory: create-lint-wf # Run the other nf-core commands diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index 10aeccd29..86d6b3f6b 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -139,7 +139,7 @@ jobs: # Uncomment includeConfig statement - name: uncomment include config - run: sed -i 's/\/\/ includeConfig/includeConfig/' nf-core-testpipeline/nextflow.config + run: find nf-core-testpipeline -type f -exec sed -i 's/\/\/ includeConfig/includeConfig/' {} \; working-directory: create-lint-wf # Replace zenodo.XXXXXX to pass readme linting From c779bf4197ae545527cc4251ce27aeb3ee011c3b Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 30 Sep 2024 12:10:19 +0200 Subject: [PATCH 695/737] formatted string --- nf_core/pipelines/lint/included_configs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipelines/lint/included_configs.py b/nf_core/pipelines/lint/included_configs.py index 6cfeb3f8a..75c4594f4 100644 --- a/nf_core/pipelines/lint/included_configs.py +++ b/nf_core/pipelines/lint/included_configs.py @@ -26,7 +26,7 @@ def included_configs(self): ): failed.append("Pipeline config does not include custom configs. Please uncomment the includeConfig line.") elif ( - "includeConfig !System.getenv('NXF_OFFLINE') && params.custom_config_base ? \"${{params.custom_config_base}}/pipeline/{self.pipeline_name}.config\"" + f"includeConfig !System.getenv('NXF_OFFLINE') && params.custom_config_base ? 
\"${{params.custom_config_base}}/pipeline/{self.pipeline_name}.config\"" in config ): passed.append("Pipeline config includes custom configs.") From d625d923933294b9da93cc63de0b4beccdd35752 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 30 Sep 2024 12:16:03 +0200 Subject: [PATCH 696/737] test output directory typo --- .github/workflows/create-test-lint-wf-template.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index 86d6b3f6b..debbf8e69 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -140,7 +140,7 @@ jobs: # Uncomment includeConfig statement - name: uncomment include config run: find nf-core-testpipeline -type f -exec sed -i 's/\/\/ includeConfig/includeConfig/' {} \; - working-directory: create-lint-wf + working-directory: create-test-lint-wf # Replace zenodo.XXXXXX to pass readme linting - name: replace zenodo.XXXXXX From 19d21bb91ab2c765406224417db417189ece6af2 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 30 Sep 2024 12:19:38 +0200 Subject: [PATCH 697/737] fix more output directory errors --- .github/workflows/create-test-lint-wf-template.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index debbf8e69..d8df2f690 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -139,7 +139,7 @@ jobs: # Uncomment includeConfig statement - name: uncomment include config - run: find nf-core-testpipeline -type f -exec sed -i 's/\/\/ includeConfig/includeConfig/' {} \; + run: find my-prefix-testpipeline -type f -exec sed -i 's/\/\/ includeConfig/includeConfig/' {} \; working-directory: create-test-lint-wf # Replace zenodo.XXXXXX to pass readme linting From 
5f367d18ffa4274ba8eb3084cf94abe6c10e19c2 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 30 Sep 2024 12:32:51 +0200 Subject: [PATCH 698/737] generate API docs --- docs/api/_src/pipeline_lint_tests/included_configs.md | 5 +++++ docs/api/_src/pipeline_lint_tests/index.md | 2 ++ 2 files changed, 7 insertions(+) create mode 100644 docs/api/_src/pipeline_lint_tests/included_configs.md diff --git a/docs/api/_src/pipeline_lint_tests/included_configs.md b/docs/api/_src/pipeline_lint_tests/included_configs.md new file mode 100644 index 000000000..f68f7da25 --- /dev/null +++ b/docs/api/_src/pipeline_lint_tests/included_configs.md @@ -0,0 +1,5 @@ +# included_configs + + ```{eval-rst} + .. automethod:: nf_core.pipelines.lint.PipelineLint.included_configs + ``` diff --git a/docs/api/_src/pipeline_lint_tests/index.md b/docs/api/_src/pipeline_lint_tests/index.md index 3575c08db..4dd93442d 100644 --- a/docs/api/_src/pipeline_lint_tests/index.md +++ b/docs/api/_src/pipeline_lint_tests/index.md @@ -7,6 +7,7 @@ - [base_config](./base_config/) - [files_exist](./files_exist/) - [files_unchanged](./files_unchanged/) + - [included_configs](./included_configs/) - [merge_markers](./merge_markers/) - [modules_config](./modules_config/) - [modules_json](./modules_json/) @@ -16,6 +17,7 @@ - [nfcore_yml](./nfcore_yml/) - [pipeline_name_conventions](./pipeline_name_conventions/) - [pipeline_todos](./pipeline_todos/) + - [plugin_includes](./plugin_includes/) - [readme](./readme/) - [schema_description](./schema_description/) - [schema_lint](./schema_lint/) From ae5084038ac5712e12566eb39ebd0aab593942be Mon Sep 17 00:00:00 2001 From: Matthias Zepper Date: Mon, 23 Sep 2024 19:18:21 +0200 Subject: [PATCH 699/737] Refactor CLI commands for Download. 
--- nf_core/__main__.py | 20 +++++--------------- nf_core/pipelines/download.py | 6 ++---- 2 files changed, 7 insertions(+), 19 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index c7e927c8c..9255c53bd 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -366,26 +366,18 @@ def command_pipelines_lint( help="Archive compression type", ) @click.option("-f", "--force", is_flag=True, default=False, help="Overwrite existing files") -# TODO: Remove this in a future release. Deprecated in March 2024. -@click.option( - "-t", - "--tower", - is_flag=True, - default=False, - hidden=True, - help="Download for Seqera Platform. DEPRECATED: Please use `--platform` instead.", -) @click.option( + "-p", "--platform", is_flag=True, default=False, help="Download for Seqera Platform (formerly Nextflow Tower)", ) @click.option( - "-d", + "-c", "--download-configuration", - is_flag=True, - default=False, + type=click.Choice(["yes", "no"]), + default="no", help="Include configuration profiles in download. Not available with `--platform`", ) @click.option( @@ -420,7 +412,7 @@ def command_pipelines_lint( help="List of images already available in a remote `singularity.cacheDir`.", ) @click.option( - "-p", + "-n", "--parallel-downloads", type=int, default=4, @@ -434,7 +426,6 @@ def command_pipelines_download( outdir, compress, force, - tower, platform, download_configuration, tag, @@ -454,7 +445,6 @@ def command_pipelines_download( outdir, compress, force, - tower, platform, download_configuration, tag, diff --git a/nf_core/pipelines/download.py b/nf_core/pipelines/download.py index 97453b127..7018dc7b4 100644 --- a/nf_core/pipelines/download.py +++ b/nf_core/pipelines/download.py @@ -133,10 +133,8 @@ def __init__( self.force = force self.platform = platform self.fullname: Optional[str] = None - # if flag is not specified, do not assume deliberate choice and prompt config inclusion interactively. 
- # this implies that non-interactive "no" choice is only possible implicitly (e.g. with --platform or if prompt is suppressed by !stderr.is_interactive). - # only alternative would have been to make it a parameter with argument, e.g. -d="yes" or -d="no". - self.include_configs = True if download_configuration else False if bool(platform) else None + # downloading configs is not supported for Seqera Platform downloads. + self.include_configs = True if download_configuration == "yes" and not bool(platform) else False # Additional tags to add to the downloaded pipeline. This enables to mark particular commits or revisions with # additional tags, e.g. "stable", "testing", "validated", "production" etc. Since this requires a git-repo, it is only # available for the bare / Seqera Platform download. From 95435c9c58cb556a096906bf368549e18de21799 Mon Sep 17 00:00:00 2001 From: Matthias Zepper Date: Mon, 23 Sep 2024 19:55:51 +0200 Subject: [PATCH 700/737] Adapt tests for the new Pipelines Download CLI. --- nf_core/__main__.py | 3 +-- nf_core/commands_pipelines.py | 6 +----- tests/test_cli.py | 4 ++-- 3 files changed, 4 insertions(+), 9 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 9255c53bd..98673fe1e 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -2110,8 +2110,7 @@ def command_download( outdir, compress, force, - tower, - platform, + platform or tower, download_configuration, tag, container_system, diff --git a/nf_core/commands_pipelines.py b/nf_core/commands_pipelines.py index 23affb1d2..1186935e5 100644 --- a/nf_core/commands_pipelines.py +++ b/nf_core/commands_pipelines.py @@ -167,7 +167,6 @@ def pipelines_download( outdir, compress, force, - tower, platform, download_configuration, tag, @@ -185,16 +184,13 @@ def pipelines_download( """ from nf_core.pipelines.download import DownloadWorkflow - if tower: - log.warning("[red]The `-t` / `--tower` flag is deprecated. 
Please use `--platform` instead.[/]") - dl = DownloadWorkflow( pipeline, revision, outdir, compress, force, - tower or platform, # True if either specified + platform, download_configuration, tag, container_system, diff --git a/tests/test_cli.py b/tests/test_cli.py index 026efd1e6..bea0223f0 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -167,7 +167,7 @@ def test_cli_download(self, mock_dl): "compress": "tar.gz", "force": None, "platform": None, - "download-configuration": None, + "download-configuration": "yes", "tag": "3.12=testing", "container-system": "singularity", "container-library": "quay.io", @@ -188,7 +188,7 @@ def test_cli_download(self, mock_dl): params["compress"], "force" in params, "platform" in params, - "download-configuration" in params, + params["download-configuration"], (params["tag"],), params["container-system"], (params["container-library"],), From 5dea0d032a52cfa2779b487baa5e38aaa5421cd1 Mon Sep 17 00:00:00 2001 From: Matthias Zepper Date: Mon, 23 Sep 2024 20:24:10 +0200 Subject: [PATCH 701/737] Fix GitHub Action in pipeline-template and update Changelog. --- CHANGELOG.md | 11 +++++++++++ nf_core/__main__.py | 2 +- .../.github/workflows/download_pipeline.yml | 2 +- 3 files changed, 13 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6a11aee51..bc740a803 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -70,6 +70,17 @@ - Components: allow spaces at the beginning of include statements ([#3115](https://github.com/nf-core/tools/pull/3115)) - Add option `--fix` to update the `meta.yml` file of subworkflows ([#3077](https://github.com/nf-core/tools/pull/3077)) +### Download + +- Fully removed already deprecated `-t` / `--tower` flag. +- Refactored the CLI for consistency (short flag is usually second word, e.g. 
also for `--container-library` etc.): + +| Old parameter | New parameter | +| --------------------------------- | --------------------------------- | +| `-d` / `--download-configuration` | `-c` / `--download-configuration` | +| `-p` / `--parallel-downloads` | `-d` / `--parallel-downloads` | +| new parameter | `-p` / (`--platform`) | + ### General - Update output of generation script for API docs to new structure ([#2988](https://github.com/nf-core/tools/pull/2988)) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 98673fe1e..e0c1b85e9 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -412,7 +412,7 @@ def command_pipelines_lint( help="List of images already available in a remote `singularity.cacheDir`.", ) @click.option( - "-n", + "-d", "--parallel-downloads", type=int, default=4, diff --git a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml index e7a28e5ac..f704609ca 100644 --- a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml +++ b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml @@ -65,7 +65,7 @@ jobs: --container-system 'singularity' \ --container-library "quay.io" -l "docker.io" -l "ghcr.io" \ --container-cache-utilisation 'amend' \ - --download-configuration + --download-configuration 'yes' - name: Inspect download run: tree ./${{ env.REPOTITLE_LOWERCASE }}{% endraw %}{% if test_config %}{% raw %} From 91e9de554e251236cf9b5683859bdd68284f8fa5 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 1 Oct 2024 11:53:40 +0200 Subject: [PATCH 702/737] skip citations.md from linting --- nf_core/pipelines/create/template_features.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/nf_core/pipelines/create/template_features.yml b/nf_core/pipelines/create/template_features.yml index 1b9ff6874..3eb654726 100644 --- a/nf_core/pipelines/create/template_features.yml +++ b/nf_core/pipelines/create/template_features.yml @@ -194,6 
+194,9 @@ citations: Additionally, it will include a YAML file (`assets/methods_description_template.yml`) to add a Materials & Methods section describing the tools used in the pieline, and the logics to add this section to the output MultiQC report (if the report is generated). + linting: + files_exist: + - "CITATIONS.md" nfcore_pipelines: False custom_pipelines: True gitpod: From b75ba51dba493d8171e1bdb10d13b952a82f405b Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 1 Oct 2024 16:51:32 +0200 Subject: [PATCH 703/737] fix old nf-core sync command and use the info from .nf-core.yml to sync a pipeline --- nf_core/__main__.py | 5 +++-- nf_core/utils.py | 39 +++++++++++++++++++++++++++++++++++++++ 2 files changed, 42 insertions(+), 2 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index e0c1b85e9..08589fc24 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -1815,6 +1815,7 @@ def command_create_logo(logo_text, directory, name, theme, width, format, force) # nf-core sync (deprecated) @nf_core_cli.command("sync", hidden=True, deprecated=True) +@click.pass_context @click.option( "-d", "--dir", @@ -1845,14 +1846,14 @@ def command_create_logo(logo_text, directory, name, theme, width, format, force) @click.option("-g", "--github-repository", type=str, help="GitHub PR: target repository.") @click.option("-u", "--username", type=str, help="GitHub PR: auth username.") @click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") -def command_sync(directory, from_branch, pull_request, github_repository, username, template_yaml, force_pr): +def command_sync(ctx, directory, from_branch, pull_request, github_repository, username, template_yaml, force_pr): """ Use `nf-core pipelines sync` instead. """ log.warning( "The `[magenta]nf-core sync[/]` command is deprecated. Use `[magenta]nf-core pipelines sync[/]` instead." 
) - pipelines_sync(directory, from_branch, pull_request, github_repository, username, template_yaml, force_pr) + pipelines_sync(ctx, directory, from_branch, pull_request, github_repository, username, template_yaml, force_pr) # nf-core bump-version (deprecated) diff --git a/nf_core/utils.py b/nf_core/utils.py index 663efb6b4..7795d0d27 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1136,6 +1136,45 @@ def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Optional[Path] error_message += f"\n{error['loc'][0]}: {error['msg']}" raise AssertionError(error_message) + # Retrieve information if template from config file is empty + wf_config = fetch_wf_config(Path(directory)) + config_template_keys = tools_config["template"].keys() if "template" in tools_config else [] + if nf_core_yaml_config.template is None: + # The .nf-core.yml file did not contain template information + nf_core_yaml_config.template = NFCoreTemplateConfig( + org="nf-core", + name=wf_config["manifest.name"].strip('"').strip("'").split("/")[-1], + description=wf_config["manifest.description"].strip('"').strip("'"), + author=wf_config["manifest.author"].strip('"').strip("'"), + version=wf_config["manifest.version"].strip('"').strip("'"), + outdir=str(directory), + ) + elif "prefix" in config_template_keys or "skip" in config_template_keys: + # The .nf-core.yml file contained the old prefix or skip keys + nf_core_yaml_config.template = NFCoreTemplateConfig( + org=tools_config["template"]["prefix"] + if "prefix" in config_template_keys + else tools_config["template"]["org"] or "nf-core", + name=tools_config["template"]["name"] + if "name" in config_template_keys + else wf_config["manifest.name"].strip('"').strip("'").split("/")[-1], + description=tools_config["template"]["description"] + if "description" in config_template_keys + else wf_config["manifest.description"].strip('"').strip("'"), + author=tools_config["template"]["author"] + if "author" in config_template_keys + else 
wf_config["manifest.author"].strip('"').strip("'"), + version=tools_config["template"]["version"] + if "version" in config_template_keys + else wf_config["manifest.version"].strip('"').strip("'"), + outdir=tools_config["template"]["outdir"] if "outdir" in config_template_keys else str(directory), + skip_features=tools_config["template"]["skip"] + if "skip" in config_template_keys + else tools_config["template"]["skip_features"] + if "skip_features" in config_template_keys + else None, + ) + log.debug("Using config file: %s", config_fn) return config_fn, nf_core_yaml_config From a3468d3527f60b82e8ad85da7ecb43eb968c8475 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 2 Oct 2024 09:33:39 +0200 Subject: [PATCH 704/737] update nf-core version and outdir before writing to the template .nf-core.yml --- nf_core/pipelines/create/create.py | 6 +----- nf_core/pipelines/sync.py | 10 ++++++++-- nf_core/utils.py | 10 +++++++++- 3 files changed, 18 insertions(+), 8 deletions(-) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 8e1d46c69..b23dc27e0 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -182,7 +182,7 @@ def update_config(self, organisation, version, force, outdir): self.config.force = force if force else False if self.config.outdir is None: self.config.outdir = outdir if outdir else "." 
- if self.config.is_nfcore is None: + if self.config.is_nfcore is None or self.config.is_nfcore == "null": self.config.is_nfcore = self.config.org == "nf-core" def obtain_jinja_params_dict( @@ -363,11 +363,9 @@ def render_template(self) -> None: config_fn, config_yml = nf_core.utils.load_tools_config(self.outdir) if config_fn is not None and config_yml is not None: with open(str(config_fn), "w") as fh: - self.config.outdir = str(self.config.outdir) config_yml.template = NFCoreTemplateConfig(**self.config.model_dump()) yaml.safe_dump(config_yml.model_dump(), fh) log.debug(f"Dumping pipeline template yml to pipeline config file '{config_fn.name}'") - run_prettier_on_file(self.outdir / config_fn) # Run prettier on files run_prettier_on_file(self.outdir) @@ -401,8 +399,6 @@ def fix_linting(self): with open(self.outdir / config_fn, "w") as fh: yaml.dump(nf_core_yml.model_dump(), fh, default_flow_style=False, sort_keys=False) - run_prettier_on_file(Path(self.outdir, config_fn)) - def make_pipeline_logo(self): """Fetch a logo for the new pipeline from the nf-core website""" email_logo_path = Path(self.outdir) / "assets" diff --git a/nf_core/pipelines/sync.py b/nf_core/pipelines/sync.py index fced35dc2..31152564a 100644 --- a/nf_core/pipelines/sync.py +++ b/nf_core/pipelines/sync.py @@ -273,17 +273,23 @@ def make_template_pipeline(self): yaml.safe_dump(self.config_yml.model_dump(), config_path) try: - nf_core.pipelines.create.create.PipelineCreate( + pipeline_create_obj = nf_core.pipelines.create.create.PipelineCreate( outdir=str(self.pipeline_dir), from_config_file=True, no_git=True, force=True, - ).init_pipeline() + ) + pipeline_create_obj.init_pipeline() # set force to false to avoid overwriting files in the future if self.config_yml.template is not None: + self.config_yml.template = pipeline_create_obj.config # Set force true in config to overwrite existing files self.config_yml.template.force = False + # Set outdir as the current directory to avoid local info leaking 
+ self.config_yml.template.outdir = "." + # Update nf-core version + self.config_yml.nf_core_version = nf_core.__version__ with open(self.config_yml_path, "w") as config_path: yaml.safe_dump(self.config_yml.model_dump(), config_path) diff --git a/nf_core/utils.py b/nf_core/utils.py index 7795d0d27..7fcb6026f 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1148,13 +1148,16 @@ def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Optional[Path] author=wf_config["manifest.author"].strip('"').strip("'"), version=wf_config["manifest.version"].strip('"').strip("'"), outdir=str(directory), + is_nfcore=True, ) elif "prefix" in config_template_keys or "skip" in config_template_keys: # The .nf-core.yml file contained the old prefix or skip keys nf_core_yaml_config.template = NFCoreTemplateConfig( org=tools_config["template"]["prefix"] if "prefix" in config_template_keys - else tools_config["template"]["org"] or "nf-core", + else tools_config["template"]["org"] + if "org" in config_template_keys + else "nf-core", name=tools_config["template"]["name"] if "name" in config_template_keys else wf_config["manifest.name"].strip('"').strip("'").split("/")[-1], @@ -1173,6 +1176,11 @@ def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Optional[Path] else tools_config["template"]["skip_features"] if "skip_features" in config_template_keys else None, + is_nfcore=tools_config["template"]["prefix"] == "nf-core" + if "prefix" in config_template_keys + else tools_config["template"]["org"] == "nf-core" + if "org" in config_template_keys + else True, ) log.debug("Using config file: %s", config_fn) return config_fn, nf_core_yaml_config From d0a24be2c7da106f45aafd1be763cb772e0a9fa3 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 2 Oct 2024 09:52:14 +0200 Subject: [PATCH 705/737] update yaml config only in pipeline repos --- nf_core/utils.py | 93 ++++++++++++++++++++++++------------------------ 1 file changed, 47 insertions(+), 46 deletions(-) diff --git a/nf_core/utils.py b/nf_core/utils.py 
index 7fcb6026f..4e29994ca 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1136,52 +1136,53 @@ def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Optional[Path] error_message += f"\n{error['loc'][0]}: {error['msg']}" raise AssertionError(error_message) - # Retrieve information if template from config file is empty - wf_config = fetch_wf_config(Path(directory)) - config_template_keys = tools_config["template"].keys() if "template" in tools_config else [] - if nf_core_yaml_config.template is None: - # The .nf-core.yml file did not contain template information - nf_core_yaml_config.template = NFCoreTemplateConfig( - org="nf-core", - name=wf_config["manifest.name"].strip('"').strip("'").split("/")[-1], - description=wf_config["manifest.description"].strip('"').strip("'"), - author=wf_config["manifest.author"].strip('"').strip("'"), - version=wf_config["manifest.version"].strip('"').strip("'"), - outdir=str(directory), - is_nfcore=True, - ) - elif "prefix" in config_template_keys or "skip" in config_template_keys: - # The .nf-core.yml file contained the old prefix or skip keys - nf_core_yaml_config.template = NFCoreTemplateConfig( - org=tools_config["template"]["prefix"] - if "prefix" in config_template_keys - else tools_config["template"]["org"] - if "org" in config_template_keys - else "nf-core", - name=tools_config["template"]["name"] - if "name" in config_template_keys - else wf_config["manifest.name"].strip('"').strip("'").split("/")[-1], - description=tools_config["template"]["description"] - if "description" in config_template_keys - else wf_config["manifest.description"].strip('"').strip("'"), - author=tools_config["template"]["author"] - if "author" in config_template_keys - else wf_config["manifest.author"].strip('"').strip("'"), - version=tools_config["template"]["version"] - if "version" in config_template_keys - else wf_config["manifest.version"].strip('"').strip("'"), - outdir=tools_config["template"]["outdir"] if "outdir" in 
config_template_keys else str(directory), - skip_features=tools_config["template"]["skip"] - if "skip" in config_template_keys - else tools_config["template"]["skip_features"] - if "skip_features" in config_template_keys - else None, - is_nfcore=tools_config["template"]["prefix"] == "nf-core" - if "prefix" in config_template_keys - else tools_config["template"]["org"] == "nf-core" - if "org" in config_template_keys - else True, - ) + if nf_core_yaml_config["repository_type"] == "pipeline": + # Retrieve information if template from config file is empty + wf_config = fetch_wf_config(Path(directory)) + config_template_keys = tools_config["template"].keys() if "template" in tools_config else [] + if nf_core_yaml_config.template is None: + # The .nf-core.yml file did not contain template information + nf_core_yaml_config.template = NFCoreTemplateConfig( + org="nf-core", + name=wf_config["manifest.name"].strip('"').strip("'").split("/")[-1], + description=wf_config["manifest.description"].strip('"').strip("'"), + author=wf_config["manifest.author"].strip('"').strip("'"), + version=wf_config["manifest.version"].strip('"').strip("'"), + outdir=str(directory), + is_nfcore=True, + ) + elif "prefix" in config_template_keys or "skip" in config_template_keys: + # The .nf-core.yml file contained the old prefix or skip keys + nf_core_yaml_config.template = NFCoreTemplateConfig( + org=tools_config["template"]["prefix"] + if "prefix" in config_template_keys + else tools_config["template"]["org"] + if "org" in config_template_keys + else "nf-core", + name=tools_config["template"]["name"] + if "name" in config_template_keys + else wf_config["manifest.name"].strip('"').strip("'").split("/")[-1], + description=tools_config["template"]["description"] + if "description" in config_template_keys + else wf_config["manifest.description"].strip('"').strip("'"), + author=tools_config["template"]["author"] + if "author" in config_template_keys + else 
wf_config["manifest.author"].strip('"').strip("'"), + version=tools_config["template"]["version"] + if "version" in config_template_keys + else wf_config["manifest.version"].strip('"').strip("'"), + outdir=tools_config["template"]["outdir"] if "outdir" in config_template_keys else str(directory), + skip_features=tools_config["template"]["skip"] + if "skip" in config_template_keys + else tools_config["template"]["skip_features"] + if "skip_features" in config_template_keys + else None, + is_nfcore=tools_config["template"]["prefix"] == "nf-core" + if "prefix" in config_template_keys + else tools_config["template"]["org"] == "nf-core" + if "org" in config_template_keys + else True, + ) log.debug("Using config file: %s", config_fn) return config_fn, nf_core_yaml_config From c95de5d29c6eaf1b3d6757d6bb95b3614850f471 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 2 Oct 2024 09:58:25 +0200 Subject: [PATCH 706/737] fix pytests --- nf_core/utils.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/nf_core/utils.py b/nf_core/utils.py index 4e29994ca..ef2453795 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1139,7 +1139,11 @@ def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Optional[Path] if nf_core_yaml_config["repository_type"] == "pipeline": # Retrieve information if template from config file is empty wf_config = fetch_wf_config(Path(directory)) - config_template_keys = tools_config["template"].keys() if "template" in tools_config else [] + config_template_keys = ( + tools_config["template"].keys() + if "template" in tools_config and tools_config["template"] is not None + else [] + ) if nf_core_yaml_config.template is None: # The .nf-core.yml file did not contain template information nf_core_yaml_config.template = NFCoreTemplateConfig( From 0b2e2277014125f601aff76a68afce09d48459be Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 2 Oct 2024 10:57:52 +0200 Subject: [PATCH 707/737] take into account empty directories --- 
nf_core/utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/utils.py b/nf_core/utils.py index ef2453795..e5bc42804 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1136,9 +1136,9 @@ def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Optional[Path] error_message += f"\n{error['loc'][0]}: {error['msg']}" raise AssertionError(error_message) - if nf_core_yaml_config["repository_type"] == "pipeline": + wf_config = fetch_wf_config(Path(directory)) + if nf_core_yaml_config["repository_type"] == "pipeline" and wf_config != {}: # Retrieve information if template from config file is empty - wf_config = fetch_wf_config(Path(directory)) config_template_keys = ( tools_config["template"].keys() if "template" in tools_config and tools_config["template"] is not None From b8b0c7b854320f727176bd72749231cc521dd64c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Wed, 2 Oct 2024 12:05:58 +0200 Subject: [PATCH 708/737] Update nf_core/utils.py MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Matthias Hörtenhuber --- nf_core/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/utils.py b/nf_core/utils.py index e5bc42804..06fd69ebd 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1137,7 +1137,7 @@ def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Optional[Path] raise AssertionError(error_message) wf_config = fetch_wf_config(Path(directory)) - if nf_core_yaml_config["repository_type"] == "pipeline" and wf_config != {}: + if nf_core_yaml_config["repository_type"] == "pipeline" and wf_config: # Retrieve information if template from config file is empty config_template_keys = ( tools_config["template"].keys() From 1ac4e0645f0ec72524fcff11afa23b425db0f267 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Wed, 2 Oct 2024 12:07:20 +0200 Subject: [PATCH 709/737] Update 
nf_core/utils.py MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Matthias Hörtenhuber --- nf_core/utils.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/nf_core/utils.py b/nf_core/utils.py index 06fd69ebd..5120519e5 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1139,11 +1139,7 @@ def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Optional[Path] wf_config = fetch_wf_config(Path(directory)) if nf_core_yaml_config["repository_type"] == "pipeline" and wf_config: # Retrieve information if template from config file is empty - config_template_keys = ( - tools_config["template"].keys() - if "template" in tools_config and tools_config["template"] is not None - else [] - ) + config_template_keys = tools_config.get('template', {}).keys() if nf_core_yaml_config.template is None: # The .nf-core.yml file did not contain template information nf_core_yaml_config.template = NFCoreTemplateConfig( From 97bc4ea42b5915b9e4db691c598084ba68fe2207 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 2 Oct 2024 12:08:43 +0200 Subject: [PATCH 710/737] update strip commands --- nf_core/pipelines/download.py | 2 +- nf_core/pipelines/schema.py | 4 ++-- nf_core/utils.py | 18 +++++++++--------- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/nf_core/pipelines/download.py b/nf_core/pipelines/download.py index 7018dc7b4..b9028d4b3 100644 --- a/nf_core/pipelines/download.py +++ b/nf_core/pipelines/download.py @@ -746,7 +746,7 @@ def find_container_images(self, workflow_directory: str) -> None: self.nf_config is needed, because we need to restart search over raw input if no proper container matches are found. 
""" - config_findings.append((k, v.strip('"').strip("'"), self.nf_config, "Nextflow configs")) + config_findings.append((k, v.strip("'\""), self.nf_config, "Nextflow configs")) # rectify the container paths found in the config # Raw config_findings may yield multiple containers, so better create a shallow copy of the list, since length of input and output may be different ?!? diff --git a/nf_core/pipelines/schema.py b/nf_core/pipelines/schema.py index 95ed5e5b6..3aec815c7 100644 --- a/nf_core/pipelines/schema.py +++ b/nf_core/pipelines/schema.py @@ -71,7 +71,7 @@ def _update_validation_plugin_from_config(self) -> None: else: conf = nf_core.utils.fetch_wf_config(Path(self.pipeline_dir)) - plugins = str(conf.get("plugins", "")).strip('"').strip("'").strip(" ").split(",") + plugins = str(conf.get("plugins", "")).strip("'\"").strip(" ").split(",") plugin_found = False for plugin_instance in plugins: if "nf-schema" in plugin_instance: @@ -373,7 +373,7 @@ def validate_config_default_parameter(self, param, schema_param, config_default) # If we have a default in the schema, check it matches the config if "default" in schema_param and ( (schema_param["type"] == "boolean" and str(config_default).lower() != str(schema_param["default"]).lower()) - and (str(schema_param["default"]) != str(config_default).strip('"').strip("'")) + and (str(schema_param["default"]) != str(config_default).strip("'\"")) ): # Check that we are not deferring the execution of this parameter in the schema default with squiggly brakcets if schema_param["type"] != "string" or "{" not in schema_param["default"]: diff --git a/nf_core/utils.py b/nf_core/utils.py index 5120519e5..f5444fe02 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1139,15 +1139,15 @@ def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Optional[Path] wf_config = fetch_wf_config(Path(directory)) if nf_core_yaml_config["repository_type"] == "pipeline" and wf_config: # Retrieve information if template from config file 
is empty - config_template_keys = tools_config.get('template', {}).keys() + config_template_keys = tools_config.get("template", {}).keys() if nf_core_yaml_config.template is None: # The .nf-core.yml file did not contain template information nf_core_yaml_config.template = NFCoreTemplateConfig( org="nf-core", - name=wf_config["manifest.name"].strip('"').strip("'").split("/")[-1], - description=wf_config["manifest.description"].strip('"').strip("'"), - author=wf_config["manifest.author"].strip('"').strip("'"), - version=wf_config["manifest.version"].strip('"').strip("'"), + name=wf_config["manifest.name"].strip("'\"").split("/")[-1], + description=wf_config["manifest.description"].strip("'\""), + author=wf_config["manifest.author"].strip("'\""), + version=wf_config["manifest.version"].strip("'\""), outdir=str(directory), is_nfcore=True, ) @@ -1161,16 +1161,16 @@ def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Optional[Path] else "nf-core", name=tools_config["template"]["name"] if "name" in config_template_keys - else wf_config["manifest.name"].strip('"').strip("'").split("/")[-1], + else wf_config["manifest.name"].strip("'\"").split("/")[-1], description=tools_config["template"]["description"] if "description" in config_template_keys - else wf_config["manifest.description"].strip('"').strip("'"), + else wf_config["manifest.description"].strip("'\""), author=tools_config["template"]["author"] if "author" in config_template_keys - else wf_config["manifest.author"].strip('"').strip("'"), + else wf_config["manifest.author"].strip("'\""), version=tools_config["template"]["version"] if "version" in config_template_keys - else wf_config["manifest.version"].strip('"').strip("'"), + else wf_config["manifest.version"].strip("'\""), outdir=tools_config["template"]["outdir"] if "outdir" in config_template_keys else str(directory), skip_features=tools_config["template"]["skip"] if "skip" in config_template_keys From 86f436a0857205d4bdc3e3be4d9823d76ae0fbf9 Mon 
Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 2 Oct 2024 12:15:42 +0200 Subject: [PATCH 711/737] apply comments from code review by @mashehu --- nf_core/utils.py | 36 ++++++++---------------------------- 1 file changed, 8 insertions(+), 28 deletions(-) diff --git a/nf_core/utils.py b/nf_core/utils.py index f5444fe02..dd5ca66e8 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1154,34 +1154,14 @@ def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Optional[Path] elif "prefix" in config_template_keys or "skip" in config_template_keys: # The .nf-core.yml file contained the old prefix or skip keys nf_core_yaml_config.template = NFCoreTemplateConfig( - org=tools_config["template"]["prefix"] - if "prefix" in config_template_keys - else tools_config["template"]["org"] - if "org" in config_template_keys - else "nf-core", - name=tools_config["template"]["name"] - if "name" in config_template_keys - else wf_config["manifest.name"].strip("'\"").split("/")[-1], - description=tools_config["template"]["description"] - if "description" in config_template_keys - else wf_config["manifest.description"].strip("'\""), - author=tools_config["template"]["author"] - if "author" in config_template_keys - else wf_config["manifest.author"].strip("'\""), - version=tools_config["template"]["version"] - if "version" in config_template_keys - else wf_config["manifest.version"].strip("'\""), - outdir=tools_config["template"]["outdir"] if "outdir" in config_template_keys else str(directory), - skip_features=tools_config["template"]["skip"] - if "skip" in config_template_keys - else tools_config["template"]["skip_features"] - if "skip_features" in config_template_keys - else None, - is_nfcore=tools_config["template"]["prefix"] == "nf-core" - if "prefix" in config_template_keys - else tools_config["template"]["org"] == "nf-core" - if "org" in config_template_keys - else True, + org=tools_config["template"].get("prefix", tools_config["template"].get("org", "nf-core")), + 
name=tools_config["template"].get("name", wf_config["manifest.name"].strip("'\"").split("/")[-1]), + description=tools_config["template"].get("description", wf_config["manifest.description"].strip("'\"")), + author=tools_config["template"].get("author", wf_config["manifest.author"].strip("'\"")), + version=tools_config["template"].get("version", wf_config["manifest.version"].strip("'\"")), + outdir=tools_config["template"].get("outdir", str(directory)), + skip_features=tools_config["template"].get("skip", tools_config["template"].get("skip_features")), + is_nfcore=tools_config["template"].get("prefix", tools_config["template"].get("org")) == "nf-core", ) log.debug("Using config file: %s", config_fn) From 26299f0082debced834a1d8f6d301a1c3f4d365f Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Wed, 2 Oct 2024 14:40:03 +0200 Subject: [PATCH 712/737] more code suggestions, thanks @mashehu --- nf_core/utils.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nf_core/utils.py b/nf_core/utils.py index dd5ca66e8..3f71de8d1 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1139,7 +1139,8 @@ def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Optional[Path] wf_config = fetch_wf_config(Path(directory)) if nf_core_yaml_config["repository_type"] == "pipeline" and wf_config: # Retrieve information if template from config file is empty - config_template_keys = tools_config.get("template", {}).keys() + template = tools_config.get("template") + config_template_keys = template.keys() if template is not None else [] if nf_core_yaml_config.template is None: # The .nf-core.yml file did not contain template information nf_core_yaml_config.template = NFCoreTemplateConfig( From d52969b323c42d20cafb482980ab2a29ebd86efb Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 2 Oct 2024 12:57:44 +0000 Subject: [PATCH 713/737] Update gitpod/workspace-base Docker digest to 2cc134f --- 
nf_core/gitpod/gitpod.Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/gitpod/gitpod.Dockerfile b/nf_core/gitpod/gitpod.Dockerfile index c462c6a47..2a9fbb0ed 100644 --- a/nf_core/gitpod/gitpod.Dockerfile +++ b/nf_core/gitpod/gitpod.Dockerfile @@ -2,7 +2,7 @@ # docker build -t gitpod:test -f nf_core/gitpod/gitpod.Dockerfile . # See https://docs.renovatebot.com/docker/#digest-pinning for why a digest is used. -FROM gitpod/workspace-base@sha256:f189a4195c3861365356f9c1b438ab26fd88e1ff46ce2843afc62861fc982e0c +FROM gitpod/workspace-base@sha256:2cc134fe5bd7d8fdbe44cab294925d4bc6d2d178d94624f4c376584a22d1f7b6 USER root From 93c2def991768265bc35ebc789235f9bf55698d8 Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 3 Oct 2024 14:57:32 +0200 Subject: [PATCH 714/737] fix running prettier on rendered template --- nf_core/pipelines/create/create.py | 2 +- nf_core/pipelines/lint_utils.py | 30 +++++++++++++++++++++++------- nf_core/pipelines/sync.py | 4 ++-- 3 files changed, 26 insertions(+), 10 deletions(-) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index b23dc27e0..8ab547c1c 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -368,7 +368,7 @@ def render_template(self) -> None: log.debug(f"Dumping pipeline template yml to pipeline config file '{config_fn.name}'") # Run prettier on files - run_prettier_on_file(self.outdir) + run_prettier_on_file([str(f) for f in self.outdir.glob("**/*")]) def fix_linting(self): """ diff --git a/nf_core/pipelines/lint_utils.py b/nf_core/pipelines/lint_utils.py index ff65fb0e5..b4c56c600 100644 --- a/nf_core/pipelines/lint_utils.py +++ b/nf_core/pipelines/lint_utils.py @@ -2,9 +2,10 @@ import logging import subprocess from pathlib import Path -from typing import List +from typing import List, Union import rich +import yaml from rich.console import Console from rich.table import Table @@ -69,7 +70,7 @@ def print_fixes(lint_obj): ) 
-def run_prettier_on_file(file): +def run_prettier_on_file(file: Union[Path, str, List[str]]) -> None: """Run the pre-commit hook prettier on a file. Args: @@ -80,12 +81,15 @@ def run_prettier_on_file(file): """ nf_core_pre_commit_config = Path(nf_core.__file__).parent / ".pre-commit-prettier-config.yaml" + args = ["pre-commit", "run", "--config", str(nf_core_pre_commit_config), "prettier"] + if isinstance(file, List): + args.extend(["--files", *file]) + else: + args.extend(["--files", str(file)]) + try: - subprocess.run( - ["pre-commit", "run", "--config", nf_core_pre_commit_config, "prettier", "--files", file], - capture_output=True, - check=True, - ) + subprocess.run(args, capture_output=True, check=True) + log.debug(f"${subprocess.STDOUT}") except subprocess.CalledProcessError as e: if ": SyntaxError: " in e.stdout.decode(): log.critical(f"Can't format {file} because it has a syntax error.\n{e.stdout.decode()}") @@ -111,6 +115,18 @@ def dump_json_with_prettier(file_name, file_content): run_prettier_on_file(file_name) +def dump_yaml_with_prettier(file_name: Union[Path, str], file_content: dict) -> None: + """Dump a YAML file and run prettier on it. + + Args: + file_name (Path | str): A file identifier as a string or pathlib.Path. + file_content (dict): Content to dump into the YAML file + """ + with open(file_name, "w") as fh: + yaml.safe_dump(file_content, fh) + run_prettier_on_file(file_name) + + def ignore_file(lint_name: str, file_path: Path, dir_path: Path) -> List[List[str]]: """Ignore a file and add the result to the ignored list. 
Return the passed, failed, ignored and ignore_configs lists.""" diff --git a/nf_core/pipelines/sync.py b/nf_core/pipelines/sync.py index 31152564a..12b29f15e 100644 --- a/nf_core/pipelines/sync.py +++ b/nf_core/pipelines/sync.py @@ -21,6 +21,7 @@ import nf_core.pipelines.create.create import nf_core.pipelines.list import nf_core.utils +from nf_core.pipelines.lint_utils import dump_yaml_with_prettier log = logging.getLogger(__name__) @@ -290,8 +291,7 @@ def make_template_pipeline(self): self.config_yml.template.outdir = "." # Update nf-core version self.config_yml.nf_core_version = nf_core.__version__ - with open(self.config_yml_path, "w") as config_path: - yaml.safe_dump(self.config_yml.model_dump(), config_path) + dump_yaml_with_prettier(self.config_yml_path, self.config_yml.model_dump()) except Exception as err: # Reset to where you were to prevent git getting messed up. From 320037f9c88e9b888c92dab64cf7241fbb692688 Mon Sep 17 00:00:00 2001 From: mashehu Date: Fri, 4 Oct 2024 08:55:47 +0200 Subject: [PATCH 715/737] sync: copy modules.json to TEMPLATE branch --- nf_core/pipelines/sync.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/nf_core/pipelines/sync.py b/nf_core/pipelines/sync.py index 12b29f15e..c1dc8768a 100644 --- a/nf_core/pipelines/sync.py +++ b/nf_core/pipelines/sync.py @@ -18,6 +18,8 @@ from git import GitCommandError, InvalidGitRepositoryError import nf_core +import nf_core.modules +import nf_core.modules.install import nf_core.pipelines.create.create import nf_core.pipelines.list import nf_core.utils @@ -137,6 +139,7 @@ def sync(self): self.checkout_template_branch() self.delete_template_branch_files() self.make_template_pipeline() + self.copy_components_to_template() self.commit_template_changes() if not self.made_changes and self.force_pr: @@ -298,6 +301,16 @@ def make_template_pipeline(self): self.repo.git.reset("--hard") raise SyncExceptionError(f"Failed to rebuild pipeline from template with error:\n{err}") + def 
copy_components_to_template(self) -> None: + """ + Copy the modules.json file from the current pipeline to the TEMPLATE branch using git checkout + """ + log.info("Copying modules.json to TEMPLATE branch") + try: + self.repo.git.checkout(self.original_branch, "--", "modules.json") + except GitCommandError as e: + raise SyncExceptionError(f"Could not copy modules.json to TEMPLATE branch:\n{e}") + def commit_template_changes(self): """If we have any changes with the new template files, make a git commit""" # Check that we have something to commit From 8192ca6373fc14f5614397a3d1dc986dfc119bd3 Mon Sep 17 00:00:00 2001 From: mashehu Date: Fri, 4 Oct 2024 09:04:58 +0200 Subject: [PATCH 716/737] remove unused imports --- nf_core/pipelines/sync.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/nf_core/pipelines/sync.py b/nf_core/pipelines/sync.py index c1dc8768a..31cd26dd6 100644 --- a/nf_core/pipelines/sync.py +++ b/nf_core/pipelines/sync.py @@ -18,8 +18,6 @@ from git import GitCommandError, InvalidGitRepositoryError import nf_core -import nf_core.modules -import nf_core.modules.install import nf_core.pipelines.create.create import nf_core.pipelines.list import nf_core.utils From 89a41eba2cc8acb212b62a360e8cf2b4d0b19787 Mon Sep 17 00:00:00 2001 From: mashehu Date: Fri, 4 Oct 2024 09:28:44 +0200 Subject: [PATCH 717/737] handle non-existent modules.json file more gracefully --- nf_core/pipelines/sync.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/nf_core/pipelines/sync.py b/nf_core/pipelines/sync.py index 31cd26dd6..c6d0218cc 100644 --- a/nf_core/pipelines/sync.py +++ b/nf_core/pipelines/sync.py @@ -307,7 +307,12 @@ def copy_components_to_template(self) -> None: try: self.repo.git.checkout(self.original_branch, "--", "modules.json") except GitCommandError as e: - raise SyncExceptionError(f"Could not copy modules.json to TEMPLATE branch:\n{e}") + # don't raise an error if the file doesn't exist + if "did not match any file(s) known to git" in 
str(e): + log.info("No modules.json file found in current pipeline - not copying") + return + else: + raise SyncExceptionError(f"Could not copy modules.json to TEMPLATE branch:\n{e}") def commit_template_changes(self): """If we have any changes with the new template files, make a git commit""" From 375fad3df8ea5c235a3dfb9ee44b467cd94fd208 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 4 Oct 2024 11:11:40 +0200 Subject: [PATCH 718/737] fix template new lines --- nf_core/pipeline-template/.editorconfig | 8 ++++---- nf_core/pipeline-template/CITATIONS.md | 6 ++++-- nf_core/pipeline-template/README.md | 2 +- 3 files changed, 9 insertions(+), 7 deletions(-) diff --git a/nf_core/pipeline-template/.editorconfig b/nf_core/pipeline-template/.editorconfig index 5145366e5..1db986f5b 100644 --- a/nf_core/pipeline-template/.editorconfig +++ b/nf_core/pipeline-template/.editorconfig @@ -11,7 +11,7 @@ indent_style = space [*.{md,yml,yaml,html,css,scss,js}] indent_size = 2 -{%- if modules %} +{% if modules %} # These files are edited and tested upstream in nf-core/modules [/modules/nf-core/**] charset = unset @@ -25,12 +25,12 @@ end_of_line = unset insert_final_newline = unset trim_trailing_whitespace = unset indent_style = unset -{%- endif %} +{% endif %} -{%- if email %} +{% if email %} [/assets/email*] indent_size = unset -{%- endif %} +{% endif %} # ignore python and markdown [*.{py,md}] diff --git a/nf_core/pipeline-template/CITATIONS.md b/nf_core/pipeline-template/CITATIONS.md index 2373f1de7..01e97c141 100644 --- a/nf_core/pipeline-template/CITATIONS.md +++ b/nf_core/pipeline-template/CITATIONS.md @@ -15,10 +15,12 @@ {% if fastqc %}- [FastQC](https://www.bioinformatics.babraham.ac.uk/projects/fastqc/) > Andrews, S. (2010). FastQC: A Quality Control Tool for High Throughput Sequence Data [Online]. 
-> {% endif %} > {% if multiqc %}- [MultiQC](https://pubmed.ncbi.nlm.nih.gov/27312411/) +> {% endif %} + +{% if multiqc %}- [MultiQC](https://pubmed.ncbi.nlm.nih.gov/27312411/) > Ewels P, Magnusson M, Lundin S, Käller M. MultiQC: summarize analysis results for multiple tools and samples in a single report. Bioinformatics. 2016 Oct 1;32(19):3047-8. doi: 10.1093/bioinformatics/btw354. Epub 2016 Jun 16. PubMed PMID: 27312411; PubMed Central PMCID: PMC5039924. -> {%- endif %} +> {% endif %} ## Software packaging/containerisation tools diff --git a/nf_core/pipeline-template/README.md b/nf_core/pipeline-template/README.md index 433145428..bcf159f46 100644 --- a/nf_core/pipeline-template/README.md +++ b/nf_core/pipeline-template/README.md @@ -7,7 +7,7 @@ -{%- else %} +{% else %} # {{ name }} From 80cca2fd13ae1d872c31435300f2b68c0db7790c Mon Sep 17 00:00:00 2001 From: mashehu Date: Fri, 4 Oct 2024 11:29:02 +0200 Subject: [PATCH 719/737] add pydantic autodoc --- docs/api/_src/conf.py | 2 +- docs/api/requirements.txt | 1 + nf_core/utils.py | 20 ++++++++++++++++++++ 3 files changed, 22 insertions(+), 1 deletion(-) diff --git a/docs/api/_src/conf.py b/docs/api/_src/conf.py index bfdbd7888..729cf6ba3 100644 --- a/docs/api/_src/conf.py +++ b/docs/api/_src/conf.py @@ -40,7 +40,7 @@ # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. -extensions = ["myst_parser", "sphinx.ext.autodoc", "sphinx.ext.napoleon"] +extensions = ["myst_parser", "sphinx.ext.autodoc", "sphinx.ext.napoleon", "sphinxcontrib.autodoc_pydantic"] # Add any paths that contain templates here, relative to this directory. 
templates_path = ["./_templates"] diff --git a/docs/api/requirements.txt b/docs/api/requirements.txt index abffe3074..1d23f0b27 100644 --- a/docs/api/requirements.txt +++ b/docs/api/requirements.txt @@ -1,3 +1,4 @@ +autodoc_pydantic Sphinx>=3.3.1 sphinxcontrib-napoleon sphinx-markdown-builder diff --git a/nf_core/utils.py b/nf_core/utils.py index 3f71de8d1..7e3a4671d 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1050,15 +1050,26 @@ def get_repo_releases_branches(pipeline, wfs): class NFCoreTemplateConfig(BaseModel): + """Template configuration schema""" + org: Optional[str] = None + """ Organisation name """ name: Optional[str] = None + """ Pipeline name """ description: Optional[str] = None + """ Pipeline description """ author: Optional[str] = None + """ Pipeline author """ version: Optional[str] = None + """ Pipeline version """ force: Optional[bool] = True + """ Force overwrite of existing files """ outdir: Optional[Union[str, Path]] = None + """ Output directory """ skip_features: Optional[list] = None + """ Skip features. See https://nf-co.re/docs/nf-core-tools/pipelines/create for a list of features. """ is_nfcore: Optional[bool] = None + """ Whether the pipeline is an nf-core pipeline. 
""" # convert outdir to str @field_validator("outdir") @@ -1081,13 +1092,22 @@ def get(self, item: str, default: Any = None) -> Any: class NFCoreYamlConfig(BaseModel): + """.nf-core.yml configuration file schema""" + repository_type: str + """ Type of repository: pipeline or modules """ nf_core_version: Optional[str] = None + """ Version of nf-core/tools used to create/update the pipeline""" org_path: Optional[str] = None + """ Path to the organisation's nf-core pipelines repository """ lint: Optional[LintConfigType] = None + """ Linting configuration """ template: Optional[NFCoreTemplateConfig] = None + """ Template configuration """ bump_version: Optional[Dict[str, bool]] = None + """ Disable bumping of the version for a module/subworkflow (when repository_type is modules). """ update: Optional[Dict[str, Union[str, bool, Dict[str, Union[str, Dict[str, Union[str, bool]]]]]]] = None + """ Disable updating specific modules/subworkflows (when repository_type is pipeline).""" def __getitem__(self, item: str) -> Any: return getattr(self, item) From f063bf5db97ef3eb722d22bdd1d382d3943446c7 Mon Sep 17 00:00:00 2001 From: mashehu Date: Fri, 4 Oct 2024 11:44:33 +0200 Subject: [PATCH 720/737] render also utils.py docs --- docs/api/_src/api/utils.md | 9 +++++++++ 1 file changed, 9 insertions(+) create mode 100644 docs/api/_src/api/utils.md diff --git a/docs/api/_src/api/utils.md b/docs/api/_src/api/utils.md new file mode 100644 index 000000000..1353f97ef --- /dev/null +++ b/docs/api/_src/api/utils.md @@ -0,0 +1,9 @@ +# nf_core.utils + +```{eval-rst} +.. 
automodule:: nf_core.utils + :members: + :undoc-members: + :show-inheritance: + :private-members: +``` From 4ea365085be4e8a9ef4ad8d21dba2c39499d49fd Mon Sep 17 00:00:00 2001 From: mashehu Date: Fri, 4 Oct 2024 12:00:11 +0200 Subject: [PATCH 721/737] include review comments and links to docs MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Júlia Mir Pedrol --- nf_core/utils.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/nf_core/utils.py b/nf_core/utils.py index 7e3a4671d..068da22de 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1099,15 +1099,15 @@ class NFCoreYamlConfig(BaseModel): nf_core_version: Optional[str] = None """ Version of nf-core/tools used to create/update the pipeline""" org_path: Optional[str] = None - """ Path to the organisation's nf-core pipelines repository """ + """ Path to the organisation's modules repository (used for modules repo_type only) """ lint: Optional[LintConfigType] = None - """ Linting configuration """ + """ Pipeline linting configuration, see https://nf-co.re/docs/nf-core-tools/pipelines/lint#linting-config for examples and documentation """ template: Optional[NFCoreTemplateConfig] = None - """ Template configuration """ + """ Pipeline template configuration """ bump_version: Optional[Dict[str, bool]] = None - """ Disable bumping of the version for a module/subworkflow (when repository_type is modules). """ + """ Disable bumping of the version for a module/subworkflow (when repository_type is modules). See https://nf-co.re/docs/nf-core-tools/modules/bump-versions for more information.""" update: Optional[Dict[str, Union[str, bool, Dict[str, Union[str, Dict[str, Union[str, bool]]]]]]] = None - """ Disable updating specific modules/subworkflows (when repository_type is pipeline).""" + """ Disable updating specific modules/subworkflows (when repository_type is pipeline). 
See https://nf-co.re/docs/nf-core-tools/modules/update for more information.""" def __getitem__(self, item: str) -> Any: return getattr(self, item) From 46b998b50b0b7625cec269bae7d6182e379788ba Mon Sep 17 00:00:00 2001 From: mashehu Date: Fri, 4 Oct 2024 12:19:48 +0200 Subject: [PATCH 722/737] install modules in TEMPLATE branch --- nf_core/modules/modules_json.py | 2 +- nf_core/pipelines/sync.py | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 536589d81..05c64b6de 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -432,7 +432,7 @@ def move_component_to_local(self, component_type: str, component: str, repo_name to_name += f"-{datetime.datetime.now().strftime('%y%m%d%H%M%S')}" shutil.move(str(current_path), local_dir / to_name) - def unsynced_components(self) -> Tuple[List[str], List[str], dict]: + def unsynced_components(self) -> Tuple[List[str], List[str], Dict]: """ Compute the difference between the modules/subworkflows in the directory and the modules/subworkflows in the 'modules.json' file. 
This is done by looking at all diff --git a/nf_core/pipelines/sync.py b/nf_core/pipelines/sync.py index c6d0218cc..d573f8866 100644 --- a/nf_core/pipelines/sync.py +++ b/nf_core/pipelines/sync.py @@ -18,6 +18,7 @@ from git import GitCommandError, InvalidGitRepositoryError import nf_core +import nf_core.modules.modules_json import nf_core.pipelines.create.create import nf_core.pipelines.list import nf_core.utils @@ -306,6 +307,7 @@ def copy_components_to_template(self) -> None: log.info("Copying modules.json to TEMPLATE branch") try: self.repo.git.checkout(self.original_branch, "--", "modules.json") + nf_core.modules.modules_json.ModulesJson(self.pipeline_dir).check_up_to_date() except GitCommandError as e: # don't raise an error if the file doesn't exist if "did not match any file(s) known to git" in str(e): From fa1a01dfddc12306832a7f1db459833b11e5100f Mon Sep 17 00:00:00 2001 From: mashehu Date: Fri, 4 Oct 2024 15:29:32 +0200 Subject: [PATCH 723/737] remove modules.json copy step in sync --- nf_core/pipelines/sync.py | 18 ------------------ 1 file changed, 18 deletions(-) diff --git a/nf_core/pipelines/sync.py b/nf_core/pipelines/sync.py index d573f8866..12b29f15e 100644 --- a/nf_core/pipelines/sync.py +++ b/nf_core/pipelines/sync.py @@ -18,7 +18,6 @@ from git import GitCommandError, InvalidGitRepositoryError import nf_core -import nf_core.modules.modules_json import nf_core.pipelines.create.create import nf_core.pipelines.list import nf_core.utils @@ -138,7 +137,6 @@ def sync(self): self.checkout_template_branch() self.delete_template_branch_files() self.make_template_pipeline() - self.copy_components_to_template() self.commit_template_changes() if not self.made_changes and self.force_pr: @@ -300,22 +298,6 @@ def make_template_pipeline(self): self.repo.git.reset("--hard") raise SyncExceptionError(f"Failed to rebuild pipeline from template with error:\n{err}") - def copy_components_to_template(self) -> None: - """ - Copy the modules.json file from the current 
pipeline to the TEMPLATE branch using git checkout - """ - log.info("Copying modules.json to TEMPLATE branch") - try: - self.repo.git.checkout(self.original_branch, "--", "modules.json") - nf_core.modules.modules_json.ModulesJson(self.pipeline_dir).check_up_to_date() - except GitCommandError as e: - # don't raise an error if the file doesn't exist - if "did not match any file(s) known to git" in str(e): - log.info("No modules.json file found in current pipeline - not copying") - return - else: - raise SyncExceptionError(f"Could not copy modules.json to TEMPLATE branch:\n{e}") - def commit_template_changes(self): """If we have any changes with the new template files, make a git commit""" # Check that we have something to commit From d6cd2738bc16516a6285895207a72c8f61a31d8f Mon Sep 17 00:00:00 2001 From: Matthias Zepper Date: Thu, 26 Sep 2024 11:50:14 +0200 Subject: [PATCH 724/737] Improve the Github Action to test offline support better. --- .../.github/workflows/download_pipeline.yml | 35 ++++++++++++++++--- 1 file changed, 31 insertions(+), 4 deletions(-) diff --git a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml index f704609ca..f3936f5e0 100644 --- a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml +++ b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml @@ -54,33 +54,60 @@ jobs: echo "REPOTITLE_LOWERCASE=$(basename ${GITHUB_REPOSITORY,,})" >> ${GITHUB_ENV} echo "{% raw %}REPO_BRANCH=${{ github.event.inputs.testbranch || 'dev' }}" >> ${GITHUB_ENV} + - name: Make a cache directory for the container images + run: | + mkdir -p ./singularity_container_images + - name: Download the pipeline env: - NXF_SINGULARITY_CACHEDIR: ./ + NXF_SINGULARITY_CACHEDIR: ./singularity_container_images run: | nf-core pipelines download ${{ env.REPO_LOWERCASE }} \ --revision ${{ env.REPO_BRANCH }} \ --outdir ./${{ env.REPOTITLE_LOWERCASE }} \ --compress "none" \ 
--container-system 'singularity' \ - --container-library "quay.io" -l "docker.io" -l "ghcr.io" \ + --container-library "quay.io" -l "docker.io" -l "ghcr.io" -l "community.wave.seqera.io" \ --container-cache-utilisation 'amend' \ --download-configuration 'yes' - name: Inspect download run: tree ./${{ env.REPOTITLE_LOWERCASE }}{% endraw %}{% if test_config %}{% raw %} + - name: Count the downloaded number of container images + id: count_initial + run: | + image_count=$(ls -1 ./singularity_container_images | wc -l) + echo "Initial container image count: $image_count" + echo "IMAGE_COUNT_INITIAL=$image_count" >> ${GITHUB_ENV} + - name: Run the downloaded pipeline (stub) id: stub_run_pipeline continue-on-error: true env: - NXF_SINGULARITY_CACHEDIR: ./ + NXF_SINGULARITY_CACHEDIR: ./singularity_container_images NXF_SINGULARITY_HOME_MOUNT: true run: nextflow run ./${{ env.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ env.REPO_BRANCH }}) -stub -profile test,singularity --outdir ./results - name: Run the downloaded pipeline (stub run not supported) id: run_pipeline if: ${{ job.steps.stub_run_pipeline.status == failure() }} env: - NXF_SINGULARITY_CACHEDIR: ./ + NXF_SINGULARITY_CACHEDIR: ./singularity_container_images NXF_SINGULARITY_HOME_MOUNT: true run: nextflow run ./${{ env.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ env.REPO_BRANCH }}) -profile test,singularity --outdir ./results{% endraw %}{% endif %} + + - name: Count the downloaded number of container images + id: count_afterwards + run: | + image_count=$(ls -1 ./singularity_container_images | wc -l) + echo "Post-pipeline container image count: $image_count" + echo "IMAGE_COUNT_AFTER=$image_count" >> ${GITHUB_ENV} + + - name: Compare container image counts + run: | + if [ "${{ env.IMAGE_COUNT_INITIAL }}" -ne "${{ env.IMAGE_COUNT_AFTER }}" ]; then + echo "The number of container images has changed. The pipeline has no support for offline runs!" 
+ exit 1 + else + echo "The pipeline can be downloaded successfully!" + fi From ca5eaa5c36f1dd4c64544313545bf854786dd16d Mon Sep 17 00:00:00 2001 From: Matthias Zepper Date: Thu, 26 Sep 2024 12:09:21 +0200 Subject: [PATCH 725/737] Add raw statements for template and update Singularity version in runner. --- .../.github/workflows/download_pipeline.yml | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml index f3936f5e0..406ef3bdd 100644 --- a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml +++ b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml @@ -39,9 +39,11 @@ jobs: with: python-version: "3.12" architecture: "x64" - - uses: eWaterCycle/setup-singularity@931d4e31109e875b13309ae1d07c70ca8fbc8537 # v7 + + - name: Setup Apptainer + uses: eWaterCycle/setup-apptainer@4bb22c52d4f63406c49e94c804632975787312b3 # v2.0.0 with: - singularity-version: 3.8.3 + apptainer-version: 1.3.4 - name: Install dependencies run: | @@ -77,7 +79,7 @@ jobs: - name: Count the downloaded number of container images id: count_initial run: | - image_count=$(ls -1 ./singularity_container_images | wc -l) + image_count=$(ls -1 ./singularity_container_images | wc -l | xargs) echo "Initial container image count: $image_count" echo "IMAGE_COUNT_INITIAL=$image_count" >> ${GITHUB_ENV} @@ -99,15 +101,18 @@ jobs: - name: Count the downloaded number of container images id: count_afterwards run: | - image_count=$(ls -1 ./singularity_container_images | wc -l) - echo "Post-pipeline container image count: $image_count" + image_count=$(ls -1 ./singularity_container_images | wc -l | xargs) + echo "Post-pipeline run container image count: $image_count" echo "IMAGE_COUNT_AFTER=$image_count" >> ${GITHUB_ENV} - name: Compare container image counts run: | + {% raw %} if [ "${{ env.IMAGE_COUNT_INITIAL }}" -ne "${{ 
env.IMAGE_COUNT_AFTER }}" ]; then - echo "The number of container images has changed. The pipeline has no support for offline runs!" + echo "$(expr ${{ env.IMAGE_COUNT_AFTER }}-${{ env.IMAGE_COUNT_INITIAL }}) additional container images were \n downloaded at runtime . The pipeline has no support for offline runs!" + tree ./singularity_container_images exit 1 else echo "The pipeline can be downloaded successfully!" fi + {% endraw %} From 5c39ab8aa0ab0f7ed459ae0566c7e79312e23c50 Mon Sep 17 00:00:00 2001 From: Matthias Zepper Date: Thu, 26 Sep 2024 13:15:15 +0200 Subject: [PATCH 726/737] Fix container image count calculation. --- .../.github/workflows/download_pipeline.yml | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml index 406ef3bdd..29a9e8b5d 100644 --- a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml +++ b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml @@ -69,7 +69,7 @@ jobs: --outdir ./${{ env.REPOTITLE_LOWERCASE }} \ --compress "none" \ --container-system 'singularity' \ - --container-library "quay.io" -l "docker.io" -l "ghcr.io" -l "community.wave.seqera.io" \ + --container-library "quay.io" -l "docker.io" -l "community.wave.seqera.io" \ --container-cache-utilisation 'amend' \ --download-configuration 'yes' @@ -109,7 +109,10 @@ jobs: run: | {% raw %} if [ "${{ env.IMAGE_COUNT_INITIAL }}" -ne "${{ env.IMAGE_COUNT_AFTER }}" ]; then - echo "$(expr ${{ env.IMAGE_COUNT_AFTER }}-${{ env.IMAGE_COUNT_INITIAL }}) additional container images were \n downloaded at runtime . The pipeline has no support for offline runs!" + initial_count=${{ env.IMAGE_COUNT_INITIAL }} + final_count=${{ env.IMAGE_COUNT_AFTER }} + difference=$((final_count - initial_count)) + echo "$difference additional container images were \n downloaded at runtime . The pipeline has no support for offline runs!" 
tree ./singularity_container_images exit 1 else From 8feb89b770ebee56a34d1d419e6fbd44189c0e0f Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Fri, 4 Oct 2024 14:11:53 +0000 Subject: [PATCH 727/737] chore(deps): update pre-commit hook astral-sh/ruff-pre-commit to v0.6.9 --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7cbde0c86..67aa3204c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.6.0 + rev: v0.6.9 hooks: - id: ruff # linter args: [--fix, --exit-non-zero-on-fix] # sort imports and fix From 89961d6d62f7c1c4ba64181efa1169a52eb0e670 Mon Sep 17 00:00:00 2001 From: Matthias Zepper Date: Fri, 4 Oct 2024 16:17:42 +0200 Subject: [PATCH 728/737] Tidy Jinja template markup. --- CHANGELOG.md | 1 + .../.github/workflows/download_pipeline.yml | 7 +++---- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 12a0c9911..717e82a48 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -43,6 +43,7 @@ - Remove if/else block to include `igenomes.config` ([#3168](https://github.com/nf-core/tools/pull/3168)) - Fixed release announcement hashtags for Mastodon ([#3099](https://github.com/nf-core/tools/pull/3176)) - Remove try/catch blocks from `nextflow.config` ([#3167](https://github.com/nf-core/tools/pull/3167)) +- Extend `download_pipeline.yml` to count pre-downloaded container images. 
([#3182](https://github.com/nf-core/tools/pull/3182)) ### Linting diff --git a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml index 29a9e8b5d..8d70f3896 100644 --- a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml +++ b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml @@ -74,7 +74,7 @@ jobs: --download-configuration 'yes' - name: Inspect download - run: tree ./${{ env.REPOTITLE_LOWERCASE }}{% endraw %}{% if test_config %}{% raw %} + run: tree ./${{ env.REPOTITLE_LOWERCASE }}{% if test_config %} - name: Count the downloaded number of container images id: count_initial @@ -96,7 +96,7 @@ jobs: env: NXF_SINGULARITY_CACHEDIR: ./singularity_container_images NXF_SINGULARITY_HOME_MOUNT: true - run: nextflow run ./${{ env.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ env.REPO_BRANCH }}) -profile test,singularity --outdir ./results{% endraw %}{% endif %} + run: nextflow run ./${{ env.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ env.REPO_BRANCH }}) -profile test,singularity --outdir ./results - name: Count the downloaded number of container images id: count_afterwards @@ -107,7 +107,6 @@ jobs: - name: Compare container image counts run: | - {% raw %} if [ "${{ env.IMAGE_COUNT_INITIAL }}" -ne "${{ env.IMAGE_COUNT_AFTER }}" ]; then initial_count=${{ env.IMAGE_COUNT_INITIAL }} final_count=${{ env.IMAGE_COUNT_AFTER }} @@ -118,4 +117,4 @@ jobs: else echo "The pipeline can be downloaded successfully!" fi - {% endraw %} + {% endif %}{% endraw %} From 664fbfab163c9b21ef303baa0a34be3965e212a2 Mon Sep 17 00:00:00 2001 From: Matthias Zepper Date: Fri, 4 Oct 2024 16:34:54 +0200 Subject: [PATCH 729/737] The raw statements must not wrap other Jinja markup. 
--- .../pipeline-template/.github/workflows/download_pipeline.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml index 8d70f3896..29b994754 100644 --- a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml +++ b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml @@ -74,7 +74,7 @@ jobs: --download-configuration 'yes' - name: Inspect download - run: tree ./${{ env.REPOTITLE_LOWERCASE }}{% if test_config %} + run: tree ./${{ env.REPOTITLE_LOWERCASE }}{% endraw %}{% if test_config %}{% raw %} - name: Count the downloaded number of container images id: count_initial @@ -117,4 +117,4 @@ jobs: else echo "The pipeline can be downloaded successfully!" fi - {% endif %}{% endraw %} + {% endraw %}{% endif %} From fc9c0010709455a5bbcc70aa51cf21d8d23d5798 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Fri, 4 Oct 2024 20:36:21 +0000 Subject: [PATCH 730/737] chore(deps): update python:3.12-slim docker digest to af4e85f --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index fb1a86793..8269e9570 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.12-slim@sha256:59c7332a4a24373861c4a5f0eec2c92b87e3efeb8ddef011744ef9a751b1d11c +FROM python:3.12-slim@sha256:af4e85f1cac90dd3771e47292ea7c8a9830abfabbe4faa5c53f158854c2e819d LABEL authors="phil.ewels@seqera.io,erik.danielsson@scilifelab.se" \ description="Docker image containing requirements for nf-core/tools" From aa158204e13e42a9f832916ee0a9fc860e409bb1 Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 7 Oct 2024 10:00:28 +0200 Subject: [PATCH 731/737] add pipeline name as an optional sync input --- .github/workflows/sync.yml | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/.github/workflows/sync.yml 
b/.github/workflows/sync.yml index 55880e813..c86d79d78 100644 --- a/.github/workflows/sync.yml +++ b/.github/workflows/sync.yml @@ -19,6 +19,10 @@ on: description: "Force a PR to be created" type: boolean default: false + pipeline: + description: "Pipeline to sync" + type: string + default: "all" # Cancel if a newer run is started concurrency: @@ -35,6 +39,14 @@ jobs: run: | if [ "${{ github.event.inputs.testpipeline }}" == "true" ]; then echo '{"pipeline":["testpipeline"]}' > pipeline_names.json + elif [ "${{ github.event.inputs.pipeline }}" != "all" ]; then + curl -O https://nf-co.re/pipeline_names.json + # check if the pipeline exists + if ! grep -q "\"${{ github.event.inputs.pipeline }}\"" pipeline_names.json; then + echo "Pipeline ${{ github.event.inputs.pipeline }} does not exist" + exit 1 + fi + echo '{"pipeline":["${{ github.event.inputs.pipeline }}"]}' > pipeline_names.json else curl -O https://nf-co.re/pipeline_names.json fi From 962f4b3f137866527cecb820eea0daf75aba02d0 Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 7 Oct 2024 11:01:23 +0200 Subject: [PATCH 732/737] fix for multiqc_config template for released pipelines --- nf_core/pipeline-template/assets/multiqc_config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/pipeline-template/assets/multiqc_config.yml b/nf_core/pipeline-template/assets/multiqc_config.yml index cd4e539b3..e6fd87898 100644 --- a/nf_core/pipeline-template/assets/multiqc_config.yml +++ b/nf_core/pipeline-template/assets/multiqc_config.yml @@ -3,11 +3,11 @@ report_comment: > This report has been generated by the
    {{ name }} analysis pipeline.{% if is_nfcore %} For information about how to interpret these results, please see the documentation.{% endif %} - {%- else %} + {%- else -%} This report has been generated by the {{ name }} analysis pipeline.{% if is_nfcore %} For information about how to interpret these results, please see the documentation.{% endif %} - {% endif %} + {%- endif %} report_section_order: "{{ name_noslash }}-methods-description": order: -1000 From baeb1dfd351b922e34daf61257eb4539c9d4b8e3 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 7 Oct 2024 11:22:32 +0200 Subject: [PATCH 733/737] bump version to 3.0.0 --- .gitpod.yml | 2 +- CHANGELOG.md | 17 ++++++++--------- setup.py | 2 +- 3 files changed, 10 insertions(+), 11 deletions(-) diff --git a/.gitpod.yml b/.gitpod.yml index efe193f35..f92457278 100644 --- a/.gitpod.yml +++ b/.gitpod.yml @@ -1,4 +1,4 @@ -image: nfcore/gitpod:dev +image: nfcore/gitpod:latest tasks: - name: install current state of nf-core/tools and setup pre-commit command: | diff --git a/CHANGELOG.md b/CHANGELOG.md index 717e82a48..1904cd99c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,6 @@ # nf-core/tools: Changelog -## v3.0.0dev +## [v3.0.0 - Titanium Tapir](https://github.com/nf-core/tools/releases/tag/3.0.0) - [2024-10-07] **Highlights** @@ -9,7 +9,6 @@ - A new Text User Interface app when running `nf-core pipelines create` to help us guide you through the process better (no worries, you can still use the cli if you give all values as parameters) - We replaced nf-validation with nf-schema in the pipeline template - CI tests now lint with the nf-core tools version matching the template version of the pipeline, to minimise errors in opened PRs with every new tools release. -- New command `nf-core pipelines ro-crate` to create a [Research Object (RO) crate](https://www.researchobject.org/ro-crate/) for a pipeline - `nf-core licences` command is deprecated. 
- The structure of nf-core/tools pytests has been updated - The structure of the API docs has been updated @@ -30,12 +29,12 @@ - add option to exclude changelog from custom pipeline template ([#3104](https://github.com/nf-core/tools/pull/3104)) - add option to exclude license from pipeline template ([#3125](https://github.com/nf-core/tools/pull/3125)) - add option to exclude email from pipeline template ([#3126](https://github.com/nf-core/tools/pull/3126)) -- Use nf-schema instead of nf-validation ([#3116](https://github.com/nf-core/tools/pull/3116)) - add option to exclude nf-schema from the template ([#3116](https://github.com/nf-core/tools/pull/3116)) - add option to exclude fastqc from pipeline template ([#3129](https://github.com/nf-core/tools/pull/3129)) - add option to exclude documentation from pipeline template ([#3130](https://github.com/nf-core/tools/pull/3130)) - add option to exclude test configs from pipeline template ([#3133](https://github.com/nf-core/tools/pull/3133)) - add option to exclude tower.yml from pipeline template ([#3134](https://github.com/nf-core/tools/pull/3134)) +- Use nf-schema instead of nf-validation ([#3116](https://github.com/nf-core/tools/pull/3116)) - test pipeline with conda and singularity on PRs to master ([#3149](https://github.com/nf-core/tools/pull/3149)) - run nf-core lint `--release` on PRs to master ([#3148](https://github.com/nf-core/tools/pull/3148)) - Add tests to ensure all files are part of a template customisation group and all groups are tested ([#3099](https://github.com/nf-core/tools/pull/3099)) @@ -61,16 +60,16 @@ ### Pipeline create command -- Create: allow more special characters on the pipeline name for non-nf-core pipelines ([#3008](https://github.com/nf-core/tools/pull/3008)) -- Create: Mock git cretentials to generate stable textual snapshots ([#3007](https://github.com/nf-core/tools/pull/3007)) -- Create app: display input textbox with equally spaced grid 
([#3038](https://github.com/nf-core/tools/pull/3038)) -- Pipelines: allow numbers in custom pipeline name ([#3094](https://github.com/nf-core/tools/pull/3094)) +- Allow more special characters on the pipeline name for non-nf-core pipelines ([#3008](https://github.com/nf-core/tools/pull/3008)) +- Mock git cretentials to generate stable textual snapshots ([#3007](https://github.com/nf-core/tools/pull/3007)) +- Display input textbox with equally spaced grid ([#3038](https://github.com/nf-core/tools/pull/3038)) +- Allow numbers in custom pipeline name ([#3094](https://github.com/nf-core/tools/pull/3094)) ### Components - The `modules_nfcore` tag in the `main.nf.test` file of modules/subworkflows now displays the organization name in custom modules repositories ([#3005](https://github.com/nf-core/tools/pull/3005)) - Add `--migrate_pytest` option to `nf-core test` command ([#3085](https://github.com/nf-core/tools/pull/3085)) -- Components: allow spaces at the beginning of include statements ([#3115](https://github.com/nf-core/tools/pull/3115)) +- Allow spaces at the beginning of include statements ([#3115](https://github.com/nf-core/tools/pull/3115)) - Add option `--fix` to update the `meta.yml` file of subworkflows ([#3077](https://github.com/nf-core/tools/pull/3077)) ### Download @@ -88,7 +87,6 @@ - Update output of generation script for API docs to new structure ([#2988](https://github.com/nf-core/tools/pull/2988)) - Add no clobber and put bash options on their own line ([#2991](https://github.com/nf-core/tools/pull/2991)) -- update minimal textual version and snapshots ([#2998](https://github.com/nf-core/tools/pull/2998)) - move pipeline subcommands for v3.0 ([#2983](https://github.com/nf-core/tools/pull/2983)) - return directory if base_dir is the root directory ([#3003](https://github.com/nf-core/tools/pull/3003)) - Remove nf-core licences command ([#3012](https://github.com/nf-core/tools/pull/3012)) @@ -112,6 +110,7 @@ - Update python:3.12-slim Docker digest to 
59c7332 ([#3124](https://github.com/nf-core/tools/pull/3124)) - Update pre-commit hook pre-commit/mirrors-mypy to v1.11.1 ([#3091](https://github.com/nf-core/tools/pull/3091)) - Update to pytest v8 and move it to dev dependencies ([#3058](https://github.com/nf-core/tools/pull/3058)) +- Update minimal textual version and snapshots ([#2998](https://github.com/nf-core/tools/pull/2998)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] diff --git a/setup.py b/setup.py index 45df29b8b..95a465530 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import find_packages, setup -version = "3.0.0dev" +version = "3.0.0" with open("README.md") as f: readme = f.read() From 0865cd1df778d2c5bbf2e31ddb06f58c4103f3be Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 7 Oct 2024 11:22:55 +0200 Subject: [PATCH 734/737] update nf-core command in sync.yml --- .github/workflows/sync.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/sync.yml b/.github/workflows/sync.yml index c86d79d78..d453dc930 100644 --- a/.github/workflows/sync.yml +++ b/.github/workflows/sync.yml @@ -94,7 +94,7 @@ jobs: run: | git config --global user.email "core@nf-co.re" git config --global user.name "nf-core-bot" - nf-core --log-file sync_log_${{ matrix.pipeline }}.txt sync -d nf-core/${{ matrix.pipeline }} \ + nf-core --log-file sync_log_${{ matrix.pipeline }}.txt pipelines sync -d nf-core/${{ matrix.pipeline }} \ --from-branch dev \ --pull-request \ --username nf-core-bot \ From f20f797ce755bdd2647bb951be37d54c2435921b Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 7 Oct 2024 11:35:22 +0200 Subject: [PATCH 735/737] fix some more template newlines --- nf_core/pipeline-template/.github/CONTRIBUTING.md | 4 ++-- nf_core/pipeline-template/docs/output.md | 3 ++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/nf_core/pipeline-template/.github/CONTRIBUTING.md 
b/nf_core/pipeline-template/.github/CONTRIBUTING.md index f331d3867..63cddcb7f 100644 --- a/nf_core/pipeline-template/.github/CONTRIBUTING.md +++ b/nf_core/pipeline-template/.github/CONTRIBUTING.md @@ -37,7 +37,7 @@ You have the option to test your changes locally by running the pipeline. For re nf-test test --profile debug,test,docker --verbose ``` -{% endif %} +{% endif -%} When you create a pull request with changes, [GitHub Actions](https://github.com/features/actions) will run automatic tests. Typically, pull-requests are only fully reviewed when these tests are passing, though of course we can help out before then. @@ -95,7 +95,7 @@ If you wish to contribute a new step, please use the following coding standards: {%- if multiqc %} 9. Update MultiQC config `assets/multiqc_config.yml` so relevant suffixes, file name clean up and module plots are in the appropriate order. If applicable, add a [MultiQC](https://https://multiqc.info/) module. 10. Add a description of the output files and if relevant any appropriate images from the MultiQC report to `docs/output.md`. - {% endif %} + {%- endif %} ### Default values diff --git a/nf_core/pipeline-template/docs/output.md b/nf_core/pipeline-template/docs/output.md index 5e42d50cc..083c46ecd 100644 --- a/nf_core/pipeline-template/docs/output.md +++ b/nf_core/pipeline-template/docs/output.md @@ -58,7 +58,8 @@ Results generated by MultiQC collate pipeline QC from supported tools e.g. FastQ - `pipeline_info/` - Reports generated by Nextflow: `execution_report.html`, `execution_timeline.html`, `execution_trace.txt` and `pipeline_dag.dot`/`pipeline_dag.svg`. - {% if email %}- Reports generated by the pipeline: `pipeline_report.html`, `pipeline_report.txt` and `software_versions.yml`. The `pipeline_report*` files will only be present if the `--email` / `--email_on_fail` parameter's are used when running the pipeline. 
{% endif %} + {%- if email %} + - Reports generated by the pipeline: `pipeline_report.html`, `pipeline_report.txt` and `software_versions.yml`. The `pipeline_report*` files will only be present if the `--email` / `--email_on_fail` parameter's are used when running the pipeline. {% endif %} - Reformatted samplesheet files used as input to the pipeline: `samplesheet.valid.csv`. - Parameters used by the pipeline run: `params.json`. From a506b1d50cdec4e41890ced0c61b957f99cb9f1c Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 8 Oct 2024 10:55:54 +0200 Subject: [PATCH 736/737] review suggestions by @mashehu --- .github/workflows/create-test-wf.yml | 4 ---- README.md | 2 +- nf_core/components/list.py | 2 +- nf_core/components/nfcore_component.py | 4 ++-- nf_core/modules/modules_differ.py | 4 ++-- 5 files changed, 6 insertions(+), 10 deletions(-) diff --git a/.github/workflows/create-test-wf.yml b/.github/workflows/create-test-wf.yml index 09c5b01c3..782a08ac9 100644 --- a/.github/workflows/create-test-wf.yml +++ b/.github/workflows/create-test-wf.yml @@ -71,10 +71,6 @@ jobs: mkdir create-test-wf && cd create-test-wf export NXF_WORK=$(pwd) nf-core --log-file log.txt pipelines create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" - # echo current directory - pwd - # echo content of current directory - ls -la nextflow run nf-core-testpipeline -profile self_hosted_runner,test --outdir ./results - name: Upload log file artifact diff --git a/README.md b/README.md index 58fb708a0..3597f1ea1 100644 --- a/README.md +++ b/README.md @@ -21,7 +21,7 @@ For documentation of the internal Python functions, please refer to the [Tools P ## Installation -For full installation instructions, please see the [nf-core documentation](https://nf-co.re/docs/usage/tools). +For full installation instructions, please see the [nf-core documentation](https://nf-co.re/docs/nf-core-tools/installation). 
Below is a quick-start for those who know what they're doing: ### Bioconda diff --git a/nf_core/components/list.py b/nf_core/components/list.py index 05a8f7112..4c20e6086 100644 --- a/nf_core/components/list.py +++ b/nf_core/components/list.py @@ -25,7 +25,7 @@ def __init__( self.remote = remote super().__init__(component_type, pipeline_dir, remote_url, branch, no_pull) - def _configure_repo_and_paths(self, nf_dir_req=True) -> None: + def _configure_repo_and_paths(self, nf_dir_req: bool = True) -> None: """ Override the default with nf_dir_req set to False to allow info to be run from anywhere and still return remote info diff --git a/nf_core/components/nfcore_component.py b/nf_core/components/nfcore_component.py index 9cce94acb..37e43a536 100644 --- a/nf_core/components/nfcore_component.py +++ b/nf_core/components/nfcore_component.py @@ -5,7 +5,7 @@ import logging import re from pathlib import Path -from typing import Any, List, Optional, Tuple, Union +from typing import Any, Dict, List, Optional, Tuple, Union log = logging.getLogger(__name__) @@ -50,7 +50,7 @@ def __init__( self.passed: List[Tuple[str, str, Path]] = [] self.warned: List[Tuple[str, str, Path]] = [] self.failed: List[Tuple[str, str, Path]] = [] - self.inputs: List[list[dict[str, dict[str, str]]]] = [] + self.inputs: List[List[Dict[str, Dict[str, str]]]] = [] self.outputs: List[str] = [] self.has_meta: bool = False self.git_sha: Optional[str] = None diff --git a/nf_core/modules/modules_differ.py b/nf_core/modules/modules_differ.py index b6d7f0d0f..f9ba9d30c 100644 --- a/nf_core/modules/modules_differ.py +++ b/nf_core/modules/modules_differ.py @@ -391,8 +391,8 @@ def get_new_and_old_lines(patch): def try_apply_single_patch(file_lines, patch, reverse=False): """ Tries to apply a patch to a modified file. 
Since the line numbers in - the patch does not agree if the file is modified, the old and new - lines inpatch are reconstructed and then we look for the old lines + the patch do not agree if the file is modified, the old and new + lines in the patch are reconstructed and then we look for the old lines in the modified file. If all hunk in the patch are found in the new file it is updated with the new lines from the patch file. From 0cfca702cc93b65274098c238228dd24649fce6a Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 8 Oct 2024 10:56:22 +0200 Subject: [PATCH 737/737] update release date --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1904cd99c..7b42046fc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,6 @@ # nf-core/tools: Changelog -## [v3.0.0 - Titanium Tapir](https://github.com/nf-core/tools/releases/tag/3.0.0) - [2024-10-07] +## [v3.0.0 - Titanium Tapir](https://github.com/nf-core/tools/releases/tag/3.0.0) - [2024-10-08] **Highlights**