Skip to content

Commit

Permalink
Merge branch 'release_24.0' into release_24.1
Browse files Browse the repository at this point in the history
  • Loading branch information
mvdbeek committed Jul 1, 2024
2 parents 9d601f8 + 58c55d4 commit 33f27d5
Show file tree
Hide file tree
Showing 12 changed files with 206 additions and 19 deletions.
20 changes: 12 additions & 8 deletions lib/galaxy/job_execution/setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -213,22 +213,26 @@ def get_input_dataset_fnames(self, ds: DatasetInstance) -> List[str]:
for value in ds.metadata.values():
if isinstance(value, MetadataFile):
filenames.append(value.get_file_name())
if ds.dataset and ds.dataset.extra_files_path_exists():
filenames.append(ds.dataset.extra_files_path)
return filenames

def get_input_fnames(self) -> List[str]:
def get_input_datasets(self) -> List[DatasetInstance]:
    """Return the dataset instances attached to this job's inputs.

    Covers both history and library inputs; associations without a
    dataset attached are skipped.
    """
    # Each association is a JobToInputDatasetAssociation object.
    associations = self.job.input_datasets + self.job.input_library_datasets
    return [association.dataset for association in associations if association.dataset]

def get_input_fnames(self) -> List[str]:
    """Return every filename backing this job's input datasets.

    Delegates dataset collection to get_input_datasets() and per-dataset
    path expansion (primary file, metadata files, extra files path) to
    get_input_dataset_fnames().
    """
    # NOTE(review): the captured text interleaved the pre-refactor loop
    # (iterating job.input_datasets directly, with an undefined `job`
    # local) with the refactored one; only the refactored loop is kept.
    filenames = []
    for ds in self.get_input_datasets():
        filenames.extend(self.get_input_dataset_fnames(ds))
    return filenames

def get_input_paths(self) -> List[DatasetPath]:
    """Return a DatasetPath for each of this job's input datasets.

    Mirrors get_input_fnames() but produces DatasetPath objects via
    get_input_path() instead of raw filename strings.
    """
    # NOTE(review): the captured text interleaved the pre-refactor loop
    # (iterating job.input_datasets directly) with the refactored one;
    # only the refactored loop is kept, and the now-unused
    # `job = self.job` binding is dropped.
    paths = []
    for ds in self.get_input_datasets():
        paths.append(self.get_input_path(ds))
    return paths

def get_input_path(self, dataset: DatasetInstance) -> DatasetPath:
Expand Down
2 changes: 1 addition & 1 deletion lib/galaxy/jobs/splitters/basic.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ def set_basic_defaults(job_wrapper):


def do_split(job_wrapper):
if len(job_wrapper.job_io.get_input_fnames()) > 1 or len(job_wrapper.job_io.get_output_fnames()) > 1:
if len(job_wrapper.job_io.get_input_datasets()) > 1 or len(job_wrapper.job_io.get_output_fnames()) > 1:
log.error("The basic splitter is not capable of handling jobs with multiple inputs or outputs.")
raise Exception("Job Splitting Failed, the basic splitter only handles tools with one input and one output")
# add in the missing information for splitting the one input and merging the one output
Expand Down
2 changes: 1 addition & 1 deletion lib/galaxy/managers/users.py
Original file line number Diff line number Diff line change
Expand Up @@ -616,7 +616,7 @@ def get_reset_token(self, trans, email):
reset_user = get_user_by_email(trans.sa_session, email, self.app.model.User)
if not reset_user and email != email.lower():
reset_user = self._get_user_by_email_case_insensitive(trans.sa_session, email)
if reset_user:
if reset_user and not reset_user.deleted:
prt = self.app.model.PasswordResetToken(reset_user)
trans.sa_session.add(prt)
with transaction(trans.sa_session):
Expand Down
6 changes: 3 additions & 3 deletions lib/galaxy/managers/workflows.py
Original file line number Diff line number Diff line change
Expand Up @@ -1416,7 +1416,7 @@ def _workflow_to_dict_export(self, trans, stored=None, workflow=None, internal=F
If `allow_upgrade`, the workflow and sub-workflows might use updated tool versions when refactoring.
"""
annotation_str = ""
tag_str = ""
tags_list = []
annotation_owner = None
if stored is not None:
if stored.id:
Expand All @@ -1427,7 +1427,7 @@ def _workflow_to_dict_export(self, trans, stored=None, workflow=None, internal=F
or self.get_item_annotation_str(trans.sa_session, annotation_owner, stored)
or ""
)
tag_str = stored.make_tag_string_list()
tags_list = stored.make_tag_string_list()
else:
# dry run with flushed workflow objects, just use the annotation
annotations = stored.annotations
Expand All @@ -1440,7 +1440,7 @@ def _workflow_to_dict_export(self, trans, stored=None, workflow=None, internal=F
data["format-version"] = "0.1"
data["name"] = workflow.name
data["annotation"] = annotation_str
data["tags"] = tag_str
data["tags"] = tags_list
if workflow.uuid is not None:
data["uuid"] = str(workflow.uuid)
steps: Dict[int, Dict[str, Any]] = {}
Expand Down
3 changes: 3 additions & 0 deletions lib/galaxy/workflow/refactor/execute.py
Original file line number Diff line number Diff line change
Expand Up @@ -531,6 +531,9 @@ def _patch_step(self, execution, step, step_def):
if upgrade_input["name"] == input_name:
matching_input = upgrade_input
break
elif step.when_expression and f"inputs.{input_name}" in step.when_expression:
# TODO: eventually track step inputs more formally
matching_input = upgrade_input

# In the future check parameter type, format, mapping status...
if matching_input is None:
Expand Down
7 changes: 7 additions & 0 deletions lib/galaxy_test/api/test_workflows.py
Original file line number Diff line number Diff line change
Expand Up @@ -7704,6 +7704,13 @@ def _all_user_invocation_ids(self):
invocation_ids = [i["id"] for i in all_invocations_for_user.json()]
return invocation_ids

def test_subworkflow_tags(self):
    """An exported parent workflow embeds its subworkflow with a tags list."""
    workflow_request = self.workflow_populator.load_workflow_from_resource("test_subworkflow_with_tags")
    uploaded_id = self.workflow_populator.create_workflow(workflow_request)
    exported = self._download_workflow(uploaded_id)
    # Step "1" of the fixture is the subworkflow step; its export must
    # carry a "tags" key (empty for this fixture).
    embedded_subworkflow = exported["steps"]["1"]["subworkflow"]
    assert embedded_subworkflow["tags"] == []


class TestAdminWorkflowsApi(BaseWorkflowsApiTestCase):
require_admin_user = True
Expand Down
134 changes: 134 additions & 0 deletions lib/galaxy_test/base/data/test_subworkflow_with_tags.ga
Original file line number Diff line number Diff line change
@@ -0,0 +1,134 @@
{
"a_galaxy_workflow": "true",
"annotation": "Test main ",
"comments": [],
"format-version": "0.1",
"name": "Unnamed Workflow",
"report": {
"markdown": "\n# Workflow Execution Report\n\n## Workflow Inputs\n```galaxy\ninvocation_inputs()\n```\n\n## Workflow Outputs\n```galaxy\ninvocation_outputs()\n```\n\n## Workflow\n```galaxy\nworkflow_display()\n```\n"
},
"steps": {
"0": {
"annotation": "",
"content_id": null,
"errors": null,
"id": 0,
"input_connections": {},
"inputs": [],
"label": null,
"name": "Input dataset",
"outputs": [],
"position": {
"left": 0,
"top": 0
},
"tool_id": null,
"tool_state": "{\"optional\": false, \"tag\": null}",
"tool_version": null,
"type": "data_input",
"uuid": "967583e9-d2a6-444a-ba31-6fb749d03f9e",
"when": null,
"workflow_outputs": []
},
"1": {
"annotation": "",
"id": 1,
"input_connections": {
"0:Input dataset": {
"id": 0,
"input_subworkflow_step_id": 0,
"output_name": "output"
}
},
"inputs": [],
"label": null,
"name": "Workflow with tags",
"outputs": [],
"position": {
"left": 249,
"top": 51
},
"subworkflow": {
"a_galaxy_workflow": "true",
"annotation": "",
"comments": [],
"format-version": "0.1",
"name": "Workflow with tags",
"report": {
"markdown": "\n# Workflow Execution Report\n\n## Workflow Inputs\n```galaxy\ninvocation_inputs()\n```\n\n## Workflow Outputs\n```galaxy\ninvocation_outputs()\n```\n\n## Workflow\n```galaxy\nworkflow_display()\n```\n"
},
"steps": {
"0": {
"annotation": "",
"content_id": null,
"errors": null,
"id": 0,
"input_connections": {},
"inputs": [],
"label": null,
"name": "Input dataset",
"outputs": [],
"position": {
"left": 0,
"top": 0.0
},
"tool_id": null,
"tool_state": "{\"optional\": false, \"tag\": null}",
"tool_version": null,
"type": "data_input",
"uuid": "eca9b088-ff50-4253-8387-01512f03ff2f",
"when": null,
"workflow_outputs": []
},
"1": {
"annotation": "",
"content_id": "addValue",
"errors": null,
"id": 1,
"input_connections": {
"input": {
"id": 0,
"output_name": "output"
}
},
"inputs": [
{
"description": "runtime parameter for tool Add column",
"name": "input"
}
],
"label": null,
"name": "Add column",
"outputs": [
{
"name": "out_file1",
"type": "input"
}
],
"position": {
"left": 123,
"top": 112.0
},
"post_job_actions": {},
"tool_id": "addValue",
"tool_state": "{\"exp\": \"1\", \"input\": {\"__class__\": \"RuntimeValue\"}, \"iterate\": \"no\", \"__page__\": null, \"__rerun_remap_job_id__\": null}",
"tool_version": "1.0.0",
"type": "tool",
"uuid": "7016e754-149b-402a-bb17-eb6cd4b1ab0a",
"when": null,
"workflow_outputs": []
}
},
"uuid": "c33370a9-188f-4af8-bfcc-137c577c79ba"
},
"tool_id": null,
"type": "subworkflow",
"uuid": "90bdcd13-418a-49da-847e-02926942bf4b",
"when": null,
"workflow_outputs": []
}
},
"tags": [],
"uuid": "64d7fac3-6402-412b-9db6-490ffc18e129",
"version": 1
}
2 changes: 1 addition & 1 deletion lib/tool_shed/webapp/templates/galaxy_client_app.mako
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ ${ h.dumps( dictionary, indent=( 2 if trans.debug else 0 ) ) }

<%def name="config_plausible_analytics(plausible_server, plausible_domain)">
%if plausible_server and plausible_domain:
<script async defer data-domain="${plausible_domain}" src="${plausible_server}/js/plausible.js"></script>
<script async defer data-domain="${plausible_domain}" src="${plausible_server}/js/script.js"></script>
%else:
<script>console.warn("Missing plausible server or plausible domain");</script>
%endif
Expand Down
2 changes: 1 addition & 1 deletion templates/galaxy_client_app.mako
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@ ${ h.dumps( dictionary, indent=( 2 if trans.debug else 0 ) ) }

<%def name="config_plausible_analytics(plausible_server, plausible_domain)">
%if plausible_server and plausible_domain:
<script async defer data-domain="${plausible_domain}" src="${plausible_server}/js/plausible.js"></script>
<script async defer data-domain="${plausible_domain}" src="${plausible_server}/js/script.js"></script>
<script>window.plausible = window.plausible || function() { (window.plausible.q = window.plausible.q || []).push(arguments) }</script>
%else:
<script>
Expand Down
8 changes: 4 additions & 4 deletions templates/js-app.mako
Original file line number Diff line number Diff line change
Expand Up @@ -22,10 +22,10 @@
| ${app.config.brand}
%endif
</title>

## relative href for site root
<link rel="index" href="${ h.url_for( '/' ) }"/>

## TODO: use loaders to move everything but the essentials below the fold
${ h.dist_css(
'base',
Expand Down Expand Up @@ -86,10 +86,10 @@
${ galaxy_client.config_google_analytics(app.config.ga_code) }
%endif
%if app.config.plausible_server and app.config.plausible_domain:
${ galaxy_client.config_plausible_analytics(app.config.plausible_server, app.config.plausible_domain) }
${ galaxy_client.config_plausible_analytics(app.config.plausible_server, app.config.plausible_domain) }
%endif
%if app.config.matomo_server and app.config.matomo_site_id:
${ galaxy_client.config_matomo_analytics(app.config.matomo_server, app.config.matomo_site_id) }
${ galaxy_client.config_matomo_analytics(app.config.matomo_server, app.config.matomo_site_id) }
%endif
</%def>

Expand Down
29 changes: 29 additions & 0 deletions test/integration/test_workflow_refactoring.py
Original file line number Diff line number Diff line change
Expand Up @@ -537,6 +537,35 @@ def test_tool_version_upgrade_no_state_change(self):
assert len(action_executions[0].messages) == 0
assert self._latest_workflow.step_by_label("the_step").tool_version == "0.2"

def test_tool_version_upgrade_keeps_when_expression(self):
    """Upgrading a tool step must preserve its conditional `when` expression."""
    # Workflow with a conditionally-executed step: `when: $(inputs.when)`
    # is wired to the boolean input `the_bool`.
self.workflow_populator.upload_yaml_workflow(
"""
class: GalaxyWorkflow
inputs:
  the_bool:
    type: boolean
steps:
  the_step:
    tool_id: multiple_versions
    tool_version: '0.1'
    in:
      when: the_bool
    state:
      inttest: 0
    when: $(inputs.when)
"""
)
assert self._latest_workflow.step_by_label("the_step").tool_version == "0.1"
actions: ActionsJson = [
{"action_type": "upgrade_tool", "step": {"label": "the_step"}},
]
    # The upgrade refactor action should apply cleanly (no messages) ...
action_executions = self._refactor(actions).action_executions
assert len(action_executions) == 1
assert len(action_executions[0].messages) == 0
step = self._latest_workflow.step_by_label("the_step")
    # ... bump the version, and keep the when_expression intact.
assert step.tool_version == "0.2"
assert step.when_expression

def test_tool_version_upgrade_state_added(self):
self.workflow_populator.upload_yaml_workflow(
"""
Expand Down
10 changes: 10 additions & 0 deletions test/unit/app/managers/test_UserManager.py
Original file line number Diff line number Diff line change
Expand Up @@ -232,6 +232,16 @@ def validate_send_email(frm, to, subject, body, config, html=None):
mock_unique_id.assert_called_once()
assert result is None

def test_reset_email_user_deleted(self):
    """A deleted account must not be issued a password reset token."""
    self.trans.app.config.allow_user_deletion = True
    self.log("should not produce the password reset email if user is deleted")
    deleted_email = "[email protected]"
    target_user = self.user_manager.create(email=deleted_email, username="nopassword")
    self.user_manager.delete(target_user)
    assert target_user.deleted is True
    # The manager reports the same failure message as for an unknown address.
    result = self.user_manager.send_reset_email(self.trans, {"email": deleted_email})
    assert result == "Failed to produce password reset token. User not found."

def test_get_user_by_identity(self):
# return None if username/email not found
assert self.user_manager.get_user_by_identity("xyz") is None
Expand Down

0 comments on commit 33f27d5

Please sign in to comment.