Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
38 changes: 24 additions & 14 deletions lib/galaxy/tools/parameters/basic.py
Original file line number Diff line number Diff line change
Expand Up @@ -2419,14 +2419,19 @@ def append_ldda(ldda):
)

# add datasets
hda_list = util.listify(other_values.get(self.name))
# When rerunning a job, other_values contains the original job's input
# values which may be HDAs, HDCAs, DCEs, or LDDAs. We start by
# collecting them so we can (a) mark the ones that are still present
# among the active visible datasets and (b) carry forward the rest as
# "keep" options so the client can pre-select them.
job_input_values = util.listify(other_values.get(self.name))
# Prefetch all at once, big list of visible, non-deleted datasets.
matches_by_hid: dict[int, list] = {}
for hda in history.active_visible_datasets_and_roles:
match = dataset_matcher.hda_match(hda)
if match:
m = match.hda
hda_list = [h for h in hda_list if h != m and h != hda]
job_input_values = [h for h in job_input_values if h != m and h != hda]
if m.hid not in matches_by_hid:
matches_by_hid[m.hid] = []
matches_by_hid[m.hid].append(match)
Expand All @@ -2442,19 +2447,24 @@ def append_ldda(ldda):
)
append(d["options"]["hda"], match.hda, m_name, "hda")

for hda in hda_list:
if hasattr(hda, "hid"):
if hda.deleted:
hda_state = "deleted"
elif not hda.visible:
hda_state = "hidden"
# Remaining job_input_values were not found among active visible
# datasets (e.g. hidden or deleted inputs from the original job).
# Route each to the correct options list by type so the client can
# match them by id *and* src.
for value in job_input_values:
if isinstance(value, (HistoryDatasetCollectionAssociation, HistoryDatasetAssociation)):
if value.deleted:
state = "deleted"
elif not value.visible:
state = "hidden"
else:
hda_state = "unavailable"
append(d["options"]["hda"], hda, f"({hda_state}) {hda.name}", "hda", True)
elif isinstance(hda, DatasetCollectionElement):
append_dce(hda)
elif isinstance(hda, LibraryDatasetDatasetAssociation):
append_ldda(hda)
state = "not in current history"
src = "hdca" if isinstance(value, HistoryDatasetCollectionAssociation) else "hda"
append(d["options"][src], value, f"({state}) {value.name}", src, True)
elif isinstance(value, DatasetCollectionElement):
append_dce(value)
elif isinstance(value, LibraryDatasetDatasetAssociation):
append_ldda(value)

# add dataset collections
dataset_collection_matcher = dataset_matcher_factory.dataset_collection_matcher(dataset_matcher)
Expand Down
45 changes: 38 additions & 7 deletions lib/galaxy_test/api/test_jobs.py
Original file line number Diff line number Diff line change
Expand Up @@ -449,7 +449,7 @@ def test_no_hide_on_rerun(self):
)
first_update_time = failed_hdca["update_time"]
assert failed_hdca["visible"]
rerun_params = self._get(f"jobs/{job_id}/build_for_rerun").json()
rerun_params = self.dataset_populator.build_for_rerun(job_id)
inputs = rerun_params["state_inputs"]
inputs["rerun_remap_job_id"] = job_id
rerun_response = self._run_detect_errors(history_id=history_id, inputs=inputs)
Expand Down Expand Up @@ -481,7 +481,7 @@ def test_rerun_exception_handling(self):
assert_ok=False,
)
assert failed_hdca["visible"]
rerun_params = self._get(f"jobs/{job_id}/build_for_rerun").json()
rerun_params = self.dataset_populator.build_for_rerun(job_id)
inputs = rerun_params["state_inputs"]
inputs["rerun_remap_job_id"] = unrelated_job_id
before_rerun_items = self.dataset_populator.get_history_contents(history_id)
Expand Down Expand Up @@ -944,7 +944,7 @@ def _get_simple_rerun_params(self, history_id, private=False):
wait_for_job=True,
assert_ok=True,
)
rerun_params = self._get(f"jobs/{run_response['jobs'][0]['id']}/build_for_rerun").json()
rerun_params = self.dataset_populator.build_for_rerun(run_response["jobs"][0]["id"])
# Since we call rerun on the first (and only) job we should get the expanded input
# which is a dataset collection element (and not the list:pair hdca that was used as input to the original
# job).
Expand All @@ -966,12 +966,43 @@ def test_job_build_for_rerun(self, history_id):
assert_ok=True,
)

@skip_without_tool("multi_data_param")
def test_job_build_for_rerun_hdca_value_in_options(self, history_id):
    """Rerun options for a hidden input collection must be typed as HDCA.

    A collection supplied to a ``multiple="true"`` data parameter carries
    ``src: "hdca"`` in its value, so the corresponding option has to land
    in ``options.hdca`` rather than ``options.hda`` — otherwise the client
    cannot match and pre-select it. Guards against a regression where
    hidden HDCAs were misclassified as HDAs in the fallback options,
    leaving the rerun form in single-dataset mode with nothing selected.
    """
    collection_id = self.__history_with_ok_collection(collection_type="list", history_id=history_id)
    tool_inputs = {
        "f1": {"src": "hdca", "id": collection_id},
        "f2": {"src": "hdca", "id": collection_id},
    }
    run = self._run("multi_data_param", history_id, tool_inputs, wait_for_job=True, assert_ok=True)
    original_job_id = run["jobs"][0]["id"]

    # Hiding the collection forces the rerun build through the fallback
    # path: it is no longer found among active visible collections.
    self.dataset_populator.hide_dataset_collection(collection_id)

    rerun_params = self.dataset_populator.build_for_rerun(original_job_id)

    # Locate the "f1" parameter definition in the returned form model.
    f1 = next(form_input for form_input in rerun_params["inputs"] if form_input["name"] == "f1")
    assert f1["value"]["values"][0]["src"] == "hdca"

    # The collection must show up under options.hdca, matched by id and src.
    first_option = f1["options"]["hdca"][0]
    assert first_option["id"] == collection_id
    assert first_option["src"] == "hdca"

@skip_without_tool("multiple_versions")
def test_job_build_for_rerun_switch_version(self, history_id):
run_response = self._run("multiple_versions", history_id, {}, tool_version="0.1").json()
rerun_params = self._get(
f"jobs/{run_response['jobs'][0]['id']}/build_for_rerun", {"tool_version": "0.2"}
).json()
rerun_params = self.dataset_populator.build_for_rerun(run_response["jobs"][0]["id"], tool_version="0.2")
assert rerun_params["version"] == "0.2"

@skip_without_tool("collection_paired_test")
Expand Down Expand Up @@ -1012,7 +1043,7 @@ def test_job_build_for_rerun_list_list(self, history_id):
assert_ok=True,
)
assert len(run_response["jobs"]) == 2
rerun_params = self._get(f"jobs/{run_response['jobs'][0]['id']}/build_for_rerun").json()
rerun_params = self.dataset_populator.build_for_rerun(run_response["jobs"][0]["id"])
# Since we call rerun on the first (and only) job we should get the expanded input
# which is a dataset collection element (and not the list:list hdca that was used as input to the original
# job).
Expand Down
5 changes: 1 addition & 4 deletions lib/galaxy_test/api/test_tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -1086,10 +1086,7 @@ def test_dataset_hidden_after_job_finish(self):
}
response = self._run("galaxy_json_sleep", history_id, inputs, assert_ok=True)
output = response["outputs"][0]
response = self._put(
f"histories/{history_id}/contents/datasets/{output['id']}", data={"visible": False}, json=True
)
response.raise_for_status()
self.dataset_populator.hide_dataset(output["id"])
output_details = self.dataset_populator.get_history_dataset_details(history_id, dataset=output, wait=False)
assert not output_details["visible"]
output_details = self.dataset_populator.get_history_dataset_details(history_id, dataset=output, wait=True)
Expand Down
11 changes: 11 additions & 0 deletions lib/galaxy_test/base/populators.py
Original file line number Diff line number Diff line change
Expand Up @@ -855,6 +855,11 @@ def active_history_jobs(self, history_id: str) -> list:
]
return active_jobs

def build_for_rerun(self, job_id: str, **kwd) -> dict[str, Any]:
    """Fetch the tool-form rerun state for *job_id* from the jobs API.

    Extra keyword arguments (e.g. ``tool_version``) are forwarded as
    query parameters; raises on a non-2xx response.
    """
    params = kwd if kwd else None
    rerun_response = self._get(f"jobs/{job_id}/build_for_rerun", params)
    rerun_response.raise_for_status()
    return rerun_response.json()

def cancel_job(self, job_id: str) -> Response:
    """Request cancellation of the given job and return the raw response."""
    delete_response = self._delete(f"jobs/{job_id}")
    return delete_response

Expand Down Expand Up @@ -1743,12 +1748,18 @@ def update_dataset(self, dataset_id: str, update_payload: dict[str, Any]):
api_asserts.assert_status_code_is_ok(put_response)
return put_response.json()

def hide_dataset(self, dataset_id: str) -> dict[str, Any]:
    """Mark the dataset invisible; return the updated dataset dict."""
    payload = {"visible": False}
    return self.update_dataset(dataset_id, payload)

def update_dataset_collection(self, dataset_collection_id: str, update_payload: dict[str, Any]):
    """PUT *update_payload* to the dataset collection; return parsed JSON."""
    response = self._put(f"dataset_collections/{dataset_collection_id}", update_payload, json=True)
    # Fail loudly if the API rejected the update.
    api_asserts.assert_status_code_is_ok(response)
    return response.json()

def hide_dataset_collection(self, dataset_collection_id: str) -> dict[str, Any]:
    """Mark the collection invisible; return the updated collection dict."""
    payload = {"visible": False}
    return self.update_dataset_collection(dataset_collection_id, payload)

def get_histories(self):
history_index_response = self._get("histories")
api_asserts.assert_status_code_is(history_index_response, 200)
Expand Down
17 changes: 10 additions & 7 deletions test/unit/app/tools/test_data_parameters.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,8 +49,8 @@ def test_field_filter_on_types(self):
assert field["options"]["hda"][0]["name"] == "hda1"

def test_field_display_hidden_hdas_only_if_selected(self):
hda1 = MockHistoryDatasetAssociation(name="hda1", id=1)
hda2 = MockHistoryDatasetAssociation(name="hda2", id=2)
hda1 = self._new_hda(name="hda1")
hda2 = self._new_hda(name="hda2")
hda1.visible = False
hda2.visible = False
self.stub_active_datasets(hda1, hda2)
Expand All @@ -59,8 +59,8 @@ def test_field_display_hidden_hdas_only_if_selected(self):
assert field["options"]["hda"][0]["name"] == "(hidden) hda2"

def test_field_display_deleted_hdas_only_if_selected(self):
hda1 = MockHistoryDatasetAssociation(name="hda1", id=1)
hda2 = MockHistoryDatasetAssociation(name="hda2", id=2)
hda1 = self._new_hda(name="hda1")
hda2 = self._new_hda(name="hda2")
hda1.visible = False
hda2.deleted = True
self.stub_active_datasets(hda1, hda2)
Expand Down Expand Up @@ -141,10 +141,13 @@ def test_get_initial_with_to_be_converted_data(self):
self.stub_active_datasets(hda1)
assert hda1 == self.param.get_initial_value(self.trans, {}), hda1

def _new_hda(self):
def _new_hda(self, name="Test Dataset"):
hda = model.HistoryDatasetAssociation()
hda.visible = True
hda.name = name
hda.extension = "txt"
hda.dataset = model.Dataset()
hda.dataset.state = model.Dataset.states.OK
session = self.app.model.context
session.add(hda)
session.commit()
Expand Down Expand Up @@ -184,8 +187,8 @@ def param(self):


class MockHistoryDatasetAssociation:
"""Fake HistoryDatasetAssociation stubbed out for testing matching and
stuff like that.
"""Fake HistoryDatasetAssociation that stubs find_conversion_destination
so conversion tests can run without a datatype registry.
"""

def __init__(self, test_dataset=None, name="Test Dataset", id=1):
Expand Down
Loading