From feb37476984570c8b6317059c1e396d8be3d6adc Mon Sep 17 00:00:00 2001
From: mvdbeek
Date: Wed, 6 Nov 2024 16:13:57 +0100
Subject: [PATCH] Enable specifying dataset hash in test jobs

---
 lib/galaxy/model/store/discover.py       | 4 +---
 lib/galaxy/tool_util/client/staging.py   | 9 ++++-----
 lib/galaxy/tool_util/cwl/util.py         | 2 ++
 lib/galaxy_test/api/test_tools_upload.py | 3 +++
 4 files changed, 10 insertions(+), 8 deletions(-)

diff --git a/lib/galaxy/model/store/discover.py b/lib/galaxy/model/store/discover.py
index 1406437f86bc..043ede46f5ef 100644
--- a/lib/galaxy/model/store/discover.py
+++ b/lib/galaxy/model/store/discover.py
@@ -930,9 +930,7 @@ def replace_request_syntax_sugar(obj):
                 new_hashes.append({"hash_function": key, "hash_value": obj[key.lower()]})
                 del obj[key.lower()]
 
-        if "hashes" not in obj:
-            obj["hashes"] = []
-        obj["hashes"].extend(new_hashes)
+        obj.setdefault("hashes", []).extend(new_hashes)
 
 
 class DiscoveredFile(NamedTuple):
diff --git a/lib/galaxy/tool_util/client/staging.py b/lib/galaxy/tool_util/client/staging.py
index 8156dd123f58..ca165d41bdf8 100644
--- a/lib/galaxy/tool_util/client/staging.py
+++ b/lib/galaxy/tool_util/client/staging.py
@@ -107,6 +107,7 @@ def _attach_file(upload_payload: Dict[str, Any], uri: str, index: int = 0) -> Di
                     dbkey=dbkey,
                     to_posix_lines=to_posix_lines,
                     decompress=upload_target.properties.get("decompress") or DEFAULT_DECOMPRESS,
+                    hashes=upload_target.properties.get("hashes"),
                 )
                 name = _file_path_to_name(file_path)
                 if file_path is not None:
@@ -177,11 +178,7 @@ def _attach_file(upload_payload: Dict[str, Any], uri: str, index: int = 0) -> No
                 file_path = upload_target.path
                 file_type = upload_target.properties.get("filetype", None) or DEFAULT_FILE_TYPE
                 dbkey = upload_target.properties.get("dbkey", None) or DEFAULT_DBKEY
-                upload_payload = _upload_payload(
-                    history_id,
-                    file_type=file_type,
-                    to_posix_lines=dbkey,
-                )
+                upload_payload = _upload_payload(history_id, file_type=file_type, to_posix_lines=dbkey)
                 name = _file_path_to_name(file_path)
                 upload_payload["inputs"]["files_0|auto_decompress"] = False
                 upload_payload["inputs"]["auto_decompress"] = False
@@ -334,6 +331,8 @@ def _fetch_payload(history_id, file_type=DEFAULT_FILE_TYPE, dbkey=DEFAULT_DBKEY,
     for arg in ["to_posix_lines", "space_to_tab"]:
         if arg in kwd:
             element[arg] = kwd[arg]
+    if kwd.get("hashes"):
+        element["hashes"] = kwd["hashes"]
     if "file_name" in kwd:
         element["name"] = kwd["file_name"]
     if "decompress" in kwd:
diff --git a/lib/galaxy/tool_util/cwl/util.py b/lib/galaxy/tool_util/cwl/util.py
index 4d2dc2977d11..2f3edde16dba 100644
--- a/lib/galaxy/tool_util/cwl/util.py
+++ b/lib/galaxy/tool_util/cwl/util.py
@@ -235,6 +235,8 @@ def replacement_file(value):
        kwd["dbkey"] = value.get("dbkey")
        if "decompress" in value:
            kwd["decompress"] = value["decompress"]
+       if value.get("hashes"):
+           kwd["hashes"] = value["hashes"]
        if composite_data_raw:
            composite_data = []
            for entry in composite_data_raw:
diff --git a/lib/galaxy_test/api/test_tools_upload.py b/lib/galaxy_test/api/test_tools_upload.py
index 15d0198f9c31..0830bd621f4f 100644
--- a/lib/galaxy_test/api/test_tools_upload.py
+++ b/lib/galaxy_test/api/test_tools_upload.py
@@ -254,6 +254,7 @@ def test_newlines_stage_fetch(self, history_id: str) -> None:
                 "class": "File",
                 "format": "txt",
                 "path": "test-data/simple_line_no_newline.txt",
+                "hashes": [{"hash_function": "SHA-1", "hash_value": "f030155d3459c233efd37e13bc1061c1dc744ebf"}],
             }
         }
         inputs, datasets = stage_inputs(self.galaxy_interactor, history_id, job, use_path_paste=False)
@@ -261,6 +262,8 @@ def test_newlines_stage_fetch(self, history_id: str) -> None:
         content = self.dataset_populator.get_history_dataset_content(history_id=history_id, dataset=dataset)
         # By default this appends the newline.
         assert content == "This is a line of text.\n"
+        dataset = self.dataset_populator.get_history_dataset_details(history_id, content_id=dataset["id"])
+        assert dataset["hashes"][0]["hash_value"] == "f030155d3459c233efd37e13bc1061c1dc744ebf"
 
     def test_stage_object(self, history_id: str) -> None:
         job = {"input1": "randomstr"}
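For reference, a minimal sketch of how a caller could declare an expected dataset checksum for a staged test input once this patch is applied. The job dictionary, the "hashes" key, the SHA-1 value, and the stage_inputs call all mirror test_newlines_stage_fetch above; the surrounding history/interactor setup and the import location of stage_inputs are assumed from the caller's test environment rather than shown in the patch.

    # Sketch only: mirrors the "hashes" request syntax exercised by
    # test_newlines_stage_fetch above. history_id, galaxy_interactor and the
    # stage_inputs helper are assumed to come from the caller's test setup.
    job = {
        "input1": {
            "class": "File",
            "format": "txt",
            "path": "test-data/simple_line_no_newline.txt",
            # One entry per hash function; staging.py forwards this list into the
            # fetch payload, and the resulting dataset carries it (as asserted above).
            "hashes": [
                {"hash_function": "SHA-1", "hash_value": "f030155d3459c233efd37e13bc1061c1dc744ebf"},
            ],
        }
    }
    inputs, datasets = stage_inputs(galaxy_interactor, history_id, job, use_path_paste=False)

As the discover.py hunk suggests, shorthand keys such as a lower-cased hash name on the request are also normalized by replace_request_syntax_sugar into this same list-of-dict "hashes" form.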