diff --git a/lib/galaxy/dependencies/pinned-typecheck-requirements.txt b/lib/galaxy/dependencies/pinned-typecheck-requirements.txt
index 0aab0f8b71ce..0fcfcb679b1e 100644
--- a/lib/galaxy/dependencies/pinned-typecheck-requirements.txt
+++ b/lib/galaxy/dependencies/pinned-typecheck-requirements.txt
@@ -8,7 +8,7 @@ cryptography==42.0.8 ; python_version >= "3.8" and python_version < "3.13"
 lxml-stubs==0.5.1 ; python_version >= "3.8" and python_version < "3.13"
 mypy-boto3-s3==1.34.138 ; python_version >= "3.8" and python_version < "3.13"
 mypy-extensions==1.0.0 ; python_version >= "3.8" and python_version < "3.13"
-mypy==1.10.1 ; python_version >= "3.8" and python_version < "3.13"
+mypy==1.11.0 ; python_version >= "3.8" and python_version < "3.13"
 pycparser==2.22 ; python_version >= "3.8" and python_version < "3.13" and platform_python_implementation != "PyPy"
 pydantic-core==2.20.1 ; python_version >= "3.8" and python_version < "3.13"
 pydantic==2.8.2 ; python_version >= "3.8" and python_version < "3.13"
diff --git a/lib/galaxy/managers/base.py b/lib/galaxy/managers/base.py
index 6629e6911267..3d4c39e1cd4b 100644
--- a/lib/galaxy/managers/base.py
+++ b/lib/galaxy/managers/base.py
@@ -697,7 +697,7 @@ def serialize(self, item, keys, **context):
                 try:
                     returned[key] = self.serializers[key](item, key, **context)
                 except SkipAttribute:
-                    # dont add this key if the deserializer threw this
+                    # don't add this key if the serializer threw this
                     pass
             elif key in self.serializable_keyset:
                 returned[key] = self.default_serializer(item, key, **context)
diff --git a/lib/galaxy/managers/configuration.py b/lib/galaxy/managers/configuration.py
index 7a133b3f0fb2..d0ccfa8d53c2 100644
--- a/lib/galaxy/managers/configuration.py
+++ b/lib/galaxy/managers/configuration.py
@@ -104,8 +104,8 @@ def __init__(self, app):
         self.default_view = "all"
         self.add_view("all", list(self.serializers.keys()))
 
-    def default_serializer(self, config, key):
-        return getattr(config, key, None)
+    def default_serializer(self, item, key, **context):
+        return getattr(item, key, None)
 
     def add_serializers(self):
         def _defaults_to(default) -> base.Serializer:
diff --git a/lib/galaxy/managers/datasets.py b/lib/galaxy/managers/datasets.py
index 3a23a25e19a1..a9f03f53bf92 100644
--- a/lib/galaxy/managers/datasets.py
+++ b/lib/galaxy/managers/datasets.py
@@ -84,23 +84,23 @@ def create(self, manage_roles=None, access_roles=None, flush=True, **kwargs):
     def copy(self, dataset, **kwargs):
         raise exceptions.NotImplemented("Datasets cannot be copied")
 
-    def purge(self, dataset, flush=True):
+    def purge(self, item, flush=True, **kwargs):
         """
         Remove the object_store/file for this dataset from storage and mark
         as purged.
 
         :raises exceptions.ConfigDoesNotAllowException: if the instance doesn't allow
         """
-        self.error_unless_dataset_purge_allowed(dataset)
+        self.error_unless_dataset_purge_allowed(item)
 
         # the following also marks dataset as purged and deleted
-        dataset.full_delete()
-        self.session().add(dataset)
+        item.full_delete()
+        self.session().add(item)
         if flush:
             session = self.session()
             with transaction(session):
                 session.commit()
-        return dataset
+        return item
 
     def purge_datasets(self, request: PurgeDatasetsTaskRequest):
         """
@@ -376,7 +376,7 @@ def delete(self, item, flush: bool = True, stop_job: bool = False, **kwargs):
             self.stop_creating_job(item, flush=flush)
         return item
 
-    def purge(self, dataset_assoc, flush=True):
+    def purge(self, item, flush=True, **kwargs):
         """
         Purge this DatasetInstance and the dataset underlying it.
         """
@@ -388,15 +388,15 @@ def purge(self, dataset_assoc, flush=True):
         # so that job cleanup associated with stop_creating_job will see
         # the dataset as purged.
         flush_required = not self.app.config.track_jobs_in_database
-        super().purge(dataset_assoc, flush=flush or flush_required)
+        super().purge(item, flush=flush or flush_required, **kwargs)
 
-        # stop any jobs outputing the dataset_assoc
-        self.stop_creating_job(dataset_assoc, flush=True)
+        # stop any jobs outputing the dataset association
+        self.stop_creating_job(item, flush=True)
 
         # more importantly, purge underlying dataset as well
-        if dataset_assoc.dataset.user_can_purge:
-            self.dataset_manager.purge(dataset_assoc.dataset)
-        return dataset_assoc
+        if item.dataset.user_can_purge:
+            self.dataset_manager.purge(item.dataset, flush=flush, **kwargs)
+        return item
 
     def by_user(self, user):
         raise exceptions.NotImplemented("Abstract Method")
diff --git a/lib/galaxy/managers/hdas.py b/lib/galaxy/managers/hdas.py
index f69a20c6cc4c..08ffca33fbb6 100644
--- a/lib/galaxy/managers/hdas.py
+++ b/lib/galaxy/managers/hdas.py
@@ -228,14 +228,14 @@ def copy(
         return copy
 
     # .... deletion and purging
-    def purge(self, hda, flush=True, **kwargs):
+    def purge(self, item, flush=True, **kwargs):
        if self.app.config.enable_celery_tasks:
             from galaxy.celery.tasks import purge_hda
 
             user = kwargs.get("user")
-            return purge_hda.delay(hda_id=hda.id, task_user_id=getattr(user, "id", None))
+            return purge_hda.delay(hda_id=item.id, task_user_id=getattr(user, "id", None))
         else:
-            self._purge(hda, flush=flush)
+            self._purge(item, flush=flush)
 
     def _purge(self, hda, flush=True):
         """
diff --git a/lib/galaxy/managers/histories.py b/lib/galaxy/managers/histories.py
index 84db5dd38c2f..1cc3bf6e8937 100644
--- a/lib/galaxy/managers/histories.py
+++ b/lib/galaxy/managers/histories.py
@@ -289,19 +289,19 @@ def most_recent(self, user, filters=None, current_history=None):
         return self.session().scalars(stmt).first()
 
     # .... purgable
-    def purge(self, history, flush=True, **kwargs):
+    def purge(self, item, flush=True, **kwargs):
         """
         Purge this history and all HDAs, Collections, and Datasets inside this history.
         """
-        self.error_unless_mutable(history)
+        self.error_unless_mutable(item)
         self.hda_manager.dataset_manager.error_unless_dataset_purge_allowed()
         # First purge all the datasets
-        for hda in history.datasets:
+        for hda in item.datasets:
             if not hda.purged:
                 self.hda_manager.purge(hda, flush=True, **kwargs)
 
         # Now mark the history as purged
-        super().purge(history, flush=flush, **kwargs)
+        super().purge(item, flush=flush, **kwargs)
 
     # .... current
     # TODO: make something to bypass the anon user + current history permissions issue
diff --git a/lib/galaxy/objectstore/_caching_base.py b/lib/galaxy/objectstore/_caching_base.py
index 066fc9077e34..5866fe749195 100644
--- a/lib/galaxy/objectstore/_caching_base.py
+++ b/lib/galaxy/objectstore/_caching_base.py
@@ -112,7 +112,7 @@ def _in_cache(self, rel_path: str) -> bool:
         cache_path = self._get_cache_path(rel_path)
         return os.path.exists(cache_path)
 
-    def _pull_into_cache(self, rel_path) -> bool:
+    def _pull_into_cache(self, rel_path, **kwargs) -> bool:
         # Ensure the cache directory structure exists (e.g., dataset_#_files/)
         rel_path_dir = os.path.dirname(rel_path)
         if not os.path.exists(self._get_cache_path(rel_path_dir)):
@@ -129,7 +129,7 @@ def _get_data(self, obj, start=0, count=-1, **kwargs):
         rel_path = self._construct_path(obj, **kwargs)
         # Check cache first and get file if not there
         if not self._in_cache(rel_path):
-            self._pull_into_cache(rel_path)
+            self._pull_into_cache(rel_path, **kwargs)
         # Read the file content from cache
         data_file = open(self._get_cache_path(rel_path))
         data_file.seek(start)
@@ -298,7 +298,7 @@ def _get_filename(self, obj, **kwargs):
             self._download_directory_into_cache(rel_path, cache_path)
             return cache_path
         else:
-            if self._pull_into_cache(rel_path):
+            if self._pull_into_cache(rel_path, **kwargs):
                 return cache_path
 
         raise ObjectNotFound(f"objectstore.get_filename, no cache_path: {obj}, kwargs: {kwargs}")
diff --git a/lib/galaxy/objectstore/rucio.py b/lib/galaxy/objectstore/rucio.py
index 4bb6540a34de..78badbe0e57d 100644
--- a/lib/galaxy/objectstore/rucio.py
+++ b/lib/galaxy/objectstore/rucio.py
@@ -306,7 +306,7 @@ def _initialize(self):
         self._ensure_staging_path_writable()
         self._start_cache_monitor_if_needed()
 
-    def _pull_into_cache(self, rel_path, auth_token):
+    def _pull_into_cache(self, rel_path, **kwargs) -> bool:
         log.debug("rucio _pull_into_cache: %s", rel_path)
         # Ensure the cache directory structure exists (e.g., dataset_#_files/)
         rel_path_dir = os.path.dirname(rel_path)
@@ -314,6 +314,7 @@ def _pull_into_cache(self, rel_path, auth_token):
             os.makedirs(self._get_cache_path(rel_path_dir), exist_ok=True)
         # Now pull in the file
         dest = self._get_cache_path(rel_path)
+        auth_token = self._get_token(**kwargs)
         file_ok = self.rucio_broker.download(rel_path, dest, auth_token)
         self._fix_permissions(self._get_cache_path(rel_path_dir))
         return file_ok
diff --git a/lib/galaxy/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py b/lib/galaxy/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py
index e2ed787bdba2..02272ddfe06f 100644
--- a/lib/galaxy/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py
+++ b/lib/galaxy/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py
@@ -192,7 +192,9 @@ def reset_metadata_on_selected_repositories(self, user, **kwd):
             status = "error"
         return message, status
 
-    def set_repository(self, repository):
+    def set_repository(
+        self, repository, relative_install_dir: Optional[str] = None, changeset_revision: Optional[str] = None
+    ):
         super().set_repository(repository)
         self.repository_clone_url = common_util.generate_clone_url_for_installed_repository(self.app, repository)
 
diff --git a/lib/galaxy/tool_util/unittest_utils/parameters.py b/lib/galaxy/tool_util/unittest_utils/parameters.py
index d3be68b7cca2..71738b5f4694 100644
--- a/lib/galaxy/tool_util/unittest_utils/parameters.py
+++ b/lib/galaxy/tool_util/unittest_utils/parameters.py
@@ -1,5 +1,4 @@
 import os
-from typing import List
 
 from galaxy.tool_util.parameters import (
     from_input_source,
@@ -11,7 +10,6 @@
 
 
 class ParameterBundle(ToolParameterBundle):
-    input_models: List[ToolParameterT]
 
     def __init__(self, parameter: ToolParameterT):
         self.input_models = [parameter]
diff --git a/lib/galaxy/tools/error_reports/plugins/__init__.py b/lib/galaxy/tools/error_reports/plugins/__init__.py
index 97de6441cb30..08e03097775e 100644
--- a/lib/galaxy/tools/error_reports/plugins/__init__.py
+++ b/lib/galaxy/tools/error_reports/plugins/__init__.py
@@ -17,6 +17,6 @@ class ErrorPlugin(metaclass=ABCMeta):
     def plugin_type(self):
         """Short string providing labelling this plugin"""
 
-    def submit_report(self, dataset, job, tool, user_submission=False, **kwargs):
+    def submit_report(self, dataset, job, tool, **kwargs):
         """Submit the bug report and render a string to be displayed to the user."""
         return None
diff --git a/lib/galaxy/tools/error_reports/plugins/slack.py b/lib/galaxy/tools/error_reports/plugins/slack.py
index 53affb80bc1a..3c4f273ed034 100644
--- a/lib/galaxy/tools/error_reports/plugins/slack.py
+++ b/lib/galaxy/tools/error_reports/plugins/slack.py
@@ -17,7 +17,7 @@
 
 
 class SlackPlugin(BaseGitPlugin):
-    """Send error report to Sentry."""
+    """Send error report to Slack."""
 
     plugin_type = "slack"
 
diff --git a/lib/galaxy/util/__init__.py b/lib/galaxy/util/__init__.py
index 607207eb872c..b8132c4bc955 100644
--- a/lib/galaxy/util/__init__.py
+++ b/lib/galaxy/util/__init__.py
@@ -1134,7 +1134,7 @@ def commaify(amount):
 
 
 @overload
-def unicodify(  # type: ignore[overload-overlap]
+def unicodify(
     value: Literal[None],
     encoding: str = DEFAULT_ENCODING,
     error: str = "replace",
diff --git a/lib/galaxy/util/odict.py b/lib/galaxy/util/odict.py
index aeb66cd20039..4b3fdbc2dc7f 100644
--- a/lib/galaxy/util/odict.py
+++ b/lib/galaxy/util/odict.py
@@ -71,10 +71,6 @@ def setdefault(self, key, failobj=None):
             self._keys.append(key)
         return UserDict.setdefault(self, key, failobj)
 
-    def update(self, dict):
-        for key, val in dict.items():
-            self.__setitem__(key, val)
-
     def values(self):
         return map(self.get, self._keys)
 
diff --git a/lib/galaxy_test/driver/driver_util.py b/lib/galaxy_test/driver/driver_util.py
index 77ab88ca0682..3c5121f99640 100644
--- a/lib/galaxy_test/driver/driver_util.py
+++ b/lib/galaxy_test/driver/driver_util.py
@@ -799,7 +799,7 @@ def __init__(self):
         self.server_wrappers: List[ServerWrapper] = []
         self.temp_directories: List[str] = []
 
-    def setup(self, config_object=None) -> None:
+    def setup(self) -> None:
         """Called before tests are built."""
 
     def tear_down(self) -> None:
diff --git a/lib/galaxy_test/selenium/test_workflow_editor.py b/lib/galaxy_test/selenium/test_workflow_editor.py
index be5433ef3f18..ececfacac313 100644
--- a/lib/galaxy_test/selenium/test_workflow_editor.py
+++ b/lib/galaxy_test/selenium/test_workflow_editor.py
@@ -1481,12 +1481,6 @@ def workflow_index_open_with_name(self, name):
         self.workflow_index_search_for(name)
         self.components.workflows.edit_button.wait_for_and_click()
 
-    def workflow_upload_yaml_with_random_name(self, content):
-        workflow_populator = self.workflow_populator
-        name = self._get_random_name()
-        workflow_populator.upload_yaml_workflow(content, name=name)
-        return name
-
     @retry_assertion_during_transitions
     def assert_wf_name_is(self, expected_name):
         edit_name_element = self.components.workflow_editor.edit_name.wait_for_visible()
diff --git a/lib/tool_shed/metadata/repository_metadata_manager.py b/lib/tool_shed/metadata/repository_metadata_manager.py
index 31a1fc45cac0..1f9b817e0230 100644
--- a/lib/tool_shed/metadata/repository_metadata_manager.py
+++ b/lib/tool_shed/metadata/repository_metadata_manager.py
@@ -957,9 +957,11 @@ def reset_metadata_on_selected_repositories(self, **kwd):
             status = "error"
         return message, status
 
-    def set_repository(self, repository, repository_clone_url=None):
+    def set_repository(
+        self, repository, relative_install_dir: Optional[str] = None, changeset_revision: Optional[str] = None
+    ):
         super().set_repository(repository)
-        self.repository_clone_url = repository_clone_url or common_util.generate_clone_url_for(self.trans, repository)
+        self.repository_clone_url = relative_install_dir or common_util.generate_clone_url_for(self.trans, repository)
 
     def set_repository_metadata(self, host, content_alert_str="", **kwd):
         """
diff --git a/lib/tool_shed/test/base/driver.py b/lib/tool_shed/test/base/driver.py
index a4fa67efdfb5..5fe5280d786c 100644
--- a/lib/tool_shed/test/base/driver.py
+++ b/lib/tool_shed/test/base/driver.py
@@ -66,7 +66,7 @@ def build_shed_app(simple_kwargs):
 class ToolShedTestDriver(driver_util.TestDriver):
     """Instantiate a Galaxy-style TestDriver for testing the tool shed."""
 
-    def setup(self):
+    def setup(self) -> None:
         """Entry point for test driver script."""
         self.external_shed = bool(os.environ.get("TOOL_SHED_TEST_EXTERNAL", None))
         if not self.external_shed:
diff --git a/test/unit/tool_util/test_parameter_specification.py b/test/unit/tool_util/test_parameter_specification.py
index 1ad8332230fc..81ebc33b3017 100644
--- a/test/unit/tool_util/test_parameter_specification.py
+++ b/test/unit/tool_util/test_parameter_specification.py
@@ -14,13 +14,13 @@
     encode,
     RequestInternalToolState,
     RequestToolState,
-    ToolParameterModel,
     validate_internal_job,
     validate_internal_request,
     validate_request,
     validate_test_case,
 )
 from galaxy.tool_util.parameters.json import to_json_schema_string
+from galaxy.tool_util.parameters.models import ToolParameterT
 from galaxy.tool_util.unittest_utils.parameters import (
     parameter_bundle,
     parameter_bundle_for_file,
@@ -85,12 +85,12 @@ def _test_file(file: str, specification=None):
         _assert_internal_requests_invalid(tool_parameter_model, combos["request_invalid"])
 
 
-def _for_each(test: Callable, parameter: ToolParameterModel, requests: List[Dict[str, Any]]) -> None:
+def _for_each(test: Callable, parameter: ToolParameterT, requests: List[Dict[str, Any]]) -> None:
     for request in requests:
         test(parameter, request)
 
 
-def _assert_request_validates(parameter, request) -> None:
+def _assert_request_validates(parameter: ToolParameterT, request: Dict[str, Any]) -> None:
     try:
         validate_request(parameter_bundle(parameter), request)
     except RequestParameterInvalidException as e:
diff --git a/test/unit/webapps/test_service_base.py b/test/unit/webapps/test_service_base.py
index 993b841c133e..4a8f351af080 100644
--- a/test/unit/webapps/test_service_base.py
+++ b/test/unit/webapps/test_service_base.py
@@ -13,7 +13,7 @@ def __init__(self, expected_filename: str, expected_mime_type: str) -> None:
         self.expected_filename = expected_filename
         self.expected_mime_type = expected_mime_type
 
-    def new_target(self, filename, mime_type):
+    def new_target(self, filename, mime_type, duration=None, security=None):
         assert filename == self.expected_filename
         assert mime_type == self.expected_mime_type