From bef8eece4b01342f0138928be3ad0930b2a8ebc6 Mon Sep 17 00:00:00 2001 From: Sviatoslav Sydorenko Date: Fri, 25 Jul 2025 03:10:10 +0200 Subject: [PATCH] Type-annotate ansible.galaxy.dependency_resolution This patch is a combination of `pyrefly autotype` and manual post-processing. Parts of it migrate pre-existing comment-based annotations, fixing incorrect ones where applicable. The change also configures MyPy to run checks against actual `resolvelib` annotations and includes a small tweak of `ansible.galaxy.collection._resolve_depenency_map` to make it compatible with those. Co-Authored-By: Jordan Borean Co-Authored-By: Matt Clay Co-Authored-By: Sloane Hertel <19572925+s-hertel@users.noreply.github.com> (cherry picked from commit c9131aa847dfb72feec4ea9e45cdb2ad3ab36b94) --- lib/ansible/galaxy/collection/__init__.py | 11 +- .../galaxy/dependency_resolution/__init__.py | 19 +-- .../dependency_resolution/dataclasses.py | 146 +++++++++++------- .../galaxy/dependency_resolution/providers.py | 50 +++--- .../dependency_resolution/versioning.py | 6 +- test/sanity/code-smell/mypy.requirements.in | 1 + test/sanity/code-smell/mypy.requirements.txt | 1 + test/sanity/code-smell/mypy/ansible-core.ini | 3 - 8 files changed, 139 insertions(+), 98 deletions(-) diff --git a/lib/ansible/galaxy/collection/__init__.py b/lib/ansible/galaxy/collection/__init__.py index 4056f0c177e..5656227e7e1 100644 --- a/lib/ansible/galaxy/collection/__init__.py +++ b/lib/ansible/galaxy/collection/__init__.py @@ -1839,10 +1839,13 @@ def _resolve_depenency_map( offline=offline, ) try: - return collection_dep_resolver.resolve( - requested_requirements, - max_rounds=2000000, # NOTE: same constant pip uses - ).mapping + return t.cast( + dict[str, Candidate], + collection_dep_resolver.resolve( + requested_requirements, + max_rounds=2000000, # NOTE: same constant pip uses + ).mapping, + ) except CollectionDependencyResolutionImpossible as dep_exc: conflict_causes = ( '* {req.fqcn!s}:{req.ver!s} ({dep_origin!s})'.format( diff --git a/lib/ansible/galaxy/dependency_resolution/__init__.py b/lib/ansible/galaxy/dependency_resolution/__init__.py index 2e8ef147723..a165352955f 100644 --- a/lib/ansible/galaxy/dependency_resolution/__init__.py +++ b/lib/ansible/galaxy/dependency_resolution/__init__.py @@ -5,6 +5,7 @@ from __future__ import annotations +import collections.abc as _c import typing as t if t.TYPE_CHECKING: @@ -21,15 +22,15 @@ from ansible.galaxy.dependency_resolution.resolvers import CollectionDependencyR def build_collection_dependency_resolver( - galaxy_apis, # type: t.Iterable[GalaxyAPI] - concrete_artifacts_manager, # type: ConcreteArtifactsManager - preferred_candidates=None, # type: t.Iterable[Candidate] - with_deps=True, # type: bool - with_pre_releases=False, # type: bool - upgrade=False, # type: bool - include_signatures=True, # type: bool - offline=False, # type: bool -): # type: (...) -> CollectionDependencyResolver + galaxy_apis: _c.Iterable[GalaxyAPI], + concrete_artifacts_manager: ConcreteArtifactsManager, + preferred_candidates: _c.Iterable[Candidate] | None = None, + with_deps: bool = True, + with_pre_releases: bool = False, + upgrade: bool = False, + include_signatures: bool = True, + offline: bool = False, +) -> CollectionDependencyResolver: """Return a collection dependency resolver. 
The returned instance will have a ``resolve()`` method for diff --git a/lib/ansible/galaxy/dependency_resolution/dataclasses.py b/lib/ansible/galaxy/dependency_resolution/dataclasses.py index 5fe66c16f1c..389eef7122a 100644 --- a/lib/ansible/galaxy/dependency_resolution/dataclasses.py +++ b/lib/ansible/galaxy/dependency_resolution/dataclasses.py @@ -6,12 +6,12 @@ from __future__ import annotations +import collections.abc as _c import os import pathlib import typing as t from collections import namedtuple -from collections.abc import MutableSequence, MutableMapping from glob import iglob from urllib.parse import urlparse from yaml import safe_load @@ -43,7 +43,12 @@ _SOURCE_METADATA_FILE = b'GALAXY.yml' display = Display() -def get_validated_source_info(b_source_info_path, namespace, name, version): +def get_validated_source_info( + b_source_info_path: bytes, + namespace: str, + name: str, + version: str, +) -> dict[str, object] | None: source_info_path = to_text(b_source_info_path, errors='surrogate_or_strict') if not os.path.isfile(b_source_info_path): @@ -58,7 +63,7 @@ def get_validated_source_info(b_source_info_path, namespace, name, version): ) return None - if not isinstance(metadata, MutableMapping): + if not isinstance(metadata, dict): display.warning(f"Error getting collection source information at '{source_info_path}': expected a YAML dictionary") return None @@ -72,7 +77,12 @@ def get_validated_source_info(b_source_info_path, namespace, name, version): return metadata -def _validate_v1_source_info_schema(namespace, name, version, provided_arguments): +def _validate_v1_source_info_schema( + namespace: str, + name: str, + version: str, + provided_arguments: dict[str, object], +) -> list[str]: argument_spec_data = dict( format_version=dict(choices=["1.0.0"]), download_url=dict(), @@ -102,24 +112,24 @@ def _validate_v1_source_info_schema(namespace, name, version, provided_arguments return validation_result.error_messages -def _is_collection_src_dir(dir_path): +def _is_collection_src_dir(dir_path: bytes | str) -> bool: b_dir_path = to_bytes(dir_path, errors='surrogate_or_strict') return os.path.isfile(os.path.join(b_dir_path, _GALAXY_YAML)) -def _is_installed_collection_dir(dir_path): +def _is_installed_collection_dir(dir_path: bytes | str) -> bool: b_dir_path = to_bytes(dir_path, errors='surrogate_or_strict') return os.path.isfile(os.path.join(b_dir_path, _MANIFEST_JSON)) -def _is_collection_dir(dir_path): +def _is_collection_dir(dir_path: bytes | str) -> bool: return ( _is_installed_collection_dir(dir_path) or _is_collection_src_dir(dir_path) ) -def _find_collections_in_subdirs(dir_path): +def _find_collections_in_subdirs(dir_path: str) -> _c.Iterator[bytes]: b_dir_path = to_bytes(dir_path, errors='surrogate_or_strict') subdir_glob_pattern = os.path.join( @@ -135,23 +145,23 @@ def _find_collections_in_subdirs(dir_path): yield subdir -def _is_collection_namespace_dir(tested_str): +def _is_collection_namespace_dir(tested_str: str) -> bool: return any(_find_collections_in_subdirs(tested_str)) -def _is_file_path(tested_str): +def _is_file_path(tested_str: str) -> bool: return os.path.isfile(to_bytes(tested_str, errors='surrogate_or_strict')) -def _is_http_url(tested_str): +def _is_http_url(tested_str: str) -> bool: return urlparse(tested_str).scheme.lower() in {'http', 'https'} -def _is_git_url(tested_str): +def _is_git_url(tested_str: str) -> bool: return tested_str.startswith(('git+', 'git@')) -def _is_concrete_artifact_pointer(tested_str): +def 
_is_concrete_artifact_pointer(tested_str: str) -> bool: return any( predicate(tested_str) for predicate in ( @@ -168,7 +178,7 @@ def _is_concrete_artifact_pointer(tested_str): class _ComputedReqKindsMixin: UNIQUE_ATTRS = ('fqcn', 'ver', 'src', 'type') - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: if not self.may_have_offline_galaxy_info: self._source_info = None else: @@ -181,18 +191,18 @@ class _ComputedReqKindsMixin: self.ver ) - def __hash__(self): + def __hash__(self) -> int: return hash(tuple(getattr(self, attr) for attr in _ComputedReqKindsMixin.UNIQUE_ATTRS)) - def __eq__(self, candidate): + def __eq__(self, candidate: _c.Hashable) -> bool: return hash(self) == hash(candidate) @classmethod - def from_dir_path_as_unknown( # type: ignore[misc] - cls, # type: t.Type[Collection] - dir_path, # type: bytes - art_mgr, # type: ConcreteArtifactsManager - ): # type: (...) -> Collection + def from_dir_path_as_unknown( + cls, + dir_path: bytes, + art_mgr: ConcreteArtifactsManager, + ) -> t.Self: """Make collection from an unspecified dir type. This alternative constructor attempts to grab metadata from the @@ -215,11 +225,11 @@ class _ComputedReqKindsMixin: return cls.from_dir_path_implicit(dir_path) @classmethod - def from_dir_path( # type: ignore[misc] - cls, # type: t.Type[Collection] - dir_path, # type: bytes - art_mgr, # type: ConcreteArtifactsManager - ): # type: (...) -> Collection + def from_dir_path( + cls, + dir_path: bytes, + art_mgr: ConcreteArtifactsManager, + ) -> t.Self: """Make collection from an directory with metadata.""" if dir_path.endswith(to_bytes(os.path.sep)): dir_path = dir_path.rstrip(to_bytes(os.path.sep)) @@ -262,10 +272,10 @@ class _ComputedReqKindsMixin: return cls(req_name, req_version, dir_path, 'dir', None) @classmethod - def from_dir_path_implicit( # type: ignore[misc] - cls, # type: t.Type[Collection] - dir_path, # type: bytes - ): # type: (...) -> Collection + def from_dir_path_implicit( + cls, + dir_path: bytes, + ) -> t.Self: """Construct a collection instance based on an arbitrary dir. 
This alternative constructor infers the FQCN based on the parent @@ -278,11 +288,16 @@ class _ComputedReqKindsMixin: u_dir_path = to_text(dir_path, errors='surrogate_or_strict') path_list = u_dir_path.split(os.path.sep) req_name = '.'.join(path_list[-2:]) - return cls(req_name, '*', dir_path, 'dir', None) # type: ignore[call-arg] + return cls(req_name, '*', dir_path, 'dir', None) @classmethod - def from_string(cls, collection_input, artifacts_manager, supplemental_signatures): - req = {} + def from_string( + cls, + collection_input: str, + artifacts_manager: ConcreteArtifactsManager, + supplemental_signatures: list[str] | None, + ) -> t.Self: + req: dict[str, str | list[str] | None] = {} if _is_concrete_artifact_pointer(collection_input) or AnsibleCollectionRef.is_valid_collection_name(collection_input): # Arg is a file path or URL to a collection, or just a collection req['name'] = collection_input @@ -307,7 +322,14 @@ class _ComputedReqKindsMixin: return cls.from_requirement_dict(req, artifacts_manager) @classmethod - def from_requirement_dict(cls, collection_req, art_mgr, validate_signature_options=True): + def from_requirement_dict( + cls, + # NOTE: The actual `collection_req` shape is supposed to be + # NOTE: `dict[str, str | list[str] | None]` + collection_req: dict[str, t.Any], + art_mgr: ConcreteArtifactsManager, + validate_signature_options: bool = True, + ) -> t.Self: req_name = collection_req.get('name', None) req_version = collection_req.get('version', '*') req_type = collection_req.get('type') @@ -320,7 +342,7 @@ class _ComputedReqKindsMixin: f"Signatures were provided to verify {req_name} but no keyring was configured." ) - if not isinstance(req_signature_sources, MutableSequence): + if not isinstance(req_signature_sources, _c.MutableSequence): req_signature_sources = [req_signature_sources] req_signature_sources = frozenset(req_signature_sources) @@ -434,7 +456,11 @@ class _ComputedReqKindsMixin: format(not_url=req_source.api_server), ) - if req_type == 'dir' and req_source.endswith(os.path.sep): + if ( + req_type == 'dir' + and isinstance(req_source, str) + and req_source.endswith(os.path.sep) + ): req_source = req_source.rstrip(os.path.sep) tmp_inst_req = cls(req_name, req_version, req_source, req_type, req_signature_sources) @@ -451,16 +477,16 @@ class _ComputedReqKindsMixin: req_signature_sources, ) - def __repr__(self): + def __repr__(self) -> str: return ( '<{self!s} of type {coll_type!r} from {src!s}>'. 
format(self=self, coll_type=self.type, src=self.src or 'Galaxy') ) - def __str__(self): + def __str__(self) -> str: return to_native(self.__unicode__()) - def __unicode__(self): + def __unicode__(self) -> str: if self.fqcn is None: return ( f'{self.type} collection from a Git repo' if self.is_scm @@ -473,7 +499,7 @@ class _ComputedReqKindsMixin: ) @property - def may_have_offline_galaxy_info(self): + def may_have_offline_galaxy_info(self) -> bool: if self.fqcn is None: # Virtual collection return False @@ -482,7 +508,7 @@ class _ComputedReqKindsMixin: return False return True - def construct_galaxy_info_path(self, b_collection_path): + def construct_galaxy_info_path(self, b_collection_path: bytes) -> bytes: if not self.may_have_offline_galaxy_info and not self.type == 'galaxy': raise TypeError('Only installed collections from a Galaxy server have offline Galaxy info') @@ -502,21 +528,21 @@ class _ComputedReqKindsMixin: return self.fqcn.split('.') @property - def namespace(self): + def namespace(self) -> str: if self.is_virtual: raise TypeError(f'{self.type} collections do not have a namespace') return self._get_separate_ns_n_name()[0] @property - def name(self): + def name(self) -> str: if self.is_virtual: raise TypeError(f'{self.type} collections do not have a name') return self._get_separate_ns_n_name()[-1] @property - def canonical_package_id(self): + def canonical_package_id(self) -> str: if not self.is_virtual: return to_native(self.fqcn) @@ -526,46 +552,46 @@ class _ComputedReqKindsMixin: ) @property - def is_virtual(self): + def is_virtual(self) -> bool: return self.is_scm or self.is_subdirs @property - def is_file(self): + def is_file(self) -> bool: return self.type == 'file' @property - def is_dir(self): + def is_dir(self) -> bool: return self.type == 'dir' @property - def namespace_collection_paths(self): + def namespace_collection_paths(self) -> list[str]: return [ to_native(path) for path in _find_collections_in_subdirs(self.src) ] @property - def is_subdirs(self): + def is_subdirs(self) -> bool: return self.type == 'subdirs' @property - def is_url(self): + def is_url(self) -> bool: return self.type == 'url' @property - def is_scm(self): + def is_scm(self) -> bool: return self.type == 'git' @property - def is_concrete_artifact(self): + def is_concrete_artifact(self) -> bool: return self.type in {'git', 'url', 'file', 'dir', 'subdirs'} @property - def is_online_index_pointer(self): + def is_online_index_pointer(self) -> bool: return not self.is_concrete_artifact @property - def is_pinned(self): + def is_pinned(self) -> bool: """Indicate if the version set is considered pinned. 
This essentially computes whether the version field of the current @@ -585,7 +611,7 @@ class _ComputedReqKindsMixin: ) @property - def source_info(self): + def source_info(self) -> dict[str, object] | None: return self._source_info @@ -601,11 +627,11 @@ class Requirement( ): """An abstract requirement request.""" - def __new__(cls, *args, **kwargs): + def __new__(cls, *args: object, **kwargs: object) -> t.Self: self = RequirementNamedTuple.__new__(cls, *args, **kwargs) return self - def __init__(self, *args, **kwargs): + def __init__(self, *args: object, **kwargs: object) -> None: super(Requirement, self).__init__() @@ -615,14 +641,14 @@ class Candidate( ): """A concrete collection candidate with its version resolved.""" - def __new__(cls, *args, **kwargs): + def __new__(cls, *args: object, **kwargs: object) -> t.Self: self = CandidateNamedTuple.__new__(cls, *args, **kwargs) return self - def __init__(self, *args, **kwargs): + def __init__(self, *args: object, **kwargs: object) -> None: super(Candidate, self).__init__() - def with_signatures_repopulated(self): # type: (Candidate) -> Candidate + def with_signatures_repopulated(self) -> Candidate: """Populate a new Candidate instance with Galaxy signatures. :raises AnsibleAssertionError: If the supplied candidate is not sourced from a Galaxy-like index. """ diff --git a/lib/ansible/galaxy/dependency_resolution/providers.py b/lib/ansible/galaxy/dependency_resolution/providers.py index 1df0e9b7af2..8d4252aeca4 100644 --- a/lib/ansible/galaxy/dependency_resolution/providers.py +++ b/lib/ansible/galaxy/dependency_resolution/providers.py @@ -16,6 +16,8 @@ if t.TYPE_CHECKING: from ansible.galaxy.collection.galaxy_api_proxy import MultiGalaxyAPIProxy from ansible.galaxy.api import GalaxyAPI + from resolvelib.structs import RequirementInformation + from ansible.galaxy.collection.gpg import get_signature_from_source from ansible.galaxy.dependency_resolution.dataclasses import ( Candidate, @@ -48,14 +50,14 @@ class CollectionDependencyProvider(AbstractProvider): def __init__( self, - apis, # type: MultiGalaxyAPIProxy - concrete_artifacts_manager=None, # type: ConcreteArtifactsManager - preferred_candidates=None, # type: t.Iterable[Candidate] - with_deps=True, # type: bool - with_pre_releases=False, # type: bool - upgrade=False, # type: bool - include_signatures=True, # type: bool - ): # type: (...) -> None + apis: MultiGalaxyAPIProxy, + concrete_artifacts_manager: ConcreteArtifactsManager | None = None, + preferred_candidates: _c.Iterable[Candidate] | None = None, + with_deps: bool = True, + with_pre_releases: bool = False, + upgrade: bool = False, + include_signatures: bool = True, + ) -> None: r"""Initialize helper attributes. :param api: An instance of the multiple Galaxy APIs wrapper. @@ -91,8 +93,10 @@ class CollectionDependencyProvider(AbstractProvider): self._upgrade = upgrade self._include_signatures = include_signatures - def identify(self, requirement_or_candidate): - # type: (t.Union[Candidate, Requirement]) -> str + def identify( + self, + requirement_or_candidate: Candidate | Requirement, + ) -> str: """Given requirement or candidate, return an identifier for it. This is used to identify a requirement or candidate, e.g. 
@@ -108,8 +112,13 @@ class CollectionDependencyProvider(AbstractProvider): identifier: str, resolutions: _c.Mapping[str, Candidate], candidates: _c.Mapping[str, _c.Iterator[Candidate]], - information: _c.Iterator[t.NamedTuple], - backtrack_causes: _c.Sequence, + information: _c.Mapping[ + str, + _c.Iterator[RequirementInformation[Requirement, Candidate]], + ], + backtrack_causes: _c.Sequence[ + RequirementInformation[Requirement, Candidate], + ], ) -> float | int: """Return sort key function return value for given requirement. @@ -205,7 +214,10 @@ class CollectionDependencyProvider(AbstractProvider): all(self.is_satisfied_by(requirement, candidate) for requirement in requirements) } try: - coll_versions = [] if preinstalled_candidates else self._api_proxy.get_collection_versions(first_req) # type: t.Iterable[t.Tuple[str, GalaxyAPI]] + coll_versions: _c.Iterable[tuple[str, GalaxyAPI]] = ( + [] if preinstalled_candidates + else self._api_proxy.get_collection_versions(first_req) + ) except TypeError as exc: if first_req.is_concrete_artifact: # Non hashable versions will cause a TypeError @@ -248,7 +260,7 @@ class CollectionDependencyProvider(AbstractProvider): latest_matches = [] signatures = [] - extra_signature_sources = [] # type: list[str] + extra_signature_sources: list[str] = [] discarding_pre_releases_acceptable = any( not is_pre_release(candidate_version) @@ -353,8 +365,11 @@ class CollectionDependencyProvider(AbstractProvider): return list(preinstalled_candidates) + latest_matches - def is_satisfied_by(self, requirement, candidate): - # type: (Requirement, Candidate) -> bool + def is_satisfied_by( + self, + requirement: Requirement, + candidate: Candidate, + ) -> bool: r"""Whether the given requirement is satisfiable by a candidate. :param requirement: A requirement that produced the `candidate`. @@ -380,8 +395,7 @@ class CollectionDependencyProvider(AbstractProvider): requirements=requirement.ver, ) - def get_dependencies(self, candidate): - # type: (Candidate) -> list[Candidate] + def get_dependencies(self, candidate: Candidate) -> list[Requirement]: r"""Get direct dependencies of a candidate. :returns: A collection of requirements that `candidate` \ diff --git a/lib/ansible/galaxy/dependency_resolution/versioning.py b/lib/ansible/galaxy/dependency_resolution/versioning.py index 74f956cf1e8..a9786781781 100644 --- a/lib/ansible/galaxy/dependency_resolution/versioning.py +++ b/lib/ansible/galaxy/dependency_resolution/versioning.py @@ -11,8 +11,7 @@ from ansible.module_utils.compat.version import LooseVersion from ansible.utils.version import SemanticVersion -def is_pre_release(version): - # type: (str) -> bool +def is_pre_release(version: str) -> bool: """Figure out if a given version is a pre-release.""" try: return SemanticVersion(version).is_prerelease @@ -20,8 +19,7 @@ def is_pre_release(version): return False -def meets_requirements(version, requirements): - # type: (str, str) -> bool +def meets_requirements(version: str, requirements: str) -> bool: """Verify if a given version satisfies all the requirements. 
Supported version identifiers are: diff --git a/test/sanity/code-smell/mypy.requirements.in b/test/sanity/code-smell/mypy.requirements.in index be42056b3ad..150adb1b970 100644 --- a/test/sanity/code-smell/mypy.requirements.in +++ b/test/sanity/code-smell/mypy.requirements.in @@ -4,6 +4,7 @@ jinja2 # type stubs not published separately packaging # type stubs not published separately pytest # type stubs not published separately pytest-mock # type stubs not published separately +resolvelib # type stubs not published separately tomli # type stubs not published separately, required for toml inventory plugin types-backports types-paramiko diff --git a/test/sanity/code-smell/mypy.requirements.txt b/test/sanity/code-smell/mypy.requirements.txt index 36f2409c5c8..2027c540738 100644 --- a/test/sanity/code-smell/mypy.requirements.txt +++ b/test/sanity/code-smell/mypy.requirements.txt @@ -13,6 +13,7 @@ pycparser==2.22 Pygments==2.19.2 pytest==8.4.2 pytest-mock==3.15.0 +resolvelib==1.2.0 tomli==2.2.1 types-backports==0.1.3 types-paramiko==4.0.0.20250822 diff --git a/test/sanity/code-smell/mypy/ansible-core.ini b/test/sanity/code-smell/mypy/ansible-core.ini index 7703e800387..7fca8aeb02f 100644 --- a/test/sanity/code-smell/mypy/ansible-core.ini +++ b/test/sanity/code-smell/mypy/ansible-core.ini @@ -104,9 +104,6 @@ ignore_missing_imports = True [mypy-distro.*] ignore_missing_imports = True -[mypy-resolvelib.*] -ignore_missing_imports = True - [mypy-urlparse.*] ignore_missing_imports = True
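Illustrative notes (not part of the patch):

The t.cast() added to _resolve_depenency_map above reconciles the helper's
declared dict[str, Candidate] return type with resolvelib's Result.mapping
attribute, which -- assuming the resolvelib annotations this patch enables
expose it through an abstract mapping type rather than a concrete dict --
would otherwise fail the MyPy check. A minimal, self-contained sketch of that
pattern follows; DemoCandidate, _resolve and resolve_demo_map are hypothetical
names used only for illustration and are not part of ansible-core:

from __future__ import annotations

import collections.abc as _c
import typing as t


class DemoCandidate:
    """Stand-in for the Candidate dataclass used by the real resolver."""


def _resolve(reqs: _c.Iterable[str]) -> _c.Mapping[str, DemoCandidate]:
    # Stand-in for the resolver call: the result is exposed through an
    # abstract Mapping annotation, even though a dict is built underneath.
    return {name: DemoCandidate() for name in reqs}


def resolve_demo_map(reqs: _c.Iterable[str]) -> dict[str, DemoCandidate]:
    # t.cast() is a no-op at runtime; it only records the narrowing for the
    # type checker, mirroring the tweak in collection/__init__.py above.
    return t.cast(dict[str, DemoCandidate], _resolve(reqs))


The classmethod annotations in dataclasses.py swap the old
"# type: (...) -> Collection" comments (and their "type: ignore[misc]"
suppressions) for "-> t.Self". The hypothetical sketch below, with made-up
DemoRequirement/DemoPinnedRequirement classes, shows why t.Self suits shared
alternative constructors; typing.Self is in the standard library from
Python 3.11, and the lazy-annotations future import keeps the snippet
importable on older interpreters:

from __future__ import annotations

import typing as t


class DemoRequirement:
    def __init__(self, fqcn: str, ver: str) -> None:
        self.fqcn = fqcn
        self.ver = ver

    @classmethod
    def from_string(cls, spec: str) -> t.Self:
        # t.Self binds to whichever class the constructor is invoked on, so
        # a subclass gets the right return type without per-class overrides
        # or ignore comments.
        fqcn, _, ver = spec.partition(':')
        return cls(fqcn, ver or '*')


class DemoPinnedRequirement(DemoRequirement):
    """Subclass returned as-is by the inherited alternative constructor."""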