Type-annotate ansible.galaxy.dependency_resolution

This patch combines the output of `pyrefly autotype` with manual
post-processing. Parts of it migrate pre-existing comment-based
annotations, fixing incorrect ones where applicable.
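
As an illustration of that migration, comment-style signatures become inline
annotations along these lines (a minimal sketch with invented names, not code
taken from the patch itself):

```python
import collections.abc as _c
import typing as t  # only needed so the legacy type comment below resolves


# Before: comment-based annotations using the legacy `# type:` syntax.
def filter_names(
    names,  # type: t.Iterable[str]
    keep_hidden=False,  # type: bool
):  # type: (...) -> list[str]
    return [name for name in names if keep_hidden or not name.startswith('.')]


# After: the same signature expressed with inline annotations.
def filter_names_annotated(
    names: _c.Iterable[str],
    keep_hidden: bool = False,
) -> list[str]:
    return [name for name in names if keep_hidden or not name.startswith('.')]
```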

The change also configures MyPy to run checks against the actual
`resolvelib` annotations and includes a small tweak to
`ansible.galaxy.collection._resolve_depenency_map` to make it
compatible with them.
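
Conceptually, that tweak just narrows the loosely-typed result mapping
`resolvelib` returns, as in this minimal sketch (the `Candidate` placeholder
and the `resolve_dependency_map` wrapper are illustrative, not the actual
ansible-core code):

```python
import typing as t


class Candidate:  # placeholder for ansible-core's Candidate named tuple
    ...


def resolve_dependency_map(resolver: t.Any, requested: t.Any) -> dict[str, Candidate]:
    # resolvelib types Result.mapping more loosely than the concrete
    # Candidate mapping the caller promises to return, so the value is
    # narrowed with t.cast() -- a hint for the type checker, not a runtime
    # conversion.
    return t.cast(
        dict[str, Candidate],
        resolver.resolve(requested, max_rounds=2000000).mapping,
    )
```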

Co-Authored-By: Jordan Borean <jborean93@gmail.com>
Co-Authored-By: Matt Clay <matt@mystile.com>
Co-Authored-By: Sloane Hertel <19572925+s-hertel@users.noreply.github.com>
pull/85957/head
Authored by Sviatoslav Sydorenko; committed by 🇺🇦 Sviatoslav Sydorenko (Святослав Сидоренко)
parent feda8fc564
commit c9131aa847

@@ -1839,10 +1839,13 @@ def _resolve_depenency_map(
         offline=offline,
     )
     try:
-        return collection_dep_resolver.resolve(
-            requested_requirements,
-            max_rounds=2000000,  # NOTE: same constant pip uses
-        ).mapping
+        return t.cast(
+            dict[str, Candidate],
+            collection_dep_resolver.resolve(
+                requested_requirements,
+                max_rounds=2000000,  # NOTE: same constant pip uses
+            ).mapping,
+        )
     except CollectionDependencyResolutionImpossible as dep_exc:
         conflict_causes = (
             '* {req.fqcn!s}:{req.ver!s} ({dep_origin!s})'.format(

@@ -5,6 +5,7 @@
 from __future__ import annotations

+import collections.abc as _c
 import typing as t

 if t.TYPE_CHECKING:
@@ -21,15 +22,15 @@ from ansible.galaxy.dependency_resolution.resolvers import CollectionDependencyR
 def build_collection_dependency_resolver(
-    galaxy_apis,  # type: t.Iterable[GalaxyAPI]
-    concrete_artifacts_manager,  # type: ConcreteArtifactsManager
-    preferred_candidates=None,  # type: t.Iterable[Candidate]
-    with_deps=True,  # type: bool
-    with_pre_releases=False,  # type: bool
-    upgrade=False,  # type: bool
-    include_signatures=True,  # type: bool
-    offline=False,  # type: bool
-):  # type: (...) -> CollectionDependencyResolver
+    galaxy_apis: _c.Iterable[GalaxyAPI],
+    concrete_artifacts_manager: ConcreteArtifactsManager,
+    preferred_candidates: _c.Iterable[Candidate] | None = None,
+    with_deps: bool = True,
+    with_pre_releases: bool = False,
+    upgrade: bool = False,
+    include_signatures: bool = True,
+    offline: bool = False,
+) -> CollectionDependencyResolver:
     """Return a collection dependency resolver.

     The returned instance will have a ``resolve()`` method for

@@ -6,12 +6,12 @@
 from __future__ import annotations

+import collections.abc as _c
 import os
 import pathlib
 import typing as t

 from collections import namedtuple
-from collections.abc import MutableSequence, MutableMapping
 from glob import iglob
 from urllib.parse import urlparse
 from yaml import safe_load
@@ -43,7 +43,12 @@ _SOURCE_METADATA_FILE = b'GALAXY.yml'
 display = Display()


-def get_validated_source_info(b_source_info_path, namespace, name, version):
+def get_validated_source_info(
+    b_source_info_path: bytes,
+    namespace: str,
+    name: str,
+    version: str,
+) -> dict[str, object] | None:
     source_info_path = to_text(b_source_info_path, errors='surrogate_or_strict')

     if not os.path.isfile(b_source_info_path):
@@ -58,7 +63,7 @@ def get_validated_source_info(b_source_info_path, namespace, name, version):
         )
         return None

-    if not isinstance(metadata, MutableMapping):
+    if not isinstance(metadata, dict):
         display.warning(f"Error getting collection source information at '{source_info_path}': expected a YAML dictionary")
         return None
@@ -72,7 +77,12 @@ def get_validated_source_info(b_source_info_path, namespace, name, version):
     return metadata


-def _validate_v1_source_info_schema(namespace, name, version, provided_arguments):
+def _validate_v1_source_info_schema(
+    namespace: str,
+    name: str,
+    version: str,
+    provided_arguments: dict[str, object],
+) -> list[str]:
     argument_spec_data = dict(
         format_version=dict(choices=["1.0.0"]),
         download_url=dict(),
@@ -102,24 +112,24 @@ def _validate_v1_source_info_schema(namespace, name, version, provided_arguments
     return validation_result.error_messages


-def _is_collection_src_dir(dir_path):
+def _is_collection_src_dir(dir_path: bytes | str) -> bool:
     b_dir_path = to_bytes(dir_path, errors='surrogate_or_strict')
     return os.path.isfile(os.path.join(b_dir_path, _GALAXY_YAML))


-def _is_installed_collection_dir(dir_path):
+def _is_installed_collection_dir(dir_path: bytes | str) -> bool:
     b_dir_path = to_bytes(dir_path, errors='surrogate_or_strict')
     return os.path.isfile(os.path.join(b_dir_path, _MANIFEST_JSON))


-def _is_collection_dir(dir_path):
+def _is_collection_dir(dir_path: bytes | str) -> bool:
     return (
         _is_installed_collection_dir(dir_path) or
         _is_collection_src_dir(dir_path)
     )


-def _find_collections_in_subdirs(dir_path):
+def _find_collections_in_subdirs(dir_path: str) -> _c.Iterator[bytes]:
     b_dir_path = to_bytes(dir_path, errors='surrogate_or_strict')

     subdir_glob_pattern = os.path.join(
@@ -135,23 +145,23 @@ def _find_collections_in_subdirs(dir_path):
         yield subdir


-def _is_collection_namespace_dir(tested_str):
+def _is_collection_namespace_dir(tested_str: str) -> bool:
     return any(_find_collections_in_subdirs(tested_str))


-def _is_file_path(tested_str):
+def _is_file_path(tested_str: str) -> bool:
     return os.path.isfile(to_bytes(tested_str, errors='surrogate_or_strict'))


-def _is_http_url(tested_str):
+def _is_http_url(tested_str: str) -> bool:
     return urlparse(tested_str).scheme.lower() in {'http', 'https'}


-def _is_git_url(tested_str):
+def _is_git_url(tested_str: str) -> bool:
     return tested_str.startswith(('git+', 'git@'))


-def _is_concrete_artifact_pointer(tested_str):
+def _is_concrete_artifact_pointer(tested_str: str) -> bool:
     return any(
         predicate(tested_str)
         for predicate in (
@@ -168,7 +178,7 @@ def _is_concrete_artifact_pointer(tested_str):
 class _ComputedReqKindsMixin:
     UNIQUE_ATTRS = ('fqcn', 'ver', 'src', 'type')

-    def __init__(self, *args, **kwargs):
+    def __init__(self, *args, **kwargs) -> None:
         if not self.may_have_offline_galaxy_info:
             self._source_info = None
         else:
@@ -181,18 +191,18 @@ class _ComputedReqKindsMixin:
                 self.ver
             )

-    def __hash__(self):
+    def __hash__(self) -> int:
         return hash(tuple(getattr(self, attr) for attr in _ComputedReqKindsMixin.UNIQUE_ATTRS))

-    def __eq__(self, candidate):
+    def __eq__(self, candidate: _c.Hashable) -> bool:
         return hash(self) == hash(candidate)

     @classmethod
-    def from_dir_path_as_unknown(  # type: ignore[misc]
-        cls,  # type: t.Type[Collection]
-        dir_path,  # type: bytes
-        art_mgr,  # type: ConcreteArtifactsManager
-    ):  # type: (...) -> Collection
+    def from_dir_path_as_unknown(
+        cls,
+        dir_path: bytes,
+        art_mgr: ConcreteArtifactsManager,
+    ) -> t.Self:
         """Make collection from an unspecified dir type.

         This alternative constructor attempts to grab metadata from the
@@ -215,11 +225,11 @@ class _ComputedReqKindsMixin:
         return cls.from_dir_path_implicit(dir_path)

     @classmethod
-    def from_dir_path(  # type: ignore[misc]
-        cls,  # type: t.Type[Collection]
-        dir_path,  # type: bytes
-        art_mgr,  # type: ConcreteArtifactsManager
-    ):  # type: (...) -> Collection
+    def from_dir_path(
+        cls,
+        dir_path: bytes,
+        art_mgr: ConcreteArtifactsManager,
+    ) -> t.Self:
         """Make collection from an directory with metadata."""
         if dir_path.endswith(to_bytes(os.path.sep)):
             dir_path = dir_path.rstrip(to_bytes(os.path.sep))
@@ -262,10 +272,10 @@ class _ComputedReqKindsMixin:
         return cls(req_name, req_version, dir_path, 'dir', None)

     @classmethod
-    def from_dir_path_implicit(  # type: ignore[misc]
-        cls,  # type: t.Type[Collection]
-        dir_path,  # type: bytes
-    ):  # type: (...) -> Collection
+    def from_dir_path_implicit(
+        cls,
+        dir_path: bytes,
+    ) -> t.Self:
         """Construct a collection instance based on an arbitrary dir.

         This alternative constructor infers the FQCN based on the parent
@@ -278,11 +288,16 @@ class _ComputedReqKindsMixin:
         u_dir_path = to_text(dir_path, errors='surrogate_or_strict')
         path_list = u_dir_path.split(os.path.sep)
         req_name = '.'.join(path_list[-2:])
-        return cls(req_name, '*', dir_path, 'dir', None)  # type: ignore[call-arg]
+        return cls(req_name, '*', dir_path, 'dir', None)

     @classmethod
-    def from_string(cls, collection_input, artifacts_manager, supplemental_signatures):
-        req = {}
+    def from_string(
+        cls,
+        collection_input: str,
+        artifacts_manager: ConcreteArtifactsManager,
+        supplemental_signatures: list[str] | None,
+    ) -> t.Self:
+        req: dict[str, str | list[str] | None] = {}
         if _is_concrete_artifact_pointer(collection_input) or AnsibleCollectionRef.is_valid_collection_name(collection_input):
             # Arg is a file path or URL to a collection, or just a collection
             req['name'] = collection_input
@@ -307,7 +322,14 @@ class _ComputedReqKindsMixin:
         return cls.from_requirement_dict(req, artifacts_manager)

     @classmethod
-    def from_requirement_dict(cls, collection_req, art_mgr, validate_signature_options=True):
+    def from_requirement_dict(
+        cls,
+        # NOTE: The actual `collection_req` shape is supposed to be
+        # NOTE: `dict[str, str | list[str] | None]`
+        collection_req: dict[str, t.Any],
+        art_mgr: ConcreteArtifactsManager,
+        validate_signature_options: bool = True,
+    ) -> t.Self:
         req_name = collection_req.get('name', None)
         req_version = collection_req.get('version', '*')
         req_type = collection_req.get('type')
@@ -320,7 +342,7 @@ class _ComputedReqKindsMixin:
                 f"Signatures were provided to verify {req_name} but no keyring was configured."
             )

-        if not isinstance(req_signature_sources, MutableSequence):
+        if not isinstance(req_signature_sources, _c.MutableSequence):
             req_signature_sources = [req_signature_sources]
         req_signature_sources = frozenset(req_signature_sources)
@@ -434,7 +456,11 @@ class _ComputedReqKindsMixin:
                 format(not_url=req_source.api_server),
             )

-        if req_type == 'dir' and req_source.endswith(os.path.sep):
+        if (
+                req_type == 'dir'
+                and isinstance(req_source, str)
+                and req_source.endswith(os.path.sep)
+        ):
             req_source = req_source.rstrip(os.path.sep)

         tmp_inst_req = cls(req_name, req_version, req_source, req_type, req_signature_sources)
@@ -451,16 +477,16 @@ class _ComputedReqKindsMixin:
             req_signature_sources,
         )

-    def __repr__(self):
+    def __repr__(self) -> str:
         return (
             '<{self!s} of type {coll_type!r} from {src!s}>'.
             format(self=self, coll_type=self.type, src=self.src or 'Galaxy')
         )

-    def __str__(self):
+    def __str__(self) -> str:
         return to_native(self.__unicode__())

-    def __unicode__(self):
+    def __unicode__(self) -> str:
         if self.fqcn is None:
             return (
                 f'{self.type} collection from a Git repo' if self.is_scm
@@ -473,7 +499,7 @@ class _ComputedReqKindsMixin:
         )

     @property
-    def may_have_offline_galaxy_info(self):
+    def may_have_offline_galaxy_info(self) -> bool:
         if self.fqcn is None:
             # Virtual collection
             return False
@@ -482,7 +508,7 @@ class _ComputedReqKindsMixin:
             return False

         return True

-    def construct_galaxy_info_path(self, b_collection_path):
+    def construct_galaxy_info_path(self, b_collection_path: bytes) -> bytes:
         if not self.may_have_offline_galaxy_info and not self.type == 'galaxy':
             raise TypeError('Only installed collections from a Galaxy server have offline Galaxy info')
@@ -502,21 +528,21 @@ class _ComputedReqKindsMixin:
         return self.fqcn.split('.')

     @property
-    def namespace(self):
+    def namespace(self) -> str:
         if self.is_virtual:
             raise TypeError(f'{self.type} collections do not have a namespace')

         return self._get_separate_ns_n_name()[0]

     @property
-    def name(self):
+    def name(self) -> str:
         if self.is_virtual:
             raise TypeError(f'{self.type} collections do not have a name')

         return self._get_separate_ns_n_name()[-1]

     @property
-    def canonical_package_id(self):
+    def canonical_package_id(self) -> str:
         if not self.is_virtual:
             return to_native(self.fqcn)
@@ -526,46 +552,46 @@ class _ComputedReqKindsMixin:
         )

     @property
-    def is_virtual(self):
+    def is_virtual(self) -> bool:
         return self.is_scm or self.is_subdirs

     @property
-    def is_file(self):
+    def is_file(self) -> bool:
         return self.type == 'file'

     @property
-    def is_dir(self):
+    def is_dir(self) -> bool:
         return self.type == 'dir'

     @property
-    def namespace_collection_paths(self):
+    def namespace_collection_paths(self) -> list[str]:
         return [
             to_native(path)
             for path in _find_collections_in_subdirs(self.src)
         ]

     @property
-    def is_subdirs(self):
+    def is_subdirs(self) -> bool:
         return self.type == 'subdirs'

     @property
-    def is_url(self):
+    def is_url(self) -> bool:
         return self.type == 'url'

     @property
-    def is_scm(self):
+    def is_scm(self) -> bool:
         return self.type == 'git'

     @property
-    def is_concrete_artifact(self):
+    def is_concrete_artifact(self) -> bool:
         return self.type in {'git', 'url', 'file', 'dir', 'subdirs'}

     @property
-    def is_online_index_pointer(self):
+    def is_online_index_pointer(self) -> bool:
         return not self.is_concrete_artifact

     @property
-    def is_pinned(self):
+    def is_pinned(self) -> bool:
         """Indicate if the version set is considered pinned.

         This essentially computes whether the version field of the current
@@ -585,7 +611,7 @@ class _ComputedReqKindsMixin:
         )

     @property
-    def source_info(self):
+    def source_info(self) -> dict[str, object] | None:
         return self._source_info
@@ -601,11 +627,11 @@ class Requirement(
 ):
     """An abstract requirement request."""

-    def __new__(cls, *args, **kwargs):
+    def __new__(cls, *args: object, **kwargs: object) -> t.Self:
         self = RequirementNamedTuple.__new__(cls, *args, **kwargs)

         return self

-    def __init__(self, *args, **kwargs):
+    def __init__(self, *args: object, **kwargs: object) -> None:
         super(Requirement, self).__init__()
@@ -615,14 +641,14 @@ class Candidate(
 ):
     """A concrete collection candidate with its version resolved."""

-    def __new__(cls, *args, **kwargs):
+    def __new__(cls, *args: object, **kwargs: object) -> t.Self:
         self = CandidateNamedTuple.__new__(cls, *args, **kwargs)

         return self

-    def __init__(self, *args, **kwargs):
+    def __init__(self, *args: object, **kwargs: object) -> None:
         super(Candidate, self).__init__()

-    def with_signatures_repopulated(self):  # type: (Candidate) -> Candidate
+    def with_signatures_repopulated(self) -> Candidate:
         """Populate a new Candidate instance with Galaxy signatures.

         :raises AnsibleAssertionError: If the supplied candidate is not sourced from a Galaxy-like index.
         """

@@ -16,6 +16,8 @@ if t.TYPE_CHECKING:
     from ansible.galaxy.collection.galaxy_api_proxy import MultiGalaxyAPIProxy
     from ansible.galaxy.api import GalaxyAPI

+    from resolvelib.structs import RequirementInformation
+
 from ansible.galaxy.collection.gpg import get_signature_from_source
 from ansible.galaxy.dependency_resolution.dataclasses import (
     Candidate,
@@ -48,14 +50,14 @@ class CollectionDependencyProvider(AbstractProvider):
     def __init__(
         self,
-        apis,  # type: MultiGalaxyAPIProxy
-        concrete_artifacts_manager=None,  # type: ConcreteArtifactsManager
-        preferred_candidates=None,  # type: t.Iterable[Candidate]
-        with_deps=True,  # type: bool
-        with_pre_releases=False,  # type: bool
-        upgrade=False,  # type: bool
-        include_signatures=True,  # type: bool
-    ):  # type: (...) -> None
+        apis: MultiGalaxyAPIProxy,
+        concrete_artifacts_manager: ConcreteArtifactsManager | None = None,
+        preferred_candidates: _c.Iterable[Candidate] | None = None,
+        with_deps: bool = True,
+        with_pre_releases: bool = False,
+        upgrade: bool = False,
+        include_signatures: bool = True,
+    ) -> None:
         r"""Initialize helper attributes.

         :param api: An instance of the multiple Galaxy APIs wrapper.
@@ -91,8 +93,10 @@ class CollectionDependencyProvider(AbstractProvider):
         self._upgrade = upgrade
         self._include_signatures = include_signatures

-    def identify(self, requirement_or_candidate):
-        # type: (t.Union[Candidate, Requirement]) -> str
+    def identify(
+        self,
+        requirement_or_candidate: Candidate | Requirement,
+    ) -> str:
         """Given requirement or candidate, return an identifier for it.

         This is used to identify a requirement or candidate, e.g.
@@ -108,8 +112,13 @@ class CollectionDependencyProvider(AbstractProvider):
         identifier: str,
         resolutions: _c.Mapping[str, Candidate],
         candidates: _c.Mapping[str, _c.Iterator[Candidate]],
-        information: _c.Iterator[t.NamedTuple],
-        backtrack_causes: _c.Sequence,
+        information: _c.Mapping[
+            str,
+            _c.Iterator[RequirementInformation[Requirement, Candidate]],
+        ],
+        backtrack_causes: _c.Sequence[
+            RequirementInformation[Requirement, Candidate],
+        ],
     ) -> float | int:
         """Return sort key function return value for given requirement.
@@ -205,7 +214,10 @@ class CollectionDependencyProvider(AbstractProvider):
             all(self.is_satisfied_by(requirement, candidate) for requirement in requirements)
         }
         try:
-            coll_versions = [] if preinstalled_candidates else self._api_proxy.get_collection_versions(first_req)  # type: t.Iterable[t.Tuple[str, GalaxyAPI]]
+            coll_versions: _c.Iterable[tuple[str, GalaxyAPI]] = (
+                [] if preinstalled_candidates
+                else self._api_proxy.get_collection_versions(first_req)
+            )
         except TypeError as exc:
             if first_req.is_concrete_artifact:
                 # Non hashable versions will cause a TypeError
@@ -248,7 +260,7 @@ class CollectionDependencyProvider(AbstractProvider):
         latest_matches = []
         signatures = []
-        extra_signature_sources = []  # type: list[str]
+        extra_signature_sources: list[str] = []

         discarding_pre_releases_acceptable = any(
             not is_pre_release(candidate_version)
@@ -353,8 +365,11 @@ class CollectionDependencyProvider(AbstractProvider):
         return list(preinstalled_candidates) + latest_matches

-    def is_satisfied_by(self, requirement, candidate):
-        # type: (Requirement, Candidate) -> bool
+    def is_satisfied_by(
+        self,
+        requirement: Requirement,
+        candidate: Candidate,
+    ) -> bool:
         r"""Whether the given requirement is satisfiable by a candidate.

         :param requirement: A requirement that produced the `candidate`.
@@ -380,8 +395,7 @@ class CollectionDependencyProvider(AbstractProvider):
             requirements=requirement.ver,
         )

-    def get_dependencies(self, candidate):
-        # type: (Candidate) -> list[Candidate]
+    def get_dependencies(self, candidate: Candidate) -> list[Requirement]:
         r"""Get direct dependencies of a candidate.

         :returns: A collection of requirements that `candidate` \

@@ -11,8 +11,7 @@ from ansible.module_utils.compat.version import LooseVersion
 from ansible.utils.version import SemanticVersion


-def is_pre_release(version):
-    # type: (str) -> bool
+def is_pre_release(version: str) -> bool:
     """Figure out if a given version is a pre-release."""
     try:
         return SemanticVersion(version).is_prerelease
@@ -20,8 +19,7 @@ def is_pre_release(version):
         return False


-def meets_requirements(version, requirements):
-    # type: (str, str) -> bool
+def meets_requirements(version: str, requirements: str) -> bool:
     """Verify if a given version satisfies all the requirements.

     Supported version identifiers are:

@@ -4,6 +4,7 @@ jinja2  # type stubs not published separately
 packaging  # type stubs not published separately
 pytest  # type stubs not published separately
 pytest-mock  # type stubs not published separately
+resolvelib  # type stubs not published separately
 tomli  # type stubs not published separately, required for toml inventory plugin
 types-backports
 types-paramiko

@@ -13,6 +13,7 @@ pycparser==2.22
 Pygments==2.19.2
 pytest==8.4.2
 pytest-mock==3.15.0
+resolvelib==1.2.0
 tomli==2.2.1
 types-backports==0.1.3
 types-paramiko==4.0.0.20250822

@@ -104,9 +104,6 @@ ignore_missing_imports = True
 [mypy-distro.*]
 ignore_missing_imports = True

-[mypy-resolvelib.*]
-ignore_missing_imports = True
-
 [mypy-urlparse.*]
 ignore_missing_imports = True
