galaxy - Clean up type hints and imports.

pull/74989/head
Matt Clay 3 years ago
parent 0990c4ca7c
commit 8b2e628565
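
The change is mechanical but consistent across the Galaxy collection-handling modules: the Python 2 import shims go away, each module gets a single "import typing as t", and type comments are rewritten with builtin generics (dict, list, set, tuple) and PEP 604 "X | Y" unions in place of Dict/List/Optional/Union. Because these annotations live in comments or inside "if t.TYPE_CHECKING:" blocks, nothing is evaluated at runtime, so the newer spellings are safe even on interpreters older than the syntax itself. A minimal sketch of the before/after comment style (get_meta_old and get_meta_new are invented for illustration, not part of the commit):

    import typing as t


    def get_meta_old(name, tag=None):
        # type: (str, t.Optional[str]) -> t.Dict[str, t.Optional[str]]
        """Old spelling: typing.Dict and typing.Optional."""
        return {'name': name, 'tag': tag}


    def get_meta_new(name, tag=None):
        # type: (str, str | None) -> dict[str, str | None]
        """New spelling: builtin dict and a '|' union, still inside a comment."""
        return {'name': name, 'tag': tag}


    print(get_meta_new('community.general'))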

@ -31,12 +31,6 @@ from ansible.utils.display import Display
from ansible.utils.hashing import secure_hash_s
from ansible.utils.path import makedirs_safe
try:
from urllib.parse import urlparse
except ImportError:
# Python 2
from urlparse import urlparse
display = Display()
_CACHE_LOCK = threading.Lock()
COLLECTION_PAGE_SIZE = 100
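
The hunk above (apparently the Galaxy API client, given the _CACHE_LOCK and COLLECTION_PAGE_SIZE context) drops the Python 2 fallback for urlparse; the queue/Queue shim further down gets the same treatment. With ansible-core supporting only Python 3, the guarded import collapses to a plain one. A sketch of the pattern, standard library only:

    # Before: guarded import so the module also loaded on Python 2.
    try:
        from urllib.parse import urlparse
    except ImportError:
        from urlparse import urlparse  # Python 2 only

    # After: Python 3 only, so the fallback disappears.
    from urllib.parse import urlparse

    print(urlparse('https://galaxy.ansible.com/api/').netloc)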

@ -11,6 +11,7 @@ import fnmatch
import functools
import json
import os
import queue
import shutil
import stat
import sys
@ -19,52 +20,30 @@ import tempfile
import textwrap
import threading
import time
import yaml
import typing as t
from collections import namedtuple
from contextlib import contextmanager
from ansible.module_utils.compat.version import LooseVersion
from hashlib import sha256
from io import BytesIO
from itertools import chain
from yaml.error import YAMLError
# NOTE: Adding type ignores is a hack for mypy to shut up wrt bug #1153
try:
import queue # type: ignore[import]
except ImportError: # Python 2
import Queue as queue # type: ignore[import,no-redef]
try:
# NOTE: It's in Python 3 stdlib and can be installed on Python 2
# NOTE: via `pip install typing`. Unnecessary in runtime.
# NOTE: `TYPE_CHECKING` is True during mypy-typecheck-time.
from typing import TYPE_CHECKING
except ImportError:
TYPE_CHECKING = False
if TYPE_CHECKING:
from typing import Dict, Iterable, List, Optional, Text, Union
if sys.version_info[:2] >= (3, 8):
from typing import Literal
else: # Python 2 + Python 3.4-3.7
from typing_extensions import Literal
if t.TYPE_CHECKING:
from ansible.galaxy.collection.concrete_artifact_manager import (
ConcreteArtifactsManager,
)
ManifestKeysType = Literal[
ManifestKeysType = t.Literal[
'collection_info', 'file_manifest_file', 'format',
]
FileMetaKeysType = Literal[
FileMetaKeysType = t.Literal[
'name',
'ftype',
'chksum_type',
'chksum_sha256',
'format',
]
CollectionInfoKeysType = Literal[
CollectionInfoKeysType = t.Literal[
# collection meta:
'namespace', 'name', 'version',
'authors', 'readme',
@ -77,26 +56,13 @@ if TYPE_CHECKING:
# files meta:
FileMetaKeysType,
]
ManifestValueType = Dict[
CollectionInfoKeysType,
Optional[
Union[
int, str, # scalars, like name/ns, schema version
List[str], # lists of scalars, like tags
Dict[str, str], # deps map
],
],
]
CollectionManifestType = Dict[ManifestKeysType, ManifestValueType]
FileManifestEntryType = Dict[FileMetaKeysType, Optional[Union[str, int]]]
FilesManifestType = Dict[
Literal['files', 'format'],
Union[List[FileManifestEntryType], int],
]
ManifestValueType = dict[CollectionInfoKeysType, 'int | str | list[str] | dict[str, str] | None']
CollectionManifestType = dict[ManifestKeysType, ManifestValueType]
FileManifestEntryType = dict[FileMetaKeysType, 'str | int | None']
FilesManifestType = dict[t.Literal['files', 'format'], 'list[FileManifestEntryType] | int']
import ansible.constants as C
from ansible.errors import AnsibleError
from ansible.galaxy import get_collections_galaxy_meta_info
from ansible.galaxy.api import GalaxyAPI
from ansible.galaxy.collection.concrete_artifact_manager import (
_consume_file,
@ -128,7 +94,6 @@ from ansible.module_utils.common.yaml import yaml_dump
from ansible.utils.collection_loader import AnsibleCollectionRef
from ansible.utils.display import Display
from ansible.utils.hashing import secure_hash, secure_hash_s
from ansible.utils.version import SemanticVersion
display = Display()
@ -188,7 +153,7 @@ class CollectionVerifyResult:
def verify_local_collection(
local_collection, remote_collection,
artifacts_manager,
): # type: (Candidate, Optional[Candidate], ConcreteArtifactsManager) -> CollectionVerifyResult
): # type: (Candidate, Candidate | None, ConcreteArtifactsManager) -> CollectionVerifyResult
"""Verify integrity of the locally installed collection.
:param local_collection: Collection being checked.
@ -208,7 +173,7 @@ def verify_local_collection(
format(path=to_text(local_collection.src)),
)
modified_content = [] # type: List[ModifiedContent]
modified_content = [] # type: list[ModifiedContent]
verify_local_only = remote_collection is None
@ -381,7 +346,7 @@ def verify_file_signature(manifest_file, detached_signature, keyring):
def build_collection(u_collection_path, u_output_path, force):
# type: (Text, Text, bool) -> Text
# type: (str, str, bool) -> str
"""Creates the Ansible collection artifact in a .tar.gz file.
:param u_collection_path: The path to the collection to build. This should be the directory that contains the
@ -427,9 +392,9 @@ def build_collection(u_collection_path, u_output_path, force):
def download_collections(
collections, # type: Iterable[Requirement]
collections, # type: t.Iterable[Requirement]
output_path, # type: str
apis, # type: Iterable[GalaxyAPI]
apis, # type: t.Iterable[GalaxyAPI]
no_deps, # type: bool
allow_pre_release, # type: bool
artifacts_manager, # type: ConcreteArtifactsManager
@ -569,9 +534,9 @@ def publish_collection(collection_path, api, wait, timeout):
def install_collections(
collections, # type: Iterable[Requirement]
collections, # type: t.Iterable[Requirement]
output_path, # type: str
apis, # type: Iterable[GalaxyAPI]
apis, # type: t.Iterable[GalaxyAPI]
ignore_errors, # type: bool
no_deps, # type: bool
force, # type: bool
@ -735,13 +700,13 @@ def validate_collection_path(collection_path): # type: (str) -> str
def verify_collections(
collections, # type: Iterable[Requirement]
search_paths, # type: Iterable[str]
apis, # type: Iterable[GalaxyAPI]
collections, # type: t.Iterable[Requirement]
search_paths, # type: t.Iterable[str]
apis, # type: t.Iterable[GalaxyAPI]
ignore_errors, # type: bool
local_verify_only, # type: bool
artifacts_manager, # type: ConcreteArtifactsManager
): # type: (...) -> List[CollectionVerifyResult]
): # type: (...) -> list[CollectionVerifyResult]
r"""Verify the integrity of locally installed collections.
:param collections: The collections to check.
@ -752,7 +717,7 @@ def verify_collections(
:param artifacts_manager: Artifacts manager.
:return: list of CollectionVerifyResult objects describing the results of each collection verification
"""
results = [] # type: List[CollectionVerifyResult]
results = [] # type: list[CollectionVerifyResult]
api_proxy = MultiGalaxyAPIProxy(apis, artifacts_manager)
@ -949,7 +914,7 @@ def _verify_file_hash(b_path, filename, expected_hash, error_queue):
def _build_files_manifest(b_collection_path, namespace, name, ignore_patterns):
# type: (bytes, str, str, List[str]) -> FilesManifestType
# type: (bytes, str, str, list[str]) -> FilesManifestType
# We always ignore .pyc and .retry files as well as some well known version control directories. The ignore
# patterns can be extended by the build_ignore key in galaxy.yml
b_ignore_patterns = [
@ -1073,7 +1038,7 @@ def _build_collection_tar(
b_tar_path, # type: bytes
collection_manifest, # type: CollectionManifestType
file_manifest, # type: FilesManifestType
): # type: (...) -> Text
): # type: (...) -> str
"""Build a tar.gz collection artifact from the manifest data."""
files_manifest_json = to_bytes(json.dumps(file_manifest, indent=True), errors='surrogate_or_strict')
collection_manifest['file_manifest_file']['chksum_sha256'] = secure_hash_s(files_manifest_json, hash_func=sha256)
@ -1528,15 +1493,15 @@ def _is_child_path(path, parent_path, link_name=None):
def _resolve_depenency_map(
requested_requirements, # type: Iterable[Requirement]
galaxy_apis, # type: Iterable[GalaxyAPI]
requested_requirements, # type: t.Iterable[Requirement]
galaxy_apis, # type: t.Iterable[GalaxyAPI]
concrete_artifacts_manager, # type: ConcreteArtifactsManager
preferred_candidates, # type: Optional[Iterable[Candidate]]
preferred_candidates, # type: t.Iterable[Candidate] | None
no_deps, # type: bool
allow_pre_release, # type: bool
upgrade, # type: bool
include_signatures, # type: bool
): # type: (...) -> Dict[str, Candidate]
): # type: (...) -> dict[str, Candidate]
"""Return the resolved dependency map."""
collection_dep_resolver = build_collection_dependency_resolver(
galaxy_apis=galaxy_apis,
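
This largest section (the collection build/install/verify helpers) swaps the typing / typing_extensions juggling for a plain t.Literal under one "if t.TYPE_CHECKING:" block and condenses the manifest aliases to builtin generics with quoted "|" unions. Since that block never executes, the aliases only have to satisfy the type checker. A condensed, self-contained sketch of the same pattern (load_manifest is hypothetical and the alias shapes are simplified from the ones above):

    import json
    import typing as t

    if t.TYPE_CHECKING:
        # Read only by the type checker; the block never runs, so subscripted
        # builtins and quoted '|' unions cost nothing at runtime.
        ManifestKeysType = t.Literal['collection_info', 'file_manifest_file', 'format']
        ManifestValueType = dict[str, 'int | str | list[str] | dict[str, str] | None']
        CollectionManifestType = dict[ManifestKeysType, ManifestValueType]


    def load_manifest(raw):
        # type: (str) -> CollectionManifestType
        """Hypothetical helper, present only to show the aliases in a signature."""
        return json.loads(raw)


    print(load_manifest('{"format": 1}'))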

@ -10,6 +10,8 @@ import json
import os
import tarfile
import subprocess
import typing as t
from contextlib import contextmanager
from hashlib import sha256
from urllib.error import URLError
@ -17,19 +19,7 @@ from urllib.parse import urldefrag
from shutil import rmtree
from tempfile import mkdtemp
try:
from typing import TYPE_CHECKING
except ImportError:
TYPE_CHECKING = False
if TYPE_CHECKING:
from typing import (
Any, # FIXME: !!!111
BinaryIO, Dict, IO,
Iterator, List, Optional,
Set, Tuple, Type, Union,
)
if t.TYPE_CHECKING:
from ansible.galaxy.dependency_resolution.dataclasses import (
Candidate, Requirement,
)
@ -37,7 +27,6 @@ if TYPE_CHECKING:
from ansible.errors import AnsibleError
from ansible.galaxy import get_collections_galaxy_meta_info
from ansible.galaxy.api import GalaxyAPI
from ansible.galaxy.dependency_resolution.dataclasses import _GALAXY_YAML
from ansible.galaxy.user_agent import user_agent
from ansible.module_utils._text import to_bytes, to_native, to_text
@ -72,13 +61,13 @@ class ConcreteArtifactsManager:
# type: (bytes, bool, str, int) -> None
"""Initialize ConcreteArtifactsManager caches and constraints."""
self._validate_certs = validate_certs # type: bool
self._artifact_cache = {} # type: Dict[bytes, bytes]
self._galaxy_artifact_cache = {} # type: Dict[Union[Candidate, Requirement], bytes]
self._artifact_meta_cache = {} # type: Dict[bytes, Dict[str, Optional[Union[str, List[str], Dict[str, str]]]]]
self._galaxy_collection_cache = {} # type: Dict[Union[Candidate, Requirement], Tuple[str, str, GalaxyToken]]
self._galaxy_collection_origin_cache = {} # type: Dict[Candidate, Tuple[str, List[Dict[str, str]]]]
self._artifact_cache = {} # type: dict[bytes, bytes]
self._galaxy_artifact_cache = {} # type: dict[Candidate | Requirement, bytes]
self._artifact_meta_cache = {} # type: dict[bytes, dict[str, str | list[str] | dict[str, str] | None]]
self._galaxy_collection_cache = {} # type: dict[Candidate | Requirement, tuple[str, str, GalaxyToken]]
self._galaxy_collection_origin_cache = {} # type: dict[Candidate, tuple[str, list[dict[str, str]]]]
self._b_working_directory = b_working_directory # type: bytes
self._supplemental_signature_cache = {} # type: Dict[str, str]
self._supplemental_signature_cache = {} # type: dict[str, str]
self._keyring = keyring # type: str
self.timeout = timeout # type: int
@ -87,7 +76,7 @@ class ConcreteArtifactsManager:
return self._keyring
def get_galaxy_artifact_source_info(self, collection):
# type: (Candidate) -> Dict[str, Union[str, List[Dict[str, str]]]]
# type: (Candidate) -> dict[str, str | list[dict[str, str]]]
server = collection.src.api_server
try:
@ -111,7 +100,7 @@ class ConcreteArtifactsManager:
}
def get_galaxy_artifact_path(self, collection):
# type: (Union[Candidate, Requirement]) -> bytes
# type: (Candidate | Requirement) -> bytes
"""Given a Galaxy-stored collection, return a cached path.
If it's not yet on disk, this method downloads the artifact first.
@ -171,7 +160,7 @@ class ConcreteArtifactsManager:
return b_artifact_path
def get_artifact_path(self, collection):
# type: (Union[Candidate, Requirement]) -> bytes
# type: (Candidate | Requirement) -> bytes
"""Given a concrete collection pointer, return a cached path.
If it's not yet on disk, this method downloads the artifact first.
@ -236,15 +225,15 @@ class ConcreteArtifactsManager:
return b_artifact_path
def _get_direct_collection_namespace(self, collection):
# type: (Candidate) -> Optional[str]
# type: (Candidate) -> str | None
return self.get_direct_collection_meta(collection)['namespace'] # type: ignore[return-value]
def _get_direct_collection_name(self, collection):
# type: (Candidate) -> Optional[str]
# type: (Candidate) -> str | None
return self.get_direct_collection_meta(collection)['name'] # type: ignore[return-value]
def get_direct_collection_fqcn(self, collection):
# type: (Candidate) -> Optional[str]
# type: (Candidate) -> str | None
"""Extract FQCN from the given on-disk collection artifact.
If the collection is virtual, ``None`` is returned instead
@ -260,17 +249,17 @@ class ConcreteArtifactsManager:
))
def get_direct_collection_version(self, collection):
# type: (Union[Candidate, Requirement]) -> str
# type: (Candidate | Requirement) -> str
"""Extract version from the given on-disk collection artifact."""
return self.get_direct_collection_meta(collection)['version'] # type: ignore[return-value]
def get_direct_collection_dependencies(self, collection):
# type: (Union[Candidate, Requirement]) -> Dict[str, str]
# type: (Candidate | Requirement) -> dict[str, str]
"""Extract deps from the given on-disk collection artifact."""
return self.get_direct_collection_meta(collection)['dependencies'] # type: ignore[return-value]
def get_direct_collection_meta(self, collection):
# type: (Union[Candidate, Requirement]) -> Dict[str, Optional[Union[str, Dict[str, str], List[str]]]]
# type: (Candidate | Requirement) -> dict[str, str | dict[str, str] | list[str] | None]
"""Extract meta from the given on-disk collection artifact."""
try: # FIXME: use unique collection identifier as a cache key?
return self._artifact_meta_cache[collection.src]
@ -316,7 +305,7 @@ class ConcreteArtifactsManager:
return collection_meta
def save_collection_source(self, collection, url, sha256_hash, token, signatures_url, signatures):
# type: (Candidate, str, str, GalaxyToken, str, List[Dict[str, str]]) -> None
# type: (Candidate, str, str, GalaxyToken, str, list[dict[str, str]]) -> None
"""Store collection URL, SHA256 hash and Galaxy API token.
This is a hook that is supposed to be called before attempting to
@ -328,11 +317,11 @@ class ConcreteArtifactsManager:
@classmethod
@contextmanager
def under_tmpdir(
cls, # type: Type[ConcreteArtifactsManager]
cls,
temp_dir_base, # type: str
validate_certs=True, # type: bool
keyring=None, # type: str
): # type: (...) -> Iterator[ConcreteArtifactsManager]
): # type: (...) -> t.Iterator[ConcreteArtifactsManager]
"""Custom ConcreteArtifactsManager constructor with temp dir.
This method returns a context manager that allocates and cleans
@ -427,7 +416,7 @@ def _extract_collection_from_git(repo_url, coll_ver, b_path):
# FIXME: use random subdirs while preserving the file names
def _download_file(url, b_path, expected_hash, validate_certs, token=None, timeout=60):
# type: (str, bytes, Optional[str], bool, GalaxyToken, int) -> bytes
# type: (str, bytes, str | None, bool, GalaxyToken, int) -> bytes
# ^ NOTE: used in download and verify_collections ^
b_tarball_name = to_bytes(
url.rsplit('/', 1)[1], errors='surrogate_or_strict',
@ -452,7 +441,7 @@ def _download_file(url, b_path, expected_hash, validate_certs, token=None, timeo
timeout=timeout
)
with open(b_file_path, 'wb') as download_file: # type: BinaryIO
with open(b_file_path, 'wb') as download_file: # type: t.BinaryIO
actual_hash = _consume_file(resp, write_to=download_file)
if expected_hash:
@ -468,7 +457,7 @@ def _download_file(url, b_path, expected_hash, validate_certs, token=None, timeo
def _consume_file(read_from, write_to=None):
# type: (BinaryIO, BinaryIO) -> str
# type: (t.BinaryIO, t.BinaryIO) -> str
bufsize = 65536
sha256_digest = sha256()
data = read_from.read(bufsize)
@ -483,19 +472,19 @@ def _consume_file(read_from, write_to=None):
def _normalize_galaxy_yml_manifest(
galaxy_yml, # type: Dict[str, Optional[Union[str, List[str], Dict[str, str]]]]
galaxy_yml, # type: dict[str, str | list[str] | dict[str, str] | None]
b_galaxy_yml_path, # type: bytes
):
# type: (...) -> Dict[str, Optional[Union[str, List[str], Dict[str, str]]]]
# type: (...) -> dict[str, str | list[str] | dict[str, str] | None]
galaxy_yml_schema = (
get_collections_galaxy_meta_info()
) # type: List[Dict[str, Any]] # FIXME: <--
# FIXME: 👆maybe precise type: List[Dict[str, Union[bool, str, List[str]]]]
) # type: list[dict[str, t.Any]] # FIXME: <--
# FIXME: 👆maybe precise type: list[dict[str, bool | str | list[str]]]
mandatory_keys = set()
string_keys = set() # type: Set[str]
list_keys = set() # type: Set[str]
dict_keys = set() # type: Set[str]
string_keys = set() # type: set[str]
list_keys = set() # type: set[str]
dict_keys = set() # type: set[str]
for info in galaxy_yml_schema:
if info.get('required', False):
@ -550,7 +539,7 @@ def _normalize_galaxy_yml_manifest(
def _get_meta_from_dir(
b_path, # type: bytes
): # type: (...) -> Dict[str, Optional[Union[str, List[str], Dict[str, str]]]]
): # type: (...) -> dict[str, str | list[str] | dict[str, str] | None]
try:
return _get_meta_from_installed_dir(b_path)
except LookupError:
@ -559,7 +548,7 @@ def _get_meta_from_dir(
def _get_meta_from_src_dir(
b_path, # type: bytes
): # type: (...) -> Dict[str, Optional[Union[str, List[str], Dict[str, str]]]]
): # type: (...) -> dict[str, str | list[str] | dict[str, str] | None]
galaxy_yml = os.path.join(b_path, _GALAXY_YAML)
if not os.path.isfile(galaxy_yml):
raise LookupError(
@ -589,7 +578,7 @@ def _get_meta_from_src_dir(
def _get_json_from_installed_dir(
b_path, # type: bytes
filename, # type: str
): # type: (...) -> Dict
): # type: (...) -> dict
b_json_filepath = os.path.join(b_path, to_bytes(filename, errors='surrogate_or_strict'))
@ -621,7 +610,7 @@ def _get_json_from_installed_dir(
def _get_meta_from_installed_dir(
b_path, # type: bytes
): # type: (...) -> Dict[str, Optional[Union[str, List[str], Dict[str, str]]]]
): # type: (...) -> dict[str, str | list[str] | dict[str, str] | None]
manifest = _get_json_from_installed_dir(b_path, MANIFEST_FILENAME)
collection_info = manifest['collection_info']
@ -642,7 +631,7 @@ def _get_meta_from_installed_dir(
def _get_meta_from_tar(
b_path, # type: bytes
): # type: (...) -> Dict[str, Optional[Union[str, List[str], Dict[str, str]]]]
): # type: (...) -> dict[str, str | list[str] | dict[str, str] | None]
if not tarfile.is_tarfile(b_path):
raise AnsibleError(
"Collection artifact at '{path!s}' is not a valid tar file.".
@ -690,7 +679,7 @@ def _tarfile_extract(
tar, # type: tarfile.TarFile
member, # type: tarfile.TarInfo
):
# type: (...) -> Iterator[Tuple[tarfile.TarInfo, Optional[IO[bytes]]]]
# type: (...) -> t.Iterator[tuple[tarfile.TarInfo, t.IO[bytes] | None]]
tar_obj = tar.extractfile(member)
try:
yield member, tar_obj
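
In the concrete artifact manager, the cache attributes keep their inline comments but with builtin generics, the explicit cls annotation on under_tmpdir is dropped (the checker infers it), and the contextmanager-decorated classmethod is typed as returning t.Iterator[...]. A runnable sketch of that last pattern, with ArtifactCache standing in for ConcreteArtifactsManager:

    import tempfile
    import typing as t
    from contextlib import contextmanager
    from shutil import rmtree


    class ArtifactCache:
        def __init__(self, workdir):
            # type: (str) -> None
            self._workdir = workdir  # type: str
            self._artifact_cache = {}  # type: dict[str, bytes]

        @classmethod
        @contextmanager
        def under_tmpdir(cls, base):
            # type: (str) -> t.Iterator[ArtifactCache]
            """Allocate a temp dir, yield a cache bound to it, then clean up."""
            tmp_path = tempfile.mkdtemp(dir=base)
            try:
                yield cls(tmp_path)
            finally:
                rmtree(tmp_path)


    with ArtifactCache.under_tmpdir(tempfile.gettempdir()) as cache:
        print(cache._workdir)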

@ -6,15 +6,9 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import typing as t
try:
from typing import TYPE_CHECKING
except ImportError:
TYPE_CHECKING = False
if TYPE_CHECKING:
from typing import Dict, Iterable, Iterator, Tuple, List
if t.TYPE_CHECKING:
from ansible.galaxy.api import CollectionVersionMetadata
from ansible.galaxy.collection.concrete_artifact_manager import (
ConcreteArtifactsManager,
@ -35,13 +29,13 @@ class MultiGalaxyAPIProxy:
"""A proxy that abstracts talking to multiple Galaxy instances."""
def __init__(self, apis, concrete_artifacts_manager):
# type: (Iterable[GalaxyAPI], ConcreteArtifactsManager) -> None
# type: (t.Iterable[GalaxyAPI], ConcreteArtifactsManager) -> None
"""Initialize the target APIs list."""
self._apis = apis
self._concrete_art_mgr = concrete_artifacts_manager
def _get_collection_versions(self, requirement):
# type: (Requirement) -> Iterator[Tuple[GalaxyAPI, str]]
# type: (Requirement) -> t.Iterator[tuple[GalaxyAPI, str]]
"""Helper for get_collection_versions.
Yield api, version pairs for all APIs,
@ -82,7 +76,7 @@ class MultiGalaxyAPIProxy:
raise last_error
def get_collection_versions(self, requirement):
# type: (Requirement) -> Iterable[Tuple[str, GalaxyAPI]]
# type: (Requirement) -> t.Iterable[tuple[str, GalaxyAPI]]
"""Get a set of unique versions for FQCN on Galaxy servers."""
if requirement.is_concrete_artifact:
return {
@ -152,7 +146,7 @@ class MultiGalaxyAPIProxy:
raise last_err
def get_collection_dependencies(self, collection_candidate):
# type: (Candidate) -> Dict[str, str]
# type: (Candidate) -> dict[str, str]
# FIXME: return Requirement instances instead?
"""Retrieve collection dependencies of a given candidate."""
if collection_candidate.is_concrete_artifact:
@ -169,7 +163,7 @@ class MultiGalaxyAPIProxy:
)
def get_signatures(self, collection_candidate):
# type: (Candidate) -> List[Dict[str, str]]
# type: (Candidate) -> list[dict[str, str]]
namespace = collection_candidate.namespace
name = collection_candidate.name
version = collection_candidate.ver
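
The multi-Galaxy API proxy follows the same recipe; its typing-only imports (CollectionVersionMetadata, the concrete artifacts manager, the Candidate/Requirement dataclasses) stay behind t.TYPE_CHECKING, which is safe because comment annotations are never evaluated. A sketch of that shape with an invented helper (iter_versions is not part of the module):

    import typing as t

    if t.TYPE_CHECKING:
        # Needed for annotations only; never imported at runtime here.
        from ansible.galaxy.api import GalaxyAPI


    def iter_versions(apis, versions):
        # type: (t.Iterable[GalaxyAPI], t.Iterable[str]) -> t.Iterator[tuple[GalaxyAPI, str]]
        """Yield (api, version) pairs, mirroring the shape of _get_collection_versions."""
        for api in apis:
            for version in versions:
                yield api, version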

@ -11,30 +11,21 @@ import contextlib
import os
import subprocess
import sys
import typing as t
from dataclasses import dataclass, fields as dc_fields
from functools import partial
from urllib.error import HTTPError, URLError
try:
# NOTE: It's in Python 3 stdlib and can be installed on Python 2
# NOTE: via `pip install typing`. Unnecessary in runtime.
# NOTE: `TYPE_CHECKING` is True during mypy-typecheck-time.
from typing import TYPE_CHECKING
except ImportError:
TYPE_CHECKING = False
if TYPE_CHECKING:
if t.TYPE_CHECKING:
from ansible.utils.display import Display
from typing import Tuple, Iterator, Optional
IS_PY310_PLUS = sys.version_info[:2] >= (3, 10)
frozen_dataclass = partial(dataclass, frozen=True, **({'slots': True} if IS_PY310_PLUS else {}))
def get_signature_from_source(source, display=None): # type: (str, Optional[Display]) -> str
def get_signature_from_source(source, display=None): # type: (str, Display | None) -> str
if display is not None:
display.vvvv(f"Using signature at {source}")
try:
@ -58,7 +49,7 @@ def run_gpg_verify(
signature, # type: str
keyring, # type: str
display, # type: Display
): # type: (...) -> Tuple[str, int]
): # type: (...) -> tuple[str, int]
status_fd_read, status_fd_write = os.pipe()
# running the gpg command will create the keyring if it does not exist
@ -108,7 +99,7 @@ def run_gpg_verify(
return stdout, p.returncode
def parse_gpg_errors(status_out): # type: (str) -> Iterator[GpgBaseError]
def parse_gpg_errors(status_out): # type: (str) -> t.Iterator[GpgBaseError]
for line in status_out.splitlines():
if not line:
continue
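
The gpg helpers follow suit: the optional Display argument becomes "Display | None", run_gpg_verify returns tuple[str, int], and parse_gpg_errors is typed as a t.Iterator. A small stand-alone sketch of those two shapes (run_tool and iter_status_lines are invented names):

    import subprocess
    import sys
    import typing as t


    def run_tool(args):
        # type: (list[str]) -> tuple[str, int]
        """Run a command and return (stdout, returncode), like run_gpg_verify."""
        proc = subprocess.run(args, capture_output=True, text=True, check=False)
        return proc.stdout, proc.returncode


    def iter_status_lines(status_out):
        # type: (str) -> t.Iterator[str]
        """Yield non-empty output lines, loosely like parse_gpg_errors."""
        for line in status_out.splitlines():
            if line:
                yield line


    out, rc = run_tool([sys.executable, '--version'])
    print(rc, list(iter_status_lines(out)))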

@ -6,13 +6,9 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
try:
from typing import TYPE_CHECKING
except ImportError:
TYPE_CHECKING = False
import typing as t
if TYPE_CHECKING:
from typing import Iterable
if t.TYPE_CHECKING:
from ansible.galaxy.api import GalaxyAPI
from ansible.galaxy.collection.concrete_artifact_manager import (
ConcreteArtifactsManager,
@ -29,10 +25,10 @@ from ansible.galaxy.dependency_resolution.resolvers import CollectionDependencyR
def build_collection_dependency_resolver(
galaxy_apis, # type: Iterable[GalaxyAPI]
galaxy_apis, # type: t.Iterable[GalaxyAPI]
concrete_artifacts_manager, # type: ConcreteArtifactsManager
user_requirements, # type: Iterable[Requirement]
preferred_candidates=None, # type: Iterable[Candidate]
user_requirements, # type: t.Iterable[Requirement]
preferred_candidates=None, # type: t.Iterable[Candidate]
with_deps=True, # type: bool
with_pre_releases=False, # type: bool
upgrade=False, # type: bool

@ -8,23 +8,19 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import typing as t
from collections import namedtuple
from collections.abc import MutableSequence
from glob import iglob
from urllib.parse import urlparse
from yaml import safe_load
try:
from typing import TYPE_CHECKING
except ImportError:
TYPE_CHECKING = False
if TYPE_CHECKING:
from typing import Type, TypeVar
if t.TYPE_CHECKING:
from ansible.galaxy.collection.concrete_artifact_manager import (
ConcreteArtifactsManager,
)
Collection = TypeVar(
Collection = t.TypeVar(
'Collection',
'Candidate', 'Requirement',
'_ComputedReqKindsMixin',
@ -187,7 +183,7 @@ class _ComputedReqKindsMixin:
@classmethod
def from_dir_path_as_unknown( # type: ignore[misc]
cls, # type: Type[Collection]
cls, # type: t.Type[Collection]
dir_path, # type: bytes
art_mgr, # type: ConcreteArtifactsManager
): # type: (...) -> Collection
@ -239,7 +235,7 @@ class _ComputedReqKindsMixin:
@classmethod
def from_dir_path_implicit( # type: ignore[misc]
cls, # type: Type[Collection]
cls, # type: t.Type[Collection]
dir_path, # type: bytes
): # type: (...) -> Collection
"""Construct a collection instance based on an arbitrary dir.

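The dependency-resolution dataclasses keep their Collection TypeVar, now spelled t.TypeVar, and the alternative constructors type cls as t.Type[Collection] so that Candidate and Requirement come back as their own type. A simplified, runnable sketch of that idiom (Req and this Candidate are stand-ins; the real TypeVar is constrained to the actual classes):

    import typing as t

    # The real TypeVar is constrained to Candidate/Requirement; a bound keeps
    # this sketch self-contained.
    Collection = t.TypeVar('Collection', bound='Req')


    class Req:
        def __init__(self, fqcn):
            # type: (str) -> None
            self.fqcn = fqcn

        @classmethod
        def from_string(cls, spec):
            # type: (t.Type[Collection], str) -> Collection
            """Alternative constructor; subclasses get their own type back."""
            return cls(spec.split(':', 1)[0])


    class Candidate(Req):
        """Stand-in subclass; the real Candidate carries much more state."""


    print(type(Candidate.from_string('community.general:5.0.0')).__name__)
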
@ -7,14 +7,9 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import functools
import typing as t
try:
from typing import TYPE_CHECKING
except ImportError:
TYPE_CHECKING = False
if TYPE_CHECKING:
from typing import Iterable, List, NamedTuple, Optional, Union
if t.TYPE_CHECKING:
from ansible.galaxy.collection.concrete_artifact_manager import (
ConcreteArtifactsManager,
)
@ -71,8 +66,8 @@ class CollectionDependencyProvider(AbstractProvider):
self, # type: CollectionDependencyProvider
apis, # type: MultiGalaxyAPIProxy
concrete_artifacts_manager=None, # type: ConcreteArtifactsManager
user_requirements=None, # type: Iterable[Requirement]
preferred_candidates=None, # type: Iterable[Candidate]
user_requirements=None, # type: t.Iterable[Requirement]
preferred_candidates=None, # type: t.Iterable[Candidate]
with_deps=True, # type: bool
with_pre_releases=False, # type: bool
upgrade=False, # type: bool
@ -160,7 +155,7 @@ class CollectionDependencyProvider(AbstractProvider):
return False
def identify(self, requirement_or_candidate):
# type: (Union[Candidate, Requirement]) -> str
# type: (Candidate | Requirement) -> str
"""Given requirement or candidate, return an identifier for it.
This is used to identify a requirement or candidate, e.g.
@ -173,10 +168,10 @@ class CollectionDependencyProvider(AbstractProvider):
def get_preference(
self, # type: CollectionDependencyProvider
resolution, # type: Optional[Candidate]
candidates, # type: List[Candidate]
information, # type: List[NamedTuple]
): # type: (...) -> Union[float, int]
resolution, # type: Candidate | None
candidates, # type: list[Candidate]
information, # type: list[t.NamedTuple]
): # type: (...) -> float | int
"""Return sort key function return value for given requirement.
This result should be based on preference that is defined as
@ -230,7 +225,7 @@ class CollectionDependencyProvider(AbstractProvider):
return len(candidates)
def find_matches(self, requirements):
# type: (List[Requirement]) -> List[Candidate]
# type: (list[Requirement]) -> list[Candidate]
r"""Find all possible candidates satisfying given requirements.
This tries to get candidates based on the requirements' types.
@ -398,7 +393,7 @@ class CollectionDependencyProvider(AbstractProvider):
)
def get_dependencies(self, candidate):
# type: (Candidate) -> List[Candidate]
# type: (Candidate) -> list[Candidate]
r"""Get direct dependencies of a candidate.
:returns: A collection of requirements that `candidate` \
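
One pattern runs through the provider changes and the rest of the diff: names with builtin equivalents move to dict/list/set/tuple and Optional/Union become "| None" and "|", while names that only exist in typing (Literal, Iterable, Iterator, IO, BinaryIO, Any, Type, TypeVar, NamedTuple) keep the t. prefix. A tiny sketch echoing get_preference's "float | int" return (Pref and the pinning logic are invented):

    import typing as t


    class Pref(t.NamedTuple):
        name: str
        pinned: bool


    def get_preference(candidates, information):
        # type: (list[str], list[Pref]) -> float | int
        """Smaller return value means the identifier is resolved earlier."""
        if any(info.pinned for info in information):
            return float('-inf')
        return len(candidates)


    print(get_preference(['community.general'], [Pref('community.general', True)]))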

@ -7,8 +7,8 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import operator
from ansible.module_utils.compat.version import LooseVersion
from ansible.module_utils.compat.version import LooseVersion
from ansible.utils.version import SemanticVersion
