@@ -88,7 +88,7 @@ class ConcreteArtifactsManager:
         return self._ignore_signature_errors

     def get_galaxy_artifact_source_info(self, collection):
-        # type: (Candidate) -> dict[str, str | list[dict[str, str]]]
+        # type: (Candidate) -> dict[str, t.Union[str, list[dict[str, str]]]]
         server = collection.src.api_server

         try:
@@ -112,7 +112,7 @@ class ConcreteArtifactsManager:
         }

     def get_galaxy_artifact_path(self, collection):
-        # type: (Candidate | Requirement) -> bytes
+        # type: (t.Union[Candidate, Requirement]) -> bytes
         """Given a Galaxy-stored collection, return a cached path.

         If it's not yet on disk, this method downloads the artifact first.
@@ -172,7 +172,7 @@ class ConcreteArtifactsManager:
         return b_artifact_path

     def get_artifact_path(self, collection):
-        # type: (Candidate | Requirement) -> bytes
+        # type: (t.Union[Candidate, Requirement]) -> bytes
         """Given a concrete collection pointer, return a cached path.

         If it's not yet on disk, this method downloads the artifact first.
@@ -237,15 +237,15 @@ class ConcreteArtifactsManager:
         return b_artifact_path

     def _get_direct_collection_namespace(self, collection):
-        # type: (Candidate) -> str | None
+        # type: (Candidate) -> t.Optional[str]
         return self.get_direct_collection_meta(collection)['namespace']  # type: ignore[return-value]

     def _get_direct_collection_name(self, collection):
-        # type: (Candidate) -> str | None
+        # type: (Candidate) -> t.Optional[str]
         return self.get_direct_collection_meta(collection)['name']  # type: ignore[return-value]

     def get_direct_collection_fqcn(self, collection):
-        # type: (Candidate) -> str | None
+        # type: (Candidate) -> t.Optional[str]
         """Extract FQCN from the given on-disk collection artifact.

         If the collection is virtual, ``None`` is returned instead
@@ -261,12 +261,12 @@ class ConcreteArtifactsManager:
         ))

     def get_direct_collection_version(self, collection):
-        # type: (Candidate | Requirement) -> str
+        # type: (t.Union[Candidate, Requirement]) -> str
         """Extract version from the given on-disk collection artifact."""
         return self.get_direct_collection_meta(collection)['version']  # type: ignore[return-value]

     def get_direct_collection_dependencies(self, collection):
-        # type: (Candidate | Requirement) -> dict[str, str]
+        # type: (t.Union[Candidate, Requirement]) -> dict[str, str]
         """Extract deps from the given on-disk collection artifact."""
         collection_dependencies = self.get_direct_collection_meta(collection)['dependencies']
         if collection_dependencies is None:
@@ -274,7 +274,7 @@ class ConcreteArtifactsManager:
         return collection_dependencies  # type: ignore[return-value]

     def get_direct_collection_meta(self, collection):
-        # type: (Candidate | Requirement) -> dict[str, str | dict[str, str] | list[str] | None]
+        # type: (t.Union[Candidate, Requirement]) -> dict[str, t.Union[str, dict[str, str], list[str], None]]
         """Extract meta from the given on-disk collection artifact."""
         try:  # FIXME: use unique collection identifier as a cache key?
             return self._artifact_meta_cache[collection.src]
@@ -447,7 +447,7 @@ def _extract_collection_from_git(repo_url, coll_ver, b_path):

 # FIXME: use random subdirs while preserving the file names
 def _download_file(url, b_path, expected_hash, validate_certs, token=None, timeout=60):
-    # type: (str, bytes, str | None, bool, GalaxyToken, int) -> bytes
+    # type: (str, bytes, t.Optional[str], bool, GalaxyToken, int) -> bytes
     # ^ NOTE: used in download and verify_collections ^
     b_tarball_name = to_bytes(
         url.rsplit('/', 1)[1], errors='surrogate_or_strict',
@@ -503,14 +503,14 @@ def _consume_file(read_from, write_to=None):


 def _normalize_galaxy_yml_manifest(
-        galaxy_yml,  # type: dict[str, str | list[str] | dict[str, str] | None]
+        galaxy_yml,  # type: dict[str, t.Union[str, list[str], dict[str, str], None]]
         b_galaxy_yml_path,  # type: bytes
 ):
-    # type: (...) -> dict[str, str | list[str] | dict[str, str] | None]
+    # type: (...) -> dict[str, t.Union[str, list[str], dict[str, str], None]]
     galaxy_yml_schema = (
         get_collections_galaxy_meta_info()
     )  # type: list[dict[str, t.Any]]  # FIXME: <--
-    # FIXME: 👆maybe precise type: list[dict[str, bool | str | list[str]]]
+    # FIXME: 👆maybe precise type: list[dict[str, t.Union[bool, str, list[str]]]]

     mandatory_keys = set()
     string_keys = set()  # type: set[str]
@@ -570,7 +570,7 @@ def _normalize_galaxy_yml_manifest(

 def _get_meta_from_dir(
         b_path,  # type: bytes
-):  # type: (...) -> dict[str, str | list[str] | dict[str, str] | None]
+):  # type: (...) -> dict[str, t.Union[str, list[str], dict[str, str], None]]
     try:
         return _get_meta_from_installed_dir(b_path)
     except LookupError:
@@ -579,7 +579,7 @@ def _get_meta_from_dir(

 def _get_meta_from_src_dir(
         b_path,  # type: bytes
-):  # type: (...) -> dict[str, str | list[str] | dict[str, str] | None]
+):  # type: (...) -> dict[str, t.Union[str, list[str], dict[str, str], None]]
     galaxy_yml = os.path.join(b_path, _GALAXY_YAML)
     if not os.path.isfile(galaxy_yml):
         raise LookupError(
@@ -641,7 +641,7 @@ def _get_json_from_installed_dir(

 def _get_meta_from_installed_dir(
         b_path,  # type: bytes
-):  # type: (...) -> dict[str, str | list[str] | dict[str, str] | None]
+):  # type: (...) -> dict[str, t.Union[str, list[str], dict[str, str], None]]
     manifest = _get_json_from_installed_dir(b_path, MANIFEST_FILENAME)
     collection_info = manifest['collection_info']

@@ -662,7 +662,7 @@ def _get_meta_from_installed_dir(

 def _get_meta_from_tar(
         b_path,  # type: bytes
-):  # type: (...) -> dict[str, str | list[str] | dict[str, str] | None]
+):  # type: (...) -> dict[str, t.Union[str, list[str], dict[str, str], None]]
     if not tarfile.is_tarfile(b_path):
         raise AnsibleError(
             "Collection artifact at '{path!s}' is not a valid tar file.".
@@ -710,7 +710,7 @@ def _tarfile_extract(
         tar,  # type: tarfile.TarFile
         member,  # type: tarfile.TarInfo
 ):
-    # type: (...) -> t.Iterator[tuple[tarfile.TarInfo, t.IO[bytes] | None]]
+    # type: (...) -> t.Iterator[tuple[tarfile.TarInfo, t.Optional[t.IO[bytes]]]]
     tar_obj = tar.extractfile(member)
     try:
         yield member, tar_obj
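
Note: to a type checker the two spellings in this diff are equivalent; `t.Union[X, Y]` and `t.Optional[X]` are simply the pre-PEP 604 forms of `X | Y` and `X | None`. Because these annotations live in type comments, they are never evaluated at runtime and only the checker reads them, so the older spelling keeps them parseable by tooling that may not accept `|` unions in comments. A minimal sketch of the equivalence, using hypothetical function names that are not part of this module:

import typing as t

def lookup(key):  # hypothetical example, not from concrete_artifact_manager.py
    # type: (str) -> t.Optional[str]
    # Equivalent to the PEP 604 spelling: (str) -> str | None
    return {'ns.coll': '1.0.0'}.get(key)

def summarize(value):  # hypothetical example
    # type: (t.Union[int, str]) -> dict[str, t.Union[str, list[str]]]
    # Equivalent to: (int | str) -> dict[str, str | list[str]]
    return {'type': type(value).__name__, 'parts': [str(value)]}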