mirror of https://github.com/ansible/ansible.git
ansible-galaxy: add collection sub command (#57106)
* ansible-galaxy: add collection init sub command * Fix changelog and other sanity issues * Slim down skeleton structure, fix encoding issue on template * Fix doc generation code to include sub commands * Added build step * Tidy up the build action * Fixed up doc changes and slight testing tweaks * Re-organise tests to use pytest * Added publish step and fixed up issues after working with Galaxy * Unit test improvements * Fix unit test on 3.5 * Add remaining build tests * Test fixes, make the integration tests clearer to debug on failures * Removed unicode name tests until I've got further clarification * Added publish unit tests * Change expected length value * Added collection install steps, tests forthcoming * Added unit tests for collection install entrypoint * Added some more tests for collection install * follow proper encoding rules and added more tests * Add remaining tests * tidied up tests and code based on review * exclude pre-release versions from galaxy API pull/58886/head
parent
d336a989e4
commit
b6791e6ae3
@ -0,0 +1,5 @@
|
||||
minor_changes:
|
||||
- ansible-galaxy - Added the ``collection init`` command to create a skeleton collection directory.
|
||||
- ansible-galaxy - Added the ``collection build`` command to build a collection tarball ready for uploading.
|
||||
- ansible-galaxy - Added the ``collection publish`` command to publish a collection tarball to a Galaxy server.
|
||||
- ansible-galaxy - Added the ``collection install`` command to install collections locally.
|
@ -0,0 +1,951 @@
|
||||
# Copyright: (c) 2019, Ansible Project
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import fnmatch
|
||||
import json
|
||||
import operator
|
||||
import os
|
||||
import shutil
|
||||
import tarfile
|
||||
import tempfile
|
||||
import time
|
||||
import uuid
|
||||
import yaml
|
||||
|
||||
from contextlib import contextmanager
|
||||
from distutils.version import LooseVersion, StrictVersion
|
||||
from hashlib import sha256
|
||||
from io import BytesIO
|
||||
from yaml.error import YAMLError
|
||||
|
||||
import ansible.constants as C
|
||||
from ansible.errors import AnsibleError
|
||||
from ansible.module_utils._text import to_bytes, to_native, to_text
|
||||
from ansible.module_utils import six
|
||||
from ansible.utils.display import Display
|
||||
from ansible.utils.hashing import secure_hash, secure_hash_s
|
||||
from ansible.module_utils.urls import open_url
|
||||
|
||||
urlparse = six.moves.urllib.parse.urlparse
|
||||
urllib_error = six.moves.urllib.error
|
||||
|
||||
|
||||
display = Display()
|
||||
|
||||
MANIFEST_FORMAT = 1
|
||||
|
||||
|
||||
@six.python_2_unicode_compatible
class CollectionRequirement:
    """
    A single collection requirement together with the candidate versions that can satisfy it.

    Instances are built from a local tarball (:meth:`from_tar`), an installed directory
    (:meth:`from_path`), or a Galaxy server lookup (:meth:`from_name`). As dependency resolution
    progresses, :meth:`add_requirement` narrows ``self.versions`` to the set that satisfies every
    requirement recorded so far.
    """

    # Tar/dir members that carry collection metadata, mapped to the attribute name they populate.
    _FILE_MAPPING = [(b'MANIFEST.json', 'manifest_file'), (b'FILES.json', 'files_file')]

    def __init__(self, namespace, name, b_path, source, versions, requirement, force, parent=None, validate_certs=True,
                 metadata=None, files=None, skip=False):
        """
        Represents a collection requirement, the versions that are available to be installed as well as any
        dependencies the collection has.

        :param namespace: The collection namespace.
        :param name: The collection name.
        :param b_path: Byte str of the path to the collection tarball if it has already been downloaded.
        :param source: The Galaxy server URL to download if the collection is from Galaxy.
        :param versions: A list of versions of the collection that are available.
        :param requirement: The version requirement string used to verify the list of versions fit the requirements.
        :param force: Whether the force flag applied to the collection.
        :param parent: The name of the parent the collection is a dependency of.
        :param validate_certs: Whether to validate the Galaxy server certificate.
        :param metadata: The collection metadata dict if it has already been retrieved.
        :param files: The files that exist inside the collection. This is based on the FILES.json file inside the
            collection artifact.
        :param skip: Whether to skip installing the collection. Should be set if the collection is already installed
            and force is not set.
        """
        self.namespace = namespace
        self.name = name
        self.b_path = b_path
        self.source = source
        self.versions = set(versions)
        self.force = force
        self.skip = skip
        self.required_by = []  # list of (parent name or None, requirement string) tuples
        self._validate_certs = validate_certs

        self._metadata = metadata
        self._files = files
        self._galaxy_info = None  # raw Galaxy version-detail response, set lazily or by from_name()

        # Record and apply the initial requirement immediately; this may raise if it cannot be met.
        self.add_requirement(parent, requirement)

    def __str__(self):
        # Fully qualified collection name, e.g. "namespace.name".
        return to_text("%s.%s" % (self.namespace, self.name))

    @property
    def latest_version(self):
        """Highest known version by LooseVersion ordering, or '*' when only the wildcard is known."""
        try:
            return max([v for v in self.versions if v != '*'], key=LooseVersion)
        except ValueError:  # ValueError: max() arg is an empty sequence
            return '*'

    @property
    def dependencies(self):
        """
        The collection's dependency dict, or None when multiple candidate versions remain
        (metadata is version-specific, so it cannot be fetched until one version is chosen).
        """
        if self._metadata:
            return self._metadata['dependencies']
        elif len(self.versions) > 1:
            return None

        self._get_metadata()
        return self._metadata['dependencies']

    def add_requirement(self, parent, requirement):
        """
        Record *requirement* (coming from *parent*, or None for a direct request) and narrow the
        candidate version set. Raises AnsibleError when no remaining version satisfies it.
        """
        self.required_by.append((parent, requirement))
        new_versions = set(v for v in self.versions if self._meets_requirements(v, requirement, parent))
        if len(new_versions) == 0:
            if self.skip:
                # Already installed at an incompatible version; tell the user which force flag applies.
                force_flag = '--force-with-deps' if parent else '--force'
                version = self.latest_version if self.latest_version != '*' else 'unknown'
                msg = "Cannot meet requirement %s:%s as it is already installed at version '%s'. Use %s to overwrite" \
                    % (str(self), requirement, version, force_flag)
                raise AnsibleError(msg)
            elif parent is None:
                msg = "Cannot meet requirement %s for dependency %s" % (requirement, str(self))
            else:
                msg = "Cannot meet dependency requirement '%s:%s' for collection %s" % (str(self), requirement, parent)

            collection_source = to_text(self.b_path, nonstring='passthru') or self.source
            req_by = "\n".join(
                "\t%s - '%s:%s'" % (to_text(p) if p else 'base', str(self), r)
                for p, r in self.required_by
            )

            versions = ", ".join(sorted(self.versions, key=LooseVersion))
            raise AnsibleError(
                "%s from source '%s'. Available versions before last requirement added: %s\nRequirements from:\n%s"
                % (msg, collection_source, versions, req_by)
            )

        self.versions = new_versions

    def install(self, path, b_temp_path):
        """
        Install the resolved collection under ``path/<namespace>/<name>``, downloading the artifact
        first when no local tarball is available. Any existing install directory is replaced.

        :param path: Text string base path to install collections into.
        :param b_temp_path: Byte str temporary working directory for downloads/extraction.
        """
        if self.skip:
            display.display("Skipping '%s' as it is already installed" % str(self))
            return

        # Install if it is not
        collection_path = os.path.join(path, self.namespace, self.name)
        b_collection_path = to_bytes(collection_path, errors='surrogate_or_strict')
        display.display("Installing '%s:%s' to '%s'" % (str(self), self.latest_version,
                                                        collection_path))

        if self.b_path is None:
            # No local artifact; fetch from Galaxy and verify against the advertised sha256.
            download_url = self._galaxy_info['download_url']
            artifact_hash = self._galaxy_info['artifact']['sha256']
            self.b_path = _download_file(download_url, b_temp_path, artifact_hash, self._validate_certs)

        if os.path.exists(b_collection_path):
            shutil.rmtree(b_collection_path)
        os.makedirs(b_collection_path)

        with tarfile.open(self.b_path, mode='r') as collection_tar:
            # FILES.json enumerates every file/dir in the artifact with per-file checksums.
            files_member_obj = collection_tar.getmember('FILES.json')
            with _tarfile_extract(collection_tar, files_member_obj) as files_obj:
                files = json.loads(to_text(files_obj.read(), errors='surrogate_or_strict'))

            _extract_tar_file(collection_tar, 'MANIFEST.json', b_collection_path, b_temp_path)
            _extract_tar_file(collection_tar, 'FILES.json', b_collection_path, b_temp_path)

            for file_info in files['files']:
                file_name = file_info['name']
                if file_name == '.':
                    continue

                if file_info['ftype'] == 'file':
                    _extract_tar_file(collection_tar, file_name, b_collection_path, b_temp_path,
                                      expected_hash=file_info['chksum_sha256'])
                else:
                    os.makedirs(os.path.join(b_collection_path, to_bytes(file_name, errors='surrogate_or_strict')))

    def set_latest_version(self):
        """Pin the requirement to its latest candidate version and fetch that version's metadata."""
        self.versions = set([self.latest_version])
        self._get_metadata()

    def _get_metadata(self):
        """Fetch version details from the Galaxy server for the (single) pinned version, if not cached."""
        if self._metadata:
            return

        n_collection_url = _urljoin(self.source, 'api', 'v2', 'collections', self.namespace, self.name, 'versions',
                                    self.latest_version)
        details = json.load(open_url(n_collection_url, validate_certs=self._validate_certs))
        self._galaxy_info = details
        self._metadata = details['metadata']

    def _meets_requirements(self, version, requirements, parent):
        """
        Supports version identifiers can be '==', '!=', '>', '>=', '<', '<=', '*'. Each requirement is delimited by ','
        """
        op_map = {
            '!=': operator.ne,
            '==': operator.eq,
            '=': operator.eq,
            '>=': operator.ge,
            '>': operator.gt,
            '<=': operator.le,
            '<': operator.lt,
        }

        for req in list(requirements.split(',')):
            # Operator is 1 char, or 2 chars when the second char is '=' (e.g. '>=', '==', '!=').
            op_pos = 2 if len(req) > 1 and req[1] == '=' else 1
            op = op_map.get(req[:op_pos])

            requirement = req[op_pos:]
            if not op:
                # No recognised operator prefix: treat the whole string as an exact version match.
                requirement = req
                op = operator.eq

            # In the case we are checking a new requirement on a base requirement (parent != None) we can't accept
            # version as '*' (unknown version) unless the requirement is also '*'.
            if parent and version == '*' and requirement != '*':
                break
            elif requirement == '*' or version == '*':
                continue

            if not op(LooseVersion(version), LooseVersion(requirement)):
                break
        else:
            return True

        # The loop was broken early, it does not meet all the requirements
        return False

    @staticmethod
    def from_tar(b_path, validate_certs, force, parent=None):
        """
        Build a CollectionRequirement from a local collection tarball, reading MANIFEST.json and
        FILES.json out of the archive. Raises AnsibleError for invalid tars or missing members.
        """
        if not tarfile.is_tarfile(b_path):
            raise AnsibleError("Collection artifact at '%s' is not a valid tar file." % to_native(b_path))

        info = {}
        with tarfile.open(b_path, mode='r') as collection_tar:
            for b_member_name, property_name in CollectionRequirement._FILE_MAPPING:
                n_member_name = to_native(b_member_name)
                try:
                    member = collection_tar.getmember(n_member_name)
                except KeyError:
                    raise AnsibleError("Collection at '%s' does not contain the required file %s."
                                       % (to_native(b_path), n_member_name))

                with _tarfile_extract(collection_tar, member) as member_obj:
                    try:
                        info[property_name] = json.loads(to_text(member_obj.read(), errors='surrogate_or_strict'))
                    except ValueError:
                        raise AnsibleError("Collection tar file member %s does not contain a valid json string."
                                           % n_member_name)

        meta = info['manifest_file']['collection_info']
        files = info['files_file']['files']

        namespace = meta['namespace']
        name = meta['name']
        version = meta['version']

        return CollectionRequirement(namespace, name, b_path, None, [version], version, force, parent=parent,
                                     validate_certs=validate_certs, metadata=meta, files=files)

    @staticmethod
    def from_path(b_path, validate_certs, force, parent=None):
        """
        Build a CollectionRequirement from an already-installed collection directory. When
        MANIFEST.json is missing, namespace/name are inferred from the path and the version is '*'.
        The result has ``skip=True`` (it is treated as installed).
        """
        info = {}
        for b_file_name, property_name in CollectionRequirement._FILE_MAPPING:
            b_file_path = os.path.join(b_path, b_file_name)
            if not os.path.exists(b_file_path):
                continue

            with open(b_file_path, 'rb') as file_obj:
                try:
                    info[property_name] = json.loads(to_text(file_obj.read(), errors='surrogate_or_strict'))
                except ValueError:
                    raise AnsibleError("Collection file at '%s' does not contain a valid json string."
                                       % to_native(b_file_path))

        if 'manifest_file' in info:
            meta = info['manifest_file']['collection_info']
        else:
            display.warning("Collection at '%s' does not have a MANIFEST.json file, cannot detect version."
                            % to_text(b_path))
            # Fall back to path layout: .../<namespace>/<name>
            parent_dir, name = os.path.split(to_text(b_path, errors='surrogate_or_strict'))
            namespace = os.path.split(parent_dir)[1]
            meta = {
                'namespace': namespace,
                'name': name,
                'version': '*',
                'dependencies': {},
            }

        namespace = meta['namespace']
        name = meta['name']
        version = meta['version']

        files = info.get('files_file', {}).get('files', {})

        return CollectionRequirement(namespace, name, b_path, None, [version], version, force, parent=parent,
                                     validate_certs=validate_certs, metadata=meta, files=files, skip=True)

    @staticmethod
    def from_name(collection, servers, requirement, validate_certs, force, parent=None):
        """
        Build a CollectionRequirement by querying each Galaxy server in *servers* for
        ``namespace.name``. An exact requirement queries a single version endpoint; otherwise all
        published versions are paged through. Raises AnsibleError when no server knows the
        collection.
        """
        namespace, name = collection.split('.', 1)
        galaxy_info = None
        galaxy_meta = None

        for server in servers:
            collection_url_paths = [server, 'api', 'v2', 'collections', namespace, name, 'versions']

            is_single = False
            # Exact requirements (no '*', range, or '!=' operator) can hit the version endpoint directly.
            if not (requirement == '*' or requirement.startswith('<') or requirement.startswith('>') or
                    requirement.startswith('!=')):
                if requirement.startswith('='):
                    requirement = requirement.lstrip('=')

                collection_url_paths.append(requirement)
                is_single = True

            n_collection_url = _urljoin(*collection_url_paths)
            try:
                resp = json.load(open_url(n_collection_url, validate_certs=validate_certs))
            except urllib_error.HTTPError as err:
                if err.code == 404:
                    # Not on this server, try the next one.
                    continue
                raise

            if is_single:
                galaxy_info = resp
                galaxy_meta = resp['metadata']
                versions = [resp['version']]
            else:
                versions = []
                while True:
                    # Galaxy supports semver but ansible-galaxy does not. We ignore any versions that don't match
                    # StrictVersion (x.y.z) and only support pre-releases if an explicit version was set (done above).
                    versions += [v['version'] for v in resp['results'] if StrictVersion.version_re.match(v['version'])]
                    if resp['next'] is None:
                        break
                    resp = json.load(open_url(to_native(resp['next'], errors='surrogate_or_strict'),
                                              validate_certs=validate_certs))

            break
        else:
            raise AnsibleError("Failed to find collection %s:%s" % (collection, requirement))

        req = CollectionRequirement(namespace, name, None, server, versions, requirement, force, parent=parent,
                                    validate_certs=validate_certs, metadata=galaxy_meta)
        req._galaxy_info = galaxy_info
        return req
|
||||
|
||||
|
||||
def build_collection(collection_path, output_path, force):
    """
    Creates the Ansible collection artifact in a .tar.gz file.

    :param collection_path: The path to the collection to build. This should be the directory that contains the
        galaxy.yml file.
    :param output_path: The path to create the collection build artifact. This should be a directory.
    :param force: Whether to overwrite an existing collection build artifact or fail.
    :return: The path to the collection build artifact.
    """
    b_collection_path = to_bytes(collection_path, errors='surrogate_or_strict')
    b_galaxy_path = os.path.join(b_collection_path, b'galaxy.yml')
    if not os.path.exists(b_galaxy_path):
        raise AnsibleError("The collection galaxy.yml path '%s' does not exist." % to_native(b_galaxy_path))

    collection_meta = _get_galaxy_yml(b_galaxy_path)
    file_manifest = _build_files_manifest(b_collection_path)
    collection_manifest = _build_manifest(**collection_meta)

    # Artifact name follows the Galaxy convention: <namespace>-<name>-<version>.tar.gz
    collection_output = os.path.join(output_path, "%s-%s-%s.tar.gz" % (collection_meta['namespace'],
                                                                       collection_meta['name'],
                                                                       collection_meta['version']))

    b_collection_output = to_bytes(collection_output, errors='surrogate_or_strict')
    if os.path.exists(b_collection_output):
        if os.path.isdir(b_collection_output):
            raise AnsibleError("The output collection artifact '%s' already exists, "
                               "but is a directory - aborting" % to_native(collection_output))
        elif not force:
            raise AnsibleError("The file '%s' already exists. You can use --force to re-create "
                               "the collection artifact." % to_native(collection_output))

    _build_collection_tar(b_collection_path, b_collection_output, collection_manifest, file_manifest)

    # Fix: the docstring documents a return value but the function previously returned None.
    return collection_output
|
||||
|
||||
|
||||
def publish_collection(collection_path, server, key, ignore_certs, wait):
    """
    Publish an Ansible collection tarball into an Ansible Galaxy server.

    :param collection_path: The path to the collection tarball to publish.
    :param server: A native string of the Ansible Galaxy server to publish to.
    :param key: The API key to use for authorization.
    :param ignore_certs: Whether to ignore certificate validation when interacting with the server.
    :param wait: Whether to block until the server-side import task has finished.
    """
    b_collection_path = to_bytes(collection_path, errors='surrogate_or_strict')
    if not os.path.exists(b_collection_path):
        raise AnsibleError("The collection path specified '%s' does not exist." % to_native(collection_path))
    elif not tarfile.is_tarfile(b_collection_path):
        raise AnsibleError("The collection path specified '%s' is not a tarball, use 'ansible-galaxy collection "
                           "build' to create a proper release artifact." % to_native(collection_path))

    display.display("Publishing collection artifact '%s' to %s" % (collection_path, server))

    n_url = _urljoin(server, 'api', 'v2', 'collections')

    # Upload is a multipart/form-data POST containing the tarball and its sha256.
    data, content_type = _get_mime_data(b_collection_path)
    headers = {
        'Content-type': content_type,
        'Content-length': len(data),
    }
    if key:
        headers['Authorization'] = "Token %s" % key
    validate_certs = not ignore_certs

    try:
        resp = json.load(open_url(n_url, data=data, headers=headers, method='POST', validate_certs=validate_certs))
    except urllib_error.HTTPError as err:
        # Galaxy returns a JSON error body; fall back to generic text when it is absent/unparsable.
        try:
            err_info = json.load(err)
        except (AttributeError, ValueError):
            err_info = {}

        code = to_native(err_info.get('code', 'Unknown'))
        message = to_native(err_info.get('message', 'Unknown error returned by Galaxy server.'))

        raise AnsibleError("Error when publishing collection (HTTP Code: %d, Message: %s Code: %s)"
                           % (err.code, message, code))

    display.vvv("Collection has been pushed to the Galaxy server %s" % server)
    # The server responds with the URL of the asynchronous import task.
    import_uri = resp['task']
    if wait:
        _wait_import(import_uri, key, validate_certs)
        display.display("Collection has been successfully published to the Galaxy server")
    else:
        display.display("Collection has been pushed to the Galaxy server, not waiting until import has completed "
                        "due to --no-wait being set. Import task results can be found at %s" % import_uri)
|
||||
|
||||
|
||||
def install_collections(collections, output_path, servers, validate_certs, ignore_errors, no_deps, force, force_deps):
    """
    Install Ansible collections to the path specified.

    :param collections: The collections to install, should be a list of tuples with (name, requirement, Galaxy server).
    :param output_path: The path to install the collections to.
    :param servers: A list of Galaxy servers to query when searching for a collection.
    :param validate_certs: Whether to validate the Galaxy server certificates.
    :param ignore_errors: Whether to ignore any errors when installing the collection.
    :param no_deps: Ignore any collection dependencies and only install the base requirements.
    :param force: Re-install a collection if it has already been installed.
    :param force_deps: Re-install a collection as well as its dependencies if they have already been installed.
    """
    # Already-installed collections factor into dependency resolution (they may be skipped or replaced).
    existing_collections = _find_existing_collections(output_path)

    with _tempdir() as b_temp_path:
        # Resolve the full set of collections (requested + dependencies) before installing anything.
        dependency_map = _build_dependency_map(collections, existing_collections, b_temp_path, servers, validate_certs,
                                               force, force_deps, no_deps)

        for collection in dependency_map.values():
            try:
                collection.install(output_path, b_temp_path)
            except AnsibleError as err:
                if ignore_errors:
                    display.warning("Failed to install collection %s but skipping due to --ignore-errors being set. "
                                    "Error: %s" % (str(collection), to_text(err)))
                else:
                    raise
|
||||
|
||||
|
||||
def parse_collections_requirements_file(requirements_file):
    """
    Parses an Ansible requirements.yml file and returns all the collections defined in it. This value can be used
    with install_collection(). The requirements file is in the form:

        ---
        collections:
        - namespace.collection
        - name: namespace.collection
          version: version identifier, multiple identifiers are separated by ','
          source: the URL or predefined source name in ~/.ansible_galaxy to pull the collection from

    :param requirements_file: The path to the requirements file.
    :return: A list of tuples (name, version, source).
    """
    collection_info = []

    b_requirements_file = to_bytes(requirements_file, errors='surrogate_or_strict')
    if not os.path.exists(b_requirements_file):
        raise AnsibleError("The requirements file '%s' does not exist." % to_native(requirements_file))

    display.vvv("Reading collection requirement file at '%s'" % requirements_file)
    with open(b_requirements_file, 'rb') as req_obj:
        try:
            requirements = yaml.safe_load(req_obj)
        except YAMLError as err:
            raise AnsibleError("Failed to parse the collection requirements yml at '%s' with the following error:\n%s"
                               % (to_native(requirements_file), to_native(err)))

    if not isinstance(requirements, dict) or 'collections' not in requirements:
        # TODO: Link to documentation page that documents the requirements.yml format for collections.
        raise AnsibleError("Expecting collections requirements file to be a dict with the key "
                           "collections that contains a list of collections to install.")

    for collection_req in requirements['collections']:
        # Entries are either a plain "namespace.name" string or a dict with name/version/source keys.
        if isinstance(collection_req, dict):
            req_name = collection_req.get('name', None)
            if req_name is None:
                raise AnsibleError("Collections requirement entry should contain the key name.")

            req_version = collection_req.get('version', '*')
            req_source = collection_req.get('source', None)

            collection_info.append((req_name, req_version, req_source))
        else:
            collection_info.append((collection_req, '*', None))

    return collection_info
|
||||
|
||||
|
||||
@contextmanager
def _tempdir():
    """
    Context manager that creates a temporary directory under Ansible's local tmp path and yields it
    as a byte string.

    Fix: cleanup now runs in a ``finally`` block so the directory is removed even when the body of
    the ``with`` statement raises (the original leaked the directory on an exception).
    """
    b_temp_path = tempfile.mkdtemp(dir=to_bytes(C.DEFAULT_LOCAL_TMP, errors='surrogate_or_strict'))
    try:
        yield b_temp_path
    finally:
        shutil.rmtree(b_temp_path)
|
||||
|
||||
|
||||
@contextmanager
|
||||
def _tarfile_extract(tar, member):
|
||||
tar_obj = tar.extractfile(member)
|
||||
yield tar_obj
|
||||
tar_obj.close()
|
||||
|
||||
|
||||
def _get_galaxy_yml(b_galaxy_yml_path):
    """
    Load, validate, and normalise a collection's galaxy.yml metadata.

    Verifies all mandatory keys are present, warns about unrecognised keys, fills in defaults for
    optional keys (None for strings, [] for lists, {} for dicts), and renames ``license`` to
    ``license_ids`` to avoid clashing with the Python builtin. Returns the normalised dict.
    """
    mandatory_keys = frozenset(['namespace', 'name', 'version', 'authors', 'readme'])
    optional_strings = ('description', 'repository', 'documentation', 'homepage', 'issues', 'license_file')
    optional_lists = ('license', 'tags', 'authors')  # authors isn't optional but this will ensure it is list
    optional_dicts = ('dependencies',)
    all_keys = frozenset(list(mandatory_keys) + list(optional_strings) + list(optional_lists) + list(optional_dicts))

    try:
        with open(b_galaxy_yml_path, 'rb') as yml_obj:
            galaxy_yml = yaml.safe_load(yml_obj)
    except YAMLError as err:
        raise AnsibleError("Failed to parse the galaxy.yml at '%s' with the following error:\n%s"
                           % (to_native(b_galaxy_yml_path), to_native(err)))

    present_keys = set(galaxy_yml.keys())

    missing_keys = mandatory_keys.difference(present_keys)
    if missing_keys:
        raise AnsibleError("The collection galaxy.yml at '%s' is missing the following mandatory keys: %s"
                           % (to_native(b_galaxy_yml_path), ", ".join(sorted(missing_keys))))

    extra_keys = present_keys.difference(all_keys)
    if extra_keys:
        display.warning("Found unknown keys in collection galaxy.yml at '%s': %s"
                        % (to_text(b_galaxy_yml_path), ", ".join(extra_keys)))

    # Fill in defaults for anything the author omitted.
    for string_key in optional_strings:
        galaxy_yml.setdefault(string_key, None)

    for list_key in optional_lists:
        current = galaxy_yml.get(list_key, None)
        if current is None:
            galaxy_yml[list_key] = []
        elif not isinstance(current, list):
            # Coerce a scalar to a single-element list.
            galaxy_yml[list_key] = [current]

    for dict_key in optional_dicts:
        galaxy_yml.setdefault(dict_key, {})

    # license is a builtin var in Python, to avoid confusion we just rename it to license_ids
    galaxy_yml['license_ids'] = galaxy_yml.pop('license')

    return galaxy_yml
|
||||
|
||||
|
||||
def _build_files_manifest(b_collection_path):
    """
    Walk the collection directory and build the FILES.json manifest dict: one entry per file and
    directory with its relative path, type, and (for files) a sha256 checksum. Skips VCS/cache
    directories, compiled/retry files, galaxy.yml itself, and symlinked directories that point
    outside the collection.
    """
    # File patterns and directory names to exclude from the built artifact.
    b_ignore_files = frozenset([b'*.pyc', b'*.retry'])
    b_ignore_dirs = frozenset([b'CVS', b'.bzr', b'.hg', b'.git', b'.svn', b'__pycache__', b'.tox'])

    # Template copied for each manifest entry.
    entry_template = {
        'name': None,
        'ftype': None,
        'chksum_type': None,
        'chksum_sha256': None,
        'format': MANIFEST_FORMAT
    }
    # The manifest always starts with an entry for the collection root itself.
    manifest = {
        'files': [
            {
                'name': '.',
                'ftype': 'dir',
                'chksum_type': None,
                'chksum_sha256': None,
                'format': MANIFEST_FORMAT,
            },
        ],
        'format': MANIFEST_FORMAT,
    }

    def _walk(b_path, b_top_level_dir):
        # Recursively visit b_path, appending entries to the shared manifest dict.
        for b_item in os.listdir(b_path):
            b_abs_path = os.path.join(b_path, b_item)
            # Manifest names are relative to the collection root.
            b_rel_base_dir = b'' if b_path == b_top_level_dir else b_path[len(b_top_level_dir) + 1:]
            rel_path = to_text(os.path.join(b_rel_base_dir, b_item), errors='surrogate_or_strict')

            if os.path.isdir(b_abs_path):
                if b_item in b_ignore_dirs:
                    display.vvv("Skipping '%s' for collection build" % to_text(b_abs_path))
                    continue

                if os.path.islink(b_abs_path):
                    b_link_target = os.path.realpath(b_abs_path)

                    # Only follow symlinks that stay inside the collection tree.
                    if not b_link_target.startswith(b_top_level_dir):
                        display.warning("Skipping '%s' as it is a symbolic link to a directory outside the collection"
                                        % to_text(b_abs_path))
                        continue

                manifest_entry = entry_template.copy()
                manifest_entry['name'] = rel_path
                manifest_entry['ftype'] = 'dir'

                manifest['files'].append(manifest_entry)

                _walk(b_abs_path, b_top_level_dir)
            else:
                if b_item == b'galaxy.yml':
                    # galaxy.yml is the build input; it is not shipped in the artifact.
                    continue
                elif any(fnmatch.fnmatch(b_item, b_pattern) for b_pattern in b_ignore_files):
                    display.vvv("Skipping '%s' for collection build" % to_text(b_abs_path))
                    continue

                manifest_entry = entry_template.copy()
                manifest_entry['name'] = rel_path
                manifest_entry['ftype'] = 'file'
                manifest_entry['chksum_type'] = 'sha256'
                manifest_entry['chksum_sha256'] = secure_hash(b_abs_path, hash_func=sha256)

                manifest['files'].append(manifest_entry)

    _walk(b_collection_path, b_collection_path)

    return manifest
|
||||
|
||||
|
||||
def _build_manifest(namespace, name, version, authors, readme, tags, description, license_ids, license_file,
                    dependencies, repository, documentation, homepage, issues, **kwargs):
    """
    Assemble the MANIFEST.json dict for a collection from the normalised galaxy.yml values.

    The ``chksum_sha256`` of the file manifest is left as None here; _build_collection_tar fills it
    in once FILES.json has been serialised. Extra keyword arguments are accepted and ignored so the
    galaxy.yml dict can be splatted in directly.
    """
    # Key order is preserved deliberately — it determines the byte layout of the
    # serialised MANIFEST.json and therefore its checksum.
    collection_info = {
        'namespace': namespace,
        'name': name,
        'version': version,
        'authors': authors,
        'readme': readme,
        'tags': tags,
        'description': description,
        'license': license_ids,
        'license_file': license_file,
        'dependencies': dependencies,
        'repository': repository,
        'documentation': documentation,
        'homepage': homepage,
        'issues': issues,
    }

    file_manifest_file = {
        'name': 'FILES.json',
        'ftype': 'file',
        'chksum_type': 'sha256',
        'chksum_sha256': None,  # Filled out in _build_collection_tar
        'format': MANIFEST_FORMAT,
    }

    return {
        'collection_info': collection_info,
        'file_manifest_file': file_manifest_file,
        'format': MANIFEST_FORMAT,
    }
|
||||
|
||||
|
||||
def _build_collection_tar(b_collection_path, b_tar_path, collection_manifest, file_manifest):
    """
    Write the collection artifact (.tar.gz) at b_tar_path from the files listed in file_manifest.

    Serialises FILES.json first so its sha256 can be recorded inside MANIFEST.json (mutates the
    caller's collection_manifest dict), builds the archive in a temp dir, then copies it into place.
    """
    files_manifest_json = to_bytes(json.dumps(file_manifest, indent=True), errors='surrogate_or_strict')
    # MANIFEST.json embeds the checksum of the serialised FILES.json, so it must be dumped second.
    collection_manifest['file_manifest_file']['chksum_sha256'] = secure_hash_s(files_manifest_json, hash_func=sha256)
    collection_manifest_json = to_bytes(json.dumps(collection_manifest, indent=True), errors='surrogate_or_strict')

    with _tempdir() as b_temp_path:
        b_tar_filepath = os.path.join(b_temp_path, os.path.basename(b_tar_path))

        with tarfile.open(b_tar_filepath, mode='w:gz') as tar_file:
            # Add the MANIFEST.json and FILES.json file to the archive
            for name, b in [('MANIFEST.json', collection_manifest_json), ('FILES.json', files_manifest_json)]:
                b_io = BytesIO(b)
                tar_info = tarfile.TarInfo(name)
                tar_info.size = len(b)
                tar_info.mtime = time.time()
                tar_info.mode = 0o0644
                tar_file.addfile(tarinfo=tar_info, fileobj=b_io)

            for file_info in file_manifest['files']:
                if file_info['name'] == '.':
                    continue

                # arcname expects a native string, cannot be bytes
                filename = to_native(file_info['name'], errors='surrogate_or_strict')
                b_src_path = os.path.join(b_collection_path, to_bytes(filename, errors='surrogate_or_strict'))

                def reset_stat(tarinfo):
                    # Normalise ownership/permissions so the artifact is reproducible across builders.
                    tarinfo.mode = 0o0755 if tarinfo.isdir() else 0o0644
                    tarinfo.uid = tarinfo.gid = 0
                    tarinfo.uname = tarinfo.gname = ''
                    return tarinfo

                tar_file.add(os.path.realpath(b_src_path), arcname=filename, recursive=False, filter=reset_stat)

        # Build in temp space first; only publish the finished archive to the destination.
        shutil.copy(b_tar_filepath, b_tar_path)
        collection_name = "%s.%s" % (collection_manifest['collection_info']['namespace'],
                                     collection_manifest['collection_info']['name'])
        display.display('Created collection for %s at %s' % (collection_name, to_text(b_tar_path)))
|
||||
|
||||
|
||||
def _get_mime_data(b_collection_path):
    """Build the multipart/form-data payload used to publish a collection.

    :param b_collection_path: Byte path of the collection tarball to upload.
    :returns: Tuple of (request body bytes, Content-Type header value).
    """
    with open(b_collection_path, 'rb') as tar_obj:
        b_tar_data = tar_obj.read()

    boundary = '--------------------------%s' % uuid.uuid4().hex
    b_basename = os.path.basename(b_collection_path)
    b_boundary = b"--" + to_bytes(boundary, errors='surrogate_or_strict')

    # Two parts: the sha256 of the artifact, then the artifact itself,
    # terminated by the closing boundary marker.
    b_parts = []
    b_parts.append(b_boundary)
    b_parts.append(b"Content-Disposition: form-data; name=\"sha256\"")
    b_parts.append(to_bytes(secure_hash_s(b_tar_data), errors='surrogate_or_strict'))
    b_parts.append(b_boundary)
    b_parts.append(b"Content-Disposition: file; name=\"file\"; filename=\"%s\"" % b_basename)
    b_parts.append(b"Content-Type: application/octet-stream")
    b_parts.append(b"")
    b_parts.append(b_tar_data)
    b_parts.append(b"%s--" % b_boundary)

    return b"\r\n".join(b_parts), 'multipart/form-data; boundary=%s' % boundary
||||
def _wait_import(task_url, key, validate_certs):
    """Poll a Galaxy import task until it completes, then surface its messages.

    :param task_url: URL of the Galaxy import task to poll.
    :param key: Optional Galaxy API token for the Authorization header.
    :param validate_certs: Whether to validate TLS certificates on requests.
    :raises AnsibleError: If polling times out or the import ends in 'failed'.
    """
    headers = {}
    if key:
        headers['Authorization'] = "Token %s" % key

    display.vvv('Waiting until galaxy import task %s has completed' % task_url)

    wait = 2
    while True:
        resp = json.load(open_url(to_native(task_url, errors='surrogate_or_strict'), headers=headers, method='GET',
                                  validate_certs=validate_certs))

        if resp.get('finished_at', None):
            break
        elif wait > 20:
            # We try for a maximum of ~60 seconds before giving up in case something has gone wrong on the server end.
            raise AnsibleError("Timeout while waiting for the Galaxy import process to finish, check progress at '%s'"
                               % to_native(task_url))

        status = resp.get('status', 'waiting')
        display.vvv('Galaxy import process has a status of %s, wait %d seconds before trying again' % (status, wait))
        time.sleep(wait)
        wait *= 1.5  # poor man's exponential backoff algo so we don't flood the Galaxy API.

    # Relay every server-side message at the matching verbosity/severity.
    for message in resp.get('messages', []):
        level = message['level']
        if level == 'error':
            display.error("Galaxy import error message: %s" % message['message'])
        elif level == 'warning':
            display.warning("Galaxy import warning message: %s" % message['message'])
        else:
            display.vvv("Galaxy import message: %s - %s" % (level, message['message']))

    if resp['state'] == 'failed':
        code = to_native(resp['error'].get('code', 'UNKNOWN'))
        description = to_native(resp['error'].get('description', "Unknown error, see %s for more details" % task_url))
        raise AnsibleError("Galaxy import process failed: %s (Code: %s)" % (description, code))
def _find_existing_collections(path):
    """Return a CollectionRequirement for every collection installed under ``path``.

    Expects the standard ``<namespace>/<name>`` directory layout.
    """
    b_path = to_bytes(path, errors='surrogate_or_strict')

    found = []
    for b_namespace in os.listdir(b_path):
        b_namespace_path = os.path.join(b_path, b_namespace)
        if os.path.isfile(b_namespace_path):
            # Stray files at the namespace level cannot contain collections.
            continue

        for b_collection in os.listdir(b_namespace_path):
            b_collection_path = os.path.join(b_namespace_path, b_collection)
            if not os.path.isdir(b_collection_path):
                continue

            req = CollectionRequirement.from_path(b_collection_path, True, False)
            display.vvv("Found installed collection %s:%s at '%s'" % (str(req), req.latest_version,
                                                                      to_text(b_collection_path)))
            found.append(req)

    return found
def _build_dependency_map(collections, existing_collections, b_temp_path, servers, validate_certs, force, force_deps,
                          no_deps):
    """Resolve the requested collections and their dependencies.

    :param collections: List of (name, version requirement, source) tuples to install.
    :param existing_collections: CollectionRequirements already installed locally.
    :param b_temp_path: Byte path of a temp dir used for downloaded artifacts.
    :param servers: Galaxy server URLs to query.
    :param validate_certs: Whether to validate TLS certificates.
    :param force: Re-install requested collections even if already installed.
    :param force_deps: Also re-install their dependencies.
    :param no_deps: Skip dependency resolution entirely.
    :returns: Dict mapping collection label -> CollectionRequirement.
    """
    dependency_map = {}

    # First build the dependency map on the actual requirements
    for name, version, source in collections:
        _get_collection_info(dependency_map, existing_collections, name, version, source, b_temp_path, servers,
                             validate_certs, (force or force_deps))

    # Entries marked skip are already satisfied; their deps never need walking.
    checked_parents = set([str(c) for c in dependency_map.values() if c.skip])
    while len(dependency_map) != len(checked_parents):
        while not no_deps:  # Only parse dependencies if no_deps was not set
            parents_to_check = set(dependency_map.keys()).difference(checked_parents)

            deps_exhausted = True
            for parent in parents_to_check:
                parent_info = dependency_map[parent]

                if parent_info.dependencies:
                    deps_exhausted = False
                    for dep_name, dep_requirement in parent_info.dependencies.items():
                        _get_collection_info(dependency_map, existing_collections, dep_name, dep_requirement,
                                             parent_info.source, b_temp_path, servers, validate_certs, force_deps,
                                             parent=parent)

                checked_parents.add(parent)

            # No extra dependencies were resolved, exit loop
            if deps_exhausted:
                break

        # Now we have resolved the deps to our best extent, now select the latest version for collections with
        # multiple versions found and go from there
        deps_not_checked = set(dependency_map.keys()).difference(checked_parents)
        for collection in deps_not_checked:
            dependency_map[collection].set_latest_version()
            if no_deps or len(dependency_map[collection].dependencies) == 0:
                checked_parents.add(collection)

    return dependency_map
def _get_collection_info(dep_map, existing_collections, collection, requirement, source, b_temp_path, server_list,
                         validate_certs, force, parent=None):
    """Resolve one collection requirement and record it in ``dep_map``.

    The requirement may be a local tarball path, a URL to a tarball, or a
    ``namespace.name`` label to look up on a Galaxy server. Existing entries
    in ``dep_map`` get the new requirement added rather than being replaced.

    :param dep_map: Mutable dict of collection label -> CollectionRequirement (updated in place).
    :param existing_collections: Collections already installed locally.
    :param collection: Requirement string (path, URL, or namespace.name).
    :param requirement: Version requirement for the collection.
    :param source: Explicit Galaxy server for this requirement, or None.
    :param b_temp_path: Byte path of a temp dir for downloads.
    :param server_list: Fallback list of Galaxy servers.
    :param validate_certs: Whether to validate TLS certificates.
    :param force: Install even when an existing install satisfies the requirement.
    :param parent: Label of the collection that pulled this one in, if any.
    """
    dep_msg = ""
    if parent:
        dep_msg = " - as dependency of %s" % parent
    display.vvv("Processing requirement collection '%s'%s" % (to_text(collection), dep_msg))

    b_tar_path = None
    if os.path.isfile(to_bytes(collection, errors='surrogate_or_strict')):
        display.vvvv("Collection requirement '%s' is a tar artifact" % to_text(collection))
        b_tar_path = to_bytes(collection, errors='surrogate_or_strict')
    elif urlparse(collection).scheme:
        display.vvvv("Collection requirement '%s' is a URL to a tar artifact" % collection)
        b_tar_path = _download_file(collection, b_temp_path, None, validate_certs)

    if b_tar_path:
        req = CollectionRequirement.from_tar(b_tar_path, validate_certs, force, parent=parent)

        collection_name = str(req)
        if collection_name in dep_map:
            collection_info = dep_map[collection_name]
            # A tarball pins an exact version; record it as a requirement.
            collection_info.add_requirement(None, req.latest_version)
        else:
            collection_info = req
    else:
        display.vvvv("Collection requirement '%s' is the name of a collection" % collection)
        if collection in dep_map:
            collection_info = dep_map[collection]
            collection_info.add_requirement(parent, requirement)
        else:
            # An explicit source overrides the configured server list.
            servers = [source] if source else server_list
            collection_info = CollectionRequirement.from_name(collection, servers, requirement, validate_certs, force,
                                                              parent=parent)

    existing = [c for c in existing_collections if str(c) == str(collection_info)]
    if existing and not collection_info.force:
        # Test that the installed collection fits the requirement
        existing[0].add_requirement(str(collection_info), requirement)
        collection_info = existing[0]

    dep_map[str(collection_info)] = collection_info
def _urljoin(*args):
    """Join URL components with single slashes, always ending with '/'."""
    components = [to_native(part, errors='surrogate_or_strict').rstrip('/') for part in args]
    components.append('')  # trailing empty element yields the trailing slash
    return '/'.join(components)
def _download_file(url, b_path, expected_hash, validate_certs):
    """Download ``url`` into a uniquely named file under ``b_path``.

    :param url: URL to download.
    :param b_path: Byte path of the directory to place the file in.
    :param expected_hash: Optional sha256 hex digest to verify against.
    :param validate_certs: Whether to validate TLS certificates.
    :returns: Byte path of the downloaded file.
    :raises AnsibleError: If the downloaded content's hash does not match.
    """
    chunk_size = 65536
    sha256_digest = sha256()

    # Derive a prefix/suffix from the last URL segment so the temp file name
    # still resembles the artifact name.
    n_root, n_ext = os.path.splitext(to_text(url.rsplit('/', 1)[1]))
    b_file_path = tempfile.NamedTemporaryFile(
        dir=b_path,
        prefix=to_bytes(n_root, errors='surrogate_or_strict'),
        suffix=to_bytes(n_ext, errors='surrogate_or_strict'),
        delete=False).name

    display.vvv("Downloading %s to %s" % (url, to_text(b_path)))
    resp = open_url(to_native(url, errors='surrogate_or_strict'), validate_certs=validate_certs)

    # Stream to disk while hashing so large artifacts are never fully in memory.
    with open(b_file_path, 'wb') as download_file:
        while True:
            chunk = resp.read(chunk_size)
            if not chunk:
                break
            sha256_digest.update(chunk)
            download_file.write(chunk)

    if expected_hash:
        actual_hash = sha256_digest.hexdigest()
        display.vvvv("Validating downloaded file hash %s with expected hash %s" % (actual_hash, expected_hash))
        if expected_hash != actual_hash:
            raise AnsibleError("Mismatch artifact hash with downloaded file")

    return b_file_path
def _extract_tar_file(tar, filename, b_dest, b_temp_path, expected_hash=None):
    """Extract a single member of a collection tarball to its destination.

    The member is first streamed to a temp file (hashing as it goes) and only
    moved into place once fully written, so a failed extraction never leaves
    a partial file at the destination.

    :param tar: Open TarFile for the collection artifact.
    :param filename: Name of the member to extract.
    :param b_dest: Byte path of the destination directory root.
    :param b_temp_path: Byte path of a temp dir to stage the file in.
    :param expected_hash: Optional sha256 hex digest from the FILES manifest.
    :raises AnsibleError: If the member is missing or its checksum mismatches.
    """
    n_filename = to_native(filename, errors='surrogate_or_strict')
    try:
        member = tar.getmember(n_filename)
    except KeyError:
        raise AnsibleError("Collection tar at '%s' does not contain the expected file '%s'." % (to_native(tar.name),
                                                                                                n_filename))

    with tempfile.NamedTemporaryFile(dir=b_temp_path, delete=False) as tmpfile_obj:
        bufsize = 65536
        sha256_digest = sha256()
        with _tarfile_extract(tar, member) as tar_obj:
            data = tar_obj.read(bufsize)
            while data:
                tmpfile_obj.write(data)
                tmpfile_obj.flush()
                sha256_digest.update(data)
                data = tar_obj.read(bufsize)

        actual_hash = sha256_digest.hexdigest()

        if expected_hash and actual_hash != expected_hash:
            raise AnsibleError("Checksum mismatch for '%s' inside collection at '%s'"
                               % (n_filename, to_native(tar.name)))

        b_dest_filepath = os.path.join(b_dest, to_bytes(filename, errors='surrogate_or_strict'))
        b_parent_dir = os.path.split(b_dest_filepath)[0]
        if not os.path.exists(b_parent_dir):
            # Seems like Galaxy does not validate if all file entries have a corresponding dir ftype entry. This check
            # makes sure we create the parent directory even if it wasn't set in the metadata.
            os.makedirs(b_parent_dir)

        shutil.move(to_bytes(tmpfile_obj.name, errors='surrogate_or_strict'), b_dest_filepath)
@ -0,0 +1,3 @@
|
||||
# Ansible Collection - {{ namespace }}.{{ collection_name }}
|
||||
|
||||
Documentation for the collection.
|
@ -0,0 +1,65 @@
|
||||
### REQUIRED
|
||||
|
||||
# this can be a company/brand/organization or product namespace
|
||||
# under which all content lives
|
||||
namespace: {{ namespace }}
|
||||
|
||||
|
||||
# the designation of this specific collection
|
||||
name: {{ collection_name }}
|
||||
|
||||
|
||||
# semantic versioning compliant version designation
|
||||
version: 1.0.0
|
||||
|
||||
# the filename for the readme file in the root of the collection, in markdown (.md) format
|
||||
readme: README.md
|
||||
|
||||
|
||||
# a list of the collection's content authors
|
||||
# Ex: 'Full Name <email> (http://site) @nicks:irc/im/site#channel'
|
||||
authors:
|
||||
- {{ author }} <example@domain.com>
|
||||
|
||||
|
||||
### OPTIONAL but strongly advised
|
||||
|
||||
# short summary of the collection
|
||||
description: {{ description }}
|
||||
|
||||
|
||||
# Either a single valid SPDX license identifier or a list of valid SPDX license
|
||||
# identifiers, see https://spdx.org/licenses/. Could also set `license_file`
|
||||
# instead to point to the file that specifies the license in the collection
|
||||
# directory.
|
||||
license: {{ license }}
|
||||
|
||||
|
||||
# list of keywords you want to associate the collection
|
||||
# with for indexing/search systems
|
||||
tags: []
|
||||
|
||||
|
||||
# A dict of dependencies. A dependency is another collection
|
||||
# this collection requires to be installed for it to be usable.
|
||||
# The key of the dict is the collection label (namespace.name)
|
||||
# and the value is a spec for the semver version required.
|
||||
dependencies: {}
|
||||
|
||||
|
||||
### URLs
|
||||
|
||||
# url of originating SCM repository
|
||||
repository: {{ repository_url }}
|
||||
|
||||
|
||||
# url to online docs
|
||||
documentation: {{ documentation_url }}
|
||||
|
||||
|
||||
# homepage of the collection/project
|
||||
homepage: {{ homepage_url }}
|
||||
|
||||
|
||||
# issue tracker url
|
||||
issues: {{ issue_tracker_url }}
|
@ -0,0 +1,31 @@
|
||||
# Collections Plugins Directory
|
||||
|
||||
This directory can be used to ship various plugins inside an Ansible collection. Each plugin is placed in a folder that
is named after the type of plugin it is. It can also include the `module_utils` and `modules` directories that
would contain module utils and modules respectively.
|
||||
|
||||
Here is an example directory of the majority of plugins currently supported by Ansible:
|
||||
|
||||
```
|
||||
└── plugins
|
||||
├── action
|
||||
├── become
|
||||
├── cache
|
||||
├── callback
|
||||
├── cliconf
|
||||
├── connection
|
||||
├── filter
|
||||
├── httpapi
|
||||
├── inventory
|
||||
├── lookup
|
||||
├── module_utils
|
||||
├── modules
|
||||
├── netconf
|
||||
├── shell
|
||||
├── strategy
|
||||
├── terminal
|
||||
├── test
|
||||
└── vars
|
||||
```
|
||||
|
||||
A full list of plugin types can be found at [Working With Plugins]({{ ansible_plugin_list_dir }}).
|
@ -1,2 +1,3 @@
|
||||
destructive
|
||||
shippable/posix/group3
|
||||
skip/python2.6 # build uses tarfile with features not available until 2.7
|
||||
|
@ -0,0 +1 @@
|
||||
A readme
|
@ -0,0 +1 @@
|
||||
Welcome to my test collection doc for {{ namespace }}.
|
@ -0,0 +1,7 @@
|
||||
namespace: '{{ namespace }}'
|
||||
name: '{{ collection_name }}'
|
||||
version: 0.1.0
|
||||
readme: README.md
|
||||
authors:
|
||||
- Ansible Cow <acow@bovineuniversity.edu>
|
||||
- Tu Cow <tucow@bovineuniversity.edu>
|
@ -0,0 +1,2 @@
|
||||
[defaults]
|
||||
test_key = {{ test_variable }}
|
@ -0,0 +1,2 @@
|
||||
[defaults]
|
||||
test_key = {{ test_variable }}
|
@ -0,0 +1,3 @@
|
||||
- name: test collection skeleton
|
||||
debug:
|
||||
msg: "Namespace: {{ namespace }}"
|
@ -0,0 +1,2 @@
|
||||
[defaults]
|
||||
test_key = {{ test_variable }}
|
@ -0,0 +1,2 @@
|
||||
[defaults]
|
||||
test_key = {{ test_variable }}
|
@ -0,0 +1,922 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright: (c) 2019, Ansible Project
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
# Make coding more python3-ish
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import json
|
||||
import os
|
||||
import pytest
|
||||
import re
|
||||
import tarfile
|
||||
import tempfile
|
||||
import time
|
||||
import uuid
|
||||
|
||||
from hashlib import sha256
|
||||
from io import BytesIO, StringIO
|
||||
from units.compat.mock import MagicMock
|
||||
|
||||
import ansible.module_utils.six.moves.urllib.error as urllib_error
|
||||
|
||||
from ansible.cli.galaxy import GalaxyCLI
|
||||
from ansible.errors import AnsibleError
|
||||
from ansible.galaxy import collection
|
||||
from ansible.module_utils._text import to_bytes, to_native, to_text
|
||||
from ansible.utils import context_objects as co
|
||||
from ansible.utils.display import Display
|
||||
from ansible.utils.hashing import secure_hash_s
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
def reset_cli_args():
    """Reset the global CLI args singleton before and after every test.

    ``autouse`` expects a boolean; the previous ``autouse='function'`` only
    worked because any non-empty string is truthy ('function' is a *scope*
    value, not an autouse value).
    """
    co.GlobalCLIArgs._Singleton__instance = None
    yield
    co.GlobalCLIArgs._Singleton__instance = None
||||
@pytest.fixture()
def collection_input(tmp_path_factory):
    ''' Creates a collection skeleton directory for build tests '''
    test_dir = to_text(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
    namespace = 'ansible_namespace'
    collection = 'collection'
    skeleton = os.path.join(os.path.dirname(os.path.split(__file__)[0]), 'cli', 'test_data', 'collection_skeleton')

    # Drive the real CLI entry point so the fixture exercises the same code
    # path a user would hit with `ansible-galaxy collection init`.
    galaxy_args = ['ansible-galaxy', 'collection', 'init', '%s.%s' % (namespace, collection),
                   '-c', '--init-path', test_dir, '--collection-skeleton', skeleton]
    GalaxyCLI(args=galaxy_args).run()
    collection_dir = os.path.join(test_dir, namespace, collection)
    output_dir = to_text(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Output'))

    # (initialized collection source dir, empty dir for build output)
    return collection_dir, output_dir
||||
@pytest.fixture()
def collection_artifact(monkeypatch, tmp_path_factory):
    ''' Creates a temp collection artifact and mocked open_url instance for publishing tests '''
    mock_open = MagicMock()
    monkeypatch.setattr(collection, 'open_url', mock_open)

    # Pin uuid4().hex so the multipart boundary is deterministic across runs.
    mock_uuid = MagicMock()
    mock_uuid.return_value.hex = 'uuid'
    monkeypatch.setattr(uuid, 'uuid4', mock_uuid)

    tmp_path = tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections')
    input_file = to_text(tmp_path / 'collection.tar.gz')

    # A minimal but valid gzip tarball with a single 4-byte member.
    with tarfile.open(input_file, 'w:gz') as tfile:
        b_io = BytesIO(b"\x00\x01\x02\x03")
        tar_info = tarfile.TarInfo('test')
        tar_info.size = 4
        tar_info.mode = 0o0644
        tfile.addfile(tarinfo=tar_info, fileobj=b_io)

    # (artifact path, mocked open_url)
    return input_file, mock_open
||||
@pytest.fixture()
def requirements_file(request, tmp_path_factory):
    """Write ``request.param`` (if truthy) to a requirements.yml; yield its path."""
    req_dir = to_text(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Requirements'))
    req_path = os.path.join(req_dir, 'requirements.yml')

    if request.param:
        with open(req_path, 'wb') as req_fd:
            req_fd.write(to_bytes(request.param))

    yield req_path
||||
@pytest.fixture()
def galaxy_yml(request, tmp_path_factory):
    """Write ``request.param`` into a galaxy.yml in a temp dir; yield its byte path."""
    b_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections'))
    b_yml_path = os.path.join(b_dir, b'galaxy.yml')

    with open(b_yml_path, 'wb') as yml_fd:
        yml_fd.write(to_bytes(request.param))

    yield b_yml_path
||||
@pytest.fixture()
def tmp_tarfile(tmp_path_factory):
    ''' Creates a temporary tar file for _extract_tar_file tests '''
    # Non-ASCII member name exercises the encoding handling in extraction.
    filename = u'ÅÑŚÌβŁÈ'
    temp_dir = to_bytes(tmp_path_factory.mktemp('test-%s Collections' % to_native(filename)))
    tar_file = os.path.join(temp_dir, to_bytes('%s.tar.gz' % filename))
    data = os.urandom(8)

    with tarfile.open(tar_file, 'w:gz') as tfile:
        b_io = BytesIO(data)
        tar_info = tarfile.TarInfo(filename)
        tar_info.size = len(data)
        tar_info.mode = 0o0644
        tfile.addfile(tarinfo=tar_info, fileobj=b_io)

    sha256_hash = sha256()
    sha256_hash.update(data)

    # Yield inside the context manager so the TarFile stays open for the test.
    with tarfile.open(tar_file, 'r') as tfile:
        yield temp_dir, tfile, filename, sha256_hash.hexdigest()
||||
def test_build_collection_no_galaxy_yaml():
    """Building a directory without a galaxy.yml must raise a clear error."""
    fake_path = u'/fake/ÅÑŚÌβŁÈ/path'
    expected = to_native("The collection galaxy.yml path '%s/galaxy.yml' does not exist." % fake_path)

    with pytest.raises(AnsibleError, match=expected):
        collection.build_collection(fake_path, 'output', False)
||||
def test_build_existing_output_file(collection_input):
    """The build aborts when the artifact path exists as a directory."""
    input_dir, output_dir = collection_input

    existing_output_dir = os.path.join(output_dir, 'ansible_namespace-collection-0.1.0.tar.gz')
    os.makedirs(existing_output_dir)

    expected = "The output collection artifact '%s' already exists, but is a directory - aborting" \
               % to_native(existing_output_dir)
    with pytest.raises(AnsibleError, match=expected):
        collection.build_collection(input_dir, output_dir, False)
||||
def test_build_existing_output_without_force(collection_input):
    """An existing artifact file is not overwritten unless --force is given."""
    input_dir, output_dir = collection_input

    existing_output = os.path.join(output_dir, 'ansible_namespace-collection-0.1.0.tar.gz')
    with open(existing_output, 'w+') as out_file:
        out_file.write("random garbage")
        out_file.flush()

    expected = "The file '%s' already exists. You can use --force to re-create the collection artifact." \
               % to_native(existing_output)
    with pytest.raises(AnsibleError, match=expected):
        collection.build_collection(input_dir, output_dir, False)
||||
def test_build_existing_output_with_force(collection_input):
    """With force=True an existing artifact file is replaced by a real tarball."""
    input_dir, output_dir = collection_input

    existing_output = os.path.join(output_dir, 'ansible_namespace-collection-0.1.0.tar.gz')
    with open(existing_output, 'w+') as out_file:
        out_file.write("random garbage")
        out_file.flush()

    collection.build_collection(input_dir, output_dir, True)

    # Verify the file was replaced with an actual tar file
    assert tarfile.is_tarfile(existing_output)
||||
@pytest.mark.parametrize('galaxy_yml', [b'namespace: value: broken'], indirect=True)
def test_invalid_yaml_galaxy_file(galaxy_yml):
    """Unparseable YAML in galaxy.yml raises with the offending path in the message."""
    expected = to_native(b"Failed to parse the galaxy.yml at '%s' with the following error:" % galaxy_yml)

    with pytest.raises(AnsibleError, match=expected):
        collection._get_galaxy_yml(galaxy_yml)
||||
@pytest.mark.parametrize('galaxy_yml', [b'namespace: test_namespace'], indirect=True)
def test_missing_required_galaxy_key(galaxy_yml):
    """All missing mandatory keys are listed (sorted) in the error message."""
    expected = "The collection galaxy.yml at '%s' is missing the following mandatory keys: authors, name, " \
               "readme, version" % to_native(galaxy_yml)

    with pytest.raises(AnsibleError, match=expected):
        collection._get_galaxy_yml(galaxy_yml)
||||
@pytest.mark.parametrize('galaxy_yml', [b"""
namespace: namespace
name: collection
authors: Jordan
version: 0.1.0
readme: README.md
invalid: value"""], indirect=True)
def test_warning_extra_keys(galaxy_yml, monkeypatch):
    """Unknown galaxy.yml keys produce exactly one warning naming the keys."""
    display_mock = MagicMock()
    monkeypatch.setattr(Display, 'warning', display_mock)

    collection._get_galaxy_yml(galaxy_yml)

    assert display_mock.call_count == 1
    assert display_mock.call_args[0][0] == "Found unknown keys in collection galaxy.yml at '%s': invalid"\
        % to_text(galaxy_yml)
||||
@pytest.mark.parametrize('galaxy_yml', [b"""
namespace: namespace
name: collection
authors: Jordan
version: 0.1.0
readme: README.md"""], indirect=True)
def test_defaults_galaxy_yml(galaxy_yml):
    """A minimal galaxy.yml gets every optional key defaulted to None/[]/{}."""
    actual = collection._get_galaxy_yml(galaxy_yml)

    assert sorted(list(actual.keys())) == [
        'authors', 'dependencies', 'description', 'documentation', 'homepage', 'issues', 'license_file', 'license_ids',
        'name', 'namespace', 'readme', 'repository', 'tags', 'version',
    ]

    assert actual['namespace'] == 'namespace'
    assert actual['name'] == 'collection'
    # A scalar authors value is normalised to a single-element list.
    assert actual['authors'] == ['Jordan']
    assert actual['version'] == '0.1.0'
    assert actual['readme'] == 'README.md'
    assert actual['description'] is None
    assert actual['repository'] is None
    assert actual['documentation'] is None
    assert actual['homepage'] is None
    assert actual['issues'] is None
    assert actual['tags'] == []
    assert actual['dependencies'] == {}
    assert actual['license_ids'] == []
||||
@pytest.mark.parametrize('galaxy_yml', [(b"""
namespace: namespace
name: collection
authors: Jordan
version: 0.1.0
readme: README.md
license: MIT"""), (b"""
namespace: namespace
name: collection
authors: Jordan
version: 0.1.0
readme: README.md
license:
- MIT""")], indirect=True)
def test_galaxy_yml_list_value(galaxy_yml):
    """license accepts either a scalar or a list; both end up as a list."""
    actual = collection._get_galaxy_yml(galaxy_yml)
    assert actual['license_ids'] == ['MIT']
||||
def test_build_ignore_files_and_folders(collection_input, monkeypatch):
    """.git dirs, .retry files, and galaxy.yml are excluded from the manifest."""
    input_dir = collection_input[0]

    mock_display = MagicMock()
    monkeypatch.setattr(Display, 'vvv', mock_display)

    git_folder = os.path.join(input_dir, '.git')
    retry_file = os.path.join(input_dir, 'ansible.retry')

    os.makedirs(git_folder)
    with open(retry_file, 'w+') as ignore_file:
        ignore_file.write('random')
        ignore_file.flush()

    actual = collection._build_files_manifest(to_bytes(input_dir))

    assert actual['format'] == 1
    for manifest_entry in actual['files']:
        assert manifest_entry['name'] not in ['.git', 'ansible.retry', 'galaxy.yml']

    # One skip message per ignored entry; directory walk order is not
    # guaranteed, so check membership rather than exact ordering.
    expected_msgs = [
        "Skipping '%s' for collection build" % to_text(retry_file),
        "Skipping '%s' for collection build" % to_text(git_folder),
    ]
    assert mock_display.call_count == 2
    assert mock_display.mock_calls[0][1][0] in expected_msgs
    assert mock_display.mock_calls[1][1][0] in expected_msgs
||||
def test_build_ignore_symlink_target_outside_collection(collection_input, monkeypatch):
    """Symlinks pointing outside the collection are skipped with a warning."""
    input_dir, outside_dir = collection_input

    mock_display = MagicMock()
    monkeypatch.setattr(Display, 'warning', mock_display)

    link_path = os.path.join(input_dir, 'plugins', 'connection')
    os.symlink(outside_dir, link_path)

    actual = collection._build_files_manifest(to_bytes(input_dir))
    for manifest_entry in actual['files']:
        assert manifest_entry['name'] != 'plugins/connection'

    assert mock_display.call_count == 1
    assert mock_display.mock_calls[0][1][0] == "Skipping '%s' as it is a symbolic link to a directory outside " \
        "the collection" % to_text(link_path)
||||
def test_build_copy_symlink_target_inside_collection(collection_input):
    """Symlinks whose target is inside the collection are followed and
    their contents included in the files manifest."""
    input_dir = collection_input[0]

    os.makedirs(os.path.join(input_dir, 'playbooks', 'roles'))
    roles_link = os.path.join(input_dir, 'playbooks', 'roles', 'linked')

    roles_target = os.path.join(input_dir, 'roles', 'linked')
    roles_target_tasks = os.path.join(roles_target, 'tasks')
    os.makedirs(roles_target_tasks)
    with open(os.path.join(roles_target_tasks, 'main.yml'), 'w+') as tasks_main:
        tasks_main.write("---\n- hosts: localhost\n  tasks:\n  - ping:")
        tasks_main.flush()

    os.symlink(roles_target, roles_link)

    actual = collection._build_files_manifest(to_bytes(input_dir))

    # The link dir, its tasks subdir, and the task file all get entries.
    linked_entries = [e for e in actual['files'] if e['name'].startswith('playbooks/roles/linked')]
    assert len(linked_entries) == 3
    assert linked_entries[0]['name'] == 'playbooks/roles/linked'
    assert linked_entries[0]['ftype'] == 'dir'
    assert linked_entries[1]['name'] == 'playbooks/roles/linked/tasks'
    assert linked_entries[1]['ftype'] == 'dir'
    assert linked_entries[2]['name'] == 'playbooks/roles/linked/tasks/main.yml'
    assert linked_entries[2]['ftype'] == 'file'
    assert linked_entries[2]['chksum_sha256'] == '9c97a1633c51796999284c62236b8d5462903664640079b80c37bf50080fcbc3'
||||
def test_build_with_symlink_inside_collection(collection_input):
    """End-to-end build: internal symlinks (dir and file) are materialised as
    real members in the resulting artifact with the expected content."""
    input_dir, output_dir = collection_input

    os.makedirs(os.path.join(input_dir, 'playbooks', 'roles'))
    roles_link = os.path.join(input_dir, 'playbooks', 'roles', 'linked')
    file_link = os.path.join(input_dir, 'docs', 'README.md')

    roles_target = os.path.join(input_dir, 'roles', 'linked')
    roles_target_tasks = os.path.join(roles_target, 'tasks')
    os.makedirs(roles_target_tasks)
    with open(os.path.join(roles_target_tasks, 'main.yml'), 'w+') as tasks_main:
        tasks_main.write("---\n- hosts: localhost\n  tasks:\n  - ping:")
        tasks_main.flush()

    os.symlink(roles_target, roles_link)
    os.symlink(os.path.join(input_dir, 'README.md'), file_link)

    collection.build_collection(input_dir, output_dir, False)

    output_artifact = os.path.join(output_dir, 'ansible_namespace-collection-0.1.0.tar.gz')
    assert tarfile.is_tarfile(output_artifact)

    with tarfile.open(output_artifact, mode='r') as actual:
        members = actual.getmembers()

        linked_members = [m for m in members if m.path.startswith('playbooks/roles/linked/tasks')]
        assert len(linked_members) == 2
        assert linked_members[0].name == 'playbooks/roles/linked/tasks'
        assert linked_members[0].isdir()

        assert linked_members[1].name == 'playbooks/roles/linked/tasks/main.yml'
        assert linked_members[1].isreg()

        # Verify the linked task file content via its sha1 hash.
        linked_task = actual.extractfile(linked_members[1].name)
        actual_task = secure_hash_s(linked_task.read())
        linked_task.close()

        assert actual_task == 'f4dcc52576b6c2cd8ac2832c52493881c4e54226'

        linked_file = [m for m in members if m.path == 'docs/README.md']
        assert len(linked_file) == 1
        assert linked_file[0].isreg()

        linked_file_obj = actual.extractfile(linked_file[0].name)
        actual_file = secure_hash_s(linked_file_obj.read())
        linked_file_obj.close()

        assert actual_file == '63444bfc766154e1bc7557ef6280de20d03fcd81'
||||
def test_publish_missing_file():
    """Publishing a nonexistent path raises with a clear error message."""
    fake_path = u'/fake/ÅÑŚÌβŁÈ/path'
    expected = to_native("The collection path specified '%s' does not exist." % fake_path)

    with pytest.raises(AnsibleError, match=expected):
        collection.publish_collection(fake_path, None, None, False, True)
|
||||
def test_publish_not_a_tarball():
    """Publishing a file that is not a tarball is rejected with guidance."""
    expected = "The collection path specified '{0}' is not a tarball, use 'ansible-galaxy collection build' to " \
               "create a proper release artifact."

    with tempfile.NamedTemporaryFile(prefix=u'ÅÑŚÌβŁÈ') as temp_file:
        temp_file.write(b"\x00")
        temp_file.flush()
        with pytest.raises(AnsibleError, match=expected.format(to_native(temp_file.name))):
            collection.publish_collection(temp_file.name, None, None, False, True)
|
||||
def test_publish_no_wait(collection_artifact, monkeypatch):
    """--no-wait publish: one POST with the multipart form, then a message
    pointing at the import task URL instead of polling it."""
    mock_display = MagicMock()
    monkeypatch.setattr(Display, 'display', mock_display)

    artifact_path, mock_open = collection_artifact
    fake_import_uri = 'https://galaxy.server.com/api/v2/import/1234'
    server = 'https://galaxy.com'

    mock_open.return_value = StringIO(u'{"task":"%s"}' % fake_import_uri)
    # Boundary is pinned by the fixture's uuid4 mock, so the expected form
    # bytes are reproducible here.
    expected_form, expected_content_type = collection._get_mime_data(to_bytes(artifact_path))

    collection.publish_collection(artifact_path, server, 'key', False, False)

    assert mock_open.call_count == 1
    assert mock_open.mock_calls[0][1][0] == 'https://galaxy.com/api/v2/collections/'
    assert mock_open.mock_calls[0][2]['data'] == expected_form
    assert mock_open.mock_calls[0][2]['method'] == 'POST'
    assert mock_open.mock_calls[0][2]['validate_certs'] is True
    assert mock_open.mock_calls[0][2]['headers']['Authorization'] == 'Token key'
    assert mock_open.mock_calls[0][2]['headers']['Content-length'] == len(expected_form)
    assert mock_open.mock_calls[0][2]['headers']['Content-type'] == expected_content_type

    assert mock_display.call_count == 2
    assert mock_display.mock_calls[0][1][0] == "Publishing collection artifact '%s' to %s" % (artifact_path, server)
    assert mock_display.mock_calls[1][1][0] == \
        "Collection has been pushed to the Galaxy server, not waiting until import has completed due to --no-wait " \
        "being set. Import task results can be found at %s" % fake_import_uri
|
||||
def test_publish_dont_validate_cert(collection_artifact):
    """When ignore_certs is set, the upload request must be made with validate_certs=False."""
    artifact_path, mock_open = collection_artifact

    task_response = StringIO(u'{"task":"https://galaxy.server.com/api/v2/import/1234"}')
    mock_open.return_value = task_response

    collection.publish_collection(artifact_path, 'https://galaxy.server.com', 'key', True, False)

    upload_call = mock_open.mock_calls[0]
    assert mock_open.call_count == 1
    assert upload_call[2]['validate_certs'] is False
def test_publish_failure(collection_artifact):
    """An HTTP 500 with no JSON body should surface as an AnsibleError with the 'Unknown' fallback details."""
    artifact_path, mock_open = collection_artifact

    # Empty response body: the code cannot parse a message/code out of it.
    mock_open.side_effect = urllib_error.HTTPError('https://galaxy.server.com', 500, 'msg', {}, StringIO())

    expected = 'Error when publishing collection (HTTP Code: 500, Message: Unknown error returned by Galaxy ' \
               'server. Code: Unknown)'
    with pytest.raises(AnsibleError, match=re.escape(expected)):
        collection.publish_collection(artifact_path, 'https://galaxy.server.com', 'key', False, True)
def test_publish_failure_with_json_info(collection_artifact):
    """An HTTP error carrying a JSON body should surface the server-provided message and code."""
    artifact_path, mock_open = collection_artifact

    return_content = StringIO(u'{"message":"Galaxy error message","code":"GWE002"}')
    mock_open.side_effect = urllib_error.HTTPError('https://galaxy.server.com', 503, 'msg', {}, return_content)

    expected = 'Error when publishing collection (HTTP Code: 503, Message: Galaxy error message Code: GWE002)'
    with pytest.raises(AnsibleError, match=re.escape(expected)):
        collection.publish_collection(artifact_path, 'https://galaxy.server.com', 'key', False, True)
def test_publish_with_wait(collection_artifact, monkeypatch):
    """Publishing with wait=True should poll the import task once and report success."""
    mock_display = MagicMock()
    monkeypatch.setattr(Display, 'display', mock_display)

    mock_vvv = MagicMock()
    monkeypatch.setattr(Display, 'vvv', mock_vvv)

    fake_import_uri = 'https://galaxy-server/api/v2/import/1234'
    server = 'https://galaxy.server.com'

    artifact_path, mock_open = collection_artifact

    # First response: upload POST returns the import task URI; second: the task has already finished.
    mock_open.side_effect = (
        StringIO(u'{"task":"%s"}' % fake_import_uri),
        StringIO(u'{"finished_at":"some_time","state":"success"}')
    )

    collection.publish_collection(artifact_path, server, 'key', False, True)

    # Call 0 is the upload, call 1 the status poll of the import task.
    assert mock_open.call_count == 2
    assert mock_open.mock_calls[1][1][0] == fake_import_uri
    assert mock_open.mock_calls[1][2]['headers']['Authorization'] == 'Token key'
    assert mock_open.mock_calls[1][2]['validate_certs'] is True
    assert mock_open.mock_calls[1][2]['method'] == 'GET'

    assert mock_display.call_count == 2
    assert mock_display.mock_calls[0][1][0] == "Publishing collection artifact '%s' to %s" % (artifact_path, server)
    assert mock_display.mock_calls[1][1][0] == 'Collection has been successfully published to the Galaxy server'

    assert mock_vvv.call_count == 2
    assert mock_vvv.mock_calls[0][1][0] == 'Collection has been pushed to the Galaxy server %s' % server
    assert mock_vvv.mock_calls[1][1][0] == 'Waiting until galaxy import task %s has completed' % fake_import_uri
def test_publish_with_wait_retry(collection_artifact, monkeypatch):
    """Publishing with wait=True should retry an unfinished import poll and then succeed.

    NOTE: originally named ``test_publish_with_wait_timeout``, which collided with the
    identically-named timeout test defined later in this module; Python module semantics
    meant this definition was shadowed and never collected by pytest. Renamed so it runs.
    """
    # Stub out time.sleep so the retry back-off does not slow the test down.
    monkeypatch.setattr(time, 'sleep', MagicMock())

    mock_display = MagicMock()
    monkeypatch.setattr(Display, 'display', mock_display)

    mock_vvv = MagicMock()
    monkeypatch.setattr(Display, 'vvv', mock_vvv)

    fake_import_uri = 'https://galaxy-server/api/v2/import/1234'
    server = 'https://galaxy.server.com'

    artifact_path, mock_open = collection_artifact

    # Upload -> one "still running" poll (finished_at null) -> successful completion.
    mock_open.side_effect = (
        StringIO(u'{"task":"%s"}' % fake_import_uri),
        StringIO(u'{"finished_at":null}'),
        StringIO(u'{"finished_at":"some_time","state":"success"}')
    )

    collection.publish_collection(artifact_path, server, 'key', True, True)

    assert mock_open.call_count == 3
    assert mock_open.mock_calls[1][1][0] == fake_import_uri
    assert mock_open.mock_calls[1][2]['headers']['Authorization'] == 'Token key'
    assert mock_open.mock_calls[1][2]['validate_certs'] is False
    assert mock_open.mock_calls[1][2]['method'] == 'GET'
    assert mock_open.mock_calls[2][1][0] == fake_import_uri
    assert mock_open.mock_calls[2][2]['headers']['Authorization'] == 'Token key'
    assert mock_open.mock_calls[2][2]['validate_certs'] is False
    assert mock_open.mock_calls[2][2]['method'] == 'GET'

    assert mock_display.call_count == 2
    assert mock_display.mock_calls[0][1][0] == "Publishing collection artifact '%s' to %s" % (artifact_path, server)
    assert mock_display.mock_calls[1][1][0] == 'Collection has been successfully published to the Galaxy server'

    assert mock_vvv.call_count == 3
    assert mock_vvv.mock_calls[0][1][0] == 'Collection has been pushed to the Galaxy server %s' % server
    assert mock_vvv.mock_calls[1][1][0] == 'Waiting until galaxy import task %s has completed' % fake_import_uri
    assert mock_vvv.mock_calls[2][1][0] == \
        'Galaxy import process has a status of waiting, wait 2 seconds before trying again'
def test_publish_with_wait_timeout(collection_artifact, monkeypatch):
    """Repeatedly-unfinished import polls should eventually raise a timeout error.

    Verifies the back-off schedule (2, 3, 4, 6, 10, 15 seconds) reported between polls.
    """
    # Stub out time.sleep so the back-off schedule does not actually delay the test.
    monkeypatch.setattr(time, 'sleep', MagicMock())

    mock_display = MagicMock()
    monkeypatch.setattr(Display, 'display', mock_display)

    mock_vvv = MagicMock()
    monkeypatch.setattr(Display, 'vvv', mock_vvv)

    fake_import_uri = 'https://galaxy-server/api/v2/import/1234'
    server = 'https://galaxy.server.com'

    artifact_path, mock_open = collection_artifact

    # Upload response followed by seven polls that never finish -> timeout.
    mock_open.side_effect = (
        StringIO(u'{"task":"%s"}' % fake_import_uri),
        StringIO(u'{"finished_at":null}'),
        StringIO(u'{"finished_at":null}'),
        StringIO(u'{"finished_at":null}'),
        StringIO(u'{"finished_at":null}'),
        StringIO(u'{"finished_at":null}'),
        StringIO(u'{"finished_at":null}'),
        StringIO(u'{"finished_at":null}'),
    )

    expected = "Timeout while waiting for the Galaxy import process to finish, check progress at '%s'" \
        % fake_import_uri
    with pytest.raises(AnsibleError, match=expected):
        collection.publish_collection(artifact_path, server, 'key', True, True)

    assert mock_open.call_count == 8
    # Calls 1..7 are all GET polls against the import task URI.
    for i in range(7):
        mock_call = mock_open.mock_calls[i + 1]
        assert mock_call[1][0] == fake_import_uri
        assert mock_call[2]['headers']['Authorization'] == 'Token key'
        assert mock_call[2]['validate_certs'] is False
        assert mock_call[2]['method'] == 'GET'

    assert mock_display.call_count == 1
    assert mock_display.mock_calls[0][1][0] == "Publishing collection artifact '%s' to %s" % (artifact_path, server)

    expected_wait_msg = 'Galaxy import process has a status of waiting, wait {0} seconds before trying again'
    assert mock_vvv.call_count == 8
    assert mock_vvv.mock_calls[0][1][0] == 'Collection has been pushed to the Galaxy server %s' % server
    assert mock_vvv.mock_calls[1][1][0] == 'Waiting until galaxy import task %s has completed' % fake_import_uri
    assert mock_vvv.mock_calls[2][1][0] == expected_wait_msg.format(2)
    assert mock_vvv.mock_calls[3][1][0] == expected_wait_msg.format(3)
    assert mock_vvv.mock_calls[4][1][0] == expected_wait_msg.format(4)
    assert mock_vvv.mock_calls[5][1][0] == expected_wait_msg.format(6)
    assert mock_vvv.mock_calls[6][1][0] == expected_wait_msg.format(10)
    assert mock_vvv.mock_calls[7][1][0] == expected_wait_msg.format(15)
def test_publish_with_wait_and_failure(collection_artifact, monkeypatch):
    """A failed import with an error payload should raise and route messages to the right log levels."""
    mock_display = MagicMock()
    monkeypatch.setattr(Display, 'display', mock_display)

    mock_vvv = MagicMock()
    monkeypatch.setattr(Display, 'vvv', mock_vvv)

    mock_warn = MagicMock()
    monkeypatch.setattr(Display, 'warning', mock_warn)

    mock_err = MagicMock()
    monkeypatch.setattr(Display, 'error', mock_err)

    fake_import_uri = 'https://galaxy-server/api/v2/import/1234'
    server = 'https://galaxy.server.com'

    artifact_path, mock_open = collection_artifact

    # Import task result: failed state with a populated error block plus one message per level.
    import_stat = {
        'finished_at': 'some_time',
        'state': 'failed',
        'error': {
            'code': 'GW001',
            'description': 'Because I said so!',
        },
        'messages': [
            {
                'level': 'error',
                'message': 'Some error',
            },
            {
                'level': 'warning',
                'message': 'Some warning',
            },
            {
                'level': 'info',
                'message': 'Some info',
            },
        ],
    }

    mock_open.side_effect = (
        StringIO(u'{"task":"%s"}' % fake_import_uri),
        StringIO(to_text(json.dumps(import_stat)))
    )

    expected = 'Galaxy import process failed: Because I said so! (Code: GW001)'
    with pytest.raises(AnsibleError, match=re.escape(expected)):
        collection.publish_collection(artifact_path, server, 'key', True, True)

    assert mock_open.call_count == 2
    assert mock_open.mock_calls[1][1][0] == fake_import_uri
    assert mock_open.mock_calls[1][2]['headers']['Authorization'] == 'Token key'
    assert mock_open.mock_calls[1][2]['validate_certs'] is False
    assert mock_open.mock_calls[1][2]['method'] == 'GET'

    assert mock_display.call_count == 1
    assert mock_display.mock_calls[0][1][0] == "Publishing collection artifact '%s' to %s" % (artifact_path, server)

    # info-level import messages go to vvv, warnings to warning(), errors to error().
    assert mock_vvv.call_count == 3
    assert mock_vvv.mock_calls[0][1][0] == 'Collection has been pushed to the Galaxy server %s' % server
    assert mock_vvv.mock_calls[1][1][0] == 'Waiting until galaxy import task %s has completed' % fake_import_uri
    assert mock_vvv.mock_calls[2][1][0] == 'Galaxy import message: info - Some info'

    assert mock_warn.call_count == 1
    assert mock_warn.mock_calls[0][1][0] == 'Galaxy import warning message: Some warning'

    assert mock_err.call_count == 1
    assert mock_err.mock_calls[0][1][0] == 'Galaxy import error message: Some error'
def test_publish_with_wait_and_failure_and_no_error(collection_artifact, monkeypatch):
    """A failed import with an EMPTY error block should fall back to the generic UNKNOWN error text."""
    mock_display = MagicMock()
    monkeypatch.setattr(Display, 'display', mock_display)

    mock_vvv = MagicMock()
    monkeypatch.setattr(Display, 'vvv', mock_vvv)

    mock_warn = MagicMock()
    monkeypatch.setattr(Display, 'warning', mock_warn)

    mock_err = MagicMock()
    monkeypatch.setattr(Display, 'error', mock_err)

    fake_import_uri = 'https://galaxy-server/api/v2/import/1234'
    server = 'https://galaxy.server.com'

    artifact_path, mock_open = collection_artifact

    # Same as the failure test above but 'error' is empty, exercising the fallback path.
    import_stat = {
        'finished_at': 'some_time',
        'state': 'failed',
        'error': {},
        'messages': [
            {
                'level': 'error',
                'message': 'Some error',
            },
            {
                'level': 'warning',
                'message': 'Some warning',
            },
            {
                'level': 'info',
                'message': 'Some info',
            },
        ],
    }

    mock_open.side_effect = (
        StringIO(u'{"task":"%s"}' % fake_import_uri),
        StringIO(to_text(json.dumps(import_stat)))
    )

    expected = 'Galaxy import process failed: Unknown error, see %s for more details (Code: UNKNOWN)' % fake_import_uri
    with pytest.raises(AnsibleError, match=re.escape(expected)):
        collection.publish_collection(artifact_path, server, 'key', True, True)

    assert mock_open.call_count == 2
    assert mock_open.mock_calls[1][1][0] == fake_import_uri
    assert mock_open.mock_calls[1][2]['headers']['Authorization'] == 'Token key'
    assert mock_open.mock_calls[1][2]['validate_certs'] is False
    assert mock_open.mock_calls[1][2]['method'] == 'GET'

    assert mock_display.call_count == 1
    assert mock_display.mock_calls[0][1][0] == "Publishing collection artifact '%s' to %s" % (artifact_path, server)

    assert mock_vvv.call_count == 3
    assert mock_vvv.mock_calls[0][1][0] == 'Collection has been pushed to the Galaxy server %s' % server
    assert mock_vvv.mock_calls[1][1][0] == 'Waiting until galaxy import task %s has completed' % fake_import_uri
    assert mock_vvv.mock_calls[2][1][0] == 'Galaxy import message: info - Some info'

    assert mock_warn.call_count == 1
    assert mock_warn.mock_calls[0][1][0] == 'Galaxy import warning message: Some warning'

    assert mock_err.call_count == 1
    assert mock_err.mock_calls[0][1][0] == 'Galaxy import error message: Some error'
@pytest.mark.parametrize('requirements_file', [None], indirect=True)
def test_parse_requirements_file_that_doesnt_exist(requirements_file):
    """Parsing a path that does not exist must raise a clear AnsibleError."""
    err_pattern = "The requirements file '%s' does not exist." % to_native(requirements_file)

    with pytest.raises(AnsibleError, match=err_pattern):
        collection.parse_collections_requirements_file(requirements_file)
@pytest.mark.parametrize('requirements_file', ['not a valid yml file: hi: world'], indirect=True)
def test_parse_requirements_file_that_isnt_yaml(requirements_file):
    """Invalid YAML content must raise an AnsibleError mentioning the offending file."""
    err_pattern = (
        "Failed to parse the collection requirements yml at '%s' with the following error"
        % to_native(requirements_file)
    )

    with pytest.raises(AnsibleError, match=err_pattern):
        collection.parse_collections_requirements_file(requirements_file)
# Two invalid shapes: a role-style list, and a dict without the 'collections' key.
@pytest.mark.parametrize('requirements_file', [('''
# Older role based requirements.yml
- galaxy.role
- anotherrole
'''), ('''
# Doesn't have collections key
roles:
- galaxy.role
- anotherole
''')], indirect=True)
def test_parse_requirements_in_invalid_format(requirements_file):
    """Requirements files that aren't a dict with a 'collections' list must be rejected."""
    expected = "Expecting collections requirements file to be a dict with the key collections that contains a list " \
               "of collections to install."
    with pytest.raises(AnsibleError, match=expected):
        collection.parse_collections_requirements_file(requirements_file)
@pytest.mark.parametrize('requirements_file', ['''
collections:
- version: 1.0.0
'''], indirect=True)
def test_parse_requirements_without_mandatory_name_key(requirements_file):
    """A requirements entry missing the mandatory 'name' key must raise an error."""
    expected = "Collections requirement entry should contain the key name."
    with pytest.raises(AnsibleError, match=expected):
        collection.parse_collections_requirements_file(requirements_file)
# Both the short string form and the dict form of a collection entry must parse identically.
@pytest.mark.parametrize('requirements_file', [('''
collections:
- namespace.collection1
- namespace.collection2
'''), ('''
collections:
- name: namespace.collection1
- name: namespace.collection2
''')], indirect=True)
def test_parse_requirements(requirements_file):
    """Entries without version/source default to ('*', None)."""
    expected = [('namespace.collection1', '*', None), ('namespace.collection2', '*', None)]
    actual = collection.parse_collections_requirements_file(requirements_file)

    assert actual == expected
@pytest.mark.parametrize('requirements_file', ['''
collections:
- name: namespace.collection1
  version: ">=1.0.0,<=2.0.0"
  source: https://galaxy-dev.ansible.com
- namespace.collection2'''], indirect=True)
def test_parse_requirements_with_extra_info(requirements_file):
    """Explicit version and source keys must be carried through; omitted ones default to ('*', None)."""
    expected = [('namespace.collection1', '>=1.0.0,<=2.0.0', 'https://galaxy-dev.ansible.com'),
                ('namespace.collection2', '*', None)]
    actual = collection.parse_collections_requirements_file(requirements_file)

    assert actual == expected
def test_find_existing_collections(tmp_path_factory, monkeypatch):
    """_find_existing_collections should pick up namespace/name dirs, read MANIFEST.json when
    present, warn when it is missing, and ignore plain files at either level."""
    test_dir = to_text(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections'))
    collection1 = os.path.join(test_dir, 'namespace1', 'collection1')
    collection2 = os.path.join(test_dir, 'namespace2', 'collection2')
    # Plain files (not directories) at the collection and namespace levels must be skipped.
    fake_collection1 = os.path.join(test_dir, 'namespace3', 'collection3')
    fake_collection2 = os.path.join(test_dir, 'namespace4')
    os.makedirs(collection1)
    os.makedirs(collection2)
    os.makedirs(os.path.split(fake_collection1)[0])

    open(fake_collection1, 'wb+').close()
    open(fake_collection2, 'wb+').close()

    # Only collection1 gets a manifest; collection2 exercises the missing-manifest warning path.
    collection1_manifest = json.dumps({
        'collection_info': {
            'namespace': 'namespace1',
            'name': 'collection1',
            'version': '1.2.3',
            'authors': ['Jordan Borean'],
            'readme': 'README.md',
            'dependencies': {},
        },
        'format': 1,
    })
    with open(os.path.join(collection1, 'MANIFEST.json'), 'wb') as manifest_obj:
        manifest_obj.write(to_bytes(collection1_manifest))

    mock_warning = MagicMock()
    monkeypatch.setattr(Display, 'warning', mock_warning)

    actual = collection._find_existing_collections(test_dir)

    assert len(actual) == 2
    for actual_collection in actual:
        # Existing installs are marked skip so they are not reinstalled.
        assert actual_collection.skip is True

        if str(actual_collection) == 'namespace1.collection1':
            assert actual_collection.namespace == 'namespace1'
            assert actual_collection.name == 'collection1'
            assert actual_collection.b_path == to_bytes(collection1)
            assert actual_collection.source is None
            assert actual_collection.versions == set(['1.2.3'])
            assert actual_collection.latest_version == '1.2.3'
            assert actual_collection.dependencies == {}
        else:
            # Without a manifest the version is unknown and represented as '*'.
            assert actual_collection.namespace == 'namespace2'
            assert actual_collection.name == 'collection2'
            assert actual_collection.b_path == to_bytes(collection2)
            assert actual_collection.source is None
            assert actual_collection.versions == set(['*'])
            assert actual_collection.latest_version == '*'
            assert actual_collection.dependencies == {}

    assert mock_warning.call_count == 1
    assert mock_warning.mock_calls[0][1][0] == "Collection at '%s' does not have a MANIFEST.json file, cannot " \
                                               "detect version." % to_text(collection2)
def test_download_file(tmp_path_factory, monkeypatch):
    """_download_file should fetch the URL, write the payload to disk and accept a matching sha256."""
    temp_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections'))

    data = b"\x00\x01\x02\x03"
    # Compute the real digest of the payload so the download passes its hash check.
    sha256_hash = sha256()
    sha256_hash.update(data)

    mock_open = MagicMock()
    mock_open.return_value = BytesIO(data)
    monkeypatch.setattr(collection, 'open_url', mock_open)

    expected = os.path.join(temp_dir, b'file')
    actual = collection._download_file('http://google.com/file', temp_dir, sha256_hash.hexdigest(), True)

    # The destination is derived from the URL basename (a suffix may be appended).
    assert actual.startswith(expected)
    assert os.path.isfile(actual)
    with open(actual, 'rb') as file_obj:
        assert file_obj.read() == data

    assert mock_open.call_count == 1
    assert mock_open.mock_calls[0][1][0] == 'http://google.com/file'
def test_download_file_hash_mismatch(tmp_path_factory, monkeypatch):
    """A download whose content does not match the expected sha256 must raise an AnsibleError."""
    temp_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections'))

    payload = b"\x00\x01\x02\x03"

    mock_open = MagicMock(return_value=BytesIO(payload))
    monkeypatch.setattr(collection, 'open_url', mock_open)

    err_pattern = "Mismatch artifact hash with downloaded file"
    with pytest.raises(AnsibleError, match=err_pattern):
        # 'bad' can never equal the real digest of the payload.
        collection._download_file('http://google.com/file', temp_dir, 'bad', True)
def test_extract_tar_file_invalid_hash(tmp_tarfile):
    """Extracting a member whose checksum does not match must raise an AnsibleError."""
    temp_dir, tfile, filename, dummy = tmp_tarfile

    err_pattern = "Checksum mismatch for '%s' inside collection at '%s'" \
        % (to_native(filename), to_native(tfile.name))

    with pytest.raises(AnsibleError, match=err_pattern):
        collection._extract_tar_file(tfile, filename, temp_dir, temp_dir, "fakehash")
def test_extract_tar_file_missing_member(tmp_tarfile):
    """Requesting a member that is not in the tar must raise an AnsibleError."""
    temp_dir, tfile, dummy, dummy = tmp_tarfile

    err_pattern = "Collection tar at '%s' does not contain the expected file 'missing'." \
        % to_native(tfile.name)

    with pytest.raises(AnsibleError, match=err_pattern):
        collection._extract_tar_file(tfile, 'missing', temp_dir, temp_dir)
def test_extract_tar_file_missing_parent_dir(tmp_tarfile):
    """_extract_tar_file should create missing parent directories for the output path."""
    temp_dir, tfile, filename, checksum = tmp_tarfile
    # output_dir does not exist yet; extraction must create it.
    output_dir = os.path.join(temp_dir, b'output')
    output_file = os.path.join(output_dir, to_bytes(filename))

    collection._extract_tar_file(tfile, filename, output_dir, temp_dir, checksum)
    # BUG FIX: the original called os.path.isfile() without asserting, so the
    # test could never fail even if extraction silently did nothing.
    assert os.path.isfile(output_file)
@ -0,0 +1,719 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright: (c) 2019, Ansible Project
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
# Make coding more python3-ish
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import json
|
||||
import os
|
||||
import pytest
|
||||
import re
|
||||
import shutil
|
||||
import tarfile
|
||||
import yaml
|
||||
|
||||
from io import BytesIO, StringIO
|
||||
from units.compat.mock import MagicMock
|
||||
|
||||
import ansible.module_utils.six.moves.urllib.error as urllib_error
|
||||
|
||||
from ansible.cli.galaxy import GalaxyCLI
|
||||
from ansible.errors import AnsibleError
|
||||
from ansible.galaxy import collection
|
||||
from ansible.module_utils._text import to_bytes, to_native, to_text
|
||||
from ansible.utils import context_objects as co
|
||||
from ansible.utils.display import Display
|
||||
|
||||
|
||||
def call_galaxy_cli(args):
    """Run ``ansible-galaxy collection <args>`` while isolating the global CLI-args singleton.

    The singleton is reset before the run and restored afterwards so repeated
    invocations within one test session do not leak state into each other.
    """
    saved_singleton = co.GlobalCLIArgs._Singleton__instance
    co.GlobalCLIArgs._Singleton__instance = None
    try:
        cli = GalaxyCLI(args=['ansible-galaxy', 'collection'] + args)
        cli.run()
    finally:
        co.GlobalCLIArgs._Singleton__instance = saved_singleton
def artifact_json(namespace, name, version, dependencies, server):
    """Build a fake Galaxy 'collection version detail' API response as text.

    Mirrors the JSON shape of /api/v2/collections/<ns>/<name>/versions/<version>/:
    artifact metadata (filename/sha256/size), a download URL and the collection metadata.
    """
    json_str = json.dumps({
        'artifact': {
            'filename': '%s-%s-%s.tar.gz' % (namespace, name, version),
            # Fixed dummy digest; tests that care about hashing stub the download layer instead.
            'sha256': '2d76f3b8c4bab1072848107fb3914c345f71a12a1722f25c08f5d3f51f4ab5fd',
            'size': 1234,
        },
        'download_url': '%s/download/%s-%s-%s.tar.gz' % (server, namespace, name, version),
        'metadata': {
            'namespace': namespace,
            'name': name,
            'dependencies': dependencies,
        },
        'version': version
    })
    return to_text(json_str)
def artifact_versions_json(namespace, name, versions, server):
    """Build a fake Galaxy 'version list' API response (a single, unpaginated page) as text."""
    version_entries = [
        {
            'href': '%s/api/v2/%s/%s/versions/%s/' % (server, namespace, name, version),
            'version': version,
        }
        for version in versions
    ]

    payload = {
        'count': len(versions),
        'next': None,
        'previous': None,
        'results': version_entries
    }
    return to_text(json.dumps(payload))
@pytest.fixture()
def collection_artifact(request, tmp_path_factory):
    """Create and build a real skeleton collection; return (source path, tarball path) as bytes.

    An indirect parametrize value, when supplied, is written into galaxy.yml as the
    collection's dependencies before building.
    """
    test_dir = to_text(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
    namespace = 'ansible_namespace'
    # Renamed from 'collection' to avoid shadowing the imported ansible.galaxy.collection module.
    collection_name = 'collection'

    skeleton_path = os.path.join(os.path.dirname(os.path.split(__file__)[0]), 'cli', 'test_data', 'collection_skeleton')
    collection_path = os.path.join(test_dir, namespace, collection_name)

    call_galaxy_cli(['init', '%s.%s' % (namespace, collection_name), '-c', '--init-path', test_dir,
                     '--collection-skeleton', skeleton_path])
    dependencies = getattr(request, 'param', None)
    if dependencies:
        # Rewrite galaxy.yml in place with the requested dependencies before building.
        galaxy_yml = os.path.join(collection_path, 'galaxy.yml')
        with open(galaxy_yml, 'rb+') as galaxy_obj:
            existing_yaml = yaml.safe_load(galaxy_obj)
            existing_yaml['dependencies'] = dependencies

            galaxy_obj.seek(0)
            galaxy_obj.write(to_bytes(yaml.safe_dump(existing_yaml)))
            galaxy_obj.truncate()

    call_galaxy_cli(['build', collection_path, '--output-path', test_dir])

    # The skeleton's default version is 0.1.0, which determines the tarball name.
    collection_tar = os.path.join(test_dir, '%s-%s-0.1.0.tar.gz' % (namespace, collection_name))
    return to_bytes(collection_path), to_bytes(collection_tar)
def test_build_requirement_from_path(collection_artifact):
    """A source directory without MANIFEST.json yields a wildcard-version, skipped requirement."""
    collection_path = collection_artifact[0]
    req = collection.CollectionRequirement.from_path(collection_path, True, True)

    assert req.namespace == u'ansible_namespace'
    assert req.name == u'collection'
    assert req.b_path == collection_path
    assert req.source is None
    # An on-disk collection is treated as already installed.
    assert req.skip is True
    assert req.versions == set([u'*'])
    assert req.latest_version == u'*'
    assert req.dependencies == {}
def test_build_requirement_from_path_with_manifest(collection_artifact):
    """When MANIFEST.json exists it is the source of truth over the directory layout."""
    manifest_path = os.path.join(collection_artifact[0], b'MANIFEST.json')
    manifest_value = json.dumps({
        'collection_info': {
            'namespace': 'namespace',
            'name': 'name',
            'version': '1.1.1',
            'dependencies': {
                'ansible_namespace.collection': '*'
            }
        }
    })
    with open(manifest_path, 'wb') as manifest_obj:
        manifest_obj.write(to_bytes(manifest_value))

    actual = collection.CollectionRequirement.from_path(collection_artifact[0], True, True)

    # While the folder name suggests a different collection, we treat MANIFEST.json as the source of truth.
    assert actual.namespace == u'namespace'
    assert actual.name == u'name'
    assert actual.b_path == collection_artifact[0]
    assert actual.source is None
    assert actual.skip is True
    assert actual.versions == set([u'1.1.1'])
    assert actual.latest_version == u'1.1.1'
    assert actual.dependencies == {'ansible_namespace.collection': '*'}
def test_build_requirement_from_path_invalid_manifest(collection_artifact):
    """A MANIFEST.json that is not valid JSON must raise an AnsibleError."""
    manifest_path = os.path.join(collection_artifact[0], b'MANIFEST.json')
    with open(manifest_path, 'wb') as manifest_obj:
        manifest_obj.write(b"not json")

    err_pattern = "Collection file at '%s' does not contain a valid json string." \
        % to_native(manifest_path)

    with pytest.raises(AnsibleError, match=err_pattern):
        collection.CollectionRequirement.from_path(collection_artifact[0], True, True)
def test_build_requirement_from_tar(collection_artifact):
    """A built tarball yields a concrete-version requirement that is not skipped."""
    tar_path = collection_artifact[1]
    req = collection.CollectionRequirement.from_tar(tar_path, True, True)

    assert req.namespace == u'ansible_namespace'
    assert req.name == u'collection'
    assert req.b_path == tar_path
    assert req.source is None
    # A tarball still needs installing, so it is not marked skip.
    assert req.skip is False
    assert req.versions == set([u'0.1.0'])
    assert req.latest_version == u'0.1.0'
    assert req.dependencies == {}
def test_build_requirement_from_tar_fail_not_tar(tmp_path_factory):
    """A file that is not a valid tar archive must raise an AnsibleError."""
    test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
    test_file = os.path.join(test_dir, b'fake.tar.gz')
    with open(test_file, 'wb') as test_obj:
        # Arbitrary bytes that are not a gzip/tar header.
        test_obj.write(b"\x00\x01\x02\x03")

    expected = "Collection artifact at '%s' is not a valid tar file." % to_native(test_file)
    with pytest.raises(AnsibleError, match=expected):
        collection.CollectionRequirement.from_tar(test_file, True, True)
def test_build_requirement_from_tar_no_manifest(tmp_path_factory):
    """A tar containing FILES.json but no MANIFEST.json must raise an AnsibleError."""
    test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))

    json_data = to_bytes(json.dumps(
        {
            'files': [],
            'format': 1,
        }
    ))

    tar_path = os.path.join(test_dir, b'ansible-collections.tar.gz')
    with tarfile.open(tar_path, 'w:gz') as tfile:
        # Add only FILES.json so the MANIFEST.json check fails.
        b_io = BytesIO(json_data)
        tar_info = tarfile.TarInfo('FILES.json')
        tar_info.size = len(json_data)
        tar_info.mode = 0o0644
        tfile.addfile(tarinfo=tar_info, fileobj=b_io)

    expected = "Collection at '%s' does not contain the required file MANIFEST.json." % to_native(tar_path)
    with pytest.raises(AnsibleError, match=expected):
        collection.CollectionRequirement.from_tar(tar_path, True, True)
def test_build_requirement_from_tar_no_files(tmp_path_factory):
    """A tar containing MANIFEST.json but no FILES.json must raise an AnsibleError."""
    test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))

    json_data = to_bytes(json.dumps(
        {
            'collection_info': {},
        }
    ))

    tar_path = os.path.join(test_dir, b'ansible-collections.tar.gz')
    with tarfile.open(tar_path, 'w:gz') as tfile:
        # Add only MANIFEST.json so the FILES.json check fails.
        b_io = BytesIO(json_data)
        tar_info = tarfile.TarInfo('MANIFEST.json')
        tar_info.size = len(json_data)
        tar_info.mode = 0o0644
        tfile.addfile(tarinfo=tar_info, fileobj=b_io)

    expected = "Collection at '%s' does not contain the required file FILES.json." % to_native(tar_path)
    with pytest.raises(AnsibleError, match=expected):
        collection.CollectionRequirement.from_tar(tar_path, True, True)
def test_build_requirement_from_tar_invalid_manifest(tmp_path_factory):
    """A tar whose MANIFEST.json member is not valid JSON must raise an AnsibleError."""
    test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))

    json_data = b"not a json"

    tar_path = os.path.join(test_dir, b'ansible-collections.tar.gz')
    with tarfile.open(tar_path, 'w:gz') as tfile:
        b_io = BytesIO(json_data)
        tar_info = tarfile.TarInfo('MANIFEST.json')
        tar_info.size = len(json_data)
        tar_info.mode = 0o0644
        tfile.addfile(tarinfo=tar_info, fileobj=b_io)

    expected = "Collection tar file member MANIFEST.json does not contain a valid json string."
    with pytest.raises(AnsibleError, match=expected):
        collection.CollectionRequirement.from_tar(tar_path, True, True)
def test_build_requirement_from_name(monkeypatch):
    """from_name with '*' should query the versions API and pick the highest version as latest."""
    galaxy_server = 'https://galaxy.ansible.com'
    json_str = artifact_versions_json('namespace', 'collection', ['2.1.9', '2.1.10'], galaxy_server)
    mock_open = MagicMock()
    mock_open.return_value = StringIO(json_str)
    monkeypatch.setattr(collection, 'open_url', mock_open)

    actual = collection.CollectionRequirement.from_name('namespace.collection', [galaxy_server], '*', True, True)

    assert actual.namespace == u'namespace'
    assert actual.name == u'collection'
    assert actual.b_path is None
    assert actual.source == to_text(galaxy_server)
    assert actual.skip is False
    assert actual.versions == set([u'2.1.9', u'2.1.10'])
    # Semantic-version ordering: 2.1.10 > 2.1.9 (not a lexical comparison).
    assert actual.latest_version == u'2.1.10'
    # Dependencies are resolved lazily, not during name resolution.
    assert actual.dependencies is None

    assert mock_open.call_count == 1
    assert mock_open.mock_calls[0][1][0] == u"%s/api/v2/collections/namespace/collection/versions/" % galaxy_server
    assert mock_open.mock_calls[0][2] == {'validate_certs': True}
def test_build_requirement_from_name_with_prerelease(monkeypatch):
    """Pre-release versions are excluded when the requirement is a plain wildcard."""
    galaxy_server = 'https://galaxy-dev.ansible.com'
    versions_json = artifact_versions_json('namespace', 'collection', ['1.0.1', '2.0.1-beta.1', '2.0.1'],
                                           galaxy_server)
    mock_open = MagicMock(return_value=StringIO(versions_json))

    monkeypatch.setattr(collection, 'open_url', mock_open)

    actual = collection.CollectionRequirement.from_name('namespace.collection', [galaxy_server], '*', True, True)

    assert actual.namespace == u'namespace'
    assert actual.name == u'collection'
    assert actual.b_path is None
    assert actual.source == to_text(galaxy_server)
    assert actual.skip is False
    # 2.0.1-beta.1 must have been filtered out of the candidate set.
    assert actual.versions == {u'1.0.1', u'2.0.1'}
    assert actual.latest_version == u'2.0.1'
    assert actual.dependencies is None

    assert mock_open.call_count == 1
    assert mock_open.mock_calls[0][1][0] == u"%s/api/v2/collections/namespace/collection/versions/" % galaxy_server
    assert mock_open.mock_calls[0][2] == {'validate_certs': True}
|
||||
|
||||
|
||||
# NOTE(review): 'requirment' typo is part of the established test name; renaming
# would change the pytest test ID so it is deliberately left alone.
def test_build_requirment_from_name_with_prerelease_explicit(monkeypatch):
    """An explicitly requested pre-release version is honoured (not filtered)."""
    galaxy_server = 'https://galaxy-dev.ansible.com'
    version_json = artifact_json('namespace', 'collection', '2.0.1-beta.1', {}, galaxy_server)
    mock_open = MagicMock(return_value=StringIO(version_json))

    monkeypatch.setattr(collection, 'open_url', mock_open)

    actual = collection.CollectionRequirement.from_name('namespace.collection', [galaxy_server], '2.0.1-beta.1', True,
                                                        True)

    assert actual.namespace == u'namespace'
    assert actual.name == u'collection'
    assert actual.b_path is None
    assert actual.source == to_text(galaxy_server)
    assert actual.skip is False
    assert actual.versions == {u'2.0.1-beta.1'}
    assert actual.latest_version == u'2.0.1-beta.1'
    assert actual.dependencies == {}

    # Exact-version requests hit the single-version endpoint directly.
    assert mock_open.call_count == 1
    assert mock_open.mock_calls[0][1][0] == u"%s/api/v2/collections/namespace/collection/versions/2.0.1-beta.1/" \
        % galaxy_server
    assert mock_open.mock_calls[0][2] == {'validate_certs': True}
|
||||
|
||||
|
||||
def test_build_requirement_from_name_second_server(monkeypatch):
    """When the first server 404s, resolution falls through to the next server."""
    galaxy_server = 'https://galaxy-dev.ansible.com'
    versions_json = artifact_versions_json('namespace', 'collection', ['1.0.1', '1.0.2', '1.0.3'], galaxy_server)
    mock_open = MagicMock(side_effect=[
        urllib_error.HTTPError('https://galaxy.server.com', 404, 'msg', {}, None),
        StringIO(versions_json),
    ])

    monkeypatch.setattr(collection, 'open_url', mock_open)

    actual = collection.CollectionRequirement.from_name('namespace.collection', ['https://broken.com/', galaxy_server],
                                                        '>1.0.1', False, True)

    assert actual.namespace == u'namespace'
    assert actual.name == u'collection'
    assert actual.b_path is None
    # The source must be the server that actually answered, not the broken one.
    assert actual.source == to_text(galaxy_server)
    assert actual.skip is False
    assert actual.versions == {u'1.0.2', u'1.0.3'}
    assert actual.latest_version == u'1.0.3'
    assert actual.dependencies is None

    assert mock_open.call_count == 2
    assert mock_open.mock_calls[0][1][0] == u"https://broken.com/api/v2/collections/namespace/collection/versions/"
    assert mock_open.mock_calls[0][2] == {'validate_certs': False}
    assert mock_open.mock_calls[1][1][0] == u"%s/api/v2/collections/namespace/collection/versions/" % galaxy_server
    assert mock_open.mock_calls[1][2] == {'validate_certs': False}
|
||||
|
||||
|
||||
def test_build_requirement_from_name_missing(monkeypatch):
    """Every configured server returning 404 results in a clear lookup failure."""
    mock_open = MagicMock(side_effect=urllib_error.HTTPError('https://galaxy.server.com', 404, 'msg', {}, None))

    monkeypatch.setattr(collection, 'open_url', mock_open)

    expected = "Failed to find collection namespace.collection:*"
    with pytest.raises(AnsibleError, match=expected):
        collection.CollectionRequirement.from_name(
            'namespace.collection', ['https://broken.com/', 'https://broken2.com'], '*', False, True)
|
||||
|
||||
|
||||
def test_build_requirement_from_name_single_version(monkeypatch):
    """An exact version requirement queries only the single-version endpoint."""
    galaxy_server = 'https://galaxy.ansible.com'
    version_json = artifact_json('namespace', 'collection', '2.0.0', {}, galaxy_server)
    mock_open = MagicMock(return_value=StringIO(version_json))

    monkeypatch.setattr(collection, 'open_url', mock_open)

    actual = collection.CollectionRequirement.from_name('namespace.collection', [galaxy_server], '2.0.0', True, True)

    assert actual.namespace == u'namespace'
    assert actual.name == u'collection'
    assert actual.b_path is None
    assert actual.source == to_text(galaxy_server)
    assert actual.skip is False
    assert actual.versions == {u'2.0.0'}
    assert actual.latest_version == u'2.0.0'
    # Single-version lookups also populate the (empty) dependency map.
    assert actual.dependencies == {}

    assert mock_open.call_count == 1
    assert mock_open.mock_calls[0][1][0] == u"%s/api/v2/collections/namespace/collection/versions/2.0.0/" \
        % galaxy_server
    assert mock_open.mock_calls[0][2] == {'validate_certs': True}
|
||||
|
||||
|
||||
def test_build_requirement_from_name_multiple_versions_one_match(monkeypatch):
    """A range that matches one candidate triggers a follow-up single-version fetch."""
    galaxy_server = 'https://galaxy.ansible.com'
    listing_json = artifact_versions_json('namespace', 'collection', ['2.0.0', '2.0.1', '2.0.2'], galaxy_server)
    detail_json = artifact_json('namespace', 'collection', '2.0.1', {}, galaxy_server)
    mock_open = MagicMock(side_effect=[StringIO(listing_json), StringIO(detail_json)])

    monkeypatch.setattr(collection, 'open_url', mock_open)

    actual = collection.CollectionRequirement.from_name('namespace.collection', [galaxy_server], '>=2.0.1,<2.0.2',
                                                        True, True)

    assert actual.namespace == u'namespace'
    assert actual.name == u'collection'
    assert actual.b_path is None
    assert actual.source == to_text(galaxy_server)
    assert actual.skip is False
    assert actual.versions == {u'2.0.1'}
    assert actual.latest_version == u'2.0.1'
    assert actual.dependencies == {}

    # First the version listing, then the detail of the single matching version.
    assert mock_open.call_count == 2
    assert mock_open.mock_calls[0][1][0] == u"%s/api/v2/collections/namespace/collection/versions/" % galaxy_server
    assert mock_open.mock_calls[0][2] == {'validate_certs': True}
    assert mock_open.mock_calls[1][1][0] == u"%s/api/v2/collections/namespace/collection/versions/2.0.1/" \
        % galaxy_server
    assert mock_open.mock_calls[1][2] == {'validate_certs': True}
|
||||
|
||||
|
||||
def test_build_requirement_from_name_multiple_version_results(monkeypatch):
    """Version listings split across paginated responses are all collected.

    The mocked server returns two pages (the first carries a 'next' link); the
    resolver must follow the link and union the versions from both pages before
    applying the '!=2.0.2' exclusion.
    """
    galaxy_server = 'https://galaxy-dev.ansible.com'

    # Page 1 of 2: 'next' points at ?page=2 so the client keeps fetching.
    json_str1 = json.dumps({
        'count': 6,
        'next': '%s/api/v2/collections/namespace/collection/versions/?page=2' % galaxy_server,
        'previous': None,
        'results': [
            {
                'href': '%s/api/v2/collections/namespace/collection/versions/2.0.0/' % galaxy_server,
                'version': '2.0.0',
            },
            {
                'href': '%s/api/v2/collections/namespace/collection/versions/2.0.1/' % galaxy_server,
                'version': '2.0.1',
            },
            {
                'href': '%s/api/v2/collections/namespace/collection/versions/2.0.2/' % galaxy_server,
                'version': '2.0.2',
            },
        ]
    })
    # Page 2 of 2: 'next' is None, which terminates pagination.
    json_str2 = json.dumps({
        'count': 6,
        'next': None,
        'previous': '%s/api/v2/collections/namespace/collection/versions/?page=1' % galaxy_server,
        'results': [
            {
                'href': '%s/api/v2/collections/namespace/collection/versions/2.0.3/' % galaxy_server,
                'version': '2.0.3',
            },
            {
                'href': '%s/api/v2/collections/namespace/collection/versions/2.0.4/' % galaxy_server,
                'version': '2.0.4',
            },
            {
                'href': '%s/api/v2/collections/namespace/collection/versions/2.0.5/' % galaxy_server,
                'version': '2.0.5',
            },
        ]
    })
    mock_open = MagicMock()
    mock_open.side_effect = (StringIO(to_text(json_str1)), StringIO(to_text(json_str2)))

    monkeypatch.setattr(collection, 'open_url', mock_open)

    actual = collection.CollectionRequirement.from_name('namespace.collection', [galaxy_server], '!=2.0.2',
                                                        True, True)

    assert actual.namespace == u'namespace'
    assert actual.name == u'collection'
    assert actual.b_path is None
    assert actual.source == to_text(galaxy_server)
    assert actual.skip is False
    # Versions from BOTH pages, minus the excluded 2.0.2.
    assert actual.versions == set([u'2.0.0', u'2.0.1', u'2.0.3', u'2.0.4', u'2.0.5'])
    assert actual.latest_version == u'2.0.5'
    assert actual.dependencies is None

    # One request per page, both with cert validation on.
    assert mock_open.call_count == 2
    assert mock_open.mock_calls[0][1][0] == u"%s/api/v2/collections/namespace/collection/versions/" % galaxy_server
    assert mock_open.mock_calls[0][2] == {'validate_certs': True}
    assert mock_open.mock_calls[1][1][0] == u"%s/api/v2/collections/namespace/collection/versions/?page=2" \
        % galaxy_server
    assert mock_open.mock_calls[1][2] == {'validate_certs': True}
|
||||
|
||||
|
||||
@pytest.mark.parametrize('versions, requirement, expected_filter, expected_latest', [
    [['1.0.0', '1.0.1'], '*', ['1.0.0', '1.0.1'], '1.0.1'],
    [['1.0.0', '1.0.5', '1.1.0'], '>1.0.0,<1.1.0', ['1.0.5'], '1.0.5'],
    [['1.0.0', '1.0.5', '1.1.0'], '>1.0.0,<=1.0.5', ['1.0.5'], '1.0.5'],
    [['1.0.0', '1.0.5', '1.1.0'], '>=1.1.0', ['1.1.0'], '1.1.0'],
    [['1.0.0', '1.0.5', '1.1.0'], '!=1.1.0', ['1.0.0', '1.0.5'], '1.0.5'],
    [['1.0.0', '1.0.5', '1.1.0'], '==1.0.5', ['1.0.5'], '1.0.5'],
    [['1.0.0', '1.0.5', '1.1.0'], '1.0.5', ['1.0.5'], '1.0.5'],
    [['1.0.0', '2.0.0', '3.0.0'], '>=2', ['2.0.0', '3.0.0'], '3.0.0'],
])
def test_add_collection_requirements(versions, requirement, expected_filter, expected_latest):
    """The constructor filters candidate versions against the given requirement."""
    candidate = collection.CollectionRequirement('namespace', 'name', None, 'https://galaxy.com', versions,
                                                 requirement, False)
    assert candidate.versions == set(expected_filter)
    assert candidate.latest_version == expected_latest
|
||||
|
||||
|
||||
def test_add_collection_requirement_to_unknown_installed_version():
    """A pinned requirement cannot be added over a skipped install of unknown version."""
    req = collection.CollectionRequirement('namespace', 'name', None, 'https://galaxy.com', ['*'], '*', False,
                                           skip=True)

    expected = "Cannot meet requirement namespace.name:1.0.0 as it is already installed at version 'unknown'."
    with pytest.raises(AnsibleError, match=expected):
        req.add_requirement(str(req), '1.0.0')
|
||||
|
||||
|
||||
def test_add_collection_wildcard_requirement_to_unknown_installed_version():
    """A wildcard requirement is compatible with a skipped install of unknown version."""
    req = collection.CollectionRequirement('namespace', 'name', None, 'https://galaxy.com', ['*'], '*', False,
                                           skip=True)
    req.add_requirement(str(req), '*')

    assert req.versions == {'*'}
    assert req.latest_version == '*'
|
||||
|
||||
|
||||
def test_add_collection_requirement_with_conflict():
    """Constructing a requirement no available version satisfies raises a detailed error."""
    source = 'https://galaxy.ansible.com'

    expected = "Cannot meet requirement ==1.0.2 for dependency namespace.name from source '%s'. Available versions " \
               "before last requirement added: 1.0.0, 1.0.1\n" \
               "Requirements from:\n" \
               "\tbase - 'namespace.name:==1.0.2'" % source
    # re.escape keeps the match literal, consistent with the other conflict tests
    # below; the message contains '.' regex metacharacters that would otherwise
    # match any character and make the assertion looser than intended.
    with pytest.raises(AnsibleError, match=re.escape(expected)):
        collection.CollectionRequirement('namespace', 'name', None, source, ['1.0.0', '1.0.1'], '==1.0.2', False)
|
||||
|
||||
|
||||
def test_add_requirement_to_existing_collection_with_conflict():
    """Adding a dependency no candidate version satisfies raises with full provenance."""
    source = 'https://galaxy.ansible.com'
    req = collection.CollectionRequirement('namespace', 'name', None, source, ['1.0.0', '1.0.1'], '*', False)

    expected = "Cannot meet dependency requirement 'namespace.name:1.0.2' for collection namespace.collection2 from " \
               "source '%s'. Available versions before last requirement added: 1.0.0, 1.0.1\n" \
               "Requirements from:\n" \
               "\tbase - 'namespace.name:*'\n" \
               "\tnamespace.collection2 - 'namespace.name:1.0.2'" % source
    with pytest.raises(AnsibleError, match=re.escape(expected)):
        req.add_requirement('namespace.collection2', '1.0.2')
|
||||
|
||||
|
||||
def test_add_requirement_to_installed_collection_with_conflict():
    """A direct (parent=None) conflict with an installed collection suggests --force."""
    source = 'https://galaxy.ansible.com'
    req = collection.CollectionRequirement('namespace', 'name', None, source, ['1.0.0', '1.0.1'], '*', False,
                                           skip=True)

    expected = "Cannot meet requirement namespace.name:1.0.2 as it is already installed at version '1.0.1'. " \
               "Use --force to overwrite"
    with pytest.raises(AnsibleError, match=re.escape(expected)):
        req.add_requirement(None, '1.0.2')
|
||||
|
||||
|
||||
def test_add_requirement_to_installed_collection_with_conflict_as_dep():
    """A dependency-driven conflict with an installed collection suggests --force-with-deps."""
    source = 'https://galaxy.ansible.com'
    req = collection.CollectionRequirement('namespace', 'name', None, source, ['1.0.0', '1.0.1'], '*', False,
                                           skip=True)

    expected = "Cannot meet requirement namespace.name:1.0.2 as it is already installed at version '1.0.1'. " \
               "Use --force-with-deps to overwrite"
    with pytest.raises(AnsibleError, match=re.escape(expected)):
        req.add_requirement('namespace.collection2', '1.0.2')
|
||||
|
||||
|
||||
def test_install_skipped_collection(monkeypatch):
    """install() on a skipped requirement is a no-op apart from one display message."""
    mock_display = MagicMock()
    monkeypatch.setattr(Display, 'display', mock_display)

    req = collection.CollectionRequirement('namespace', 'name', None, 'source', ['1.0.0'], '*', False, skip=True)
    req.install(None, None)

    assert mock_display.call_count == 1
    assert mock_display.mock_calls[0][1][0] == "Skipping 'namespace.name' as it is already installed"
|
||||
|
||||
|
||||
def test_install_collection(collection_artifact, monkeypatch):
    """Installing from a tar lays out the collection and cleans the target and temp dirs."""
    mock_display = MagicMock()
    monkeypatch.setattr(Display, 'display', mock_display)

    collection_tar = collection_artifact[1]
    b_parent_dir = os.path.split(collection_tar)[0]
    output_path = os.path.join(b_parent_dir, b'output')
    collection_path = os.path.join(output_path, b'ansible_namespace', b'collection')
    # Pre-create a stray folder so we can verify the install cleans out the dir.
    os.makedirs(os.path.join(collection_path, b'delete_me'))

    temp_path = os.path.join(b_parent_dir, b'temp')
    os.makedirs(temp_path)

    req = collection.CollectionRequirement.from_tar(collection_tar, True, True)
    req.install(to_text(output_path), temp_path)

    # Ensure the temp directory is empty, nothing is left behind.
    assert os.listdir(temp_path) == []

    assert sorted(os.listdir(collection_path)) == [b'FILES.json', b'MANIFEST.json', b'README.md', b'docs',
                                                   b'playbooks', b'plugins', b'roles']

    assert mock_display.call_count == 1
    assert mock_display.mock_calls[0][1][0] == "Installing 'ansible_namespace.collection:0.1.0' to '%s'" \
        % to_text(collection_path)
|
||||
|
||||
|
||||
def test_install_collection_with_download(collection_artifact, monkeypatch):
    """A requirement without a local tar downloads the artifact (mocked) and installs it."""
    collection_tar = collection_artifact[1]
    b_parent_dir = os.path.split(collection_tar)[0]
    output_path = os.path.join(b_parent_dir, b'output')
    collection_path = os.path.join(output_path, b'ansible_namespace', b'collection')

    mock_display = MagicMock()
    monkeypatch.setattr(Display, 'display', mock_display)

    # Short-circuit the download to return the pre-built tar from the fixture.
    mock_download = MagicMock(return_value=collection_tar)
    monkeypatch.setattr(collection, '_download_file', mock_download)

    temp_path = os.path.join(b_parent_dir, b'temp')
    os.makedirs(temp_path)

    req = collection.CollectionRequirement('ansible_namespace', 'collection', None, ['https://galaxy.ansible.com'],
                                           ['0.1.0'], '*', False)
    req._galaxy_info = {
        'download_url': 'https://downloadme.com',
        'artifact': {
            'sha256': 'myhash',
        },
    }
    req.install(to_text(output_path), temp_path)

    # Ensure the temp directory is empty, nothing is left behind.
    assert os.listdir(temp_path) == []

    assert sorted(os.listdir(collection_path)) == [b'FILES.json', b'MANIFEST.json', b'README.md', b'docs',
                                                   b'playbooks', b'plugins', b'roles']

    assert mock_display.call_count == 1
    assert mock_display.mock_calls[0][1][0] == "Installing 'ansible_namespace.collection:0.1.0' to '%s'" \
        % to_text(collection_path)

    # Download must be called once: url, destination, expected hash, validate_certs.
    assert mock_download.call_count == 1
    assert mock_download.mock_calls[0][1][0] == 'https://downloadme.com'
    assert mock_download.mock_calls[0][1][1] == temp_path
    assert mock_download.mock_calls[0][1][2] == 'myhash'
    assert mock_download.mock_calls[0][1][3] is True
|
||||
|
||||
|
||||
def test_install_collections_from_tar(collection_artifact, monkeypatch):
    """install_collections() with a local tar path installs into the output dir."""
    collection_path, collection_tar = collection_artifact
    temp_path = os.path.split(collection_tar)[0]
    # Remove the build skeleton so the install target starts empty.
    shutil.rmtree(collection_path)

    mock_display = MagicMock()
    monkeypatch.setattr(Display, 'display', mock_display)

    collection.install_collections([(to_text(collection_tar), '*', None,)], to_text(temp_path),
                                   [u'https://galaxy.ansible.com'], True, False, False, False, False)

    assert os.path.isdir(collection_path)

    assert sorted(os.listdir(collection_path)) == [b'FILES.json', b'MANIFEST.json', b'README.md', b'docs',
                                                   b'playbooks', b'plugins', b'roles']

    with open(os.path.join(collection_path, b'MANIFEST.json'), 'rb') as manifest_obj:
        actual_manifest = json.loads(to_text(manifest_obj.read()))

    assert actual_manifest['collection_info']['namespace'] == 'ansible_namespace'
    assert actual_manifest['collection_info']['name'] == 'collection'
    assert actual_manifest['collection_info']['version'] == '0.1.0'

    assert mock_display.call_count == 1
    assert mock_display.mock_calls[0][1][0] == "Installing 'ansible_namespace.collection:0.1.0' to '%s'" \
        % to_text(collection_path)
|
||||
|
||||
|
||||
def test_install_collections_existing_without_force(collection_artifact, monkeypatch):
    """An already-present collection is skipped (not overwritten) without --force."""
    collection_path, collection_tar = collection_artifact
    temp_path = os.path.split(collection_tar)[0]

    mock_display = MagicMock()
    monkeypatch.setattr(Display, 'display', mock_display)

    # collection_path is left in place, so the original build skeleton looks
    # like an existing install and the new tar should be skipped.
    collection.install_collections([(to_text(collection_tar), '*', None,)], to_text(temp_path),
                                   [u'https://galaxy.ansible.com'], True, False, False, False, False)

    assert os.path.isdir(collection_path)

    assert sorted(os.listdir(collection_path)) == [b'README.md', b'docs', b'galaxy.yml', b'playbooks', b'plugins',
                                                   b'roles']

    assert mock_display.call_count == 2
    # The first message is the warning about the missing MANIFEST.json; its
    # exact text contains path-dependent line breaks so only the skip message
    # is asserted.
    assert mock_display.mock_calls[1][1][0] == "Skipping 'ansible_namespace.collection' as it is already installed"
|
||||
|
||||
|
||||
# A collection that depends on itself must still install once — this guards
# against the resolver getting stuck in a recursive loop.
@pytest.mark.parametrize('collection_artifact', [
    {'ansible_namespace.collection': '>=0.0.1'},
], indirect=True)
def test_install_collection_with_circular_dependency(collection_artifact, monkeypatch):
    collection_path, collection_tar = collection_artifact
    temp_path = os.path.split(collection_tar)[0]
    # Remove the build skeleton so the install target starts empty.
    shutil.rmtree(collection_path)

    mock_display = MagicMock()
    monkeypatch.setattr(Display, 'display', mock_display)

    collection.install_collections([(to_text(collection_tar), '*', None,)], to_text(temp_path),
                                   [u'https://galaxy.ansible.com'], True, False, False, False, False)

    assert os.path.isdir(collection_path)

    assert sorted(os.listdir(collection_path)) == [b'FILES.json', b'MANIFEST.json', b'README.md', b'docs',
                                                   b'playbooks', b'plugins', b'roles']

    with open(os.path.join(collection_path, b'MANIFEST.json'), 'rb') as manifest_obj:
        actual_manifest = json.loads(to_text(manifest_obj.read()))

    assert actual_manifest['collection_info']['namespace'] == 'ansible_namespace'
    assert actual_manifest['collection_info']['name'] == 'collection'
    assert actual_manifest['collection_info']['version'] == '0.1.0'

    # Exactly one install despite the self-dependency.
    assert mock_display.call_count == 1
    assert mock_display.mock_calls[0][1][0] == "Installing 'ansible_namespace.collection:0.1.0' to '%s'" \
        % to_text(collection_path)
|
Loading…
Reference in New Issue