ansible-galaxy: add collection sub command (#57106)

* ansible-galaxy: add collection init sub command

* Fix changelog and other sanity issues

* Slim down skeleton structure, fix encoding issue on template

* Fix doc generation code to include sub commands

* Added build step

* Tidy up the build action

* Fixed up doc changes and slight testing tweaks

* Re-organise tests to use pytest

* Added publish step and fixed up issues after working with Galaxy

* Unit test improvements

* Fix unit test on 3.5

* Add remaining build tests

* Test fixes, make the integration tests clearer to debug on failures

* Removed unicode name tests until I've got further clarification

* Added publish unit tests

* Change expected length value

* Added collection install steps, tests forthcoming

* Added unit tests for collection install entrypoint

* Added some more tests for collection install

* follow proper encoding rules and added more tests

* Add remaining tests

* tidied up tests and code based on review

* exclude pre-release versions from galaxy API
Jordan Borean committed by GitHub
parent d336a989e4
commit b6791e6ae3

@ -0,0 +1,5 @@
minor_changes:
- ansible-galaxy - Added the ``collection init`` command to create a skeleton collection directory.
- ansible-galaxy - Added the ``collection build`` command to build a collection tarball ready for uploading.
- ansible-galaxy - Added the ``collection publish`` command to publish a collection tarball to a Galaxy server.
- ansible-galaxy - Added the ``collection install`` command to install collections locally.
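
As a quick usage sketch (not part of the change itself), the new subcommands can be driven through the GalaxyCLI entry point, roughly the way this PR's unit tests invoke them; the collection names and paths below are hypothetical:

    # Hypothetical examples of the four new subcommands via the Python entry point.
    from ansible.cli.galaxy import GalaxyCLI

    # ansible-galaxy collection init my_ns.my_col
    GalaxyCLI(args=['ansible-galaxy', 'collection', 'init', 'my_ns.my_col']).run()

    # ansible-galaxy collection build my_ns/my_col --output-path releases
    GalaxyCLI(args=['ansible-galaxy', 'collection', 'build', 'my_ns/my_col',
                    '--output-path', 'releases']).run()

    # ansible-galaxy collection publish releases/my_ns-my_col-1.0.0.tar.gz --no-wait
    # ansible-galaxy collection install my_ns.my_col -p ./collections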

@ -132,6 +132,7 @@ def opts_docs(cli_class_name, cli_module_name):
'short_desc': cli.parser.description,
'long_desc': trim_docstring(cli.__doc__),
'actions': {},
'content_depth': 2,
}
option_info = {'option_names': [],
'options': [],
@ -157,44 +158,56 @@ def opts_docs(cli_class_name, cli_module_name):
# now for each action/subcommand
# force populate parser with per action options
def get_actions(parser, docs):
    # use class attrs not the attrs on a instance (not that it matters here...)
    try:
        subparser = parser._subparsers._group_actions[0].choices
    except AttributeError:
        subparser = {}

    depth = 0

    for action, parser in subparser.items():
        action_info = {'option_names': [],
                       'options': [],
                       'actions': {}}
        # docs['actions'][action] = {}
        # docs['actions'][action]['name'] = action
        action_info['name'] = action
        action_info['desc'] = trim_docstring(getattr(cli, 'execute_%s' % action).__doc__)

        # docs['actions'][action]['desc'] = getattr(cli, 'execute_%s' % action).__doc__.strip()
        action_doc_list = opt_doc_list(parser)

        uncommon_options = []
        for action_doc in action_doc_list:
            option_aliases = action_doc.get('options', [])
            for option_alias in option_aliases:

                if option_alias in shared_opt_names:
                    continue

                # TODO: use set
                if option_alias not in action_info['option_names']:
                    action_info['option_names'].append(option_alias)

                if action_doc in action_info['options']:
                    continue

                uncommon_options.append(action_doc)

            action_info['options'] = uncommon_options

        depth = 1 + get_actions(parser, action_info)

        docs['actions'][action] = action_info

    return depth

action_depth = get_actions(cli.parser, docs)
docs['content_depth'] = action_depth + 1

docs['options'] = opt_doc_list(cli.parser)
return docs

@ -14,7 +14,7 @@
.. contents::
:local:
:depth: 2
:depth: {{content_depth}}
.. program:: {{cli_name}}
@ -84,6 +84,29 @@ Actions
{% endfor %}
{% endif %}
{% for sub_action in actions[action]['actions'] %}
.. program:: {{cli_name}} {{action}} {{sub_action}}
.. _{{cli_name|replace('-','_')}}_{{action}}_{{sub_action}}:
{{ action + " " + sub_action }}
{{ '+' * (action|length + sub_action|length + 1) }}
{{ (actions[action]['actions'][sub_action]['desc']|default(' '))}}
{% if actions[action]['actions'][sub_action]['options'] %}
{% for option in actions[action]['actions'][sub_action]['options']|sort(attribute='options') %}
.. option:: {% for switch in option['options'] if switch in actions[action]['actions'][sub_action]['option_names'] %}{{switch}} {% if option['arg'] %} <{{option['arg']}}>{% endif %}{% if not loop.last %}, {% endif %}{% endfor %}
{{ (option['desc']) }}
{% endfor %}
{% endif %}
{% endfor %}
{% endfor %}
.. program:: {{cli_name}}
{% endif %}

@ -20,12 +20,17 @@ from ansible.cli.arguments import option_helpers as opt_help
from ansible.errors import AnsibleError, AnsibleOptionsError
from ansible.galaxy import Galaxy
from ansible.galaxy.api import GalaxyAPI
from ansible.galaxy.collection import build_collection, install_collections, parse_collections_requirements_file, \
publish_collection
from ansible.galaxy.login import GalaxyLogin
from ansible.galaxy.role import GalaxyRole
from ansible.galaxy.token import GalaxyToken
from ansible.module_utils._text import to_native, to_text
from ansible.module_utils.ansible_release import __version__ as ansible_version
from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.playbook.role.requirement import RoleRequirement
from ansible.utils.collection_loader import is_collection_ref
from ansible.utils.display import Display
from ansible.utils.plugin_docs import get_versioned_doclink
display = Display()
@ -36,6 +41,11 @@ class GalaxyCLI(CLI):
SKIP_INFO_KEYS = ("name", "description", "readme_html", "related", "summary_fields", "average_aw_composite", "average_aw_score", "url")
def __init__(self, args):
# Inject role into sys.argv[1] as a backwards compatibility step
if len(args) > 1 and args[1] not in ['-h', '--help'] and 'role' not in args and 'collection' not in args:
# TODO: Should we add a warning here and eventually deprecate the implicit role subcommand choice
args.insert(1, 'role')
self.api = None
self.galaxy = None
super(GalaxyCLI, self).__init__(args)
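# Sketch of the shim above (argv values are illustrative): a legacy call
# without an explicit type gets 'role' injected as the implicit subcommand.
#
#   args = ['ansible-galaxy', 'install', 'geerlingguy.apache']
#   # -> ['ansible-galaxy', 'role', 'install', 'geerlingguy.apache']
#   args = ['ansible-galaxy', 'collection', 'init', 'my_ns.my_col']
#   # -> unchanged, 'collection' is already explicit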
@ -71,16 +81,82 @@ class GalaxyCLI(CLI):
'configured via DEFAULT_ROLES_PATH: %s ' % default_roles_path)
force = opt_help.argparse.ArgumentParser(add_help=False)
force.add_argument('-f', '--force', dest='force', action='store_true', default=False, help='Force overwriting an existing role')
subparsers = self.parser.add_subparsers(dest='action')
subparsers.required = True
delete_parser = subparsers.add_parser('delete', parents=[user_repo, common],
help='Removes the role from Galaxy. It does not remove or alter the actual GitHub repository.')
force.add_argument('-f', '--force', dest='force', action='store_true', default=False,
help='Force overwriting an existing role or collection')
# Add sub parser for the Galaxy role type (role or collection)
type_parser = self.parser.add_subparsers(metavar='TYPE', dest='type')
type_parser.required = True
# Define the actions for the collection object type
collection = type_parser.add_parser('collection',
parents=[common],
help='Manage an Ansible Galaxy collection.')
collection_parser = collection.add_subparsers(metavar='ACTION', dest='collection')
collection_parser.required = True
build_parser = collection_parser.add_parser(
'build', help='Build an Ansible collection artifact that can be published to Ansible Galaxy.',
parents=[common, force])
build_parser.set_defaults(func=self.execute_build)
build_parser.add_argument(
'args', metavar='collection', nargs='*', default=('./',),
help='Path to the collection(s) directory to build. This should be the directory that contains the '
'galaxy.yml file. The default is the current working directory.')
build_parser.add_argument(
'--output-path', dest='output_path', default='./',
help='The path in which the collection artifact is built. The default is the current working directory.')
self.add_init_parser(collection_parser, [common, force])
cinstall_parser = collection_parser.add_parser('install', help='Install collection from Ansible Galaxy',
parents=[force, common])
cinstall_parser.set_defaults(func=self.execute_install)
cinstall_parser.add_argument('args', metavar='collection_name', nargs='*',
help='The collection(s) name or path/url to a tar.gz collection artifact. This '
'is mutually exclusive with --requirements-file.')
cinstall_parser.add_argument('-p', '--collections-path', dest='collections_path', default='./',
help='The path to the directory containing your collections.')
cinstall_parser.add_argument('-i', '--ignore-errors', dest='ignore_errors', action='store_true', default=False,
help='Ignore errors during installation and continue with the next specified '
'collection. This will not ignore dependency conflict errors.')
cinstall_parser.add_argument('-r', '--requirements-file', dest='requirements',
help='A file containing a list of collections to be installed.')
cinstall_exclusive = cinstall_parser.add_mutually_exclusive_group()
cinstall_exclusive.add_argument('-n', '--no-deps', dest='no_deps', action='store_true', default=False,
help="Don't download collections listed as dependencies")
cinstall_exclusive.add_argument('--force-with-deps', dest='force_with_deps', action='store_true', default=False,
help="Force overwriting an existing collection and its dependencies")
publish_parser = collection_parser.add_parser(
'publish', help='Publish a collection artifact to Ansible Galaxy.',
parents=[common])
publish_parser.set_defaults(func=self.execute_publish)
publish_parser.add_argument(
'args', metavar='collection_path', help='The path to the collection tarball to publish.')
publish_parser.add_argument(
'--api-key', dest='api_key',
help='The Ansible Galaxy API key which can be found at https://galaxy.ansible.com/me/preferences. '
'You can also use ansible-galaxy login to retrieve this key.')
publish_parser.add_argument(
'--no-wait', dest='wait', action='store_false', default=True,
help="Don't wait for import validation results.")
# Define the actions for the role object type
role = type_parser.add_parser('role',
parents=[common],
help='Manage an Ansible Galaxy role.')
role_parser = role.add_subparsers(metavar='ACTION', dest='role')
role_parser.required = True
delete_parser = role_parser.add_parser('delete', parents=[user_repo, common],
help='Removes the role from Galaxy. It does not remove or alter the actual GitHub repository.')
delete_parser.set_defaults(func=self.execute_delete)
import_parser = subparsers.add_parser('import', help='Import a role', parents=[user_repo, common])
import_parser = role_parser.add_parser('import', help='Import a role', parents=[user_repo, common])
import_parser.set_defaults(func=self.execute_import)
import_parser.add_argument('--no-wait', dest='wait', action='store_false', default=True, help="Don't wait for import results.")
import_parser.add_argument('--branch', dest='reference',
@ -89,24 +165,20 @@ class GalaxyCLI(CLI):
import_parser.add_argument('--status', dest='check_status', action='store_true', default=False,
help='Check the status of the most recent import request for given github_user/github_repo.')
info_parser = subparsers.add_parser('info', help='View more details about a specific role.',
parents=[offline, common, roles_path])
info_parser = role_parser.add_parser('info', help='View more details about a specific role.',
parents=[offline, common, roles_path])
info_parser.set_defaults(func=self.execute_info)
info_parser.add_argument('args', nargs='+', help='role', metavar='role_name[,version]')
init_parser = subparsers.add_parser('init', help='Initialize new role with the base structure of a role.',
parents=[offline, force, common])
init_parser.set_defaults(func=self.execute_init)
init_parser.add_argument('--init-path', dest='init_path', default="./",
help='The path in which the skeleton role will be created. The default is the current working directory.')
init_parser.add_argument('--type', dest='role_type', action='store', default='default',
help="Initialize using an alternate role type. Valid types include: 'container', 'apb' and 'network'.")
init_parser.add_argument('--role-skeleton', dest='role_skeleton', default=C.GALAXY_ROLE_SKELETON,
help='The path to a role skeleton that the new role should be based upon.')
init_parser.add_argument('role_name', help='Role name')
install_parser = subparsers.add_parser('install', help='Install Roles from file(s), URL(s) or tar file(s)',
parents=[force, common, roles_path])
rinit_parser = self.add_init_parser(role_parser, [offline, force, common])
rinit_parser.add_argument('--type',
dest='role_type',
action='store',
default='default',
help="Initialize using an alternate role type. Valid types include: 'container', 'apb' and 'network'.")
install_parser = role_parser.add_parser('install', help='Install Roles from file(s), URL(s) or tar file(s)',
parents=[force, common, roles_path])
install_parser.set_defaults(func=self.execute_install)
install_parser.add_argument('-i', '--ignore-errors', dest='ignore_errors', action='store_true', default=False,
help='Ignore errors and continue with the next specified role.')
@ -120,31 +192,32 @@ class GalaxyCLI(CLI):
install_exclusive.add_argument('--force-with-deps', dest='force_with_deps', action='store_true', default=False,
help="Force overwriting an existing role and it's dependencies")
remove_parser = subparsers.add_parser('remove', help='Delete roles from roles_path.', parents=[common, roles_path])
remove_parser = role_parser.add_parser('remove', help='Delete roles from roles_path.', parents=[common, roles_path])
remove_parser.set_defaults(func=self.execute_remove)
remove_parser.add_argument('args', help='Role(s)', metavar='role', nargs='+')
list_parser = subparsers.add_parser('list', help='Show the name and version of each role installed in the roles_path.',
parents=[common, roles_path])
list_parser = role_parser.add_parser('list', help='Show the name and version of each role installed in the roles_path.',
parents=[common, roles_path])
list_parser.set_defaults(func=self.execute_list)
list_parser.add_argument('role', help='Role', nargs='?', metavar='role')
login_parser = subparsers.add_parser('login', parents=[common],
help="Login to api.github.com server in order to use ansible-galaxy sub "
"command such as 'import', 'delete' and 'setup'")
login_parser = role_parser.add_parser('login', parents=[common],
help="Login to api.github.com server in order to use ansible-galaxy role "
"sub command such as 'import', 'delete', 'publish', and 'setup'")
login_parser.set_defaults(func=self.execute_login)
login_parser.add_argument('--github-token', dest='token', default=None, help='Identify with github token rather than username and password.')
login_parser.add_argument('--github-token', dest='token', default=None,
help='Identify with github token rather than username and password.')
search_parser = subparsers.add_parser('search', help='Search the Galaxy database by tags, platforms, author and multiple keywords.',
parents=[common])
search_parser = role_parser.add_parser('search', help='Search the Galaxy database by tags, platforms, author and multiple keywords.',
parents=[common])
search_parser.set_defaults(func=self.execute_search)
search_parser.add_argument('--platforms', dest='platforms', help='list of OS platforms to filter by')
search_parser.add_argument('--galaxy-tags', dest='galaxy_tags', help='list of galaxy tags to filter by')
search_parser.add_argument('--author', dest='author', help='GitHub username')
search_parser.add_argument('args', help='Search terms', metavar='searchterm', nargs='*')
setup_parser = subparsers.add_parser('setup', help='Manage the integration between Galaxy and the given source.',
parents=[roles_path, common])
setup_parser = role_parser.add_parser('setup', help='Manage the integration between Galaxy and the given source.',
parents=[roles_path, common])
setup_parser.set_defaults(func=self.execute_setup)
setup_parser.add_argument('--remove', dest='remove_id', default=None,
help='Remove the integration matching the provided ID value. Use --list to see ID values.')
@ -154,6 +227,32 @@ class GalaxyCLI(CLI):
setup_parser.add_argument('github_repo', help='GitHub repository')
setup_parser.add_argument('secret', help='Secret')
def add_init_parser(self, parser, parents):
galaxy_type = parser.dest
obj_name_kwargs = {}
if galaxy_type == 'collection':
obj_name_kwargs['type'] = GalaxyCLI._validate_collection_name
init_parser = parser.add_parser('init',
help='Initialize new {0} with the base structure of a {0}.'.format(galaxy_type),
parents=parents)
init_parser.set_defaults(func=self.execute_init)
init_parser.add_argument('--init-path',
dest='init_path',
default='./',
help='The path in which the skeleton {0} will be created. The default is the current working directory.'.format(galaxy_type))
init_parser.add_argument('--{0}-skeleton'.format(galaxy_type),
dest='{0}_skeleton'.format(galaxy_type),
default=C.GALAXY_ROLE_SKELETON,
help='The path to a {0} skeleton that the new {0} should be based upon.'.format(galaxy_type))
init_parser.add_argument('{0}_name'.format(galaxy_type),
help='{0} name'.format(galaxy_type.capitalize()),
**obj_name_kwargs)
return init_parser
def post_process_args(self, options):
options = super(GalaxyCLI, self).post_process_args(options)
display.verbosity = options.verbosity
@ -199,59 +298,132 @@ class GalaxyCLI(CLI):
return u'\n'.join(text)
@staticmethod
def _resolve_path(path):
return os.path.abspath(os.path.expanduser(os.path.expandvars(path)))
@staticmethod
def _validate_collection_name(name):
if is_collection_ref('ansible_collections.{0}'.format(name)):
return name
raise AnsibleError("Invalid collection name, must be in the format <namespace>.<collection>")
############################
# execute actions
############################
def execute_role(self):
"""
Perform the action on an Ansible Galaxy role. Must be combined with a further action like delete/install/init
as listed below.
"""
# To satisfy doc build
pass
def execute_collection(self):
"""
Perform the action on an Ansible Galaxy collection. Must be combined with a further action like init/install as
listed below.
"""
# To satisfy doc build
pass
def execute_build(self):
"""
Build an Ansible Galaxy collection artifact that can be stored in a central repository like Ansible Galaxy.
"""
force = context.CLIARGS['force']
output_path = GalaxyCLI._resolve_path(context.CLIARGS['output_path'])
b_output_path = to_bytes(output_path, errors='surrogate_or_strict')
if not os.path.exists(b_output_path):
os.makedirs(b_output_path)
elif os.path.isfile(b_output_path):
raise AnsibleError("- the output collection directory %s is a file - aborting" % to_native(output_path))
for collection_path in context.CLIARGS['args']:
collection_path = GalaxyCLI._resolve_path(collection_path)
build_collection(collection_path, output_path, force)
def execute_init(self):
    """
    Creates the skeleton framework of a role or collection that complies with the Galaxy metadata format.
    """
    galaxy_type = context.CLIARGS['type']

    init_path = context.CLIARGS['init_path']
    force = context.CLIARGS['force']
    obj_skeleton = context.CLIARGS['{0}_skeleton'.format(galaxy_type)]

    obj_name = context.CLIARGS['{0}_name'.format(galaxy_type)]

    inject_data = dict(
        author='your name',
        description='your description',
        company='your company (optional)',
        issue_tracker_url='http://example.com/issue/tracker',
        repository_url='http://example.com/repository',
        documentation_url='http://docs.example.com',
        homepage_url='http://example.com',
        min_ansible_version=ansible_version[:3],  # x.y
        ansible_plugin_list_dir=get_versioned_doclink('plugins/plugins.html'),
    )

    if galaxy_type == 'role':
        inject_data['role_name'] = obj_name
        inject_data['role_type'] = context.CLIARGS['role_type']
        inject_data['license'] = 'license (GPL-2.0-or-later, MIT, etc)'

        obj_path = os.path.join(init_path, obj_name)
    elif galaxy_type == 'collection':
        namespace, collection_name = obj_name.split('.', 1)

        inject_data['namespace'] = namespace
        inject_data['collection_name'] = collection_name
        inject_data['license'] = 'GPL-2.0-or-later'

        obj_path = os.path.join(init_path, namespace, collection_name)

    b_obj_path = to_bytes(obj_path, errors='surrogate_or_strict')

    if os.path.exists(b_obj_path):
        if os.path.isfile(obj_path):
            raise AnsibleError("- the path %s already exists, but is a file - aborting" % to_native(obj_path))
        elif not force:
            raise AnsibleError("- the directory %s already exists. "
                               "You can use --force to re-initialize this directory,\n"
                               "however it will reset any main.yml files that may have\n"
                               "been modified there already." % to_native(obj_path))

    if obj_skeleton is not None:
        skeleton_ignore_expressions = C.GALAXY_ROLE_SKELETON_IGNORE
    else:
        obj_skeleton = self.galaxy.default_role_skeleton_path
        skeleton_ignore_expressions = ['^.*/.git_keep$']

    obj_skeleton = os.path.expanduser(obj_skeleton)
    skeleton_ignore_re = [re.compile(x) for x in skeleton_ignore_expressions]

    if not os.path.exists(obj_skeleton):
        raise AnsibleError("- the skeleton path '{0}' does not exist, cannot init {1}".format(
            to_native(obj_skeleton), galaxy_type)
        )

    template_env = Environment(loader=FileSystemLoader(obj_skeleton))

    # create role directory
    if not os.path.exists(b_obj_path):
        os.makedirs(b_obj_path)

    for root, dirs, files in os.walk(obj_skeleton, topdown=True):
        rel_root = os.path.relpath(root, obj_skeleton)
        rel_dirs = rel_root.split(os.sep)
        rel_root_dir = rel_dirs[0]
        if galaxy_type == 'collection':
            # A collection can contain templates in playbooks/*/templates and roles/*/templates
            in_templates_dir = rel_root_dir in ['playbooks', 'roles'] and 'templates' in rel_dirs
        else:
            in_templates_dir = rel_root_dir == 'templates'

        dirs[:] = [d for d in dirs if not any(r.match(d) for r in skeleton_ignore_re)]

        for f in files:
@ -260,18 +432,18 @@ class GalaxyCLI(CLI):
continue
elif ext == ".j2" and not in_templates_dir:
src_template = os.path.join(rel_root, f)
dest_file = os.path.join(role_path, rel_root, filename)
template_env.get_template(src_template).stream(inject_data).dump(dest_file)
dest_file = os.path.join(obj_path, rel_root, filename)
template_env.get_template(src_template).stream(inject_data).dump(dest_file, encoding='utf-8')
else:
f_rel_path = os.path.relpath(os.path.join(root, f), role_skeleton)
shutil.copyfile(os.path.join(root, f), os.path.join(role_path, f_rel_path))
f_rel_path = os.path.relpath(os.path.join(root, f), obj_skeleton)
shutil.copyfile(os.path.join(root, f), os.path.join(obj_path, f_rel_path))
for d in dirs:
dir_path = os.path.join(role_path, rel_root, d)
if not os.path.exists(dir_path):
os.makedirs(dir_path)
b_dir_path = to_bytes(os.path.join(obj_path, rel_root, d), errors='surrogate_or_strict')
if not os.path.exists(b_dir_path):
os.makedirs(b_dir_path)
display.display("- %s was created successfully" % role_name)
display.display("- %s was created successfully" % obj_name)
def execute_info(self):
"""
@ -321,6 +493,52 @@ class GalaxyCLI(CLI):
uses the args list of roles to be installed, unless -f was specified. The list of roles
can be a name (which will be downloaded via the galaxy API and github), or it can be a local tar archive file.
"""
if context.CLIARGS['type'] == 'collection':
collections = context.CLIARGS['args']
force = context.CLIARGS['force']
output_path = context.CLIARGS['collections_path']
# TODO: use a list of servers that have been configured in ~/.ansible_galaxy
servers = [context.CLIARGS['api_server']]
ignore_certs = context.CLIARGS['ignore_certs']
ignore_errors = context.CLIARGS['ignore_errors']
requirements_file = context.CLIARGS['requirements']
no_deps = context.CLIARGS['no_deps']
force_deps = context.CLIARGS['force_with_deps']
if collections and requirements_file:
raise AnsibleError("The positional collection_name arg and --requirements-file are mutually exclusive.")
elif not collections and not requirements_file:
raise AnsibleError("You must specify a collection name or a requirements file.")
if requirements_file:
requirements_file = GalaxyCLI._resolve_path(requirements_file)
collection_requirements = parse_collections_requirements_file(requirements_file)
else:
collection_requirements = []
for collection_input in collections:
name, dummy, requirement = collection_input.partition(':')
collection_requirements.append((name, requirement or '*', None))
output_path = GalaxyCLI._resolve_path(output_path)
collections_path = C.COLLECTIONS_PATHS
if len([p for p in collections_path if p.startswith(output_path)]) == 0:
display.warning("The specified collections path '%s' is not part of the configured Ansible "
"collections paths '%s'. The installed collection won't be picked up in an Ansible "
"run." % (to_text(output_path), to_text(":".join(collections_path))))
if os.path.split(output_path)[1] != 'ansible_collections':
output_path = os.path.join(output_path, 'ansible_collections')
b_output_path = to_bytes(output_path, errors='surrogate_or_strict')
if not os.path.exists(b_output_path):
os.makedirs(b_output_path)
install_collections(collection_requirements, output_path, servers, (not ignore_certs), ignore_errors,
no_deps, force, force_deps)
return 0
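# For reference (names are made up): the positional parsing above splits
# 'my_ns.my_col:>=1.0.0' via partition(':') into the requirement tuple
# ('my_ns.my_col', '>=1.0.0', None), while a bare 'my_ns.my_col' becomes
# ('my_ns.my_col', '*', None).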
role_file = context.CLIARGS['role_file']
if not context.CLIARGS['args'] and role_file is None:
@ -520,6 +738,18 @@ class GalaxyCLI(CLI):
raise AnsibleOptionsError("- None of the provided paths was usable. Please specify a valid path with --roles-path")
return 0
def execute_publish(self):
"""
Publish a collection into Ansible Galaxy.
"""
api_key = context.CLIARGS['api_key'] or GalaxyToken().get()
api_server = context.CLIARGS['api_server']
collection_path = GalaxyCLI._resolve_path(context.CLIARGS['args'])
ignore_certs = context.CLIARGS['ignore_certs']
wait = context.CLIARGS['wait']
publish_collection(collection_path, api_server, api_key, ignore_certs, wait)
def execute_search(self):
''' searches for roles on the Ansible Galaxy server'''
page_size = 1000

@ -1310,9 +1310,9 @@ GALAXY_IGNORE_CERTS:
- {key: ignore_certs, section: galaxy}
type: boolean
GALAXY_ROLE_SKELETON:
name: Galaxy skeleton direcotry
name: Galaxy role or collection skeleton directory
default:
description: Role skeleton directory to use as a template for the ``init`` action in ``ansible-galaxy``, same as ``--role-skeleton``.
description: Role or collection skeleton directory to use as a template for the ``init`` action in ``ansible-galaxy``, same as ``--role-skeleton``.
env: [{name: ANSIBLE_GALAXY_ROLE_SKELETON}]
ini:
- {key: role_skeleton, section: galaxy}
@ -1320,7 +1320,7 @@ GALAXY_ROLE_SKELETON:
GALAXY_ROLE_SKELETON_IGNORE:
name: Galaxy skeleton ignore
default: ["^.git$", "^.*/.git_keep$"]
description: patterns of files to ignore inside a galaxy role skeleton directory
description: patterns of files to ignore inside a Galaxy role or collection skeleton directory
env: [{name: ANSIBLE_GALAXY_ROLE_SKELETON_IGNORE}]
ini:
- {key: role_skeleton_ignore, section: galaxy}

@ -47,7 +47,10 @@ class Galaxy(object):
# load data path for resource usage
this_dir, this_filename = os.path.split(__file__)
type_path = context.CLIARGS.get('role_type', "default")
type_path = context.CLIARGS.get('role_type', 'default')
if type_path == 'default':
type_path = os.path.join(type_path, context.CLIARGS.get('type'))
self.DATA_PATH = os.path.join(this_dir, 'data', type_path)
@property

@ -0,0 +1,951 @@
# Copyright: (c) 2019, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import fnmatch
import json
import operator
import os
import shutil
import tarfile
import tempfile
import time
import uuid
import yaml
from contextlib import contextmanager
from distutils.version import LooseVersion, StrictVersion
from hashlib import sha256
from io import BytesIO
from yaml.error import YAMLError
import ansible.constants as C
from ansible.errors import AnsibleError
from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.module_utils import six
from ansible.utils.display import Display
from ansible.utils.hashing import secure_hash, secure_hash_s
from ansible.module_utils.urls import open_url
urlparse = six.moves.urllib.parse.urlparse
urllib_error = six.moves.urllib.error
display = Display()
MANIFEST_FORMAT = 1
@six.python_2_unicode_compatible
class CollectionRequirement:
_FILE_MAPPING = [(b'MANIFEST.json', 'manifest_file'), (b'FILES.json', 'files_file')]
def __init__(self, namespace, name, b_path, source, versions, requirement, force, parent=None, validate_certs=True,
metadata=None, files=None, skip=False):
"""
Represents a collection requirement, the versions that are available to be installed as well as any
dependencies the collection has.
:param namespace: The collection namespace.
:param name: The collection name.
:param b_path: Byte str of the path to the collection tarball if it has already been downloaded.
:param source: The Galaxy server URL to download if the collection is from Galaxy.
:param versions: A list of versions of the collection that are available.
:param requirement: The version requirement string used to verify the list of versions fit the requirements.
:param force: Whether the force flag was applied to the collection.
:param parent: The name of the parent the collection is a dependency of.
:param validate_certs: Whether to validate the Galaxy server certificate.
:param metadata: The collection metadata dict if it has already been retrieved.
:param files: The files that exist inside the collection. This is based on the FILES.json file inside the
collection artifact.
:param skip: Whether to skip installing the collection. Should be set if the collection is already installed
and force is not set.
"""
self.namespace = namespace
self.name = name
self.b_path = b_path
self.source = source
self.versions = set(versions)
self.force = force
self.skip = skip
self.required_by = []
self._validate_certs = validate_certs
self._metadata = metadata
self._files = files
self._galaxy_info = None
self.add_requirement(parent, requirement)
def __str__(self):
return to_text("%s.%s" % (self.namespace, self.name))
@property
def latest_version(self):
try:
return max([v for v in self.versions if v != '*'], key=LooseVersion)
except ValueError: # ValueError: max() arg is an empty sequence
return '*'
@property
def dependencies(self):
if self._metadata:
return self._metadata['dependencies']
elif len(self.versions) > 1:
return None
self._get_metadata()
return self._metadata['dependencies']
def add_requirement(self, parent, requirement):
self.required_by.append((parent, requirement))
new_versions = set(v for v in self.versions if self._meets_requirements(v, requirement, parent))
if len(new_versions) == 0:
if self.skip:
force_flag = '--force-with-deps' if parent else '--force'
version = self.latest_version if self.latest_version != '*' else 'unknown'
msg = "Cannot meet requirement %s:%s as it is already installed at version '%s'. Use %s to overwrite" \
% (str(self), requirement, version, force_flag)
raise AnsibleError(msg)
elif parent is None:
msg = "Cannot meet requirement %s for dependency %s" % (requirement, str(self))
else:
msg = "Cannot meet dependency requirement '%s:%s' for collection %s" % (str(self), requirement, parent)
collection_source = to_text(self.b_path, nonstring='passthru') or self.source
req_by = "\n".join(
"\t%s - '%s:%s'" % (to_text(p) if p else 'base', str(self), r)
for p, r in self.required_by
)
versions = ", ".join(sorted(self.versions, key=LooseVersion))
raise AnsibleError(
"%s from source '%s'. Available versions before last requirement added: %s\nRequirements from:\n%s"
% (msg, collection_source, versions, req_by)
)
self.versions = new_versions
def install(self, path, b_temp_path):
if self.skip:
display.display("Skipping '%s' as it is already installed" % str(self))
return
# Install if it is not
collection_path = os.path.join(path, self.namespace, self.name)
b_collection_path = to_bytes(collection_path, errors='surrogate_or_strict')
display.display("Installing '%s:%s' to '%s'" % (str(self), self.latest_version,
collection_path))
if self.b_path is None:
download_url = self._galaxy_info['download_url']
artifact_hash = self._galaxy_info['artifact']['sha256']
self.b_path = _download_file(download_url, b_temp_path, artifact_hash, self._validate_certs)
if os.path.exists(b_collection_path):
shutil.rmtree(b_collection_path)
os.makedirs(b_collection_path)
with tarfile.open(self.b_path, mode='r') as collection_tar:
files_member_obj = collection_tar.getmember('FILES.json')
with _tarfile_extract(collection_tar, files_member_obj) as files_obj:
files = json.loads(to_text(files_obj.read(), errors='surrogate_or_strict'))
_extract_tar_file(collection_tar, 'MANIFEST.json', b_collection_path, b_temp_path)
_extract_tar_file(collection_tar, 'FILES.json', b_collection_path, b_temp_path)
for file_info in files['files']:
file_name = file_info['name']
if file_name == '.':
continue
if file_info['ftype'] == 'file':
_extract_tar_file(collection_tar, file_name, b_collection_path, b_temp_path,
expected_hash=file_info['chksum_sha256'])
else:
os.makedirs(os.path.join(b_collection_path, to_bytes(file_name, errors='surrogate_or_strict')))
def set_latest_version(self):
self.versions = set([self.latest_version])
self._get_metadata()
def _get_metadata(self):
if self._metadata:
return
n_collection_url = _urljoin(self.source, 'api', 'v2', 'collections', self.namespace, self.name, 'versions',
self.latest_version)
details = json.load(open_url(n_collection_url, validate_certs=self._validate_certs))
self._galaxy_info = details
self._metadata = details['metadata']
def _meets_requirements(self, version, requirements, parent):
"""
Supported version identifiers are '==', '!=', '>', '>=', '<', '<=', and '*'. Multiple requirements are delimited by ','.
"""
op_map = {
'!=': operator.ne,
'==': operator.eq,
'=': operator.eq,
'>=': operator.ge,
'>': operator.gt,
'<=': operator.le,
'<': operator.lt,
}
for req in list(requirements.split(',')):
op_pos = 2 if len(req) > 1 and req[1] == '=' else 1
op = op_map.get(req[:op_pos])
requirement = req[op_pos:]
if not op:
requirement = req
op = operator.eq
# In the case we are checking a new requirement on a base requirement (parent != None) we can't accept
# version as '*' (unknown version) unless the requirement is also '*'.
if parent and version == '*' and requirement != '*':
break
elif requirement == '*' or version == '*':
continue
if not op(LooseVersion(version), LooseVersion(requirement)):
break
else:
return True
# The loop was broken early, it does not meet all the requirements
return False
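# Illustration (not part of the module): a condensed version of the requirement
# semantics implemented above, reusing the same operator table; the version
# strings are made up.
def _demo_meets_requirements():
    op_map = {'!=': operator.ne, '==': operator.eq, '=': operator.eq,
              '>=': operator.ge, '>': operator.gt, '<=': operator.le, '<': operator.lt}

    def meets(version, requirements):
        for req in requirements.split(','):
            op_pos = 2 if len(req) > 1 and req[1] == '=' else 1
            op = op_map.get(req[:op_pos], operator.eq)
            requirement = req[op_pos:] if req[:op_pos] in op_map else req
            if requirement == '*' or version == '*':
                continue
            if not op(LooseVersion(version), LooseVersion(requirement)):
                return False
        return True

    assert meets('2.1.0', '>=2.0.0,<3.0.0')  # inside the range
    assert not meets('1.9.0', '>=2.0.0')     # below the minimum
    assert meets('1.0.0', '*')               # wildcard matches anything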
@staticmethod
def from_tar(b_path, validate_certs, force, parent=None):
if not tarfile.is_tarfile(b_path):
raise AnsibleError("Collection artifact at '%s' is not a valid tar file." % to_native(b_path))
info = {}
with tarfile.open(b_path, mode='r') as collection_tar:
for b_member_name, property_name in CollectionRequirement._FILE_MAPPING:
n_member_name = to_native(b_member_name)
try:
member = collection_tar.getmember(n_member_name)
except KeyError:
raise AnsibleError("Collection at '%s' does not contain the required file %s."
% (to_native(b_path), n_member_name))
with _tarfile_extract(collection_tar, member) as member_obj:
try:
info[property_name] = json.loads(to_text(member_obj.read(), errors='surrogate_or_strict'))
except ValueError:
raise AnsibleError("Collection tar file member %s does not contain a valid json string."
% n_member_name)
meta = info['manifest_file']['collection_info']
files = info['files_file']['files']
namespace = meta['namespace']
name = meta['name']
version = meta['version']
return CollectionRequirement(namespace, name, b_path, None, [version], version, force, parent=parent,
validate_certs=validate_certs, metadata=meta, files=files)
@staticmethod
def from_path(b_path, validate_certs, force, parent=None):
info = {}
for b_file_name, property_name in CollectionRequirement._FILE_MAPPING:
b_file_path = os.path.join(b_path, b_file_name)
if not os.path.exists(b_file_path):
continue
with open(b_file_path, 'rb') as file_obj:
try:
info[property_name] = json.loads(to_text(file_obj.read(), errors='surrogate_or_strict'))
except ValueError:
raise AnsibleError("Collection file at '%s' does not contain a valid json string."
% to_native(b_file_path))
if 'manifest_file' in info:
meta = info['manifest_file']['collection_info']
else:
display.warning("Collection at '%s' does not have a MANIFEST.json file, cannot detect version."
% to_text(b_path))
parent_dir, name = os.path.split(to_text(b_path, errors='surrogate_or_strict'))
namespace = os.path.split(parent_dir)[1]
meta = {
'namespace': namespace,
'name': name,
'version': '*',
'dependencies': {},
}
namespace = meta['namespace']
name = meta['name']
version = meta['version']
files = info.get('files_file', {}).get('files', {})
return CollectionRequirement(namespace, name, b_path, None, [version], version, force, parent=parent,
validate_certs=validate_certs, metadata=meta, files=files, skip=True)
@staticmethod
def from_name(collection, servers, requirement, validate_certs, force, parent=None):
namespace, name = collection.split('.', 1)
galaxy_info = None
galaxy_meta = None
for server in servers:
collection_url_paths = [server, 'api', 'v2', 'collections', namespace, name, 'versions']
is_single = False
if not (requirement == '*' or requirement.startswith('<') or requirement.startswith('>') or
requirement.startswith('!=')):
if requirement.startswith('='):
requirement = requirement.lstrip('=')
collection_url_paths.append(requirement)
is_single = True
n_collection_url = _urljoin(*collection_url_paths)
try:
resp = json.load(open_url(n_collection_url, validate_certs=validate_certs))
except urllib_error.HTTPError as err:
if err.code == 404:
continue
raise
if is_single:
galaxy_info = resp
galaxy_meta = resp['metadata']
versions = [resp['version']]
else:
versions = []
while True:
# Galaxy supports semver but ansible-galaxy does not. We ignore any versions that don't match
# StrictVersion (x.y.z) and only support pre-releases if an explicit version was set (done above).
versions += [v['version'] for v in resp['results'] if StrictVersion.version_re.match(v['version'])]
if resp['next'] is None:
break
resp = json.load(open_url(to_native(resp['next'], errors='surrogate_or_strict'),
validate_certs=validate_certs))
break
else:
raise AnsibleError("Failed to find collection %s:%s" % (collection, requirement))
req = CollectionRequirement(namespace, name, None, server, versions, requirement, force, parent=parent,
validate_certs=validate_certs, metadata=galaxy_meta)
req._galaxy_info = galaxy_info
return req
def build_collection(collection_path, output_path, force):
"""
Creates the Ansible collection artifact in a .tar.gz file.
:param collection_path: The path to the collection to build. This should be the directory that contains the
galaxy.yml file.
:param output_path: The path to create the collection build artifact. This should be a directory.
:param force: Whether to overwrite an existing collection build artifact or fail.
:return: The path to the collection build artifact.
"""
b_collection_path = to_bytes(collection_path, errors='surrogate_or_strict')
b_galaxy_path = os.path.join(b_collection_path, b'galaxy.yml')
if not os.path.exists(b_galaxy_path):
raise AnsibleError("The collection galaxy.yml path '%s' does not exist." % to_native(b_galaxy_path))
collection_meta = _get_galaxy_yml(b_galaxy_path)
file_manifest = _build_files_manifest(b_collection_path)
collection_manifest = _build_manifest(**collection_meta)
collection_output = os.path.join(output_path, "%s-%s-%s.tar.gz" % (collection_meta['namespace'],
collection_meta['name'],
collection_meta['version']))
b_collection_output = to_bytes(collection_output, errors='surrogate_or_strict')
if os.path.exists(b_collection_output):
if os.path.isdir(b_collection_output):
raise AnsibleError("The output collection artifact '%s' already exists, "
"but is a directory - aborting" % to_native(collection_output))
elif not force:
raise AnsibleError("The file '%s' already exists. You can use --force to re-create "
"the collection artifact." % to_native(collection_output))
_build_collection_tar(b_collection_path, b_collection_output, collection_manifest, file_manifest)
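# Illustrative call (not part of the module), using hypothetical paths:
#
#   build_collection('./my_ns/my_col', './releases', force=False)
#   # -> ./releases/my_ns-my_col-<version>.tar.gz, refusing to overwrite an
#   #    existing artifact unless force=True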
def publish_collection(collection_path, server, key, ignore_certs, wait):
"""
Publish an Ansible collection tarball into an Ansible Galaxy server.
:param collection_path: The path to the collection tarball to publish.
:param server: A native string of the Ansible Galaxy server to publish to.
:param key: The API key to use for authorization.
:param ignore_certs: Whether to ignore certificate validation when interacting with the server.
:param wait: Whether to wait for the collection import to complete on the server before returning.
"""
b_collection_path = to_bytes(collection_path, errors='surrogate_or_strict')
if not os.path.exists(b_collection_path):
raise AnsibleError("The collection path specified '%s' does not exist." % to_native(collection_path))
elif not tarfile.is_tarfile(b_collection_path):
raise AnsibleError("The collection path specified '%s' is not a tarball, use 'ansible-galaxy collection "
"build' to create a proper release artifact." % to_native(collection_path))
display.display("Publishing collection artifact '%s' to %s" % (collection_path, server))
n_url = _urljoin(server, 'api', 'v2', 'collections')
data, content_type = _get_mime_data(b_collection_path)
headers = {
'Content-type': content_type,
'Content-length': len(data),
}
if key:
headers['Authorization'] = "Token %s" % key
validate_certs = not ignore_certs
try:
resp = json.load(open_url(n_url, data=data, headers=headers, method='POST', validate_certs=validate_certs))
except urllib_error.HTTPError as err:
try:
err_info = json.load(err)
except (AttributeError, ValueError):
err_info = {}
code = to_native(err_info.get('code', 'Unknown'))
message = to_native(err_info.get('message', 'Unknown error returned by Galaxy server.'))
raise AnsibleError("Error when publishing collection (HTTP Code: %d, Message: %s Code: %s)"
% (err.code, message, code))
display.vvv("Collection has been pushed to the Galaxy server %s" % server)
import_uri = resp['task']
if wait:
_wait_import(import_uri, key, validate_certs)
display.display("Collection has been successfully published to the Galaxy server")
else:
display.display("Collection has been pushed to the Galaxy server, not waiting until import has completed "
"due to --no-wait being set. Import task results can be found at %s" % import_uri)
def install_collections(collections, output_path, servers, validate_certs, ignore_errors, no_deps, force, force_deps):
"""
Install Ansible collections to the path specified.
:param collections: The collections to install, should be a list of tuples with (name, requirement, Galaxy server).
:param output_path: The path to install the collections to.
:param servers: A list of Galaxy servers to query when searching for a collection.
:param validate_certs: Whether to validate the Galaxy server certificates.
:param ignore_errors: Whether to ignore any errors when installing the collection.
:param no_deps: Ignore any collection dependencies and only install the base requirements.
:param force: Re-install a collection if it has already been installed.
:param force_deps: Re-install a collection as well as its dependencies if they have already been installed.
"""
existing_collections = _find_existing_collections(output_path)
with _tempdir() as b_temp_path:
dependency_map = _build_dependency_map(collections, existing_collections, b_temp_path, servers, validate_certs,
force, force_deps, no_deps)
for collection in dependency_map.values():
try:
collection.install(output_path, b_temp_path)
except AnsibleError as err:
if ignore_errors:
display.warning("Failed to install collection %s but skipping due to --ignore-errors being set. "
"Error: %s" % (str(collection), to_text(err)))
else:
raise
def parse_collections_requirements_file(requirements_file):
"""
Parses an Ansible requirements.yml file and returns all the collections defined in it. The returned value can be
used with install_collections(). The requirements file is in the form:
---
collections:
- namespace.collection
- name: namespace.collection
version: version identifier, multiple identifiers are separated by ','
source: the URL or predefined source name in ~/.ansible_galaxy to pull the collection from
:param requirements_file: The path to the requirements file.
:return: A list of tuples (name, version, source).
"""
collection_info = []
b_requirements_file = to_bytes(requirements_file, errors='surrogate_or_strict')
if not os.path.exists(b_requirements_file):
raise AnsibleError("The requirements file '%s' does not exist." % to_native(requirements_file))
display.vvv("Reading collection requirement file at '%s'" % requirements_file)
with open(b_requirements_file, 'rb') as req_obj:
try:
requirements = yaml.safe_load(req_obj)
except YAMLError as err:
raise AnsibleError("Failed to parse the collection requirements yml at '%s' with the following error:\n%s"
% (to_native(requirements_file), to_native(err)))
if not isinstance(requirements, dict) or 'collections' not in requirements:
# TODO: Link to documentation page that documents the requirements.yml format for collections.
raise AnsibleError("Expecting collections requirements file to be a dict with the key "
"collections that contains a list of collections to install.")
for collection_req in requirements['collections']:
if isinstance(collection_req, dict):
req_name = collection_req.get('name', None)
if req_name is None:
raise AnsibleError("Collections requirement entry should contain the key name.")
req_version = collection_req.get('version', '*')
req_source = collection_req.get('source', None)
collection_info.append((req_name, req_version, req_source))
else:
collection_info.append((collection_req, '*', None))
return collection_info
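# Illustration (not part of the module): a requirements.yml matching the
# format documented above and the tuples it parses into; names are made up.
#
#   collections:
#   - my_ns.collection_a
#   - name: my_ns.collection_b
#     version: '>=1.0.0,<2.0.0'
#
# parse_collections_requirements_file() would return:
#   [('my_ns.collection_a', '*', None), ('my_ns.collection_b', '>=1.0.0,<2.0.0', None)]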
@contextmanager
def _tempdir():
b_temp_path = tempfile.mkdtemp(dir=to_bytes(C.DEFAULT_LOCAL_TMP, errors='surrogate_or_strict'))
yield b_temp_path
shutil.rmtree(b_temp_path)
@contextmanager
def _tarfile_extract(tar, member):
tar_obj = tar.extractfile(member)
yield tar_obj
tar_obj.close()
def _get_galaxy_yml(b_galaxy_yml_path):
mandatory_keys = frozenset(['namespace', 'name', 'version', 'authors', 'readme'])
optional_strings = ('description', 'repository', 'documentation', 'homepage', 'issues', 'license_file')
optional_lists = ('license', 'tags', 'authors')  # authors isn't optional but this ensures it is a list
optional_dicts = ('dependencies',)
all_keys = frozenset(list(mandatory_keys) + list(optional_strings) + list(optional_lists) + list(optional_dicts))
try:
with open(b_galaxy_yml_path, 'rb') as g_yaml:
galaxy_yml = yaml.safe_load(g_yaml)
except YAMLError as err:
raise AnsibleError("Failed to parse the galaxy.yml at '%s' with the following error:\n%s"
% (to_native(b_galaxy_yml_path), to_native(err)))
set_keys = set(galaxy_yml.keys())
missing_keys = mandatory_keys.difference(set_keys)
if missing_keys:
raise AnsibleError("The collection galaxy.yml at '%s' is missing the following mandatory keys: %s"
% (to_native(b_galaxy_yml_path), ", ".join(sorted(missing_keys))))
extra_keys = set_keys.difference(all_keys)
if len(extra_keys) > 0:
display.warning("Found unknown keys in collection galaxy.yml at '%s': %s"
% (to_text(b_galaxy_yml_path), ", ".join(extra_keys)))
# Add the defaults if they have not been set
for optional_string in optional_strings:
if optional_string not in galaxy_yml:
galaxy_yml[optional_string] = None
for optional_list in optional_lists:
list_val = galaxy_yml.get(optional_list, None)
if list_val is None:
galaxy_yml[optional_list] = []
elif not isinstance(list_val, list):
galaxy_yml[optional_list] = [list_val]
for optional_dict in optional_dicts:
if optional_dict not in galaxy_yml:
galaxy_yml[optional_dict] = {}
# license is a builtin var in Python, to avoid confusion we just rename it to license_ids
galaxy_yml['license_ids'] = galaxy_yml['license']
del galaxy_yml['license']
return galaxy_yml
def _build_files_manifest(b_collection_path):
b_ignore_files = frozenset([b'*.pyc', b'*.retry'])
b_ignore_dirs = frozenset([b'CVS', b'.bzr', b'.hg', b'.git', b'.svn', b'__pycache__', b'.tox'])
entry_template = {
'name': None,
'ftype': None,
'chksum_type': None,
'chksum_sha256': None,
'format': MANIFEST_FORMAT
}
manifest = {
'files': [
{
'name': '.',
'ftype': 'dir',
'chksum_type': None,
'chksum_sha256': None,
'format': MANIFEST_FORMAT,
},
],
'format': MANIFEST_FORMAT,
}
def _walk(b_path, b_top_level_dir):
for b_item in os.listdir(b_path):
b_abs_path = os.path.join(b_path, b_item)
b_rel_base_dir = b'' if b_path == b_top_level_dir else b_path[len(b_top_level_dir) + 1:]
rel_path = to_text(os.path.join(b_rel_base_dir, b_item), errors='surrogate_or_strict')
if os.path.isdir(b_abs_path):
if b_item in b_ignore_dirs:
display.vvv("Skipping '%s' for collection build" % to_text(b_abs_path))
continue
if os.path.islink(b_abs_path):
b_link_target = os.path.realpath(b_abs_path)
if not b_link_target.startswith(b_top_level_dir):
display.warning("Skipping '%s' as it is a symbolic link to a directory outside the collection"
% to_text(b_abs_path))
continue
manifest_entry = entry_template.copy()
manifest_entry['name'] = rel_path
manifest_entry['ftype'] = 'dir'
manifest['files'].append(manifest_entry)
_walk(b_abs_path, b_top_level_dir)
else:
if b_item == b'galaxy.yml':
continue
elif any(fnmatch.fnmatch(b_item, b_pattern) for b_pattern in b_ignore_files):
display.vvv("Skipping '%s' for collection build" % to_text(b_abs_path))
continue
manifest_entry = entry_template.copy()
manifest_entry['name'] = rel_path
manifest_entry['ftype'] = 'file'
manifest_entry['chksum_type'] = 'sha256'
manifest_entry['chksum_sha256'] = secure_hash(b_abs_path, hash_func=sha256)
manifest['files'].append(manifest_entry)
_walk(b_collection_path, b_collection_path)
return manifest
def _build_manifest(namespace, name, version, authors, readme, tags, description, license_ids, license_file,
dependencies, repository, documentation, homepage, issues, **kwargs):
manifest = {
'collection_info': {
'namespace': namespace,
'name': name,
'version': version,
'authors': authors,
'readme': readme,
'tags': tags,
'description': description,
'license': license_ids,
'license_file': license_file,
'dependencies': dependencies,
'repository': repository,
'documentation': documentation,
'homepage': homepage,
'issues': issues,
},
'file_manifest_file': {
'name': 'FILES.json',
'ftype': 'file',
'chksum_type': 'sha256',
'chksum_sha256': None, # Filled out in _build_collection_tar
'format': MANIFEST_FORMAT
},
'format': MANIFEST_FORMAT,
}
return manifest
def _build_collection_tar(b_collection_path, b_tar_path, collection_manifest, file_manifest):
files_manifest_json = to_bytes(json.dumps(file_manifest, indent=True), errors='surrogate_or_strict')
collection_manifest['file_manifest_file']['chksum_sha256'] = secure_hash_s(files_manifest_json, hash_func=sha256)
collection_manifest_json = to_bytes(json.dumps(collection_manifest, indent=True), errors='surrogate_or_strict')
with _tempdir() as b_temp_path:
b_tar_filepath = os.path.join(b_temp_path, os.path.basename(b_tar_path))
with tarfile.open(b_tar_filepath, mode='w:gz') as tar_file:
# Add the MANIFEST.json and FILES.json file to the archive
for name, b in [('MANIFEST.json', collection_manifest_json), ('FILES.json', files_manifest_json)]:
b_io = BytesIO(b)
tar_info = tarfile.TarInfo(name)
tar_info.size = len(b)
tar_info.mtime = time.time()
tar_info.mode = 0o0644
tar_file.addfile(tarinfo=tar_info, fileobj=b_io)
for file_info in file_manifest['files']:
if file_info['name'] == '.':
continue
# arcname expects a native string, cannot be bytes
filename = to_native(file_info['name'], errors='surrogate_or_strict')
b_src_path = os.path.join(b_collection_path, to_bytes(filename, errors='surrogate_or_strict'))
def reset_stat(tarinfo):
tarinfo.mode = 0o0755 if tarinfo.isdir() else 0o0644
tarinfo.uid = tarinfo.gid = 0
tarinfo.uname = tarinfo.gname = ''
return tarinfo
tar_file.add(os.path.realpath(b_src_path), arcname=filename, recursive=False, filter=reset_stat)
shutil.copy(b_tar_filepath, b_tar_path)
collection_name = "%s.%s" % (collection_manifest['collection_info']['namespace'],
collection_manifest['collection_info']['name'])
display.display('Created collection for %s at %s' % (collection_name, to_text(b_tar_path)))
def _get_mime_data(b_collection_path):
with open(b_collection_path, 'rb') as collection_tar:
data = collection_tar.read()
boundary = '--------------------------%s' % uuid.uuid4().hex
b_file_name = os.path.basename(b_collection_path)
part_boundary = b"--" + to_bytes(boundary, errors='surrogate_or_strict')
form = [
part_boundary,
b"Content-Disposition: form-data; name=\"sha256\"",
to_bytes(secure_hash_s(data), errors='surrogate_or_strict'),
part_boundary,
b"Content-Disposition: file; name=\"file\"; filename=\"%s\"" % b_file_name,
b"Content-Type: application/octet-stream",
b"",
data,
b"%s--" % part_boundary,
]
content_type = 'multipart/form-data; boundary=%s' % boundary
return b"\r\n".join(form), content_type
def _wait_import(task_url, key, validate_certs):
headers = {}
if key:
headers['Authorization'] = "Token %s" % key
display.vvv('Waiting until galaxy import task %s has completed' % task_url)
wait = 2
while True:
resp = json.load(open_url(to_native(task_url, errors='surrogate_or_strict'), headers=headers, method='GET',
validate_certs=validate_certs))
if resp.get('finished_at', None):
break
elif wait > 20:
# We try for a maximum of ~60 seconds before giving up in case something has gone wrong on the server end.
raise AnsibleError("Timeout while waiting for the Galaxy import process to finish, check progress at '%s'"
% to_native(task_url))
status = resp.get('status', 'waiting')
display.vvv('Galaxy import process has a status of %s, wait %d seconds before trying again' % (status, wait))
time.sleep(wait)
wait *= 1.5 # poor man's exponential backoff algo so we don't flood the Galaxy API.
for message in resp.get('messages', []):
level = message['level']
if level == 'error':
display.error("Galaxy import error message: %s" % message['message'])
elif level == 'warning':
display.warning("Galaxy import warning message: %s" % message['message'])
else:
display.vvv("Galaxy import message: %s - %s" % (level, message['message']))
if resp['state'] == 'failed':
code = to_native(resp['error'].get('code', 'UNKNOWN'))
description = to_native(resp['error'].get('description', "Unknown error, see %s for more details" % task_url))
raise AnsibleError("Galaxy import process failed: %s (Code: %s)" % (description, code))
def _find_existing_collections(path):
collections = []
b_path = to_bytes(path, errors='surrogate_or_strict')
for b_namespace in os.listdir(b_path):
b_namespace_path = os.path.join(b_path, b_namespace)
if os.path.isfile(b_namespace_path):
continue
for b_collection in os.listdir(b_namespace_path):
b_collection_path = os.path.join(b_namespace_path, b_collection)
if os.path.isdir(b_collection_path):
req = CollectionRequirement.from_path(b_collection_path, True, False)
display.vvv("Found installed collection %s:%s at '%s'" % (str(req), req.latest_version,
to_text(b_collection_path)))
collections.append(req)
return collections
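
The scan above expects the standard two-level `<namespace>/<collection>` layout under the given path (hypothetical names below); plain files at the namespace level are skipped:

```
path/                      # a configured collections path
├── my_ns/                 # namespace directory
│   └── my_coll/           # collection directory -> CollectionRequirement
└── stray_file.txt         # plain files at the namespace level are skipped
```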
def _build_dependency_map(collections, existing_collections, b_temp_path, servers, validate_certs, force, force_deps,
no_deps):
dependency_map = {}
# First build the dependency map on the actual requirements
for name, version, source in collections:
_get_collection_info(dependency_map, existing_collections, name, version, source, b_temp_path, servers,
validate_certs, (force or force_deps))
checked_parents = set([str(c) for c in dependency_map.values() if c.skip])
while len(dependency_map) != len(checked_parents):
while not no_deps: # Only parse dependencies if no_deps was not set
parents_to_check = set(dependency_map.keys()).difference(checked_parents)
deps_exhausted = True
for parent in parents_to_check:
parent_info = dependency_map[parent]
if parent_info.dependencies:
deps_exhausted = False
for dep_name, dep_requirement in parent_info.dependencies.items():
_get_collection_info(dependency_map, existing_collections, dep_name, dep_requirement,
parent_info.source, b_temp_path, servers, validate_certs, force_deps,
parent=parent)
checked_parents.add(parent)
# No extra dependencies were resolved, exit loop
if deps_exhausted:
break
# Now that the deps are resolved as far as possible, select the latest version for collections where
# multiple versions were found and go from there
deps_not_checked = set(dependency_map.keys()).difference(checked_parents)
for collection in deps_not_checked:
dependency_map[collection].set_latest_version()
if no_deps or len(dependency_map[collection].dependencies) == 0:
checked_parents.add(collection)
return dependency_map
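
The loop above is effectively a fixed-point iteration over the dependency map; a minimal sketch with plain dicts (hypothetical collection names, no Galaxy lookups) illustrates the idea:

```python
# Hypothetical dependency data standing in for Galaxy lookups: keep adding
# transitive dependencies until no unchecked parents remain.
deps = {
    'ns.app': {'ns.util': '*'},
    'ns.util': {'ns.base': '>=1.0.0'},
    'ns.base': {},
}

dependency_map = {'ns.app': deps['ns.app']}  # the requested collections
checked = set()
while set(dependency_map) - checked:
    for parent in set(dependency_map) - checked:
        for dep_name in dependency_map[parent]:
            dependency_map.setdefault(dep_name, deps[dep_name])
        checked.add(parent)

assert sorted(dependency_map) == ['ns.app', 'ns.base', 'ns.util']
```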
def _get_collection_info(dep_map, existing_collections, collection, requirement, source, b_temp_path, server_list,
validate_certs, force, parent=None):
dep_msg = ""
if parent:
dep_msg = " - as dependency of %s" % parent
display.vvv("Processing requirement collection '%s'%s" % (to_text(collection), dep_msg))
b_tar_path = None
if os.path.isfile(to_bytes(collection, errors='surrogate_or_strict')):
display.vvvv("Collection requirement '%s' is a tar artifact" % to_text(collection))
b_tar_path = to_bytes(collection, errors='surrogate_or_strict')
elif urlparse(collection).scheme:
display.vvvv("Collection requirement '%s' is a URL to a tar artifact" % collection)
b_tar_path = _download_file(collection, b_temp_path, None, validate_certs)
if b_tar_path:
req = CollectionRequirement.from_tar(b_tar_path, validate_certs, force, parent=parent)
collection_name = str(req)
if collection_name in dep_map:
collection_info = dep_map[collection_name]
collection_info.add_requirement(None, req.latest_version)
else:
collection_info = req
else:
display.vvvv("Collection requirement '%s' is the name of a collection" % collection)
if collection in dep_map:
collection_info = dep_map[collection]
collection_info.add_requirement(parent, requirement)
else:
servers = [source] if source else server_list
collection_info = CollectionRequirement.from_name(collection, servers, requirement, validate_certs, force,
parent=parent)
existing = [c for c in existing_collections if str(c) == str(collection_info)]
if existing and not collection_info.force:
# Test that the installed collection fits the requirement
existing[0].add_requirement(str(collection_info), requirement)
collection_info = existing[0]
dep_map[str(collection_info)] = collection_info
def _urljoin(*args):
return '/'.join(to_native(a, errors='surrogate_or_strict').rstrip('/') for a in args + ('',))
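
A usage illustration (hypothetical URL): every segment is stripped of trailing slashes and the result always ends with `/`, which is the form the Galaxy v2 endpoints used in these tests expect:

```python
# Simplified, self-contained version of _urljoin for illustration only
# (drops the to_native() byte/text normalisation of the real helper).
def _urljoin(*args):
    return '/'.join(a.rstrip('/') for a in args + ('',))

assert _urljoin('https://galaxy.ansible.com/api/', 'v2', 'collections') == \
    'https://galaxy.ansible.com/api/v2/collections/'
```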
def _download_file(url, b_path, expected_hash, validate_certs):
bufsize = 65536
digest = sha256()
urlsplit = os.path.splitext(to_text(url.rsplit('/', 1)[1]))
b_file_name = to_bytes(urlsplit[0], errors='surrogate_or_strict')
b_file_ext = to_bytes(urlsplit[1], errors='surrogate_or_strict')
b_file_path = tempfile.NamedTemporaryFile(dir=b_path, prefix=b_file_name, suffix=b_file_ext, delete=False).name
display.vvv("Downloading %s to %s" % (url, to_text(b_path)))
resp = open_url(to_native(url, errors='surrogate_or_strict'), validate_certs=validate_certs)
with open(b_file_path, 'wb') as download_file:
data = resp.read(bufsize)
while data:
digest.update(data)
download_file.write(data)
data = resp.read(bufsize)
if expected_hash:
actual_hash = digest.hexdigest()
display.vvvv("Validating downloaded file hash %s with expected hash %s" % (actual_hash, expected_hash))
if expected_hash != actual_hash:
raise AnsibleError("Mismatch artifact hash with downloaded file")
return b_file_path
def _extract_tar_file(tar, filename, b_dest, b_temp_path, expected_hash=None):
n_filename = to_native(filename, errors='surrogate_or_strict')
try:
member = tar.getmember(n_filename)
except KeyError:
raise AnsibleError("Collection tar at '%s' does not contain the expected file '%s'." % (to_native(tar.name),
n_filename))
with tempfile.NamedTemporaryFile(dir=b_temp_path, delete=False) as tmpfile_obj:
bufsize = 65536
sha256_digest = sha256()
with _tarfile_extract(tar, member) as tar_obj:
data = tar_obj.read(bufsize)
while data:
tmpfile_obj.write(data)
tmpfile_obj.flush()
sha256_digest.update(data)
data = tar_obj.read(bufsize)
actual_hash = sha256_digest.hexdigest()
if expected_hash and actual_hash != expected_hash:
raise AnsibleError("Checksum mismatch for '%s' inside collection at '%s'"
% (n_filename, to_native(tar.name)))
b_dest_filepath = os.path.join(b_dest, to_bytes(filename, errors='surrogate_or_strict'))
b_parent_dir = os.path.split(b_dest_filepath)[0]
if not os.path.exists(b_parent_dir):
# Seems like Galaxy does not validate if all file entries have a corresponding dir ftype entry. This check
# makes sure we create the parent directory even if it wasn't set in the metadata.
os.makedirs(b_parent_dir)
shutil.move(to_bytes(tmpfile_obj.name, errors='surrogate_or_strict'), b_dest_filepath)

@ -0,0 +1,3 @@
# Ansible Collection - {{ namespace }}.{{ collection_name }}
Documentation for the collection.

@ -0,0 +1,65 @@
### REQUIRED
# this can be a company/brand/organization or product namespace
# under which all content lives
namespace: {{ namespace }}
# the designation of this specific collection
name: {{ collection_name }}
# semantic versioning compliant version designation
version: 1.0.0
# the filename for the collection's readme file (markdown, .md)
readme: README.md
# a list of the collection's content authors
# Ex: 'Full Name <email> (http://site) @nicks:irc/im/site#channel'
authors:
- {{ author }} <example@domain.com>
### OPTIONAL but strongly advised
# short summary of the collection
description: {{ description }}
# Either a single valid SPDX license identifier or a list of valid SPDX license
# identifiers, see https://spdx.org/licenses/. Could also set `license_file`
# instead to point to the file that specifies the license in the collection
# directory.
license: {{ license }}
# list of keywords you want to associate the collection
# with for indexing/search systems
tags: []
# A dict of dependencies: other collections this collection requires to be
# installed for it to be usable. The key of the dict is the collection
# label (namespace.name) and the value is a spec for the semver version
# required (see the parsing sketch after this file).
dependencies: {}
### URLs
# url of originating SCM repository
repository: {{ repository_url }}
# url to online docs
documentation: {{ documentation_url }}
# homepage of the collection/project
homepage: {{ homepage_url }}
# issue tracker url
issues: {{ issue_tracker_url }}
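
As a parsing sketch for the dependencies field described above (hypothetical collection names; requires PyYAML, which the tests in this change already use):

```python
import yaml

example = yaml.safe_load('''
dependencies:
  other_ns.other_collection: '>=1.0.0'
  my_ns.utils: '*'
''')
# key: collection label (namespace.name); value: semver requirement spec
assert example['dependencies']['my_ns.utils'] == '*'
```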

@ -0,0 +1,31 @@
# Collections Plugins Directory
This directory can be used to ship various plugins inside an Ansible collection. Each plugin is placed in a folder
named after the type of plugin it is. The directory can also include the `module_utils` and `modules` directories,
which contain module utils and modules respectively.
Here is an example directory layout for the majority of plugins currently supported by Ansible:
```
└── plugins
├── action
├── become
├── cache
├── callback
├── cliconf
├── connection
├── filter
├── httpapi
├── inventory
├── lookup
├── module_utils
├── modules
├── netconf
├── shell
├── strategy
├── terminal
├── test
└── vars
```
A full list of plugin types can be found at [Working With Plugins]({{ ansible_plugin_list_dir }}).
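
For instance, a minimal filter plugin (hypothetical name) placed in `plugins/filter/` could look like this sketch:

```python
# plugins/filter/reverse_text.py - a hypothetical minimal filter plugin.
# Usable in templates as: {{ 'abc' | reverse_text }}
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type


def reverse_text(value):
    # Return the input string reversed.
    return value[::-1]


class FilterModule(object):

    def filters(self):
        # Map filter names to their implementations.
        return {'reverse_text': reverse_text}
```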

@ -30,7 +30,7 @@ _SYNTHETIC_PACKAGES = {
}
# TODO: tighten this up to subset Python identifier requirements (and however we want to restrict ns/collection names)
_collection_qualified_re = re.compile(to_text(r'^(\w+)\.(\w+)\.(\w+)'))
_collection_qualified_re = re.compile(to_text(r'^(\w+)\.(\w+)\.(\w+)$'))
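
The added `$` anchor matters because `re.match` only anchors at the start of the string; a quick check with hypothetical names:

```python
import re

unanchored = re.compile(r'^(\w+)\.(\w+)\.(\w+)')
anchored = re.compile(r'^(\w+)\.(\w+)\.(\w+)$')

# Without the trailing anchor an over-qualified name still matches.
assert unanchored.match('ns.coll.module.extra') is not None
assert anchored.match('ns.coll.module.extra') is None
```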
# FIXME: exception handling/error logging

@ -1,2 +1,3 @@
destructive
shippable/posix/group3
skip/python2.6 # build uses tarfile with features not available until 2.7

@ -2,7 +2,7 @@
set -eux -o pipefail
ansible-playbook setup.yml
ansible-playbook setup.yml "$@"
trap 'ansible-playbook cleanup.yml' EXIT
@ -37,7 +37,7 @@ popd # "${galaxy_local_test_role_dir}"
f_ansible_galaxy_status()
{
printf "### Testing ansible-galaxy: %s\n" "${@}"
printf "\n\n\n### Testing ansible-galaxy: %s\n" "${@}"
}
# Galaxy install test case
@ -47,7 +47,7 @@ f_ansible_galaxy_status "install of local git repo"
galaxy_testdir=$(mktemp -d)
pushd "${galaxy_testdir}"
ansible-galaxy install git+file:///"${galaxy_local_test_role_git_repo}"
ansible-galaxy install git+file:///"${galaxy_local_test_role_git_repo}" "$@"
# Test that the role was installed to the expected directory
[[ -d "${HOME}/.ansible/roles/${galaxy_local_test_role}" ]]
@ -62,7 +62,7 @@ galaxy_testdir=$(mktemp -d)
pushd "${galaxy_testdir}"
mkdir -p "${galaxy_relative_rolespath}"
ansible-galaxy install git+file:///"${galaxy_local_test_role_git_repo}" -p "${galaxy_relative_rolespath}"
ansible-galaxy install git+file:///"${galaxy_local_test_role_git_repo}" -p "${galaxy_relative_rolespath}" "$@"
# Test that the role was installed to the expected directory
[[ -d "${galaxy_relative_rolespath}/${galaxy_local_test_role}" ]]
@ -84,7 +84,7 @@ galaxy_testdir=$(mktemp -d)
pushd "${galaxy_testdir}"
git clone "${galaxy_local_test_role_git_repo}" "${galaxy_local_test_role}"
ansible-galaxy init roles-path-bug
ansible-galaxy init roles-path-bug "$@"
pushd roles-path-bug
cat <<EOF > ansible.cfg
[defaults]
@ -96,7 +96,7 @@ EOF
name: ${galaxy_local_test_role}
EOF
ansible-galaxy install -r requirements.yml -p roles/
ansible-galaxy install -r requirements.yml -p roles/ "$@"
popd # roles-path-bug
# Test that the role was installed to the expected directory
@ -105,5 +105,68 @@ EOF
popd # ${galaxy_testdir}
rm -fr "${galaxy_testdir}"
#################################
# ansible-galaxy collection tests
#################################
f_ansible_galaxy_status \
"collection init tests to make sure the relative dir logic works"
galaxy_testdir=$(mktemp -d)
pushd "${galaxy_testdir}"
ansible-galaxy collection init ansible_test.my_collection "$@"
# Test that the collection skeleton was created in the expected directory
for galaxy_collection_dir in "docs" "plugins" "roles"
do
[[ -d "${galaxy_testdir}/ansible_test/my_collection/${galaxy_collection_dir}" ]]
done
popd # ${galaxy_testdir}
rm -fr "${galaxy_testdir}"
f_ansible_galaxy_status \
"collection init tests to make sure the --init-path logic works"
galaxy_testdir=$(mktemp -d)
pushd "${galaxy_testdir}"
ansible-galaxy collection init ansible_test.my_collection --init-path "${galaxy_testdir}/test" "$@"
# Test that the collection skeleton was created in the expected directory
for galaxy_collection_dir in "docs" "plugins" "roles"
do
[[ -d "${galaxy_testdir}/test/ansible_test/my_collection/${galaxy_collection_dir}" ]]
done
popd # ${galaxy_testdir}
f_ansible_galaxy_status \
"collection build test creating artifact in current directory"
pushd "${galaxy_testdir}/test/ansible_test/my_collection"
ansible-galaxy collection build "$@"
[[ -f "${galaxy_testdir}/test/ansible_test/my_collection/ansible_test-my_collection-1.0.0.tar.gz" ]]
popd # ${galaxy_testdir}/ansible_test/my_collection
f_ansible_galaxy_status \
"collection build test to make sure we can specify a relative path"
pushd "${galaxy_testdir}"
ansible-galaxy collection build "test/ansible_test/my_collection" "$@"
[[ -f "${galaxy_testdir}/ansible_test-my_collection-1.0.0.tar.gz" ]]
# Make sure --force works
ansible-galaxy collection build "test/ansible_test/my_collection" --force "$@"
[[ -f "${galaxy_testdir}/ansible_test-my_collection-1.0.0.tar.gz" ]]
popd # ${galaxy_testdir}
rm -fr "${galaxy_testdir}"
rm -fr "${galaxy_local_test_role_dir}"

@ -0,0 +1 @@
Welcome to my test collection doc for {{ namespace }}.

@ -0,0 +1,7 @@
namespace: '{{ namespace }}'
name: '{{ collection_name }}'
version: 0.1.0
readme: README.md
authors:
- Ansible Cow <acow@bovineuniversity.edu>
- Tu Cow <tucow@bovineuniversity.edu>

@ -0,0 +1,3 @@
- name: test collection skeleton
debug:
msg: "Namespace: {{ namespace }}"

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
# (c) 2016, Adrian Likins <alikins@redhat.com>
#
# This file is part of Ansible
@ -20,20 +21,30 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import ansible
import json
import os
import pytest
import shutil
import tarfile
import tempfile
import yaml
import ansible.constants as C
from ansible import context
from ansible.cli.arguments import option_helpers as opt_help
from ansible.cli.galaxy import GalaxyCLI
from ansible.errors import AnsibleError, AnsibleOptionsError
from ansible.module_utils.six import PY3
from ansible.errors import AnsibleError
from ansible.module_utils._text import to_text
from ansible.utils import context_objects as co
from units.compat import unittest
from units.compat.mock import call, patch
from units.compat.mock import call, patch, MagicMock
@pytest.fixture(autouse=True)
def reset_cli_args():
co.GlobalCLIArgs._Singleton__instance = None
yield
co.GlobalCLIArgs._Singleton__instance = None
class TestGalaxy(unittest.TestCase):
@ -259,7 +270,7 @@ class ValidRoleTests(object):
expected_role_dirs = ('defaults', 'files', 'handlers', 'meta', 'tasks', 'templates', 'vars', 'tests')
@classmethod
def setUpRole(cls, role_name, galaxy_args=None, skeleton_path=None):
def setUpRole(cls, role_name, galaxy_args=None, skeleton_path=None, use_explicit_type=False):
if galaxy_args is None:
galaxy_args = []
@ -276,7 +287,12 @@ class ValidRoleTests(object):
cls.role_name = role_name
# create role using default skeleton
gc = GalaxyCLI(args=['ansible-galaxy', 'init', '-c', '--offline'] + galaxy_args + ['--init-path', cls.test_dir, cls.role_name])
args = ['ansible-galaxy']
if use_explicit_type:
args += ['role']
args += ['init', '-c', '--offline'] + galaxy_args + ['--init-path', cls.test_dir, cls.role_name]
gc = GalaxyCLI(args=args)
gc.run()
cls.gc = gc
@ -416,7 +432,7 @@ class TestGalaxyInitSkeleton(unittest.TestCase, ValidRoleTests):
@classmethod
def setUpClass(cls):
role_skeleton_path = os.path.join(os.path.split(__file__)[0], 'test_data', 'role_skeleton')
cls.setUpRole('delete_me_skeleton', skeleton_path=role_skeleton_path)
cls.setUpRole('delete_me_skeleton', skeleton_path=role_skeleton_path, use_explicit_type=True)
def test_empty_files_dir(self):
files_dir = os.path.join(self.role_dir, 'files')
@ -444,3 +460,486 @@ class TestGalaxyInitSkeleton(unittest.TestCase, ValidRoleTests):
def test_skeleton_option(self):
self.assertEquals(self.role_skeleton_path, context.CLIARGS['role_skeleton'], msg='Skeleton path was not parsed properly from the command line')
@pytest.fixture()
def collection_skeleton(request, tmp_path_factory):
name, skeleton_path = request.param
galaxy_args = ['ansible-galaxy', 'collection', 'init', '-c']
if skeleton_path is not None:
galaxy_args += ['--collection-skeleton', skeleton_path]
test_dir = to_text(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections'))
galaxy_args += ['--init-path', test_dir, name]
GalaxyCLI(args=galaxy_args).run()
namespace_name, collection_name = name.split('.', 1)
collection_dir = os.path.join(test_dir, namespace_name, collection_name)
return collection_dir
@pytest.mark.parametrize('collection_skeleton', [
('ansible_test.my_collection', None),
], indirect=True)
def test_collection_default(collection_skeleton):
meta_path = os.path.join(collection_skeleton, 'galaxy.yml')
with open(meta_path, 'r') as galaxy_meta:
metadata = yaml.safe_load(galaxy_meta)
assert metadata['namespace'] == 'ansible_test'
assert metadata['name'] == 'my_collection'
assert metadata['authors'] == ['your name <example@domain.com>']
assert metadata['readme'] == 'README.md'
assert metadata['version'] == '1.0.0'
assert metadata['description'] == 'your description'
assert metadata['license'] == 'GPL-2.0-or-later'
assert metadata['tags'] == []
assert metadata['dependencies'] == {}
assert metadata['documentation'] == 'http://docs.example.com'
assert metadata['repository'] == 'http://example.com/repository'
assert metadata['homepage'] == 'http://example.com'
assert metadata['issues'] == 'http://example.com/issue/tracker'
assert len(metadata) == 13
for d in ['docs', 'plugins', 'roles']:
assert os.path.isdir(os.path.join(collection_skeleton, d)), \
"Expected collection subdirectory {0} doesn't exist".format(d)
@pytest.mark.parametrize('collection_skeleton', [
('ansible_test.delete_me_skeleton', os.path.join(os.path.split(__file__)[0], 'test_data', 'collection_skeleton')),
], indirect=True)
def test_collection_skeleton(collection_skeleton):
meta_path = os.path.join(collection_skeleton, 'galaxy.yml')
with open(meta_path, 'r') as galaxy_meta:
metadata = yaml.safe_load(galaxy_meta)
assert metadata['namespace'] == 'ansible_test'
assert metadata['name'] == 'delete_me_skeleton'
assert metadata['authors'] == ['Ansible Cow <acow@bovineuniversity.edu>', 'Tu Cow <tucow@bovineuniversity.edu>']
assert metadata['version'] == '0.1.0'
assert metadata['readme'] == 'README.md'
assert len(metadata) == 5
assert os.path.exists(os.path.join(collection_skeleton, 'README.md'))
# Test empty directories exist and are empty
for empty_dir in ['plugins/action', 'plugins/filter', 'plugins/inventory', 'plugins/lookup',
'plugins/module_utils', 'plugins/modules']:
assert os.listdir(os.path.join(collection_skeleton, empty_dir)) == []
# Test files that don't end with .j2 were not templated
doc_file = os.path.join(collection_skeleton, 'docs', 'My Collection.md')
with open(doc_file, 'r') as f:
doc_contents = f.read()
assert doc_contents.strip() == 'Welcome to my test collection doc for {{ namespace }}.'
# Test files that end with .j2 but are in the templates directory were not templated
for template_dir in ['playbooks/templates', 'playbooks/templates/subfolder',
'roles/common/templates', 'roles/common/templates/subfolder']:
test_conf_j2 = os.path.join(collection_skeleton, template_dir, 'test.conf.j2')
assert os.path.exists(test_conf_j2)
with open(test_conf_j2, 'r') as f:
contents = f.read()
expected_contents = '[defaults]\ntest_key = {{ test_variable }}'
assert expected_contents == contents.strip()
@pytest.fixture()
def collection_artifact(collection_skeleton, tmp_path_factory):
''' Creates a collection artifact tarball that is ready to be published and installed '''
output_dir = to_text(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Output'))
# Because we call GalaxyCLI in collection_skeleton we need to reset the singleton back to None so it uses the new
# args; we restore the original args once it is done.
orig_cli_args = co.GlobalCLIArgs._Singleton__instance
try:
co.GlobalCLIArgs._Singleton__instance = None
galaxy_args = ['ansible-galaxy', 'collection', 'build', collection_skeleton, '--output-path', output_dir]
gc = GalaxyCLI(args=galaxy_args)
gc.run()
yield output_dir
finally:
co.GlobalCLIArgs._Singleton__instance = orig_cli_args
def test_invalid_skeleton_path():
expected = "- the skeleton path '/fake/path' does not exist, cannot init collection"
gc = GalaxyCLI(args=['ansible-galaxy', 'collection', 'init', 'my.collection', '--collection-skeleton',
'/fake/path'])
with pytest.raises(AnsibleError, match=expected):
gc.run()
@pytest.mark.parametrize("name", [
"",
"invalid",
"hypen-ns.collection",
"ns.hyphen-collection",
"ns.collection.weird",
])
def test_invalid_collection_name(name):
expected = "Invalid collection name, must be in the format <namespace>.<collection>"
gc = GalaxyCLI(args=['ansible-galaxy', 'collection', 'init', name])
with pytest.raises(AnsibleError, match=expected):
gc.run()
@pytest.mark.parametrize('collection_skeleton', [
('ansible_test.build_collection', None),
], indirect=True)
def test_collection_build(collection_artifact):
tar_path = os.path.join(collection_artifact, 'ansible_test-build_collection-1.0.0.tar.gz')
assert tarfile.is_tarfile(tar_path)
with tarfile.open(tar_path, mode='r') as tar:
tar_members = tar.getmembers()
valid_files = ['MANIFEST.json', 'FILES.json', 'roles', 'docs', 'plugins', 'plugins/README.md', 'README.md']
assert len(tar_members) == 7
# Verify the uid and gid is 0 and the correct perms are set
for member in tar_members:
assert member.name in valid_files
assert member.gid == 0
assert member.gname == ''
assert member.uid == 0
assert member.uname == ''
if member.isdir():
assert member.mode == 0o0755
else:
assert member.mode == 0o0644
manifest_file = tar.extractfile(tar_members[0])
try:
manifest = json.loads(to_text(manifest_file.read()))
finally:
manifest_file.close()
coll_info = manifest['collection_info']
file_manifest = manifest['file_manifest_file']
assert manifest['format'] == 1
assert len(manifest.keys()) == 3
assert coll_info['namespace'] == 'ansible_test'
assert coll_info['name'] == 'build_collection'
assert coll_info['version'] == '1.0.0'
assert coll_info['authors'] == ['your name <example@domain.com>']
assert coll_info['readme'] == 'README.md'
assert coll_info['tags'] == []
assert coll_info['description'] == 'your description'
assert coll_info['license'] == ['GPL-2.0-or-later']
assert coll_info['license_file'] is None
assert coll_info['dependencies'] == {}
assert coll_info['repository'] == 'http://example.com/repository'
assert coll_info['documentation'] == 'http://docs.example.com'
assert coll_info['homepage'] == 'http://example.com'
assert coll_info['issues'] == 'http://example.com/issue/tracker'
assert len(coll_info.keys()) == 14
assert file_manifest['name'] == 'FILES.json'
assert file_manifest['ftype'] == 'file'
assert file_manifest['chksum_type'] == 'sha256'
assert file_manifest['chksum_sha256'] is not None # Order of keys makes it hard to verify the checksum
assert file_manifest['format'] == 1
assert len(file_manifest.keys()) == 5
files_file = tar.extractfile(tar_members[1])
try:
files = json.loads(to_text(files_file.read()))
finally:
files_file.close()
assert len(files['files']) == 6
assert files['format'] == 1
assert len(files.keys()) == 2
valid_files_entries = ['.', 'roles', 'docs', 'plugins', 'plugins/README.md', 'README.md']
for file_entry in files['files']:
assert file_entry['name'] in valid_files_entries
assert file_entry['format'] == 1
if file_entry['name'] == 'plugins/README.md':
assert file_entry['ftype'] == 'file'
assert file_entry['chksum_type'] == 'sha256'
assert file_entry['chksum_sha256'] == '5be7ec7b71096d56e1cc48311b6a2266b77b5fdb9d1985b5bc625787b1e857c5'
elif file_entry['name'] == 'README.md':
assert file_entry['ftype'] == 'file'
assert file_entry['chksum_type'] == 'sha256'
assert file_entry['chksum_sha256'] == '45923ca2ece0e8ce31d29e5df9d8b649fe55e2f5b5b61c9724d7cc187bd6ad4a'
else:
assert file_entry['ftype'] == 'dir'
assert file_entry['chksum_type'] is None
assert file_entry['chksum_sha256'] is None
assert len(file_entry.keys()) == 5
@pytest.fixture()
def collection_install(reset_cli_args, tmp_path_factory, monkeypatch):
mock_install = MagicMock()
monkeypatch.setattr(ansible.cli.galaxy, 'install_collections', mock_install)
mock_warning = MagicMock()
monkeypatch.setattr(ansible.utils.display.Display, 'warning', mock_warning)
output_dir = to_text((tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Output')))
yield mock_install, mock_warning, output_dir
def test_collection_install_with_names(collection_install):
mock_install, mock_warning, output_dir = collection_install
galaxy_args = ['ansible-galaxy', 'collection', 'install', 'namespace.collection', 'namespace2.collection:1.0.1',
'--collections-path', output_dir]
GalaxyCLI(args=galaxy_args).run()
collection_path = os.path.join(output_dir, 'ansible_collections')
assert os.path.isdir(collection_path)
assert mock_warning.call_count == 1
assert "The specified collections path '%s' is not part of the configured Ansible collections path" % output_dir \
in mock_warning.call_args[0][0]
assert mock_install.call_count == 1
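# Positional args of the mocked install_collections call, in order (names
# inferred from the option tests below): collections, output_path, servers,
# validate_certs, ignore_errors, no_deps, force, force_deps.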
assert mock_install.call_args[0][0] == [('namespace.collection', '*', None),
('namespace2.collection', '1.0.1', None)]
assert mock_install.call_args[0][1] == collection_path
assert mock_install.call_args[0][2] == ['https://galaxy.ansible.com']
assert mock_install.call_args[0][3] is True
assert mock_install.call_args[0][4] is False
assert mock_install.call_args[0][5] is False
assert mock_install.call_args[0][6] is False
assert mock_install.call_args[0][7] is False
def test_collection_install_with_requirements_file(collection_install):
mock_install, mock_warning, output_dir = collection_install
requirements_file = os.path.join(output_dir, 'requirements.yml')
with open(requirements_file, 'wb') as req_obj:
req_obj.write(b'''---
collections:
- namespace.coll
- name: namespace2.coll
version: '>2.0.1'
''')
galaxy_args = ['ansible-galaxy', 'collection', 'install', '--requirements-file', requirements_file,
'--collections-path', output_dir]
GalaxyCLI(args=galaxy_args).run()
collection_path = os.path.join(output_dir, 'ansible_collections')
assert os.path.isdir(collection_path)
assert mock_warning.call_count == 1
assert "The specified collections path '%s' is not part of the configured Ansible collections path" % output_dir \
in mock_warning.call_args[0][0]
assert mock_install.call_count == 1
assert mock_install.call_args[0][0] == [('namespace.coll', '*', None),
('namespace2.coll', '>2.0.1', None)]
assert mock_install.call_args[0][1] == collection_path
assert mock_install.call_args[0][2] == ['https://galaxy.ansible.com']
assert mock_install.call_args[0][3] is True
assert mock_install.call_args[0][4] is False
assert mock_install.call_args[0][5] is False
assert mock_install.call_args[0][6] is False
assert mock_install.call_args[0][7] is False
def test_collection_install_with_relative_path(collection_install, monkeypatch):
mock_install = collection_install[0]
mock_req = MagicMock()
mock_req.return_value = [('namespace.coll', '*', None)]
monkeypatch.setattr(ansible.cli.galaxy, 'parse_collections_requirements_file', mock_req)
monkeypatch.setattr(os, 'makedirs', MagicMock())
requirements_file = './requirements.yml'
collections_path = './ansible_collections'
galaxy_args = ['ansible-galaxy', 'collection', 'install', '--requirements-file', requirements_file,
'--collections-path', collections_path]
GalaxyCLI(args=galaxy_args).run()
assert mock_install.call_count == 1
assert mock_install.call_args[0][0] == [('namespace.coll', '*', None)]
assert mock_install.call_args[0][1] == os.path.abspath(collections_path)
assert mock_install.call_args[0][2] == ['https://galaxy.ansible.com']
assert mock_install.call_args[0][3] is True
assert mock_install.call_args[0][4] is False
assert mock_install.call_args[0][5] is False
assert mock_install.call_args[0][6] is False
assert mock_install.call_args[0][7] is False
assert mock_req.call_count == 1
assert mock_req.call_args[0][0] == os.path.abspath(requirements_file)
def test_collection_install_with_unexpanded_path(collection_install, monkeypatch):
mock_install = collection_install[0]
mock_req = MagicMock()
mock_req.return_value = [('namespace.coll', '*', None)]
monkeypatch.setattr(ansible.cli.galaxy, 'parse_collections_requirements_file', mock_req)
monkeypatch.setattr(os, 'makedirs', MagicMock())
requirements_file = '~/requirements.yml'
collections_path = '~/ansible_collections'
galaxy_args = ['ansible-galaxy', 'collection', 'install', '--requirements-file', requirements_file,
'--collections-path', collections_path]
GalaxyCLI(args=galaxy_args).run()
assert mock_install.call_count == 1
assert mock_install.call_args[0][0] == [('namespace.coll', '*', None)]
assert mock_install.call_args[0][1] == os.path.expanduser(os.path.expandvars(collections_path))
assert mock_install.call_args[0][2] == ['https://galaxy.ansible.com']
assert mock_install.call_args[0][3] is True
assert mock_install.call_args[0][4] is False
assert mock_install.call_args[0][5] is False
assert mock_install.call_args[0][6] is False
assert mock_install.call_args[0][7] is False
assert mock_req.call_count == 1
assert mock_req.call_args[0][0] == os.path.expanduser(os.path.expandvars(requirements_file))
def test_collection_install_in_collection_dir(collection_install, monkeypatch):
mock_install, mock_warning, output_dir = collection_install
collections_path = C.COLLECTIONS_PATHS[0]
galaxy_args = ['ansible-galaxy', 'collection', 'install', 'namespace.collection', 'namespace2.collection:1.0.1',
'--collections-path', collections_path]
GalaxyCLI(args=galaxy_args).run()
assert mock_warning.call_count == 0
assert mock_install.call_count == 1
assert mock_install.call_args[0][0] == [('namespace.collection', '*', None),
('namespace2.collection', '1.0.1', None)]
assert mock_install.call_args[0][1] == os.path.join(collections_path, 'ansible_collections')
assert mock_install.call_args[0][2] == ['https://galaxy.ansible.com']
assert mock_install.call_args[0][3] is True
assert mock_install.call_args[0][4] is False
assert mock_install.call_args[0][5] is False
assert mock_install.call_args[0][6] is False
assert mock_install.call_args[0][7] is False
def test_collection_install_name_and_requirements_fail(collection_install):
test_path = collection_install[2]
expected = 'The positional collection_name arg and --requirements-file are mutually exclusive.'
with pytest.raises(AnsibleError, match=expected):
GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', 'namespace.collection', '--collections-path',
test_path, '--requirements-file', test_path]).run()
def test_collection_install_no_name_and_requirements_fail(collection_install):
test_path = collection_install[2]
expected = 'You must specify a collection name or a requirements file.'
with pytest.raises(AnsibleError, match=expected):
GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', '--collections-path', test_path]).run()
def test_collection_install_path_with_ansible_collections(collection_install):
mock_install, mock_warning, output_dir = collection_install
collection_path = os.path.join(output_dir, 'ansible_collections')
galaxy_args = ['ansible-galaxy', 'collection', 'install', 'namespace.collection', 'namespace2.collection:1.0.1',
'--collections-path', collection_path]
GalaxyCLI(args=galaxy_args).run()
assert os.path.isdir(collection_path)
assert mock_warning.call_count == 1
assert "The specified collections path '%s' is not part of the configured Ansible collections path" \
% collection_path in mock_warning.call_args[0][0]
assert mock_install.call_count == 1
assert mock_install.call_args[0][0] == [('namespace.collection', '*', None),
('namespace2.collection', '1.0.1', None)]
assert mock_install.call_args[0][1] == collection_path
assert mock_install.call_args[0][2] == ['https://galaxy.ansible.com']
assert mock_install.call_args[0][3] is True
assert mock_install.call_args[0][4] is False
assert mock_install.call_args[0][5] is False
assert mock_install.call_args[0][6] is False
assert mock_install.call_args[0][7] is False
def test_collection_install_ignore_certs(collection_install):
mock_install, mock_warning, output_dir = collection_install
galaxy_args = ['ansible-galaxy', 'collection', 'install', 'namespace.collection', '--collections-path', output_dir,
'--ignore-certs']
GalaxyCLI(args=galaxy_args).run()
assert mock_install.call_args[0][3] is False
def test_collection_install_force(collection_install):
mock_install, mock_warning, output_dir = collection_install
galaxy_args = ['ansible-galaxy', 'collection', 'install', 'namespace.collection', '--collections-path', output_dir,
'--force']
GalaxyCLI(args=galaxy_args).run()
assert mock_install.call_args[0][6] is True
def test_collection_install_force_deps(collection_install):
mock_install, mock_warning, output_dir = collection_install
galaxy_args = ['ansible-galaxy', 'collection', 'install', 'namespace.collection', '--collections-path', output_dir,
'--force-with-deps']
GalaxyCLI(args=galaxy_args).run()
assert mock_install.call_args[0][7] is True
def test_collection_install_no_deps(collection_install):
mock_install, mock_warning, output_dir = collection_install
galaxy_args = ['ansible-galaxy', 'collection', 'install', 'namespace.collection', '--collections-path', output_dir,
'--no-deps']
GalaxyCLI(args=galaxy_args).run()
assert mock_install.call_args[0][5] is True
def test_collection_install_ignore(collection_install):
mock_install, mock_warning, output_dir = collection_install
galaxy_args = ['ansible-galaxy', 'collection', 'install', 'namespace.collection', '--collections-path', output_dir,
'--ignore-errors']
GalaxyCLI(args=galaxy_args).run()
assert mock_install.call_args[0][4] is True
def test_collection_install_custom_server(collection_install):
mock_install, mock_warning, output_dir = collection_install
galaxy_args = ['ansible-galaxy', 'collection', 'install', 'namespace.collection', '--collections-path', output_dir,
'--server', 'https://galaxy-dev.ansible.com']
GalaxyCLI(args=galaxy_args).run()
assert mock_install.call_args[0][2] == ['https://galaxy-dev.ansible.com']

@ -0,0 +1,922 @@
# -*- coding: utf-8 -*-
# Copyright: (c) 2019, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
import os
import pytest
import re
import tarfile
import tempfile
import time
import uuid
from hashlib import sha256
from io import BytesIO, StringIO
from units.compat.mock import MagicMock
import ansible.module_utils.six.moves.urllib.error as urllib_error
from ansible.cli.galaxy import GalaxyCLI
from ansible.errors import AnsibleError
from ansible.galaxy import collection
from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.utils import context_objects as co
from ansible.utils.display import Display
from ansible.utils.hashing import secure_hash_s
@pytest.fixture(autouse=True)
def reset_cli_args():
co.GlobalCLIArgs._Singleton__instance = None
yield
co.GlobalCLIArgs._Singleton__instance = None
@pytest.fixture()
def collection_input(tmp_path_factory):
''' Creates a collection skeleton directory for build tests '''
test_dir = to_text(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
namespace = 'ansible_namespace'
collection = 'collection'
skeleton = os.path.join(os.path.dirname(os.path.split(__file__)[0]), 'cli', 'test_data', 'collection_skeleton')
galaxy_args = ['ansible-galaxy', 'collection', 'init', '%s.%s' % (namespace, collection),
'-c', '--init-path', test_dir, '--collection-skeleton', skeleton]
GalaxyCLI(args=galaxy_args).run()
collection_dir = os.path.join(test_dir, namespace, collection)
output_dir = to_text(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Output'))
return collection_dir, output_dir
@pytest.fixture()
def collection_artifact(monkeypatch, tmp_path_factory):
''' Creates a temp collection artifact and mocked open_url instance for publishing tests '''
mock_open = MagicMock()
monkeypatch.setattr(collection, 'open_url', mock_open)
mock_uuid = MagicMock()
mock_uuid.return_value.hex = 'uuid'
monkeypatch.setattr(uuid, 'uuid4', mock_uuid)
tmp_path = tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections')
input_file = to_text(tmp_path / 'collection.tar.gz')
with tarfile.open(input_file, 'w:gz') as tfile:
b_io = BytesIO(b"\x00\x01\x02\x03")
tar_info = tarfile.TarInfo('test')
tar_info.size = 4
tar_info.mode = 0o0644
tfile.addfile(tarinfo=tar_info, fileobj=b_io)
return input_file, mock_open
@pytest.fixture()
def requirements_file(request, tmp_path_factory):
content = request.param
test_dir = to_text(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Requirements'))
requirements_file = os.path.join(test_dir, 'requirements.yml')
if content:
with open(requirements_file, 'wb') as req_obj:
req_obj.write(to_bytes(content))
yield requirements_file
@pytest.fixture()
def galaxy_yml(request, tmp_path_factory):
b_test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections'))
b_galaxy_yml = os.path.join(b_test_dir, b'galaxy.yml')
with open(b_galaxy_yml, 'wb') as galaxy_obj:
galaxy_obj.write(to_bytes(request.param))
yield b_galaxy_yml
@pytest.fixture()
def tmp_tarfile(tmp_path_factory):
''' Creates a temporary tar file for _extract_tar_file tests '''
filename = u'ÅÑŚÌβŁÈ'
temp_dir = to_bytes(tmp_path_factory.mktemp('test-%s Collections' % to_native(filename)))
tar_file = os.path.join(temp_dir, to_bytes('%s.tar.gz' % filename))
data = os.urandom(8)
with tarfile.open(tar_file, 'w:gz') as tfile:
b_io = BytesIO(data)
tar_info = tarfile.TarInfo(filename)
tar_info.size = len(data)
tar_info.mode = 0o0644
tfile.addfile(tarinfo=tar_info, fileobj=b_io)
sha256_hash = sha256()
sha256_hash.update(data)
with tarfile.open(tar_file, 'r') as tfile:
yield temp_dir, tfile, filename, sha256_hash.hexdigest()
def test_build_collection_no_galaxy_yaml():
fake_path = u'/fake/ÅÑŚÌβŁÈ/path'
expected = to_native("The collection galaxy.yml path '%s/galaxy.yml' does not exist." % fake_path)
with pytest.raises(AnsibleError, match=expected):
collection.build_collection(fake_path, 'output', False)
def test_build_existing_output_file(collection_input):
input_dir, output_dir = collection_input
existing_output_dir = os.path.join(output_dir, 'ansible_namespace-collection-0.1.0.tar.gz')
os.makedirs(existing_output_dir)
expected = "The output collection artifact '%s' already exists, but is a directory - aborting" \
% to_native(existing_output_dir)
with pytest.raises(AnsibleError, match=expected):
collection.build_collection(input_dir, output_dir, False)
def test_build_existing_output_without_force(collection_input):
input_dir, output_dir = collection_input
existing_output = os.path.join(output_dir, 'ansible_namespace-collection-0.1.0.tar.gz')
with open(existing_output, 'w+') as out_file:
out_file.write("random garbage")
out_file.flush()
expected = "The file '%s' already exists. You can use --force to re-create the collection artifact." \
% to_native(existing_output)
with pytest.raises(AnsibleError, match=expected):
collection.build_collection(input_dir, output_dir, False)
def test_build_existing_output_with_force(collection_input):
input_dir, output_dir = collection_input
existing_output = os.path.join(output_dir, 'ansible_namespace-collection-0.1.0.tar.gz')
with open(existing_output, 'w+') as out_file:
out_file.write("random garbage")
out_file.flush()
collection.build_collection(input_dir, output_dir, True)
# Verify the file was replaced with an actual tar file
assert tarfile.is_tarfile(existing_output)
@pytest.mark.parametrize('galaxy_yml', [b'namespace: value: broken'], indirect=True)
def test_invalid_yaml_galaxy_file(galaxy_yml):
expected = to_native(b"Failed to parse the galaxy.yml at '%s' with the following error:" % galaxy_yml)
with pytest.raises(AnsibleError, match=expected):
collection._get_galaxy_yml(galaxy_yml)
@pytest.mark.parametrize('galaxy_yml', [b'namespace: test_namespace'], indirect=True)
def test_missing_required_galaxy_key(galaxy_yml):
expected = "The collection galaxy.yml at '%s' is missing the following mandatory keys: authors, name, " \
"readme, version" % to_native(galaxy_yml)
with pytest.raises(AnsibleError, match=expected):
collection._get_galaxy_yml(galaxy_yml)
@pytest.mark.parametrize('galaxy_yml', [b"""
namespace: namespace
name: collection
authors: Jordan
version: 0.1.0
readme: README.md
invalid: value"""], indirect=True)
def test_warning_extra_keys(galaxy_yml, monkeypatch):
display_mock = MagicMock()
monkeypatch.setattr(Display, 'warning', display_mock)
collection._get_galaxy_yml(galaxy_yml)
assert display_mock.call_count == 1
assert display_mock.call_args[0][0] == "Found unknown keys in collection galaxy.yml at '%s': invalid"\
% to_text(galaxy_yml)
@pytest.mark.parametrize('galaxy_yml', [b"""
namespace: namespace
name: collection
authors: Jordan
version: 0.1.0
readme: README.md"""], indirect=True)
def test_defaults_galaxy_yml(galaxy_yml):
actual = collection._get_galaxy_yml(galaxy_yml)
assert sorted(list(actual.keys())) == [
'authors', 'dependencies', 'description', 'documentation', 'homepage', 'issues', 'license_file', 'license_ids',
'name', 'namespace', 'readme', 'repository', 'tags', 'version',
]
assert actual['namespace'] == 'namespace'
assert actual['name'] == 'collection'
assert actual['authors'] == ['Jordan']
assert actual['version'] == '0.1.0'
assert actual['readme'] == 'README.md'
assert actual['description'] is None
assert actual['repository'] is None
assert actual['documentation'] is None
assert actual['homepage'] is None
assert actual['issues'] is None
assert actual['tags'] == []
assert actual['dependencies'] == {}
assert actual['license_ids'] == []
@pytest.mark.parametrize('galaxy_yml', [(b"""
namespace: namespace
name: collection
authors: Jordan
version: 0.1.0
readme: README.md
license: MIT"""), (b"""
namespace: namespace
name: collection
authors: Jordan
version: 0.1.0
readme: README.md
license:
- MIT""")], indirect=True)
def test_galaxy_yml_list_value(galaxy_yml):
actual = collection._get_galaxy_yml(galaxy_yml)
assert actual['license_ids'] == ['MIT']
def test_build_ignore_files_and_folders(collection_input, monkeypatch):
input_dir = collection_input[0]
mock_display = MagicMock()
monkeypatch.setattr(Display, 'vvv', mock_display)
git_folder = os.path.join(input_dir, '.git')
retry_file = os.path.join(input_dir, 'ansible.retry')
os.makedirs(git_folder)
with open(retry_file, 'w+') as ignore_file:
ignore_file.write('random')
ignore_file.flush()
actual = collection._build_files_manifest(to_bytes(input_dir))
assert actual['format'] == 1
for manifest_entry in actual['files']:
assert manifest_entry['name'] not in ['.git', 'ansible.retry', 'galaxy.yml']
expected_msgs = [
"Skipping '%s' for collection build" % to_text(retry_file),
"Skipping '%s' for collection build" % to_text(git_folder),
]
assert mock_display.call_count == 2
assert mock_display.mock_calls[0][1][0] in expected_msgs
assert mock_display.mock_calls[1][1][0] in expected_msgs
def test_build_ignore_symlink_target_outside_collection(collection_input, monkeypatch):
input_dir, outside_dir = collection_input
mock_display = MagicMock()
monkeypatch.setattr(Display, 'warning', mock_display)
link_path = os.path.join(input_dir, 'plugins', 'connection')
os.symlink(outside_dir, link_path)
actual = collection._build_files_manifest(to_bytes(input_dir))
for manifest_entry in actual['files']:
assert manifest_entry['name'] != 'plugins/connection'
assert mock_display.call_count == 1
assert mock_display.mock_calls[0][1][0] == "Skipping '%s' as it is a symbolic link to a directory outside " \
"the collection" % to_text(link_path)
def test_build_copy_symlink_target_inside_collection(collection_input):
input_dir = collection_input[0]
os.makedirs(os.path.join(input_dir, 'playbooks', 'roles'))
roles_link = os.path.join(input_dir, 'playbooks', 'roles', 'linked')
roles_target = os.path.join(input_dir, 'roles', 'linked')
roles_target_tasks = os.path.join(roles_target, 'tasks')
os.makedirs(roles_target_tasks)
with open(os.path.join(roles_target_tasks, 'main.yml'), 'w+') as tasks_main:
tasks_main.write("---\n- hosts: localhost\n tasks:\n - ping:")
tasks_main.flush()
os.symlink(roles_target, roles_link)
actual = collection._build_files_manifest(to_bytes(input_dir))
linked_entries = [e for e in actual['files'] if e['name'].startswith('playbooks/roles/linked')]
assert len(linked_entries) == 3
assert linked_entries[0]['name'] == 'playbooks/roles/linked'
assert linked_entries[0]['ftype'] == 'dir'
assert linked_entries[1]['name'] == 'playbooks/roles/linked/tasks'
assert linked_entries[1]['ftype'] == 'dir'
assert linked_entries[2]['name'] == 'playbooks/roles/linked/tasks/main.yml'
assert linked_entries[2]['ftype'] == 'file'
assert linked_entries[2]['chksum_sha256'] == '9c97a1633c51796999284c62236b8d5462903664640079b80c37bf50080fcbc3'
def test_build_with_symlink_inside_collection(collection_input):
input_dir, output_dir = collection_input
os.makedirs(os.path.join(input_dir, 'playbooks', 'roles'))
roles_link = os.path.join(input_dir, 'playbooks', 'roles', 'linked')
file_link = os.path.join(input_dir, 'docs', 'README.md')
roles_target = os.path.join(input_dir, 'roles', 'linked')
roles_target_tasks = os.path.join(roles_target, 'tasks')
os.makedirs(roles_target_tasks)
with open(os.path.join(roles_target_tasks, 'main.yml'), 'w+') as tasks_main:
tasks_main.write("---\n- hosts: localhost\n tasks:\n - ping:")
tasks_main.flush()
os.symlink(roles_target, roles_link)
os.symlink(os.path.join(input_dir, 'README.md'), file_link)
collection.build_collection(input_dir, output_dir, False)
output_artifact = os.path.join(output_dir, 'ansible_namespace-collection-0.1.0.tar.gz')
assert tarfile.is_tarfile(output_artifact)
with tarfile.open(output_artifact, mode='r') as actual:
members = actual.getmembers()
linked_members = [m for m in members if m.path.startswith('playbooks/roles/linked/tasks')]
assert len(linked_members) == 2
assert linked_members[0].name == 'playbooks/roles/linked/tasks'
assert linked_members[0].isdir()
assert linked_members[1].name == 'playbooks/roles/linked/tasks/main.yml'
assert linked_members[1].isreg()
linked_task = actual.extractfile(linked_members[1].name)
actual_task = secure_hash_s(linked_task.read())
linked_task.close()
assert actual_task == 'f4dcc52576b6c2cd8ac2832c52493881c4e54226'
linked_file = [m for m in members if m.path == 'docs/README.md']
assert len(linked_file) == 1
assert linked_file[0].isreg()
linked_file_obj = actual.extractfile(linked_file[0].name)
actual_file = secure_hash_s(linked_file_obj.read())
linked_file_obj.close()
assert actual_file == '63444bfc766154e1bc7557ef6280de20d03fcd81'
def test_publish_missing_file():
fake_path = u'/fake/ÅÑŚÌβŁÈ/path'
expected = to_native("The collection path specified '%s' does not exist." % fake_path)
with pytest.raises(AnsibleError, match=expected):
collection.publish_collection(fake_path, None, None, False, True)
def test_publish_not_a_tarball():
expected = "The collection path specified '{0}' is not a tarball, use 'ansible-galaxy collection build' to " \
"create a proper release artifact."
with tempfile.NamedTemporaryFile(prefix=u'ÅÑŚÌβŁÈ') as temp_file:
temp_file.write(b"\x00")
temp_file.flush()
with pytest.raises(AnsibleError, match=expected.format(to_native(temp_file.name))):
collection.publish_collection(temp_file.name, None, None, False, True)
def test_publish_no_wait(collection_artifact, monkeypatch):
mock_display = MagicMock()
monkeypatch.setattr(Display, 'display', mock_display)
artifact_path, mock_open = collection_artifact
fake_import_uri = 'https://galaxy.server.com/api/v2/import/1234'
server = 'https://galaxy.com'
mock_open.return_value = StringIO(u'{"task":"%s"}' % fake_import_uri)
expected_form, expected_content_type = collection._get_mime_data(to_bytes(artifact_path))
collection.publish_collection(artifact_path, server, 'key', False, False)
assert mock_open.call_count == 1
assert mock_open.mock_calls[0][1][0] == 'https://galaxy.com/api/v2/collections/'
assert mock_open.mock_calls[0][2]['data'] == expected_form
assert mock_open.mock_calls[0][2]['method'] == 'POST'
assert mock_open.mock_calls[0][2]['validate_certs'] is True
assert mock_open.mock_calls[0][2]['headers']['Authorization'] == 'Token key'
assert mock_open.mock_calls[0][2]['headers']['Content-length'] == len(expected_form)
assert mock_open.mock_calls[0][2]['headers']['Content-type'] == expected_content_type
assert mock_display.call_count == 2
assert mock_display.mock_calls[0][1][0] == "Publishing collection artifact '%s' to %s" % (artifact_path, server)
assert mock_display.mock_calls[1][1][0] == \
"Collection has been pushed to the Galaxy server, not waiting until import has completed due to --no-wait " \
"being set. Import task results can be found at %s" % fake_import_uri
def test_publish_dont_validate_cert(collection_artifact):
artifact_path, mock_open = collection_artifact
mock_open.return_value = StringIO(u'{"task":"https://galaxy.server.com/api/v2/import/1234"}')
collection.publish_collection(artifact_path, 'https://galaxy.server.com', 'key', True, False)
assert mock_open.call_count == 1
assert mock_open.mock_calls[0][2]['validate_certs'] is False
def test_publish_failure(collection_artifact):
artifact_path, mock_open = collection_artifact
mock_open.side_effect = urllib_error.HTTPError('https://galaxy.server.com', 500, 'msg', {}, StringIO())
expected = 'Error when publishing collection (HTTP Code: 500, Message: Unknown error returned by Galaxy ' \
'server. Code: Unknown)'
with pytest.raises(AnsibleError, match=re.escape(expected)):
collection.publish_collection(artifact_path, 'https://galaxy.server.com', 'key', False, True)
def test_publish_failure_with_json_info(collection_artifact):
artifact_path, mock_open = collection_artifact
return_content = StringIO(u'{"message":"Galaxy error message","code":"GWE002"}')
mock_open.side_effect = urllib_error.HTTPError('https://galaxy.server.com', 503, 'msg', {}, return_content)
expected = 'Error when publishing collection (HTTP Code: 503, Message: Galaxy error message Code: GWE002)'
with pytest.raises(AnsibleError, match=re.escape(expected)):
collection.publish_collection(artifact_path, 'https://galaxy.server.com', 'key', False, True)
def test_publish_with_wait(collection_artifact, monkeypatch):
mock_display = MagicMock()
monkeypatch.setattr(Display, 'display', mock_display)
mock_vvv = MagicMock()
monkeypatch.setattr(Display, 'vvv', mock_vvv)
fake_import_uri = 'https://galaxy-server/api/v2/import/1234'
server = 'https://galaxy.server.com'
artifact_path, mock_open = collection_artifact
mock_open.side_effect = (
StringIO(u'{"task":"%s"}' % fake_import_uri),
StringIO(u'{"finished_at":"some_time","state":"success"}')
)
collection.publish_collection(artifact_path, server, 'key', False, True)
assert mock_open.call_count == 2
assert mock_open.mock_calls[1][1][0] == fake_import_uri
assert mock_open.mock_calls[1][2]['headers']['Authorization'] == 'Token key'
assert mock_open.mock_calls[1][2]['validate_certs'] is True
assert mock_open.mock_calls[1][2]['method'] == 'GET'
assert mock_display.call_count == 2
assert mock_display.mock_calls[0][1][0] == "Publishing collection artifact '%s' to %s" % (artifact_path, server)
assert mock_display.mock_calls[1][1][0] == 'Collection has been successfully published to the Galaxy server'
assert mock_vvv.call_count == 2
assert mock_vvv.mock_calls[0][1][0] == 'Collection has been pushed to the Galaxy server %s' % server
assert mock_vvv.mock_calls[1][1][0] == 'Waiting until galaxy import task %s has completed' % fake_import_uri
def test_publish_with_wait_timeout(collection_artifact, monkeypatch):
monkeypatch.setattr(time, 'sleep', MagicMock())
mock_display = MagicMock()
monkeypatch.setattr(Display, 'display', mock_display)
mock_vvv = MagicMock()
monkeypatch.setattr(Display, 'vvv', mock_vvv)
fake_import_uri = 'https://galaxy-server/api/v2/import/1234'
server = 'https://galaxy.server.com'
artifact_path, mock_open = collection_artifact
mock_open.side_effect = (
StringIO(u'{"task":"%s"}' % fake_import_uri),
StringIO(u'{"finished_at":null}'),
StringIO(u'{"finished_at":"some_time","state":"success"}')
)
collection.publish_collection(artifact_path, server, 'key', True, True)
assert mock_open.call_count == 3
assert mock_open.mock_calls[1][1][0] == fake_import_uri
assert mock_open.mock_calls[1][2]['headers']['Authorization'] == 'Token key'
assert mock_open.mock_calls[1][2]['validate_certs'] is False
assert mock_open.mock_calls[1][2]['method'] == 'GET'
assert mock_open.mock_calls[2][1][0] == fake_import_uri
assert mock_open.mock_calls[2][2]['headers']['Authorization'] == 'Token key'
assert mock_open.mock_calls[2][2]['validate_certs'] is False
assert mock_open.mock_calls[2][2]['method'] == 'GET'
assert mock_display.call_count == 2
assert mock_display.mock_calls[0][1][0] == "Publishing collection artifact '%s' to %s" % (artifact_path, server)
assert mock_display.mock_calls[1][1][0] == 'Collection has been successfully published to the Galaxy server'
assert mock_vvv.call_count == 3
assert mock_vvv.mock_calls[0][1][0] == 'Collection has been pushed to the Galaxy server %s' % server
assert mock_vvv.mock_calls[1][1][0] == 'Waiting until galaxy import task %s has completed' % fake_import_uri
assert mock_vvv.mock_calls[2][1][0] == \
'Galaxy import process has a status of waiting, wait 2 seconds before trying again'
def test_publish_with_wait_timeout_failure(collection_artifact, monkeypatch):
monkeypatch.setattr(time, 'sleep', MagicMock())
mock_display = MagicMock()
monkeypatch.setattr(Display, 'display', mock_display)
mock_vvv = MagicMock()
monkeypatch.setattr(Display, 'vvv', mock_vvv)
fake_import_uri = 'https://galaxy-server/api/v2/import/1234'
server = 'https://galaxy.server.com'
artifact_path, mock_open = collection_artifact
mock_open.side_effect = (
StringIO(u'{"task":"%s"}' % fake_import_uri),
StringIO(u'{"finished_at":null}'),
StringIO(u'{"finished_at":null}'),
StringIO(u'{"finished_at":null}'),
StringIO(u'{"finished_at":null}'),
StringIO(u'{"finished_at":null}'),
StringIO(u'{"finished_at":null}'),
StringIO(u'{"finished_at":null}'),
)
expected = "Timeout while waiting for the Galaxy import process to finish, check progress at '%s'" \
% fake_import_uri
with pytest.raises(AnsibleError, match=expected):
collection.publish_collection(artifact_path, server, 'key', True, True)
assert mock_open.call_count == 8
for i in range(7):
mock_call = mock_open.mock_calls[i + 1]
assert mock_call[1][0] == fake_import_uri
assert mock_call[2]['headers']['Authorization'] == 'Token key'
assert mock_call[2]['validate_certs'] is False
assert mock_call[2]['method'] == 'GET'
assert mock_display.call_count == 1
assert mock_display.mock_calls[0][1][0] == "Publishing collection artifact '%s' to %s" % (artifact_path, server)
expected_wait_msg = 'Galaxy import process has a status of waiting, wait {0} seconds before trying again'
assert mock_vvv.call_count == 8
assert mock_vvv.mock_calls[0][1][0] == 'Collection has been pushed to the Galaxy server %s' % server
assert mock_vvv.mock_calls[1][1][0] == 'Waiting until galaxy import task %s has completed' % fake_import_uri
assert mock_vvv.mock_calls[2][1][0] == expected_wait_msg.format(2)
assert mock_vvv.mock_calls[3][1][0] == expected_wait_msg.format(3)
assert mock_vvv.mock_calls[4][1][0] == expected_wait_msg.format(4)
assert mock_vvv.mock_calls[5][1][0] == expected_wait_msg.format(6)
assert mock_vvv.mock_calls[6][1][0] == expected_wait_msg.format(10)
assert mock_vvv.mock_calls[7][1][0] == expected_wait_msg.format(15)
def test_publish_with_wait_and_failure(collection_artifact, monkeypatch):
mock_display = MagicMock()
monkeypatch.setattr(Display, 'display', mock_display)
mock_vvv = MagicMock()
monkeypatch.setattr(Display, 'vvv', mock_vvv)
mock_warn = MagicMock()
monkeypatch.setattr(Display, 'warning', mock_warn)
mock_err = MagicMock()
monkeypatch.setattr(Display, 'error', mock_err)
fake_import_uri = 'https://galaxy-server/api/v2/import/1234'
server = 'https://galaxy.server.com'
artifact_path, mock_open = collection_artifact
import_stat = {
'finished_at': 'some_time',
'state': 'failed',
'error': {
'code': 'GW001',
'description': 'Because I said so!',
},
'messages': [
{
'level': 'error',
'message': 'Some error',
},
{
'level': 'warning',
'message': 'Some warning',
},
{
'level': 'info',
'message': 'Some info',
},
],
}
mock_open.side_effect = (
StringIO(u'{"task":"%s"}' % fake_import_uri),
StringIO(to_text(json.dumps(import_stat)))
)
expected = 'Galaxy import process failed: Because I said so! (Code: GW001)'
with pytest.raises(AnsibleError, match=re.escape(expected)):
collection.publish_collection(artifact_path, server, 'key', True, True)
assert mock_open.call_count == 2
assert mock_open.mock_calls[1][1][0] == fake_import_uri
assert mock_open.mock_calls[1][2]['headers']['Authorization'] == 'Token key'
assert mock_open.mock_calls[1][2]['validate_certs'] is False
assert mock_open.mock_calls[1][2]['method'] == 'GET'
assert mock_display.call_count == 1
assert mock_display.mock_calls[0][1][0] == "Publishing collection artifact '%s' to %s" % (artifact_path, server)
assert mock_vvv.call_count == 3
assert mock_vvv.mock_calls[0][1][0] == 'Collection has been pushed to the Galaxy server %s' % server
assert mock_vvv.mock_calls[1][1][0] == 'Waiting until galaxy import task %s has completed' % fake_import_uri
assert mock_vvv.mock_calls[2][1][0] == 'Galaxy import message: info - Some info'
assert mock_warn.call_count == 1
assert mock_warn.mock_calls[0][1][0] == 'Galaxy import warning message: Some warning'
assert mock_err.call_count == 1
assert mock_err.mock_calls[0][1][0] == 'Galaxy import error message: Some error'
def test_publish_with_wait_and_failure_and_no_error(collection_artifact, monkeypatch):
mock_display = MagicMock()
monkeypatch.setattr(Display, 'display', mock_display)
mock_vvv = MagicMock()
monkeypatch.setattr(Display, 'vvv', mock_vvv)
mock_warn = MagicMock()
monkeypatch.setattr(Display, 'warning', mock_warn)
mock_err = MagicMock()
monkeypatch.setattr(Display, 'error', mock_err)
fake_import_uri = 'https://galaxy-server/api/v2/import/1234'
server = 'https://galaxy.server.com'
artifact_path, mock_open = collection_artifact
import_stat = {
'finished_at': 'some_time',
'state': 'failed',
'error': {},
'messages': [
{
'level': 'error',
'message': 'Some error',
},
{
'level': 'warning',
'message': 'Some warning',
},
{
'level': 'info',
'message': 'Some info',
},
],
}
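# With an empty error dict the code should fall back to the generic UNKNOWN error message asserted below.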
mock_open.side_effect = (
StringIO(u'{"task":"%s"}' % fake_import_uri),
StringIO(to_text(json.dumps(import_stat)))
)
expected = 'Galaxy import process failed: Unknown error, see %s for more details (Code: UNKNOWN)' % fake_import_uri
with pytest.raises(AnsibleError, match=re.escape(expected)):
collection.publish_collection(artifact_path, server, 'key', True, True)
assert mock_open.call_count == 2
assert mock_open.mock_calls[1][1][0] == fake_import_uri
assert mock_open.mock_calls[1][2]['headers']['Authorization'] == 'Token key'
assert mock_open.mock_calls[1][2]['validate_certs'] is False
assert mock_open.mock_calls[1][2]['method'] == 'GET'
assert mock_display.call_count == 1
assert mock_display.mock_calls[0][1][0] == "Publishing collection artifact '%s' to %s" % (artifact_path, server)
assert mock_vvv.call_count == 3
assert mock_vvv.mock_calls[0][1][0] == 'Collection has been pushed to the Galaxy server %s' % server
assert mock_vvv.mock_calls[1][1][0] == 'Waiting until galaxy import task %s has completed' % fake_import_uri
assert mock_vvv.mock_calls[2][1][0] == 'Galaxy import message: info - Some info'
assert mock_warn.call_count == 1
assert mock_warn.mock_calls[0][1][0] == 'Galaxy import warning message: Some warning'
assert mock_err.call_count == 1
assert mock_err.mock_calls[0][1][0] == 'Galaxy import error message: Some error'
@pytest.mark.parametrize('requirements_file', [None], indirect=True)
def test_parse_requirements_file_that_doesnt_exist(requirements_file):
expected = "The requirements file '%s' does not exist." % to_native(requirements_file)
with pytest.raises(AnsibleError, match=expected):
collection.parse_collections_requirements_file(requirements_file)
@pytest.mark.parametrize('requirements_file', ['not a valid yml file: hi: world'], indirect=True)
def test_parse_requirements_file_that_isnt_yaml(requirements_file):
expected = "Failed to parse the collection requirements yml at '%s' with the following error" \
% to_native(requirements_file)
with pytest.raises(AnsibleError, match=expected):
collection.parse_collections_requirements_file(requirements_file)
@pytest.mark.parametrize('requirements_file', [('''
# Older, role-based requirements.yml
- galaxy.role
- anotherrole
'''), ('''
# Doesn't have the collections key
roles:
- galaxy.role
- anotherrole
''')], indirect=True)
def test_parse_requirements_in_invalid_format(requirements_file):
expected = "Expecting collections requirements file to be a dict with the key collections that contains a list " \
"of collections to install."
with pytest.raises(AnsibleError, match=expected):
collection.parse_collections_requirements_file(requirements_file)
@pytest.mark.parametrize('requirements_file', ['''
collections:
- version: 1.0.0
'''], indirect=True)
def test_parse_requirements_without_mandatory_name_key(requirements_file):
expected = "Collections requirement entry should contain the key name."
with pytest.raises(AnsibleError, match=expected):
collection.parse_collections_requirements_file(requirements_file)
@pytest.mark.parametrize('requirements_file', [('''
collections:
- namespace.collection1
- namespace.collection2
'''), ('''
collections:
- name: namespace.collection1
- name: namespace.collection2
''')], indirect=True)
def test_parse_requirements(requirements_file):
expected = [('namespace.collection1', '*', None), ('namespace.collection2', '*', None)]
actual = collection.parse_collections_requirements_file(requirements_file)
assert actual == expected
@pytest.mark.parametrize('requirements_file', ['''
collections:
- name: namespace.collection1
version: ">=1.0.0,<=2.0.0"
source: https://galaxy-dev.ansible.com
- namespace.collection2'''], indirect=True)
def test_parse_requirements_with_extra_info(requirements_file):
expected = [('namespace.collection1', '>=1.0.0,<=2.0.0', 'https://galaxy-dev.ansible.com'),
('namespace.collection2', '*', None)]
actual = collection.parse_collections_requirements_file(requirements_file)
assert actual == expected
def test_find_existing_collections(tmp_path_factory, monkeypatch):
test_dir = to_text(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections'))
collection1 = os.path.join(test_dir, 'namespace1', 'collection1')
collection2 = os.path.join(test_dir, 'namespace2', 'collection2')
fake_collection1 = os.path.join(test_dir, 'namespace3', 'collection3')
fake_collection2 = os.path.join(test_dir, 'namespace4')
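# The 'fake' entries are plain files rather than directories, so the scan should ignore them.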
os.makedirs(collection1)
os.makedirs(collection2)
os.makedirs(os.path.split(fake_collection1)[0])
open(fake_collection1, 'wb+').close()
open(fake_collection2, 'wb+').close()
collection1_manifest = json.dumps({
'collection_info': {
'namespace': 'namespace1',
'name': 'collection1',
'version': '1.2.3',
'authors': ['Jordan Borean'],
'readme': 'README.md',
'dependencies': {},
},
'format': 1,
})
with open(os.path.join(collection1, 'MANIFEST.json'), 'wb') as manifest_obj:
manifest_obj.write(to_bytes(collection1_manifest))
mock_warning = MagicMock()
monkeypatch.setattr(Display, 'warning', mock_warning)
actual = collection._find_existing_collections(test_dir)
assert len(actual) == 2
for actual_collection in actual:
assert actual_collection.skip is True
if str(actual_collection) == 'namespace1.collection1':
assert actual_collection.namespace == 'namespace1'
assert actual_collection.name == 'collection1'
assert actual_collection.b_path == to_bytes(collection1)
assert actual_collection.source is None
assert actual_collection.versions == set(['1.2.3'])
assert actual_collection.latest_version == '1.2.3'
assert actual_collection.dependencies == {}
else:
assert actual_collection.namespace == 'namespace2'
assert actual_collection.name == 'collection2'
assert actual_collection.b_path == to_bytes(collection2)
assert actual_collection.source is None
assert actual_collection.versions == set(['*'])
assert actual_collection.latest_version == '*'
assert actual_collection.dependencies == {}
assert mock_warning.call_count == 1
assert mock_warning.mock_calls[0][1][0] == "Collection at '%s' does not have a MANIFEST.json file, cannot " \
"detect version." % to_text(collection2)
def test_download_file(tmp_path_factory, monkeypatch):
temp_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections'))
data = b"\x00\x01\x02\x03"
sha256_hash = sha256()
sha256_hash.update(data)
mock_open = MagicMock()
mock_open.return_value = BytesIO(data)
monkeypatch.setattr(collection, 'open_url', mock_open)
expected = os.path.join(temp_dir, b'file')
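# Only the prefix is asserted; the implementation appears to download to a uniquely-suffixed temp file name.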
actual = collection._download_file('http://google.com/file', temp_dir, sha256_hash.hexdigest(), True)
assert actual.startswith(expected)
assert os.path.isfile(actual)
with open(actual, 'rb') as file_obj:
assert file_obj.read() == data
assert mock_open.call_count == 1
assert mock_open.mock_calls[0][1][0] == 'http://google.com/file'
def test_download_file_hash_mismatch(tmp_path_factory, monkeypatch):
temp_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections'))
data = b"\x00\x01\x02\x03"
mock_open = MagicMock()
mock_open.return_value = BytesIO(data)
monkeypatch.setattr(collection, 'open_url', mock_open)
expected = "Mismatch artifact hash with downloaded file"
with pytest.raises(AnsibleError, match=expected):
collection._download_file('http://google.com/file', temp_dir, 'bad', True)
def test_extract_tar_file_invalid_hash(tmp_tarfile):
temp_dir, tfile, filename, dummy = tmp_tarfile
expected = "Checksum mismatch for '%s' inside collection at '%s'" % (to_native(filename), to_native(tfile.name))
with pytest.raises(AnsibleError, match=expected):
collection._extract_tar_file(tfile, filename, temp_dir, temp_dir, "fakehash")
def test_extract_tar_file_missing_member(tmp_tarfile):
temp_dir, tfile, dummy, dummy = tmp_tarfile
expected = "Collection tar at '%s' does not contain the expected file 'missing'." % to_native(tfile.name)
with pytest.raises(AnsibleError, match=expected):
collection._extract_tar_file(tfile, 'missing', temp_dir, temp_dir)
def test_extract_tar_file_missing_parent_dir(tmp_tarfile):
temp_dir, tfile, filename, checksum = tmp_tarfile
output_dir = os.path.join(temp_dir, b'output')
output_file = os.path.join(output_dir, to_bytes(filename))
collection._extract_tar_file(tfile, filename, output_dir, temp_dir, checksum)
assert os.path.isfile(output_file)

@ -0,0 +1,719 @@
# -*- coding: utf-8 -*-
# Copyright: (c) 2019, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
import os
import pytest
import re
import shutil
import tarfile
import yaml
from io import BytesIO, StringIO
from units.compat.mock import MagicMock
import ansible.module_utils.six.moves.urllib.error as urllib_error
from ansible.cli.galaxy import GalaxyCLI
from ansible.errors import AnsibleError
from ansible.galaxy import collection
from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.utils import context_objects as co
from ansible.utils.display import Display
def call_galaxy_cli(args):
orig = co.GlobalCLIArgs._Singleton__instance
co.GlobalCLIArgs._Singleton__instance = None
try:
GalaxyCLI(args=['ansible-galaxy', 'collection'] + args).run()
finally:
co.GlobalCLIArgs._Singleton__instance = orig
def artifact_json(namespace, name, version, dependencies, server):
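# Helper: builds the JSON payload a Galaxy server returns for a single collection version.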
json_str = json.dumps({
'artifact': {
'filename': '%s-%s-%s.tar.gz' % (namespace, name, version),
'sha256': '2d76f3b8c4bab1072848107fb3914c345f71a12a1722f25c08f5d3f51f4ab5fd',
'size': 1234,
},
'download_url': '%s/download/%s-%s-%s.tar.gz' % (server, namespace, name, version),
'metadata': {
'namespace': namespace,
'name': name,
'dependencies': dependencies,
},
'version': version
})
return to_text(json_str)
def artifact_versions_json(namespace, name, versions, server):
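# Helper: builds the paginated version-list JSON payload a Galaxy server returns for a collection.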
results = []
for version in versions:
results.append({
'href': '%s/api/v2/%s/%s/versions/%s/' % (server, namespace, name, version),
'version': version,
})
json_str = json.dumps({
'count': len(versions),
'next': None,
'previous': None,
'results': results
})
return to_text(json_str)
@pytest.fixture()
def collection_artifact(request, tmp_path_factory):
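# Creates a collection skeleton via the CLI, optionally injects dependencies into galaxy.yml (via indirect
# parametrization), builds the tarball, and returns (collection_path, collection_tar) as byte strings.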
test_dir = to_text(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
namespace = 'ansible_namespace'
collection = 'collection'
skeleton_path = os.path.join(os.path.dirname(os.path.split(__file__)[0]), 'cli', 'test_data', 'collection_skeleton')
collection_path = os.path.join(test_dir, namespace, collection)
call_galaxy_cli(['init', '%s.%s' % (namespace, collection), '-c', '--init-path', test_dir,
'--collection-skeleton', skeleton_path])
dependencies = getattr(request, 'param', None)
if dependencies:
galaxy_yml = os.path.join(collection_path, 'galaxy.yml')
with open(galaxy_yml, 'rb+') as galaxy_obj:
existing_yaml = yaml.safe_load(galaxy_obj)
existing_yaml['dependencies'] = dependencies
galaxy_obj.seek(0)
galaxy_obj.write(to_bytes(yaml.safe_dump(existing_yaml)))
galaxy_obj.truncate()
call_galaxy_cli(['build', collection_path, '--output-path', test_dir])
collection_tar = os.path.join(test_dir, '%s-%s-0.1.0.tar.gz' % (namespace, collection))
return to_bytes(collection_path), to_bytes(collection_tar)
def test_build_requirement_from_path(collection_artifact):
actual = collection.CollectionRequirement.from_path(collection_artifact[0], True, True)
assert actual.namespace == u'ansible_namespace'
assert actual.name == u'collection'
assert actual.b_path == collection_artifact[0]
assert actual.source is None
assert actual.skip is True
assert actual.versions == set([u'*'])
assert actual.latest_version == u'*'
assert actual.dependencies == {}
def test_build_requirement_from_path_with_manifest(collection_artifact):
manifest_path = os.path.join(collection_artifact[0], b'MANIFEST.json')
manifest_value = json.dumps({
'collection_info': {
'namespace': 'namespace',
'name': 'name',
'version': '1.1.1',
'dependencies': {
'ansible_namespace.collection': '*'
}
}
})
with open(manifest_path, 'wb') as manifest_obj:
manifest_obj.write(to_bytes(manifest_value))
actual = collection.CollectionRequirement.from_path(collection_artifact[0], True, True)
# While the folder name suggests a different collection, we treat MANIFEST.json as the source of truth.
assert actual.namespace == u'namespace'
assert actual.name == u'name'
assert actual.b_path == collection_artifact[0]
assert actual.source is None
assert actual.skip is True
assert actual.versions == set([u'1.1.1'])
assert actual.latest_version == u'1.1.1'
assert actual.dependencies == {'ansible_namespace.collection': '*'}
def test_build_requirement_from_path_invalid_manifest(collection_artifact):
manifest_path = os.path.join(collection_artifact[0], b'MANIFEST.json')
with open(manifest_path, 'wb') as manifest_obj:
manifest_obj.write(b"not json")
expected = "Collection file at '%s' does not contain a valid json string." % to_native(manifest_path)
with pytest.raises(AnsibleError, match=expected):
collection.CollectionRequirement.from_path(collection_artifact[0], True, True)
def test_build_requirement_from_tar(collection_artifact):
actual = collection.CollectionRequirement.from_tar(collection_artifact[1], True, True)
assert actual.namespace == u'ansible_namespace'
assert actual.name == u'collection'
assert actual.b_path == collection_artifact[1]
assert actual.source is None
assert actual.skip is False
assert actual.versions == set([u'0.1.0'])
assert actual.latest_version == u'0.1.0'
assert actual.dependencies == {}
def test_build_requirement_from_tar_fail_not_tar(tmp_path_factory):
test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
test_file = os.path.join(test_dir, b'fake.tar.gz')
with open(test_file, 'wb') as test_obj:
test_obj.write(b"\x00\x01\x02\x03")
expected = "Collection artifact at '%s' is not a valid tar file." % to_native(test_file)
with pytest.raises(AnsibleError, match=expected):
collection.CollectionRequirement.from_tar(test_file, True, True)
def test_build_requirement_from_tar_no_manifest(tmp_path_factory):
test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
json_data = to_bytes(json.dumps(
{
'files': [],
'format': 1,
}
))
tar_path = os.path.join(test_dir, b'ansible-collections.tar.gz')
with tarfile.open(tar_path, 'w:gz') as tfile:
b_io = BytesIO(json_data)
tar_info = tarfile.TarInfo('FILES.json')
tar_info.size = len(json_data)
tar_info.mode = 0o0644
tfile.addfile(tarinfo=tar_info, fileobj=b_io)
expected = "Collection at '%s' does not contain the required file MANIFEST.json." % to_native(tar_path)
with pytest.raises(AnsibleError, match=expected):
collection.CollectionRequirement.from_tar(tar_path, True, True)
def test_build_requirement_from_tar_no_files(tmp_path_factory):
test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
json_data = to_bytes(json.dumps(
{
'collection_info': {},
}
))
tar_path = os.path.join(test_dir, b'ansible-collections.tar.gz')
with tarfile.open(tar_path, 'w:gz') as tfile:
b_io = BytesIO(json_data)
tar_info = tarfile.TarInfo('MANIFEST.json')
tar_info.size = len(json_data)
tar_info.mode = 0o0644
tfile.addfile(tarinfo=tar_info, fileobj=b_io)
expected = "Collection at '%s' does not contain the required file FILES.json." % to_native(tar_path)
with pytest.raises(AnsibleError, match=expected):
collection.CollectionRequirement.from_tar(tar_path, True, True)
def test_build_requirement_from_tar_invalid_manifest(tmp_path_factory):
test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
json_data = b"not a json"
tar_path = os.path.join(test_dir, b'ansible-collections.tar.gz')
with tarfile.open(tar_path, 'w:gz') as tfile:
b_io = BytesIO(json_data)
tar_info = tarfile.TarInfo('MANIFEST.json')
tar_info.size = len(json_data)
tar_info.mode = 0o0644
tfile.addfile(tarinfo=tar_info, fileobj=b_io)
expected = "Collection tar file member MANIFEST.json does not contain a valid json string."
with pytest.raises(AnsibleError, match=expected):
collection.CollectionRequirement.from_tar(tar_path, True, True)
def test_build_requirement_from_name(monkeypatch):
galaxy_server = 'https://galaxy.ansible.com'
json_str = artifact_versions_json('namespace', 'collection', ['2.1.9', '2.1.10'], galaxy_server)
mock_open = MagicMock()
mock_open.return_value = StringIO(json_str)
monkeypatch.setattr(collection, 'open_url', mock_open)
actual = collection.CollectionRequirement.from_name('namespace.collection', [galaxy_server], '*', True, True)
assert actual.namespace == u'namespace'
assert actual.name == u'collection'
assert actual.b_path is None
assert actual.source == to_text(galaxy_server)
assert actual.skip is False
assert actual.versions == set([u'2.1.9', u'2.1.10'])
assert actual.latest_version == u'2.1.10'
assert actual.dependencies is None
assert mock_open.call_count == 1
assert mock_open.mock_calls[0][1][0] == u"%s/api/v2/collections/namespace/collection/versions/" % galaxy_server
assert mock_open.mock_calls[0][2] == {'validate_certs': True}
def test_build_requirement_from_name_with_prerelease(monkeypatch):
galaxy_server = 'https://galaxy-dev.ansible.com'
json_str = artifact_versions_json('namespace', 'collection', ['1.0.1', '2.0.1-beta.1', '2.0.1'], galaxy_server)
mock_open = MagicMock()
mock_open.return_value = StringIO(json_str)
monkeypatch.setattr(collection, 'open_url', mock_open)
actual = collection.CollectionRequirement.from_name('namespace.collection', [galaxy_server], '*', True, True)
assert actual.namespace == u'namespace'
assert actual.name == u'collection'
assert actual.b_path is None
assert actual.source == to_text(galaxy_server)
assert actual.skip is False
assert actual.versions == set([u'1.0.1', u'2.0.1'])
assert actual.latest_version == u'2.0.1'
assert actual.dependencies is None
assert mock_open.call_count == 1
assert mock_open.mock_calls[0][1][0] == u"%s/api/v2/collections/namespace/collection/versions/" % galaxy_server
assert mock_open.mock_calls[0][2] == {'validate_certs': True}
def test_build_requirement_from_name_with_prerelease_explicit(monkeypatch):
galaxy_server = 'https://galaxy-dev.ansible.com'
json_str = artifact_json('namespace', 'collection', '2.0.1-beta.1', {}, galaxy_server)
mock_open = MagicMock()
mock_open.return_value = StringIO(json_str)
monkeypatch.setattr(collection, 'open_url', mock_open)
actual = collection.CollectionRequirement.from_name('namespace.collection', [galaxy_server], '2.0.1-beta.1', True,
True)
assert actual.namespace == u'namespace'
assert actual.name == u'collection'
assert actual.b_path is None
assert actual.source == to_text(galaxy_server)
assert actual.skip is False
assert actual.versions == set([u'2.0.1-beta.1'])
assert actual.latest_version == u'2.0.1-beta.1'
assert actual.dependencies == {}
assert mock_open.call_count == 1
assert mock_open.mock_calls[0][1][0] == u"%s/api/v2/collections/namespace/collection/versions/2.0.1-beta.1/" \
% galaxy_server
assert mock_open.mock_calls[0][2] == {'validate_certs': True}
def test_build_requirement_from_name_second_server(monkeypatch):
galaxy_server = 'https://galaxy-dev.ansible.com'
json_str = artifact_versions_json('namespace', 'collection', ['1.0.1', '1.0.2', '1.0.3'], galaxy_server)
mock_open = MagicMock()
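# The first server responds with a 404, so resolution should fall back to the second server.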
mock_open.side_effect = (
urllib_error.HTTPError('https://galaxy.server.com', 404, 'msg', {}, None),
StringIO(json_str)
)
monkeypatch.setattr(collection, 'open_url', mock_open)
actual = collection.CollectionRequirement.from_name('namespace.collection', ['https://broken.com/', galaxy_server],
'>1.0.1', False, True)
assert actual.namespace == u'namespace'
assert actual.name == u'collection'
assert actual.b_path is None
assert actual.source == to_text(galaxy_server)
assert actual.skip is False
assert actual.versions == set([u'1.0.2', u'1.0.3'])
assert actual.latest_version == u'1.0.3'
assert actual.dependencies is None
assert mock_open.call_count == 2
assert mock_open.mock_calls[0][1][0] == u"https://broken.com/api/v2/collections/namespace/collection/versions/"
assert mock_open.mock_calls[0][2] == {'validate_certs': False}
assert mock_open.mock_calls[1][1][0] == u"%s/api/v2/collections/namespace/collection/versions/" % galaxy_server
assert mock_open.mock_calls[1][2] == {'validate_certs': False}
def test_build_requirement_from_name_missing(monkeypatch):
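# Every configured server returns a 404, so the lookup should fail outright.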
mock_open = MagicMock()
mock_open.side_effect = urllib_error.HTTPError('https://galaxy.server.com', 404, 'msg', {}, None)
monkeypatch.setattr(collection, 'open_url', mock_open)
expected = "Failed to find collection namespace.collection:*"
with pytest.raises(AnsibleError, match=expected):
collection.CollectionRequirement.from_name('namespace.collection',
['https://broken.com/', 'https://broken2.com'], '*', False, True)
def test_build_requirement_from_name_single_version(monkeypatch):
galaxy_server = 'https://galaxy.ansible.com'
json_str = artifact_json('namespace', 'collection', '2.0.0', {}, galaxy_server)
mock_open = MagicMock()
mock_open.return_value = StringIO(json_str)
monkeypatch.setattr(collection, 'open_url', mock_open)
actual = collection.CollectionRequirement.from_name('namespace.collection', [galaxy_server], '2.0.0', True, True)
assert actual.namespace == u'namespace'
assert actual.name == u'collection'
assert actual.b_path is None
assert actual.source == to_text(galaxy_server)
assert actual.skip is False
assert actual.versions == set([u'2.0.0'])
assert actual.latest_version == u'2.0.0'
assert actual.dependencies == {}
assert mock_open.call_count == 1
assert mock_open.mock_calls[0][1][0] == u"%s/api/v2/collections/namespace/collection/versions/2.0.0/" \
% galaxy_server
assert mock_open.mock_calls[0][2] == {'validate_certs': True}
def test_build_requirement_from_name_multiple_versions_one_match(monkeypatch):
galaxy_server = 'https://galaxy.ansible.com'
json_str1 = artifact_versions_json('namespace', 'collection', ['2.0.0', '2.0.1', '2.0.2'], galaxy_server)
json_str2 = artifact_json('namespace', 'collection', '2.0.1', {}, galaxy_server)
mock_open = MagicMock()
mock_open.side_effect = (StringIO(json_str1), StringIO(json_str2))
monkeypatch.setattr(collection, 'open_url', mock_open)
actual = collection.CollectionRequirement.from_name('namespace.collection', [galaxy_server], '>=2.0.1,<2.0.2',
True, True)
assert actual.namespace == u'namespace'
assert actual.name == u'collection'
assert actual.b_path is None
assert actual.source == to_text(galaxy_server)
assert actual.skip is False
assert actual.versions == set([u'2.0.1'])
assert actual.latest_version == u'2.0.1'
assert actual.dependencies == {}
assert mock_open.call_count == 2
assert mock_open.mock_calls[0][1][0] == u"%s/api/v2/collections/namespace/collection/versions/" % galaxy_server
assert mock_open.mock_calls[0][2] == {'validate_certs': True}
assert mock_open.mock_calls[1][1][0] == u"%s/api/v2/collections/namespace/collection/versions/2.0.1/" \
% galaxy_server
assert mock_open.mock_calls[1][2] == {'validate_certs': True}
def test_build_requirement_from_name_multiple_version_results(monkeypatch):
galaxy_server = 'https://galaxy-dev.ansible.com'
json_str1 = json.dumps({
'count': 6,
'next': '%s/api/v2/collections/namespace/collection/versions/?page=2' % galaxy_server,
'previous': None,
'results': [
{
'href': '%s/api/v2/collections/namespace/collection/versions/2.0.0/' % galaxy_server,
'version': '2.0.0',
},
{
'href': '%s/api/v2/collections/namespace/collection/versions/2.0.1/' % galaxy_server,
'version': '2.0.1',
},
{
'href': '%s/api/v2/collections/namespace/collection/versions/2.0.2/' % galaxy_server,
'version': '2.0.2',
},
]
})
json_str2 = json.dumps({
'count': 6,
'next': None,
'previous': '%s/api/v2/collections/namespace/collection/versions/?page=1' % galaxy_server,
'results': [
{
'href': '%s/api/v2/collections/namespace/collection/versions/2.0.3/' % galaxy_server,
'version': '2.0.3',
},
{
'href': '%s/api/v2/collections/namespace/collection/versions/2.0.4/' % galaxy_server,
'version': '2.0.4',
},
{
'href': '%s/api/v2/collections/namespace/collection/versions/2.0.5/' % galaxy_server,
'version': '2.0.5',
},
]
})
mock_open = MagicMock()
mock_open.side_effect = (StringIO(to_text(json_str1)), StringIO(to_text(json_str2)))
monkeypatch.setattr(collection, 'open_url', mock_open)
actual = collection.CollectionRequirement.from_name('namespace.collection', [galaxy_server], '!=2.0.2',
True, True)
assert actual.namespace == u'namespace'
assert actual.name == u'collection'
assert actual.b_path is None
assert actual.source == to_text(galaxy_server)
assert actual.skip is False
assert actual.versions == set([u'2.0.0', u'2.0.1', u'2.0.3', u'2.0.4', u'2.0.5'])
assert actual.latest_version == u'2.0.5'
assert actual.dependencies is None
assert mock_open.call_count == 2
assert mock_open.mock_calls[0][1][0] == u"%s/api/v2/collections/namespace/collection/versions/" % galaxy_server
assert mock_open.mock_calls[0][2] == {'validate_certs': True}
assert mock_open.mock_calls[1][1][0] == u"%s/api/v2/collections/namespace/collection/versions/?page=2" \
% galaxy_server
assert mock_open.mock_calls[1][2] == {'validate_certs': True}
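# Each case: available versions, a requirement spec, the versions expected to survive filtering, and the expected latest.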
@pytest.mark.parametrize('versions, requirement, expected_filter, expected_latest', [
[['1.0.0', '1.0.1'], '*', ['1.0.0', '1.0.1'], '1.0.1'],
[['1.0.0', '1.0.5', '1.1.0'], '>1.0.0,<1.1.0', ['1.0.5'], '1.0.5'],
[['1.0.0', '1.0.5', '1.1.0'], '>1.0.0,<=1.0.5', ['1.0.5'], '1.0.5'],
[['1.0.0', '1.0.5', '1.1.0'], '>=1.1.0', ['1.1.0'], '1.1.0'],
[['1.0.0', '1.0.5', '1.1.0'], '!=1.1.0', ['1.0.0', '1.0.5'], '1.0.5'],
[['1.0.0', '1.0.5', '1.1.0'], '==1.0.5', ['1.0.5'], '1.0.5'],
[['1.0.0', '1.0.5', '1.1.0'], '1.0.5', ['1.0.5'], '1.0.5'],
[['1.0.0', '2.0.0', '3.0.0'], '>=2', ['2.0.0', '3.0.0'], '3.0.0'],
])
def test_add_collection_requirements(versions, requirement, expected_filter, expected_latest):
req = collection.CollectionRequirement('namespace', 'name', None, 'https://galaxy.com', versions, requirement,
False)
assert req.versions == set(expected_filter)
assert req.latest_version == expected_latest
def test_add_collection_requirement_to_unknown_installed_version():
req = collection.CollectionRequirement('namespace', 'name', None, 'https://galaxy.com', ['*'], '*', False,
skip=True)
expected = "Cannot meet requirement namespace.name:1.0.0 as it is already installed at version 'unknown'."
with pytest.raises(AnsibleError, match=expected):
req.add_requirement(str(req), '1.0.0')
def test_add_collection_wildcard_requirement_to_unknown_installed_version():
req = collection.CollectionRequirement('namespace', 'name', None, 'https://galaxy.com', ['*'], '*', False,
skip=True)
req.add_requirement(str(req), '*')
assert req.versions == set(['*'])
assert req.latest_version == '*'
def test_add_collection_requirement_with_conflict():
source = 'https://galaxy.ansible.com'
expected = "Cannot meet requirement ==1.0.2 for dependency namespace.name from source '%s'. Available versions " \
"before last requirement added: 1.0.0, 1.0.1\n" \
"Requirements from:\n" \
"\tbase - 'namespace.name:==1.0.2'" % source
with pytest.raises(AnsibleError, match=expected):
collection.CollectionRequirement('namespace', 'name', None, source, ['1.0.0', '1.0.1'], '==1.0.2', False)
def test_add_requirement_to_existing_collection_with_conflict():
source = 'https://galaxy.ansible.com'
req = collection.CollectionRequirement('namespace', 'name', None, source, ['1.0.0', '1.0.1'], '*', False)
expected = "Cannot meet dependency requirement 'namespace.name:1.0.2' for collection namespace.collection2 from " \
"source '%s'. Available versions before last requirement added: 1.0.0, 1.0.1\n" \
"Requirements from:\n" \
"\tbase - 'namespace.name:*'\n" \
"\tnamespace.collection2 - 'namespace.name:1.0.2'" % source
with pytest.raises(AnsibleError, match=re.escape(expected)):
req.add_requirement('namespace.collection2', '1.0.2')
def test_add_requirement_to_installed_collection_with_conflict():
source = 'https://galaxy.ansible.com'
req = collection.CollectionRequirement('namespace', 'name', None, source, ['1.0.0', '1.0.1'], '*', False,
skip=True)
expected = "Cannot meet requirement namespace.name:1.0.2 as it is already installed at version '1.0.1'. " \
"Use --force to overwrite"
with pytest.raises(AnsibleError, match=re.escape(expected)):
req.add_requirement(None, '1.0.2')
def test_add_requirement_to_installed_collection_with_conflict_as_dep():
source = 'https://galaxy.ansible.com'
req = collection.CollectionRequirement('namespace', 'name', None, source, ['1.0.0', '1.0.1'], '*', False,
skip=True)
expected = "Cannot meet requirement namespace.name:1.0.2 as it is already installed at version '1.0.1'. " \
"Use --force-with-deps to overwrite"
with pytest.raises(AnsibleError, match=re.escape(expected)):
req.add_requirement('namespace.collection2', '1.0.2')
def test_install_skipped_collection(monkeypatch):
mock_display = MagicMock()
monkeypatch.setattr(Display, 'display', mock_display)
req = collection.CollectionRequirement('namespace', 'name', None, 'source', ['1.0.0'], '*', False, skip=True)
req.install(None, None)
assert mock_display.call_count == 1
assert mock_display.mock_calls[0][1][0] == "Skipping 'namespace.name' as it is already installed"
def test_install_collection(collection_artifact, monkeypatch):
mock_display = MagicMock()
monkeypatch.setattr(Display, 'display', mock_display)
collection_tar = collection_artifact[1]
output_path = os.path.join(os.path.split(collection_tar)[0], b'output')
collection_path = os.path.join(output_path, b'ansible_namespace', b'collection')
os.makedirs(os.path.join(collection_path, b'delete_me')) # Create a folder to verify the install cleans out the dir
temp_path = os.path.join(os.path.split(collection_tar)[0], b'temp')
os.makedirs(temp_path)
req = collection.CollectionRequirement.from_tar(collection_tar, True, True)
req.install(to_text(output_path), temp_path)
# Ensure the temp directory is empty, nothing is left behind
assert os.listdir(temp_path) == []
actual_files = os.listdir(collection_path)
actual_files.sort()
assert actual_files == [b'FILES.json', b'MANIFEST.json', b'README.md', b'docs', b'playbooks', b'plugins', b'roles']
assert mock_display.call_count == 1
assert mock_display.mock_calls[0][1][0] == "Installing 'ansible_namespace.collection:0.1.0' to '%s'" \
% to_text(collection_path)
def test_install_collection_with_download(collection_artifact, monkeypatch):
collection_tar = collection_artifact[1]
output_path = os.path.join(os.path.split(collection_tar)[0], b'output')
collection_path = os.path.join(output_path, b'ansible_namespace', b'collection')
mock_display = MagicMock()
monkeypatch.setattr(Display, 'display', mock_display)
mock_download = MagicMock()
mock_download.return_value = collection_tar
monkeypatch.setattr(collection, '_download_file', mock_download)
temp_path = os.path.join(os.path.split(collection_tar)[0], b'temp')
os.makedirs(temp_path)
req = collection.CollectionRequirement('ansible_namespace', 'collection', None, ['https://galaxy.ansible.com'],
['0.1.0'], '*', False)
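# _galaxy_info mimics the artifact metadata normally fetched from the Galaxy API so install() knows what to download.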
req._galaxy_info = {
'download_url': 'https://downloadme.com',
'artifact': {
'sha256': 'myhash',
},
}
req.install(to_text(output_path), temp_path)
# Ensure the temp directory is empty, nothing is left behind
assert os.listdir(temp_path) == []
actual_files = os.listdir(collection_path)
actual_files.sort()
assert actual_files == [b'FILES.json', b'MANIFEST.json', b'README.md', b'docs', b'playbooks', b'plugins', b'roles']
assert mock_display.call_count == 1
assert mock_display.mock_calls[0][1][0] == "Installing 'ansible_namespace.collection:0.1.0' to '%s'" \
% to_text(collection_path)
assert mock_download.call_count == 1
assert mock_download.mock_calls[0][1][0] == 'https://downloadme.com'
assert mock_download.mock_calls[0][1][1] == temp_path
assert mock_download.mock_calls[0][1][2] == 'myhash'
assert mock_download.mock_calls[0][1][3] is True
def test_install_collections_from_tar(collection_artifact, monkeypatch):
collection_path, collection_tar = collection_artifact
temp_path = os.path.split(collection_tar)[0]
shutil.rmtree(collection_path)
mock_display = MagicMock()
monkeypatch.setattr(Display, 'display', mock_display)
collection.install_collections([(to_text(collection_tar), '*', None,)], to_text(temp_path),
[u'https://galaxy.ansible.com'], True, False, False, False, False)
assert os.path.isdir(collection_path)
actual_files = os.listdir(collection_path)
actual_files.sort()
assert actual_files == [b'FILES.json', b'MANIFEST.json', b'README.md', b'docs', b'playbooks', b'plugins', b'roles']
with open(os.path.join(collection_path, b'MANIFEST.json'), 'rb') as manifest_obj:
actual_manifest = json.loads(to_text(manifest_obj.read()))
assert actual_manifest['collection_info']['namespace'] == 'ansible_namespace'
assert actual_manifest['collection_info']['name'] == 'collection'
assert actual_manifest['collection_info']['version'] == '0.1.0'
assert mock_display.call_count == 1
assert mock_display.mock_calls[0][1][0] == "Installing 'ansible_namespace.collection:0.1.0' to '%s'" \
% to_text(collection_path)
def test_install_collections_existing_without_force(collection_artifact, monkeypatch):
collection_path, collection_tar = collection_artifact
temp_path = os.path.split(collection_tar)[0]
mock_display = MagicMock()
monkeypatch.setattr(Display, 'display', mock_display)
# If we don't delete collection_path, the installer will treat the original build skeleton as already installed, so we expect a skip
collection.install_collections([(to_text(collection_tar), '*', None,)], to_text(temp_path),
[u'https://galaxy.ansible.com'], True, False, False, False, False)
assert os.path.isdir(collection_path)
actual_files = os.listdir(collection_path)
actual_files.sort()
assert actual_files == [b'README.md', b'docs', b'galaxy.yml', b'playbooks', b'plugins', b'roles']
assert mock_display.call_count == 2
# Msg 1 is the warning about the missing MANIFEST.json; we can't reliably assert on its text because the line breaks
# vary with the path length
assert mock_display.mock_calls[1][1][0] == "Skipping 'ansible_namespace.collection' as it is already installed"
# Makes sure the dependency resolver doesn't get stuck in an infinite loop when a collection depends on itself
@pytest.mark.parametrize('collection_artifact', [
{'ansible_namespace.collection': '>=0.0.1'},
], indirect=True)
def test_install_collection_with_circular_dependency(collection_artifact, monkeypatch):
collection_path, collection_tar = collection_artifact
temp_path = os.path.split(collection_tar)[0]
shutil.rmtree(collection_path)
mock_display = MagicMock()
monkeypatch.setattr(Display, 'display', mock_display)
collection.install_collections([(to_text(collection_tar), '*', None,)], to_text(temp_path),
[u'https://galaxy.ansible.com'], True, False, False, False, False)
assert os.path.isdir(collection_path)
actual_files = os.listdir(collection_path)
actual_files.sort()
assert actual_files == [b'FILES.json', b'MANIFEST.json', b'README.md', b'docs', b'playbooks', b'plugins', b'roles']
with open(os.path.join(collection_path, b'MANIFEST.json'), 'rb') as manifest_obj:
actual_manifest = json.loads(to_text(manifest_obj.read()))
assert actual_manifest['collection_info']['namespace'] == 'ansible_namespace'
assert actual_manifest['collection_info']['name'] == 'collection'
assert actual_manifest['collection_info']['version'] == '0.1.0'
assert mock_display.call_count == 1
assert mock_display.mock_calls[0][1][0] == "Installing 'ansible_namespace.collection:0.1.0' to '%s'" \
% to_text(collection_path)