add subdir support to collection loading (#60682)

* add subdir support to collection loading

* collections may now load roles and plugins from subdirectories under the roles dir or a plugin-type dir, e.g. `ns.coll.subdir1.subdir2.myrole` -> ns.coll's roles/subdir1/subdir2/myrole, `ns.coll.subdir1.mymodule` -> ns.coll's plugins/modules/subdir1/mymodule.py
* centralize parsing/validation in AnsibleCollectionRef class
* fix issues loading Jinja2 plugins from multiple sources 
* resolves #59462, #59890

* sanity test fixes

* string fixes

* add changelog entry
Matt Davis committed via GitHub
commit bfa004930a (parent 1aecdbb5c5)

@ -0,0 +1,2 @@
minor_changes:
- roles and plugins in collections may now be stored in subdirectories under the roles or plugin-type dirs (https://github.com/ansible/ansible/pull/60682)
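For reference, a minimal sketch (not part of the diff) of how the new subdir-aware references resolve, using the AnsibleCollectionRef class this commit introduces; the values are the same ones exercised by the unit tests at the end of the diff:

from ansible.utils.collection_loader import AnsibleCollectionRef

# a role nested two subdirs deep under the collection's roles/ dir
role_ref = AnsibleCollectionRef.from_fqcr('ns.coll.subdir1.subdir2.myrole', 'role')
assert role_ref.n_python_package_name == 'ansible_collections.ns.coll.roles.subdir1.subdir2.myrole'

# a module nested one subdir deep under the collection's plugins/modules/ dir
module_ref = AnsibleCollectionRef.from_fqcr('ns.coll.subdir1.mymodule', 'modules')
assert module_ref.n_python_package_name == 'ansible_collections.ns.coll.plugins.modules.subdir1'
assert module_ref.resource == 'mymodule'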

@ -36,7 +36,7 @@ from ansible.playbook.play_context import PlayContext
from ansible.plugins.loader import callback_loader, strategy_loader, module_loader
from ansible.plugins.callback import CallbackBase
from ansible.template import Templar
-from ansible.utils.collection_loader import is_collection_ref
+from ansible.utils.collection_loader import AnsibleCollectionRef
from ansible.utils.helpers import pct_to_int
from ansible.vars.hostvars import HostVars
from ansible.vars.reserved import warn_if_reserved
@ -158,7 +158,8 @@ class TaskQueueManager:
callback_obj.set_options()
self._callback_plugins.append(callback_obj)
-for callback_plugin_name in (c for c in C.DEFAULT_CALLBACK_WHITELIST if is_collection_ref(c)):
+for callback_plugin_name in (c for c in C.DEFAULT_CALLBACK_WHITELIST if AnsibleCollectionRef.is_valid_fqcr(c)):
+# TODO: need to extend/duplicate the stdout callback check here (and possible move this ahead of the old way
callback_obj = callback_loader.get(callback_plugin_name)
self._callback_plugins.append(callback_obj)
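A short sketch of the whitelist routing above ('testns.testcoll.usercallback' comes from the integration tests later in this diff; 'profile_tasks' stands in for any legacy short callback name):

from ansible.utils.collection_loader import AnsibleCollectionRef

AnsibleCollectionRef.is_valid_fqcr('testns.testcoll.usercallback')  # True -> goes through the collection-aware path above
AnsibleCollectionRef.is_valid_fqcr('profile_tasks')                 # False -> handled by the existing whitelist handling earlier in the method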

@ -27,7 +27,7 @@ from ansible.galaxy import get_collections_galaxy_meta_info
from ansible.galaxy.api import _urljoin
from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.module_utils import six
-from ansible.utils.collection_loader import is_collection_ref
+from ansible.utils.collection_loader import AnsibleCollectionRef
from ansible.utils.display import Display
from ansible.utils.hashing import secure_hash, secure_hash_s
from ansible.module_utils.urls import open_url
@ -471,7 +471,7 @@ def validate_collection_name(name):
:return: The input value, required for argparse validation.
"""
collection, dummy, dummy = name.partition(':')
-if is_collection_ref('ansible_collections.{0}'.format(collection)):
+if AnsibleCollectionRef.is_valid_collection_name(collection):
return name
raise AnsibleError("Invalid collection name '%s', name must be in the format <namespace>.<collection>." % name)
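Sketch of the new name check (the old code prefixed 'ansible_collections.' onto the name and reused the FQCR regex; the new helper validates the two-part name directly):

from ansible.utils.collection_loader import AnsibleCollectionRef

AnsibleCollectionRef.is_valid_collection_name('testns.testcoll')        # True
AnsibleCollectionRef.is_valid_collection_name('testns')                 # False - namespace only
AnsibleCollectionRef.is_valid_collection_name('testns.testcoll.extra')  # False - too many segments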

@ -39,7 +39,7 @@ options:
choices: [ absent, present, query ]
extends_documentation_fragment:
-- ACI
+- aci
author:
- Steven Gerhart (@sgerhart)
'''

@ -40,7 +40,7 @@ options:
choices: [ absent, present, query ]
extends_documentation_fragment:
-- ACI
+- aci
author:
- Steven Gerhart (@sgerhart)

@ -49,7 +49,7 @@ options:
choices: ['absent', 'present', 'query']
extends_documentation_fragment:
-- ACI
+- aci
author:
- Steven Gerhart (@sgerhart)

@ -36,7 +36,7 @@ options:
default: present
choices: ['absent', 'present', 'query']
extends_documentation_fragment:
-- ACI
+- aci
author:
- Steven Gerhart (@sgerhart)
'''

@ -31,7 +31,7 @@ from ansible.playbook.collectionsearch import CollectionSearch
from ansible.playbook.conditional import Conditional
from ansible.playbook.taggable import Taggable
from ansible.template import Templar
-from ansible.utils.collection_loader import get_collection_role_path, is_collection_ref
+from ansible.utils.collection_loader import get_collection_role_path, AnsibleCollectionRef
from ansible.utils.path import unfrackpath
from ansible.utils.display import Display
@ -154,7 +154,7 @@ class RoleDefinition(Base, Conditional, Taggable, CollectionSearch):
role_tuple = None
# try to load as a collection-based role first
-if self._collection_list or is_collection_ref(role_name):
+if self._collection_list or AnsibleCollectionRef.is_valid_fqcr(role_name):
role_tuple = get_collection_role_path(role_name, self._collection_list)
if role_tuple:
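Sketch of how role names are routed after this change (role names below are illustrative; a plain two-part Galaxy role name is not a valid FQCR, so it still falls through to the normal role search path):

from ansible.utils.collection_loader import AnsibleCollectionRef

AnsibleCollectionRef.is_valid_fqcr('testns.testcoll.testrole')                     # True  -> collection role lookup
AnsibleCollectionRef.is_valid_fqcr('testns.testcoll.role_subdir.subdir_testrole')  # True  -> collection role lookup (subdir-aware)
AnsibleCollectionRef.is_valid_fqcr('geerlingguy.nginx')                            # False -> legacy role path search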

@ -22,7 +22,7 @@ from ansible.module_utils.six import string_types
from ansible.parsing.utils.yaml import from_yaml
from ansible.parsing.yaml.loader import AnsibleLoader
from ansible.plugins import get_plugin_class, MODULE_CACHE, PATH_CACHE, PLUGIN_PATH_CACHE
-from ansible.utils.collection_loader import AnsibleCollectionLoader, AnsibleFlatMapLoader, is_collection_ref
+from ansible.utils.collection_loader import AnsibleCollectionLoader, AnsibleFlatMapLoader, AnsibleCollectionRef
from ansible.utils.display import Display
from ansible.utils.plugin_docs import add_fragments
@ -317,38 +317,25 @@ class PluginLoader:
display.debug('Added %s to loader search path' % (directory))
def _find_fq_plugin(self, fq_name, extension):
-fq_name = to_native(fq_name)
+plugin_type = AnsibleCollectionRef.legacy_plugin_dir_to_plugin_type(self.subdir)
-# prefix our extension Python namespace if it isn't already there
-if not fq_name.startswith('ansible_collections.'):
-fq_name = 'ansible_collections.' + fq_name
-splitname = fq_name.rsplit('.', 1)
+acr = AnsibleCollectionRef.from_fqcr(fq_name, plugin_type)
-if len(splitname) != 2:
-raise ValueError('{0} is not a valid namespace-qualified plugin name'.format(to_native(fq_name)))
-package = splitname[0]
+n_resource = to_native(acr.resource, errors='strict')
-resource = splitname[1]
-append_plugin_type = self.subdir.replace('_plugins', '')
-if append_plugin_type == 'library':
-append_plugin_type = 'modules'
-package += '.plugins.{0}'.format(append_plugin_type)
if extension:
-resource += extension
+n_resource += extension
-pkg = sys.modules.get(package)
+pkg = sys.modules.get(acr.n_python_package_name)
if not pkg:
# FIXME: there must be cheaper/safer way to do this
-pkg = import_module(package)
+pkg = import_module(acr.n_python_package_name)
# if the package is one of our flatmaps, we need to consult its loader to find the path, since the file could be
# anywhere in the tree
if hasattr(pkg, '__loader__') and isinstance(pkg.__loader__, AnsibleFlatMapLoader):
try:
-file_path = pkg.__loader__.find_file(resource)
+file_path = pkg.__loader__.find_file(n_resource)
return to_text(file_path)
except IOError:
# this loader already takes care of extensionless files, so if we didn't find it, just bail
@ -356,15 +343,15 @@ class PluginLoader:
pkg_path = os.path.dirname(pkg.__file__)
-resource_path = os.path.join(pkg_path, resource)
+n_resource_path = os.path.join(pkg_path, n_resource)
# FIXME: and is file or file link or ...
-if os.path.exists(resource_path):
+if os.path.exists(n_resource_path):
-return to_text(resource_path)
+return to_text(n_resource_path)
# look for any matching extension in the package location (sans filter)
ext_blacklist = ['.pyc', '.pyo']
-found_files = [f for f in glob.iglob(os.path.join(pkg_path, resource) + '.*') if os.path.isfile(f) and os.path.splitext(f)[1] not in ext_blacklist]
+found_files = [f for f in glob.iglob(os.path.join(pkg_path, n_resource) + '.*') if os.path.isfile(f) and os.path.splitext(f)[1] not in ext_blacklist]
if not found_files:
return None
@ -392,8 +379,8 @@ class PluginLoader:
# they can have any suffix
suffix = ''
-# HACK: need this right now so we can still load shipped PS module_utils
+# FIXME: need this right now so we can still load shipped PS module_utils- come up with a more robust solution
-if (is_collection_ref(name) or collection_list) and not name.startswith('Ansible'):
+if (AnsibleCollectionRef.is_valid_fqcr(name) or collection_list) and not name.startswith('Ansible'):
if '.' in name or not collection_list:
candidates = [name]
else:
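The _find_fq_plugin() rewrite above leans on the new legacy_plugin_dir_to_plugin_type() helper; a sketch of the mapping (not exhaustive):

from ansible.utils.collection_loader import AnsibleCollectionRef

AnsibleCollectionRef.legacy_plugin_dir_to_plugin_type('action_plugins')  # 'action'
AnsibleCollectionRef.legacy_plugin_dir_to_plugin_type('filter_plugins')  # 'filter'
AnsibleCollectionRef.legacy_plugin_dir_to_plugin_type('library')         # 'modules'
AnsibleCollectionRef.legacy_plugin_dir_to_plugin_type('bogus_dir')       # raises ValueError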

@ -47,6 +47,7 @@ from ansible.plugins.loader import filter_loader, lookup_loader, test_loader
from ansible.template.safe_eval import safe_eval
from ansible.template.template import AnsibleJ2Template
from ansible.template.vars import AnsibleJ2Vars
+from ansible.utils.collection_loader import AnsibleCollectionRef
from ansible.utils.display import Display
from ansible.utils.unsafe_proxy import wrap_var
@ -325,20 +326,21 @@ class JinjaPluginIntercept(MutableMapping):
if func:
return func
-components = key.split('.')
+acr = AnsibleCollectionRef.try_parse_fqcr(key, self._dirname)
-if len(components) != 3:
+if not acr:
raise KeyError('invalid plugin name: {0}'.format(key))
-collection_name = '.'.join(components[0:2])
-collection_pkg = 'ansible_collections.{0}.plugins.{1}'.format(collection_name, self._dirname)
# FIXME: error handling for bogus plugin name, bogus impl, bogus filter/test
-# FIXME: move this capability into the Jinja plugin loader
+pkg = import_module(acr.n_python_package_name)
-pkg = import_module(collection_pkg)
+parent_prefix = acr.collection
+if acr.subdirs:
+parent_prefix = '{0}.{1}'.format(parent_prefix, acr.subdirs)
-for dummy, module_name, ispkg in pkgutil.iter_modules(pkg.__path__, prefix=collection_name + '.'):
+for dummy, module_name, ispkg in pkgutil.iter_modules(pkg.__path__, prefix=parent_prefix + '.'):
if ispkg:
continue
@ -347,7 +349,7 @@ class JinjaPluginIntercept(MutableMapping):
method_map = getattr(plugin_impl, self._method_map_name)
for f in iteritems(method_map()):
-fq_name = '.'.join((collection_name, f[0]))
+fq_name = '.'.join((parent_prefix, f[0]))
self._collection_jinja_func_cache[fq_name] = f[1]
function_impl = self._collection_jinja_func_cache[key]
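A sketch of how the intercept now derives the Python package and cache-key prefix for a subdir-hosted filter (names taken from the test collection used later in this diff):

from ansible.utils.collection_loader import AnsibleCollectionRef

acr = AnsibleCollectionRef.try_parse_fqcr('testns.testcoll.filter_subdir.test_subdir_filter', 'filter')
# acr.collection            == 'testns.testcoll'
# acr.subdirs               == 'filter_subdir'
# acr.n_python_package_name == 'ansible_collections.testns.testcoll.plugins.filter.filter_subdir'
# each filter exported by modules in that package is cached under 'testns.testcoll.filter_subdir.<filter_name>'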

@ -30,9 +30,6 @@ _SYNTHETIC_PACKAGES = {
'ansible_collections.ansible.builtin.plugins.modules': dict(type='flatmap', flatmap='ansible.modules', graft=True),
}
-# TODO: tighten this up to subset Python identifier requirements (and however we want to restrict ns/collection names)
-_collection_qualified_re = re.compile(to_text(r'^(\w+)\.(\w+)\.(\w+)$'))
# FIXME: exception handling/error logging
class AnsibleCollectionLoader(with_metaclass(Singleton, object)):
@ -271,32 +268,186 @@ class AnsibleFlatMapLoader(object):
# def is_package(self, fullname):
class AnsibleCollectionRef:
# FUTURE: introspect plugin loaders to get these dynamically?
VALID_REF_TYPES = frozenset(to_text(r) for r in ['action', 'become', 'cache', 'callback', 'cliconf', 'connection',
'doc_fragments', 'filter', 'httpapi', 'inventory', 'lookup',
'module_utils', 'modules', 'netconf', 'role', 'shell', 'strategy',
'terminal', 'test', 'vars'])
# FIXME: tighten this up to match Python identifier reqs, etc
VALID_COLLECTION_NAME_RE = re.compile(to_text(r'^(\w+)\.(\w+)$'))
VALID_SUBDIRS_RE = re.compile(to_text(r'^\w+(\.\w+)*$'))
VALID_FQCR_RE = re.compile(to_text(r'^\w+\.\w+\.\w+(\.\w+)*$')) # can have 0-N included subdirs as well
def __init__(self, collection_name, subdirs, resource, ref_type):
"""
Create an AnsibleCollectionRef from components
:param collection_name: a collection name of the form 'namespace.collectionname'
:param subdirs: optional subdir segments to be appended below the plugin type (eg, 'subdir1.subdir2')
:param resource: the name of the resource being referenced (eg, 'mymodule', 'someaction', 'a_role')
:param ref_type: the type of the reference, eg 'module', 'role', 'doc_fragment'
"""
collection_name = to_text(collection_name, errors='strict')
if subdirs is not None:
subdirs = to_text(subdirs, errors='strict')
resource = to_text(resource, errors='strict')
ref_type = to_text(ref_type, errors='strict')
if not self.is_valid_collection_name(collection_name):
raise ValueError('invalid collection name (must be of the form namespace.collection): {0}'.format(to_native(collection_name)))
if ref_type not in self.VALID_REF_TYPES:
raise ValueError('invalid collection ref_type: {0}'.format(ref_type))
self.collection = collection_name
if subdirs:
if not re.match(self.VALID_SUBDIRS_RE, subdirs):
raise ValueError('invalid subdirs entry: {0} (must be empty/None or of the form subdir1.subdir2)'.format(to_native(subdirs)))
self.subdirs = subdirs
else:
self.subdirs = u''
self.resource = resource
self.ref_type = ref_type
package_components = [u'ansible_collections', self.collection]
if self.ref_type == u'role':
package_components.append(u'roles')
else:
# we assume it's a plugin
package_components += [u'plugins', self.ref_type]
if self.subdirs:
package_components.append(self.subdirs)
if self.ref_type == u'role':
# roles are their own resource
package_components.append(self.resource)
self.n_python_package_name = to_native('.'.join(package_components))
@staticmethod
def from_fqcr(ref, ref_type):
"""
Parse a string as a fully-qualified collection reference, raises ValueError if invalid
:param ref: collection reference to parse (a valid ref is of the form 'ns.coll.resource' or 'ns.coll.subdir1.subdir2.resource')
:param ref_type: the type of the reference, eg 'module', 'role', 'doc_fragment'
:return: a populated AnsibleCollectionRef object
"""
# assuming the fq_name is of the form (ns).(coll).(optional_subdir_N).(resource_name),
# we split the resource name off the right, split ns and coll off the left, and we're left with any optional
# subdirs that need to be added back below the plugin-specific subdir we'll add. So:
# ns.coll.resource -> ansible_collections.ns.coll.plugins.(plugintype).resource
# ns.coll.subdir1.resource -> ansible_collections.ns.coll.plugins.subdir1.(plugintype).resource
# ns.coll.rolename -> ansible_collections.ns.coll.roles.rolename
if not AnsibleCollectionRef.is_valid_fqcr(ref):
raise ValueError('{0} is not a valid collection reference'.format(to_native(ref)))
ref = to_text(ref, errors='strict')
ref_type = to_text(ref_type, errors='strict')
resource_splitname = ref.rsplit(u'.', 1)
package_remnant = resource_splitname[0]
resource = resource_splitname[1]
# split the left two components of the collection package name off, anything remaining is plugin-type
# specific subdirs to be added back on below the plugin type
package_splitname = package_remnant.split(u'.', 2)
if len(package_splitname) == 3:
subdirs = package_splitname[2]
else:
subdirs = u''
collection_name = u'.'.join(package_splitname[0:2])
return AnsibleCollectionRef(collection_name, subdirs, resource, ref_type)
@staticmethod
def try_parse_fqcr(ref, ref_type):
"""
Attempt to parse a string as a fully-qualified collection reference, returning None on failure (instead of raising an error)
:param ref: collection reference to parse (a valid ref is of the form 'ns.coll.resource' or 'ns.coll.subdir1.subdir2.resource')
:param ref_type: the type of the reference, eg 'module', 'role', 'doc_fragment'
:return: a populated AnsibleCollectionRef object on successful parsing, else None
"""
try:
return AnsibleCollectionRef.from_fqcr(ref, ref_type)
except ValueError:
pass
@staticmethod
def legacy_plugin_dir_to_plugin_type(legacy_plugin_dir_name):
"""
Utility method to convert from a PluginLoader dir name to a plugin ref_type
:param legacy_plugin_dir_name: PluginLoader dir name (eg, 'action_plugins', 'library')
:return: the corresponding plugin ref_type (eg, 'action', 'role')
"""
legacy_plugin_dir_name = to_text(legacy_plugin_dir_name)
plugin_type = legacy_plugin_dir_name.replace(u'_plugins', u'')
if plugin_type == u'library':
plugin_type = u'modules'
if plugin_type not in AnsibleCollectionRef.VALID_REF_TYPES:
raise ValueError('{0} cannot be mapped to a valid collection ref type'.format(to_native(legacy_plugin_dir_name)))
return plugin_type
@staticmethod
def is_valid_fqcr(ref, ref_type=None):
"""
Validates if a string is a well-formed fully-qualified collection reference (does not look up the collection itself)
:param ref: candidate collection reference to validate (a valid ref is of the form 'ns.coll.resource' or 'ns.coll.subdir1.subdir2.resource')
:param ref_type: optional reference type to enable deeper validation, eg 'module', 'role', 'doc_fragment'
:return: True if the collection ref passed is well-formed, False otherwise
"""
ref = to_text(ref)
if not ref_type:
return bool(re.match(AnsibleCollectionRef.VALID_FQCR_RE, ref))
return bool(AnsibleCollectionRef.try_parse_fqcr(ref, ref_type))
@staticmethod
def is_valid_collection_name(collection_name):
"""
Validates if a string is a well-formed collection name (does not look up the collection itself)
:param collection_name: candidate collection name to validate (a valid name is of the form 'ns.collname')
:return: True if the collection name passed is well-formed, False otherwise
"""
collection_name = to_text(collection_name)
return bool(re.match(AnsibleCollectionRef.VALID_COLLECTION_NAME_RE, collection_name))
def get_collection_role_path(role_name, collection_list=None):
-match = _collection_qualified_re.match(role_name)
+acr = AnsibleCollectionRef.try_parse_fqcr(role_name, 'role')
-if match:
+if acr:
-grps = match.groups()
+# looks like a valid qualified collection ref; skip the collection_list
-collection_list = ['.'.join(grps[:2])]
+role = acr.resource
-role = grps[2]
+collection_list = [acr.collection]
elif not collection_list:
return None  # not a FQ role and no collection search list spec'd, nothing to do
else:
-role = role_name
+role = role_name  # treat as unqualified, loop through the collection search list to try and resolve
for collection_name in collection_list:
try:
-role_package = u'ansible_collections.{0}.roles.{1}'.format(collection_name, role)
+acr = AnsibleCollectionRef(collection_name=collection_name, subdirs=acr.subdirs, resource=acr.resource, ref_type=acr.ref_type)
# FIXME: error handling/logging; need to catch any import failures and move along
# FIXME: this line shouldn't be necessary, but py2 pkgutil.get_data is delegating back to built-in loader when it shouldn't
-pkg = import_module(role_package + u'.tasks')
+pkg = import_module(acr.n_python_package_name)
-# get_data input must be a native string
-tasks_file = pkgutil.get_data(to_native(role_package) + '.tasks', 'main.yml')
-if tasks_file is not None:
+if pkg is not None:
# the package is now loaded, get the collection's package and ask where it lives
-path = os.path.dirname(to_bytes(sys.modules[role_package].__file__, errors='surrogate_or_strict'))
+path = os.path.dirname(to_bytes(sys.modules[acr.n_python_package_name].__file__, errors='surrogate_or_strict'))
return role, to_text(path, errors='surrogate_or_strict'), collection_name
except IOError:
@ -308,9 +459,5 @@ def get_collection_role_path(role_name, collection_list=None):
return None
-def is_collection_ref(candidate_name):
-return bool(_collection_qualified_re.match(candidate_name))
def set_collection_playbook_paths(b_playbook_paths):
AnsibleCollectionLoader().set_playbook_paths(b_playbook_paths)
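Sketch of the subdir-aware role resolution above (the on-disk path shown is hypothetical; it depends on where the collection is installed):

from ansible.utils.collection_loader import AnsibleCollectionRef, get_collection_role_path

acr = AnsibleCollectionRef.from_fqcr('testns.testcoll.role_subdir.subdir_testrole', 'role')
assert acr.n_python_package_name == 'ansible_collections.testns.testcoll.roles.role_subdir.subdir_testrole'

# get_collection_role_path() imports that package and returns (role, role_dir, collection), e.g.
# ('subdir_testrole', u'.../ansible_collections/testns/testcoll/roles/role_subdir/subdir_testrole', 'testns.testcoll'),
# or None if the role cannot be resolved
result = get_collection_role_path('testns.testcoll.role_subdir.subdir_testrole')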

@ -47,27 +47,37 @@ def add_fragments(doc, filename, fragment_loader):
if isinstance(fragments, string_types):
fragments = [fragments]
-# Allow the module to specify a var other than DOCUMENTATION
+unknown_fragments = []
-# to pull the fragment from, using dot notation as a separator
+# doc_fragments are allowed to specify a fragment var other than DOCUMENTATION
+# with a . separator; this is complicated by collections-hosted doc_fragments that
+# use the same separator. Assume it's collection-hosted normally first, try to load
+# as-specified. If failure, assume the right-most component is a var, split it off,
+# and retry the load.
for fragment_slug in fragments:
-fallback_name = None
+fragment_name = fragment_slug
-fragment_slug = fragment_slug.lower()
+fragment_var = 'DOCUMENTATION'
-if '.' in fragment_slug:
-fallback_name = fragment_slug
-fragment_name, fragment_var = fragment_slug.rsplit('.', 1)
-fragment_var = fragment_var.upper()
-else:
-fragment_name, fragment_var = fragment_slug, 'DOCUMENTATION'
fragment_class = fragment_loader.get(fragment_name)
-if fragment_class is None and fallback_name:
+if fragment_class is None and '.' in fragment_slug:
-fragment_class = fragment_loader.get(fallback_name)
+splitname = fragment_slug.rsplit('.', 1)
-fragment_var = 'DOCUMENTATION'
+fragment_name = splitname[0]
+fragment_var = splitname[1].upper()
+fragment_class = fragment_loader.get(fragment_name)
if fragment_class is None:
-raise AnsibleAssertionError('fragment_class is None')
+unknown_fragments.append(fragment_slug)
+continue
+fragment_yaml = getattr(fragment_class, fragment_var, None)
+if fragment_yaml is None:
+if fragment_var != 'DOCUMENTATION':
+# if it's asking for something specific that's missing, that's an error
+unknown_fragments.append(fragment_slug)
+continue
+else:
+fragment_yaml = '{}'  # TODO: this is still an error later since we require 'options' below...
-fragment_yaml = getattr(fragment_class, fragment_var, '{}')
fragment = AnsibleLoader(fragment_yaml, file_name=filename).get_single_data()
if 'notes' in fragment:
@ -102,6 +112,9 @@ def add_fragments(doc, filename, fragment_loader):
except Exception as e:
raise AnsibleError("%s (%s) of unknown type: %s" % (to_native(e), fragment_name, filename))
+if unknown_fragments:
+raise AnsibleError('unknown doc_fragment(s) in file {0}: {1}'.format(filename, to_native(', '.join(unknown_fragments))))
def get_docstring(filename, fragment_loader, verbose=False, ignore_errors=False):
"""

@ -0,0 +1,19 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.action import ActionBase
class ActionModule(ActionBase):
TRANSFERS_FILES = False
_VALID_ARGS = frozenset()
def run(self, tmp=None, task_vars=None):
if task_vars is None:
task_vars = dict()
result = super(ActionModule, self).run(None, task_vars)
result = dict(changed=False)
return result

@ -0,0 +1,14 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
def test_subdir_filter(data):
return "{0}_via_testfilter_from_subdir".format(data)
class FilterModule(object):
def filters(self):
return {
'test_subdir_filter': test_subdir_filter
}

@ -3,7 +3,7 @@ __metaclass__ = type
def testfilter(data):
-return "{0}_from_userdir".format(data)
+return "{0}_via_testfilter_from_userdir".format(data)
class FilterModule(object):

@ -0,0 +1,14 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
def testfilter2(data):
return "{0}_via_testfilter2_from_userdir".format(data)
class FilterModule(object):
def filters(self):
return {
'testfilter2': testfilter2
}

@ -0,0 +1,11 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.lookup import LookupBase
class LookupModule(LookupBase):
def run(self, terms, variables, **kwargs):
return ['subdir_lookup_from_user_dir']

@ -8,4 +8,4 @@ class LookupModule(LookupBase):
def run(self, terms, variables, **kwargs):
-return ['lookup_from_user_dir']
+return ['mylookup_from_user_dir']

@ -0,0 +1,12 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.lookup import LookupBase
class LookupModule(LookupBase):
def run(self, terms, variables, **kwargs):
return ['mylookup2_from_user_dir']

@ -0,0 +1,14 @@
#!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
def main():
print(json.dumps(dict(changed=False, source='user')))
if __name__ == '__main__':
main()

@ -0,0 +1,25 @@
#!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
DOCUMENTATION = r'''
module: testmodule
description: for testing
extends_documentation_fragment:
- noncollbogusfrag
- noncollbogusfrag.bogusvar
- bogusns.testcoll.frag
- testns.boguscoll.frag
- testns.testcoll.bogusfrag
- testns.testcoll.frag.bogusvar
'''
def main():
print(json.dumps(dict(changed=False, source='user')))
if __name__ == '__main__':
main()

@ -0,0 +1,13 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
def testtest(data):
return data == 'from_user2'
class TestModule(object):
def tests(self):
return {
'testtest2': testtest
}

@ -0,0 +1,13 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
def subdir_test(data):
return data == 'subdir_from_user'
class TestModule(object):
def tests(self):
return {
'subdir_test': subdir_test
}

@ -0,0 +1,7 @@
- debug:
msg: '{{ test_role_input | default("(undefined)") }}'
register: test_role_output
- assert:
that:
- test_role_input is not defined or test_role_input == test_role_output.msg

@ -0,0 +1,30 @@
- name: check collections list from role meta
plugin_lookup:
register: pluginlookup_out
- name: call role-local ping module
ping:
register: ping_out
- name: call unqualified module in another collection listed in role meta (testns.coll_in_sys)
systestmodule:
register: systestmodule_out
# verify that pluginloader caching doesn't prevent us from explicitly calling a builtin plugin with the same name
- name: call builtin ping module explicitly
ansible.builtin.ping:
register: builtinping_out
- debug:
msg: '{{ test_role_input | default("(undefined)") }}'
register: test_role_output
# FIXME: add tests to ensure that block/task level stuff in a collection-hosted role properly inherit role default/meta values
- assert:
that:
- pluginlookup_out.collection_list == ['testns.testcoll', 'ansible.builtin', 'testns.coll_in_sys', 'bogus.fromrolemeta']
- ping_out.source is defined and ping_out.source == 'user'
- systestmodule_out.source is defined and systestmodule_out.source == 'sys'
- builtinping_out.ping is defined and builtinping_out.ping == 'pong'
- test_role_input is not defined or test_role_input == test_role_output.msg

@ -22,6 +22,16 @@
name: testns.testcoll.maskedmodule
register: maskedmodule_out
# action in a collection subdir
- name: test subdir action FQ
testns.testcoll.action_subdir.subdir_ping_action:
register: subdir_ping_action_out
# module in a collection subdir
- name: test subdir module FQ
testns.testcoll.module_subdir.subdir_ping_module:
register: subdir_ping_module_out
# module with a granular module_utils import (from (this collection).module_utils.leaf import thingtocall)
- name: exec module with granular module utils import from this collection
testns.testcoll.uses_leaf_mu_granular_import:
@ -49,18 +59,28 @@
- systestmodule_out.source == 'sys'
- contentadjmodule_out.source == 'content_adj'
- not maskedmodule_out.plugin_path
+- subdir_ping_action_out is not changed
+- subdir_ping_module_out is not changed
- granular_out.mu_result == 'thingtocall in leaf'
- granular_nested_out.mu_result == 'thingtocall in base called thingtocall in secondary'
- flat_out.mu_result == 'thingtocall in leaf'
- from_out.mu_result == 'thingtocall in leaf'
- from_out.mu2_result == 'thingtocall in secondary'
- hosts: testhost
tasks:
- name: exercise filters/tests/lookups
assert:
that:
-- "'data' | testns.testcoll.testfilter == 'data_from_userdir'"
+- "'data' | testns.testcoll.testfilter == 'data_via_testfilter_from_userdir'"
+- "'data' | testns.testcoll.testfilter2 == 'data_via_testfilter2_from_userdir'"
+- "'data' | testns.testcoll.filter_subdir.test_subdir_filter == 'data_via_testfilter_from_subdir'"
- "'from_user' is testns.testcoll.testtest"
-- lookup('testns.testcoll.mylookup') == 'lookup_from_user_dir'
+- "'from_user2' is testns.testcoll.testtest2"
+- "'subdir_from_user' is testns.testcoll.test_subdir.subdir_test"
+- lookup('testns.testcoll.mylookup') == 'mylookup_from_user_dir'
+- lookup('testns.testcoll.mylookup2') == 'mylookup2_from_user_dir'
+- lookup('testns.testcoll.lookup_subdir.my_subdir_lookup') == 'subdir_lookup_from_user_dir'
# ensure that the synthetic ansible.builtin collection limits to builtin plugins, that ansible.legacy loads overrides
# from legacy plugin dirs, and that a same-named plugin loaded from a real collection is not masked by the others
@ -182,7 +202,7 @@
# - lookup('mylookup') == 'lookup_from_user_dir'
-# test keyword-static execution of a FQ collection-backed role
+# test keyword-static execution of a FQ collection-backed role with "tasks/main.yaml"
- name: verify collection-backed role execution (keyword static)
hosts: testhost
collections:
@ -191,7 +211,7 @@
vars:
test_role_input: keyword static
roles:
-- role: testns.testcoll.testrole
+- role: testns.testcoll.testrole_main_yaml
tasks:
- name: ensure role executed
assert:
@ -249,6 +269,19 @@
that:
- test_role_output.msg == test_role_input
# test dynamic execution of a FQ collection-backed role
- name: verify collection-backed role execution in subdir (include)
hosts: testhost
vars:
test_role_input: dynamic (subdir)
tasks:
- include_role:
name: testns.testcoll.role_subdir.subdir_testrole
- name: ensure role executed
assert:
that:
- test_role_output.msg == test_role_input
- name: validate static task include behavior
hosts: testhost

@ -17,6 +17,10 @@ ANSIBLE_CALLBACK_WHITELIST=testns.testcoll.usercallback ansible localhost -m pin
# test documentation
ansible-doc testns.testcoll.testmodule -vvv | grep -- "- normal_doc_frag"
echo "testing bad doc_fragments (expected ERROR message follows)"
# test documentation failure
ansible-doc testns.testcoll.testmodule_bad_docfrags -vvv 2>&1 | grep -- "unknown doc_fragment"
# we need multiple plays, and conditional import_playbook is noisy and causes problems, so choose here which one to use...
if [[ ${INVENTORY_PATH} == *.winrm ]]; then
export TEST_PLAYBOOK=windows.yml

@ -77,7 +77,7 @@ def main():
elif path.startswith('test/integration/targets/'):
is_integration = True
-if dirname.endswith('/library') or dirname.endswith('/plugins/modules') or dirname in (
+if dirname.endswith('/library') or '/plugins/modules' in dirname or dirname in (
# non-standard module library directories
'test/integration/targets/module_precedence/lib_no_extension',
'test/integration/targets/module_precedence/lib_with_extension',

@ -2,8 +2,12 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
+import pytest
+import re
import sys
+from ansible.utils.collection_loader import AnsibleCollectionRef
def test_import_from_collection(monkeypatch):
collection_root = os.path.join(os.path.dirname(__file__), 'fixtures', 'collections')
@ -127,3 +131,85 @@ def test_import_from_collection(monkeypatch):
# verify that the filename and line number reported by the trace is correct
# this makes sure that collection loading preserves file paths and line numbers
assert trace_log == expected_trace_log
@pytest.mark.parametrize(
'ref,ref_type,expected_collection,expected_subdirs,expected_resource,expected_python_pkg_name',
[
('ns.coll.myaction', 'action', 'ns.coll', '', 'myaction', 'ansible_collections.ns.coll.plugins.action'),
('ns.coll.subdir1.subdir2.myaction', 'action', 'ns.coll', 'subdir1.subdir2', 'myaction', 'ansible_collections.ns.coll.plugins.action.subdir1.subdir2'),
('ns.coll.myrole', 'role', 'ns.coll', '', 'myrole', 'ansible_collections.ns.coll.roles.myrole'),
('ns.coll.subdir1.subdir2.myrole', 'role', 'ns.coll', 'subdir1.subdir2', 'myrole', 'ansible_collections.ns.coll.roles.subdir1.subdir2.myrole'),
])
def test_fqcr_parsing_valid(ref, ref_type, expected_collection,
expected_subdirs, expected_resource, expected_python_pkg_name):
assert AnsibleCollectionRef.is_valid_fqcr(ref, ref_type)
r = AnsibleCollectionRef.from_fqcr(ref, ref_type)
assert r.collection == expected_collection
assert r.subdirs == expected_subdirs
assert r.resource == expected_resource
assert r.n_python_package_name == expected_python_pkg_name
r = AnsibleCollectionRef.try_parse_fqcr(ref, ref_type)
assert r.collection == expected_collection
assert r.subdirs == expected_subdirs
assert r.resource == expected_resource
assert r.n_python_package_name == expected_python_pkg_name
@pytest.mark.parametrize(
'ref,ref_type,expected_error_type,expected_error_expression',
[
('no_dots_at_all_action', 'action', ValueError, 'is not a valid collection reference'),
('no_nscoll.myaction', 'action', ValueError, 'is not a valid collection reference'),
('ns.coll.myaction', 'bogus', ValueError, 'invalid collection ref_type'),
])
def test_fqcr_parsing_invalid(ref, ref_type, expected_error_type, expected_error_expression):
assert not AnsibleCollectionRef.is_valid_fqcr(ref, ref_type)
with pytest.raises(expected_error_type) as curerr:
AnsibleCollectionRef.from_fqcr(ref, ref_type)
assert re.search(expected_error_expression, str(curerr.value))
r = AnsibleCollectionRef.try_parse_fqcr(ref, ref_type)
assert r is None
@pytest.mark.parametrize(
'name,subdirs,resource,ref_type,python_pkg_name',
[
('ns.coll', None, 'res', 'doc_fragments', 'ansible_collections.ns.coll.plugins.doc_fragments'),
('ns.coll', 'subdir1', 'res', 'doc_fragments', 'ansible_collections.ns.coll.plugins.doc_fragments.subdir1'),
('ns.coll', 'subdir1.subdir2', 'res', 'action', 'ansible_collections.ns.coll.plugins.action.subdir1.subdir2'),
])
def test_collectionref_components_valid(name, subdirs, resource, ref_type, python_pkg_name):
x = AnsibleCollectionRef(name, subdirs, resource, ref_type)
assert x.collection == name
if subdirs:
assert x.subdirs == subdirs
else:
assert x.subdirs == ''
assert x.resource == resource
assert x.ref_type == ref_type
assert x.n_python_package_name == python_pkg_name
@pytest.mark.parametrize(
'name,subdirs,resource,ref_type,expected_error_type,expected_error_expression',
[
('bad_ns', '', 'resource', 'action', ValueError, 'invalid collection name'),
('ns.coll.', '', 'resource', 'action', ValueError, 'invalid collection name'),
('ns.coll', 'badsubdir#', 'resource', 'action', ValueError, 'invalid subdirs entry'),
('ns.coll', 'badsubdir.', 'resource', 'action', ValueError, 'invalid subdirs entry'),
('ns.coll', '.badsubdir', 'resource', 'action', ValueError, 'invalid subdirs entry'),
('ns.coll', '', 'resource', 'bogus', ValueError, 'invalid collection ref_type'),
])
def test_collectionref_components_invalid(name, subdirs, resource, ref_type, expected_error_type, expected_error_expression):
with pytest.raises(expected_error_type) as curerr:
AnsibleCollectionRef(name, subdirs, resource, ref_type)
assert re.search(expected_error_expression, str(curerr.value))
