mirror of https://github.com/ansible/ansible.git
Collection content loading (#52194)
* basic plugin loading working (with many hacks)
* task collections working
* play/block-level collection module/action working
* implement PEP302 loader
* implicit package support (no need for __init__.py in collections)
* provides future options for secure loading of content that shouldn't execute inside controller (eg, actively ignore __init__.py on content/module paths)
* provide hook for synthetic collection setup (eg ansible.core pseudo-collection for specifying built-in plugins without legacy path, etc)
* synthetic package support
* ansible.core.plugins mapping works, others don't
* synthetic collections working for modules/actions
* fix direct-load legacy
* change base package name to ansible_collections
* note
* collection role loading
* expand paths from installed content root vars
* feature complete?
* rename ansible.core to ansible.builtin
* and various sanity fixes
* sanity tweaks
* unittest fixes
* less grabby error handler on has_plugin
* probably need to replace with a or harden callers
* fix win_ping test
* disable module test with explicit file extension; might be able to support in some scenarios, but can't see any other tests that verify that behavior...
* fix unicode conversion issues on py2
* attempt to keep things working-ish on py2.6
* python2.6 test fun round 2
* rename dirs/configs to "collections"
* add wrapper dir for content-adjacent
* fix pythoncheck to use localhost
* unicode tweaks, native/bytes string prefixing
* rename COLLECTION_PATHS to COLLECTIONS_PATHS
* switch to pathspec
* path handling cleanup
* change expensive `all` back to or chain
* unused import cleanup
* quotes tweak
* use wrapped iter/len in Jinja proxy
* var name expansion
* comment seemingly overcomplicated playbook_paths resolution
* drop unnecessary conditional nesting
* eliminate extraneous local
* zap superfluous validation function
* use slice for rolespec NS assembly
* misc naming/unicode fixes
* collection callback loader asks if valid FQ name instead of just '.'
* switch collection role resolution behavior to be internally `text` as much as possible
* misc fixmes
* to_native in exception constructor
* (slightly) detangle tuple accumulation mess in module_utils __init__ walker
* more misc fixmes
* tighten up action dispatch, add unqualified action test
* rename Collection mixin to CollectionSearch
* (attempt to) avoid potential confusion/conflict with builtin collections, etc
* stale fixmes
* tighten up pluginloader collections determination
* sanity test fixes
* ditch regex escape
* clarify comment
* update default collections paths config entry
* use PATH format instead of list
* skip integration tests on Python 2.6 ci_complete
parent 5173548a9f
commit f86345f777
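The bullet list above describes the new PEP 302-style loader. As a rough, hedged illustration only (the bootstrap wiring is not part of the hunks shown here, and the import path is assumed), a finder/loader like the AnsibleCollectionLoader added below is activated by placing an instance on sys.meta_path before anything under ansible_collections is imported:

# Illustrative sketch, not part of this diff: installing the PEP 302 meta_path
# finder/loader so it owns everything under the ansible_collections namespace.
import sys

from ansible.utils.collection_loader import AnsibleCollectionLoader  # assumed import path

if not any(isinstance(finder, AnsibleCollectionLoader) for finder in sys.meta_path):
    sys.meta_path.insert(0, AnsibleCollectionLoader())

# once installed, collection content imports resolve through it, eg:
# import ansible_collections.testns.testcoll.plugins.module_utils.leaf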
@@ -0,0 +1,4 @@
major_changes:
  - Experimental support for Ansible Collections and content namespacing - Ansible content can now be packaged in a
    collection and addressed via namespaces. This allows for easier sharing, distribution, and installation of bundled
    modules/roles/plugins, and consistent rules for accessing specific content via namespaces.
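As a small illustration of the namespaced addressing described above, a fully qualified name has the shape namespace.collection.resource; the sketch below (hypothetical helper name, same pattern as the _collection_qualified_re regex added later in this diff) shows how such a reference splits apart:

# Hypothetical helper mirroring the FQ-name pattern used by the new loader.
import re

_FQ_RE = re.compile(r'^(\w+)\.(\w+)\.(\w+)$')


def split_fq_name(name):
    # returns (namespace, collection, resource), or None for unqualified legacy names
    match = _FQ_RE.match(name)
    return match.groups() if match else None


print(split_fq_name('testns.testcoll.testmodule'))  # ('testns', 'testcoll', 'testmodule')
print(split_fq_name('ping'))                         # None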
@@ -0,0 +1,26 @@
# Copyright: (c) 2019, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from ansible.module_utils.six import string_types
from ansible.playbook.attribute import FieldAttribute


class CollectionSearch:
    # this needs to be populated before we can resolve tasks/roles/etc
    _collections = FieldAttribute(isa='list', listof=string_types, priority=100)

    def _load_collections(self, attr, ds):
        if not ds:
            # if empty/None, just return whatever was there; legacy behavior will do the right thing
            return ds

        if not isinstance(ds, list):
            ds = [ds]

        if 'ansible.builtin' not in ds and 'ansible.legacy' not in ds:
            ds.append('ansible.legacy')

        return ds
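For reference, a standalone restatement of the normalization _load_collections applies to the collections keyword (the function name below is illustrative, not part of this diff): a bare scalar is wrapped in a list, and ansible.legacy is appended unless one of the magic ansible.builtin/ansible.legacy entries was given explicitly, which is the ordering the integration tests later in this diff assert.

# Illustrative restatement of CollectionSearch._load_collections' normalization rules.
def normalize_collections_keyword(ds):
    if not ds:
        # empty/None passes through; legacy resolution handles it
        return ds
    if not isinstance(ds, list):
        ds = [ds]
    if 'ansible.builtin' not in ds and 'ansible.legacy' not in ds:
        ds.append('ansible.legacy')
    return ds


print(normalize_collections_keyword('bogus.fromplay'))            # ['bogus.fromplay', 'ansible.legacy']
print(normalize_collections_keyword(['ansible.builtin', 'x.y']))  # unchanged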
@@ -0,0 +1,304 @@
# (c) 2019 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import os.path
import pkgutil
import re
import sys

from types import ModuleType

from ansible import constants as C
from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.module_utils.six import iteritems, string_types

# HACK: keep Python 2.6 controller tests happy in CI until they're properly split
try:
    from importlib import import_module
except ImportError:
    import_module = __import__

_SYNTHETIC_PACKAGES = {
    'ansible_collections.ansible': dict(type='pkg_only'),
    'ansible_collections.ansible.builtin': dict(type='pkg_only'),
    'ansible_collections.ansible.builtin.plugins': dict(type='map', map='ansible.plugins'),
    'ansible_collections.ansible.builtin.plugins.module_utils': dict(type='map', map='ansible.module_utils', graft=True),
    'ansible_collections.ansible.builtin.plugins.modules': dict(type='flatmap', flatmap='ansible.modules', graft=True),
}

# TODO: tighten this up to subset Python identifier requirements (and however we want to restrict ns/collection names)
_collection_qualified_re = re.compile(to_text(r'^(\w+)\.(\w+)\.(\w+)$'))


# FIXME: exception handling/error logging
class AnsibleCollectionLoader(object):
    def __init__(self):
        self._n_configured_paths = C.config.get_config_value('COLLECTIONS_PATHS')

        if isinstance(self._n_configured_paths, string_types):
            self._n_configured_paths = [self._n_configured_paths]
        elif self._n_configured_paths is None:
            self._n_configured_paths = []

        # expand any placeholders in configured paths
        self._n_configured_paths = [to_native(os.path.expanduser(p), errors='surrogate_or_strict') for p in self._n_configured_paths]

        self._n_playbook_paths = []
        # pre-inject grafted package maps so we can force them to use the right loader instead of potentially delegating to a "normal" loader
        for syn_pkg_def in (p for p in iteritems(_SYNTHETIC_PACKAGES) if p[1].get('graft')):
            pkg_name = syn_pkg_def[0]
            pkg_def = syn_pkg_def[1]

            newmod = ModuleType(pkg_name)
            newmod.__package__ = pkg_name
            newmod.__file__ = '<ansible_synthetic_collection_package>'
            pkg_type = pkg_def.get('type')

            # TODO: need to rethink map style so we can just delegate all the loading

            if pkg_type == 'flatmap':
                newmod.__loader__ = AnsibleFlatMapLoader(import_module(pkg_def['flatmap']))
            newmod.__path__ = []

            sys.modules[pkg_name] = newmod

    @property
    def _n_collection_paths(self):
        return self._n_playbook_paths + self._n_configured_paths

    def set_playbook_paths(self, b_playbook_paths):
        if isinstance(b_playbook_paths, string_types):
            b_playbook_paths = [b_playbook_paths]

        # track visited paths; we have to preserve the dir order as-passed in case there are duplicate collections (first one wins)
        added_paths = set()

        # de-dupe and ensure the paths are native strings (Python seems to do this for package paths etc, so assume it's safe)
        self._n_playbook_paths = [os.path.join(to_native(p), 'collections') for p in b_playbook_paths if not (p in added_paths or added_paths.add(p))]
        # FIXME: only allow setting this once, or handle any necessary cache/package path invalidations internally?

    def find_module(self, fullname, path=None):
        # this loader is only concerned with items under the Ansible Collections namespace hierarchy, ignore others
        if fullname.startswith('ansible_collections.') or fullname == 'ansible_collections':
            return self

        return None

    def load_module(self, fullname):
        if sys.modules.get(fullname):
            return sys.modules[fullname]

        # this loader implements key functionality for Ansible collections
        # * implicit distributed namespace packages for the root Ansible namespace (no pkgutil.extend_path hackery reqd)
        # * implicit package support for Python 2.7 (no need for __init__.py in collections, except to use standard Py2.7 tooling)
        # * preventing controller-side code injection during collection loading
        # * (default loader would execute arbitrary package code from all __init__.py's)

        parent_pkg_name = '.'.join(fullname.split('.')[:-1])

        parent_pkg = sys.modules.get(parent_pkg_name)

        if parent_pkg_name and not parent_pkg:
            raise ImportError('parent package {0} not found'.format(parent_pkg_name))

        # are we at or below the collection level? eg a.mynamespace.mycollection.something.else
        # if so, we don't want distributed namespace behavior; first mynamespace.mycollection on the path is where
        # we'll load everything from (ie, don't fall back to another mynamespace.mycollection lower on the path)
        sub_collection = fullname.count('.') > 1

        synpkg_def = _SYNTHETIC_PACKAGES.get(fullname)

        # FIXME: collapse as much of this back to on-demand as possible (maybe stub packages that get replaced when actually loaded?)
        if synpkg_def:
            pkg_type = synpkg_def.get('type')
            if not pkg_type:
                raise KeyError('invalid synthetic package type (no package "type" specified)')
            if pkg_type == 'map':
                map_package = synpkg_def.get('map')

                if not map_package:
                    raise KeyError('invalid synthetic map package definition (no target "map" defined)')
                mod = import_module(map_package)

                sys.modules[fullname] = mod

                return mod
            elif pkg_type == 'flatmap':
                raise NotImplementedError()
            elif pkg_type == 'pkg_only':
                newmod = ModuleType(fullname)
                newmod.__package__ = fullname
                newmod.__file__ = '<ansible_synthetic_collection_package>'
                newmod.__loader__ = self
                newmod.__path__ = []

                sys.modules[fullname] = newmod

                return newmod

        if not parent_pkg:  # top-level package, look for NS subpackages on all collection paths
            package_paths = [self._extend_path_with_ns(p, fullname) for p in self._n_collection_paths]
        else:  # subpackage; search in all subpaths (we'll limit later inside a collection)
            package_paths = [self._extend_path_with_ns(p, fullname) for p in parent_pkg.__path__]

        for candidate_child_path in package_paths:
            source = None
            is_package = True
            location = None
            # check for implicit sub-package first
            if os.path.isdir(candidate_child_path):
                # Py3.x implicit namespace packages don't have a file location, so they don't support get_data
                # (which assumes the parent dir or that the loader has an internal mapping); so we have to provide
                # a bogus leaf file on the __file__ attribute for pkgutil.get_data to strip off
                location = os.path.join(candidate_child_path, '__synthetic__')
            else:
                for source_path in [os.path.join(candidate_child_path, '__init__.py'),
                                    candidate_child_path + '.py']:
                    if not os.path.isfile(source_path):
                        continue

                    with open(source_path, 'rb') as fd:
                        source = fd.read()
                    location = source_path
                    is_package = source_path.endswith('__init__.py')
                    break

                if not location:
                    continue

            newmod = ModuleType(fullname)
            newmod.__package__ = fullname
            newmod.__file__ = location
            newmod.__loader__ = self

            if is_package:
                if sub_collection:  # we never want to search multiple instances of the same collection; use first found
                    newmod.__path__ = [candidate_child_path]
                else:
                    newmod.__path__ = package_paths

            if source:
                # FIXME: decide cases where we don't actually want to exec the code?
                exec(source, newmod.__dict__)

            sys.modules[fullname] = newmod

            return newmod

        # FIXME: need to handle the "no dirs present" case for at least the root and synthetic internal collections like ansible.builtin

        return None

    @staticmethod
    def _extend_path_with_ns(path, ns):
        ns_path_add = ns.rsplit('.', 1)[-1]

        return os.path.join(path, ns_path_add)

    def get_data(self, filename):
        with open(filename, 'rb') as fd:
            return fd.read()


class AnsibleFlatMapLoader(object):
    _extension_blacklist = ['.pyc', '.pyo']

    def __init__(self, root_package):
        self._root_package = root_package
        self._dirtree = None

    def _init_dirtree(self):
        # FIXME: thread safety
        root_path = os.path.dirname(self._root_package.__file__)
        flat_files = []
        # FIXME: make this a dict of filename->dir for faster direct lookup?
        # FIXME: deal with _ prefixed deprecated files (or require another method for collections?)
        # FIXME: fix overloaded filenames (eg, rename Windows setup to win_setup)
        for root, dirs, files in os.walk(root_path):
            # add all files in this dir that don't have a blacklisted extension
            flat_files.extend(((root, f) for f in files if not any((f.endswith(ext) for ext in self._extension_blacklist))))
        self._dirtree = flat_files

    def find_file(self, filename):
        # FIXME: thread safety
        if not self._dirtree:
            self._init_dirtree()

        if '.' not in filename:  # no extension specified, use extension regex to filter
            extensionless_re = re.compile(r'^{0}(\..+)?$'.format(re.escape(filename)))
            # why doesn't Python have first()?
            try:
                # FIXME: store extensionless in a separate direct lookup?
                filepath = next(os.path.join(r, f) for r, f in self._dirtree if extensionless_re.match(f))
            except StopIteration:
                raise IOError("couldn't find {0}".format(filename))
        else:  # actual filename, just look it up
            # FIXME: this case sucks; make it a lookup
            try:
                filepath = next(os.path.join(r, f) for r, f in self._dirtree if f == filename)
            except StopIteration:
                raise IOError("couldn't find {0}".format(filename))

        return filepath

    def get_data(self, filename):
        found_file = self.find_file(filename)

        with open(found_file, 'rb') as fd:
            return fd.read()


# TODO: implement these for easier inline debugging?
# def get_source(self, fullname):
# def get_code(self, fullname):
# def is_package(self, fullname):


def get_collection_role_path(role_name, collection_list=None):
    match = _collection_qualified_re.match(role_name)

    if match:
        grps = match.groups()
        collection_list = ['.'.join(grps[:2])]
        role = grps[2]
    elif not collection_list:
        return None  # not a FQ role and no collection search list spec'd, nothing to do
    else:
        role = role_name

    for collection_name in collection_list:
        try:
            role_package = u'ansible_collections.{0}.roles.{1}'.format(collection_name, role)
            # FIXME: error handling/logging; need to catch any import failures and move along

            # FIXME: this line shouldn't be necessary, but py2 pkgutil.get_data is delegating back to built-in loader when it shouldn't
            pkg = import_module(role_package + u'.tasks')

            # get_data input must be a native string
            tasks_file = pkgutil.get_data(to_native(role_package) + '.tasks', 'main.yml')

            if tasks_file is not None:
                # the package is now loaded, get the collection's package and ask where it lives
                path = os.path.dirname(to_bytes(sys.modules[role_package].__file__, errors='surrogate_or_strict'))
                return role, to_text(path, errors='surrogate_or_strict'), collection_name

        except IOError:
            continue
        except Exception as ex:
            # FIXME: pick out typical import errors first, then error logging
            continue

    return None


def is_collection_ref(candidate_name):
    return bool(_collection_qualified_re.match(candidate_name))


def set_collection_playbook_paths(b_playbook_paths):
    # set for any/all AnsibleCollectionLoader instance(s) on meta_path
    for loader in (l for l in sys.meta_path if isinstance(l, AnsibleCollectionLoader)):
        loader.set_playbook_paths(b_playbook_paths)
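A hedged usage sketch for the module-level helpers above, assuming an AnsibleCollectionLoader instance is already installed on sys.meta_path (which is what set_collection_playbook_paths expects) and that the example paths and collection names exist; the import path is assumed:

# Usage sketch only; paths, names, and the import path are illustrative.
from ansible.utils.collection_loader import (
    get_collection_role_path,
    is_collection_ref,
    set_collection_playbook_paths,
)

# make playbook-adjacent 'collections/' dirs visible to every installed loader instance
set_collection_playbook_paths([b'/path/to/playbook_dir'])

print(is_collection_ref('testns.testcoll.testrole'))  # True: namespace.collection.resource
print(is_collection_ref('testrole'))                  # False: unqualified legacy name

# returns (role_name, role_path, collection_name) when found, otherwise None
print(get_collection_role_path('testns.testcoll.testrole'))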
@@ -0,0 +1,3 @@
# use a plugin defined in a content-adjacent collection to ensure we added it properly
plugin: testns.content_adj.statichost
hostname: dynamic_host_a
@@ -0,0 +1,2 @@
shippable/posix/group4
skip/python2.6
@@ -0,0 +1,11 @@
#!/usr/bin/env python

import json


def main():
    print(json.dumps(dict(changed=False, source='sys')))


if __name__ == '__main__':
    main()
@@ -0,0 +1,11 @@
#!/usr/bin/env python

import json


def main():
    print(json.dumps(dict(changed=False, failed=True, msg='this collection should be masked by testcoll in the user content root')))


if __name__ == '__main__':
    main()
@@ -0,0 +1,11 @@
#!/usr/bin/env python

import json


def main():
    print(json.dumps(dict(changed=False, source='sys')))


if __name__ == '__main__':
    main()
@@ -0,0 +1,2 @@
- fail:
    msg: this role should never be visible or runnable
@@ -0,0 +1,30 @@
from ansible.plugins.action import ActionBase
from ansible.plugins import loader


class ActionModule(ActionBase):
    TRANSFERS_FILES = False
    _VALID_ARGS = frozenset(('type', 'name'))

    def run(self, tmp=None, task_vars=None):
        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(None, task_vars)

        type = self._task.args.get('type')
        name = self._task.args.get('name')

        result = dict(changed=False, collection_list=self._task.collections)

        if all([type, name]):
            attr_name = '{0}_loader'.format(type)

            typed_loader = getattr(loader, attr_name, None)

            if not typed_loader:
                return (dict(failed=True, msg='invalid plugin type {0}'.format(type)))

            result['plugin_path'] = typed_loader.find_plugin(name, collection_list=self._task.collections)

        return result
@@ -0,0 +1,24 @@
from ansible.plugins.callback import CallbackBase

DOCUMENTATION = '''
    callback: usercallback
    callback_type: notification
    short_description: does stuff
    description:
        - does some stuff
'''


class CallbackModule(CallbackBase):
    CALLBACK_VERSION = 2.0
    CALLBACK_TYPE = 'aggregate'
    CALLBACK_NAME = 'usercallback'
    CALLBACK_NEEDS_WHITELIST = True

    def __init__(self):

        super(CallbackModule, self).__init__()
        self._display.display("loaded usercallback from collection, yay")

    def v2_runner_on_ok(self, result):
        self._display.display("usercallback says ok")
@@ -0,0 +1,38 @@
from ansible.module_utils._text import to_native
from ansible.plugins.connection import ConnectionBase

DOCUMENTATION = """
    connection: localconn
    short_description: do stuff local
    description:
        - does stuff
    options:
      connectionvar:
        description:
            - something we set
        default: the_default
        vars:
            - name: ansible_localconn_connectionvar
"""


class Connection(ConnectionBase):
    transport = 'local'
    has_pipelining = True

    def _connect(self):
        return self

    def exec_command(self, cmd, in_data=None, sudoable=True):
        stdout = 'localconn ran {0}'.format(to_native(cmd))
        stderr = 'connectionvar is {0}'.format(to_native(self.get_option('connectionvar')))
        return (0, stdout, stderr)

    def put_file(self, in_path, out_path):
        raise NotImplementedError('just a test')

    def fetch_file(self, in_path, out_path):
        raise NotImplementedError('just a test')

    def close(self):
        self._connected = False
@@ -0,0 +1,10 @@
def testfilter(data):
    return "{0}_from_userdir".format(data)


class FilterModule(object):

    def filters(self):
        return {
            'testfilter': testfilter
        }
@@ -0,0 +1,8 @@
from ansible.plugins.lookup import LookupBase


class LookupModule(LookupBase):

    def run(self, terms, variables, **kwargs):

        return ['lookup_from_user_dir']
@@ -0,0 +1,7 @@
# FIXME: this style (full module import via from) doesn't work yet from collections
# from ansible_collections.testns.testcoll.plugins.module_utils import secondary
import ansible_collections.testns.testcoll.plugins.module_utils.secondary


def thingtocall():
    return "thingtocall in base called " + ansible_collections.testns.testcoll.plugins.module_utils.secondary.thingtocall()
@@ -0,0 +1,2 @@
def thingtocall():
    return "thingtocall in leaf"
@@ -0,0 +1,2 @@
def thingtocall():
    return "thingtocall in secondary"
@@ -0,0 +1,11 @@
#!/usr/bin/env python

import json


def main():
    print(json.dumps(dict(changed=False, source='user')))


if __name__ == '__main__':
    main()
@@ -0,0 +1,11 @@
#!/usr/bin/env python

import json


def main():
    print(json.dumps(dict(changed=False, source='user')))


if __name__ == '__main__':
    main()
@@ -0,0 +1,19 @@
#!/usr/bin/env python

import json
import sys

# FIXME: this is only required due to a bug around "new style module detection"
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.testns.testcoll.plugins.module_utils.base import thingtocall


def main():
    mu_result = thingtocall()
    print(json.dumps(dict(changed=False, source='user', mu_result=mu_result)))

    sys.exit()


if __name__ == '__main__':
    main()
@@ -0,0 +1,19 @@
#!/usr/bin/env python

import json
import sys

# FIXME: this is only required due to a bug around "new style module detection"
from ansible.module_utils.basic import AnsibleModule
import ansible_collections.testns.testcoll.plugins.module_utils.leaf


def main():
    mu_result = ansible_collections.testns.testcoll.plugins.module_utils.leaf.thingtocall()
    print(json.dumps(dict(changed=False, source='user', mu_result=mu_result)))

    sys.exit()


if __name__ == '__main__':
    main()
@@ -0,0 +1,19 @@
#!/usr/bin/env python

import json
import sys

# FIXME: this is only required due to a bug around "new style module detection"
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.testns.testcoll.plugins.module_utils.leaf import thingtocall


def main():
    mu_result = thingtocall()
    print(json.dumps(dict(changed=False, source='user', mu_result=mu_result)))

    sys.exit()


if __name__ == '__main__':
    main()
@@ -0,0 +1,20 @@
#!/usr/bin/env python

import json
import sys

# FIXME: this is only required due to a bug around "new style module detection"
from ansible.module_utils.basic import AnsibleModule
# FIXME: this style doesn't work yet under collections
from ansible_collections.testns.testcoll.plugins.module_utils import leaf


def main():
    mu_result = leaf.thingtocall()
    print(json.dumps(dict(changed=False, source='user', mu_result=mu_result)))

    sys.exit()


if __name__ == '__main__':
    main()
@@ -0,0 +1,9 @@
def testtest(data):
    return data == 'from_user'


class TestModule(object):
    def tests(self):
        return {
            'testtest': testtest
        }
@@ -0,0 +1,4 @@
collections:
  - ansible.builtin
  - testns.coll_in_sys
  - bogus.fromrolemeta
@@ -0,0 +1,30 @@
- name: check collections list from role meta
  plugin_lookup:
  register: pluginlookup_out

- name: call role-local ping module
  ping:
  register: ping_out

- name: call unqualified module in another collection listed in role meta (testns.coll_in_sys)
  systestmodule:
  register: systestmodule_out

# verify that pluginloader caching doesn't prevent us from explicitly calling a builtin plugin with the same name
- name: call builtin ping module explicitly
  ansible.builtin.ping:
  register: builtinping_out

- debug:
    msg: '{{ test_role_input | default("(undefined)") }}'
  register: test_role_output

# FIXME: add tests to ensure that block/task level stuff in a collection-hosted role properly inherit role default/meta values

- assert:
    that:
      - pluginlookup_out.collection_list == ['testns.testcoll', 'ansible.builtin', 'testns.coll_in_sys', 'bogus.fromrolemeta']
      - ping_out.source is defined and ping_out.source == 'user'
      - systestmodule_out.source is defined and systestmodule_out.source == 'sys'
      - builtinping_out.ping is defined and builtinping_out.ping == 'pong'
      - test_role_input is not defined or test_role_input == test_role_output.msg
@@ -0,0 +1,55 @@
# Copyright (c) 2018 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

DOCUMENTATION = '''
    inventory: statichost
    short_description: Add a single host
    options:
      plugin:
        description: plugin name (must be statichost)
        required: true
      hostname:
        description: Toggle display of stderr even when script was successful
        type: list
'''

from ansible.errors import AnsibleParserError
from ansible.plugins.inventory import BaseInventoryPlugin, Cacheable


class InventoryModule(BaseInventoryPlugin):

    NAME = 'statichost'

    def __init__(self):

        super(InventoryModule, self).__init__()

        self._hosts = set()

    def verify_file(self, path):
        ''' Verify if file is usable by this plugin, base does minimal accessibility check '''

        if not path.endswith('.statichost.yml') and not path.endswith('.statichost.yaml'):
            return False
        return super(InventoryModule, self).verify_file(path)

    def parse(self, inventory, loader, path, cache=None):

        super(InventoryModule, self).parse(inventory, loader, path)

        config_data = loader.load_from_file(path, cache=False)
        host_to_add = config_data.get('hostname')

        if not host_to_add:
            raise AnsibleParserError("hostname was not specified")

        # this is where the magic happens
        self.inventory.add_host(host_to_add, 'all')

        # self.inventory.add_group()...
        # self.inventory.add_child()...
        # self.inventory.set_variable()..
@@ -0,0 +1,11 @@
#!/usr/bin/env python

import json


def main():
    print(json.dumps(dict(changed=False, source='content_adj')))


if __name__ == '__main__':
    main()
@@ -0,0 +1,11 @@
#!/usr/bin/python

import json


def main():
    print(json.dumps(dict(changed=False, source='legacy_library_dir')))


if __name__ == '__main__':
    main()
@@ -0,0 +1,280 @@
- hosts: testhost
  tasks:
    # basic test of FQ module lookup and that we got the right one (user-dir hosted)
    - name: exec FQ module in a user-dir testns collection
      testns.testcoll.testmodule:
      register: testmodule_out

    # verifies that distributed collection subpackages are visible under a multi-location namespace (testns exists in user and sys locations)
    - name: exec FQ module in a sys-dir testns collection
      testns.coll_in_sys.systestmodule:
      register: systestmodule_out

    # verifies that content-adjacent collections were automatically added to the installed content roots
    - name: exec FQ module from content-adjacent collection
      testns.content_adj.contentadjmodule:
      register: contentadjmodule_out

    # content should only be loaded from the first visible instance of a collection
    - name: attempt to look up FQ module in a masked collection
      testns.testcoll.plugin_lookup:
        type: module
        name: testns.testcoll.maskedmodule
      register: maskedmodule_out

    # module with a granular module_utils import (from (this collection).module_utils.leaf import thingtocall)
    - name: exec module with granular module utils import from this collection
      testns.testcoll.uses_leaf_mu_granular_import:
      register: granular_out

    # module with a granular nested module_utils import (from (this collection).module_utils.base import thingtocall,
    # where base imports secondary from the same collection's module_utils)
    - name: exec module with nested module utils from this collection
      testns.testcoll.uses_base_mu_granular_nested_import:
      register: granular_nested_out

    # module with a flat module_utils import (import (this collection).module_utils.leaf)
    - name: exec module with flat module_utils import from this collection
      testns.testcoll.uses_leaf_mu_flat_import:
      register: flat_out

    # FIXME: this one doesn't work yet
    # module with a full-module module_utils import using 'from' (from (this collection).module_utils import leaf)
    - name: exec module with full-module module_utils import using 'from' from this collection
      testns.testcoll.uses_leaf_mu_module_import_from:
      ignore_errors: true
      register: from_out

    - assert:
        that:
          - testmodule_out.source == 'user'
          - systestmodule_out.source == 'sys'
          - contentadjmodule_out.source == 'content_adj'
          - not maskedmodule_out.plugin_path
          - granular_out.mu_result == 'thingtocall in leaf'
          - granular_nested_out.mu_result == 'thingtocall in base called thingtocall in secondary'
          - flat_out.mu_result == 'thingtocall in leaf'
          - from_out is failed  # FIXME: switch back once this import is fixed --> from_out.mu_result == 'thingtocall in leaf'

    - name: exercise filters/tests/lookups
      assert:
        that:
          - "'data' | testns.testcoll.testfilter == 'data_from_userdir'"
          - "'from_user' is testns.testcoll.testtest"
          - lookup('testns.testcoll.mylookup') == 'lookup_from_user_dir'

# ensure that the synthetic ansible.builtin collection limits to builtin plugins, that ansible.legacy loads overrides
# from legacy plugin dirs, and that a same-named plugin loaded from a real collection is not masked by the others
- hosts: testhost
  tasks:
    - name: test unqualified ping from library dir
      ping:
      register: unqualified_ping_out

    - name: test legacy-qualified ping from library dir
      ansible.legacy.ping:
      register: legacy_ping_out

    - name: test builtin ping
      ansible.builtin.ping:
      register: builtin_ping_out

    - name: test collection-based ping
      testns.testcoll.ping:
      register: collection_ping_out

    - assert:
        that:
          - unqualified_ping_out.source == 'legacy_library_dir'
          - legacy_ping_out.source == 'legacy_library_dir'
          - builtin_ping_out.ping == 'pong'
          - collection_ping_out.source == 'user'

# verify the default value for the collections list is empty
- hosts: testhost
  tasks:
    - name: sample default collections value
      testns.testcoll.plugin_lookup:
      register: coll_default_out

    - assert:
        that:
          # in original release, collections defaults to empty, which is mostly equivalent to ansible.legacy
          - not coll_default_out.collection_list


# ensure that inheritance/masking works as expected, that the proper default values are injected when missing,
# and that the order is preserved if one of the magic values is explicitly specified
- name: verify collections keyword play/block/task inheritance and magic values
  hosts: testhost
  collections:
    - bogus.fromplay
  tasks:
    - name: sample play collections value
      testns.testcoll.plugin_lookup:
      register: coll_play_out

    - name: collections override block-level
      collections:
        - bogus.fromblock
      block:
        - name: sample block collections value
          testns.testcoll.plugin_lookup:
          register: coll_block_out

        - name: sample task collections value
          collections:
            - bogus.fromtask
          testns.testcoll.plugin_lookup:
          register: coll_task_out

        - name: sample task with explicit core
          collections:
            - ansible.builtin
            - bogus.fromtaskexplicitcore
          testns.testcoll.plugin_lookup:
          register: coll_task_core

        - name: sample task with explicit legacy
          collections:
            - ansible.legacy
            - bogus.fromtaskexplicitlegacy
          testns.testcoll.plugin_lookup:
          register: coll_task_legacy

        - assert:
            that:
              # ensure that parent value inheritance is masked properly by explicit setting
              - coll_play_out.collection_list == ['bogus.fromplay', 'ansible.legacy']
              - coll_block_out.collection_list == ['bogus.fromblock', 'ansible.legacy']
              - coll_task_out.collection_list == ['bogus.fromtask', 'ansible.legacy']
              - coll_task_core.collection_list == ['ansible.builtin', 'bogus.fromtaskexplicitcore']
              - coll_task_legacy.collection_list == ['ansible.legacy', 'bogus.fromtaskexplicitlegacy']

- name: verify unqualified plugin resolution behavior
  hosts: testhost
  collections:
    - testns.testcoll
    - testns.coll_in_sys
    - testns.contentadj
  tasks:
    # basic test of unqualified module lookup and that we got the right one (user-dir hosted, there's another copy of
    # this one in the same-named collection in sys dir that should be masked
    - name: exec unqualified module in a user-dir testns collection
      testmodule:
      register: testmodule_out

    # use another collection to verify that we're looking in all collections listed on the play
    - name: exec unqualified module in a sys-dir testns collection
      systestmodule:
      register: systestmodule_out

    # ensure we're looking up actions properly
    - name: unqualified action test
      plugin_lookup:
      register: pluginlookup_out

    - assert:
        that:
          - testmodule_out.source == 'user'
          - systestmodule_out.source == 'sys'
          - pluginlookup_out.collection_list == ['testns.testcoll', 'testns.coll_in_sys', 'testns.contentadj', 'ansible.legacy']

    # FIXME: this won't work until collections list gets passed through task templar
    # - name: exercise unqualified filters/tests/lookups
    #   assert:
    #     that:
    #       - "'data' | testfilter == 'data_from_userdir'"
    #       - "'from_user' is testtest"
    #       - lookup('mylookup') == 'lookup_from_user_dir'


# test keyword-static execution of a FQ collection-backed role
- name: verify collection-backed role execution (keyword static)
  hosts: testhost
  collections:
    # set to ansible.builtin only to ensure that roles function properly without inheriting the play's collections config
    - ansible.builtin
  vars:
    test_role_input: keyword static
  roles:
    - role: testns.testcoll.testrole
  tasks:
    - name: ensure role executed
      assert:
        that:
          - test_role_output.msg == test_role_input


# test dynamic execution of a FQ collection-backed role
- name: verify collection-backed role execution (dynamic)
  hosts: testhost
  collections:
    # set to ansible.builtin only to ensure that roles function properly without inheriting the play's collections config
    - ansible.builtin
  vars:
    test_role_input: dynamic
  tasks:
    - include_role:
        name: testns.testcoll.testrole
    - name: ensure role executed
      assert:
        that:
          - test_role_output.msg == test_role_input


# test task-static execution of a FQ collection-backed role
- name: verify collection-backed role execution (task static)
  hosts: testhost
  collections:
    - ansible.builtin
  vars:
    test_role_input: task static
  tasks:
    - import_role:
        name: testns.testcoll.testrole
    - name: ensure role executed
      assert:
        that:
          - test_role_output.msg == test_role_input


# test a legacy playbook-adjacent role, ensure that play collections config is not inherited
- name: verify legacy playbook-adjacent role behavior
  hosts: testhost
  collections:
    - bogus.bogus
  vars:
    test_role_input: legacy playbook-adjacent
  roles:
    - testrole
    # FIXME: this should technically work to look up a playbook-adjacent role
    # - ansible.legacy.testrole
  tasks:
    - name: ensure role executed
      assert:
        that:
          - test_role_output.msg == test_role_input


- name: test a collection-hosted connection plugin against a host from a collection-hosted inventory plugin
  hosts: dynamic_host_a
  vars:
    ansible_connection: testns.testcoll.localconn
    ansible_localconn_connectionvar: from_play
  tasks:
    - raw: echo 'hello world'
      register: connection_out

    - assert:
        that:
          - connection_out.stdout == "localconn ran echo 'hello world'"
          # ensure that the connection var we overrode above made it into the running config
          - connection_out.stderr == "connectionvar is from_play"

- hosts: testhost
  tasks:
    - assert:
        that:
          - hostvars['dynamic_host_a'] is defined
          - hostvars['dynamic_host_a'].connection_out.stdout == "localconn ran echo 'hello world'"
@@ -0,0 +1,8 @@
# this test specifically avoids testhost because we need to know about the controller's Python
- hosts: localhost
  gather_facts: yes
  gather_subset: min
  tasks:
    - debug:
        msg: UNSUPPORTEDPYTHON {{ ansible_python_version }}
      when: ansible_python_version is version('2.7', '<')
@@ -0,0 +1,25 @@
- debug:
    msg: executing testrole from legacy playbook-adjacent roles dir

- name: exec a FQ module from a legacy role
  testns.testcoll.testmodule:
  register: coll_module_out

- name: exec a legacy playbook-adjacent module from a legacy role
  ping:
  register: ping_out

- name: sample collections list inside a legacy role (should be empty)
  testns.testcoll.plugin_lookup:
  register: plugin_lookup_out

- debug:
    msg: '{{ test_role_input | default("(undefined)") }}'
  register: test_role_output

- assert:
    that:
      - coll_module_out.source == 'user'
      # ensure we used the library/ ping override, not the builtin or one from another collection
      - ping_out.source == 'legacy_library_dir'
      - not plugin_lookup_out.collection_list
@@ -0,0 +1,19 @@
#!/usr/bin/env bash

set -eux

export ANSIBLE_COLLECTIONS_PATHS=$PWD/collection_root_user:$PWD/collection_root_sys
export ANSIBLE_GATHERING=explicit
export ANSIBLE_GATHER_SUBSET=minimal

# temporary hack to keep this test from running on Python 2.6 in CI
if ansible-playbook -i ../../inventory pythoncheck.yml | grep UNSUPPORTEDPYTHON; then
    echo skipping test for unsupported Python version...
    exit 0
fi

# test callback
ANSIBLE_CALLBACK_WHITELIST=testns.testcoll.usercallback ansible localhost -m ping | grep "usercallback says ok"

# run test playbook
ansible-playbook -i ../../inventory -i ./a.statichost.yml -v play.yml