refactor and fixes for doc parsing (#77719)

* refactor and remove redundant code in documentation

  allow location and building api to be more accessible
  fix issues with displaying ansible.legacy and ansible.builtin
  ensure we don't double-process tokens (some modules reference them also); fixes #77764
  move to constants vs hardcoded
  more informative errors and comments
  now have actual filter/test plugins, which expose the filter/test functions
  moved filter/test loading/finding logic into jinja2pluginloader, removed dupe implementations
  added tests for case in which we unique by basename when listing

Update lib/ansible/utils/plugin_docs.py
Co-authored-by: Sloane Hertel <19572925+s-hertel@users.noreply.github.com>
pull/78686/head
Brian Coca 2 years ago committed by GitHub
parent 2464e1e91c
commit 4260b71cc7
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

@ -26,7 +26,7 @@ from ansible import constants as C
from ansible import context
from ansible.cli.arguments import option_helpers as opt_help
from ansible.collections.list import list_collection_dirs
from ansible.errors import AnsibleError, AnsibleOptionsError, AnsibleParserError
from ansible.errors import AnsibleError, AnsibleOptionsError, AnsibleParserError, AnsiblePluginNotFound
from ansible.module_utils._text import to_native, to_text
from ansible.module_utils.common.json import json_dump
from ansible.module_utils.common.yaml import yaml_dump
@ -40,10 +40,7 @@ from ansible.plugins.loader import action_loader, fragment_loader
from ansible.utils.collection_loader import AnsibleCollectionConfig, AnsibleCollectionRef
from ansible.utils.collection_loader._collection_finder import _get_collection_name_from_path
from ansible.utils.display import Display
from ansible.utils.plugin_docs import (
get_docstring,
get_versioned_doclink,
)
from ansible.utils.plugin_docs import get_plugin_docs, get_docstring, get_versioned_doclink
display = Display()
@ -67,10 +64,6 @@ def jdump(text):
raise AnsibleError('We could not convert all the documentation into JSON as there was a conversion issue: %s' % to_native(e))
class PluginNotFound(Exception):
    """Raised when a plugin cannot be located in the configured search paths."""
    pass
class RoleMixin(object):
"""A mixin containing all methods relevant to role argument specification functionality.
@ -649,7 +642,7 @@ class DocCLI(CLI, RoleMixin):
loader = DocCLI._prep_loader(plugin_type)
coll_filter = self._get_collection_filter()
self.plugins.update(list_plugins(plugin_type, coll_filter, context.CLIARGS['module_path']))
self.plugins.update(list_plugins(plugin_type, coll_filter))
# get appropriate content depending on option
if content == 'dir':
@ -665,22 +658,19 @@ class DocCLI(CLI, RoleMixin):
def _get_plugins_docs(self, plugin_type, names, fail_ok=False, fail_on_errors=True):
loader = DocCLI._prep_loader(plugin_type)
search_paths = DocCLI.print_paths(loader)
# get the docs for plugins in the command line list
plugin_docs = {}
for plugin in names:
doc = {}
try:
doc, plainexamples, returndocs, metadata = DocCLI._get_plugin_doc(plugin, plugin_type, loader, search_paths)
except PluginNotFound:
display.warning("%s %s not found in:\n%s\n" % (plugin_type, plugin, search_paths))
doc, plainexamples, returndocs, metadata = get_plugin_docs(plugin, plugin_type, loader, fragment_loader, (context.CLIARGS['verbosity'] > 0))
except AnsiblePluginNotFound as e:
display.warning(to_native(e))
continue
except Exception as e:
if not fail_on_errors:
plugin_docs[plugin] = {
'error': 'Missing documentation or could not parse documentation: %s' % to_native(e),
}
plugin_docs[plugin] = {'error': 'Missing documentation or could not parse documentation: %s' % to_native(e)}
continue
display.vvv(traceback.format_exc())
msg = "%s %s missing documentation (or could not parse documentation): %s\n" % (plugin_type, plugin, to_native(e))
@ -692,9 +682,7 @@ class DocCLI(CLI, RoleMixin):
if not doc:
# The doc section existed but was empty
if not fail_on_errors:
plugin_docs[plugin] = {
'error': 'No valid documentation found',
}
plugin_docs[plugin] = {'error': 'No valid documentation found'}
continue
docs = DocCLI._combine_plugin_doc(plugin, plugin_type, doc, plainexamples, returndocs, metadata)
@ -703,9 +691,7 @@ class DocCLI(CLI, RoleMixin):
try:
json_dump(docs)
except Exception as e: # pylint:disable=broad-except
plugin_docs[plugin] = {
'error': 'Cannot serialize documentation as JSON: %s' % to_native(e),
}
plugin_docs[plugin] = {'error': 'Cannot serialize documentation as JSON: %s' % to_native(e)}
continue
plugin_docs[plugin] = docs
@ -777,18 +763,23 @@ class DocCLI(CLI, RoleMixin):
ptypes = TARGET_OPTIONS
docs['all'] = {}
for ptype in ptypes:
# TODO: remove when we have all shipped plugins of these types documented
# also fail_ok below should be False
if ptype in ('test', 'filter'):
no_fail = True
else:
no_fail = (not context.CLIARGS['no_fail_on_errors'])
if ptype == 'role':
roles = self._create_role_list(fail_on_errors=not context.CLIARGS['no_fail_on_errors'])
docs['all'][ptype] = self._create_role_doc(
roles.keys(), context.CLIARGS['entry_point'], fail_on_errors=not context.CLIARGS['no_fail_on_errors'])
roles = self._create_role_list(fail_on_errors=no_fail)
docs['all'][ptype] = self._create_role_doc(roles.keys(), context.CLIARGS['entry_point'], fail_on_errors=no_fail)
elif ptype == 'keyword':
names = DocCLI._list_keywords()
docs['all'][ptype] = DocCLI._get_keywords_docs(names.keys())
else:
plugin_names = self._list_plugins(ptype, None)
# TODO: remove exception for test/filter once all core ones are documented
docs['all'][ptype] = self._get_plugins_docs(ptype, plugin_names, fail_ok=(ptype in ('test', 'filter')),
fail_on_errors=not context.CLIARGS['no_fail_on_errors'])
docs['all'][ptype] = self._get_plugins_docs(ptype, plugin_names, fail_ok=(ptype in ('test', 'filter')), fail_on_errors=no_fail)
# reset list after each type to avoid pollution
elif listing:
if plugin_type == 'keyword':
@ -863,7 +854,7 @@ class DocCLI(CLI, RoleMixin):
paths = loader._get_paths_with_context()
plugins = {}
for path_context in paths:
plugins.update(list_plugins(plugin_type, searc_path=context.CLIARGS['module_path']))
plugins.update(list_plugins(plugin_type))
return sorted(plugins.keys())
@staticmethod
@ -907,32 +898,6 @@ class DocCLI(CLI, RoleMixin):
return clean_ns
@staticmethod
def _get_plugin_doc(plugin, plugin_type, loader, search_paths):
    """Resolve the file for *plugin* and extract its documentation parts.

    :returns: tuple of (doc dict, plain-text examples, return docs, metadata)
    :raises PluginNotFound: when no plugin file resolves for any doc extension
    :raises ValueError: when the resolved file has no DOCUMENTATION attribute
    """
    # if the plugin lives in a non-python file (eg, win_X.ps1), require the corresponding python file for docs
    for ext in C.DOC_EXTENSIONS:
        result = loader.find_plugin_with_context(plugin, mod_type=ext, ignore_deprecated=True, check_aliases=True)
        if result.resolved:
            break
    else:
        # for/else: the loop exhausted every extension without breaking,
        # so nothing resolved (the inner check is therefore always true here)
        # NOTE(review): if C.DOC_EXTENSIONS were empty, `result` would be unbound — TODO confirm it never is
        if not result.resolved:
            raise PluginNotFound('%s was not found in %s' % (plugin, search_paths))

    filename = result.plugin_resolved_path
    collection_name = result.plugin_resolved_collection

    doc, plainexamples, returndocs, metadata = get_docstring(
        filename, fragment_loader, verbose=(context.CLIARGS['verbosity'] > 0),
        collection_name=collection_name, plugin_type=plugin_type)

    # If the plugin existed but did not have a DOCUMENTATION element and was not removed, it's an error
    if doc is None:
        raise ValueError('%s did not contain a DOCUMENTATION attribute' % plugin)

    # record where the docs came from for display purposes
    doc['filename'] = filename
    doc['collection'] = collection_name
    return doc, plainexamples, returndocs, metadata
@staticmethod
def _combine_plugin_doc(plugin, plugin_type, doc, plainexamples, returndocs, metadata):
# generate extra data
@ -993,6 +958,7 @@ class DocCLI(CLI, RoleMixin):
descs = {}
for plugin in self.plugins.keys():
# TODO: move to plugin itself i.e: plugin.get_desc()
doc = None
filename = Path(to_native(self.plugins[plugin][0]))
docerror = None
@ -1006,7 +972,7 @@ class DocCLI(CLI, RoleMixin):
# handle test/filters that are in file with diff name
base = plugin.split('.')[-1]
basefile = filename.with_name(base + filename.suffix)
for extension in ('.py', '.yml', '.yaml'): # TODO: constant?
for extension in C.DOC_EXTENSIONS:
docfile = basefile.with_suffix(extension)
try:
if docfile.exists():

@ -366,3 +366,8 @@ class AnsibleCollectionUnsupportedVersionError(AnsiblePluginError):
class AnsibleFilterTypeError(AnsibleTemplateError, TypeError):
''' a Jinja filter templating failure due to bad type'''
pass
class AnsiblePluginNotFound(AnsiblePluginError):
    '''Raised when an Ansible plugin could not be located.'''

@ -52,8 +52,9 @@ def read_docstring_from_yaml_file(filename, verbose=True, ignore_errors=True):
elif verbose:
display.error(msg)
for key in string_to_vars:
data[string_to_vars[key]] = file_data.get(key, None)
if file_data:
for key in string_to_vars:
data[string_to_vars[key]] = file_data.get(key, None)
return data
@ -64,32 +65,44 @@ def read_docstring_from_python_module(filename, verbose=True, ignore_errors=True
Parse from YAML and return the resulting python structure or None together with examples as plain text.
"""
found = 0
seen = set()
data = _init_doc_dict()
next_string = None
with tokenize.open(filename) as f:
tokens = tokenize.generate_tokens(f.readline)
for token in tokens:
# found label that looks like a variable
if token.type == tokenize.NAME:
if token.string in string_to_vars:
# label is expected value, in correct place and has not been seen before
if token.start == 1 and token.string in string_to_vars and token.string not in seen:
# next token that is string has the docs
next_string = string_to_vars[token.string]
continue
# previous token indicated this string is a doc string
if next_string is not None and token.type == tokenize.STRING:
found += 1
# ensure we only process one case of it
seen.add(token.string)
value = token.string
# strip string modifiers/delimiters
if value.startswith(('r', 'b')):
value = value.lstrip('rb')
if value.startswith(("'", '"')):
value = value.strip("'\"")
# actually use the data
if next_string == 'plainexamples':
# keep as string
# keep as string, can be yaml, but we let caller deal with it
data[next_string] = to_text(value)
else:
# yaml load the data
try:
data[next_string] = AnsibleLoader(value, file_name=filename).get_single_data()
except Exception as e:
@ -102,7 +115,7 @@ def read_docstring_from_python_module(filename, verbose=True, ignore_errors=True
next_string = None
# if nothing else worked, fall back to old method
if not found:
if not seen:
data = read_docstring_from_python_file(filename, verbose, ignore_errors)
return data

@ -26,7 +26,6 @@ from ansible import context
from ansible.module_utils.compat.paramiko import paramiko
from ansible.playbook.attribute import FieldAttribute
from ansible.playbook.base import Base
from ansible.plugins import get_plugin_class
from ansible.utils.display import Display
from ansible.utils.ssh_functions import check_for_controlpersist
@ -168,7 +167,7 @@ class PlayContext(Base):
# generic derived from connection plugin, temporary for backwards compat, in the end we should not set play_context properties
# get options for plugins
options = C.config.get_configuration_definitions(get_plugin_class(plugin), plugin._load_name)
options = C.config.get_configuration_definitions(plugin.plugin_type, plugin._load_name)
for option in options:
if option:
flag = options[option].get('name')

@ -57,11 +57,12 @@ class AnsiblePlugin(ABC):
def __init__(self):
self._options = {}
self._defs = None
def get_option(self, option, hostvars=None):
if option not in self._options:
try:
option_value = C.config.get_config_value(option, plugin_type=get_plugin_class(self), plugin_name=self._load_name, variables=hostvars)
option_value = C.config.get_config_value(option, plugin_type=self.plugin_type, plugin_name=self._load_name, variables=hostvars)
except AnsibleError as e:
raise KeyError(to_native(e))
self.set_option(option, option_value)
@ -69,8 +70,7 @@ class AnsiblePlugin(ABC):
def get_options(self, hostvars=None):
options = {}
defs = C.config.get_configuration_definitions(plugin_type=get_plugin_class(self), name=self._load_name)
for option in defs:
for option in self.option_definitions.keys():
options[option] = self.get_option(option, hostvars=hostvars)
return options
@ -85,7 +85,7 @@ class AnsiblePlugin(ABC):
:arg var_options: Dict with either 'connection variables'
:arg direct: Dict with 'direct assignment'
'''
self._options = C.config.get_plugin_options(get_plugin_class(self), self._load_name, keys=task_keys, variables=var_options, direct=direct)
self._options = C.config.get_plugin_options(self.plugin_type, self._load_name, keys=task_keys, variables=var_options, direct=direct)
# allow extras/wildcards from vars that are not directly consumed in configuration
# this is needed to support things like winrm that can have extended protocol options we don't directly handle
@ -97,6 +97,37 @@ class AnsiblePlugin(ABC):
self.set_options()
return option in self._options
@property
def plugin_type(self):
    # Derive the plugin type from the concrete class name, lowercased;
    # presumably the 'module' strip maps '...Module'-suffixed class names
    # to their bare type — TODO confirm which classes rely on this.
    return self.__class__.__name__.lower().replace('module', '')
@property
def option_definitions(self):
    # Lazily fetch this plugin's configuration definitions from the global
    # config and cache them on the instance for subsequent accesses.
    if self._defs is None:
        self._defs = C.config.get_configuration_definitions(plugin_type=self.plugin_type, name=self._load_name)
    return self._defs
def _check_required(self):
    """Placeholder for validating required options; currently a no-op."""
    # FIXME: standardize required check based on config
    pass
class AnsibleJinja2Plugin(AnsiblePlugin):
    """Base wrapper exposing a single Jinja2 callable as an Ansible plugin."""

    def __init__(self, function):
        super(AnsibleJinja2Plugin, self).__init__()
        # the raw Jinja2 callable this plugin wraps
        self._function = function

    @property
    def plugin_type(self):
        # strip the 'ansiblejinja2' prefix from the lowercased class name,
        # e.g. AnsibleJinja2Filter -> 'filter'
        return self.__class__.__name__.lower().replace('ansiblejinja2', '')

    def _no_options(self, *args, **kwargs):
        raise NotImplementedError()

    # Jinja2 plugins receive direct arguments, so the whole option API is
    # disabled by rebinding every accessor/mutator to _no_options.
    has_option = get_option = get_options = option_definitions = set_option = set_options = _no_options

    @property
    def j2_function(self):
        # expose the wrapped Jinja2 callable to the templating layer
        return self._function

@ -35,7 +35,7 @@ from ansible.module_utils.six import text_type
from ansible.parsing.ajson import AnsibleJSONEncoder
from ansible.parsing.yaml.dumper import AnsibleDumper
from ansible.parsing.yaml.objects import AnsibleUnicode
from ansible.plugins import AnsiblePlugin, get_plugin_class
from ansible.plugins import AnsiblePlugin
from ansible.utils.color import stringc
from ansible.utils.display import Display
from ansible.utils.unsafe_proxy import AnsibleUnsafeText, NativeJinjaUnsafeText
@ -178,7 +178,7 @@ class CallbackBase(AnsiblePlugin):
'''
# load from config
self._plugin_options = C.config.get_plugin_options(get_plugin_class(self), self._load_name, keys=task_keys, variables=var_options, direct=direct)
self._plugin_options = C.config.get_plugin_options(self.plugin_type, self._load_name, keys=task_keys, variables=var_options, direct=direct)
@staticmethod
def host_label(result):

@ -1,3 +1,14 @@
# Make coding more python3-ish
# (c) Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible import constants as C
from ansible.plugins import AnsibleJinja2Plugin
class AnsibleJinja2Filter(AnsibleJinja2Plugin):
    """Plugin wrapper for a single Jinja2 filter function."""

    def _no_options(self, *args, **kwargs):
        # Fixed typo in the user-facing message: 'Jinaj2' -> 'Jinja2'.
        raise NotImplementedError("Jinja2 filter plugins do not support option functions, they use direct arguments instead.")

@ -7,15 +7,14 @@ __metaclass__ = type
import os
from ansible import context
from ansible import constants as C
from ansible.collections.list import list_collections
from ansible.errors import AnsibleError
from ansible.module_utils._text import to_native, to_bytes
from ansible.plugins import loader
from ansible.utils.display import Display
from ansible.utils.path import is_subpath
from ansible.utils.collection_loader._collection_finder import _get_collection_path
from ansible.utils.collection_loader._collection_finder import _get_collection_path, AnsibleCollectionRef
display = Display()
@ -27,6 +26,16 @@ IGNORE = {
}
def get_composite_name(collection, name, path, depth):
    """Build the fully-qualified dotted plugin name for *name* under *collection*.

    When *depth* is non-zero, the last *depth* directory components of *path*
    are included between the collection namespace and the plugin name.
    """
    parts = [collection]
    if depth:
        parts.extend(path.split(os.path.sep)[-depth:])
    parts.append(to_native(name))
    return '.'.join(parts)
def _list_plugins_from_paths(ptype, dirs, collection, depth=0):
plugins = {}
@ -66,64 +75,25 @@ def _list_plugins_from_paths(ptype, dirs, collection, depth=0):
if any([
plugin in C.IGNORE_FILES, # general files to ignore
ext in C.REJECT_EXTS, # general extensions to ignore
ext in (b'.yml', b'.yaml', b'.json'), # ignore docs files TODO: constant!
plugin in IGNORE.get(bkey, ()), # plugin in reject list
os.path.islink(full_path), # skip aliases, author should document in 'aliaes' field
]):
continue
if ptype in ('test', 'filter'):
ploader = getattr(loader, '{0}_loader'.format(ptype))
if ptype == 'filter':
method_name = 'filters'
elif ptype == 'test':
method_name = 'tests'
else:
raise AnsibleError('how did you get here?')
added = False
try:
if path not in ploader._extra_dirs:
ploader.add_directory(path)
added = True
for plugin_map in ploader.all():
if not is_subpath(plugin_map._original_path, path, real=True):
# loader will not restrict to collection so we need to do it here
# requires both to be 'real' since loader solves symlinks
continue
try:
# uses the jinja2 method tests/filters to get 'name -> function' map
method_map = getattr(plugin_map, method_name)
jplugins = method_map()
seen = set()
# skip aliases, names that reference same function
for candidate in jplugins:
if jplugins[candidate] not in seen:
# use names and associate to actual file instead of 'function'
composite = [collection]
if depth:
composite.extend(plugin_map._original_path.split(os.path.sep)[depth * -1:])
composite.append(to_native(candidate))
fqcn = '.'.join(composite)
plugins[fqcn] = plugin_map._original_path
seen.add(jplugins[candidate])
except Exception as e:
display.warning("Skipping plugin file %s as it seems to be invalid: %r" % (to_native(plugin_map._original_path), e))
finally:
if added:
ploader._extra_dirs.remove(os.path.realpath(path))
ploader._clear_caches()
file_plugins = _list_j2_plugins_from_file(collection, full_path, ptype, plugin)
except KeyError as e:
display.warning('Skipping file %s: %s' % (full_path, to_native(e)))
continue
for plugin in file_plugins:
plugin_name = get_composite_name(collection, plugin._load_name, full_path, depth)
plugins[plugin_name] = full_path
else:
# collectionize name
composite = [collection]
if depth:
composite.extend(path.split(os.path.sep)[depth * -1:])
composite.append(to_native(plugin))
plugin = '.'.join(composite)
if not os.path.islink(full_path):
# skip aliases, author should document in 'aliaes' field
plugins[plugin] = full_path
plugin = get_composite_name(collection, plugin, path, depth)
plugins[plugin] = full_path
else:
display.debug("Skip listing plugins in '{0}' as it is not a directory".format(path))
else:
@ -132,6 +102,16 @@ def _list_plugins_from_paths(ptype, dirs, collection, depth=0):
return plugins
def _list_j2_plugins_from_file(collection, plugin_path, ptype, plugin_name):
    """Return the Jinja2 plugin objects provided by a single filter/test file."""
    type_loader = getattr(loader, '{0}_loader'.format(ptype))
    # builtin/legacy plugins are enumerated through the loader's own search
    # paths; collection plugins are loaded straight from the given file
    if collection not in ('ansible.builtin', 'ansible.legacy'):
        return type_loader.get_contained_plugins(collection, plugin_path, plugin_name)
    return type_loader.all()
def list_collection_plugins(ptype, collections, search_paths=None):
# starts at {plugin_name: filepath, ...}, but changes at the end
@ -146,19 +126,22 @@ def list_collection_plugins(ptype, collections, search_paths=None):
for collection in collections.keys():
if collection == 'ansible.builtin':
# dirs from ansible install, but not configured paths
dirs.extend([d.path for d in ploader._get_paths_with_context() if d.path not in ploader.config])
dirs.extend([d.path for d in ploader._get_paths_with_context() if d.internal])
elif collection == 'ansible.legacy':
# configured paths + search paths (should include basedirs/-M)
dirs = ploader.config
if search_paths is not None:
for d in search_paths:
if not d.endswith(ploader.subdir):
d = os.path.join([d, ploader.subdir])
dirs.append(d)
dirs.extend([d.path for d in ploader._get_paths_with_context() if not d.internal])
if context.CLIARGS.get('module_path', None):
dirs.extend(context.CLIARGS['module_path'])
else:
# search path in this case is for locating collection itself
# search path in this case is for locating collection itself
b_ptype = to_bytes(C.COLLECTION_PTYPE_COMPAT.get(ptype, ptype))
dirs = [to_native(os.path.join(collections[collection], b'plugins', b_ptype))]
# acr = AnsibleCollectionRef.try_parse_fqcr(collection, ptype)
# if acr:
# dirs = acr.subdirs
# else:
# raise Exception('bad acr for %s, %s' % (collection, ptype))
plugins.update(_list_plugins_from_paths(ptype, dirs, collection))
@ -187,17 +170,18 @@ def list_plugins(ptype, collection=None, search_paths=None):
# {plugin_name: (filepath, class), ...}
plugins = {}
do_legacy = False
do_legacy_replace = True
collections = {}
if collection is None:
# list all collections
# list all collections, add synthetic ones
collections['ansible.builtin'] = b''
collections['ansible.legacy'] = b''
collections.update(list_collections(search_paths=search_paths, dedupe=True))
do_legacy = True
elif collection == 'ansilbe.builtin':
collections['ansible.builtin'] = b''
elif collection == 'ansible.legacy':
do_legacy = True
# add builtin, since legacy also resolves to these
collections[collection] = b''
collections['ansible.builtin'] = b''
do_legacy_replace = False
else:
try:
collections[collection] = to_bytes(_get_collection_path(collection))
@ -207,14 +191,20 @@ def list_plugins(ptype, collection=None, search_paths=None):
if collections:
plugins.update(list_collection_plugins(ptype, collections))
if do_legacy:
legacy = list_collection_plugins(ptype, {'ansible.legacy': search_paths})
for plugin in legacy.keys():
builtin = plugin.replace('ansible.legacy.', 'ansible.builtin.', 1)
if builtin in plugins and legacy[plugin][0] == plugins[builtin][0]:
# add only if no overlap or overlap but diff files
continue
plugins[plugin] = legacy[plugin]
if do_legacy_replace:
# remove legacy entries that also exist as builtin; they are the same plugin but builtin is the preferred display
for plugin in list(plugins.keys()):
if 'ansible.builtin' in plugin:
legacy = plugin.replace('ansible.builtin.', 'ansible.legacy.', 1)
if legacy in plugins:
del plugins[legacy]
else:
# when listing only ansible.legacy, this includes all of the builtin under the legacy ns
for plugin in list(plugins.keys()):
if 'ansible.builtin' in plugin:
legacy = plugin.replace('ansible.builtin.', 'ansible.legacy.', 1)
plugins[legacy] = plugins[plugin]
del plugins[plugin]
return plugins

@ -10,11 +10,14 @@ __metaclass__ = type
import glob
import os
import os.path
import pkgutil
import sys
import warnings
from collections import defaultdict, namedtuple
from traceback import format_exc
from ansible import __version__ as ansible_version
from ansible import constants as C
from ansible.errors import AnsibleError, AnsiblePluginCircularRedirect, AnsiblePluginRemovedError, AnsibleCollectionUnsupportedVersionError
from ansible.module_utils._text import to_bytes, to_text, to_native
@ -26,8 +29,7 @@ from ansible.plugins import get_plugin_class, MODULE_CACHE, PATH_CACHE, PLUGIN_P
from ansible.utils.collection_loader import AnsibleCollectionConfig, AnsibleCollectionRef
from ansible.utils.collection_loader._collection_finder import _AnsibleCollectionFinder, _get_collection_metadata
from ansible.utils.display import Display
from ansible.utils.plugin_docs import add_fragments
from ansible import __version__ as ansible_version
from ansible.utils.plugin_docs import add_fragments, find_plugin_docfile
# TODO: take the packaging dep, or vendor SpecifierSet?
@ -399,12 +401,18 @@ class PluginLoader:
# if type name != 'module_doc_fragment':
if type_name in C.CONFIGURABLE_PLUGINS and not C.config.get_configuration_definition(type_name, name):
dstring = AnsibleLoader(getattr(module, 'DOCUMENTATION', ''), file_name=path).get_single_data()
# TODO: allow configurable plugins to use sidecar
# if not dstring:
# filename, cn = find_plugin_docfile( name, type_name, self, [os.path.dirname(path)], C.YAML_DOC_EXTENSIONS)
# # TODO: dstring = AnsibleLoader(, file_name=path).get_single_data()
if dstring:
add_fragments(dstring, path, fragment_loader=fragment_loader, is_module=(type_name == 'module'))
if dstring and 'options' in dstring and isinstance(dstring['options'], dict):
C.config.initialize_plugin_configuration_definitions(type_name, name, dstring['options'])
display.debug('Loaded config def from plugin (%s/%s)' % (type_name, name))
if 'options' in dstring and isinstance(dstring['options'], dict):
C.config.initialize_plugin_configuration_definitions(type_name, name, dstring['options'])
display.debug('Loaded config def from plugin (%s/%s)' % (type_name, name))
def add_directory(self, directory, with_subdir=False):
''' Adds an additional directory to the search path '''
@ -546,8 +554,7 @@ class PluginLoader:
found_files = sorted(found_files) # sort to ensure deterministic results, with the shortest match first
if len(found_files) > 1:
# TODO: warn?
pass
display.debug('Found several possible candidates for the plugin but using first: %s' % ','.join(found_files))
return plugin_load_context.resolve(
full_name, to_text(found_files[0]), acr.collection,
@ -764,8 +771,7 @@ class PluginLoader:
# last ditch, if it's something that can be redirected, look for a builtin redirect before giving up
candidate_fqcr = 'ansible.builtin.{0}'.format(name)
if '.' not in name and AnsibleCollectionRef.is_valid_fqcr(candidate_fqcr):
return self._find_fq_plugin(fq_name=candidate_fqcr, extension=suffix, plugin_load_context=plugin_load_context,
ignore_deprecated=ignore_deprecated)
return self._find_fq_plugin(fq_name=candidate_fqcr, extension=suffix, plugin_load_context=plugin_load_context, ignore_deprecated=ignore_deprecated)
return plugin_load_context.nope('{0} is not eligible for last-chance resolution'.format(name))
@ -896,7 +902,7 @@ class PluginLoader:
def all(self, *args, **kwargs):
'''
Iterate through all plugins of this type
Iterate through all plugins of this type, in configured paths (no collections)
A plugin loader is initialized with a specific type. This function is an iterator returning
all of the plugins of that type to the caller.
@ -946,15 +952,17 @@ class PluginLoader:
name = os.path.splitext(path)[0]
basename = os.path.basename(name)
if basename == '__init__' or basename in _PLUGIN_FILTERS[self.package]:
# either empty or ignored by the module blocklist
if basename in _PLUGIN_FILTERS[self.package]:
display.debug("'%s' skipped due to a defined plugin filter" % basename)
continue
if basename == 'base' and self.package == 'ansible.plugins.cache':
if basename == '__init__' or (basename == 'base' and self.package == 'ansible.plugins.cache'):
# cache has legacy 'base.py' file, which is wrapper for __init__.py
display.debug("'%s' skipped due to reserved name" % basename)
continue
if dedupe and basename in loaded_modules:
display.debug("'%s' skipped as duplicate" % basename)
continue
loaded_modules.add(basename)
@ -964,17 +972,19 @@ class PluginLoader:
continue
if path not in self._module_cache:
if self.type in ('filter', 'test'):
# filter and test plugin files can contain multiple plugins
# they must have a unique python module name to prevent them from shadowing each other
full_name = '{0}_{1}'.format(abs(hash(path)), basename)
else:
full_name = basename
try:
if self.subdir in ('filter_plugins', 'test_plugins'):
# filter and test plugin files can contain multiple plugins
# they must have a unique python module name to prevent them from shadowing each other
full_name = '{0}_{1}'.format(abs(hash(path)), basename)
else:
full_name = basename
module = self._load_module_source(full_name, path)
except Exception as e:
display.warning("Skipping plugin (%s) as it seems to be invalid: %s" % (path, to_text(e)))
display.warning("Skipping plugin (%s), cannot load: %s" % (path, to_text(e)))
continue
self._module_cache[path] = module
found_in_cache = False
else:
@ -1017,58 +1027,285 @@ class Jinja2Loader(PluginLoader):
PluginLoader optimized for Jinja2 plugins
The filter and test plugins are Jinja2 plugins encapsulated inside of our plugin format.
The way the calling code is setup, we need to do a few things differently in the all() method
We can't use the base class version because of file == plugin assumptions and dedupe logic
We need to do a few things differently in the base class because of file == plugin
assumptions and dedupe logic.
"""
def __init__(self, class_name, package, config, subdir, aliases=None, required_base_class=None):
    """Initialize like a regular PluginLoader, plus the Jinja2 file-map cache."""
    super(Jinja2Loader, self).__init__(class_name, package, config, subdir, aliases=aliases, required_base_class=required_base_class)
    # per-file plugin maps accumulated by this loader; emptied by _clear_caches()
    self._loaded_j2_file_maps = []
def _clear_caches(self):
    """Clear the base-class caches plus the Jinja2 per-file plugin map cache."""
    super(Jinja2Loader, self)._clear_caches()
    self._loaded_j2_file_maps = []
def find_plugin(self, name, mod_type='', ignore_deprecated=False, check_aliases=False, collection_list=None):
''' this is really 'find plugin file' '''
return super(Jinja2Loader, self).find_plugin(name, mod_type=mod_type, ignore_deprecated=ignore_deprecated, check_aliases=check_aliases,
collection_list=collection_list)
# TODO: handle collection plugin find, see 'get_with_context'
# this can really 'find plugin file'
plugin = super(Jinja2Loader, self).find_plugin(name, mod_type=mod_type, ignore_deprecated=ignore_deprecated, check_aliases=check_aliases,
collection_list=collection_list)
def get(self, name, *args, **kwargs):
# if not found, try loading all non collection plugins and see if this in there
if not plugin:
all_plugins = self.all()
plugin = all_plugins.get(name, None)
return plugin
@property
def method_map_name(self):
    # Name of the accessor on the plugin object that yields the Jinja2
    # 'name -> function' map: pluralizes the plugin class kind,
    # e.g. 'filter' -> 'filters', 'test' -> 'tests'.
    return get_plugin_class(self.class_name) + 's'
def get_contained_plugins(self, collection, plugin_path, name):
    """Load every Jinja2 plugin defined in the file at *plugin_path*.

    :arg collection: collection namespace the file belongs to
    :arg plugin_path: path of the filter/test plugin file
    :arg name: plugin file's base name, used to build the python module name
    :returns: list of wrapped plugin objects (empty if the file is invalid)
    :raises KeyError: if the module cannot be loaded or instantiated
    """
    plugins = []

    full_name = '.'.join(['ansible_collections', collection, 'plugins', self.type, name])
    try:
        # use 'parent' loader class to find files, but cannot return this as it can contain multiple plugins per file
        if plugin_path not in self._module_cache:
            self._module_cache[plugin_path] = self._load_module_source(full_name, plugin_path)
        module = self._module_cache[plugin_path]
        obj = getattr(module, self.class_name)
    except Exception as e:
        # normalize any load failure into KeyError for callers
        raise KeyError('Failed to load %s for %s: %s' % (plugin_path, collection, to_native(e)))

    plugin_impl = obj()
    if plugin_impl is None:
        raise KeyError('Could not find %s.%s' % (collection, name))

    try:
        # uses the Jinja2 'filters'/'tests' accessor to get a 'name -> function' map
        method_map = getattr(plugin_impl, self.method_map_name)
        plugin_map = method_map().items()
    except Exception as e:
        # invalid plugin file: warn and return what we have (nothing)
        display.warning("Ignoring %s plugins in '%s' as it seems to be invalid: %r" % (self.type, to_text(plugin_path), e))
        return plugins

    for func_name, func in plugin_map:
        fq_name = '.'.join((collection, func_name))
        pclass = self._load_jinja2_class()
        # wrap each Jinja2 function in its own plugin object
        plugin = pclass(func)
        if plugin in plugins:
            continue
        self._update_object(plugin, fq_name, plugin_path)
        plugins.append(plugin)

    return plugins
def get_with_context(self, name, *args, **kwargs):
# found_in_cache = True
class_only = kwargs.pop('class_only', False) # just pop it, dont want to pass through
collection_list = kwargs.pop('collection_list', None)
context = PluginLoadContext()
# avoid collection path for legacy
name = name.removeprefix('ansible.legacy.')
if '.' not in name and not collection_list:
# find in builtin/legacy list
for known_plugin in self.all(*args, **kwargs):
if known_plugin._load_name == name:
# set context
context.resolved = True
context.plugin_resolved_name = name
context.plugin_resolved_path = known_plugin._original_path
# TODO: context.plugin_resolved_collection = 'ansible.builtin' if path_with_context.internal else 'ansible.legacy'
return get_with_context_result(known_plugin, context)
plugin = None
key, leaf_key = get_fqcr_and_name(name)
seen = set()
# follow the meta!
while True:
if key in seen:
raise AnsibleError('recursive collection redirect found for %r' % name, 0)
seen.add(key)
acr = AnsibleCollectionRef.try_parse_fqcr(key, self.type)
if not acr:
raise KeyError('invalid plugin name: {0}'.format(key))
try:
ts = _get_collection_metadata(acr.collection)
except ValueError as e:
# no collection
raise KeyError('Invalid plugin FQCN ({0}): {1}'.format(key, to_native(e)))
# TODO: implement cycle detection (unified across collection redir as well)
routing_entry = ts.get('plugin_routing', {}).get(self.type, {}).get(leaf_key, {})
# check deprecations
deprecation_entry = routing_entry.get('deprecation')
if deprecation_entry:
warning_text = deprecation_entry.get('warning_text')
removal_date = deprecation_entry.get('removal_date')
removal_version = deprecation_entry.get('removal_version')
if not warning_text:
warning_text = '{0} "{1}" is deprecated'.format(self.type, key)
display.deprecated(warning_text, version=removal_version, date=removal_date, collection_name=acr.collection)
# check removal
tombstone_entry = routing_entry.get('tombstone')
if tombstone_entry:
warning_text = tombstone_entry.get('warning_text')
removal_date = tombstone_entry.get('removal_date')
removal_version = tombstone_entry.get('removal_version')
if not warning_text:
warning_text = '{0} "{1}" has been removed'.format(self.type, key)
if '.' in name: # NOTE: this is wrong way to detect collection, see note above for example
return super(Jinja2Loader, self).get(name, *args, **kwargs)
exc_msg = display.get_deprecation_message(warning_text, version=removal_version, date=removal_date,
collection_name=acr.collection, removed=True)
# Nothing is currently using this method
raise AnsibleError('No code should call "get" for Jinja2Loaders (Not implemented) for non collection use')
raise AnsiblePluginRemovedError(exc_msg)
# check redirects
redirect = routing_entry.get('redirect', None)
if redirect:
next_key, leaf_key = get_fqcr_and_name(redirect, collection=acr.collection)
display.vvv('redirecting (type: {0}) {1}.{2} to {3}'.format(self.type, acr.collection, acr.resource, next_key))
key = next_key
else:
break
try:
pkg = import_module(acr.n_python_package_name)
except ImportError as e:
raise KeyError(to_native(e))
parent_prefix = acr.collection
if acr.subdirs:
parent_prefix = '{0}.{1}'.format(parent_prefix, acr.subdirs)
try:
for dummy, module_name, ispkg in pkgutil.iter_modules(pkg.__path__, prefix=parent_prefix + '.'):
if ispkg:
continue
try:
# use 'parent' loader class to find files, but cannot return this as it can contain
# multiple plugins per file
plugin_impl = super(Jinja2Loader, self).get_with_context(module_name, *args, **kwargs)
except Exception as e:
raise KeyError(to_native(e))
try:
method_map = getattr(plugin_impl.object, self.method_map_name)
plugin_map = method_map().items()
except Exception as e:
display.warning("Skipping %s plugins in '%s' as it seems to be invalid: %r" % (self.type, to_text(plugin_impl.object._original_path), e))
continue
for func_name, func in plugin_map:
fq_name = '.'.join((parent_prefix, func_name))
# TODO: load anyways into CACHE so we only match each at end of loop
# the files themseves should already be cached by base class caching of modules(python)
if key in (func_name, fq_name):
pclass = self._load_jinja2_class()
plugin = pclass(func)
if plugin:
context = plugin_impl.plugin_load_context
self._update_object(plugin, fq_name, plugin_impl.object._original_path)
break # go to next file as it can override if dupe (dont break both loops)
except AnsiblePluginRemovedError as apre:
raise AnsibleError(to_native(apre), 0, orig_exc=apre)
except (AnsibleError, KeyError):
raise
except Exception as ex:
display.warning('An unexpected error occurred during Jinja2 plugin loading: {0}'.format(to_native(ex)))
display.vvv('Unexpected error during Jinja2 plugin loading: {0}'.format(format_exc()))
raise AnsibleError(to_native(ex), 0, orig_exc=ex)
return get_with_context_result(plugin, context)
def all(self, *args, **kwargs):
# inputs, we ignore 'dedupe' we always do, used in base class to find files for this one
path_only = kwargs.pop('path_only', False)
class_only = kwargs.pop('class_only', False) # basically ignored for test/filters since they are functions
# Having both path_only and class_only is a coding bug
if path_only and class_only:
raise AnsibleError('Do not set both path_only and class_only when calling PluginLoader.all()')
found = set()
# get plugins from files in configured paths (mulitple in each)
for p_map in self._j2_all_file_maps(*args, **kwargs):
# p_map is really object from file with class that holds mulitple plugins
plugins_list = getattr(p_map, self.method_map_name)
try:
plugins = plugins_list()
except Exception as e:
display.vvvv("Skipping %s plugins in '%s' as it seems to be invalid: %r" % (self.type, to_text(p_map._original_path), e))
continue
for plugin_name in plugins.keys():
if plugin_name in _PLUGIN_FILTERS[self.package]:
display.debug("%s skipped due to a defined plugin filter" % plugin_name)
continue
if plugin_name in found:
display.debug("%s skipped as duplicate" % plugin_name)
continue
if path_only:
result = p_map._original_path
else:
# loader class is for the file with multiple plugins, but each plugin now has it's own class
pclass = self._load_jinja2_class()
result = pclass(plugins[plugin_name]) # if bad plugin, let exception rise
found.add(plugin_name)
self._update_object(result, plugin_name, p_map._original_path)
yield result
def _load_jinja2_class(self):
""" override the normal method of plugin classname as these are used in the generic funciton
to access the 'multimap' of filter/tests to function, this is a 'singular' plugin for
each entry.
"""
Differences with :meth:`PluginLoader.all`:
class_name = 'AnsibleJinja2%s' % get_plugin_class(self.class_name).capitalize()
module = __import__(self.package, fromlist=[class_name])
return getattr(module, class_name)
def _j2_all_file_maps(self, *args, **kwargs):
"""
* Unlike other plugin types, file != plugin, a file can contain multiple plugins (of same type).
This is why we do not deduplicate ansible file names at this point, we mostly care about
the names of the actual jinja2 plugins which are inside of our files.
* We reverse the order of the list of files compared to other PluginLoaders. This is
because of how calling code chooses to sync the plugins from the list. It adds all the
Jinja2 plugins from one of our Ansible files into a dict. Then it adds the Jinja2
plugins from the next Ansible file, overwriting any Jinja2 plugins that had the same
name. This is an encapsulation violation (the PluginLoader should not know about what
calling code does with the data) but we're pushing the common code here. We'll fix
this in the future by moving more of the common code into this PluginLoader.
* We return a list. We could iterate the list instead but that's extra work for no gain because
the API receiving this doesn't care. It just needs an iterable
* This method will NOT fetch collection plugins, only those that would be expected under 'ansible.legacy'.
* This method will NOT fetch collection plugin files, only those that would be expected under 'ansible.builtin/legacy'.
"""
# We don't deduplicate ansible file names.
# Instead, calling code deduplicates jinja2 plugin names when loading each file.
kwargs['_dedupe'] = False
# TODO: move this to initialization and extract/dedupe plugin names in loader and offset this from
# caller. It would have to cache/refresh on add_directory to reevaluate plugin list and dedupe.
# Another option is to always prepend 'ansible.legac'y and force the collection path to
# load/find plugins, just need to check compatibility of that approach.
# This would also enable get/find_plugin for these type of plugins.
# We have to instantiate a list of all files so that we can reverse the list.
# We reverse it so that calling code will deduplicate this correctly.
files = list(super(Jinja2Loader, self).all(*args, **kwargs))
files .reverse()
return files
# populate cache if needed
if not self._loaded_j2_file_maps:
# We don't deduplicate ansible file names.
# Instead, calling code deduplicates jinja2 plugin names when loading each file.
kwargs['_dedupe'] = False
# To match correct precedence, call base class' all() to get a list of files,
self._loaded_j2_file_maps = list(super(Jinja2Loader, self).all(*args, **kwargs))
return self._loaded_j2_file_maps
def get_fqcr_and_name(resource, collection='ansible.builtin'):
    """Return ``(fqcr, short_name)`` for a plugin reference.

    A bare name is qualified with *collection*; a dotted reference is kept
    as-is and its last segment becomes the short name.
    """
    if '.' in resource:
        return resource, resource.rsplit('.', 1)[-1]
    return '{0}.{1}'.format(collection, resource), resource
def _load_plugin_filter():

@ -1,3 +1,13 @@
# Make coding more python3-ish
# (c) Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins import AnsibleJinja2Plugin
class AnsibleJinja2Test(AnsibleJinja2Plugin):
    """Wrapper exposing a single Jinja2 ``test`` function as an Ansible plugin."""

    def _no_options(self, *args, **kwargs):
        # Jinja2 tests receive their inputs as direct call arguments, so the
        # generic plugin option machinery is deliberately unsupported.
        # (also fixes the 'Jinaj2' typo in the original user-facing message)
        raise NotImplementedError("Jinja2 test plugins do not support option functions, they use direct arguments instead.")

@ -22,7 +22,6 @@ __metaclass__ = type
import ast
import datetime
import os
import pkgutil
import pwd
import re
import time
@ -44,20 +43,16 @@ from ansible.errors import (
AnsibleFilterError,
AnsibleLookupError,
AnsibleOptionsError,
AnsiblePluginRemovedError,
AnsibleUndefinedVariable,
)
from ansible.module_utils.six import string_types, text_type
from ansible.module_utils._text import to_native, to_text, to_bytes
from ansible.module_utils.common.collections import is_sequence
from ansible.module_utils.compat.importlib import import_module
from ansible.plugins.loader import filter_loader, lookup_loader, test_loader
from ansible.template.native_helpers import ansible_native_concat, ansible_eval_concat, ansible_concat
from ansible.template.template import AnsibleJ2Template
from ansible.template.vars import AnsibleJ2Vars
from ansible.utils.collection_loader import AnsibleCollectionRef
from ansible.utils.display import Display
from ansible.utils.collection_loader._collection_finder import _get_collection_metadata
from ansible.utils.listify import listify_lookup_plugin_terms
from ansible.utils.native_jinja import NativeJinjaText
from ansible.utils.unsafe_proxy import wrap_var
@ -413,174 +408,60 @@ class AnsibleContext(Context):
class JinjaPluginIntercept(MutableMapping):
def __init__(self, delegatee, pluginloader, *args, **kwargs):
super(JinjaPluginIntercept, self).__init__(*args, **kwargs)
self._delegatee = delegatee
self._pluginloader = pluginloader
''' Simulated dict class that loads Jinja2Plugins at request
otherwise all plugins would need to be loaded a priori.
if self._pluginloader.class_name == 'FilterModule':
self._method_map_name = 'filters'
self._dirname = 'filter'
elif self._pluginloader.class_name == 'TestModule':
self._method_map_name = 'tests'
self._dirname = 'test'
self._collection_jinja_func_cache = {}
NOTE: plugin_loader still loads all 'builtin/legacy' at
start so only collection plugins are really at request.
'''
self._ansible_plugins_loaded = False
def __init__(self, delegatee, pluginloader, *args, **kwargs):
def _load_ansible_plugins(self):
if self._ansible_plugins_loaded:
return
super(JinjaPluginIntercept, self).__init__(*args, **kwargs)
for plugin in self._pluginloader.all():
try:
method_map = getattr(plugin, self._method_map_name)
self._delegatee.update(method_map())
except Exception as e:
display.warning("Skipping %s plugin %s as it seems to be invalid: %r" % (self._dirname, to_text(plugin._original_path), e))
continue
self._pluginloader = pluginloader
if self._pluginloader.class_name == 'FilterModule':
for plugin_name, plugin in self._delegatee.items():
if plugin_name in C.STRING_TYPE_FILTERS:
self._delegatee[plugin_name] = _wrap_native_text(plugin)
else:
self._delegatee[plugin_name] = _unroll_iterator(plugin)
# cache of resolved plugins
self._delegatee = delegatee
self._ansible_plugins_loaded = True
# track loaded plugins here as cache above includes 'jinja2' filters but ours should override
self._loaded_builtins = set()
# FUTURE: we can cache FQ filter/test calls for the entire duration of a run, since a given collection's impl's
# aren't supposed to change during a run
def __getitem__(self, key):
original_key = key
self._load_ansible_plugins()
try:
if not isinstance(key, string_types):
raise ValueError('key must be a string')
key = to_native(key)
if '.' not in key: # might be a built-in or legacy, check the delegatee dict first, then try for a last-chance base redirect
func = self._delegatee.get(key)
if func:
return func
key, leaf_key = get_fqcr_and_name(key)
seen = set()
while True:
if key in seen:
raise TemplateSyntaxError(
'recursive collection redirect found for %r' % original_key,
0
)
seen.add(key)
acr = AnsibleCollectionRef.try_parse_fqcr(key, self._dirname)
if not acr:
raise KeyError('invalid plugin name: {0}'.format(key))
ts = _get_collection_metadata(acr.collection)
# TODO: implement cycle detection (unified across collection redir as well)
routing_entry = ts.get('plugin_routing', {}).get(self._dirname, {}).get(leaf_key, {})
deprecation_entry = routing_entry.get('deprecation')
if deprecation_entry:
warning_text = deprecation_entry.get('warning_text')
removal_date = deprecation_entry.get('removal_date')
removal_version = deprecation_entry.get('removal_version')
if not warning_text:
warning_text = '{0} "{1}" is deprecated'.format(self._dirname, key)
display.deprecated(warning_text, version=removal_version, date=removal_date, collection_name=acr.collection)
tombstone_entry = routing_entry.get('tombstone')
if tombstone_entry:
warning_text = tombstone_entry.get('warning_text')
removal_date = tombstone_entry.get('removal_date')
removal_version = tombstone_entry.get('removal_version')
if not warning_text:
warning_text = '{0} "{1}" has been removed'.format(self._dirname, key)
exc_msg = display.get_deprecation_message(warning_text, version=removal_version, date=removal_date,
collection_name=acr.collection, removed=True)
raise AnsiblePluginRemovedError(exc_msg)
redirect = routing_entry.get('redirect', None)
if redirect:
next_key, leaf_key = get_fqcr_and_name(redirect, collection=acr.collection)
display.vvv('redirecting (type: {0}) {1}.{2} to {3}'.format(self._dirname, acr.collection, acr.resource, next_key))
key = next_key
else:
break
func = self._collection_jinja_func_cache.get(key)
if func:
return func
if not isinstance(key, string_types):
raise ValueError('key must be a string, got %s instead' % type(key))
if key not in self._loaded_builtins:
plugin = None
try:
pkg = import_module(acr.n_python_package_name)
except ImportError:
raise KeyError()
parent_prefix = acr.collection
if acr.subdirs:
parent_prefix = '{0}.{1}'.format(parent_prefix, acr.subdirs)
# TODO: implement collection-level redirect
for dummy, module_name, ispkg in pkgutil.iter_modules(pkg.__path__, prefix=parent_prefix + '.'):
if ispkg:
continue
plugin = self._pluginloader.get(key)
except (AnsibleError, KeyError) as e:
raise TemplateSyntaxError('Could not load "%s": %s' % (key, to_native(e)), 0)
except Exception as e:
display.vvvv('Unexpected plugin load (%s) exception: %s' % (key, to_native(e)))
raise e
try:
plugin_impl = self._pluginloader.get(module_name)
except Exception as e:
raise TemplateSyntaxError(to_native(e), 0)
try:
method_map = getattr(plugin_impl, self._method_map_name)
func_items = method_map().items()
except Exception as e:
display.warning(
"Skipping %s plugin %s as it seems to be invalid: %r" % (self._dirname, to_text(plugin_impl._original_path), e),
)
continue
# if a plugin was found/loaded
if plugin:
# set in filter cache and avoid expensive plugin load
self._delegatee[key] = plugin.j2_function
self._loaded_builtins.add(key)
# let it trigger keyerror if we could not find ours or jinja2 one
func = self._delegatee[key]
# if i do have func and it is a filter, it nees wrapping
if self._pluginloader.type == 'filter':
# filter need wrapping
if key in C.STRING_TYPE_FILTERS:
# avoid litera_eval when you WANT strings
func = _wrap_native_text(func)
else:
# conditionally unroll iterators/generators to avoid having to use `|list` after every filter
func = _unroll_iterator(func)
for func_name, func in func_items:
fq_name = '.'.join((parent_prefix, func_name))
# FIXME: detect/warn on intra-collection function name collisions
if self._pluginloader.class_name == 'FilterModule':
if fq_name.startswith(('ansible.builtin.', 'ansible.legacy.')) and \
func_name in C.STRING_TYPE_FILTERS:
self._collection_jinja_func_cache[fq_name] = _wrap_native_text(func)
else:
self._collection_jinja_func_cache[fq_name] = _unroll_iterator(func)
else:
self._collection_jinja_func_cache[fq_name] = func
function_impl = self._collection_jinja_func_cache[key]
return function_impl
except AnsiblePluginRemovedError as apre:
raise TemplateSyntaxError(to_native(apre), 0)
except KeyError:
raise
except Exception as ex:
display.warning('an unexpected error occurred during Jinja2 environment setup: {0}'.format(to_native(ex)))
display.vvv('exception during Jinja2 environment setup: {0}'.format(format_exc()))
raise TemplateSyntaxError(to_native(ex), 0)
return func
def __setitem__(self, key, value):
return self._delegatee.__setitem__(key, value)
@ -597,17 +478,6 @@ class JinjaPluginIntercept(MutableMapping):
return len(self._delegatee)
def get_fqcr_and_name(resource, collection='ansible.builtin'):
    """Split a plugin reference into its fully-qualified name and short name."""
    dotted = '.' in resource
    name = resource.split('.')[-1] if dotted else resource
    fqcr = resource if dotted else collection + '.' + resource
    return fqcr, name
def _fail_on_undefined(data):
"""Recursively find an undefined value in a nested data structure
and properly raise the undefined exception.
@ -1086,10 +956,10 @@ class Templar:
try:
t = myenv.from_string(data)
except TemplateSyntaxError as e:
raise AnsibleError("template error while templating string: %s. String: %s" % (to_native(e), to_native(data)))
raise AnsibleError("template error while templating string: %s. String: %s" % (to_native(e), to_native(data)), orig_exc=e)
except Exception as e:
if 'recursion' in to_native(e):
raise AnsibleError("recursive loop detected in template string: %s" % to_native(data))
raise AnsibleError("recursive loop detected in template string: %s" % to_native(data), orig_exc=e)
else:
return data
@ -1127,10 +997,10 @@ class Templar:
if 'AnsibleUndefined' in to_native(te):
errmsg = "Unable to look up a name or access an attribute in template string (%s).\n" % to_native(data)
errmsg += "Make sure your variable name does not contain invalid characters like '-': %s" % to_native(te)
raise AnsibleUndefinedVariable(errmsg)
raise AnsibleUndefinedVariable(errmsg, orig_exc=te)
else:
display.debug("failing because of a type error, template data is: %s" % to_text(data))
raise AnsibleError("Unexpected templating type error occurred on (%s): %s" % (to_native(data), to_native(te)))
raise AnsibleError("Unexpected templating type error occurred on (%s): %s" % (to_native(data), to_native(te)), orig_exc=te)
finally:
self.cur_context = cached_context
@ -1153,7 +1023,7 @@ class Templar:
return res
except (UndefinedError, AnsibleUndefinedVariable) as e:
if fail_on_undefined:
raise AnsibleUndefinedVariable(e)
raise AnsibleUndefinedVariable(e, orig_exc=e)
else:
display.debug("Ignoring undefined failure: %s" % to_text(e))
return data

@ -5,16 +5,16 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from collections.abc import MutableMapping, MutableSet, MutableSequence
from pathlib import Path
from ansible import constants as C
from ansible.release import __version__ as ansible_version
from ansible.errors import AnsibleError
from ansible.errors import AnsibleError, AnsibleParserError, AnsiblePluginNotFound
from ansible.module_utils.six import string_types
from ansible.module_utils._text import to_native
from ansible.parsing.plugin_docs import read_docstring
from ansible.parsing.yaml.loader import AnsibleLoader
from ansible.utils.display import Display
from ansible.utils.vars import combine_vars
display = Display()
@ -271,3 +271,85 @@ def get_versioned_doclink(path):
return '{0}{1}/{2}'.format(base_url, doc_version, path)
except Exception as ex:
return '(unable to create versioned doc link for path {0}: {1})'.format(path, to_native(ex))
def _find_adjacent(path, plugin, extensions):
    """Locate a documentation 'sidecar' file next to *path* for *plugin*.

    Tries the plugin's base name with each extension in *extensions* and
    returns the first existing candidate as a native string, or None when
    no such file exists.
    """
    base_name = plugin.split('.')[-1]
    candidate = Path(path)
    if candidate.stem != base_name:
        # this should only affect filters/tests, whose file name can differ
        # from the plugin name (one file may hold many plugins)
        candidate = candidate.with_name(base_name)
    for extension in extensions:
        sidecar = candidate.with_suffix(extension)
        if sidecar.exists():
            return to_native(sidecar)
    return None
def find_plugin_docfile(plugin, plugin_type, loader):
    ''' if the plugin lives in a non-python file (eg, win_X.ps1), require the corresponding 'sidecar' file for docs '''

    # resolve the plugin name to a file on disk; aliases are checked so
    # deprecated (underscore-prefixed) names still resolve
    context = loader.find_plugin_with_context(plugin, ignore_deprecated=False, check_aliases=True)
    plugin_obj = None
    if (not context or not context.resolved) and plugin_type in ('filter', 'test'):
        # should only happen for filters/test: multiple plugins per file means
        # the file-based lookup can miss, so fall back to loading the plugin
        plugin_obj, context = loader.get_with_context(plugin)

    if not context or not context.resolved:
        raise AnsiblePluginNotFound('%s was not found' % (plugin), plugin_load_context=context)

    docfile = Path(context.plugin_resolved_path)
    # every name the resolved file could legitimately carry: the requested
    # name, the loader's internal load name, and the file's own basename both
    # with and without the deprecation underscore prefix
    possible_names = [plugin, getattr(plugin_obj, '_load_name', None), docfile.name.removeprefix('_'), docfile.name]
    if context:
        if context.redirect_list:
            # the last redirect is the name the plugin finally resolved to
            possible_names.append(context.redirect_list[-1])
        possible_names.append(context.plugin_resolved_name)

    if docfile.suffix not in C.DOC_EXTENSIONS or docfile.name not in possible_names:
        # only look for adjacent if plugin file does not support documents or
        # name does not match file basename (except deprecated)
        filename = _find_adjacent(docfile, plugin, C.DOC_EXTENSIONS)
    else:
        filename = to_native(docfile)

    if filename is None:
        raise AnsibleError('%s cannot contain DOCUMENTATION nor does it have a companion documentation file' % (plugin))

    return filename, context.plugin_resolved_collection
def get_plugin_docs(plugin, plugin_type, loader, fragment_loader, verbose):
    """Return the parsed documentation pieces for a plugin.

    Locates the plugin's documentation file (raising if the plugin itself
    cannot be found) and parses it via ``get_docstring``, returning its list
    result (DOCUMENTATION, EXAMPLES, RETURN, metadata).  Plugins without any
    DOCUMENTATION get a friendly 'UNDOCUMENTED' placeholder entry.

    :raises AnsibleParserError: when a located doc file cannot be parsed.
    """
    docs = []

    # find plugin doc file, if it doesn't exist this will throw error, we let it through
    # can raise exception and short circuit when 'not found'
    filename, collection_name = find_plugin_docfile(plugin, plugin_type, loader)

    try:
        docs = get_docstring(filename, fragment_loader, verbose=verbose, collection_name=collection_name, plugin_type=plugin_type)
    except Exception as e:
        raise AnsibleParserError('%s did not contain a DOCUMENTATION attribute (%s)' % (plugin, filename), orig_exc=e)

    # no good? try an adjacent 'sidecar' file, but only when one actually
    # exists; _find_adjacent returns None otherwise, and passing None on to
    # get_docstring would raise a misleading 'Adjacent file ...' parse error.
    if not docs[0]:
        newfile = _find_adjacent(filename, plugin, C.DOC_EXTENSIONS)
        if newfile is not None:
            try:
                docs = get_docstring(newfile, fragment_loader, verbose=verbose, collection_name=collection_name, plugin_type=plugin_type)
            except Exception as e:
                raise AnsibleParserError('Adjacent file %s did not contain a DOCUMENTATION attribute (%s)' % (plugin, filename), orig_exc=e)

    # got nothing, so this is 'undocumented', but lets populate at least some friendly info
    if not docs[0]:
        docs[0] = {'description': 'UNDOCUMENTED', 'short_description': 'UNDOCUMENTED'}

    # add extra data to docs[0] (aka 'DOCUMENTATION')
    docs[0]['filename'] = filename
    docs[0]['collection'] = collection_name

    return docs

@ -23,5 +23,6 @@ class FilterModule(object):
def filters(self):
return {
'noop': nochange,
'ultimatequestion': meaningoflife
'ultimatequestion': meaningoflife,
'b64decode': nochange, # here to colide with basename of builtin
}

@ -0,0 +1,16 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
def yolo(value):
    """Jinja2 test fixture that always evaluates True, regardless of *value*."""
    return True
class TestModule(object):
    """Expose this file's Jinja2 test fixtures to Ansible."""

    def tests(self):
        # failure testing
        available = {'yolo': yolo}
        return available

@ -0,0 +1,18 @@
DOCUMENTATION:
name: yolo
short_description: you only live once
description:
- This is always true
options:
_input:
description: does not matter
type: raw
required: true
EXAMPLES: |
{{ 'anything' is yolo }}
RETURN:
output:
type: boolean
description: always true

@ -0,0 +1,21 @@
DOCUMENTATION:
name: donothing
author: lazy
version_added: 'histerical'
short_description: noop
description:
- don't do anything
options:
_input:
description: Anything you want to get back
type: raw
required: true
EXAMPLES: |
# return the input unchanged
meaning: "{{ stuff | donothing }}"
RETURN:
_value:
description: guess
type: raw

@ -0,0 +1,22 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.utils.display import Display
display = Display()
def donothing(a):
    """Identity filter fixture: return the input unchanged."""
    return a
class FilterModule(object):
    """Expose this file's Jinja2 filter fixtures to Ansible."""

    def filters(self):
        available = {'donothing': donothing}
        return available

@ -0,0 +1,19 @@
#!/usr/bin/python
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = r'''
name: double_doc
description:
- module also uses 'DOCUMENTATION' in class
'''
class Foo:
    # Fixture: carries a second 'DOCUMENTATION' attribute, used to trip up the
    # tokenizer-based doc reader, which must only pick up the module-level one.
    DOCUMENTATION = None

    def __init__(self):
        # no state needed; the class exists only for its attribute name
        pass

@ -0,0 +1,21 @@
#!powershell
# Copyright: (c) Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#AnsibleRequires -CSharpUtil Ansible.Basic
# Argument spec: a single required string option 'hello'; check mode is safe
# because the module has no side effects.
$spec = @{
    options = @{
        hello = @{ type = 'str'; required = $true }
    }
    supports_check_mode = $true
}
$module = [Ansible.Basic.AnsibleModule]::Create($args, $spec)
$hello = $module.Params.hello
# echo the input back unchanged; this fixture never reports a change
$module.Result.msg = $hello
$module.Result.changed = $false
$module.ExitJson()

@ -0,0 +1,9 @@
DOCUMENTATION:
module: test_win_module
short_description: Test win module
description:
- Test win module with sidecar docs
author:
- Ansible Core Team
EXAMPLES: ''
RETURN: ''

@ -39,7 +39,7 @@ do
# FIXME post=$(ansible-doc -l -t ${ptype} --playbook-dir ./|wc -l)
# FIXME test "$pre" -eq $((post - 1))
if [ "${ptype}" == "filter" ]; then
expected=2
expected=3
else
expected=1
fi
@ -133,4 +133,42 @@ ANSIBLE_LIBRARY='./nolibrary' ansible-doc --metadata-dump --no-fail-on-errors --
# ensure that --metadata-dump does fail when --no-fail-on-errors is not supplied
output=$(ANSIBLE_LIBRARY='./nolibrary' ansible-doc --metadata-dump --playbook-dir broken-docs testns.testcol 2>&1 | grep -c 'ERROR!' || true)
test "$output" -eq 1
test "${output}" -eq 1
# ensure we list the 'legacy plugins'
[ "$(ansible-doc -M ./library -l ansible.legacy |wc -l)" -gt "0" ]
# playbook dir should work the same
[ "$(ansible-doc -l ansible.legacy --playbook-dir ./|wc -l)" -gt "0" ]
# see that we show undocumented when missing docs
[ "$(ansible-doc -M ./library -l ansible.legacy |grep -c UNDOCUMENTED)" == "6" ]
# ensure filtering works and does not include any 'test_' modules
[ "$(ansible-doc -M ./library -l ansible.builtin |grep -c test_)" == 0 ]
[ "$(ansible-doc --playbook-dir ./ -l ansible.builtin |grep -c test_)" == 0 ]
# ensure filtering still shows modules
count=$(ANSIBLE_LIBRARY='./nolibrary' ansible-doc -l ansible.builtin |wc -l)
[ "${count}" -gt "0" ]
[ "$(ansible-doc -M ./library -l ansible.builtin |wc -l)" == "${count}" ]
[ "$(ansible-doc --playbook-dir ./ -l ansible.builtin |wc -l)" == "${count}" ]
# produce 'sidecar' docs for test
[ "$(ansible-doc -t test --playbook-dir ./ testns.testcol.yolo| wc -l)" -gt "0" ]
[ "$(ansible-doc -t filter --playbook-dir ./ donothing| wc -l)" -gt "0" ]
[ "$(ansible-doc -t filter --playbook-dir ./ ansible.legacy.donothing| wc -l)" -gt "0" ]
# produce 'sidecar' docs for module
[ "$(ansible-doc -M ./library test_win_module| wc -l)" -gt "0" ]
[ "$(ansible-doc --playbook-dir ./ test_win_module| wc -l)" -gt "0" ]
# test 'double DOCUMENTATION' use
[ "$(ansible-doc --playbook-dir ./ double_doc| wc -l)" -gt "0" ]
# don't break on module dir
ansible-doc --list --module-path ./modules > /dev/null
# ensure we dedupe by fqcn and not base name
#[ "$(ansible-doc -l -t filter --playbook-dir ./ |grep 'b64decode' | wc -l)" -eq "2"]

@ -15,8 +15,8 @@
"ERROR! Unable to retrieve documentation from 'test_docs_missing_description' due to: All (sub-)options and return values must have a 'description' field"
in result.stderr
- name: module with suboptions
command: ansible-doc test_docs_suboptions
- name: module with suboptions (avoid first line as it has full path)
shell: ansible-doc test_docs_suboptions| tail -n +2
register: result
ignore_errors: true
@ -31,7 +31,7 @@
- actual_output == expected_output
- name: module with return docs
command: ansible-doc test_docs_returns
shell: ansible-doc test_docs_returns| tail -n +2
register: result
ignore_errors: true
@ -53,14 +53,14 @@
- assert:
that:
- result is failed
- '"ERROR! module test_docs_returns_broken missing documentation (or could not parse documentation)" in result.stderr'
- '"module test_docs_returns_broken missing documentation (or could not parse documentation)" in result.stderr'
- name: non-existent module
command: ansible-doc test_does_not_exist
register: result
- assert:
that:
- '"[WARNING]: module test_does_not_exist not found in:" in result.stderr'
- '"test_does_not_exist was not found" in result.stderr'
- name: documented module
command: ansible-doc test_docs
@ -138,7 +138,7 @@
- '"Alternatives: new_module" in result.stdout'
- name: documented module with YAML anchors
command: ansible-doc test_docs_yaml_anchors
shell: ansible-doc test_docs_yaml_anchors |tail -n +2
register: result
- set_fact:
actual_output: >-
@ -147,3 +147,8 @@
- assert:
that:
- actual_output == expected_output
- name: ensure 'donothing' adjacent filter is loaded
assert:
that:
- "'x' == ('x'|donothing)"

@ -1,4 +1,3 @@
> TEST_DOCS_RETURNS (library/test_docs_returns.py)
Test module

@ -1,4 +1,3 @@
> TEST_DOCS_SUBOPTIONS (library/test_docs_suboptions.py)
Test module

@ -1,4 +1,3 @@
> TEST_DOCS_YAML_ANCHORS (library/test_docs_yaml_anchors.py)
Test module

@ -96,11 +96,11 @@
command: ansible-inventory --list --output unicode_inventory.json -i {{ role_path }}/files/unicode.yml
- set_fact:
json_inventory_file: "{{ lookup('file', 'unicode_inventory.json') | string }}"
json_inventory_file: "{{ lookup('file', 'unicode_inventory.json') }}"
- assert:
that:
- json_inventory_file is contains('příbor')
- json_inventory_file|string is contains('příbor')
always:
- file:
name: unicode_inventory.json

@ -1,6 +1,6 @@
- hosts: localhost
vars:
git_install: '{{ lookup("file", lookup("env", "OUTPUT_DIR") + "/git_install.json") | from_json }}'
git_install: '{{ lookup("file", lookup("env", "OUTPUT_DIR") + "/git_install.json") }}'
tasks:
- name: remove unwanted packages
package:

@ -31,7 +31,7 @@
- assert:
that:
- redirect_failure is failed
- '"No filter named ''testns.testredirect.dead_end''" in redirect_failure.msg'
- "'Could not load \"testns.testredirect.dead_end\"' in redirect_failure.msg"
# recursive filter redirect
- debug: msg="{{ 'data' | testns.testredirect.recursive_redirect }}"
ignore_errors: yes

@ -2019,13 +2019,12 @@
## test copying the directory on remote with chown
- name: setting 'ansible_copy_test_user_name' outside block since 'always' section depends on this also
set_fact:
ansible_copy_test_user_name: 'ansible_copy_test_{{ 100000 | random }}'
- block:
- set_fact:
ansible_copy_test_user_name: 'ansible_copy_test_{{ 100000 | random }}'
- name: execute - create a user for test
user:
name: '{{ ansible_copy_test_user_name }}'

@ -12,13 +12,6 @@
that:
- '"[WARNING]: Skipping filter plugin" in result.stderr'
- '"[WARNING]: Skipping test plugin" in result.stderr'
- |
result.stderr|regex_findall('bad_filter')|length == 2
- |
result.stderr|regex_findall('bad_test')|length == 2
- |
result.stderr|regex_findall('bad_collection_filter')|length == 3
- |
result.stderr|regex_findall('bad_collection_filter2')|length == 1
- |
result.stderr|regex_findall('bad_collection_test')|length == 2
- result.stderr|regex_findall('bad_collection_filter')|length == 3
- result.stderr|regex_findall('bad_collection_filter2')|length == 1
- result.stderr|regex_findall('bad_collection_test')|length == 2

@ -1,19 +1,19 @@
- name: Set variables to verify lookup_list
set_fact: "{{ item if item is string else item[0] }}={{ item }}"
with_list:
- a
- [b, c]
- d
- name: Set variables to verify lookup_list
set_fact: "{{ item if (item is string)|bool else item[0] }}={{ item }}"
with_list:
- a
- [b, c]
- d
- name: Verify lookup_list
assert:
that:
- a is defined
- b is defined
- c is not defined
- d is defined
- b is iterable and b is not string
- b|length == 2
- a == a
- b == ['b', 'c']
- d == d
- name: Verify lookup_list
assert:
that:
- a is defined
- b is defined
- c is not defined
- d is defined
- b is iterable and b is not string
- b|length == 2
- a == a
- b == ['b', 'c']
- d == d

@ -127,4 +127,4 @@ def test_legacy_modules_list():
obj = DocCLI(args=args)
obj.parse()
result = obj._list_plugins('module', module_loader)
assert len(result) == 0
assert len(result) > 0

@ -123,7 +123,7 @@ class TestErrors(unittest.TestCase):
['/path/to/import_fixture.py']
]
pl = PluginLoader('test', '', 'test', 'test_plugin')
pl = PluginLoader('test', '', 'test', 'test_plugins')
# Aside from needing ``list()`` so we can do a len, ``PluginLoader.all`` returns a generator
# so ``list()`` actually causes ``PluginLoader.all`` to run.
plugins = list(pl.all())

Loading…
Cancel
Save