add ansible_name/ansible_aliases attrs to plugin objects (#78700)

*  new _fqcn attribute to plugin objects
* unbreak plugins in subdirs
* Fix inadvertent changes to _load_name and use existing vars
* add plugin aliases and name property, and replace plugin._load_name where incorrect
* Fix listing plugin names

Fix listing legacy and builtin together
test deprecated plugin documentation
fix doc extensions
remove sometimes inaccurate _load_name handling from plugin.name

* Add tests for REJECT_EXTS and doc extensions

Fix unpredictable collection redirects so non-fqcns in the redirect list are guaranteed to be legacy (instead of determined by the collections keyword)

Move aliases and name properties to _update_object so all plugin types, including doc fragments, can use them

* make legacy plugin names internally consistent
* rename attributes to ansible_name and ansible_aliases
pull/78792/head
Sloane Hertel 2 years ago committed by GitHub
parent a55afcc39a
commit 86b86398f0
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

@ -0,0 +1,4 @@
minor_changes:
- plugin loader - add ansible_name and ansible_aliases attributes to plugin objects/classes.
bugfixes:
- ansible-doc - fix listing plugins.

@ -100,7 +100,7 @@ REJECT_EXTS = ('.pyc', '.pyo', '.swp', '.bak', '~', '.rpm', '.md', '.txt', '.rst
BOOL_TRUE = BOOLEANS_TRUE
COLLECTION_PTYPE_COMPAT = {'module': 'modules'}
PYTHON_DOC_EXTENSIONS = ('.py', '.pyc', '.pyo')
PYTHON_DOC_EXTENSIONS = ('.py',)
YAML_DOC_EXTENSIONS = ('.yml', '.yaml')
DOC_EXTENSIONS = PYTHON_DOC_EXTENSIONS + YAML_DOC_EXTENSIONS

@ -542,7 +542,7 @@ class TaskExecutor:
# get the connection and the handler for this execution
if (not self._connection or
not getattr(self._connection, 'connected', False) or
self._connection._load_name != current_connection or
not self._connection.matches_name([current_connection]) or
# pc compare, left here for old plugins, but should be irrelevant for those
# using get_option, since they are cleared each iteration.
self._play_context.remote_addr != self._connection._play_context.remote_addr):

@ -59,6 +59,16 @@ class AnsiblePlugin(ABC):
self._options = {}
self._defs = None
def matches_name(self, possible_names):
    """Return True when any name in *possible_names* resolves to one of this plugin's aliases.

    Short (dotless) names are also tried as ``ansible.builtin.<name>``, and
    ``ansible.legacy.``-prefixed names are also tried without that prefix,
    mirroring how the plugin loader resolves unqualified names.
    """
    candidates = set()
    for candidate in possible_names:
        # the literal name is always a candidate
        candidates.add(candidate)
        if '.' not in candidate:
            # unqualified names may resolve to builtin
            candidates.add(f"ansible.builtin.{candidate}")
        elif candidate.startswith("ansible.legacy."):
            # legacy-prefixed names may resolve to the bare legacy name
            candidates.add(candidate.removeprefix("ansible.legacy."))
    # truthy iff at least one candidate is a known alias
    return not candidates.isdisjoint(self.ansible_aliases)
def get_option(self, option, hostvars=None):
if option not in self._options:
try:

@ -25,8 +25,8 @@ class ActionModule(ActionBase):
# TODO: remove in favor of controller side argspec detecting valid arguments
# network facts modules must support gather_subset
try:
name = self._connection.redirected_names[-1].removeprefix('ansible.netcommon.')
except (IndexError, AttributeError):
name = self._connection.ansible_name.removeprefix('ansible.netcommon.')
except AttributeError:
name = self._connection._load_name.split('.')[-1]
if name not in ('network_cli', 'httpapi', 'netconf'):
subset = mod_args.pop('gather_subset', None)
@ -81,7 +81,7 @@ class ActionModule(ActionBase):
if 'smart' in modules:
connection_map = C.config.get_config_value('CONNECTION_FACTS_MODULES', variables=task_vars)
network_os = self._task.args.get('network_os', task_vars.get('ansible_network_os', task_vars.get('ansible_facts', {}).get('network_os')))
modules.extend([connection_map.get(network_os or self._connection._load_name, 'ansible.legacy.setup')])
modules.extend([connection_map.get(network_os or self._connection.ansible_name, 'ansible.legacy.setup')])
modules.pop(modules.index('smart'))
failed = {}

@ -27,12 +27,19 @@ IGNORE = {
def get_composite_name(collection, name, path, depth):
    """Build the fully-qualified display name for a plugin found on disk.

    :param collection: collection namespace the plugin was listed under
        (e.g. ``ansible.builtin`` or ``ansible.legacy``).
    :param name: plugin resource name; may already be an FQCN.
    :param path: directory path of the plugin file, used to include
        subdirectory components in the composite name.
    :param depth: number of trailing path components (subdirs) to include.
    :returns: dotted composite name ``collection[.subdirs].resource``.
    """
    # NOTE: the source paste contained both the pre- and post-change lines of
    # this diff hunk; this is the reconstructed post-change implementation.
    resolved_collection = collection
    if '.' not in name:
        resource_name = name
    else:
        if collection == 'ansible.legacy' and name.startswith('ansible.builtin.'):
            # builtin plugins listed via legacy are displayed as builtin
            resolved_collection = 'ansible.builtin'
        # strip the collection prefix to recover the bare resource name
        resource_name = '.'.join(name.split(f"{resolved_collection}.")[1:])
    # collectionize name
    composite = [resolved_collection]
    if depth:
        # include the trailing subdirectory components in the name
        composite.extend(path.split(os.path.sep)[depth * -1:])
    composite.append(to_native(resource_name))
    return '.'.join(composite)
@ -58,7 +65,7 @@ def _list_plugins_from_paths(ptype, dirs, collection, depth=0):
continue
display.debug("Found possible plugin: '{0}'".format(plugin_file))
b_plugin, ext = os.path.splitext(plugin_file)
b_plugin, b_ext = os.path.splitext(plugin_file)
plugin = to_native(b_plugin)
full_path = os.path.join(b_path, plugin_file)
@ -74,8 +81,8 @@ def _list_plugins_from_paths(ptype, dirs, collection, depth=0):
else:
if any([
plugin in C.IGNORE_FILES, # general files to ignore
ext in C.REJECT_EXTS, # general extensions to ignore
ext in (b'.yml', b'.yaml', b'.json'), # ignore docs files TODO: constant!
to_native(b_ext) in C.REJECT_EXTS, # general extensions to ignore
b_ext in (b'.yml', b'.yaml', b'.json'), # ignore docs files TODO: constant!
plugin in IGNORE.get(bkey, ()), # plugin in reject list
os.path.islink(full_path), # skip aliases, author should document in 'aliases' field
]):
@ -89,11 +96,11 @@ def _list_plugins_from_paths(ptype, dirs, collection, depth=0):
continue
for plugin in file_plugins:
plugin_name = get_composite_name(collection, plugin._load_name, full_path, depth)
plugin_name = get_composite_name(collection, plugin.ansible_name, os.path.dirname(to_native(full_path)), depth)
plugins[plugin_name] = full_path
else:
plugin = get_composite_name(collection, plugin, path, depth)
plugins[plugin] = full_path
plugin_name = get_composite_name(collection, plugin, os.path.dirname(to_native(full_path)), depth)
plugins[plugin_name] = full_path
else:
display.debug("Skip listing plugins in '{0}' as it is not a directory".format(path))
else:
@ -116,7 +123,6 @@ def list_collection_plugins(ptype, collections, search_paths=None):
# starts at {plugin_name: filepath, ...}, but changes at the end
plugins = {}
dirs = []
try:
ploader = getattr(loader, '{0}_loader'.format(ptype))
except AttributeError:
@ -126,10 +132,10 @@ def list_collection_plugins(ptype, collections, search_paths=None):
for collection in collections.keys():
if collection == 'ansible.builtin':
# dirs from ansible install, but not configured paths
dirs.extend([d.path for d in ploader._get_paths_with_context() if d.internal])
dirs = [d.path for d in ploader._get_paths_with_context() if d.internal]
elif collection == 'ansible.legacy':
# configured paths + search paths (should include basedirs/-M)
dirs.extend([d.path for d in ploader._get_paths_with_context() if not d.internal])
dirs = [d.path for d in ploader._get_paths_with_context() if not d.internal]
if context.CLIARGS.get('module_path', None):
dirs.extend(context.CLIARGS['module_path'])
else:
@ -170,7 +176,6 @@ def list_plugins(ptype, collection=None, search_paths=None):
# {plugin_name: (filepath, class), ...}
plugins = {}
do_legacy_replace = True
collections = {}
if collection is None:
# list all collections, add synthetic ones
@ -181,7 +186,6 @@ def list_plugins(ptype, collection=None, search_paths=None):
# add builtin, since legacy also resolves to these
collections[collection] = b''
collections['ansible.builtin'] = b''
do_legacy_replace = False
else:
try:
collections[collection] = to_bytes(_get_collection_path(collection))
@ -191,27 +195,12 @@ def list_plugins(ptype, collection=None, search_paths=None):
if collections:
plugins.update(list_collection_plugins(ptype, collections))
if do_legacy_replace:
# remove legacy that exist as builtin, they are the same plugin but builtin is the preferred display
for plugin in list(plugins.keys()):
if 'ansible.builtin' in plugin:
legacy = plugin.replace('ansible.builtin.', 'ansible.legacy.', 1)
if legacy in plugins:
del plugins[legacy]
else:
# when listing only ansible.legacy, this includes all of the builtin under the legacy ns
for plugin in list(plugins.keys()):
if 'ansible.builtin' in plugin:
legacy = plugin.replace('ansible.builtin.', 'ansible.legacy.', 1)
plugins[legacy] = plugins[plugin]
del plugins[plugin]
return plugins
# wrappers
def list_plugin_names(ptype, collection=None):
return list_plugins(ptype, collection).keys()
return [plugin.ansible_name for plugin in list_plugins(ptype, collection)]
def list_plugin_files(ptype, collection=None):

@ -511,6 +511,7 @@ class PluginLoader:
# FIXME: remove once this is covered in debug or whatever
display.vv("redirecting (type: {0}) {1} to {2}".format(plugin_type, fq_name, redirect))
# The name doing the redirection is added at the beginning of _resolve_plugin_step,
# but if the unqualified name is used in conjunction with the collections keyword, only
# the unqualified name is in the redirect list.
@ -695,6 +696,7 @@ class PluginLoader:
plugin_load_context.plugin_resolved_path = path_with_context.path
plugin_load_context.plugin_resolved_name = name
plugin_load_context.plugin_resolved_collection = 'ansible.builtin' if path_with_context.internal else ''
plugin_load_context._resolved_fqcn = ('ansible.builtin.' + name if path_with_context.internal else name)
plugin_load_context.resolved = True
return plugin_load_context
except KeyError:
@ -753,6 +755,7 @@ class PluginLoader:
plugin_load_context.plugin_resolved_path = path_with_context.path
plugin_load_context.plugin_resolved_name = name
plugin_load_context.plugin_resolved_collection = 'ansible.builtin' if path_with_context.internal else ''
plugin_load_context._resolved_fqcn = 'ansible.builtin.' + name if path_with_context.internal else name
plugin_load_context.resolved = True
return plugin_load_context
except KeyError:
@ -772,6 +775,7 @@ class PluginLoader:
plugin_load_context.plugin_resolved_path = path_with_context.path
plugin_load_context.plugin_resolved_name = alias_name
plugin_load_context.plugin_resolved_collection = 'ansible.builtin' if path_with_context.internal else ''
plugin_load_context._resolved_fqcn = 'ansible.builtin.' + alias_name if path_with_context.internal else alias_name
plugin_load_context.resolved = True
return plugin_load_context
@ -826,13 +830,25 @@ class PluginLoader:
return module
def _update_object(self, obj, name, path, redirected_names=None):
def _update_object(self, obj, name, path, redirected_names=None, resolved=None):
# set extra info on the module, in case we want it later
setattr(obj, '_original_path', path)
setattr(obj, '_load_name', name)
setattr(obj, '_redirected_names', redirected_names or [])
names = []
if resolved:
names.append(resolved)
if redirected_names:
# reverse list so best name comes first
names.extend(redirected_names[::-1])
if not names:
raise AnsibleError(f"Missing FQCN for plugin source {name}")
setattr(obj, 'ansible_aliases', names)
setattr(obj, 'ansible_name', names[0])
def get(self, name, *args, **kwargs):
return self.get_with_context(name, *args, **kwargs).object
@ -849,6 +865,9 @@ class PluginLoader:
# FIXME: this is probably an error (eg removed plugin)
return get_with_context_result(None, plugin_load_context)
fq_name = plugin_load_context.resolved_fqcn
if '.' not in fq_name:
fq_name = '.'.join((plugin_load_context.plugin_resolved_collection, fq_name))
name = plugin_load_context.plugin_resolved_name
path = plugin_load_context.plugin_resolved_path
redirected_names = plugin_load_context.redirect_list or []
@ -881,7 +900,7 @@ class PluginLoader:
# A plugin may need to use its _load_name in __init__ (for example, to set
# or get options from config), so update the object before using the constructor
instance = object.__new__(obj)
self._update_object(instance, name, path, redirected_names)
self._update_object(instance, name, path, redirected_names, fq_name)
obj.__init__(instance, *args, **kwargs) # pylint: disable=unnecessary-dunder-call
obj = instance
except TypeError as e:
@ -891,7 +910,7 @@ class PluginLoader:
return get_with_context_result(None, plugin_load_context)
raise
self._update_object(obj, name, path, redirected_names)
self._update_object(obj, name, path, redirected_names, fq_name)
return get_with_context_result(obj, plugin_load_context)
def _display_plugin_load(self, class_name, name, searched_paths, path, found_in_cache=None, class_only=None):
@ -951,9 +970,13 @@ class PluginLoader:
all_matches = []
found_in_cache = True
for i in self._get_paths():
legacy_excluding_builtin = set()
for path_with_context in self._get_paths_with_context():
matches = glob.glob(to_native(os.path.join(path_with_context.path, "*.py")))
if not path_with_context.internal:
legacy_excluding_builtin.update(matches)
# we sort within each path, but keep path precedence from config
all_matches.extend(sorted(glob.glob(to_native(os.path.join(i, "*.py"))), key=os.path.basename))
all_matches.extend(sorted(matches, key=os.path.basename))
loaded_modules = set()
for path in all_matches:
@ -1026,7 +1049,11 @@ class PluginLoader:
except TypeError as e:
display.warning("Skipping plugin (%s) as it seems to be incomplete: %s" % (path, to_text(e)))
self._update_object(obj, basename, path)
if path in legacy_excluding_builtin:
fqcn = basename
else:
fqcn = f"ansible.builtin.{basename}"
self._update_object(obj, basename, path, resolved=fqcn)
yield obj
@ -1097,7 +1124,7 @@ class Jinja2Loader(PluginLoader):
plugin = pclass(func)
if plugin in plugins:
continue
self._update_object(plugin, full, plugin_path)
self._update_object(plugin, full, plugin_path, resolved=fq_name)
plugins.append(plugin)
return plugins
@ -1113,15 +1140,15 @@ class Jinja2Loader(PluginLoader):
# avoid collection path for legacy
name = name.removeprefix('ansible.legacy.')
if '.' not in name and not collection_list:
# find in builtin/legacy list
if '.' not in name:
# Filter/tests must always be FQCN except builtin and legacy
for known_plugin in self.all(*args, **kwargs):
if known_plugin._load_name == name:
# set context
if known_plugin.matches_name([name]):
context.resolved = True
context.plugin_resolved_name = name
context.plugin_resolved_path = known_plugin._original_path
# TODO: context.plugin_resolved_collection = 'ansible.builtin' if path_with_context.internal else 'ansible.legacy'
context.plugin_resolved_collection = 'ansible.builtin' if known_plugin.ansible_name.startswith('ansible.builtin.') else ''
context._resolved_fqcn = known_plugin.ansible_name
return get_with_context_result(known_plugin, context)
plugin = None
@ -1220,6 +1247,7 @@ class Jinja2Loader(PluginLoader):
for func_name, func in plugin_map:
fq_name = '.'.join((parent_prefix, func_name))
src_name = f"ansible_collections.{acr.collection}.plugins.{self.type}.{acr.subdirs}.{func_name}"
# TODO: load anyways into CACHE so we only match each at end of loop
# the files themselves should already be cached by base class caching of modules(python)
if key in (func_name, fq_name):
@ -1227,7 +1255,7 @@ class Jinja2Loader(PluginLoader):
plugin = pclass(func)
if plugin:
context = plugin_impl.plugin_load_context
self._update_object(plugin, fq_name, plugin_impl.object._original_path)
self._update_object(plugin, src_name, plugin_impl.object._original_path, resolved=fq_name)
break # go to next file as it can override if dupe (dont break both loops)
except AnsiblePluginRemovedError as apre:
@ -1279,7 +1307,12 @@ class Jinja2Loader(PluginLoader):
pclass = self._load_jinja2_class()
result = pclass(plugins[plugin_name]) # if bad plugin, let exception rise
found.add(plugin_name)
self._update_object(result, plugin_name, p_map._original_path)
fqcn = plugin_name
collection = '.'.join(p_map.ansible_name.split('.')[:2]) if p_map.ansible_name.count('.') >= 2 else ''
if not plugin_name.startswith(collection):
fqcn = f"{collection}.{plugin_name}"
self._update_object(result, plugin_name, p_map._original_path, resolved=fqcn)
yield result
def _load_jinja2_class(self):

@ -163,10 +163,8 @@ def add_fragments(doc, filename, fragment_loader, is_module=False):
fragment = AnsibleLoader(fragment_yaml, file_name=filename).get_single_data()
real_collection_name = 'ansible.builtin'
real_fragment_name = getattr(fragment_class, '_load_name')
if real_fragment_name.startswith('ansible_collections.'):
real_collection_name = '.'.join(real_fragment_name.split('.')[1:3])
real_fragment_name = getattr(fragment_class, 'ansible_name')
real_collection_name = '.'.join(real_fragment_name.split('.')[0:2]) if '.' in real_fragment_name else ''
add_collection_to_versions_and_dates(fragment, real_collection_name, is_module=is_module)
if 'notes' in fragment:
@ -296,7 +294,6 @@ def find_plugin_docfile(plugin, plugin_type, loader):
''' if the plugin lives in a non-python file (eg, win_X.ps1), require the corresponding 'sidecar' file for docs '''
context = loader.find_plugin_with_context(plugin, ignore_deprecated=False, check_aliases=True)
plugin_obj = None
if (not context or not context.resolved) and plugin_type in ('filter', 'test'):
# should only happen for filters/test
plugin_obj, context = loader.get_with_context(plugin)
@ -305,14 +302,8 @@ def find_plugin_docfile(plugin, plugin_type, loader):
raise AnsiblePluginNotFound('%s was not found' % (plugin), plugin_load_context=context)
docfile = Path(context.plugin_resolved_path)
possible_names = [plugin, getattr(plugin_obj, '_load_name', None), docfile.name.removeprefix('_'), docfile.name]
if context:
if context.redirect_list:
possible_names.append(context.redirect_list[-1])
possible_names.append(context.plugin_resolved_name)
if docfile.suffix not in C.DOC_EXTENSIONS or docfile.name not in possible_names:
# only look for adjacent if plugin file does not support documents or
# name does not match file basname (except deprecated)
if docfile.suffix not in C.DOC_EXTENSIONS:
# only look for adjacent if plugin file does not support documents
filename = _find_adjacent(docfile, plugin, C.DOC_EXTENSIONS)
else:
filename = to_native(docfile)
@ -348,7 +339,7 @@ def get_plugin_docs(plugin, plugin_type, loader, fragment_loader, verbose):
# add extra data to docs[0] (aka 'DOCUMENTATION')
if docs[0] is None:
raise AnsibleParserError('No documentation availalbe for %s (%s)' % (plugin, filename))
raise AnsibleParserError('No documentation available for %s (%s)' % (plugin, filename))
else:
docs[0]['filename'] = filename
docs[0]['collection'] = collection_name

@ -56,12 +56,7 @@ def get_vars_from_path(loader, path, entities, stage):
for plugin in vars_plugin_list:
# legacy plugins always run by default, but they can set REQUIRES_ENABLED=True to opt out.
# The name in config corresponds to the following _load_name:
# - legacy_plugin == legacy_plugin
# - ansible.legacy.legacy_plugin == legacy_plugin
# - builtin_plugin == builtin_plugin
# - ansible.builtin.builtin_plugin == ansible_collections.ansible.builtin.plugins.vars.builtin_plugin
builtin_or_legacy = '.' not in plugin._load_name or plugin._load_name.startswith('ansible_collections.ansible.builtin.')
builtin_or_legacy = plugin.ansible_name.startswith('ansible.builtin.') or '.' not in plugin.ansible_name
# builtin is supposed to have REQUIRES_ENABLED=True, the following is for legacy plugins...
needs_enabled = not builtin_or_legacy
@ -77,21 +72,10 @@ def get_vars_from_path(loader, path, entities, stage):
if not builtin_or_legacy and (hasattr(plugin, 'REQUIRES_ENABLED') or hasattr(plugin, 'REQUIRES_WHITELIST')):
display.warning(
"Vars plugins in collections must be enabled to be loaded, REQUIRES_ENABLED is not supported. "
"This should be removed from the plugin %s." % plugin._load_name # FIXME: display ns.coll.resource instead of _load_name
"This should be removed from the plugin %s." % plugin.ansible_name
)
elif builtin_or_legacy and plugin._load_name not in C.VARIABLE_PLUGINS_ENABLED and needs_enabled:
# Maybe it was enabled by FQCN.
is_builtin = plugin._load_name == 'ansible_collections.ansible.builtin.plugins.vars.host_group_vars'
if is_builtin:
fqcn_builtin = 'ansible.builtin.host_group_vars'
fqcn_legacy = 'ansible.legacy.host_group_vars'
if fqcn_builtin not in C.VARIABLE_PLUGINS_ENABLED and fqcn_legacy not in C.VARIABLE_PLUGINS_ENABLED:
continue
else:
# legacy plugin
fqcn_legacy = 'ansible.legacy.%s' % plugin._load_name
if fqcn_legacy not in C.VARIABLE_PLUGINS_ENABLED:
continue
elif builtin_or_legacy and needs_enabled and not plugin.matches_name(C.VARIABLE_PLUGINS_ENABLED):
continue
has_stage = hasattr(plugin, 'get_option') and plugin.has_option('stage')

@ -0,0 +1,23 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.utils.display import Display
display = Display()
def nochange(a):
    """Identity filter: hand the input back untouched."""
    return a
class FilterModule(object):
    """Ansible core jinja2 filters (test fixture exposing no-op filters)."""

    def filters(self):
        """Map both fixture filter names onto the identity callable."""
        return dict(noop=nochange, nested=nochange)

@ -0,0 +1,37 @@
#!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
---
module: subdir_module
short_description: A module in multiple subdirectories
description:
- A module in multiple subdirectories
author:
- Ansible Core Team
version_added: 1.0.0
options: {}
'''
EXAMPLES = '''
'''
RETURN = '''
'''
from ansible.module_utils.basic import AnsibleModule
def main():
    """Instantiate the argument-less module and exit reporting success."""
    AnsibleModule(argument_spec={}).exit_json()


if __name__ == '__main__':
    main()

@ -21,4 +21,5 @@ class FilterModule(object):
'donothing': donothing,
'nodocs': donothing,
'split': donothing,
'b64decode': donothing,
}

@ -0,0 +1,5 @@
# Copyright (c) 2022 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

@ -0,0 +1,26 @@
# Copyright (c) 2022 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
name: deprecated_with_docs
short_description: test lookup
description: test lookup
author: Ansible Core Team
version_added: "2.14"
deprecated:
why: reasons
alternative: other thing
removed_in: "2.16"
removed_from_collection: "ansible.legacy"
options: {}
'''
EXAMPLE = '''
'''
RETURN = '''
'''

@ -0,0 +1,16 @@
DOCUMENTATION:
name: deprecated_with_adj_docs
short_description: test lookup
description: test lookup
author: Ansible Core Team
version_added: "2.14"
deprecated:
why: reasons
alternative: use other thing
removed_in: "2.16"
removed_from_collection: "ansible.legacy"
options: {}
EXAMPLE: ""
RETURN: {}

@ -30,23 +30,38 @@ ansible-doc --list testns.testcol --playbook-dir ./ 2>&1 | grep -v "Invalid coll
# ensure we dont break on invalid collection name for list
ansible-doc --list testns.testcol.fakemodule --playbook-dir ./ 2>&1 | grep "Invalid collection name"
# test listing diff plugin types from collection
for ptype in cache inventory lookup vars filter
for ptype in cache inventory lookup vars filter module
do
# each plugin type adds 1 from collection
# FIXME pre=$(ansible-doc -l -t ${ptype}|wc -l)
# FIXME post=$(ansible-doc -l -t ${ptype} --playbook-dir ./|wc -l)
# FIXME test "$pre" -eq $((post - 1))
if [ "${ptype}" == "filter" ]; then
expected=3
expected=5
expected_names=("b64decode" "filter_subdir.nested" "filter_subdir.noop" "noop" "ultimatequestion")
elif [ "${ptype}" == "module" ]; then
expected=4
expected_names=("fakemodule" "notrealmodule" "randommodule" "database.database_type.subdir_module")
else
expected=1
if [ "${ptype}" == "cache" ]; then expected_names=("notjsonfile");
elif [ "${ptype}" == "inventory" ]; then expected_names=("statichost");
elif [ "${ptype}" == "lookup" ]; then expected_names=("noop");
elif [ "${ptype}" == "vars" ]; then expected_names=("noop_vars_plugin"); fi
fi
# ensure we ONLY list from the collection
justcol=$(ansible-doc -l -t ${ptype} --playbook-dir ./ testns.testcol|wc -l)
test "$justcol" -eq "$expected"
# ensure the right names are displayed
list_result=$(ansible-doc -l -t ${ptype} --playbook-dir ./ testns.testcol)
metadata_result=$(ansible-doc --metadata-dump --no-fail-on-errors -t ${ptype} --playbook-dir ./ testns.testcol)
for name in "${expected_names[@]}"; do
echo "${list_result}" | grep "testns.testcol.${name}"
echo "${metadata_result}" | grep "testns.testcol.${name}"
done
# ensure we get error if passing invalid collection, much less any plugins
ansible-doc -l -t ${ptype} testns.testcol 2>&1 | grep "unable to locate collection"
@ -174,9 +189,26 @@ ansible-doc -t filter --playbook-dir ./ nodocs 2>&1| grep -c 'missing documentat
ansible-doc --list --module-path ./modules > /dev/null
# ensure we dedupe by fqcn and not base name
#[ "$(ansible-doc -l -t filter --playbook-dir ./ |grep 'b64decode' | wc -l)" -eq "2"]
[ "$(ansible-doc -l -t filter --playbook-dir ./ |grep -c 'b64decode')" -eq "3" ]
# ensure we don't show duplicates for plugins that only exist in ansible.builtin when listing ansible.legacy plugins
[ "$(ansible-doc -l -t filter --playbook-dir ./ |grep -c 'b64encode')" -eq "1" ]
# with playbook dir, legacy should override
ansible-doc -t filter split --playbook-dir ./ |grep histerical
pyc_src="$(pwd)/filter_plugins/other.py"
pyc_1="$(pwd)/filter_plugins/split.pyc"
pyc_2="$(pwd)/library/notaplugin.pyc"
trap 'rm -rf "$pyc_1" "$pyc_2"' EXIT
# test pyc files are not used as adjacent documentation
python -c "import py_compile; py_compile.compile('$pyc_src', cfile='$pyc_1')"
ansible-doc -t filter split --playbook-dir ./ |grep histerical
# test pyc files are not listed as plugins
python -c "import py_compile; py_compile.compile('$pyc_src', cfile='$pyc_2')"
test "$(ansible-doc -l -t module --playbook-dir ./ 2>&1 1>/dev/null |grep -c "notaplugin")" == 0
# without playbook dir, builtin should return
ansible-doc -t filter split |grep -v histerical

@ -152,3 +152,21 @@
assert:
that:
- "'x' == ('x'|donothing)"
- name: docs for deprecated plugin
command: ansible-doc deprecated_with_docs -t lookup
register: result
- assert:
that:
- '"WARNING" not in result.stderr'
- '"DEPRECATED_WITH_DOCS " in result.stdout'
- '"AUTHOR: Ansible Core Team" in result.stdout'
- name: adjacent docs for deprecated plugin
command: ansible-doc deprecated_with_adj_docs -t lookup
register: result
- assert:
that:
- '"WARNING" not in result.stderr'
- '"DEPRECATED_WITH_ADJ_DOCS " in result.stdout'
- '"AUTHOR: Ansible Core Team" in result.stdout'

@ -104,7 +104,7 @@ class TestErrors(unittest.TestCase):
self.assertEqual(one, two)
@patch('ansible.plugins.loader.glob')
@patch.object(PluginLoader, '_get_paths')
@patch.object(PluginLoader, '_get_paths_with_context')
def test_all_no_duplicate_names(self, gp_mock, glob_mock):
'''
This test goes along with ``test__load_module_source_no_duplicate_names``
@ -114,8 +114,8 @@ class TestErrors(unittest.TestCase):
fixture_path = os.path.join(os.path.dirname(__file__), 'loader_fixtures')
gp_mock.return_value = [
fixture_path,
'/path/to'
MagicMock(path=fixture_path),
MagicMock(path='/path/to'),
]
glob_mock.glob.side_effect = [

Loading…
Cancel
Save