add collection plugin listing to ansible-doc (#68522)

- listing from collections
- able to filter by namespace or collection (see the usage sketch below)
- masks duplicates just as normal collection loading does
pull/68596/head
Brian Coca 5 years ago committed by GitHub
parent a3d6b6bc48
commit 8c044b846d
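
For context, a quick usage sketch (not part of the commit) driving the new listing and its filter from Python; 'my_ns' and 'my_ns.my_coll' are placeholders for whatever collection is installed locally.

import subprocess

# list all lookup plugins, now including those shipped in installed collections
subprocess.run(["ansible-doc", "-t", "lookup", "--list"])

# limit the listing to a namespace, or to a single collection
subprocess.run(["ansible-doc", "-t", "lookup", "--list", "my_ns"])
subprocess.run(["ansible-doc", "-t", "lookup", "--list", "my_ns.my_coll"])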

@@ -18,6 +18,7 @@ from ansible import constants as C
from ansible import context
from ansible.cli import CLI
from ansible.cli.arguments import option_helpers as opt_help
from ansible.collections.list import list_collection_dirs, get_collection_name_from_path
from ansible.errors import AnsibleError, AnsibleOptionsError
from ansible.module_utils._text import to_native
from ansible.module_utils.common._collections_compat import Container, Sequence
@@ -36,6 +37,16 @@ def jdump(text):
    display.display(json.dumps(text, sort_keys=True, indent=4))


def add_collection_plugins(plugin_list, plugin_type, coll_filter=None):
    # TODO: take into account routing.yml once implemented
    colldirs = list_collection_dirs(coll_filter=coll_filter)
    for path in colldirs:
        collname = get_collection_name_from_path(path)
        ptype = C.COLLECTION_PTYPE_COMPAT.get(plugin_type, plugin_type)
        plugin_list.update(DocCLI.find_plugins(os.path.join(path, 'plugins', ptype), plugin_type, collname))


class RemovedPlugin(Exception):
    pass
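
A rough sketch (not part of the diff) of how the helper above is meant to be driven; the plugin type and filter values are illustrative only.

plugin_list = set()

# with no filter, every installed collection contributes its plugins/modules dir
add_collection_plugins(plugin_list, 'module')

# a namespace or a full collection name narrows the scan
add_collection_plugins(plugin_list, 'module', coll_filter='my_ns.my_coll')

# entries come back fully qualified, e.g. 'my_ns.my_coll.ping'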
@@ -60,6 +71,8 @@ class DocCLI(CLI):
def init_parser(self):
coll_filter = 'A supplied argument will be used for filtering, can be a namespace or full collection name.'
super(DocCLI, self).init_parser(
desc="plugin documentation tool",
epilog="See man pages for Ansible CLI options or website for tutorials https://docs.ansible.com"
@@ -77,9 +90,9 @@ class DocCLI(CLI):
exclusive = self.parser.add_mutually_exclusive_group()
exclusive.add_argument("-F", "--list_files", action="store_true", default=False, dest="list_files",
help='Show plugin names and their source files without summaries (implies --list)')
help='Show plugin names and their source files without summaries (implies --list). %s' % coll_filter)
exclusive.add_argument("-l", "--list", action="store_true", default=False, dest='list_dir',
help='List available plugins')
help='List available plugins. %s' % coll_filter)
exclusive.add_argument("-s", "--snippet", action="store_true", default=False, dest='show_snippet',
help='Show playbook snippet for specified plugin(s)')
exclusive.add_argument("--metadata-dump", action="store_true", default=False, dest='dump',
@@ -119,49 +132,52 @@ class DocCLI(CLI):
         search_paths = DocCLI.print_paths(loader)
         loader._paths = None  # reset so we can use subdirs below

-        # list plugins names and filepath for type
-        if context.CLIARGS['list_files']:
+        # list plugins names or filepath for type, both options share most code
+        if context.CLIARGS['list_files'] or context.CLIARGS['list_dir']:

-            paths = loader._get_paths()
-            for path in paths:
-                self.plugin_list.update(DocCLI.find_plugins(path, plugin_type))
+            coll_filter = None
+            if len(context.CLIARGS['args']) == 1:
+                coll_filter = context.CLIARGS['args'][0]

-            plugins = self._get_plugin_list_filenames(loader)
+            if coll_filter in ('', None):
+                paths = loader._get_paths()
+                for path in paths:
+                    self.plugin_list.update(DocCLI.find_plugins(path, plugin_type))
+
+            add_collection_plugins(self.plugin_list, plugin_type, coll_filter=coll_filter)
+
+            # get appropriate content depending on option
+            if context.CLIARGS['list_dir']:
+                results = self._get_plugin_list_descriptions(loader)
+            elif context.CLIARGS['list_files']:
+                results = self._get_plugin_list_filenames(loader)

             if do_json:
-                jdump(plugins)
-            else:
+                jdump(results)
+            elif self.plugin_list:
                 # format for user
                 displace = max(len(x) for x in self.plugin_list)
                 linelimit = display.columns - displace - 5
                 text = []
+                deprecated = []

-                for plugin in plugins.keys():
-                    filename = plugins[plugin]
-                    text.append("%-*s %-*.*s" % (displace, plugin, linelimit, len(filename), filename))
-                DocCLI.pager("\n".join(text))
+                # format display per option
+                if context.CLIARGS['list_files']:
+                    # list files
+                    for plugin in results.keys():
+                        filename = results[plugin]
+                        text.append("%-*s %-*.*s" % (displace, plugin, linelimit, len(filename), filename))

-        # list file plugins for type (does not read docs, very fast)
-        elif context.CLIARGS['list_dir']:
-            paths = loader._get_paths()
-            for path in paths:
-                self.plugin_list.update(DocCLI.find_plugins(path, plugin_type))
-
-            descs = self._get_plugin_list_descriptions(loader)
-            if do_json:
-                jdump(descs)
-            else:
-                displace = max(len(x) for x in self.plugin_list)
-                linelimit = display.columns - displace - 5
-                text = []
-                deprecated = []
-                for plugin in descs.keys():
-                    desc = DocCLI.tty_ify(descs[plugin])
+                elif context.CLIARGS['list_dir']:
+                    # list plugins
+                    for plugin in results.keys():
+                        desc = DocCLI.tty_ify(results[plugin])

-                    if len(desc) > linelimit:
-                        desc = desc[:linelimit] + '...'
+                        if len(desc) > linelimit:
+                            desc = desc[:linelimit] + '...'

-                    if plugin.startswith('_'):  # Handle deprecated
-                        deprecated.append("%-*s %-*.*s" % (displace, plugin[1:], linelimit, len(desc), desc))
-                    else:
-                        text.append("%-*s %-*.*s" % (displace, plugin, linelimit, len(desc), desc))
+                        if plugin.startswith('_'):  # Handle deprecated  # TODO: add mark for deprecated collection plugins
+                            deprecated.append("%-*s %-*.*s" % (displace, plugin[1:], linelimit, len(desc), desc))
+                        else:
+                            text.append("%-*s %-*.*s" % (displace, plugin, linelimit, len(desc), desc))
@@ -170,7 +186,10 @@ class DocCLI(CLI):
                     text.append("\nDEPRECATED:")
                     text.extend(deprecated)

+                # display results
                 DocCLI.pager("\n".join(text))
+            else:
+                display.warning("No plugins found.")

         # dump plugin desc/metadata as JSON
         elif context.CLIARGS['dump']:
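
For reference, the "%-*s %-*.*s" pattern used above takes its field widths from the argument list: the plugin name is left-justified to the width of the longest name, and the already-truncated description is printed after it. A standalone illustration with made-up names and widths:

plugins = {'ping': 'Try to connect to host and report status', '_old_ping': 'Deprecated ping'}

displace = max(len(x) for x in plugins)   # width of the longest plugin name
linelimit = 25                            # stand-in for display.columns - displace - 5

for name, desc in plugins.items():
    if len(desc) > linelimit:
        desc = desc[:linelimit] + '...'
    # '*' pulls the column width from the argument list, just as run() does above
    print("%-*s %-*.*s" % (displace, name, linelimit, len(desc), desc))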
@@ -349,7 +368,7 @@ class DocCLI(CLI):
return text
@staticmethod
def find_plugins(path, ptype):
def find_plugins(path, ptype, collection=None):
display.vvvv("Searching %s for plugins" % path)
@@ -386,6 +405,10 @@ class DocCLI(CLI):
            plugin = plugin.lstrip('_')  # remove underscore from deprecated plugins
            if plugin not in BLACKLIST.get(bkey, ()):
                if collection:
                    plugin = '%s.%s' % (collection, plugin)
                plugin_list.add(plugin)
                display.vvvv("Added %s" % plugin)
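
A tiny standalone sketch (values made up) of the name building the new 'collection' argument enables: deprecated plugins lose their leading underscore first, then the collection name is prepended.

collection = 'my_ns.my_coll'
plugin = '_ping'

plugin = plugin.lstrip('_')            # deprecated plugins drop the leading underscore
if collection:
    plugin = '%s.%s' % (collection, plugin)

print(plugin)   # -> my_ns.my_coll.ping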

@@ -0,0 +1,27 @@
# (c) 2019 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import os

FLAG_FILES = frozenset(['MANIFEST.json', 'galaxy.yml'])


def is_collection_path(path):
    """
    Verify that a path meets min requirements to be a collection
    :param path: byte-string path to evaluate for collection containment
    :return: boolean signifying 'collectionness'
    """

    is_coll = False
    if os.path.isdir(path):
        for flag in FLAG_FILES:
            if os.path.exists(os.path.join(path, flag)):
                is_coll = True
                break

    return is_coll
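
A usage sketch for the new helper, assuming this commit is applied; the path below is hypothetical.

import os

from ansible.collections import is_collection_path

path = os.path.expanduser('~/.ansible/collections/ansible_collections/my_ns/my_coll')
if is_collection_path(path):
    print('%s contains MANIFEST.json or galaxy.yml, treat it as a collection' % path)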

@@ -0,0 +1,89 @@
# (c) 2019 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from collections import defaultdict
from ansible.collections import is_collection_path
from ansible.utils.collection_loader import AnsibleCollectionLoader, get_collection_name_from_path
from ansible.utils.display import Display
display = Display()
def list_valid_collection_paths(search_paths=None, warn=False):
    """
    Filter out non-existing or invalid search_paths for collections
    :param search_paths: list of text-string paths, if none load default config
    :param warn: display warning if search_path does not exist
    :return: subset of original list
    """

    if search_paths is None:
        search_paths = AnsibleCollectionLoader().n_collection_paths

    for path in search_paths:

        if not os.path.exists(path):
            # warn for missing, but not if default
            if warn:
                display.warning("The configured collection path {0} does not exist.".format(path))
            continue

        if not os.path.isdir(path):
            if warn:
                display.warning("The configured collection path {0} exists, but it is not a directory.".format(path))
            continue

        yield path
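
A quick sketch of consuming the generator above; the search paths are made up, and warn=True reports configured-but-missing directories.

for valid in list_valid_collection_paths(['/usr/share/ansible/collections', '/does/not/exist'], warn=True):
    print(valid)   # only paths that exist and are directories get yielded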
def list_collection_dirs(search_paths=None, coll_filter=None):
    """
    Return paths for the specific collections found in passed or configured search paths
    :param search_paths: list of text-string paths, if none load default config
    :param coll_filter: limit collections to just the specific namespace or collection, if None all are returned
    :return: list of collection directory paths
    """

    collections = defaultdict(dict)
    for path in list_valid_collection_paths(search_paths):

        if os.path.isdir(path):
            coll_root = os.path.join(path, 'ansible_collections')

            if os.path.exists(coll_root) and os.path.isdir(coll_root):

                coll = None
                if coll_filter is None:
                    namespaces = os.listdir(coll_root)
                else:
                    if '.' in coll_filter:
                        (nsp, coll) = coll_filter.split('.')
                    else:
                        nsp = coll_filter
                    namespaces = [nsp]

                for ns in namespaces:
                    namespace_dir = os.path.join(coll_root, ns)

                    if os.path.isdir(namespace_dir):

                        if coll is None:
                            colls = os.listdir(namespace_dir)
                        else:
                            colls = [coll]

                        for collection in colls:

                            # skip dupe collections as they will be masked in execution
                            if collection not in collections[ns]:
                                coll_dir = os.path.join(namespace_dir, collection)
                                if is_collection_path(coll_dir):
                                    cpath = os.path.join(namespace_dir, collection)
                                    collections[ns][collection] = cpath
                                    yield cpath
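
And a sketch of walking collection directories with and without a filter, assuming this commit is applied; the namespace and collection names are placeholders.

from ansible.collections.list import list_collection_dirs

# every collection found in the configured paths
for coll_dir in list_collection_dirs():
    print(coll_dir)

# just one namespace, or one specific collection
namespace_only = list(list_collection_dirs(coll_filter='my_ns'))
single_collection = list(list_collection_dirs(coll_filter='my_ns.my_coll'))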

@@ -97,6 +97,7 @@ BECOME_METHODS = _DeprecatedSequenceConstant(
# CONSTANTS ### yes, actual ones
BLACKLIST_EXTS = ('.pyc', '.pyo', '.swp', '.bak', '~', '.rpm', '.md', '.txt', '.rst')
BOOL_TRUE = BOOLEANS_TRUE
COLLECTION_PTYPE_COMPAT = {'module': 'modules'}
DEFAULT_BECOME_PASS = None
DEFAULT_PASSWORD_CHARS = to_text(ascii_letters + digits + ".,:-_", errors='strict') # characters included in auto-generated passwords
DEFAULT_REMOTE_PASS = None
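
The new mapping only has to translate 'module', since collections keep modules under plugins/modules; every other plugin type maps to its own directory name, which is how add_collection_plugins above uses it. A standalone illustration:

COLLECTION_PTYPE_COMPAT = {'module': 'modules'}

print(COLLECTION_PTYPE_COMPAT.get('module', 'module'))   # -> 'modules', i.e. plugins/modules
print(COLLECTION_PTYPE_COMPAT.get('lookup', 'lookup'))   # -> 'lookup', i.e. plugins/lookup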

@@ -6,7 +6,6 @@ __metaclass__ = type
import os
import os.path
import pkgutil
import re
import sys
@@ -509,7 +508,6 @@ def get_collection_role_path(role_name, collection_list=None):
if acr:
# looks like a valid qualified collection ref; skip the collection_list
role = acr.resource
collection_list = [acr.collection]
subdirs = acr.subdirs
resource = acr.resource
