Fix ansible-doc and docsite generation for removed modules

* Fix ansible-doc wrt removed modules
  * Fix listing of modules via ansible-doc so it does not complain about removed modules.
    Removed modules are marked as such in the metadata but nowhere else, so we
    need to retrieve the metadata when a module has no documentation in order to
    tell whether it falls under this case.
  * Omit removed modules from the JSON dump.
  * Print an error that the module has been removed when attempting to run
    ansible-doc against that specific module.

* Get plugin_formatter to stop outputting removed modules
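
For context on the checks added below: a removed module in this era of Ansible keeps an ANSIBLE_METADATA block whose status list contains 'removed', while its DOCUMENTATION block is gone. A minimal sketch of the test applied throughout this commit (the metadata dict shown is illustrative, not taken from a real module):

    # Illustrative metadata dict, shaped like a parsed ANSIBLE_METADATA block.
    metadata = {'metadata_version': '1.1', 'status': ['removed'], 'supported_by': 'community'}
    doc = None  # removed modules have no DOCUMENTATION left to parse

    if doc is None and metadata and 'removed' in metadata.get('status', []):
        print("module has been removed; skip it rather than report a documentation error")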
Branch: pull/44661/merge
Toshio Kuratomi committed 6 years ago
parent b2932a41b0, commit 0873d46617

@@ -30,6 +30,7 @@ import re
 import sys
 import warnings
 from collections import defaultdict
+from copy import deepcopy
 from distutils.version import LooseVersion
 from functools import partial
 from pprint import PrettyPrinter
@@ -263,11 +264,18 @@ def get_plugin_info(module_dir, limit_to=None, verbose=False):
         # Regular module to process
         #

+        # use ansible core library to parse out doc metadata YAML and plaintext examples
+        doc, examples, returndocs, metadata = plugin_docs.get_docstring(module_path, fragment_loader, verbose=verbose)
+
+        if metadata and 'removed' in metadata.get('status'):
+            continue
+
         category = categories

         # Start at the second directory because we don't want the "vendor"
         mod_path_only = os.path.dirname(module_path[len(module_dir):])

+        primary_category = ''
         module_categories = []
         # build up the categories that this module belongs to
         for new_cat in mod_path_only.split('/')[1:]:
@@ -283,9 +291,6 @@ def get_plugin_info(module_dir, limit_to=None, verbose=False):
         if module_categories:
             primary_category = module_categories[0]

-        # use ansible core library to parse out doc metadata YAML and plaintext examples
-        doc, examples, returndocs, metadata = plugin_docs.get_docstring(module_path, fragment_loader, verbose=verbose)
-
         if 'options' in doc and doc['options'] is None:
             display.error("*** ERROR: DOCUMENTATION.options must be a dictionary/hash when used. ***")

         pos = getattr(doc, "ansible_pos", None)
@@ -525,6 +530,11 @@ def process_plugins(module_map, templates, outputname, output_dir, ansible_versi

 def process_categories(plugin_info, categories, templates, output_dir, output_name, plugin_type):
+    # For some reason, this line is changing plugin_info:
+    #   text = templates['list_of_CATEGORY_modules'].render(template_data)
+    # To avoid that, make a deepcopy of the data.
+    # We should track that down and fix it at some point in the future.
+    plugin_info = deepcopy(plugin_info)
     for category in sorted(categories.keys()):
         module_map = categories[category]
         category_filename = output_name % category
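
Two things happen in this file: the get_docstring() call is moved ahead of categorisation so removed modules can be skipped before any work is done on them, and process_categories() deep-copies plugin_info because rendering the category template was observed to mutate it. The value of deepcopy over a shallow copy can be shown in isolation (standalone sketch, illustrative data):

    from copy import copy, deepcopy

    plugin_info = {'ping': {'categories': ['system']}}

    shallow = copy(plugin_info)
    shallow['ping']['categories'].append('net')   # nested list is shared, caller's data changes too
    assert plugin_info['ping']['categories'] == ['system', 'net']

    plugin_info = {'ping': {'categories': ['system']}}
    deep = deepcopy(plugin_info)
    deep['ping']['categories'].append('net')      # fully copied, caller's data is untouched
    assert plugin_info['ping']['categories'] == ['system']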

@@ -30,10 +30,12 @@ from ansible.cli import CLI
 from ansible.errors import AnsibleError, AnsibleOptionsError
 from ansible.module_utils._text import to_native
 from ansible.module_utils.six import string_types
+from ansible.parsing.metadata import extract_metadata
+from ansible.parsing.plugin_docs import read_docstub
 from ansible.parsing.yaml.dumper import AnsibleDumper
 from ansible.plugins.loader import module_loader, action_loader, lookup_loader, callback_loader, cache_loader, \
     vars_loader, connection_loader, strategy_loader, inventory_loader, shell_loader, fragment_loader
-from ansible.utils.plugin_docs import BLACKLIST, get_docstring, get_docstub
+from ansible.utils.plugin_docs import BLACKLIST, get_docstring

 try:
     from __main__ import display
@@ -130,7 +132,7 @@ class DocCLI(CLI):
             for path in paths:
                 self.plugin_list.update(self.find_plugins(path, plugin_type))

-            self.pager(self.get_plugin_list_text(loader, doc_getter=get_docstub))
+            self.pager(self.get_plugin_list_text(loader))
             return 0

         # process all plugins of type
@@ -144,7 +146,9 @@ class DocCLI(CLI):
                 plugin_data[plugin_type] = dict()
                 plugin_names = self.get_all_plugins_of_type(plugin_type)
                 for plugin_name in plugin_names:
-                    plugin_data[plugin_type][plugin_name] = self.get_plugin_metadata(plugin_type, plugin_name)
+                    plugin_info = self.get_plugin_metadata(plugin_type, plugin_name)
+                    if plugin_info is not None:
+                        plugin_data[plugin_type][plugin_name] = plugin_info

             self.pager(json.dumps(plugin_data, sort_keys=True, indent=4))
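
With get_plugin_metadata() now returning None for removed plugins (see the next hunk), the --json-dump loop only records non-None entries, so removed modules never appear in the JSON output. A standalone sketch of the same filtering pattern; get_metadata() here is a stand-in, not the real method:

    import json

    def get_metadata(name):
        # Stand-in: pretend 'oldmod' is a removed module with no documentation.
        return None if name == 'oldmod' else {'name': name, 'short_description': 'example'}

    plugin_data = {}
    for name in ('copy', 'oldmod', 'ping'):
        info = get_metadata(name)
        if info is not None:          # removed plugins are simply omitted from the dump
            plugin_data[name] = info

    print(json.dumps(plugin_data, sort_keys=True, indent=4))  # contains 'copy' and 'ping' only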
@@ -183,12 +187,21 @@ class DocCLI(CLI):
             raise AnsibleError("unable to load {0} plugin named {1} ".format(plugin_type, plugin_name))

         try:
-            doc, __, __, __ = get_docstring(filename, fragment_loader, verbose=(self.options.verbosity > 0))
+            doc, __, __, metadata = get_docstring(filename, fragment_loader, verbose=(self.options.verbosity > 0))
         except Exception:
             display.vvv(traceback.format_exc())
             raise AnsibleError(
                 "%s %s at %s has a documentation error formatting or is missing documentation." %
-                (plugin_type, plugin_name, filename), wrap_text=False)
+                (plugin_type, plugin_name, filename))
+
+        if doc is None:
+            if 'removed' not in metadata.get('status', []):
+                raise AnsibleError(
+                    "%s %s at %s has a documentation error formatting or is missing documentation." %
+                    (plugin_type, plugin_name, filename))
+
+            # Removed plugins don't have any documentation
+            return None

         return dict(
             name=plugin_name,
@@ -258,6 +271,10 @@ class DocCLI(CLI):
                     return text

                 else:
+                    if 'removed' in metadata.get('status', []):
+                        display.warning("%s %s has been removed\n" % (plugin_type, plugin))
+                        return

                     # this typically means we couldn't even parse the docstring, not just that the YAML is busted,
                     # probably a quoting issue.
                     raise AnsibleError("Parsing produced an empty object.")
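
Both new branches encode the same rule: missing documentation plus a 'removed' status is expected, anything else is still an error. A condensed sketch of that decision, with a plain exception standing in for AnsibleError:

    class DocError(Exception):
        """Stands in for AnsibleError in this sketch."""

    def metadata_or_none(doc, metadata, name):
        # Mirrors the intent of the new get_plugin_metadata() branches.
        if doc is None:
            if 'removed' not in metadata.get('status', []):
                raise DocError('%s has a documentation error or is missing documentation' % name)
            return None               # removed plugins simply have no docs to report
        return {'name': name, 'short_description': doc.get('short_description')}

    print(metadata_or_none(None, {'status': ['removed']}, 'oldmod'))            # None
    print(metadata_or_none({'short_description': 'try to ping'}, {}, 'ping'))   # dict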
@@ -304,7 +321,7 @@ class DocCLI(CLI):

         return plugin_list

-    def get_plugin_list_text(self, loader, doc_getter=get_docstring):
+    def get_plugin_list_text(self, loader):
         columns = display.columns
         displace = max(len(x) for x in self.plugin_list)
         linelimit = columns - displace - 5
@@ -325,13 +342,19 @@
             doc = None
             try:
-                doc, plainexamples, returndocs, metadata = doc_getter(filename, fragment_loader)
+                doc = read_docstub(filename)
             except Exception:
                 display.warning("%s has a documentation formatting error" % plugin)
+                continue

             if not doc or not isinstance(doc, dict):
-                desc = 'UNDOCUMENTED'
-                display.warning("%s parsing did not produce documentation." % plugin)
+                with open(filename) as f:
+                    metadata = extract_metadata(module_data=f.read())
+                if 'removed' not in metadata[0].get('status', []):
+                    desc = 'UNDOCUMENTED'
+                    display.warning("%s parsing did not produce documentation." % plugin)
+                else:
+                    continue
             else:
                 desc = self.tty_ify(doc.get('short_description', 'INVALID SHORT DESCRIPTION').strip())
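
For ansible-doc -l the full get_docstring() call is replaced by the cheaper read_docstub(); when the stub yields nothing, the file's metadata decides whether the plugin is listed as UNDOCUMENTED or silently skipped because it was removed. Note that extract_metadata() returns a tuple whose first element is the metadata dict, hence the metadata[0] index. A rough sketch of the fallback, with read_stub and extract standing in for the real helpers:

    def short_description(filename, read_stub, extract):
        # read_stub/extract stand in for read_docstub()/extract_metadata().
        try:
            doc = read_stub(filename)
        except Exception:
            return None                               # formatting error: drop from the listing
        if doc and isinstance(doc, dict):
            return doc.get('short_description', 'INVALID SHORT DESCRIPTION').strip()
        with open(filename) as f:
            metadata = extract(module_data=f.read())  # (metadata_dict, ...) tuple
        if 'removed' in metadata[0].get('status', []):
            return None                               # removed module: skip silently
        return 'UNDOCUMENTED'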

@@ -82,41 +82,27 @@ def read_docstring(filename, verbose=True, ignore_errors=True):

     return data


-def read_docstub(filename, verbose=True, ignore_errors=True):
+def read_docstub(filename):
     """
     Quickly find short_description using string methods instead of node parsing.
     This does not return a full set of documentation strings and is intended for
     operations like ansible-doc -l.
     """
-    data = {
-        'doc': None,
-        'plainexamples': None,
-        'returndocs': None,
-        'metadata': None
-    }
-
-    try:
-        t_module_data = open(filename, 'r')
-        capturing = False
-        doc_stub = []
-
-        for line in t_module_data:
-            # start capturing the stub until indentation returns
-            if capturing and line[0] == ' ':
-                doc_stub.append(line)
-            elif capturing and line[0] != ' ':
-                break
-
-            if 'short_description:' in line:
-                capturing = True
-                doc_stub.append(line)
-
-        data['doc'] = AnsibleLoader(r"".join(doc_stub), file_name=filename).get_single_data()
-    except:
-        if verbose:
-            display.error("unable to parse %s" % filename)
-        if not ignore_errors:
-            raise
+    t_module_data = open(filename, 'r')
+    capturing = False
+    doc_stub = []
+
+    for line in t_module_data:
+        # start capturing the stub until indentation returns
+        if capturing and line[0] == ' ':
+            doc_stub.append(line)
+        elif capturing and line[0] != ' ':
+            break
+
+        if 'short_description:' in line:
+            capturing = True
+            doc_stub.append(line)
+
+    data = AnsibleLoader(r"".join(doc_stub), file_name=filename).get_single_data()

     return data
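
After this rewrite read_docstub() returns the parsed stub dict directly instead of the old {'doc': ..., 'plainexamples': ..., 'returndocs': ..., 'metadata': ...} wrapper, and it no longer catches parse errors itself; callers such as get_plugin_list_text() handle the exception. A quick usage sketch against a throwaway module file:

    import tempfile

    from ansible.parsing.plugin_docs import read_docstub

    module_source = (
        "DOCUMENTATION = '''\n"
        "module: example\n"
        "short_description: An example module\n"
        "  used only for this sketch\n"
        "'''\n"
    )

    with tempfile.NamedTemporaryFile('w', suffix='.py', delete=False) as f:
        f.write(module_source)

    # Only the short_description line and its indented continuation are captured.
    stub = read_docstub(f.name)
    print(stub['short_description'])   # -> "An example module used only for this sketch"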

@@ -120,16 +120,3 @@ def get_docstring(filename, fragment_loader, verbose=False, ignore_errors=False)
         add_fragments(data['doc'], filename, fragment_loader=fragment_loader)

     return data['doc'], data['plainexamples'], data['returndocs'], data['metadata']
-
-
-def get_docstub(filename, fragment_loader, verbose=False, ignore_errors=False):
-    """
-    When only short_description is needed, load a stub of the full DOCUMENTATION string to speed up operation.
-    """
-
-    data = read_docstub(filename, verbose=verbose, ignore_errors=ignore_errors)
-
-    if data.get('doc', False):
-        add_fragments(data['doc'], filename, fragment_loader=fragment_loader)
-
-    return data['doc'], data['plainexamples'], data['returndocs'], data['metadata']
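
With the wrapper gone, a caller that only needs the short description imports read_docstub() from ansible.parsing.plugin_docs directly and accepts that doc fragments are not merged into the stub; anything needing full documentation still goes through get_docstring(). A before/after sketch of the caller side (filename is a placeholder path):

    # before: the stub went through the utils wrapper and mimicked get_docstring()'s return shape
    # from ansible.utils.plugin_docs import get_docstub
    # doc, plainexamples, returndocs, metadata = get_docstub(filename, fragment_loader)

    # after: the stub helper is used directly and returns just the parsed stub dict
    from ansible.parsing.plugin_docs import read_docstub

    filename = 'lib/ansible/modules/system/ping.py'   # placeholder: any module file path
    doc = read_docstub(filename)
    print((doc or {}).get('short_description', 'UNDOCUMENTED'))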
