Extend validate-modules to also validate plugins (#71734)

* Let validate-modules also validate plugins.

* Support 'option' in 'cli'.

* Use DOCUMENTABLE_PLUGINS instead of UNDOCUMENTED_PLUGIN_TYPES.

* Support 'keyword', clean up error codes.

* Call settings.process_errors only once; remove __version__.

* Add changelog fragment.
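For reference, this is the sanity test driven by ansible-test; a typical invocation (unchanged by this change) is:

    ansible-test sanity --test validate-modules

After this change the same run also validates the documentation of non-module plugins of the documentable types.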
Felix Fontein authored 2 years ago, committed by GitHub
parent 7d234a40ee
commit 0990c4ca7c

@@ -0,0 +1,2 @@
+ minor_changes:
+ - "ansible-test sanity validate-modules - the validate-modules sanity test now also checks the documentation of documentable plugin types (https://github.com/ansible/ansible/pull/71734)."

@@ -123,6 +123,11 @@ COMMAND = 'sanity'
SANITY_ROOT = os.path.join(ANSIBLE_TEST_CONTROLLER_ROOT, 'sanity')
TARGET_SANITY_ROOT = os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'sanity')
+ # NOTE: must match ansible.constants.DOCUMENTABLE_PLUGINS, but with 'module' replaced by 'modules'!
+ DOCUMENTABLE_PLUGINS = (
+ 'become', 'cache', 'callback', 'cliconf', 'connection', 'httpapi', 'inventory', 'lookup', 'netconf', 'modules', 'shell', 'strategy', 'vars'
+ )
created_venvs = [] # type: t.List[str]
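The NOTE above can be checked with a small sketch (not part of the diff); it assumes ansible.constants.DOCUMENTABLE_PLUGINS is importable, as the NOTE states:

    from ansible.constants import DOCUMENTABLE_PLUGINS as CORE_DOCUMENTABLE_PLUGINS

    # core lists 'module'; ansible-test uses the directory name 'modules'
    expected = {'modules' if name == 'module' else name for name in CORE_DOCUMENTABLE_PLUGINS}
    assert expected == set(DOCUMENTABLE_PLUGINS)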

@@ -7,6 +7,7 @@ import re
import typing as t
from . import (
+ DOCUMENTABLE_PLUGINS,
SanitySingleVersion,
SanityFailure,
SanitySuccess,
@@ -50,22 +51,7 @@ class AnsibleDocTest(SanitySingleVersion):
"""Sanity test for ansible-doc."""
def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget]
"""Return the given list of test targets, filtered to include only those relevant for the test."""
- # This should use documentable plugins from constants instead
- unsupported_plugin_types = {
- # not supported by ansible-doc
- 'action',
- 'doc_fragments',
- 'filter',
- 'module_utils',
- 'terminal',
- 'test',
- # The following are plugin directories not directly supported by ansible-core (and thus also not by ansible-doc)
- # (https://github.com/ansible-collections/overview/blob/main/collection_requirements.rst#modules--plugins)
- 'plugin_utils',
- 'sub_plugins',
- }
- plugin_paths = [plugin_path for plugin_type, plugin_path in data_context().content.plugin_paths.items() if plugin_type not in unsupported_plugin_types]
+ plugin_paths = [plugin_path for plugin_type, plugin_path in data_context().content.plugin_paths.items() if plugin_type in DOCUMENTABLE_PLUGINS]
return [target for target in targets
if os.path.splitext(target.path)[1] == '.py'

@@ -1,11 +1,13 @@
"""Sanity test using validate-modules."""
from __future__ import annotations
+ import collections
import json
import os
import typing as t
from . import (
+ DOCUMENTABLE_PLUGINS,
SanitySingleVersion,
SanityMessage,
SanityFailure,
@@ -64,28 +66,56 @@ class ValidateModulesTest(SanitySingleVersion):
'deprecated-date',
])
self._prefixes = {
plugin_type: plugin_path + '/'
for plugin_type, plugin_path in data_context().content.plugin_paths.items()
if plugin_type in DOCUMENTABLE_PLUGINS
}
self._exclusions = set()
if not data_context().content.collection:
self._exclusions.add('lib/ansible/plugins/cache/base.py')
@property
def error_code(self): # type: () -> t.Optional[str]
"""Error code for ansible-test matching the format used by the underlying test program, or None if the program does not use error codes."""
return 'A100'
def get_plugin_type(self, target): # type: (TestTarget) -> t.Optional[str]
"""Return the plugin type of the given target, or None if it is not a plugin or module."""
if target.path.endswith('/__init__.py'):
return None
if target.path in self._exclusions:
return None
for plugin_type, prefix in self._prefixes.items():
if target.path.startswith(prefix):
return plugin_type
return None
def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget]
"""Return the given list of test targets, filtered to include only those relevant for the test."""
- return [target for target in targets if target.module]
+ return [target for target in targets if self.get_plugin_type(target) is not None]
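A rough illustration of the new prefix matching (hypothetical collection paths; the real prefixes come from data_context().content.plugin_paths):

    # assuming self._prefixes == {'modules': 'plugins/modules/', 'lookup': 'plugins/lookup/'}
    # 'plugins/modules/foo.py'      -> 'modules'  (selected)
    # 'plugins/lookup/bar.py'       -> 'lookup'   (selected)
    # 'plugins/lookup/__init__.py'  -> None       (skipped)
    # 'plugins/filter/baz.py'       -> None       (filter is not a documentable plugin type)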
def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult
env = ansible_environment(args, color=False)
settings = self.load_processor(args)
- paths = [target.path for target in targets.include]
+ target_per_type = collections.defaultdict(list)
+ for target in targets.include:
+ target_per_type[self.get_plugin_type(target)].append(target)
cmd = [
python.path,
os.path.join(SANITY_ROOT, 'validate-modules', 'validate.py'),
'--format', 'json',
'--arg-spec',
- ] + paths
+ ]
if data_context().content.collection:
cmd.extend(['--collection', data_context().content.collection.directory])
@@ -109,39 +139,52 @@ class ValidateModulesTest(SanitySingleVersion):
else:
display.warning('Cannot perform module comparison against the base branch because the base branch was not detected.')
- try:
- stdout, stderr = run_command(args, cmd, env=env, capture=True)
- status = 0
- except SubprocessError as ex:
- stdout = ex.stdout
- stderr = ex.stderr
- status = ex.status
+ errors = []
- if stderr or status not in (0, 3):
- raise SubprocessError(cmd=cmd, status=status, stderr=stderr, stdout=stdout)
+ for plugin_type, plugin_targets in sorted(target_per_type.items()):
+ paths = [target.path for target in plugin_targets]
+ plugin_cmd = list(cmd)
- if args.explain:
- return SanitySuccess(self.name)
+ if plugin_type != 'modules':
+ plugin_cmd += ['--plugin-type', plugin_type]
- messages = json.loads(stdout)
+ plugin_cmd += paths
- errors = []
+ try:
+ stdout, stderr = run_command(args, plugin_cmd, env=env, capture=True)
+ status = 0
+ except SubprocessError as ex:
+ stdout = ex.stdout
+ stderr = ex.stderr
+ status = ex.status
+ if stderr or status not in (0, 3):
+ raise SubprocessError(cmd=plugin_cmd, status=status, stderr=stderr, stdout=stdout)
+ if args.explain:
+ continue
- for filename in messages:
- output = messages[filename]
+ messages = json.loads(stdout)
- for item in output['errors']:
- errors.append(SanityMessage(
- path=filename,
- line=int(item['line']) if 'line' in item else 0,
- column=int(item['column']) if 'column' in item else 0,
- code='%s' % item['code'],
- message=item['msg'],
- ))
+ for filename in messages:
+ output = messages[filename]
- errors = settings.process_errors(errors, paths)
+ for item in output['errors']:
+ errors.append(SanityMessage(
+ path=filename,
+ line=int(item['line']) if 'line' in item else 0,
+ column=int(item['column']) if 'column' in item else 0,
+ code='%s' % item['code'],
+ message=item['msg'],
+ ))
+ all_paths = [target.path for target in targets.include]
+ all_errors = settings.process_errors(errors, all_paths)
+ if args.explain:
+ return SanitySuccess(self.name)
- if errors:
- return SanityFailure(self.name, messages=errors)
+ if all_errors:
+ return SanityFailure(self.name, messages=all_errors)
return SanitySuccess(self.name)
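Sketch of what the loop above ends up running (hypothetical paths): one validate.py invocation per plugin type, with --plugin-type added for everything except modules:

    # modules: validate.py --format json --arg-spec plugins/modules/foo.py
    # lookup:  validate.py --format json --arg-spec --plugin-type lookup plugins/lookup/bar.py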

@@ -16,4 +16,3 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
- __version__ = '0.0.1b'

@@ -121,6 +121,11 @@ OS_CALL_REGEX = re.compile(r'os\.call.*')
LOOSE_ANSIBLE_VERSION = LooseVersion('.'.join(ansible_version.split('.')[:3]))
+ PLUGINS_WITH_RETURN_VALUES = ('module', )
+ PLUGINS_WITH_EXAMPLES = ('module', )
+ PLUGINS_WITH_YAML_EXAMPLES = ('module', )
def is_potential_secret_option(option_name):
if not NO_LOG_REGEX.search(option_name):
return False
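The three tuples added above act as gates: a documentation requirement applies only to the plugin types listed in the corresponding tuple. A minimal sketch of the pattern (the hypothetical helper below is inlined in the real code):

    PLUGINS_WITH_EXAMPLES = ('module', )

    def examples_required(plugin_type):
        # modules must carry EXAMPLES; other plugin types are exempt for now
        return plugin_type in PLUGINS_WITH_EXAMPLES

    assert examples_required('module')
    assert not examples_required('lookup')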
@@ -303,14 +308,15 @@ class ModuleValidator(Validator):
ACCEPTLIST_FUTURE_IMPORTS = frozenset(('absolute_import', 'division', 'print_function'))
def __init__(self, path, analyze_arg_spec=False, collection=None, collection_version=None,
- base_branch=None, git_cache=None, reporter=None, routing=None):
+ base_branch=None, git_cache=None, reporter=None, routing=None, plugin_type='module'):
super(ModuleValidator, self).__init__(reporter=reporter or Reporter())
self.path = path
self.basename = os.path.basename(self.path)
self.name = os.path.splitext(self.basename)[0]
+ self.plugin_type = plugin_type
- self.analyze_arg_spec = analyze_arg_spec
+ self.analyze_arg_spec = analyze_arg_spec and plugin_type == 'module'
self._Version = LooseVersion
self._StrictVersion = StrictVersion
@@ -914,7 +920,9 @@ class ModuleValidator(Validator):
# We are testing a collection
if self.routing:
- routing_deprecation = self.routing.get('plugin_routing', {}).get('modules', {}).get(self.name, {}).get('deprecation', {})
+ routing_deprecation = self.routing.get('plugin_routing', {})
+ routing_deprecation = routing_deprecation.get('modules' if self.plugin_type == 'module' else self.plugin_type, {})
+ routing_deprecation = routing_deprecation.get(self.name, {}).get('deprecation', {})
if routing_deprecation:
# meta/runtime.yml says this is deprecated
routing_says_deprecated = True
@@ -935,7 +943,8 @@ class ModuleValidator(Validator):
self.name, 'DOCUMENTATION'
)
if doc:
- add_collection_to_versions_and_dates(doc, self.collection_name, is_module=True)
+ add_collection_to_versions_and_dates(doc, self.collection_name,
+ is_module=self.plugin_type == 'module')
for error in errors:
self.reporter.error(
path=self.object_path,
@@ -952,7 +961,8 @@ class ModuleValidator(Validator):
with CaptureStd():
try:
get_docstring(self.path, fragment_loader, verbose=True,
- collection_name=self.collection_name, is_module=True)
+ collection_name=self.collection_name,
+ is_module=self.plugin_type == 'module')
except AssertionError:
fragment = doc['extends_documentation_fragment']
self.reporter.error(
@@ -973,7 +983,8 @@ class ModuleValidator(Validator):
)
if not missing_fragment:
- add_fragments(doc, self.object_path, fragment_loader=fragment_loader, is_module=True)
+ add_fragments(doc, self.object_path, fragment_loader=fragment_loader,
+ is_module=self.plugin_type == 'module')
if 'options' in doc and doc['options'] is None:
self.reporter.error(
@@ -1005,6 +1016,7 @@ class ModuleValidator(Validator):
os.readlink(self.object_path).split('.')[0],
for_collection=bool(self.collection),
deprecated_module=deprecated,
+ plugin_type=self.plugin_type,
),
'DOCUMENTATION',
'invalid-documentation',
@@ -1017,6 +1029,7 @@ class ModuleValidator(Validator):
self.object_name.split('.')[0],
for_collection=bool(self.collection),
deprecated_module=deprecated,
+ plugin_type=self.plugin_type,
),
'DOCUMENTATION',
'invalid-documentation',
@@ -1027,12 +1040,13 @@ class ModuleValidator(Validator):
self._check_version_added(doc, existing_doc)
if not bool(doc_info['EXAMPLES']['value']):
- self.reporter.error(
- path=self.object_path,
- code='missing-examples',
- msg='No EXAMPLES provided'
- )
- else:
+ if self.plugin_type in PLUGINS_WITH_EXAMPLES:
+ self.reporter.error(
+ path=self.object_path,
+ code='missing-examples',
+ msg='No EXAMPLES provided'
+ )
+ elif self.plugin_type in PLUGINS_WITH_YAML_EXAMPLES:
_doc, errors, traces = parse_yaml(doc_info['EXAMPLES']['value'],
doc_info['EXAMPLES']['lineno'],
self.name, 'EXAMPLES', load_all=True,
@@ -1050,25 +1064,28 @@ class ModuleValidator(Validator):
)
if not bool(doc_info['RETURN']['value']):
- if self._is_new_module():
- self.reporter.error(
- path=self.object_path,
- code='missing-return',
- msg='No RETURN provided'
- )
- else:
- self.reporter.warning(
- path=self.object_path,
- code='missing-return-legacy',
- msg='No RETURN provided'
- )
+ if self.plugin_type in PLUGINS_WITH_RETURN_VALUES:
+ if self._is_new_module():
+ self.reporter.error(
+ path=self.object_path,
+ code='missing-return',
+ msg='No RETURN provided'
+ )
+ else:
+ self.reporter.warning(
+ path=self.object_path,
+ code='missing-return-legacy',
+ msg='No RETURN provided'
+ )
else:
data, errors, traces = parse_yaml(doc_info['RETURN']['value'],
doc_info['RETURN']['lineno'],
self.name, 'RETURN')
if data:
- add_collection_to_versions_and_dates(data, self.collection_name, is_module=True, return_docs=True)
- self._validate_docs_schema(data, return_schema(for_collection=bool(self.collection)),
+ add_collection_to_versions_and_dates(data, self.collection_name,
+ is_module=self.plugin_type == 'module', return_docs=True)
+ self._validate_docs_schema(data,
+ return_schema(for_collection=bool(self.collection), plugin_type=self.plugin_type),
'RETURN', 'return-syntax-error')
for error in errors:
@@ -1421,7 +1438,8 @@ class ModuleValidator(Validator):
try:
if not context:
- add_fragments(docs, self.object_path, fragment_loader=fragment_loader, is_module=True)
+ add_fragments(docs, self.object_path, fragment_loader=fragment_loader,
+ is_module=self.plugin_type == 'module')
except Exception:
# Cannot merge fragments
return
@@ -2005,7 +2023,8 @@ class ModuleValidator(Validator):
with CaptureStd():
try:
existing_doc, dummy_examples, dummy_return, existing_metadata = get_docstring(
- self.base_module, fragment_loader, verbose=True, collection_name=self.collection_name, is_module=True)
+ self.base_module, fragment_loader, verbose=True, collection_name=self.collection_name,
+ is_module=self.plugin_type == 'module')
existing_options = existing_doc.get('options', {}) or {}
except AssertionError:
fragment = doc['extends_documentation_fragment']
@@ -2207,15 +2226,18 @@ class ModuleValidator(Validator):
pass
if self._python_module() and not self._just_docs() and not end_of_deprecation_should_be_removed_only:
- self._validate_ansible_module_call(docs)
+ if self.plugin_type == 'module':
+ self._validate_ansible_module_call(docs)
self._check_for_sys_exit()
self._find_rejectlist_imports()
- self._find_module_utils()
+ if self.plugin_type == 'module':
+ self._find_module_utils()
self._find_has_import()
first_callable = self._get_first_callable() or 1000000 # use a bogus "high" line number if no callable exists
self._ensure_imports_below_docs(doc_info, first_callable)
- self._check_for_subprocess()
- self._check_for_os_call()
+ if self.plugin_type == 'module':
+ self._check_for_subprocess()
+ self._check_for_os_call()
if self._powershell_module():
if self.basename in self.PS_DOC_REJECTLIST:
@@ -2233,7 +2255,8 @@ class ModuleValidator(Validator):
self._check_gpl3_header()
if not self._just_docs() and not end_of_deprecation_should_be_removed_only:
- self._check_interpreter(powershell=self._powershell_module())
+ if self.plugin_type == 'module':
+ self._check_interpreter(powershell=self._powershell_module())
self._check_type_instead_of_isinstance(
powershell=self._powershell_module()
)
@@ -2288,8 +2311,8 @@ def re_compile(value):
def run():
parser = argparse.ArgumentParser(prog="validate-modules")
- parser.add_argument('modules', nargs='+',
- help='Path to module or module directory')
+ parser.add_argument('plugins', nargs='+',
+ help='Path to module/plugin or module/plugin directory')
parser.add_argument('-w', '--warnings', help='Show warnings',
action='store_true')
parser.add_argument('--exclude', help='RegEx exclusion pattern',
@@ -2311,13 +2334,16 @@ def run():
parser.add_argument('--collection-version',
help='The collection\'s version number used to check '
'deprecations')
+ parser.add_argument('--plugin-type',
+ default='module',
+ help='The plugin type to validate. Defaults to %(default)s')
args = parser.parse_args()
- args.modules = [m.rstrip('/') for m in args.modules]
+ args.plugins = [m.rstrip('/') for m in args.plugins]
reporter = Reporter()
- git_cache = GitCache(args.base_branch)
+ git_cache = GitCache(args.base_branch, args.plugin_type)
check_dirs = set()
@@ -2334,25 +2360,26 @@ def run():
except Exception as ex: # pylint: disable=broad-except
print('%s:%d:%d: YAML load failed: %s' % (routing_file, 0, 0, re.sub(r'\s+', ' ', str(ex))))
- for module in args.modules:
- if os.path.isfile(module):
- path = module
+ for plugin in args.plugins:
+ if os.path.isfile(plugin):
+ path = plugin
if args.exclude and args.exclude.search(path):
continue
if ModuleValidator.is_on_rejectlist(path):
continue
with ModuleValidator(path, collection=args.collection, collection_version=args.collection_version,
analyze_arg_spec=args.arg_spec, base_branch=args.base_branch,
- git_cache=git_cache, reporter=reporter, routing=routing) as mv1:
+ git_cache=git_cache, reporter=reporter, routing=routing,
+ plugin_type=args.plugin_type) as mv1:
mv1.validate()
check_dirs.add(os.path.dirname(path))
- for root, dirs, files in os.walk(module):
- basedir = root[len(module) + 1:].split('/', 1)[0]
+ for root, dirs, files in os.walk(plugin):
+ basedir = root[len(plugin) + 1:].split('/', 1)[0]
if basedir in REJECTLIST_DIRS:
continue
for dirname in dirs:
- if root == module and dirname in REJECTLIST_DIRS:
+ if root == plugin and dirname in REJECTLIST_DIRS:
continue
path = os.path.join(root, dirname)
if args.exclude and args.exclude.search(path):
@@ -2367,10 +2394,11 @@ def run():
continue
with ModuleValidator(path, collection=args.collection, collection_version=args.collection_version,
analyze_arg_spec=args.arg_spec, base_branch=args.base_branch,
- git_cache=git_cache, reporter=reporter, routing=routing) as mv2:
+ git_cache=git_cache, reporter=reporter, routing=routing,
+ plugin_type=args.plugin_type) as mv2:
mv2.validate()
- if not args.collection:
+ if not args.collection and args.plugin_type == 'module':
for path in sorted(check_dirs):
pv = PythonPackageValidator(path, reporter=reporter)
pv.validate()
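A hypothetical direct invocation of the updated validator (ansible-test normally constructs this command, as shown earlier in this diff):

    python validate.py --format json --arg-spec --plugin-type become lib/ansible/plugins/become/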
@@ -2382,16 +2410,21 @@
class GitCache:
- def __init__(self, base_branch):
+ def __init__(self, base_branch, plugin_type):
self.base_branch = base_branch
+ self.plugin_type = plugin_type
+ self.rel_path = 'lib/ansible/modules/'
+ if plugin_type != 'module':
+ self.rel_path = 'lib/ansible/plugins/%s/' % plugin_type
if self.base_branch:
- self.base_tree = self._git(['ls-tree', '-r', '--name-only', self.base_branch, 'lib/ansible/modules/'])
+ self.base_tree = self._git(['ls-tree', '-r', '--name-only', self.base_branch, self.rel_path])
else:
self.base_tree = []
try:
- self.head_tree = self._git(['ls-tree', '-r', '--name-only', 'HEAD', 'lib/ansible/modules/'])
+ self.head_tree = self._git(['ls-tree', '-r', '--name-only', 'HEAD', self.rel_path])
except GitError as ex:
if ex.status == 128:
# fallback when there is no .git directory
@@ -2405,7 +2438,10 @@ class GitCache:
else:
raise
- self.base_module_paths = dict((os.path.basename(p), p) for p in self.base_tree if os.path.splitext(p)[1] in ('.py', '.ps1'))
+ allowed_exts = ('.py', '.ps1')
+ if plugin_type != 'module':
+ allowed_exts = ('.py', )
+ self.base_module_paths = dict((os.path.basename(p), p) for p in self.base_tree if os.path.splitext(p)[1] in allowed_exts)
self.base_module_paths.pop('__init__.py', None)
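Effect of the GitCache changes above, sketched; the values follow directly from the branch logic:

    # GitCache(base_branch, 'module') -> rel_path == 'lib/ansible/modules/'; '.py' and '.ps1' files tracked
    # GitCache(base_branch, 'become') -> rel_path == 'lib/ansible/plugins/become/'; only '.py' files tracked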
@@ -2418,11 +2454,10 @@ class GitCache:
if os.path.islink(path):
self.head_aliased_modules.add(os.path.basename(os.path.realpath(path)))
- @staticmethod
- def _get_module_files():
+ def _get_module_files(self):
module_files = []
- for (dir_path, dir_names, file_names) in os.walk('lib/ansible/modules/'):
+ for (dir_path, dir_names, file_names) in os.walk(self.rel_path):
for file_name in file_names:
module_files.append(os.path.join(dir_path, file_name))

@@ -11,7 +11,7 @@ from ansible.module_utils.compat.version import StrictVersion
from functools import partial
from urllib.parse import urlparse
- from voluptuous import ALLOW_EXTRA, PREVENT_EXTRA, All, Any, Invalid, Length, Required, Schema, Self, ValueInvalid
+ from voluptuous import ALLOW_EXTRA, PREVENT_EXTRA, All, Any, Invalid, Length, Required, Schema, Self, ValueInvalid, Exclusive
from ansible.module_utils.six import string_types
from ansible.module_utils.common.collections import is_iterable
from ansible.utils.version import SemanticVersion
@@ -384,47 +384,129 @@ def version_added(v, error_code='version-added-invalid', accept_historical=False
return v
- def list_dict_option_schema(for_collection):
- suboption_schema = Schema(
- {
- Required('description'): doc_string_or_strings,
- 'required': bool,
- 'choices': list,
- 'aliases': Any(list_string_types),
- 'version_added': version(for_collection),
- 'version_added_collection': collection_name,
- 'default': json_value,
- # Note: Types are strings, not literal bools, such as True or False
- 'type': Any(None, 'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', 'sid', 'str'),
- # in case of type='list' elements define type of individual item in list
- 'elements': Any(None, 'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', 'sid', 'str'),
- # Recursive suboptions
- 'suboptions': Any(None, *list({str_type: Self} for str_type in string_types)),
- },
- extra=PREVENT_EXTRA
- )
+ def list_dict_option_schema(for_collection, plugin_type):
+ if plugin_type == 'module':
+ option_types = Any(None, 'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', 'sid', 'str')
+ element_types = option_types
+ else:
+ option_types = Any(None, 'boolean', 'bool', 'integer', 'int', 'float', 'list', 'dict', 'dictionary', 'none',
+ 'path', 'tmp', 'temppath', 'tmppath', 'pathspec', 'pathlist', 'str', 'string', 'raw')
+ element_types = Any(None, 'boolean', 'bool', 'integer', 'int', 'float', 'list', 'dict', 'dictionary', 'path', 'str', 'string', 'raw')
+ basic_option_schema = {
+ Required('description'): doc_string_or_strings,
+ 'required': bool,
+ 'choices': list,
+ 'aliases': Any(list_string_types),
+ 'version_added': version(for_collection),
+ 'version_added_collection': collection_name,
+ 'default': json_value,
+ # Note: Types are strings, not literal bools, such as True or False
+ 'type': option_types,
+ # in case of type='list' elements define type of individual item in list
+ 'elements': element_types,
+ }
+ if plugin_type != 'module':
+ basic_option_schema['name'] = Any(*string_types)
+ deprecated_schema = All(
+ Schema(
+ All(
+ {
+ # This definition makes sure everything has the correct types/values
+ 'why': doc_string,
+ 'alternatives': doc_string,
+ # vod stands for 'version or date'; this is the name of the exclusive group
+ Exclusive('removed_at_date', 'vod'): date(),
+ Exclusive('version', 'vod'): version(for_collection),
+ 'collection_name': collection_name,
+ },
+ {
+ # This definition makes sure that everything we require is there
+ Required('why'): Any(*string_types),
+ 'alternatives': Any(*string_types),
+ Required(Any('removed_at_date', 'version')): Any(*string_types),
+ Required('collection_name'): Any(*string_types),
+ },
+ ),
+ extra=PREVENT_EXTRA
+ ),
+ partial(check_removal_version,
+ version_field='version',
+ collection_name_field='collection_name',
+ error_code='invalid-removal-version')
+ )
+ env_schema = All(
+ Schema({
+ Required('name'): Any(*string_types),
+ 'deprecated': deprecated_schema,
+ 'version_added': version(for_collection),
+ 'version_added_collection': collection_name,
+ }, extra=PREVENT_EXTRA),
+ partial(version_added, error_code='option-invalid-version-added')
+ )
+ ini_schema = All(
+ Schema({
+ Required('key'): Any(*string_types),
+ Required('section'): Any(*string_types),
+ 'deprecated': deprecated_schema,
+ 'version_added': version(for_collection),
+ 'version_added_collection': collection_name,
+ }, extra=PREVENT_EXTRA),
+ partial(version_added, error_code='option-invalid-version-added')
+ )
+ vars_schema = All(
+ Schema({
+ Required('name'): Any(*string_types),
+ 'deprecated': deprecated_schema,
+ 'version_added': version(for_collection),
+ 'version_added_collection': collection_name,
+ }, extra=PREVENT_EXTRA),
+ partial(version_added, error_code='option-invalid-version-added')
+ )
+ cli_schema = All(
+ Schema({
+ Required('name'): Any(*string_types),
+ 'option': Any(*string_types),
+ 'deprecated': deprecated_schema,
+ 'version_added': version(for_collection),
+ 'version_added_collection': collection_name,
+ }, extra=PREVENT_EXTRA),
+ partial(version_added, error_code='option-invalid-version-added')
+ )
+ keyword_schema = All(
+ Schema({
+ Required('name'): Any(*string_types),
+ 'deprecated': deprecated_schema,
+ 'version_added': version(for_collection),
+ 'version_added_collection': collection_name,
+ }, extra=PREVENT_EXTRA),
+ partial(version_added, error_code='option-invalid-version-added')
+ )
+ basic_option_schema.update({
+ 'env': [env_schema],
+ 'ini': [ini_schema],
+ 'vars': [vars_schema],
+ 'cli': [cli_schema],
+ 'keyword': [keyword_schema],
+ 'deprecated': deprecated_schema,
+ })
+ suboption_schema = dict(basic_option_schema)
+ suboption_schema.update({
+ # Recursive suboptions
+ 'suboptions': Any(None, *list({str_type: Self} for str_type in string_types)),
+ })
+ suboption_schema = Schema(suboption_schema, extra=PREVENT_EXTRA)
# This generates list of dicts with keys from string_types and suboption_schema value
# for example in Python 3: {str: suboption_schema}
list_dict_suboption_schema = [{str_type: suboption_schema} for str_type in string_types]
- option_schema = Schema(
- {
- Required('description'): doc_string_or_strings,
- 'required': bool,
- 'choices': list,
- 'aliases': Any(list_string_types),
- 'version_added': version(for_collection),
- 'version_added_collection': collection_name,
- 'default': json_value,
- 'suboptions': Any(None, *list_dict_suboption_schema),
- # Note: Types are strings, not literal bools, such as True or False
- 'type': Any(None, 'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', 'sid', 'str'),
- # in case of type='list' elements define type of individual item in list
- 'elements': Any(None, 'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', 'sid', 'str'),
- },
- extra=PREVENT_EXTRA
- )
+ option_schema = dict(basic_option_schema)
+ option_schema.update({
+ 'suboptions': Any(None, *list_dict_suboption_schema),
+ })
+ option_schema = Schema(option_schema, extra=PREVENT_EXTRA)
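An example of plugin option documentation that the extended schema accepts (a hypothetical lookup plugin option; the env/ini/vars/cli/keyword entries follow the sub-schemas above, including the newly supported 'option' key in 'cli'):

    DOCUMENTATION = r'''
    options:
      timeout:
        description: Maximum time to wait, in seconds.
        type: integer
        default: 10
        env:
          - name: ANSIBLE_HYPOTHETICAL_TIMEOUT
        ini:
          - key: timeout
            section: defaults
        vars:
          - name: ansible_hypothetical_timeout
        cli:
          - name: timeout
            option: '--timeout'
        keyword:
          - name: timeout
    '''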
option_version_added = Schema(
All({
@@ -450,24 +532,38 @@ def return_contains(v):
return v
- def return_schema(for_collection):
+ def return_schema(for_collection, plugin_type='module'):
+ if plugin_type == 'module':
+ return_types = Any('bool', 'complex', 'dict', 'float', 'int', 'list', 'str')
+ element_types = Any(None, 'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', 'sid', 'str')
+ else:
+ return_types = Any(None, 'boolean', 'bool', 'integer', 'int', 'float', 'list', 'dict', 'dictionary', 'path', 'str', 'string', 'raw')
+ element_types = return_types
+ basic_return_option_schema = {
+ Required('description'): doc_string_or_strings,
+ 'returned': doc_string,
+ 'version_added': version(for_collection),
+ 'version_added_collection': collection_name,
+ 'sample': json_value,
+ 'example': json_value,
+ # in case of type='list' elements define type of individual item in list
+ 'elements': element_types,
+ 'choices': Any([object], (object,)),
+ }
+ if plugin_type == 'module':
+ # type is only required for modules right now
+ basic_return_option_schema[Required('type')] = return_types
+ else:
+ basic_return_option_schema['type'] = return_types
+ inner_return_option_schema = dict(basic_return_option_schema)
+ inner_return_option_schema.update({
+ 'contains': Any(None, *list({str_type: Self} for str_type in string_types)),
+ })
return_contains_schema = Any(
All(
- Schema(
- {
- Required('description'): doc_string_or_strings,
- 'returned': doc_string, # only returned on top level
- Required('type'): Any('bool', 'complex', 'dict', 'float', 'int', 'list', 'str'),
- 'version_added': version(for_collection),
- 'version_added_collection': collection_name,
- 'choices': Any([object], (object,)),
- 'sample': json_value,
- 'example': json_value,
- 'contains': Any(None, *list({str_type: Self} for str_type in string_types)),
- # in case of type='list' elements define type of individual item in list
- 'elements': Any(None, 'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', 'sid', 'str')
- }
- ),
+ Schema(inner_return_option_schema),
Schema(return_contains),
Schema(partial(version_added, error_code='option-invalid-version-added')),
),
@@ -478,23 +574,19 @@ def return_schema(for_collection):
# for example in Python 3: {str: return_contains_schema}
list_dict_return_contains_schema = [{str_type: return_contains_schema} for str_type in string_types]
+ return_option_schema = dict(basic_return_option_schema)
+ return_option_schema.update({
+ 'contains': Any(None, *list_dict_return_contains_schema),
+ })
+ if plugin_type == 'module':
+ # 'returned' is required on top-level
+ del return_option_schema['returned']
+ return_option_schema[Required('returned')] = Any(*string_types)
return Any(
All(
Schema(
{
- any_string_types: {
- Required('description'): doc_string_or_strings,
- Required('returned'): doc_string,
- Required('type'): Any('bool', 'complex', 'dict', 'float', 'int', 'list', 'str'),
- 'version_added': version(for_collection),
- 'version_added_collection': collection_name,
- 'choices': Any([object], (object,)),
- 'sample': json_value,
- 'example': json_value,
- 'contains': Any(None, *list_dict_return_contains_schema),
- # in case of type='list' elements define type of individual item in list
- 'elements': Any(None, 'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', 'sid', 'str')
- }
+ any_string_types: return_option_schema
}
),
Schema({any_string_types: return_contains}),
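Under the relaxed non-module branch above, a minimal RETURN document such as this hypothetical lookup plugin's validates even though 'type' and 'returned' are optional for plugins:

    RETURN = r'''
      _raw:
        description: The resolved values.
        type: list
        elements: string
    '''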
@@ -560,24 +652,35 @@ def author(value):
return value
- def doc_schema(module_name, for_collection=False, deprecated_module=False):
+ def doc_schema(module_name, for_collection=False, deprecated_module=False, plugin_type='module'):
if module_name.startswith('_'):
module_name = module_name[1:]
deprecated_module = True
+ if for_collection is False and plugin_type == 'connection' and module_name == 'paramiko_ssh':
+ # The plugin loader has a hard-coded exception: when the builtin connection 'paramiko' is
+ # referenced, it loads 'paramiko_ssh' instead. That's why in this plugin, the name must be
+ # 'paramiko' and not 'paramiko_ssh'.
+ module_name = 'paramiko'
doc_schema_dict = {
- Required('module'): module_name,
+ Required('module' if plugin_type == 'module' else 'name'): module_name,
Required('short_description'): doc_string,
Required('description'): doc_string_or_strings,
- Required('author'): All(Any(None, list_string_types, *string_types), author),
'notes': Any(None, [doc_string]),
'seealso': Any(None, seealso_schema),
'requirements': [doc_string],
'todo': Any(None, doc_string_or_strings),
- 'options': Any(None, *list_dict_option_schema(for_collection)),
+ 'options': Any(None, *list_dict_option_schema(for_collection, plugin_type)),
'extends_documentation_fragment': Any(list_string_types, *string_types),
'version_added_collection': collection_name,
}
+ if plugin_type == 'module':
+ doc_schema_dict[Required('author')] = All(Any(None, list_string_types, *string_types), author)
+ else:
+ # author is optional for plugins (for now)
+ doc_schema_dict['author'] = All(Any(None, list_string_types, *string_types), author)
+ if plugin_type == 'callback':
+ doc_schema_dict[Required('type')] = Any('aggregate', 'notification', 'stdout')
if for_collection:
# Optional
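Illustration of the key switch above (hypothetical plugin): non-module plugins document themselves under 'name' instead of 'module', author becomes optional, and callbacks must additionally declare their 'type':

    DOCUMENTATION = r'''
    name: hypothetical_stdout
    type: stdout
    short_description: Example callback documentation header.
    description: Fields required by the adjusted doc_schema for callbacks.
    '''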
