Extend validate-modules to also validate plugins (#71734)

* Let validate-modules also validate plugins.

* Support 'option' in 'cli'.

* Use DOCUMENTABLE_PLUGINS instead of UNDOCUMENTED_PLUGIN_TYPES.

* Support 'keyword', clean up error codes.

* Call settings.process_errors only once; remove __version__.

* Add changelog fragment.
pull/74989/head
Felix Fontein 2 years ago committed by GitHub
parent 7d234a40ee
commit 0990c4ca7c
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

@@ -0,0 +1,2 @@
minor_changes:
- "ansible-test sanity validate-modules - the validate-modules sanity test now also checks the documentation of documentable plugin types (https://github.com/ansible/ansible/pull/71734)."

@@ -123,6 +123,11 @@ COMMAND = 'sanity'
SANITY_ROOT = os.path.join(ANSIBLE_TEST_CONTROLLER_ROOT, 'sanity') SANITY_ROOT = os.path.join(ANSIBLE_TEST_CONTROLLER_ROOT, 'sanity')
TARGET_SANITY_ROOT = os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'sanity') TARGET_SANITY_ROOT = os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'sanity')
# NOTE: must match ansible.constants.DOCUMENTABLE_PLUGINS, but with 'module' replaced by 'modules'!
DOCUMENTABLE_PLUGINS = (
'become', 'cache', 'callback', 'cliconf', 'connection', 'httpapi', 'inventory', 'lookup', 'netconf', 'modules', 'shell', 'strategy', 'vars'
)
created_venvs = [] # type: t.List[str] created_venvs = [] # type: t.List[str]

@@ -7,6 +7,7 @@ import re
import typing as t import typing as t
from . import ( from . import (
DOCUMENTABLE_PLUGINS,
SanitySingleVersion, SanitySingleVersion,
SanityFailure, SanityFailure,
SanitySuccess, SanitySuccess,
@@ -50,22 +51,7 @@ class AnsibleDocTest(SanitySingleVersion):
"""Sanity test for ansible-doc.""" """Sanity test for ansible-doc."""
def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget] def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget]
"""Return the given list of test targets, filtered to include only those relevant for the test.""" """Return the given list of test targets, filtered to include only those relevant for the test."""
# This should use documentable plugins from constants instead plugin_paths = [plugin_path for plugin_type, plugin_path in data_context().content.plugin_paths.items() if plugin_type in DOCUMENTABLE_PLUGINS]
unsupported_plugin_types = {
# not supported by ansible-doc
'action',
'doc_fragments',
'filter',
'module_utils',
'terminal',
'test',
# The following are plugin directories not directly supported by ansible-core (and thus also not by ansible-doc)
# (https://github.com/ansible-collections/overview/blob/main/collection_requirements.rst#modules--plugins)
'plugin_utils',
'sub_plugins',
}
plugin_paths = [plugin_path for plugin_type, plugin_path in data_context().content.plugin_paths.items() if plugin_type not in unsupported_plugin_types]
return [target for target in targets return [target for target in targets
if os.path.splitext(target.path)[1] == '.py' if os.path.splitext(target.path)[1] == '.py'

@@ -1,11 +1,13 @@
"""Sanity test using validate-modules.""" """Sanity test using validate-modules."""
from __future__ import annotations from __future__ import annotations
import collections
import json import json
import os import os
import typing as t import typing as t
from . import ( from . import (
DOCUMENTABLE_PLUGINS,
SanitySingleVersion, SanitySingleVersion,
SanityMessage, SanityMessage,
SanityFailure, SanityFailure,
@@ -64,28 +66,56 @@ class ValidateModulesTest(SanitySingleVersion):
'deprecated-date', 'deprecated-date',
]) ])
self._prefixes = {
plugin_type: plugin_path + '/'
for plugin_type, plugin_path in data_context().content.plugin_paths.items()
if plugin_type in DOCUMENTABLE_PLUGINS
}
self._exclusions = set()
if not data_context().content.collection:
self._exclusions.add('lib/ansible/plugins/cache/base.py')
@property @property
def error_code(self): # type: () -> t.Optional[str] def error_code(self): # type: () -> t.Optional[str]
"""Error code for ansible-test matching the format used by the underlying test program, or None if the program does not use error codes.""" """Error code for ansible-test matching the format used by the underlying test program, or None if the program does not use error codes."""
return 'A100' return 'A100'
def get_plugin_type(self, target): # type: (TestTarget) -> t.Optional[str]
"""Return the plugin type of the given target, or None if it is not a plugin or module."""
if target.path.endswith('/__init__.py'):
return None
if target.path in self._exclusions:
return None
for plugin_type, prefix in self._prefixes.items():
if target.path.startswith(prefix):
return plugin_type
return None
def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget] def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget]
"""Return the given list of test targets, filtered to include only those relevant for the test.""" """Return the given list of test targets, filtered to include only those relevant for the test."""
return [target for target in targets if target.module] return [target for target in targets if self.get_plugin_type(target) is not None]
def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult
env = ansible_environment(args, color=False) env = ansible_environment(args, color=False)
settings = self.load_processor(args) settings = self.load_processor(args)
paths = [target.path for target in targets.include] target_per_type = collections.defaultdict(list)
for target in targets.include:
target_per_type[self.get_plugin_type(target)].append(target)
cmd = [ cmd = [
python.path, python.path,
os.path.join(SANITY_ROOT, 'validate-modules', 'validate.py'), os.path.join(SANITY_ROOT, 'validate-modules', 'validate.py'),
'--format', 'json', '--format', 'json',
'--arg-spec', '--arg-spec',
] + paths ]
if data_context().content.collection: if data_context().content.collection:
cmd.extend(['--collection', data_context().content.collection.directory]) cmd.extend(['--collection', data_context().content.collection.directory])
@@ -109,39 +139,52 @@ class ValidateModulesTest(SanitySingleVersion):
else: else:
display.warning('Cannot perform module comparison against the base branch because the base branch was not detected.') display.warning('Cannot perform module comparison against the base branch because the base branch was not detected.')
try: errors = []
stdout, stderr = run_command(args, cmd, env=env, capture=True)
status = 0
except SubprocessError as ex:
stdout = ex.stdout
stderr = ex.stderr
status = ex.status
if stderr or status not in (0, 3): for plugin_type, plugin_targets in sorted(target_per_type.items()):
raise SubprocessError(cmd=cmd, status=status, stderr=stderr, stdout=stdout) paths = [target.path for target in plugin_targets]
plugin_cmd = list(cmd)
if args.explain: if plugin_type != 'modules':
return SanitySuccess(self.name) plugin_cmd += ['--plugin-type', plugin_type]
messages = json.loads(stdout) plugin_cmd += paths
errors = [] try:
stdout, stderr = run_command(args, plugin_cmd, env=env, capture=True)
status = 0
except SubprocessError as ex:
stdout = ex.stdout
stderr = ex.stderr
status = ex.status
if stderr or status not in (0, 3):
raise SubprocessError(cmd=plugin_cmd, status=status, stderr=stderr, stdout=stdout)
if args.explain:
continue
for filename in messages: messages = json.loads(stdout)
output = messages[filename]
for item in output['errors']: for filename in messages:
errors.append(SanityMessage( output = messages[filename]
path=filename,
line=int(item['line']) if 'line' in item else 0,
column=int(item['column']) if 'column' in item else 0,
code='%s' % item['code'],
message=item['msg'],
))
errors = settings.process_errors(errors, paths) for item in output['errors']:
errors.append(SanityMessage(
path=filename,
line=int(item['line']) if 'line' in item else 0,
column=int(item['column']) if 'column' in item else 0,
code='%s' % item['code'],
message=item['msg'],
))
all_paths = [target.path for target in targets.include]
all_errors = settings.process_errors(errors, all_paths)
if args.explain:
return SanitySuccess(self.name)
if errors: if all_errors:
return SanityFailure(self.name, messages=errors) return SanityFailure(self.name, messages=all_errors)
return SanitySuccess(self.name) return SanitySuccess(self.name)

@@ -16,4 +16,3 @@
# You should have received a copy of the GNU General Public License # You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>. # along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations from __future__ import annotations
__version__ = '0.0.1b'

@@ -121,6 +121,11 @@ OS_CALL_REGEX = re.compile(r'os\.call.*')
LOOSE_ANSIBLE_VERSION = LooseVersion('.'.join(ansible_version.split('.')[:3])) LOOSE_ANSIBLE_VERSION = LooseVersion('.'.join(ansible_version.split('.')[:3]))
PLUGINS_WITH_RETURN_VALUES = ('module', )
PLUGINS_WITH_EXAMPLES = ('module', )
PLUGINS_WITH_YAML_EXAMPLES = ('module', )
def is_potential_secret_option(option_name): def is_potential_secret_option(option_name):
if not NO_LOG_REGEX.search(option_name): if not NO_LOG_REGEX.search(option_name):
return False return False
@@ -303,14 +308,15 @@ class ModuleValidator(Validator):
ACCEPTLIST_FUTURE_IMPORTS = frozenset(('absolute_import', 'division', 'print_function')) ACCEPTLIST_FUTURE_IMPORTS = frozenset(('absolute_import', 'division', 'print_function'))
def __init__(self, path, analyze_arg_spec=False, collection=None, collection_version=None, def __init__(self, path, analyze_arg_spec=False, collection=None, collection_version=None,
base_branch=None, git_cache=None, reporter=None, routing=None): base_branch=None, git_cache=None, reporter=None, routing=None, plugin_type='module'):
super(ModuleValidator, self).__init__(reporter=reporter or Reporter()) super(ModuleValidator, self).__init__(reporter=reporter or Reporter())
self.path = path self.path = path
self.basename = os.path.basename(self.path) self.basename = os.path.basename(self.path)
self.name = os.path.splitext(self.basename)[0] self.name = os.path.splitext(self.basename)[0]
self.plugin_type = plugin_type
self.analyze_arg_spec = analyze_arg_spec self.analyze_arg_spec = analyze_arg_spec and plugin_type == 'module'
self._Version = LooseVersion self._Version = LooseVersion
self._StrictVersion = StrictVersion self._StrictVersion = StrictVersion
@@ -914,7 +920,9 @@ class ModuleValidator(Validator):
# We are testing a collection # We are testing a collection
if self.routing: if self.routing:
routing_deprecation = self.routing.get('plugin_routing', {}).get('modules', {}).get(self.name, {}).get('deprecation', {}) routing_deprecation = self.routing.get('plugin_routing', {})
routing_deprecation = routing_deprecation.get('modules' if self.plugin_type == 'module' else self.plugin_type, {})
routing_deprecation = routing_deprecation.get(self.name, {}).get('deprecation', {})
if routing_deprecation: if routing_deprecation:
# meta/runtime.yml says this is deprecated # meta/runtime.yml says this is deprecated
routing_says_deprecated = True routing_says_deprecated = True
@@ -935,7 +943,8 @@ class ModuleValidator(Validator):
self.name, 'DOCUMENTATION' self.name, 'DOCUMENTATION'
) )
if doc: if doc:
add_collection_to_versions_and_dates(doc, self.collection_name, is_module=True) add_collection_to_versions_and_dates(doc, self.collection_name,
is_module=self.plugin_type == 'module')
for error in errors: for error in errors:
self.reporter.error( self.reporter.error(
path=self.object_path, path=self.object_path,
@@ -952,7 +961,8 @@ class ModuleValidator(Validator):
with CaptureStd(): with CaptureStd():
try: try:
get_docstring(self.path, fragment_loader, verbose=True, get_docstring(self.path, fragment_loader, verbose=True,
collection_name=self.collection_name, is_module=True) collection_name=self.collection_name,
is_module=self.plugin_type == 'module')
except AssertionError: except AssertionError:
fragment = doc['extends_documentation_fragment'] fragment = doc['extends_documentation_fragment']
self.reporter.error( self.reporter.error(
@@ -973,7 +983,8 @@ class ModuleValidator(Validator):
) )
if not missing_fragment: if not missing_fragment:
add_fragments(doc, self.object_path, fragment_loader=fragment_loader, is_module=True) add_fragments(doc, self.object_path, fragment_loader=fragment_loader,
is_module=self.plugin_type == 'module')
if 'options' in doc and doc['options'] is None: if 'options' in doc and doc['options'] is None:
self.reporter.error( self.reporter.error(
@@ -1005,6 +1016,7 @@ class ModuleValidator(Validator):
os.readlink(self.object_path).split('.')[0], os.readlink(self.object_path).split('.')[0],
for_collection=bool(self.collection), for_collection=bool(self.collection),
deprecated_module=deprecated, deprecated_module=deprecated,
plugin_type=self.plugin_type,
), ),
'DOCUMENTATION', 'DOCUMENTATION',
'invalid-documentation', 'invalid-documentation',
@@ -1017,6 +1029,7 @@ class ModuleValidator(Validator):
self.object_name.split('.')[0], self.object_name.split('.')[0],
for_collection=bool(self.collection), for_collection=bool(self.collection),
deprecated_module=deprecated, deprecated_module=deprecated,
plugin_type=self.plugin_type,
), ),
'DOCUMENTATION', 'DOCUMENTATION',
'invalid-documentation', 'invalid-documentation',
@@ -1027,12 +1040,13 @@ class ModuleValidator(Validator):
self._check_version_added(doc, existing_doc) self._check_version_added(doc, existing_doc)
if not bool(doc_info['EXAMPLES']['value']): if not bool(doc_info['EXAMPLES']['value']):
self.reporter.error( if self.plugin_type in PLUGINS_WITH_EXAMPLES:
path=self.object_path, self.reporter.error(
code='missing-examples', path=self.object_path,
msg='No EXAMPLES provided' code='missing-examples',
) msg='No EXAMPLES provided'
else: )
elif self.plugin_type in PLUGINS_WITH_YAML_EXAMPLES:
_doc, errors, traces = parse_yaml(doc_info['EXAMPLES']['value'], _doc, errors, traces = parse_yaml(doc_info['EXAMPLES']['value'],
doc_info['EXAMPLES']['lineno'], doc_info['EXAMPLES']['lineno'],
self.name, 'EXAMPLES', load_all=True, self.name, 'EXAMPLES', load_all=True,
@@ -1050,25 +1064,28 @@ class ModuleValidator(Validator):
) )
if not bool(doc_info['RETURN']['value']): if not bool(doc_info['RETURN']['value']):
if self._is_new_module(): if self.plugin_type in PLUGINS_WITH_RETURN_VALUES:
self.reporter.error( if self._is_new_module():
path=self.object_path, self.reporter.error(
code='missing-return', path=self.object_path,
msg='No RETURN provided' code='missing-return',
) msg='No RETURN provided'
else: )
self.reporter.warning( else:
path=self.object_path, self.reporter.warning(
code='missing-return-legacy', path=self.object_path,
msg='No RETURN provided' code='missing-return-legacy',
) msg='No RETURN provided'
)
else: else:
data, errors, traces = parse_yaml(doc_info['RETURN']['value'], data, errors, traces = parse_yaml(doc_info['RETURN']['value'],
doc_info['RETURN']['lineno'], doc_info['RETURN']['lineno'],
self.name, 'RETURN') self.name, 'RETURN')
if data: if data:
add_collection_to_versions_and_dates(data, self.collection_name, is_module=True, return_docs=True) add_collection_to_versions_and_dates(data, self.collection_name,
self._validate_docs_schema(data, return_schema(for_collection=bool(self.collection)), is_module=self.plugin_type == 'module', return_docs=True)
self._validate_docs_schema(data,
return_schema(for_collection=bool(self.collection), plugin_type=self.plugin_type),
'RETURN', 'return-syntax-error') 'RETURN', 'return-syntax-error')
for error in errors: for error in errors:
@@ -1421,7 +1438,8 @@ class ModuleValidator(Validator):
try: try:
if not context: if not context:
add_fragments(docs, self.object_path, fragment_loader=fragment_loader, is_module=True) add_fragments(docs, self.object_path, fragment_loader=fragment_loader,
is_module=self.plugin_type == 'module')
except Exception: except Exception:
# Cannot merge fragments # Cannot merge fragments
return return
@@ -2005,7 +2023,8 @@ class ModuleValidator(Validator):
with CaptureStd(): with CaptureStd():
try: try:
existing_doc, dummy_examples, dummy_return, existing_metadata = get_docstring( existing_doc, dummy_examples, dummy_return, existing_metadata = get_docstring(
self.base_module, fragment_loader, verbose=True, collection_name=self.collection_name, is_module=True) self.base_module, fragment_loader, verbose=True, collection_name=self.collection_name,
is_module=self.plugin_type == 'module')
existing_options = existing_doc.get('options', {}) or {} existing_options = existing_doc.get('options', {}) or {}
except AssertionError: except AssertionError:
fragment = doc['extends_documentation_fragment'] fragment = doc['extends_documentation_fragment']
@@ -2207,15 +2226,18 @@ class ModuleValidator(Validator):
pass pass
if self._python_module() and not self._just_docs() and not end_of_deprecation_should_be_removed_only: if self._python_module() and not self._just_docs() and not end_of_deprecation_should_be_removed_only:
self._validate_ansible_module_call(docs) if self.plugin_type == 'module':
self._validate_ansible_module_call(docs)
self._check_for_sys_exit() self._check_for_sys_exit()
self._find_rejectlist_imports() self._find_rejectlist_imports()
self._find_module_utils() if self.plugin_type == 'module':
self._find_module_utils()
self._find_has_import() self._find_has_import()
first_callable = self._get_first_callable() or 1000000 # use a bogus "high" line number if no callable exists first_callable = self._get_first_callable() or 1000000 # use a bogus "high" line number if no callable exists
self._ensure_imports_below_docs(doc_info, first_callable) self._ensure_imports_below_docs(doc_info, first_callable)
self._check_for_subprocess() if self.plugin_type == 'module':
self._check_for_os_call() self._check_for_subprocess()
self._check_for_os_call()
if self._powershell_module(): if self._powershell_module():
if self.basename in self.PS_DOC_REJECTLIST: if self.basename in self.PS_DOC_REJECTLIST:
@@ -2233,7 +2255,8 @@ class ModuleValidator(Validator):
self._check_gpl3_header() self._check_gpl3_header()
if not self._just_docs() and not end_of_deprecation_should_be_removed_only: if not self._just_docs() and not end_of_deprecation_should_be_removed_only:
self._check_interpreter(powershell=self._powershell_module()) if self.plugin_type == 'module':
self._check_interpreter(powershell=self._powershell_module())
self._check_type_instead_of_isinstance( self._check_type_instead_of_isinstance(
powershell=self._powershell_module() powershell=self._powershell_module()
) )
@@ -2288,8 +2311,8 @@ def re_compile(value):
def run(): def run():
parser = argparse.ArgumentParser(prog="validate-modules") parser = argparse.ArgumentParser(prog="validate-modules")
parser.add_argument('modules', nargs='+', parser.add_argument('plugins', nargs='+',
help='Path to module or module directory') help='Path to module/plugin or module/plugin directory')
parser.add_argument('-w', '--warnings', help='Show warnings', parser.add_argument('-w', '--warnings', help='Show warnings',
action='store_true') action='store_true')
parser.add_argument('--exclude', help='RegEx exclusion pattern', parser.add_argument('--exclude', help='RegEx exclusion pattern',
@@ -2311,13 +2334,16 @@ def run():
parser.add_argument('--collection-version', parser.add_argument('--collection-version',
help='The collection\'s version number used to check ' help='The collection\'s version number used to check '
'deprecations') 'deprecations')
parser.add_argument('--plugin-type',
default='module',
help='The plugin type to validate. Defaults to %(default)s')
args = parser.parse_args() args = parser.parse_args()
args.modules = [m.rstrip('/') for m in args.modules] args.plugins = [m.rstrip('/') for m in args.plugins]
reporter = Reporter() reporter = Reporter()
git_cache = GitCache(args.base_branch) git_cache = GitCache(args.base_branch, args.plugin_type)
check_dirs = set() check_dirs = set()
@@ -2334,25 +2360,26 @@ def run():
except Exception as ex: # pylint: disable=broad-except except Exception as ex: # pylint: disable=broad-except
print('%s:%d:%d: YAML load failed: %s' % (routing_file, 0, 0, re.sub(r'\s+', ' ', str(ex)))) print('%s:%d:%d: YAML load failed: %s' % (routing_file, 0, 0, re.sub(r'\s+', ' ', str(ex))))
for module in args.modules: for plugin in args.plugins:
if os.path.isfile(module): if os.path.isfile(plugin):
path = module path = plugin
if args.exclude and args.exclude.search(path): if args.exclude and args.exclude.search(path):
continue continue
if ModuleValidator.is_on_rejectlist(path): if ModuleValidator.is_on_rejectlist(path):
continue continue
with ModuleValidator(path, collection=args.collection, collection_version=args.collection_version, with ModuleValidator(path, collection=args.collection, collection_version=args.collection_version,
analyze_arg_spec=args.arg_spec, base_branch=args.base_branch, analyze_arg_spec=args.arg_spec, base_branch=args.base_branch,
git_cache=git_cache, reporter=reporter, routing=routing) as mv1: git_cache=git_cache, reporter=reporter, routing=routing,
plugin_type=args.plugin_type) as mv1:
mv1.validate() mv1.validate()
check_dirs.add(os.path.dirname(path)) check_dirs.add(os.path.dirname(path))
for root, dirs, files in os.walk(module): for root, dirs, files in os.walk(plugin):
basedir = root[len(module) + 1:].split('/', 1)[0] basedir = root[len(plugin) + 1:].split('/', 1)[0]
if basedir in REJECTLIST_DIRS: if basedir in REJECTLIST_DIRS:
continue continue
for dirname in dirs: for dirname in dirs:
if root == module and dirname in REJECTLIST_DIRS: if root == plugin and dirname in REJECTLIST_DIRS:
continue continue
path = os.path.join(root, dirname) path = os.path.join(root, dirname)
if args.exclude and args.exclude.search(path): if args.exclude and args.exclude.search(path):
@@ -2367,10 +2394,11 @@ def run():
continue continue
with ModuleValidator(path, collection=args.collection, collection_version=args.collection_version, with ModuleValidator(path, collection=args.collection, collection_version=args.collection_version,
analyze_arg_spec=args.arg_spec, base_branch=args.base_branch, analyze_arg_spec=args.arg_spec, base_branch=args.base_branch,
git_cache=git_cache, reporter=reporter, routing=routing) as mv2: git_cache=git_cache, reporter=reporter, routing=routing,
plugin_type=args.plugin_type) as mv2:
mv2.validate() mv2.validate()
if not args.collection: if not args.collection and args.plugin_type == 'module':
for path in sorted(check_dirs): for path in sorted(check_dirs):
pv = PythonPackageValidator(path, reporter=reporter) pv = PythonPackageValidator(path, reporter=reporter)
pv.validate() pv.validate()
@@ -2382,16 +2410,21 @@ def run():
class GitCache: class GitCache:
def __init__(self, base_branch): def __init__(self, base_branch, plugin_type):
self.base_branch = base_branch self.base_branch = base_branch
self.plugin_type = plugin_type
self.rel_path = 'lib/ansible/modules/'
if plugin_type != 'module':
self.rel_path = 'lib/ansible/plugins/%s/' % plugin_type
if self.base_branch: if self.base_branch:
self.base_tree = self._git(['ls-tree', '-r', '--name-only', self.base_branch, 'lib/ansible/modules/']) self.base_tree = self._git(['ls-tree', '-r', '--name-only', self.base_branch, self.rel_path])
else: else:
self.base_tree = [] self.base_tree = []
try: try:
self.head_tree = self._git(['ls-tree', '-r', '--name-only', 'HEAD', 'lib/ansible/modules/']) self.head_tree = self._git(['ls-tree', '-r', '--name-only', 'HEAD', self.rel_path])
except GitError as ex: except GitError as ex:
if ex.status == 128: if ex.status == 128:
# fallback when there is no .git directory # fallback when there is no .git directory
@@ -2405,7 +2438,10 @@ class GitCache:
else: else:
raise raise
self.base_module_paths = dict((os.path.basename(p), p) for p in self.base_tree if os.path.splitext(p)[1] in ('.py', '.ps1')) allowed_exts = ('.py', '.ps1')
if plugin_type != 'module':
allowed_exts = ('.py', )
self.base_module_paths = dict((os.path.basename(p), p) for p in self.base_tree if os.path.splitext(p)[1] in allowed_exts)
self.base_module_paths.pop('__init__.py', None) self.base_module_paths.pop('__init__.py', None)
@@ -2418,11 +2454,10 @@ class GitCache:
if os.path.islink(path): if os.path.islink(path):
self.head_aliased_modules.add(os.path.basename(os.path.realpath(path))) self.head_aliased_modules.add(os.path.basename(os.path.realpath(path)))
@staticmethod def _get_module_files(self):
def _get_module_files():
module_files = [] module_files = []
for (dir_path, dir_names, file_names) in os.walk('lib/ansible/modules/'): for (dir_path, dir_names, file_names) in os.walk(self.rel_path):
for file_name in file_names: for file_name in file_names:
module_files.append(os.path.join(dir_path, file_name)) module_files.append(os.path.join(dir_path, file_name))

@@ -11,7 +11,7 @@ from ansible.module_utils.compat.version import StrictVersion
from functools import partial from functools import partial
from urllib.parse import urlparse from urllib.parse import urlparse
from voluptuous import ALLOW_EXTRA, PREVENT_EXTRA, All, Any, Invalid, Length, Required, Schema, Self, ValueInvalid from voluptuous import ALLOW_EXTRA, PREVENT_EXTRA, All, Any, Invalid, Length, Required, Schema, Self, ValueInvalid, Exclusive
from ansible.module_utils.six import string_types from ansible.module_utils.six import string_types
from ansible.module_utils.common.collections import is_iterable from ansible.module_utils.common.collections import is_iterable
from ansible.utils.version import SemanticVersion from ansible.utils.version import SemanticVersion
@@ -384,47 +384,129 @@ def version_added(v, error_code='version-added-invalid', accept_historical=False
return v return v
def list_dict_option_schema(for_collection): def list_dict_option_schema(for_collection, plugin_type):
suboption_schema = Schema( if plugin_type == 'module':
{ option_types = Any(None, 'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', 'sid', 'str')
Required('description'): doc_string_or_strings, element_types = option_types
'required': bool, else:
'choices': list, option_types = Any(None, 'boolean', 'bool', 'integer', 'int', 'float', 'list', 'dict', 'dictionary', 'none',
'aliases': Any(list_string_types), 'path', 'tmp', 'temppath', 'tmppath', 'pathspec', 'pathlist', 'str', 'string', 'raw')
'version_added': version(for_collection), element_types = Any(None, 'boolean', 'bool', 'integer', 'int', 'float', 'list', 'dict', 'dictionary', 'path', 'str', 'string', 'raw')
'version_added_collection': collection_name,
'default': json_value, basic_option_schema = {
# Note: Types are strings, not literal bools, such as True or False Required('description'): doc_string_or_strings,
'type': Any(None, 'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', 'sid', 'str'), 'required': bool,
# in case of type='list' elements define type of individual item in list 'choices': list,
'elements': Any(None, 'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', 'sid', 'str'), 'aliases': Any(list_string_types),
# Recursive suboptions 'version_added': version(for_collection),
'suboptions': Any(None, *list({str_type: Self} for str_type in string_types)), 'version_added_collection': collection_name,
}, 'default': json_value,
extra=PREVENT_EXTRA # Note: Types are strings, not literal bools, such as True or False
) 'type': option_types,
# in case of type='list' elements define type of individual item in list
'elements': element_types,
}
if plugin_type != 'module':
basic_option_schema['name'] = Any(*string_types)
deprecated_schema = All(
Schema(
All(
{
# This definition makes sure everything has the correct types/values
'why': doc_string,
'alternatives': doc_string,
# vod stands for 'version or date'; this is the name of the exclusive group
Exclusive('removed_at_date', 'vod'): date(),
Exclusive('version', 'vod'): version(for_collection),
'collection_name': collection_name,
},
{
# This definition makes sure that everything we require is there
Required('why'): Any(*string_types),
'alternatives': Any(*string_types),
Required(Any('removed_at_date', 'version')): Any(*string_types),
Required('collection_name'): Any(*string_types),
},
),
extra=PREVENT_EXTRA
),
partial(check_removal_version,
version_field='version',
collection_name_field='collection_name',
error_code='invalid-removal-version')
)
env_schema = All(
Schema({
Required('name'): Any(*string_types),
'deprecated': deprecated_schema,
'version_added': version(for_collection),
'version_added_collection': collection_name,
}, extra=PREVENT_EXTRA),
partial(version_added, error_code='option-invalid-version-added')
)
ini_schema = All(
Schema({
Required('key'): Any(*string_types),
Required('section'): Any(*string_types),
'deprecated': deprecated_schema,
'version_added': version(for_collection),
'version_added_collection': collection_name,
}, extra=PREVENT_EXTRA),
partial(version_added, error_code='option-invalid-version-added')
)
vars_schema = All(
Schema({
Required('name'): Any(*string_types),
'deprecated': deprecated_schema,
'version_added': version(for_collection),
'version_added_collection': collection_name,
}, extra=PREVENT_EXTRA),
partial(version_added, error_code='option-invalid-version-added')
)
cli_schema = All(
Schema({
Required('name'): Any(*string_types),
'option': Any(*string_types),
'deprecated': deprecated_schema,
'version_added': version(for_collection),
'version_added_collection': collection_name,
}, extra=PREVENT_EXTRA),
partial(version_added, error_code='option-invalid-version-added')
)
keyword_schema = All(
Schema({
Required('name'): Any(*string_types),
'deprecated': deprecated_schema,
'version_added': version(for_collection),
'version_added_collection': collection_name,
}, extra=PREVENT_EXTRA),
partial(version_added, error_code='option-invalid-version-added')
)
basic_option_schema.update({
'env': [env_schema],
'ini': [ini_schema],
'vars': [vars_schema],
'cli': [cli_schema],
'keyword': [keyword_schema],
'deprecated': deprecated_schema,
})
suboption_schema = dict(basic_option_schema)
suboption_schema.update({
# Recursive suboptions
'suboptions': Any(None, *list({str_type: Self} for str_type in string_types)),
})
suboption_schema = Schema(suboption_schema, extra=PREVENT_EXTRA)
# This generates list of dicts with keys from string_types and suboption_schema value # This generates list of dicts with keys from string_types and suboption_schema value
# for example in Python 3: {str: suboption_schema} # for example in Python 3: {str: suboption_schema}
list_dict_suboption_schema = [{str_type: suboption_schema} for str_type in string_types] list_dict_suboption_schema = [{str_type: suboption_schema} for str_type in string_types]
option_schema = Schema( option_schema = dict(basic_option_schema)
{ option_schema.update({
Required('description'): doc_string_or_strings, 'suboptions': Any(None, *list_dict_suboption_schema),
'required': bool, })
'choices': list, option_schema = Schema(option_schema, extra=PREVENT_EXTRA)
'aliases': Any(list_string_types),
'version_added': version(for_collection),
'version_added_collection': collection_name,
'default': json_value,
'suboptions': Any(None, *list_dict_suboption_schema),
# Note: Types are strings, not literal bools, such as True or False
'type': Any(None, 'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', 'sid', 'str'),
# in case of type='list' elements define type of individual item in list
'elements': Any(None, 'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', 'sid', 'str'),
},
extra=PREVENT_EXTRA
)
option_version_added = Schema( option_version_added = Schema(
All({ All({
@@ -450,24 +532,38 @@ def return_contains(v):
return v return v
def return_schema(for_collection): def return_schema(for_collection, plugin_type='module'):
if plugin_type == 'module':
return_types = Any('bool', 'complex', 'dict', 'float', 'int', 'list', 'str')
element_types = Any(None, 'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', 'sid', 'str')
else:
return_types = Any(None, 'boolean', 'bool', 'integer', 'int', 'float', 'list', 'dict', 'dictionary', 'path', 'str', 'string', 'raw')
element_types = return_types
basic_return_option_schema = {
Required('description'): doc_string_or_strings,
'returned': doc_string,
'version_added': version(for_collection),
'version_added_collection': collection_name,
'sample': json_value,
'example': json_value,
# in case of type='list' elements define type of individual item in list
'elements': element_types,
'choices': Any([object], (object,)),
}
if plugin_type == 'module':
# type is only required for modules right now
basic_return_option_schema[Required('type')] = return_types
else:
basic_return_option_schema['type'] = return_types
inner_return_option_schema = dict(basic_return_option_schema)
inner_return_option_schema.update({
'contains': Any(None, *list({str_type: Self} for str_type in string_types)),
})
return_contains_schema = Any( return_contains_schema = Any(
All( All(
Schema( Schema(inner_return_option_schema),
{
Required('description'): doc_string_or_strings,
'returned': doc_string, # only returned on top level
Required('type'): Any('bool', 'complex', 'dict', 'float', 'int', 'list', 'str'),
'version_added': version(for_collection),
'version_added_collection': collection_name,
'choices': Any([object], (object,)),
'sample': json_value,
'example': json_value,
'contains': Any(None, *list({str_type: Self} for str_type in string_types)),
# in case of type='list' elements define type of individual item in list
'elements': Any(None, 'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', 'sid', 'str'),
}
),
Schema(return_contains), Schema(return_contains),
Schema(partial(version_added, error_code='option-invalid-version-added')), Schema(partial(version_added, error_code='option-invalid-version-added')),
), ),
@ -478,23 +574,19 @@ def return_schema(for_collection):
# for example in Python 3: {str: return_contains_schema} # for example in Python 3: {str: return_contains_schema}
list_dict_return_contains_schema = [{str_type: return_contains_schema} for str_type in string_types] list_dict_return_contains_schema = [{str_type: return_contains_schema} for str_type in string_types]
return_option_schema = dict(basic_return_option_schema)
return_option_schema.update({
'contains': Any(None, *list_dict_return_contains_schema),
})
if plugin_type == 'module':
# 'returned' is required on top-level
del return_option_schema['returned']
return_option_schema[Required('returned')] = Any(*string_types)
return Any( return Any(
All( All(
Schema( Schema(
{ {
any_string_types: { any_string_types: return_option_schema
Required('description'): doc_string_or_strings,
Required('returned'): doc_string,
Required('type'): Any('bool', 'complex', 'dict', 'float', 'int', 'list', 'str'),
'version_added': version(for_collection),
'version_added_collection': collection_name,
'choices': Any([object], (object,)),
'sample': json_value,
'example': json_value,
'contains': Any(None, *list_dict_return_contains_schema),
# in case of type='list' elements define type of individual item in list
'elements': Any(None, 'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', 'sid', 'str'),
}
} }
), ),
Schema({any_string_types: return_contains}), Schema({any_string_types: return_contains}),
@ -560,24 +652,35 @@ def author(value):
return value return value
def doc_schema(module_name, for_collection=False, deprecated_module=False): def doc_schema(module_name, for_collection=False, deprecated_module=False, plugin_type='module'):
if module_name.startswith('_'): if module_name.startswith('_'):
module_name = module_name[1:] module_name = module_name[1:]
deprecated_module = True deprecated_module = True
if for_collection is False and plugin_type == 'connection' and module_name == 'paramiko_ssh':
# The plugin loader has a hard-coded exception: when the builtin connection 'paramiko' is
# referenced, it loads 'paramiko_ssh' instead. That's why in this plugin, the name must be
# 'paramiko' and not 'paramiko_ssh'.
module_name = 'paramiko'
doc_schema_dict = { doc_schema_dict = {
Required('module'): module_name, Required('module' if plugin_type == 'module' else 'name'): module_name,
Required('short_description'): doc_string, Required('short_description'): doc_string,
Required('description'): doc_string_or_strings, Required('description'): doc_string_or_strings,
Required('author'): All(Any(None, list_string_types, *string_types), author),
'notes': Any(None, [doc_string]), 'notes': Any(None, [doc_string]),
'seealso': Any(None, seealso_schema), 'seealso': Any(None, seealso_schema),
'requirements': [doc_string], 'requirements': [doc_string],
'todo': Any(None, doc_string_or_strings), 'todo': Any(None, doc_string_or_strings),
'options': Any(None, *list_dict_option_schema(for_collection)), 'options': Any(None, *list_dict_option_schema(for_collection, plugin_type)),
'extends_documentation_fragment': Any(list_string_types, *string_types), 'extends_documentation_fragment': Any(list_string_types, *string_types),
'version_added_collection': collection_name, 'version_added_collection': collection_name,
} }
if plugin_type == 'module':
doc_schema_dict[Required('author')] = All(Any(None, list_string_types, *string_types), author)
else:
# author is optional for plugins (for now)
doc_schema_dict['author'] = All(Any(None, list_string_types, *string_types), author)
if plugin_type == 'callback':
doc_schema_dict[Required('type')] = Any('aggregate', 'notification', 'stdout')
if for_collection: if for_collection:
# Optional # Optional

Loading…
Cancel
Save