#!/usr/bin/env python2
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015 Matt Martz <matt@sivel.net>
# Copyright (C) 2015 Rackspace US, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function
import abc
import argparse
import ast
import json
import os
import re
import subprocess
import sys
import tempfile
import traceback
from collections import OrderedDict
from contextlib import contextmanager
from distutils.version import StrictVersion
from fnmatch import fnmatch
from ansible import __version__ as ansible_version
from ansible.executor.module_common import REPLACER_WINDOWS
from ansible.utils.module_docs import BLACKLIST_MODULES, get_docstring
from module_args import get_argument_spec
from schema import doc_schema, option_schema, metadata_schema
from utils import CaptureStd, parse_yaml
from voluptuous.humanize import humanize_error
BLACKLIST_DIRS = frozenset(('.git', 'test', '.github', '.idea'))
INDENT_REGEX = re.compile(r'([\t]*)')
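# Flags a type() comparison on lines containing "if" or "or", e.g.
# "if type(foo) == list:"; the lookbehinds skip str(type(...)) and calls to
# identifiers that merely end in "type"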
TYPE_REGEX = re.compile(r'.*(if|or)(\s+[^"\']*|\s+)(?<!_)(?<!str\()type\(.*')
BLACKLIST_IMPORTS = {
'requests': {
'new_only': True,
'msg': (
203,
('requests import found, should use '
'ansible.module_utils.urls instead')
)
},
    r'boto(?:\.|$)': {
'new_only': True,
'msg': (
204,
('boto import found, new modules should use boto3')
)
},
}
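# The check identifiers used below appear to be grouped by the hundreds digit:
#   1xx  basic module structure (interpreter line, main() call, license
#        header, placement of imports relative to the doc variables)
#   2xx  imports and calls (blacklisted imports, sys.exit(), WANT_JSON, ...)
#   3xx  DOCUMENTATION/EXAMPLES/RETURN/ANSIBLE_METADATA problems
#   4xx  Python syntax and style (SyntaxError, tabs, type() comparisons)
#   5xx  file layout (extensions, __init__.py, companion .py docs file)
# x9x codes (291, 292, 391, 392) are reported as warnings rather than errors.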
class Reporter(object):
@staticmethod
@contextmanager
def _output_handle(output):
if output != '-':
handle = open(output, 'w+')
else:
handle = sys.stdout
yield handle
handle.flush()
handle.close()
@staticmethod
def _filter_out_ok(reports):
temp_reports = reports.copy()
        for path, report in list(temp_reports.items()):
if not (report['errors'] or report['warnings']):
del temp_reports[path]
return temp_reports
@staticmethod
def plain(reports, warnings=False, output='-'):
"""Print out the test results in plain format
output is ignored here for now
"""
ret = []
for path, report in Reporter._filter_out_ok(reports).items():
if report['errors'] or (warnings and report['warnings']):
print('=' * 76)
print(path)
print('=' * 76)
traces = report['traces'][:]
if warnings and report['warnings']:
traces.extend(report['warning_traces'])
for trace in traces:
print('TRACE:')
print('\n '.join((' %s' % trace).splitlines()))
for error in report['errors']:
print('ERROR:%(code)d:%(msg)s' % error)
ret.append(1)
if warnings:
for warning in report['warnings']:
print('WARNING:%(code)d:%(msg)s' % warning)
                    # ret.append(1)  # Don't increment exit status for warnings
if report['errors'] or (warnings and report['warnings']):
print()
return 3 if ret else 0
@staticmethod
def json(reports, warnings=False, output='-'):
"""Print out the test results in json format
warnings is not respected in this output
"""
ret = [len(r['errors']) for _, r in reports.items()]
with Reporter._output_handle(output) as handle:
print(json.dumps(Reporter._filter_out_ok(reports), indent=4), file=handle)
return 3 if sum(ret) else 0
class Validator(object):
    """Validator instances are intended to be run on a single object. If you
    are scanning multiple objects for problems, you'll want a separate
    Validator for each one."""
__metaclass__ = abc.ABCMeta
def __init__(self):
self.reset()
def reset(self):
"""Reset the test results"""
self.errors = []
self.warnings = []
self.traces = []
self.warning_traces = []
@abc.abstractproperty
def object_name(self):
"""Name of the object we validated"""
pass
@abc.abstractproperty
def object_path(self):
"""Path of the object we validated"""
pass
@abc.abstractmethod
def validate(self, reset=True):
"""Run this method to generate the test results"""
if reset:
self.reset()
def report(self):
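        """Return the collected results as a single-entry dict keyed on
        object_path:
            {path: {'errors': [{'code': int, 'msg': str}, ...],
                    'traces': [...], 'warnings': [...],
                    'warning_traces': [...]}}
        """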
return {
self.object_path: OrderedDict([
('errors', [{'code': code, 'msg': msg} for code, msg in self.errors]),
('traces', self.traces[:]),
('warnings', [{'code': code, 'msg': msg} for code, msg in self.warnings]),
('warning_traces', self.warning_traces[:])
])
}
class ModuleValidator(Validator):
BLACKLIST_PATTERNS = ('.git*', '*.pyc', '*.pyo', '.*', '*.md', '*.txt')
BLACKLIST_FILES = frozenset(('.git', '.gitignore', '.travis.yml',
'shippable.yml',
'.gitattributes', '.gitmodules', 'COPYING',
'__init__.py', 'VERSION', 'test-docs.sh'))
BLACKLIST = BLACKLIST_FILES.union(BLACKLIST_MODULES)
PS_DOC_BLACKLIST = frozenset((
'async_status.ps1',
'slurp.ps1',
'setup.ps1'
))
def __init__(self, path, analyze_arg_spec=False, base_branch=None):
super(ModuleValidator, self).__init__()
self.path = path
self.basename = os.path.basename(self.path)
self.name, _ = os.path.splitext(self.basename)
self.analyze_arg_spec = analyze_arg_spec
self.base_branch = base_branch
self._python_module_override = False
with open(path) as f:
self.text = f.read()
self.length = len(self.text.splitlines())
try:
self.ast = ast.parse(self.text)
        except Exception:
self.ast = None
if base_branch:
self.base_module = self._get_base_file()
else:
self.base_module = None
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
if not self.base_module:
return
try:
os.remove(self.base_module)
        except OSError:
pass
@property
def object_name(self):
return self.basename
@property
def object_path(self):
return self.path
def _python_module(self):
if self.path.endswith('.py') or self._python_module_override:
return True
return False
def _powershell_module(self):
if self.path.endswith('.ps1'):
return True
return False
def _just_docs(self):
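        """True when every top-level statement is an assignment, i.e. the
        file carries only documentation variables and no executable code"""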
try:
for child in self.ast.body:
if not isinstance(child, ast.Assign):
return False
return True
except AttributeError:
return False
def _get_base_file(self):
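        """Write the base branch copy of this module (via ``git show``) to a
        temporary file and return its path, or None if the module does not
        exist on the base branch"""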
command = ['git', 'show', '%s:%s' % (self.base_branch, self.path)]
p = subprocess.Popen(command, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
        if p.returncode != 0:
return None
t = tempfile.NamedTemporaryFile(delete=False)
t.write(stdout)
t.close()
return t.name
def _is_new_module(self):
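        # "New" means: not a deprecated (underscore-prefixed) module, a base
        # branch was supplied, and the module could not be fetched from it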
return not self.object_name.startswith('_') and bool(self.base_branch) and not bool(self.base_module)
def _check_interpreter(self, powershell=False):
if powershell:
if not self.text.startswith('#!powershell\n'):
self.errors.append((102, 'Interpreter line is not "#!powershell"'))
return
if not self.text.startswith('#!/usr/bin/python'):
self.errors.append((101, 'Interpreter line is not "#!/usr/bin/python"'))
def _check_type_instead_of_isinstance(self, powershell=False):
if powershell:
return
for line_no, line in enumerate(self.text.splitlines()):
typekeyword = TYPE_REGEX.match(line)
if typekeyword:
self.errors.append((
403,
('Type comparison using type() found on '
'line %d. Use isinstance() instead' % (line_no + 1))
))
def _check_for_sys_exit(self):
if 'sys.exit(' in self.text:
self.errors.append(
(
205,
'sys.exit() call found. Should be exit_json/fail_json'
)
)
def _check_for_gpl3_header(self):
if ('GNU General Public License' not in self.text and
'version 3' not in self.text):
self.errors.append((105, 'GPLv3 license header not found'))
def _check_for_tabs(self):
for line_no, line in enumerate(self.text.splitlines()):
indent = INDENT_REGEX.search(line)
if indent and '\t' in line:
index = line.index('\t')
self.errors.append((
402,
'indentation contains tabs. line %d column %d' % (line_no + 1, index)
))
def _find_blacklist_imports(self):
for child in self.ast.body:
names = []
if isinstance(child, ast.Import):
names.extend(child.names)
elif isinstance(child, ast.TryExcept):
                bodies = child.body[:]  # copy so we don't mutate the AST node
for handler in child.handlers:
bodies.extend(handler.body)
for grandchild in bodies:
if isinstance(grandchild, ast.Import):
names.extend(grandchild.names)
for name in names:
for blacklist_import, options in BLACKLIST_IMPORTS.items():
if re.search(blacklist_import, name.name):
msg = options['msg']
new_only = options['new_only']
if self._is_new_module() and new_only:
self.errors.append(msg)
elif not new_only:
self.errors.append(msg)
def _find_module_utils(self, main):
linenos = []
found_basic = False
for child in self.ast.body:
if isinstance(child, (ast.Import, ast.ImportFrom)):
names = []
try:
                    # check the module name first: ast.Import has no .module
                    # attribute and a relative ImportFrom has module=None;
                    # both raise AttributeError before a bogus name is recorded
                    if child.module.endswith('.basic'):
                        found_basic = True
                    names.append(child.module)
except AttributeError:
pass
names.extend([n.name for n in child.names])
if [n for n in names if n.startswith('ansible.module_utils')]:
linenos.append(child.lineno)
for name in child.names:
if ('module_utils' in getattr(child, 'module', '') and
isinstance(name, ast.alias) and
name.name == '*'):
msg = (
208,
('module_utils imports should import specific '
'components, not "*". line %d' % child.lineno)
)
if self._is_new_module():
self.errors.append(msg)
else:
self.warnings.append(msg)
if (isinstance(name, ast.alias) and
name.name == 'basic'):
found_basic = True
if not linenos:
self.errors.append((201, 'Did not find a module_utils import'))
elif not found_basic:
self.warnings.append(
(
292,
'Did not find "ansible.module_utils.basic" import'
)
)
return linenos
def _get_first_callable(self):
linenos = []
for child in self.ast.body:
if isinstance(child, (ast.FunctionDef, ast.ClassDef)):
linenos.append(child.lineno)
return min(linenos)
def _find_main_call(self):
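        # main() may be invoked at module level or inside an
        # ``if __name__ == '__main__':`` guard; either way the call must be
        # the last statement in the module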
lineno = False
if_bodies = []
for child in self.ast.body:
if isinstance(child, ast.If):
try:
if child.test.left.id == '__name__':
if_bodies.extend(child.body)
except AttributeError:
pass
        bodies = self.ast.body[:]  # copy so if-bodies aren't grafted onto the tree
        bodies.extend(if_bodies)
for child in bodies:
if isinstance(child, ast.Expr):
if isinstance(child.value, ast.Call):
if (isinstance(child.value.func, ast.Name) and
child.value.func.id == 'main'):
lineno = child.lineno
if lineno < self.length - 1:
self.errors.append((
104,
'Call to main() not the last line'
))
if not lineno:
self.errors.append((103, 'Did not find a call to main'))
return lineno or 0
def _find_has_import(self):
for child in self.ast.body:
found_try_except_import = False
found_has = False
if isinstance(child, ast.TryExcept):
                bodies = child.body[:]  # copy so we don't mutate the AST node
for handler in child.handlers:
bodies.extend(handler.body)
for grandchild in bodies:
if isinstance(grandchild, ast.Import):
found_try_except_import = True
if isinstance(grandchild, ast.Assign):
                        for target in grandchild.targets:
                            if (isinstance(target, ast.Name) and
                                    target.id.lower().startswith('has_')):
                                found_has = True
if found_try_except_import and not found_has:
self.warnings.append((
291,
                'Found Try/Except block without HAS_ assignment'
))
def _ensure_imports_below_docs(self, doc_info, first_callable):
        doc_lines = [doc_info[key]['lineno'] for key in doc_info
                     if doc_info[key]['lineno']]
        if not doc_lines:
            # no documentation variables present; _validate_docs has already
            # flagged that, so there is nothing to anchor the imports against
            return
        min_doc_line = min(doc_lines)
        max_doc_line = max(
            [doc_info[key]['end_lineno'] for key in doc_info
             if doc_info[key]['end_lineno']]
        )
import_lines = []
for child in self.ast.body:
if isinstance(child, (ast.Import, ast.ImportFrom)):
import_lines.append(child.lineno)
if child.lineno < min_doc_line:
self.errors.append((
106,
('Import found before documentation variables. '
'All imports must appear below '
'DOCUMENTATION/EXAMPLES/RETURN/ANSIBLE_METADATA. '
'line %d' % (child.lineno,))
))
break
elif isinstance(child, ast.TryExcept):
                bodies = child.body[:]  # copy so we don't mutate the AST node
for handler in child.handlers:
bodies.extend(handler.body)
for grandchild in bodies:
if isinstance(grandchild, (ast.Import, ast.ImportFrom)):
import_lines.append(grandchild.lineno)
if grandchild.lineno < min_doc_line:
self.errors.append((
106,
('Import found before documentation '
'variables. All imports must appear below '
'DOCUMENTATION/EXAMPLES/RETURN/'
'ANSIBLE_METADATA. line %d' %
(child.lineno,))
))
break
for import_line in import_lines:
if not (max_doc_line < import_line < first_callable):
msg = (
107,
('Imports should be directly below DOCUMENTATION/EXAMPLES/'
'RETURN/ANSIBLE_METADATA. line %d' % import_line)
)
if self._is_new_module():
self.errors.append(msg)
else:
self.warnings.append(msg)
def _find_ps_replacers(self):
if 'WANT_JSON' not in self.text:
self.errors.append((206, 'WANT_JSON not found in module'))
if REPLACER_WINDOWS not in self.text:
self.errors.append((207, '"%s" not found in module' % REPLACER_WINDOWS))
def _find_ps_docs_py_file(self):
if self.object_name in self.PS_DOC_BLACKLIST:
return
py_path = self.path.replace('.ps1', '.py')
if not os.path.isfile(py_path):
self.errors.append((503, 'Missing python documentation file'))
def _get_docs(self):
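        """Collect the module-level documentation assignments, recording
        each variable's value, starting line and ending line"""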
docs = {
'DOCUMENTATION': {
'value': None,
'lineno': 0,
'end_lineno': 0,
},
'EXAMPLES': {
'value': None,
'lineno': 0,
'end_lineno': 0,
},
'RETURN': {
'value': None,
'lineno': 0,
'end_lineno': 0,
},
'ANSIBLE_METADATA': {
'value': None,
'lineno': 0,
'end_lineno': 0,
}
}
        for child in self.ast.body:
            if isinstance(child, ast.Assign):
                for grandchild in child.targets:
                    if grandchild.id in ('DOCUMENTATION', 'EXAMPLES', 'RETURN'):
                        docs[grandchild.id]['value'] = child.value.s
                        docs[grandchild.id]['lineno'] = child.lineno
                        docs[grandchild.id]['end_lineno'] = (
                            child.lineno + len(child.value.s.splitlines())
                        )
                    elif grandchild.id == 'ANSIBLE_METADATA':
                        docs['ANSIBLE_METADATA']['value'] = child.value
                        docs['ANSIBLE_METADATA']['lineno'] = child.lineno
                        try:
                            docs['ANSIBLE_METADATA']['end_lineno'] = (
                                child.lineno + len(child.value.s.splitlines())
                            )
                        except AttributeError:
                            # ANSIBLE_METADATA is typically an ast.Dict rather
                            # than a string literal; fall back to the last
                            # value's line number
                            docs['ANSIBLE_METADATA']['end_lineno'] = (
                                child.value.values[-1].lineno
                            )
return docs
def _validate_docs_schema(self, doc, schema, name, error_code):
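        """Validate doc against a voluptuous schema (and each entry of
        doc['options'] against option_schema), recording any failures under
        the given error_code"""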
errors = []
try:
schema(doc)
except Exception as e:
for error in e.errors:
error.data = doc
errors.extend(e.errors)
options = doc.get('options', {})
for key, option in (options or {}).items():
try:
option_schema(option)
except Exception as e:
for error in e.errors:
error.path[:0] = ['options', key]
error.data = option
errors.extend(e.errors)
for error in errors:
path = [str(p) for p in error.path]
if isinstance(error.data, dict):
error_message = humanize_error(error.data, error)
else:
error_message = error
self.errors.append((
error_code,
'%s.%s: %s' % (name, '.'.join(path), error_message)
))
def _validate_docs(self):
doc_info = self._get_docs()
if not bool(doc_info['DOCUMENTATION']['value']):
self.errors.append((301, 'No DOCUMENTATION provided'))
else:
doc, errors, traces = parse_yaml(
doc_info['DOCUMENTATION']['value'],
doc_info['DOCUMENTATION']['lineno'],
self.name, 'DOCUMENTATION'
)
self.errors.extend([(302, e) for e in errors])
self.traces.extend(traces)
if not errors and not traces:
with CaptureStd():
try:
get_docstring(self.path, verbose=True)
except AssertionError:
fragment = doc['extends_documentation_fragment']
self.errors.append((
303,
'DOCUMENTATION fragment missing: %s' % fragment
))
except Exception:
self.traces.append(traceback.format_exc())
self.errors.append((
304,
'Unknown DOCUMENTATION error, see TRACE'
))
if 'options' in doc and doc['options'] is None and doc.get('extends_documentation_fragment'):
self.errors.append((
305,
('DOCUMENTATION.options must be a dictionary/hash when used '
'with DOCUMENTATION.extends_documentation_fragment')
))
if self.object_name.startswith('_') and not os.path.islink(self.object_path):
if 'deprecated' not in doc or not doc.get('deprecated'):
self.errors.append((
318,
'Module deprecated, but DOCUMENTATION.deprecated is missing'
))
self._validate_docs_schema(doc, doc_schema, 'DOCUMENTATION', 305)
self._check_version_added(doc)
self._check_for_new_args(doc)
if not bool(doc_info['EXAMPLES']['value']):
self.errors.append((310, 'No EXAMPLES provided'))
else:
_, errors, traces = parse_yaml(doc_info['EXAMPLES']['value'],
doc_info['EXAMPLES']['lineno'],
self.name, 'EXAMPLES', load_all=True)
self.errors.extend([(311, error) for error in errors])
self.traces.extend(traces)
if not bool(doc_info['RETURN']['value']):
if self._is_new_module():
self.errors.append((312, 'No RETURN documentation provided'))
else:
self.warnings.append((312, 'No RETURN provided'))
else:
_, errors, traces = parse_yaml(doc_info['RETURN']['value'],
doc_info['RETURN']['lineno'],
self.name, 'RETURN')
self.errors.extend([(313, error) for error in errors])
self.traces.extend(traces)
if not bool(doc_info['ANSIBLE_METADATA']['value']):
self.errors.append((314, 'No ANSIBLE_METADATA provided'))
else:
metadata = None
if isinstance(doc_info['ANSIBLE_METADATA']['value'], ast.Dict):
metadata = ast.literal_eval(
doc_info['ANSIBLE_METADATA']['value']
)
else:
metadata, errors, traces = parse_yaml(
doc_info['ANSIBLE_METADATA']['value'].s,
doc_info['ANSIBLE_METADATA']['lineno'],
self.name, 'ANSIBLE_METADATA'
)
self.errors.extend([(315, error) for error in errors])
self.traces.extend(traces)
if metadata:
self._validate_docs_schema(metadata, metadata_schema,
'ANSIBLE_METADATA', 316)
return doc_info
def _check_version_added(self, doc):
if not self._is_new_module():
return
try:
version_added = StrictVersion(str(doc.get('version_added', '0.0')))
except ValueError:
version_added = doc.get('version_added', '0.0')
self.errors.append((
306,
'version_added is not a valid version number: %r' % version_added
))
return
should_be = '.'.join(ansible_version.split('.')[:2])
strict_ansible_version = StrictVersion(should_be)
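        # i.e. version_added != strict_ansible_version, spelled with the
        # ordering operators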
if (version_added < strict_ansible_version or
strict_ansible_version < version_added):
self.errors.append((
307,
'version_added should be %s. Currently %s' % (should_be, version_added)
))
def _validate_argument_spec(self):
if not self.analyze_arg_spec:
return
spec = get_argument_spec(self.path)
for arg, data in spec.items():
if data.get('required') and data.get('default', object) != object:
self.errors.append((
317,
('"%s" is marked as required but specifies '
'a default. Arguments with a default '
'should not be marked as required' % arg)
))
def _check_for_new_args(self, doc):
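        # Diff the documented options against the base branch copy of the
        # module; any option that is new must declare a version_added
        # matching the current ansible major.minor version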
if not self.base_branch or self._is_new_module():
return
with CaptureStd():
try:
existing_doc, _, _, _ = get_docstring(self.base_module, verbose=True)
existing_options = existing_doc.get('options', {})
except AssertionError:
fragment = doc['extends_documentation_fragment']
self.warnings.append((
392,
'Pre-existing DOCUMENTATION fragment missing: %s' % fragment
))
return
            except Exception:
                self.warning_traces.append(traceback.format_exc())
self.warnings.append((
391,
('Unknown pre-existing DOCUMENTATION '
'error, see TRACE. Submodule refs may '
'need updated')
))
return
try:
mod_version_added = StrictVersion(
str(existing_doc.get('version_added', '0.0'))
)
except ValueError:
mod_version_added = StrictVersion('0.0')
options = doc.get('options', {})
should_be = '.'.join(ansible_version.split('.')[:2])
strict_ansible_version = StrictVersion(should_be)
for option, details in options.items():
new = not bool(existing_options.get(option))
if not new:
continue
try:
version_added = StrictVersion(
str(details.get('version_added', '0.0'))
)
except ValueError:
version_added = details.get('version_added', '0.0')
self.errors.append((
308,
('version_added for new option (%s) '
'is not a valid version number: %r' %
(option, version_added))
))
continue
            except Exception:
# If there is any other exception it should have been caught
# in schema validation, so we won't duplicate errors by
# listing it again
continue
if (strict_ansible_version != mod_version_added and
(version_added < strict_ansible_version or
strict_ansible_version < version_added)):
self.errors.append((
309,
('version_added for new option (%s) should '
'be %s. Currently %s' %
(option, should_be, version_added))
))
def validate(self):
super(ModuleValidator, self).validate()
# Blacklists -- these files are not checked
if not frozenset((self.basename,
self.name)).isdisjoint(self.BLACKLIST):
return
for pat in self.BLACKLIST_PATTERNS:
if fnmatch(self.basename, pat):
return
# if self._powershell_module():
# self.warnings.append('Cannot check powershell modules at this '
# 'time. Skipping')
# return
if not self._python_module() and not self._powershell_module():
self.errors.append((
501,
('Official Ansible modules must have a .py '
'extension for python modules or a .ps1 '
'for powershell modules')
))
self._python_module_override = True
if self._python_module() and self.ast is None:
self.errors.append((401, 'Python SyntaxError while parsing module'))
try:
compile(self.text, self.path, 'exec')
except Exception:
self.traces.append(traceback.format_exc())
return
if self._python_module():
doc_info = self._validate_docs()
if self._python_module() and not self._just_docs():
self._validate_argument_spec()
self._check_for_sys_exit()
self._find_blacklist_imports()
main = self._find_main_call()
self._find_module_utils(main)
self._find_has_import()
self._check_for_tabs()
first_callable = self._get_first_callable()
self._ensure_imports_below_docs(doc_info, first_callable)
if self._powershell_module():
self._find_ps_replacers()
self._find_ps_docs_py_file()
self._check_for_gpl3_header()
if not self._just_docs():
self._check_interpreter(powershell=self._powershell_module())
self._check_type_instead_of_isinstance(
powershell=self._powershell_module()
)
class PythonPackageValidator(Validator):
BLACKLIST_FILES = frozenset(('__pycache__',))
def __init__(self, path):
super(PythonPackageValidator, self).__init__()
self.path = path
self.basename = os.path.basename(path)
@property
def object_name(self):
return self.basename
@property
def object_path(self):
return self.path
def validate(self):
super(PythonPackageValidator, self).validate()
if self.basename in self.BLACKLIST_FILES:
return
init_file = os.path.join(self.path, '__init__.py')
if not os.path.exists(init_file):
self.errors.append(
(
502,
'Ansible module subdirectories must contain an __init__.py'
)
)
def re_compile(value):
"""
Argparse expects things to raise TypeError, re.compile raises an re.error
exception
This function is a shorthand to convert the re.error exception to a
TypeError
"""
try:
return re.compile(value)
except re.error as e:
raise TypeError(e)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('modules', nargs='+',
help='Path to module or module directory')
parser.add_argument('-w', '--warnings', help='Show warnings',
action='store_true')
parser.add_argument('--exclude', help='RegEx exclusion pattern',
type=re_compile)
parser.add_argument('--arg-spec', help='Analyze module argument spec',
action='store_true', default=False)
parser.add_argument('--base-branch', default=None,
help='Used in determining if new options were added')
parser.add_argument('--format', choices=['json', 'plain'], default='plain',
help='Output format. Default: "%(default)s"')
parser.add_argument('--output', default='-',
help='Output location, use "-" for stdout. '
'Default "%(default)s"')
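    # Typical invocations (paths here are illustrative):
    #   validate-modules lib/ansible/modules/cloud/amazon/ec2.py --arg-spec
    #   validate-modules lib/ansible/modules/ --base-branch devel \
    #       --format json --output report.json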
args = parser.parse_args()
args.modules[:] = [m.rstrip('/') for m in args.modules]
reports = OrderedDict()
for module in args.modules:
        if os.path.isfile(module):
            path = module
            if args.exclude and args.exclude.search(path):
                continue  # an excluded file is skipped, not a fatal exit
            with ModuleValidator(path, analyze_arg_spec=args.arg_spec,
                                 base_branch=args.base_branch) as mv:
                mv.validate()
                reports.update(mv.report())
            continue  # nothing to walk for a single file
for root, dirs, files in os.walk(module):
basedir = root[len(module)+1:].split('/', 1)[0]
if basedir in BLACKLIST_DIRS:
continue
for dirname in dirs:
if root == module and dirname in BLACKLIST_DIRS:
continue
path = os.path.join(root, dirname)
if args.exclude and args.exclude.search(path):
continue
pv = PythonPackageValidator(path)
pv.validate()
reports.update(pv.report())
for filename in files:
path = os.path.join(root, filename)
if args.exclude and args.exclude.search(path):
continue
with ModuleValidator(path, analyze_arg_spec=args.arg_spec,
base_branch=args.base_branch) as mv:
mv.validate()
reports.update(mv.report())
if args.format == 'plain':
sys.exit(Reporter.plain(reports, warnings=args.warnings, output=args.output))
else:
sys.exit(Reporter.json(reports, warnings=args.warnings, output=args.output))
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
pass