@@ -236,7 +236,7 @@ class ModuleValidator(Validator):
     WHITELIST_FUTURE_IMPORTS = frozenset(('absolute_import', 'division', 'print_function'))
-    def __init__(self, path, analyze_arg_spec=False, base_branch=None, git_cache=None, reporter=None):
+    def __init__(self, path, analyze_arg_spec=False, collection=None, base_branch=None, git_cache=None, reporter=None):
         super(ModuleValidator, self).__init__(reporter=reporter or Reporter())
         self.path = path
@@ -245,6 +245,8 @@ class ModuleValidator(Validator):
         self.analyze_arg_spec = analyze_arg_spec
+        self.collection = collection
         self.base_branch = base_branch
         self.git_cache = git_cache or GitCache()
@@ -283,6 +285,11 @@ class ModuleValidator(Validator):
     def object_path(self):
         return self.path
+    def _get_collection_meta(self):
+        """Implement if we need this for version_added comparisons
+        """
+        pass
     def _python_module(self):
         if self.path.endswith('.py') or self._python_module_override:
             return True
@@ -886,44 +893,45 @@ class ModuleValidator(Validator):
         # Have to check the metadata first so that we know if the module is removed or deprecated
         metadata = None
-        if not bool(doc_info['ANSIBLE_METADATA']['value']):
-            self.reporter.error(
-                path=self.object_path,
-                code=314,
-                msg='No ANSIBLE_METADATA provided'
-            )
-        else:
-            if isinstance(doc_info['ANSIBLE_METADATA']['value'], ast.Dict):
-                metadata = ast.literal_eval(
-                    doc_info['ANSIBLE_METADATA']['value']
-                )
-            else:
-                # ANSIBLE_METADATA doesn't properly support YAML
-                # we should consider removing it from the spec
-                # Below code kept, incase we change our minds
-                # metadata, errors, traces = parse_yaml(
-                #     doc_info['ANSIBLE_METADATA']['value'].s,
-                #     doc_info['ANSIBLE_METADATA']['lineno'],
-                #     self.name, 'ANSIBLE_METADATA'
-                # )
-                # for error in errors:
-                #     self.reporter.error(
-                #         path=self.object_path,
-                #         code=315,
-                #         **error
-                #     )
-                # for trace in traces:
-                #     self.reporter.trace(
-                #         path=self.object_path,
-                #         tracebk=trace
-                #     )
+        if not self.collection:
+            if not bool(doc_info['ANSIBLE_METADATA']['value']):
                 self.reporter.error(
                     path=self.object_path,
-                    code=315,
-                    msg='ANSIBLE_METADATA was not provided as a dict, YAML not supported'
+                    code=314,
+                    msg='No ANSIBLE_METADATA provided'
                 )
+            else:
+                if isinstance(doc_info['ANSIBLE_METADATA']['value'], ast.Dict):
+                    metadata = ast.literal_eval(
+                        doc_info['ANSIBLE_METADATA']['value']
+                    )
+                else:
+                    # ANSIBLE_METADATA doesn't properly support YAML
+                    # we should consider removing it from the spec
+                    # Below code kept, incase we change our minds
+                    # metadata, errors, traces = parse_yaml(
+                    #     doc_info['ANSIBLE_METADATA']['value'].s,
+                    #     doc_info['ANSIBLE_METADATA']['lineno'],
+                    #     self.name, 'ANSIBLE_METADATA'
+                    # )
+                    # for error in errors:
+                    #     self.reporter.error(
+                    #         path=self.object_path,
+                    #         code=315,
+                    #         **error
+                    #     )
+                    # for trace in traces:
+                    #     self.reporter.trace(
+                    #         path=self.object_path,
+                    #         tracebk=trace
+                    #     )
+                    self.reporter.error(
+                        path=self.object_path,
+                        code=315,
+                        msg='ANSIBLE_METADATA was not provided as a dict, YAML not supported'
+                    )
         if metadata:
             self._validate_docs_schema(metadata, metadata_1_1_schema(),
@@ -968,6 +976,7 @@ class ModuleValidator(Validator):
                     tracebk=trace
                 )
             if not errors and not traces:
+                missing_fragment = False
                 with CaptureStd():
                     try:
                         get_docstring(self.path, fragment_loader, verbose=True)
@@ -978,6 +987,7 @@ class ModuleValidator(Validator):
                             code=303,
                             msg='DOCUMENTATION fragment missing: %s' % fragment
                         )
+                        missing_fragment = True
                     except Exception as e:
                         self.reporter.trace(
                             path=self.object_path,
@@ -989,7 +999,8 @@ class ModuleValidator(Validator):
                             msg='Unknown DOCUMENTATION error, see TRACE: %s' % e
                         )
-                    add_fragments(doc, self.object_path, fragment_loader=fragment_loader)
+                    if not missing_fragment:
+                        add_fragments(doc, self.object_path, fragment_loader=fragment_loader)
             if 'options' in doc and doc['options'] is None:
                 self.reporter.error(
@@ -1006,13 +1017,30 @@ class ModuleValidator(Validator):
             if os.path.islink(self.object_path):
                 # This module has an alias, which we can tell as it's a symlink
                 # Rather than checking for `module: $filename` we need to check against the true filename
-                self._validate_docs_schema(doc, doc_schema(os.readlink(self.object_path).split('.')[0]), 'DOCUMENTATION', 305)
+                self._validate_docs_schema(
+                    doc,
+                    doc_schema(
+                        os.readlink(self.object_path).split('.')[0],
+                        version_added=not bool(self.collection)
+                    ),
+                    'DOCUMENTATION',
+                    305
+                )
             else:
                 # This is the normal case
-                self._validate_docs_schema(doc, doc_schema(self.object_name.split('.')[0]), 'DOCUMENTATION', 305)
+                self._validate_docs_schema(
+                    doc,
+                    doc_schema(
+                        self.object_name.split('.')[0],
+                        version_added=not bool(self.collection)
+                    ),
+                    'DOCUMENTATION',
+                    305
+                )
-            existing_doc = self._check_for_new_args(doc, metadata)
-            self._check_version_added(doc, existing_doc)
+            if not self.collection:
+                existing_doc = self._check_for_new_args(doc, metadata)
+                self._check_version_added(doc, existing_doc)
         if not bool(doc_info['EXAMPLES']['value']):
             self.reporter.error(
@@ -1723,6 +1751,11 @@ def run():
     parser.add_argument('--output', default='-',
                         help='Output location, use "-" for stdout. '
                              'Default "%(default)s"')
+    parser.add_argument('--collection',
+                        help='Specifies the path to the collection, when '
+                             'validating files within a collection. Ensure '
+                             'that ANSIBLE_COLLECTIONS_PATHS is set so the '
+                             'contents of the collection can be located')
     args = parser.parse_args()
@@ -1740,7 +1773,7 @@ def run():
                 continue
             if ModuleValidator.is_blacklisted(path):
                 continue
-            with ModuleValidator(path, analyze_arg_spec=args.arg_spec,
+            with ModuleValidator(path, collection=args.collection, analyze_arg_spec=args.arg_spec,
                                  base_branch=args.base_branch, git_cache=git_cache, reporter=reporter) as mv1:
                 mv1.validate()
             check_dirs.add(os.path.dirname(path))
@@ -1763,13 +1796,14 @@ def run():
                     continue
                 if ModuleValidator.is_blacklisted(path):
                     continue
-                with ModuleValidator(path, analyze_arg_spec=args.arg_spec,
+                with ModuleValidator(path, collection=args.collection, analyze_arg_spec=args.arg_spec,
                                      base_branch=args.base_branch, git_cache=git_cache, reporter=reporter) as mv2:
                     mv2.validate()
-    for path in sorted(check_dirs):
-        pv = PythonPackageValidator(path, reporter=reporter)
-        pv.validate()
+    if not args.collection:
+        for path in sorted(check_dirs):
+            pv = PythonPackageValidator(path, reporter=reporter)
+            pv.validate()
     if args.format == 'plain':
         sys.exit(reporter.plain(warnings=args.warnings, output=args.output))