@@ -15,6 +15,22 @@ from ansible.module_utils.common.collections import is_iterable
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
from ansible.module_utils.common.text.formatters import lenient_lowercase
from ansible.module_utils.common.warnings import warn
from ansible.module_utils.errors import (
    AliasError,
    AnsibleFallbackNotFound,
    AnsibleValidationErrorMultiple,
    ArgumentTypeError,
    ArgumentValueError,
    ElementError,
    MutuallyExclusiveError,
    NoLogError,
    RequiredByError,
    RequiredError,
    RequiredIfError,
    RequiredOneOfError,
    RequiredTogetherError,
    SubParameterTypeError,
)
from ansible.module_utils.parsing.convert_bool import BOOLEANS_FALSE, BOOLEANS_TRUE
from ansible.module_utils.common._collections_compat import (

@@ -59,6 +75,13 @@ from ansible.module_utils.common.validation import (
# Python2 & 3 way to get NoneType
NoneType = type(None)

_ADDITIONAL_CHECKS = (
    {'func': check_required_together, 'attr': 'required_together', 'err': RequiredTogetherError},
    {'func': check_required_one_of, 'attr': 'required_one_of', 'err': RequiredOneOfError},
    {'func': check_required_if, 'attr': 'required_if', 'err': RequiredIfError},
    {'func': check_required_by, 'attr': 'required_by', 'err': RequiredByError},
)
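
# Illustrative note (editor's addition, not part of the module): each entry in
# _ADDITIONAL_CHECKS pairs a checker from validation.py with the argument_spec
# attribute it reads and the error class used to wrap failures, e.g.:
#
#   >>> check = _ADDITIONAL_CHECKS[0]
#   >>> (check['func'].__name__, check['attr'], check['err'].__name__)
#   ('check_required_together', 'required_together', 'RequiredTogetherError')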

# if adding boolean attribute, also add to PASS_BOOL
# some of this dupes defaults from controller config
PASS_VARS = {

@@ -97,8 +120,221 @@ DEFAULT_TYPE_VALIDATORS = {
}

def _get_type_validator(wanted):
    """Returns the callable used to validate a wanted type and the type name.

    :arg wanted: String or callable. If a string, get the corresponding
        validation function from DEFAULT_TYPE_VALIDATORS. If callable,
        get the name of the custom callable and return that for the type_checker.

    :returns: Tuple of callable function or None, and a string that is the name
        of the wanted type.
    """

    # Use one of our builtin validators.
    if not callable(wanted):
        if wanted is None:
            # Default type for parameters
            wanted = 'str'

        type_checker = DEFAULT_TYPE_VALIDATORS.get(wanted)

    # Use the custom callable for validation.
    else:
        type_checker = wanted
        wanted = getattr(wanted, '__name__', to_native(type(wanted)))

    return type_checker, wanted
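
# Usage sketch (editor's addition, illustrative only): a known type name maps to
# a builtin checker, None falls back to 'str', and a custom callable is returned
# as-is together with its own name, e.g.:
#
#   >>> checker, name = _get_type_validator('int')
#   >>> checker('42'), name
#   (42, 'int')
#   >>> _get_type_validator(None)[1]
#   'str'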

def _get_legal_inputs(argument_spec, parameters, aliases=None):
    if aliases is None:
        aliases = _handle_aliases(argument_spec, parameters)

    return list(aliases.keys()) + list(argument_spec.keys())


def _get_unsupported_parameters(argument_spec, parameters, legal_inputs=None, options_context=None):
    """Check keys in parameters against those provided in legal_inputs
    to ensure they contain legal values. If legal_inputs are not supplied,
    they will be generated using the argument_spec.

    :arg argument_spec: Dictionary of parameters, their type, and valid values.
    :arg parameters: Dictionary of parameters.
    :arg legal_inputs: List of valid key names (parameter and alias names). Overrides
        values in argument_spec.
    :arg options_context: List of parent keys for tracking the context of where
        a parameter is defined.

    :returns: Set of unsupported parameters. Empty set if no unsupported parameters
        are found.
    """

    if legal_inputs is None:
        legal_inputs = _get_legal_inputs(argument_spec, parameters)

    unsupported_parameters = set()
    for k in parameters.keys():
        if k not in legal_inputs:
            context = k
            if options_context:
                context = tuple(options_context + [k])

            unsupported_parameters.add(context)

    return unsupported_parameters
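
# Usage sketch (editor's addition, hypothetical spec): keys that are neither
# options nor aliases are reported; with options_context they come back as
# tuples so nested offenders can be located, e.g.:
#
#   >>> spec = {'name': {'type': 'str', 'aliases': ['pkg']}}
#   >>> _get_unsupported_parameters(spec, {'name': 'zip', 'colour': 'blue'})
#   {'colour'}
#   >>> _get_unsupported_parameters(spec, {'colour': 'blue'}, options_context=['opts'])
#   {('opts', 'colour')}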

def _handle_aliases(argument_spec, parameters, alias_warnings=None, alias_deprecations=None):
    """Process aliases from an argument_spec including warnings and deprecations.

    Modify ``parameters`` by adding a new key for each alias with the supplied
    value from ``parameters``.

    If a list is provided to the alias_warnings parameter, it will be filled with tuples
    (option, alias) in every case where both an option and its alias are specified.

    If a list is provided to alias_deprecations, it will be populated with dictionaries,
    each containing deprecation information for each alias found in argument_spec.

    :param argument_spec: Dictionary of parameters, their type, and valid values.
    :type argument_spec: dict

    :param parameters: Dictionary of parameters.
    :type parameters: dict

    :param alias_warnings: Optional list, filled with (option, alias) tuples when both are supplied.
    :type alias_warnings: list

    :param alias_deprecations: Optional list, filled with deprecation dicts from deprecated_aliases.
    :type alias_deprecations: list
    """

    aliases_results = {}  # alias:canon

    for (k, v) in argument_spec.items():
        aliases = v.get('aliases', None)
        default = v.get('default', None)
        required = v.get('required', False)

        if alias_deprecations is not None:
            for alias in argument_spec[k].get('deprecated_aliases', []):
                if alias.get('name') in parameters:
                    alias_deprecations.append(alias)

        if default is not None and required:
            # not alias specific but this is a good place to check this
            raise ValueError("internal error: required and default are mutually exclusive for %s" % k)

        if aliases is None:
            continue

        if not is_iterable(aliases) or isinstance(aliases, (binary_type, text_type)):
            raise TypeError('internal error: aliases must be a list or tuple')

        for alias in aliases:
            aliases_results[alias] = k
            if alias in parameters:
                if k in parameters and alias_warnings is not None:
                    alias_warnings.append((k, alias))
                parameters[k] = parameters[alias]

    return aliases_results
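
# Usage sketch (editor's addition, hypothetical spec): the alias value is copied
# onto the canonical key and the alias:canonical mapping is returned, e.g.:
#
#   >>> params = {'pkg': 'zip'}
#   >>> _handle_aliases({'name': {'type': 'str', 'aliases': ['pkg']}}, params)
#   {'pkg': 'name'}
#   >>> params
#   {'pkg': 'zip', 'name': 'zip'}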

def _list_deprecations(argument_spec, parameters, prefix=''):
    """Return a list of deprecations

    :arg argument_spec: An argument spec dictionary
    :arg parameters: Dictionary of parameters

    :returns: List of dictionaries containing a message and version in which
        the deprecated parameter will be removed, or an empty list::

            [{'msg': "Param 'deptest' is deprecated. See the module docs for more information", 'version': '2.9'}]
    """

    deprecations = []
    for arg_name, arg_opts in argument_spec.items():
        if arg_name in parameters:
            if prefix:
                sub_prefix = '%s["%s"]' % (prefix, arg_name)
            else:
                sub_prefix = arg_name
            if arg_opts.get('removed_at_date') is not None:
                deprecations.append({
                    'msg': "Param '%s' is deprecated. See the module docs for more information" % sub_prefix,
                    'date': arg_opts.get('removed_at_date'),
                    'collection_name': arg_opts.get('removed_from_collection'),
                })
            elif arg_opts.get('removed_in_version') is not None:
                deprecations.append({
                    'msg': "Param '%s' is deprecated. See the module docs for more information" % sub_prefix,
                    'version': arg_opts.get('removed_in_version'),
                    'collection_name': arg_opts.get('removed_from_collection'),
                })
            # Check sub-argument spec
            sub_argument_spec = arg_opts.get('options')
            if sub_argument_spec is not None:
                sub_arguments = parameters[arg_name]
                if isinstance(sub_arguments, Mapping):
                    sub_arguments = [sub_arguments]
                if isinstance(sub_arguments, list):
                    for sub_params in sub_arguments:
                        if isinstance(sub_params, Mapping):
                            deprecations.extend(_list_deprecations(sub_argument_spec, sub_params, prefix=sub_prefix))

    return deprecations
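
# Usage sketch (editor's addition, hypothetical spec): an entry is produced only
# when the deprecated parameter is actually supplied, e.g.:
#
#   >>> spec = {'deptest': {'type': 'str', 'removed_in_version': '2.9'}}
#   >>> _list_deprecations(spec, {'deptest': 'value'})[0]['version']
#   '2.9'
#   >>> _list_deprecations(spec, {})
#   []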

def _list_no_log_values(argument_spec, params):
    """Return set of no log values

    :arg argument_spec: An argument spec dictionary
    :arg params: Dictionary of all parameters

    :returns: Set of strings that should be hidden from output::

        {'secret_dict_value', 'secret_list_item_one', 'secret_list_item_two', 'secret_string'}
    """

    no_log_values = set()
    for arg_name, arg_opts in argument_spec.items():
        if arg_opts.get('no_log', False):
            # Find the value for the no_log'd param
            no_log_object = params.get(arg_name, None)

            if no_log_object:
                try:
                    no_log_values.update(_return_datastructure_name(no_log_object))
                except TypeError as e:
                    raise TypeError('Failed to convert "%s": %s' % (arg_name, to_native(e)))

        # Get no_log values from suboptions
        sub_argument_spec = arg_opts.get('options')
        if sub_argument_spec is not None:
            wanted_type = arg_opts.get('type')
            sub_parameters = params.get(arg_name)

            if sub_parameters is not None:
                if wanted_type == 'dict' or (wanted_type == 'list' and arg_opts.get('elements', '') == 'dict'):
                    # Sub parameters can be a dict or list of dicts. Ensure parameters are always a list.
                    if not isinstance(sub_parameters, list):
                        sub_parameters = [sub_parameters]

                    for sub_param in sub_parameters:
                        # Validate dict fields in case they came in as strings
                        if isinstance(sub_param, string_types):
                            sub_param = check_type_dict(sub_param)

                        if not isinstance(sub_param, Mapping):
                            raise TypeError("Value '{1}' in the sub parameter field '{0}' must be a {2}, "
                                            "not '{1.__class__.__name__}'".format(arg_name, sub_param, wanted_type))

                        no_log_values.update(_list_no_log_values(sub_argument_spec, sub_param))

    return no_log_values
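
# Usage sketch (editor's addition, hypothetical spec): values of no_log options
# are collected, including from suboptions, so they can later be scrubbed with
# remove_values()/sanitize_keys(), e.g.:
#
#   >>> spec = {'token': {'type': 'str', 'no_log': True},
#   ...         'auth': {'type': 'dict', 'options': {'password': {'no_log': True}}}}
#   >>> sorted(_list_no_log_values(spec, {'token': 's3cret', 'auth': {'password': 'hunter2'}}))
#   ['hunter2', 's3cret']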

def _return_datastructure_name(obj):

@@ -217,6 +453,43 @@ def _remove_values_conditions(value, no_log_strings, deferred_removals):
    return value

def _set_defaults(argument_spec, parameters, set_default=True):
    """Set default values for parameters when no value is supplied.

    Modifies parameters directly.

    :param argument_spec: Argument spec
    :type argument_spec: dict

    :param parameters: Parameters to evaluate
    :type parameters: dict

    :param set_default: Whether or not to set the default values
    :type set_default: bool

    :returns: Set of strings that should not be logged.
    :rtype: set
    """

    no_log_values = set()
    for param, value in argument_spec.items():
        # TODO: Change the default value from None to Sentinel to differentiate between
        # user supplied None and a default value set by this function.
        default = value.get('default', None)

        # This prevents setting defaults on required items on the 1st run,
        # otherwise will set things without a default to None on the 2nd.
        if param not in parameters and (default is not None or set_default):
            # Make sure any default value for a no_log field is masked.
            if value.get('no_log', False) and default:
                no_log_values.add(default)

            parameters[param] = default

    return no_log_values
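
# Usage sketch (editor's addition, hypothetical spec): the set_default=False pass
# applies only explicit defaults (so required checks can still fail on missing
# options); the later set_default=True pass fills everything else with None, e.g.:
#
#   >>> spec = {'state': {'default': 'present'}, 'name': {'type': 'str'}}
#   >>> params = {}
#   >>> _set_defaults(spec, params, set_default=False)
#   set()
#   >>> params
#   {'state': 'present'}
#   >>> _set_defaults(spec, params)
#   set()
#   >>> params
#   {'state': 'present', 'name': None}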

def _sanitize_keys_conditions(value, no_log_strings, ignore_keys, deferred_removals):
    """Helper method to sanitize_keys() to build deferred_removals and avoid deep recursion."""
    if isinstance(value, (text_type, binary_type)):

@@ -255,366 +528,23 @@ def _sanitize_keys_conditions(value, no_log_strings, ignore_keys, deferred_remov
    raise TypeError('Value of unknown type: %s, %s' % (type(value), value))

def _validate_elements(wanted_type, parameter, values, options_context=None, errors=None):

    if errors is None:
        errors = AnsibleValidationErrorMultiple()

    type_checker, wanted_element_type = _get_type_validator(wanted_type)
    validated_parameters = []
    # Get param name for strings so we can later display this value in a useful error message if needed
    # Only pass 'kwargs' to our checkers and ignore custom callable checkers
    kwargs = {}
    if wanted_element_type == 'str' and isinstance(wanted_type, string_types):
        if isinstance(parameter, string_types):
            kwargs['param'] = parameter
        elif isinstance(parameter, dict):
            kwargs['param'] = list(parameter.keys())[0]

    for value in values:
        try:
            validated_parameters.append(type_checker(value, **kwargs))
        except (TypeError, ValueError) as e:

@@ -622,11 +552,11 @@ def validate_elements(wanted_type, parameter, values, options_context=None, erro
            if options_context:
                msg += " found in '%s'" % " -> ".join(options_context)
            msg += " is of type %s and we were unable to convert to %s: %s" % (type(value), wanted_element_type, to_native(e))
            errors.append(ElementError(msg))

    return validated_parameters
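
# Usage sketch (editor's addition, hypothetical values): each element is run
# through the checker for the declared element type; a value that cannot be
# coerced is skipped and an ElementError describing it is appended to the
# errors collection instead of being raised, e.g.:
#
#   >>> errs = AnsibleValidationErrorMultiple()
#   >>> _validate_elements('int', 'ports', ['22', 80], errors=errs)
#   [22, 80]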

def _validate_argument_types(argument_spec, parameters, prefix='', options_context=None, errors=None):
    """Validate that parameter types match the type in the argument spec.

    Determine the appropriate type checker function and run each

@@ -637,7 +567,7 @@ def validate_argument_types(argument_spec, parameters, prefix='', options_contex
    :param argument_spec: Argument spec
    :type argument_spec: dict

    :param parameters: Parameters
    :type parameters: dict

    :param prefix: Name of the parent key that contains the spec. Used in the error message

@@ -653,7 +583,7 @@ def validate_argument_types(argument_spec, parameters, prefix='', options_contex
    """

    if errors is None:
        errors = AnsibleValidationErrorMultiple()

    for param, spec in argument_spec.items():
        if param not in parameters:

@@ -664,7 +594,7 @@ def validate_argument_types(argument_spec, parameters, prefix='', options_contex
            continue

        wanted_type = spec.get('type')
        type_checker, wanted_name = _get_type_validator(wanted_type)
        # Get param name for strings so we can later display this value in a useful error message if needed
        # Only pass 'kwargs' to our checkers and ignore custom callable checkers
        kwargs = {}

@@ -685,22 +615,22 @@ def validate_argument_types(argument_spec, parameters, prefix='', options_contex
                    if options_context:
                        msg += " found in '%s'." % " -> ".join(options_context)
                    msg += ", elements value check is supported only with 'list' type"
                    errors.append(ArgumentTypeError(msg))
                parameters[param] = _validate_elements(elements_wanted_type, param, elements, options_context, errors)

        except (TypeError, ValueError) as e:
            msg = "argument '%s' is of type %s" % (param, type(value))
            if options_context:
                msg += " found in '%s'." % " -> ".join(options_context)
            msg += " and we were unable to convert to %s: %s" % (wanted_name, to_native(e))
            errors.append(ArgumentTypeError(msg))

def _validate_argument_values(argument_spec, parameters, options_context=None, errors=None):
    """Ensure all arguments have the requested values, and there are no stray arguments"""

    if errors is None:
        errors = AnsibleValidationErrorMultiple()

    for param, spec in argument_spec.items():
        choices = spec.get('choices')

@@ -716,8 +646,8 @@ def validate_argument_values(argument_spec, parameters, options_context=None, er
                        choices_str = ", ".join([to_native(c) for c in choices])
                        msg = "value of %s must be one or more of: %s. Got no match for: %s" % (param, choices_str, diff_list)
                        if options_context:
                            msg = "{0} found in {1}".format(msg, " -> ".join(options_context))
                        errors.append(ArgumentValueError(msg))
                elif parameters[param] not in choices:
                    # PyYaml converts certain strings to bools. If we can unambiguously convert back, do so before checking
                    # the value. If we can't figure this out, module author is responsible.

@@ -740,23 +670,23 @@ def validate_argument_values(argument_spec, parameters, options_context=None, er
                        choices_str = ", ".join([to_native(c) for c in choices])
                        msg = "value of %s must be one of: %s, got: %s" % (param, choices_str, parameters[param])
                        if options_context:
                            msg = "{0} found in {1}".format(msg, " -> ".join(options_context))
                        errors.append(ArgumentValueError(msg))
        else:
            msg = "internal error: choices for argument %s are not iterable: %s" % (param, choices)
            if options_context:
                msg = "{0} found in {1}".format(msg, " -> ".join(options_context))
            errors.append(ArgumentTypeError(msg))

def _validate_sub_spec(argument_spec, parameters, prefix='', options_context=None, errors=None, no_log_values=None, unsupported_parameters=None):
    """Validate sub argument spec. This function is recursive."""

    if options_context is None:
        options_context = []

    if errors is None:
        errors = AnsibleValidationErrorMultiple()

    if no_log_values is None:
        no_log_values = set()

@@ -766,11 +696,11 @@ def validate_sub_spec(argument_spec, parameters, prefix='', options_context=None
    for param, value in argument_spec.items():
        wanted = value.get('type')
        if wanted == 'dict' or (wanted == 'list' and value.get('elements', '') == 'dict'):
            sub_spec = value.get('options')
            if value.get('apply_defaults', False):
                if sub_spec is not None:
                    if parameters.get(param) is None:
                        parameters[param] = {}
                else:
                    continue

@@ -788,7 +718,7 @@ def validate_sub_spec(argument_spec, parameters, prefix='', options_context=None
            for idx, sub_parameters in enumerate(elements):
                if not isinstance(sub_parameters, dict):
                    errors.append(SubParameterTypeError("value of '%s' must be of type dict or list of dicts" % param))

                # Set prefix for warning messages
                new_prefix = prefix + param

@@ -799,53 +729,159 @@ def validate_sub_spec(argument_spec, parameters, prefix='', options_context=None
                no_log_values.update(set_fallbacks(sub_spec, sub_parameters))

                alias_warnings = []
                alias_deprecations = []
                try:
                    options_aliases = _handle_aliases(sub_spec, sub_parameters, alias_warnings, alias_deprecations)
                except (TypeError, ValueError) as e:
                    options_aliases = {}
                    errors.append(AliasError(to_native(e)))

                for option, alias in alias_warnings:
                    warn('Both option %s and its alias %s are set.' % (option, alias))

                try:
                    no_log_values.update(_list_no_log_values(sub_spec, sub_parameters))
                except TypeError as te:
                    errors.append(NoLogError(to_native(te)))

                legal_inputs = _get_legal_inputs(sub_spec, sub_parameters, options_aliases)
                unsupported_parameters.update(_get_unsupported_parameters(sub_spec, sub_parameters, legal_inputs, options_context))

                try:
                    check_mutually_exclusive(value.get('mutually_exclusive'), sub_parameters, options_context)
                except TypeError as e:
                    errors.append(MutuallyExclusiveError(to_native(e)))

                no_log_values.update(_set_defaults(sub_spec, sub_parameters, False))

                try:
                    check_required_arguments(sub_spec, sub_parameters, options_context)
                except TypeError as e:
                    errors.append(RequiredError(to_native(e)))

                _validate_argument_types(sub_spec, sub_parameters, new_prefix, options_context, errors=errors)
                _validate_argument_values(sub_spec, sub_parameters, options_context, errors=errors)

                for check in _ADDITIONAL_CHECKS:
                    try:
                        check['func'](value.get(check['attr']), sub_parameters, options_context)
                    except TypeError as e:
                        errors.append(check['err'](to_native(e)))

                no_log_values.update(_set_defaults(sub_spec, sub_parameters))

                # Handle nested specs
                _validate_sub_spec(sub_spec, sub_parameters, new_prefix, options_context, errors, no_log_values, unsupported_parameters)

            options_context.pop()
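
# Usage sketch (editor's addition, hypothetical spec): suboptions declared with
# type='dict' (or type='list' with elements='dict') and an 'options' mapping are
# validated recursively; defaults are applied and failures are collected on the
# shared errors object, e.g.:
#
#   >>> spec = {'proxy': {'type': 'dict', 'options': {
#   ...     'host': {'type': 'str', 'required': True},
#   ...     'port': {'type': 'int', 'default': 8080}}}}
#   >>> params = {'proxy': {'host': 'example.com'}}
#   >>> errs = AnsibleValidationErrorMultiple()
#   >>> _validate_sub_spec(spec, params, errors=errs)
#   >>> params['proxy']['port']
#   8080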

def env_fallback(*args, **kwargs):
    """Load value from environment variable"""

    for arg in args:
        if arg in os.environ:
            return os.environ[arg]

    raise AnsibleFallbackNotFound
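
# Usage sketch (editor's addition, hypothetical variables): the first environment
# variable that is set wins; raising AnsibleFallbackNotFound tells set_fallbacks()
# below to leave the parameter unset, e.g.:
#
#   >>> os.environ['MY_API_TOKEN'] = 'abc123'
#   >>> env_fallback('MY_API_TOKEN', 'MY_OLD_API_TOKEN')
#   'abc123'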

def set_fallbacks(argument_spec, parameters):
    no_log_values = set()
    for param, value in argument_spec.items():
        fallback = value.get('fallback', (None,))
        fallback_strategy = fallback[0]
        fallback_args = []
        fallback_kwargs = {}

        if param not in parameters and fallback_strategy is not None:
            for item in fallback[1:]:
                if isinstance(item, dict):
                    fallback_kwargs = item
                else:
                    fallback_args = item
            try:
                fallback_value = fallback_strategy(*fallback_args, **fallback_kwargs)
            except AnsibleFallbackNotFound:
                continue
            else:
                if value.get('no_log', False) and fallback_value:
                    no_log_values.add(fallback_value)
                parameters[param] = fallback_value

    return no_log_values
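
# Usage sketch (editor's addition, hypothetical spec): a fallback is consulted
# only when the parameter was not supplied, and a no_log fallback value is also
# returned so it can be masked, e.g. with MY_API_TOKEN=abc123 in the environment:
#
#   >>> spec = {'token': {'no_log': True, 'fallback': (env_fallback, ['MY_API_TOKEN'])}}
#   >>> params = {}
#   >>> set_fallbacks(spec, params)
#   {'abc123'}
#   >>> params
#   {'token': 'abc123'}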

def sanitize_keys(obj, no_log_strings, ignore_keys=frozenset()):
    """Sanitize the keys in a container object by removing no_log values from key names.

    This is a companion function to the `remove_values()` function. Similar to that function,
    we make use of deferred_removals to avoid hitting maximum recursion depth in cases of
    large data structures.

    :param obj: The container object to sanitize. Non-container objects are returned unmodified.
    :param no_log_strings: A set of string values we do not want logged.
    :param ignore_keys: A set of string values of keys to not sanitize.

    :returns: An object with sanitized keys.
    """

    deferred_removals = deque()

    no_log_strings = [to_native(s, errors='surrogate_or_strict') for s in no_log_strings]
    new_value = _sanitize_keys_conditions(obj, no_log_strings, ignore_keys, deferred_removals)

    while deferred_removals:
        old_data, new_data = deferred_removals.popleft()

        if isinstance(new_data, Mapping):
            for old_key, old_elem in old_data.items():
                if old_key in ignore_keys or old_key.startswith('_ansible'):
                    new_data[old_key] = _sanitize_keys_conditions(old_elem, no_log_strings, ignore_keys, deferred_removals)
                else:
                    # Sanitize the old key. We take advantage of the sanitizing code in
                    # _remove_values_conditions() rather than recreating it here.
                    new_key = _remove_values_conditions(old_key, no_log_strings, None)
                    new_data[new_key] = _sanitize_keys_conditions(old_elem, no_log_strings, ignore_keys, deferred_removals)
        else:
            for elem in old_data:
                new_elem = _sanitize_keys_conditions(elem, no_log_strings, ignore_keys, deferred_removals)
                if isinstance(new_data, MutableSequence):
                    new_data.append(new_elem)
                elif isinstance(new_data, MutableSet):
                    new_data.add(new_elem)
                else:
                    raise TypeError('Unknown container type encountered when removing private values from keys')

    return new_value
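
# Usage sketch (editor's addition, hypothetical data): only keys are scrubbed; a
# key equal to a no_log value is replaced wholesale and a partial match is masked
# with asterisks, while the values themselves are left untouched, e.g.:
#
#   >>> sanitize_keys({'secret': 'v1', 'secret-key': 'v2'}, no_log_strings={'secret'})
#   {'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER': 'v1', '********-key': 'v2'}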

def remove_values(value, no_log_strings):
    """Remove strings in no_log_strings from value.

    If value is a container type, then remove a lot more.

    Use of deferred_removals exists, rather than a pure recursive solution,
    because of the potential to hit the maximum recursion depth when dealing with
    large amounts of data (see issue #24560).
    """

    deferred_removals = deque()

    no_log_strings = [to_native(s, errors='surrogate_or_strict') for s in no_log_strings]
    new_value = _remove_values_conditions(value, no_log_strings, deferred_removals)

    while deferred_removals:
        old_data, new_data = deferred_removals.popleft()
        if isinstance(new_data, Mapping):
            for old_key, old_elem in old_data.items():
                new_elem = _remove_values_conditions(old_elem, no_log_strings, deferred_removals)
                new_data[old_key] = new_elem
        else:
            for elem in old_data:
                new_elem = _remove_values_conditions(elem, no_log_strings, deferred_removals)
                if isinstance(new_data, MutableSequence):
                    new_data.append(new_elem)
                elif isinstance(new_data, MutableSet):
                    new_data.add(new_elem)
                else:
                    raise TypeError('Unknown container type encountered when removing private values from output')

    return new_value
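
# Usage sketch (editor's addition, hypothetical data): matching strings are
# replaced wherever they appear, including inside nested containers, e.g.:
#
#   >>> remove_values({'msg': 'token is s3cret', 'items': ['s3cret', 'ok']}, {'s3cret'})
#   {'msg': 'token is ********', 'items': ['VALUE_SPECIFIED_IN_NO_LOG_PARAMETER', 'ok']}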