Update triple single quotes to triple double quotes (#84099)

* Update triple single quotes to triple double quotes

This change was fully automated.

The updated Python files have been verified to tokenize the same as the originals, except for the expected change in string quoting; the affected string literals were verified to have unchanged values using literal_eval.
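For illustration, a minimal sketch of how such a check could be written follows. This is not the actual script used for this change; the helper name tokens_equivalent and the command-line handling are assumptions made for the example.

# Hypothetical verification sketch: compare the token streams of the original and
# converted files, allow STRING tokens to differ only in their quoting, and
# confirm the quoted values are unchanged via ast.literal_eval.
import ast
import io
import sys
import tokenize


def tokens_equivalent(original_source, converted_source):
    old_tokens = list(tokenize.generate_tokens(io.StringIO(original_source).readline))
    new_tokens = list(tokenize.generate_tokens(io.StringIO(converted_source).readline))
    if len(old_tokens) != len(new_tokens):
        return False
    for old, new in zip(old_tokens, new_tokens):
        if old.type != new.type:
            return False
        if old.string == new.string:
            continue  # identical token text
        if old.type != tokenize.STRING:
            return False  # only string literals are allowed to differ
        # plain (non f-string) literals can be evaluated and compared by value
        if ast.literal_eval(old.string) != ast.literal_eval(new.string):
            return False
    return True


if __name__ == '__main__':
    with open(sys.argv[1]) as original, open(sys.argv[2]) as converted:
        print(tokens_equivalent(original.read(), converted.read()))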

* Manual conversion of docstring quotes
Matt Clay authored 1 month ago, committed by GitHub
parent 62ce21b6e4
commit a0495fc314

@@ -32,14 +32,14 @@ TICKET_NUMBER = re.compile(r'(?:^|\s)#(\d+)')
def normalize_pr_url(pr, allow_non_ansible_ansible=False, only_number=False):
-    '''
+    """
    Given a PullRequest, or a string containing a PR number, PR URL,
    or internal PR URL (e.g. ansible-collections/community.general#1234),
    return either a full github URL to the PR (if only_number is False),
    or an int containing the PR number (if only_number is True).
    Throws if it can't parse the input.
-    '''
+    """
    if isinstance(pr, PullRequest):
        return pr.html_url
@@ -71,10 +71,10 @@ def normalize_pr_url(pr, allow_non_ansible_ansible=False, only_number=False):
def url_to_org_repo(url):
-    '''
+    """
    Given a full Github PR URL, extract the user/org and repo name.
    Return them in the form: "user/repo"
-    '''
+    """
    match = PULL_HTTP_URL_RE.match(url)
    if not match:
        return ''
@@ -82,7 +82,7 @@ def url_to_org_repo(url):
def generate_new_body(pr, source_pr):
-    '''
+    """
    Given the new PR (the backport) and the originating (source) PR,
    construct the new body for the backport PR.
@@ -93,7 +93,7 @@ def generate_new_body(pr, source_pr):
    This function does not side-effect, it simply returns the new body as a
    string.
-    '''
+    """
    backport_text = '\nBackport of {0}\n'.format(source_pr)
    body_lines = pr.body.split('\n')
    new_body_lines = []
@@ -115,10 +115,10 @@ def generate_new_body(pr, source_pr):
def get_prs_for_commit(g, commit):
-    '''
+    """
    Given a commit hash, attempt to find the hash in any repo in the
    ansible orgs, and then use it to determine what, if any, PR it appeared in.
-    '''
+    """
    commits = g.search_commits(
        'hash:{0} org:ansible org:ansible-collections is:public'.format(commit)
@@ -132,7 +132,7 @@ def get_prs_for_commit(g, commit):
def search_backport(pr, g, ansible_ansible):
-    '''
+    """
    Do magic. This is basically the "brain" of 'auto'.
    It will search the PR (the newest PR - the backport) and try to find where
    it originated.
@@ -148,7 +148,7 @@ def search_backport(pr, g, ansible_ansible):
    It will take all of the above, and return a list of "possibilities",
    which is a list of PullRequest objects.
-    '''
+    """
    possibilities = []
@@ -198,20 +198,20 @@ def search_backport(pr, g, ansible_ansible):
def prompt_add():
-    '''
+    """
    Prompt the user and return whether or not they agree.
-    '''
+    """
    res = input('Shall I add the reference? [Y/n]: ')
    return res.lower() in ('', 'y', 'yes')
def commit_edit(new_pr, pr):
-    '''
+    """
    Given the new PR (the backport), and the "possibility" that we have decided
    on, prompt the user and then add the reference to the body of the new PR.
    This method does the actual "destructive" work of editing the PR body.
-    '''
+    """
    print('I think this PR might have come from:')
    print(pr.title)
    print('-' * 50)

@@ -116,7 +116,7 @@ except ImportError:
class CLI(ABC):
-    ''' code behind bin/ansible* programs '''
+    """ code behind bin/ansible* programs """
    PAGER = C.config.get_config_value('PAGER')
@@ -317,7 +317,7 @@ class CLI(ABC):
    @staticmethod
    def ask_passwords():
-        ''' prompt for connection and become passwords if needed '''
+        """ prompt for connection and become passwords if needed """
        op = context.CLIARGS
        sshpass = None
@@ -347,7 +347,7 @@ class CLI(ABC):
        return (sshpass, becomepass)
    def validate_conflicts(self, op, runas_opts=False, fork_opts=False):
-        ''' check for conflicting options '''
+        """ check for conflicting options """
        if fork_opts:
            if op.forks < 1:
@@ -459,7 +459,7 @@ class CLI(ABC):
    @staticmethod
    def version_info(gitinfo=False):
-        ''' return full ansible version info '''
+        """ return full ansible version info """
        if gitinfo:
            # expensive call, user with care
            ansible_version_string = opt_help.version()
@@ -485,7 +485,7 @@ class CLI(ABC):
    @staticmethod
    def pager(text):
-        ''' find reasonable way to display text '''
+        """ find reasonable way to display text """
        # this is a much simpler form of what is in pydoc.py
        if not sys.stdout.isatty():
            display.display(text, screen_only=True)
@@ -504,7 +504,7 @@ class CLI(ABC):
    @staticmethod
    def pager_pipe(text):
-        ''' pipe text through a pager '''
+        """ pipe text through a pager """
        if 'less' in CLI.PAGER:
            os.environ['LESS'] = CLI.LESS_OPTS
            try:

@@ -24,14 +24,14 @@ display = Display()
class AdHocCLI(CLI):
-    ''' is an extra-simple tool/framework/API for doing 'remote things'.
+    """ is an extra-simple tool/framework/API for doing 'remote things'.
    this command allows you to define and run a single task 'playbook' against a set of hosts
-    '''
+    """
    name = 'ansible'
    def init_parser(self):
-        ''' create an options parser for bin/ansible '''
+        """ create an options parser for bin/ansible """
        super(AdHocCLI, self).init_parser(usage='%prog <host-pattern> [options]',
                                          desc="Define and run a single task 'playbook' against a set of hosts",
                                          epilog="Some actions do not make sense in Ad-Hoc (include, meta, etc)")
@@ -60,7 +60,7 @@ class AdHocCLI(CLI):
        self.parser.add_argument('args', metavar='pattern', help='host pattern')
    def post_process_args(self, options):
-        '''Post process and validate options for bin/ansible '''
+        """Post process and validate options for bin/ansible """
        options = super(AdHocCLI, self).post_process_args(options)
@@ -98,7 +98,7 @@ class AdHocCLI(CLI):
            tasks=[mytask])
    def run(self):
-        ''' create and execute the single task playbook '''
+        """ create and execute the single task playbook """
        super(AdHocCLI, self).run()

@@ -47,14 +47,14 @@ def yaml_short(data):
def get_constants():
-    ''' helper method to ensure we can template based on existing constants '''
+    """ helper method to ensure we can template based on existing constants """
    if not hasattr(get_constants, 'cvars'):
        get_constants.cvars = {k: getattr(C, k) for k in dir(C) if not k.startswith('__')}
    return get_constants.cvars
def _ansible_env_vars(varname):
-    ''' return true or false depending if variable name is possibly a 'configurable' ansible env variable '''
+    """ return true or false depending if variable name is possibly a 'configurable' ansible env variable """
    return all(
        [
            varname.startswith("ANSIBLE_"),
@@ -188,9 +188,9 @@ class ConfigCLI(CLI):
        context.CLIARGS['func']()
    def execute_update(self):
-        '''
+        """
        Updates a single setting in the specified ansible.cfg
-        '''
+        """
        raise AnsibleError("Option not implemented yet")
        # pylint: disable=unreachable
@@ -212,9 +212,9 @@ class ConfigCLI(CLI):
        ])
    def execute_view(self):
-        '''
+        """
        Displays the current config file
-        '''
+        """
        try:
            with open(self.config_file, 'rb') as f:
                self.pager(to_text(f.read(), errors='surrogate_or_strict'))
@@ -222,9 +222,9 @@ class ConfigCLI(CLI):
            raise AnsibleError("Failed to open config file: %s" % to_native(e))
    def execute_edit(self):
-        '''
+        """
        Opens ansible.cfg in the default EDITOR
-        '''
+        """
        raise AnsibleError("Option not implemented yet")
        # pylint: disable=unreachable
@@ -266,9 +266,9 @@ class ConfigCLI(CLI):
        return entries
    def _list_entries_from_args(self):
-        '''
+        """
        build a dict with the list requested configs
-        '''
+        """
        config_entries = {}
        if context.CLIARGS['type'] in ('base', 'all'):
@@ -294,9 +294,9 @@ class ConfigCLI(CLI):
        return config_entries
    def execute_list(self):
-        '''
+        """
        list and output available configs
-        '''
+        """
        config_entries = self._list_entries_from_args()
        if context.CLIARGS['format'] == 'yaml':
@@ -599,9 +599,9 @@ class ConfigCLI(CLI):
        return output
    def execute_dump(self):
-        '''
+        """
        Shows the current settings, merges ansible.cfg if specified
-        '''
+        """
        output = []
        if context.CLIARGS['type'] in ('base', 'all'):
            # deal with base

@@ -35,7 +35,7 @@ display = Display()
class ConsoleCLI(CLI, cmd.Cmd):
-    '''
+    """
    A REPL that allows for running ad-hoc tasks against a chosen inventory
    from a nice shell with built-in tab completion (based on dominis'
    ``ansible-shell``).
@@ -62,7 +62,7 @@ class ConsoleCLI(CLI, cmd.Cmd):
    - ``help [command/module]``: display documentation for
      the command or module
    - ``exit``: exit ``ansible-console``
-    '''
+    """
    name = 'ansible-console'
    modules = [] # type: list[str] | None
@@ -579,7 +579,7 @@ class ConsoleCLI(CLI, cmd.Cmd):
        self.cmdloop()
    def __getattr__(self, name):
-        ''' handle not found to populate dynamically a module function if module matching name exists '''
+        """ handle not found to populate dynamically a module function if module matching name exists """
        attr = None
        if name.startswith('do_'):

@@ -409,7 +409,7 @@ def _doclink(url):
def _format(string, *args):
-    ''' add ascii formatting or delimiters '''
+    """ add ascii formatting or delimiters """
    for style in args:
@@ -433,10 +433,10 @@ def _format(string, *args):
class DocCLI(CLI, RoleMixin):
-    ''' displays information on modules installed in Ansible libraries.
+    """ displays information on modules installed in Ansible libraries.
    It displays a terse listing of plugins and their short descriptions,
    provides a printout of their DOCUMENTATION strings,
-    and it can create a short "snippet" which can be pasted into a playbook. '''
+    and it can create a short "snippet" which can be pasted into a playbook. """
    name = 'ansible-doc'
@@ -850,14 +850,14 @@ class DocCLI(CLI, RoleMixin):
        return plugin_docs
    def _get_roles_path(self):
-        '''
+        """
        Add any 'roles' subdir in playbook dir to the roles search path.
        And as a last resort, add the playbook dir itself. Order being:
          - 'roles' subdir of playbook dir
          - DEFAULT_ROLES_PATH (default in cliargs)
          - playbook dir (basedir)
        NOTE: This matches logic in RoleDefinition._load_role_path() method.
-        '''
+        """
        roles_path = context.CLIARGS['roles_path']
        if context.CLIARGS['basedir'] is not None:
            subdir = os.path.join(context.CLIARGS['basedir'], "roles")
@@ -868,7 +868,7 @@ class DocCLI(CLI, RoleMixin):
    @staticmethod
    def _prep_loader(plugin_type):
-        ''' return a plugint type specific loader '''
+        """ return a plugint type specific loader """
        loader = getattr(plugin_loader, '%s_loader' % plugin_type)
        # add to plugin paths from command line
@@ -1058,7 +1058,7 @@ class DocCLI(CLI, RoleMixin):
    @staticmethod
    def format_snippet(plugin, plugin_type, doc):
-        ''' return heavily commented plugin use to insert into play '''
+        """ return heavily commented plugin use to insert into play """
        if plugin_type == 'inventory' and doc.get('options', {}).get('plugin'):
            # these do not take a yaml config that we can write a snippet for
            raise ValueError('The {0} inventory plugin does not take YAML type config source'
@@ -1140,7 +1140,7 @@ class DocCLI(CLI, RoleMixin):
    @staticmethod
    def print_paths(finder):
-        ''' Returns a string suitable for printing of the search path '''
+        """ Returns a string suitable for printing of the search path """
        # Uses a list to get the order right
        ret = []
@@ -1280,7 +1280,7 @@ class DocCLI(CLI, RoleMixin):
                DocCLI.add_fields(text, subdata, limit, opt_indent + ' ', return_values, opt_indent)
    def get_role_man_text(self, role, role_json):
-        '''Generate text for the supplied role suitable for display.
+        """Generate text for the supplied role suitable for display.
        This is similar to get_man_text(), but roles are different enough that we have
        a separate method for formatting their display.
@@ -1289,7 +1289,7 @@ class DocCLI(CLI, RoleMixin):
        :param role_json: The JSON for the given role as returned from _create_role_doc().
        :returns: A array of text suitable for displaying to screen.
-        '''
+        """
        text = []
        opt_indent = " "
        pad = display.columns * 0.20

@@ -177,11 +177,11 @@ class RoleDistributionServer:
class GalaxyCLI(CLI):
-    '''Command to manage Ansible roles and collections.
+    """Command to manage Ansible roles and collections.
    None of the CLI tools are designed to run concurrently with themselves.
    Use an external scheduler and/or locking to ensure there are no clashing operations.
-    '''
+    """
    name = 'ansible-galaxy'
@@ -212,7 +212,7 @@ class GalaxyCLI(CLI):
        super(GalaxyCLI, self).__init__(args)
    def init_parser(self):
-        ''' create an options parser for bin/ansible '''
+        """ create an options parser for bin/ansible """
        super(GalaxyCLI, self).init_parser(
            desc="Perform various Role and Collection related operations.",
@@ -1721,7 +1721,7 @@ class GalaxyCLI(CLI):
        publish_collection(collection_path, self.api, wait, timeout)
    def execute_search(self):
-        ''' searches for roles on the Ansible Galaxy server'''
+        """ searches for roles on the Ansible Galaxy server"""
        page_size = 1000
        search = None

@@ -26,7 +26,7 @@ display = Display()
class InventoryCLI(CLI):
-    ''' used to display or dump the configured inventory as Ansible sees it '''
+    """ used to display or dump the configured inventory as Ansible sees it """
    name = 'ansible-inventory'

@@ -29,8 +29,8 @@ display = Display()
class PlaybookCLI(CLI):
-    ''' the tool to run *Ansible playbooks*, which are a configuration and multinode deployment system.
+    """ the tool to run *Ansible playbooks*, which are a configuration and multinode deployment system.
-    See the project home page (https://docs.ansible.com) for more information. '''
+    See the project home page (https://docs.ansible.com) for more information. """
    name = 'ansible-playbook'

@@ -33,7 +33,7 @@ display = Display()
class PullCLI(CLI):
-    ''' Used to pull a remote copy of ansible on each managed node,
+    """ Used to pull a remote copy of ansible on each managed node,
    each set to run via cron and update playbook source via a source repository.
    This inverts the default *push* architecture of ansible into a *pull* architecture,
    which has near-limitless scaling potential.
@@ -45,7 +45,7 @@ class PullCLI(CLI):
    This is useful both for extreme scale-out as well as periodic remediation.
    Usage of the 'fetch' module to retrieve logs from ansible-pull runs would be an
    excellent way to gather and analyze remote logs from ansible-pull.
-    '''
+    """
    name = 'ansible-pull'
@@ -76,7 +76,7 @@ class PullCLI(CLI):
        return inv_opts
    def init_parser(self):
-        ''' create an options parser for bin/ansible '''
+        """ create an options parser for bin/ansible """
        super(PullCLI, self).init_parser(
            usage='%prog -U <repository> [options] [<playbook.yml>]',
@@ -157,7 +157,7 @@ class PullCLI(CLI):
        return options
    def run(self):
-        ''' use Runner lib to do SSH things '''
+        """ use Runner lib to do SSH things """
        super(PullCLI, self).run()

@@ -57,10 +57,10 @@ def file_lock(lock_path):
class ConnectionProcess(object):
-    '''
+    """
    The connection process wraps around a Connection object that manages
    the connection to a remote device that persists over the playbook
-    '''
+    """
    def __init__(self, fd, play_context, socket_path, original_path, task_uuid=None, ansible_playbook_pid=None):
        self.play_context = play_context
        self.socket_path = socket_path

@@ -25,7 +25,7 @@ display = Display()
class VaultCLI(CLI):
-    ''' can encrypt any structured data file used by Ansible.
+    """ can encrypt any structured data file used by Ansible.
    This can include *group_vars/* or *host_vars/* inventory variables,
    variables loaded by *include_vars* or *vars_files*, or variable files
    passed on the ansible-playbook command line with *-e @file.yml* or *-e @file.json*.
@@ -33,7 +33,7 @@ class VaultCLI(CLI):
    Because Ansible tasks, handlers, and other objects are data, these can also be encrypted with vault.
    If you'd like to not expose what variables you are using, you can keep an individual task file entirely encrypted.
-    '''
+    """
    name = 'ansible-vault'
@@ -252,7 +252,7 @@ class VaultCLI(CLI):
        os.umask(old_umask)
    def execute_encrypt(self):
-        ''' encrypt the supplied file using the provided vault secret '''
+        """ encrypt the supplied file using the provided vault secret """
        if not context.CLIARGS['args'] and sys.stdin.isatty():
            display.display("Reading plaintext input from stdin", stderr=True)
@@ -286,7 +286,7 @@ class VaultCLI(CLI):
        return yaml_ciphertext
    def execute_encrypt_string(self):
-        ''' encrypt the supplied string using the provided vault secret '''
+        """ encrypt the supplied string using the provided vault secret """
        b_plaintext = None
        # Holds tuples (the_text, the_source_of_the_string, the variable name if its provided).
@@ -431,7 +431,7 @@ class VaultCLI(CLI):
        return output
    def execute_decrypt(self):
-        ''' decrypt the supplied file using the provided vault secret '''
+        """ decrypt the supplied file using the provided vault secret """
        if not context.CLIARGS['args'] and sys.stdin.isatty():
            display.display("Reading ciphertext input from stdin", stderr=True)
@@ -443,7 +443,7 @@ class VaultCLI(CLI):
            display.display("Decryption successful", stderr=True)
    def execute_create(self):
-        ''' create and open a file in an editor that will be encrypted with the provided vault secret when closed'''
+        """ create and open a file in an editor that will be encrypted with the provided vault secret when closed"""
        if len(context.CLIARGS['args']) != 1:
            raise AnsibleOptionsError("ansible-vault create can take only one filename argument")
@@ -455,12 +455,12 @@ class VaultCLI(CLI):
            raise AnsibleOptionsError("not a tty, editor cannot be opened")
    def execute_edit(self):
-        ''' open and decrypt an existing vaulted file in an editor, that will be encrypted again when closed'''
+        """ open and decrypt an existing vaulted file in an editor, that will be encrypted again when closed"""
        for f in context.CLIARGS['args']:
            self.editor.edit_file(f)
    def execute_view(self):
-        ''' open, decrypt and view an existing vaulted file using a pager using the supplied vault secret '''
+        """ open, decrypt and view an existing vaulted file using a pager using the supplied vault secret """
        for f in context.CLIARGS['args']:
            # Note: vault should return byte strings because it could encrypt
@@ -472,7 +472,7 @@ class VaultCLI(CLI):
            self.pager(to_text(plaintext))
    def execute_rekey(self):
-        ''' re-encrypt a vaulted file with a new secret, the previous secret is required '''
+        """ re-encrypt a vaulted file with a new secret, the previous secret is required """
        for f in context.CLIARGS['args']:
            # FIXME: plumb in vault_id, use the default new_vault_secret for now
            self.editor.rekey_file(f, self.new_encrypt_secret,

@@ -15,9 +15,9 @@
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-'''
+"""
Compat library for ansible. This contains compatibility definitions for older python
When we need to import a module differently depending on python version, do it
here. Then in the code we can simply import from compat in order to get what we want.
-'''
+"""
from __future__ import annotations

@@ -53,7 +53,7 @@ GALAXY_SERVER_ADDITIONAL = {
def _get_entry(plugin_type, plugin_name, config):
-    ''' construct entry for requested config '''
+    """ construct entry for requested config """
    entry = ''
    if plugin_type:
        entry += 'plugin_type: %s ' % plugin_type
@@ -65,7 +65,7 @@ def _get_entry(plugin_type, plugin_name, config):
# FIXME: see if we can unify in module_utils with similar function used by argspec
def ensure_type(value, value_type, origin=None, origin_ftype=None):
-    ''' return a configuration variable with casting
+    """ return a configuration variable with casting
    :arg value: The value to ensure correct typing of
    :kwarg value_type: The type of the value. This can be any of the following strings:
        :boolean: sets the value to a True or False value
@@ -88,7 +88,7 @@ def ensure_type(value, value_type, origin=None, origin_ftype=None):
            tildes's in the value.
        :str: Sets the value to string types.
        :string: Same as 'str'
-    '''
+    """
    errmsg = ''
    basedir = None
@@ -190,7 +190,7 @@ def ensure_type(value, value_type, origin=None, origin_ftype=None):
# FIXME: see if this can live in utils/path
def resolve_path(path, basedir=None):
-    ''' resolve relative or 'variable' paths '''
+    """ resolve relative or 'variable' paths """
    if '{{CWD}}' in path: # allow users to force CWD using 'magic' {{CWD}}
        path = path.replace('{{CWD}}', os.getcwd())
@@ -215,7 +215,7 @@ def get_config_type(cfile):
# FIXME: can move to module_utils for use for ini plugins also?
def get_ini_config_value(p, entry):
-    ''' returns the value of last ini entry found '''
+    """ returns the value of last ini entry found """
    value = None
    if p is not None:
        try:
@@ -226,7 +226,7 @@ def get_ini_config_value(p, entry):
def find_ini_config_file(warnings=None):
-    ''' Load INI Config File order(first found is used): ENV, CWD, HOME, /etc/ansible '''
+    """ Load INI Config File order(first found is used): ENV, CWD, HOME, /etc/ansible """
    # FIXME: eventually deprecate ini configs
    if warnings is None:
@@ -289,7 +289,7 @@ def find_ini_config_file(warnings=None):
def _add_base_defs_deprecations(base_defs):
-    '''Add deprecation source 'ansible.builtin' to deprecations in base.yml'''
+    """Add deprecation source 'ansible.builtin' to deprecations in base.yml"""
    def process(entry):
        if 'deprecated' in entry:
            entry['deprecated']['collection_name'] = 'ansible.builtin'
@@ -388,7 +388,7 @@ class ConfigManager(object):
                "Missing base YAML definition file (bad install?): %s" % to_native(yml_file))
    def _parse_config_file(self, cfile=None):
-        ''' return flat configuration settings from file(s) '''
+        """ return flat configuration settings from file(s) """
        # TODO: take list of files with merge/nomerge
        if cfile is None:
@@ -415,7 +415,7 @@ class ConfigManager(object):
            raise AnsibleOptionsError("Unsupported configuration file type: %s" % to_native(ftype))
    def _find_yaml_config_files(self):
-        ''' Load YAML Config Files in order, check merge flags, keep origin of settings'''
+        """ Load YAML Config Files in order, check merge flags, keep origin of settings"""
        pass
    def get_plugin_options(self, plugin_type, name, keys=None, variables=None, direct=None):
@@ -467,7 +467,7 @@ class ConfigManager(object):
        return has
    def get_configuration_definitions(self, plugin_type=None, name=None, ignore_private=False):
-        ''' just list the possible settings, either base or for specific plugins or plugin '''
+        """ just list the possible settings, either base or for specific plugins or plugin """
        ret = {}
        if plugin_type is None:
@@ -484,7 +484,7 @@ class ConfigManager(object):
        return ret
    def _loop_entries(self, container, entry_list):
-        ''' repeat code for value entry assignment '''
+        """ repeat code for value entry assignment """
        value = None
        origin = None
@@ -510,7 +510,7 @@ class ConfigManager(object):
        return value, origin
    def get_config_value(self, config, cfile=None, plugin_type=None, plugin_name=None, keys=None, variables=None, direct=None):
-        ''' wrapper '''
+        """ wrapper """
        try:
            value, _drop = self.get_config_value_and_origin(config, cfile=cfile, plugin_type=plugin_type, plugin_name=plugin_name,
@@ -522,7 +522,7 @@ class ConfigManager(object):
        return value
    def get_config_value_and_origin(self, config, cfile=None, plugin_type=None, plugin_name=None, keys=None, variables=None, direct=None):
-        ''' Given a config key figure out the actual value and report on the origin of the settings '''
+        """ Given a config key figure out the actual value and report on the origin of the settings """
        if cfile is None:
            # use default config
            cfile = self._config_file

@@ -21,7 +21,7 @@ config = ConfigManager()
def _warning(msg):
-    ''' display is not guaranteed here, nor it being the full class, but try anyways, fallback to sys.stderr.write '''
+    """ display is not guaranteed here, nor it being the full class, but try anyways, fallback to sys.stderr.write """
    try:
        from ansible.utils.display import Display
        Display().warning(msg)
@@ -31,7 +31,7 @@ def _warning(msg):
def _deprecated(msg, version):
-    ''' display is not guaranteed here, nor it being the full class, but try anyways, fallback to sys.stderr.write '''
+    """ display is not guaranteed here, nor it being the full class, but try anyways, fallback to sys.stderr.write """
    try:
        from ansible.utils.display import Display
        Display().deprecated(msg, version=version)
@@ -63,7 +63,7 @@ def handle_config_noise(display=None):
def set_constant(name, value, export=vars()):
-    ''' sets constants and returns resolved options dict '''
+    """ sets constants and returns resolved options dict """
    export[name] = value

@@ -36,7 +36,7 @@ from ansible.module_utils.common.text.converters import to_native, to_text
class AnsibleError(Exception):
-    '''
+    """
    This is the base class for all errors raised from Ansible code,
    and can be instantiated with two optional parameters beyond the
    error message to control whether detailed information is displayed
@@ -48,7 +48,7 @@ class AnsibleError(Exception):
    Where "obj" is some subclass of ansible.parsing.yaml.objects.AnsibleBaseYAMLObject,
    which should be returned by the DataLoader() class.
-    '''
+    """
    def __init__(self, message="", obj=None, show_content=True, suppress_extended_error=False, orig_exc=None):
        super(AnsibleError, self).__init__(message)
@@ -92,11 +92,11 @@ class AnsibleError(Exception):
        return self.message
    def _get_error_lines_from_file(self, file_name, line_number):
-        '''
+        """
        Returns the line in the file which corresponds to the reported error
        location, as well as the line preceding it (if the error did not
        occur on the first line), to provide context to the error.
-        '''
+        """
        target_line = ''
        prev_line = ''
@@ -125,7 +125,7 @@ class AnsibleError(Exception):
        return (target_line, prev_line)
    def _get_extended_error(self):
-        '''
+        """
        Given an object reporting the location of the exception in a file, return
        detailed information regarding it including:
@@ -134,7 +134,7 @@ class AnsibleError(Exception):
        If this error was created with show_content=False, the reporting of content
        is suppressed, as the file contents may be sensitive (ie. vault data).
-        '''
+        """
        error_message = ''
@@ -214,85 +214,85 @@ class AnsibleError(Exception):
class AnsiblePromptInterrupt(AnsibleError):
-    '''User interrupt'''
+    """User interrupt"""
class AnsiblePromptNoninteractive(AnsibleError):
-    '''Unable to get user input'''
+    """Unable to get user input"""
class AnsibleAssertionError(AnsibleError, AssertionError):
-    '''Invalid assertion'''
+    """Invalid assertion"""
    pass
class AnsibleOptionsError(AnsibleError):
-    ''' bad or incomplete options passed '''
+    """ bad or incomplete options passed """
    pass
class AnsibleRequiredOptionError(AnsibleOptionsError):
-    ''' bad or incomplete options passed '''
+    """ bad or incomplete options passed """
    pass
class AnsibleParserError(AnsibleError):
-    ''' something was detected early that is wrong about a playbook or data file '''
+    """ something was detected early that is wrong about a playbook or data file """
    pass
class AnsibleInternalError(AnsibleError):
-    ''' internal safeguards tripped, something happened in the code that should never happen '''
+    """ internal safeguards tripped, something happened in the code that should never happen """
    pass
class AnsibleRuntimeError(AnsibleError):
-    ''' ansible had a problem while running a playbook '''
+    """ ansible had a problem while running a playbook """
    pass
class AnsibleModuleError(AnsibleRuntimeError):
-    ''' a module failed somehow '''
+    """ a module failed somehow """
    pass
class AnsibleConnectionFailure(AnsibleRuntimeError):
-    ''' the transport / connection_plugin had a fatal error '''
+    """ the transport / connection_plugin had a fatal error """
    pass
class AnsibleAuthenticationFailure(AnsibleConnectionFailure):
-    '''invalid username/password/key'''
+    """invalid username/password/key"""
    pass
class AnsibleCallbackError(AnsibleRuntimeError):
-    ''' a callback failure '''
+    """ a callback failure """
    pass
class AnsibleTemplateError(AnsibleRuntimeError):
-    '''A template related error'''
+    """A template related error"""
    pass
class AnsibleFilterError(AnsibleTemplateError):
-    ''' a templating failure '''
+    """ a templating failure """
    pass
class AnsibleLookupError(AnsibleTemplateError):
-    ''' a lookup failure '''
+    """ a lookup failure """
    pass
class AnsibleUndefinedVariable(AnsibleTemplateError):
-    ''' a templating failure '''
+    """ a templating failure """
    pass
class AnsibleFileNotFound(AnsibleRuntimeError):
-    ''' a file missing failure '''
+    """ a file missing failure """
    def __init__(self, message="", obj=None, show_content=True, suppress_extended_error=False, orig_exc=None, paths=None, file_name=None):
@@ -322,7 +322,7 @@ class AnsibleFileNotFound(AnsibleRuntimeError):
# DO NOT USE as they will probably be removed soon.
# We will port the action modules in our tree to use a context manager instead.
class AnsibleAction(AnsibleRuntimeError):
-    ''' Base Exception for Action plugin flow control '''
+    """ Base Exception for Action plugin flow control """
    def __init__(self, message="", obj=None, show_content=True, suppress_extended_error=False, orig_exc=None, result=None):
@@ -335,7 +335,7 @@ class AnsibleAction(AnsibleRuntimeError):
class AnsibleActionSkip(AnsibleAction):
-    ''' an action runtime skip'''
+    """ an action runtime skip"""
    def __init__(self, message="", obj=None, show_content=True, suppress_extended_error=False, orig_exc=None, result=None):
        super(AnsibleActionSkip, self).__init__(message=message, obj=obj, show_content=show_content,
@@ -344,7 +344,7 @@ class AnsibleActionSkip(AnsibleAction):
class AnsibleActionFail(AnsibleAction):
-    ''' an action runtime failure'''
+    """ an action runtime failure"""
    def __init__(self, message="", obj=None, show_content=True, suppress_extended_error=False, orig_exc=None, result=None):
        super(AnsibleActionFail, self).__init__(message=message, obj=obj, show_content=show_content,
                                                suppress_extended_error=suppress_extended_error, orig_exc=orig_exc, result=result)
@@ -352,37 +352,37 @@ class AnsibleActionFail(AnsibleAction):
class _AnsibleActionDone(AnsibleAction):
-    ''' an action runtime early exit'''
+    """ an action runtime early exit"""
    pass
class AnsiblePluginError(AnsibleError):
-    ''' base class for Ansible plugin-related errors that do not need AnsibleError contextual data '''
+    """ base class for Ansible plugin-related errors that do not need AnsibleError contextual data """
    def __init__(self, message=None, plugin_load_context=None):
        super(AnsiblePluginError, self).__init__(message)
        self.plugin_load_context = plugin_load_context
class AnsiblePluginRemovedError(AnsiblePluginError):
-    ''' a requested plugin has been removed '''
+    """ a requested plugin has been removed """
    pass
class AnsiblePluginCircularRedirect(AnsiblePluginError):
-    '''a cycle was detected in plugin redirection'''
+    """a cycle was detected in plugin redirection"""
    pass
class AnsibleCollectionUnsupportedVersionError(AnsiblePluginError):
-    '''a collection is not supported by this version of Ansible'''
+    """a collection is not supported by this version of Ansible"""
    pass
class AnsibleFilterTypeError(AnsibleTemplateError, TypeError):
-    ''' a Jinja filter templating failure due to bad type'''
+    """ a Jinja filter templating failure due to bad type"""
    pass
class AnsiblePluginNotFound(AnsiblePluginError):
-    ''' Indicates we did not find an Ansible plugin '''
+    """ Indicates we did not find an Ansible plugin """
    pass

@@ -74,7 +74,7 @@ _MODULE_UTILS_PATH = os.path.join(os.path.dirname(__file__), '..', 'module_utils
# ******************************************************************************
-ANSIBALLZ_TEMPLATE = u'''%(shebang)s
+ANSIBALLZ_TEMPLATE = u"""%(shebang)s
%(coding)s
_ANSIBALLZ_WRAPPER = True # For test-module.py script to tell this is a ANSIBALLZ_WRAPPER
# This code is part of Ansible, but is an independent component.
@@ -333,9 +333,9 @@ def _ansiballz_main():
if __name__ == '__main__':
    _ansiballz_main()
-'''
+"""
-ANSIBALLZ_COVERAGE_TEMPLATE = '''
+ANSIBALLZ_COVERAGE_TEMPLATE = """
        os.environ['COVERAGE_FILE'] = %(coverage_output)r + '=python-%%s=coverage' %% '.'.join(str(v) for v in sys.version_info[:2])
        import atexit
@@ -355,9 +355,9 @@ ANSIBALLZ_COVERAGE_TEMPLATE = '''
        atexit.register(atexit_coverage)
        cov.start()
-'''
+"""
-ANSIBALLZ_COVERAGE_CHECK_TEMPLATE = '''
+ANSIBALLZ_COVERAGE_CHECK_TEMPLATE = """
        try:
            if PY3:
                import importlib.util
@@ -369,9 +369,9 @@ ANSIBALLZ_COVERAGE_CHECK_TEMPLATE = '''
        except ImportError:
            print('{"msg": "Could not find `coverage` module.", "failed": true}')
            sys.exit(1)
-'''
+"""
-ANSIBALLZ_RLIMIT_TEMPLATE = '''
+ANSIBALLZ_RLIMIT_TEMPLATE = """
        import resource
        existing_soft, existing_hard = resource.getrlimit(resource.RLIMIT_NOFILE)
@@ -385,7 +385,7 @@ ANSIBALLZ_RLIMIT_TEMPLATE = '''
        except ValueError:
            # some platforms (eg macOS) lie about their hard limit
            pass
-'''
+"""
def _strip_comments(source):

@@ -563,9 +563,9 @@ class PlayIterator:
            self._clear_state_errors(state.always_child_state)
    def get_active_state(self, state):
-        '''
+        """
        Finds the active state, recursively if necessary when there are child states.
-        '''
+        """
        if state.run_state == IteratingStates.TASKS and state.tasks_child_state is not None:
            return self.get_active_state(state.tasks_child_state)
        elif state.run_state == IteratingStates.RESCUE and state.rescue_child_state is not None:
@@ -575,10 +575,10 @@ class PlayIterator:
        return state
    def is_any_block_rescuing(self, state):
-        '''
+        """
        Given the current HostState state, determines if the current block, or any child blocks,
        are in rescue mode.
-        '''
+        """
        if state.run_state == IteratingStates.TASKS and state.get_current_block().rescue:
            return True
        if state.tasks_child_state is not None:

@@ -40,10 +40,10 @@ display = Display()
class PlaybookExecutor:
-    '''
+    """
    This is the primary class for executing playbooks, and thus the
    basis for bin/ansible-playbook operation.
-    '''
+    """
    def __init__(self, playbooks, inventory, variable_manager, loader, passwords):
        self._playbooks = playbooks
@@ -74,10 +74,10 @@ class PlaybookExecutor:
            set_default_transport()
    def run(self):
-        '''
+        """
        Run the given playbook, based on the settings in the play which
        may limit the runs to serialized groups, etc.
-        '''
+        """
        result = 0
        entrylist = []
@@ -267,10 +267,10 @@ class PlaybookExecutor:
        return result
    def _get_serialized_batches(self, play):
-        '''
+        """
        Returns a list of hosts, subdivided into batches based on
        the serial size specified in the play.
-        '''
+        """
        # make sure we have a unique list of hosts
        all_hosts = self._inventory.get_hosts(play.hosts, order=play.order)
@@ -313,11 +313,11 @@ class PlaybookExecutor:
        return serialized_batches
    def _generate_retry_inventory(self, retry_path, replay_hosts):
-        '''
+        """
        Called when a playbook run fails. It generates an inventory which allows
        re-running on ONLY the failed hosts. This may duplicate some variable
        information in group_vars/host_vars but that is ok, and expected.
-        '''
+        """
        try:
            makedirs_safe(os.path.dirname(retry_path))
            with open(retry_path, 'w') as fd:

@ -47,11 +47,11 @@ class WorkerQueue(Queue):
class WorkerProcess(multiprocessing_context.Process): # type: ignore[name-defined] class WorkerProcess(multiprocessing_context.Process): # type: ignore[name-defined]
''' """
The worker thread class, which uses TaskExecutor to run tasks The worker thread class, which uses TaskExecutor to run tasks
read from a job queue and pushes results into a results queue read from a job queue and pushes results into a results queue
for reading later. for reading later.
''' """
def __init__(self, final_q, task_vars, host, task, play_context, loader, variable_manager, shared_loader_obj, worker_id): def __init__(self, final_q, task_vars, host, task, play_context, loader, variable_manager, shared_loader_obj, worker_id):
@ -91,13 +91,13 @@ class WorkerProcess(multiprocessing_context.Process): # type: ignore[name-defin
self._new_stdin = open(os.devnull) self._new_stdin = open(os.devnull)
def start(self): def start(self):
''' """
multiprocessing.Process replaces the worker's stdin with a new file multiprocessing.Process replaces the worker's stdin with a new file
but we wish to preserve it if it is connected to a terminal. but we wish to preserve it if it is connected to a terminal.
Therefore dup a copy prior to calling the real start(), Therefore dup a copy prior to calling the real start(),
ensuring the descriptor is preserved somewhere in the new child, and ensuring the descriptor is preserved somewhere in the new child, and
make sure it is closed in the parent when start() completes. make sure it is closed in the parent when start() completes.
''' """
self._save_stdin() self._save_stdin()
# FUTURE: this lock can be removed once a more generalized pre-fork thread pause is in place # FUTURE: this lock can be removed once a more generalized pre-fork thread pause is in place
@ -108,12 +108,12 @@ class WorkerProcess(multiprocessing_context.Process): # type: ignore[name-defin
self._new_stdin.close() self._new_stdin.close()
def _hard_exit(self, e): def _hard_exit(self, e):
''' """
There is no safe exception to return to higher level code that does not There is no safe exception to return to higher level code that does not
risk an innocent try/except finding itself executing in the wrong risk an innocent try/except finding itself executing in the wrong
process. All code executing above WorkerProcess.run() on the stack process. All code executing above WorkerProcess.run() on the stack
conceptually belongs to another program. conceptually belongs to another program.
''' """
try: try:
display.debug(u"WORKER HARD EXIT: %s" % to_text(e)) display.debug(u"WORKER HARD EXIT: %s" % to_text(e))
@ -126,7 +126,7 @@ class WorkerProcess(multiprocessing_context.Process): # type: ignore[name-defin
os._exit(1) os._exit(1)
def run(self): def run(self):
''' """
Wrap _run() to ensure no possibility an errant exception can cause Wrap _run() to ensure no possibility an errant exception can cause
control to return to the StrategyBase task loop, or any other code control to return to the StrategyBase task loop, or any other code
higher in the stack. higher in the stack.
@ -134,7 +134,7 @@ class WorkerProcess(multiprocessing_context.Process): # type: ignore[name-defin
As multiprocessing in Python 2.x provides no protection, it is possible As multiprocessing in Python 2.x provides no protection, it is possible
a try/except added in far-away code can cause a crashed child process a try/except added in far-away code can cause a crashed child process
to suddenly assume the role and prior state of its parent. to suddenly assume the role and prior state of its parent.
''' """
try: try:
return self._run() return self._run()
except BaseException as e: except BaseException as e:
@ -155,11 +155,11 @@ class WorkerProcess(multiprocessing_context.Process): # type: ignore[name-defin
sys.stdout = sys.stderr = open(os.devnull, 'w') sys.stdout = sys.stderr = open(os.devnull, 'w')
def _run(self): def _run(self):
''' """
Called when the process is started. Pushes the result onto the Called when the process is started. Pushes the result onto the
results queue. We also remove the host from the blocked hosts list, to results queue. We also remove the host from the blocked hosts list, to
signify that it is ready for its next task. signify that it is ready for its next task.
''' """
# import cProfile, pstats, StringIO # import cProfile, pstats, StringIO
# pr = cProfile.Profile() # pr = cProfile.Profile()

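The start()/run()/_hard_exit() docstrings above all serve one idea: nothing raised inside the worker may propagate into stack frames that conceptually belong to the parent process. A minimal standalone sketch of that containment pattern, using only the standard library (class and job names here are invented for illustration, not the real WorkerProcess API):

import multiprocessing
import os
import sys
import traceback


def faulty_job():
    """Stand-in task that raises, so the containment can be observed."""
    return 1 / 0


class ContainedWorker(multiprocessing.Process):
    """Illustrative worker: no exception raised in run() may reach frames
    that conceptually belong to the parent process."""

    def _hard_exit(self, exc):
        # Report what we can, then terminate the child immediately so code
        # higher on the stack never resumes inside this process.
        try:
            traceback.print_exc(file=sys.stderr)
        finally:
            os._exit(1)

    def run(self):
        try:
            faulty_job()
        except BaseException as exc:  # deliberate catch-all, mirroring the docstring
            self._hard_exit(exc)


if __name__ == '__main__':
    worker = ContainedWorker()
    worker.start()
    worker.join()
    print('worker exit code:', worker.exitcode)  # 1; the parent is unaffected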
@ -23,7 +23,7 @@ from ansible.utils.vars import merge_hash
class AggregateStats: class AggregateStats:
''' holds stats about per-host activity during playbook runs ''' """ holds stats about per-host activity during playbook runs """
def __init__(self): def __init__(self):
@ -40,7 +40,7 @@ class AggregateStats:
self.custom = {} self.custom = {}
def increment(self, what, host): def increment(self, what, host):
''' helper function to bump a statistic ''' """ helper function to bump a statistic """
self.processed[host] = 1 self.processed[host] = 1
prev = (getattr(self, what)).get(host, 0) prev = (getattr(self, what)).get(host, 0)
@ -57,7 +57,7 @@ class AggregateStats:
_what[host] = 0 _what[host] = 0
def summarize(self, host): def summarize(self, host):
''' return information about a particular host ''' """ return information about a particular host """
return dict( return dict(
ok=self.ok.get(host, 0), ok=self.ok.get(host, 0),
@ -70,7 +70,7 @@ class AggregateStats:
) )
def set_custom_stats(self, which, what, host=None): def set_custom_stats(self, which, what, host=None):
''' allow setting of a custom stat''' """ allow setting of a custom stat"""
if host is None: if host is None:
host = '_run' host = '_run'
@ -80,7 +80,7 @@ class AggregateStats:
self.custom[host][which] = what self.custom[host][which] = what
def update_custom_stats(self, which, what, host=None): def update_custom_stats(self, which, what, host=None):
''' allow aggregation of a custom stat''' """ allow aggregation of a custom stat"""
if host is None: if host is None:
host = '_run' host = '_run'

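The AggregateStats docstrings above describe simple per-host counters that are bumped as results come in and summarized per host at the end of a run. A toy sketch of that bookkeeping (a reduced attribute set, not the real class):

class MiniStats:
    """Tracks ok/failure counts per host, mirroring increment()/summarize()."""

    def __init__(self):
        self.processed = {}
        self.ok = {}
        self.failures = {}

    def increment(self, what, host):
        # Remember we saw this host, then bump the named counter for it.
        self.processed[host] = 1
        counter = getattr(self, what)
        counter[host] = counter.get(host, 0) + 1

    def summarize(self, host):
        return dict(
            ok=self.ok.get(host, 0),
            failures=self.failures.get(host, 0),
        )


stats = MiniStats()
stats.increment('ok', 'web01')
stats.increment('failures', 'web01')
print(stats.summarize('web01'))  # {'ok': 1, 'failures': 1}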
@ -61,10 +61,10 @@ def task_timeout(signum, frame):
def remove_omit(task_args, omit_token): def remove_omit(task_args, omit_token):
''' """
Remove args with a value equal to the ``omit_token`` recursively Remove args with a value equal to the ``omit_token`` recursively
to align with now having suboptions in the argument_spec to align with now having suboptions in the argument_spec
''' """
if not isinstance(task_args, dict): if not isinstance(task_args, dict):
return task_args return task_args
@ -85,12 +85,12 @@ def remove_omit(task_args, omit_token):
class TaskExecutor: class TaskExecutor:
''' """
This is the main worker class for the executor pipeline, which This is the main worker class for the executor pipeline, which
handles loading an action plugin to actually dispatch the task to handles loading an action plugin to actually dispatch the task to
a given host. This class roughly corresponds to the old Runner() a given host. This class roughly corresponds to the old Runner()
class. class.
''' """
def __init__(self, host, task, job_vars, play_context, new_stdin, loader, shared_loader_obj, final_q, variable_manager): def __init__(self, host, task, job_vars, play_context, new_stdin, loader, shared_loader_obj, final_q, variable_manager):
self._host = host self._host = host
@ -108,12 +108,12 @@ class TaskExecutor:
self._task.squash() self._task.squash()
def run(self): def run(self):
''' """
The main executor entrypoint, where we determine if the specified The main executor entrypoint, where we determine if the specified
task requires looping and either runs the task with self._run_loop() task requires looping and either runs the task with self._run_loop()
or self._execute(). After that, the returned results are parsed and or self._execute(). After that, the returned results are parsed and
returned as a dict. returned as a dict.
''' """
display.debug("in run() - task %s" % self._task._uuid) display.debug("in run() - task %s" % self._task._uuid)
@ -218,10 +218,10 @@ class TaskExecutor:
display.debug(u"error closing connection: %s" % to_text(e)) display.debug(u"error closing connection: %s" % to_text(e))
def _get_loop_items(self): def _get_loop_items(self):
''' """
Loads a lookup plugin to handle the with_* portion of a task (if specified), Loads a lookup plugin to handle the with_* portion of a task (if specified),
and returns the items result. and returns the items result.
''' """
# get search path for this task to pass to lookup plugins # get search path for this task to pass to lookup plugins
self._job_vars['ansible_search_path'] = self._task.get_search_path() self._job_vars['ansible_search_path'] = self._task.get_search_path()
@ -266,11 +266,11 @@ class TaskExecutor:
return items return items
def _run_loop(self, items): def _run_loop(self, items):
''' """
Runs the task with the loop items specified and collates the result Runs the task with the loop items specified and collates the result
into an array named 'results' which is inserted into the final result into an array named 'results' which is inserted into the final result
along with the item for which the loop ran. along with the item for which the loop ran.
''' """
task_vars = self._job_vars task_vars = self._job_vars
templar = Templar(loader=self._loader, variables=task_vars) templar = Templar(loader=self._loader, variables=task_vars)
@ -452,11 +452,11 @@ class TaskExecutor:
variables.update(delegated_vars) variables.update(delegated_vars)
def _execute(self, variables=None): def _execute(self, variables=None):
''' """
The primary workhorse of the executor system, this runs the task The primary workhorse of the executor system, this runs the task
on the specified host (which may be the delegated_to host) and handles on the specified host (which may be the delegated_to host) and handles
the retry/until and block rescue/always execution the retry/until and block rescue/always execution
''' """
if variables is None: if variables is None:
variables = self._job_vars variables = self._job_vars
@ -858,9 +858,9 @@ class TaskExecutor:
return result return result
def _poll_async_result(self, result, templar, task_vars=None): def _poll_async_result(self, result, templar, task_vars=None):
''' """
Polls for the specified JID to be complete Polls for the specified JID to be complete
''' """
if task_vars is None: if task_vars is None:
task_vars = self._job_vars task_vars = self._job_vars
@ -976,10 +976,10 @@ class TaskExecutor:
return become return become
def _get_connection(self, cvars, templar, current_connection): def _get_connection(self, cvars, templar, current_connection):
''' """
Reads the connection property for the host, and returns the Reads the connection property for the host, and returns the
correct connection object from the list of connection plugins correct connection object from the list of connection plugins
''' """
self._play_context.connection = current_connection self._play_context.connection = current_connection
@ -1134,15 +1134,15 @@ class TaskExecutor:
return varnames return varnames
def _get_action_handler(self, templar): def _get_action_handler(self, templar):
''' """
Returns the correct action plugin to handle the requested task action Returns the correct action plugin to handle the requested task action
''' """
return self._get_action_handler_with_module_context(templar)[0] return self._get_action_handler_with_module_context(templar)[0]
def _get_action_handler_with_module_context(self, templar): def _get_action_handler_with_module_context(self, templar):
''' """
Returns the correct action plugin to handle the requested task action and the module context Returns the correct action plugin to handle the requested task action and the module context
''' """
module_collection, separator, module_name = self._task.action.rpartition(".") module_collection, separator, module_name = self._task.action.rpartition(".")
module_prefix = module_name.split('_')[0] module_prefix = module_name.split('_')[0]
if module_collection: if module_collection:
@ -1216,9 +1216,9 @@ CLI_STUB_NAME = 'ansible_connection_cli_stub.py'
def start_connection(play_context, options, task_uuid): def start_connection(play_context, options, task_uuid):
''' """
Starts the persistent connection Starts the persistent connection
''' """
env = os.environ.copy() env = os.environ.copy()
env.update({ env.update({

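remove_omit(), whose docstring appears above, drops every argument whose value equals the omit token and recurses into nested dicts so suboptions are covered as well. A hedged, standalone re-sketch of that idea (the token value is arbitrary):

def strip_omitted(args, omit_token):
    """Return a copy of args with every value equal to omit_token removed,
    recursing into nested dicts (illustrative version of remove_omit)."""
    if not isinstance(args, dict):
        return args
    cleaned = {}
    for key, value in args.items():
        if value == omit_token:
            continue  # drop omitted leaf values entirely
        if isinstance(value, dict):
            cleaned[key] = strip_omitted(value, omit_token)
        else:
            cleaned[key] = value
    return cleaned


print(strip_omitted({'a': 1, 'b': '__omit__', 'c': {'d': '__omit__', 'e': 2}}, '__omit__'))
# {'a': 1, 'c': {'e': 2}}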
@ -112,7 +112,7 @@ class AnsibleEndPlay(Exception):
class TaskQueueManager: class TaskQueueManager:
''' """
This class handles the multiprocessing requirements of Ansible by This class handles the multiprocessing requirements of Ansible by
creating a pool of worker forks, a result handler fork, and a creating a pool of worker forks, a result handler fork, and a
manager object with shared datastructures/queues for coordinating manager object with shared datastructures/queues for coordinating
@ -120,7 +120,7 @@ class TaskQueueManager:
The queue manager is responsible for loading the play strategy plugin, The queue manager is responsible for loading the play strategy plugin,
which dispatches the Play's tasks to hosts. which dispatches the Play's tasks to hosts.
''' """
RUN_OK = 0 RUN_OK = 0
RUN_ERROR = 1 RUN_ERROR = 1
@ -176,11 +176,11 @@ class TaskQueueManager:
self._workers.append(None) self._workers.append(None)
def load_callbacks(self): def load_callbacks(self):
''' """
Loads all available callbacks, with the exception of those which Loads all available callbacks, with the exception of those which
utilize the CALLBACK_TYPE option. When CALLBACK_TYPE is set to 'stdout', utilize the CALLBACK_TYPE option. When CALLBACK_TYPE is set to 'stdout',
only one such callback plugin will be loaded. only one such callback plugin will be loaded.
''' """
if self._callbacks_loaded: if self._callbacks_loaded:
return return
@ -269,13 +269,13 @@ class TaskQueueManager:
self._callbacks_loaded = True self._callbacks_loaded = True
def run(self, play): def run(self, play):
''' """
Iterates over the roles/tasks in a play, using the given (or default) Iterates over the roles/tasks in a play, using the given (or default)
strategy for queueing tasks. The default is the linear strategy, which strategy for queueing tasks. The default is the linear strategy, which
operates like classic Ansible by keeping all hosts in lock-step with operates like classic Ansible by keeping all hosts in lock-step with
a given task (meaning no hosts move on to the next task until all hosts a given task (meaning no hosts move on to the next task until all hosts
are done with the current task). are done with the current task).
''' """
if not self._callbacks_loaded: if not self._callbacks_loaded:
self.load_callbacks() self.load_callbacks()

@ -22,11 +22,11 @@ CLEAN_EXCEPTIONS = (
class TaskResult: class TaskResult:
''' """
This class is responsible for interpreting the resulting data This class is responsible for interpreting the resulting data
from an executed task, and provides helper methods for determining from an executed task, and provides helper methods for determining
the result of a given task. the result of a given task.
''' """
def __init__(self, host, task, return_data, task_fields=None): def __init__(self, host, task, return_data, task_fields=None):
self._host = host self._host = host
@ -93,7 +93,7 @@ class TaskResult:
return ret return ret
def _check_key(self, key): def _check_key(self, key):
'''get a specific key from the result or its items''' """get a specific key from the result or its items"""
if isinstance(self._result, dict) and key in self._result: if isinstance(self._result, dict) and key in self._result:
return self._result.get(key, False) return self._result.get(key, False)
@ -106,7 +106,7 @@ class TaskResult:
def clean_copy(self): def clean_copy(self):
''' returns 'clean' taskresult object ''' """ returns 'clean' taskresult object """
# FIXME: clean task_fields, _task and _host copies # FIXME: clean task_fields, _task and _host copies
result = TaskResult(self._host, self._task, {}, self._task_fields) result = TaskResult(self._host, self._task, {}, self._task_fields)

@ -18,7 +18,7 @@
# along with Ansible. If not, see <http://www.gnu.org/licenses/>. # along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# #
######################################################################## ########################################################################
''' This manages remote shared Ansible objects, mainly roles''' """ This manages remote shared Ansible objects, mainly roles"""
from __future__ import annotations from __future__ import annotations
@ -40,7 +40,7 @@ def get_collections_galaxy_meta_info():
class Galaxy(object): class Galaxy(object):
''' Keeps global galaxy info ''' """ Keeps global galaxy info """
def __init__(self): def __init__(self):
# TODO: eventually remove this as it contains a mishmash of properties that aren't really global # TODO: eventually remove this as it contains a mishmash of properties that aren't really global

@ -40,10 +40,10 @@ display = Display()
class KeycloakToken(object): class KeycloakToken(object):
'''A token granted by a Keycloak server. """A token granted by a Keycloak server.
Like sso.redhat.com as used by cloud.redhat.com Like sso.redhat.com as used by cloud.redhat.com
ie Automation Hub''' ie Automation Hub"""
token_type = 'Bearer' token_type = 'Bearer'
@ -105,7 +105,7 @@ class KeycloakToken(object):
class GalaxyToken(object): class GalaxyToken(object):
''' Class for storing and retrieving the local galaxy token ''' """ Class for storing and retrieving the local galaxy token """
token_type = 'Token' token_type = 'Token'

@ -101,7 +101,7 @@ class InventoryData(object):
return new_host return new_host
def reconcile_inventory(self): def reconcile_inventory(self):
''' Ensure inventory basic rules, run after updates ''' """ Ensure inventory basic rules, run after updates """
display.debug('Reconcile groups and hosts in inventory.') display.debug('Reconcile groups and hosts in inventory.')
self.current_source = None self.current_source = None
@ -145,7 +145,7 @@ class InventoryData(object):
self._groups_dict_cache = {} self._groups_dict_cache = {}
def get_host(self, hostname): def get_host(self, hostname):
''' fetch host object using name; deal with implicit localhost ''' """ fetch host object using name; deal with implicit localhost """
matching_host = self.hosts.get(hostname, None) matching_host = self.hosts.get(hostname, None)
@ -157,7 +157,7 @@ class InventoryData(object):
return matching_host return matching_host
def add_group(self, group): def add_group(self, group):
''' adds a group to inventory if not there already, returns the name actually used ''' """ adds a group to inventory if not there already, returns the name actually used """
if group: if group:
if not isinstance(group, string_types): if not isinstance(group, string_types):
@ -188,7 +188,7 @@ class InventoryData(object):
h.remove_group(group) h.remove_group(group)
def add_host(self, host, group=None, port=None): def add_host(self, host, group=None, port=None):
''' adds a host to inventory and possibly a group if not there already ''' """ adds a host to inventory and possibly a group if not there already """
if host: if host:
if not isinstance(host, string_types): if not isinstance(host, string_types):
@ -242,7 +242,7 @@ class InventoryData(object):
g.remove_host(host) g.remove_host(host)
def set_variable(self, entity, varname, value): def set_variable(self, entity, varname, value):
''' sets a variable for an inventory object ''' """ sets a variable for an inventory object """
if entity in self.groups: if entity in self.groups:
inv_object = self.groups[entity] inv_object = self.groups[entity]
@ -255,7 +255,7 @@ class InventoryData(object):
display.debug('set %s for %s' % (varname, entity)) display.debug('set %s for %s' % (varname, entity))
def add_child(self, group, child): def add_child(self, group, child):
''' Add host or group to group ''' """ Add host or group to group """
added = False added = False
if group in self.groups: if group in self.groups:
g = self.groups[group] g = self.groups[group]

@ -59,7 +59,7 @@ class InventoryObjectType(Enum):
class Group: class Group:
''' a group of ansible hosts ''' """ a group of ansible hosts """
base_type = InventoryObjectType.GROUP base_type = InventoryObjectType.GROUP
# __slots__ = [ 'name', 'hosts', 'vars', 'child_groups', 'parent_groups', 'depth', '_hosts_cache' ] # __slots__ = [ 'name', 'hosts', 'vars', 'child_groups', 'parent_groups', 'depth', '_hosts_cache' ]
@ -120,7 +120,7 @@ class Group:
self.parent_groups.append(g) self.parent_groups.append(g)
def _walk_relationship(self, rel, include_self=False, preserve_ordering=False): def _walk_relationship(self, rel, include_self=False, preserve_ordering=False):
''' """
Given `rel` that is an iterable property of Group, Given `rel` that is an iterable property of Group,
constituting a directed acyclic graph among all groups, constituting a directed acyclic graph among all groups,
Returns a set of all groups in full tree Returns a set of all groups in full tree
@ -132,7 +132,7 @@ class Group:
| / are directed upward | / are directed upward
F F
Called on F, returns set of (A, B, C, D, E) Called on F, returns set of (A, B, C, D, E)
''' """
seen = set([]) seen = set([])
unprocessed = set(getattr(self, rel)) unprocessed = set(getattr(self, rel))
if include_self: if include_self:

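_walk_relationship(), documented above with the small DAG diagram, gathers every group reachable through one relationship attribute (child_groups or parent_groups) while guarding against groups that are reachable along several paths. A compact breadth-first sketch of the same traversal over a toy group class (not the inventory Group):

class ToyGroup:
    """Toy stand-in for an inventory group with parent links."""

    def __init__(self, name):
        self.name = name
        self.parent_groups = []


def walk_relationship(group, rel, include_self=False):
    # Breadth-first walk over the named relationship; `seen` prevents
    # revisiting a group reachable along more than one path.
    seen = set()
    unprocessed = set(getattr(group, rel))
    if include_self:
        unprocessed.add(group)
    while unprocessed:
        seen.update(unprocessed)
        next_level = set()
        for g in unprocessed:
            next_level.update(getattr(g, rel))
        unprocessed = next_level - seen
    return seen


a, b, f = ToyGroup('A'), ToyGroup('B'), ToyGroup('F')
b.parent_groups.append(a)
f.parent_groups.append(b)
print(sorted(g.name for g in walk_relationship(f, 'parent_groups')))  # ['A', 'B']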
@ -28,7 +28,7 @@ __all__ = ['Host']
class Host: class Host:
''' a single ansible host ''' """ a single ansible host """
base_type = InventoryObjectType.HOST base_type = InventoryObjectType.HOST
# __slots__ = [ 'name', 'vars', 'groups' ] # __slots__ = [ 'name', 'vars', 'groups' ]

@ -50,7 +50,7 @@ IGNORED_EXTS = [b'%s$' % to_bytes(re.escape(x)) for x in C.INVENTORY_IGNORE_EXTS
IGNORED = re.compile(b'|'.join(IGNORED_ALWAYS + IGNORED_PATTERNS + IGNORED_EXTS)) IGNORED = re.compile(b'|'.join(IGNORED_ALWAYS + IGNORED_PATTERNS + IGNORED_EXTS))
PATTERN_WITH_SUBSCRIPT = re.compile( PATTERN_WITH_SUBSCRIPT = re.compile(
r'''^ r"""^
(.+) # A pattern expression ending with... (.+) # A pattern expression ending with...
\[(?: # A [subscript] expression comprising: \[(?: # A [subscript] expression comprising:
(-?[0-9]+)| # A single positive or negative number (-?[0-9]+)| # A single positive or negative number
@ -58,12 +58,12 @@ PATTERN_WITH_SUBSCRIPT = re.compile(
([0-9]*) ([0-9]*)
)\] )\]
$ $
''', re.X """, re.X
) )
def order_patterns(patterns): def order_patterns(patterns):
''' takes a list of patterns and reorders them by modifier to apply them consistently ''' """ takes a list of patterns and reorders them by modifier to apply them consistently """
# FIXME: this goes away if we apply patterns incrementally or by groups # FIXME: this goes away if we apply patterns incrementally or by groups
pattern_regular = [] pattern_regular = []
@ -125,19 +125,19 @@ def split_host_pattern(pattern):
# This mishandles IPv6 addresses, and is retained only for backwards # This mishandles IPv6 addresses, and is retained only for backwards
# compatibility. # compatibility.
patterns = re.findall( patterns = re.findall(
to_text(r'''(?: # We want to match something comprising: to_text(r"""(?: # We want to match something comprising:
[^\s:\[\]] # (anything other than whitespace or ':[]' [^\s:\[\]] # (anything other than whitespace or ':[]'
| # ...or... | # ...or...
\[[^\]]*\] # a single complete bracketed expression) \[[^\]]*\] # a single complete bracketed expression)
)+ # occurring once or more )+ # occurring once or more
'''), pattern, re.X """), pattern, re.X
) )
return [p.strip() for p in patterns if p.strip()] return [p.strip() for p in patterns if p.strip()]
class InventoryManager(object): class InventoryManager(object):
''' Creates and manages inventory ''' """ Creates and manages inventory """
def __init__(self, loader, sources=None, parse=True, cache=True): def __init__(self, loader, sources=None, parse=True, cache=True):
@ -197,7 +197,7 @@ class InventoryManager(object):
return self._inventory.get_host(hostname) return self._inventory.get_host(hostname)
def _fetch_inventory_plugins(self): def _fetch_inventory_plugins(self):
''' sets up loaded inventory plugins for usage ''' """ sets up loaded inventory plugins for usage """
display.vvvv('setting up inventory plugins') display.vvvv('setting up inventory plugins')
@ -215,7 +215,7 @@ class InventoryManager(object):
return plugins return plugins
def parse_sources(self, cache=False): def parse_sources(self, cache=False):
''' iterate over inventory sources and parse each one to populate it''' """ iterate over inventory sources and parse each one to populate it"""
parsed = False parsed = False
# allow for multiple inventory parsing # allow for multiple inventory parsing
@ -243,7 +243,7 @@ class InventoryManager(object):
host.vars = combine_vars(host.vars, get_vars_from_inventory_sources(self._loader, self._sources, [host], 'inventory')) host.vars = combine_vars(host.vars, get_vars_from_inventory_sources(self._loader, self._sources, [host], 'inventory'))
def parse_source(self, source, cache=False): def parse_source(self, source, cache=False):
''' Generate or update inventory for the source provided ''' """ Generate or update inventory for the source provided """
parsed = False parsed = False
failures = [] failures = []
@ -335,12 +335,12 @@ class InventoryManager(object):
return parsed return parsed
def clear_caches(self): def clear_caches(self):
''' clear all caches ''' """ clear all caches """
self._hosts_patterns_cache = {} self._hosts_patterns_cache = {}
self._pattern_cache = {} self._pattern_cache = {}
def refresh_inventory(self): def refresh_inventory(self):
''' recalculate inventory ''' """ recalculate inventory """
self.clear_caches() self.clear_caches()
self._inventory = InventoryData() self._inventory = InventoryData()
@ -657,9 +657,9 @@ class InventoryManager(object):
self._pattern_cache = {} self._pattern_cache = {}
def add_dynamic_host(self, host_info, result_item): def add_dynamic_host(self, host_info, result_item):
''' """
Helper function to add a new host to inventory based on a task result. Helper function to add a new host to inventory based on a task result.
''' """
changed = False changed = False
if not result_item.get('refresh'): if not result_item.get('refresh'):
@ -697,10 +697,10 @@ class InventoryManager(object):
result_item['changed'] = changed result_item['changed'] = changed
def add_dynamic_group(self, host, result_item): def add_dynamic_group(self, host, result_item):
''' """
Helper function to add a group (if it does not exist), and to assign the Helper function to add a group (if it does not exist), and to assign the
specified host to that group. specified host to that group.
''' """
changed = False changed = False

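PATTERN_WITH_SUBSCRIPT and the split_host_pattern() regex above both rely on verbose (re.X) patterns. The subscript idea can be shown with a simplified stand-in pattern that extracts the slice bounds from something like webservers[0:2] (this regex is illustrative, not the exact one in the source):

import re

# Simplified "pattern[start:end]" matcher written in verbose style.
SUBSCRIPT = re.compile(
    r"""^
    (?P<pattern>.+?)            # the group/host pattern itself
    \[
        (?P<start>-?\d*)        # optional start index
        (?::(?P<end>-?\d*))?    # optional ':end' part
    \]
    $""",
    re.X,
)

m = SUBSCRIPT.match('webservers[0:2]')
if m:
    print(m.group('pattern'), m.group('start'), m.group('end'))  # webservers 0 2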
@ -199,14 +199,14 @@ PERMS_RE = re.compile(r'^[rwxXstugo]*$')
# #
def get_platform(): def get_platform():
''' """
**Deprecated** Use :py:func:`platform.system` directly. **Deprecated** Use :py:func:`platform.system` directly.
:returns: Name of the platform the module is running on in a native string :returns: Name of the platform the module is running on in a native string
Returns a native string that labels the platform ("Linux", "Solaris", etc). Currently, this is Returns a native string that labels the platform ("Linux", "Solaris", etc). Currently, this is
the result of calling :py:func:`platform.system`. the result of calling :py:func:`platform.system`.
''' """
return platform.system() return platform.system()
# End deprecated functions # End deprecated functions
@ -231,7 +231,7 @@ def get_all_subclasses(cls):
def heuristic_log_sanitize(data, no_log_values=None): def heuristic_log_sanitize(data, no_log_values=None):
''' Remove strings that look like passwords from log messages ''' """ Remove strings that look like passwords from log messages """
# Currently filters: # Currently filters:
# user:pass@foo/whatever and http://username:pass@wherever/foo # user:pass@foo/whatever and http://username:pass@wherever/foo
# This code has false positives and consumes parts of logs that are # This code has false positives and consumes parts of logs that are
@ -296,7 +296,7 @@ def heuristic_log_sanitize(data, no_log_values=None):
def _load_params(): def _load_params():
''' read the modules parameters and store them globally. """ read the modules parameters and store them globally.
This function may be needed for certain very dynamic custom modules which This function may be needed for certain very dynamic custom modules which
want to process the parameters that are being handed to the module. Since want to process the parameters that are being handed to the module. Since
@ -305,7 +305,7 @@ def _load_params():
will try not to break it gratuitously. It is certainly more future-proof will try not to break it gratuitously. It is certainly more future-proof
to call this function and consume its outputs than to implement the logic to call this function and consume its outputs than to implement the logic
inside it as a copy in your own code. inside it as a copy in your own code.
''' """
global _ANSIBLE_ARGS global _ANSIBLE_ARGS
if _ANSIBLE_ARGS is not None: if _ANSIBLE_ARGS is not None:
buffer = _ANSIBLE_ARGS buffer = _ANSIBLE_ARGS
@ -363,13 +363,13 @@ class AnsibleModule(object):
required_one_of=None, add_file_common_args=False, required_one_of=None, add_file_common_args=False,
supports_check_mode=False, required_if=None, required_by=None): supports_check_mode=False, required_if=None, required_by=None):
''' """
Common code for quickly building an ansible module in Python Common code for quickly building an ansible module in Python
(although you can write modules with anything that can return JSON). (although you can write modules with anything that can return JSON).
See :ref:`developing_modules_general` for a general introduction See :ref:`developing_modules_general` for a general introduction
and :ref:`developing_program_flow_modules` for more detailed explanation. and :ref:`developing_program_flow_modules` for more detailed explanation.
''' """
self._name = os.path.basename(__file__) # initialize name until we can parse from options self._name = os.path.basename(__file__) # initialize name until we can parse from options
self.argument_spec = argument_spec self.argument_spec = argument_spec
@ -516,13 +516,13 @@ class AnsibleModule(object):
self.log('[DEPRECATION WARNING] %s %s' % (msg, version)) self.log('[DEPRECATION WARNING] %s %s' % (msg, version))
def load_file_common_arguments(self, params, path=None): def load_file_common_arguments(self, params, path=None):
''' """
many modules deal with files, this encapsulates common many modules deal with files, this encapsulates common
options that the file module accepts such that it is directly options that the file module accepts such that it is directly
available to all modules and they can share code. available to all modules and they can share code.
Allows overwriting the path/dest module argument by providing path. Allows overwriting the path/dest module argument by providing path.
''' """
if path is None: if path is None:
path = params.get('path', params.get('dest', None)) path = params.get('path', params.get('dest', None))
@ -635,12 +635,12 @@ class AnsibleModule(object):
return (uid, gid) return (uid, gid)
def find_mount_point(self, path): def find_mount_point(self, path):
''' """
Takes a path and returns its mount point Takes a path and returns its mount point
:param path: a string type with a filesystem path :param path: a string type with a filesystem path
:returns: the path to the mount point as a text type :returns: the path to the mount point as a text type
''' """
b_path = os.path.realpath(to_bytes(os.path.expanduser(os.path.expandvars(path)), errors='surrogate_or_strict')) b_path = os.path.realpath(to_bytes(os.path.expanduser(os.path.expandvars(path)), errors='surrogate_or_strict'))
while not os.path.ismount(b_path): while not os.path.ismount(b_path):
@ -1115,10 +1115,10 @@ class AnsibleModule(object):
return self.set_fs_attributes_if_different(file_args, changed, diff, expand) return self.set_fs_attributes_if_different(file_args, changed, diff, expand)
def add_path_info(self, kwargs): def add_path_info(self, kwargs):
''' """
for results that are files, supplement the info about the file for results that are files, supplement the info about the file
in the return path with stats about the file path. in the return path with stats about the file path.
''' """
path = kwargs.get('path', kwargs.get('dest', None)) path = kwargs.get('path', kwargs.get('dest', None))
if path is None: if path is None:
@ -1155,10 +1155,10 @@ class AnsibleModule(object):
return kwargs return kwargs
def _check_locale(self): def _check_locale(self):
''' """
Uses the locale module to test the currently set locale Uses the locale module to test the currently set locale
(per the LANG and LC_CTYPE environment settings) (per the LANG and LC_CTYPE environment settings)
''' """
try: try:
# setting the locale to '' uses the default locale # setting the locale to '' uses the default locale
# as it would be returned by locale.getdefaultlocale() # as it would be returned by locale.getdefaultlocale()
@ -1206,11 +1206,11 @@ class AnsibleModule(object):
return safe_eval(value, locals, include_exceptions) return safe_eval(value, locals, include_exceptions)
def _load_params(self): def _load_params(self):
''' read the input and set the params attribute. """ read the input and set the params attribute.
This method is for backwards compatibility. The guts of the function This method is for backwards compatibility. The guts of the function
were moved out in 2.1 so that custom modules could read the parameters. were moved out in 2.1 so that custom modules could read the parameters.
''' """
# debug overrides to read args from file or cmdline # debug overrides to read args from file or cmdline
self.params = _load_params() self.params = _load_params()
@ -1297,7 +1297,7 @@ class AnsibleModule(object):
self._log_to_syslog(journal_msg) self._log_to_syslog(journal_msg)
def _log_invocation(self): def _log_invocation(self):
''' log that ansible ran the module ''' """ log that ansible ran the module """
# TODO: generalize a separate log function and make log_invocation use it # TODO: generalize a separate log function and make log_invocation use it
# Sanitize possible password argument when logging. # Sanitize possible password argument when logging.
log_args = dict() log_args = dict()
@ -1350,7 +1350,7 @@ class AnsibleModule(object):
return None return None
def get_bin_path(self, arg, required=False, opt_dirs=None): def get_bin_path(self, arg, required=False, opt_dirs=None):
''' """
Find system executable in PATH. Find system executable in PATH.
:param arg: The executable to find. :param arg: The executable to find.
@ -1358,7 +1358,7 @@ class AnsibleModule(object):
:param opt_dirs: optional list of directories to search in addition to ``PATH`` :param opt_dirs: optional list of directories to search in addition to ``PATH``
:returns: if found return full path; otherwise return original arg, unless 'warning' then return None :returns: if found return full path; otherwise return original arg, unless 'warning' then return None
:raises: Sysexit: if arg is not found and required=True (via fail_json) :raises: Sysexit: if arg is not found and required=True (via fail_json)
''' """
bin_path = None bin_path = None
try: try:
@ -1370,7 +1370,7 @@ class AnsibleModule(object):
return bin_path return bin_path
def boolean(self, arg): def boolean(self, arg):
'''Convert the argument to a boolean''' """Convert the argument to a boolean"""
if arg is None: if arg is None:
return arg return arg
@ -1447,14 +1447,14 @@ class AnsibleModule(object):
print('\n%s' % self.jsonify(kwargs)) print('\n%s' % self.jsonify(kwargs))
def exit_json(self, **kwargs): def exit_json(self, **kwargs):
''' return from the module, without error ''' """ return from the module, without error """
self.do_cleanup_files() self.do_cleanup_files()
self._return_formatted(kwargs) self._return_formatted(kwargs)
sys.exit(0) sys.exit(0)
def fail_json(self, msg, **kwargs): def fail_json(self, msg, **kwargs):
''' return from the module, with an error message ''' """ return from the module, with an error message """
kwargs['failed'] = True kwargs['failed'] = True
kwargs['msg'] = msg kwargs['msg'] = msg
@ -1477,7 +1477,7 @@ class AnsibleModule(object):
self.fail_json(msg=to_native(e)) self.fail_json(msg=to_native(e))
def digest_from_file(self, filename, algorithm): def digest_from_file(self, filename, algorithm):
''' Return hex digest of local file for a digest_method specified by name, or None if file is not present. ''' """ Return hex digest of local file for a digest_method specified by name, or None if file is not present. """
b_filename = to_bytes(filename, errors='surrogate_or_strict') b_filename = to_bytes(filename, errors='surrogate_or_strict')
if not os.path.exists(b_filename): if not os.path.exists(b_filename):
@ -1505,7 +1505,7 @@ class AnsibleModule(object):
return digest_method.hexdigest() return digest_method.hexdigest()
def md5(self, filename): def md5(self, filename):
''' Return MD5 hex digest of local file using digest_from_file(). """ Return MD5 hex digest of local file using digest_from_file().
Do not use this function unless you have no other choice for: Do not use this function unless you have no other choice for:
1) Optional backwards compatibility 1) Optional backwards compatibility
@ -1514,21 +1514,21 @@ class AnsibleModule(object):
This function will not work on systems complying with FIPS-140-2. This function will not work on systems complying with FIPS-140-2.
Most uses of this function can use the module.sha1 function instead. Most uses of this function can use the module.sha1 function instead.
''' """
if 'md5' not in AVAILABLE_HASH_ALGORITHMS: if 'md5' not in AVAILABLE_HASH_ALGORITHMS:
raise ValueError('MD5 not available. Possibly running in FIPS mode') raise ValueError('MD5 not available. Possibly running in FIPS mode')
return self.digest_from_file(filename, 'md5') return self.digest_from_file(filename, 'md5')
def sha1(self, filename): def sha1(self, filename):
''' Return SHA1 hex digest of local file using digest_from_file(). ''' """ Return SHA1 hex digest of local file using digest_from_file(). """
return self.digest_from_file(filename, 'sha1') return self.digest_from_file(filename, 'sha1')
def sha256(self, filename): def sha256(self, filename):
''' Return SHA-256 hex digest of local file using digest_from_file(). ''' """ Return SHA-256 hex digest of local file using digest_from_file(). """
return self.digest_from_file(filename, 'sha256') return self.digest_from_file(filename, 'sha256')
def backup_local(self, fn): def backup_local(self, fn):
'''make a date-marked backup of the specified file, return True or False on success or failure''' """make a date-marked backup of the specified file, return True or False on success or failure"""
backupdest = '' backupdest = ''
if os.path.exists(fn): if os.path.exists(fn):
@ -1586,9 +1586,9 @@ class AnsibleModule(object):
self.set_attributes_if_different(dest, current_attribs, True) self.set_attributes_if_different(dest, current_attribs, True)
def atomic_move(self, src, dest, unsafe_writes=False, keep_dest_attrs=True): def atomic_move(self, src, dest, unsafe_writes=False, keep_dest_attrs=True):
'''atomically move src to dest, copying attributes from dest; returns True on success. """atomically move src to dest, copying attributes from dest; returns True on success.
It uses os.rename to ensure this is an atomic operation; the rest of the function works It uses os.rename to ensure this is an atomic operation; the rest of the function works
around limitations and corner cases and ensures the selinux context is saved if possible''' around limitations and corner cases and ensures the selinux context is saved if possible"""
context = None context = None
dest_stat = None dest_stat = None
b_src = to_bytes(src, errors='surrogate_or_strict') b_src = to_bytes(src, errors='surrogate_or_strict')
@ -1756,7 +1756,7 @@ class AnsibleModule(object):
def run_command(self, args, check_rc=False, close_fds=True, executable=None, data=None, binary_data=False, path_prefix=None, cwd=None, def run_command(self, args, check_rc=False, close_fds=True, executable=None, data=None, binary_data=False, path_prefix=None, cwd=None,
use_unsafe_shell=False, prompt_regex=None, environ_update=None, umask=None, encoding='utf-8', errors='surrogate_or_strict', use_unsafe_shell=False, prompt_regex=None, environ_update=None, umask=None, encoding='utf-8', errors='surrogate_or_strict',
expand_user_and_vars=True, pass_fds=None, before_communicate_callback=None, ignore_invalid_cwd=True, handle_exceptions=True): expand_user_and_vars=True, pass_fds=None, before_communicate_callback=None, ignore_invalid_cwd=True, handle_exceptions=True):
''' """
Execute a command, returns rc, stdout, and stderr. Execute a command, returns rc, stdout, and stderr.
The mechanism of this method for reading stdout and stderr differs from The mechanism of this method for reading stdout and stderr differs from
@ -1825,7 +1825,7 @@ class AnsibleModule(object):
byte strings. On python3, stdout and stderr are text strings converted byte strings. On python3, stdout and stderr are text strings converted
according to the encoding and errors parameters. If you want byte according to the encoding and errors parameters. If you want byte
strings on python3, use encoding=None to turn decoding to text off. strings on python3, use encoding=None to turn decoding to text off.
''' """
# used by clean args later on # used by clean args later on
self._clean = None self._clean = None

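run_command(), whose docstring is shown above, returns an (rc, stdout, stderr) triple and decodes the output to text unless encoding=None is passed. A standalone sketch of that return contract built directly on subprocess; this illustrates the convention only, it is not AnsibleModule's implementation, and it substitutes the stdlib 'surrogateescape' error handler for Ansible's own 'surrogate_or_strict':

import subprocess


def run_cmd(args, data=None, encoding='utf-8', errors='surrogateescape'):
    """Return (rc, stdout, stderr); decode output as text unless encoding is None."""
    proc = subprocess.Popen(
        args,
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    stdout, stderr = proc.communicate(input=data)
    if encoding is not None:
        # Mirror the docstring: text by default, raw bytes when encoding is None.
        stdout = stdout.decode(encoding, errors)
        stderr = stderr.decode(encoding, errors)
    return proc.returncode, stdout, stderr


rc, out, err = run_cmd(['echo', 'hello'])
print(rc, out.strip())  # 0 hello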
@ -10,7 +10,7 @@ from __future__ import annotations
def get_all_subclasses(cls): def get_all_subclasses(cls):
''' """
Recursively search and find all subclasses of a given class Recursively search and find all subclasses of a given class
:arg cls: A python class :arg cls: A python class
@ -21,7 +21,7 @@ def get_all_subclasses(cls):
of a class exist. However, `__subclasses__` only goes one level deep. This function searches of a class exist. However, `__subclasses__` only goes one level deep. This function searches
each child class's `__subclasses__` method to find all of the descendent classes. It then each child class's `__subclasses__` method to find all of the descendent classes. It then
returns an iterable of the descendent classes. returns an iterable of the descendent classes.
''' """
# Retrieve direct subclasses # Retrieve direct subclasses
subclasses = set(cls.__subclasses__()) subclasses = set(cls.__subclasses__())
to_visit = list(subclasses) to_visit = list(subclasses)

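get_all_subclasses(), described above, exists because __subclasses__() only reports direct children; the helper keeps walking each child's own subclasses. A minimal sketch of that walk:

def all_subclasses(cls):
    """Collect every descendant class of cls, not just direct children."""
    found = set(cls.__subclasses__())
    to_visit = list(found)
    while to_visit:
        child = to_visit.pop()
        for grandchild in child.__subclasses__():
            if grandchild not in found:
                found.add(grandchild)
                to_visit.append(grandchild)
    return found


class A: ...
class B(A): ...
class C(B): ...

print(sorted(k.__name__ for k in all_subclasses(A)))  # ['B', 'C']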
@ -109,9 +109,9 @@ def _camel_to_snake(name, reversible=False):
def dict_merge(a, b): def dict_merge(a, b):
'''recursively merges dicts. not just simple a['key'] = b['key'], if """recursively merges dicts. not just simple a['key'] = b['key'], if
both a and b have a key whose value is a dict then dict_merge is called both a and b have a key whose value is a dict then dict_merge is called
on both values and the result stored in the returned dictionary.''' on both values and the result stored in the returned dictionary."""
if not isinstance(b, dict): if not isinstance(b, dict):
return b return b
result = deepcopy(a) result = deepcopy(a)

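dict_merge(), per its docstring above, merges nested dicts recursively instead of letting b's top-level keys simply replace a's. A small sketch of the recursion:

from copy import deepcopy


def merge_dicts(a, b):
    """Return a new dict: b wins on conflicts, but nested dicts are merged."""
    if not isinstance(b, dict):
        return b
    result = deepcopy(a)
    for key, value in b.items():
        if key in result and isinstance(result[key], dict) and isinstance(value, dict):
            result[key] = merge_dicts(result[key], value)
        else:
            result[key] = deepcopy(value)
    return result


print(merge_dicts({'x': {'a': 1}, 'y': 1}, {'x': {'b': 2}, 'z': 3}))
# {'x': {'a': 1, 'b': 2}, 'y': 1, 'z': 3}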
@ -55,7 +55,7 @@ def is_executable(path):
# This method is reused by the basic module, # This method is reused by the basic module,
# the repetition helps the basic module's html documentation come out right. # the repetition helps the basic module's html documentation come out right.
# http://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#confval-autodoc_docstring_signature # http://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#confval-autodoc_docstring_signature
'''is_executable(path) """is_executable(path)
is the given path executable? is the given path executable?
@ -66,7 +66,7 @@ def is_executable(path):
* Does not account for FSACLs. * Does not account for FSACLs.
* Most times we really want to know "Can the current user execute this * Most times we really want to know "Can the current user execute this
file". This function does not tell us that, only if any execute bit is set. file". This function does not tell us that, only if any execute bit is set.
''' """
# These are all bitfields so first bitwise-or all the permissions we're # These are all bitfields so first bitwise-or all the permissions we're
# looking for, then bitwise-and with the file's mode to determine if any # looking for, then bitwise-and with the file's mode to determine if any
# execute bits are set. # execute bits are set.

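is_executable(), as the docstring above warns, only reports whether any execute bit is set; it consults neither ACLs nor the current user. The bitwise trick it describes, OR the three execute bits together and AND the result with the file mode, looks like this:

import os
import stat


def any_execute_bit(path):
    """True if the owner, group, or other execute bit is set on path."""
    mode = os.stat(path).st_mode
    # OR the execute bits we care about, then AND with the mode:
    # a non-zero result means at least one of them is set.
    return bool((stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH) & mode)


print(any_execute_bit('/bin/sh'))  # True on typical POSIX systems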
@ -42,9 +42,9 @@ def json_dump(structure):
class AnsibleJSONEncoder(json.JSONEncoder): class AnsibleJSONEncoder(json.JSONEncoder):
''' """
Simple encoder class to deal with JSON encoding of Ansible internal types Simple encoder class to deal with JSON encoding of Ansible internal types
''' """
def __init__(self, preprocess_unsafe=False, vault_to_text=False, **kwargs): def __init__(self, preprocess_unsafe=False, vault_to_text=False, **kwargs):
self._preprocess_unsafe = preprocess_unsafe self._preprocess_unsafe = preprocess_unsafe

@ -7,7 +7,7 @@ from ansible.module_utils.common.text.converters import to_native
def get_best_parsable_locale(module, preferences=None, raise_on_locale=False): def get_best_parsable_locale(module, preferences=None, raise_on_locale=False):
''' """
Attempts to return the best possible locale for parsing output in English Attempts to return the best possible locale for parsing output in English
useful for scraping output with i18n tools. When this raises an exception useful for scraping output with i18n tools. When this raises an exception
and the caller wants to continue, it should use the 'C' locale. and the caller wants to continue, it should use the 'C' locale.
@ -17,7 +17,7 @@ def get_best_parsable_locale(module, preferences=None, raise_on_locale=False):
:param raise_on_locale: boolean that determines if we raise exception or not :param raise_on_locale: boolean that determines if we raise exception or not
due to locale CLI issues due to locale CLI issues
:returns: The first matched preferred locale or 'C' which is the default :returns: The first matched preferred locale or 'C' which is the default
''' """
found = 'C' # default posix, its ascii but always there found = 'C' # default posix, its ascii but always there
try: try:

@ -10,7 +10,7 @@ from ansible.module_utils.common.warnings import deprecate
def get_bin_path(arg, opt_dirs=None, required=None): def get_bin_path(arg, opt_dirs=None, required=None):
''' """
Find system executable in PATH. Raises ValueError if the executable is not found. Find system executable in PATH. Raises ValueError if the executable is not found.
:param arg: the executable to find :param arg: the executable to find
@ -24,7 +24,7 @@ def get_bin_path(arg, opt_dirs=None, required=None):
In addition to PATH and opt_dirs, this function also looks through /sbin, /usr/sbin and /usr/local/sbin. A lot of In addition to PATH and opt_dirs, this function also looks through /sbin, /usr/sbin and /usr/local/sbin. A lot of
modules, especially for gathering facts, depend on this behaviour. modules, especially for gathering facts, depend on this behaviour.
''' """
if required is not None: if required is not None:
deprecate( deprecate(
msg="The `required` parameter in `get_bin_path` API is deprecated.", msg="The `required` parameter in `get_bin_path` API is deprecated.",

@ -74,7 +74,7 @@ def _create_payload():
raise Exception('unable to access ansible.module_utils.basic._ANSIBLE_ARGS (not launched by AnsiballZ?)') raise Exception('unable to access ansible.module_utils.basic._ANSIBLE_ARGS (not launched by AnsiballZ?)')
module_fqn = sys.modules['__main__']._module_fqn module_fqn = sys.modules['__main__']._module_fqn
modlib_path = sys.modules['__main__']._modlib_path modlib_path = sys.modules['__main__']._modlib_path
respawn_code_template = ''' respawn_code_template = """
import runpy import runpy
import sys import sys
@ -89,7 +89,7 @@ if __name__ == '__main__':
basic._ANSIBLE_ARGS = smuggled_args basic._ANSIBLE_ARGS = smuggled_args
runpy.run_module(module_fqn, init_globals=dict(_respawned=True), run_name='__main__', alter_sys=True) runpy.run_module(module_fqn, init_globals=dict(_respawned=True), run_name='__main__', alter_sys=True)
''' """
respawn_code = respawn_code_template.format(module_fqn=module_fqn, modlib_path=modlib_path, smuggled_args=smuggled_args.strip()) respawn_code = respawn_code_template.format(module_fqn=module_fqn, modlib_path=modlib_path, smuggled_args=smuggled_args.strip())

@ -14,7 +14,7 @@ __all__ = ('get_distribution', 'get_distribution_version', 'get_platform_subclas
def get_distribution(): def get_distribution():
''' """
Return the name of the distribution the module is running on. Return the name of the distribution the module is running on.
:rtype: NativeString or None :rtype: NativeString or None
@ -23,7 +23,7 @@ def get_distribution():
This function attempts to determine what distribution the code is running This function attempts to determine what distribution the code is running
on and return a string representing that value. If the platform is Linux on and return a string representing that value. If the platform is Linux
and the distribution cannot be determined, it returns ``OtherLinux``. and the distribution cannot be determined, it returns ``OtherLinux``.
''' """
distribution = distro.id().capitalize() distribution = distro.id().capitalize()
if platform.system() == 'Linux': if platform.system() == 'Linux':
@ -38,14 +38,14 @@ def get_distribution():
def get_distribution_version(): def get_distribution_version():
''' """
Get the version of the distribution the code is running on Get the version of the distribution the code is running on
:rtype: NativeString or None :rtype: NativeString or None
:returns: A string representation of the version of the distribution. If it :returns: A string representation of the version of the distribution. If it
cannot determine the version, it returns an empty string. If this is not run on cannot determine the version, it returns an empty string. If this is not run on
a Linux machine it returns None. a Linux machine it returns None.
''' """
version = None version = None
needs_best_version = frozenset(( needs_best_version = frozenset((
@ -79,12 +79,12 @@ def get_distribution_version():
def get_distribution_codename(): def get_distribution_codename():
''' """
Return the code name for this Linux Distribution Return the code name for this Linux Distribution
:rtype: NativeString or None :rtype: NativeString or None
:returns: A string representation of the distribution's codename or None if not a Linux distro :returns: A string representation of the distribution's codename or None if not a Linux distro
''' """
codename = None codename = None
if platform.system() == 'Linux': if platform.system() == 'Linux':
# Until this gets merged and we update our bundled copy of distro: # Until this gets merged and we update our bundled copy of distro:
@ -109,7 +109,7 @@ def get_distribution_codename():
def get_platform_subclass(cls): def get_platform_subclass(cls):
''' """
Finds a subclass implementing desired functionality on the platform the code is running on Finds a subclass implementing desired functionality on the platform the code is running on
:arg cls: Class to find an appropriate subclass for :arg cls: Class to find an appropriate subclass for
@ -135,7 +135,7 @@ def get_platform_subclass(cls):
def __new__(cls, *args, **kwargs): def __new__(cls, *args, **kwargs):
new_cls = get_platform_subclass(User) new_cls = get_platform_subclass(User)
return super(cls, new_cls).__new__(new_cls) return super(cls, new_cls).__new__(new_cls)
''' """
this_platform = platform.system() this_platform = platform.system()
distribution = get_distribution() distribution = get_distribution()

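get_platform_subclass(), documented above, returns the most specific subclass whose declared platform/distribution matches the running system and is intended to be called from __new__, as the User example in the docstring shows. A toy version of that selection, assuming subclasses advertise platform and distribution class attributes (the selection rules here are deliberately simplified):

import platform


def pick_platform_subclass(cls, system=None, distribution=None):
    """Pick the most specific direct subclass of cls for (system, distribution):
    an exact platform+distribution match wins, then a platform-only match,
    then cls itself. Toy version; the real helper is more thorough."""
    system = system or platform.system()
    fallback = cls
    for sub in cls.__subclasses__():
        if getattr(sub, 'platform', None) != system:
            continue
        if distribution is not None and getattr(sub, 'distribution', None) == distribution:
            return sub
        if getattr(sub, 'distribution', None) is None:
            fallback = sub
    return fallback


class User:
    platform = 'Generic'
    distribution = None

    def __new__(cls, *args, **kwargs):
        # Same trick as in the docstring above: swap in the platform subclass.
        new_cls = pick_platform_subclass(User)
        return super(cls, new_cls).__new__(new_cls)


class LinuxUser(User):
    platform = 'Linux'
    distribution = None


print(type(User()).__name__)  # 'LinuxUser' on a Linux host, 'User' elsewhere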
@ -278,11 +278,11 @@ def jsonify(data, **kwargs):
def container_to_bytes(d, encoding='utf-8', errors='surrogate_or_strict'): def container_to_bytes(d, encoding='utf-8', errors='surrogate_or_strict'):
''' Recursively convert dict keys and values to byte str """ Recursively convert dict keys and values to byte str
Specialized for json return because this only handles lists, tuples, Specialized for json return because this only handles lists, tuples,
and dict container types (the containers that the json module returns) and dict container types (the containers that the json module returns)
''' """
if isinstance(d, text_type): if isinstance(d, text_type):
return to_bytes(d, encoding=encoding, errors=errors) return to_bytes(d, encoding=encoding, errors=errors)

@ -16,9 +16,9 @@
# along with Ansible. If not, see <http://www.gnu.org/licenses/>. # along with Ansible. If not, see <http://www.gnu.org/licenses/>.
''' """
Compat distro library. Compat distro library.
''' """
from __future__ import annotations from __future__ import annotations
# The following makes it easier for us to script updates of the bundled code # The following makes it easier for us to script updates of the bundled code

@ -39,13 +39,13 @@ from ansible.module_utils.common.collections import is_string
class AnsibleFactCollector(collector.BaseFactCollector): class AnsibleFactCollector(collector.BaseFactCollector):
'''A FactCollector that returns results under 'ansible_facts' top level key. """A FactCollector that returns results under 'ansible_facts' top level key.
If a namespace is provided, facts will be collected under that namespace. If a namespace is provided, facts will be collected under that namespace.
For example, an ansible.module_utils.facts.namespace.PrefixFactNamespace(prefix='ansible_') For example, an ansible.module_utils.facts.namespace.PrefixFactNamespace(prefix='ansible_')
Has a from_gather_subset() constructor that populates collectors based on a Has a from_gather_subset() constructor that populates collectors based on a
gather_subset specifier.''' gather_subset specifier."""
def __init__(self, collectors=None, namespace=None, filter_spec=None): def __init__(self, collectors=None, namespace=None, filter_spec=None):
@ -102,7 +102,7 @@ class AnsibleFactCollector(collector.BaseFactCollector):
class CollectorMetaDataCollector(collector.BaseFactCollector): class CollectorMetaDataCollector(collector.BaseFactCollector):
'''Collector that provides a fact with the gather_subset metadata.''' """Collector that provides a fact with the gather_subset metadata."""
name = 'gather_subset' name = 'gather_subset'
_fact_ids = set() # type: t.Set[str] _fact_ids = set() # type: t.Set[str]

@ -38,13 +38,13 @@ from ansible.module_utils.facts import timeout
class CycleFoundInFactDeps(Exception): class CycleFoundInFactDeps(Exception):
'''Indicates there is a cycle in fact collector deps """Indicates there is a cycle in fact collector deps
If collector-B requires collector-A, and collector-A requires If collector-B requires collector-A, and collector-A requires
collector-B, that is a cycle. In that case, there is no ordering collector-B, that is a cycle. In that case, there is no ordering
that will satisfy B before A and A and before B. That will cause this that will satisfy B before A and A and before B. That will cause this
error to be raised. error to be raised.
''' """
pass pass
@ -64,9 +64,9 @@ class BaseFactCollector:
required_facts = set() # type: t.Set[str] required_facts = set() # type: t.Set[str]
def __init__(self, collectors=None, namespace=None): def __init__(self, collectors=None, namespace=None):
'''Base class for things that collect facts. """Base class for things that collect facts.
'collectors' is an optional list of other FactCollectors for composing.''' 'collectors' is an optional list of other FactCollectors for composing."""
self.collectors = collectors or [] self.collectors = collectors or []
# self.namespace is an object with a 'transform' method that transforms # self.namespace is an object with a 'transform' method that transforms
@ -88,7 +88,7 @@ class BaseFactCollector:
return key_name return key_name
def _transform_dict_keys(self, fact_dict): def _transform_dict_keys(self, fact_dict):
'''update a dict's keys to use new names as transformed by self._transform_name''' """update a dict's keys to use new names as transformed by self._transform_name"""
if fact_dict is None: if fact_dict is None:
return {} return {}
@ -107,7 +107,7 @@ class BaseFactCollector:
return facts_dict return facts_dict
def collect(self, module=None, collected_facts=None): def collect(self, module=None, collected_facts=None):
'''do the fact collection """do the fact collection
'collected_facts' is an object (a dict, likely) that holds all previously 'collected_facts' is an object (a dict, likely) that holds all previously
collected facts. This is intended to be used if a FactCollector needs to reference collected facts. This is intended to be used if a FactCollector needs to reference
@ -115,7 +115,7 @@ class BaseFactCollector:
Returns a dict of facts. Returns a dict of facts.
''' """
facts_dict = {} facts_dict = {}
return facts_dict return facts_dict
@ -125,12 +125,12 @@ def get_collector_names(valid_subsets=None,
gather_subset=None, gather_subset=None,
aliases_map=None, aliases_map=None,
platform_info=None): platform_info=None):
'''return a set of FactCollector names based on gather_subset spec. """return a set of FactCollector names based on gather_subset spec.
gather_subset is a spec describing which facts to gather. gather_subset is a spec describing which facts to gather.
valid_subsets is a frozenset of potential matches for gather_subset ('all', 'network') etc valid_subsets is a frozenset of potential matches for gather_subset ('all', 'network') etc
minimal_gather_subsets is a frozenset of matches to always use, even for gather_subset='!all' minimal_gather_subsets is a frozenset of matches to always use, even for gather_subset='!all'
''' """
# Retrieve module parameters # Retrieve module parameters
gather_subset = gather_subset or ['all'] gather_subset = gather_subset or ['all']
@ -267,11 +267,11 @@ def _get_requires_by_collector_name(collector_name, all_fact_subsets):
def find_unresolved_requires(collector_names, all_fact_subsets): def find_unresolved_requires(collector_names, all_fact_subsets):
'''Find any collector names that have unresolved requires """Find any collector names that have unresolved requires
Returns a list of collector names that correspond to collector Returns a list of collector names that correspond to collector
classes whose .requires_facts() are not in collector_names. classes whose .requires_facts() are not in collector_names.
''' """
unresolved = set() unresolved = set()
for collector_name in collector_names: for collector_name in collector_names:
@ -351,7 +351,7 @@ def collector_classes_from_gather_subset(all_collector_classes=None,
gather_subset=None, gather_subset=None,
gather_timeout=None, gather_timeout=None,
platform_info=None): platform_info=None):
'''return a list of collector classes that match the args''' """return a list of collector classes that match the args"""
# use gather_name etc to get the list of collectors # use gather_name etc to get the list of collectors

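find_unresolved_requires(), described above, flags collectors whose required facts are not provided by any selected collector. A compact sketch of that check, with plain names standing in for collector classes and the simplifying assumption that a collector provides a fact named after itself:

def find_unresolved(selected, requires_map):
    """Return the names in `selected` whose required facts are satisfied by no
    selected collector. `requires_map` maps name -> set of required fact names."""
    provided = set(selected)
    unresolved = set()
    for name in selected:
        if requires_map.get(name, set()) - provided:
            unresolved.add(name)
    return unresolved


requires = {'network': {'platform'}, 'hardware': {'platform'}, 'platform': set()}
print(find_unresolved({'network', 'hardware'}, requires))  # {'network', 'hardware'} (order may vary)
print(find_unresolved({'network', 'platform'}, requires))  # set()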
@ -34,19 +34,19 @@ from ansible.module_utils.facts import ansible_collector
def get_all_facts(module): def get_all_facts(module):
'''compat api for ansible 2.2/2.3 module_utils.facts.get_all_facts method """compat api for ansible 2.2/2.3 module_utils.facts.get_all_facts method
Expects module to be an instance of AnsibleModule, with a 'gather_subset' param. Expects module to be an instance of AnsibleModule, with a 'gather_subset' param.
returns a dict mapping the bare fact name ('default_ipv4' with no 'ansible_' namespace) to returns a dict mapping the bare fact name ('default_ipv4' with no 'ansible_' namespace) to
the fact value.''' the fact value."""
gather_subset = module.params['gather_subset'] gather_subset = module.params['gather_subset']
return ansible_facts(module, gather_subset=gather_subset) return ansible_facts(module, gather_subset=gather_subset)
def ansible_facts(module, gather_subset=None): def ansible_facts(module, gather_subset=None):
'''Compat api for ansible 2.0/2.2/2.3 module_utils.facts.ansible_facts method """Compat api for ansible 2.0/2.2/2.3 module_utils.facts.ansible_facts method
2.3/2.3 expects a gather_subset arg. 2.3/2.3 expects a gather_subset arg.
2.0/2.1 does not accept a gather_subset arg 2.0/2.1 does not accept a gather_subset arg
@ -57,7 +57,7 @@ def ansible_facts(module, gather_subset=None):
returns a dict mapping the bare fact name ('default_ipv4' with no 'ansible_' namespace) to returns a dict mapping the bare fact name ('default_ipv4' with no 'ansible_' namespace) to
the fact value. the fact value.
''' """
gather_subset = gather_subset or module.params.get('gather_subset', ['all']) gather_subset = gather_subset or module.params.get('gather_subset', ['all'])
gather_timeout = module.params.get('gather_timeout', 10) gather_timeout = module.params.get('gather_timeout', 10)

@ -224,9 +224,9 @@ class FreeBSDHardware(Hardware):
return device_facts return device_facts
def get_dmi_facts(self): def get_dmi_facts(self):
''' learn dmi facts from system """ learn dmi facts from system
Use dmidecode executable if available''' Use dmidecode executable if available"""
dmi_facts = {} dmi_facts = {}

@ -311,10 +311,10 @@ class LinuxHardware(Hardware):
return cpu_facts return cpu_facts
def get_dmi_facts(self): def get_dmi_facts(self):
''' learn dmi facts from system """ learn dmi facts from system
Try /sys first for dmi related facts. Try /sys first for dmi related facts.
If that is not available, fall back to dmidecode executable ''' If that is not available, fall back to dmidecode executable """
dmi_facts = {} dmi_facts = {}
@ -423,13 +423,13 @@ class LinuxHardware(Hardware):
'NA' 'NA'
) )
sysinfo_re = re.compile( sysinfo_re = re.compile(
r''' r"""
^ ^
(?:Manufacturer:\s+(?P<system_vendor>.+))| (?:Manufacturer:\s+(?P<system_vendor>.+))|
(?:Type:\s+(?P<product_name>.+))| (?:Type:\s+(?P<product_name>.+))|
(?:Sequence\ Code:\s+0+(?P<product_serial>.+)) (?:Sequence\ Code:\s+0+(?P<product_serial>.+))
$ $
''', """,
re.VERBOSE | re.MULTILINE re.VERBOSE | re.MULTILINE
) )
data = get_file_content('/proc/sysinfo') data = get_file_content('/proc/sysinfo')

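The converted raw triple-quoted pattern above is a verbose, multiline regex used to pull vendor, model, and serial out of /proc/sysinfo-style text. The self-contained sketch below shows how such a pattern extracts named groups; the sample input is invented and the surrounding code is not the Ansible collector.

import re

# Sketch only: demonstrates the verbose, multiline pattern shown above.
# The sample input is invented for illustration.
sysinfo_re = re.compile(
    r"""
    ^
    (?:Manufacturer:\s+(?P<system_vendor>.+))|
    (?:Type:\s+(?P<product_name>.+))|
    (?:Sequence\ Code:\s+0+(?P<product_serial>.+))
    $
    """,
    re.VERBOSE | re.MULTILINE,
)

sample = (
    "Manufacturer:         IBM\n"
    "Type:                 8561\n"
    "Sequence Code:        0000000000012345\n"
)

facts = {}
for match in sysinfo_re.finditer(sample):
    facts.update({k: v for k, v in match.groupdict().items() if v is not None})
print(facts)  # system_vendor 'IBM', product_name '8561', product_serial '12345'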
@ -33,7 +33,7 @@ class FactNamespace:
self.namespace_name = namespace_name self.namespace_name = namespace_name
def transform(self, name): def transform(self, name):
'''Take a text name, and transforms it as needed (add a namespace prefix, etc)''' """Take a text name, and transforms it as needed (add a namespace prefix, etc)"""
return name return name
def _underscore(self, name): def _underscore(self, name):

@ -25,7 +25,7 @@ from ansible.module_utils.facts.collector import BaseFactCollector
class OhaiFactCollector(BaseFactCollector): class OhaiFactCollector(BaseFactCollector):
'''This is a subclass of Facts for including information gathered from Ohai.''' """This is a subclass of Facts for including information gathered from Ohai."""
name = 'ohai' name = 'ohai'
_fact_ids = set() # type: t.Set[str] _fact_ids = set() # type: t.Set[str]

@ -46,7 +46,7 @@ def _file_exists(path, allow_empty=False):
class DistributionFiles: class DistributionFiles:
'''has-a various distro file parsers (os-release, etc) and logic for finding the right one.''' """has-a various distro file parsers (os-release, etc) and logic for finding the right one."""
# every distribution name mentioned here, must have one of # every distribution name mentioned here, must have one of
# - allowempty == True # - allowempty == True
# - be listed in SEARCH_STRING # - be listed in SEARCH_STRING

@ -20,7 +20,7 @@ import os
def get_file_content(path, default=None, strip=True): def get_file_content(path, default=None, strip=True):
''' """
Return the contents of a given file path Return the contents of a given file path
:args path: path to file to return contents from :args path: path to file to return contents from
@ -28,7 +28,7 @@ def get_file_content(path, default=None, strip=True):
:args strip: controls if we strip whitespace from the result or not :args strip: controls if we strip whitespace from the result or not
:returns: String with file contents (optionally stripped) or 'default' value :returns: String with file contents (optionally stripped) or 'default' value
''' """
data = default data = default
if os.path.exists(path) and os.access(path, os.R_OK): if os.path.exists(path) and os.access(path, os.R_OK):
datafile = None datafile = None
@ -62,7 +62,7 @@ def get_file_content(path, default=None, strip=True):
def get_file_lines(path, strip=True, line_sep=None): def get_file_lines(path, strip=True, line_sep=None):
'''get list of lines from file''' """get list of lines from file"""
data = get_file_content(path, strip=strip) data = get_file_content(path, strip=strip)
if data: if data:
if line_sep is None: if line_sep is None:

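The docstrings above spell out the contract of get_file_content/get_file_lines: return the default when the path is missing or unreadable, optionally strip, and split into lines. A minimal stand-in honouring that contract (not the Ansible implementation, which does more careful error handling):

import os

# Minimal stand-in for the documented behaviour of get_file_content /
# get_file_lines -- not the Ansible code.
def get_file_content(path, default=None, strip=True):
    if not (os.path.exists(path) and os.access(path, os.R_OK)):
        return default
    with open(path, 'rb') as f:
        data = f.read().decode('utf-8', errors='surrogateescape')
    return data.strip() if strip and data else data


def get_file_lines(path, strip=True, line_sep=None):
    data = get_file_content(path, strip=strip)
    if not data:
        return []
    return data.split(line_sep) if line_sep else data.splitlines()


if __name__ == '__main__':
    print(get_file_lines('/etc/hostname'))                   # e.g. ['myhost'] on Linux
    print(get_file_content('/does/not/exist', default=''))   # -> ''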
@ -32,13 +32,13 @@ import json # pylint: disable=unused-import
# NB: a copy of this function exists in ../../modules/core/async_wrapper.py. Ensure any # NB: a copy of this function exists in ../../modules/core/async_wrapper.py. Ensure any
# changes are propagated there. # changes are propagated there.
def _filter_non_json_lines(data, objects_only=False): def _filter_non_json_lines(data, objects_only=False):
''' """
Used to filter unrelated output around module JSON output, like messages from Used to filter unrelated output around module JSON output, like messages from
tcagetattr, or where dropbear spews MOTD on every single command (which is nuts). tcagetattr, or where dropbear spews MOTD on every single command (which is nuts).
Filters leading lines before first line-starting occurrence of '{' or '[', and filters all Filters leading lines before first line-starting occurrence of '{' or '[', and filters all
trailing lines after matching close character (working from the bottom of output). trailing lines after matching close character (working from the bottom of output).
''' """
warnings = [] warnings = []
# Filter initial junk # Filter initial junk

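As described above, _filter_non_json_lines keeps only the span from the first line that starts a JSON object or array through the line that closes it. A rough standalone sketch of that idea (not the Ansible code; it omits the warnings the real helper collects):

import json

# Rough sketch: drop noise before the first line that opens a JSON object/array
# and after the line that closes it, then parse what is left.
def filter_non_json_lines(data):
    lines = data.splitlines()
    start = next(i for i, l in enumerate(lines) if l.lstrip().startswith(('{', '[')))
    closer = '}' if lines[start].lstrip().startswith('{') else ']'
    end = max(i for i, l in enumerate(lines) if l.rstrip().endswith(closer))
    return '\n'.join(lines[start:end + 1])


if __name__ == '__main__':
    noisy = "MOTD: welcome!\n{\n  \"changed\": false\n}\ntrailing shell noise\n"
    print(json.loads(filter_non_json_lines(noisy)))  # {'changed': False}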
@ -42,13 +42,13 @@ from ansible.module_utils.common.text.converters import to_bytes, to_text
def sysv_is_enabled(name, runlevel=None): def sysv_is_enabled(name, runlevel=None):
''' """
This function will check if the service name supplied This function will check if the service name supplied
is enabled in any of the sysv runlevels is enabled in any of the sysv runlevels
:arg name: name of the service to test for :arg name: name of the service to test for
:kw runlevel: runlevel to check (default: None) :kw runlevel: runlevel to check (default: None)
''' """
if runlevel: if runlevel:
if not os.path.isdir('/etc/rc0.d/'): if not os.path.isdir('/etc/rc0.d/'):
return bool(glob.glob('/etc/init.d/rc%s.d/S??%s' % (runlevel, name))) return bool(glob.glob('/etc/init.d/rc%s.d/S??%s' % (runlevel, name)))
@ -60,12 +60,12 @@ def sysv_is_enabled(name, runlevel=None):
def get_sysv_script(name): def get_sysv_script(name):
''' """
This function will return the expected path for an init script This function will return the expected path for an init script
corresponding to the service name supplied. corresponding to the service name supplied.
:arg name: name or path of the service to test for :arg name: name or path of the service to test for
''' """
if name.startswith('/'): if name.startswith('/'):
result = name result = name
else: else:
@ -75,19 +75,19 @@ def get_sysv_script(name):
def sysv_exists(name): def sysv_exists(name):
''' """
This function will return True or False depending on This function will return True or False depending on
the existence of an init script corresponding to the service name supplied. the existence of an init script corresponding to the service name supplied.
:arg name: name of the service to test for :arg name: name of the service to test for
''' """
return os.path.exists(get_sysv_script(name)) return os.path.exists(get_sysv_script(name))
def get_ps(module, pattern): def get_ps(module, pattern):
''' """
Last resort to find a service by trying to match pattern to programs in memory Last resort to find a service by trying to match pattern to programs in memory
''' """
found = False found = False
if platform.system() == 'SunOS': if platform.system() == 'SunOS':
flags = '-ef' flags = '-ef'
@ -106,7 +106,7 @@ def get_ps(module, pattern):
def fail_if_missing(module, found, service, msg=''): def fail_if_missing(module, found, service, msg=''):
''' """
This function will return an error or exit gracefully depending on check mode status This function will return an error or exit gracefully depending on check mode status
and whether the service is missing or not. and whether the service is missing or not.
@ -114,16 +114,16 @@ def fail_if_missing(module, found, service, msg=''):
:arg found: boolean indicating if services were found or not :arg found: boolean indicating if services were found or not
:arg service: name of service :arg service: name of service
:kw msg: extra info to append to error/success msg when missing :kw msg: extra info to append to error/success msg when missing
''' """
if not found: if not found:
module.fail_json(msg='Could not find the requested service %s: %s' % (service, msg)) module.fail_json(msg='Could not find the requested service %s: %s' % (service, msg))
def fork_process(): def fork_process():
''' """
This function performs the double fork process to detach from the This function performs the double fork process to detach from the
parent process and execute. parent process and execute.
''' """
pid = os.fork() pid = os.fork()
if pid == 0: if pid == 0:
@ -162,7 +162,7 @@ def fork_process():
def daemonize(module, cmd): def daemonize(module, cmd):
''' """
Execute a command while detaching as a daemon, returns rc, stdout, and stderr. Execute a command while detaching as a daemon, returns rc, stdout, and stderr.
:arg module: is an AnsibleModule object, used for its utility methods :arg module: is an AnsibleModule object, used for its utility methods
@ -171,7 +171,7 @@ def daemonize(module, cmd):
This is complex because daemonization is hard for people. This is complex because daemonization is hard for people.
What we do is daemonize a part of this module, the daemon runs the command, What we do is daemonize a part of this module, the daemon runs the command,
picks up the return code and output, and returns it to the main process. picks up the return code and output, and returns it to the main process.
''' """
# init some vars # init some vars
chunk = 4096 # FIXME: pass in as arg? chunk = 4096 # FIXME: pass in as arg?

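fork_process and daemonize above describe the classic double-fork detach, with the daemonized part returning the command's return code and output to the main process. A minimal POSIX-only sketch of the detach technique itself; detach_and_run is an invented helper, and the capture of rc/stdout/stderr done by the real helpers is omitted.

import os
import time

# Minimal double-fork sketch (POSIX only) -- not the Ansible code.
def detach_and_run(target):
    pid = os.fork()
    if pid > 0:
        return pid                      # parent returns immediately
    os.setsid()                         # new session, drop controlling tty
    if os.fork() > 0:                   # second fork: never reacquire a tty
        os._exit(0)                     # intermediate child exits at once
    try:
        target()                        # grandchild does the real work
    finally:
        os._exit(0)


if __name__ == '__main__':
    child = detach_and_run(lambda: time.sleep(1))
    os.waitpid(child, 0)                # reap the short-lived intermediate child
    print('parent continues while the detached grandchild keeps running')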
@ -30,10 +30,10 @@ from __future__ import annotations
def _get_quote_state(token, quote_char): def _get_quote_state(token, quote_char):
''' """
the goal of this block is to determine if the quoted string the goal of this block is to determine if the quoted string
is unterminated in which case it needs to be put back together is unterminated in which case it needs to be put back together
''' """
# the char before the current one, used to see if # the char before the current one, used to see if
# the current character is escaped # the current character is escaped
prev_char = None prev_char = None
@ -50,11 +50,11 @@ def _get_quote_state(token, quote_char):
def _count_jinja2_blocks(token, cur_depth, open_token, close_token): def _count_jinja2_blocks(token, cur_depth, open_token, close_token):
''' """
this function counts the number of opening/closing blocks for a this function counts the number of opening/closing blocks for a
given opening/closing type and adjusts the current depth for that given opening/closing type and adjusts the current depth for that
block based on the difference block based on the difference
''' """
num_open = token.count(open_token) num_open = token.count(open_token)
num_close = token.count(close_token) num_close = token.count(close_token)
if num_open != num_close: if num_open != num_close:
@ -65,7 +65,7 @@ def _count_jinja2_blocks(token, cur_depth, open_token, close_token):
def split_args(args): def split_args(args):
''' """
Splits args on whitespace, but intelligently reassembles Splits args on whitespace, but intelligently reassembles
those that may have been split over a jinja2 block or quotes. those that may have been split over a jinja2 block or quotes.
@ -78,7 +78,7 @@ def split_args(args):
Basically this is a variation of shlex that has some more intelligence for Basically this is a variation of shlex that has some more intelligence for
how Ansible needs to use it. how Ansible needs to use it.
''' """
# the list of params parsed out of the arg string # the list of params parsed out of the arg string
# this is going to be the result value when we are done # this is going to be the result value when we are done
@ -212,7 +212,7 @@ def is_quoted(data):
def unquote(data): def unquote(data):
''' removes first and last quotes from a string, if the string starts and ends with the same quotes ''' """ removes first and last quotes from a string, if the string starts and ends with the same quotes """
if is_quoted(data): if is_quoted(data):
return data[1:-1] return data[1:-1]
return data return data

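The splitter helpers above track quote state and strip matching outer quotes. A small sketch of those two ideas with invented helper names: is_quoted/unquote mirror the documented behaviour, and quote_unterminated is a stand-in for what _get_quote_state is described to decide. This is not the Ansible implementation.

# Sketch of the quoting helpers documented above -- not the Ansible versions.
def is_quoted(data):
    return len(data) > 1 and data[0] == data[-1] and data[0] in ('"', "'") and data[-2] != '\\'


def unquote(data):
    # remove first and last quotes if the string starts and ends with the same quotes
    return data[1:-1] if is_quoted(data) else data


def quote_unterminated(token, quote_char):
    # an odd number of unescaped quote chars means the quoted string is still
    # open and the surrounding tokens need to be reassembled
    count, prev = 0, None
    for ch in token:
        if ch == quote_char and prev != '\\':
            count += 1
        prev = ch
    return count % 2 == 1


print(unquote("'hello world'"))             # hello world
print(quote_unterminated('foo="bar', '"'))  # True -> keep joining tokens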
@ -12,7 +12,7 @@
# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause) # Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause)
''' """
The **urls** utils module offers a replacement for the urllib python library. The **urls** utils module offers a replacement for the urllib python library.
urllib is the python stdlib way to retrieve files from the Internet but it urllib is the python stdlib way to retrieve files from the Internet but it
@ -25,7 +25,7 @@ to replace urllib with a more secure library. However, all third party libraries
require that the library be installed on the managed machine. That is an extra step require that the library be installed on the managed machine. That is an extra step
for users making use of a module. If possible, avoid third party libraries by using for users making use of a module. If possible, avoid third party libraries by using
this code instead. this code instead.
''' """
from __future__ import annotations from __future__ import annotations
@ -223,10 +223,10 @@ UnixHTTPSConnection = None
if HAS_SSL: if HAS_SSL:
@contextmanager @contextmanager
def unix_socket_patch_httpconnection_connect(): def unix_socket_patch_httpconnection_connect():
'''Monkey patch ``http.client.HTTPConnection.connect`` to be ``UnixHTTPConnection.connect`` """Monkey patch ``http.client.HTTPConnection.connect`` to be ``UnixHTTPConnection.connect``
so that when calling ``super(UnixHTTPSConnection, self).connect()`` we get the so that when calling ``super(UnixHTTPSConnection, self).connect()`` we get the
correct behavior of creating self.sock for the unix socket correct behavior of creating self.sock for the unix socket
''' """
_connect = http.client.HTTPConnection.connect _connect = http.client.HTTPConnection.connect
http.client.HTTPConnection.connect = UnixHTTPConnection.connect http.client.HTTPConnection.connect = UnixHTTPConnection.connect
yield yield
@ -270,7 +270,7 @@ if HAS_SSL:
class UnixHTTPConnection(http.client.HTTPConnection): class UnixHTTPConnection(http.client.HTTPConnection):
'''Handles http requests to a unix socket file''' """Handles http requests to a unix socket file"""
def __init__(self, unix_socket): def __init__(self, unix_socket):
self._unix_socket = unix_socket self._unix_socket = unix_socket
@ -290,7 +290,7 @@ class UnixHTTPConnection(http.client.HTTPConnection):
class UnixHTTPHandler(urllib.request.HTTPHandler): class UnixHTTPHandler(urllib.request.HTTPHandler):
'''Handler for Unix urls''' """Handler for Unix urls"""
def __init__(self, unix_socket, **kwargs): def __init__(self, unix_socket, **kwargs):
super().__init__(**kwargs) super().__init__(**kwargs)
@ -301,29 +301,29 @@ class UnixHTTPHandler(urllib.request.HTTPHandler):
class ParseResultDottedDict(dict): class ParseResultDottedDict(dict):
''' """
A dict that acts similarly to the ParseResult named tuple from urllib A dict that acts similarly to the ParseResult named tuple from urllib
''' """
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs) super().__init__(*args, **kwargs)
self.__dict__ = self self.__dict__ = self
def as_list(self): def as_list(self):
''' """
Generate a list from this dict, that looks like the ParseResult named tuple Generate a list from this dict, that looks like the ParseResult named tuple
''' """
return [self.get(k, None) for k in ('scheme', 'netloc', 'path', 'params', 'query', 'fragment')] return [self.get(k, None) for k in ('scheme', 'netloc', 'path', 'params', 'query', 'fragment')]
def generic_urlparse(parts): def generic_urlparse(parts):
''' """
Returns a dictionary of url parts as parsed by urlparse, Returns a dictionary of url parts as parsed by urlparse,
but accounts for the fact that older versions of that but accounts for the fact that older versions of that
library do not support named attributes (ie. .netloc) library do not support named attributes (ie. .netloc)
This method isn't of much use any longer, but is kept This method isn't of much use any longer, but is kept
in a minimal state for backwards compat. in a minimal state for backwards compat.
''' """
result = ParseResultDottedDict(parts._asdict()) result = ParseResultDottedDict(parts._asdict())
result.update({ result.update({
'username': parts.username, 'username': parts.username,
@ -989,11 +989,11 @@ def open_url(url, data=None, headers=None, method=None, use_proxy=True,
client_cert=None, client_key=None, cookies=None, client_cert=None, client_key=None, cookies=None,
use_gssapi=False, unix_socket=None, ca_path=None, use_gssapi=False, unix_socket=None, ca_path=None,
unredirected_headers=None, decompress=True, ciphers=None, use_netrc=True): unredirected_headers=None, decompress=True, ciphers=None, use_netrc=True):
''' """
Sends a request via HTTP(S) or FTP using urllib (Python3) Sends a request via HTTP(S) or FTP using urllib (Python3)
Does not require the module environment Does not require the module environment
''' """
method = method or ('POST' if data else 'GET') method = method or ('POST' if data else 'GET')
return Request().open(method, url, data=data, headers=headers, use_proxy=use_proxy, return Request().open(method, url, data=data, headers=headers, use_proxy=use_proxy,
force=force, last_mod_time=last_mod_time, timeout=timeout, validate_certs=validate_certs, force=force, last_mod_time=last_mod_time, timeout=timeout, validate_certs=validate_certs,
@ -1117,10 +1117,10 @@ def basic_auth_header(username, password):
def url_argument_spec(): def url_argument_spec():
''' """
Creates an argument spec that can be used with any module Creates an argument spec that can be used with any module
that will be requesting content via urllib/urllib2 that will be requesting content via urllib/urllib2
''' """
return dict( return dict(
url=dict(type='str'), url=dict(type='str'),
force=dict(type='bool', default=False), force=dict(type='bool', default=False),
@ -1333,7 +1333,7 @@ def _split_multiext(name, min=3, max=4, count=2):
def fetch_file(module, url, data=None, headers=None, method=None, def fetch_file(module, url, data=None, headers=None, method=None,
use_proxy=True, force=False, last_mod_time=None, timeout=10, use_proxy=True, force=False, last_mod_time=None, timeout=10,
unredirected_headers=None, decompress=True, ciphers=None): unredirected_headers=None, decompress=True, ciphers=None):
'''Download and save a file via HTTP(S) or FTP (needs the module as parameter). """Download and save a file via HTTP(S) or FTP (needs the module as parameter).
This is basically a wrapper around fetch_url(). This is basically a wrapper around fetch_url().
:arg module: The AnsibleModule (used to get username, password, etc.; see below). :arg module: The AnsibleModule (used to get username, password, etc.; see below).
@ -1351,7 +1351,7 @@ def fetch_file(module, url, data=None, headers=None, method=None,
:kwarg ciphers: (optional) List of ciphers to use :kwarg ciphers: (optional) List of ciphers to use
:returns: A string, the path to the downloaded file. :returns: A string, the path to the downloaded file.
''' """
# download file # download file
bufsize = 65536 bufsize = 65536
parts = urlparse(url) parts = urlparse(url)

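ParseResultDottedDict and generic_urlparse above wrap urlparse output in a dict with attribute access. A standalone sketch of that pattern using only the standard library; DottedDict is an invented name, not the Ansible class.

from urllib.parse import urlparse

# Sketch of the dotted-dict pattern shown above: a dict whose keys are also
# attributes, built from urlparse() output.
class DottedDict(dict):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.__dict__ = self

    def as_list(self):
        return [self.get(k) for k in ('scheme', 'netloc', 'path', 'params', 'query', 'fragment')]


parts = urlparse('https://user:secret@example.com:8443/repo?tag=v1#top')
result = DottedDict(parts._asdict())
result.update({'username': parts.username, 'password': parts.password,
               'hostname': parts.hostname, 'port': parts.port})
print(result.hostname, result.port)   # example.com 8443
print(result.as_list()[0])            # https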
@ -7,7 +7,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = r''' DOCUMENTATION = r"""
--- ---
module: add_host module: add_host
short_description: Add a host (and alternatively a group) to the ansible-playbook in-memory inventory short_description: Add a host (and alternatively a group) to the ansible-playbook in-memory inventory
@ -69,9 +69,9 @@ seealso:
author: author:
- Ansible Core Team - Ansible Core Team
- Seth Vidal (@skvidal) - Seth Vidal (@skvidal)
''' """
EXAMPLES = r''' EXAMPLES = r"""
- name: Add host to group 'just_created' with variable foo=42 - name: Add host to group 'just_created' with variable foo=42
ansible.builtin.add_host: ansible.builtin.add_host:
name: '{{ ip_from_ec2 }}' name: '{{ ip_from_ec2 }}'
@ -111,4 +111,4 @@ EXAMPLES = r'''
name: '{{ item }}' name: '{{ item }}'
groups: done groups: done
loop: "{{ ansible_play_hosts }}" loop: "{{ ansible_play_hosts }}"
''' """

@ -9,7 +9,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = ''' DOCUMENTATION = """
--- ---
module: apt module: apt
short_description: Manages apt-packages short_description: Manages apt-packages
@ -217,9 +217,9 @@ notes:
- If the interpreter can't import C(python-apt)/C(python3-apt) the module will check for it in system-owned interpreters as well. - If the interpreter can't import C(python-apt)/C(python3-apt) the module will check for it in system-owned interpreters as well.
If the dependency can't be found, the module will attempt to install it. If the dependency can't be found, the module will attempt to install it.
If the dependency is found or installed, the module will be respawned under the correct interpreter. If the dependency is found or installed, the module will be respawned under the correct interpreter.
''' """
EXAMPLES = ''' EXAMPLES = """
- name: Install apache httpd (state=present is optional) - name: Install apache httpd (state=present is optional)
ansible.builtin.apt: ansible.builtin.apt:
name: apache2 name: apache2
@ -327,9 +327,9 @@ EXAMPLES = '''
- name: Run the equivalent of "apt-get clean" as a separate step - name: Run the equivalent of "apt-get clean" as a separate step
ansible.builtin.apt: ansible.builtin.apt:
clean: yes clean: yes
''' """
RETURN = ''' RETURN = """
cache_updated: cache_updated:
description: if the cache was updated or not description: if the cache was updated or not
returned: success, in some cases returned: success, in some cases
@ -355,7 +355,7 @@ stderr:
returned: success, when needed returned: success, when needed
type: str type: str
sample: "AH00558: apache2: Could not reliably determine the server's fully qualified domain name, using 127.0.1.1. Set the 'ServerName' directive globally to ..." sample: "AH00558: apache2: Could not reliably determine the server's fully qualified domain name, using 127.0.1.1. Set the 'ServerName' directive globally to ..."
''' # NOQA """ # NOQA
# added to stave off future warnings about apt api # added to stave off future warnings about apt api
import warnings import warnings
@ -1184,7 +1184,7 @@ def get_updated_cache_time():
# https://github.com/ansible/ansible-modules-core/issues/2951 # https://github.com/ansible/ansible-modules-core/issues/2951
def get_cache(module): def get_cache(module):
'''Attempt to get the cache object and update till it works''' """Attempt to get the cache object and update till it works"""
cache = None cache = None
try: try:
cache = apt.Cache() cache = apt.Cache()

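get_cache above is documented as attempting to get the cache object and updating until it works. A generic retry sketch of that pattern; retry, build, and repair are invented names, and the real function retries apt.Cache(), refreshing the cache between attempts.

import time

# Generic "retry until it works" sketch -- not the Ansible apt module code.
def retry(build, repair, attempts=3, delay=0.1):
    last_exc = None
    for _ in range(attempts):
        try:
            return build()
        except Exception as exc:        # the real code catches apt-specific errors
            last_exc = exc
            repair()
            time.sleep(delay)
    raise last_exc


if __name__ == '__main__':
    state = {'tries': 0}

    def flaky():
        state['tries'] += 1
        if state['tries'] < 2:
            raise RuntimeError('cache is stale')
        return 'cache object'

    print(retry(flaky, repair=lambda: print('updating cache...')))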
@ -8,7 +8,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = ''' DOCUMENTATION = """
--- ---
module: apt_key module: apt_key
author: author:
@ -79,9 +79,9 @@ options:
on personally controlled sites using self-signed certificates. on personally controlled sites using self-signed certificates.
type: bool type: bool
default: 'yes' default: 'yes'
''' """
EXAMPLES = ''' EXAMPLES = """
- name: One way to avoid apt_key once it is removed from your distro, armored keys should use .asc extension, binary should use .gpg - name: One way to avoid apt_key once it is removed from your distro, armored keys should use .asc extension, binary should use .gpg
block: block:
- name: somerepo | no apt key - name: somerepo | no apt key
@ -133,9 +133,9 @@ EXAMPLES = '''
id: 9FED2BCBDCD29CDF762678CBAED4B06F473041FA id: 9FED2BCBDCD29CDF762678CBAED4B06F473041FA
file: /tmp/apt.gpg file: /tmp/apt.gpg
state: present state: present
''' """
RETURN = ''' RETURN = """
after: after:
description: List of apt key ids or fingerprints after any modification description: List of apt key ids or fingerprints after any modification
returned: on change returned: on change
@ -166,7 +166,7 @@ short_id:
returned: always returned: always
type: str type: str
sample: "A88D21E9" sample: "A88D21E9"
''' """
import os import os

@ -9,7 +9,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = ''' DOCUMENTATION = """
--- ---
module: apt_repository module: apt_repository
short_description: Add and remove APT repositories short_description: Add and remove APT repositories
@ -101,9 +101,9 @@ requirements:
- python-apt (python 2) - python-apt (python 2)
- python3-apt (python 3) - python3-apt (python 3)
- apt-key or gpg - apt-key or gpg
''' """
EXAMPLES = ''' EXAMPLES = """
- name: Add specified repository into sources list - name: Add specified repository into sources list
ansible.builtin.apt_repository: ansible.builtin.apt_repository:
repo: deb http://archive.canonical.com/ubuntu hardy partner repo: deb http://archive.canonical.com/ubuntu hardy partner
@ -145,9 +145,9 @@ EXAMPLES = '''
ansible.builtin.apt_repository: ansible.builtin.apt_repository:
repo: "deb [arch=amd64 signed-by=/etc/apt/keyrings/myrepo.asc] https://download.example.com/linux/ubuntu {{ ansible_distribution_release }} stable" repo: "deb [arch=amd64 signed-by=/etc/apt/keyrings/myrepo.asc] https://download.example.com/linux/ubuntu {{ ansible_distribution_release }} stable"
state: present state: present
''' """
RETURN = ''' RETURN = """
repo: repo:
description: A source string for the repository description: A source string for the repository
returned: always returned: always
@ -167,7 +167,7 @@ sources_removed:
type: list type: list
sample: ["/etc/apt/sources.list.d/artifacts_elastic_co_packages_6_x_apt.list"] sample: ["/etc/apt/sources.list.d/artifacts_elastic_co_packages_6_x_apt.list"]
version_added: "2.15" version_added: "2.15"
''' """
import copy import copy
import glob import glob
@ -245,7 +245,7 @@ class SourcesList(object):
self.load(file) self.load(file)
def __iter__(self): def __iter__(self):
'''Simple iterator to go over all sources. Empty, non-source, and other not valid lines will be skipped.''' """Simple iterator to go over all sources. Empty, non-source, and other not valid lines will be skipped."""
for file, sources in self.files.items(): for file, sources in self.files.items():
for n, valid, enabled, source, comment in sources: for n, valid, enabled, source, comment in sources:
if valid: if valid:
@ -315,9 +315,9 @@ class SourcesList(object):
@staticmethod @staticmethod
def _apt_cfg_file(filespec): def _apt_cfg_file(filespec):
''' """
Wrapper for `apt_pkg` module for running with Python 2.5 Wrapper for `apt_pkg` module for running with Python 2.5
''' """
try: try:
result = apt_pkg.config.find_file(filespec) result = apt_pkg.config.find_file(filespec)
except AttributeError: except AttributeError:
@ -326,9 +326,9 @@ class SourcesList(object):
@staticmethod @staticmethod
def _apt_cfg_dir(dirspec): def _apt_cfg_dir(dirspec):
''' """
Wrapper for `apt_pkg` module for running with Python 2.5 Wrapper for `apt_pkg` module for running with Python 2.5
''' """
try: try:
result = apt_pkg.config.find_dir(dirspec) result = apt_pkg.config.find_dir(dirspec)
except AttributeError: except AttributeError:
@ -413,10 +413,10 @@ class SourcesList(object):
return new return new
def modify(self, file, n, enabled=None, source=None, comment=None): def modify(self, file, n, enabled=None, source=None, comment=None):
''' """
This function is to be used with the iterator, so we don't care about invalid sources. This function is to be used with the iterator, so we don't care about invalid sources.
If source, enabled, or comment is None, original value from line ``n`` will be preserved. If source, enabled, or comment is None, original value from line ``n`` will be preserved.
''' """
valid, enabled_old, source_old, comment_old = self.files[file][n][1:] valid, enabled_old, source_old, comment_old = self.files[file][n][1:]
self.files[file][n] = (n, valid, self._choice(enabled, enabled_old), self._choice(source, source_old), self._choice(comment, comment_old)) self.files[file][n] = (n, valid, self._choice(enabled, enabled_old), self._choice(source, source_old), self._choice(comment, comment_old))
@ -616,7 +616,7 @@ class UbuntuSourcesList(SourcesList):
def revert_sources_list(sources_before, sources_after, sourceslist_before): def revert_sources_list(sources_before, sources_after, sourceslist_before):
'''Revert the sourcelist files to their previous state.''' """Revert the sourcelist files to their previous state."""
# First remove any new files that were created: # First remove any new files that were created:
for filename in set(sources_after.keys()).difference(sources_before.keys()): for filename in set(sources_after.keys()).difference(sources_before.keys()):

@ -8,7 +8,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = r''' DOCUMENTATION = r"""
--- ---
module: assemble module: assemble
short_description: Assemble configuration files from fragments short_description: Assemble configuration files from fragments
@ -102,9 +102,9 @@ extends_documentation_fragment:
- action_common_attributes.files - action_common_attributes.files
- decrypt - decrypt
- files - files
''' """
EXAMPLES = r''' EXAMPLES = r"""
- name: Assemble from fragments from a directory - name: Assemble from fragments from a directory
ansible.builtin.assemble: ansible.builtin.assemble:
src: /etc/someapp/fragments src: /etc/someapp/fragments
@ -121,9 +121,9 @@ EXAMPLES = r'''
src: /etc/ssh/conf.d/ src: /etc/ssh/conf.d/
dest: /etc/ssh/sshd_config dest: /etc/ssh/sshd_config
validate: /usr/sbin/sshd -t -f %s validate: /usr/sbin/sshd -t -f %s
''' """
RETURN = r'''#''' RETURN = r"""#"""
import codecs import codecs
import os import os
@ -136,7 +136,7 @@ from ansible.module_utils.common.text.converters import to_native
def assemble_from_fragments(src_path, delimiter=None, compiled_regexp=None, ignore_hidden=False, tmpdir=None): def assemble_from_fragments(src_path, delimiter=None, compiled_regexp=None, ignore_hidden=False, tmpdir=None):
''' assemble a file from a directory of fragments ''' """ assemble a file from a directory of fragments """
tmpfd, temp_path = tempfile.mkstemp(dir=tmpdir) tmpfd, temp_path = tempfile.mkstemp(dir=tmpdir)
tmp = os.fdopen(tmpfd, 'wb') tmp = os.fdopen(tmpfd, 'wb')
delimit_me = False delimit_me = False

@ -6,7 +6,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = r''' DOCUMENTATION = r"""
--- ---
module: assert module: assert
short_description: Asserts given expressions are true short_description: Asserts given expressions are true
@ -70,9 +70,9 @@ seealso:
author: author:
- Ansible Core Team - Ansible Core Team
- Michael DeHaan - Michael DeHaan
''' """
EXAMPLES = r''' EXAMPLES = r"""
- name: A single condition can be supplied as string instead of list - name: A single condition can be supplied as string instead of list
ansible.builtin.assert: ansible.builtin.assert:
that: "ansible_os_family != 'RedHat'" that: "ansible_os_family != 'RedHat'"
@ -106,4 +106,4 @@ EXAMPLES = r'''
- my_param <= 100 - my_param <= 100
- my_param >= 0 - my_param >= 0
quiet: true quiet: true
''' """

@ -6,7 +6,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = r''' DOCUMENTATION = r"""
--- ---
module: async_status module: async_status
short_description: Obtain status of asynchronous task short_description: Obtain status of asynchronous task
@ -51,9 +51,9 @@ seealso:
author: author:
- Ansible Core Team - Ansible Core Team
- Michael DeHaan - Michael DeHaan
''' """
EXAMPLES = r''' EXAMPLES = r"""
--- ---
- name: Asynchronous dnf task - name: Asynchronous dnf task
ansible.builtin.dnf: ansible.builtin.dnf:
@ -75,9 +75,9 @@ EXAMPLES = r'''
ansible.builtin.async_status: ansible.builtin.async_status:
jid: '{{ dnf_sleeper.ansible_job_id }}' jid: '{{ dnf_sleeper.ansible_job_id }}'
mode: cleanup mode: cleanup
''' """
RETURN = r''' RETURN = r"""
ansible_job_id: ansible_job_id:
description: The asynchronous job id description: The asynchronous job id
returned: success returned: success
@ -105,7 +105,7 @@ erased:
description: Path to erased job file description: Path to erased job file
returned: when file is erased returned: when file is erased
type: str type: str
''' """
import json import json
import os import os

@ -75,13 +75,13 @@ def daemonize_self():
# NB: this function copied from module_utils/json_utils.py. Ensure any changes are propagated there. # NB: this function copied from module_utils/json_utils.py. Ensure any changes are propagated there.
# FUTURE: AnsibleModule-ify this module so it's Ansiballz-compatible and can use the module_utils copy of this function. # FUTURE: AnsibleModule-ify this module so it's Ansiballz-compatible and can use the module_utils copy of this function.
def _filter_non_json_lines(data): def _filter_non_json_lines(data):
''' """
Used to filter unrelated output around module JSON output, like messages from Used to filter unrelated output around module JSON output, like messages from
tcagetattr, or where dropbear spews MOTD on every single command (which is nuts). tcagetattr, or where dropbear spews MOTD on every single command (which is nuts).
Filters leading lines before first line-starting occurrence of '{', and filters all Filters leading lines before first line-starting occurrence of '{', and filters all
trailing lines after matching close character (working from the bottom of output). trailing lines after matching close character (working from the bottom of output).
''' """
warnings = [] warnings = []
# Filter initial junk # Filter initial junk

@ -7,7 +7,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = r''' DOCUMENTATION = r"""
--- ---
module: blockinfile module: blockinfile
short_description: Insert/update/remove a text block surrounded by marker lines short_description: Insert/update/remove a text block surrounded by marker lines
@ -125,9 +125,9 @@ attributes:
platforms: posix platforms: posix
vault: vault:
support: none support: none
''' """
EXAMPLES = r''' EXAMPLES = r"""
# Before Ansible 2.3, option 'dest' or 'name' was used instead of 'path' # Before Ansible 2.3, option 'dest' or 'name' was used instead of 'path'
- name: Insert/Update "Match User" configuration block in /etc/ssh/sshd_config prepending and appending a new line - name: Insert/Update "Match User" configuration block in /etc/ssh/sshd_config prepending and appending a new line
ansible.builtin.blockinfile: ansible.builtin.blockinfile:
@ -187,7 +187,7 @@ EXAMPLES = r'''
insertafter: '(?m)SID_LIST_LISTENER_DG =\n.*\(SID_LIST =' insertafter: '(?m)SID_LIST_LISTENER_DG =\n.*\(SID_LIST ='
marker: " <!-- {mark} ANSIBLE MANAGED BLOCK -->" marker: " <!-- {mark} ANSIBLE MANAGED BLOCK -->"
''' """
import re import re
import os import os

@ -7,7 +7,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = r''' DOCUMENTATION = r"""
--- ---
module: command module: command
short_description: Execute commands on targets short_description: Execute commands on targets
@ -118,9 +118,9 @@ seealso:
author: author:
- Ansible Core Team - Ansible Core Team
- Michael DeHaan - Michael DeHaan
''' """
EXAMPLES = r''' EXAMPLES = r"""
- name: Return motd to registered var - name: Return motd to registered var
ansible.builtin.command: cat /etc/motd ansible.builtin.command: cat /etc/motd
register: mymotd register: mymotd
@ -174,9 +174,9 @@ EXAMPLES = r'''
- name: Safely use templated variable to run command. Always use the quote filter to avoid injection issues - name: Safely use templated variable to run command. Always use the quote filter to avoid injection issues
ansible.builtin.command: cat {{ myfile|quote }} ansible.builtin.command: cat {{ myfile|quote }}
register: myoutput register: myoutput
''' """
RETURN = r''' RETURN = r"""
msg: msg:
description: changed description: changed
returned: always returned: always
@ -229,7 +229,7 @@ stderr_lines:
returned: always returned: always
type: list type: list
sample: [u'ls cannot access foo: No such file or directory', u'ls …'] sample: [u'ls cannot access foo: No such file or directory', u'ls …']
''' """
import datetime import datetime
import glob import glob

@ -7,7 +7,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = r''' DOCUMENTATION = r"""
--- ---
module: copy module: copy
version_added: historical version_added: historical
@ -154,9 +154,9 @@ attributes:
vault: vault:
support: full support: full
version_added: '2.2' version_added: '2.2'
''' """
EXAMPLES = r''' EXAMPLES = r"""
- name: Copy file with owner and permissions - name: Copy file with owner and permissions
ansible.builtin.copy: ansible.builtin.copy:
src: /srv/myfiles/foo.conf src: /srv/myfiles/foo.conf
@ -219,9 +219,9 @@ EXAMPLES = r'''
src: /etc/foo.conf src: /etc/foo.conf
dest: /path/to/link # link to /path/to/file dest: /path/to/link # link to /path/to/file
follow: no follow: no
''' """
RETURN = r''' RETURN = r"""
dest: dest:
description: Destination file/path. description: Destination file/path.
returned: success returned: success
@ -282,7 +282,7 @@ state:
returned: success returned: success
type: str type: str
sample: file sample: file
''' """
import errno import errno
import filecmp import filecmp
@ -305,9 +305,9 @@ class AnsibleModuleError(Exception):
def split_pre_existing_dir(dirname): def split_pre_existing_dir(dirname):
''' """
Return the first pre-existing directory and a list of the new directories that will be created. Return the first pre-existing directory and a list of the new directories that will be created.
''' """
head, tail = os.path.split(dirname) head, tail = os.path.split(dirname)
b_head = to_bytes(head, errors='surrogate_or_strict') b_head = to_bytes(head, errors='surrogate_or_strict')
if head == '': if head == '':
@ -323,9 +323,9 @@ def split_pre_existing_dir(dirname):
def adjust_recursive_directory_permissions(pre_existing_dir, new_directory_list, module, directory_args, changed): def adjust_recursive_directory_permissions(pre_existing_dir, new_directory_list, module, directory_args, changed):
''' """
Walk the new directories list and make sure that permissions are as we would expect Walk the new directories list and make sure that permissions are as we would expect
''' """
if new_directory_list: if new_directory_list:
working_dir = os.path.join(pre_existing_dir, new_directory_list.pop(0)) working_dir = os.path.join(pre_existing_dir, new_directory_list.pop(0))

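split_pre_existing_dir above returns the deepest existing parent plus the directory components that still need creating, and adjust_recursive_directory_permissions then walks those new components. A sketch of the splitting step only (not the Ansible code; byte/encoding handling is omitted):

import os

# Sketch of the split_pre_existing_dir() idea: walk up the requested path until
# an existing directory is found and report which components remain to create.
def split_pre_existing_dir(dirname):
    to_create = []
    head = dirname
    while head and not os.path.isdir(head):
        head, tail = os.path.split(head)
        to_create.insert(0, tail)
    return head or '.', to_create


if __name__ == '__main__':
    print(split_pre_existing_dir('/tmp/a/b/c'))
    # e.g. ('/tmp', ['a', 'b', 'c']) when only /tmp exists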
@ -10,7 +10,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = r''' DOCUMENTATION = r"""
--- ---
module: cron module: cron
short_description: Manage cron.d and crontab entries short_description: Manage cron.d and crontab entries
@ -150,9 +150,9 @@ attributes:
platform: platform:
support: full support: full
platforms: posix platforms: posix
''' """
EXAMPLES = r''' EXAMPLES = r"""
- name: Ensure a job that runs at 2 and 5 exists. Creates an entry like "0 5,2 * * ls -alh > /dev/null" - name: Ensure a job that runs at 2 and 5 exists. Creates an entry like "0 5,2 * * ls -alh > /dev/null"
ansible.builtin.cron: ansible.builtin.cron:
name: "check dirs" name: "check dirs"
@ -205,9 +205,9 @@ EXAMPLES = r'''
name: APP_HOME name: APP_HOME
env: yes env: yes
state: absent state: absent
''' """
RETURN = r'''#''' RETURN = r"""#"""
import os import os
import platform import platform

@ -4,7 +4,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = ''' DOCUMENTATION = """
author: 'Ansible Core Team (@ansible)' author: 'Ansible Core Team (@ansible)'
short_description: 'Add and remove deb822 formatted repositories' short_description: 'Add and remove deb822 formatted repositories'
description: description:
@ -145,9 +145,9 @@ options:
requirements: requirements:
- python3-debian / python-debian - python3-debian / python-debian
version_added: '2.15' version_added: '2.15'
''' """
EXAMPLES = ''' EXAMPLES = """
- name: Add debian repo - name: Add debian repo
deb822_repository: deb822_repository:
name: debian name: debian
@ -189,9 +189,9 @@ EXAMPLES = '''
components: stable components: stable
architectures: amd64 architectures: amd64
signed_by: https://download.example.com/linux/ubuntu/gpg signed_by: https://download.example.com/linux/ubuntu/gpg
''' """
RETURN = ''' RETURN = """
repo: repo:
description: A source string for the repository description: A source string for the repository
returned: always returned: always
@ -224,7 +224,7 @@ key_filename:
returned: always returned: always
type: str type: str
sample: /etc/apt/keyrings/debian.gpg sample: /etc/apt/keyrings/debian.gpg
''' """
import os import os
import re import re

@ -6,7 +6,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = r''' DOCUMENTATION = r"""
--- ---
module: debconf module: debconf
short_description: Configure a .deb package short_description: Configure a .deb package
@ -86,9 +86,9 @@ options:
default: false default: false
author: author:
- Brian Coca (@bcoca) - Brian Coca (@bcoca)
''' """
EXAMPLES = r''' EXAMPLES = r"""
- name: Set default locale to fr_FR.UTF-8 - name: Set default locale to fr_FR.UTF-8
ansible.builtin.debconf: ansible.builtin.debconf:
name: locales name: locales
@ -121,9 +121,9 @@ EXAMPLES = r'''
value: "{{ site_passphrase }}" value: "{{ site_passphrase }}"
vtype: password vtype: password
no_log: True no_log: True
''' """
RETURN = r'''#''' RETURN = r"""#"""
from ansible.module_utils.common.text.converters import to_text, to_native from ansible.module_utils.common.text.converters import to_text, to_native
from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.basic import AnsibleModule

@ -6,7 +6,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = r''' DOCUMENTATION = r"""
--- ---
module: debug module: debug
short_description: Print statements during execution short_description: Print statements during execution
@ -68,9 +68,9 @@ seealso:
author: author:
- Dag Wieers (@dagwieers) - Dag Wieers (@dagwieers)
- Michael DeHaan - Michael DeHaan
''' """
EXAMPLES = r''' EXAMPLES = r"""
- name: Print the gateway for each host when defined - name: Print the gateway for each host when defined
ansible.builtin.debug: ansible.builtin.debug:
msg: System {{ inventory_hostname }} has gateway {{ ansible_default_ipv4.gateway }} msg: System {{ inventory_hostname }} has gateway {{ ansible_default_ipv4.gateway }}
@ -95,4 +95,4 @@ EXAMPLES = r'''
msg: msg:
- "Provisioning based on YOUR_KEY which is: {{ lookup('ansible.builtin.env', 'YOUR_KEY') }}" - "Provisioning based on YOUR_KEY which is: {{ lookup('ansible.builtin.env', 'YOUR_KEY') }}"
- "These servers were built using the password of '{{ password_used }}'. Please retain this for later use." - "These servers were built using the password of '{{ password_used }}'. Please retain this for later use."
''' """

@ -9,7 +9,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = ''' DOCUMENTATION = """
--- ---
module: dnf module: dnf
version_added: 1.9 version_added: 1.9
@ -306,9 +306,9 @@ author:
- Cristian van Ee (@DJMuggs) <cristian at cvee.org> - Cristian van Ee (@DJMuggs) <cristian at cvee.org>
- Berend De Schouwer (@berenddeschouwer) - Berend De Schouwer (@berenddeschouwer)
- Adam Miller (@maxamillion) <admiller@redhat.com> - Adam Miller (@maxamillion) <admiller@redhat.com>
''' """
EXAMPLES = ''' EXAMPLES = """
- name: Install the latest version of Apache - name: Install the latest version of Apache
ansible.builtin.dnf: ansible.builtin.dnf:
name: httpd name: httpd
@ -394,7 +394,7 @@ EXAMPLES = '''
ansible.builtin.dnf: ansible.builtin.dnf:
name: '@postgresql/client' name: '@postgresql/client'
state: present state: present
''' """
import os import os
import sys import sys

@ -6,7 +6,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = ''' DOCUMENTATION = """
--- ---
module: dpkg_selections module: dpkg_selections
short_description: Dpkg package selection selections short_description: Dpkg package selection selections
@ -39,8 +39,8 @@ attributes:
platforms: debian platforms: debian
notes: notes:
- This module will not cause any packages to be installed/removed/purged, use the M(ansible.builtin.apt) module for that. - This module will not cause any packages to be installed/removed/purged, use the M(ansible.builtin.apt) module for that.
''' """
EXAMPLES = ''' EXAMPLES = """
- name: Prevent python from being upgraded - name: Prevent python from being upgraded
ansible.builtin.dpkg_selections: ansible.builtin.dpkg_selections:
name: python name: python
@ -50,7 +50,7 @@ EXAMPLES = '''
ansible.builtin.dpkg_selections: ansible.builtin.dpkg_selections:
name: python name: python
selection: install selection: install
''' """
from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.common.locale import get_best_parsable_locale from ansible.module_utils.common.locale import get_best_parsable_locale

@ -6,7 +6,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = r''' DOCUMENTATION = r"""
--- ---
module: expect module: expect
version_added: '2.0' version_added: '2.0'
@ -83,9 +83,9 @@ seealso:
- module: ansible.builtin.script - module: ansible.builtin.script
- module: ansible.builtin.shell - module: ansible.builtin.shell
author: "Matt Martz (@sivel)" author: "Matt Martz (@sivel)"
''' """
EXAMPLES = r''' EXAMPLES = r"""
- name: Case insensitive password string match - name: Case insensitive password string match
ansible.builtin.expect: ansible.builtin.expect:
command: passwd username command: passwd username
@ -116,7 +116,7 @@ EXAMPLES = r'''
- "{{ db_username }}" - "{{ db_username }}"
"Database password": "Database password":
- "{{ db_password }}" - "{{ db_password }}"
''' """
import datetime import datetime
import os import os

@ -6,7 +6,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = r''' DOCUMENTATION = r"""
--- ---
module: fail module: fail
short_description: Fail with custom message short_description: Fail with custom message
@ -52,11 +52,11 @@ seealso:
- module: ansible.builtin.meta - module: ansible.builtin.meta
author: author:
- Dag Wieers (@dagwieers) - Dag Wieers (@dagwieers)
''' """
EXAMPLES = r''' EXAMPLES = r"""
- name: Example using fail and when together - name: Example using fail and when together
ansible.builtin.fail: ansible.builtin.fail:
msg: The system may not be provisioned according to the CMDB status. msg: The system may not be provisioned according to the CMDB status.
when: cmdb_status != "to-be-staged" when: cmdb_status != "to-be-staged"
''' """

@ -8,7 +8,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = r''' DOCUMENTATION = r"""
--- ---
module: fetch module: fetch
short_description: Fetch files from remote nodes short_description: Fetch files from remote nodes
@ -95,9 +95,9 @@ seealso:
author: author:
- Ansible Core Team - Ansible Core Team
- Michael DeHaan - Michael DeHaan
''' """
EXAMPLES = r''' EXAMPLES = r"""
- name: Store file into /tmp/fetched/host.example.com/tmp/somefile - name: Store file into /tmp/fetched/host.example.com/tmp/somefile
ansible.builtin.fetch: ansible.builtin.fetch:
src: /tmp/somefile src: /tmp/somefile
@ -120,4 +120,4 @@ EXAMPLES = r'''
src: /tmp/uniquefile src: /tmp/uniquefile
dest: special/prefix-{{ inventory_hostname }} dest: special/prefix-{{ inventory_hostname }}
flat: yes flat: yes
''' """

@ -7,7 +7,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = r''' DOCUMENTATION = r"""
--- ---
module: file module: file
version_added: historical version_added: historical
@ -123,9 +123,9 @@ attributes:
author: author:
- Ansible Core Team - Ansible Core Team
- Michael DeHaan - Michael DeHaan
''' """
EXAMPLES = r''' EXAMPLES = r"""
- name: Change file ownership, group and permissions - name: Change file ownership, group and permissions
ansible.builtin.file: ansible.builtin.file:
path: /etc/foo.conf path: /etc/foo.conf
@ -214,8 +214,8 @@ EXAMPLES = r'''
path: /etc/foo path: /etc/foo
state: absent state: absent
''' """
RETURN = r''' RETURN = r"""
dest: dest:
description: Destination file/path, equal to the value passed to O(path). description: Destination file/path, equal to the value passed to O(path).
returned: O(state=touch), O(state=hard), O(state=link) returned: O(state=touch), O(state=hard), O(state=link)
@ -226,7 +226,7 @@ path:
returned: O(state=absent), O(state=directory), O(state=file) returned: O(state=absent), O(state=directory), O(state=file)
type: str type: str
sample: /path/to/file.txt sample: /path/to/file.txt
''' """
import errno import errno
import os import os
@ -296,7 +296,7 @@ def additional_parameter_handling(module):
def get_state(path): def get_state(path):
''' Find out current state ''' """ Find out current state """
b_path = to_bytes(path, errors='surrogate_or_strict') b_path = to_bytes(path, errors='surrogate_or_strict')
try: try:

@ -9,7 +9,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = r''' DOCUMENTATION = r"""
--- ---
module: find module: find
author: Brian Coca (@bcoca) author: Brian Coca (@bcoca)
@ -174,10 +174,10 @@ attributes:
platforms: posix platforms: posix
seealso: seealso:
- module: ansible.windows.win_find - module: ansible.windows.win_find
''' """
EXAMPLES = r''' EXAMPLES = r"""
- name: Recursively find /tmp files older than 2 days - name: Recursively find /tmp files older than 2 days
ansible.builtin.find: ansible.builtin.find:
paths: /tmp paths: /tmp
@ -246,9 +246,9 @@ EXAMPLES = r'''
use_regex: true use_regex: true
recurse: true recurse: true
limit: 1 limit: 1
''' """
RETURN = r''' RETURN = r"""
files: files:
description: All matches found with the specified criteria (see stat module for full output of each dictionary) description: All matches found with the specified criteria (see stat module for full output of each dictionary)
returned: success returned: success
@ -279,7 +279,7 @@ skipped_paths:
type: dict type: dict
sample: {"/laskdfj": "'/laskdfj' is not a directory"} sample: {"/laskdfj": "'/laskdfj' is not a directory"}
version_added: '2.12' version_added: '2.12'
''' """
import errno import errno
import fnmatch import fnmatch
@ -302,7 +302,7 @@ class _Object:
def pfilter(f, patterns=None, excludes=None, use_regex=False): def pfilter(f, patterns=None, excludes=None, use_regex=False):
'''filter using glob patterns''' """filter using glob patterns"""
if not patterns and not excludes: if not patterns and not excludes:
return True return True
@ -341,7 +341,7 @@ def pfilter(f, patterns=None, excludes=None, use_regex=False):
def agefilter(st, now, age, timestamp): def agefilter(st, now, age, timestamp):
'''filter files older than age''' """filter files older than age"""
if age is None: if age is None:
return True return True
elif age >= 0 and now - getattr(st, "st_%s" % timestamp) >= abs(age): elif age >= 0 and now - getattr(st, "st_%s" % timestamp) >= abs(age):
@ -352,7 +352,7 @@ def agefilter(st, now, age, timestamp):
def sizefilter(st, size): def sizefilter(st, size):
'''filter files greater than size''' """filter files greater than size"""
if size is None: if size is None:
return True return True
elif size >= 0 and st.st_size >= abs(size): elif size >= 0 and st.st_size >= abs(size):

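pfilter, agefilter, and sizefilter above filter candidates by glob pattern, age, and size. A standalone sketch of the name- and age-matching ideas; name_matches and older_than are invented helpers, and the /tmp scan below merely assumes a POSIX system.

import fnmatch
import os
import time

# Sketch of glob-pattern and age filtering -- not the Ansible find module code.
def name_matches(name, patterns):
    return any(fnmatch.fnmatch(name, p) for p in patterns)


def older_than(path, age_seconds, now=None):
    now = now or time.time()
    return now - os.stat(path).st_mtime >= age_seconds


if __name__ == '__main__':
    for entry in os.scandir('/tmp'):
        if entry.is_file() and name_matches(entry.name, ['*.log', '*.tmp']):
            if older_than(entry.path, 2 * 24 * 3600):   # older than 2 days
                print('candidate for cleanup:', entry.path)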
@ -5,7 +5,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = ''' DOCUMENTATION = """
--- ---
module: gather_facts module: gather_facts
version_added: 2.8 version_added: 2.8
@ -57,7 +57,7 @@ notes:
Order is not guaranteed, when doing parallel gathering on multiple modules. Order is not guaranteed, when doing parallel gathering on multiple modules.
author: author:
- "Ansible Core Team" - "Ansible Core Team"
''' """
RETURN = """ RETURN = """
# depends on the fact module called # depends on the fact module called

@ -6,7 +6,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = r''' DOCUMENTATION = r"""
--- ---
module: get_url module: get_url
short_description: Downloads files from HTTP, HTTPS, or FTP to node short_description: Downloads files from HTTP, HTTPS, or FTP to node
@ -219,9 +219,9 @@ seealso:
- module: ansible.windows.win_get_url - module: ansible.windows.win_get_url
author: author:
- Jan-Piet Mens (@jpmens) - Jan-Piet Mens (@jpmens)
''' """
EXAMPLES = r''' EXAMPLES = r"""
- name: Download foo.conf - name: Download foo.conf
ansible.builtin.get_url: ansible.builtin.get_url:
url: http://example.com/path/file.conf url: http://example.com/path/file.conf
@ -272,9 +272,9 @@ EXAMPLES = r'''
dest: /etc/foo.conf dest: /etc/foo.conf
username: bar username: bar
password: '{{ mysecret }}' password: '{{ mysecret }}'
''' """
RETURN = r''' RETURN = r"""
backup_file: backup_file:
description: name of backup file created after download description: name of backup file created after download
returned: changed and if backup=yes returned: changed and if backup=yes
@ -365,7 +365,7 @@ url:
returned: always returned: always
type: str type: str
sample: https://www.ansible.com/ sample: https://www.ansible.com/
''' """
import email.message import email.message
import os import os

@ -6,7 +6,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = r''' DOCUMENTATION = r"""
--- ---
module: getent module: getent
short_description: A wrapper to the unix getent utility short_description: A wrapper to the unix getent utility
@ -58,9 +58,9 @@ notes:
- Not all databases support enumeration, check system documentation for details. - Not all databases support enumeration, check system documentation for details.
author: author:
- Brian Coca (@bcoca) - Brian Coca (@bcoca)
''' """
EXAMPLES = ''' EXAMPLES = """
- name: Get root user info - name: Get root user info
ansible.builtin.getent: ansible.builtin.getent:
database: passwd database: passwd
@ -97,9 +97,9 @@ EXAMPLES = '''
- ansible.builtin.debug: - ansible.builtin.debug:
var: ansible_facts.getent_shadow var: ansible_facts.getent_shadow
''' """
RETURN = ''' RETURN = """
ansible_facts: ansible_facts:
description: Facts to add to ansible_facts. description: Facts to add to ansible_facts.
returned: always returned: always
@ -112,7 +112,7 @@ ansible_facts:
- Starting at 2.11 it now returns multiple duplicate entries, previously it only returned the last one - Starting at 2.11 it now returns multiple duplicate entries, previously it only returned the last one
returned: always returned: always
type: list type: list
''' """
import traceback import traceback

@ -6,7 +6,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = ''' DOCUMENTATION = """
--- ---
module: git module: git
author: author:
@ -236,9 +236,9 @@ notes:
one solution is to use the option accept_hostkey. Another solution is to one solution is to use the option accept_hostkey. Another solution is to
add the remote host public key in C(/etc/ssh/ssh_known_hosts) before calling add the remote host public key in C(/etc/ssh/ssh_known_hosts) before calling
the git module, with the following command: C(ssh-keyscan -H remote_host.com >> /etc/ssh/ssh_known_hosts)." the git module, with the following command: C(ssh-keyscan -H remote_host.com >> /etc/ssh/ssh_known_hosts)."
''' """
EXAMPLES = ''' EXAMPLES = """
- name: Git checkout - name: Git checkout
ansible.builtin.git: ansible.builtin.git:
repo: 'https://github.com/ansible/ansible.git' repo: 'https://github.com/ansible/ansible.git'
@ -295,9 +295,9 @@ EXAMPLES = '''
environment: environment:
GIT_TERMINAL_PROMPT: 0 # reports "terminal prompts disabled" on missing password GIT_TERMINAL_PROMPT: 0 # reports "terminal prompts disabled" on missing password
# or GIT_ASKPASS: /bin/true # for git before version 2.3.0, reports "Authentication failed" on missing password # or GIT_ASKPASS: /bin/true # for git before version 2.3.0, reports "Authentication failed" on missing password
''' """
RETURN = ''' RETURN = """
after: after:
description: Last commit revision of the repository retrieved during the update. description: Last commit revision of the repository retrieved during the update.
returned: success returned: success
@ -328,7 +328,7 @@ git_dir_before:
returned: success returned: success
type: str type: str
sample: /path/to/old/git/dir sample: /path/to/old/git/dir
''' """
import filecmp import filecmp
import os import os
@ -366,7 +366,7 @@ def relocate_repo(module, result, repo_dir, old_repo_dir, worktree_dir):
def head_splitter(headfile, remote, module=None, fail_on_error=False): def head_splitter(headfile, remote, module=None, fail_on_error=False):
'''Extract the head reference''' """Extract the head reference"""
# https://github.com/ansible/ansible-modules-core/pull/907 # https://github.com/ansible/ansible-modules-core/pull/907
res = None res = None
@ -429,11 +429,11 @@ def get_submodule_update_params(module, git_path, cwd):
def write_ssh_wrapper(module): def write_ssh_wrapper(module):
''' """
This writes a shell wrapper for ssh options to be used with git this is only relevant for older versions of git that cannot
this is only relevant for older versions of gitthat cannot this is only relevant for older versions of gitthat cannot
handle the options themselves. Returns path to the script handle the options themselves. Returns path to the script
''' """
try: try:
# make sure we have full permission to the module_dir, which # make sure we have full permission to the module_dir, which
# may not be the case if we're sudo'ing to a non-root user # may not be the case if we're sudo'ing to a non-root user
@ -466,10 +466,10 @@ def write_ssh_wrapper(module):
def set_git_ssh_env(key_file, ssh_opts, git_version, module): def set_git_ssh_env(key_file, ssh_opts, git_version, module):
''' """
use environment variables to configure git's ssh execution, use environment variables to configure git's ssh execution,
which varies by version but this function should handle all. which varies by version but this function should handle all.
''' """
# initialise to existing ssh opts and/or append user provided # initialise to existing ssh opts and/or append user provided
if ssh_opts is None: if ssh_opts is None:
@ -519,7 +519,7 @@ def set_git_ssh_env(key_file, ssh_opts, git_version, module):
def get_version(module, git_path, dest, ref="HEAD"): def get_version(module, git_path, dest, ref="HEAD"):
''' samples the version of the git repo ''' """ samples the version of the git repo """
cmd = "%s rev-parse %s" % (git_path, ref) cmd = "%s rev-parse %s" % (git_path, ref)
rc, stdout, stderr = module.run_command(cmd, cwd=dest) rc, stdout, stderr = module.run_command(cmd, cwd=dest)
@ -571,7 +571,7 @@ def get_submodule_versions(git_path, module, dest, version='HEAD'):
def clone(git_path, module, repo, dest, remote, depth, version, bare, def clone(git_path, module, repo, dest, remote, depth, version, bare,
reference, refspec, git_version_used, verify_commit, separate_git_dir, result, gpg_allowlist, single_branch): reference, refspec, git_version_used, verify_commit, separate_git_dir, result, gpg_allowlist, single_branch):
''' makes a new git repo if it does not already exist ''' """ makes a new git repo if it does not already exist """
dest_dirname = os.path.dirname(dest) dest_dirname = os.path.dirname(dest)
try: try:
os.makedirs(dest_dirname) os.makedirs(dest_dirname)
@ -653,17 +653,17 @@ def has_local_mods(module, git_path, dest, bare):
def reset(git_path, module, dest): def reset(git_path, module, dest):
''' """
Resets the index and working tree to HEAD. Resets the index and working tree to HEAD.
Discards any changes to tracked files in working Discards any changes to tracked files in working
tree since that commit. tree since that commit.
''' """
cmd = "%s reset --hard HEAD" % (git_path,) cmd = "%s reset --hard HEAD" % (git_path,)
return module.run_command(cmd, check_rc=True, cwd=dest) return module.run_command(cmd, check_rc=True, cwd=dest)
def get_diff(module, git_path, dest, repo, remote, depth, bare, before, after): def get_diff(module, git_path, dest, repo, remote, depth, bare, before, after):
''' Return the difference between 2 versions ''' """ Return the difference between 2 versions """
if before is None: if before is None:
return {'prepared': '>> Newly checked out %s' % after} return {'prepared': '>> Newly checked out %s' % after}
elif before != after: elif before != after:
@ -817,13 +817,13 @@ def get_repo_path(dest, bare):
def get_head_branch(git_path, module, dest, remote, bare=False): def get_head_branch(git_path, module, dest, remote, bare=False):
''' """
Determine what branch HEAD is associated with. This is partly Determine what branch HEAD is associated with. This is partly
taken from lib/ansible/utils/__init__.py. It finds the correct taken from lib/ansible/utils/__init__.py. It finds the correct
path to .git/HEAD and reads from that file the branch that HEAD is path to .git/HEAD and reads from that file the branch that HEAD is
associated with. In the case of a detached HEAD, this will look associated with. In the case of a detached HEAD, this will look
up the branch in .git/refs/remotes/<remote>/HEAD. up the branch in .git/refs/remotes/<remote>/HEAD.
''' """
try: try:
repo_path = get_repo_path(dest, bare) repo_path = get_repo_path(dest, bare)
except (IOError, ValueError) as err: except (IOError, ValueError) as err:
@ -845,7 +845,7 @@ def get_head_branch(git_path, module, dest, remote, bare=False):
def get_remote_url(git_path, module, dest, remote): def get_remote_url(git_path, module, dest, remote):
'''Return URL of remote source for repo.''' """Return URL of remote source for repo."""
command = [git_path, 'ls-remote', '--get-url', remote] command = [git_path, 'ls-remote', '--get-url', remote]
(rc, out, err) = module.run_command(command, cwd=dest) (rc, out, err) = module.run_command(command, cwd=dest)
if rc != 0: if rc != 0:
@ -856,7 +856,7 @@ def get_remote_url(git_path, module, dest, remote):
def set_remote_url(git_path, module, repo, dest, remote): def set_remote_url(git_path, module, repo, dest, remote):
''' updates repo from remote sources ''' """ updates repo from remote sources """
# Return if remote URL isn't changing. # Return if remote URL isn't changing.
remote_url = get_remote_url(git_path, module, dest, remote) remote_url = get_remote_url(git_path, module, dest, remote)
if remote_url == repo or unfrackgitpath(remote_url) == unfrackgitpath(repo): if remote_url == repo or unfrackgitpath(remote_url) == unfrackgitpath(repo):
@ -874,7 +874,7 @@ def set_remote_url(git_path, module, repo, dest, remote):
def fetch(git_path, module, repo, dest, version, remote, depth, bare, refspec, git_version_used, force=False): def fetch(git_path, module, repo, dest, version, remote, depth, bare, refspec, git_version_used, force=False):
''' updates repo from remote sources ''' """ updates repo from remote sources """
set_remote_url(git_path, module, repo, dest, remote) set_remote_url(git_path, module, repo, dest, remote)
commands = [] commands = []
@ -981,7 +981,7 @@ def submodules_fetch(git_path, module, remote, track_submodules, dest):
def submodule_update(git_path, module, dest, track_submodules, force=False): def submodule_update(git_path, module, dest, track_submodules, force=False):
''' init and update any submodules ''' """ init and update any submodules """
# get the valid submodule params # get the valid submodule params
params = get_submodule_update_params(module, git_path, dest) params = get_submodule_update_params(module, git_path, dest)

@ -6,7 +6,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = ''' DOCUMENTATION = """
--- ---
module: group module: group
version_added: "0.0.2" version_added: "0.0.2"
@ -91,9 +91,9 @@ seealso:
- module: ansible.windows.win_group - module: ansible.windows.win_group
author: author:
- Stephen Fromm (@sfromm) - Stephen Fromm (@sfromm)
''' """
EXAMPLES = ''' EXAMPLES = """
- name: Ensure group "somegroup" exists - name: Ensure group "somegroup" exists
ansible.builtin.group: ansible.builtin.group:
name: somegroup name: somegroup
@ -104,9 +104,9 @@ EXAMPLES = '''
name: docker name: docker
state: present state: present
gid: 1750 gid: 1750
''' """
RETURN = r''' RETURN = r"""
gid: gid:
description: Group ID of the group. description: Group ID of the group.
returned: When O(state) is C(present) returned: When O(state) is C(present)
@ -127,7 +127,7 @@ system:
returned: When O(state) is C(present) returned: When O(state) is C(present)
type: bool type: bool
sample: False sample: False
''' """
import grp import grp
import os import os

@ -7,7 +7,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = r''' DOCUMENTATION = r"""
--- ---
module: group_by module: group_by
short_description: Create Ansible groups based on facts short_description: Create Ansible groups based on facts
@ -65,9 +65,9 @@ seealso:
- module: ansible.builtin.add_host - module: ansible.builtin.add_host
author: author:
- Jeroen Hoekx (@jhoekx) - Jeroen Hoekx (@jhoekx)
''' """
EXAMPLES = r''' EXAMPLES = r"""
- name: Create groups based on the machine architecture - name: Create groups based on the machine architecture
ansible.builtin.group_by: ansible.builtin.group_by:
key: machine_{{ ansible_machine }} key: machine_{{ ansible_machine }}
@ -85,4 +85,4 @@ EXAMPLES = r'''
- name: Add all active hosts to a static group - name: Add all active hosts to a static group
ansible.builtin.group_by: ansible.builtin.group_by:
key: done key: done
''' """
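The pattern repeated across every module in this diff is the same: DOCUMENTATION, EXAMPLES, and RETURN are ordinary module-level string assignments, and only their delimiters change. A minimal sketch of the converted layout, using a hypothetical module name and fields rather than anything from the files above:

```python
# Hypothetical skeleton showing the post-conversion quoting style only;
# the module name and options are illustrative, not from this diff.
from __future__ import annotations

DOCUMENTATION = r"""
---
module: example_module
short_description: Illustrative placeholder
options:
  name:
    description: A sample option.
    type: str
    required: true
"""

EXAMPLES = r"""
- name: Call the hypothetical module
  example_module:
    name: demo
"""

RETURN = r"""
# This placeholder returns nothing of interest.
"""
```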

@ -7,7 +7,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = ''' DOCUMENTATION = """
--- ---
module: hostname module: hostname
author: author:
@ -52,9 +52,9 @@ attributes:
support: full support: full
platform: platform:
platforms: posix platforms: posix
''' """
EXAMPLES = ''' EXAMPLES = """
- name: Set a hostname - name: Set a hostname
ansible.builtin.hostname: ansible.builtin.hostname:
name: web01 name: web01
@ -63,7 +63,7 @@ EXAMPLES = '''
ansible.builtin.hostname: ansible.builtin.hostname:
name: web01 name: web01
use: systemd use: systemd
''' """
import os import os
import platform import platform

@ -6,7 +6,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = r''' DOCUMENTATION = r"""
--- ---
author: Ansible Core Team (@ansible) author: Ansible Core Team (@ansible)
module: import_playbook module: import_playbook
@ -42,9 +42,9 @@ seealso:
- module: ansible.builtin.include_tasks - module: ansible.builtin.include_tasks
- ref: playbooks_reuse - ref: playbooks_reuse
description: More information related to including and importing playbooks, roles and tasks. description: More information related to including and importing playbooks, roles and tasks.
''' """
EXAMPLES = r''' EXAMPLES = r"""
- hosts: localhost - hosts: localhost
tasks: tasks:
- ansible.builtin.debug: - ansible.builtin.debug:
@ -69,8 +69,8 @@ EXAMPLES = r'''
- name: This fails because I'm inside a play already - name: This fails because I'm inside a play already
ansible.builtin.import_playbook: stuff.yaml ansible.builtin.import_playbook: stuff.yaml
''' """
RETURN = r''' RETURN = r"""
# This module does not return anything except plays to execute. # This module does not return anything except plays to execute.
''' """

@ -5,7 +5,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = r''' DOCUMENTATION = r"""
--- ---
author: Ansible Core Team (@ansible) author: Ansible Core Team (@ansible)
module: import_role module: import_role
@ -87,9 +87,9 @@ seealso:
- module: ansible.builtin.include_tasks - module: ansible.builtin.include_tasks
- ref: playbooks_reuse - ref: playbooks_reuse
description: More information related to including and importing playbooks, roles and tasks. description: More information related to including and importing playbooks, roles and tasks.
''' """
EXAMPLES = r''' EXAMPLES = r"""
- hosts: all - hosts: all
tasks: tasks:
- ansible.builtin.import_role: - ansible.builtin.import_role:
@ -110,8 +110,8 @@ EXAMPLES = r'''
ansible.builtin.import_role: ansible.builtin.import_role:
name: myrole name: myrole
when: not idontwanttorun when: not idontwanttorun
''' """
RETURN = r''' RETURN = r"""
# This module does not return anything except tasks to execute. # This module does not return anything except tasks to execute.
''' """

@ -6,7 +6,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = r''' DOCUMENTATION = r"""
--- ---
author: Ansible Core Team (@ansible) author: Ansible Core Team (@ansible)
module: import_tasks module: import_tasks
@ -46,9 +46,9 @@ seealso:
- module: ansible.builtin.include_tasks - module: ansible.builtin.include_tasks
- ref: playbooks_reuse - ref: playbooks_reuse
description: More information related to including and importing playbooks, roles and tasks. description: More information related to including and importing playbooks, roles and tasks.
''' """
EXAMPLES = r''' EXAMPLES = r"""
- hosts: all - hosts: all
tasks: tasks:
- ansible.builtin.debug: - ansible.builtin.debug:
@ -69,8 +69,8 @@ EXAMPLES = r'''
- name: Apply conditional to all imported tasks - name: Apply conditional to all imported tasks
ansible.builtin.import_tasks: stuff.yaml ansible.builtin.import_tasks: stuff.yaml
when: hostvar is defined when: hostvar is defined
''' """
RETURN = r''' RETURN = r"""
# This module does not return anything except tasks to execute. # This module does not return anything except tasks to execute.
''' """

@ -6,7 +6,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = r''' DOCUMENTATION = r"""
--- ---
author: Ansible Core Team (@ansible) author: Ansible Core Team (@ansible)
module: include_role module: include_role
@ -92,9 +92,9 @@ seealso:
- module: ansible.builtin.include_tasks - module: ansible.builtin.include_tasks
- ref: playbooks_reuse - ref: playbooks_reuse
description: More information related to including and importing playbooks, roles and tasks. description: More information related to including and importing playbooks, roles and tasks.
''' """
EXAMPLES = r''' EXAMPLES = r"""
- ansible.builtin.include_role: - ansible.builtin.include_role:
name: myrole name: myrole
@ -131,8 +131,8 @@ EXAMPLES = r'''
- install - install
tags: tags:
- always - always
''' """
RETURN = r''' RETURN = r"""
# This module does not return anything except tasks to execute. # This module does not return anything except tasks to execute.
''' """

@ -6,7 +6,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = r''' DOCUMENTATION = r"""
--- ---
author: Ansible Core Team (@ansible) author: Ansible Core Team (@ansible)
module: include_tasks module: include_tasks
@ -50,9 +50,9 @@ seealso:
- module: ansible.builtin.include_role - module: ansible.builtin.include_role
- ref: playbooks_reuse - ref: playbooks_reuse
description: More information related to including and importing playbooks, roles and tasks. description: More information related to including and importing playbooks, roles and tasks.
''' """
EXAMPLES = r''' EXAMPLES = r"""
- hosts: all - hosts: all
tasks: tasks:
- ansible.builtin.debug: - ansible.builtin.debug:
@ -91,8 +91,8 @@ EXAMPLES = r'''
- install - install
tags: tags:
- always - always
''' """
RETURN = r''' RETURN = r"""
# This module does not return anything except tasks to execute. # This module does not return anything except tasks to execute.
''' """

@ -5,7 +5,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = r''' DOCUMENTATION = r"""
--- ---
author: Allen Sanabria (@linuxdynasty) author: Allen Sanabria (@linuxdynasty)
module: include_vars module: include_vars
@ -112,9 +112,9 @@ seealso:
- module: ansible.builtin.set_fact - module: ansible.builtin.set_fact
- ref: playbooks_delegation - ref: playbooks_delegation
description: More information related to task delegation. description: More information related to task delegation.
''' """
EXAMPLES = r''' EXAMPLES = r"""
- name: Include vars of stuff.yaml into the 'stuff' variable (2.2). - name: Include vars of stuff.yaml into the 'stuff' variable (2.2).
ansible.builtin.include_vars: ansible.builtin.include_vars:
file: stuff.yaml file: stuff.yaml
@ -179,9 +179,9 @@ EXAMPLES = r'''
- 'yaml' - 'yaml'
- 'yml' - 'yml'
- 'json' - 'json'
''' """
RETURN = r''' RETURN = r"""
ansible_facts: ansible_facts:
description: Variables that were included and their values description: Variables that were included and their values
returned: success returned: success
@ -193,4 +193,4 @@ ansible_included_var_files:
type: list type: list
sample: [ /path/to/file.json, /path/to/file.yaml ] sample: [ /path/to/file.json, /path/to/file.yaml ]
version_added: '2.4' version_added: '2.4'
''' """

@ -7,7 +7,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = r''' DOCUMENTATION = r"""
--- ---
module: iptables module: iptables
short_description: Modify iptables rules short_description: Modify iptables rules
@ -394,9 +394,9 @@ options:
type: bool type: bool
default: false default: false
version_added: "2.15" version_added: "2.15"
''' """
EXAMPLES = r''' EXAMPLES = r"""
- name: Block specific IP - name: Block specific IP
ansible.builtin.iptables: ansible.builtin.iptables:
chain: INPUT chain: INPUT
@ -543,7 +543,7 @@ EXAMPLES = r'''
- "443" - "443"
- "8081:8083" - "8081:8083"
jump: ACCEPT jump: ACCEPT
''' """
import re import re

@ -5,7 +5,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = r''' DOCUMENTATION = r"""
--- ---
module: known_hosts module: known_hosts
short_description: Add or remove a host from the C(known_hosts) file short_description: Add or remove a host from the C(known_hosts) file
@ -65,9 +65,9 @@ extends_documentation_fragment:
- action_common_attributes - action_common_attributes
author: author:
- Matthew Vernon (@mcv21) - Matthew Vernon (@mcv21)
''' """
EXAMPLES = r''' EXAMPLES = r"""
- name: Tell the host about our servers it might want to ssh to - name: Tell the host about our servers it might want to ssh to
ansible.builtin.known_hosts: ansible.builtin.known_hosts:
path: /etc/ssh/ssh_known_hosts path: /etc/ssh/ssh_known_hosts
@ -87,7 +87,7 @@ EXAMPLES = r'''
key: '[host1.example.com]:2222 ssh-rsa ASDeararAIUHI324324' # some key gibberish key: '[host1.example.com]:2222 ssh-rsa ASDeararAIUHI324324' # some key gibberish
path: /etc/ssh/ssh_known_hosts path: /etc/ssh/ssh_known_hosts
state: present state: present
''' """
# Makes sure public host keys are present or absent in the given known_hosts # Makes sure public host keys are present or absent in the given known_hosts
# file. # file.
@ -195,13 +195,13 @@ def enforce_state(module, params):
def sanity_check(module, host, key, sshkeygen): def sanity_check(module, host, key, sshkeygen):
'''Check supplied key is sensible """Check supplied key is sensible
host and key are parameters provided by the user; If the host host and key are parameters provided by the user; If the host
provided is inconsistent with the key supplied, then this function provided is inconsistent with the key supplied, then this function
quits, providing an error to the user. quits, providing an error to the user.
sshkeygen is the path to ssh-keygen, found earlier with get_bin_path sshkeygen is the path to ssh-keygen, found earlier with get_bin_path
''' """
# If no key supplied, we're doing a removal, and have nothing to check here. # If no key supplied, we're doing a removal, and have nothing to check here.
if not key: if not key:
return return
@ -232,7 +232,7 @@ def sanity_check(module, host, key, sshkeygen):
def search_for_host_key(module, host, key, path, sshkeygen): def search_for_host_key(module, host, key, path, sshkeygen):
'''search_for_host_key(module,host,key,path,sshkeygen) -> (found,replace_or_add,found_line) """search_for_host_key(module,host,key,path,sshkeygen) -> (found,replace_or_add,found_line)
Looks up host and keytype in the known_hosts file path; if it's there, looks to see Looks up host and keytype in the known_hosts file path; if it's there, looks to see
if one of those entries matches key. Returns: if one of those entries matches key. Returns:
@ -241,7 +241,7 @@ def search_for_host_key(module, host, key, path, sshkeygen):
found_line (int or None): the line where a key of the same type was found found_line (int or None): the line where a key of the same type was found
if found=False, then replace is always False. if found=False, then replace is always False.
sshkeygen is the path to ssh-keygen, found earlier with get_bin_path sshkeygen is the path to ssh-keygen, found earlier with get_bin_path
''' """
if os.path.exists(path) is False: if os.path.exists(path) is False:
return False, False, None return False, False, None
@ -304,14 +304,14 @@ def hash_host_key(host, key):
def normalize_known_hosts_key(key): def normalize_known_hosts_key(key):
''' """
Transform a key, either taken from a known_host file or provided by the Transform a key, either taken from a known_host file or provided by the
user, into a normalized form. user, into a normalized form.
The host part (which might include multiple hostnames or be hashed) gets The host part (which might include multiple hostnames or be hashed) gets
replaced by the provided host. Also, any spurious information gets removed replaced by the provided host. Also, any spurious information gets removed
from the end (like the username@host tag usually present in hostkeys, but from the end (like the username@host tag usually present in hostkeys, but
absent in known_hosts files) absent in known_hosts files)
''' """
key = key.strip() # trim trailing newline key = key.strip() # trim trailing newline
k = key.split() k = key.split()
d = dict() d = dict()

@ -8,7 +8,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = r''' DOCUMENTATION = r"""
--- ---
module: lineinfile module: lineinfile
short_description: Manage lines in text files short_description: Manage lines in text files
@ -152,9 +152,9 @@ author:
- Daniel Hokka Zakrissoni (@dhozac) - Daniel Hokka Zakrissoni (@dhozac)
- Ahti Kitsik (@ahtik) - Ahti Kitsik (@ahtik)
- Jose Angel Munoz (@imjoseangel) - Jose Angel Munoz (@imjoseangel)
''' """
EXAMPLES = r''' EXAMPLES = r"""
# NOTE: Before 2.3, option 'dest', 'destfile' or 'name' was used instead of 'path' # NOTE: Before 2.3, option 'dest', 'destfile' or 'name' was used instead of 'path'
- name: Ensure SELinux is set to enforcing mode - name: Ensure SELinux is set to enforcing mode
ansible.builtin.lineinfile: ansible.builtin.lineinfile:
@ -237,9 +237,9 @@ EXAMPLES = r'''
regexp: ^(host=).* regexp: ^(host=).*
line: \g<1>{{ hostname }} line: \g<1>{{ hostname }}
backrefs: yes backrefs: yes
''' """
RETURN = r'''#''' RETURN = r"""#"""
import os import os
import re import re
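Note that the conversion keeps the r prefix wherever the original had one; for docstrings like the lineinfile examples above, which embed backreferences such as \g<1>, the raw prefix is what lets the backslash sit there unescaped. A small standalone illustration (not taken from the diff):

```python
# The raw form keeps the backslash as written; the non-raw form needs it
# doubled (an unescaped \g would trigger an "invalid escape sequence"
# warning on recent CPython).
raw = r"""line: \g<1>{{ hostname }}"""
plain = """line: \\g<1>{{ hostname }}"""
assert raw == plain
```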

@ -6,7 +6,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = r''' DOCUMENTATION = r"""
module: meta module: meta
short_description: Execute Ansible 'actions' short_description: Execute Ansible 'actions'
version_added: '1.2' version_added: '1.2'
@ -78,9 +78,9 @@ seealso:
- module: ansible.builtin.fail - module: ansible.builtin.fail
author: author:
- Ansible Core Team - Ansible Core Team
''' """
EXAMPLES = r''' EXAMPLES = r"""
# Example showing flushing handlers on demand, not at end of play # Example showing flushing handlers on demand, not at end of play
- ansible.builtin.template: - ansible.builtin.template:
src: new.j2 src: new.j2
@ -126,4 +126,4 @@ EXAMPLES = r'''
when: when:
- ansible_distribution == 'CentOS' - ansible_distribution == 'CentOS'
- ansible_distribution_major_version == '6' - ansible_distribution_major_version == '6'
''' """

@ -7,7 +7,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = ''' DOCUMENTATION = """
--- ---
module: package module: package
version_added: 2.0 version_added: 2.0
@ -66,8 +66,8 @@ attributes:
notes: notes:
- While M(ansible.builtin.package) abstracts package managers to ease dealing with multiple distributions, package name often differs for the same software. - While M(ansible.builtin.package) abstracts package managers to ease dealing with multiple distributions, package name often differs for the same software.
''' """
EXAMPLES = ''' EXAMPLES = """
- name: Install ntpdate - name: Install ntpdate
ansible.builtin.package: ansible.builtin.package:
name: ntpdate name: ntpdate
@ -85,4 +85,4 @@ EXAMPLES = '''
- httpd - httpd
- mariadb-server - mariadb-server
state: latest state: latest
''' """

@ -6,7 +6,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = ''' DOCUMENTATION = """
module: package_facts module: package_facts
short_description: Package information as facts short_description: Package information as facts
description: description:
@ -67,9 +67,9 @@ attributes:
support: full support: full
platform: platform:
platforms: posix platforms: posix
''' """
EXAMPLES = ''' EXAMPLES = """
- name: Gather the package facts - name: Gather the package facts
ansible.builtin.package_facts: ansible.builtin.package_facts:
manager: auto manager: auto
@ -83,9 +83,9 @@ EXAMPLES = '''
msg: "{{ ansible_facts.packages['foobar'] | length }} versions of foobar are installed!" msg: "{{ ansible_facts.packages['foobar'] | length }} versions of foobar are installed!"
when: "'foobar' in ansible_facts.packages" when: "'foobar' in ansible_facts.packages"
''' """
RETURN = ''' RETURN = """
ansible_facts: ansible_facts:
description: Facts to add to ansible_facts. description: Facts to add to ansible_facts.
returned: always returned: always
@ -248,7 +248,7 @@ ansible_facts:
], ],
} }
} }
''' """
import re import re

@ -4,7 +4,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = ''' DOCUMENTATION = """
--- ---
module: pause module: pause
short_description: Pause playbook execution short_description: Pause playbook execution
@ -65,9 +65,9 @@ attributes:
notes: notes:
- Starting in 2.2, if you specify 0 or negative for minutes or seconds, it will wait for 1 second, previously it would wait indefinitely. - Starting in 2.2, if you specify 0 or negative for minutes or seconds, it will wait for 1 second, previously it would wait indefinitely.
- User input is not captured or echoed, regardless of echo setting, when minutes or seconds is specified. - User input is not captured or echoed, regardless of echo setting, when minutes or seconds is specified.
''' """
EXAMPLES = ''' EXAMPLES = """
- name: Pause for 5 minutes to build app cache - name: Pause for 5 minutes to build app cache
ansible.builtin.pause: ansible.builtin.pause:
minutes: 5 minutes: 5
@ -83,9 +83,9 @@ EXAMPLES = '''
ansible.builtin.pause: ansible.builtin.pause:
prompt: "Enter a secret" prompt: "Enter a secret"
echo: no echo: no
''' """
RETURN = ''' RETURN = """
user_input: user_input:
description: User input from interactive console description: User input from interactive console
returned: if no waiting time set returned: if no waiting time set
@ -116,4 +116,4 @@ echo:
returned: always returned: always
type: bool type: bool
sample: true sample: true
''' """

@ -7,7 +7,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = ''' DOCUMENTATION = """
--- ---
module: ping module: ping
version_added: historical version_added: historical
@ -41,9 +41,9 @@ seealso:
author: author:
- Ansible Core Team - Ansible Core Team
- Michael DeHaan - Michael DeHaan
''' """
EXAMPLES = ''' EXAMPLES = """
# Test we can logon to 'webservers' and execute python with json lib. # Test we can logon to 'webservers' and execute python with json lib.
# ansible webservers -m ansible.builtin.ping # ansible webservers -m ansible.builtin.ping
@ -53,15 +53,15 @@ EXAMPLES = '''
- name: Induce an exception to see what happens - name: Induce an exception to see what happens
ansible.builtin.ping: ansible.builtin.ping:
data: crash data: crash
''' """
RETURN = ''' RETURN = """
ping: ping:
description: Value provided with the O(data) parameter. description: Value provided with the O(data) parameter.
returned: success returned: success
type: str type: str
sample: pong sample: pong
''' """
from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.basic import AnsibleModule
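Because blocks such as DOCUMENTATION and RETURN are consumed as YAML, the surrounding Python quote style never reaches the parser; only the string's value matters, and that value is unchanged here. A quick standalone check (PyYAML is assumed to be available; the snippet is illustrative, not part of this change):

```python
import yaml  # PyYAML, assumed installed

single = '''
ping:
  description: Value provided with the O(data) parameter.
  returned: success
'''
double = """
ping:
  description: Value provided with the O(data) parameter.
  returned: success
"""

# The two literals produce the exact same string, so the parsed YAML is
# identical as well -- the delimiter style is invisible past this point.
assert single == double
assert yaml.safe_load(single) == yaml.safe_load(double)
```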

@ -6,7 +6,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = ''' DOCUMENTATION = """
--- ---
module: pip module: pip
short_description: Manages Python library dependencies short_description: Manages Python library dependencies
@ -145,9 +145,9 @@ requirements:
- setuptools or packaging - setuptools or packaging
author: author:
- Matt Wright (@mattupstate) - Matt Wright (@mattupstate)
''' """
EXAMPLES = ''' EXAMPLES = """
- name: Install bottle python package - name: Install bottle python package
ansible.builtin.pip: ansible.builtin.pip:
name: bottle name: bottle
@ -262,9 +262,9 @@ EXAMPLES = '''
vars: vars:
venv_dir: /tmp/pick-a-better-venv-path venv_dir: /tmp/pick-a-better-venv-path
venv_python: "{{ venv_dir }}/bin/python" venv_python: "{{ venv_dir }}/bin/python"
''' """
RETURN = ''' RETURN = """
cmd: cmd:
description: pip command used by the module description: pip command used by the module
returned: success returned: success
@ -290,7 +290,7 @@ virtualenv:
returned: success, if a virtualenv path was provided returned: success, if a virtualenv path was provided
type: str type: str
sample: "/tmp/virtualenv" sample: "/tmp/virtualenv"
''' """
import argparse import argparse
import os import os
@ -417,7 +417,7 @@ def _get_cmd_options(module, cmd):
def _get_packages(module, pip, chdir): def _get_packages(module, pip, chdir):
'''Return results of pip command to get packages.''' """Return results of pip command to get packages."""
# Try 'pip list' command first. # Try 'pip list' command first.
command = pip + ['list', '--format=freeze'] command = pip + ['list', '--format=freeze']
locale = get_best_parsable_locale(module) locale = get_best_parsable_locale(module)
@ -435,7 +435,7 @@ def _get_packages(module, pip, chdir):
def _is_present(module, req, installed_pkgs, pkg_command): def _is_present(module, req, installed_pkgs, pkg_command):
'''Return whether or not package is installed.''' """Return whether or not package is installed."""
for pkg in installed_pkgs: for pkg in installed_pkgs:
if '==' in pkg: if '==' in pkg:
pkg_name, pkg_version = pkg.split('==') pkg_name, pkg_version = pkg.split('==')

@ -6,7 +6,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = r''' DOCUMENTATION = r"""
--- ---
module: raw module: raw
short_description: Executes a low-down and dirty command short_description: Executes a low-down and dirty command
@ -70,9 +70,9 @@ seealso:
author: author:
- Ansible Core Team - Ansible Core Team
- Michael DeHaan - Michael DeHaan
''' """
EXAMPLES = r''' EXAMPLES = r"""
- name: Bootstrap a host without python2 installed - name: Bootstrap a host without python2 installed
ansible.builtin.raw: dnf install -y python2 python2-dnf libselinux-python ansible.builtin.raw: dnf install -y python2 python2-dnf libselinux-python
@ -86,4 +86,4 @@ EXAMPLES = r'''
- name: List user accounts on a Windows system - name: List user accounts on a Windows system
ansible.builtin.raw: Get-WmiObject -Class Win32_UserAccount ansible.builtin.raw: Get-WmiObject -Class Win32_UserAccount
''' """

@ -5,7 +5,7 @@
from __future__ import annotations from __future__ import annotations
DOCUMENTATION = r''' DOCUMENTATION = r"""
module: reboot module: reboot
short_description: Reboot a machine short_description: Reboot a machine
notes: notes:
@ -100,9 +100,9 @@ seealso:
author: author:
- Matt Davis (@nitzmahone) - Matt Davis (@nitzmahone)
- Sam Doran (@samdoran) - Sam Doran (@samdoran)
''' """
EXAMPLES = r''' EXAMPLES = r"""
- name: Unconditionally reboot the machine with all defaults - name: Unconditionally reboot the machine with all defaults
ansible.builtin.reboot: ansible.builtin.reboot:
@ -124,9 +124,9 @@ EXAMPLES = r'''
ansible.builtin.reboot: ansible.builtin.reboot:
msg: "Rebooting machine in 5 seconds" msg: "Rebooting machine in 5 seconds"
''' """
RETURN = r''' RETURN = r"""
rebooted: rebooted:
description: true if the machine was rebooted description: true if the machine was rebooted
returned: always returned: always
@ -137,4 +137,4 @@ elapsed:
returned: always returned: always
type: int type: int
sample: 23 sample: 23
''' """
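For reviewers who want to spot-check a quote-only change like this mechanically, comparing token streams while normalizing string literals is enough. The sketch below is my own illustration of that idea, limited to plain string literals (f-strings tokenize differently), and makes no claim about how this commit itself was checked:

```python
import ast
import io
import tokenize

def quote_only_change(old_src: str, new_src: str) -> bool:
    """Return True when two sources tokenize the same apart from string quoting."""
    old = tokenize.generate_tokens(io.StringIO(old_src).readline)
    new = tokenize.generate_tokens(io.StringIO(new_src).readline)
    # strict=True (Python 3.10+) fails fast if one stream is longer.
    for a, b in zip(old, new, strict=True):
        if a.type != b.type:
            return False
        if a.type == tokenize.STRING:
            # Compare the *values* of the literals, so ''' vs """ is ignored.
            if ast.literal_eval(a.string) != ast.literal_eval(b.string):
                return False
        elif a.string != b.string:
            return False
    return True

# Only the delimiters differ, so the check passes.
assert quote_only_change("x = '''hi'''\n", 'x = """hi"""\n')
```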
